Skip to content

Commit

Permalink
Merge pull request #87 from rv2931/ruff_fix_quality
Browse files Browse the repository at this point in the history
[01] Solve some Ruff exceptions in code quality
  • Loading branch information
RonanMorgan authored Mar 7, 2024
2 parents db47f32 + 1d25b30 commit bc8bd68
Show file tree
Hide file tree
Showing 15 changed files with 47 additions and 59 deletions.
File renamed without changes.
File renamed without changes.
8 changes: 6 additions & 2 deletions bloom/Trawlwatcher.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,10 +10,14 @@
layout="wide",
)

class EnvFileNotFoundError(FileNotFoundError):
    """Raised when the expected ``.env`` configuration file is missing."""

    def __init__(self, env_path: str) -> None:
        """Build the error message from the path that was searched.

        Args:
            env_path: Path where the ``.env`` file was expected to exist.
        """
        super().__init__(f"Couldn't find .env file at {env_path}")

# FILL IN YOUR CREDENTIALS .env file HERE !!
env_path = Path('.') / '.env.template'
env_path = Path('.') / '.env.template2'
if not env_path.is_file():
raise FileNotFoundError(f"Couldn't find .env file at {env_path.absolute()}")
raise EnvFileNotFoundError(env_path.absolute())
load_dotenv(env_path)

def local_css(file_name: str) -> None:
Expand Down
2 changes: 1 addition & 1 deletion bloom/alembic/init_script/load_amp_data.py
Original file line number Diff line number Diff line change
Expand Up @@ -34,7 +34,7 @@
engine = create_engine(db_url, echo=False)

df = pd.read_csv(
Path(os.path.dirname(__file__)).joinpath("../../data/zones_subset_02022024.csv"),
Path(__file__).parent.joinpath("../../data/zones_subset_02022024.csv"),
sep=",",
)

Expand Down
2 changes: 1 addition & 1 deletion bloom/alembic/init_script/load_positions_data.py
Original file line number Diff line number Diff line change
Expand Up @@ -31,7 +31,7 @@
engine = create_engine(db_url)

df = pd.read_csv(
Path(os.path.dirname(__file__)).joinpath("../../data/spire_positions_subset_02022024.csv"),
Path(__file__).parent.joinpath("../../data/spire_positions_subset_02022024.csv"),
sep=","
)

Expand Down
2 changes: 1 addition & 1 deletion bloom/alembic/init_script/load_vessels_data.py
Original file line number Diff line number Diff line change
Expand Up @@ -29,7 +29,7 @@
)
engine = create_engine(db_url)
df = pd.read_csv(
Path(os.path.dirname(__file__)).joinpath("../../data/chalutiers_pelagiques.csv"),
Path(__file__).parent.joinpath("../../data/chalutiers_pelagiques.csv"),
sep=";",
dtype={"loa": float, "IMO": str},
)
Expand Down
4 changes: 3 additions & 1 deletion bloom/alembic/versions/961cee5426d6_create_amp_table.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy import Inspector
import geoalchemy2

# revision identifiers, used by Alembic.
Expand All @@ -26,7 +27,8 @@ def upgrade() -> None:
sa.Column("iucn_cat", sa.String),
sa.Column("parent_iso", sa.String),
sa.Column("iso3", sa.String),
sa.Column("geometry", geoalchemy2.types.Geometry(geometry_type="GEOMETRY", srid=4326)),
sa.Column("geometry", geoalchemy2.types.Geometry(geometry_type="GEOMETRY",
srid=4326)),
sa.Column("benificiaries", sa.String)
)

Expand Down
14 changes: 8 additions & 6 deletions bloom/app.py
Original file line number Diff line number Diff line change
Expand Up @@ -34,17 +34,19 @@ def main() -> None:

if args.mode == ExecutionMode.LOCAL:
logger.info("Starting scraping with internal scheduler")
scheduler = PeriodicScheduler(
function=marine_traffic_usecase.scrap_vessels,
interval=SCRAP_INTERVAL,
)
# Commented as marine_traffic_usecase was commented before
# error F821 Undefined name `marine_traffic_usecase`
#scheduler = PeriodicScheduler(
# function=marine_traffic_usecase.scrap_vessels,
# interval=SCRAP_INTERVAL,
#)
spire_traffic_usecase.save_vessels(
spire_traffic_usecase.get_all_vessels(timestamp),
)
#marine_traffic_usecase.scrap_vessels(timestamp)
alert_usecase.generate_alerts(timestamp)
while True:
scheduler.start()
#while True:
# scheduler.start()
else:
logger.info("Starting scraping with external scheduler")
spire_traffic_usecase.save_vessels(
Expand Down
21 changes: 11 additions & 10 deletions bloom/bloom/config.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,19 +9,20 @@ class Settings(BaseSettings):
postgres_port = os.environ.get("POSTGRES_PORT")
postgres_db = os.environ.get("POSTGRES_DB")

print("db_url: ", "postgresql://"+postgres_user+":"+postgres_password+"@"+postgres_hostname+":"+postgres_port+"/"+postgres_db)
#print("db_url: ", f"postgresql://{postgres_user}:{postgres_password}@{postgres_hostname}:"
# f"{postgres_port}/{postgres_db}")

db_url = (
"postgresql://"
+ postgres_user
+ ":"
+ postgres_password
+ "@"
+ postgres_hostname
+ ":"
+ postgres_port
+ "/"
+ postgres_db
f"{postgres_user}"
":"
f"{postgres_password}"
"@"
f"{postgres_hostname}"
":"
f"{postgres_port}"
"/"
f"{postgres_db}"
)

srid: int = 4326
Expand Down
7 changes: 5 additions & 2 deletions bloom/bloom/domain/vessels/vessel_trajectory.py
Original file line number Diff line number Diff line change
Expand Up @@ -84,6 +84,10 @@ def mpas(self) -> []:
if hasattr(self, "_mpas"):
return self._mpas
return None

@mpas.setter
def mpas(self, value: list) -> None:
    """Store the marine-protected-areas list backing the ``mpas`` property.

    Args:
        value: List of marine protected areas to cache on the instance.
            (Original annotation was the invalid literal ``[]``; ``list``
            is the intended type.)
    """
    self._mpas = value

def get_closest_marine_protected_areas(self, radius: int = 100) -> None:
self._mpas, self._mpas_gdf = get_closest_marine_protected_areas(
Expand Down Expand Up @@ -121,8 +125,7 @@ def query(
assert len(filtered_data) > 0
filtered_vessel = VesselTrajectory(self.metadata, filtered_data.copy())

if hasattr(self, "_mpas"):
filtered_vessel._mpas = self._mpas
filtered_vessel.mpas(self.mpas())

filtered_vessel.positions.index = filtered_data.index

Expand Down
22 changes: 11 additions & 11 deletions bloom/bloom/infra/repositories/repository_alert.py
Original file line number Diff line number Diff line change
Expand Up @@ -23,36 +23,36 @@ def save_alerts(self, timestamp: datetime) -> None:
(
SELECT timestamp, vessel_id, (CAST(ST_Contains(mpa_fr_with_mn.geometry,current_position) AS INT) - CAST(ST_Contains(mpa_fr_with_mn.geometry,previous_position) AS INT)) as cross_mpa, ARRAY_AGG(mpa_fr_with_mn.index ORDER BY mpa_fr_with_mn.index DESC) AS mpa_ids FROM
(SELECT spire_vessel_positions.vessel_id AS vessel_id,
spire_vessel_positions.position AS current_position,
spire_vessel_positions.timestamp AS timestamp,
spire_vessel_positions.position AS current_position,
spire_vessel_positions.timestamp AS timestamp,
LAG(spire_vessel_positions.position) OVER (PARTITION BY spire_vessel_positions.vessel_id ORDER BY spire_vessel_positions.timestamp) AS previous_position
FROM spire_vessel_positions WHERE spire_vessel_positions.timestamp >= TIMESTAMP '{timestamp}' - INTERVAL '15 minutes' AND spire_vessel_positions.timestamp < TIMESTAMP '{timestamp}' + INTERVAL '15 minutes' ) AS foo
CROSS JOIN mpa_fr_with_mn WHERE previous_position IS NOT NULL and ST_Contains(mpa_fr_with_mn.geometry,current_position) != ST_Contains(mpa_fr_with_mn.geometry,previous_position) GROUP BY vessel_id, timestamp,cross_mpa
);
""", # nosec: B608
""", # nosec: B608 # noqa: E501
)
session.execute(sql)
session.commit()
return

# an other query with the same result :
# WITH cte_query1 AS (
# SELECT spire_vessel_positions.vessel_id AS vessel_id, ARRAY_AGG(mpa_fr_with_mn.index ORDER BY mpa_fr_with_mn.index DESC) AS mpa_ids
# SELECT spire_vessel_positions.vessel_id AS vessel_id, ARRAY_AGG(mpa_fr_with_mn.index ORDER BY mpa_fr_with_mn.index DESC) AS mpa_ids # noqa: E501
# FROM spire_vessel_positions
# JOIN mpa_fr_with_mn ON ST_Contains(mpa_fr_with_mn.geometry, spire_vessel_positions.position)
# WHERE spire_vessel_positions.timestamp = TO_TIMESTAMP('2023-11-17 12:00', 'YYYY-MM-DD HH24:MI')
# JOIN mpa_fr_with_mn ON ST_Contains(mpa_fr_with_mn.geometry, spire_vessel_positions.position) # noqa: E501
# WHERE spire_vessel_positions.timestamp = TO_TIMESTAMP('2023-11-17 12:00', 'YYYY-MM-DD HH24:MI') # noqa: E501
# GROUP BY vessel_id
# ),
# cte_query2 AS (
# SELECT DISTINCT spire_vessel_positions.vessel_id AS vessel_id, ARRAY_AGG(mpa_fr_with_mn.index ORDER BY mpa_fr_with_mn.index DESC) AS mpa_ids
# SELECT DISTINCT spire_vessel_positions.vessel_id AS vessel_id, ARRAY_AGG(mpa_fr_with_mn.index ORDER BY mpa_fr_with_mn.index DESC) AS mpa_ids # noqa: E501
# FROM spire_vessel_positions
# JOIN mpa_fr_with_mn ON ST_Contains(mpa_fr_with_mn.geometry, spire_vessel_positions.position)
# WHERE spire_vessel_positions.timestamp = TO_TIMESTAMP('2023-11-17 12:15', 'YYYY-MM-DD HH24:MI')
# JOIN mpa_fr_with_mn ON ST_Contains(mpa_fr_with_mn.geometry, spire_vessel_positions.position) # noqa: E501
# WHERE spire_vessel_positions.timestamp = TO_TIMESTAMP('2023-11-17 12:15', 'YYYY-MM-DD HH24:MI') # noqa: E501
# GROUP BY vessel_id
# )
# SELECT vessel_id, mpa_ids, -1 AS value FROM cte_query1 EXCEPT SELECT vessel_id, mpa_ids, -1 AS value FROM cte_query2
# SELECT vessel_id, mpa_ids, -1 AS value FROM cte_query1 EXCEPT SELECT vessel_id, mpa_ids, -1 AS value FROM cte_query2 # noqa: E501
# UNION ALL
# SELECT vessel_id, mpa_ids, 1 AS value FROM cte_query2 EXCEPT SELECT vessel_id, mpa_ids, 1 AS value FROM cte_query1
# SELECT vessel_id, mpa_ids, 1 AS value FROM cte_query2 EXCEPT SELECT vessel_id, mpa_ids, 1 AS value FROM cte_query1 # noqa: E501

def load_alert(self, timestamp: datetime) -> list[Alert]:
with self.session_factory() as session:
Expand Down
24 changes: 0 additions & 24 deletions bloom/requirements.txt

This file was deleted.

File renamed without changes.
File renamed without changes.
File renamed without changes.

0 comments on commit bc8bd68

Please sign in to comment.