Skip to content

Commit

Permalink
Merge pull request #474 from PnX-SI/feat/xy-comma
Browse files Browse the repository at this point in the history
X/Y geometries: allows comma as decimal separator
  • Loading branch information
camillemonchicourt authored Sep 13, 2023
2 parents 40a0b12 + 05149a9 commit 46a719d
Show file tree
Hide file tree
Showing 5 changed files with 36 additions and 70 deletions.
2 changes: 1 addition & 1 deletion backend/gn_module_import/checks/dataframe/geography.py
Original file line number Diff line number Diff line change
Expand Up @@ -36,7 +36,7 @@ def wkt_to_geometry(value):

def x_y_to_geometry(x, y):
    """Build a ``Point`` geometry from textual X/Y coordinates.

    Accepts both ``.`` and ``,`` as the decimal separator (French data
    exports commonly use a comma).

    :param x: longitude / easting as a string (or str-like) value
    :param y: latitude / northing as a string (or str-like) value
    :return: a ``Point(x, y)``, or ``None`` if either coordinate is
        missing or cannot be parsed as a number — the caller is expected
        to report the invalid row rather than have this helper raise.
    """
    try:
        # Normalize the decimal separator before float conversion so
        # "43,3056" parses the same as "43.3056".
        return Point(float(x.replace(",", ".")), float(y.replace(",", ".")))
    except Exception:
        # Any failure (None input, empty string, non-numeric text) means
        # "no geometry" rather than an error here.
        return None

Expand Down
64 changes: 11 additions & 53 deletions backend/gn_module_import/checks/sql/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -703,10 +703,11 @@ def complete_others_geom_columns(imprt, fields):

def check_is_valid_geography(imprt, fields):
if "WKT" in fields:
# It is useless to check valid WKT when created from X/Y
where_clause = sa.and_(
ImportSyntheseData.src_WKT != None,
ImportSyntheseData.src_WKT != "",
sa.not_(ST_IsValid(ST_GeomFromText(ImportSyntheseData.src_WKT))),
sa.not_(ST_IsValid(ImportSyntheseData.the_geom_4326)),
)
report_erroneous_rows(
imprt,
Expand All @@ -718,58 +719,15 @@ def check_is_valid_geography(imprt, fields):

def check_geography_outside(imprt, fields):
id_area = current_app.config["IMPORT"]["ID_AREA_RESTRICTION"]
where_clause = ()
if id_area:
local_srid = db.session.execute(sa.func.Find_SRID("ref_geo", "l_areas", "geom")).scalar()
area = LAreas.query.filter(LAreas.id_area == id_area).one()

lat_long_present = sa.and_(
ImportSyntheseData.src_longitude != None,
ImportSyntheseData.src_longitude != "",
ImportSyntheseData.src_latitude != None,
ImportSyntheseData.src_latitude != "",
)
WKT_present = sa.and_(ImportSyntheseData.src_WKT != None, ImportSyntheseData.src_WKT != "")

if "WKT" in fields:
where_clause = sa.and_(
WKT_present,
sa.not_(lat_long_present),
area.geom.ST_Intersects(
ST_Transform(
ST_GeomFromText(ImportSyntheseData.src_WKT, imprt.srid), local_srid
)
)
== False,
)
report_erroneous_rows(
imprt,
error_type="GEOMETRY_OUTSIDE",
error_column="WKT",
whereclause=where_clause,
)

if "longitude" in fields and "latitude" in fields:
where_clause = sa.and_(
sa.not_(WKT_present),
lat_long_present,
area.geom.ST_Intersects(
ST_Transform(
ST_SetSRID(
ST_MakePoint(
ImportSyntheseData.src_longitude.cast(sa.Float),
ImportSyntheseData.src_latitude.cast(sa.Float),
),
imprt.srid,
),
local_srid,
),
)
== False,
)
report_erroneous_rows(
imprt,
error_type="GEOMETRY_OUTSIDE",
error_column="longitude",
whereclause=where_clause,
)
report_erroneous_rows(
imprt,
error_type="GEOMETRY_OUTSIDE",
error_column="Champs géométriques",
whereclause=sa.and_(
ImportSyntheseData.valid == True,
ImportSyntheseData.the_geom_local.ST_Disjoint(area.geom),
),
)
2 changes: 1 addition & 1 deletion backend/gn_module_import/tasks.py
Original file line number Diff line number Diff line change
Expand Up @@ -104,8 +104,8 @@ def do_import_checks(self, import_id):
check_depths,
check_digital_proof_urls,
check_mandatory_fields,
check_geography_outside,
check_is_valid_geography,
check_geography_outside,
]
with start_sentry_child(op="check.sql", description="run all checks"):
for i, check in enumerate(sql_checks):
Expand Down
19 changes: 11 additions & 8 deletions backend/gn_module_import/tests/files/geom_file.csv
Original file line number Diff line number Diff line change
@@ -1,16 +1,19 @@
date_min;cd_nom;nom_cite;observateurs;WKT;latitude;longitude;codecommune;codedepartement;codemaille;erreur attendue
2017-01-01;67111;Ablette;Toto;;;;05101;;;Valide (codecommune)
2017-01-01;67111;Ablette;Toto;;;;13088;;;Valide (codecommune)
2017-01-01;67111;Ablette;Toto;;;;code com invalide;;;INVALID_ATTACHMENT_CODE (codecommune)
2017-01-01;67111;Ablette;Toto;;;;;05;;Valide (codedépartement)
2017-01-01;67111;Ablette;Toto;;;;;13;;Valide (codedépartement) # FIXME invalide altitude_min (bord de mer)
2017-01-01;67111;Ablette;Toto;;;;;code dep invalide;;INVALID_ATTACHMENT_CODE (codedepartement)
2017-01-01;67111;Ablette;Toto;;;;;;10kmL93E097N642;Valide (codemaille)
2017-01-01;67111;Ablette;Toto;;;;;;10kmL93E091N625;Valide (codemaille)
2017-01-01;67111;Ablette;Toto;;;;;;code maille invalide;INVALID_ATTACHMENT_CODE (codemaille)
2017-01-01;67111;Ablette;Toto;;;;05101;05;10kmL93E097N642;MULTIPLE_CODE_ATTACHMENT
2017-01-01;67111;Ablette;Toto;POINT(6.5 44.85);;;05101;05;10kmL93E097N642;Valide (WKT) (FIXME ?)
2017-01-01;67111;Ablette;Toto;;44.85;6.5;05101;05;10kmL93E097N642;Valide (X/Y) (FIXME ?)
2017-01-01;67111;Ablette;Toto;POINT(6.5 44.85);44.85;6.5;05101;05;10kmL93E097N642;MULTIPLE_ATTACHMENT_TYPE_CODE
2017-01-01;67111;Ablette;Toto;POINT(6.5 44.85);;;;;;Valide (WKT)
2017-01-01;67111;Ablette;Toto;;44.85;6.5;;;;Valide (X/Y)
2017-01-01;67111;Ablette;Toto;POINT(5.4877 43.3056);;;05101;05;10kmL93E097N642;Valide (WKT)
2017-01-01;67111;Ablette;Toto;;43.3056;5.4877;05101;05;10kmL93E097N642;Valide (X/Y)
2017-01-01;67111;Ablette;Toto;POINT(5.4877 43.3056);44.85;6.5;05101;05;10kmL93E097N642;MULTIPLE_ATTACHMENT_TYPE_CODE
2017-01-01;67111;Ablette;Toto;POINT(5.4877 43.3056);;;;;;Valide (WKT)
2017-01-01;67111;Ablette;Toto;;43.3056;5.4877;;;;Valide (X/Y)
2017-01-01;67111;Ablette;Toto;;43,3056;5,4877;;;;Valide (X/Y)
2017-01-01;67111;Ablette;Toto;POINT(6.5 44.85);44.85;6.5;;;;MULTIPLE_ATTACHMENT_TYPE_CODE
2017-01-01;67111;Ablette;Toto;;;;;;;NO-GEOM
2017-01-01;67111;Ablette;Toto;POLYGON((0 0, 1 1, 1 2, 1 1, 0 0));;;;;;INVALID_GEOMETRY
2017-01-01;67111;Ablette;Toto;POINT(6.5 44.85);;;;;;GEOMETRY_OUTSIDE
2017-01-01;67111;Ablette;Toto;;44.85;6.5;;;;GEOMETRY_OUTSIDE
19 changes: 12 additions & 7 deletions backend/gn_module_import/tests/test_imports.py
Original file line number Diff line number Diff line change
Expand Up @@ -141,6 +141,11 @@ def import_file_name():
return "valid_file.csv"


@pytest.fixture()
def autogenerate():
    """Default value for the ``autogenerate`` flag consumed by the
    ``fieldmapping`` fixture (whether autogenerated fields are mapped to
    ``True``).

    Tests override it with
    ``@pytest.mark.parametrize("autogenerate", [False])``.
    """
    return True


@pytest.fixture()
def import_dataset(datasets, import_file_name):
ds = datasets["own_dataset"]
Expand Down Expand Up @@ -199,12 +204,12 @@ def decoded_import(client, uploaded_import):


@pytest.fixture()
def fieldmapping(import_file_name):
def fieldmapping(import_file_name, autogenerate):
if import_file_name == "valid_file.csv":
return FieldMapping.query.filter_by(label="Synthese GeoNature").one().values
else:
return {
f.name_field: True
f.name_field: autogenerate
if f.autogenerated
else ([f.name_field, "cd_nom"] if f.multi else f.name_field)
for f in BibFields.query.filter_by(display=True)
Expand Down Expand Up @@ -938,6 +943,7 @@ def test_import_valid_file(self, users, datasets):
assert r.status_code == 200, r.data

@pytest.mark.parametrize("import_file_name", ["geom_file.csv"])
@pytest.mark.parametrize("autogenerate", [False])
def test_import_geometry_file(self, area_restriction, prepared_import):
assert_import_errors(
prepared_import,
Expand All @@ -949,12 +955,11 @@ def test_import_geometry_file(self, area_restriction, prepared_import):
(
"MULTIPLE_ATTACHMENT_TYPE_CODE",
"Champs géométriques",
frozenset([10, 13]),
frozenset([10, 14]),
),
("GEOMETRY_OUTSIDE", "WKT", frozenset([8, 11, 15])),
("GEOMETRY_OUTSIDE", "longitude", frozenset([9, 12])),
("NO-GEOM", "Champs géométriques", frozenset([14])),
("INVALID_GEOMETRY", "WKT", frozenset([15])),
("NO-GEOM", "Champs géométriques", frozenset([15])),
("INVALID_GEOMETRY", "WKT", frozenset([16])),
("GEOMETRY_OUTSIDE", "Champs géométriques", frozenset([17, 18])),
},
)

Expand Down

0 comments on commit 46a719d

Please sign in to comment.