Refactor with black (#226)
mattiagiupponi authored Feb 14, 2024
1 parent 666b818 commit d985ff9
Showing 17 changed files with 159 additions and 89 deletions.
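
For context, the changes below are the mechanical output of running the black code formatter over the importer package. A minimal sketch of an equivalent step follows, assuming black's default configuration; the commit does not record the exact command or settings, so the chosen file path and the use of black's Python API (rather than the usual `black importer/` command line) are illustrative assumptions.

# Minimal sketch (assumption): reformat one of the touched files with black's
# default style, equivalent in effect to running `black importer/` on the tree.
import black
from pathlib import Path

path = Path("importer/handlers/utils.py")
source = path.read_text()
formatted = black.format_str(source, mode=black.Mode())  # apply black's default style rules
path.write_text(formatted)

In practice a pass like this is usually wired into pre-commit or CI afterwards, so the tree stays formatted once the one-off refactor lands.
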
5 changes: 2 additions & 3 deletions importer/handlers/base.py
@@ -119,8 +119,7 @@ def fixup_name(self, name):
.replace(")", "")
.replace("(", "")
.replace(",", "")
.replace("&", "")
[:62]
.replace("&", "")[:62]
)

def extract_resource_to_publish(self, files, layer_name, alternate, **kwargs):
@@ -132,7 +131,7 @@ def extract_resource_to_publish(self, files, layer_name, alternate, **kwargs):
]
"""
return NotImplementedError

def overwrite_geoserver_resource(self, resource, catalog, store, workspace):
"""
Base method for override the geoserver resource. For vector file usually
26 changes: 18 additions & 8 deletions importer/handlers/common/raster.py
@@ -130,15 +130,21 @@ def publish_resources(resources: List[str], catalog, store, workspace):
raise e
return True

def overwrite_geoserver_resource(self, resource: List[str], catalog, store, workspace):
def overwrite_geoserver_resource(
self, resource: List[str], catalog, store, workspace
):
# we need to delete the resource before recreating it
self._delete_resource(resource, catalog, workspace)
self._delete_store(resource, catalog, workspace)
return self.publish_resources([resource], catalog, store, workspace)

def _delete_store(self, resource, catalog, workspace):
store = None
possible_layer_name = [resource.get("name"), resource.get("name").split(":")[-1], f"{workspace.name}:{resource.get('name')}"]
possible_layer_name = [
resource.get("name"),
resource.get("name").split(":")[-1],
f"{workspace.name}:{resource.get('name')}",
]
for el in possible_layer_name:
store = catalog.get_store(el, workspace=workspace)
if store:
@@ -149,7 +155,11 @@ def _delete_store(self, resource, catalog, workspace):

def _delete_resource(self, resource, catalog, workspace):
res = None
possible_layer_name = [resource.get("name"), resource.get("name").split(":")[-1], f"{workspace.name}:{resource.get('name')}"]
possible_layer_name = [
resource.get("name"),
resource.get("name").split(":")[-1],
f"{workspace.name}:{resource.get('name')}",
]
for el in possible_layer_name:
res = catalog.get_resource(el, workspace=workspace)
if res:
@@ -221,9 +231,9 @@ def extract_resource_to_publish(
return [
{
"name": alternate or layer_name,
"crs": self.identify_authority(layers)
if layers.GetSpatialRef()
else None,
"crs": (
self.identify_authority(layers) if layers.GetSpatialRef() else None
),
"raster_path": files.get("base_file"),
}
]
@@ -388,7 +398,7 @@ def overwrite_geonode_resource(
_overwrite = _exec.input_params.get("overwrite_existing_layer", False)
# if the layer exists, we just update the information of the dataset by
# let it recreate the catalogue

if dataset.exists() and _overwrite:
dataset = dataset.first()

@@ -511,7 +521,7 @@ def rollback(
):
steps = self.ACTIONS.get(action_to_rollback)
if rollback_from_step not in steps:
return
return
step_index = steps.index(rollback_from_step)
# the start_import, start_copy etc.. dont do anything as step, is just the start
# so there is nothing to rollback
20 changes: 14 additions & 6 deletions importer/handlers/common/tests_vector.py
@@ -226,7 +226,11 @@ def test_import_with_ogr2ogr_without_errors_should_call_the_right_command(

_open.assert_called_once()
_open.assert_called_with(
f'/usr/bin/ogr2ogr --config PG_USE_COPY YES -f PostgreSQL PG:" dbname=\'test_geonode_data\' host=' + os.getenv('DATABASE_HOST', 'localhost') + ' port=5432 user=\'geonode_data\' password=\'geonode_data\' " "' + self.valid_files.get("base_file") + '" -nln alternate "dataset"',
f"/usr/bin/ogr2ogr --config PG_USE_COPY YES -f PostgreSQL PG:\" dbname='test_geonode_data' host="
+ os.getenv("DATABASE_HOST", "localhost")
+ " port=5432 user='geonode_data' password='geonode_data' \" \""
+ self.valid_files.get("base_file")
+ '" -nln alternate "dataset"',
stdout=-1,
stderr=-1,
shell=True, # noqa
@@ -252,7 +256,11 @@ def test_import_with_ogr2ogr_with_errors_should_raise_exception(self, _open):

_open.assert_called_once()
_open.assert_called_with(
f'/usr/bin/ogr2ogr --config PG_USE_COPY YES -f PostgreSQL PG:" dbname=\'test_geonode_data\' host=' + os.getenv('DATABASE_HOST', 'localhost') + ' port=5432 user=\'geonode_data\' password=\'geonode_data\' " "' + self.valid_files.get("base_file") + '" -nln alternate "dataset"',
f"/usr/bin/ogr2ogr --config PG_USE_COPY YES -f PostgreSQL PG:\" dbname='test_geonode_data' host="
+ os.getenv("DATABASE_HOST", "localhost")
+ " port=5432 user='geonode_data' password='geonode_data' \" \""
+ self.valid_files.get("base_file")
+ '" -nln alternate "dataset"',
stdout=-1,
stderr=-1,
shell=True, # noqa
@@ -284,7 +292,7 @@ def test_import_with_ogr2ogr_without_errors_should_call_the_right_command_if_dum

_open.assert_called_once()
_call_as_string = _open.mock_calls[0][1][0]
self.assertTrue('-f PGDump /vsistdout/' in _call_as_string)
self.assertTrue('psql -d' in _call_as_string)
self.assertFalse('-f PostgreSQL PG' in _call_as_string)

self.assertTrue("-f PGDump /vsistdout/" in _call_as_string)
self.assertTrue("psql -d" in _call_as_string)
self.assertFalse("-f PostgreSQL PG" in _call_as_string)
21 changes: 10 additions & 11 deletions importer/handlers/common/vector.py
@@ -130,10 +130,7 @@ def publish_resources(resources: List[str], catalog, store, workspace):
jdbc_virtual_table=_resource.get("name"),
)
except Exception as e:
if (
f"Resource named {_resource} already exists in store:"
in str(e)
):
if f"Resource named {_resource} already exists in store:" in str(e):
logger.error(f"error during publishing: {e}")
continue
logger.error(f"error during publishing: {e}")
@@ -154,13 +151,13 @@ def create_ogr2ogr_command(files, original_name, ovverwrite_layer, alternate):
This is a default command that is needed to import a vector file
"""
_datastore = settings.DATABASES["datastore"]

options = "--config PG_USE_COPY YES"
copy_with_dump = ast.literal_eval(os.getenv("OGR2OGR_COPY_WITH_DUMP", "False"))

if copy_with_dump:
# use PGDump to load the dataset with ogr2ogr
options += ' -f PGDump /vsistdout/ '
options += " -f PGDump /vsistdout/ "
else:
# default option with postgres copy
options += (
@@ -520,11 +517,13 @@ def create_dynamic_model_fields(
ogr.GeometryTypeToName(layer.GetGeomType())
)
),
"dim": 2
if not ogr.GeometryTypeToName(layer.GetGeomType())
.lower()
.startswith("3d")
else 3,
"dim": (
2
if not ogr.GeometryTypeToName(layer.GetGeomType())
.lower()
.startswith("3d")
else 3
),
}
]

18 changes: 10 additions & 8 deletions importer/handlers/csv/handler.py
@@ -195,11 +195,13 @@ def create_dynamic_model_fields(
"name": layer.GetGeometryColumn()
or self.default_geometry_column_name,
"class_name": class_name,
"dim": 2
if not ogr.GeometryTypeToName(layer.GetGeomType())
.lower()
.startswith("3d")
else 3,
"dim": (
2
if not ogr.GeometryTypeToName(layer.GetGeomType())
.lower()
.startswith("3d")
else 3
),
}
]

@@ -241,9 +243,9 @@ def extract_resource_to_publish(
return [
{
"name": alternate or layer_name,
"crs": self.identify_authority(_l)
if _l.GetSpatialRef()
else "EPSG:4326",
"crs": (
self.identify_authority(_l) if _l.GetSpatialRef() else "EPSG:4326"
),
}
for _l in layers
if self.fixup_name(_l.GetName()) == layer_name
6 changes: 5 additions & 1 deletion importer/handlers/csv/tests.py
@@ -165,7 +165,11 @@ def test_import_with_ogr2ogr_without_errors_should_call_the_right_command(

_open.assert_called_once()
_open.assert_called_with(
f'/usr/bin/ogr2ogr --config PG_USE_COPY YES -f PostgreSQL PG:" dbname=\'test_geonode_data\' host=' + os.getenv('DATABASE_HOST', 'localhost') + ' port=5432 user=\'geonode_data\' password=\'geonode_data\' " "' + self.valid_csv + '" -nln alternate "dataset" -oo KEEP_GEOM_COLUMNS=NO -lco GEOMETRY_NAME=geometry -oo "GEOM_POSSIBLE_NAMES=geom*,the_geom*,wkt_geom" -oo "X_POSSIBLE_NAMES=x,long*" -oo "Y_POSSIBLE_NAMES=y,lat*"',
f"/usr/bin/ogr2ogr --config PG_USE_COPY YES -f PostgreSQL PG:\" dbname='test_geonode_data' host="
+ os.getenv("DATABASE_HOST", "localhost")
+ " port=5432 user='geonode_data' password='geonode_data' \" \""
+ self.valid_csv
+ '" -nln alternate "dataset" -oo KEEP_GEOM_COLUMNS=NO -lco GEOMETRY_NAME=geometry -oo "GEOM_POSSIBLE_NAMES=geom*,the_geom*,wkt_geom" -oo "X_POSSIBLE_NAMES=x,long*" -oo "Y_POSSIBLE_NAMES=y,lat*"',
stdout=-1,
stderr=-1,
shell=True, # noqa
6 changes: 5 additions & 1 deletion importer/handlers/geojson/tests.py
@@ -131,7 +131,11 @@ def test_import_with_ogr2ogr_without_errors_should_call_the_right_command(

_open.assert_called_once()
_open.assert_called_with(
f'/usr/bin/ogr2ogr --config PG_USE_COPY YES -f PostgreSQL PG:" dbname=\'test_geonode_data\' host=' + os.getenv('DATABASE_HOST', 'localhost') + ' port=5432 user=\'geonode_data\' password=\'geonode_data\' " "' + self.valid_files.get("base_file") + '" -nln alternate "dataset" -lco GEOMETRY_NAME=geometry',
f"/usr/bin/ogr2ogr --config PG_USE_COPY YES -f PostgreSQL PG:\" dbname='test_geonode_data' host="
+ os.getenv("DATABASE_HOST", "localhost")
+ " port=5432 user='geonode_data' password='geonode_data' \" \""
+ self.valid_files.get("base_file")
+ '" -nln alternate "dataset" -lco GEOMETRY_NAME=geometry',
stdout=-1,
stderr=-1,
shell=True, # noqa
5 changes: 1 addition & 4 deletions importer/handlers/gpkg/tests.py
@@ -56,10 +56,7 @@ def test_is_valid_should_raise_exception_if_the_gpkg_is_invalid(self):
self.handler.is_valid(files=self.invalid_files, user=self.user)

self.assertIsNotNone(_exc)
self.assertTrue(
"Error layer: INVALID LAYER_name"
in str(_exc.exception.detail)
)
self.assertTrue("Error layer: INVALID LAYER_name" in str(_exc.exception.detail))

def test_is_valid_should_raise_exception_if_the_parallelism_is_met(self):
parallelism, created = UploadParallelismLimit.objects.get_or_create(
18 changes: 11 additions & 7 deletions importer/handlers/shapefile/handler.py
@@ -123,9 +123,11 @@ def is_valid(files, user):
is_valid = all(
map(
lambda x: any(
_ext.endswith(f"{_filename}.{x}")
if isinstance(_ext, str)
else _ext.name.endswith(f"{_filename}.{x}")
(
_ext.endswith(f"{_filename}.{x}")
if isinstance(_ext, str)
else _ext.name.endswith(f"{_filename}.{x}")
)
for _ext in files.values()
),
_shp_ext_needed,
@@ -156,22 +158,24 @@ def create_ogr2ogr_command(files, original_name, ovverwrite_layer, alternate):
encoding = ShapeFileHandler._get_encoding(files)

additional_options = []
if layer is not None and "Point" not in ogr.GeometryTypeToName(layer.GetGeomType()):
if layer is not None and "Point" not in ogr.GeometryTypeToName(
layer.GetGeomType()
):
additional_options.append("-nlt PROMOTE_TO_MULTI")
if encoding:
additional_options.append(f"-lco ENCODING={encoding}")

return (
f"{base_command } -lco precision=no -lco GEOMETRY_NAME={BaseVectorFileHandler().default_geometry_column_name} "
+ " ".join(additional_options)
)

@staticmethod
def _get_encoding(files):
if files.get("cpg_file"):
# prefer cpg file which is handled by gdal
return None

encoding = None
if files.get("cst_file"):
# GeoServer exports cst-file
14 changes: 9 additions & 5 deletions importer/handlers/shapefile/tests.py
@@ -116,11 +116,11 @@ def test_should_create_ogr2ogr_command_with_encoding_from_cst(self):
shp_with_cst = self.valid_shp.copy()
cst_file = self.valid_shp["base_file"].replace("shp", "cst")
shp_with_cst["cst_file"] = cst_file
patch_location = 'importer.handlers.shapefile.handler.open'
with patch(patch_location, new=mock_open(read_data='UTF-8')) as _file:
patch_location = "importer.handlers.shapefile.handler.open"
with patch(patch_location, new=mock_open(read_data="UTF-8")) as _file:
actual = self.handler.create_ogr2ogr_command(shp_with_cst, "a", False, "a")
_file.assert_called_once_with(cst_file, 'r')

_file.assert_called_once_with(cst_file, "r")
self.assertIn("ENCODING=UTF-8", actual)

@patch("importer.handlers.common.vector.Popen")
@@ -148,7 +148,7 @@ def test_import_with_ogr2ogr_without_errors_should_call_the_right_command(

_open.assert_called_once()
_open.assert_called_with(
f'/usr/bin/ogr2ogr --config PG_USE_COPY YES -f PostgreSQL PG:" dbname=\'test_geonode_data\' host=' + os.getenv('DATABASE_HOST', 'localhost') + ' port=5432 user=\'geonode_data\' password=\'geonode_data\' " "' + self.valid_shp.get("base_file") + '" -nln alternate "dataset" -lco precision=no -lco GEOMETRY_NAME=geometry ',
f"/usr/bin/ogr2ogr --config PG_USE_COPY YES -f PostgreSQL PG:\" dbname='test_geonode_data' host="
+ os.getenv("DATABASE_HOST", "localhost")
+ " port=5432 user='geonode_data' password='geonode_data' \" \""
+ self.valid_shp.get("base_file")
+ '" -nln alternate "dataset" -lco precision=no -lco GEOMETRY_NAME=geometry ',
stdout=-1,
stderr=-1,
shell=True, # noqa
4 changes: 3 additions & 1 deletion importer/handlers/utils.py
@@ -72,7 +72,9 @@ def create_alternate(layer_name, execution_id):
"""
_hash = hashlib.md5(f"{layer_name}_{execution_id}".encode("utf-8")).hexdigest()
alternate = f"{layer_name}_{_hash}"
if len(alternate) > 63: # 63 is the max table lengh in postgres to stay safe, we cut at 12
if (
len(alternate) > 63
): # 63 is the max table lengh in postgres to stay safe, we cut at 12
return f"{layer_name[:50]}{_hash[:12]}"
return alternate

5 changes: 4 additions & 1 deletion importer/migrations/0005_fixup_dynamic_shema_table_names.py
@@ -12,7 +12,10 @@ def fixup_table_name(apps, schema_editor):
The dynamic model should exists to apply the migration.
In case it does not exists we can skip it
"""
if 'dynamic_models_modelschema' in schema_editor.connection.introspection.table_names():
if (
"dynamic_models_modelschema"
in schema_editor.connection.introspection.table_names()
):
schema = apps.get_model("dynamic_models", "ModelSchema")
for val in schema.objects.all():
if val.name != val.db_table_name:
1 change: 0 additions & 1 deletion importer/models.py
@@ -30,7 +30,6 @@ def delete_dynamic_model(instance, sender, **kwargs):


class ResourceHandlerInfo(models.Model):

"""
Here we save the relation between the geonode resource created and the handler that created that resource
"""
(The remaining changed files are not shown here.)