# Issue #381
* adds another validity check to SimpleGeomForm (is_size_valid) to make sure the area of the entered geometry is plausible (>= 1 m²)
* optimizes performance of the django command sanitize_db
* extends the Geometry model with two new attributes holding timestamps for when a parcel calculation started and ended
* finally drops the unused update_parcels_wfs in favor of update_parcels in the Geometry model
* refactors the update_parcels method
* adds a geometry buffer fallback in schneider/fetcher.py to avoid emptying geometries when parcels are fetched
* finally removes utils/wfs/spatial.py
* extends GeomParcelsView according to #381
* updates translations
* removes the redundant psycopg2-binary requirement
Parent: d911f4a3a3
Commit: 50bd6feb89
```diff
@@ -98,12 +98,14 @@ class SimpleGeomForm(BaseForm):
 
             if g.geom_type not in accepted_ogr_types:
                 self.add_error("geom", _("Only surfaces allowed. Points or lines must be buffered."))
-                is_valid = False
+                is_valid &= False
                 return is_valid
 
+            is_valid &= self.__is_size_valid(g)
+
             polygon = Polygon.from_ewkt(g.ewkt)
-            is_valid = polygon.valid
-            if not is_valid:
+            is_valid &= polygon.valid
+            if not polygon.valid:
                 self.add_error("geom", polygon.valid_reason)
                 return is_valid
 
```
```diff
@@ -137,6 +139,24 @@ class SimpleGeomForm(BaseForm):
 
         return num_vertices <= GEOM_MAX_VERTICES
 
+    def __is_size_valid(self, geom: gdal.OGRGeometry):
+        """ Checks whether the area of the geometry is large enough (>= 1 m²)
+
+        Returns:
+
+        """
+        is_area_valid = geom.area > 1  # > 1m² (SRID:25832)
+
+        if not is_area_valid:
+            self.add_error(
+                "geom",
+                _("Geometry must be greater than 1m². Currently is {}m²").format(
+                    float(geom.area)
+                )
+            )
+
+        return is_area_valid
+
     def __simplify_geometry(self, geom, max_vert: int):
         """ Simplifies a geometry
 
```
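For context, the check relies on the coordinates of the projected SRS used here (EPSG:25832) being metres, so `OGRGeometry.area` is already in square metres. A minimal standalone sketch of the same idea (function name and sample coordinates are illustrative, not taken from the repository):

```python
from django.contrib.gis.gdal import OGRGeometry

def is_size_valid(geom: OGRGeometry, min_area_m2: float = 1.0) -> bool:
    # Planar area; with metre-based coordinates (e.g. EPSG:25832) this is m².
    return geom.area > min_area_m2

square = OGRGeometry("POLYGON((0 0, 2 0, 2 2, 0 2, 0 0))")          # 4 m²
sliver = OGRGeometry("POLYGON((0 0, 0.5 0, 0.5 0.5, 0 0.5, 0 0))")  # 0.25 m²
print(is_size_valid(square), is_size_valid(sliver))  # True False
```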
```diff
@@ -61,14 +61,24 @@ class Command(BaseKonovaCommand):
             action=UserAction.CREATED
         )
 
-        intervention_log_entries_ids = self.get_all_log_entries_ids(Intervention)
-        attached_log_entries_id = intervention_log_entries_ids.union(
-            self.get_all_log_entries_ids(Compensation),
-            self.get_all_log_entries_ids(EcoAccount),
-            self.get_all_log_entries_ids(Ema),
-        )
+        EIV_log_entries_ids = self.get_all_log_entries_ids(Intervention)
+        self._write_warning(f" EIV: {EIV_log_entries_ids.count()} attached log entries")
+        KOM_log_entries_ids = self.get_all_log_entries_ids(Compensation)
+        self._write_warning(f" KOM: {KOM_log_entries_ids.count()} attached log entries")
+        OEK_log_entries_ids = self.get_all_log_entries_ids(EcoAccount)
+        self._write_warning(f" OEK: {OEK_log_entries_ids.count()} attached log entries")
+        EMA_log_entries_ids = self.get_all_log_entries_ids(Ema)
+        self._write_warning(f" EMA: {EMA_log_entries_ids.count()} attached log entries")
 
-        unattached_log_entries = all_log_entries.exclude(id__in=attached_log_entries_id)
+        unattached_log_entries = all_log_entries.exclude(
+            id__in=EIV_log_entries_ids
+        ).exclude(
+            id__in=KOM_log_entries_ids
+        ).exclude(
+            id__in=OEK_log_entries_ids
+        ).exclude(
+            id__in=EMA_log_entries_ids
+        )
 
         num_entries = unattached_log_entries.count()
         if num_entries > 0:
```
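The commit message attributes the sanitize_db speed-up to this restructuring: instead of UNION-ing the per-model id querysets and excluding against the combined set, each source now contributes its own chained NOT-IN subquery. Whether that wins depends on the database plan, but the pattern itself is generic; a hedged sketch (model and field names are placeholders, not from the repository):

```python
from django.db.models import QuerySet

def unattached(all_objects: QuerySet, *attached_id_querysets: QuerySet) -> QuerySet:
    """Exclude every id queryset via its own subquery instead of one UNION-ed id set."""
    qs = all_objects
    for ids in attached_id_querysets:
        qs = qs.exclude(id__in=ids)
    return qs  # still lazy; evaluated once .count() or iteration happens
```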
```diff
@@ -108,13 +118,20 @@ class Command(BaseKonovaCommand):
         self._write_warning("=== Sanitize compensation actions ===")
         all_actions = CompensationAction.objects.all()
 
-        compensation_action_ids = self.get_all_action_ids(Compensation)
-        attached_action_ids = compensation_action_ids.union(
-            self.get_all_action_ids(EcoAccount),
-            self.get_all_action_ids(Ema),
-        )
+        kom_action_ids = self.get_all_action_ids(Compensation)
+        self._write_warning(f" KOM: {kom_action_ids.count()} attached actions")
+        oek_action_ids = self.get_all_action_ids(EcoAccount)
+        self._write_warning(f" OEK: {oek_action_ids.count()} attached actions")
+        ema_action_ids = self.get_all_action_ids(Ema)
+        self._write_warning(f" EMA: {ema_action_ids.count()} attached actions")
 
-        unattached_actions = all_actions.exclude(id__in=attached_action_ids)
+        unattached_actions = all_actions.exclude(
+            id__in=kom_action_ids
+        ).exclude(
+            id__in=oek_action_ids
+        ).exclude(
+            id__in=ema_action_ids
+        )
 
         num_entries = unattached_actions.count()
         if num_entries > 0:
```
```diff
@@ -125,7 +142,7 @@ class Command(BaseKonovaCommand):
             self._write_success("No unattached actions found.")
         self._break_line()
 
-    def get_all_deadline_ids(self, cls):
+    def _get_all_deadline_ids(self, cls):
        """ Getter for all deadline ids of a model
 
        Args:
```
```diff
@@ -154,13 +171,20 @@ class Command(BaseKonovaCommand):
         self._write_warning("=== Sanitize deadlines ===")
         all_deadlines = Deadline.objects.all()
 
-        compensation_deadline_ids = self.get_all_deadline_ids(Compensation)
-        attached_deadline_ids = compensation_deadline_ids.union(
-            self.get_all_deadline_ids(EcoAccount),
-            self.get_all_deadline_ids(Ema),
-        )
+        kom_deadline_ids = self._get_all_deadline_ids(Compensation)
+        self._write_warning(f" KOM: {kom_deadline_ids.count()} attached deadlines")
+        oek_deadline_ids = self._get_all_deadline_ids(EcoAccount)
+        self._write_warning(f" OEK: {oek_deadline_ids.count()} attached deadlines")
+        ema_deadline_ids = self._get_all_deadline_ids(Ema)
+        self._write_warning(f" EMA: {ema_deadline_ids.count()} attached deadlines")
 
-        unattached_deadlines = all_deadlines.exclude(id__in=attached_deadline_ids)
+        unattached_deadlines = all_deadlines.exclude(
+            id__in=kom_deadline_ids
+        ).exclude(
+            id__in=oek_deadline_ids
+        ).exclude(
+            id__in=ema_deadline_ids
+        )
 
         num_entries = unattached_deadlines.count()
         if num_entries > 0:
```
```diff
@@ -171,7 +195,7 @@ class Command(BaseKonovaCommand):
             self._write_success("No unattached deadlines found.")
         self._break_line()
 
-    def get_all_geometry_ids(self, cls):
+    def _get_all_geometry_ids(self, cls):
        """ Getter for all geometry ids of a model
 
        Args:
```
```diff
@@ -200,14 +224,24 @@ class Command(BaseKonovaCommand):
         self._write_warning("=== Sanitize geometries ===")
         all_geometries = Geometry.objects.all()
 
-        compensation_geometry_ids = self.get_all_geometry_ids(Compensation)
-        attached_geometry_ids = compensation_geometry_ids.union(
-            self.get_all_geometry_ids(Intervention),
-            self.get_all_geometry_ids(EcoAccount),
-            self.get_all_geometry_ids(Ema),
-        )
+        kom_geometry_ids = self._get_all_geometry_ids(Compensation)
+        self._write_warning(f" KOM: {kom_geometry_ids.count()} attached geometries")
+        eiv_geometry_ids = self._get_all_geometry_ids(Intervention)
+        self._write_warning(f" EIV: {eiv_geometry_ids.count()} attached geometries")
+        oek_geometry_ids = self._get_all_geometry_ids(EcoAccount)
+        self._write_warning(f" OEK: {oek_geometry_ids.count()} attached geometries")
+        ema_geometry_ids = self._get_all_geometry_ids(Ema)
+        self._write_warning(f" EMA: {ema_geometry_ids.count()} attached geometries")
 
-        unattached_geometries = all_geometries.exclude(id__in=attached_geometry_ids)
+        unattached_geometries = all_geometries.exclude(
+            id__in=kom_geometry_ids
+        ).exclude(
+            id__in=eiv_geometry_ids
+        ).exclude(
+            id__in=oek_geometry_ids
+        ).exclude(
+            id__in=ema_geometry_ids
+        )
 
         num_entries = unattached_geometries.count()
         if num_entries > 0:
```
```diff
@@ -218,7 +252,7 @@ class Command(BaseKonovaCommand):
             self._write_success("No unattached geometries found.")
         self._break_line()
 
-    def get_all_state_ids(self, cls):
+    def _get_all_state_ids(self, cls):
        """ Getter for all states (before and after) of a class
 
        Args:
```
```diff
@@ -254,14 +288,19 @@ class Command(BaseKonovaCommand):
         """
         self._write_warning("=== Sanitize compensation states ===")
         all_states = CompensationState.objects.all()
-        compensation_state_ids = self.get_all_state_ids(Compensation)
-        account_state_ids = self.get_all_state_ids(EcoAccount)
-        ema_state_ids = self.get_all_state_ids(Ema)
-        attached_state_ids = compensation_state_ids.union(account_state_ids, ema_state_ids)
+
+        kom_state_ids = self._get_all_state_ids(Compensation)
+        oek_state_ids = self._get_all_state_ids(EcoAccount)
+        ema_state_ids = self._get_all_state_ids(Ema)
 
         unattached_states = all_states.exclude(
-            id__in=attached_state_ids
+            id__in=kom_state_ids
+        ).exclude(
+            id__in=oek_state_ids
+        ).exclude(
+            id__in=ema_state_ids
         )
 
         num_unattached_states = unattached_states.count()
         if num_unattached_states > 0:
             self._write_error(f"Found {num_unattached_states} unused compensation states. Delete now...")
```
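To see the new per-model counters, the command can also be driven from code with Django's standard call_command (assuming sanitize_db takes no required arguments, which the diff does not show either way):

```python
from django.core.management import call_command

# Prints the EIV/KOM/OEK/EMA "attached ..." counters added above, then removes
# whatever log entries, actions, deadlines and geometries are unattached.
call_command("sanitize_db")
```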
```diff
@@ -0,0 +1,23 @@
+# Generated by Django 5.0.1 on 2024-01-09 10:38
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('konova', '0014_resubmission'),
+    ]
+
+    operations = [
+        migrations.AddField(
+            model_name='geometry',
+            name='parcel_update_end',
+            field=models.DateTimeField(blank=True, db_comment='When the last parcel calculation finished', help_text='When the last parcel calculation finished', null=True),
+        ),
+        migrations.AddField(
+            model_name='geometry',
+            name='parcel_update_start',
+            field=models.DateTimeField(blank=True, db_comment='When the last parcel calculation started', help_text='When the last parcel calculation started', null=True),
+        ),
+    ]
```
```diff
@@ -14,13 +14,24 @@ from django.utils import timezone
 from konova.models import BaseResource, UuidModel
 from konova.sub_settings.lanis_settings import DEFAULT_SRID_RLP
 from konova.utils.schneider.fetcher import ParcelFetcher
-from konova.utils.wfs.spatial import ParcelWFSFetcher
 
 
 class Geometry(BaseResource):
     """
     Geometry model
     """
+    parcel_update_start = models.DateTimeField(
+        blank=True,
+        null=True,
+        db_comment="When the last parcel calculation started",
+        help_text="When the last parcel calculation started"
+    )
+    parcel_update_end = models.DateTimeField(
+        blank=True,
+        null=True,
+        db_comment="When the last parcel calculation finished",
+        help_text="When the last parcel calculation finished",
+    )
     geom = MultiPolygonField(null=True, blank=True, srid=DEFAULT_SRID_RLP)
 
     def __str__(self):
```
```diff
@@ -109,82 +120,14 @@ class Geometry(BaseResource):
             objs += set_objs
         return objs
 
-    @transaction.atomic
-    def update_parcels_wfs(self):
-        """ Updates underlying parcel information using the WFS of LVermGeo
-
-        Returns:
-
-        """
-        from konova.models import Parcel, District, ParcelIntersection, Municipal, ParcelGroup
-
-        if self.geom.empty:
-            # Nothing to do
-            return
-
-        parcel_fetcher = ParcelWFSFetcher(
-            geometry_id=self.id,
-        )
-        typename = "ave:Flurstueck"
-        fetched_parcels = parcel_fetcher.get_features(
-            typename
-        )
-        _now = timezone.now()
-        underlying_parcels = []
-        for result in fetched_parcels:
-            parcel_properties = result["properties"]
-            # There could be parcels which include the word 'Flur',
-            # which needs to be deleted and just keep the numerical values
-            ## THIS CAN BE REMOVED IN THE FUTURE, WHEN 'Flur' WON'T OCCUR ANYMORE!
-            flr_val = parcel_properties["flur"].replace("Flur ", "")
-            district = District.objects.get_or_create(
-                key=parcel_properties["kreisschl"],
-                name=parcel_properties["kreis"],
-            )[0]
-            municipal = Municipal.objects.get_or_create(
-                key=parcel_properties["gmdschl"],
-                name=parcel_properties["gemeinde"],
-                district=district,
-            )[0]
-            parcel_group = ParcelGroup.objects.get_or_create(
-                key=parcel_properties["gemaschl"],
-                name=parcel_properties["gemarkung"],
-                municipal=municipal,
-            )[0]
-            flrstck_nnr = parcel_properties['flstnrnen']
-            if not flrstck_nnr:
-                flrstck_nnr = None
-            flrstck_zhlr = parcel_properties['flstnrzae']
-            if not flrstck_zhlr:
-                flrstck_zhlr = None
-            parcel_obj = Parcel.objects.get_or_create(
-                district=district,
-                municipal=municipal,
-                parcel_group=parcel_group,
-                flr=flr_val,
-                flrstck_nnr=flrstck_nnr,
-                flrstck_zhlr=flrstck_zhlr,
-            )[0]
-            parcel_obj.district = district
-            parcel_obj.updated_on = _now
-            parcel_obj.save()
-            underlying_parcels.append(parcel_obj)
-
-        # Update the linked parcels
-        self.parcels.clear()
-        self.parcels.set(underlying_parcels)
-
-        # Set the calculated_on intermediate field, so this related data will be found on lookups
-        intersections_without_ts = self.parcelintersection_set.filter(
-            parcel__in=self.parcels.all(),
-            calculated_on__isnull=True,
-        )
-        for entry in intersections_without_ts:
-            entry.calculated_on = _now
-        ParcelIntersection.objects.bulk_update(
-            intersections_without_ts,
-            ["calculated_on"]
-        )
+    def get_data_object(self):
+        """
+        Getter for the specific data object which is related to this geometry
+        """
+        objs = self.get_data_objects()
+        assert (len(objs) <= 1)
+        result = objs.pop()
+        return result
 
     def update_parcels(self):
        """ Updates underlying parcel information
```
```diff
@@ -192,20 +135,28 @@ class Geometry(BaseResource):
         Returns:
 
         """
-        from konova.models import Parcel, District, ParcelIntersection, Municipal, ParcelGroup
-
         if self.geom.empty:
             # Nothing to do
             return
 
+        self._set_parcel_update_start_time()
+        self._perform_parcel_update()
+        self._set_parcel_update_end_time()
+
+    def _perform_parcel_update(self):
+        """
+        Performs the main logic of parcel updating.
+        """
+        from konova.models import Parcel, District, ParcelIntersection, Municipal, ParcelGroup
+
         parcel_fetcher = ParcelFetcher(
             geometry=self
         )
         fetched_parcels = parcel_fetcher.get_parcels()
 
         _now = timezone.now()
         underlying_parcels = []
         for result in fetched_parcels:
+            with transaction.atomic():
             # There could be parcels which include the word 'Flur',
             # which needs to be deleted and just keep the numerical values
             ## THIS CAN BE REMOVED IN THE FUTURE, WHEN 'Flur' WON'T OCCUR ANYMORE!
```
```diff
@@ -259,6 +210,23 @@ class Geometry(BaseResource):
             ["calculated_on"]
         )
 
+    @transaction.atomic
+    def _set_parcel_update_start_time(self):
+        """
+        Sets the current time for the parcel calculation begin
+        """
+        self.parcel_update_start = timezone.now()
+        self.parcel_update_end = None
+        self.save()
+
+    @transaction.atomic
+    def _set_parcel_update_end_time(self):
+        """
+        Sets the current time for the parcel calculation end
+        """
+        self.parcel_update_end = timezone.now()
+        self.save()
+
     def get_underlying_parcels(self):
         """ Getter for related parcels and their districts
 
```
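The two timestamps encode a tiny state machine: start set and end unset means a calculation is running (or has stalled). A standalone sketch of that reading, using plain datetimes instead of model fields (the attribute names mirror the diff; nothing else here comes from the repository):

```python
from datetime import datetime
from typing import Optional

def calculation_in_progress(parcel_update_start: Optional[datetime],
                            parcel_update_end: Optional[datetime]) -> bool:
    # _set_parcel_update_start_time() sets the start and clears the end;
    # _set_parcel_update_end_time() sets the end once linking has finished.
    return parcel_update_start is not None and parcel_update_end is None

print(calculation_in_progress(None, None))                           # False: never started
print(calculation_in_progress(datetime(2024, 1, 9, 10, 38), None))   # True: running or stalled
print(calculation_in_progress(datetime(2024, 1, 9, 10, 38),
                              datetime(2024, 1, 9, 10, 39)))         # False: finished
```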
```diff
@@ -46,4 +46,8 @@ DEFAULT_GROUP = "Default"
 ZB_GROUP = "Registration office"
 ETS_GROUP = "Conservation office"
 
+# GEOMETRY
+## Max number of allowed vertices. Geometries larger will be simplified until they reach this threshold
 GEOM_MAX_VERTICES = 10000
+## Max seconds to wait for a parcel calculation result before a new request will be started (default: 5 minutes)
+GEOM_THRESHOLD_RECALCULATION_SECONDS = 300
```
```diff
@@ -28,7 +28,11 @@ class ParcelFetcher:
         self.geometry = geometry
 
         # Reduce size of geometry to avoid "intersections" because of exact border matching
-        geom = geometry.geom.buffer(-0.001)
+        buffer_threshold = 0.001
+        geom = geometry.geom.buffer(-buffer_threshold)
+        if geom.area < buffer_threshold:
+            # Fallback for degenerate geometries which are way too small and would disappear on buffering
+            geom = geometry.geom
         self.geojson = geom.ewkt
         self.results = []
 
```
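The fallback exists because a negative buffer can erase a small geometry entirely; a quick illustration with GeoDjango's GEOS API (coordinates are made up and assumed to be in a metre-based SRS):

```python
from django.contrib.gis.geos import GEOSGeometry

parcel_sized = GEOSGeometry("POLYGON((0 0, 10 0, 10 10, 0 10, 0 0))")
sliver = GEOSGeometry("POLYGON((0 0, 0.001 0, 0.001 0.001, 0 0.001, 0 0))")

# Shrinking by 1 mm barely changes a normal geometry ...
print(parcel_sized.buffer(-0.001).area)   # ~99.96
# ... but collapses the sliver to nothing, which is why the fetcher falls back to the original geom.
print(sliver.buffer(-0.001).empty)        # True
```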
The deleted utils/wfs/spatial.py (hunk @@ -1,189 +0,0 @@), removed in full:

```python
"""
Author: Michel Peltriaux
Organization: Struktur- und Genehmigungsdirektion Nord, Rhineland-Palatinate, Germany
Contact: michel.peltriaux@sgdnord.rlp.de
Created on: 17.12.21

"""
import json
from abc import abstractmethod
from json import JSONDecodeError
from time import sleep

import requests
from django.contrib.gis.db.models.functions import AsGML, MakeValid
from django.db.models import Func, F
from requests.auth import HTTPDigestAuth

from konova.settings import PARCEL_WFS_USER, PARCEL_WFS_PW, PROXIES


class AbstractWFSFetcher:
    """ Base class for fetching WFS data

    """
    # base_url represents not the capabilities url but the parameter-free base url
    base_url = None
    version = None
    auth_user = None
    auth_pw = None
    auth_digest_obj = None

    class Meta:
        abstract = True

    def __init__(self, base_url: str, version: str = "1.1.0", auth_user: str = None, auth_pw: str = None, *args, **kwargs):
        self.base_url = base_url
        self.version = version
        self.auth_pw = auth_pw
        self.auth_user = auth_user

        self._create_auth_obj()

    def _create_auth_obj(self):
        if self.auth_pw is not None and self.auth_user is not None:
            self.auth_digest_obj = HTTPDigestAuth(
                self.auth_user,
                self.auth_pw
            )

    @abstractmethod
    def get_features(self, feature_identifier: str, filter_str: str):
        raise NotImplementedError


class ParcelWFSFetcher(AbstractWFSFetcher):
    """ Fetches features from a special parcel WFS

    """
    geometry_id = None
    geometry_property_name = None
    count = 100

    def __init__(self, geometry_id: str, geometry_property_name: str = "msGeometry", *args, **kwargs):
        super().__init__(
            version="2.0.0",
            base_url="https://www.geoportal.rlp.de/registry/wfs/519",
            auth_user=PARCEL_WFS_USER,
            auth_pw=PARCEL_WFS_PW,
            *args,
            **kwargs
        )
        self.geometry_id = geometry_id
        self.geometry_property_name = geometry_property_name

    def _create_spatial_filter(self,
                               geometry_operation: str):
        """ Creates a xml spatial filter according to the WFS filter specification

        The geometry needs to be shrinked by a very small factor (-0.01) before a GML can be created for intersection
        checking. Otherwise perfect parcel outline placement on top of a neighbouring parcel would result in an
        intersection hit, despite the fact they do not truly intersect just because their vertices match.

        Args:
            geometry_operation (str): One of the WFS supported spatial filter operations (according to capabilities)

        Returns:
            spatial_filter (str): The spatial filter element
        """
        from konova.models import Geometry

        geom = Geometry.objects.filter(
            id=self.geometry_id
        ).annotate(
            smaller=Func(F('geom'), -0.001, function="ST_Buffer")  # same as geometry.geom_small_buffered but for QuerySet
        ).annotate(
            gml=AsGML(MakeValid('smaller'))
        ).first()
        geom_gml = geom.gml
        spatial_filter = f"<Filter><{geometry_operation}><PropertyName>{self.geometry_property_name}</PropertyName>{geom_gml}</{geometry_operation}></Filter>"
        return spatial_filter

    def _create_post_data(self,
                          geometry_operation: str,
                          typenames: str = None,
                          start_index: int = 0,
                          ):
        """ Creates a POST body content for fetching features

        Args:
            geometry_operation (str): One of the WFS supported spatial filter operations (according to capabilities)

        Returns:
            _filter (str): A proper xml WFS filter
        """
        start_index = str(start_index)
        spatial_filter = self._create_spatial_filter(
            geometry_operation
        )
        _filter = f'<wfs:GetFeature service="WFS" version="{self.version}" xmlns:wfs="http://www.opengis.net/wfs/2.0" xmlns:fes="http://www.opengis.net/fes/2.0" xmlns:myns="http://www.someserver.com/myns" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://www.opengis.net/wfs/2.0 http://schemas.opengis.net/wfs/2.0.0/wfs.xsd" count="{self.count}" startindex="{start_index}" outputFormat="application/json; subtype=geojson"><wfs:Query typeNames="{typenames}">{spatial_filter}</wfs:Query></wfs:GetFeature>'
        return _filter

    def get_features(self,
                     typenames: str,
                     spatial_operator: str = "Intersects",
                     filter_srid: str = None,
                     start_index: int = 0,
                     rerun_on_exception: bool = True
                     ):
        """ Fetches features from the WFS using POST

        POST is required since GET has a character limit around 4000. Having a larger filter would result in errors,
        which do not occur in case of POST.

        Args:
            typenames (str): References to parameter 'typenames' in a WFS GetFeature request
            spatial_operator (str): Defines the spatial operation for filtering
            filter_srid (str): Defines the spatial reference system, the geometry shall be transformed into for filtering
            start_index (str): References to parameter 'startindex' in a

        Returns:
            features (list): A list of returned features
        """
        found_features = []
        while start_index is not None:
            post_body = self._create_post_data(
                spatial_operator,
                typenames,
                start_index
            )
            response = requests.post(
                url=self.base_url,
                data=post_body,
                auth=self.auth_digest_obj,
                proxies=PROXIES,
            )

            content = response.content.decode("utf-8")
            try:
                # Check if collection is an exception and does not contain the requested data
                content = json.loads(content)
            except JSONDecodeError as e:
                if rerun_on_exception:
                    # Wait a second before another try
                    sleep(1)
                    self.get_features(
                        typenames,
                        spatial_operator,
                        filter_srid,
                        start_index,
                        rerun_on_exception=False
                    )
                else:
                    e.msg += content
                    raise e
            fetched_features = content.get(
                "features",
                {},
            )

            found_features += fetched_features

            if len(fetched_features) < self.count:
                # The response was not 'full', so we got everything to fetch
                start_index = None
            else:
                # If a 'full' response returned, there might be more to fetch. Increase the start_index!
                start_index += self.count

        return found_features
```
```diff
@@ -10,10 +10,13 @@ from django.contrib.gis.geos import MultiPolygon
 from django.http import HttpResponse, HttpRequest
 from django.shortcuts import get_object_or_404
 from django.template.loader import render_to_string
+from django.utils import timezone
 from django.views import View
 
 from konova.models import Geometry
+from konova.settings import GEOM_THRESHOLD_RECALCULATION_SECONDS
 from konova.sub_settings.lanis_settings import DEFAULT_SRID_RLP
+from konova.tasks import celery_update_parcels
 
 
 class GeomParcelsView(LoginRequiredMixin, View):
```
```diff
@@ -38,15 +41,25 @@ class GeomParcelsView(LoginRequiredMixin, View):
         parcels = geom.get_underlying_parcels()
         geos_geom = geom.geom or MultiPolygon(srid=DEFAULT_SRID_RLP)
 
-        geometry_exists = not geos_geom.empty
-        parcels_are_currently_calculated = geometry_exists and geos_geom.area > 0 and len(parcels) == 0
+        waiting_too_long = self._check_waiting_too_long(geom)
+
+        geometry_exists = not geos_geom.empty and geos_geom.area > 0
+        parcels_are_currently_calculated = (
+            geometry_exists and
+            geom.parcel_update_start and
+            not geom.parcel_update_end
+        )
         parcels_available = len(parcels) > 0
 
         if parcels_are_currently_calculated:
             # Parcels are being calculated right now. Change the status code, so polling stays active for fetching
-            # resutls after the calculation
+            # results after the calculation
             status_code = 200
 
+            if waiting_too_long:
+                # Trigger calculation again
+                celery_update_parcels.delay(geom.id)
+
         if parcels_available or not geometry_exists:
             municipals = geom.get_underlying_municipals(parcels)
 
```
```diff
@@ -69,6 +82,21 @@ class GeomParcelsView(LoginRequiredMixin, View):
         else:
             return HttpResponse(None, status=404)
 
+    def _check_waiting_too_long(self, geom: Geometry):
+        """ Check whether the client is waiting too long for a parcel calculation result
+
+        Depends on the geometry's parcel_update_start attribute
+
+        """
+        try:
+            pcs_diff = (timezone.now() - geom.parcel_update_start).seconds
+        except TypeError:
+            pcs_diff = GEOM_THRESHOLD_RECALCULATION_SECONDS
+
+        calculation_not_finished = geom.parcel_update_end is None
+        waiting_too_long = (pcs_diff >= GEOM_THRESHOLD_RECALCULATION_SECONDS) and calculation_not_finished
+        return waiting_too_long
+
 
 class GeomParcelsContentView(LoginRequiredMixin, View):
 
```
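A rough timing sketch of the threshold logic above, using plain datetimes instead of timezone.now() so it runs outside a configured Django project (GEOM_THRESHOLD_RECALCULATION_SECONDS = 300 as in the settings diff; everything else is illustrative):

```python
from datetime import datetime, timedelta
from typing import Optional

GEOM_THRESHOLD_RECALCULATION_SECONDS = 300

def waiting_too_long(parcel_update_start: Optional[datetime],
                     parcel_update_end: Optional[datetime]) -> bool:
    # Mirrors _check_waiting_too_long: a missing start time falls back to the
    # threshold itself, so it also counts as "waiting too long".
    try:
        pcs_diff = (datetime.now() - parcel_update_start).seconds
    except TypeError:
        pcs_diff = GEOM_THRESHOLD_RECALCULATION_SECONDS
    return pcs_diff >= GEOM_THRESHOLD_RECALCULATION_SECONDS and parcel_update_end is None

print(waiting_too_long(datetime.now() - timedelta(seconds=30), None))   # False: still within 5 minutes
print(waiting_too_long(datetime.now() - timedelta(seconds=600), None))  # True: stalled, re-trigger the calculation
```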
(One binary file is not shown and one large file diff is suppressed.)
```diff
@@ -34,7 +34,6 @@ pika==1.3.2
 prompt-toolkit==3.0.43
 psycopg==3.1.16
 psycopg-binary==3.1.16
-psycopg2-binary==2.9.9
 pyparsing==3.1.1
 pypng==0.20220715.0
 pyproj==3.6.1
```