"""
|
|
Author: Michel Peltriaux
|
|
Organization: Struktur- und Genehmigungsdirektion Nord, Rhineland-Palatinate, Germany
|
|
Contact: michel.peltriaux@sgdnord.rlp.de
|
|
Created on: 15.11.21
|
|
|
|
"""
|
|
import json
|
|
from time import process_time
|
|
|
|
from django.contrib.gis.db.models import MultiPolygonField
|
|
from django.db import models, transaction
|
|
from django.utils import timezone
|
|
|
|
from konova.models import BaseResource, UuidModel
|
|
from konova.sub_settings.lanis_settings import DEFAULT_SRID_RLP
|
|
from konova.utils.schneider.fetcher import ParcelFetcher
|
|
|
|
|
|
class Geometry(BaseResource):
    """
    Geometry model
    """
    parcel_update_start = models.DateTimeField(
        blank=True,
        null=True,
        db_comment="When the last parcel calculation started",
        help_text="When the last parcel calculation started",
    )
    parcel_update_end = models.DateTimeField(
        blank=True,
        null=True,
        db_comment="When the last parcel calculation finished",
        help_text="When the last parcel calculation finished",
    )
    geom = MultiPolygonField(null=True, blank=True, srid=DEFAULT_SRID_RLP)

    def __str__(self):
        return str(self.id)

    def save(self, *args, **kwargs):
        super().save(*args, **kwargs)

    @property
    def geom_small_buffered(self):
        """ Returns a slightly shrunk (negatively buffered) version of the geometry

        Can be used to shrink the geometry for intersection checks, so that geometries which only touch
        along a shared border are not reported as intersecting.

        Returns:
            geom (GEOSGeometry): The negatively buffered geometry
        """
        return self.geom.buffer(-0.001)
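
    # Illustrative sketch (not part of the model): why the negative buffer matters.
    # Two geometries that merely share a border would otherwise be reported as intersecting:
    #
    #   from django.contrib.gis.geos import MultiPolygon, Polygon
    #   a = MultiPolygon(Polygon(((0, 0), (1, 0), (1, 1), (0, 1), (0, 0))))
    #   b = MultiPolygon(Polygon(((1, 0), (2, 0), (2, 1), (1, 1), (1, 0))))
    #   a.intersects(b)                  # True - only the shared edge intersects
    #   a.buffer(-0.001).intersects(b)   # False - the shrunk geometry no longer touches b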

    def check_for_conflicts(self):
        """ Checks for new geometry overlaps

        Creates a new GeometryConflict entry for each overlap with another geometry that already existed before.

        Returns:
            None
        """
        # If no geometry is given or important data is missing, we can not perform any checks
        if self.geom is None:
            return None

        self.recheck_existing_conflicts()
        overlapping_geoms = Geometry.objects.filter(
            geom__intersects=self.geom_small_buffered,
        ).exclude(
            id=self.id
        ).distinct()

        for match in overlapping_geoms:
            # Make sure this conflict is not already known in the swapped constellation
            conflict_exists_swapped = GeometryConflict.objects.filter(
                conflicting_geometry=match,
                affected_geometry=self
            ).exists()
            if not conflict_exists_swapped:
                GeometryConflict.objects.get_or_create(conflicting_geometry=self, affected_geometry=match)

    def recheck_existing_conflicts(self):
        """ Rechecks existing GeometryConflict entries

        If a conflict appears to be resolved, because the two geometries no longer intersect, the entry
        will be deleted.

        Returns:
            None
        """
        # Conflicts where this geometry is the conflicting (newer) one
        all_conflicts_as_conflicting = self.conflicts_geometries.all()
        still_conflicting_conflicts = all_conflicts_as_conflicting.filter(
            affected_geometry__geom__intersects=self.geom_small_buffered
        )
        resolved_conflicts = all_conflicts_as_conflicting.exclude(id__in=still_conflicting_conflicts)
        resolved_conflicts.delete()

        # Conflicts where this geometry is the affected (older) one
        all_conflicted_by_conflicts = self.conflicted_by_geometries.all()
        still_conflicting_conflicts = all_conflicted_by_conflicts.filter(
            conflicting_geometry__geom__intersects=self.geom_small_buffered
        )
        resolved_conflicts = all_conflicted_by_conflicts.exclude(id__in=still_conflicting_conflicts)
        resolved_conflicts.delete()

    def get_data_objects(self):
        """ Getter for all objects which are related to this geometry

        Returns:
            objs (list): The list of objects
        """
        objs = []
        sets = [
            self.intervention_set,
            self.compensation_set,
            self.ema_set,
            self.ecoaccount_set,
        ]
        for _set in sets:
            set_objs = _set.filter(
                deleted=None
            )
            objs += set_objs
        return objs

    def get_data_object(self):
        """
        Getter for the specific data object which is related to this geometry
        """
        objs = self.get_data_objects()
        assert (len(objs) <= 1)
        # There is at most one related object; return None if there is none
        result = objs.pop() if objs else None
        return result

    def update_parcels(self):
        """ Updates the underlying parcel information

        Returns:
            None
        """
        if self.geom is None or self.geom.empty:
            # Nothing to do
            return

        self._set_parcel_update_start_time()

        t1 = process_time()
        self._perform_parcel_update_fast()
        print(f"Parcel processing: {process_time() - t1}")
        self._set_parcel_update_end_time()
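
    # Usage sketch (illustrative; the surrounding application decides when to trigger this):
    #
    #   geometry.geom = new_multipolygon
    #   geometry.save()
    #   geometry.check_for_conflicts()
    #   geometry.update_parcels()
    #
    # parcel_update_start and parcel_update_end can afterwards be compared to tell whether a
    # parcel calculation is still running for this geometry.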

    def _perform_parcel_update(self):
        """
        Performs the main logic of parcel updating.
        """
        from konova.models import Parcel, District, ParcelIntersection, Municipal, ParcelGroup

        parcel_fetcher = ParcelFetcher(
            geometry=self
        )
        fetched_parcels = parcel_fetcher.get_parcels()
        _now = timezone.now()
        underlying_parcels = []
        i = 0
        len_fetched_parcels = len(fetched_parcels)
        print("Process fetched parcels:")
        for result in fetched_parcels:
            # Some parcels still contain the word 'Flur' in this field; strip it and keep
            # only the numerical value.
            ## THIS CAN BE REMOVED IN THE FUTURE, WHEN 'Flur' WON'T OCCUR ANYMORE!
            flr_val = result["flur"].replace("Flur ", "")
            district = District.objects.get_or_create(
                key=result["kreisschl"],
                name=result["kreis"],
            )[0]
            municipal = Municipal.objects.get_or_create(
                key=result["gmdschl"],
                name=result["gemeinde"],
                district=district,
            )[0]
            parcel_group = ParcelGroup.objects.get_or_create(
                key=result["gemaschl"],
                name=result["gemarkung"],
                municipal=municipal,
            )[0]
            flrstck_nnr = result['flstnrnen']
            if not flrstck_nnr:
                flrstck_nnr = None
            flrstck_zhlr = result['flstnrzae']
            if not flrstck_zhlr:
                flrstck_zhlr = None
            parcel_obj = Parcel.objects.get_or_create(
                district=district,
                municipal=municipal,
                parcel_group=parcel_group,
                flr=flr_val,
                flrstck_nnr=flrstck_nnr,
                flrstck_zhlr=flrstck_zhlr,
            )[0]
            parcel_obj.district = district
            parcel_obj.updated_on = _now
            parcel_obj.save()
            underlying_parcels.append(parcel_obj)
            i += 1
            if i % 100 == 0:
                print(f"  {i}/{len_fetched_parcels}")

        # Update the linked parcels
        #self.parcels.clear()
        self.parcels.set(underlying_parcels)

        # Set the calculated_on intermediate field, so this related data will be found on lookups
        #intersections_without_ts = self.parcelintersection_set.filter(
        #    parcel__in=self.parcels.all(),
        #    calculated_on__isnull=True,
        #)
        #for entry in intersections_without_ts:
        #    entry.calculated_on = _now
        #ParcelIntersection.objects.bulk_update(
        #    intersections_without_ts,
        #    ["calculated_on"]
        #)

    def _perform_parcel_update_fast(self):
        """
        Performs the main logic of parcel updating.
        """
        from konova.models import Parcel, District, Municipal, ParcelGroup

        parcel_fetcher = ParcelFetcher(
            geometry=self
        )
        fetched_parcels = parcel_fetcher.get_parcels()
        _now = timezone.now()
        underlying_parcels = []

        i = 0
        len_fetched_parcels = len(fetched_parcels)
        print("Process fetched parcels:")

        districts = {}
        municipals = {}
        parcel_groups = {}

        for result in fetched_parcels:
            # Some parcels still contain the word 'Flur' in this field; strip it and keep
            # only the numerical value.
            ## THIS CAN BE REMOVED IN THE FUTURE, WHEN 'Flur' WON'T OCCUR ANYMORE!
            flr_val = result["flur"].replace("Flur ", "")

            # Get district (cache in dict)
            try:
                district = districts[result["kreisschl"]]
            except KeyError:
                district = District.objects.get_or_create(
                    key=result["kreisschl"],
                    name=result["kreis"],
                )[0]
                districts[district.key] = district

            # Get municipal (cache in dict)
            try:
                municipal = municipals[result["gmdschl"]]
            except KeyError:
                municipal = Municipal.objects.get_or_create(
                    key=result["gmdschl"],
                    name=result["gemeinde"],
                    district=district,
                )[0]
                municipals[municipal.key] = municipal

            # Get parcel group (cache in dict)
            try:
                parcel_group = parcel_groups[result["gemaschl"]]
            except KeyError:
                parcel_group = ParcelGroup.objects.get_or_create(
                    key=result["gemaschl"],
                    name=result["gemarkung"],
                    municipal=municipal,
                )[0]
                parcel_groups[parcel_group.key] = parcel_group

            # Preprocess parcel data
            flrstck_nnr = result['flstnrnen']
            if not flrstck_nnr:
                flrstck_nnr = None
            flrstck_zhlr = result['flstnrzae']
            if not flrstck_zhlr:
                flrstck_zhlr = None

            parcel_obj = Parcel.objects.get_or_create(
                district=district,
                municipal=municipal,
                parcel_group=parcel_group,
                flr=flr_val,
                flrstck_nnr=flrstck_nnr,
                flrstck_zhlr=flrstck_zhlr,
            )[0]
            parcel_obj.updated_on = _now
            parcel_obj.save()
            underlying_parcels.append(parcel_obj)
            i += 1
            if i % 100 == 0:
                print(f"  {i}/{len_fetched_parcels}")

        # Update linked parcels
        self.parcels.set(underlying_parcels)
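
    # Design note: compared to _perform_parcel_update(), the dictionaries above act as
    # per-run caches keyed by the official district/municipal/parcel-group keys, so each
    # District, Municipal and ParcelGroup is resolved via get_or_create() only once per
    # update instead of once per fetched parcel, which cuts down the number of database
    # queries considerably.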

    @transaction.atomic
    def _set_parcel_update_start_time(self):
        """
        Stores the current time as the start of the parcel calculation
        """
        self.parcel_update_start = timezone.now()
        self.parcel_update_end = None
        self.save()

    @transaction.atomic
    def _set_parcel_update_end_time(self):
        """
        Stores the current time as the end of the parcel calculation
        """
        self.parcel_update_end = timezone.now()
        self.save()

    def get_underlying_parcels(self):
        """ Getter for related parcels and their districts

        Returns:
            parcels (QuerySet): The related parcels as queryset
        """
        parcels = self.parcels.prefetch_related(
            "district",
            "municipal",
        ).order_by(
            "municipal__name",
        )

        return parcels

    def get_underlying_municipals(self, parcels=None):
        """ Getter for related municipals

        If no QuerySet of parcels is provided, the parcels will be fetched

        Returns:
            municipals (QuerySet): The related municipals as queryset
        """
        from konova.models import Municipal

        if parcels is None:
            parcels = self.get_underlying_parcels()
        municipals = parcels.order_by("municipal").distinct("municipal").values("municipal__id")
        municipals = Municipal.objects.filter(id__in=municipals).order_by("name")
        return municipals

    def count_underlying_parcels(self):
        """ Getter for the number of underlying parcels

        Returns:
            num_parcels (int): The number of parcels whose intersection has already been calculated
        """
        num_parcels = self.parcels.filter(
            parcelintersection__calculated_on__isnull=False,
        ).count()
        return num_parcels

    def as_feature_collection(self, srid=DEFAULT_SRID_RLP):
        """ Returns a FeatureCollection structure holding all polygons of the MultiPolygon as single features

        This method is used to convert a single MultiPolygon into multiple Polygons, which can be used as separate
        features in the NETGIS map client.

        Args:
            srid (int): The spatial reference system identifier to transform the geometry to

        Returns:
            geojson (dict): The FeatureCollection json (as dict)
        """
        geom = self.geom
        if geom.srid != srid:
            geom.transform(srid)
        polygons = [p for p in geom]
        geojson = {
            "type": "FeatureCollection",
            "crs": {
                "type": "name",
                "properties": {
                    "name": f"urn:ogc:def:crs:EPSG::{geom.srid}"
                }
            },
            "features": [
                {
                    "type": "Feature",
                    "geometry": json.loads(p.json),
                }
                for p in polygons
            ]
        }
        return geojson
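
    # Shape of the resulting dict (illustrative, for a MultiPolygon holding two polygons;
    # coordinates abbreviated):
    #
    #   {
    #       "type": "FeatureCollection",
    #       "crs": {"type": "name", "properties": {"name": "urn:ogc:def:crs:EPSG::<srid>"}},
    #       "features": [
    #           {"type": "Feature", "geometry": {"type": "Polygon", "coordinates": [...]}},
    #           {"type": "Feature", "geometry": {"type": "Polygon", "coordinates": [...]}},
    #       ],
    #   }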

    @property
    def complexity_factor(self) -> float:
        """ Calculates a factor to estimate the complexity of a Geometry

        0 = very low complexity
        1 = very high complexity

        ASSUMPTION:
        The envelope is the bounding box of a geometry. If the geometry's area is similar to the area of its bounding
        box, it is considered rather simple, since it is close to a compact shape like a simple box.
        If the geometry has a very big bounding box, but the geometry's own area is rather small compared to
        the one of the bounding box, the complexity is considered higher.

        Example:
            geometry area similar to bounding box --> geometry / bounding_box ~ 1
            geometry area far smaller than bb --> geometry / bounding_box ~ 0

        The result is inverted for a more intuitive reading of 'low' and 'high' complexity.

        Returns:
            complexity_factor (float): The estimated complexity
        """
        geom_envelope = self.geom.envelope
        complexity_factor = 1 - self.geom.area / geom_envelope.area
        return complexity_factor
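
    # Worked example (illustrative): a 10 x 10 square fills its bounding box completely,
    # so complexity_factor = 1 - (100 / 100) = 0.0 (very low complexity). An L-shaped
    # geometry covering only 30 of the 100 area units of its bounding box yields
    # 1 - (30 / 100) = 0.7 (rather high complexity).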


class GeometryConflict(UuidModel):
    """
    Geometry conflict model

    If a new/edited geometry overlaps an already existing geometry, a new GeometryConflict entry is created in the db
    """
    conflicting_geometry = models.ForeignKey(
        Geometry,
        on_delete=models.CASCADE,
        help_text="The geometry which came second",
        related_name="conflicts_geometries"
    )
    affected_geometry = models.ForeignKey(
        Geometry,
        on_delete=models.CASCADE,
        help_text="The geometry which came first",
        related_name="conflicted_by_geometries"
    )
    detected_on = models.DateTimeField(auto_now_add=True, null=True)

    def __str__(self):
        return f"{self.conflicting_geometry.id} conflicts with {self.affected_geometry.id}"