Compare commits
132 Commits
1.17
...
2399ca9bf9
| Author | SHA1 | Date | |
|---|---|---|---|
| 2399ca9bf9 | |||
| c48ccd5334 | |||
| e2f8fc9c6d | |||
| fac658e52c | |||
| d6a65dd59a | |||
| cbc8acf6f6 | |||
| 31de477f26 | |||
| 19b6e633df | |||
| b98f821c98 | |||
| 5421de4e80 | |||
| a7a0044fc5 | |||
| 36552b3886 | |||
| a766c4dbe8 | |||
| 8126781b77 | |||
| a6a66d7499 | |||
| 1c0b67693d | |||
| ce6bb6b23b | |||
| 0b8176db2e | |||
| 3a299a040a | |||
| 3c5206139b | |||
| 6c53f39a28 | |||
| 64d8f47174 | |||
| f5f3246e89 | |||
| ad8961ab82 | |||
| c2c8630c82 | |||
| dce9e1fc71 | |||
| 2b84bab1d0 | |||
| 303583daa1 | |||
| d07b2ffbfb | |||
| 335800c44b | |||
| 5766cfde47 | |||
| 2ed3fcc0f9 | |||
| bf72295615 | |||
| 6b860f8ea5 | |||
| 2fa2fa547b | |||
| 3de956872c | |||
| 1c8e3992d6 | |||
| e6e9e141c8 | |||
| f8ece06ee8 | |||
| 149a351bfd | |||
| 0164717b8e | |||
| 104952bfc3 | |||
| f96241c8d1 | |||
| ac6b534f58 | |||
| 06910cd69a | |||
| a48ba520fc | |||
| 9f18aa5890 | |||
| ab3bd84f3b | |||
| f829cd5a4c | |||
| 0f2bf95b71 | |||
| 6a307016ec | |||
| 51017ef8fa | |||
| 05560534bc | |||
| c882173e78 | |||
| 1d94211428 | |||
| 37357080d8 | |||
| 5afa13ac92 | |||
| 416cad1c8f | |||
| b5f83b7163 | |||
| 20cfb5f345 | |||
| 88c96b95f2 | |||
| f6c500b02a | |||
| d702cd8716 | |||
| 329cdd4838 | |||
| 1b70024a29 | |||
| 58206853ee | |||
| 6356398c40 | |||
| 8519922d78 | |||
| 5ac0654fd4 | |||
| 6c07a81b4f | |||
| ba45b4f961 | |||
| 280de82a52 | |||
| 6022e2d879 | |||
| 1996efcc0d | |||
| 80569119cb | |||
| 98e71d4e8a | |||
| fec7191ac2 | |||
| 9b1085f206 | |||
| b35d175a5c | |||
| 7f5fb022ac | |||
| 2d3314ab18 | |||
| 8b489f013d | |||
| 16ce5506d8 | |||
| e440bf8372 | |||
| 607db267e6 | |||
| 352ca64e09 | |||
| f2b735da6e | |||
| 6f7cfb713e | |||
| 103b703ee9 | |||
| daf8b1dce6 | |||
| c088affd74 | |||
| ecc727c991 | |||
| 632569fa5d | |||
| 6c6cbb7396 | |||
| 5e6bfdf77e | |||
| 35e5e18b79 | |||
| c0e8c6bd84 | |||
| 64541b76c5 | |||
| f65b9262cb | |||
| 2765d0548e | |||
| 951f810ce5 | |||
| d2c177d448 | |||
| 299727a7b4 | |||
| b97976b2c5 | |||
| 20241661ff | |||
| ad5c0bea67 | |||
| 80a44277bc | |||
| 5c2b5affc9 | |||
| cd99743d1e | |||
| b39432be1a | |||
| 03f9a33e54 | |||
| 699a9c1e76 | |||
| 4dfd02291e | |||
| e7ca485a88 | |||
| 8319cbfe17 | |||
| 4a023e9f10 | |||
| 4100f96dc6 | |||
| ca24f098e4 | |||
| 80dcd62199 | |||
| 0cfd3da728 | |||
| e141851a87 | |||
| 89ec67999b | |||
| ec38daaedc | |||
| 45c0826a84 | |||
| 45a383cf85 | |||
| 90aff209f9 | |||
| 13528e91e9 | |||
| 04179d633c | |||
| 0a241305d3 | |||
| 31565a0bc4 | |||
| af747417d3 | |||
| c6606c4151 |
36
Dockerfile
Normal file
36
Dockerfile
Normal file
@@ -0,0 +1,36 @@
|
||||
# Use a slim Python base image
FROM python:3.11-slim-bullseye

# key=value form; the legacy space-separated "ENV key value" form is deprecated
ENV PYTHONUNBUFFERED=1

WORKDIR /konova

# Install system dependencies; clean the apt lists in the same layer to keep the image small
RUN apt-get update && apt-get install -y --no-install-recommends \
    gdal-bin redis-server nginx \
    && rm -rf /var/lib/apt/lists/*

# Create required directories & set permissions
RUN mkdir -p /var/log/nginx /var/log/gunicorn /var/lib/nginx /tmp/nginx_client_body \
    && touch /var/log/nginx/access.log /var/log/nginx/error.log \
    && chown -R root:root /var/log/nginx /var/lib/nginx /tmp/nginx_client_body

# Copy and install Python dependencies first, so this layer stays cached
# until requirements.txt actually changes
COPY ./requirements.txt /konova/
RUN pip install --no-cache-dir --upgrade pip && pip install --no-cache-dir -r requirements.txt

# Remove the default nginx site and replace it with our own config
RUN rm -rf /etc/nginx/sites-enabled/default
COPY ./nginx.conf /etc/nginx/conf.d

# Copy the remaining project files
COPY . /konova/

# Collect static files
RUN python manage.py collectstatic --noinput

# Ports (EXPOSE is documentation only; actual publishing happens in docker-compose)
#EXPOSE 80 6379 8000

# Set entrypoint
ENTRYPOINT ["/konova/docker-entrypoint.sh"]
|
||||
56
README.md
56
README.md
@@ -4,6 +4,7 @@ the database postgresql and the css library bootstrap as well as the icon packag
|
||||
fontawesome for a modern look, following best practices from the industry.
|
||||
|
||||
## Background processes
|
||||
### !!! For non-docker run
|
||||
Konova uses celery for background processing. To start the worker you need to run
|
||||
```shell
|
||||
$ celery -A konova worker -l INFO
|
||||
@@ -18,3 +19,58 @@ Technical documention is provided in the projects git wiki.
|
||||
A user documentation is not available (and not needed, yet).
|
||||
|
||||
|
||||
# Docker
|
||||
To run the docker-compose as expected, you need to take the following steps:
|
||||
|
||||
1. Create a docker container providing the database, using an appropriate docker-compose file, e.g. the following
|
||||
```
|
||||
version: '3.3'
|
||||
services:
|
||||
postgis:
|
||||
image: postgis/postgis
|
||||
restart: always
|
||||
container_name: postgis-docker
|
||||
ports:
|
||||
- 5433:5432
|
||||
volumes:
|
||||
- db-volume:/var/lib/postgresql/data
|
||||
environment:
|
||||
- POSTGRES_PASSWORD=postgres
|
||||
- POSTGRES_USER=postgres
|
||||
networks:
|
||||
- db-network-bridge
|
||||
|
||||
networks:
|
||||
db-network-bridge:
|
||||
driver: "bridge"
|
||||
|
||||
volumes:
|
||||
db-volume:
|
||||
```
|
||||
This docker-compose file creates a Docker container running postgresql and postgis, creates the default superuser postgres,
|
||||
creates a named volume for persisting the database and creates a new network bridge, which **must be used by any other
|
||||
container, which wants to write/read on this database**.
|
||||
|
||||
2. Make sure the name of the network bridge above matches the network in the konova docker-compose.yml
|
||||
3. Get into the running postgis container (`docker exec -it postgis-docker bash`) and create new databases, users and so on. Make sure the database `konova` exists now!
|
||||
4. Replace all `CHANGE_ME_xy` values inside of konova/docker-compose.yml for your installation. Make sure the `SSO_HOST` holds the proper SSO host, e.g. for the arnova project `arnova.example.org` (Arnova must be installed and the webserver configured as well, of course)
|
||||
5. Take a look on konova/settings.py and konova/sub_settings/django_settings.py. Again: Replace all occurrences of `CHANGE_ME` with proper values for your installation.
|
||||
1. Make sure you have the proper host strings added to `ALLOWED_HOSTS` inside of django_settings.py.
|
||||
6. Build and run the docker setup using `docker-compose build` and `docker-compose start` from the main directory of this project (where the docker-compose.yml lives)
|
||||
7. Run migrations! To do so, get into the konova service container (`docker exec -it konova-docker bash`) and run the needed commands (`python manage.py makemigrations LIST_OF_ALL_MIGRATABLE_APPS`, then `python manage.py migrate`)
|
||||
8. Run the setup command `python manage.py setup` and follow the instructions on the CLI
|
||||
9. To enable **SMTP** mail support, make sure your host machine (the one where the docker container run) has the postfix service configured properly. Make sure the `mynetworks` variable is extended using the docker network bridge ip, created in the postgis container and used by the konova services.
|
||||
1. **Hint**: You can find out this easily by trying to perform a test mail in the running konova web application (which will fail, of course). Then take a look to the latest entries in `/var/log/mail.log` on your host machine. The failed IP will be displayed there.
|
||||
2. **Please note**: This installation guide is based on SMTP using postfix!
|
||||
3. Restart the postfix service on your host machine to reload the new configuration (`service postfix restart`)
|
||||
10. Finally, make sure your host machine webserver passes incoming requests properly to the docker nginx webserver of konova. A proper nginx config for the host machine may look like this:
|
||||
```
|
||||
server {
|
||||
server_name konova.domain.org;
|
||||
|
||||
location / {
|
||||
proxy_pass http://localhost:KONOVA_NGINX_DOCKER_PORT/;
|
||||
proxy_set_header Host $host;
|
||||
}
|
||||
}
|
||||
```
|
||||
@@ -1,23 +0,0 @@
|
||||
# Generated by Django 6.0.5 on 2026-05-10 07:18
|
||||
|
||||
import django.db.models.deletion
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('api', '0003_oauthtoken'),
|
||||
('user', '0010_user_sso_identifier'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.CreateModel(
|
||||
name='ExternalIdentifier',
|
||||
fields=[
|
||||
('external_id', models.CharField(db_comment='Identifier from a source system', max_length=255, primary_key=True, serialize=False)),
|
||||
('internal_id', models.UUIDField(db_comment='Identifier in konova')),
|
||||
('created', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='user.useractionlogentry')),
|
||||
],
|
||||
),
|
||||
]
|
||||
@@ -5,5 +5,4 @@ Contact: michel.peltriaux@sgdnord.rlp.de
|
||||
Created on: 21.01.22
|
||||
|
||||
"""
|
||||
from .token import *
|
||||
from .external_identifier import *
|
||||
from .token import *
|
||||
@@ -1,33 +0,0 @@
|
||||
"""
|
||||
Author: Michel Peltriaux
|
||||
Created on: 10.05.26
|
||||
|
||||
"""
|
||||
from django.db import models
|
||||
|
||||
|
||||
class ExternalIdentifier(models.Model):
|
||||
""" Holds a lookup to match a given external identifier against the internal identifier in konova.
|
||||
|
||||
Relevant in cases of API transmitted entries, which are updates using external identifiers instead of
|
||||
the internal ones directly.
|
||||
|
||||
"""
|
||||
external_id = models.CharField(
|
||||
max_length=255,
|
||||
primary_key=True,
|
||||
db_comment="Identifier from a source system"
|
||||
)
|
||||
internal_id = models.UUIDField(
|
||||
db_comment="Identifier in konova"
|
||||
)
|
||||
created = models.ForeignKey(
|
||||
"user.UserActionLogEntry",
|
||||
on_delete=models.SET_NULL,
|
||||
null=True,
|
||||
blank=True,
|
||||
related_name='+'
|
||||
)
|
||||
|
||||
def __str__(self):
|
||||
return f"{self.external_id} -> {self.internal_id}"
|
||||
@@ -8,7 +8,6 @@
|
||||
"is_coherence_keeping": false,
|
||||
"is_pik": false,
|
||||
"intervention": "MUST_BE_SET_IN_TEST",
|
||||
"external_identifier": "LOREMIPSUM-123",
|
||||
"before_states": [
|
||||
],
|
||||
"after_states": [
|
||||
|
||||
@@ -4,7 +4,6 @@
|
||||
],
|
||||
"properties": {
|
||||
"title": "Test_ecoaccount",
|
||||
"external_identifier": "LOREMIPSUM-1234",
|
||||
"deductable_surface": 10000.0,
|
||||
"is_pik": false,
|
||||
"responsible": {
|
||||
|
||||
@@ -4,7 +4,6 @@
|
||||
],
|
||||
"properties": {
|
||||
"title": "Test_ema",
|
||||
"external_identifier": "LOREMIPSUM-1235",
|
||||
"is_pik": false,
|
||||
"responsible": {
|
||||
"conservation_office": null,
|
||||
|
||||
@@ -4,7 +4,6 @@
|
||||
],
|
||||
"properties": {
|
||||
"title": "Test_intervention",
|
||||
"external_identifier": "LOREMIPSUM-1236",
|
||||
"responsible": {
|
||||
"registration_office": null,
|
||||
"registration_file_number": null,
|
||||
|
||||
@@ -9,7 +9,6 @@ import json
|
||||
|
||||
from django.urls import reverse
|
||||
|
||||
from api.models import ExternalIdentifier
|
||||
from api.tests.v1.share.test_api_sharing import BaseAPIV1TestCase
|
||||
|
||||
|
||||
@@ -43,22 +42,7 @@ class APIV1CreateTestCase(BaseAPIV1TestCase):
|
||||
response = self._run_create_request(url, post_body)
|
||||
self.assertEqual(response.status_code, 200, msg=response.content)
|
||||
content = json.loads(response.content)
|
||||
_id = content.get("id", None)
|
||||
self.assertIsNotNone(_id, msg=response.content)
|
||||
return _id
|
||||
|
||||
def _test_external_identifier_created(self, internal_id, external_id):
|
||||
""" Tests whether an external identifier has been created
|
||||
|
||||
Args:
|
||||
internal_id ():
|
||||
external_id ():
|
||||
|
||||
Returns:
|
||||
|
||||
"""
|
||||
external_identifier = ExternalIdentifier.objects.get(internal_id=internal_id)
|
||||
self.assertEqual(external_identifier.external_id, external_id)
|
||||
self.assertIsNotNone(content.get("id", None), msg=response.content)
|
||||
|
||||
def test_create_intervention(self):
|
||||
""" Tests api creation
|
||||
@@ -70,8 +54,7 @@ class APIV1CreateTestCase(BaseAPIV1TestCase):
|
||||
json_file_path = "api/tests/v1/create/intervention_create_post_body.json"
|
||||
with open(json_file_path) as json_file:
|
||||
post_body = json.load(fp=json_file)
|
||||
internal_id = self._test_create_object(url, post_body)
|
||||
self._test_external_identifier_created(internal_id, post_body["properties"]["external_identifier"])
|
||||
self._test_create_object(url, post_body)
|
||||
|
||||
def test_create_compensation(self):
|
||||
""" Tests api creation
|
||||
@@ -94,8 +77,7 @@ class APIV1CreateTestCase(BaseAPIV1TestCase):
|
||||
|
||||
# Add the user to the shared users of the intervention and try again! Now everything should work as expected.
|
||||
self.intervention.users.add(self.superuser)
|
||||
internal_id = self._test_create_object(url, post_body)
|
||||
self._test_external_identifier_created(internal_id, post_body["properties"]["external_identifier"])
|
||||
self._test_create_object(url, post_body)
|
||||
|
||||
def test_create_eco_account(self):
|
||||
""" Tests api creation
|
||||
@@ -107,8 +89,7 @@ class APIV1CreateTestCase(BaseAPIV1TestCase):
|
||||
json_file_path = "api/tests/v1/create/ecoaccount_create_post_body.json"
|
||||
with open(json_file_path) as json_file:
|
||||
post_body = json.load(fp=json_file)
|
||||
internal_id = self._test_create_object(url, post_body)
|
||||
self._test_external_identifier_created(internal_id, post_body["properties"]["external_identifier"])
|
||||
self._test_create_object(url, post_body)
|
||||
|
||||
def test_create_ema(self):
|
||||
""" Tests api creation
|
||||
@@ -120,8 +101,7 @@ class APIV1CreateTestCase(BaseAPIV1TestCase):
|
||||
json_file_path = "api/tests/v1/create/ema_create_post_body.json"
|
||||
with open(json_file_path) as json_file:
|
||||
post_body = json.load(fp=json_file)
|
||||
internal_id = self._test_create_object(url, post_body)
|
||||
self._test_external_identifier_created(internal_id, post_body["properties"]["external_identifier"])
|
||||
self._test_create_object(url, post_body)
|
||||
|
||||
def test_create_deduction(self):
|
||||
""" Tests api creation
|
||||
|
||||
@@ -44,7 +44,6 @@
|
||||
],
|
||||
"properties": {
|
||||
"title": "TEST_compensation_CHANGED",
|
||||
"external_identifier": "LOREMIPSUM-123_CHANGED",
|
||||
"is_cef": true,
|
||||
"is_coherence_keeping": true,
|
||||
"is_pik": true,
|
||||
|
||||
@@ -44,7 +44,6 @@
|
||||
],
|
||||
"properties": {
|
||||
"title": "TEST_account_CHANGED",
|
||||
"external_identifier": "LOREMIPSUM-1234_CHANGED",
|
||||
"deductable_surface": "100000.0",
|
||||
"is_pik": true,
|
||||
"responsible": {
|
||||
|
||||
@@ -44,7 +44,6 @@
|
||||
],
|
||||
"properties": {
|
||||
"title": "TEST_EMA_CHANGED",
|
||||
"external_identifier": "LOREMIPSUM-1235_CHANGED",
|
||||
"responsible": {
|
||||
"conservation_office": null,
|
||||
"conservation_file_number": "TEST_CHANGED",
|
||||
|
||||
@@ -44,7 +44,6 @@
|
||||
],
|
||||
"properties": {
|
||||
"title": "Test_intervention_CHANGED",
|
||||
"external_identifier": "LOREMIPSUM-1236_CHANGED",
|
||||
"responsible": {
|
||||
"registration_office": null,
|
||||
"registration_file_number": "CHANGED",
|
||||
|
||||
@@ -10,11 +10,9 @@ from abc import abstractmethod
|
||||
|
||||
from django.contrib.gis import geos
|
||||
from django.contrib.gis.geos import GEOSGeometry
|
||||
from django.core.exceptions import ObjectDoesNotExist
|
||||
from django.core.paginator import Paginator
|
||||
from django.db.models import Q
|
||||
|
||||
from api.models import ExternalIdentifier
|
||||
from konova.models import Geometry
|
||||
from konova.utils.message_templates import DATA_UNSHARED
|
||||
|
||||
@@ -78,14 +76,6 @@ class AbstractModelAPISerializer:
|
||||
del self.lookup["id"]
|
||||
else:
|
||||
# Return certain object
|
||||
## But first check, whether this is an external identifier ...
|
||||
try:
|
||||
## If we can find this _id on our ExternalIdentifier model, we need to map it on the internal id
|
||||
ext_id = ExternalIdentifier.objects.get(external_id=_id)
|
||||
_id = ext_id.internal_id
|
||||
except ObjectDoesNotExist:
|
||||
# If we did not find it, we assume that this is already an internal id. (Or it does not exist at all)
|
||||
pass
|
||||
self.lookup["id"] = _id
|
||||
|
||||
self.shared_lookup = Q(
|
||||
@@ -171,14 +161,6 @@ class AbstractModelAPISerializer:
|
||||
Returns:
|
||||
|
||||
"""
|
||||
# First if there is an external identifier linked to an internal one, so we can continue with the internal
|
||||
try:
|
||||
ext_id = ExternalIdentifier.objects.get(external_id=id)
|
||||
id = ext_id.internal_id
|
||||
except ObjectDoesNotExist:
|
||||
# No external id found - let's hope the given id exists internally
|
||||
pass
|
||||
|
||||
obj = self.model.objects.get(
|
||||
id=id,
|
||||
deleted__isnull=True,
|
||||
|
||||
@@ -88,11 +88,6 @@ class CompensationAPISerializerV1(AbstractModelAPISerializerV1, AbstractCompensa
|
||||
# Nothing to do here
|
||||
return obj
|
||||
|
||||
# Transform a potential external identifier into an internal one
|
||||
intervention_ext_id = self._get_external_identifier(intervention_id)
|
||||
if intervention_ext_id:
|
||||
intervention_id = intervention_ext_id.internal_id
|
||||
|
||||
intervention = Intervention.objects.get(
|
||||
id=intervention_id,
|
||||
)
|
||||
@@ -119,10 +114,6 @@ class CompensationAPISerializerV1(AbstractModelAPISerializerV1, AbstractCompensa
|
||||
|
||||
# Fill in data to objects
|
||||
properties = json_model["properties"]
|
||||
|
||||
external_identifier = properties.get("external_identifier", None)
|
||||
self._check_external_identifier_on_entry_creation(external_identifier)
|
||||
|
||||
obj.identifier = obj.generate_new_identifier()
|
||||
obj.title = properties["title"]
|
||||
obj.is_cef = properties["is_cef"]
|
||||
@@ -138,7 +129,6 @@ class CompensationAPISerializerV1(AbstractModelAPISerializerV1, AbstractCompensa
|
||||
obj = self._set_compensation_states(obj, properties["after_states"], obj.after_states)
|
||||
obj = self._set_deadlines(obj, properties["deadlines"])
|
||||
|
||||
self._set_external_identifier(obj.id, properties.get("external_identifier", None), obj.created)
|
||||
obj.log.add(obj.created)
|
||||
|
||||
celery_update_parcels.delay(obj.geometry.id)
|
||||
@@ -180,7 +170,6 @@ class CompensationAPISerializerV1(AbstractModelAPISerializerV1, AbstractCompensa
|
||||
obj = self._set_compensation_states(obj, properties["after_states"], obj.after_states)
|
||||
obj = self._set_deadlines(obj, properties["deadlines"])
|
||||
|
||||
self._set_external_identifier(obj.id, properties.get("external_identifier", None), update_action)
|
||||
obj.log.add(update_action)
|
||||
|
||||
celery_update_parcels.delay(obj.geometry.id)
|
||||
|
||||
@@ -62,14 +62,6 @@ class DeductionAPISerializerV1(AbstractModelAPISerializerV1,
|
||||
if surface <= 0:
|
||||
raise ValueError("Surface must be > 0 m²")
|
||||
|
||||
# Check if external identifiers need to be mapped onto internal ones
|
||||
acc_ext_id = self._get_external_identifier(acc_id)
|
||||
intervention_ext_id = self._get_external_identifier(intervention_id)
|
||||
if acc_ext_id:
|
||||
acc_id = acc_ext_id.internal_id
|
||||
if intervention_ext_id:
|
||||
intervention_id = intervention_ext_id.internal_id
|
||||
|
||||
acc = EcoAccount.objects.get(
|
||||
id=acc_id,
|
||||
deleted__isnull=True,
|
||||
|
||||
@@ -121,9 +121,7 @@ class EcoAccountAPISerializerV1(AbstractModelAPISerializerV1,
|
||||
obj = self._initialize_objects(json_model, user)
|
||||
|
||||
# Fill in data to objects
|
||||
properties = json_model.get("properties", None)
|
||||
if not properties:
|
||||
raise AssertionError("No 'properties' found in payload!")
|
||||
properties = json_model["properties"]
|
||||
obj.identifier = obj.generate_new_identifier()
|
||||
obj.title = properties["title"]
|
||||
obj.is_pik = properties.get("is_pik", False)
|
||||
@@ -149,7 +147,6 @@ class EcoAccountAPISerializerV1(AbstractModelAPISerializerV1,
|
||||
obj = self._set_compensation_states(obj, properties["after_states"], obj.after_states)
|
||||
obj = self._set_deadlines(obj, properties["deadlines"])
|
||||
|
||||
self._set_external_identifier(obj.id, properties.get("external_identifier", None), obj.created)
|
||||
obj.log.add(obj.created)
|
||||
obj.users.add(user)
|
||||
|
||||
@@ -175,10 +172,6 @@ class EcoAccountAPISerializerV1(AbstractModelAPISerializerV1,
|
||||
|
||||
# Fill in data to objects
|
||||
properties = json_model["properties"]
|
||||
|
||||
external_identifier = properties.get("external_identifier", None)
|
||||
self._check_external_identifier_on_entry_creation(external_identifier)
|
||||
|
||||
obj.title = properties["title"]
|
||||
obj.is_pik = properties.get("is_pik", False)
|
||||
obj.deductable_surface = float(properties["deductable_surface"])
|
||||
@@ -199,7 +192,6 @@ class EcoAccountAPISerializerV1(AbstractModelAPISerializerV1,
|
||||
obj = self._set_compensation_states(obj, properties["after_states"], obj.after_states)
|
||||
obj = self._set_deadlines(obj, properties["deadlines"])
|
||||
|
||||
self._set_external_identifier(obj.id, external_identifier, update_action)
|
||||
obj.log.add(update_action)
|
||||
|
||||
celery_update_parcels.delay(obj.geometry.id)
|
||||
|
||||
@@ -104,10 +104,6 @@ class EmaAPISerializerV1(AbstractModelAPISerializerV1, AbstractCompensationAPISe
|
||||
|
||||
# Fill in data to objects
|
||||
properties = json_model["properties"]
|
||||
|
||||
external_identifier = properties.get("external_identifier", None)
|
||||
self._check_external_identifier_on_entry_creation(external_identifier)
|
||||
|
||||
obj.identifier = obj.generate_new_identifier()
|
||||
obj.title = properties["title"]
|
||||
obj.is_pik = properties.get("is_pik", False)
|
||||
@@ -123,7 +119,6 @@ class EmaAPISerializerV1(AbstractModelAPISerializerV1, AbstractCompensationAPISe
|
||||
obj = self._set_compensation_states(obj, properties["after_states"], obj.after_states)
|
||||
obj = self._set_deadlines(obj, properties["deadlines"])
|
||||
|
||||
self._set_external_identifier(obj.id, external_identifier, obj.created)
|
||||
obj.log.add(obj.created)
|
||||
obj.users.add(user)
|
||||
|
||||
@@ -166,7 +161,6 @@ class EmaAPISerializerV1(AbstractModelAPISerializerV1, AbstractCompensationAPISe
|
||||
obj = self._set_compensation_states(obj, properties["after_states"], obj.after_states)
|
||||
obj = self._set_deadlines(obj, properties["deadlines"])
|
||||
|
||||
self._set_external_identifier(obj.id, properties.get("external_identifier", None), update_action)
|
||||
obj.log.add(update_action)
|
||||
|
||||
celery_update_parcels.delay(obj.geometry.id)
|
||||
|
||||
@@ -150,14 +150,10 @@ class InterventionAPISerializerV1(AbstractModelAPISerializerV1,
|
||||
|
||||
# Fill in data to objects
|
||||
properties = json_model["properties"]
|
||||
|
||||
external_identifier = properties.get("external_identifier", None)
|
||||
self._check_external_identifier_on_entry_creation(external_identifier)
|
||||
|
||||
obj.identifier = obj.generate_new_identifier()
|
||||
obj.title = properties.get("title", None)
|
||||
self._set_responsibility(obj, properties.get("responsible", None))
|
||||
self._set_legal(obj, properties.get("legal", None))
|
||||
obj.title = properties["title"]
|
||||
self._set_responsibility(obj, properties["responsible"])
|
||||
self._set_legal(obj, properties["legal"])
|
||||
|
||||
obj.responsible.handler.save()
|
||||
obj.responsible.save()
|
||||
@@ -165,7 +161,6 @@ class InterventionAPISerializerV1(AbstractModelAPISerializerV1,
|
||||
obj.legal.save()
|
||||
obj.save()
|
||||
|
||||
self._set_external_identifier(obj.id, external_identifier, obj.created)
|
||||
obj.users.add(user)
|
||||
obj.log.add(obj.created)
|
||||
|
||||
@@ -191,7 +186,7 @@ class InterventionAPISerializerV1(AbstractModelAPISerializerV1,
|
||||
|
||||
# Fill in data to objects
|
||||
properties = json_model["properties"]
|
||||
obj.title = properties.get("title")
|
||||
obj.title = properties["title"]
|
||||
self._set_responsibility(obj, properties.get("responsible", None))
|
||||
self._set_legal(obj, properties.get("legal", None))
|
||||
self._set_payments(obj, properties.get("payments", None))
|
||||
@@ -205,7 +200,6 @@ class InterventionAPISerializerV1(AbstractModelAPISerializerV1,
|
||||
obj.save()
|
||||
|
||||
obj.mark_as_edited(user, edit_comment="API update")
|
||||
self._set_external_identifier(obj.id, properties.get("external_identifier", None), update_action)
|
||||
obj.send_data_to_egon()
|
||||
|
||||
celery_update_parcels.delay(obj.geometry.id)
|
||||
|
||||
@@ -12,7 +12,6 @@ from django.contrib.gis.geos import MultiPolygon
|
||||
from django.core.exceptions import ObjectDoesNotExist
|
||||
from django.db.models import QuerySet
|
||||
|
||||
from api.models import ExternalIdentifier
|
||||
from api.utils.serializer.serializer import AbstractModelAPISerializer
|
||||
from codelist.models import KonovaCode
|
||||
from codelist.settings import CODELIST_COMPENSATION_ACTION_ID, CODELIST_BIOTOPES_ID, CODELIST_PROCESS_TYPE_ID, \
|
||||
@@ -40,20 +39,12 @@ class AbstractModelAPISerializerV1(AbstractModelAPISerializer):
|
||||
else:
|
||||
geom = MultiPolygon().geojson
|
||||
geo_json = json.loads(geom)
|
||||
ext_ids = list(
|
||||
ExternalIdentifier.objects.filter(
|
||||
internal_id=entry.id
|
||||
).values_list(
|
||||
"external_id", flat=True
|
||||
)
|
||||
)
|
||||
self.properties_data = {
|
||||
"id": entry.id,
|
||||
"identifier": entry.identifier,
|
||||
"title": entry.title,
|
||||
"created_on": self._created_on_to_json(entry),
|
||||
"modified_on": self._modified_on_to_json(entry),
|
||||
"external_identifiers": ext_ids,
|
||||
}
|
||||
self._extend_properties_data(entry)
|
||||
geo_json["properties"] = self.properties_data
|
||||
@@ -146,63 +137,6 @@ class AbstractModelAPISerializerV1(AbstractModelAPISerializer):
|
||||
success = entry.deleted is not None
|
||||
return success
|
||||
|
||||
def _set_external_identifier(self, internal_identifier, external_identifier, log_entry):
|
||||
""" If an external identifier was provided in the payload, we set it
|
||||
in the database
|
||||
|
||||
Args:
|
||||
internal_identifier (BaseObject): The already processed konova object (EIV, KOM, ...)
|
||||
external_identifier (any): The external identifier taken from the payload
|
||||
|
||||
Returns:
|
||||
|
||||
"""
|
||||
if external_identifier is None:
|
||||
return None
|
||||
|
||||
ext_id_obj = ExternalIdentifier.objects.get_or_create(
|
||||
internal_id=internal_identifier,
|
||||
external_id=external_identifier
|
||||
)[0]
|
||||
if not ext_id_obj.created:
|
||||
ext_id_obj.created = log_entry
|
||||
ext_id_obj.save()
|
||||
|
||||
return ext_id_obj
|
||||
|
||||
def _get_external_identifier(self, external_identifier):
|
||||
""" Checks whether a linkage based on an external identifier already exists and returns it if so.
|
||||
|
||||
Args:
|
||||
external_identifier (any): The external identifier according to payload
|
||||
|
||||
Returns:
|
||||
ExternalIdentifier | None
|
||||
"""
|
||||
if external_identifier:
|
||||
try:
|
||||
obj = ExternalIdentifier.objects.get(external_id=external_identifier)
|
||||
return obj
|
||||
except ObjectDoesNotExist:
|
||||
pass
|
||||
return None
|
||||
|
||||
def _check_external_identifier_on_entry_creation(self, external_identifier):
|
||||
""" Special check for POST processing:
|
||||
Checks whether an external identifier already exists on the database. This hints that
|
||||
the entry already has been created in the past. Instead of POST, the PUT method shall be used
|
||||
to avoid creating duplicates.
|
||||
|
||||
Args:
|
||||
external_identifier (any): The external identifier according to payload
|
||||
|
||||
Returns:
|
||||
|
||||
"""
|
||||
persisted_external_identifier = self._get_external_identifier(external_identifier)
|
||||
if persisted_external_identifier:
|
||||
raise AssertionError(f"{external_identifier} has already been initially created! Use PUT for updates!")
|
||||
|
||||
|
||||
class DeductableAPISerializerV1Mixin:
|
||||
class Meta:
|
||||
|
||||
@@ -124,7 +124,7 @@ class CompensationTable(BaseTable, TableRenderMixin, TableOrderMixin):
|
||||
html += self.render_previously_checked_star(
|
||||
tooltip=tooltip,
|
||||
)
|
||||
return format_html(html, None)
|
||||
return format_html(html)
|
||||
|
||||
def render_r(self, value, record: Compensation):
|
||||
""" Renders the registered column for a compensation
|
||||
@@ -146,5 +146,5 @@ class CompensationTable(BaseTable, TableRenderMixin, TableOrderMixin):
|
||||
tooltip=tooltip,
|
||||
icn_filled=recorded,
|
||||
)
|
||||
return format_html(html, None)
|
||||
return format_html(html)
|
||||
|
||||
|
||||
@@ -95,7 +95,7 @@ class EcoAccountTable(BaseTable, TableRenderMixin, TableOrderMixin):
|
||||
txt=value,
|
||||
new_tab=False,
|
||||
)
|
||||
return format_html(html, None)
|
||||
return format_html(html)
|
||||
|
||||
def render_av(self, value, record: EcoAccount):
|
||||
""" Renders the available column for an eco account
|
||||
@@ -113,7 +113,7 @@ class EcoAccountTable(BaseTable, TableRenderMixin, TableOrderMixin):
|
||||
value_relative = 0
|
||||
html = render_to_string("konova/widgets/progressbar.html", {"value": value_relative})
|
||||
html += f"{number_format(record.deductable_rest, decimal_pos=2)} m²"
|
||||
return format_html(html, None)
|
||||
return format_html(html)
|
||||
|
||||
def render_r(self, value, record: EcoAccount):
|
||||
""" Renders the recorded column for an eco account
|
||||
@@ -135,4 +135,4 @@ class EcoAccountTable(BaseTable, TableRenderMixin, TableOrderMixin):
|
||||
tooltip=tooltip,
|
||||
icn_filled=checked,
|
||||
)
|
||||
return format_html(html, None)
|
||||
return format_html(html)
|
||||
|
||||
21
docker-compose.yml
Normal file
21
docker-compose.yml
Normal file
@@ -0,0 +1,21 @@
|
||||
services:
  konova:
    # Link to containers that run outside this compose project
    external_links:
      - postgis:db
      - arnova-nginx-server:arnova
    build: .
    image: "ksp/konova:x.y"
    container_name: "konova-docker"
    command: ./docker-entrypoint.sh
    restart: always
    volumes:
      # Persist user uploads on the host
      - /data/apps/konova/uploaded_files:/konova_uploaded_files
    ports:
      # Host port 1337 -> container port 80 (nginx inside the image)
      - "1337:80"

# Instead of an own, new network, we need to connect to the existing one, which is provided by the postgis container
# NOTE: THIS NETWORK MUST EXIST
networks:
  default:
    name: postgis_nat_it_backend
    external: true
|
||||
27
docker-entrypoint.sh
Executable file
27
docker-entrypoint.sh
Executable file
@@ -0,0 +1,27 @@
|
||||
#!/bin/bash

set -e  # Abort the script on any error
set -o pipefail  # Do not ignore failures inside pipelines

# Start redis in the background
redis-server --daemonize yes

# Start the celery worker in the background
celery -A konova worker --loglevel=info &

# Start nginx in the background.
# NOTE(review): "daemon off;" keeps nginx itself in the foreground, which is why
# the trailing "&" is needed here — plain "nginx" would daemonize on its own.
nginx -g "daemon off;" &

# Number of gunicorn workers (default: (2*CPUs)+1)
WORKERS=${GUNICORN_WORKERS:-$((2 * $(nproc) + 1))}

# Make sure the log files exist
mkdir -p /var/log/gunicorn
touch /var/log/gunicorn/access.log /var/log/gunicorn/error.log

# Start gunicorn as the main process; "exec" replaces this shell so gunicorn
# becomes PID 1 and receives signals from "docker stop"
exec gunicorn --workers="$WORKERS" konova.wsgi:application \
    --bind=0.0.0.0:8000 \
    --access-logfile /var/log/gunicorn/access.log \
    --error-logfile /var/log/gunicorn/error.log \
    --access-logformat '%({x-real-ip}i)s via %(h)s %(l)s %(u)s %(t)s "%(r)s" %(s)s %(b)s "%(f)s" "%(a)s"'
|
||||
@@ -88,7 +88,7 @@ class EmaTable(BaseTable, TableRenderMixin, TableOrderMixin):
|
||||
txt=value,
|
||||
new_tab=False,
|
||||
)
|
||||
return format_html(html, None)
|
||||
return format_html(html)
|
||||
|
||||
def render_r(self, value, record: Ema):
|
||||
""" Renders the registered column for a EMA
|
||||
@@ -110,4 +110,4 @@ class EmaTable(BaseTable, TableRenderMixin, TableOrderMixin):
|
||||
tooltip=tooltip,
|
||||
icn_filled=recorded,
|
||||
)
|
||||
return format_html(html, None)
|
||||
return format_html(html)
|
||||
|
||||
@@ -127,7 +127,7 @@ class InterventionTable(BaseTable, TableRenderMixin, TableOrderMixin):
|
||||
html += self.render_previously_checked_star(
|
||||
tooltip=tooltip,
|
||||
)
|
||||
return format_html(html, None)
|
||||
return format_html(html)
|
||||
|
||||
def render_r(self, value, record: Intervention):
|
||||
""" Renders the recorded column for an intervention
|
||||
@@ -149,5 +149,5 @@ class InterventionTable(BaseTable, TableRenderMixin, TableOrderMixin):
|
||||
tooltip=tooltip,
|
||||
icn_filled=checked,
|
||||
)
|
||||
return format_html(html, None)
|
||||
return format_html(html)
|
||||
|
||||
|
||||
@@ -191,11 +191,10 @@ STATICFILES_DIRS = [
|
||||
]
|
||||
|
||||
# EMAIL (see https://docs.djangoproject.com/en/dev/topics/email/)
|
||||
|
||||
# CHANGE_ME !!! ONLY FOR DEVELOPMENT !!!
|
||||
if DEBUG:
|
||||
# ONLY FOR DEVELOPMENT NEEDED
|
||||
EMAIL_BACKEND = 'django.core.mail.backends.filebased.EmailBackend'
|
||||
EMAIL_FILE_PATH = '/tmp/app-messages' # change this to a proper location
|
||||
EMAIL_FILE_PATH = '/tmp/app-messages'
|
||||
|
||||
DEFAULT_FROM_EMAIL = env("DEFAULT_FROM_EMAIL") # The default email address for the 'from' element
|
||||
SERVER_EMAIL = DEFAULT_FROM_EMAIL # The default email sender address, which is used by Django to send errors via mail
|
||||
|
||||
@@ -30,17 +30,15 @@ class QrCode:
|
||||
Returns:
|
||||
qrcode_svg (str): The qr code as svg
|
||||
"""
|
||||
qr = qrcode.QRCode(
|
||||
image_factory=qrcode.image.svg.SvgPathImage,
|
||||
img_factory = svg.SvgImage
|
||||
qrcode_img = qrcode.make(
|
||||
content,
|
||||
image_factory=img_factory,
|
||||
box_size=size
|
||||
)
|
||||
qr.add_data(content)
|
||||
qr.make(
|
||||
fit=True
|
||||
)
|
||||
|
||||
img = qr.make_image()
|
||||
return img.to_string(encoding="unicode")
|
||||
stream = BytesIO()
|
||||
qrcode_img.save(stream)
|
||||
return stream.getvalue().decode()
|
||||
|
||||
def get_img(self):
|
||||
return self._img
|
||||
|
||||
@@ -178,9 +178,7 @@ class TableRenderMixin:
|
||||
if len(value) > max_length:
|
||||
value = f"{value[:max_length]}..."
|
||||
value = format_html(
|
||||
'<div title="{}">{}</div>',
|
||||
value_orig,
|
||||
value
|
||||
f'<div title="{value_orig}">{value}</div>'
|
||||
)
|
||||
return value
|
||||
|
||||
@@ -224,7 +222,7 @@ class TableRenderMixin:
|
||||
tooltip=_("Full access granted") if is_entry_shared else _("Access not granted"),
|
||||
icn_class="fas fa-edit rlp-r-inv" if is_entry_shared else "far fa-edit",
|
||||
)
|
||||
return format_html(html, None)
|
||||
return format_html(html)
|
||||
|
||||
|
||||
class TableOrderMixin:
|
||||
|
||||
25
nginx.conf
Normal file
25
nginx.conf
Normal file
@@ -0,0 +1,25 @@
|
||||
server {
|
||||
listen 80;
|
||||
client_max_body_size 25M;
|
||||
|
||||
location / {
|
||||
proxy_pass http://127.0.0.1:8000;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
proxy_set_header X-Real-IP $remote_addr;
|
||||
proxy_set_header Host $host;
|
||||
proxy_redirect off;
|
||||
proxy_cache_bypass $http_upgrade;
|
||||
}
|
||||
|
||||
location /static/ {
|
||||
alias /konova/static/;
|
||||
access_log /var/log/nginx/access.log;
|
||||
autoindex off;
|
||||
types {
|
||||
text/css css;
|
||||
application/javascript js;
|
||||
}
|
||||
}
|
||||
|
||||
error_log /var/log/nginx/error.log;
|
||||
}
|
||||
102
requirements.txt
102
requirements.txt
@@ -1,65 +1,63 @@
|
||||
amqp==5.3.1
|
||||
asgiref==3.11.1
|
||||
asgiref==3.8.1
|
||||
async-timeout==5.0.1
|
||||
beautifulsoup4==4.14.3
|
||||
billiard==4.2.4
|
||||
beautifulsoup4==4.13.0b2
|
||||
billiard==4.2.1
|
||||
cached-property==2.0.1
|
||||
celery==5.6.3
|
||||
certifi==2026.4.22
|
||||
cffi==2.0.0
|
||||
chardet==7.4.3
|
||||
charset-normalizer==3.4.7
|
||||
click==8.3.3
|
||||
celery==5.4.0
|
||||
certifi==2024.12.14
|
||||
cffi==1.17.1
|
||||
chardet==5.2.0
|
||||
charset-normalizer==3.4.0
|
||||
click==8.1.8
|
||||
click-didyoumean==0.3.1
|
||||
click-plugins==1.1.1.2
|
||||
click-plugins==1.1.1
|
||||
click-repl==0.3.0
|
||||
coverage==7.13.5
|
||||
cryptography==48.0.0
|
||||
Deprecated==1.3.1
|
||||
Django==6.0.5
|
||||
django-autocomplete-light==4.0.0
|
||||
coverage==7.6.9
|
||||
cryptography==44.0.0
|
||||
Deprecated==1.2.15
|
||||
Django==5.1.4
|
||||
django-autocomplete-light==3.11.0
|
||||
django-bootstrap-modal-forms==3.0.5
|
||||
django-bootstrap4==26.1
|
||||
django-environ==0.13.0
|
||||
django-filter==25.2
|
||||
django-bootstrap4==24.4
|
||||
django-environ==0.11.2
|
||||
django-filter==24.3
|
||||
django-fontawesome-5==1.0.18
|
||||
django-oauth-toolkit==3.2.0
|
||||
django-tables2==3.0.0
|
||||
django-oauth-toolkit==3.0.1
|
||||
django-tables2==2.7.1
|
||||
et_xmlfile==2.0.0
|
||||
gunicorn==26.0.0
|
||||
idna==3.13
|
||||
importlib_metadata==9.0.0
|
||||
itsdangerous==2.2.0
|
||||
jwcrypto==1.5.7
|
||||
kombu==5.6.2
|
||||
oauthlib==3.3.1
|
||||
gunicorn==23.0.0
|
||||
idna==3.10
|
||||
importlib_metadata==8.5.0
|
||||
jwcrypto==1.5.6
|
||||
kombu==5.4.0rc1
|
||||
oauthlib==3.2.2
|
||||
openpyxl==3.2.0b1
|
||||
packaging==26.2
|
||||
pika==1.4.0
|
||||
pillow==12.2.0
|
||||
prompt_toolkit==3.0.52
|
||||
psycopg==3.3.4
|
||||
psycopg-binary==3.3.4
|
||||
pycparser==3.0
|
||||
pyparsing==3.3.2
|
||||
packaging==24.2
|
||||
pika==1.3.2
|
||||
pillow==11.0.0
|
||||
prompt_toolkit==3.0.48
|
||||
psycopg==3.2.3
|
||||
psycopg-binary==3.2.3
|
||||
pycparser==2.22
|
||||
pyparsing==3.2.0
|
||||
pypng==0.20220715.0
|
||||
pyproj==3.7.2
|
||||
pyproj==3.7.0
|
||||
python-dateutil==2.9.0.post0
|
||||
pytz==2026.2
|
||||
PyYAML==6.0.3
|
||||
qrcode==8.2
|
||||
redis==7.4.0
|
||||
requests==2.33.1
|
||||
six==1.17.0
|
||||
soupsieve==2.8.3
|
||||
sqlparse==0.5.5
|
||||
typing_extensions==4.15.0
|
||||
tzdata==2026.2
|
||||
tzlocal==5.3.1
|
||||
urllib3==2.7.0
|
||||
pytz==2024.2
|
||||
PyYAML==6.0.2
|
||||
qrcode==7.3.1
|
||||
redis==5.1.0b6
|
||||
requests<2.32.0
|
||||
six==1.16.0
|
||||
soupsieve==2.5
|
||||
sqlparse==0.5.1
|
||||
typing_extensions==4.12.2
|
||||
tzdata==2024.2
|
||||
urllib3==2.3.0
|
||||
vine==5.1.0
|
||||
wcwidth==0.7.0
|
||||
wcwidth==0.2.13
|
||||
webservices==0.7
|
||||
wrapt==2.1.2
|
||||
xmltodict==1.0.4
|
||||
zipp==3.23.1
|
||||
wrapt==1.16.0
|
||||
xmltodict==0.14.2
|
||||
zipp==3.21.0
|
||||
|
||||
@@ -70,7 +70,7 @@
|
||||
</div>
|
||||
<div class="dropdown-menu dropdown-menu-right">
|
||||
{% for rpp_option in table.results_per_page_choices %}
|
||||
<a class="dropdown-item {% if table.results_per_page_chosen == rpp_option %}selected{% endif %}" href="{% querystring %}&{{ table.results_per_page_parameter}}={{rpp_option }}">
|
||||
<a class="dropdown-item {% if table.results_per_page_chosen == rpp_option %}selected{% endif %}" href="{% querystring table.results_per_page_parameter=rpp_option %}">
|
||||
{{ rpp_option }}
|
||||
</a>
|
||||
{% endfor %}
|
||||
|
||||
Reference in New Issue
Block a user