Compare commits
No commits in common. "5858a5fdf99794b9fc7895176b0c864becd2f612" and "master" have entirely different histories.
5858a5fdf9
...
master
48
.env.sample
Normal file
48
.env.sample
Normal file
@ -0,0 +1,48 @@
|
||||
# General
|
||||
SECRET_KEY=CHANGE_ME
|
||||
DEBUG=True
|
||||
ALLOWED_HOSTS=127.0.0.1,localhost,example.org
|
||||
BASE_URL=http://localhost:8002
|
||||
ADMINS=Admin1:mail@example.org,Admin2:mail2@example.org
|
||||
|
||||
# Database
|
||||
DB_USER=postgres
|
||||
DB_PASSWORD=
|
||||
DB_NAME=konova
|
||||
DB_HOST=127.0.0.1
|
||||
DB_PORT=5432
|
||||
|
||||
# Redis (for celery)
|
||||
REDIS_HOST=CHANGE_ME
|
||||
REDIS_PORT=CHANGE_ME
|
||||
|
||||
# E-Mail
|
||||
SMTP_HOST=localhost
|
||||
SMTP_PORT=25
|
||||
REPLY_TO_ADDR=ksp-servicestelle@sgdnord.rlp.de
|
||||
DEFAULT_FROM_EMAIL=service@ksp.de
|
||||
|
||||
# Proxy
|
||||
PROXY=CHANGE_ME
|
||||
MAP_PROXY_HOST_WHITELIST=CHANGE_ME_1,CHANGE_ME_2
|
||||
GEOPORTAL_RLP_USER=CHANGE_ME
|
||||
GEOPORTAL_RLP_PASSWORD=CHANGE_ME
|
||||
|
||||
# Schneider
|
||||
SCHNEIDER_BASE_URL=https://schneider.naturschutz.rlp.de
|
||||
SCHNEIDER_AUTH_TOKEN=CHANGE_ME
|
||||
SCHNEIDER_AUTH_HEADER=auth
|
||||
|
||||
# SSO
|
||||
SSO_SERVER_BASE_URL=https://login.naturschutz.rlp.de
|
||||
OAUTH_CODE_VERIFIER=CHANGE_ME
|
||||
OAUTH_CLIENT_ID=CHANGE_ME
|
||||
OAUTH_CLIENT_SECRET=CHANGE_ME
|
||||
PROPAGATION_SECRET=CHANGE_ME
|
||||
|
||||
# RabbitMQ
|
||||
## For connections to EGON
|
||||
EGON_RABBITMQ_HOST=CHANGE_ME
|
||||
EGON_RABBITMQ_PORT=CHANGE_ME
|
||||
EGON_RABBITMQ_USER=CHANGE_ME
|
||||
EGON_RABBITMQ_PW=CHANGE_ME
|
4
.gitignore
vendored
4
.gitignore
vendored
@ -1,4 +1,6 @@
|
||||
# Project exclude paths
|
||||
/venv/
|
||||
/.idea/
|
||||
*/migrations/
|
||||
/.coverage
|
||||
/htmlcov/
|
||||
/.env
|
||||
|
109
LICENSE_de.md
Normal file
109
LICENSE_de.md
Normal file
@ -0,0 +1,109 @@
|
||||
# OPEN-SOURCE-LIZENZ FÜR DIE EUROPÄISCHE UNION v. 1.2
|
||||
EUPL © Europäische Union 2007, 2016
|
||||
|
||||
Diese Open-Source-Lizenz für die Europäische Union („EUPL“) gilt für Werke (im Sinne der nachfolgenden Begriffsbestimmung), die unter EUPL-Bedingungen zur Verfügung gestellt werden. Das Werk darf nur in der durch diese Lizenz gestatteten Form genutzt werden (insoweit eine solche Nutzung dem Urheber vorbehalten ist).
|
||||
Das Werk wird unter den Bedingungen dieser Lizenz zur Verfügung gestellt, wenn der Lizenzgeber (im Sinne der nachfolgenden Begriffsbestimmung) den folgenden Hinweis unmittelbar hinter dem Urheberrechtshinweis dieses Werks anbringt:
|
||||
Lizenziert unter der EUPL
|
||||
oder in einer anderen Form zum Ausdruck bringt, dass er es unter der EUPL lizenzieren möchte.
|
||||
|
||||
## 1. Begriffsbestimmungen
|
||||
|
||||
Für diese Lizenz gelten folgende Begriffsbestimmungen:
|
||||
- „Lizenz“:diese Lizenz.
|
||||
- „Originalwerk“:das Werk oder die Software, die vom Lizenzgeber unter dieser Lizenz verbreitet oder zugänglich gemacht wird, und zwar als Quellcode und gegebenenfalls auch als ausführbarer Code.
|
||||
- „Bearbeitungen“:die Werke oder Software, die der Lizenznehmer auf der Grundlage des Originalwerks oder seiner Bearbeitungen schaffen kann. In dieser Lizenz wird nicht festgelegt, wie umfangreich die Änderung oder wie stark die Abhängigkeit vom Originalwerk für eine Einstufung als Bearbeitung sein muss; dies bestimmt sich nach dem Urheberrecht, das in dem unter Artikel 15 aufgeführten Land anwendbar ist.
|
||||
- „Werk“:das Originalwerk oder seine Bearbeitungen.
|
||||
- „Quellcode“:diejenige Form des Werkes, die zur Auffassung durch den Menschen bestimmt ist und die am besten geeignet ist, um vom Menschen verstanden und verändert zu werden.
|
||||
- „Ausführbarer Code“:die - üblicherweise - kompilierte Form des Werks, die von einem Computer als Programm ausgeführt werden soll.
|
||||
- „Lizenzgeber“:die natürliche oder juristische Person, die das Werk unter der Lizenz verbreitet oder zugänglich macht.
|
||||
- „Bearbeiter“:jede natürliche oder juristische Person, die das Werk unter der Lizenz verändert oder auf andere Weise zur Schaffung einer Bearbeitung beiträgt.
|
||||
- „Lizenznehmer“ („Sie“):jede natürliche oder juristische Person, die das Werk unter den Lizenzbedingungen nutzt.
|
||||
- „Verbreitung“ oder „Zugänglichmachung“:alle Formen von Verkauf, Überlassung, Verleih, Vermietung, Verbreitung, Weitergabe, Übermittlung oder anderweitiger Online- oder Offline-Bereitstellung von Vervielfältigungen des Werks oder Zugänglichmachung seiner wesentlichen Funktionen für dritte natürliche oder juristische Personen.
|
||||
|
||||
## 2. Umfang der Lizenzrechte
|
||||
|
||||
Der Lizenzgeber erteilt Ihnen hiermit für die Gültigkeitsdauer der am Originalwerk bestehenden Urheberrechte eine weltweite, unentgeltliche, nicht ausschließliche, unterlizenzierbare Lizenz, die Sie berechtigt:
|
||||
- das Werk uneingeschränkt zu nutzen,
|
||||
|
||||
- das Werk zu vervielfältigen,
|
||||
|
||||
- das Werk zu verändern und Bearbeitungen auf der Grundlage des Werks zu schaffen,
|
||||
- das Werk öffentlich zugänglich zu machen, was das Recht einschließt, das Werk oder Vervielfältigungsstücke davon öffentlich bereitzustellen oder wahrnehmbar zu machen oder das Werk, soweit möglich, öffentlich aufzuführen,
|
||||
- das Werk oder Vervielfältigungen davon zu verbreiten,
|
||||
- das Werk oder Vervielfältigungen davon zu vermieten oder zu verleihen,
|
||||
- das Werk oder Vervielfältigungen davon weiter zu lizenzieren.
|
||||
Für die Wahrnehmung dieser Rechte können beliebige, derzeit bekannte oder künftige Medien, Träger und Formate verwendet werden, soweit das geltende Recht dem nicht entgegensteht. Für die Länder, in denen Urheberpersönlichkeitsrechte an dem Werk bestehen, verzichtet der Lizenzgeber im gesetzlich zulässigen Umfang auf seine Urheberpersönlichkeitsrechte, um die Lizenzierung der oben aufgeführten Verwertungsrechte wirksam durchführen zu können. Der Lizenzgeber erteilt dem Lizenznehmer ein nicht ausschließliches, unentgeltliches Nutzungsrecht an seinen Patenten, sofern dies zur Ausübung der durch die Lizenz erteilten Nutzungsrechte am Werk notwendig ist.
|
||||
|
||||
## 3. Zugänglichmachung des Quellcodes
|
||||
|
||||
Der Lizenzgeber kann das Werk entweder als Quellcode oder als ausführbaren Code zur Verfügung stellen. Stellt er es als ausführbaren Code zur Verfügung, so stellt er darüber hinaus eine maschinenlesbare Kopie des Quellcodes für jedes von ihm verbreitete Vervielfältigungsstück des Werks zur Verfügung, oder er verweist in einem Vermerk im Anschluss an den dem Werk beigefügten Urheberrechtshinweis auf einen Speicherort, an dem problemlos und unentgeltlich auf den Quellcode zugegriffen werden kann, solange der Lizenzgeber das Werk verbreitet oder zugänglich macht.
|
||||
|
||||
## 4. Einschränkungen des Urheberrechts
|
||||
Es ist nicht Zweck dieser Lizenz, Ausnahmen oder Schranken der ausschließlichen Rechte des Urhebers am Werk, die dem Lizenznehmer zugutekommen, einzuschränken. Auch die Erschöpfung dieser Rechte bleibt von dieser Lizenz unberührt.
|
||||
|
||||
## 5. Pflichten des Lizenznehmers
|
||||
Die Einräumung der oben genannten Rechte ist an mehrere Beschränkungen und Pflichten für den Lizenznehmer gebunden:
|
||||
|
||||
Urheberrechtshinweis, Lizenztext, Nennung des Bearbeiters: Der Lizenznehmer muss alle Urheberrechts-, Patent- oder Markenrechtshinweise und alle Hinweise auf die Lizenz und den Haftungsausschluss unverändert lassen. Jedem von ihm verbreiteten oder zugänglich gemachten Vervielfältigungsstück des Werks muss der Lizenznehmer diese Hinweise sowie diese Lizenz beifügen. Der Lizenznehmer muss auf jedem abgeleiteten Werk deutlich darauf hinweisen, dass das Werk geändert wurde, und das Datum der Bearbeitung angeben.
|
||||
|
||||
„Copyleft“-Klausel: Der Lizenznehmer darf Vervielfältigungen des Originalwerks oder Bearbeitungen nur unter den Bedingungen dieser EUPL oder einer neueren Version dieser Lizenz verbreiten oder zugänglich machen, außer wenn das Originalwerk ausdrücklich nur unter dieser Lizenzversion - z. B. mit der Angabe „Nur EUPL V. 1.2“ - verbreitet werden darf. Der Lizenznehmer (der zum Lizenzgeber wird) darf für das Werk oder die Bearbeitung keine zusätzlichen Bedingungen anbieten oder vorschreiben, die die Bedingungen dieser Lizenz verändern oder einschränken.
|
||||
|
||||
Kompatibilitäts-Klausel: Wenn der Lizenznehmer Bearbeitungen, die auf dem Werk und einem anderen Werk, das unter einer kompatiblen Lizenz lizenziert wurde, basieren, oder die Kopien dieser Bearbeitungen verbreitet oder zugänglich macht, kann dies unter den Bedingungen dieser kompatiblen Lizenz erfolgen. Unter „kompatibler Lizenz“ ist eine im Anhang dieser Lizenz angeführte Lizenz zu verstehen. Sollten die Verpflichtungen des Lizenznehmers aus der kompatiblen Lizenz mit denjenigen aus der vorliegenden Lizenz (EUPL) in Konflikt stehen, werden die Verpflichtungen aus der kompatiblen Lizenz Vorrang haben.
|
||||
|
||||
Bereitstellung des Quellcodes: Wenn der Lizenznehmer Vervielfältigungsstücke des Werks verbreitet oder zugänglich macht, muss er eine maschinenlesbare Fassung des Quellcodes mitliefern oder einen Speicherort angeben, über den problemlos und unentgeltlich so lange auf diesen Quellcode zugegriffen werden kann, wie der Lizenznehmer das Werk verbreitet oder zugänglich macht.
|
||||
|
||||
Rechtsschutz: Diese Lizenz erlaubt nicht die Benutzung von Kennzeichen, Marken oder geschützten Namensrechten des Lizenzgebers, soweit dies nicht für die angemessene und übliche Beschreibung der Herkunft des Werks und der inhaltlichen Wiedergabe des Urheberrechtshinweises erforderlich ist.
|
||||
|
||||
## 6. Urheber und Bearbeiter
|
||||
Der ursprüngliche Lizenzgeber gewährleistet, dass er das Urheberrecht am Originalwerk innehat oder dieses an ihn lizenziert wurde und dass er befugt ist, diese Lizenz zu erteilen.
|
||||
Jeder Bearbeiter gewährleistet, dass er das Urheberrecht an den von ihm vorgenommenen Änderungen des Werks besitzt und befugt ist, diese Lizenz zu erteilen.
|
||||
Jedes Mal, wenn Sie die Lizenz annehmen, erteilen Ihnen der ursprüngliche Lizenzgeber und alle folgenden Bearbeiter eine Befugnis zur Nutzung ihrer Beiträge zum Werk unter den Bedingungen dieser Lizenz.
|
||||
|
||||
## 7. Gewährleistungsausschluss
|
||||
Die Arbeit an diesem Werk wird laufend fortgeführt; es wird durch unzählige Bearbeiter ständig verbessert. Das Werk ist nicht vollendet und kann daher Fehler („bugs“) enthalten, die dieser Art der Entwicklung inhärent sind.
|
||||
Aus den genannten Gründen wird das Werk unter dieser Lizenz „so, wie es ist“ ohne jegliche Gewährleistung zur Verfügung gestellt. Dies gilt unter anderem - aber nicht ausschließlich - für Marktreife, Verwendbarkeit für einen bestimmten Zweck, Mängelfreiheit, Richtigkeit sowie Nichtverletzung von anderen Immaterialgüterrechten als dem Urheberrecht (vgl. dazu Artikel 6 dieser Lizenz).
|
||||
Dieser Gewährleistungsausschluss ist wesentlicher Bestandteil der Lizenz und Bedingung für die Einräumung von Rechten an dem Werk.
|
||||
|
||||
## 8. Haftungsausschluss/Haftungsbeschränkung
|
||||
Außer in Fällen von Vorsatz oder der Verursachung von Personenschäden haftet der Lizenzgeber nicht für direkte oder indirekte, materielle oder immaterielle Schäden irgendwelcher Art, die aus der Lizenz oder der Benutzung des Werks folgen; dies gilt unter anderem, aber nicht ausschließlich, für Firmenwertverluste, Produktionsausfall, Computerausfall oder Computerfehler, Datenverlust oder wirtschaftliche Schäden, und zwar auch dann, wenn der Lizenzgeber auf die Möglichkeit solcher Schäden hingewiesen wurde. Unabhängig davon haftet der Lizenzgeber im Rahmen der gesetzlichen Produkthaftung, soweit die entsprechenden Regelungen auf das Werk anwendbar sind.
|
||||
|
||||
## 9. Zusatzvereinbarungen
|
||||
Wenn Sie das Werk verbreiten, können Sie Zusatzvereinbarungen schließen, in denen Verpflichtungen oder Dienstleistungen festgelegt werden, die mit dieser Lizenz vereinbar sind. Sie dürfen Verpflichtungen indessen nur in Ihrem eigenen Namen und auf Ihre eigene Verantwortung eingehen, nicht jedoch im Namen des ursprünglichen Lizenzgebers oder eines anderen Bearbeiters, und nur, wenn Sie sich gegenüber allen Bearbeitern verpflichten, sie zu entschädigen, zu verteidigen und von der Haftung freizustellen, falls aufgrund der von Ihnen eingegangenen Gewährleistungsverpflichtung oder Haftungsübernahme Forderungen gegen sie geltend gemacht werden oder eine Haftungsverpflichtung entsteht.
|
||||
|
||||
## 10. Annahme der Lizenz
|
||||
Sie können den Bestimmungen dieser Lizenz zustimmen, indem Sie das Symbol „Lizenz annehmen“ unter dem Fenster mit dem Lizenztext anklicken oder indem Sie Ihre Zustimmung auf vergleichbare Weise in einer nach anwendbarem Recht zulässigen Form geben. Das Anklicken des Symbols gilt als Anzeichen Ihrer eindeutigen und unwiderruflichen Annahme der Lizenz und der darin enthaltenen Klauseln und Bedingungen. In gleicher Weise gilt als Zeichen der eindeutigen und unwiderruflichen Zustimmung die Ausübung eines Rechtes, das in Artikel 2 dieser Lizenz angeführt ist, wie das Erstellen einer Bearbeitung oder die Verbreitung oder Zugänglichmachung des Werks oder dessen Vervielfältigungen.
|
||||
|
||||
## 11. Informationspflichten
|
||||
Wenn Sie das Werk verbreiten oder zugänglich machen (beispielsweise, indem Sie es zum Herunterladen von einer Website anbieten), müssen Sie über den Vertriebskanal oder das benutzte Verbreitungsmedium der Öffentlichkeit zumindest jene Informationen bereitstellen, die nach dem anwendbaren Recht bezüglich der Lizenzgeber, der Lizenz und ihrer Zugänglichkeit, des Abschlusses des Lizenzvertrags sowie darüber, wie die Lizenz durch den Lizenznehmer gespeichert und vervielfältigt werden kann, erforderlich sind.
|
||||
|
||||
## 12. Beendigung der Lizenz
|
||||
Die Lizenz und die damit eingeräumten Rechte erlöschen automatisch, wenn der Lizenznehmer gegen die Lizenzbedingungen verstößt. Ein solches Erlöschen der Lizenz führt nicht zum Erlöschen der Lizenzen von Personen, denen das Werk vom Lizenznehmer unter dieser Lizenz zur Verfügung gestellt worden ist, solange diese Personen die Lizenzbedingungen erfüllen.
|
||||
|
||||
## 13. Sonstiges
|
||||
Unbeschadet des Artikels 9 stellt die Lizenz die vollständige Vereinbarung der Parteien über das Werk dar. Sind einzelne Bestimmungen der Lizenz nach geltendem Recht nichtig oder unwirksam, so berührt dies nicht die Wirksamkeit oder Durchsetzbarkeit der Lizenz an sich. Solche Bestimmungen werden vielmehr so ausgelegt oder modifiziert, dass sie wirksam und durchsetzbar sind. Die Europäische Kommission kann weitere Sprachfassungen oder neue Versionen dieser Lizenz oder aktualisierte Fassungen des Anhangs veröffentlichen, soweit dies notwendig und angemessen ist, ohne den Umfang der Lizenzrechte zu verringern. Neue Versionen werden mit einer eindeutigen Versionsnummer veröffentlicht. Alle von der Europäischen Kommission anerkannten Sprachfassungen dieser Lizenz sind gleichwertig. Die Parteien können sich auf die Sprachfassung ihrer Wahl berufen.
|
||||
|
||||
## 14. Gerichtsstand
|
||||
Unbeschadet besonderer Vereinbarungen zwischen den Parteien gilt Folgendes:
|
||||
- Für alle Streitigkeiten über die Auslegung dieser Lizenz zwischen den Organen, Einrichtungen und sonstigen Stellen der Europäischen Union als Lizenzgeber und einem Lizenznehmer ist der Gerichtshof der Europäischen Union gemäß Artikel 272 des Vertrags über die Arbeitsweise der Europäischen Union zuständig;
|
||||
- Gerichtsstand für Streitigkeiten zwischen anderen Parteien über die Auslegung dieser Lizenz ist allein der Ort, an dem der Lizenzgeber seinen Wohnsitz oder den wirtschaftlichen Mittelpunkt seiner Tätigkeit hat.
|
||||
|
||||
## 15. Anwendbares Recht
|
||||
Unbeschadet besonderer Vereinbarungen zwischen den Parteien gilt Folgendes:
|
||||
- Diese Lizenz unterliegt dem Recht des Mitgliedstaats der Europäischen Union, in dem der Lizenzgeber seinen Sitz, Wohnsitz oder eingetragenen Sitz hat;
|
||||
- diese Lizenz unterliegt dem belgischen Recht, wenn der Lizenzgeber keinen Sitz, Wohnsitz oder eingetragenen Sitz in einem Mitgliedstaat der Europäischen Union hat.
|
||||
|
||||
# Anlage
|
||||
„Kompatible Lizenzen“ nach Artikel 5 der EUPL sind:
|
||||
- GNU General Public License (GPL) v. 2, v. 3
|
||||
- GNU Affero General Public License (AGPL) v. 3
|
||||
- Open Software License (OSL) v. 2.1, v. 3.0
|
||||
- Eclipse Public License (EPL) v. 1.0
|
||||
- CeCILL v. 2.0, v. 2.1
|
||||
- Mozilla Public Licence (MPL) v. 2
|
||||
- GNU Lesser General Public Licence (LGPL) v. 2.1, v. 3
|
||||
- Creative Commons Attribution-ShareAlike v. 3.0 Unported (CC BY-SA 3.0) für andere Werke als Software
|
||||
- European Union Public Licence (EUPL) v. 1.1, v. 1.2
|
||||
- Québec Free and Open-Source Licence - Reciprocity (LiLiQ-R) oder Strong Reciprocity (LiLiQ-R+)
|
||||
|
||||
Die Europäische Kommission kann diesen Anhang aktualisieren, um neuere Fassungen der obigen Lizenzen aufzunehmen, ohne hierfür eine neue Fassung der EUPL auszuarbeiten, solange diese Lizenzen die in Artikel 2 gewährten Rechte gewährleisten und den erfassten Quellcode vor ausschließlicher Aneignung schützen.
|
||||
Alle sonstigen Änderungen oder Ergänzungen dieses Anhangs bedürfen der Ausarbeitung einer neuen Version der EUPL.
|
287
LICENSE_en.md
Normal file
287
LICENSE_en.md
Normal file
@ -0,0 +1,287 @@
|
||||
EUROPEAN UNION PUBLIC LICENCE v. 1.2
|
||||
EUPL © the European Union 2007, 2016
|
||||
|
||||
This European Union Public Licence (the ‘EUPL’) applies to the Work (as defined
|
||||
below) which is provided under the terms of this Licence. Any use of the Work,
|
||||
other than as authorised under this Licence is prohibited (to the extent such
|
||||
use is covered by a right of the copyright holder of the Work).
|
||||
|
||||
The Work is provided under the terms of this Licence when the Licensor (as
|
||||
defined below) has placed the following notice immediately following the
|
||||
copyright notice for the Work:
|
||||
|
||||
Licensed under the EUPL
|
||||
|
||||
or has expressed by any other means his willingness to license under the EUPL.
|
||||
|
||||
1. Definitions
|
||||
|
||||
In this Licence, the following terms have the following meaning:
|
||||
|
||||
- ‘The Licence’: this Licence.
|
||||
|
||||
- ‘The Original Work’: the work or software distributed or communicated by the
|
||||
Licensor under this Licence, available as Source Code and also as Executable
|
||||
Code as the case may be.
|
||||
|
||||
- ‘Derivative Works’: the works or software that could be created by the
|
||||
Licensee, based upon the Original Work or modifications thereof. This Licence
|
||||
does not define the extent of modification or dependence on the Original Work
|
||||
required in order to classify a work as a Derivative Work; this extent is
|
||||
determined by copyright law applicable in the country mentioned in Article 15.
|
||||
|
||||
- ‘The Work’: the Original Work or its Derivative Works.
|
||||
|
||||
- ‘The Source Code’: the human-readable form of the Work which is the most
|
||||
convenient for people to study and modify.
|
||||
|
||||
- ‘The Executable Code’: any code which has generally been compiled and which is
|
||||
meant to be interpreted by a computer as a program.
|
||||
|
||||
- ‘The Licensor’: the natural or legal person that distributes or communicates
|
||||
the Work under the Licence.
|
||||
|
||||
- ‘Contributor(s)’: any natural or legal person who modifies the Work under the
|
||||
Licence, or otherwise contributes to the creation of a Derivative Work.
|
||||
|
||||
- ‘The Licensee’ or ‘You’: any natural or legal person who makes any usage of
|
||||
the Work under the terms of the Licence.
|
||||
|
||||
- ‘Distribution’ or ‘Communication’: any act of selling, giving, lending,
|
||||
renting, distributing, communicating, transmitting, or otherwise making
|
||||
available, online or offline, copies of the Work or providing access to its
|
||||
essential functionalities at the disposal of any other natural or legal
|
||||
person.
|
||||
|
||||
2. Scope of the rights granted by the Licence
|
||||
|
||||
The Licensor hereby grants You a worldwide, royalty-free, non-exclusive,
|
||||
sublicensable licence to do the following, for the duration of copyright vested
|
||||
in the Original Work:
|
||||
|
||||
- use the Work in any circumstance and for all usage,
|
||||
- reproduce the Work,
|
||||
- modify the Work, and make Derivative Works based upon the Work,
|
||||
- communicate to the public, including the right to make available or display
|
||||
the Work or copies thereof to the public and perform publicly, as the case may
|
||||
be, the Work,
|
||||
- distribute the Work or copies thereof,
|
||||
- lend and rent the Work or copies thereof,
|
||||
- sublicense rights in the Work or copies thereof.
|
||||
|
||||
Those rights can be exercised on any media, supports and formats, whether now
|
||||
known or later invented, as far as the applicable law permits so.
|
||||
|
||||
In the countries where moral rights apply, the Licensor waives his right to
|
||||
exercise his moral right to the extent allowed by law in order to make effective
|
||||
the licence of the economic rights here above listed.
|
||||
|
||||
The Licensor grants to the Licensee royalty-free, non-exclusive usage rights to
|
||||
any patents held by the Licensor, to the extent necessary to make use of the
|
||||
rights granted on the Work under this Licence.
|
||||
|
||||
3. Communication of the Source Code
|
||||
|
||||
The Licensor may provide the Work either in its Source Code form, or as
|
||||
Executable Code. If the Work is provided as Executable Code, the Licensor
|
||||
provides in addition a machine-readable copy of the Source Code of the Work
|
||||
along with each copy of the Work that the Licensor distributes or indicates, in
|
||||
a notice following the copyright notice attached to the Work, a repository where
|
||||
the Source Code is easily and freely accessible for as long as the Licensor
|
||||
continues to distribute or communicate the Work.
|
||||
|
||||
4. Limitations on copyright
|
||||
|
||||
Nothing in this Licence is intended to deprive the Licensee of the benefits from
|
||||
any exception or limitation to the exclusive rights of the rights owners in the
|
||||
Work, of the exhaustion of those rights or of other applicable limitations
|
||||
thereto.
|
||||
|
||||
5. Obligations of the Licensee
|
||||
|
||||
The grant of the rights mentioned above is subject to some restrictions and
|
||||
obligations imposed on the Licensee. Those obligations are the following:
|
||||
|
||||
Attribution right: The Licensee shall keep intact all copyright, patent or
|
||||
trademarks notices and all notices that refer to the Licence and to the
|
||||
disclaimer of warranties. The Licensee must include a copy of such notices and a
|
||||
copy of the Licence with every copy of the Work he/she distributes or
|
||||
communicates. The Licensee must cause any Derivative Work to carry prominent
|
||||
notices stating that the Work has been modified and the date of modification.
|
||||
|
||||
Copyleft clause: If the Licensee distributes or communicates copies of the
|
||||
Original Works or Derivative Works, this Distribution or Communication will be
|
||||
done under the terms of this Licence or of a later version of this Licence
|
||||
unless the Original Work is expressly distributed only under this version of the
|
||||
Licence — for example by communicating ‘EUPL v. 1.2 only’. The Licensee
|
||||
(becoming Licensor) cannot offer or impose any additional terms or conditions on
|
||||
the Work or Derivative Work that alter or restrict the terms of the Licence.
|
||||
|
||||
Compatibility clause: If the Licensee Distributes or Communicates Derivative
|
||||
Works or copies thereof based upon both the Work and another work licensed under
|
||||
a Compatible Licence, this Distribution or Communication can be done under the
|
||||
terms of this Compatible Licence. For the sake of this clause, ‘Compatible
|
||||
Licence’ refers to the licences listed in the appendix attached to this Licence.
|
||||
Should the Licensee's obligations under the Compatible Licence conflict with
|
||||
his/her obligations under this Licence, the obligations of the Compatible
|
||||
Licence shall prevail.
|
||||
|
||||
Provision of Source Code: When distributing or communicating copies of the Work,
|
||||
the Licensee will provide a machine-readable copy of the Source Code or indicate
|
||||
a repository where this Source will be easily and freely available for as long
|
||||
as the Licensee continues to distribute or communicate the Work.
|
||||
|
||||
Legal Protection: This Licence does not grant permission to use the trade names,
|
||||
trademarks, service marks, or names of the Licensor, except as required for
|
||||
reasonable and customary use in describing the origin of the Work and
|
||||
reproducing the content of the copyright notice.
|
||||
|
||||
6. Chain of Authorship
|
||||
|
||||
The original Licensor warrants that the copyright in the Original Work granted
|
||||
hereunder is owned by him/her or licensed to him/her and that he/she has the
|
||||
power and authority to grant the Licence.
|
||||
|
||||
Each Contributor warrants that the copyright in the modifications he/she brings
|
||||
to the Work are owned by him/her or licensed to him/her and that he/she has the
|
||||
power and authority to grant the Licence.
|
||||
|
||||
Each time You accept the Licence, the original Licensor and subsequent
|
||||
Contributors grant You a licence to their contributions to the Work, under the
|
||||
terms of this Licence.
|
||||
|
||||
7. Disclaimer of Warranty
|
||||
|
||||
The Work is a work in progress, which is continuously improved by numerous
|
||||
Contributors. It is not a finished work and may therefore contain defects or
|
||||
‘bugs’ inherent to this type of development.
|
||||
|
||||
For the above reason, the Work is provided under the Licence on an ‘as is’ basis
|
||||
and without warranties of any kind concerning the Work, including without
|
||||
limitation merchantability, fitness for a particular purpose, absence of defects
|
||||
or errors, accuracy, non-infringement of intellectual property rights other than
|
||||
copyright as stated in Article 6 of this Licence.
|
||||
|
||||
This disclaimer of warranty is an essential part of the Licence and a condition
|
||||
for the grant of any rights to the Work.
|
||||
|
||||
8. Disclaimer of Liability
|
||||
|
||||
Except in the cases of wilful misconduct or damages directly caused to natural
|
||||
persons, the Licensor will in no event be liable for any direct or indirect,
|
||||
material or moral, damages of any kind, arising out of the Licence or of the use
|
||||
of the Work, including without limitation, damages for loss of goodwill, work
|
||||
stoppage, computer failure or malfunction, loss of data or any commercial
|
||||
damage, even if the Licensor has been advised of the possibility of such damage.
|
||||
However, the Licensor will be liable under statutory product liability laws as
|
||||
far such laws apply to the Work.
|
||||
|
||||
9. Additional agreements
|
||||
|
||||
While distributing the Work, You may choose to conclude an additional agreement,
|
||||
defining obligations or services consistent with this Licence. However, if
|
||||
accepting obligations, You may act only on your own behalf and on your sole
|
||||
responsibility, not on behalf of the original Licensor or any other Contributor,
|
||||
and only if You agree to indemnify, defend, and hold each Contributor harmless
|
||||
for any liability incurred by, or claims asserted against such Contributor by
|
||||
the fact You have accepted any warranty or additional liability.
|
||||
|
||||
10. Acceptance of the Licence
|
||||
|
||||
The provisions of this Licence can be accepted by clicking on an icon ‘I agree’
|
||||
placed under the bottom of a window displaying the text of this Licence or by
|
||||
affirming consent in any other similar way, in accordance with the rules of
|
||||
applicable law. Clicking on that icon indicates your clear and irrevocable
|
||||
acceptance of this Licence and all of its terms and conditions.
|
||||
|
||||
Similarly, you irrevocably accept this Licence and all of its terms and
|
||||
conditions by exercising any rights granted to You by Article 2 of this Licence,
|
||||
such as the use of the Work, the creation by You of a Derivative Work or the
|
||||
Distribution or Communication by You of the Work or copies thereof.
|
||||
|
||||
11. Information to the public
|
||||
|
||||
In case of any Distribution or Communication of the Work by means of electronic
|
||||
communication by You (for example, by offering to download the Work from a
|
||||
remote location) the distribution channel or media (for example, a website) must
|
||||
at least provide to the public the information requested by the applicable law
|
||||
regarding the Licensor, the Licence and the way it may be accessible, concluded,
|
||||
stored and reproduced by the Licensee.
|
||||
|
||||
12. Termination of the Licence
|
||||
|
||||
The Licence and the rights granted hereunder will terminate automatically upon
|
||||
any breach by the Licensee of the terms of the Licence.
|
||||
|
||||
Such a termination will not terminate the licences of any person who has
|
||||
received the Work from the Licensee under the Licence, provided such persons
|
||||
remain in full compliance with the Licence.
|
||||
|
||||
13. Miscellaneous
|
||||
|
||||
Without prejudice of Article 9 above, the Licence represents the complete
|
||||
agreement between the Parties as to the Work.
|
||||
|
||||
If any provision of the Licence is invalid or unenforceable under applicable
|
||||
law, this will not affect the validity or enforceability of the Licence as a
|
||||
whole. Such provision will be construed or reformed so as necessary to make it
|
||||
valid and enforceable.
|
||||
|
||||
The European Commission may publish other linguistic versions or new versions of
|
||||
this Licence or updated versions of the Appendix, so far this is required and
|
||||
reasonable, without reducing the scope of the rights granted by the Licence. New
|
||||
versions of the Licence will be published with a unique version number.
|
||||
|
||||
All linguistic versions of this Licence, approved by the European Commission,
|
||||
have identical value. Parties can take advantage of the linguistic version of
|
||||
their choice.
|
||||
|
||||
14. Jurisdiction
|
||||
|
||||
Without prejudice to specific agreement between parties,
|
||||
|
||||
- any litigation resulting from the interpretation of this License, arising
|
||||
between the European Union institutions, bodies, offices or agencies, as a
|
||||
Licensor, and any Licensee, will be subject to the jurisdiction of the Court
|
||||
of Justice of the European Union, as laid down in article 272 of the Treaty on
|
||||
the Functioning of the European Union,
|
||||
|
||||
- any litigation arising between other parties and resulting from the
|
||||
interpretation of this License, will be subject to the exclusive jurisdiction
|
||||
of the competent court where the Licensor resides or conducts its primary
|
||||
business.
|
||||
|
||||
15. Applicable Law
|
||||
|
||||
Without prejudice to specific agreement between parties,
|
||||
|
||||
- this Licence shall be governed by the law of the European Union Member State
|
||||
where the Licensor has his seat, resides or has his registered office,
|
||||
|
||||
- this licence shall be governed by Belgian law if the Licensor has no seat,
|
||||
residence or registered office inside a European Union Member State.
|
||||
|
||||
Appendix
|
||||
|
||||
‘Compatible Licences’ according to Article 5 EUPL are:
|
||||
|
||||
- GNU General Public License (GPL) v. 2, v. 3
|
||||
- GNU Affero General Public License (AGPL) v. 3
|
||||
- Open Software License (OSL) v. 2.1, v. 3.0
|
||||
- Eclipse Public License (EPL) v. 1.0
|
||||
- CeCILL v. 2.0, v. 2.1
|
||||
- Mozilla Public Licence (MPL) v. 2
|
||||
- GNU Lesser General Public Licence (LGPL) v. 2.1, v. 3
|
||||
- Creative Commons Attribution-ShareAlike v. 3.0 Unported (CC BY-SA 3.0) for
|
||||
works other than software
|
||||
- European Union Public Licence (EUPL) v. 1.1, v. 1.2
|
||||
- Québec Free and Open-Source Licence — Reciprocity (LiLiQ-R) or Strong
|
||||
Reciprocity (LiLiQ-R+).
|
||||
|
||||
The European Commission may update this Appendix to later versions of the above
|
||||
licences without producing a new version of the EUPL, as long as they provide
|
||||
the rights granted in Article 2 of this Licence and protect the covered Source
|
||||
Code from exclusive appropriation.
|
||||
|
||||
All other changes or additions to this Appendix require the production of a new
|
||||
EUPL version.
|
@ -3,6 +3,14 @@ Konova is the successor of KSP. It's build using the python webframework Django,
|
||||
the database postgresql and the css library bootstrap as well as the icon package
|
||||
fontawesome for a modern look, following best practices from the industry.
|
||||
|
||||
## Background processes
|
||||
Konova uses celery for background processing. To start the worker you need to run
|
||||
```shell
|
||||
$ celery -A konova worker -l INFO
|
||||
```
|
||||
More info can be found [here](https://docs.celeryproject.org/en/stable/getting-started/first-steps-with-celery.html#running-the-celery-worker-server).
|
||||
Redis must be installed.
|
||||
|
||||
## Technical documentation
|
||||
Technical documention is provided in the projects git wiki.
|
||||
|
||||
|
3
analysis/admin.py
Normal file
3
analysis/admin.py
Normal file
@ -0,0 +1,3 @@
|
||||
from django.contrib import admin
|
||||
|
||||
# Register your models here.
|
5
analysis/apps.py
Normal file
5
analysis/apps.py
Normal file
@ -0,0 +1,5 @@
|
||||
from django.apps import AppConfig
|
||||
|
||||
|
||||
class AnalysisConfig(AppConfig):
|
||||
name = 'analysis'
|
87
analysis/forms.py
Normal file
87
analysis/forms.py
Normal file
@ -0,0 +1,87 @@
|
||||
"""
|
||||
Author: Michel Peltriaux
|
||||
Organization: Struktur- und Genehmigungsdirektion Nord, Rhineland-Palatinate, Germany
|
||||
Contact: michel.peltriaux@sgdnord.rlp.de
|
||||
Created on: 20.10.21
|
||||
|
||||
"""
|
||||
from dal import autocomplete
|
||||
from django import forms
|
||||
from django.urls import reverse
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
|
||||
from codelist.models import KonovaCode
|
||||
from codelist.settings import CODELIST_CONSERVATION_OFFICE_ID
|
||||
from konova.forms import BaseForm
|
||||
from konova.utils import validators
|
||||
|
||||
|
||||
class TimespanReportForm(BaseForm):
|
||||
""" TimespanReporForm is used for allowing simple creation of an e.g. annual report for conservation offices
|
||||
|
||||
"""
|
||||
date_from = forms.DateField(
|
||||
label_suffix="",
|
||||
label=_("From"),
|
||||
validators=[validators.reasonable_date],
|
||||
help_text=_("Entries created from..."),
|
||||
widget=forms.DateInput(
|
||||
attrs={
|
||||
"type": "date",
|
||||
"data-provide": "datepicker",
|
||||
"class": "form-control",
|
||||
},
|
||||
format="%d.%m.%Y"
|
||||
)
|
||||
)
|
||||
date_to = forms.DateField(
|
||||
label_suffix="",
|
||||
label=_("To"),
|
||||
validators=[validators.reasonable_date],
|
||||
help_text=_("Entries created until..."),
|
||||
widget=forms.DateInput(
|
||||
attrs={
|
||||
"type": "date",
|
||||
"data-provide": "datepicker",
|
||||
"class": "form-control",
|
||||
},
|
||||
format="%d.%m.%Y"
|
||||
)
|
||||
)
|
||||
conservation_office = forms.ModelChoiceField(
|
||||
label=_("Conservation office"),
|
||||
label_suffix="",
|
||||
help_text=_("Select the responsible office"),
|
||||
queryset=KonovaCode.objects.filter(
|
||||
is_archived=False,
|
||||
is_leaf=True,
|
||||
code_lists__in=[CODELIST_CONSERVATION_OFFICE_ID],
|
||||
),
|
||||
widget=autocomplete.ModelSelect2(
|
||||
url="codelist:conservation-office-autocomplete",
|
||||
attrs={
|
||||
"data-placeholder": _("Click for selection")
|
||||
}
|
||||
),
|
||||
)
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
super().__init__(*args, **kwargs)
|
||||
self.form_title = _("Generate report")
|
||||
self.form_caption = _("Select a timespan and the desired conservation office")
|
||||
self.action_url = reverse("analysis:reports")
|
||||
self.show_cancel_btn = False
|
||||
self.action_btn_label = _("Continue")
|
||||
|
||||
def save(self) -> str:
|
||||
""" Generates a redirect url for the detail report
|
||||
|
||||
Returns:
|
||||
detail_report_url (str): The constructed detail report url
|
||||
|
||||
"""
|
||||
date_from = self.cleaned_data.get("date_from", None)
|
||||
date_to = self.cleaned_data.get("date_to", None)
|
||||
office = self.cleaned_data.get("conservation_office", None)
|
||||
detail_report_url = reverse("analysis:report-detail", args=(office.id,)) + f"?df={date_from}&dt={date_to}"
|
||||
return detail_report_url
|
3
analysis/models.py
Normal file
3
analysis/models.py
Normal file
@ -0,0 +1,3 @@
|
||||
from django.db import models
|
||||
|
||||
# Create your models here.
|
16
analysis/settings.py
Normal file
16
analysis/settings.py
Normal file
@ -0,0 +1,16 @@
|
||||
"""
|
||||
Author: Michel Peltriaux
|
||||
Organization: Struktur- und Genehmigungsdirektion Nord, Rhineland-Palatinate, Germany
|
||||
Contact: michel.peltriaux@sgdnord.rlp.de
|
||||
Created on: 19.10.21
|
||||
|
||||
"""
|
||||
|
||||
# Defines the date of the legal publishing of the LKompVzVo
|
||||
from django.utils import timezone
|
||||
|
||||
LKOMPVZVO_PUBLISH_DATE = timezone.make_aware(
|
||||
timezone.datetime.fromisoformat(
|
||||
"2018-06-16"
|
||||
)
|
||||
).date()
|
36
analysis/templates/analysis/reports/detail.html
Normal file
36
analysis/templates/analysis/reports/detail.html
Normal file
@ -0,0 +1,36 @@
|
||||
{% extends 'base.html' %}
|
||||
{% load i18n fontawesome_5 %}
|
||||
|
||||
{% block body %}
|
||||
<div class="row">
|
||||
<div class="col-sm-12 col-md-12 col-lg-12 col-xl-6">
|
||||
<h3>{% trans 'Evaluation report' %} {{office.long_name}}</h3>
|
||||
<h5>{% trans 'From' %} {{report.date_from.date}} {% trans 'to' %} {{report.date_to.date}}</h5>
|
||||
</div>
|
||||
<div class="col-sm-12 col-md-12 col-lg-12 col-xl-6">
|
||||
<div class="d-flex justify-content-end">
|
||||
<div class="dropdown">
|
||||
<div class="btn btn" data-toggle="dropdown" role="button" aria-haspopup="true" aria-expanded="false">
|
||||
<button class="btn btn-default" title="{% trans 'Download' %}">
|
||||
{% fa5_icon 'download' %}
|
||||
</button>
|
||||
</div>
|
||||
<div class="dropdown-menu dropdown-menu-right">
|
||||
<a href="{{request.url}}?format=excel&{{request.GET.urlencode}}">
|
||||
<button class="dropdown-item" title="Excel">
|
||||
{% fa5_icon 'file-excel' %} Excel
|
||||
</button>
|
||||
</a>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<hr>
|
||||
<div class="col-sm-12 col-md-12 col-lg-12">
|
||||
{% include 'analysis/reports/includes/intervention/card_intervention.html' %}
|
||||
{% include 'analysis/reports/includes/compensation/card_compensation.html' %}
|
||||
{% include 'analysis/reports/includes/eco_account/card_eco_account.html' %}
|
||||
{% include 'analysis/reports/includes/old_data/card_old_data.html' %}
|
||||
</div>
|
||||
{% endblock %}
|
@ -0,0 +1,55 @@
|
||||
{% load i18n fontawesome_5 ksp_filters %}
|
||||
|
||||
<h3>{% trans 'Amount' %}</h3>
|
||||
<strong>
|
||||
{% blocktrans %}
|
||||
Checked = Has been checked by the registration office according to LKompVzVo
|
||||
{% endblocktrans %}
|
||||
<br>
|
||||
{% blocktrans %}
|
||||
Recorded = Has been checked and published by the conservation office
|
||||
{% endblocktrans %}
|
||||
</strong>
|
||||
<div class="table-container">
|
||||
<table class="table table-hover">
|
||||
<thead>
|
||||
<tr>
|
||||
<th scope="col">{% trans 'Area of responsibility' %}</th>
|
||||
<th scope="col">{% trans 'Total' %}</th>
|
||||
<th scope="col">{% trans 'Number single areas' %}</th>
|
||||
<th scope="col">{% fa5_icon 'star' %} {% trans 'Checked' %}</th>
|
||||
<th scope="col">{% fa5_icon 'bookmark' %} {% trans 'Recorded' %}</th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
<tr>
|
||||
<td>{% trans 'Conservation office by law' %}</td>
|
||||
<td>{{report.compensation_report.queryset_registration_office_unb_count|default_if_zero:"-"}}</td>
|
||||
<td>{{report.compensation_report.num_single_surfaces_total_unb|default_if_zero:"-"}}</td>
|
||||
<td>{{report.compensation_report.queryset_registration_office_unb_checked_count|default_if_zero:"-"}}</td>
|
||||
<td>{{report.compensation_report.queryset_registration_office_unb_recorded_count|default_if_zero:"-"}}</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>{% trans 'Land-use planning' %}</td>
|
||||
<td>{{report.compensation_report.queryset_registration_office_tbp_count|default_if_zero:"-"}}</td>
|
||||
<td>{{report.compensation_report.num_single_surfaces_total_tbp|default_if_zero:"-"}}</td>
|
||||
<td>{{report.compensation_report.queryset_registration_office_tbp_checked_count|default_if_zero:"-"}}</td>
|
||||
<td>{{report.compensation_report.queryset_registration_office_tbp_recorded_count|default_if_zero:"-"}}</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>{% trans 'Other registration office' %}</td>
|
||||
<td>{{report.compensation_report.queryset_registration_office_other_count|default_if_zero:"-"}}</td>
|
||||
<td>{{report.compensation_report.num_single_surfaces_total_other|default_if_zero:"-"}}</td>
|
||||
<td>{{report.compensation_report.queryset_registration_office_other_checked_count|default_if_zero:"-"}}</td>
|
||||
<td>{{report.compensation_report.queryset_registration_office_other_recorded_count|default_if_zero:"-"}}</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><strong>{% trans 'Total' %}</strong></td>
|
||||
<td><strong>{{report.compensation_report.queryset_count|default_if_zero:"-"}}</strong></td>
|
||||
<td><strong>{{report.compensation_report.num_single_surfaces_total|default_if_zero:"-"}}</strong></td>
|
||||
<td><strong>{{report.compensation_report.queryset_checked_count|default_if_zero:"-"}}</strong></td>
|
||||
<td><strong>{{report.compensation_report.queryset_recorded_count|default_if_zero:"-"}}</strong></td>
|
||||
</tr>
|
||||
</tbody>
|
||||
</table>
|
||||
</div>
|
@ -0,0 +1,24 @@
|
||||
{% load i18n fontawesome_5 %}
|
||||
|
||||
<div class="row">
|
||||
<div class="col-sm-12 col-md-12 col-lg-12">
|
||||
<div class="card">
|
||||
<div id="compensation" class="card-header cursor-pointer rlp-r" data-toggle="collapse" data-target="#compensationBody" aria-expanded="true" aria-controls="compensationBody">
|
||||
<div class="row">
|
||||
<div class="col-sm-6">
|
||||
<h5>
|
||||
{% fa5_icon 'leaf' %}
|
||||
{% trans 'Compensations' %}
|
||||
</h5>
|
||||
<span>{% trans 'Binding date after' %} 16.06.2018</span>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<div id="compensationBody" class="collapse" aria-labelledby="compensation">
|
||||
<div class="card-body">
|
||||
{% include 'analysis/reports/includes/compensation/amount.html' %}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
@ -0,0 +1,24 @@
|
||||
{% load i18n fontawesome_5 ksp_filters %}
|
||||
|
||||
<h3>{% trans 'Amount' %}</h3>
|
||||
<strong>
|
||||
{% blocktrans %}
|
||||
Recorded = Has been checked and published by the conservation office
|
||||
{% endblocktrans %}
|
||||
</strong>
|
||||
<div class="table-container">
|
||||
<table class="table table-hover">
|
||||
<thead>
|
||||
<tr>
|
||||
<th scope="col">{% trans 'Total' %}</th>
|
||||
<th scope="col" class="w-25">{% fa5_icon 'bookmark' %} {% trans 'Recorded' %}</th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
<tr>
|
||||
<td>{{report.eco_account_report.queryset_count|default_if_zero:"-"}}</td>
|
||||
<td>{{report.eco_account_report.queryset_recorded_count|default_if_zero:"-"}}</td>
|
||||
</tr>
|
||||
</tbody>
|
||||
</table>
|
||||
</div>
|
@ -0,0 +1,26 @@
|
||||
{% load i18n fontawesome_5 %}
|
||||
|
||||
<div class="row">
|
||||
<div class="col-sm-12 col-md-12 col-lg-12">
|
||||
<div class="card">
|
||||
<div id="ecoAccounts" class="card-header cursor-pointer rlp-r" data-toggle="collapse" data-target="#ecoAccountsBody" aria-expanded="true" aria-controls="ecoAccountsBody">
|
||||
<div class="row">
|
||||
<div class="col-sm-6">
|
||||
<h5>
|
||||
{% fa5_icon 'tree' %}
|
||||
{% trans 'Eco-Accounts' %}
|
||||
</h5>
|
||||
<span>{% trans 'Binding date after' %} 16.06.2018</span>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<div id="ecoAccountsBody" class="collapse" aria-labelledby="ecoAccounts">
|
||||
<div class="card-body">
|
||||
{% include 'analysis/reports/includes/eco_account/amount.html' %}
|
||||
<hr>
|
||||
{% include 'analysis/reports/includes/eco_account/deductions.html' %}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
@ -0,0 +1,38 @@
|
||||
{% load i18n fontawesome_5 ksp_filters %}
|
||||
|
||||
<h3>{% trans 'Deductions' %}</h3>
|
||||
<strong>
|
||||
{% blocktrans %}
|
||||
Recorded = Counts the deductions whose interventions have been recorded
|
||||
{% endblocktrans %}
|
||||
</strong>
|
||||
<div class="table-container">
|
||||
<table class="table table-hover">
|
||||
<thead>
|
||||
<tr>
|
||||
<th scope="col" class="w-25">{% trans 'Total' %}</th>
|
||||
<th scope="col" class="w-25">{% trans 'Total' %} {% trans 'Surface' %}</th>
|
||||
<th scope="col">{% fa5_icon 'bookmark' %} {% trans 'Recorded' %}</th>
|
||||
<th scope="col">{% fa5_icon 'bookmark' %} {% trans 'Recorded' %} {% trans 'Surface' %}</th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
<tr>
|
||||
<td>{{report.eco_account_report.queryset_deductions_count|default_if_zero:"-"}}</td>
|
||||
<td>
|
||||
{{report.eco_account_report.deductions_sq_m|default_if_zero:"-"}}
|
||||
{% if report.eco_account_report.deductions_sq_m > 0 %}
|
||||
m²
|
||||
{% endif %}
|
||||
</td>
|
||||
<td>{{report.eco_account_report.queryset_deductions_recorded_count|default_if_zero:"-"}}</td>
|
||||
<td>
|
||||
{{report.eco_account_report.recorded_deductions_sq_m|default_if_zero:"-"}}
|
||||
{% if report.eco_account_report.recorded_deductions_sq_m > 0 %}
|
||||
m²
|
||||
{% endif %}
|
||||
</td>
|
||||
</tr>
|
||||
</tbody>
|
||||
</table>
|
||||
</div>
|
@ -0,0 +1,30 @@
|
||||
{% load i18n fontawesome_5 ksp_filters %}
|
||||
|
||||
<h3>{% trans 'Amount' %}</h3>
|
||||
<strong>
|
||||
{% blocktrans %}
|
||||
Checked = Has been checked by the registration office according to LKompVzVo
|
||||
{% endblocktrans %}
|
||||
<br>
|
||||
{% blocktrans %}
|
||||
Recorded = Has been checked and published by the conservation office
|
||||
{% endblocktrans %}
|
||||
</strong>
|
||||
<div class="table-container">
|
||||
<table class="table table-hover">
|
||||
<thead>
|
||||
<tr>
|
||||
<th scope="col" class="w-25">{% trans 'Total' %}</th>
|
||||
<th scope="col">{% fa5_icon 'star' %} {% trans 'Checked' %}</th>
|
||||
<th scope="col">{% fa5_icon 'bookmark' %} {% trans 'Recorded' %}</th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
<tr>
|
||||
<td>{{report.intervention_report.queryset_count|default_if_zero:"-"}}</td>
|
||||
<td>{{report.intervention_report.queryset_checked_count|default_if_zero:"-"}}</td>
|
||||
<td>{{report.intervention_report.queryset_recorded_count|default_if_zero:"-"}}</td>
|
||||
</tr>
|
||||
</tbody>
|
||||
</table>
|
||||
</div>
|
@ -0,0 +1,27 @@
|
||||
{% load i18n fontawesome_5 %}
|
||||
<div class="row">
|
||||
<div class="col-sm-12 col-md-12 col-lg-12">
|
||||
<div class="card">
|
||||
<div id="intervention" class="card-header cursor-pointer rlp-r" data-toggle="collapse" data-target="#interventionBody" aria-expanded="true" aria-controls="interventionBody">
|
||||
<div class="row">
|
||||
<div class="col-sm-6">
|
||||
<h5>
|
||||
{% fa5_icon 'pencil-ruler' %}
|
||||
{% trans 'Interventions' %}
|
||||
</h5>
|
||||
<span>{% trans 'Binding date after' %} 16.06.2018</span>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<div id="interventionBody" class="collapse" aria-labelledby="intervention">
|
||||
<div class="card-body">
|
||||
{% include 'analysis/reports/includes/intervention/amount.html' %}
|
||||
<hr>
|
||||
{% include 'analysis/reports/includes/intervention/compensated_by.html' %}
|
||||
<hr>
|
||||
{% include 'analysis/reports/includes/intervention/laws.html' %}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
@ -0,0 +1,34 @@
|
||||
{% load i18n fontawesome_5 ksp_filters %}
|
||||
<h3>{% trans 'Compensated by' %}</h3>
|
||||
<div class="table-container scroll-300">
|
||||
<table class="table table-hover">
|
||||
<thead>
|
||||
<tr>
|
||||
<th class="w-25" scope="col">{% trans 'Compensation type' %}</th>
|
||||
<th class="w-25" scope="col">{% trans 'Total' %}</th>
|
||||
<th class="w-25" scope="col">{% fa5_icon 'star' %} {% trans 'Checked' %}</th>
|
||||
<th class="w-25" scope="col">{% fa5_icon 'bookmark' %} {% trans 'Recorded' %}</th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
<tr>
|
||||
<th>{% trans 'Compensation' %}</th>
|
||||
<td>{{report.intervention_report.compensation_sum|default_if_zero:"-"}}</td>
|
||||
<td>{{report.intervention_report.compensation_sum_checked|default_if_zero:"-"}}</td>
|
||||
<td>{{report.intervention_report.compensation_sum_recorded|default_if_zero:"-"}}</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<th>{% trans 'Payment' %}</th>
|
||||
<td>{{report.intervention_report.payment_sum|default_if_zero:"-"}}</td>
|
||||
<td>{{report.intervention_report.payment_sum_checked|default_if_zero:"-"}}</td>
|
||||
<td>{{report.intervention_report.payment_sum_recorded|default_if_zero:"-"}}</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<th>{% trans 'Deductions' %}</th>
|
||||
<td>{{report.intervention_report.deduction_sum|default_if_zero:"-"}}</td>
|
||||
<td>{{report.intervention_report.deduction_sum_checked|default_if_zero:"-"}}</td>
|
||||
<td>{{report.intervention_report.deduction_sum_recorded|default_if_zero:"-"}}</td>
|
||||
</tr>
|
||||
</tbody>
|
||||
</table>
|
||||
</div>
|
@ -0,0 +1,50 @@
|
||||
{% load i18n fontawesome_5 ksp_filters %}
|
||||
<h3>{% trans 'Law usage' %}</h3>
|
||||
<strong>
|
||||
{% blocktrans %}
|
||||
Please note: One intervention can be based on multiple laws. This table therefore does not
|
||||
count
|
||||
{% endblocktrans %}
|
||||
</strong>
|
||||
<div class="table-container scroll-300">
|
||||
<table class="table table-hover">
|
||||
<thead>
|
||||
<tr>
|
||||
<th class="w-25" scope="col">
|
||||
{% trans 'Law' %}
|
||||
</th>
|
||||
<th scope="col">
|
||||
{% trans 'Total' %}
|
||||
</th>
|
||||
<th scope="col">
|
||||
{% fa5_icon 'star' %} {% trans 'Checked' %}
|
||||
</th>
|
||||
<th scope="col">
|
||||
{% fa5_icon 'bookmark' %} {% trans 'Recorded' %}
|
||||
</th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
{% for law in report.intervention_report.evaluated_laws %}
|
||||
<tr>
|
||||
<td>
|
||||
{{law.short_name}}
|
||||
<br>
|
||||
<small>
|
||||
{{law.long_name}}
|
||||
</small>
|
||||
</td>
|
||||
<td>{{law.num|default_if_zero:"-"}}</td>
|
||||
<td>{{law.num_checked|default_if_zero:"-"}}</td>
|
||||
<td>{{law.num_recorded|default_if_zero:"-"}}</td>
|
||||
</tr>
|
||||
{% endfor %}
|
||||
<tr>
|
||||
<td><strong>{% trans 'Total' %}</strong></td>
|
||||
<td><strong>{{report.intervention_report.law_sum|default_if_zero:"-"}}</strong></td>
|
||||
<td><strong>{{report.intervention_report.law_sum_checked|default_if_zero:"-"}}</strong></td>
|
||||
<td><strong>{{report.intervention_report.law_sum_recorded|default_if_zero:"-"}}</strong></td>
|
||||
</tr>
|
||||
</tbody>
|
||||
</table>
|
||||
</div>
|
@ -0,0 +1,40 @@
|
||||
{% load i18n fontawesome_5 ksp_filters %}
|
||||
|
||||
<h3>{% trans 'Amount' %}</h3>
|
||||
<strong>
|
||||
{% blocktrans %}
|
||||
Checked = Has been checked by the registration office according to LKompVzVo
|
||||
{% endblocktrans %}
|
||||
<br>
|
||||
{% blocktrans %}
|
||||
Recorded = Has been checked and published by the conservation office
|
||||
{% endblocktrans %}
|
||||
</strong>
|
||||
<div class="table-container">
|
||||
<table class="table table-hover">
|
||||
<thead>
|
||||
<tr>
|
||||
<th scope="col" class="w-25">{% trans 'Type' %}</th>
|
||||
<th scope="col">{% trans 'Total' %}</th>
|
||||
<th scope="col">{% fa5_icon 'bookmark' %} {% trans 'Recorded' %}</th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
<tr>
|
||||
<td>{% trans 'Intervention' %}</td>
|
||||
<td>{{report.old_data_report.queryset_intervention_count|default_if_zero:"-"}}</td>
|
||||
<td>{{report.old_data_report.queryset_intervention_recorded_count|default_if_zero:"-"}}</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>{% trans 'Compensation' %}</td>
|
||||
<td>{{report.old_data_report.queryset_comps_count|default_if_zero:"-"}}</td>
|
||||
<td>{{report.old_data_report.queryset_comps_recorded_count|default_if_zero:"-"}}</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>{% trans 'Eco-account' %}</td>
|
||||
<td>{{report.old_data_report.queryset_acc_count|default_if_zero:"-"}}</td>
|
||||
<td>{{report.old_data_report.queryset_acc_recorded_count|default_if_zero:"-"}}</td>
|
||||
</tr>
|
||||
</tbody>
|
||||
</table>
|
||||
</div>
|
@ -0,0 +1,24 @@
|
||||
{% load i18n fontawesome_5 %}
|
||||
|
||||
<div class="row">
|
||||
<div class="col-sm-12 col-md-12 col-lg-12">
|
||||
<div class="card">
|
||||
<div id="oldIntervention" class="card-header cursor-pointer rlp-r" data-toggle="collapse" data-target="#oldInterventionBody" aria-expanded="true" aria-controls="oldInterventionBody">
|
||||
<div class="row">
|
||||
<div class="col-sm-6">
|
||||
<h5>
|
||||
{% fa5_icon 'pencil-ruler' %}
|
||||
{% trans 'Old interventions' %}
|
||||
</h5>
|
||||
<span>{% trans 'Binding date before' %} 16.06.2018</span>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<div id="oldInterventionBody" class="collapse" aria-labelledby="oldIntervention">
|
||||
<div class="card-body">
|
||||
{% include 'analysis/reports/includes/old_data/amount.html' %}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
10
analysis/templates/analysis/reports/index.html
Normal file
10
analysis/templates/analysis/reports/index.html
Normal file
@ -0,0 +1,10 @@
|
||||
{% extends 'base.html' %}
|
||||
{% load i18n fontawesome_5 %}
|
||||
|
||||
{% block body %}
|
||||
<div class="row">
|
||||
<div class="col-sm-12 col-md-12 col-lg-12">
|
||||
{% include 'form/table/generic_table_form.html' %}
|
||||
</div>
|
||||
</div>
|
||||
{% endblock %}
|
7
analysis/tests/__init__.py
Normal file
7
analysis/tests/__init__.py
Normal file
@ -0,0 +1,7 @@
|
||||
"""
|
||||
Author: Michel Peltriaux
|
||||
Organization: Struktur- und Genehmigungsdirektion Nord, Rhineland-Palatinate, Germany
|
||||
Contact: ksp-servicestelle@sgdnord.rlp.de
|
||||
Created on: 15.08.23
|
||||
|
||||
"""
|
7
analysis/tests/unit/__init__.py
Normal file
7
analysis/tests/unit/__init__.py
Normal file
@ -0,0 +1,7 @@
|
||||
"""
|
||||
Author: Michel Peltriaux
|
||||
Organization: Struktur- und Genehmigungsdirektion Nord, Rhineland-Palatinate, Germany
|
||||
Contact: ksp-servicestelle@sgdnord.rlp.de
|
||||
Created on: 15.08.23
|
||||
|
||||
"""
|
47
analysis/tests/unit/test_forms.py
Normal file
47
analysis/tests/unit/test_forms.py
Normal file
@ -0,0 +1,47 @@
|
||||
"""
|
||||
Author: Michel Peltriaux
|
||||
Organization: Struktur- und Genehmigungsdirektion Nord, Rhineland-Palatinate, Germany
|
||||
Contact: ksp-servicestelle@sgdnord.rlp.de
|
||||
Created on: 15.08.23
|
||||
|
||||
"""
|
||||
from datetime import timedelta
|
||||
|
||||
from django.urls import reverse
|
||||
from django.utils.timezone import now
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
|
||||
from analysis.forms import TimespanReportForm
|
||||
from konova.tests.test_views import BaseTestCase
|
||||
|
||||
|
||||
class TimeSpanReportFormTestCase(BaseTestCase):
|
||||
def setUp(self) -> None:
|
||||
super().setUp()
|
||||
eiv = self.create_dummy_intervention()
|
||||
|
||||
def test_init(self):
|
||||
form = TimespanReportForm()
|
||||
self.assertEqual(form.form_title, str(_("Generate report")))
|
||||
self.assertEqual(form.form_caption, str(_("Select a timespan and the desired conservation office") ))
|
||||
self.assertEqual(form.action_url, reverse("analysis:reports"))
|
||||
self.assertFalse(form.show_cancel_btn)
|
||||
self.assertEqual(form.action_btn_label, str(_("Continue")))
|
||||
|
||||
def test_save(self):
|
||||
date_from = now().date() - timedelta(days=365)
|
||||
date_to = now().date()
|
||||
office = self.get_conservation_office_code()
|
||||
data = {
|
||||
"date_from": date_from,
|
||||
"date_to": date_to,
|
||||
"conservation_office": office,
|
||||
}
|
||||
form = TimespanReportForm(data)
|
||||
self.assertTrue(form.is_valid(), msg=f"{form.errors}")
|
||||
|
||||
detail_report_url = form.save()
|
||||
self.assertEqual(
|
||||
detail_report_url,
|
||||
reverse("analysis:report-detail", args=(office.id,)) + f"?df={date_from}&dt={date_to}"
|
||||
)
|
98
analysis/tests/unit/test_report.py
Normal file
98
analysis/tests/unit/test_report.py
Normal file
@ -0,0 +1,98 @@
|
||||
"""
|
||||
Author: Michel Peltriaux
|
||||
Organization: Struktur- und Genehmigungsdirektion Nord, Rhineland-Palatinate, Germany
|
||||
Contact: ksp-servicestelle@sgdnord.rlp.de
|
||||
Created on: 17.08.23
|
||||
|
||||
"""
|
||||
from datetime import timedelta
|
||||
|
||||
from django.utils.timezone import now
|
||||
|
||||
from analysis.settings import LKOMPVZVO_PUBLISH_DATE
|
||||
from analysis.utils.report import TimespanReport
|
||||
from konova.sub_settings.django_settings import DEFAULT_DATE_FORMAT
|
||||
from konova.tests.test_views import BaseTestCase
|
||||
|
||||
|
||||
class TimeSpanReportTestCase(BaseTestCase):
|
||||
def setUp(self) -> None:
|
||||
super().setUp()
|
||||
|
||||
today = now().date()
|
||||
old_date = LKOMPVZVO_PUBLISH_DATE - timedelta(days=1)
|
||||
|
||||
self.conservation_office = self.get_conservation_office_code()
|
||||
self.eiv_old = self.create_dummy_intervention()
|
||||
self.kom_old = self.create_dummy_compensation(interv=self.eiv_old)
|
||||
self.assertNotEqual(self.compensation.intervention, self.kom_old.intervention)
|
||||
self.eiv = self.compensation.intervention
|
||||
self.oek_old = self.create_dummy_eco_account()
|
||||
|
||||
self.eiv_old.responsible.conservation_office = self.conservation_office
|
||||
self.eiv_old.legal.binding_date = old_date
|
||||
self.eiv_old.legal.registration_date = old_date
|
||||
|
||||
self.eiv.responsible.conservation_office = self.conservation_office
|
||||
self.eiv.legal.binding_date = today
|
||||
self.eiv.legal.registration_date = today
|
||||
|
||||
self.eco_account.responsible.conservation_office = self.conservation_office
|
||||
self.eco_account.legal.registration_date = today
|
||||
self.eco_account.legal.binding_date = today
|
||||
|
||||
self.oek_old.responsible.conservation_office = self.conservation_office
|
||||
self.oek_old.legal.registration_date = old_date
|
||||
self.oek_old.legal.binding_date = old_date
|
||||
|
||||
self.eiv.legal.save()
|
||||
self.eiv.responsible.save()
|
||||
|
||||
self.eiv_old.legal.save()
|
||||
self.eiv_old.responsible.save()
|
||||
|
||||
self.eco_account.legal.save()
|
||||
self.eco_account.responsible.save()
|
||||
|
||||
self.oek_old.legal.save()
|
||||
self.oek_old.responsible.save()
|
||||
|
||||
self.deduction.account = self.eco_account
|
||||
self.deduction.intervention = self.eiv
|
||||
self.deduction.save()
|
||||
|
||||
def test_init(self):
|
||||
date_from = now().date() - timedelta(days=365)
|
||||
date_to = now().date()
|
||||
report = TimespanReport(self.conservation_office.id, date_from, date_to)
|
||||
|
||||
self.assertEqual(report.office_id, self.conservation_office.id)
|
||||
self.assertEqual(report.date_from, date_from)
|
||||
self.assertEqual(report.date_to, date_to)
|
||||
|
||||
self.assertIsNotNone(report.intervention_report)
|
||||
self.assertIsNotNone(report.compensation_report)
|
||||
self.assertIsNotNone(report.eco_account_report)
|
||||
self.assertIsNotNone(report.old_data_report)
|
||||
|
||||
self.assertEqual(report.excel_map["date_from"], date_from.strftime(DEFAULT_DATE_FORMAT))
|
||||
self.assertEqual(report.excel_map["date_to"], date_to.strftime(DEFAULT_DATE_FORMAT))
|
||||
|
||||
self.assertEqual(report.old_data_report.queryset_intervention_count, 1)
|
||||
self.assertEqual(report.old_data_report.queryset_intervention_recorded_count, 0)
|
||||
self.assertEqual(report.old_data_report.queryset_comps_count, 1)
|
||||
self.assertEqual(report.old_data_report.queryset_acc_count, 1)
|
||||
self.assertEqual(report.old_data_report.queryset_acc_recorded_count, 0)
|
||||
|
||||
self.assertEqual(report.intervention_report.queryset_count, 1)
|
||||
self.assertEqual(report.intervention_report.queryset_checked_count, 0)
|
||||
self.assertEqual(report.intervention_report.queryset_recorded_count, 0)
|
||||
|
||||
self.assertEqual(report.compensation_report.queryset_count, 1)
|
||||
self.assertEqual(report.compensation_report.queryset_checked_count, 0)
|
||||
self.assertEqual(report.compensation_report.queryset_recorded_count, 0)
|
||||
|
||||
self.assertEqual(report.eco_account_report.queryset_count, 1)
|
||||
self.assertEqual(report.eco_account_report.queryset_recorded_count, 0)
|
||||
self.assertEqual(report.eco_account_report.queryset_deductions_count, 1)
|
||||
self.assertEqual(report.eco_account_report.queryset_deductions_recorded_count, 0)
|
15
analysis/urls.py
Normal file
15
analysis/urls.py
Normal file
@ -0,0 +1,15 @@
|
||||
"""
|
||||
Author: Michel Peltriaux
|
||||
Organization: Struktur- und Genehmigungsdirektion Nord, Rhineland-Palatinate, Germany
|
||||
Contact: michel.peltriaux@sgdnord.rlp.de
|
||||
Created on: 15.10.21
|
||||
|
||||
"""
|
||||
from django.urls import path
|
||||
from analysis.views import *
|
||||
|
||||
app_name = "analysis"
|
||||
urlpatterns = [
|
||||
path("reports/", index_reports_view, name="reports"),
|
||||
path("reports/<id>", detail_report_view, name="report-detail"),
|
||||
]
|
114
analysis/utils/excel/excel.py
Normal file
114
analysis/utils/excel/excel.py
Normal file
@ -0,0 +1,114 @@
|
||||
"""
|
||||
Author: Michel Peltriaux
|
||||
Organization: Struktur- und Genehmigungsdirektion Nord, Rhineland-Palatinate, Germany
|
||||
Contact: michel.peltriaux@sgdnord.rlp.de
|
||||
Created on: 21.10.21
|
||||
|
||||
"""
|
||||
from django.core.files.temp import NamedTemporaryFile
|
||||
from openpyxl import load_workbook
|
||||
|
||||
|
||||
class TempExcelFile:
|
||||
""" A temporary excel sheet which will not be saved on the hard drive permanently.
|
||||
|
||||
Using a template file and a template_map dictionary, this class can be used to fill in automatically
|
||||
predefined values into certain cells.
|
||||
|
||||
Can be used to create excel files from data and sending it as a response like
|
||||
_file = TempExcelFile()
|
||||
response = HttpResponse(
|
||||
content=file.stream,
|
||||
content_type="application/ms-excel",
|
||||
)
|
||||
response['Content-Disposition'] = 'attachment; filename=my_file.xlsx'
|
||||
return response
|
||||
|
||||
"""
|
||||
stream = None
|
||||
_template_file_path = None
|
||||
_template_map = {}
|
||||
_data_obj = None
|
||||
|
||||
def __init__(self, template_file_path: str = None, template_map: dict = None):
|
||||
self._template_map = template_map or {}
|
||||
self._template_file_path = template_file_path
|
||||
self._workbook = load_workbook(template_file_path)
|
||||
self._file = NamedTemporaryFile()
|
||||
|
||||
self._replace_template_placeholders()
|
||||
|
||||
def _replace_template_placeholders(self, start_row: int = 0):
|
||||
""" Replaces all placeholder inside the template according to _template_map
|
||||
|
||||
Args:
|
||||
start_row (int): Defines where to start
|
||||
|
||||
Returns:
|
||||
|
||||
"""
|
||||
sheets = self._workbook.worksheets
|
||||
for sheet in sheets:
|
||||
ws = sheet
|
||||
# Always activate sheet protection
|
||||
ws.protection.sheet = True
|
||||
ws.protection.enable()
|
||||
_rows = ws.iter_rows(start_row)
|
||||
for row in _rows:
|
||||
for cell in row:
|
||||
val = cell.value
|
||||
if val in self._template_map:
|
||||
attr = self._template_map[val]
|
||||
# If keyword '_iter' can be found inside the placeholder value it's an iterable and we
|
||||
# need to process it differently
|
||||
if isinstance(attr, dict):
|
||||
# Read the iterable object and related attributes from the dict
|
||||
_iter_obj = attr.get("iterable", None)
|
||||
_attrs = attr.get("attrs", [])
|
||||
self._add_cells_from_iterable(ws, cell, _iter_obj, _attrs)
|
||||
# Since the sheet length did change now, we need to rerun this function starting with the new
|
||||
# row counter
|
||||
self._replace_template_placeholders(start_row=cell.row + len(_iter_obj))
|
||||
else:
|
||||
cell.value = attr
|
||||
self._workbook.save(self._file.name)
|
||||
self._file.seek(0)
|
||||
self.stream = self._file.read()
|
||||
|
||||
def _add_cells_from_iterable(self, ws, start_cell, _iter_obj: iter, _attrs: list):
|
||||
"""
|
||||
Adds iterable data defined by _template_map like
|
||||
...
|
||||
"some_placeholder_iter": {
|
||||
"iterable": iterable_object,
|
||||
"attrs": [
|
||||
"attr1",
|
||||
"attr2",
|
||||
"attr3",
|
||||
...
|
||||
]
|
||||
},
|
||||
...
|
||||
|
||||
Args:
|
||||
ws (Workbook): The active workbook
|
||||
_iter_obj (dict): Iterable definitions from template_map
|
||||
|
||||
Returns:
|
||||
|
||||
"""
|
||||
# Save border style
|
||||
border_style = start_cell.border.copy()
|
||||
# Drop current row, since it is just placeholder
|
||||
ws.delete_rows(start_cell.row)
|
||||
# Add enoug empty rows for the data
|
||||
ws.insert_rows(start_cell.row, len(_iter_obj))
|
||||
|
||||
i = 0
|
||||
for _iter_entry in _iter_obj:
|
||||
j = 0
|
||||
for _iter_attr in _attrs:
|
||||
_new_cell = ws.cell(start_cell.row + i, start_cell.column + j, _iter_entry.get(_iter_attr, "MISSING"))
|
||||
_new_cell.border = border_style
|
||||
j += 1
|
||||
i += 1
|
BIN
analysis/utils/excel/excel_report.xlsx
Normal file
BIN
analysis/utils/excel/excel_report.xlsx
Normal file
Binary file not shown.
563
analysis/utils/report.py
Normal file
563
analysis/utils/report.py
Normal file
@ -0,0 +1,563 @@
|
||||
"""
|
||||
Author: Michel Peltriaux
|
||||
Organization: Struktur- und Genehmigungsdirektion Nord, Rhineland-Palatinate, Germany
|
||||
Contact: michel.peltriaux@sgdnord.rlp.de
|
||||
Created on: 18.10.21
|
||||
|
||||
"""
|
||||
from django.contrib.gis.db.models import MultiPolygonField
|
||||
from django.contrib.gis.db.models.functions import NumGeometries
|
||||
from django.db.models import Count, Sum, Q
|
||||
from django.db.models.functions import Cast
|
||||
|
||||
from analysis.settings import LKOMPVZVO_PUBLISH_DATE
|
||||
from codelist.models import KonovaCode
|
||||
from codelist.settings import CODELIST_LAW_ID
|
||||
from compensation.models import Compensation, Payment, EcoAccountDeduction, EcoAccount
|
||||
from intervention.models import Intervention
|
||||
from konova.models import Geometry
|
||||
from konova.sub_settings.django_settings import BASE_DIR, DEFAULT_DATE_FORMAT
|
||||
from konova.sub_settings.lanis_settings import DEFAULT_SRID_RLP
|
||||
|
||||
|
||||
class TimespanReport:
|
||||
""" Holds multiple report elements for a timespan report
|
||||
|
||||
"""
|
||||
office_id = -1
|
||||
date_from = -1
|
||||
date_to = -1
|
||||
|
||||
# Excel map is used to map a cell value ("A1") to an attribute
|
||||
excel_map = {}
|
||||
excel_template_path = f"{BASE_DIR}/analysis/utils/excel/excel_report.xlsx"
|
||||
|
||||
class InterventionReport:
|
||||
queryset = Intervention.objects.none()
|
||||
queryset_checked = Intervention.objects.none()
|
||||
queryset_recorded = Intervention.objects.none()
|
||||
|
||||
queryset_count = -1
|
||||
queryset_checked_count = -1
|
||||
queryset_recorded_count = -1
|
||||
|
||||
# Law related
|
||||
law_sum = -1
|
||||
law_sum_checked = -1
|
||||
law_sum_recorded = -1
|
||||
evaluated_laws = None
|
||||
|
||||
# Compensations related
|
||||
compensation_sum = -1
|
||||
compensation_sum_checked = -1
|
||||
compensation_sum_recorded = -1
|
||||
payment_sum = -1
|
||||
payment_sum_checked = -1
|
||||
payment_sum_recorded = -1
|
||||
deduction_sum = -1
|
||||
deduction_sum_checked = -1
|
||||
deduction_sum_recorded = -1
|
||||
|
||||
excel_map = {}
|
||||
|
||||
def __init__(self, id: str, date_from: str, date_to: str):
|
||||
self.queryset = Intervention.objects.filter(
|
||||
responsible__conservation_office__id=id,
|
||||
legal__registration_date__gt=LKOMPVZVO_PUBLISH_DATE,
|
||||
deleted=None,
|
||||
created__timestamp__date__gte=date_from,
|
||||
created__timestamp__date__lte=date_to,
|
||||
)
|
||||
self.queryset_checked = self.queryset.filter(
|
||||
checked__isnull=False
|
||||
)
|
||||
self.queryset_recorded = self.queryset.filter(
|
||||
recorded__isnull=False
|
||||
)
|
||||
self.queryset_count = self.queryset.count()
|
||||
self.queryset_checked_count = self.queryset_checked.count()
|
||||
self.queryset_recorded_count = self.queryset_recorded.count()
|
||||
|
||||
self._create_report()
|
||||
self._define_excel_map()
|
||||
|
||||
def _define_excel_map(self):
|
||||
""" Define the excel map, which holds values for each placeholder used in the template
|
||||
|
||||
Returns:
|
||||
|
||||
"""
|
||||
self.excel_map = {
|
||||
"i_checked": self.queryset_checked_count,
|
||||
"i_recorded": self.queryset_recorded_count,
|
||||
"i_total": self.queryset_count,
|
||||
"i_compensations_checked": self.compensation_sum_checked,
|
||||
"i_compensations_recorded": self.compensation_sum_recorded,
|
||||
"i_compensations_total": self.compensation_sum,
|
||||
"i_payments_recorded": self.payment_sum_recorded,
|
||||
"i_payments_checked": self.payment_sum_checked,
|
||||
"i_payments_total": self.payment_sum,
|
||||
"i_deductions_recorded": self.deduction_sum_recorded,
|
||||
"i_deductions_checked": self.deduction_sum_checked,
|
||||
"i_deductions_total": self.deduction_sum,
|
||||
"i_laws_iter": {
|
||||
"iterable": self.evaluated_laws,
|
||||
"attrs": [
|
||||
"short_name",
|
||||
"num",
|
||||
"num_checked",
|
||||
"num_recorded",
|
||||
]
|
||||
},
|
||||
"i_laws_checked": self.law_sum_checked,
|
||||
"i_laws_recorded": self.law_sum_recorded,
|
||||
"i_laws_total": self.law_sum,
|
||||
}
|
||||
|
||||
def _create_report(self):
|
||||
""" Creates all report information
|
||||
|
||||
Returns:
|
||||
|
||||
"""
|
||||
self._evaluate_laws()
|
||||
self._evaluate_compensations()
|
||||
|
||||
def _evaluate_laws(self):
|
||||
""" Analyzes the intervention-law distribution
|
||||
|
||||
Returns:
|
||||
|
||||
"""
|
||||
# Count interventions based on law
|
||||
# Fetch all KonovaCodes for laws, sorted alphabetically
|
||||
laws = KonovaCode.objects.filter(
|
||||
is_archived=False,
|
||||
is_leaf=True,
|
||||
code_lists__in=[CODELIST_LAW_ID],
|
||||
).order_by(
|
||||
"long_name"
|
||||
)
|
||||
|
||||
evaluated_laws = []
|
||||
sum_num_checked = 0
|
||||
sum_num_recorded = 0
|
||||
sum_num = 0
|
||||
for law in laws:
|
||||
num = self.queryset.filter(
|
||||
legal__laws__atom_id=law.atom_id
|
||||
).count()
|
||||
num_checked = self.queryset_checked.filter(
|
||||
legal__laws__atom_id=law.atom_id
|
||||
).count()
|
||||
num_recorded = self.queryset_recorded.filter(
|
||||
legal__laws__atom_id=law.atom_id
|
||||
).count()
|
||||
evaluated_laws.append({
|
||||
"short_name": law.short_name,
|
||||
"long_name": law.long_name,
|
||||
"num": num,
|
||||
"num_checked": num_checked,
|
||||
"num_recorded": num_recorded,
|
||||
})
|
||||
sum_num += num
|
||||
sum_num_checked += num_checked
|
||||
sum_num_recorded += num_recorded
|
||||
|
||||
self.evaluated_laws = evaluated_laws
|
||||
self.law_sum = sum_num
|
||||
self.law_sum_checked = sum_num_checked
|
||||
self.law_sum_recorded = sum_num_recorded
|
||||
|
||||
def _evaluate_compensations(self):
|
||||
""" Analyzes the types of compensation distribution
|
||||
|
||||
Returns:
|
||||
|
||||
"""
|
||||
# Count all compensations
|
||||
comps = Compensation.objects.filter(
|
||||
intervention__in=self.queryset
|
||||
)
|
||||
self.compensation_sum = comps.count()
|
||||
self.compensation_sum_checked = comps.filter(intervention__checked__isnull=False).count()
|
||||
self.compensation_sum_recorded = comps.filter(intervention__recorded__isnull=False).count()
|
||||
|
||||
# Count all payments
|
||||
payments = Payment.objects.filter(
|
||||
intervention__in=self.queryset
|
||||
)
|
||||
self.payment_sum = payments.count()
|
||||
self.payment_sum_checked = payments.filter(intervention__checked__isnull=False).count()
|
||||
self.payment_sum_recorded = payments.filter(intervention__recorded__isnull=False).count()
|
||||
|
||||
# Count all deductions
|
||||
deductions = EcoAccountDeduction.objects.filter(
|
||||
intervention__in=self.queryset
|
||||
)
|
||||
self.deduction_sum = deductions.count()
|
||||
self.deduction_sum_checked = deductions.filter(intervention__checked__isnull=False).count()
|
||||
self.deduction_sum_recorded = deductions.filter(intervention__recorded__isnull=False).count()
|
||||
|
||||
class CompensationReport:
|
||||
queryset = Compensation.objects.none()
|
||||
queryset_checked = Compensation.objects.none()
|
||||
queryset_recorded = Compensation.objects.none()
|
||||
queryset_count = -1
|
||||
queryset_checked_count = -1
|
||||
queryset_recorded_count = -1
|
||||
|
||||
queryset_registration_office_unb = Compensation.objects.none()
|
||||
queryset_registration_office_unb_checked = Compensation.objects.none()
|
||||
queryset_registration_office_unb_recorded = Compensation.objects.none()
|
||||
queryset_registration_office_unb_count = -1
|
||||
queryset_registration_office_unb_checked_count = -1
|
||||
queryset_registration_office_unb_recorded_count = -1
|
||||
num_single_surfaces_total_unb = -1
|
||||
|
||||
queryset_registration_office_tbp = Compensation.objects.none()
|
||||
queryset_registration_office_tbp_checked = Compensation.objects.none()
|
||||
queryset_registration_office_tbp_recorded = Compensation.objects.none()
|
||||
queryset_registration_office_tbp_count = -1
|
||||
queryset_registration_office_tbp_checked_count = -1
|
||||
queryset_registration_office_tbp_recorded_count = -1
|
||||
num_single_surfaces_total_tbp = -1
|
||||
|
||||
queryset_registration_office_other = Compensation.objects.none()
|
||||
queryset_registration_office_other_checked = Compensation.objects.none()
|
||||
queryset_registration_office_other_recorded = Compensation.objects.none()
|
||||
queryset_registration_office_other_count = -1
|
||||
queryset_registration_office_other_checked_count = -1
|
||||
queryset_registration_office_other_recorded_count = -1
|
||||
num_single_surfaces_total_other = -1
|
||||
|
||||
num_single_surfaces_total = -1
|
||||
num_single_surfaces_recorded = -1
|
||||
|
||||
# Code list id for 'Träger der Bauleitplanung' parent
|
||||
id_tbp = 1943695
|
||||
# Code list id for 'untere Naturschutzbehörde'
|
||||
id_unb = 1943087
|
||||
# Code list id for 'obere Naturschutzbehörde'
|
||||
id_onb = 1943084
|
||||
|
||||
def __init__(self, id: str, date_from: str, date_to: str):
|
||||
self.queryset = Compensation.objects.filter(
|
||||
intervention__responsible__conservation_office__id=id,
|
||||
intervention__legal__registration_date__gt=LKOMPVZVO_PUBLISH_DATE,
|
||||
deleted=None,
|
||||
intervention__created__timestamp__date__gte=date_from,
|
||||
intervention__created__timestamp__date__lte=date_to,
|
||||
)
|
||||
self.queryset_checked = self.queryset.filter(
|
||||
intervention__checked__isnull=False
|
||||
)
|
||||
self.queryset_recorded = self.queryset.filter(
|
||||
intervention__recorded__isnull=False
|
||||
)
|
||||
|
||||
self.queryset_count = self.queryset.count()
|
||||
self.queryset_checked_count = self.queryset_checked.count()
|
||||
self.queryset_recorded_count = self.queryset_recorded.count()
|
||||
|
||||
self._create_report()
|
||||
self._define_excel_map()
|
||||
|
||||
def _define_excel_map(self):
|
||||
""" Define the excel map, which holds values for each placeholder used in the template
|
||||
|
||||
Returns:
|
||||
|
||||
"""
|
||||
self.excel_map = {
|
||||
"c_unb_checked": self.queryset_registration_office_unb_checked_count,
|
||||
"c_unb_recorded": self.queryset_registration_office_unb_recorded_count,
|
||||
"c_unb": self.queryset_registration_office_unb_count,
|
||||
"c_surfaces_unb": self.num_single_surfaces_total_unb,
|
||||
"c_tbp_checked": self.queryset_registration_office_tbp_checked_count,
|
||||
"c_tbp_recorded": self.queryset_registration_office_tbp_recorded_count,
|
||||
"c_tbp": self.queryset_registration_office_tbp_count,
|
||||
"c_surfaces_tbp": self.num_single_surfaces_total_tbp,
|
||||
"c_other_checked": self.queryset_registration_office_other_checked_count,
|
||||
"c_other_recorded": self.queryset_registration_office_other_recorded_count,
|
||||
"c_other": self.queryset_registration_office_other_count,
|
||||
"c_surfaces_other": self.num_single_surfaces_total_other,
|
||||
"c_checked": self.queryset_checked_count,
|
||||
"c_recorded": self.queryset_recorded_count,
|
||||
"c_total": self.queryset_count,
|
||||
"c_surfaces": self.num_single_surfaces_total,
|
||||
}
|
||||
|
||||
def _create_report(self):
|
||||
""" Creates all report information
|
||||
|
||||
Returns:
|
||||
|
||||
"""
|
||||
self._evaluate_compensation_responsibility()
|
||||
self._evaluate_surfaces()
|
||||
|
||||
def _evaluate_surfaces(self):
|
||||
""" Evaluates the surfaces of compensation Multipolygon fields
|
||||
|
||||
Returns:
|
||||
|
||||
"""
|
||||
# Evaluate all surfaces
|
||||
ids = self.queryset.values_list("geometry_id")
|
||||
self.num_single_surfaces_total = self._count_geometry_surfaces(ids)
|
||||
|
||||
# Evaluate surfaces where the conservation office is the registration office as well
|
||||
ids = self.queryset_registration_office_unb.values_list("geometry_id")
|
||||
self.num_single_surfaces_total_unb = self._count_geometry_surfaces(ids)
|
||||
|
||||
# Evaluates surfaces where the registration office is a Träger Bauleitplanung
|
||||
ids = self.queryset_registration_office_tbp.values_list("geometry_id")
|
||||
self.num_single_surfaces_total_tbp = self._count_geometry_surfaces(ids)
|
||||
|
||||
# Evaluates surfaces where any other registration office is responsible
|
||||
ids = self.queryset_registration_office_other.values_list("geometry_id")
|
||||
self.num_single_surfaces_total_other = self._count_geometry_surfaces(ids)
|
||||
|
||||
def _count_geometry_surfaces(self, ids: list):
|
||||
""" Wraps counting of geometry surfaces from a given list of ids
|
||||
|
||||
Args:
|
||||
ids (list): List of geometry ids
|
||||
|
||||
Returns:
|
||||
|
||||
"""
|
||||
# Now select all geometries matching the ids
|
||||
# Then perform a ST_NumGeometries variant over all geometries
|
||||
# Then sum up all of the calculated surface numbers
|
||||
return Geometry.objects.filter(
|
||||
id__in=ids
|
||||
).annotate(
|
||||
geom_cast=Cast("geom", MultiPolygonField(srid=DEFAULT_SRID_RLP))
|
||||
).annotate(
|
||||
num=NumGeometries("geom_cast")
|
||||
).aggregate(
|
||||
num_geoms=Sum("num")
|
||||
)["num_geoms"] or 0
|
||||
|
||||
def _evaluate_compensation_responsibility(self):
|
||||
""" Evaluates compensations based on different responsibility areas
|
||||
|
||||
unb -> Untere Naturschutzbehörde
|
||||
Holds entries where conservation_office and registration_office basically are the same
|
||||
tbp -> Träger Bauleitplanung
|
||||
Holds entries where registration_office is a Träger der Bauleitplanung
|
||||
other -> Other registration offices
|
||||
Holds all other entries
|
||||
|
||||
Returns:
|
||||
|
||||
"""
|
||||
self.queryset_registration_office_unb = self.queryset.filter(
|
||||
intervention__responsible__registration_office__parent__id=self.id_unb
|
||||
)
|
||||
self.queryset_registration_office_unb_recorded = self.queryset_registration_office_unb.filter(
|
||||
intervention__recorded__isnull=False,
|
||||
)
|
||||
self.queryset_registration_office_unb_checked = self.queryset_registration_office_unb.filter(
|
||||
intervention__checked__isnull=False,
|
||||
)
|
||||
self.queryset_registration_office_unb_count = self.queryset_registration_office_unb.count()
|
||||
self.queryset_registration_office_unb_checked_count = self.queryset_registration_office_unb_checked.count()
|
||||
self.queryset_registration_office_unb_recorded_count = self.queryset_registration_office_unb_recorded.count()
|
||||
|
||||
self.queryset_registration_office_tbp = self.queryset.filter(
|
||||
intervention__responsible__registration_office__parent__id=self.id_tbp
|
||||
)
|
||||
self.queryset_registration_office_tbp_recorded = self.queryset_registration_office_tbp.filter(
|
||||
intervention__recorded__isnull=False,
|
||||
)
|
||||
self.queryset_registration_office_tbp_checked = self.queryset_registration_office_tbp.filter(
|
||||
intervention__checked__isnull=False,
|
||||
)
|
||||
self.queryset_registration_office_tbp_count = self.queryset_registration_office_tbp.count()
|
||||
self.queryset_registration_office_tbp_checked_count = self.queryset_registration_office_tbp_checked.count()
|
||||
self.queryset_registration_office_tbp_recorded_count = self.queryset_registration_office_tbp_recorded.count()
|
||||
|
||||
self.queryset_registration_office_other = self.queryset.exclude(
|
||||
Q(id__in=self.queryset_registration_office_tbp) | Q(id__in=self.queryset_registration_office_unb)
|
||||
)
|
||||
self.queryset_registration_office_other_recorded = self.queryset_registration_office_other.filter(
|
||||
intervention__recorded__isnull=False,
|
||||
)
|
||||
self.queryset_registration_office_other_checked = self.queryset_registration_office_other.filter(
|
||||
intervention__checked__isnull=False,
|
||||
)
|
||||
self.queryset_registration_office_other_count = self.queryset_registration_office_other.count()
|
||||
self.queryset_registration_office_other_checked_count = self.queryset_registration_office_other_checked.count()
|
||||
self.queryset_registration_office_other_recorded_count = self.queryset_registration_office_other_recorded.count()
|
||||
|
||||
class EcoAccountReport:
|
||||
queryset = EcoAccount.objects.none()
|
||||
queryset_recorded = EcoAccount.objects.none()
|
||||
queryset_count = -1
|
||||
queryset_recorded_count = -1
|
||||
|
||||
queryset_deductions = EcoAccountDeduction.objects.none()
|
||||
queryset_deductions_recorded = EcoAccountDeduction.objects.none()
|
||||
queryset_has_deductions = EcoAccountDeduction.objects.none()
|
||||
queryset_deductions_count = -1
|
||||
queryset_deductions_recorded_count = -1
|
||||
queryset_has_deductions_count = -1
|
||||
|
||||
# Total size of deductions
|
||||
deductions_sq_m = -1
|
||||
recorded_deductions_sq_m = -1
|
||||
|
||||
def __init__(self, id: str, date_from: str, date_to: str):
|
||||
# First fetch all eco account for this office
|
||||
self.queryset = EcoAccount.objects.filter(
|
||||
legal__registration_date__gt=LKOMPVZVO_PUBLISH_DATE,
|
||||
responsible__conservation_office__id=id,
|
||||
deleted=None,
|
||||
created__timestamp__date__gte=date_from,
|
||||
created__timestamp__date__lte=date_to,
|
||||
)
|
||||
self.queryset_recorded = self.queryset.filter(
|
||||
recorded__isnull=False
|
||||
)
|
||||
# Fetch all related deductions
|
||||
self.queryset_deductions = EcoAccountDeduction.objects.filter(
|
||||
account__id__in=self.queryset.values_list("id")
|
||||
)
|
||||
# Fetch deductions for interventions which are already recorded
|
||||
self.queryset_deductions_recorded = self.queryset_deductions.filter(
|
||||
intervention__recorded__isnull=False
|
||||
)
|
||||
|
||||
self.queryset_count = self.queryset.count()
|
||||
self.queryset_recorded_count = self.queryset_recorded.count()
|
||||
self.queryset_deductions_count = self.queryset_deductions.count()
|
||||
self.queryset_deductions_recorded_count = self.queryset_deductions_recorded.count()
|
||||
self.queryset_has_deductions_count = self.queryset_has_deductions.count()
|
||||
|
||||
self._create_report()
|
||||
self._define_excel_map()
|
||||
|
||||
def _define_excel_map(self):
|
||||
""" Define the excel map, which holds values for each placeholder used in the template
|
||||
|
||||
Returns:
|
||||
|
||||
"""
|
||||
self.excel_map = {
|
||||
"acc_total": self.queryset_count,
|
||||
"acc_recorded": self.queryset_recorded_count,
|
||||
"acc_deduc_recorded": self.queryset_deductions_recorded_count,
|
||||
"acc_deduc_surface_recorded": self.recorded_deductions_sq_m,
|
||||
"acc_deduc_total": self.queryset_deductions_count,
|
||||
"acc_deduc_surface_total": self.deductions_sq_m,
|
||||
}
|
||||
|
||||
def _create_report(self):
|
||||
""" Creates all report information
|
||||
|
||||
Returns:
|
||||
|
||||
"""
|
||||
self._evaluate_deductions()
|
||||
|
||||
def _evaluate_deductions(self):
|
||||
self.deductions_sq_m = self.queryset_deductions.aggregate(
|
||||
sum=Sum("surface")
|
||||
)["sum"] or 0
|
||||
self.recorded_deductions_sq_m = self.queryset_deductions_recorded.aggregate(
|
||||
sum=Sum("surface")
|
||||
)["sum"] or 0
|
||||
|
||||
class OldDataReport:
|
||||
"""
|
||||
Evaluates 'old data' (registered (zugelassen) before 16.06.2018)
|
||||
"""
|
||||
queryset_intervention = Intervention.objects.none()
|
||||
queryset_intervention_recorded = Intervention.objects.none()
|
||||
queryset_intervention_count = -1
|
||||
queryset_intervention_recorded_count = -1
|
||||
|
||||
queryset_comps = Compensation.objects.none()
|
||||
queryset_comps_recorded = Compensation.objects.none()
|
||||
queryset_comps_count = -1
|
||||
queryset_comps_recorded_count = -1
|
||||
|
||||
queryset_acc = EcoAccount.objects.none()
|
||||
queryset_acc_recorded = EcoAccount.objects.none()
|
||||
queryset_acc_count = -1
|
||||
queryset_acc_recorded_count = -1
|
||||
|
||||
def __init__(self, id: str, date_from: str, date_to: str):
|
||||
self.queryset_intervention = Intervention.objects.filter(
|
||||
legal__registration_date__lte=LKOMPVZVO_PUBLISH_DATE,
|
||||
responsible__conservation_office__id=id,
|
||||
deleted=None,
|
||||
created__timestamp__date__gte=date_from,
|
||||
created__timestamp__date__lte=date_to,
|
||||
)
|
||||
self.queryset_intervention_recorded = self.queryset_intervention.filter(
|
||||
recorded__isnull=False
|
||||
)
|
||||
self.queryset_intervention_count = self.queryset_intervention.count()
|
||||
self.queryset_intervention_recorded_count = self.queryset_intervention_recorded.count()
|
||||
|
||||
self.queryset_comps = Compensation.objects.filter(
|
||||
intervention__in=self.queryset_intervention
|
||||
)
|
||||
self.queryset_comps_recorded = Compensation.objects.filter(
|
||||
intervention__in=self.queryset_intervention_recorded,
|
||||
)
|
||||
self.queryset_comps_count = self.queryset_comps.count()
|
||||
self.queryset_comps_recorded_count = self.queryset_comps_recorded.count()
|
||||
|
||||
self.queryset_acc = EcoAccount.objects.filter(
|
||||
legal__registration_date__lte=LKOMPVZVO_PUBLISH_DATE,
|
||||
responsible__conservation_office__id=id,
|
||||
deleted=None,
|
||||
created__timestamp__date__gte=date_from,
|
||||
created__timestamp__date__lte=date_to,
|
||||
)
|
||||
self.queryset_acc_recorded = self.queryset_acc.filter(
|
||||
recorded__isnull=False,
|
||||
)
|
||||
self.queryset_acc_count = self.queryset_acc.count()
|
||||
self.queryset_acc_recorded_count = self.queryset_acc_recorded.count()
|
||||
self._define_excel_map()
|
||||
|
||||
def _define_excel_map(self):
|
||||
""" Define the excel map, which holds values for each placeholder used in the template
|
||||
|
||||
Returns:
|
||||
|
||||
"""
|
||||
self.excel_map = {
|
||||
"old_i_recorded": self.queryset_intervention_recorded_count,
|
||||
"old_i_total": self.queryset_intervention_count,
|
||||
"old_c_recorded": self.queryset_comps_recorded_count,
|
||||
"old_c_total": self.queryset_comps_count,
|
||||
"old_ea_recorded": self.queryset_acc_recorded_count,
|
||||
"old_ea_total": self.queryset_acc_count,
|
||||
}
|
||||
|
||||
def __init__(self, office_id: str, date_from: str, date_to: str):
|
||||
self.office_id = office_id
|
||||
self.date_from = date_from
|
||||
self.date_to = date_to
|
||||
|
||||
self.intervention_report = self.InterventionReport(self.office_id, date_from, date_to)
|
||||
self.compensation_report = self.CompensationReport(self.office_id, date_from, date_to)
|
||||
self.eco_account_report = self.EcoAccountReport(self.office_id, date_from, date_to)
|
||||
self.old_data_report = self.OldDataReport(self.office_id, date_from, date_to)
|
||||
|
||||
# Build excel map
|
||||
self.excel_map = {
|
||||
"date_from": date_from.strftime(DEFAULT_DATE_FORMAT),
|
||||
"date_to": date_to.strftime(DEFAULT_DATE_FORMAT),
|
||||
}
|
||||
self.excel_map.update(self.intervention_report.excel_map)
|
||||
self.excel_map.update(self.compensation_report.excel_map)
|
||||
self.excel_map.update(self.eco_account_report.excel_map)
|
||||
self.excel_map.update(self.old_data_report.excel_map)
|
98
analysis/views.py
Normal file
98
analysis/views.py
Normal file
@ -0,0 +1,98 @@
|
||||
from django.contrib import messages
|
||||
from django.contrib.auth.decorators import login_required
|
||||
from django.http import HttpRequest, HttpResponse
|
||||
from django.shortcuts import render, redirect, get_object_or_404
|
||||
from django.utils import timezone
|
||||
|
||||
from analysis.forms import TimespanReportForm
|
||||
from analysis.utils.excel.excel import TempExcelFile
|
||||
from analysis.utils.report import TimespanReport
|
||||
from codelist.models import KonovaCode
|
||||
from konova.contexts import BaseContext
|
||||
from konova.decorators import conservation_office_group_required
|
||||
from konova.utils.message_templates import FORM_INVALID, PARAMS_INVALID
|
||||
|
||||
|
||||
@login_required
@conservation_office_group_required
def index_reports_view(request: HttpRequest):
    """ Renders the report index page and handles the timespan report form.

    Args:
        request (HttpRequest): The incoming request

    Returns:
        HttpResponse
    """
    template = "analysis/reports/index.html"
    form = TimespanReportForm(request.POST or None)
    if request.method == "POST":
        if not form.is_valid():
            messages.error(
                request,
                FORM_INVALID,
                extra_tags="danger",
            )
        else:
            # The form builds the detail report url on save
            return redirect(form.save())
    context = BaseContext(request, {"form": form}).context
    return render(request, template, context)
|
||||
|
||||
|
||||
@login_required
@conservation_office_group_required
def detail_report_view(request: HttpRequest, id: str):
    """ Renders the detailed report for a conservation office

    Args:
        request (HttpRequest): The incoming request
        id (str): The conservation_office KonovaCode id

    Returns:
        HttpResponse: Rendered html page or the excel file download
    """
    # Try to resolve the requested office id
    cons_office = get_object_or_404(
        KonovaCode,
        id=id
    )
    # Try to resolve the date parameters into Date objects -> redirect if this fails.
    # fromisoformat raises TypeError (not ValueError) when df/dt are missing (None),
    # so both must be caught to keep the friendly redirect instead of a server error.
    try:
        df = request.GET.get("df", None)
        dt = request.GET.get("dt", None)
        date_from = timezone.make_aware(timezone.datetime.fromisoformat(df))
        date_to = timezone.make_aware(timezone.datetime.fromisoformat(dt))
    except (TypeError, ValueError):
        messages.error(
            request,
            PARAMS_INVALID,
            extra_tags="danger",
        )
        return redirect("analysis:reports")

    # Check whether the html default rendering is requested or an alternative
    format_param = request.GET.get("format", "html")
    report = TimespanReport(id, date_from, date_to)

    if format_param == "html":
        template = "analysis/reports/detail.html"
        context = {
            "office": cons_office,
            "report": report,
        }
        context = BaseContext(request, context).context
        return render(request, template, context)
    elif format_param == "excel":
        file = TempExcelFile(report.excel_template_path, report.excel_map)
        response = HttpResponse(
            content=file.stream,
            content_type="application/ms-excel",
        )
        response['Content-Disposition'] = f'attachment; filename={cons_office.long_name}_{df}_{dt}.xlsx'
        return response
    else:
        # Unknown format parameter
        raise NotImplementedError
|
33
api/admin.py
Normal file
33
api/admin.py
Normal file
@ -0,0 +1,33 @@
|
||||
from django.contrib import admin
|
||||
|
||||
from api.models.token import APIUserToken, OAuthToken
|
||||
|
||||
|
||||
class APITokenAdmin(admin.ModelAdmin):
    """ Admin integration for API user tokens. """
    # Token value itself stays read-only; activation/expiry remain editable
    list_display = ["token", "valid_until", "is_active"]
    readonly_fields = ["token"]
    search_fields = ["token"]
|
||||
|
||||
|
||||
class OAuthTokenAdmin(admin.ModelAdmin):
    """ Admin integration for OAuth tokens. """
    list_display = ["access_token", "refresh_token", "expires_on"]
    search_fields = ["access_token", "refresh_token"]
|
||||
|
||||
|
||||
admin.site.register(APIUserToken, APITokenAdmin)
|
||||
admin.site.register(OAuthToken, OAuthTokenAdmin)
|
5
api/apps.py
Normal file
5
api/apps.py
Normal file
@ -0,0 +1,5 @@
|
||||
from django.apps import AppConfig
|
||||
|
||||
|
||||
class ApiConfig(AppConfig):
    """ Django app configuration for the 'api' app. """

    name = 'api'
|
23
api/migrations/0001_initial.py
Normal file
23
api/migrations/0001_initial.py
Normal file
@ -0,0 +1,23 @@
|
||||
# Generated by Django 3.1.3 on 2022-01-28 15:48
|
||||
|
||||
from django.db import migrations, models
|
||||
import konova.utils.generators
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
initial = True
|
||||
|
||||
dependencies = [
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.CreateModel(
|
||||
name='APIUserToken',
|
||||
fields=[
|
||||
('token', models.CharField(default=konova.utils.generators.generate_token, max_length=1000, primary_key=True, serialize=False)),
|
||||
('valid_until', models.DateField(blank=True, help_text='Token is only valid until this date', null=True)),
|
||||
('is_active', models.BooleanField(default=False, help_text='Must be activated by an admin')),
|
||||
],
|
||||
),
|
||||
]
|
18
api/migrations/0002_alter_apiusertoken_valid_until.py
Normal file
18
api/migrations/0002_alter_apiusertoken_valid_until.py
Normal file
@ -0,0 +1,18 @@
|
||||
# Generated by Django 4.2.6 on 2023-11-30 11:56
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('api', '0001_initial'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name='apiusertoken',
|
||||
name='valid_until',
|
||||
field=models.DateField(blank=True, help_text='Token is only valid until this date. Forever if null/blank.', null=True),
|
||||
),
|
||||
]
|
26
api/migrations/0003_oauthtoken.py
Normal file
26
api/migrations/0003_oauthtoken.py
Normal file
@ -0,0 +1,26 @@
|
||||
# Generated by Django 5.0.4 on 2024-04-30 07:20
|
||||
|
||||
import uuid
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('api', '0002_alter_apiusertoken_valid_until'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.CreateModel(
|
||||
name='OAuthToken',
|
||||
fields=[
|
||||
('id', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
|
||||
('access_token', models.CharField(db_comment='OAuth access token', max_length=255)),
|
||||
('refresh_token', models.CharField(db_comment='OAuth refresh token', max_length=255)),
|
||||
('expires_on', models.DateTimeField(db_comment='When the token will be expired')),
|
||||
],
|
||||
options={
|
||||
'abstract': False,
|
||||
},
|
||||
),
|
||||
]
|
0
api/migrations/__init__.py
Normal file
0
api/migrations/__init__.py
Normal file
@ -2,7 +2,7 @@
|
||||
Author: Michel Peltriaux
|
||||
Organization: Struktur- und Genehmigungsdirektion Nord, Rhineland-Palatinate, Germany
|
||||
Contact: michel.peltriaux@sgdnord.rlp.de
|
||||
Created on: 07.12.20
|
||||
Created on: 21.01.22
|
||||
|
||||
"""
|
||||
from konova.enums import BaseEnum
|
||||
from .token import *
|
179
api/models/token.py
Normal file
179
api/models/token.py
Normal file
@ -0,0 +1,179 @@
|
||||
import json
|
||||
from datetime import timedelta
|
||||
|
||||
import requests
|
||||
from django.core.exceptions import ObjectDoesNotExist
|
||||
from django.db import models
|
||||
from django.utils import timezone
|
||||
from django.utils.timezone import now
|
||||
|
||||
from konova.models import UuidModel
|
||||
from konova.sub_settings.sso_settings import OAUTH_CLIENT_ID, OAUTH_CLIENT_SECRET, SSO_SERVER_BASE
|
||||
from konova.utils.generators import generate_token
|
||||
|
||||
|
||||
class APIUserToken(models.Model):
|
||||
token = models.CharField(
|
||||
primary_key=True,
|
||||
max_length=1000,
|
||||
default=generate_token,
|
||||
)
|
||||
valid_until = models.DateField(
|
||||
blank=True,
|
||||
null=True,
|
||||
help_text="Token is only valid until this date. Forever if null/blank.",
|
||||
)
|
||||
is_active = models.BooleanField(
|
||||
default=False,
|
||||
help_text="Must be activated by an admin"
|
||||
)
|
||||
|
||||
def __str__(self):
|
||||
return self.token
|
||||
|
||||
@staticmethod
|
||||
def get_user_from_token(token: str):
|
||||
""" Getter for the related user object
|
||||
|
||||
Args:
|
||||
token (str): The used token
|
||||
|
||||
Returns:
|
||||
user (User): Otherwise None
|
||||
"""
|
||||
_today = timezone.now().date()
|
||||
try:
|
||||
token_obj = APIUserToken.objects.get(
|
||||
token=token,
|
||||
)
|
||||
if not token_obj.is_active:
|
||||
raise PermissionError("Token unverified")
|
||||
if token_obj.valid_until is not None and token_obj.valid_until < _today:
|
||||
raise PermissionError("Token validity expired")
|
||||
except ObjectDoesNotExist:
|
||||
raise PermissionError("Token unknown")
|
||||
return token_obj.user
|
||||
|
||||
|
||||
class OAuthToken(UuidModel):
|
||||
access_token = models.CharField(
|
||||
max_length=255,
|
||||
blank=False,
|
||||
null=False,
|
||||
db_comment="OAuth access token"
|
||||
)
|
||||
refresh_token = models.CharField(
|
||||
max_length=255,
|
||||
blank=False,
|
||||
null=False,
|
||||
db_comment="OAuth refresh token"
|
||||
)
|
||||
expires_on = models.DateTimeField(
|
||||
db_comment="When the token will be expired"
|
||||
)
|
||||
|
||||
ASSUMED_LATENCY = 1000 # assumed latency between creation and receiving of an access token
|
||||
|
||||
def __str__(self):
|
||||
return str(self.access_token)
|
||||
|
||||
@staticmethod
|
||||
def from_access_token_response(access_token_data: str, received_on):
|
||||
"""
|
||||
Creates an OAuthToken based on retrieved access token data (OAuth2.0 specification)
|
||||
|
||||
Args:
|
||||
access_token_data (str): OAuth2.0 response data
|
||||
received_on (): Timestamp when the response has been received
|
||||
|
||||
Returns:
|
||||
|
||||
"""
|
||||
oauth_token = OAuthToken()
|
||||
data = json.loads(access_token_data)
|
||||
|
||||
oauth_token.access_token = data.get("access_token")
|
||||
oauth_token.refresh_token = data.get("refresh_token")
|
||||
|
||||
expires_on = received_on + timedelta(
|
||||
seconds=(data.get("expires_in") + OAuthToken.ASSUMED_LATENCY)
|
||||
)
|
||||
oauth_token.expires_on = expires_on
|
||||
|
||||
return oauth_token
|
||||
|
||||
def refresh(self):
|
||||
url = f"{SSO_SERVER_BASE}o/token/"
|
||||
params = {
|
||||
"grant_type": "refresh_token",
|
||||
"refresh_token": self.refresh_token,
|
||||
"client_id": OAUTH_CLIENT_ID,
|
||||
"client_secret": OAUTH_CLIENT_SECRET
|
||||
}
|
||||
response = requests.post(
|
||||
url,
|
||||
params
|
||||
)
|
||||
_now = now()
|
||||
is_response_invalid = response.status_code != 200
|
||||
if is_response_invalid:
|
||||
raise RuntimeError(f"Refreshing token not possible: {response.status_code}")
|
||||
|
||||
response_content = response.content.decode("utf-8")
|
||||
response_content = json.loads(response_content)
|
||||
|
||||
access_token = response_content.get("access_token")
|
||||
refresh_token = response_content.get("refresh_token")
|
||||
expires_in = response_content.get("expires")
|
||||
|
||||
self.access_token = access_token
|
||||
self.refresh_token = refresh_token
|
||||
self.expires_in = expires_in
|
||||
self.save()
|
||||
|
||||
return self
|
||||
|
||||
def update_and_get_user(self):
|
||||
from user.models import User
|
||||
url = f"{SSO_SERVER_BASE}users/oauth/data/"
|
||||
|
||||
access_token = self.access_token
|
||||
response = requests.get(
|
||||
url,
|
||||
headers={
|
||||
"Authorization": f"Bearer {access_token}",
|
||||
}
|
||||
)
|
||||
|
||||
is_response_code_invalid = response.status_code != 200
|
||||
if is_response_code_invalid:
|
||||
raise RuntimeError(f"OAuth user data fetching unsuccessful: {response.status_code}")
|
||||
|
||||
response_content = response.content.decode("utf-8")
|
||||
response_content = json.loads(response_content)
|
||||
user = User.oauth_update_user(response_content)
|
||||
|
||||
return user
|
||||
|
||||
def revoke(self) -> int:
|
||||
""" Revokes the OAuth2 token of the user
|
||||
|
||||
(/o/revoke_token/ indeed removes the corresponding access token on provider side and invalidates the
|
||||
submitted refresh token in one step)
|
||||
|
||||
Returns:
|
||||
revocation_status_code (int): HTTP status code for revocation of refresh_token
|
||||
"""
|
||||
revoke_url = f"{SSO_SERVER_BASE}o/revoke_token/"
|
||||
token = self.refresh_token
|
||||
revocation_status_code = requests.post(
|
||||
revoke_url,
|
||||
data={
|
||||
'token': token,
|
||||
'token_type_hint': "refresh_token",
|
||||
},
|
||||
auth=(OAUTH_CLIENT_ID, OAUTH_CLIENT_SECRET),
|
||||
).status_code
|
||||
|
||||
return revocation_status_code
|
||||
|
9
api/settings.py
Normal file
9
api/settings.py
Normal file
@ -0,0 +1,9 @@
|
||||
"""
|
||||
Author: Michel Peltriaux
|
||||
Organization: Struktur- und Genehmigungsdirektion Nord, Rhineland-Palatinate, Germany
|
||||
Contact: michel.peltriaux@sgdnord.rlp.de
|
||||
Created on: 21.01.22
|
||||
|
||||
"""
|
||||
KSP_TOKEN_HEADER_IDENTIFIER = "Ksptoken"
|
||||
KSP_USER_HEADER_IDENTIFIER = "Kspuser"
|
@ -2,7 +2,6 @@
|
||||
Author: Michel Peltriaux
|
||||
Organization: Struktur- und Genehmigungsdirektion Nord, Rhineland-Palatinate, Germany
|
||||
Contact: michel.peltriaux@sgdnord.rlp.de
|
||||
Created on: 07.12.20
|
||||
Created on: 21.01.22
|
||||
|
||||
"""
|
||||
from django.utils.translation import gettext_lazy as _
|
7
api/tests/unit/__init__.py
Normal file
7
api/tests/unit/__init__.py
Normal file
@ -0,0 +1,7 @@
|
||||
"""
|
||||
Author: Michel Peltriaux
|
||||
Organization: Struktur- und Genehmigungsdirektion Nord, Rhineland-Palatinate, Germany
|
||||
Contact: ksp-servicestelle@sgdnord.rlp.de
|
||||
Created on: 17.08.23
|
||||
|
||||
"""
|
71
api/tests/unit/test_token.py
Normal file
71
api/tests/unit/test_token.py
Normal file
@ -0,0 +1,71 @@
|
||||
"""
|
||||
Author: Michel Peltriaux
|
||||
Organization: Struktur- und Genehmigungsdirektion Nord, Rhineland-Palatinate, Germany
|
||||
Contact: ksp-servicestelle@sgdnord.rlp.de
|
||||
Created on: 17.08.23
|
||||
|
||||
"""
|
||||
from datetime import timedelta
|
||||
|
||||
from django.utils.timezone import now
|
||||
|
||||
from api.models import APIUserToken
|
||||
from konova.tests.test_views import BaseTestCase
|
||||
|
||||
|
||||
class APIUserTokenTestCase(BaseTestCase):
|
||||
def setUp(self) -> None:
|
||||
super().setUp()
|
||||
|
||||
self.token = APIUserToken.objects.create()
|
||||
self.superuser.api_token = self.token
|
||||
self.superuser.save()
|
||||
|
||||
def test_str(self):
|
||||
self.assertEqual(str(self.token), self.token.token)
|
||||
|
||||
def test_get_user_from_token(self):
|
||||
a_day = timedelta(days=1)
|
||||
today = now().date()
|
||||
|
||||
self.assertFalse(self.token.is_active)
|
||||
self.assertIsNone(self.token.valid_until)
|
||||
|
||||
try:
|
||||
#Token not existing --> fail
|
||||
token_user = APIUserToken.get_user_from_token(self.token.token[::-1])
|
||||
self.fail("There should not have been any token")
|
||||
except PermissionError:
|
||||
pass
|
||||
|
||||
try:
|
||||
# Token not active --> fail
|
||||
token_user = APIUserToken.get_user_from_token(self.token.token)
|
||||
self.fail("Token is unverified but token user has been fetchable.")
|
||||
except PermissionError:
|
||||
pass
|
||||
self.token.is_active = True
|
||||
self.token.valid_until = today - a_day
|
||||
self.token.save()
|
||||
|
||||
try:
|
||||
# Token valid until yesterday --> fail
|
||||
token_user = APIUserToken.get_user_from_token(self.token.token)
|
||||
self.fail("Token reached end of lifetime but token user has been fetchable.")
|
||||
except PermissionError:
|
||||
pass
|
||||
|
||||
# Token valid until tomorrow --> success
|
||||
self.token.valid_until = today + a_day
|
||||
self.token.save()
|
||||
|
||||
token_user = APIUserToken.get_user_from_token(self.token.token)
|
||||
self.assertEqual(token_user, self.superuser)
|
||||
del token_user
|
||||
|
||||
# Token valid forever --> success
|
||||
self.token.valid_until = None
|
||||
self.token.save()
|
||||
token_user = APIUserToken.get_user_from_token(self.token.token)
|
||||
self.assertEqual(token_user, self.superuser)
|
||||
|
7
api/tests/v1/__init__.py
Normal file
7
api/tests/v1/__init__.py
Normal file
@ -0,0 +1,7 @@
|
||||
"""
|
||||
Author: Michel Peltriaux
|
||||
Organization: Struktur- und Genehmigungsdirektion Nord, Rhineland-Palatinate, Germany
|
||||
Contact: michel.peltriaux@sgdnord.rlp.de
|
||||
Created on: 27.01.22
|
||||
|
||||
"""
|
7
api/tests/v1/create/__init__.py
Normal file
7
api/tests/v1/create/__init__.py
Normal file
@ -0,0 +1,7 @@
|
||||
"""
|
||||
Author: Michel Peltriaux
|
||||
Organization: Struktur- und Genehmigungsdirektion Nord, Rhineland-Palatinate, Germany
|
||||
Contact: michel.peltriaux@sgdnord.rlp.de
|
||||
Created on: 27.01.22
|
||||
|
||||
"""
|
20
api/tests/v1/create/compensation_create_post_body.json
Normal file
20
api/tests/v1/create/compensation_create_post_body.json
Normal file
@ -0,0 +1,20 @@
|
||||
{
|
||||
"type": "MultiPolygon",
|
||||
"coordinates": [
|
||||
],
|
||||
"properties": {
|
||||
"title": "Test_compensation",
|
||||
"is_cef": false,
|
||||
"is_coherence_keeping": false,
|
||||
"is_pik": false,
|
||||
"intervention": "MUST_BE_SET_IN_TEST",
|
||||
"before_states": [
|
||||
],
|
||||
"after_states": [
|
||||
],
|
||||
"actions": [
|
||||
],
|
||||
"deadlines": [
|
||||
]
|
||||
}
|
||||
}
|
5
api/tests/v1/create/deduction_create_post_body.json
Normal file
5
api/tests/v1/create/deduction_create_post_body.json
Normal file
@ -0,0 +1,5 @@
|
||||
{
|
||||
"eco_account": "CHANGE_BEFORE_RUN!!!",
|
||||
"surface": 500.50,
|
||||
"intervention": "CHANGE_BEFORE_RUN!!!"
|
||||
}
|
29
api/tests/v1/create/ecoaccount_create_post_body.json
Normal file
29
api/tests/v1/create/ecoaccount_create_post_body.json
Normal file
@ -0,0 +1,29 @@
|
||||
{
|
||||
"type": "MultiPolygon",
|
||||
"coordinates": [
|
||||
],
|
||||
"properties": {
|
||||
"title": "Test_ecoaccount",
|
||||
"deductable_surface": 10000.0,
|
||||
"is_pik": false,
|
||||
"responsible": {
|
||||
"conservation_office": null,
|
||||
"conservation_file_number": null,
|
||||
"handler": {
|
||||
"type": null,
|
||||
"detail": "Someone"
|
||||
}
|
||||
},
|
||||
"legal": {
|
||||
"agreement_date": null
|
||||
},
|
||||
"before_states": [
|
||||
],
|
||||
"after_states": [
|
||||
],
|
||||
"actions": [
|
||||
],
|
||||
"deadlines": [
|
||||
]
|
||||
}
|
||||
}
|
25
api/tests/v1/create/ema_create_post_body.json
Normal file
25
api/tests/v1/create/ema_create_post_body.json
Normal file
@ -0,0 +1,25 @@
|
||||
{
|
||||
"type": "MultiPolygon",
|
||||
"coordinates": [
|
||||
],
|
||||
"properties": {
|
||||
"title": "Test_ema",
|
||||
"is_pik": false,
|
||||
"responsible": {
|
||||
"conservation_office": null,
|
||||
"conservation_file_number": null,
|
||||
"handler": {
|
||||
"type": null,
|
||||
"detail": "Someone"
|
||||
}
|
||||
},
|
||||
"before_states": [
|
||||
],
|
||||
"after_states": [
|
||||
],
|
||||
"actions": [
|
||||
],
|
||||
"deadlines": [
|
||||
]
|
||||
}
|
||||
}
|
24
api/tests/v1/create/intervention_create_post_body.json
Normal file
24
api/tests/v1/create/intervention_create_post_body.json
Normal file
@ -0,0 +1,24 @@
|
||||
{
|
||||
"type": "MultiPolygon",
|
||||
"coordinates": [
|
||||
],
|
||||
"properties": {
|
||||
"title": "Test_intervention",
|
||||
"responsible": {
|
||||
"registration_office": null,
|
||||
"registration_file_number": null,
|
||||
"conservation_office": null,
|
||||
"conservation_file_number": null,
|
||||
"handler": {
|
||||
"type": null,
|
||||
"detail": "Someone"
|
||||
}
|
||||
},
|
||||
"legal": {
|
||||
"registration_date": null,
|
||||
"binding_date": null,
|
||||
"process_type": null,
|
||||
"laws": []
|
||||
}
|
||||
}
|
||||
}
|
122
api/tests/v1/create/test_api_create.py
Normal file
122
api/tests/v1/create/test_api_create.py
Normal file
@ -0,0 +1,122 @@
|
||||
"""
|
||||
Author: Michel Peltriaux
|
||||
Organization: Struktur- und Genehmigungsdirektion Nord, Rhineland-Palatinate, Germany
|
||||
Contact: michel.peltriaux@sgdnord.rlp.de
|
||||
Created on: 27.01.22
|
||||
|
||||
"""
|
||||
import json
|
||||
|
||||
from django.urls import reverse
|
||||
|
||||
from api.tests.v1.share.test_api_sharing import BaseAPIV1TestCase
|
||||
|
||||
|
||||
class APIV1CreateTestCase(BaseAPIV1TestCase):
|
||||
@classmethod
|
||||
def setUpTestData(cls):
|
||||
super().setUpTestData()
|
||||
|
||||
def _run_create_request(self, url, data):
|
||||
data = json.dumps(data)
|
||||
response = self.client.post(
|
||||
url,
|
||||
data=data,
|
||||
content_type="application/json",
|
||||
**self.header_data
|
||||
)
|
||||
return response
|
||||
|
||||
def _test_create_object(self, url, post_body):
|
||||
""" Tests the API creation of a new data object.
|
||||
|
||||
Post body data stored in a local json file
|
||||
|
||||
Args:
|
||||
url (str): The api creation url
|
||||
post_body (dict): The post body content as dict
|
||||
|
||||
Returns:
|
||||
|
||||
"""
|
||||
response = self._run_create_request(url, post_body)
|
||||
self.assertEqual(response.status_code, 200, msg=response.content)
|
||||
content = json.loads(response.content)
|
||||
self.assertIsNotNone(content.get("id", None), msg=response.content)
|
||||
|
||||
def test_create_intervention(self):
|
||||
""" Tests api creation
|
||||
|
||||
Returns:
|
||||
|
||||
"""
|
||||
url = reverse("api:v1:intervention")
|
||||
json_file_path = "api/tests/v1/create/intervention_create_post_body.json"
|
||||
with open(json_file_path) as json_file:
|
||||
post_body = json.load(fp=json_file)
|
||||
self._test_create_object(url, post_body)
|
||||
|
||||
def test_create_compensation(self):
|
||||
""" Tests api creation
|
||||
|
||||
Returns:
|
||||
|
||||
"""
|
||||
url = reverse("api:v1:compensation")
|
||||
json_file_path = "api/tests/v1/create/compensation_create_post_body.json"
|
||||
with open(json_file_path) as json_file:
|
||||
post_body = json.load(fp=json_file)
|
||||
post_body["properties"]["intervention"] = str(self.intervention.id)
|
||||
|
||||
# Expect this first request to fail, since user has no shared access on the intervention, we want to create
|
||||
# a compensation for
|
||||
response = self._run_create_request(url, post_body)
|
||||
self.assertEqual(response.status_code, 500, msg=response.content)
|
||||
content = json.loads(response.content)
|
||||
self.assertGreater(len(content.get("errors", [])), 0, msg=response.content)
|
||||
|
||||
# Add the user to the shared users of the intervention and try again! Now everything should work as expected.
|
||||
self.intervention.users.add(self.superuser)
|
||||
self._test_create_object(url, post_body)
|
||||
|
||||
def test_create_eco_account(self):
|
||||
""" Tests api creation
|
||||
|
||||
Returns:
|
||||
|
||||
"""
|
||||
url = reverse("api:v1:ecoaccount")
|
||||
json_file_path = "api/tests/v1/create/ecoaccount_create_post_body.json"
|
||||
with open(json_file_path) as json_file:
|
||||
post_body = json.load(fp=json_file)
|
||||
self._test_create_object(url, post_body)
|
||||
|
||||
def test_create_ema(self):
|
||||
""" Tests api creation
|
||||
|
||||
Returns:
|
||||
|
||||
"""
|
||||
url = reverse("api:v1:ema")
|
||||
json_file_path = "api/tests/v1/create/ema_create_post_body.json"
|
||||
with open(json_file_path) as json_file:
|
||||
post_body = json.load(fp=json_file)
|
||||
self._test_create_object(url, post_body)
|
||||
|
||||
def test_create_deduction(self):
|
||||
""" Tests api creation
|
||||
|
||||
Returns:
|
||||
|
||||
"""
|
||||
self.intervention.share_with_user(self.superuser)
|
||||
self.eco_account.share_with_user(self.superuser)
|
||||
|
||||
url = reverse("api:v1:deduction")
|
||||
json_file_path = "api/tests/v1/create/deduction_create_post_body.json"
|
||||
with open(json_file_path) as json_file:
|
||||
post_body = json.load(fp=json_file)
|
||||
post_body["intervention"] = str(self.intervention.id)
|
||||
post_body["eco_account"] = str(self.eco_account.id)
|
||||
self._test_create_object(url, post_body)
|
||||
|
7
api/tests/v1/delete/__init__.py
Normal file
7
api/tests/v1/delete/__init__.py
Normal file
@ -0,0 +1,7 @@
|
||||
"""
|
||||
Author: Michel Peltriaux
|
||||
Organization: Struktur- und Genehmigungsdirektion Nord, Rhineland-Palatinate, Germany
|
||||
Contact: michel.peltriaux@sgdnord.rlp.de
|
||||
Created on: 28.01.22
|
||||
|
||||
"""
|
118
api/tests/v1/delete/test_api_delete.py
Normal file
118
api/tests/v1/delete/test_api_delete.py
Normal file
@ -0,0 +1,118 @@
|
||||
"""
|
||||
Author: Michel Peltriaux
|
||||
Organization: Struktur- und Genehmigungsdirektion Nord, Rhineland-Palatinate, Germany
|
||||
Contact: michel.peltriaux@sgdnord.rlp.de
|
||||
Created on: 28.01.22
|
||||
|
||||
"""
|
||||
|
||||
import json
|
||||
|
||||
from django.core.exceptions import ObjectDoesNotExist
|
||||
from django.urls import reverse
|
||||
|
||||
from api.tests.v1.share.test_api_sharing import BaseAPIV1TestCase
|
||||
|
||||
|
||||
class APIV1DeleteTestCase(BaseAPIV1TestCase):
|
||||
@classmethod
|
||||
def setUpTestData(cls):
|
||||
super().setUpTestData()
|
||||
|
||||
def _run_delete_request(self, url):
|
||||
response = self.client.delete(
|
||||
url,
|
||||
**self.header_data
|
||||
)
|
||||
return response
|
||||
|
||||
def _test_delete_object(self, obj, url):
|
||||
""" Tests the API DELETE of a data object.
|
||||
|
||||
Args:
|
||||
url (str): The api delete url
|
||||
|
||||
Returns:
|
||||
|
||||
"""
|
||||
obj.refresh_from_db()
|
||||
_id = obj.id
|
||||
self.assertIsNotNone(_id)
|
||||
self.assertIsNone(obj.deleted)
|
||||
|
||||
response = self._run_delete_request(url)
|
||||
content = json.loads(response.content)
|
||||
|
||||
self.assertEqual(response.status_code, 200, msg=response.content)
|
||||
self.assertTrue(content.get("success", False), msg=response.content)
|
||||
|
||||
obj.refresh_from_db()
|
||||
self.assertIsNotNone(obj.deleted)
|
||||
self.assertEqual(obj.deleted.user, self.superuser)
|
||||
|
||||
def test_delete_intervention(self):
|
||||
""" Tests api creation of bare minimum interventions
|
||||
|
||||
Returns:
|
||||
|
||||
"""
|
||||
test_intervention = self.create_dummy_intervention()
|
||||
test_intervention.share_with_user(self.superuser)
|
||||
url = reverse("api:v1:intervention", args=(str(test_intervention.id),))
|
||||
self._test_delete_object(test_intervention, url)
|
||||
|
||||
def test_delete_compensation(self):
|
||||
""" Tests api creation of bare minimum interventions
|
||||
|
||||
Returns:
|
||||
|
||||
"""
|
||||
test_comp = self.create_dummy_compensation()
|
||||
test_comp.share_with_user(self.superuser)
|
||||
url = reverse("api:v1:compensation", args=(str(test_comp.id),))
|
||||
self._test_delete_object(test_comp, url)
|
||||
|
||||
def test_delete_eco_account(self):
|
||||
""" Tests api creation of bare minimum interventions
|
||||
|
||||
Returns:
|
||||
|
||||
"""
|
||||
test_acc = self.create_dummy_eco_account()
|
||||
test_acc.share_with_user(self.superuser)
|
||||
url = reverse("api:v1:ecoaccount", args=(str(test_acc.id),))
|
||||
self._test_delete_object(test_acc, url)
|
||||
|
||||
def test_delete_ema(self):
|
||||
""" Tests api creation of bare minimum interventions
|
||||
|
||||
Returns:
|
||||
|
||||
"""
|
||||
test_ema = self.create_dummy_ema()
|
||||
test_ema.share_with_user(self.superuser)
|
||||
url = reverse("api:v1:ema", args=(str(test_ema.id),))
|
||||
self._test_delete_object(test_ema, url)
|
||||
|
||||
def test_delete_deduction(self):
|
||||
""" Tests api creation of bare minimum interventions
|
||||
|
||||
Returns:
|
||||
|
||||
"""
|
||||
test_deduction = self.create_dummy_deduction()
|
||||
test_deduction.intervention.share_with_user(self.superuser)
|
||||
url = reverse("api:v1:deduction", args=(str(test_deduction.id),))
|
||||
|
||||
response = self._run_delete_request(url)
|
||||
content = json.loads(response.content)
|
||||
|
||||
self.assertEqual(response.status_code, 200, msg=response.content)
|
||||
self.assertTrue(content.get("success", False), msg=response.content)
|
||||
|
||||
try:
|
||||
test_deduction.refresh_from_db()
|
||||
self.fail("Deduction is not deleted from db!")
|
||||
except ObjectDoesNotExist:
|
||||
pass
|
||||
|
7
api/tests/v1/get/__init__.py
Normal file
7
api/tests/v1/get/__init__.py
Normal file
@ -0,0 +1,7 @@
|
||||
"""
|
||||
Author: Michel Peltriaux
|
||||
Organization: Struktur- und Genehmigungsdirektion Nord, Rhineland-Palatinate, Germany
|
||||
Contact: michel.peltriaux@sgdnord.rlp.de
|
||||
Created on: 28.01.22
|
||||
|
||||
"""
|
213
api/tests/v1/get/test_api_get.py
Normal file
213
api/tests/v1/get/test_api_get.py
Normal file
@ -0,0 +1,213 @@
|
||||
"""
|
||||
Author: Michel Peltriaux
|
||||
Organization: Struktur- und Genehmigungsdirektion Nord, Rhineland-Palatinate, Germany
|
||||
Contact: michel.peltriaux@sgdnord.rlp.de
|
||||
Created on: 28.01.22
|
||||
|
||||
"""
|
||||
|
||||
import json
|
||||
|
||||
from django.urls import reverse
|
||||
|
||||
from api.tests.v1.share.test_api_sharing import BaseAPIV1TestCase
|
||||
|
||||
|
||||
class APIV1GetTestCase(BaseAPIV1TestCase):
|
||||
@classmethod
|
||||
def setUpTestData(cls):
|
||||
super().setUpTestData()
|
||||
|
||||
def _run_get_request(self, url):
|
||||
response = self.client.get(
|
||||
url,
|
||||
**self.header_data
|
||||
)
|
||||
return response
|
||||
|
||||
def _test_get_object(self, obj, url):
|
||||
""" Tests the API GET of a data object.
|
||||
|
||||
Args:
|
||||
url (str): The api get url
|
||||
|
||||
Returns:
|
||||
|
||||
"""
|
||||
response = self._run_get_request(url)
|
||||
content = json.loads(response.content)
|
||||
self.assertIn("rpp", content)
|
||||
self.assertIn("p", content)
|
||||
self.assertIn("next", content)
|
||||
self.assertIn("results", content)
|
||||
paginated_content = content["results"]
|
||||
geojson = paginated_content[str(obj.id)]
|
||||
self.assertEqual(response.status_code, 200, msg=response.content)
|
||||
return geojson
|
||||
|
||||
def _assert_geojson_format(self, geojson):
|
||||
try:
|
||||
geojson["type"]
|
||||
geojson["coordinates"]
|
||||
props = geojson["properties"]
|
||||
props["id"]
|
||||
props["identifier"]
|
||||
props["title"]
|
||||
props["created_on"]
|
||||
props["modified_on"]
|
||||
except KeyError as e:
|
||||
self.fail(e)
|
||||
|
||||
def test_get_intervention(self):
|
||||
""" Tests api GET
|
||||
|
||||
Returns:
|
||||
|
||||
"""
|
||||
self.intervention.share_with_user(self.superuser)
|
||||
url = reverse("api:v1:intervention", args=(str(self.intervention.id),))
|
||||
geojson = self._test_get_object(self.intervention, url)
|
||||
self._assert_geojson_format(geojson)
|
||||
try:
|
||||
props = geojson["properties"]
|
||||
props["responsible"]
|
||||
props["responsible"]["registration_office"]
|
||||
props["responsible"]["registration_file_number"]
|
||||
props["responsible"]["conservation_office"]
|
||||
props["responsible"]["conservation_file_number"]
|
||||
props["legal"]["registration_date"]
|
||||
props["legal"]["binding_date"]
|
||||
props["legal"]["process_type"]
|
||||
props["legal"]["laws"]
|
||||
props["compensations"]
|
||||
props["payments"]
|
||||
props["deductions"]
|
||||
except KeyError as e:
|
||||
self.fail(e)
|
||||
|
||||
def test_get_shared(self):
|
||||
""" Tests api GET on shared info of the intervention
|
||||
|
||||
Returns:
|
||||
|
||||
"""
|
||||
self.intervention.share_with_user(self.superuser)
|
||||
self.intervention.share_with_team(self.team)
|
||||
url = reverse("api:v1:intervention-share", args=(str(self.intervention.id),))
|
||||
response = self._run_get_request(url)
|
||||
content = json.loads(response.content)
|
||||
self.assertIn("users", content)
|
||||
self.assertIn(self.superuser.username, content["users"])
|
||||
self.assertEqual(1, len(content["users"]))
|
||||
self.assertIn("teams", content)
|
||||
self.assertEqual(1, len(content["teams"]))
|
||||
for team in content["teams"]:
|
||||
self.assertEqual(team["id"], str(self.team.id))
|
||||
self.assertEqual(team["name"], self.team.name)
|
||||
|
||||
def test_get_compensation(self):
|
||||
""" Tests api GET
|
||||
|
||||
Returns:
|
||||
|
||||
"""
|
||||
self.intervention.share_with_user(self.superuser)
|
||||
self.compensation.intervention = self.intervention
|
||||
self.compensation.save()
|
||||
|
||||
url = reverse("api:v1:compensation", args=(str(self.compensation.id),))
|
||||
geojson = self._test_get_object(self.compensation, url)
|
||||
self._assert_geojson_format(geojson)
|
||||
try:
|
||||
props = geojson["properties"]
|
||||
props["is_cef"]
|
||||
props["is_coherence_keeping"]
|
||||
props["is_pik"]
|
||||
props["intervention"]
|
||||
props["intervention"]["id"]
|
||||
props["intervention"]["identifier"]
|
||||
props["intervention"]["title"]
|
||||
props["before_states"]
|
||||
props["after_states"]
|
||||
props["actions"]
|
||||
props["deadlines"]
|
||||
except KeyError as e:
|
||||
self.fail(e)
|
||||
|
||||
def test_get_eco_account(self):
|
||||
""" Tests api GET
|
||||
|
||||
Returns:
|
||||
|
||||
"""
|
||||
self.eco_account.share_with_user(self.superuser)
|
||||
|
||||
url = reverse("api:v1:ecoaccount", args=(str(self.eco_account.id),))
|
||||
geojson = self._test_get_object(self.eco_account, url)
|
||||
self._assert_geojson_format(geojson)
|
||||
try:
|
||||
props = geojson["properties"]
|
||||
props["deductable_surface"]
|
||||
props["deductable_surface_available"]
|
||||
props["responsible"]
|
||||
props["responsible"]["conservation_office"]
|
||||
props["responsible"]["conservation_file_number"]
|
||||
props["responsible"]["handler"]
|
||||
props["legal"]
|
||||
props["legal"]["agreement_date"]
|
||||
props["before_states"]
|
||||
props["after_states"]
|
||||
props["actions"]
|
||||
props["deadlines"]
|
||||
props["deductions"]
|
||||
except KeyError as e:
|
||||
self.fail(e)
|
||||
|
||||
def test_get_ema(self):
|
||||
""" Tests api GET
|
||||
|
||||
Returns:
|
||||
|
||||
"""
|
||||
self.ema.share_with_user(self.superuser)
|
||||
|
||||
url = reverse("api:v1:ema", args=(str(self.ema.id),))
|
||||
geojson = self._test_get_object(self.ema, url)
|
||||
self._assert_geojson_format(geojson)
|
||||
try:
|
||||
props = geojson["properties"]
|
||||
props["responsible"]
|
||||
props["responsible"]["conservation_office"]
|
||||
props["responsible"]["conservation_file_number"]
|
||||
props["responsible"]["handler"]
|
||||
props["before_states"]
|
||||
props["after_states"]
|
||||
props["actions"]
|
||||
props["deadlines"]
|
||||
except KeyError as e:
|
||||
self.fail(e)
|
||||
|
||||
def test_get_deduction(self):
|
||||
""" Tests api GET
|
||||
|
||||
Returns:
|
||||
|
||||
"""
|
||||
self.deduction.intervention.share_with_user(self.superuser)
|
||||
|
||||
url = reverse("api:v1:deduction", args=(str(self.deduction.id),))
|
||||
_json = self._test_get_object(self.deduction, url)
|
||||
try:
|
||||
_json["id"]
|
||||
_json["eco_account"]
|
||||
_json["eco_account"]["id"]
|
||||
_json["eco_account"]["identifier"]
|
||||
_json["eco_account"]["title"]
|
||||
_json["surface"]
|
||||
_json["intervention"]
|
||||
_json["intervention"]["id"]
|
||||
_json["intervention"]["identifier"]
|
||||
_json["intervention"]["title"]
|
||||
except KeyError as e:
|
||||
self.fail(e)
|
||||
|
7
api/tests/v1/share/__init__.py
Normal file
7
api/tests/v1/share/__init__.py
Normal file
@ -0,0 +1,7 @@
|
||||
"""
|
||||
Author: Michel Peltriaux
|
||||
Organization: Struktur- und Genehmigungsdirektion Nord, Rhineland-Palatinate, Germany
|
||||
Contact: michel.peltriaux@sgdnord.rlp.de
|
||||
Created on: 28.01.22
|
||||
|
||||
"""
|
154
api/tests/v1/share/test_api_sharing.py
Normal file
154
api/tests/v1/share/test_api_sharing.py
Normal file
@ -0,0 +1,154 @@
|
||||
import json
|
||||
|
||||
from django.urls import reverse
|
||||
|
||||
from konova.settings import DEFAULT_GROUP
|
||||
from konova.tests.test_views import BaseTestCase
|
||||
|
||||
|
||||
class BaseAPIV1TestCase(BaseTestCase):
|
||||
@classmethod
|
||||
def setUpTestData(cls):
|
||||
super().setUpTestData()
|
||||
|
||||
def setUp(self) -> None:
|
||||
super().setUp()
|
||||
self.superuser.get_API_token()
|
||||
self.superuser.api_token.is_active = True
|
||||
self.superuser.api_token.save()
|
||||
default_group = self.groups.get(name=DEFAULT_GROUP)
|
||||
self.superuser.groups.add(default_group)
|
||||
|
||||
self.header_data = {
|
||||
"HTTP_ksptoken": self.superuser.api_token.token,
|
||||
"HTTP_kspuser": self.superuser.username,
|
||||
}
|
||||
|
||||
|
||||
class APIV1SharingTestCase(BaseAPIV1TestCase):
|
||||
|
||||
@classmethod
|
||||
def setUpTestData(cls):
|
||||
super().setUpTestData()
|
||||
|
||||
def _run_share_request(self, url, user_list: list):
|
||||
data = {
|
||||
"users": user_list
|
||||
}
|
||||
data = json.dumps(data)
|
||||
response = self.client.put(
|
||||
url,
|
||||
data,
|
||||
**self.header_data
|
||||
)
|
||||
return response
|
||||
|
||||
def _test_api_sharing(self, obj, url):
|
||||
""" Generic test for testing sharing of a ShareableObjectMixin object
|
||||
|
||||
Args:
|
||||
obj (ShareableObjectMixin): The object
|
||||
url (str): The url to be used for a request
|
||||
|
||||
Returns:
|
||||
|
||||
"""
|
||||
self.assertEqual(obj.users.count(), 0)
|
||||
user_list = [
|
||||
self.superuser.username,
|
||||
self.user.username,
|
||||
]
|
||||
|
||||
response = self._run_share_request(url, user_list)
|
||||
|
||||
# Must fail, since performing user has no access on requested object
|
||||
self.assertEqual(response.status_code, 500)
|
||||
self.assertTrue(len(json.loads(response.content.decode("utf-8")).get("errors", [])) > 0)
|
||||
|
||||
# Add performing user to shared access users and rerun the request
|
||||
obj.users.add(self.superuser)
|
||||
response = self._run_share_request(url, user_list)
|
||||
|
||||
shared_users = obj.shared_users
|
||||
self.assertEqual(response.status_code, 200)
|
||||
self.assertEqual(shared_users.count(), 2)
|
||||
self.assertIn(self.superuser, shared_users)
|
||||
self.assertIn(self.user, shared_users)
|
||||
|
||||
def test_api_token_invalid(self):
|
||||
""" Tests that a request with an invalid token won't be successfull
|
||||
|
||||
Returns:
|
||||
|
||||
"""
|
||||
share_url = reverse("api:v1:intervention-share", args=(self.intervention.id,))
|
||||
# Expect the first request to work properly
|
||||
self.intervention.users.add(self.superuser)
|
||||
response = self._run_share_request(share_url, [self.superuser.username])
|
||||
self.assertEqual(response.status_code, 200)
|
||||
|
||||
# Change the token
|
||||
self.header_data["HTTP_ksptoken"] = f"{self.superuser.api_token.token}__X"
|
||||
|
||||
# Expect the request to fail now
|
||||
response = self._run_share_request(share_url, [self.superuser.username])
|
||||
self.assertEqual(response.status_code, 403)
|
||||
|
||||
def test_api_intervention_sharing(self):
|
||||
""" Tests proper sharing of intervention
|
||||
|
||||
Returns:
|
||||
|
||||
"""
|
||||
share_url = reverse("api:v1:intervention-share", args=(self.intervention.id,))
|
||||
self._test_api_sharing(self.intervention, share_url)
|
||||
|
||||
def test_api_eco_account_sharing(self):
|
||||
""" Tests proper sharing of eco account
|
||||
|
||||
Returns:
|
||||
|
||||
"""
|
||||
share_url = reverse("api:v1:ecoaccount-share", args=(self.eco_account.id,))
|
||||
self._test_api_sharing(self.eco_account, share_url)
|
||||
|
||||
def test_api_ema_sharing(self):
|
||||
""" Tests proper sharing of ema
|
||||
|
||||
Returns:
|
||||
|
||||
"""
|
||||
share_url = reverse("api:v1:ema-share", args=(self.ema.id,))
|
||||
self._test_api_sharing(self.ema, share_url)
|
||||
|
||||
def test_api_sharing_as_default_group_only(self):
|
||||
""" Tests that sharing using the API as an only default group user works as expected.
|
||||
|
||||
Expected:
|
||||
Default only user can only add new users, having shared access. Removing them from the list of users
|
||||
having shared access is only possible if the user has further rights, e.g. being part of a registration
|
||||
or conservation office group.
|
||||
|
||||
Returns:
|
||||
|
||||
"""
|
||||
share_url = reverse("api:v1:intervention-share", args=(self.intervention.id,))
|
||||
|
||||
# Give the user only default group rights
|
||||
default_group = self.groups.get(name=DEFAULT_GROUP)
|
||||
self.superuser.groups.set([default_group])
|
||||
self.assertTrue(self.superuser.is_default_group_only())
|
||||
|
||||
# Add only him as shared_users an object
|
||||
self.intervention.users.set([self.superuser])
|
||||
self.assertEqual(self.intervention.users.count(), 1)
|
||||
|
||||
# Try to add another user via API -> must work!
|
||||
response = self._run_share_request(share_url, [self.superuser.username, self.user.username])
|
||||
self.assertEqual(response.status_code, 200)
|
||||
self.assertEqual(self.intervention.users.count(), 2)
|
||||
|
||||
# Now try to remove the user again -> expect no changes at all to the shared user list
|
||||
response = self._run_share_request(share_url, [self.superuser.username])
|
||||
self.assertEqual(response.status_code, 200)
|
||||
self.assertEqual(self.intervention.users.count(), 2)
|
7
api/tests/v1/update/__init__.py
Normal file
7
api/tests/v1/update/__init__.py
Normal file
@ -0,0 +1,7 @@
|
||||
"""
|
||||
Author: Michel Peltriaux
|
||||
Organization: Struktur- und Genehmigungsdirektion Nord, Rhineland-Palatinate, Germany
|
||||
Contact: michel.peltriaux@sgdnord.rlp.de
|
||||
Created on: 28.01.22
|
||||
|
||||
"""
|
62
api/tests/v1/update/compensation_update_put_body.json
Normal file
62
api/tests/v1/update/compensation_update_put_body.json
Normal file
@ -0,0 +1,62 @@
|
||||
{
|
||||
"type": "MultiPolygon",
|
||||
"coordinates": [
|
||||
[
|
||||
[
|
||||
[
|
||||
7.845568656921382,
|
||||
50.79829702304368
|
||||
],
|
||||
[
|
||||
7.837371826171871,
|
||||
50.80155187891526
|
||||
],
|
||||
[
|
||||
7.835698127746578,
|
||||
50.805267562209806
|
||||
],
|
||||
[
|
||||
7.841062545776364,
|
||||
50.806623577403386
|
||||
],
|
||||
[
|
||||
7.848916053771969,
|
||||
50.808359219420474
|
||||
],
|
||||
[
|
||||
7.855696678161618,
|
||||
50.807057493952975
|
||||
],
|
||||
[
|
||||
7.854666709899899,
|
||||
50.80423696434001
|
||||
],
|
||||
[
|
||||
7.850461006164548,
|
||||
50.80217570040005
|
||||
],
|
||||
[
|
||||
7.845568656921382,
|
||||
50.79829702304368
|
||||
]
|
||||
]
|
||||
]
|
||||
],
|
||||
"properties": {
|
||||
"title": "TEST_compensation_CHANGED",
|
||||
"is_cef": true,
|
||||
"is_coherence_keeping": true,
|
||||
"is_pik": true,
|
||||
"intervention": "CHANGE_BEFORE_RUN!!!",
|
||||
"before_states": [],
|
||||
"after_states": [],
|
||||
"actions": [],
|
||||
"deadlines": [
|
||||
{
|
||||
"type": "finished",
|
||||
"date": "2022-01-31",
|
||||
"comment": "TEST_CHANGED"
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
5
api/tests/v1/update/deduction_update_put_body.json
Normal file
5
api/tests/v1/update/deduction_update_put_body.json
Normal file
@ -0,0 +1,5 @@
|
||||
{
|
||||
"eco_account": "CHANGE_BEFORE_RUN!!!",
|
||||
"surface": 523400.50,
|
||||
"intervention": "CHANGE_BEFORE_RUN!!!"
|
||||
}
|
74
api/tests/v1/update/ecoaccount_update_put_body.json
Normal file
74
api/tests/v1/update/ecoaccount_update_put_body.json
Normal file
@ -0,0 +1,74 @@
|
||||
{
|
||||
"type": "MultiPolygon",
|
||||
"coordinates": [
|
||||
[
|
||||
[
|
||||
[
|
||||
7.845568656921382,
|
||||
50.79829702304368
|
||||
],
|
||||
[
|
||||
7.837371826171871,
|
||||
50.80155187891526
|
||||
],
|
||||
[
|
||||
7.835698127746578,
|
||||
50.805267562209806
|
||||
],
|
||||
[
|
||||
7.841062545776364,
|
||||
50.806623577403386
|
||||
],
|
||||
[
|
||||
7.848916053771969,
|
||||
50.808359219420474
|
||||
],
|
||||
[
|
||||
7.855696678161618,
|
||||
50.807057493952975
|
||||
],
|
||||
[
|
||||
7.854666709899899,
|
||||
50.80423696434001
|
||||
],
|
||||
[
|
||||
7.850461006164548,
|
||||
50.80217570040005
|
||||
],
|
||||
[
|
||||
7.845568656921382,
|
||||
50.79829702304368
|
||||
]
|
||||
]
|
||||
]
|
||||
],
|
||||
"properties": {
|
||||
"title": "TEST_account_CHANGED",
|
||||
"deductable_surface": "100000.0",
|
||||
"is_pik": true,
|
||||
"responsible": {
|
||||
"conservation_office": null,
|
||||
"conservation_file_number": "123-TEST",
|
||||
"handler": {
|
||||
"type": null,
|
||||
"detail": "TEST HANDLER CHANGED"
|
||||
}
|
||||
},
|
||||
"legal": {
|
||||
"agreement_date": "2022-01-11"
|
||||
},
|
||||
"before_states": [
|
||||
],
|
||||
"after_states": [
|
||||
],
|
||||
"actions": [
|
||||
],
|
||||
"deadlines": [
|
||||
{
|
||||
"type": "finished",
|
||||
"date": "2022-01-31",
|
||||
"comment": "TEST_CHANGED"
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
67
api/tests/v1/update/ema_update_put_body.json
Normal file
67
api/tests/v1/update/ema_update_put_body.json
Normal file
@ -0,0 +1,67 @@
|
||||
{
|
||||
"type": "MultiPolygon",
|
||||
"coordinates": [
|
||||
[
|
||||
[
|
||||
[
|
||||
7.845568656921382,
|
||||
50.79829702304368
|
||||
],
|
||||
[
|
||||
7.837371826171871,
|
||||
50.80155187891526
|
||||
],
|
||||
[
|
||||
7.835698127746578,
|
||||
50.805267562209806
|
||||
],
|
||||
[
|
||||
7.841062545776364,
|
||||
50.806623577403386
|
||||
],
|
||||
[
|
||||
7.848916053771969,
|
||||
50.808359219420474
|
||||
],
|
||||
[
|
||||
7.855696678161618,
|
||||
50.807057493952975
|
||||
],
|
||||
[
|
||||
7.854666709899899,
|
||||
50.80423696434001
|
||||
],
|
||||
[
|
||||
7.850461006164548,
|
||||
50.80217570040005
|
||||
],
|
||||
[
|
||||
7.845568656921382,
|
||||
50.79829702304368
|
||||
]
|
||||
]
|
||||
]
|
||||
],
|
||||
"properties": {
|
||||
"title": "TEST_EMA_CHANGED",
|
||||
"responsible": {
|
||||
"conservation_office": null,
|
||||
"conservation_file_number": "TEST_CHANGED",
|
||||
"handler": {
|
||||
"type": null,
|
||||
"detail": "TEST_HANDLER_CHANGED"
|
||||
}
|
||||
},
|
||||
"is_pik": true,
|
||||
"before_states": [],
|
||||
"after_states": [],
|
||||
"actions": [],
|
||||
"deadlines": [
|
||||
{
|
||||
"type": "finished",
|
||||
"date": "2022-01-31",
|
||||
"comment": "TEST_CHANGED"
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
@ -0,0 +1,8 @@
|
||||
{
|
||||
"users": [
|
||||
"CHANGE_ME"
|
||||
],
|
||||
"teams": [
|
||||
"CHANGE_ME"
|
||||
]
|
||||
}
|
64
api/tests/v1/update/intervention_update_put_body.json
Normal file
64
api/tests/v1/update/intervention_update_put_body.json
Normal file
@ -0,0 +1,64 @@
|
||||
{
|
||||
"type": "MultiPolygon",
|
||||
"coordinates": [
|
||||
[
|
||||
[
|
||||
[
|
||||
7.845568656921382,
|
||||
50.79829702304368
|
||||
],
|
||||
[
|
||||
7.837371826171871,
|
||||
50.80155187891526
|
||||
],
|
||||
[
|
||||
7.835698127746578,
|
||||
50.805267562209806
|
||||
],
|
||||
[
|
||||
7.841062545776364,
|
||||
50.806623577403386
|
||||
],
|
||||
[
|
||||
7.848916053771969,
|
||||
50.808359219420474
|
||||
],
|
||||
[
|
||||
7.855696678161618,
|
||||
50.807057493952975
|
||||
],
|
||||
[
|
||||
7.854666709899899,
|
||||
50.80423696434001
|
||||
],
|
||||
[
|
||||
7.850461006164548,
|
||||
50.80217570040005
|
||||
],
|
||||
[
|
||||
7.845568656921382,
|
||||
50.79829702304368
|
||||
]
|
||||
]
|
||||
]
|
||||
],
|
||||
"properties": {
|
||||
"title": "Test_intervention_CHANGED",
|
||||
"responsible": {
|
||||
"registration_office": null,
|
||||
"registration_file_number": "CHANGED",
|
||||
"conservation_office": null,
|
||||
"conservation_file_number": "CHANGED",
|
||||
"handler": {
|
||||
"type": null,
|
||||
"detail": "TEST_HANDLER_CHANGED"
|
||||
}
|
||||
},
|
||||
"legal": {
|
||||
"registration_date": "2022-02-01",
|
||||
"binding_date": "2022-02-01",
|
||||
"process_type": null,
|
||||
"laws": []
|
||||
}
|
||||
}
|
||||
}
|
213
api/tests/v1/update/test_api_update.py
Normal file
213
api/tests/v1/update/test_api_update.py
Normal file
@ -0,0 +1,213 @@
|
||||
"""
|
||||
Author: Michel Peltriaux
|
||||
Organization: Struktur- und Genehmigungsdirektion Nord, Rhineland-Palatinate, Germany
|
||||
Contact: michel.peltriaux@sgdnord.rlp.de
|
||||
Created on: 28.01.22
|
||||
|
||||
"""
|
||||
|
||||
import json
|
||||
|
||||
from django.contrib.gis import geos
|
||||
from django.urls import reverse
|
||||
|
||||
from api.tests.v1.share.test_api_sharing import BaseAPIV1TestCase
|
||||
from konova.sub_settings.lanis_settings import DEFAULT_SRID_RLP
|
||||
|
||||
|
||||
class APIV1UpdateTestCase(BaseAPIV1TestCase):
|
||||
@classmethod
|
||||
def setUpTestData(cls):
|
||||
super().setUpTestData()
|
||||
|
||||
def _run_update_request(self, url, data):
|
||||
data = json.dumps(data)
|
||||
response = self.client.put(
|
||||
url,
|
||||
data=data,
|
||||
content_type="application/json",
|
||||
**self.header_data
|
||||
)
|
||||
return response
|
||||
|
||||
def _test_update_object(self, url, put_body):
|
||||
""" Tests the API update of a data object.
|
||||
|
||||
Put body data stored in a local json file
|
||||
|
||||
Args:
|
||||
url (str): The api creation url
|
||||
put_body (dict): The put body content as dict
|
||||
|
||||
Returns:
|
||||
|
||||
"""
|
||||
response = self._run_update_request(url, put_body)
|
||||
self.assertEqual(response.status_code, 200, msg=response.content)
|
||||
content = json.loads(response.content)
|
||||
self.assertIsNotNone(content.get("id", None), msg=response.content)
|
||||
|
||||
def test_update_intervention(self):
|
||||
""" Tests api update
|
||||
|
||||
Returns:
|
||||
|
||||
"""
|
||||
self.intervention.share_with_user(self.superuser)
|
||||
modified_on = self.intervention.modified
|
||||
url = reverse("api:v1:intervention", args=(str(self.intervention.id),))
|
||||
json_file_path = "api/tests/v1/update/intervention_update_put_body.json"
|
||||
with open(json_file_path) as json_file:
|
||||
put_body = json.load(fp=json_file)
|
||||
self._test_update_object(url, put_body)
|
||||
self.intervention.refresh_from_db()
|
||||
|
||||
put_props = put_body["properties"]
|
||||
put_geom = geos.fromstr(json.dumps(put_body))
|
||||
put_geom.transform(DEFAULT_SRID_RLP)
|
||||
self.assertEqual(put_geom, self.intervention.geometry.geom)
|
||||
self.assertEqual(put_props["title"], self.intervention.title)
|
||||
self.assertNotEqual(modified_on, self.intervention.modified)
|
||||
self.assertEqual(put_props["responsible"]["registration_file_number"], self.intervention.responsible.registration_file_number)
|
||||
self.assertEqual(put_props["responsible"]["conservation_file_number"], self.intervention.responsible.conservation_file_number)
|
||||
self.assertEqual(put_props["legal"]["registration_date"], str(self.intervention.legal.registration_date))
|
||||
self.assertEqual(put_props["legal"]["binding_date"], str(self.intervention.legal.binding_date))
|
||||
|
||||
def test_update_compensation(self):
|
||||
""" Tests api update
|
||||
|
||||
Returns:
|
||||
|
||||
"""
|
||||
self.compensation.intervention = self.intervention
|
||||
self.compensation.save()
|
||||
self.intervention.share_with_user(self.superuser)
|
||||
|
||||
modified_on = self.compensation.modified
|
||||
url = reverse("api:v1:compensation", args=(str(self.compensation.id),))
|
||||
json_file_path = "api/tests/v1/update/compensation_update_put_body.json"
|
||||
with open(json_file_path) as json_file:
|
||||
put_body = json.load(fp=json_file)
|
||||
put_body["properties"]["intervention"] = str(self.intervention.id)
|
||||
self._test_update_object(url, put_body)
|
||||
self.compensation.refresh_from_db()
|
||||
|
||||
put_props = put_body["properties"]
|
||||
put_geom = geos.fromstr(json.dumps(put_body))
|
||||
put_geom.transform(DEFAULT_SRID_RLP)
|
||||
self.assertEqual(put_geom, self.compensation.geometry.geom)
|
||||
self.assertEqual(put_props["title"], self.compensation.title)
|
||||
self.assertNotEqual(modified_on, self.compensation.modified)
|
||||
self.assertEqual(put_props["is_cef"], self.compensation.is_cef)
|
||||
self.assertEqual(put_props["is_coherence_keeping"], self.compensation.is_coherence_keeping)
|
||||
self.assertEqual(put_props["is_pik"], self.compensation.is_pik)
|
||||
self.assertEqual(len(put_props["actions"]), self.compensation.actions.count())
|
||||
self.assertEqual(len(put_props["before_states"]), self.compensation.before_states.count())
|
||||
self.assertEqual(len(put_props["after_states"]), self.compensation.after_states.count())
|
||||
self.assertEqual(len(put_props["deadlines"]), self.compensation.deadlines.count())
|
||||
|
||||
def test_update_ecoaccount(self):
|
||||
""" Tests api update
|
||||
|
||||
Returns:
|
||||
|
||||
"""
|
||||
self.eco_account.share_with_user(self.superuser)
|
||||
|
||||
modified_on = self.eco_account.modified
|
||||
url = reverse("api:v1:ecoaccount", args=(str(self.eco_account.id),))
|
||||
json_file_path = "api/tests/v1/update/ecoaccount_update_put_body.json"
|
||||
with open(json_file_path) as json_file:
|
||||
put_body = json.load(fp=json_file)
|
||||
self._test_update_object(url, put_body)
|
||||
self.eco_account.refresh_from_db()
|
||||
|
||||
put_props = put_body["properties"]
|
||||
put_geom = geos.fromstr(json.dumps(put_body))
|
||||
put_geom.transform(DEFAULT_SRID_RLP)
|
||||
self.assertEqual(put_geom, self.eco_account.geometry.geom)
|
||||
self.assertEqual(put_props["title"], self.eco_account.title)
|
||||
self.assertNotEqual(modified_on, self.eco_account.modified)
|
||||
self.assertEqual(put_props["deductable_surface"], str(self.eco_account.deductable_surface))
|
||||
self.assertEqual(put_props["responsible"]["conservation_office"], self.eco_account.responsible.conservation_office)
|
||||
self.assertEqual(put_props["responsible"]["conservation_file_number"], self.eco_account.responsible.conservation_file_number)
|
||||
self.assertEqual(put_props["responsible"]["handler"]["detail"], self.eco_account.responsible.handler.detail)
|
||||
self.assertEqual(put_props["legal"]["agreement_date"], str(self.eco_account.legal.registration_date))
|
||||
self.assertEqual(len(put_props["actions"]), self.eco_account.actions.count())
|
||||
self.assertEqual(len(put_props["before_states"]), self.eco_account.before_states.count())
|
||||
self.assertEqual(len(put_props["after_states"]), self.eco_account.after_states.count())
|
||||
self.assertEqual(len(put_props["deadlines"]), self.eco_account.deadlines.count())
|
||||
|
||||
def test_update_ema(self):
|
||||
""" Tests api update
|
||||
|
||||
Returns:
|
||||
|
||||
"""
|
||||
self.ema.share_with_user(self.superuser)
|
||||
|
||||
modified_on = self.ema.modified
|
||||
url = reverse("api:v1:ema", args=(str(self.ema.id),))
|
||||
json_file_path = "api/tests/v1/update/ema_update_put_body.json"
|
||||
with open(json_file_path) as json_file:
|
||||
put_body = json.load(fp=json_file)
|
||||
self._test_update_object(url, put_body)
|
||||
self.ema.refresh_from_db()
|
||||
|
||||
put_props = put_body["properties"]
|
||||
put_geom = geos.fromstr(json.dumps(put_body))
|
||||
put_geom.transform(DEFAULT_SRID_RLP)
|
||||
self.assertEqual(put_geom, self.ema.geometry.geom)
|
||||
self.assertEqual(put_props["title"], self.ema.title)
|
||||
self.assertNotEqual(modified_on, self.ema.modified)
|
||||
self.assertEqual(put_props["responsible"]["conservation_office"], self.ema.responsible.conservation_office)
|
||||
self.assertEqual(put_props["responsible"]["conservation_file_number"], self.ema.responsible.conservation_file_number)
|
||||
self.assertEqual(put_props["responsible"]["handler"]["detail"], self.ema.responsible.handler.detail)
|
||||
self.assertEqual(len(put_props["actions"]), self.ema.actions.count())
|
||||
self.assertEqual(len(put_props["before_states"]), self.ema.before_states.count())
|
||||
self.assertEqual(len(put_props["after_states"]), self.ema.after_states.count())
|
||||
self.assertEqual(len(put_props["deadlines"]), self.ema.deadlines.count())
|
||||
|
||||
def test_update_deduction(self):
|
||||
""" Tests api update
|
||||
|
||||
Returns:
|
||||
|
||||
"""
|
||||
self.deduction.intervention.share_with_user(self.superuser)
|
||||
self.deduction.account.share_with_user(self.superuser)
|
||||
|
||||
url = reverse("api:v1:deduction", args=(str(self.deduction.id),))
|
||||
json_file_path = "api/tests/v1/update/deduction_update_put_body.json"
|
||||
with open(json_file_path) as json_file:
|
||||
put_body = json.load(fp=json_file)
|
||||
put_body["intervention"] = str(self.deduction.intervention.id)
|
||||
put_body["eco_account"] = str(self.deduction.account.id)
|
||||
|
||||
self._test_update_object(url, put_body)
|
||||
self.deduction.refresh_from_db()
|
||||
|
||||
self.assertEqual(put_body["intervention"], str(self.deduction.intervention.id))
|
||||
self.assertEqual(put_body["eco_account"], str(self.deduction.account.id))
|
||||
self.assertEqual(put_body["surface"], self.deduction.surface)
|
||||
|
||||
def test_update_share_intervention(self):
|
||||
self.intervention.share_with_user(self.superuser)
|
||||
url = reverse("api:v1:intervention-share", args=(str(self.intervention.id),))
|
||||
json_file_path = "api/tests/v1/update/intervention_share_update_put_body.json"
|
||||
with open(json_file_path) as json_file:
|
||||
put_body = json.load(fp=json_file)
|
||||
put_body["users"] = [self.user.username]
|
||||
put_body["teams"] = [self.team.name]
|
||||
|
||||
self.assertFalse(self.intervention.is_shared_with(self.user))
|
||||
self.assertEqual(0, self.intervention.shared_teams.count())
|
||||
|
||||
response = self._run_update_request(url, put_body)
|
||||
self.assertEqual(response.status_code, 200, msg=response.content)
|
||||
self.intervention.refresh_from_db()
|
||||
|
||||
self.assertEqual(1, self.intervention.shared_teams.count())
|
||||
self.assertEqual(2, self.intervention.shared_users.count())
|
||||
self.assertEqual(self.team.name, self.intervention.shared_teams.first().name)
|
||||
self.assertTrue(self.intervention.is_shared_with(self.user))
|
8
api/urls/__init__.py
Normal file
8
api/urls/__init__.py
Normal file
@ -0,0 +1,8 @@
|
||||
"""
|
||||
Author: Michel Peltriaux
|
||||
Organization: Struktur- und Genehmigungsdirektion Nord, Rhineland-Palatinate, Germany
|
||||
Contact: michel.peltriaux@sgdnord.rlp.de
|
||||
Created on: 21.01.22
|
||||
|
||||
"""
|
||||
from .urls import *
|
17
api/urls/urls.py
Normal file
17
api/urls/urls.py
Normal file
@ -0,0 +1,17 @@
|
||||
"""
|
||||
Author: Michel Peltriaux
|
||||
Organization: Struktur- und Genehmigungsdirektion Nord, Rhineland-Palatinate, Germany
|
||||
Contact: michel.peltriaux@sgdnord.rlp.de
|
||||
Created on: 21.01.22
|
||||
|
||||
"""
|
||||
from django.urls import path, include
|
||||
|
||||
from api.views.method_views import generate_new_token_view
|
||||
|
||||
app_name = "api"
|
||||
|
||||
urlpatterns = [
|
||||
path("v1/", include("api.urls.v1.urls", namespace="v1")),
|
||||
path("token/generate", generate_new_token_view, name="generate-new-token"),
|
||||
]
|
7
api/urls/v1/__init__.py
Normal file
7
api/urls/v1/__init__.py
Normal file
@ -0,0 +1,7 @@
|
||||
"""
|
||||
Author: Michel Peltriaux
|
||||
Organization: Struktur- und Genehmigungsdirektion Nord, Rhineland-Palatinate, Germany
|
||||
Contact: michel.peltriaux@sgdnord.rlp.de
|
||||
Created on: 21.01.22
|
||||
|
||||
"""
|
34
api/urls/v1/urls.py
Normal file
34
api/urls/v1/urls.py
Normal file
@ -0,0 +1,34 @@
|
||||
"""
|
||||
Author: Michel Peltriaux
|
||||
Organization: Struktur- und Genehmigungsdirektion Nord, Rhineland-Palatinate, Germany
|
||||
Contact: michel.peltriaux@sgdnord.rlp.de
|
||||
Created on: 21.01.22
|
||||
|
||||
"""
|
||||
from django.urls import path
|
||||
|
||||
from api.views.v1.views import EmaAPIViewV1, EcoAccountAPIViewV1, CompensationAPIViewV1, InterventionAPIViewV1, \
|
||||
DeductionAPIViewV1
|
||||
from api.views.views import InterventionCheckAPIView, InterventionAPIShareView, EcoAccountAPIShareView, EmaAPIShareView
|
||||
|
||||
app_name = "v1"
|
||||
urlpatterns = [
|
||||
path("intervention/<id>/check", InterventionCheckAPIView.as_view(), name="intervention-check"),
|
||||
path("intervention/<id>/share", InterventionAPIShareView.as_view(), name="intervention-share"),
|
||||
path("intervention/<id>", InterventionAPIViewV1.as_view(), name="intervention"),
|
||||
path("intervention/", InterventionAPIViewV1.as_view(), name="intervention"),
|
||||
|
||||
path("compensation/<id>", CompensationAPIViewV1.as_view(), name="compensation"),
|
||||
path("compensation/", CompensationAPIViewV1.as_view(), name="compensation"),
|
||||
|
||||
path("ecoaccount/<id>/share", EcoAccountAPIShareView.as_view(), name="ecoaccount-share"),
|
||||
path("ecoaccount/<id>", EcoAccountAPIViewV1.as_view(), name="ecoaccount"),
|
||||
path("ecoaccount/", EcoAccountAPIViewV1.as_view(), name="ecoaccount"),
|
||||
|
||||
path("deduction/<id>", DeductionAPIViewV1.as_view(), name="deduction"),
|
||||
path("deduction/", DeductionAPIViewV1.as_view(), name="deduction"),
|
||||
|
||||
path("ema/<id>/share", EmaAPIShareView.as_view(), name="ema-share"),
|
||||
path("ema/<id>", EmaAPIViewV1.as_view(), name="ema"),
|
||||
path("ema/", EmaAPIViewV1.as_view(), name="ema"),
|
||||
]
|
7
api/utils/__init__.py
Normal file
7
api/utils/__init__.py
Normal file
@ -0,0 +1,7 @@
|
||||
"""
|
||||
Author: Michel Peltriaux
|
||||
Organization: Struktur- und Genehmigungsdirektion Nord, Rhineland-Palatinate, Germany
|
||||
Contact: michel.peltriaux@sgdnord.rlp.de
|
||||
Created on: 24.01.22
|
||||
|
||||
"""
|
7
api/utils/serializer/__init__.py
Normal file
7
api/utils/serializer/__init__.py
Normal file
@ -0,0 +1,7 @@
|
||||
"""
|
||||
Author: Michel Peltriaux
|
||||
Organization: Struktur- und Genehmigungsdirektion Nord, Rhineland-Palatinate, Germany
|
||||
Contact: michel.peltriaux@sgdnord.rlp.de
|
||||
Created on: 24.01.22
|
||||
|
||||
"""
|
186
api/utils/serializer/serializer.py
Normal file
186
api/utils/serializer/serializer.py
Normal file
@ -0,0 +1,186 @@
|
||||
"""
|
||||
Author: Michel Peltriaux
|
||||
Organization: Struktur- und Genehmigungsdirektion Nord, Rhineland-Palatinate, Germany
|
||||
Contact: michel.peltriaux@sgdnord.rlp.de
|
||||
Created on: 24.01.22
|
||||
|
||||
"""
|
||||
import json
|
||||
from abc import abstractmethod
|
||||
|
||||
from django.contrib.gis import geos
|
||||
from django.contrib.gis.geos import GEOSGeometry
|
||||
from django.core.paginator import Paginator
|
||||
from django.db.models import Q
|
||||
|
||||
from konova.sub_settings.lanis_settings import DEFAULT_SRID_RLP
|
||||
from konova.utils.message_templates import DATA_UNSHARED
|
||||
|
||||
|
||||
class AbstractModelAPISerializer:
|
||||
model = None
|
||||
lookup = None
|
||||
properties_data = None
|
||||
|
||||
rpp = None
|
||||
page_number = None
|
||||
paginator = None
|
||||
|
||||
class Meta:
|
||||
abstract = True
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
self.lookup = {
|
||||
"id": None, # must be set
|
||||
"deleted__isnull": True,
|
||||
}
|
||||
self.shared_lookup = Q() # must be set, so user or team based share will be used properly
|
||||
super().__init__(*args, **kwargs)
|
||||
|
||||
@abstractmethod
|
||||
def _model_to_geo_json(self, entry):
|
||||
""" Defines the model as geo json
|
||||
|
||||
Args:
|
||||
entry (): The found entry from the database
|
||||
|
||||
Returns:
|
||||
|
||||
"""
|
||||
raise NotImplementedError("Must be implemented in subclasses")
|
||||
|
||||
@abstractmethod
|
||||
def _extend_properties_data(self, entry):
|
||||
""" Defines the 'properties' part of geo json
|
||||
|
||||
Args:
|
||||
entry (): The found entry from the database
|
||||
|
||||
Returns:
|
||||
|
||||
"""
|
||||
raise NotImplementedError("Must be implemented in subclasses")
|
||||
|
||||
def prepare_lookup(self, _id, user):
|
||||
""" Updates lookup dict for db fetching
|
||||
|
||||
Args:
|
||||
_id (str): The object's id
|
||||
user (User): The user requesting for
|
||||
|
||||
Returns:
|
||||
|
||||
"""
|
||||
if _id is None:
|
||||
# Return all objects
|
||||
del self.lookup["id"]
|
||||
else:
|
||||
# Return certain object
|
||||
self.lookup["id"] = _id
|
||||
|
||||
self.shared_lookup = Q(
|
||||
Q(users__in=[user]) |
|
||||
Q(teams__in=list(user.shared_teams))
|
||||
)
|
||||
|
||||
def fetch_and_serialize(self):
|
||||
""" Serializes the model entry according to the given lookup data
|
||||
|
||||
Args:
|
||||
|
||||
Returns:
|
||||
serialized_data (dict)
|
||||
"""
|
||||
entries = self.model.objects.filter(
|
||||
**self.lookup
|
||||
).filter(
|
||||
self.shared_lookup
|
||||
).order_by(
|
||||
"id"
|
||||
).distinct()
|
||||
self.paginator = Paginator(entries, self.rpp)
|
||||
requested_entries = self.paginator.page(self.page_number)
|
||||
|
||||
serialized_data = {}
|
||||
for entry in requested_entries.object_list:
|
||||
serialized_data[str(entry.id)] = self._model_to_geo_json(entry)
|
||||
return serialized_data
|
||||
|
||||
@abstractmethod
|
||||
def update_model_from_json(self, id, json_model, user):
|
||||
""" Updates an instance from given json data
|
||||
|
||||
Args:
|
||||
id (str): The instance's to be updated
|
||||
json_model (dict): JSON data
|
||||
user (User): The performing user
|
||||
|
||||
Returns:
|
||||
|
||||
"""
|
||||
raise NotImplementedError("Must be implemented in subclasses")
|
||||
|
||||
@abstractmethod
|
||||
def create_model_from_json(self, json_model, user):
|
||||
""" Creates a new instance from given json data
|
||||
|
||||
Args:
|
||||
json_model (dict): JSON data
|
||||
user (User): The performing user
|
||||
|
||||
Returns:
|
||||
|
||||
"""
|
||||
raise NotImplementedError("Must be implemented in subclasses")
|
||||
|
||||
def _create_geometry_from_json(self, geojson) -> GEOSGeometry:
|
||||
""" Creates a GEOSGeometry object based on the given geojson
|
||||
|
||||
Args:
|
||||
geojson (str|dict): The geojson as str or dict
|
||||
|
||||
Returns:
|
||||
geometry (GEOSGeometry)
|
||||
"""
|
||||
if isinstance(geojson, dict):
|
||||
geojson = json.dumps(geojson)
|
||||
geometry = geos.fromstr(geojson)
|
||||
if geometry.srid != DEFAULT_SRID_RLP:
|
||||
geometry.transform(DEFAULT_SRID_RLP)
|
||||
return geometry
|
||||
|
||||
def _get_obj_from_db(self, id, user):
|
||||
""" Returns the object from database
|
||||
|
||||
Fails if id not found or user does not have shared access
|
||||
|
||||
Args:
|
||||
id (str): The object's id
|
||||
user (User): The API user
|
||||
|
||||
Returns:
|
||||
|
||||
"""
|
||||
obj = self.model.objects.get(
|
||||
id=id,
|
||||
deleted__isnull=True,
|
||||
)
|
||||
is_shared = obj.is_shared_with(user)
|
||||
if not is_shared:
|
||||
raise PermissionError(DATA_UNSHARED)
|
||||
return obj
|
||||
|
||||
@abstractmethod
|
||||
def _initialize_objects(self, json_model, user):
|
||||
""" Initializes all needed objects from the json_model data
|
||||
|
||||
Does not persist data to the DB!
|
||||
|
||||
Args:
|
||||
json_model (dict): The json data
|
||||
user (User): The API user
|
||||
|
||||
Returns:
|
||||
obj (Intervention)
|
||||
"""
|
||||
raise NotImplementedError("Must be implemented in subclasses")
|
7
api/utils/serializer/v1/__init__.py
Normal file
7
api/utils/serializer/v1/__init__.py
Normal file
@ -0,0 +1,7 @@
|
||||
"""
|
||||
Author: Michel Peltriaux
|
||||
Organization: Struktur- und Genehmigungsdirektion Nord, Rhineland-Palatinate, Germany
|
||||
Contact: michel.peltriaux@sgdnord.rlp.de
|
||||
Created on: 24.01.22
|
||||
|
||||
"""
|
177
api/utils/serializer/v1/compensation.py
Normal file
177
api/utils/serializer/v1/compensation.py
Normal file
@ -0,0 +1,177 @@
|
||||
"""
|
||||
Author: Michel Peltriaux
|
||||
Organization: Struktur- und Genehmigungsdirektion Nord, Rhineland-Palatinate, Germany
|
||||
Contact: michel.peltriaux@sgdnord.rlp.de
|
||||
Created on: 24.01.22
|
||||
|
||||
"""
|
||||
from django.db import transaction
|
||||
from django.db.models import Q
|
||||
|
||||
from api.utils.serializer.v1.serializer import AbstractModelAPISerializerV1, AbstractCompensationAPISerializerV1Mixin
|
||||
from compensation.models import Compensation
|
||||
from intervention.models import Intervention
|
||||
from konova.models import Geometry
|
||||
from konova.tasks import celery_update_parcels, celery_check_for_geometry_conflicts
|
||||
from konova.utils.message_templates import DATA_UNSHARED
|
||||
from user.models import UserActionLogEntry
|
||||
|
||||
|
||||
class CompensationAPISerializerV1(AbstractModelAPISerializerV1, AbstractCompensationAPISerializerV1Mixin):
|
||||
model = Compensation
|
||||
|
||||
def prepare_lookup(self, id, user):
|
||||
super().prepare_lookup(id, user)
|
||||
self.shared_lookup = Q(
|
||||
Q(intervention__users__in=[user]) |
|
||||
Q(intervention__teams__in=user.shared_teams)
|
||||
)
|
||||
|
||||
def intervention_to_json(self, entry):
|
||||
return {
|
||||
"id": entry.pk,
|
||||
"identifier": entry.identifier,
|
||||
"title": entry.title,
|
||||
}
|
||||
|
||||
def _extend_properties_data(self, entry):
|
||||
self.properties_data["is_cef"] = entry.is_cef
|
||||
self.properties_data["is_coherence_keeping"] = entry.is_coherence_keeping
|
||||
self.properties_data["is_pik"] = entry.is_pik
|
||||
self.properties_data["intervention"] = self.intervention_to_json(entry.intervention)
|
||||
self.properties_data["before_states"] = self._compensation_state_to_json(entry.before_states.all())
|
||||
self.properties_data["after_states"] = self._compensation_state_to_json(entry.after_states.all())
|
||||
self.properties_data["actions"] = self._compensation_actions_to_json(entry.actions.all())
|
||||
self.properties_data["deadlines"] = self._deadlines_to_json(entry.deadlines.all())
|
||||
|
||||
def _initialize_objects(self, json_model, user):
|
||||
""" Initializes all needed objects from the json_model data
|
||||
|
||||
Does not persist data to the DB!
|
||||
|
||||
Args:
|
||||
json_model (dict): The json data
|
||||
user (User): The API user
|
||||
|
||||
Returns:
|
||||
obj (Compensation)
|
||||
"""
|
||||
create_action = UserActionLogEntry.get_created_action(user, comment="API Import")
|
||||
# Create geometry
|
||||
json_geom = self._create_geometry_from_json(json_model)
|
||||
geometry = Geometry()
|
||||
geometry.geom = json_geom
|
||||
geometry.created = create_action
|
||||
|
||||
# Create linked objects
|
||||
obj = Compensation()
|
||||
created = create_action
|
||||
obj.created = created
|
||||
obj.modified = created
|
||||
obj.geometry = geometry
|
||||
return obj
|
||||
|
||||
def set_intervention(self, obj, intervention_id, user):
|
||||
""" Sets the linked compensation according to the given id
|
||||
|
||||
Fails if no such intervention found or user has no shared access
|
||||
|
||||
Args:
|
||||
obj (Compensation): The Compensation object
|
||||
intervention_id (str): The intervention's id
|
||||
user (User): The API user
|
||||
|
||||
Returns:
|
||||
obj (Compensation)
|
||||
"""
|
||||
if obj.intervention is not None and obj.intervention.id == intervention_id:
|
||||
# Nothing to do here
|
||||
return obj
|
||||
|
||||
intervention = Intervention.objects.get(
|
||||
id=intervention_id,
|
||||
)
|
||||
is_shared = intervention.is_shared_with(user)
|
||||
|
||||
if not is_shared:
|
||||
raise PermissionError(DATA_UNSHARED)
|
||||
|
||||
obj.intervention = intervention
|
||||
return obj
|
||||
|
||||
def create_model_from_json(self, json_model, user):
    """ Creates a new Compensation entry based on the contents of json_model

    Args:
        json_model (dict): The json containing data
        user (User): The API user

    Returns:
        created_id (str): The id of the newly created Compensation entry
    """
    with transaction.atomic():
        obj = self._initialize_objects(json_model, user)

        # Copy scalar fields from the posted properties
        props = json_model["properties"]
        obj.identifier = obj.generate_new_identifier()
        obj.title = props["title"]
        obj.is_cef = props["is_cef"]
        obj.is_coherence_keeping = props["is_coherence_keeping"]
        obj.is_pik = props.get("is_pik", False)
        obj = self.set_intervention(obj, props["intervention"], user)

        # Geometry must exist in the DB before the referencing object is saved
        obj.geometry.save()
        obj.save()

        # Related collections (m2m-style) need the saved object
        obj = self._set_compensation_actions(obj, props["actions"])
        obj = self._set_compensation_states(obj, props["before_states"], obj.before_states)
        obj = self._set_compensation_states(obj, props["after_states"], obj.after_states)
        obj = self._set_deadlines(obj, props["deadlines"])

        obj.log.add(obj.created)

        # Async post-processing on the new geometry
        celery_update_parcels.delay(obj.geometry.id)
        celery_check_for_geometry_conflicts.delay(obj.geometry.id)

        return obj.id
|
||||
|
||||
def update_model_from_json(self, id, json_model, user):
    """ Updates a Compensation entry based on the contents of json_model

    Args:
        id (str): The object's id
        json_model (dict): The json containing data
        user (User): The API user

    Returns:
        created_id (str): The id of the updated Compensation entry
    """
    with transaction.atomic():
        edit_entry = UserActionLogEntry.get_edited_action(user, "API update")
        obj = self._get_obj_from_db(id, user)

        # Overwrite scalar fields from the posted properties
        props = json_model["properties"]
        obj.title = props["title"]
        obj.is_cef = props["is_cef"]
        obj.is_coherence_keeping = props["is_coherence_keeping"]
        obj.is_pik = props.get("is_pik", False)
        obj.modified = edit_entry
        obj.geometry.geom = self._create_geometry_from_json(json_model)
        obj.geometry.modified = edit_entry
        obj = self.set_intervention(obj, props["intervention"], user)

        obj.geometry.save()
        obj.save()

        # Replace related collections with the posted data
        obj = self._set_compensation_actions(obj, props["actions"])
        obj = self._set_compensation_states(obj, props["before_states"], obj.before_states)
        obj = self._set_compensation_states(obj, props["after_states"], obj.after_states)
        obj = self._set_deadlines(obj, props["deadlines"])

        obj.log.add(edit_entry)

        # Parcels may have changed with the new geometry
        celery_update_parcels.delay(obj.geometry.id)

        return obj.id
|
169
api/utils/serializer/v1/deduction.py
Normal file
169
api/utils/serializer/v1/deduction.py
Normal file
@ -0,0 +1,169 @@
|
||||
"""
|
||||
Author: Michel Peltriaux
|
||||
Organization: Struktur- und Genehmigungsdirektion Nord, Rhineland-Palatinate, Germany
|
||||
Contact: michel.peltriaux@sgdnord.rlp.de
|
||||
Created on: 28.01.22
|
||||
|
||||
"""
|
||||
from django.core.exceptions import ObjectDoesNotExist
|
||||
from django.db.models import Q
|
||||
|
||||
from api.utils.serializer.v1.serializer import DeductableAPISerializerV1Mixin, AbstractModelAPISerializerV1
|
||||
from compensation.models import EcoAccountDeduction, EcoAccount
|
||||
from intervention.models import Intervention
|
||||
from konova.utils.message_templates import DATA_UNSHARED
|
||||
|
||||
|
||||
class DeductionAPISerializerV1(AbstractModelAPISerializerV1,
                               DeductableAPISerializerV1Mixin):
    """API serializer (v1) for EcoAccountDeduction entries.

    A deduction links an intervention to an eco account with a surface
    amount; access control is derived from the linked intervention.
    """
    model = EcoAccountDeduction

    def prepare_lookup(self, _id, user):
        """ Updates lookup dict for db fetching

        Args:
            _id (str): The object's id
            user (User): The user requesting for

        Returns:
            None
        """
        super().prepare_lookup(_id, user)
        # Deductions carry no 'deleted' flag of their own — drop the default filter
        del self.lookup["deleted__isnull"]
        # Shared access is granted through the linked intervention's users/teams
        self.shared_lookup = Q(
            Q(intervention__users__in=[user]) |
            Q(intervention__teams__in=user.shared_teams)
        )

    def _model_to_geo_json(self, entry):
        """ Serializes a single deduction entry

        Args:
            entry (EcoAccountDeduction): The data entry

        Returns:
            serialized_json (dict)
        """
        return self._single_deduction_to_json(entry)

    def _resolve_payload(self, json_model, user):
        """ Validates the posted json and resolves the referenced entries.

        Shared by create and update so both paths perform identical checks
        (previously this logic was duplicated in both methods).

        Args:
            json_model (dict): The json containing data
            user (User): The API user

        Returns:
            (EcoAccount, Intervention, float): account, intervention, surface

        Raises:
            ValueError: If the surface is not > 0
            PermissionError: If account or intervention is not shared with user
        """
        acc_id = json_model["eco_account"]
        intervention_id = json_model["intervention"]
        surface = float(json_model["surface"])
        if surface <= 0:
            raise ValueError("Surface must be > 0 m²")

        acc = EcoAccount.objects.get(
            id=acc_id,
            deleted__isnull=True,
        )
        intervention = Intervention.objects.get(
            id=intervention_id,
            deleted__isnull=True,
        )
        if not acc.is_shared_with(user):
            raise PermissionError(f"Account: {DATA_UNSHARED}")
        if not intervention.is_shared_with(user):
            raise PermissionError(f"Intervention: {DATA_UNSHARED}")
        return acc, intervention, surface

    def create_model_from_json(self, json_model, user):
        """ Creates a new entry for the model based on the contents of json_model

        Args:
            json_model (dict): The json containing data
            user (User): The API user

        Returns:
            created_id (str): The id of the newly created deduction entry
        """
        acc, intervention, surface = self._resolve_payload(json_model, user)

        deduction = self.model.objects.create(
            intervention=intervention,
            account=acc,
            surface=surface,
        )
        # Record the change on the linked intervention's history
        deduction.intervention.mark_as_edited(user)
        return str(deduction.id)

    def _get_obj_from_db(self, id, user):
        """ Returns the object from database

        Fails if id not found or user does not have shared access.

        Args:
            id (str): The object's id
            user (User): The API user

        Returns:
            obj (EcoAccountDeduction)
        """
        obj = self.model.objects.get(
            id=id,
        )
        if not obj.intervention.is_shared_with(user):
            raise PermissionError(f"Intervention: {DATA_UNSHARED}")
        return obj

    def update_model_from_json(self, id, json_model, user):
        """ Updates an entry for the model based on the contents of json_model

        Args:
            id (str): The object's id
            json_model (dict): The json containing data
            user (User): The API user

        Returns:
            created_id (str): The id of the updated deduction entry
        """
        deduction = self._get_obj_from_db(id, user)
        acc, intervention, surface = self._resolve_payload(json_model, user)

        deduction.intervention = intervention
        deduction.account = acc
        deduction.surface = surface
        deduction.save()

        deduction.intervention.mark_as_edited(user)
        return str(deduction.id)

    def delete_entry(self, id, user):
        """ Deletes the entry

        Args:
            id (str): The entry's id
            user (User): The API user

        Returns:
            success (bool): True if the entry no longer exists in the DB
        """
        entry = self._get_obj_from_db(id, user)
        entry.intervention.mark_as_edited(user)
        entry.delete()
        # Verify the hard delete actually removed the row
        try:
            entry.refresh_from_db()
            success = False
        except ObjectDoesNotExist:
            success = True
        return success
|
199
api/utils/serializer/v1/ecoaccount.py
Normal file
199
api/utils/serializer/v1/ecoaccount.py
Normal file
@ -0,0 +1,199 @@
|
||||
"""
|
||||
Author: Michel Peltriaux
|
||||
Organization: Struktur- und Genehmigungsdirektion Nord, Rhineland-Palatinate, Germany
|
||||
Contact: michel.peltriaux@sgdnord.rlp.de
|
||||
Created on: 24.01.22
|
||||
|
||||
"""
|
||||
from django.db import transaction
|
||||
|
||||
from api.utils.serializer.v1.serializer import AbstractModelAPISerializerV1, AbstractCompensationAPISerializerV1Mixin, \
|
||||
LegalAPISerializerV1Mixin, ResponsibilityAPISerializerV1Mixin, DeductableAPISerializerV1Mixin
|
||||
from codelist.settings import CODELIST_CONSERVATION_OFFICE_ID, CODELIST_HANDLER_ID
|
||||
from compensation.models import EcoAccount
|
||||
from intervention.models import Legal, Responsibility, Handler
|
||||
from konova.models import Geometry
|
||||
from konova.tasks import celery_update_parcels, celery_check_for_geometry_conflicts
|
||||
from user.models import UserActionLogEntry
|
||||
|
||||
|
||||
class EcoAccountAPISerializerV1(AbstractModelAPISerializerV1,
                                AbstractCompensationAPISerializerV1Mixin,
                                LegalAPISerializerV1Mixin,
                                ResponsibilityAPISerializerV1Mixin,
                                DeductableAPISerializerV1Mixin):
    """API serializer (v1) for EcoAccount entries."""
    model = EcoAccount

    def _extend_properties_data(self, entry):
        """Adds EcoAccount specific fields to the serialized properties."""
        self.properties_data["is_pik"] = entry.is_pik
        self.properties_data["deductable_surface"] = entry.deductable_surface
        self.properties_data["deductable_surface_available"] = entry.deductable_surface - entry.get_deductions_surface()
        self.properties_data["responsible"] = self._responsible_to_json(entry.responsible)
        self.properties_data["legal"] = self._legal_to_json(entry.legal)
        self.properties_data["before_states"] = self._compensation_state_to_json(entry.before_states.all())
        self.properties_data["after_states"] = self._compensation_state_to_json(entry.after_states.all())
        self.properties_data["actions"] = self._compensation_actions_to_json(entry.actions.all())
        self.properties_data["deadlines"] = self._deadlines_to_json(entry.deadlines.all())
        self.properties_data["deductions"] = self._deductions_to_json(entry.deductions.all())

    def _legal_to_json(self, legal: Legal):
        """Serializes the legal data (only the agreement date is exposed)."""
        return {
            "agreement_date": legal.registration_date,
        }

    def _responsible_to_json(self, responsible: Responsibility):
        """Serializes the responsibility data into json."""
        return {
            "conservation_office": self._konova_code_to_json(responsible.conservation_office),
            "conservation_file_number": responsible.conservation_file_number,
            "handler": self._handler_to_json(responsible.handler),
        }

    def _set_responsibility(self, obj, responsibility_data: dict):
        """ Sets the responsible data contents to the provided responsibility_data dict

        Args:
            obj (EcoAccount): The eco account object
            responsibility_data (dict): The new data

        Returns:
            obj
        """
        if responsibility_data is None:
            return obj
        obj.responsible.conservation_office = self._konova_code_from_json(
            responsibility_data["conservation_office"],
            CODELIST_CONSERVATION_OFFICE_ID,
        )
        obj.responsible.conservation_file_number = responsibility_data["conservation_file_number"]
        obj.responsible.handler.type = self._konova_code_from_json(
            responsibility_data["handler"]["type"],
            CODELIST_HANDLER_ID,
        )
        obj.responsible.handler.detail = responsibility_data["handler"]["detail"]
        return obj

    def _set_legal(self, obj, legal_data):
        """Sets the legal agreement date from the posted data."""
        obj.legal.registration_date = legal_data.get("agreement_date", None)
        return obj

    def _parse_deductable_surface(self, properties):
        """ Parses and validates the deductable surface from the properties.

        Shared by create and update so both enforce the same constraints
        (previously the update path skipped validation entirely).

        Args:
            properties (dict): The posted properties

        Returns:
            surface (float): Validated, non-negative surface

        Raises:
            ValueError: If the value is missing, not a number, or negative
        """
        try:
            surface = float(properties["deductable_surface"])
        except TypeError:
            raise ValueError("Deductable surface (m²) must be a number >= 0")
        if surface < 0:
            raise ValueError("Deductable surface (m²) must be greater or equal 0")
        return surface

    def _initialize_objects(self, json_model, user):
        """ Initializes all needed objects from the json_model data

        Does not persist data to the DB!

        Args:
            json_model (dict): The json data
            user (User): The API user

        Returns:
            obj (EcoAccount)
        """
        create_action = UserActionLogEntry.get_created_action(user, comment="API Import")
        # Create geometry from the GeoJSON payload
        geometry = Geometry()
        geometry.geom = self._create_geometry_from_json(json_model)
        geometry.created = create_action

        # Create linked (still unsaved) objects
        obj = EcoAccount()
        obj.responsible = Responsibility(
            handler=Handler()
        )
        obj.legal = Legal()
        obj.created = create_action
        obj.modified = create_action
        obj.geometry = geometry
        return obj

    def create_model_from_json(self, json_model, user):
        """ Creates a new entry for the model based on the contents of json_model

        Args:
            json_model (dict): The json containing data
            user (User): The API user

        Returns:
            created_id (str): The id of the newly created EcoAccount entry
        """
        with transaction.atomic():
            obj = self._initialize_objects(json_model, user)

            # Fill in data to objects
            properties = json_model["properties"]
            obj.identifier = obj.generate_new_identifier()
            obj.title = properties["title"]
            obj.is_pik = properties.get("is_pik", False)
            obj.deductable_surface = self._parse_deductable_surface(properties)

            obj = self._set_responsibility(obj, properties["responsible"])
            obj = self._set_legal(obj, properties["legal"])

            # Persist dependencies before the referencing object
            obj.geometry.save()
            obj.responsible.handler.save()
            obj.responsible.save()
            obj.legal.save()
            obj.save()

            obj = self._set_compensation_actions(obj, properties["actions"])
            obj = self._set_compensation_states(obj, properties["before_states"], obj.before_states)
            obj = self._set_compensation_states(obj, properties["after_states"], obj.after_states)
            obj = self._set_deadlines(obj, properties["deadlines"])

            obj.log.add(obj.created)
            obj.users.add(user)

            # Async post-processing on the new geometry
            celery_update_parcels.delay(obj.geometry.id)
            celery_check_for_geometry_conflicts.delay(obj.geometry.id)

            return obj.id

    def update_model_from_json(self, id, json_model, user):
        """ Updates an entry for the model based on the contents of json_model

        Args:
            id (str): The object's id
            json_model (dict): The json containing data
            user (User): The API user

        Returns:
            created_id (str): The id of the updated EcoAccount entry
        """
        with transaction.atomic():
            update_action = UserActionLogEntry.get_edited_action(user, "API update")
            obj = self._get_obj_from_db(id, user)

            # Fill in data to objects
            properties = json_model["properties"]
            obj.title = properties["title"]
            obj.is_pik = properties.get("is_pik", False)
            # Use the same validation as create (fixes missing checks on update)
            obj.deductable_surface = self._parse_deductable_surface(properties)
            obj.modified = update_action
            obj.geometry.geom = self._create_geometry_from_json(json_model)
            obj.geometry.modified = update_action
            obj = self._set_responsibility(obj, properties["responsible"])
            obj = self._set_legal(obj, properties["legal"])

            obj.geometry.save()
            obj.responsible.handler.save()
            obj.responsible.save()
            obj.legal.save()
            obj.save()

            obj = self._set_compensation_actions(obj, properties["actions"])
            obj = self._set_compensation_states(obj, properties["before_states"], obj.before_states)
            obj = self._set_compensation_states(obj, properties["after_states"], obj.after_states)
            obj = self._set_deadlines(obj, properties["deadlines"])

            obj.log.add(update_action)

            celery_update_parcels.delay(obj.geometry.id)

            return obj.id
|
168
api/utils/serializer/v1/ema.py
Normal file
168
api/utils/serializer/v1/ema.py
Normal file
@ -0,0 +1,168 @@
|
||||
"""
|
||||
Author: Michel Peltriaux
|
||||
Organization: Struktur- und Genehmigungsdirektion Nord, Rhineland-Palatinate, Germany
|
||||
Contact: michel.peltriaux@sgdnord.rlp.de
|
||||
Created on: 24.01.22
|
||||
|
||||
"""
|
||||
from django.db import transaction
|
||||
|
||||
from api.utils.serializer.v1.serializer import AbstractModelAPISerializerV1, AbstractCompensationAPISerializerV1Mixin, \
|
||||
ResponsibilityAPISerializerV1Mixin
|
||||
from codelist.settings import CODELIST_CONSERVATION_OFFICE_ID, CODELIST_HANDLER_ID
|
||||
from ema.models import Ema
|
||||
from intervention.models import Responsibility, Handler
|
||||
from konova.models import Geometry
|
||||
from konova.tasks import celery_update_parcels, celery_check_for_geometry_conflicts
|
||||
from user.models import UserActionLogEntry
|
||||
|
||||
|
||||
class EmaAPISerializerV1(AbstractModelAPISerializerV1, AbstractCompensationAPISerializerV1Mixin, ResponsibilityAPISerializerV1Mixin):
    """API serializer (v1) for Ema entries."""
    model = Ema

    def _extend_properties_data(self, entry):
        """Adds Ema specific fields to the serialized properties."""
        props = self.properties_data
        props["is_pik"] = entry.is_pik
        props["responsible"] = self._responsible_to_json(entry.responsible)
        props["before_states"] = self._compensation_state_to_json(entry.before_states.all())
        props["after_states"] = self._compensation_state_to_json(entry.after_states.all())
        props["actions"] = self._compensation_actions_to_json(entry.actions.all())
        props["deadlines"] = self._deadlines_to_json(entry.deadlines.all())

    def _responsible_to_json(self, responsible: Responsibility):
        """Serializes the responsibility data into json."""
        return {
            "conservation_office": self._konova_code_to_json(responsible.conservation_office),
            "conservation_file_number": responsible.conservation_file_number,
            "handler": self._handler_to_json(responsible.handler),
        }

    def _set_responsibility(self, obj, responsibility_data: dict):
        """ Writes the posted responsibility data onto the object

        A None payload leaves the object untouched.

        Args:
            obj (Ema): The Ema object
            responsibility_data (dict): The new data

        Returns:
            obj
        """
        if responsibility_data is None:
            return obj
        responsible = obj.responsible
        responsible.conservation_office = self._konova_code_from_json(
            responsibility_data["conservation_office"],
            CODELIST_CONSERVATION_OFFICE_ID,
        )
        responsible.conservation_file_number = responsibility_data["conservation_file_number"]
        handler_data = responsibility_data["handler"]
        responsible.handler.type = self._konova_code_from_json(
            handler_data["type"],
            CODELIST_HANDLER_ID,
        )
        responsible.handler.detail = handler_data["detail"]
        return obj

    def _initialize_objects(self, json_model, user):
        """ Builds unsaved Ema and Geometry instances from the json data

        Nothing is persisted to the DB here.

        Args:
            json_model (dict): The json data
            user (User): The API user

        Returns:
            obj (Ema)
        """
        log_entry = UserActionLogEntry.get_created_action(user, comment="API Import")

        geom_obj = Geometry()
        geom_obj.geom = self._create_geometry_from_json(json_model)
        geom_obj.created = log_entry

        ema = Ema()
        ema.responsible = Responsibility(
            handler=Handler()
        )
        ema.created = log_entry
        ema.modified = log_entry
        ema.geometry = geom_obj
        return ema

    def create_model_from_json(self, json_model, user):
        """ Creates a new Ema entry based on the contents of json_model

        Args:
            json_model (dict): The json containing data
            user (User): The API user

        Returns:
            created_id (str): The id of the newly created Ema entry
        """
        with transaction.atomic():
            obj = self._initialize_objects(json_model, user)

            props = json_model["properties"]
            obj.identifier = obj.generate_new_identifier()
            obj.title = props["title"]
            obj.is_pik = props.get("is_pik", False)
            obj = self._set_responsibility(obj, props["responsible"])

            # Persist dependencies before the referencing object
            obj.geometry.save()
            obj.responsible.handler.save()
            obj.responsible.save()
            obj.save()

            obj = self._set_compensation_actions(obj, props["actions"])
            obj = self._set_compensation_states(obj, props["before_states"], obj.before_states)
            obj = self._set_compensation_states(obj, props["after_states"], obj.after_states)
            obj = self._set_deadlines(obj, props["deadlines"])

            obj.log.add(obj.created)
            obj.users.add(user)

            # Async post-processing on the new geometry
            celery_update_parcels.delay(obj.geometry.id)
            celery_check_for_geometry_conflicts.delay(obj.geometry.id)

            return obj.id

    def update_model_from_json(self, id, json_model, user):
        """ Updates an Ema entry based on the contents of json_model

        Args:
            id (str): The object's id
            json_model (dict): The json containing data
            user (User): The API user

        Returns:
            created_id (str): The id of the updated Ema entry
        """
        with transaction.atomic():
            edit_entry = UserActionLogEntry.get_edited_action(user, "API update")
            obj = self._get_obj_from_db(id, user)

            props = json_model["properties"]
            obj.title = props["title"]
            obj.is_pik = props.get("is_pik", False)
            obj.modified = edit_entry
            obj.geometry.geom = self._create_geometry_from_json(json_model)
            obj.geometry.modified = edit_entry
            obj = self._set_responsibility(obj, props["responsible"])

            obj.geometry.save()
            obj.responsible.handler.save()
            obj.responsible.save()
            obj.save()

            obj = self._set_compensation_actions(obj, props["actions"])
            obj = self._set_compensation_states(obj, props["before_states"], obj.before_states)
            obj = self._set_compensation_states(obj, props["after_states"], obj.after_states)
            obj = self._set_deadlines(obj, props["deadlines"])

            obj.log.add(edit_entry)

            celery_update_parcels.delay(obj.geometry.id)

            return obj.id
|
207
api/utils/serializer/v1/intervention.py
Normal file
207
api/utils/serializer/v1/intervention.py
Normal file
@ -0,0 +1,207 @@
|
||||
"""
|
||||
Author: Michel Peltriaux
|
||||
Organization: Struktur- und Genehmigungsdirektion Nord, Rhineland-Palatinate, Germany
|
||||
Contact: michel.peltriaux@sgdnord.rlp.de
|
||||
Created on: 24.01.22
|
||||
|
||||
"""
|
||||
from django.db import transaction
|
||||
from django.db.models import QuerySet
|
||||
|
||||
from api.utils.serializer.v1.serializer import AbstractModelAPISerializerV1, \
|
||||
ResponsibilityAPISerializerV1Mixin, LegalAPISerializerV1Mixin, DeductableAPISerializerV1Mixin
|
||||
from compensation.models import Payment
|
||||
from intervention.models import Intervention, Responsibility, Legal, Handler
|
||||
from konova.models import Geometry
|
||||
from konova.tasks import celery_update_parcels, celery_check_for_geometry_conflicts
|
||||
from user.models import UserActionLogEntry
|
||||
|
||||
|
||||
class InterventionAPISerializerV1(AbstractModelAPISerializerV1,
                                  ResponsibilityAPISerializerV1Mixin,
                                  LegalAPISerializerV1Mixin,
                                  DeductableAPISerializerV1Mixin):
    """API serializer (v1) for Intervention entries."""
    model = Intervention

    def _compensations_to_json(self, qs: QuerySet):
        """Serializes linked compensations into a list of id/identifier/title dicts."""
        return list(
            qs.values("id", "identifier", "title")
        )

    def _payments_to_json(self, qs: QuerySet):
        """ Serializes payments into json

        Args:
            qs (QuerySet): A queryset of Payment entries

        Returns:
            serialized_json (list)
        """
        return list(qs.values("amount", "due_on", "comment"))

    def _extend_properties_data(self, entry):
        """Adds Intervention specific fields to the serialized properties."""
        props = self.properties_data
        props["responsible"] = self._responsible_to_json(entry.responsible)
        props["legal"] = self._legal_to_json(entry.legal)
        props["compensations"] = self._compensations_to_json(entry.compensations.all())
        props["payments"] = self._payments_to_json(entry.payments.all())
        props["deductions"] = self._deductions_to_json(entry.deductions.all())

    def _initialize_objects(self, json_model, user):
        """ Builds unsaved Intervention and related instances from the json data

        Nothing is persisted to the DB here.

        Args:
            json_model (dict): The json data
            user (User): The API user

        Returns:
            obj (Intervention)
        """
        log_entry = UserActionLogEntry.get_created_action(user, comment="API Import")

        geom_obj = Geometry()
        geom_obj.geom = self._create_geometry_from_json(json_model)
        geom_obj.created = log_entry

        intervention = Intervention()
        intervention.legal = Legal()
        intervention.created = log_entry
        intervention.modified = log_entry
        intervention.geometry = geom_obj
        intervention.responsible = Responsibility(
            handler=Handler()
        )
        return intervention

    def _set_payments(self, obj, payment_data):
        """ Replaces the intervention's payments according to payment_data

        Pre-existing payments with identical (amount, due_on, comment) are
        reused instead of recreated; everything else is created fresh.

        Args:
            obj (Intervention): The Intervention object
            payment_data (dict): The posted payment_data

        Returns:
            obj (Intervention)
        """
        if payment_data is None:
            return obj
        kept_ids = []
        for entry in payment_data:
            due_on = entry["due_on"]
            amount = float(entry["amount"])
            comment = entry["comment"]

            # Validity checks
            if amount <= 0:
                raise ValueError("Payment amount must be > 0")

            missing_due_on = due_on is None or len(due_on) == 0
            missing_comment = comment is None or len(comment) == 0
            if missing_due_on and missing_comment:
                raise ValueError("If no due_on can be provided, you need to explain why using the comment")

            # Reuse an identical existing payment not already claimed this round
            match = obj.payments.filter(
                amount=amount,
                due_on=due_on,
                comment=comment,
            ).exclude(
                id__in=kept_ids
            ).first()
            if match is None:
                match = Payment.objects.create(
                    amount=amount,
                    due_on=due_on,
                    comment=comment,
                )
            kept_ids.append(match.id)
        obj.payments.set(
            Payment.objects.filter(id__in=kept_ids)
        )
        return obj

    def create_model_from_json(self, json_model, user):
        """ Creates a new Intervention entry based on the contents of json_model

        Args:
            json_model (dict): The json containing data
            user (User): The API user

        Returns:
            created_id (str): The id of the newly created Intervention entry
        """
        with transaction.atomic():
            obj = self._initialize_objects(json_model, user)

            props = json_model["properties"]
            obj.identifier = obj.generate_new_identifier()
            obj.title = props["title"]
            self._set_responsibility(obj, props["responsible"])
            self._set_legal(obj, props["legal"])

            # Persist dependencies before the referencing object
            obj.responsible.handler.save()
            obj.responsible.save()
            obj.geometry.save()
            obj.legal.save()
            obj.save()

            obj.users.add(user)
            obj.log.add(obj.created)

            # Async post-processing on the new geometry
            celery_update_parcels.delay(obj.geometry.id)
            celery_check_for_geometry_conflicts.delay(obj.geometry.id)

            return obj.id

    def update_model_from_json(self, id, json_model, user):
        """ Updates an Intervention entry based on the contents of json_model

        Args:
            id (str): The object's id
            json_model (dict): The json containing data
            user (User): The API user

        Returns:
            created_id (str): The id of the updated Intervention entry
        """
        with transaction.atomic():
            edit_entry = UserActionLogEntry.get_edited_action(user, "API update")
            obj = self._get_obj_from_db(id, user)

            props = json_model["properties"]
            obj.title = props["title"]
            self._set_responsibility(obj, props.get("responsible", None))
            self._set_legal(obj, props.get("legal", None))
            self._set_payments(obj, props.get("payments", None))
            obj.geometry.geom = self._create_geometry_from_json(json_model)
            obj.geometry.modified = edit_entry

            obj.responsible.handler.save()
            obj.responsible.save()
            obj.geometry.save()
            obj.legal.save()
            obj.save()

            obj.mark_as_edited(user, edit_comment="API update")
            obj.send_data_to_egon()

            celery_update_parcels.delay(obj.geometry.id)

            return obj.id
|
498
api/utils/serializer/v1/serializer.py
Normal file
498
api/utils/serializer/v1/serializer.py
Normal file
@ -0,0 +1,498 @@
|
||||
"""
|
||||
Author: Michel Peltriaux
|
||||
Organization: Struktur- und Genehmigungsdirektion Nord, Rhineland-Palatinate, Germany
|
||||
Contact: michel.peltriaux@sgdnord.rlp.de
|
||||
Created on: 24.01.22
|
||||
|
||||
"""
|
||||
|
||||
import json
|
||||
|
||||
from django.contrib.gis.geos import MultiPolygon
|
||||
from django.core.exceptions import ObjectDoesNotExist
|
||||
from django.db.models import QuerySet
|
||||
|
||||
from api.utils.serializer.serializer import AbstractModelAPISerializer
|
||||
from codelist.models import KonovaCode
|
||||
from codelist.settings import CODELIST_COMPENSATION_ACTION_ID, CODELIST_BIOTOPES_ID, CODELIST_PROCESS_TYPE_ID, \
|
||||
CODELIST_LAW_ID, CODELIST_REGISTRATION_OFFICE_ID, CODELIST_CONSERVATION_OFFICE_ID, \
|
||||
CODELIST_COMPENSATION_ACTION_DETAIL_ID, CODELIST_HANDLER_ID, \
|
||||
CODELIST_BIOTOPES_EXTRA_CODES_FULL_ID
|
||||
from compensation.models import CompensationAction, UnitChoices, CompensationState
|
||||
from intervention.models import Responsibility, Legal, Handler
|
||||
from konova.models import Deadline, DeadlineType
|
||||
from konova.utils.message_templates import DATA_UNSHARED
|
||||
|
||||
|
||||
class AbstractModelAPISerializerV1(AbstractModelAPISerializer):
    """ Version 1 base serializer: turns konova model entries into GeoJSON and back. """

    def _model_to_geo_json(self, entry):
        """ Adds the basic data, which all elements hold

        Builds a GeoJSON feature from the entry's geometry and fills its
        "properties" member with the common attributes; subclasses extend the
        properties via _extend_properties_data().

        Args:
            entry (): The data entry

        Returns:
            geo_json (dict): GeoJSON dict with populated "properties"
        """
        if entry.geometry.geom is not None:
            geom = entry.geometry.geom.geojson
        else:
            # No geometry stored -> serialize an empty MultiPolygon instead
            geom = MultiPolygon().geojson
        geo_json = json.loads(geom)
        self.properties_data = {
            "id": entry.id,
            "identifier": entry.identifier,
            "title": entry.title,
            "created_on": self._created_on_to_json(entry),
            "modified_on": self._modified_on_to_json(entry),
        }
        self._extend_properties_data(entry)
        geo_json["properties"] = self.properties_data
        return geo_json

    def _konova_code_to_json(self, konova_code: KonovaCode):
        """ Serializes KonovaCode model into json

        Args:
            konova_code (KonovaCode): The KonovaCode entry

        Returns:
            serialized_json (dict): id/long_name/short_name, or None if no code given
        """
        if konova_code is None:
            return None
        return {
            "id": konova_code.id,
            "long_name": konova_code.long_name,
            "short_name": konova_code.short_name,
        }

    def _konova_code_from_json(self, json_str, code_list_identifier):
        """ Returns a konova code instance

        Args:
            json_str (str): The value for the code (id)
            code_list_identifier (str): From which konova code list this code is supposed to be from

        Returns:
            code (KonovaCode): The matching code, or None for empty/missing input

        Raises:
            ObjectDoesNotExist: If the id is not part of the given official code list
        """
        if json_str is None:
            return None
        json_str = str(json_str)
        if len(json_str) == 0:
            return None
        try:
            code = KonovaCode.objects.get(
                id=json_str,
                code_lists__in=[code_list_identifier]
            )
        except ObjectDoesNotExist as e:
            # Re-raise with a message that names the offending id and code list
            msg = f"{e.args[0]} ({json_str} not found in official list {code_list_identifier})"
            raise ObjectDoesNotExist(msg)
        return code

    def _created_on_to_json(self, entry):
        """ Serializes the created_on into json

        Args:
            entry (BaseObject): The entry

        Returns:
            created_on (timestamp): presumably the log entry's timestamp field — None if never set
        """
        return entry.created.timestamp if entry.created is not None else None

    def _modified_on_to_json(self, entry):
        """ Serializes the modified_on into json

        Falls back to the creation info if the entry was never modified.

        Args:
            entry (BaseObject): The entry

        Returns:
            modified_on (timestamp): timestamp of last modification (or creation), else None
        """
        modified_on = entry.modified or entry.created
        modified_on = modified_on.timestamp if modified_on is not None else None
        return modified_on

    def delete_entry(self, id, user):
        """ Marks an entry as deleted

        Args:
            id (str): The entry's id
            user (User): The API user

        Returns:
            success (bool): True if the entry is now flagged as deleted

        Raises:
            PermissionError: If the entry is not shared with the user
        """
        entry = self._get_obj_from_db(id, user)
        is_shared = entry.is_shared_with(user)
        if not is_shared:
            raise PermissionError(DATA_UNSHARED)
        # Do not send mails if entry is deleted using the API. There could be hundreds of deletions
        # resulting in hundreds of mails at once.
        entry.mark_as_deleted(user, send_mail=False)
        entry.refresh_from_db()
        success = entry.deleted is not None
        return success
|
||||
|
||||
|
||||
class DeductableAPISerializerV1Mixin:
    """ Mixin adding eco account deduction serialization to an API serializer. """

    class Meta:
        abstract = True

    def _single_deduction_to_json(self, entry):
        """ Serializes a single eco account deduction into json

        Args:
            entry (EcoAccountDeduction): An EcoAccountDeduction

        Returns:
            serialized_json (dict)
        """
        account = entry.account
        intervention = entry.intervention
        account_json = {
            "id": account.pk,
            "identifier": account.identifier,
            "title": account.title,
        }
        intervention_json = {
            "id": intervention.pk,
            "identifier": intervention.identifier,
            "title": intervention.title,
        }
        return {
            "id": entry.pk,
            "eco_account": account_json,
            "surface": entry.surface,
            "intervention": intervention_json,
        }

    def _deductions_to_json(self, qs: QuerySet):
        """ Serializes eco account deductions into json

        Args:
            qs (QuerySet): A queryset of EcoAccountDeduction entries

        Returns:
            serialized_json (list)
        """
        serialized = []
        for deduction in qs:
            serialized.append(self._single_deduction_to_json(deduction))
        return serialized
|
||||
|
||||
|
||||
class ResponsibilityAPISerializerV1Mixin:
    """ Mixin adding (de)serialization of Responsibility data to an API serializer. """

    class Meta:
        abstract = True

    def _handler_to_json(self, handler: Handler):
        # Serializes a Handler into {"type": <KonovaCode json>, "detail": <str>}
        return {
            "type": self._konova_code_to_json(handler.type),
            "detail": handler.detail
        }

    def _responsible_to_json(self, responsible: Responsibility):
        """ Serializes Responsibility model into json

        Args:
            responsible (Responsibility): The Responsibility entry

        Returns:
            serialized_json (dict)
        """
        return {
            "registration_office": self._konova_code_to_json(responsible.registration_office),
            "registration_file_number": responsible.registration_file_number,
            "conservation_office": self._konova_code_to_json(responsible.conservation_office),
            "conservation_file_number": responsible.conservation_file_number,
            "handler": self._handler_to_json(responsible.handler),
        }

    def _set_responsibility(self, obj, responsibility_data: dict):
        """ Sets the responsible data contents to the provided responsibility_data dict

        NOTE(review): mutates obj.responsible and obj.responsible.handler in place
        without saving them — callers appear to be responsible for calling save()
        afterwards; confirm against the update flow.

        Args:
            obj (Intervention): The intervention object
            responsibility_data (dict): The new data

        Returns:
            obj
        """
        if responsibility_data is None:
            # Nothing provided -> leave the object untouched
            return obj
        obj.responsible.registration_office = self._konova_code_from_json(
            responsibility_data["registration_office"],
            CODELIST_REGISTRATION_OFFICE_ID
        )
        obj.responsible.registration_file_number = responsibility_data["registration_file_number"]
        obj.responsible.conservation_office = self._konova_code_from_json(
            responsibility_data["conservation_office"],
            CODELIST_CONSERVATION_OFFICE_ID,
        )
        obj.responsible.conservation_file_number = responsibility_data["conservation_file_number"]
        obj.responsible.handler.type = self._konova_code_from_json(
            responsibility_data["handler"]["type"],
            CODELIST_HANDLER_ID,
        )
        obj.responsible.handler.detail = responsibility_data["handler"]["detail"]
        return obj
|
||||
|
||||
|
||||
class LegalAPISerializerV1Mixin:
    """ Mixin adding (de)serialization of Legal data to an API serializer. """

    class Meta:
        abstract = True

    def _legal_to_json(self, legal: Legal):
        """ Serializes Legal model into json

        Args:
            legal (Legal): The Legal entry

        Returns:
            serialized_json (dict)
        """
        law_entries = []
        for law in legal.laws.all():
            law_entries.append(self._konova_code_to_json(law))
        return {
            "registration_date": legal.registration_date,
            "binding_date": legal.binding_date,
            "process_type": self._konova_code_to_json(legal.process_type),
            "laws": law_entries,
        }

    def _set_legal(self, obj, legal_data):
        """ Sets the legal data contents to the provided legal_data dict

        Args:
            obj (Intervention): The intervention object
            legal_data (dict): The new data

        Returns:
            obj
        """
        if legal_data is None:
            return obj
        legal = obj.legal
        legal.registration_date = legal_data.get("registration_date")
        legal.binding_date = legal_data.get("binding_date")
        legal.process_type = self._konova_code_from_json(
            legal_data.get("process_type"),
            CODELIST_PROCESS_TYPE_ID,
        )
        new_laws = [
            self._konova_code_from_json(law_entry, CODELIST_LAW_ID)
            for law_entry in legal_data.get("laws", [])
        ]
        legal.laws.set(new_laws)
        return obj
|
||||
|
||||
|
||||
class AbstractCompensationAPISerializerV1Mixin:
    """ Shared (de)serialization of deadlines, states and actions for compensation-like models. """

    class Meta:
        abstract = True

    def _set_deadlines(self, obj, deadline_data):
        """ Sets the linked deadline data according to the given deadline_data

        Reuses already-existing, identical deadlines instead of creating duplicates.

        Args:
            obj (Compensation): The Compensation object
            deadline_data (dict): The posted deadline_data

        Returns:
            obj (Compensation)

        Raises:
            ValueError: On missing keys or an unknown deadline type
        """
        deadlines = []
        for entry in deadline_data:
            try:
                deadline_type = entry["type"]
                date = entry["date"]
                comment = entry["comment"]
            except KeyError:
                raise ValueError(f"Invalid deadline content. Content was {entry} but should follow the specification")

            # Check on validity
            if deadline_type not in DeadlineType:
                raise ValueError(f"Invalid deadline type. Choices are {DeadlineType.values}")

            # If this exact data is already existing, we do not create it new. Instead put its id in the list of
            # entries we will use to set the new deadlines
            pre_existing_deadline = obj.deadlines.filter(
                type=deadline_type,
                date=date,
                comment=comment,
            ).exclude(
                id__in=deadlines
            ).first()
            if pre_existing_deadline is not None:
                deadlines.append(pre_existing_deadline.id)
            else:
                # Create and add id to list
                new_deadline = Deadline.objects.create(
                    type=deadline_type,
                    date=date,
                    comment=comment,
                )
                deadlines.append(new_deadline.id)
        obj.deadlines.set(deadlines)
        return obj

    def _set_compensation_states(self, obj, states_data, states_manager):
        """ Sets the linked compensation state data according to the given states_data

        Reuses already-existing states with the same biotope type and surface.

        Args:
            obj (Compensation): The Compensation object
            states_data (dict): The posted states_data
            states_manager (Manager): The before_states or after_states manager

        Returns:
            obj (Compensation)

        Raises:
            ValueError: On missing keys or a non-positive surface
        """
        states = []
        for entry in states_data:
            try:
                biotope_type = entry["biotope"]
                biotope_details = [
                    self._konova_code_from_json(e, CODELIST_BIOTOPES_EXTRA_CODES_FULL_ID) for e in entry["biotope_details"]
                ]
                surface = float(entry["surface"])
            except KeyError:
                raise ValueError(f"Invalid biotope content. Content was {entry} but should follow the specification ")

            # Check on validity
            if surface <= 0:
                raise ValueError("State surfaces must be > 0")

            # If this exact data is already existing, we do not create it new. Instead put its id in the list of
            # entries we will use to set the new states
            state = states_manager.filter(
                biotope_type__id=biotope_type,
                surface=surface,
            ).exclude(
                id__in=states
            ).first()
            if state is not None:
                states.append(state.id)
            else:
                # Create and add id to list
                state = CompensationState.objects.create(
                    biotope_type=self._konova_code_from_json(biotope_type, CODELIST_BIOTOPES_ID),
                    surface=surface
                )
                states.append(state.id)
            # Details are (re)applied to both reused and freshly created states
            # NOTE(review): original indentation lost in extraction — confirm this runs for both branches
            state.biotope_type_details.set(biotope_details)
        states_manager.set(states)
        return obj

    def _set_compensation_actions(self, obj, actions_data):
        """ Sets the linked compensation action data according to the given actions_data

        Reuses already-existing, identical actions instead of creating duplicates.

        Args:
            obj (Compensation): The Compensation object
            actions_data (dict): The posted actions_data

        Returns:
            obj (Compensation)

        Raises:
            ValueError: On missing keys, a non-positive amount or an unknown unit
        """
        actions = []
        for entry in actions_data:
            try:
                action_types = [
                    self._konova_code_from_json(e, CODELIST_COMPENSATION_ACTION_ID) for e in entry["action_types"]
                ]
                action_details = [
                    self._konova_code_from_json(e, CODELIST_COMPENSATION_ACTION_DETAIL_ID) for e in entry["action_details"]
                ]
                amount = float(entry["amount"])
                # Mapping of old "qm" into "m²"
                unit = UnitChoices.m2.value if entry["unit"] == "qm" else entry["unit"]
                comment = entry["comment"]
            except KeyError:
                raise ValueError(f"Invalid action content. Content was {entry} but should follow specification")

            # Check on validity
            if amount <= 0:
                raise ValueError("Action amount must be > 0")
            if unit not in UnitChoices:
                raise ValueError(f"Invalid unit. Choices are {UnitChoices.values}")

            # If this exact data is already existing, we do not create it new. Instead put its id in the list of
            # entries we will use to set the new actions
            action_entry = obj.actions.filter(
                action_type__in=action_types,
                amount=amount,
                unit=unit,
                comment=comment,
            ).exclude(
                id__in=actions
            ).first()
            if action_entry is not None:
                actions.append(action_entry.id)
            else:
                # Create and add id to list
                action_entry = CompensationAction.objects.create(
                    amount=amount,
                    unit=unit,
                    comment=comment,
                )
                actions.append(action_entry.id)

            # Types/details are (re)applied to both reused and freshly created actions
            # NOTE(review): original indentation lost in extraction — confirm this runs for both branches
            action_entry.action_type.set(action_types)
            action_entry.action_type_details.set(action_details)
        obj.actions.set(actions)
        return obj

    def _compensation_state_to_json(self, qs: QuerySet):
        """ Serializes compensation states into json

        Args:
            qs (QuerySet): A queryset of CompensationState entries

        Returns:
            serialized_json (list)
        """
        return [
            {
                "biotope": self._konova_code_to_json(entry.biotope_type),
                "biotope_details": [
                    self._konova_code_to_json(detail) for detail in entry.biotope_type_details.all()
                ],
                "surface": entry.surface,
            }
            for entry in qs
        ]

    def _compensation_actions_to_json(self, qs: QuerySet):
        """ Serializes CompensationActions into json

        Args:
            qs (QuerySet): A queryset of CompensationAction entries

        Returns:
            serialized_json (list)
        """
        return [
            {
                "action_types": [
                    self._konova_code_to_json(action) for action in entry.action_type.all()
                ],
                "action_details": [
                    self._konova_code_to_json(detail) for detail in entry.action_type_details.all()
                ],
                "amount": entry.amount,
                "unit": entry.unit,
                "comment": entry.comment,
            }
            for entry in qs
        ]

    def _deadlines_to_json(self, qs: QuerySet):
        """ Serializes deadlines into json

        Args:
            qs (QuerySet): A queryset of Deadline entries

        Returns:
            serialized_json (list)
        """
        return list(qs.values(
            "type",
            "date",
            "comment",
        ))
|
8
api/views/__init__.py
Normal file
8
api/views/__init__.py
Normal file
@ -0,0 +1,8 @@
|
||||
"""
|
||||
Author: Michel Peltriaux
|
||||
Organization: Struktur- und Genehmigungsdirektion Nord, Rhineland-Palatinate, Germany
|
||||
Contact: michel.peltriaux@sgdnord.rlp.de
|
||||
Created on: 21.01.22
|
||||
|
||||
"""
|
||||
from .v1 import *
|
35
api/views/method_views.py
Normal file
35
api/views/method_views.py
Normal file
@ -0,0 +1,35 @@
|
||||
"""
|
||||
Author: Michel Peltriaux
|
||||
Organization: Struktur- und Genehmigungsdirektion Nord, Rhineland-Palatinate, Germany
|
||||
Contact: michel.peltriaux@sgdnord.rlp.de
|
||||
Created on: 27.01.22
|
||||
|
||||
"""
|
||||
from django.contrib.auth.decorators import login_required
|
||||
from django.http import HttpRequest, JsonResponse
|
||||
|
||||
from api.models import APIUserToken
|
||||
|
||||
|
||||
@login_required
def generate_new_token_view(request: HttpRequest):
    """ Generates a fresh, not-yet-used API token for the logged-in user.

    Only GET is supported; any other method raises NotImplementedError.

    Args:
        request (HttpRequest): The incoming request

    Returns:
        response (JsonResponse): Holds the new token under "gen_data"
    """
    if request.method != "GET":
        raise NotImplementedError
    # Re-roll until the generated token does not collide with an existing one
    token = APIUserToken()
    while APIUserToken.objects.filter(token=token.token).exists():
        token = APIUserToken()
    return JsonResponse(
        data={
            "gen_data": token.token
        }
    )
|
7
api/views/v1/__init__.py
Normal file
7
api/views/v1/__init__.py
Normal file
@ -0,0 +1,7 @@
|
||||
"""
|
||||
Author: Michel Peltriaux
|
||||
Organization: Struktur- und Genehmigungsdirektion Nord, Rhineland-Palatinate, Germany
|
||||
Contact: michel.peltriaux@sgdnord.rlp.de
|
||||
Created on: 21.01.22
|
||||
|
||||
"""
|
132
api/views/v1/views.py
Normal file
132
api/views/v1/views.py
Normal file
@ -0,0 +1,132 @@
|
||||
"""
|
||||
Author: Michel Peltriaux
|
||||
Organization: Struktur- und Genehmigungsdirektion Nord, Rhineland-Palatinate, Germany
|
||||
Contact: michel.peltriaux@sgdnord.rlp.de
|
||||
Created on: 21.01.22
|
||||
|
||||
"""
|
||||
import json
|
||||
|
||||
from django.http import JsonResponse, HttpRequest
|
||||
|
||||
from api.utils.serializer.v1.compensation import CompensationAPISerializerV1
|
||||
from api.utils.serializer.v1.deduction import DeductionAPISerializerV1
|
||||
from api.utils.serializer.v1.ecoaccount import EcoAccountAPISerializerV1
|
||||
from api.utils.serializer.v1.ema import EmaAPISerializerV1
|
||||
from api.utils.serializer.v1.intervention import InterventionAPISerializerV1
|
||||
from api.views.views import AbstractAPIView
|
||||
|
||||
|
||||
class AbstractAPIViewV1(AbstractAPIView):
    """ Holds general serialization functions for API v1

    Dispatches GET/POST/PUT/DELETE to the configured serializer class.
    """

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Replace the serializer class attribute with an instance of it
        self.serializer = self.serializer()

    def get(self, request: HttpRequest, id=None):
        """ Handles the GET request

        Performs the fetching and serialization of the data

        Args:
            request (HttpRequest): The incoming request
            id (str): The entries id (optional)

        Returns:
            response (JsonResponse)
        """
        try:
            # Pagination parameters: rpp = results per page, p = page number
            self.rpp = int(request.GET.get("rpp", self.rpp))
            self.page_number = int(request.GET.get("p", self.page_number))

            self.serializer.rpp = self.rpp
            self.serializer.page_number = self.page_number

            self.serializer.prepare_lookup(id, self.user)
            data = self.serializer.fetch_and_serialize()
        except Exception as e:
            # Any serializer failure is reported as a JSON error body with status 500
            return self._return_error_response(e, 500)
        return self._return_response(request, data)

    def post(self, request: HttpRequest):
        """ Handles the POST request

        Performs creation of new data

        Args:
            request (HttpRequest): The incoming request

        Returns:
            response (JsonResponse): Holds the new entry's id
        """
        try:
            body = request.body.decode("utf-8")
            body = json.loads(body)
            created_id = self.serializer.create_model_from_json(body, self.user)
        except Exception as e:
            return self._return_error_response(e, 500)
        return JsonResponse({"id": created_id})

    def put(self, request: HttpRequest, id=None):
        """ Handles the PUT request

        Performs updating

        Args:
            request (HttpRequest): The incoming request
            id (str): The entries id

        Returns:
            response (JsonResponse): Holds the updated entry's id
        """
        try:
            body = request.body.decode("utf-8")
            body = json.loads(body)
            updated_id = self.serializer.update_model_from_json(id, body, self.user)
        except Exception as e:
            return self._return_error_response(e, 500)
        return JsonResponse({"id": updated_id})

    def delete(self, request: HttpRequest, id=None):
        """ Handles a DELETE request

        Args:
            request (HttpRequest): The incoming request
            id (str): The object's id

        Returns:
            response (JsonResponse): Holds the success flag
        """
        try:
            success = self.serializer.delete_entry(id, self.user)
        except Exception as e:
            return self._return_error_response(e, 500)
        return JsonResponse(
            {
                "success": success,
            }
        )
|
||||
|
||||
|
||||
class InterventionAPIViewV1(AbstractAPIViewV1):
    # v1 API endpoint for Intervention entries
    serializer = InterventionAPISerializerV1


class CompensationAPIViewV1(AbstractAPIViewV1):
    # v1 API endpoint for Compensation entries
    serializer = CompensationAPISerializerV1


class EcoAccountAPIViewV1(AbstractAPIViewV1):
    # v1 API endpoint for EcoAccount entries
    serializer = EcoAccountAPISerializerV1


class EmaAPIViewV1(AbstractAPIViewV1):
    # v1 API endpoint for Ema entries
    serializer = EmaAPISerializerV1


class DeductionAPIViewV1(AbstractAPIViewV1):
    # v1 API endpoint for deduction entries
    serializer = DeductionAPISerializerV1
|
358
api/views/views.py
Normal file
358
api/views/views.py
Normal file
@ -0,0 +1,358 @@
|
||||
"""
|
||||
Author: Michel Peltriaux
|
||||
Organization: Struktur- und Genehmigungsdirektion Nord, Rhineland-Palatinate, Germany
|
||||
Contact: michel.peltriaux@sgdnord.rlp.de
|
||||
Created on: 21.01.22
|
||||
|
||||
"""
|
||||
import json
|
||||
|
||||
from django.db.models import QuerySet
|
||||
from django.http import JsonResponse, HttpRequest
|
||||
from django.views import View
|
||||
from django.views.decorators.csrf import csrf_exempt
|
||||
|
||||
from api.models import APIUserToken
|
||||
from api.settings import KSP_TOKEN_HEADER_IDENTIFIER, KSP_USER_HEADER_IDENTIFIER
|
||||
from compensation.models import EcoAccount
|
||||
from ema.models import Ema
|
||||
from intervention.models import Intervention
|
||||
from konova.utils.message_templates import DATA_UNSHARED
|
||||
from user.models import User, Team
|
||||
|
||||
|
||||
class AbstractAPIView(View):
    """ Base class for API views

    Handles token authentication in dispatch() and provides helpers for
    JSON success/error responses with pagination metadata.

    The API must follow the GeoJSON Specification RFC 7946
    https://geojson.org/
    https://datatracker.ietf.org/doc/html/rfc7946
    """
    user = None          # Resolved API user, set during dispatch()
    serializer = None    # Serializer class, set by subclasses
    rpp = 5  # Results per page default
    page_number = 1  # Page number default

    class Meta:
        abstract = True

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Per-instance response skeleton filled by _return_response()
        self.response_body_base = {
            "rpp": None,
            "p": None,
            "next": None,
            "results": None
        }

    @csrf_exempt
    def dispatch(self, request, *args, **kwargs):
        """ Authenticates the request via KSP headers or a Bearer token before dispatching.

        Any PermissionError becomes a 403 JSON error response.
        """
        try:
            # Fetch the proper user from the given request header token
            token = request.headers.get(KSP_TOKEN_HEADER_IDENTIFIER, None)
            ksp_user = request.headers.get(KSP_USER_HEADER_IDENTIFIER, None)

            if not token and not ksp_user:
                # Fall back to the standard "Authorization: Bearer <token>" header
                bearer_token = request.headers.get("authorization", None)
                if not bearer_token:
                    raise PermissionError("No token provided")
                token = bearer_token.split(" ")[1]

            token_user = APIUserToken.get_user_from_token(token)
            if ksp_user and ksp_user != token_user.username:
                # Provided username must match the token's owner
                raise PermissionError(f"Invalid token for {ksp_user}")
            self.user = token_user

            request.user = self.user
            if not self.user.is_default_user():
                raise PermissionError("Default permissions required")
        except PermissionError as e:
            return self._return_error_response(e, 403)
        return super().dispatch(request, *args, **kwargs)

    def _return_error_response(self, error, status_code=500):
        """ Returns an error as JsonResponse

        Args:
            error (): The error/exception
            status_code (): The desired status code

        Returns:
            response (JsonResponse): {"errors": [...]} with the given status
        """
        content = [error.__str__()]
        if hasattr(error, "messages"):
            # Django ValidationError style: use its message list instead
            content = error.messages
        return JsonResponse(
            {
                "errors": content
            },
            status=status_code
        )

    def _return_response(self, request: HttpRequest, data):
        """ Returns all important data into a response object

        Adds pagination metadata (rpp, p, next-page URL) around the payload.

        Args:
            request (HttpRequest): The incoming request
            data (dict): The serialized data

        Returns:
            response (JsonResponse): The response to be returned
        """
        response = self.response_body_base
        next_page = self.page_number + 1
        # Only link a next page if the paginator actually has one
        next_page = next_page if next_page in self.serializer.paginator.page_range else None
        if next_page is not None:
            next_url = request.build_absolute_uri(
                request.path + f"?rpp={self.rpp}&p={next_page}"
            )
        else:
            next_url = None
        response["rpp"] = self.rpp
        response["p"] = self.page_number
        response["next"] = next_url
        response["results"] = data
        return JsonResponse(response)
|
||||
|
||||
|
||||
class InterventionCheckAPIView(AbstractAPIView):
    """ Runs the quality check for an intervention and its compensations (zb users only). """

    def get(self, request: HttpRequest, id):
        """ Takes the GET request

        Args:
            request (HttpRequest): The incoming request
            id (str): The intervention's id

        Returns:
            response (JsonResponse): Holds the overall result and per-object error details
        """
        if not self.user.is_zb_user():
            return self._return_error_response("Permission not granted", 403)
        try:
            obj = Intervention.objects.get(
                id=id,
                users__in=[self.user]
            )
        except Exception as e:
            return self._return_error_response(e)

        all_valid, check_details = self.run_quality_checks(obj)

        if all_valid:
            # Only a fully valid intervention gets marked as checked
            log_entry = obj.set_checked(self.user)
            obj.log.add(log_entry)

        data = {
            "success": all_valid,
            "details": check_details
        }
        return JsonResponse(data)

    def run_quality_checks(self, obj: Intervention) -> (bool, dict):
        """ Performs a check for intervention and related compensations

        Args:
            obj (Intervention): The intervention

        Returns:
            all_valid (bool): Whether an error occurred or not
            check_details (dict): A dict containing details on which elements have errors
        """
        # Run quality check for Intervention
        all_valid = True
        intervention_checker = obj.quality_check()
        all_valid = intervention_checker.valid and all_valid

        # Run quality checks for linked compensations
        comps = obj.compensations.all()
        comp_checkers = []
        for comp in comps:
            comp_checker = comp.quality_check()
            comp_checkers.append(comp_checker)
            all_valid = comp_checker.valid and all_valid

        check_details = {
            "intervention": {
                "id": obj.id,
                "errors": intervention_checker.messages
            },
            "compensations": [
                {
                    "id": comp_checker.obj.id,
                    "errors": comp_checker.messages
                }
                for comp_checker in comp_checkers
            ]
        }
        return all_valid, check_details
|
||||
|
||||
|
||||
class AbstractModelShareAPIView(AbstractAPIView):
    """ Base view for reading and replacing the shared users/teams of an entry. """
    model = None  # Set by subclasses (Intervention, EcoAccount, Ema)

    class Meta:
        abstract = True

    def get(self, request: HttpRequest, id):
        """ Performs the GET request handling

        Args:
            request (HttpRequest): The incoming request
            id (str): The object's id

        Returns:
            response (JsonResponse): Shared usernames and teams of the object
        """
        try:
            users = self._get_shared_users_of_object(id)
            teams = self._get_shared_teams_of_object(id)
        except Exception as e:
            return self._return_error_response(e)

        data = {
            "users": [
                user.username for user in users
            ],
            "teams": [
                {
                    "id": team.id,
                    "name": team.name,
                }
                for team in teams
            ],
        }

        return JsonResponse(data)

    def put(self, request: HttpRequest, id):
        """ Performs the PUT request handling

        Args:
            request (HttpRequest): The incoming request
            id (str): The object's id

        Returns:
            response (JsonResponse): Holds the success flag
        """
        try:
            success = self._process_put_body(request.body, id)
        except Exception as e:
            return self._return_error_response(e)
        data = {
            "success": success,
        }
        return JsonResponse(data)

    def _check_user_has_shared_access(self, obj):
        """ Raises a PermissionError if user has no shared access

        Args:
            obj (BaseObject): The object

        Returns:
            None

        Raises:
            PermissionError: If the object is not shared with self.user
        """
        is_shared = obj.is_shared_with(self.user)
        if not is_shared:
            raise PermissionError(DATA_UNSHARED)

    def _get_shared_users_of_object(self, id) -> QuerySet:
        """ Check permissions and get the users

        Args:
            id (str): The object's id

        Returns:
            users (QuerySet)
        """
        obj = self.model.objects.get(
            id=id
        )
        self._check_user_has_shared_access(obj)
        users = obj.shared_users
        return users

    def _get_shared_teams_of_object(self, id) -> QuerySet:
        """ Check permissions and get the teams

        Args:
            id (str): The object's id

        Returns:
            teams (QuerySet)
        """
        obj = self.model.objects.get(
            id=id
        )
        self._check_user_has_shared_access(obj)
        teams = obj.shared_teams
        return teams

    def _process_put_body(self, body: bytes, id: str):
        """ Reads the body data, performs validity checks and sets the new users

        Args:
            body (bytes): The request.body
            id (str): The object's id

        Returns:
            success (bool)

        Raises:
            ValueError: If the provided user list is empty
        """
        obj = self.model.objects.get(id=id)
        self._check_user_has_shared_access(obj)

        content = json.loads(body.decode("utf-8"))
        new_users = content.get("users", [])
        if len(new_users) == 0:
            raise ValueError("Shared user list must not be empty!")
        new_teams = content.get("teams", [])

        # Eliminate duplicates
        new_users = list(dict.fromkeys(new_users))
        new_teams = list(dict.fromkeys(new_teams))

        # Make sure each of these names exist as a user
        new_users_objs = []
        for user in new_users:
            new_users_objs.append(User.objects.get(username=user))

        # Make sure each of these names exist as a team
        new_teams_objs = []
        for team_name in new_teams:
            new_teams_objs.append(Team.objects.get(name=team_name))

        if self.user.is_default_group_only():
            # Default only users are not allowed to remove other users from having access. They can only add new ones!
            new_users_to_be_added = User.objects.filter(
                username__in=new_users
            ).exclude(
                id__in=obj.shared_users
            )
            new_users_objs = obj.shared_users.union(new_users_to_be_added)

            new_teams_to_be_added = Team.objects.filter(
                name__in=new_teams
            ).exclude(
                id__in=obj.shared_teams
            )
            new_teams_objs = obj.shared_teams.union(new_teams_to_be_added)

        obj.share_with_user_list(new_users_objs)
        obj.share_with_team_list(new_teams_objs)
        return True
|
||||
|
||||
|
||||
class InterventionAPIShareView(AbstractModelShareAPIView):
    # Share management endpoint for Intervention entries
    model = Intervention


class EcoAccountAPIShareView(AbstractModelShareAPIView):
    # Share management endpoint for EcoAccount entries
    model = EcoAccount


class EmaAPIShareView(AbstractModelShareAPIView):
    # Share management endpoint for Ema entries
    model = Ema
|
0
codelist/__init__.py
Normal file
0
codelist/__init__.py
Normal file
54
codelist/admin.py
Normal file
54
codelist/admin.py
Normal file
@ -0,0 +1,54 @@
|
||||
from django.contrib import admin
|
||||
|
||||
from codelist.models import KonovaCode, KonovaCodeList
|
||||
|
||||
|
||||
class KonovaCodeListAdmin(admin.ModelAdmin):
|
||||
list_display = [
|
||||
"id",
|
||||
]
|
||||
readonly_fields = [
|
||||
"id",
|
||||
"codes",
|
||||
]
|
||||
|
||||
|
||||
class KonovaCodeAdmin(admin.ModelAdmin):
    """Admin integration for KonovaCode.

    Every model field is exposed read-only; codes are maintained by the
    codelist management commands rather than edited by hand.
    """
    list_display = [
        "id",
        "atom_id",
        "parent",
        "short_name",
        "long_name",
        "is_leaf",
        "is_selectable",
        "is_archived",
    ]

    readonly_fields = [
        "id",
        "short_name",
        "long_name",
        "is_archived",
        "is_selectable",
        "is_leaf",
        "parent",
        "found_in_codelists",
    ]

    search_fields = [
        "id",
        "atom_id",
        "long_name",
        "short_name",
    ]

    def found_in_codelists(self, obj):
        """Return the ids of all KonovaCodeLists containing this code,
        one per line, for display on the read-only detail page."""
        list_ids = KonovaCodeList.objects.filter(
            codes__in=[obj]
        ).values_list("id", flat=True)
        return "\n".join(str(list_id) for list_id in list_ids)
|
||||
|
||||
# Registration of the codelist container is currently disabled; only
# individual codes are exposed in the admin.
#admin.site.register(KonovaCodeList, KonovaCodeListAdmin)
admin.site.register(KonovaCode, KonovaCodeAdmin)
|
5
codelist/apps.py
Normal file
5
codelist/apps.py
Normal file
@ -0,0 +1,5 @@
|
||||
from django.apps import AppConfig
|
||||
|
||||
|
||||
class CodelistConfig(AppConfig):
    """Django application configuration for the codelist app."""
    name = 'codelist'
|
7
codelist/autocomplete/__init__.py
Normal file
7
codelist/autocomplete/__init__.py
Normal file
@ -0,0 +1,7 @@
|
||||
"""
|
||||
Author: Michel Peltriaux
|
||||
Organization: Struktur- und Genehmigungsdirektion Nord, Rhineland-Palatinate, Germany
|
||||
Contact: ksp-servicestelle@sgdnord.rlp.de
|
||||
Created on: 18.08.22
|
||||
|
||||
"""
|
74
codelist/autocomplete/base.py
Normal file
74
codelist/autocomplete/base.py
Normal file
@ -0,0 +1,74 @@
|
||||
"""
|
||||
Author: Michel Peltriaux
|
||||
Organization: Struktur- und Genehmigungsdirektion Nord, Rhineland-Palatinate, Germany
|
||||
Contact: ksp-servicestelle@sgdnord.rlp.de
|
||||
Created on: 18.08.22
|
||||
|
||||
"""
|
||||
from dal_select2.views import Select2GroupQuerySetView
|
||||
from django.db.models import Q
|
||||
|
||||
from codelist.models import KonovaCode
|
||||
|
||||
|
||||
class KonovaCodeAutocomplete(Select2GroupQuerySetView):
    """
    Provides simple autocomplete functionality for codes

    Parameter support:
        * q: Search for a word inside long_name of a code
        * c: Search inside a special codelist
    """
    paginate_by = 50
    # Codelist id to restrict the search to. Subclasses assign self.c in
    # their __init__(); the class-level default keeps get_queryset() from
    # raising AttributeError if the base view is used directly.
    c = None

    def order_by(self, qs):
        """ Orders by a predefined value

        Wrapped in a function to provide inheritance-based different orders

        Args:
            qs (QuerySet): The queryset to be ordered

        Returns:
            qs (QuerySet): The ordered queryset
        """
        return qs.order_by(
            "long_name"
        )

    def get_queryset(self):
        """ Returns selectable, non-archived leaf codes matching the request

        Applies the optional codelist restriction (self.c) and the search
        term (self.q) supplied by the widget.

        Returns:
            qs (QuerySet): The filtered queryset
        """
        # Anonymous users must not read any codes
        if self.request.user.is_anonymous:
            return KonovaCode.objects.none()
        qs = KonovaCode.objects.filter(
            is_archived=False,
            is_selectable=True,
            is_leaf=True,
        )
        qs = self.order_by(qs)
        if self.c:
            qs = qs.filter(
                code_lists__in=[self.c]
            )
        if self.q:
            # split() without an argument collapses consecutive whitespace, so
            # no empty (match-everything) keywords are produced; dict.fromkeys
            # removes duplicate keywords while preserving input order.
            keywords = dict.fromkeys(self.q.split())
            # Every keyword must match (AND); each keyword may match in any of
            # the name fields up to two parent levels (OR).
            _filter = Q()
            for keyword in keywords:
                q_or = Q()
                q_or |= Q(long_name__icontains=keyword)
                q_or |= Q(short_name__icontains=keyword)
                q_or |= Q(parent__long_name__icontains=keyword)
                q_or |= Q(parent__short_name__icontains=keyword)
                q_or |= Q(parent__parent__long_name__icontains=keyword)
                q_or |= Q(parent__parent__short_name__icontains=keyword)
                _filter &= q_or
            # distinct() because joins over parent relations can duplicate rows
            qs = qs.filter(_filter).distinct()
        return qs

    def get_result_label(self, result):
        """Label shown for an entry in the dropdown list."""
        return f"{result.long_name}"

    def get_selected_result_label(self, result):
        """Label shown for an already selected entry."""
        return f"{result.__str__()}"
|
114
codelist/autocomplete/biotope.py
Normal file
114
codelist/autocomplete/biotope.py
Normal file
@ -0,0 +1,114 @@
|
||||
"""
|
||||
Author: Michel Peltriaux
|
||||
Organization: Struktur- und Genehmigungsdirektion Nord, Rhineland-Palatinate, Germany
|
||||
Contact: ksp-servicestelle@sgdnord.rlp.de
|
||||
Created on: 18.08.22
|
||||
|
||||
"""
|
||||
import collections
|
||||
|
||||
from django.core.exceptions import ImproperlyConfigured
|
||||
|
||||
from codelist.settings import CODELIST_BIOTOPES_ID, \
|
||||
CODELIST_BIOTOPES_EXTRA_CODES_FULL_ID
|
||||
from codelist.autocomplete.base import KonovaCodeAutocomplete
|
||||
from konova.utils.message_templates import UNGROUPED
|
||||
|
||||
|
||||
class BiotopeCodeAutocomplete(KonovaCodeAutocomplete):
    """
    Due to limitations of the django dal package, we need to subclass for each code list
    """
    group_by_related = "parent"
    related_field_name = "long_name"

    def __init__(self, *args, **kwargs):
        # Restrict lookups of the base view to the biotope codelist
        self.c = CODELIST_BIOTOPES_ID
        super().__init__(*args, **kwargs)

    def order_by(self, qs):
        """ Orders by a predefined value

        Wrapped in a function to provide inheritance-based different orders

        Args:
            qs (QuerySet): The queryset to be ordered

        Returns:
            qs (QuerySet): The ordered queryset
        """
        return qs.order_by(
            "short_name",
        )

    def get_result_label(self, result):
        # Show the short name next to the long name in the dropdown
        return f"{result.long_name} ({result.short_name})"

    def get_results(self, context):
        """Return the options grouped by a common related model.

        Builds a two-level nesting: results are grouped by their parent code,
        and those groups again by the parent's parent ("super group").
        Results without a parent land in the UNGROUPED bucket on both levels.

        Raises ImproperlyConfigured if self.group_by_related is not configured
        """
        if not self.group_by_related:
            raise ImproperlyConfigured("Missing group_by_related.")

        # OrderedDict keeps the (already ordered) queryset order stable
        super_groups = collections.OrderedDict()

        object_list = context['object_list']

        for result in object_list:
            group = result.parent if result.parent else None
            group_name = f"{group.long_name} ({group.short_name})" if group else UNGROUPED
            super_group = result.parent.parent if result.parent else None
            super_group_name = f"{super_group.long_name} ({super_group.short_name})" if super_group else UNGROUPED
            super_groups.setdefault(super_group_name, {})
            super_groups[super_group_name].setdefault(group_name, [])
            super_groups[super_group_name][group_name].append(result)

        # Nested structure for the widget: entries with id=None presumably act
        # as non-selectable group headers; only the innermost children carry
        # real result ids.
        return [{
            'id': None,
            'text': super_group,
            'children': [{
                "id": None,
                "text": group,
                "children": [{
                    'id': self.get_result_value(result),
                    'text': self.get_result_label(result),
                    'selected_text': self.get_selected_result_label(result),
                } for result in results]
            } for group, results in groups.items()]
        } for super_group, groups in super_groups.items()]
||||
|
||||
|
||||
class BiotopeExtraCodeAutocomplete(KonovaCodeAutocomplete):
    """Autocomplete for the extra biotope codes list.

    One subclass per code list is required due to limitations of the
    django dal package.
    """
    group_by_related = "parent"
    related_field_name = "short_name"
    paginate_by = 200

    def __init__(self, *args, **kwargs):
        # Narrow the base view down to the full extra biotope codelist
        self.c = CODELIST_BIOTOPES_EXTRA_CODES_FULL_ID
        super().__init__(*args, **kwargs)

    def order_by(self, qs):
        """Order the results alphabetically by their short name.

        Args:
            qs (QuerySet): The queryset to be ordered

        Returns:
            qs (QuerySet): The ordered queryset
        """
        return qs.order_by("short_name")

    def get_result_label(self, result):
        """Dropdown label: long name with the short name appended."""
        long_name = result.long_name
        short_name = result.short_name
        return f"{long_name} ({short_name})"

    def get_selected_result_label(self, result):
        """Selected-entry label: prefixed with the parent's short name."""
        parent = result.parent
        return f"{parent.short_name} > {result.long_name} ({result.short_name})"
|
45
codelist/autocomplete/compensation_action.py
Normal file
45
codelist/autocomplete/compensation_action.py
Normal file
@ -0,0 +1,45 @@
|
||||
"""
|
||||
Author: Michel Peltriaux
|
||||
Organization: Struktur- und Genehmigungsdirektion Nord, Rhineland-Palatinate, Germany
|
||||
Contact: ksp-servicestelle@sgdnord.rlp.de
|
||||
Created on: 18.08.22
|
||||
|
||||
"""
|
||||
from codelist.settings import CODELIST_COMPENSATION_ACTION_ID, CODELIST_COMPENSATION_ACTION_DETAIL_ID
|
||||
from codelist.autocomplete.base import KonovaCodeAutocomplete
|
||||
|
||||
|
||||
class CompensationActionCodeAutocomplete(KonovaCodeAutocomplete):
    """
    Due to limitations of the django dal package, we need to subclass for each code list
    """
    group_by_related = "parent"
    related_field_name = "long_name"

    def __init__(self, *args, **kwargs):
        # Restrict lookups of the base view to the compensation action codelist
        self.c = CODELIST_COMPENSATION_ACTION_ID
        super().__init__(*args, **kwargs)

    def order_by(self, qs):
        # Sort by the parent's long name so entries of one group stay together
        return qs.order_by(
            "parent__long_name"
        )
|
||||
|
||||
|
||||
class CompensationActionDetailCodeAutocomplete(KonovaCodeAutocomplete):
    """Autocomplete for compensation action detail codes.

    One subclass per code list is required due to limitations of the
    django dal package.
    """
    group_by_related = "parent"
    related_field_name = "long_name"
    paginate_by = 200

    def __init__(self, *args, **kwargs):
        # Narrow the base view down to the action detail codelist
        self.c = CODELIST_COMPENSATION_ACTION_DETAIL_ID
        super().__init__(*args, **kwargs)

    def order_by(self, qs):
        """Order the results by their own long name."""
        return qs.order_by("long_name")
|
||||
|
24
codelist/autocomplete/handler.py
Normal file
24
codelist/autocomplete/handler.py
Normal file
@ -0,0 +1,24 @@
|
||||
"""
|
||||
Author: Michel Peltriaux
|
||||
Organization: Struktur- und Genehmigungsdirektion Nord, Rhineland-Palatinate, Germany
|
||||
Contact: ksp-servicestelle@sgdnord.rlp.de
|
||||
Created on: 18.08.22
|
||||
|
||||
"""
|
||||
from codelist.settings import CODELIST_HANDLER_ID
|
||||
from codelist.autocomplete.base import KonovaCodeAutocomplete
|
||||
|
||||
|
||||
class HandlerCodeAutocomplete(KonovaCodeAutocomplete):
    """
    Due to limitations of the django dal package, we need to subclass for each code list
    """
    group_by_related = "parent"
    related_field_name = "long_name"

    def __init__(self, *args, **kwargs):
        # Restrict lookups of the base view to the handler codelist
        self.c = CODELIST_HANDLER_ID
        super().__init__(*args, **kwargs)

    def get_result_label(self, result):
        # Unlike most sibling autocompletes, no short_name suffix is shown here
        return result.long_name
|
24
codelist/autocomplete/law.py
Normal file
24
codelist/autocomplete/law.py
Normal file
@ -0,0 +1,24 @@
|
||||
"""
|
||||
Author: Michel Peltriaux
|
||||
Organization: Struktur- und Genehmigungsdirektion Nord, Rhineland-Palatinate, Germany
|
||||
Contact: ksp-servicestelle@sgdnord.rlp.de
|
||||
Created on: 18.08.22
|
||||
|
||||
"""
|
||||
from codelist.settings import CODELIST_LAW_ID
|
||||
from codelist.autocomplete.base import KonovaCodeAutocomplete
|
||||
|
||||
|
||||
class LawCodeAutocomplete(KonovaCodeAutocomplete):
    """
    Due to limitations of the django dal package, we need to subclass for each code list
    """
    group_by_related = "parent"
    related_field_name = "long_name"

    def __init__(self, *args, **kwargs):
        # Restrict lookups of the base view to the law codelist
        self.c = CODELIST_LAW_ID
        super().__init__(*args, **kwargs)

    def get_result_label(self, result):
        # Show the law's short name (abbreviation) next to the long name
        return f"{result.long_name} ({result.short_name})"
|
41
codelist/autocomplete/office.py
Normal file
41
codelist/autocomplete/office.py
Normal file
@ -0,0 +1,41 @@
|
||||
"""
|
||||
Author: Michel Peltriaux
|
||||
Organization: Struktur- und Genehmigungsdirektion Nord, Rhineland-Palatinate, Germany
|
||||
Contact: ksp-servicestelle@sgdnord.rlp.de
|
||||
Created on: 18.08.22
|
||||
|
||||
"""
|
||||
from codelist.settings import CODELIST_CONSERVATION_OFFICE_ID, CODELIST_REGISTRATION_OFFICE_ID
|
||||
from codelist.autocomplete.base import KonovaCodeAutocomplete
|
||||
|
||||
|
||||
class RegistrationOfficeCodeAutocomplete(KonovaCodeAutocomplete):
    """
    Due to limitations of the django dal package, we need to subclass for each code list
    """
    group_by_related = "parent"
    related_field_name = "long_name"

    def __init__(self, *args, **kwargs):
        # Restrict lookups of the base view to the registration office codelist
        self.c = CODELIST_REGISTRATION_OFFICE_ID
        super().__init__(*args, **kwargs)

    def order_by(self, qs):
        # Sort by the parent's long name so offices of one parent stay together
        return qs.order_by(
            "parent__long_name"
        )
|
||||
|
||||
|
||||
class ConservationOfficeCodeAutocomplete(KonovaCodeAutocomplete):
    """Autocomplete for conservation office codes.

    One subclass per code list is required due to limitations of the
    django dal package.
    """
    group_by_related = "parent"
    related_field_name = "long_name"

    def __init__(self, *args, **kwargs):
        # Narrow the base view down to the conservation office codelist
        self.c = CODELIST_CONSERVATION_OFFICE_ID
        super().__init__(*args, **kwargs)

    def get_result_label(self, result):
        """Dropdown label: long name with the short name appended."""
        label = f"{result.long_name} ({result.short_name})"
        return label
|
21
codelist/autocomplete/process_type.py
Normal file
21
codelist/autocomplete/process_type.py
Normal file
@ -0,0 +1,21 @@
|
||||
"""
|
||||
Author: Michel Peltriaux
|
||||
Organization: Struktur- und Genehmigungsdirektion Nord, Rhineland-Palatinate, Germany
|
||||
Contact: ksp-servicestelle@sgdnord.rlp.de
|
||||
Created on: 18.08.22
|
||||
|
||||
"""
|
||||
from codelist.autocomplete.base import KonovaCodeAutocomplete
|
||||
from codelist.settings import CODELIST_PROCESS_TYPE_ID
|
||||
|
||||
|
||||
class ProcessTypeCodeAutocomplete(KonovaCodeAutocomplete):
    """
    Due to limitations of the django dal package, we need to subclass for each code list
    """
    group_by_related = "parent"
    related_field_name = "long_name"

    def __init__(self, *args, **kwargs):
        # Restrict lookups of the base view to the process type codelist
        self.c = CODELIST_PROCESS_TYPE_ID
        super().__init__(*args, **kwargs)
|
68
codelist/management/commands/export_codelist.py
Normal file
68
codelist/management/commands/export_codelist.py
Normal file
@ -0,0 +1,68 @@
|
||||
"""
|
||||
Author: Michel Peltriaux
|
||||
Organization: Struktur- und Genehmigungsdirektion Nord, Rhineland-Palatinate, Germany
|
||||
Contact: michel.peltriaux@sgdnord.rlp.de
|
||||
Created on: 10.11.21
|
||||
|
||||
"""
|
||||
import csv
|
||||
|
||||
from codelist.models import KonovaCodeList
|
||||
from konova.management.commands.setup import BaseKonovaCommand
|
||||
|
||||
|
||||
class Command(BaseKonovaCommand):
    """Management command exporting one codelist to a delimited text file."""
    help = "Exports a single list of internal codes. Codelist identifier must be provided as argument"
    # Names of the positional CLI arguments (also the keys in `options`)
    list_id = 'list_id'
    save_to = 'save_to'

    def add_arguments(self, parser):
        """Register the two positional arguments.

        Both are required positionals, so argparse itself reports missing or
        non-integer values before handle() runs — no extra error handling is
        needed here (the former try/except ValueError was dead code).
        """
        parser.add_argument(self.list_id, type=int)
        parser.add_argument(self.save_to, type=str)

    def handle(self, *args, **options):
        """Fetch the selectable codes of the requested list and write them out.

        Writes a space-delimited file (csv.writer with delimiter=' ') with a
        header row followed by one row per code; parent columns are empty for
        top-level codes.
        """
        try:
            list_id = options[self.list_id]
            # Keep the output path in a local; do not clobber self.save_to,
            # which holds the option *key*, with the option *value*.
            save_to_path = options[self.save_to]
            self._write_warning("Fetching codes...")
            code_list = KonovaCodeList.objects.get(
                id=list_id,
            )
            codes = code_list.codes.filter(
                is_selectable=True,
            ).order_by(
                "parent"
            )
            header_row = [
                "Parent long name",
                "Parent short name",
                "Code long name",
                "Code short name",
                "Code ID",
            ]
            with open(save_to_path, 'w', newline='') as csvfile:
                writer = csv.writer(
                    csvfile,
                    delimiter=' ',
                    quoting=csv.QUOTE_MINIMAL,
                )
                writer.writerow(header_row)
                for code in codes:
                    if code.parent is not None:
                        row = [code.parent.long_name, code.parent.short_name, code.long_name, code.short_name, code.id]
                    else:
                        # Top-level code: no parent columns
                        row = ["", "", code.long_name, code.short_name, code.id]
                    writer.writerow(row)

        except KeyboardInterrupt:
            self._break_line()
            exit(-1)
|
165
codelist/management/commands/sync_codelist.py
Normal file
165
codelist/management/commands/sync_codelist.py
Normal file
@ -0,0 +1,165 @@
|
||||
"""
|
||||
Author: Michel Peltriaux
|
||||
Organization: Struktur- und Genehmigungsdirektion Nord, Rhineland-Palatinate, Germany
|
||||
Contact: michel.peltriaux@sgdnord.rlp.de
|
||||
Created on: 31.05.22
|
||||
|
||||
"""
|
||||
|
||||
from django.db import transaction
|
||||
|
||||
from codelist.models import KonovaCode
|
||||
from compensation.models import CompensationAction, CompensationState
|
||||
from intervention.models import Legal, Handler, Responsibility
|
||||
from konova.management.commands.setup import BaseKonovaCommand
|
||||
|
||||
|
||||
class Command(BaseKonovaCommand):
    """Management command deduplicating KonovaCode entries.

    Repoints all code references in the project models to the newest
    duplicate of each code, then deletes the obsolete duplicates.
    """
    help = "Updates internal codelist by external API"

    def handle(self, *args, **options):
        # Run the whole sync atomically so a failure leaves the codelist untouched
        try:
            with transaction.atomic():
                self.sync_codelist()
        except KeyboardInterrupt:
            self._break_line()
            exit(-1)

    def __get_newest_code(self, code):
        """ Returns the newest stored duplicate of the given code

        Duplicates share atom_id, parent and codelists but differ in id;
        the highest id is treated as the newest entry.

        Args:
            code (KonovaCode): The (possibly outdated) code

        Returns:
            code (KonovaCode): The newest duplicate, or None if no match
                exists (e.g. a code linked to no codelist at all)
        """
        code = KonovaCode.objects.filter(
            atom_id=code.atom_id,
            parent=code.parent,
            code_lists__in=code.code_lists.all(),
        ).order_by(
            "-id"
        ).first()
        return code

    def __migrate_compensation_action_codes(self):
        """ Repoints action_type and action_type_details of all
        CompensationActions to the newest code entries.

        Returns:
            used_codes (list): Codes that are in use after the migration
        """
        all_actions = CompensationAction.objects.all()
        used_codes = []
        for action in all_actions:
            stored_codes = action.action_type.all()
            codes = [self.__get_newest_code(code) for code in stored_codes]
            action.action_type.set(codes)
            used_codes += codes

            stored_codes = action.action_type_details.all()
            codes = [self.__get_newest_code(code) for code in stored_codes]
            action.action_type_details.set(codes)
            used_codes += codes

            action.save()
        return used_codes

    def __migrate_compensation_state_codes(self):
        """ Repoints biotope_type and biotope_type_details of all
        CompensationStates to the newest code entries.

        Returns:
            used_codes (list): Codes that are in use after the migration
        """
        all_states = CompensationState.objects.all()
        used_codes = []
        for state in all_states:
            code = state.biotope_type
            if code is not None:
                new_code = self.__get_newest_code(code)
                state.biotope_type = new_code
                used_codes.append(new_code)

            stored_codes = state.biotope_type_details.all()
            codes = [self.__get_newest_code(code) for code in stored_codes]
            state.biotope_type_details.set(codes)

            used_codes += codes
            state.save()
        return used_codes

    def __migrate_legal_codes(self):
        """ Repoints process_type and laws of all Legals to the newest
        code entries.

        Returns:
            used_codes (list): Codes that are in use after the migration
        """
        all_legal = Legal.objects.all()
        used_codes = []
        for legal in all_legal:
            code = legal.process_type
            if code is not None:
                new_code = self.__get_newest_code(code)
                legal.process_type = new_code
                used_codes.append(new_code)

            stored_codes = legal.laws.all()
            codes = [self.__get_newest_code(code) for code in stored_codes]
            legal.laws.set(codes)

            used_codes += codes
            legal.save()
        return used_codes

    def __migrate_handler_codes(self):
        """ Repoints type of all Handlers to the newest code entries.

        Fixed: the instance parameter was misnamed 'apps' (leftover from a
        data migration); it is the regular instance reference.

        Returns:
            used_codes (list): Codes that are in use after the migration
        """
        all_handlers = Handler.objects.all()
        used_codes = []
        for handler in all_handlers:
            code = handler.type
            if code is None:
                continue
            new_code = self.__get_newest_code(code)
            handler.type = new_code
            used_codes.append(new_code)
            handler.save()
        return used_codes

    def __migrate_responsibility_codes(self):
        """ Repoints registration_office and conservation_office of all
        Responsibilities to the newest code entries.

        Fixed: the instance parameter was misnamed 'apps' (leftover from a
        data migration); it is the regular instance reference.

        Returns:
            used_codes (list): Codes that are in use after the migration
        """
        all_resps = Responsibility.objects.all()
        used_codes = []
        for responsibility in all_resps:
            code = responsibility.registration_office
            if code is not None:
                new_code = self.__get_newest_code(code)
                responsibility.registration_office = new_code
                used_codes.append(new_code)

            code = responsibility.conservation_office
            if code is not None:
                new_code = self.__get_newest_code(code)
                responsibility.conservation_office = new_code
                used_codes.append(new_code)

            responsibility.save()
        return used_codes

    def sync_codelist(self):
        """ Due to issues on the external codelist app there can be multiple entries of the same code
        (atom_id, parent, list) but with different identifiers.

        These issues have been resolved externally; this command repoints all
        stored references to the newest duplicate of each code and deletes
        the obsolete duplicates afterwards.

        Returns:

        """
        self._write_warning("Sync codes in usage and replace by newest entries...")
        used_codes = []
        used_codes += self.__migrate_compensation_action_codes()
        used_codes += self.__migrate_compensation_state_codes()
        used_codes += self.__migrate_legal_codes()
        used_codes += self.__migrate_handler_codes()
        used_codes += self.__migrate_responsibility_codes()
        self._write_success(f"Synced {len(used_codes)} code usages!")

        all_codes = KonovaCode.objects.all()
        newest_code_ids = []
        for code in all_codes:
            newest_code = self.__get_newest_code(code)
            # NOTE(review): __get_newest_code() returns None for codes linked
            # to no codelist, which would crash on .id here — confirm every
            # code belongs to at least one list.
            newest_code_ids.append(newest_code.id)

        code_ids_to_keep = set(newest_code_ids)
        self._write_warning(f"Of {all_codes.count()} KonovaCodes there are {len(code_ids_to_keep)} to keep as newest versions...")

        deletable_codes = KonovaCode.objects.all().exclude(
            id__in=code_ids_to_keep
        )
        deletable_codes_count = deletable_codes.count()
        self._write_warning(f"{deletable_codes_count} found which are obsolete...")
        if deletable_codes_count > 0:
            deletable_codes.delete()
            self._write_success("Obsolete codes deleted!")
|
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue
Block a user