From ffbf12acf48e8fe212f0dbb111be92c05aa7db32 Mon Sep 17 00:00:00 2001
From: Jonas Huber
Date: Wed, 12 Jul 2023 19:08:33 +0200
Subject: [PATCH 1/7] add initial structure #1277

---
 docs/Makefile                |  192 ------------
 docs/make.bat                |  263 -----------------
 docs/source/api.rst          |  279 ------------------
 docs/source/api/advanced.rst |  312 --------------------
 docs/source/api/how_to.rst   |  551 -----------------------------------
 docs/source/api/scenario.rst |   33 ---
 docs/source/base.rst         |   18 --
 docs/source/conf.py          |  333 ---------------------
 docs/source/dataedit.rst     |    6 -
 docs/source/errors.rst       |   11 -
 docs/source/index.rst        |  119 --------
 docs/source/login.rst        |    6 -
 docs/source/modelview.rst    |    6 -
 13 files changed, 2129 deletions(-)
 delete mode 100644 docs/Makefile
 delete mode 100755 docs/make.bat
 delete mode 100644 docs/source/api.rst
 delete mode 100644 docs/source/api/advanced.rst
 delete mode 100644 docs/source/api/how_to.rst
 delete mode 100644 docs/source/api/scenario.rst
 delete mode 100644 docs/source/base.rst
 delete mode 100644 docs/source/conf.py
 delete mode 100644 docs/source/dataedit.rst
 delete mode 100644 docs/source/errors.rst
 delete mode 100644 docs/source/index.rst
 delete mode 100644 docs/source/login.rst
 delete mode 100644 docs/source/modelview.rst

diff --git a/docs/Makefile b/docs/Makefile
deleted file mode 100644
index 60beb7c00..000000000
--- a/docs/Makefile
+++ /dev/null
@@ -1,192 +0,0 @@
-# Makefile for Sphinx documentation
-#
-
-# You can set these variables from the command line.
-SPHINXOPTS    =
-SPHINXBUILD   = sphinx-build
-PAPER         =
-BUILDDIR      = build
-
-# User-friendly check for sphinx-build
-ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1)
-$(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/)
-endif
-
-# Internal variables.
-PAPEROPT_a4     = -D latex_paper_size=a4
-PAPEROPT_letter = -D latex_paper_size=letter
-ALLSPHINXOPTS   = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) source
-# the i18n builder cannot share the environment and doctrees with the others
-I18NSPHINXOPTS  = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) source
-
-.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest coverage gettext
-
-help:
-	@echo "Please use \`make <target>' where <target> is one of"
-	@echo "  html        to make standalone HTML files"
-	@echo "  dirhtml     to make HTML files named index.html in directories"
-	@echo "  singlehtml  to make a single large HTML file"
-	@echo "  pickle      to make pickle files"
-	@echo "  json        to make JSON files"
-	@echo "  htmlhelp    to make HTML files and a HTML help project"
-	@echo "  qthelp      to make HTML files and a qthelp project"
-	@echo "  applehelp   to make an Apple Help Book"
-	@echo "  devhelp     to make HTML files and a Devhelp project"
-	@echo "  epub        to make an epub"
-	@echo "  latex       to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
-	@echo "  latexpdf    to make LaTeX files and run them through pdflatex"
-	@echo "  latexpdfja  to make LaTeX files and run them through platex/dvipdfmx"
-	@echo "  text        to make text files"
-	@echo "  man         to make manual pages"
-	@echo "  texinfo     to make Texinfo files"
-	@echo "  info        to make Texinfo files and run them through makeinfo"
-	@echo "  gettext     to make PO message catalogs"
-	@echo "  changes     to make an overview of all changed/added/deprecated items"
-	@echo "  xml         to make Docutils-native XML files"
-	@echo "  pseudoxml   to make pseudoxml-XML files for display purposes"
-	@echo "  linkcheck   to check all external links for integrity"
-	@echo "  doctest     to run all doctests embedded in the documentation (if enabled)"
-	@echo "  coverage    to run coverage check of the documentation (if enabled)"
-
-clean:
-	rm -rf $(BUILDDIR)/*
-
-html:
-	$(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
-	@echo
-	@echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
-
-dirhtml:
-	$(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
-	@echo
-	@echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."
-
-singlehtml:
-	$(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
-	@echo
-	@echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."
-
-pickle:
-	$(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
-	@echo
-	@echo "Build finished; now you can process the pickle files."
-
-json:
-	$(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
-	@echo
-	@echo "Build finished; now you can process the JSON files."
-
-htmlhelp:
-	$(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
-	@echo
-	@echo "Build finished; now you can run HTML Help Workshop with the" \
-	      ".hhp project file in $(BUILDDIR)/htmlhelp."
-
-qthelp:
-	$(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
-	@echo
-	@echo "Build finished; now you can run "qcollectiongenerator" with the" \
-	      ".qhcp project file in $(BUILDDIR)/qthelp, like this:"
-	@echo "# qcollectiongenerator $(BUILDDIR)/qthelp/OpenEnergyPlatform.qhcp"
-	@echo "To view the help file:"
-	@echo "# assistant -collectionFile $(BUILDDIR)/qthelp/OpenEnergyPlatform.qhc"
-
-applehelp:
-	$(SPHINXBUILD) -b applehelp $(ALLSPHINXOPTS) $(BUILDDIR)/applehelp
-	@echo
-	@echo "Build finished. The help book is in $(BUILDDIR)/applehelp."
-	@echo "N.B. You won't be able to view it unless you put it in" \
-	      "~/Library/Documentation/Help or install it in your application" \
-	      "bundle."
-
-devhelp:
-	$(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
-	@echo
-	@echo "Build finished."
-	@echo "To view the help file:"
-	@echo "# mkdir -p $$HOME/.local/share/devhelp/OpenEnergyPlatform"
-	@echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/OpenEnergyPlatform"
-	@echo "# devhelp"
-
-epub:
-	$(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub
-	@echo
-	@echo "Build finished. The epub file is in $(BUILDDIR)/epub."
-
-latex:
-	$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
-	@echo
-	@echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
-	@echo "Run \`make' in that directory to run these through (pdf)latex" \
-	      "(use \`make latexpdf' here to do that automatically)."
-
-latexpdf:
-	$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
-	@echo "Running LaTeX files through pdflatex..."
-	$(MAKE) -C $(BUILDDIR)/latex all-pdf
-	@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
-
-latexpdfja:
-	$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
-	@echo "Running LaTeX files through platex and dvipdfmx..."
-	$(MAKE) -C $(BUILDDIR)/latex all-pdf-ja
-	@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
-
-text:
-	$(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text
-	@echo
-	@echo "Build finished. The text files are in $(BUILDDIR)/text."
-
-man:
-	$(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man
-	@echo
-	@echo "Build finished. The manual pages are in $(BUILDDIR)/man."
-
-texinfo:
-	$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
-	@echo
-	@echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo."
-	@echo "Run \`make' in that directory to run these through makeinfo" \
-	      "(use \`make info' here to do that automatically)."
-
-info:
-	$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
-	@echo "Running Texinfo files through makeinfo..."
-	make -C $(BUILDDIR)/texinfo info
-	@echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo."
-
-gettext:
-	$(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale
-	@echo
-	@echo "Build finished. The message catalogs are in $(BUILDDIR)/locale."
-
-changes:
-	$(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
-	@echo
-	@echo "The overview file is in $(BUILDDIR)/changes."
-
-linkcheck:
-	$(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
-	@echo
-	@echo "Link check complete; look for any errors in the above output " \
-	      "or in $(BUILDDIR)/linkcheck/output.txt."
-
-doctest:
-	$(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
-	@echo "Testing of doctests in the sources finished, look at the " \
-	      "results in $(BUILDDIR)/doctest/output.txt."
-
-coverage:
-	$(SPHINXBUILD) -b coverage $(ALLSPHINXOPTS) $(BUILDDIR)/coverage
-	@echo "Testing of coverage in the sources finished, look at the " \
-	      "results in $(BUILDDIR)/coverage/python.txt."
-
-xml:
-	$(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml
-	@echo
-	@echo "Build finished. The XML files are in $(BUILDDIR)/xml."
-
-pseudoxml:
-	$(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml
-	@echo
-	@echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml."
diff --git a/docs/make.bat b/docs/make.bat deleted file mode 100755 index 77c5d6801..000000000 --- a/docs/make.bat +++ /dev/null @@ -1,263 +0,0 @@ -@ECHO OFF - -REM Command file for Sphinx documentation - -if "%SPHINXBUILD%" == "" ( - set SPHINXBUILD=sphinx-build -) -set BUILDDIR=build -set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% source -set I18NSPHINXOPTS=%SPHINXOPTS% source -if NOT "%PAPER%" == "" ( - set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS% - set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS% -) - -if "%1" == "" goto help - -if "%1" == "help" ( - :help - echo.Please use `make ^` where ^ is one of - echo. html to make standalone HTML files - echo. dirhtml to make HTML files named index.html in directories - echo. singlehtml to make a single large HTML file - echo. pickle to make pickle files - echo. json to make JSON files - echo. htmlhelp to make HTML files and a HTML help project - echo. qthelp to make HTML files and a qthelp project - echo. devhelp to make HTML files and a Devhelp project - echo. epub to make an epub - echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter - echo. text to make text files - echo. man to make manual pages - echo. texinfo to make Texinfo files - echo. gettext to make PO message catalogs - echo. changes to make an overview over all changed/added/deprecated items - echo. xml to make Docutils-native XML files - echo. pseudoxml to make pseudoxml-XML files for display purposes - echo. linkcheck to check all external links for integrity - echo. doctest to run all doctests embedded in the documentation if enabled - echo. coverage to run coverage check of the documentation if enabled - goto end -) - -if "%1" == "clean" ( - for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i - del /q /s %BUILDDIR%\* - goto end -) - - -REM Check if sphinx-build is available and fallback to Python version if any -%SPHINXBUILD% 2> nul -if errorlevel 9009 goto sphinx_python -goto sphinx_ok - -:sphinx_python - -set SPHINXBUILD=python -m sphinx.__init__ -%SPHINXBUILD% 2> nul -if errorlevel 9009 ( - echo. - echo.The 'sphinx-build' command was not found. Make sure you have Sphinx - echo.installed, then set the SPHINXBUILD environment variable to point - echo.to the full path of the 'sphinx-build' executable. Alternatively you - echo.may add the Sphinx directory to PATH. - echo. - echo.If you don't have Sphinx installed, grab it from - echo.http://sphinx-doc.org/ - exit /b 1 -) - -:sphinx_ok - - -if "%1" == "html" ( - %SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. The HTML pages are in %BUILDDIR%/html. - goto end -) - -if "%1" == "dirhtml" ( - %SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml. - goto end -) - -if "%1" == "singlehtml" ( - %SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml. - goto end -) - -if "%1" == "pickle" ( - %SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle - if errorlevel 1 exit /b 1 - echo. - echo.Build finished; now you can process the pickle files. - goto end -) - -if "%1" == "json" ( - %SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json - if errorlevel 1 exit /b 1 - echo. - echo.Build finished; now you can process the JSON files. 
- goto end -) - -if "%1" == "htmlhelp" ( - %SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp - if errorlevel 1 exit /b 1 - echo. - echo.Build finished; now you can run HTML Help Workshop with the ^ -.hhp project file in %BUILDDIR%/htmlhelp. - goto end -) - -if "%1" == "qthelp" ( - %SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp - if errorlevel 1 exit /b 1 - echo. - echo.Build finished; now you can run "qcollectiongenerator" with the ^ -.qhcp project file in %BUILDDIR%/qthelp, like this: - echo.^> qcollectiongenerator %BUILDDIR%\qthelp\OpenEnergyPlatform.qhcp - echo.To view the help file: - echo.^> assistant -collectionFile %BUILDDIR%\qthelp\OpenEnergyPlatform.ghc - goto end -) - -if "%1" == "devhelp" ( - %SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. - goto end -) - -if "%1" == "epub" ( - %SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. The epub file is in %BUILDDIR%/epub. - goto end -) - -if "%1" == "latex" ( - %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex - if errorlevel 1 exit /b 1 - echo. - echo.Build finished; the LaTeX files are in %BUILDDIR%/latex. - goto end -) - -if "%1" == "latexpdf" ( - %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex - cd %BUILDDIR%/latex - make all-pdf - cd %~dp0 - echo. - echo.Build finished; the PDF files are in %BUILDDIR%/latex. - goto end -) - -if "%1" == "latexpdfja" ( - %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex - cd %BUILDDIR%/latex - make all-pdf-ja - cd %~dp0 - echo. - echo.Build finished; the PDF files are in %BUILDDIR%/latex. - goto end -) - -if "%1" == "text" ( - %SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. The text files are in %BUILDDIR%/text. - goto end -) - -if "%1" == "man" ( - %SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. The manual pages are in %BUILDDIR%/man. - goto end -) - -if "%1" == "texinfo" ( - %SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo. - goto end -) - -if "%1" == "gettext" ( - %SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. The message catalogs are in %BUILDDIR%/locale. - goto end -) - -if "%1" == "changes" ( - %SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes - if errorlevel 1 exit /b 1 - echo. - echo.The overview file is in %BUILDDIR%/changes. - goto end -) - -if "%1" == "linkcheck" ( - %SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck - if errorlevel 1 exit /b 1 - echo. - echo.Link check complete; look for any errors in the above output ^ -or in %BUILDDIR%/linkcheck/output.txt. - goto end -) - -if "%1" == "doctest" ( - %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest - if errorlevel 1 exit /b 1 - echo. - echo.Testing of doctests in the sources finished, look at the ^ -results in %BUILDDIR%/doctest/output.txt. - goto end -) - -if "%1" == "coverage" ( - %SPHINXBUILD% -b coverage %ALLSPHINXOPTS% %BUILDDIR%/coverage - if errorlevel 1 exit /b 1 - echo. - echo.Testing of coverage in the sources finished, look at the ^ -results in %BUILDDIR%/coverage/python.txt. - goto end -) - -if "%1" == "xml" ( - %SPHINXBUILD% -b xml %ALLSPHINXOPTS% %BUILDDIR%/xml - if errorlevel 1 exit /b 1 - echo. 
- echo.Build finished. The XML files are in %BUILDDIR%/xml. - goto end -) - -if "%1" == "pseudoxml" ( - %SPHINXBUILD% -b pseudoxml %ALLSPHINXOPTS% %BUILDDIR%/pseudoxml - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. The pseudo-XML files are in %BUILDDIR%/pseudoxml. - goto end -) - -:end diff --git a/docs/source/api.rst b/docs/source/api.rst deleted file mode 100644 index 616cba0bc..000000000 --- a/docs/source/api.rst +++ /dev/null @@ -1,279 +0,0 @@ -===================== -Data interface (REST) -===================== - - -Data Structures -=============== - -Constraint Definition ---------------------- - -:constraint_definition (Dictonary): - Specifies a definition of a constraint. - * ``action`` Action of constraint (e.g. ADD, DROP) - * ``constraint_type`` Type of constraint (e.g. UNIQUE, PRIMARY KEY, FOREIGN KEY) - * ``constraint_name`` Name of constraint. - * ``constraint_parameter`` Parameter of constraint. - * ``reference_table`` Name of reference table, can be None. - * ``reference_column`` Name of reference column, can be None. - -Column Definition ------------------ - -:column_definition (Dictonary): - Specifies a definition of a column. - * ``name`` Name of column. - * ``new_name`` New name of column, can be None. - * ``data_type`` New datatype of column, can be None. - * ``is_nullable`` New null value of column, can be None. - * ``character_maximum_length`` New data length of column, can be None. - -Response Definition -------------------- - -:response_dictonary (Dictonary): - Describes the result of an api action. - * ``success (Boolean)`` Result of Action - * ``error (String)`` Error Message - * ``http_status (Integer)`` HTTP status code (https://en.wikipedia.org/wiki/List_of_HTTP_status_codes) - - -Table (RESTful) -=============== - -URL: /schema/{schema}/table/{table} - -GET ---- - -Reference needed. - -PUT ---- - -Creates a new table in database. -JSON should contain a constraint definition array and a column definition array. - -Example: - -.. code-block:: json - - { - "constraints": [ - { - "constraint_type": "FOREIGN KEY", - "constraint_name": "fkey_schema_table_database_id", - "constraint_parameter": "database_id", - "reference_table": "example.table", - "reference_column": "database_id_ref" - }, - { - "constraint_type": "PRIMARY KEY", - "constraint_name": "pkey_schema_table_id", - "constraint_parameter": "id", - "reference_table": null, - "reference_column": null - } - ], - "columns": [ - { - "name": "id", - "data_type": "int", - "is_nullable": "YES", - "character_maximum_length": null - }, - { - "name": "name", - "data_type": "character varying", - "is_nullable": "NO", - "character_maximum_length": 50 - } - ] - } - -POST ----- - -JSON should contain a column or constraint definition. -Additionally ``action`` and ``type`` should be mentioned. - -- ``type`` can be ``constraint`` or ``column``. -- ``action`` can be ``ADD`` and ``DROP``. -- ``constraint_type`` can be every constraint type supported by Postgres. -- ``reference_table`` and ``reference_column`` can be null, if not necessary. - -Example: - -.. 
code-block:: json - - { - "type" : "constraint", - "action": "ADD", - "constraint_type": "FOREIGN KEY", - "constraint_name": "fkey_label", - "constraint_parameter": "changed_name", - "reference_table" : "reference.group_types", - "reference_column" : "label" - } - - { - "type" : "column", - "name" : "test_name", - "newname" : "changed_name", - "data_type": "character varying", - "is_nullable": "NO", - "character_maximum_length": 50 - } - - -Rows (RESTful) -============== - -GET ---- - -URL: :code:`/schema//tables//rows/` - -You can use this part to get information from the database. - -You can specify the following parameters in the url: - * ``columns (List)`` List of selected columns, e.g. :code:`id,name` - * ``where (List)`` List of where clauses, e.g. :code:`id+OPERATOR+1+CONNECTOR+name+OPERATOR+georg` - * OPERATORS could be EQUAL, GREATER, LOWER, NOTEQUAL, NOTGREATER, NOTLOWER - * CONNECTORS could be AND, OR - * ``orderby (List)`` List of order columns, e.g. :code:`name,code` - * ``limit (Number)`` Number of displayed items, e.g. :code:`100` - * ``offset (Number)`` Number of offset from start, e.g. :code:`10` - -================ -Deprecated Stuff -================ - -Create a table -============== - -.. [#idpk] The OEP is currently only supporting a non-compound integer primary - key labeled 'id'. Violation of this constraint might render the OEP unable to - display the data stored in this table. - - -Dictionary structure --------------------- - -:schema (String): - Specifies the schema name the table should be created in. If this - schema does not exist it will be created. - -:table (String): - Specifies the name of the table to be created. - -:fields (List): - List specifying the columns of the new table (see `Field specification`_). - -:constraints (List): - List of additional constraints (see `Constraint specification`_). - - -Field specification -------------------- - -:name (String): - Name of the field - -:type (String): - Name of a valid `Postgresql type `_ - -:pk (Bool): - Specifies whether this column is a primary key. Be aware - of [#idpk]_ - -Constraint specification ------------------------- - -Args: - :name (String): - Type of constraint. Possible values: - - * ``fk`` (see `Foreign key specification`_) - :constraint (Dictionary): - Dictionary as specified by the foreign key. - - -Foreign key specification -------------------------- - -:schema (String): - Name of the schema the referenced table is stored in - -:table (String): - Name of the referenced table - -:field (String): - Name of the referenced column - -:on_delete (String): - Specifies the behaviour if this field is deleted. Possible values: - - * ``cascade`` - * ``no action`` - * ``restrict`` - * ``set null`` - * ``set default`` - - -Insert data -=========== - -:schema (String): - Specifies the schema name the table should be created in. If this - schema does not exist it will be created. - -:table (String): - Specifies the name of the table to be created. - -:fields (List): - List specifying the column names the date should be inserted in. - -:values (List): - Each element is a list of values that should be inserted. The number - of elements must match the number of fields. - -:returning (Bool): - An expression that is evaluated and returned as result. If this - entry is present the result of this expression is returned as in - `Select Data`_. 
- - -Select data -=========== - -:all (Bool): - Specifies whether all rows should be returned (default) - -:distinct (Bool): - Specifies whether only unique rows should be returned - -:fields (List): - The list of columns that should be returned (see select_field_spec_) - -:where (List): - The list of condition that should be considered (see select_condition_spec_) - -:limit (Integer or 'all'): - Specifies how many results should be returned. If 'all' - is set all matching rows will be returned (default). - -:offset (Integer): - Specifies how many entries should be skipped before returning - data - - -Binding the API to python -========================= - -.. automodule:: api.views - :members: - -.. automodule:: api.actions - :members: diff --git a/docs/source/api/advanced.rst b/docs/source/api/advanced.rst deleted file mode 100644 index 96e564fb8..000000000 --- a/docs/source/api/advanced.rst +++ /dev/null @@ -1,312 +0,0 @@ -********************* -Advanced API features -********************* - -.. testsetup:: - - import os - from oeplatform import securitysettings as sec - oep_url = 'http://localhost:8000' - your_token = os.environ.get("LOCAL_OEP_TOKEN") - if your_token is None: - if hasattr(sec, "token_test_user") and sec.token_test_user is not None: - your_token = sec.token_test_user - else: - raise Exception("No token available, please set LOCAL_OEP_TOKEN or adapt your security settings") - from shapely import wkt - import json - -.. doctest:: - - >>> import requests - >>> data = { "query": { "columns": [ { "name":"id", "data_type": "bigserial", "is_nullable": "NO" },{ "name":"name", "data_type": "varchar", "character_maximum_length": "50" },{ "name":"geom", "data_type": "geometry(point)" } ], "constraints": [ { "constraint_type": "PRIMARY KEY", "constraint_parameter": "id" } ] } } - >>> requests.put(oep_url+'/api/v0/schema/sandbox/tables/example_table/', json=data, headers={'Authorization': 'Token %s'%your_token} ) - - >>> data = {"query": [{"id": i, "name": "John Doe"+str(i), "geom":"SRID=32140;POINT(0 %d)"%i} for i in range(10)]} - >>> requests.post(oep_url+'/api/v0/schema/sandbox/tables/example_table/rows/new', json=data, headers={'Authorization': 'Token %s'%your_token} ) - - -The basic REST-API as described in :doc:`how_to` freatures functionalities for -simple CRUD-tasks. - -This may be sufficient for data manipulation, but the underlying Database -Management System features a much richer environment for data select scripts. - -You can issue a POST-request to the URL `advanced/v0/search`. The actual query -described as a JSON string inside this request. This page will describe the -general make-up of this JSON structure. - -Syntax Specification -==================== - -.. _select-objects: - -Select with from ----------------- - -A query object **MUST** contain - - * :code:`from` : A :ref:`From item ` - -It **MAY** contain the folowing directives. If not present, they will be -replaced by the stated defaults: - - * :code:`distrinct`: A boolean specifying wheter a *SELECT DISTINCT* (see https://www.postgresql.org/docs/9.5/static/sql-select.html#SQL-DISTINCT ). 
(Possible values: :code:`true` | :code:`false`, default: :code:`false`) - * :code:`fields`: List of :ref:`Expressions ` (If not present, will be interpreted as :code:`*`), that **MAY** contain the following additional fields: - * :code:`as`: A string - * :code:`where`: A list of :ref:`Expressions ` that return a truth value (default: []) - * :code:`group_by`: List of :ref:`Expressions ` (default: []) - * :code:`having`: A list of :ref:`Expressions ` that return a truth value (default: []) - * :code:`select`: A list of dictionaries that **MUST** contain: - * :code:`query`: A :ref:`Select object ` - * :code:`type`: :code:`union` | :code:`intersect` | :code:`except` - * :code:`order_by`: List of :ref:`Expressions ` (default: []), that **MAY** contain the following additional fields: - * :code:`ordering`: :code:`asc` | :code:`desc` (default: :code:`asc`) - * :code:`limit`: Integer - * :code:`offset`: Integer - - -.. _expression-objects: - -Expressions ------------ - -An expression object **MUST** contain: - * :code:`type`: A string as specified below - -The depending on the :code:`type` the dictionary may have a a different structure: - * :code:`column`: A column expression **MUST** contain the following fields: - * :code:`column`: Name of the column - * :code:`grouping`: A grouping expression **MUST** contain the following fields: - * :code:`grouping`: A list of :ref:`Expressions ` - * :code:`operator`: An operator expression **MUST** contain the following fields: - * :code:`operator`: A string consisting of one of the following operators: - * Unary operators: :code:`NOT` - * Binary operators: :code:`EQUALS` | :code:`=` :code:`GREATER` | :code:`>` | :code:`LOWER` | :code:`<` | :code:`NOTEQUAL` | :code:`<>` | :code:`!=` | :code:`NOTGREATER` | :code:`<=` | :code:`NOTLOWER` | :code:`>=` - * n-ary operators: :code:`AND` | :code:`OR` - * :code:`operands`: A list of :ref:`Expressions ` - * :code:`function`: A function expression **MUST** contain the following fields: - * :code:`function`: The name of the function. All functions implemented in sqlalchemy and geoalchemy are available. - * :code:`operands`: A list of :ref:`Expressions ` - * :code:`value`: A constant value - -.. _from-objects: - -From items ----------- - -A from object **MUST** contain: - * :code:`type`: A string as specified below - -The depending on the :code:`type` the dictionary may have a a different structure: - * :code:`table`: A table item **MUST** contain the following fields: - * :code:`table`: Name of the table - A table item **MAY** contain the following fields: - * :code:`schema`: Name of the schema - * :code:`only`: :code:`true` | :code:`false` (default: :code:`false`) - * :code:`select`: A select item **MUST** contain the following fields: - * :code:`query`: A :ref:`Select object ` - * :code:`join`: A join item **MUST** contain the following fields: - * :code:`left`: A :ref:`From item ` - * :code:`right`: A :ref:`From item ` - A join item **MAY** contain the following fields: - * :code:`is_outer`: :code:`true` | :code:`false` (default: :code:`false`) - * :code:`is_full`: :code:`true` | :code:`false` (default: :code:`false`) - * :code:`on`: An :ref:`Expression ` that returns a truth value - -Each from item **MAY** contain the following fields regardless of its type: - * :code:`alias`: An alias for this item - -.. _condition-objects: - -Condition items ---------------- - -Condition can come in two different fashions: - -1. A single :ref:`Expression ` -2. 
A list of :ref:`Expression ` - - -Compound selects ----------------- - - It is also possible to query compound selects (i.e. UNION, EXCEPT, - INTERSECT) via the API - - * :code:`keyword`: Specifies wheter the respective command should be added - to the query. Possible Values: :code:`union`, :code:`except`, :code:`intersect` - * :code:`selects`: A list of sub-queries used in the compound of one of two - types, identified by their type: - * :code:`type`: Possible values: :code:`grouping` | :code:`select` - Depending on this type this impies an additional mandatory field: - * :code:`grouping`: A list of :ref:`Select object ` - that are grouped together - * :code:`select`: A single :ref:`Select object ` - - -Examples -======== - -For starters we will issue a simple request to check which data is available. In order to do so, -we use the following query:: - - { - "fields":[ - "id", - "name" - ], - "from":{ - 'type': 'table', - 'table': 'example_table', - 'schema':"sandbox" - } - } - - -.. doctest:: - - >>> import requests - >>> data = { "query": {"fields": ["id", "name"], "from":{'type': 'table', 'table': 'example_table', 'schema':"sandbox"}}} - >>> response = requests.post(oep_url+'/api/v0/advanced/search', json=data ) - >>> response.status_code - 200 - >>> response.json().get('data') - [[0, 'John Doe0'], [1, 'John Doe1'], [2, 'John Doe2'], [3, 'John Doe3'], [4, 'John Doe4'], [5, 'John Doe5'], [6, 'John Doe6'], [7, 'John Doe7'], [8, 'John Doe8'], [9, 'John Doe9']] - -In order to get all entries with an id less than 3, we could extend above query -by a where clause:: - - 'where': { - 'operands': [ - { - 'type': 'column', - 'column':'id' - }, - 3 - ], - 'operator': '<', - 'type': 'operator' - } - - - - -.. doctest:: - - >>> import requests - >>> data = { "query": {"fields": ["id", "name"], "from":{'type': 'table', 'table': 'example_table', 'schema':"sandbox"}, 'where': {'operands': [{'type': 'column', 'column':'id'}, 3], 'operator': '<', 'type': 'operator'} }} - >>> response = requests.post(oep_url+'/api/v0/advanced/search', json=data) - >>> response.status_code - 200 - >>> response.json().get('data') - [[0, 'John Doe0'], [1, 'John Doe1'], [2, 'John Doe2']] - -You can add several conditons as a list. Those will be interpreted as a conjunction: - -.. doctest:: - - >>> import requests - >>> data = { "query": {"fields": ["id", "name"], "from":{'type': 'table', 'table': 'example_table', 'schema':"sandbox"}, 'where': [{'operands': [{'type': 'column', 'column':'id'}, 3], 'operator': '<', 'type': 'operator'}, {'operands': [{'type': 'column', 'column':'id'}, 1], 'operator': '>', 'type': 'operator'} ] }} - >>> response = requests.post(oep_url+'/api/v0/advanced/search', json=data) - >>> response.status_code - 200 - >>> response.json().get('data') - [[2, 'John Doe2']] - -Functions ---------- - -You can also alter all functions that are implemented in sqlalchemy and -geoalchemy2 to alter the results of your query. In the following example we -simply add two to every id: - -.. 
doctest:: - - >>> import requests - >>> data = { "query": {"fields": ['id', {'type': 'function', 'function': '+', 'operands':[{'type': 'column', 'column': 'id'}, 2]}], "from":{'type': 'table', 'table': 'example_table', 'schema':"sandbox"}}} - >>> response = requests.post(oep_url+'/api/v0/advanced/search', json=data) - >>> response.status_code - 200 - >>> response.json().get('data') - [[0, 2], [1, 3], [2, 4], [3, 5], [4, 6], [5, 7], [6, 8], [7, 9], [8, 10], [9, 11]] - -Functions are especially usefull if you want to return geodata in a specific -format. In the following we obtain the WKT representation of our data: - -.. doctest:: - - >>> import requests - >>> data = { "query": {"fields": ['id', {'type': 'function', 'function': 'ST_AsText', 'operands':[{'type': 'column', 'column': 'geom'}]}], "from":{'type': 'table', 'table': 'example_table', 'schema':"sandbox"}}} - >>> response = requests.post(oep_url+'/api/v0/advanced/search', json=data) - >>> response.status_code - 200 - >>> data = response.json().get('data') - >>> data[0] - [0, 'POINT(0 0)'] - >>> all(geom == 'POINT(0 %d)'%pid for pid, geom in data) - True - -... or the geoJSON representation ... - -.. doctest:: - - >>> import requests - >>> data = { "query": {"fields": ['id', {'type': 'function', 'function': 'ST_AsGeoJSON', 'operands':[{'type': 'column', 'column': 'geom'}, 4236]}], "from":{'type': 'table', 'table': 'example_table', 'schema':"sandbox"}}} - >>> response = requests.post(oep_url+'/api/v0/advanced/search', json=data) - >>> response.status_code - 200 - >>> data = response.json().get('data') - >>> data[0] - [0, '{"type":"Point","coordinates":[0,0]}'] - >>> all(pid == json.loads(geom)['coordinates'][1] for pid, geom in data) - True - -Joins ------ - -Joins can be queried by using the corresponding from-item:: - - { - "from":{ - 'type': 'join', - 'left': { - 'type': 'table', - 'table': 'example_table', - 'schema':"sandbox", - "alias":"a" - }, - 'right': { - 'type': 'table', - 'table': 'example_table', - 'schema':"sandbox", - "alias":"b" - }, - 'on': { - 'operands': [ - {'type': 'column', 'column':'id', 'table': 'a'}, - {'type': 'column', 'column':'id', 'table': 'b'} - ], - 'operator': '<', - 'type': 'operator' - } - } - } - - -.. 
doctest:: - - >>> import requests - >>> data = { "query": {"from":{'type': 'join','left': {'type': 'table', 'table': 'example_table', 'schema':"sandbox", "alias":"a"},'right': {'type': 'table', 'table': 'example_table', 'schema':"sandbox", "alias":"b"},'on': {'operands': [{'type': 'column', 'column':'id', 'table': 'a', 'schema':'sandbox',}, {'type': 'column', 'column':'id', 'table': 'b', 'schema':'sandbox',}], 'operator': '<', 'type': 'operator'}}}} - >>> response = requests.post(oep_url+'/api/v0/advanced/search', json=data) - >>> response.status_code - 200 - >>> response.json().get('data') - [[0, 'John Doe0', '01010000208C7D000000000000000000000000000000000000', 1, 'John Doe1', '01010000208C7D00000000000000000000000000000000F03F'], [0, 'John Doe0', '01010000208C7D000000000000000000000000000000000000', 2, 'John Doe2', '01010000208C7D000000000000000000000000000000000040'], [0, 'John Doe0', '01010000208C7D000000000000000000000000000000000000', 3, 'John Doe3', '01010000208C7D000000000000000000000000000000000840'], [0, 'John Doe0', '01010000208C7D000000000000000000000000000000000000', 4, 'John Doe4', '01010000208C7D000000000000000000000000000000001040'], [0, 'John Doe0', '01010000208C7D000000000000000000000000000000000000', 5, 'John Doe5', '01010000208C7D000000000000000000000000000000001440'], [0, 'John Doe0', '01010000208C7D000000000000000000000000000000000000', 6, 'John Doe6', '01010000208C7D000000000000000000000000000000001840'], [0, 'John Doe0', '01010000208C7D000000000000000000000000000000000000', 7, 'John Doe7', '01010000208C7D000000000000000000000000000000001C40'], [0, 'John Doe0', '01010000208C7D000000000000000000000000000000000000', 8, 'John Doe8', '01010000208C7D000000000000000000000000000000002040'], [0, 'John Doe0', '01010000208C7D000000000000000000000000000000000000', 9, 'John Doe9', '01010000208C7D000000000000000000000000000000002240'], [1, 'John Doe1', '01010000208C7D00000000000000000000000000000000F03F', 2, 'John Doe2', '01010000208C7D000000000000000000000000000000000040'], [1, 'John Doe1', '01010000208C7D00000000000000000000000000000000F03F', 3, 'John Doe3', '01010000208C7D000000000000000000000000000000000840'], [1, 'John Doe1', '01010000208C7D00000000000000000000000000000000F03F', 4, 'John Doe4', '01010000208C7D000000000000000000000000000000001040'], [1, 'John Doe1', '01010000208C7D00000000000000000000000000000000F03F', 5, 'John Doe5', '01010000208C7D000000000000000000000000000000001440'], [1, 'John Doe1', '01010000208C7D00000000000000000000000000000000F03F', 6, 'John Doe6', '01010000208C7D000000000000000000000000000000001840'], [1, 'John Doe1', '01010000208C7D00000000000000000000000000000000F03F', 7, 'John Doe7', '01010000208C7D000000000000000000000000000000001C40'], [1, 'John Doe1', '01010000208C7D00000000000000000000000000000000F03F', 8, 'John Doe8', '01010000208C7D000000000000000000000000000000002040'], [1, 'John Doe1', '01010000208C7D00000000000000000000000000000000F03F', 9, 'John Doe9', '01010000208C7D000000000000000000000000000000002240'], [2, 'John Doe2', '01010000208C7D000000000000000000000000000000000040', 3, 'John Doe3', '01010000208C7D000000000000000000000000000000000840'], [2, 'John Doe2', '01010000208C7D000000000000000000000000000000000040', 4, 'John Doe4', '01010000208C7D000000000000000000000000000000001040'], [2, 'John Doe2', '01010000208C7D000000000000000000000000000000000040', 5, 'John Doe5', '01010000208C7D000000000000000000000000000000001440'], [2, 'John Doe2', '01010000208C7D000000000000000000000000000000000040', 6, 'John Doe6', 
'01010000208C7D000000000000000000000000000000001840'], [2, 'John Doe2', '01010000208C7D000000000000000000000000000000000040', 7, 'John Doe7', '01010000208C7D000000000000000000000000000000001C40'], [2, 'John Doe2', '01010000208C7D000000000000000000000000000000000040', 8, 'John Doe8', '01010000208C7D000000000000000000000000000000002040'], [2, 'John Doe2', '01010000208C7D000000000000000000000000000000000040', 9, 'John Doe9', '01010000208C7D000000000000000000000000000000002240'], [3, 'John Doe3', '01010000208C7D000000000000000000000000000000000840', 4, 'John Doe4', '01010000208C7D000000000000000000000000000000001040'], [3, 'John Doe3', '01010000208C7D000000000000000000000000000000000840', 5, 'John Doe5', '01010000208C7D000000000000000000000000000000001440'], [3, 'John Doe3', '01010000208C7D000000000000000000000000000000000840', 6, 'John Doe6', '01010000208C7D000000000000000000000000000000001840'], [3, 'John Doe3', '01010000208C7D000000000000000000000000000000000840', 7, 'John Doe7', '01010000208C7D000000000000000000000000000000001C40'], [3, 'John Doe3', '01010000208C7D000000000000000000000000000000000840', 8, 'John Doe8', '01010000208C7D000000000000000000000000000000002040'], [3, 'John Doe3', '01010000208C7D000000000000000000000000000000000840', 9, 'John Doe9', '01010000208C7D000000000000000000000000000000002240'], [4, 'John Doe4', '01010000208C7D000000000000000000000000000000001040', 5, 'John Doe5', '01010000208C7D000000000000000000000000000000001440'], [4, 'John Doe4', '01010000208C7D000000000000000000000000000000001040', 6, 'John Doe6', '01010000208C7D000000000000000000000000000000001840'], [4, 'John Doe4', '01010000208C7D000000000000000000000000000000001040', 7, 'John Doe7', '01010000208C7D000000000000000000000000000000001C40'], [4, 'John Doe4', '01010000208C7D000000000000000000000000000000001040', 8, 'John Doe8', '01010000208C7D000000000000000000000000000000002040'], [4, 'John Doe4', '01010000208C7D000000000000000000000000000000001040', 9, 'John Doe9', '01010000208C7D000000000000000000000000000000002240'], [5, 'John Doe5', '01010000208C7D000000000000000000000000000000001440', 6, 'John Doe6', '01010000208C7D000000000000000000000000000000001840'], [5, 'John Doe5', '01010000208C7D000000000000000000000000000000001440', 7, 'John Doe7', '01010000208C7D000000000000000000000000000000001C40'], [5, 'John Doe5', '01010000208C7D000000000000000000000000000000001440', 8, 'John Doe8', '01010000208C7D000000000000000000000000000000002040'], [5, 'John Doe5', '01010000208C7D000000000000000000000000000000001440', 9, 'John Doe9', '01010000208C7D000000000000000000000000000000002240'], [6, 'John Doe6', '01010000208C7D000000000000000000000000000000001840', 7, 'John Doe7', '01010000208C7D000000000000000000000000000000001C40'], [6, 'John Doe6', '01010000208C7D000000000000000000000000000000001840', 8, 'John Doe8', '01010000208C7D000000000000000000000000000000002040'], [6, 'John Doe6', '01010000208C7D000000000000000000000000000000001840', 9, 'John Doe9', '01010000208C7D000000000000000000000000000000002240'], [7, 'John Doe7', '01010000208C7D000000000000000000000000000000001C40', 8, 'John Doe8', '01010000208C7D000000000000000000000000000000002040'], [7, 'John Doe7', '01010000208C7D000000000000000000000000000000001C40', 9, 'John Doe9', '01010000208C7D000000000000000000000000000000002240'], [8, 'John Doe8', '01010000208C7D000000000000000000000000000000002040', 9, 'John Doe9', '01010000208C7D000000000000000000000000000000002240']] - - -.. 
testcleanup:: - - import requests - response = requests.delete(oep_url+'/api/v0/schema/sandbox/tables/example_table', headers={'Authorization': 'Token %s'%your_token} ) - assert response.status_code == 200, response diff --git a/docs/source/api/how_to.rst b/docs/source/api/how_to.rst deleted file mode 100644 index f38fa4326..000000000 --- a/docs/source/api/how_to.rst +++ /dev/null @@ -1,551 +0,0 @@ -************************************* -How to work with the API - An example -************************************* - -.. testsetup:: - - import os - from oeplatform import securitysettings as sec - oep_url = 'http://localhost:8000' - your_token = os.environ.get("LOCAL_OEP_TOKEN") - if your_token is None: - if hasattr(sec, "token_test_user") and sec.token_test_user is not None: - your_token = sec.token_test_user - else: - raise Exception("No token available, please set LOCAL_OEP_TOKEN or adapt your security settings") - -.. note:: - - The API is enable for the following schmemas only: - - * model_draft - * sandbox - - -Authenticate -============ - -The OpenEnergy Platform API uses token authentication. Each user has a unique -token assigned to it that will be used as an authentication password. You can -access you token by visiting you profile on the OEP. In order to issue PUT or -POST request you have to include this token in the *Authorization*-field of -your request: - -* Authorization: Token *your-token* - - -Create table -============ - -We want to create the following table with primary key `id`: - -+-----------------+-------------------+-----------------------+ -| *id*: bigserial | name: varchar(50) | geom: geometry(Point) | -+===========+===================+=======================+ -| | | | -+-----------------+-------------------+-----------------------+ - -In order to do so, we send the following PUT request:: - - PUT https://openenergy-platform.org/api/v0/schema/sandbox/tables/example_table/ - { - "query": { - "columns": [ - { - "name":"id", - "data_type": "Bigserial", - "is_nullable": "NO" - },{ - "name":"name", - "data_type": "varchar", - "character_maximum_length": "50" - },{ - "name":"geom", - "data_type": "geometry(point)" - } - ], - "constraints": [ - { - "constraint_type": "PRIMARY KEY", - "constraint_parameter": "id", - } - ], - "metadata": {"id": "sandbox.example_table"} - } - } - -and include the following headers: - -* Content-Type: application/json -* Authorization: Token *your-token* - -You can use any tool that can send HTTP-requests. E.g. you could use the linux -tool **curl**:: - - curl - -X PUT - -H 'Content-Type: application/json' - -H 'Authorization: Token ' - -d '{ - "query": { - "columns": [ - { - "name":"id", - "data_type": "bigsersial", - "is_nullable": "NO" - },{ - "name":"name", - "data_type": "varchar", - "character_maximum_length": "50" - },{ - "name":"geom", - "data_type": "geometry(point)" - } - ], - "constraints": [ - { - "constraint_type": "PRIMARY KEY", - "constraint_parameter": "id", - } - ], - "metadata": {"id": "sandbox.example_table"} - } - }' - https://openenergy-platform.org/api/v0/schema/sandbox/tables/example_table/ - - -or **python**: - -.. 
doctest:: - - >>> import requests - >>> data = { "query": { "columns": [ { "name":"id", "data_type": "bigserial", "is_nullable": "NO" },{ "name":"name", "data_type": "varchar", "character_maximum_length": "50" },{ "name":"geom", "data_type": "geometry(point)" } ], "constraints": [ { "constraint_type": "PRIMARY KEY", "constraint_parameter": "id" } ], "metadata": {"id": "sandbox.example_table"} } } - >>> requests.put(oep_url+'/api/v0/schema/sandbox/tables/example_table/', json=data, headers={'Authorization': 'Token %s'%your_token} ) - - -If everything went right, you will receive a 201-Resonse_ and the table has -been created. - -.. note:: - - The OEP will automatically grant the 'admin'-permissions on this - table to your user. - -.. doctest:: - - >>> result = requests.get(oep_url+'/api/v0/schema/sandbox/tables/example_table/columns') - >>> result.status_code - 200 - >>> json_result = result.json() - >>> json_result['id'] == {'character_maximum_length': None, 'maximum_cardinality': None, 'is_nullable': False, 'data_type': 'bigint', 'numeric_precision': 64, 'character_octet_length': None, 'interval_type': None, 'dtd_identifier': '1', 'interval_precision': None, 'numeric_scale': 0, 'is_updatable': True, 'datetime_precision': None, 'ordinal_position': 1, 'column_default': "nextval('sandbox.example_table_id_seq'::regclass)", 'numeric_precision_radix': 2} - True - >>> json_result['geom'] == {'column_default': None, 'character_maximum_length': None, 'maximum_cardinality': None, 'is_nullable': True, 'data_type': 'geometry', 'numeric_precision': None, 'character_octet_length': None, 'interval_type': None, 'dtd_identifier': '3', 'interval_precision': None, 'numeric_scale': None, 'is_updatable': True, 'datetime_precision': None, 'ordinal_position': 3, 'numeric_precision_radix': None} - True - >>> json_result['name'] == {'character_maximum_length': 50, 'maximum_cardinality': None, 'is_nullable': True, 'data_type': 'character varying', 'numeric_precision': None, 'character_octet_length': 200, 'interval_type': None, 'dtd_identifier': '2', 'interval_precision': None, 'numeric_scale': None, 'is_updatable': True, 'datetime_precision': None, 'ordinal_position': 2, 'column_default': None, 'numeric_precision_radix': None} - True - - -.. _200-Resonse: https://www.w3.org/Protocols/rfc2616/rfc2616-sec10.html -.. _201-Resonse: https://www.w3.org/Protocols/rfc2616/rfc2616-sec10.html - -Insert data -=========== - -You can insert data into a specific table by sending a request to its -`/rows` subresource. The `query` part of the sent data contians the row you want -to insert in form of a JSON-dictionary::: - - { - 'name_of_column_1': 'value_in_column_1', - 'name_of_column_2': 'value_in_column_2', - ... - } - -If you the row you want to insert should have a specific id, send a PUT-request -to the `/rows/{id}/` subresource. -In case the id should be generated automatically, just ommit the id field in the -data dictionary and send a POST-request to the `/rows/new` subresource. If -successful, the response will contain the id of the new row. - -In the following example, we want to add a row containing just the name -"John Doe", **but** we do not want to set the the id of this entry. - -**curl**:: - - curl - -X POST - -H "Content-Type: application/json" - -H 'Authorization: Token ' - -d '{"query": {"name": "John Doe"}}' - https://openenergy-platform.org//api/v0/schema/sandbox/tables/example_table/rows/ - -**python**: - -.. 
doctest:: - - >>> import requests - >>> data = {"query": {"name": "John Doe"}} - >>> result = requests.post(oep_url+'/api/v0/schema/sandbox/tables/example_table/rows/new', json=data, headers={'Authorization': 'Token %s'%your_token} ) - >>> result.status_code - 201 - >>> result = requests.get(oep_url+'/api/v0/schema/sandbox/tables/example_table/rows/') - >>> json_result = result.json() - >>> json_result[-1]["id"] # Show the id of the new row - 1 - -Alternatively, we can specify that the new row should be stored under id 12: - -**python**: - -.. doctest:: - - >>> import requests - >>> data = {"query": {"name": "Mary Doe XII"}} - >>> result = requests.put(oep_url+'/api/v0/schema/sandbox/tables/example_table/rows/12', json=data, headers={'Authorization': 'Token %s'%your_token} ) - >>> result.status_code - 201 - -Our database should have the following structure now: - -+-----------+-------------------+-----------------------+ -| *id*: int | name: varchar(50) | geom: geometry(Point) | -+===========+===================+=======================+ -| 1 | John Doe | NULL | -+-----------+-------------------+-----------------------+ -| 12 | Mary Doe XII | NULL | -+-----------+-------------------+-----------------------+ - -.. note:: - - In order to insert new data, or perfom any other actions that alter the data - state, you need the 'write'-permission for the respective table. Permissions can - be granted by a user with 'admin'-permissions in the OEP web interface. - -Select data -=========== - -You can insert data into a specific table by sending a GET-request to its -`/rows` subresource. -No authorization is required to do so. - -**curl**:: - - curl - -X GET - https://openenergy-platform.org/api/v0/schema/sandbox/tables/example_table/rows/ - -The data will be returned as list of JSON-dictionaries similar to the ones used -when adding new rows:: - - [ - { - "name": "John Doe", - "geom": null, - "id": 1 - } - ] - -**python**: - -.. doctest:: - - >>> result = requests.get(oep_url+'/api/v0/schema/sandbox/tables/example_table/rows/', ) - >>> result.status_code - 200 - >>> json_result = result.json() - >>> json_result == [{'id': 1, 'name': 'John Doe', 'geom': None}, {'id': 12, 'name': 'Mary Doe XII', 'geom': None}] - True - - -There are also optional parameters for these GET-queries: - -* limit: Limit the number of returned rows -* offset: Ignore the specified amount of rows -* orderby: Name of a column to refer when ordering -* column: Name of a column to include in the results. If not present, all - columns are returned -* where: Constraint fourmulated as `VALUE+OPERATOR+VALUE` with - - * VALUE: Constant or name of a column - * OPERATOR: One of the following: - - * `EQUALS` or `=`, - * `GREATER` or `>`, - * `LOWER` or `<`, - * `NOTEQUAL` or `!=` or `<>`, - * `NOTGREATER` or `<=`, - * `NOTLOWER` or `>=`, - -.. doctest:: - - >>> result = requests.get(oep_url+"/api/v0/schema/sandbox/tables/example_table/rows/?where=name=John+Doe", ) - >>> result.status_code - 200 - >>> json_result = result.json() - >>> json_result == [{'id': 1, 'name': 'John Doe', 'geom': None}] - True - -.. doctest:: - - >>> result = requests.get(oep_url+"/api/v0/schema/sandbox/tables/example_table/rows/1", ) - >>> result.status_code - 200 - >>> json_result = result.json() - >>> json_result == {'id': 1, 'name': 'John Doe', 'geom': None} - True - -.. 
doctest:: - - >>> result = requests.get(oep_url+"/api/v0/schema/sandbox/tables/example_table/rows/?offset=1") - >>> result.status_code - 200 - >>> json_result = result.json() - >>> json_result == [{'id': 12, 'name': 'Mary Doe XII', 'geom': None}] - True - -.. doctest:: - - >>> result = requests.get(oep_url+"/api/v0/schema/sandbox/tables/example_table/rows/?column=name&column=id") - >>> result.status_code - 200 - >>> json_result = result.json() - >>> json_result == [{'id': 1, 'name': 'John Doe'},{'id': 12, 'name': 'Mary Doe XII'}] - True - -Add columns table -================= - -.. doctest:: - - >>> data = {'query':{'data_type': 'varchar', 'character_maximum_length': 30}} - >>> result = requests.put(oep_url+"/api/v0/schema/sandbox/tables/example_table/columns/first_name", json=data, headers={'Authorization': 'Token %s'%your_token}) - >>> result.status_code - 201 - -.. doctest:: - - >>> result = requests.get(oep_url+"/api/v0/schema/sandbox/tables/example_table/columns/first_name") - >>> result.status_code - 200 - >>> result.json() == {'numeric_scale': None, 'numeric_precision_radix': None, 'is_updatable': True, 'maximum_cardinality': None, 'character_maximum_length': 30, 'character_octet_length': 120, 'ordinal_position': 4, 'is_nullable': True, 'interval_type': None, 'data_type': 'character varying', 'dtd_identifier': '4', 'column_default': None, 'datetime_precision': None, 'interval_precision': None, 'numeric_precision': None} - True - -Alter data -========== - -Our current table looks as follows: - -+-----------------+-------------------+-----------------------+------------------------+ -| *id*: bigserial | name: varchar(50) | geom: geometry(Point) | first_name: varchar(30)| -+=================+===================+=======================+========================+ -| 1 | John Doe | NULL | NULL | -+-----------------+-------------------+-----------------------+------------------------+ -| 12 | Mary Doe XII | NULL | NULL | -+-----------------+-------------------+-----------------------+------------------------+ - -Our next task is to distribute for and last name to the different columns: - -.. doctest:: - - >>> result = requests.get(oep_url+'/api/v0/schema/sandbox/tables/example_table/rows/') # Load the names via GET - >>> result.status_code - 200 - >>> for row in result.json(): - ... first_name, last_name = str(row['name']).split(' ', 1) # Split the names at the first space - ... data = {'query': {'name': last_name, 'first_name': first_name}} # Build the data dictionary and post it to /rows/ - ... result = requests.post(oep_url+'/api/v0/schema/sandbox/tables/example_table/rows/{id}'.format(id=row['id']), json=data, headers={'Authorization': 'Token %s'%your_token}) - ... result.status_code - 200 - 200 - -Now, our table looks as follows: - -+-----------+-------------------+-----------------------+------------------------+ -| *id*: int | name: varchar(50) | geom: geometry(Point) | first_name: varchar(30)| -+===========+===================+=======================+========================+ -| 1 | Doe | NULL | John | -+-----------+-------------------+-----------------------+------------------------+ -| 12 | Doe XII | NULL | Mary | -+-----------+-------------------+-----------------------+------------------------+ - -Alter tables -============ - -Currently, rows are allowed that contain no first name. In order to prohibit -such behaviour, we have to set column `first_name` to `NOT NULL`. 
Such `ALTER -TABLE` commands can be executed by POST-ing a dictionary with the corresponding -values to the column's resource: - -.. doctest:: - - >>> data = {'query': {'is_nullable': False}} - >>> result = requests.post(oep_url+"/api/v0/schema/sandbox/tables/example_table/columns/first_name", json=data, headers={'Authorization': 'Token %s'%your_token} ) - >>> result.status_code - 200 - -We can check, whether your command worked by retrieving the corresponding resource: - -.. doctest:: - - >>> result = requests.get(oep_url+"/api/v0/schema/sandbox/tables/example_table/columns/first_name") - >>> result.status_code - 200 - >>> json_result = result.json() - >>> json_result['is_nullable'] - False - -After prohibiting null-values in the first name column, such rows can not be -added anymore. - -.. doctest:: - - >>> import requests - >>> data = {"query": {"name": "McPaul"}} - >>> result = requests.post(oep_url+'/api/v0/schema/sandbox/tables/example_table/rows/new', json=data, headers={'Authorization': 'Token %s'%your_token} ) - >>> result.status_code - 400 - >>> result.json()['reason'] - 'Action violates not-null constraint on first_name. Failing row was (McPaul)' - - -Delete rows -*********** - -In order to delete rows, you need the 'delete'-permission on the respective -table. The permissions can be granted by an admin in the OEP web interface. - -.. doctest:: - - >>> import requests - >>> data = {"query": {"name": "McPaul"}} - >>> result = requests.delete(oep_url+'/api/v0/schema/sandbox/tables/example_table/rows/1', json=data, headers={'Authorization': 'Token %s'%your_token} ) - >>> result.status_code - 200 - >>> result = requests.get(oep_url+'/api/v0/schema/sandbox/tables/example_table/rows/1') - >>> result.status_code - 404 - - -Metadata -******** - -The OEP gives the opportunity to publish datasets and annotate it with important -information. You can query this metadata - -.. doctest:: - - >>> import requests - >>> result = requests.get(oep_url+'/api/v0/schema/sandbox/tables/example_table/meta/') - >>> result.status_code - 200 - >>> result.json() == {'id': 'sandbox.example_table', 'metaMetadata': {'metadataVersion': 'OEP-1.5.2', 'metadataLicense': {'name': 'CC0-1.0', 'title': 'Creative Commons Zero v1.0 Universal', 'path': 'https://creativecommons.org/publicdomain/zero/1.0/'}}, "_comment": {"metadata": "Metadata documentation and explanation (https://github.com/OpenEnergyPlatform/oemetadata)", "dates": "Dates and time must follow the ISO8601 including time zone (YYYY-MM-DD or YYYY-MM-DDThh:mm:ss±hh)", "units": "Use a space between numbers and units (100 m)", "languages": "Languages must follow the IETF (BCP47) format (en-GB, en-US, de-DE)", "licenses": "License name must follow the SPDX License List (https://spdx.org/licenses/)", "review": "Following the OEP Data Review (https://github.com/OpenEnergyPlatform/data-preprocessing/blob/master/data-review/manual/review_manual.md)", "null": "If not applicable use: null", "todo": "If a value is not yet available, use: todo"}} - True - -Note that the returned metadata differs from the metadata passed when creating -the table. This is because the OEP autocompletes missing fields. You can fill -those fields to make you data more easily accessible. You can also set metadata -on existing tables via `POST`-requests (granted that you have write-permissions): - -.. doctest:: - - >>> import requests - >>> data = { - ... "id": "sandbox.example_table", - ... "name": "Human-readable name", - ... "description": "A verbose description of this dataset", - ... 
"language": [ - ... "eng-uk" - ... ], - ... "keywords": [ - ... "test" - ... ], - ... "publicationDate": "2020-02-06", - ... "context": { - ... "homepage": "example.com", - ... "documentation": "doc.example.com", - ... "sourceCode": "src.example.com", - ... "contact": "example.com", - ... "grantNo": "0", - ... "fundingAgency": "test agency", - ... "fundingAgencyLogo": "http://www.example.com/logo.png", - ... "publisherLogo": "http://www.example.com/logo2.png" - ... }, - ... "licenses": [ - ... { - ... "name": "CC0-1.0", - ... "title": "Creative Commons Zero v1.0 Universal", - ... "path": "https://creativecommons.org/publicdomain/zero/1.0/legalcode", - ... "instruction": "You are free: To Share, To Create, To Adapt", - ... "attribution": "© Reiner Lemoine Institut" - ... } - ... ], - ... "metaMetadata": { - ... "metadataVersion": "OEP-1.5.1", - ... "metadataLicense": { - ... "name": "CC0-1.0", - ... "title": "Creative Commons Zero v1.0 Universal", - ... "path": "https://creativecommons.org/publicdomain/zero/1.0/" - ... } - ... }, - ... "_comment": { - ... "metadata": "Metadata documentation and explanation (https://github.com/OpenEnergyPlatform/oemetadata)", - ... "dates": "Dates and time must follow the ISO8601 including time zone (YYYY-MM-DD or YYYY-MM-DDThh:mm:ss±hh)", - ... "units": "Use a space between numbers and units (100 m)", - ... "languages": "Languages must follow the IETF (BCP47) format (en-GB, en-US, de-DE)", - ... "licenses": "License name must follow the SPDX License List (https://spdx.org/licenses/)", - ... "review": "Following the OEP Data Review (https://github.com/OpenEnergyPlatform/data-preprocessing/blob/master/data-review/manual/review_manual.md)", - ... "null": "If not applicable use: null", - ... "todo": "If a value is not yet available, use: todo" - ... } - ... } - >>> result = requests.post(oep_url+'/api/v0/schema/sandbox/tables/example_table/meta/', json=data, headers={'Authorization': 'Token %s'%your_token}) - >>> result.status_code - 200 - - -Delete tables -************* - -In order to delete rows, you need the 'admin'-permission on the respective -table. The permissions can be granted by an admin in the OEP web interface. - -.. doctest:: - - >>> import requests - >>> requests.delete(oep_url+'/api/v0/schema/sandbox/tables/example_table', headers={'Authorization': 'Token %s'%your_token} ) - - >>> requests.get(oep_url+'/api/v0/schema/sandbox/tables/example_table') - - -For more advanced commands read :doc:`advanced` - -Handling Arrays -*************** - -The underlying OpenEnergy Database is a Postgres database. Thus, it supports -Array-typed fields. - -.. doctest:: - - >>> import requests - >>> data = { "query": { "columns": [ { "name":"id", "data_type": "bigserial", "is_nullable": "NO" },{ "name":"arr", "data_type": "int[]"},{ "name":"geom", "data_type": "geometry(point)" } ], "constraints": [ { "constraint_type": "PRIMARY KEY", "constraint_parameter": "id" } ] } } - >>> requests.put(oep_url+'/api/v0/schema/sandbox/tables/example_table/', json=data, headers={'Authorization': 'Token %s'%your_token} ) - - -.. doctest:: - - >>> import requests - >>> data = {"query": {"arr": [1,2,3]}} - >>> result = requests.post(oep_url+'/api/v0/schema/sandbox/tables/example_table/rows/new', json=data, headers={'Authorization': 'Token %s'%your_token} ) - >>> result.status_code - 201 - >>> result = requests.get(oep_url+'/api/v0/schema/sandbox/tables/example_table/rows/1') - >>> json_result = result.json() - >>> json_result['arr'] - [1, 2, 3] - -.. 
testcleanup:: - - import requests - response = requests.delete(oep_url+'/api/v0/schema/sandbox/tables/example_table/', json=data, headers={'Authorization': 'Token %s'%your_token} ) - assert response.status_code == 200, response diff --git a/docs/source/api/scenario.rst b/docs/source/api/scenario.rst deleted file mode 100644 index 4552c63e1..000000000 --- a/docs/source/api/scenario.rst +++ /dev/null @@ -1,33 +0,0 @@ -************ -Scenario API -************ - -This API is built on RDF factories, which are comparable to django models. Each factory has a collection of fields. -Let's look at the Person factory: - -.. autoclass:: modelview.rdf.factory.Person - :members: - -This factory has three fields (excluding `classes`): `affiliation`, `first_name`, `last_name`. Each of them relates to one -rdf:Property, which is identified by the respective URL (`field.rdf_name`). - -.. autoclass:: modelview.rdf.field.Field - -Given a subject `:subject` a field `f` can be transformed into an rdf triple by - -.. code-block:: - - ":subject {p} {o}".format(p=f.rdf_name, o=v) - -for each `v` in `f.values`. - -A `POST` request to to a resource expects a data dictionary that has a `graph` field that contains an RDF-graph representing the -new structure. Following is a list of all available factories with all their fields (and the respective IRIs): - - - -Factories -######### - -.. automodule:: modelview.rdf.factory - :members: diff --git a/docs/source/base.rst b/docs/source/base.rst deleted file mode 100644 index 7f4ccbd48..000000000 --- a/docs/source/base.rst +++ /dev/null @@ -1,18 +0,0 @@ -==== -Base -==== - - -Navigation Bar -============== - -The **Navigation Bar** on top of the page is visible on all pages. -On wide screens all elements are shown next to each other. -On small screens the bar can be accessed on the "hamburger icon". - -The navigation bar is done in ``base/templates/base``: - -* ``base.html`` -* ``base-wide.html`` - -It is either a link or a submenu with individual links. diff --git a/docs/source/conf.py b/docs/source/conf.py deleted file mode 100644 index 61cc659a5..000000000 --- a/docs/source/conf.py +++ /dev/null @@ -1,333 +0,0 @@ -# -*- coding: utf-8 -*- -# -# OpenEnergyPlatform documentation build configuration file, created by -# sphinx-quickstart on Fri Aug 12 20:13:24 2016. -# -# This file is execfile()d with the current directory set to its -# containing dir. -# -# Note that not all possible configuration values are present in this -# autogenerated file. -# -# All configuration values have a default; values that are commented out -# serve to show the default. - -import os -import sys -from unittest.mock import MagicMock - -import django - -# try: -# from oeplatform.oeplatform.securitysettings import SECRET_KEY -# except: -# import logging -# logging.error("No securitysettings found. Setting defaults") -# DEBUG = True -# SECRET_KEY = '0' - - -# If extensions (or modules to document with autodoc) are in another directory, -# add these directories to sys.path here. If the directory is relative to the -# documentation root, use os.path.abspath to make it absolute, like shown here. -# sys.path.insert(0, os.path.abspath('.')) - -# -- General configuration ------------------------------------------------ - -sys.path.insert(0, os.path.abspath(os.path.join("..", ".."))) -os.environ["DJANGO_SETTINGS_MODULE"] = "oeplatform.settings" - -django.setup() - -# If your documentation needs a minimal Sphinx version, state it here. 
-# needs_sphinx = '1.0' - -# Add any Sphinx extension module names here, as strings. They can be -# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom -# ones. -extensions = [ - "sphinx.ext.autodoc", - "sphinx.ext.doctest", - "sphinx.ext.intersphinx", - "sphinx.ext.todo", - "sphinx.ext.coverage", - "sphinx.ext.ifconfig", - "sphinx.ext.viewcode", - "sphinx.ext.napoleon", -] - -# Add any paths that contain templates here, relative to this directory. -templates_path = ["_templates"] - -# The suffix(es) of source filenames. -# You can specify multiple suffix as a list of string: -# source_suffix = ['.rst', '.md'] -source_suffix = ".rst" - -# The encoding of source files. -# source_encoding = 'utf-8-sig' - -# The master toctree document. -master_doc = "index" - -# General information about the project. -project = "OpenEnergyPlatform" -copyright = "2023, SIROP" -author = "Open Energy Family" - -# The version info for the project you're documenting, acts as replacement for -# |version| and |release|, also used in various other places throughout the -# built documents. -# -# The short X.Y version. -version = "0.0.4" -# The full version, including alpha/beta/rc tags. -release = "0.0.4" - -# The language for content autogenerated by Sphinx. Refer to documentation -# for a list of supported languages. -# -# This is also used if you do content translation via gettext catalogs. -# Usually you set "language" from the command line for these cases. -language = None - -# There are two options for replacing |today|: either, you set today to some -# non-false value, then it is used: -# today = '' -# Else, today_fmt is used as the format for a strftime call. -# today_fmt = '%B %d, %Y' - -# List of patterns, relative to source directory, that match files and -# directories to ignore when looking for source files. -exclude_patterns = [] - -# The reST default role (used for this markup: `text`) to use for all -# documents. -# default_role = None - -# If true, '()' will be appended to :func: etc. cross-reference text. -# add_function_parentheses = True - -# If true, the current module name will be prepended to all description -# unit titles (such as .. function::). -# add_module_names = True - -# If true, sectionauthor and moduleauthor directives will be shown in the -# output. They are ignored by default. -# show_authors = False - -# The name of the Pygments (syntax highlighting) style to use. -pygments_style = "sphinx" - -# A list of ignored prefixes for module index sorting. -# modindex_common_prefix = [] - -# If true, keep warnings as "system message" paragraphs in the built documents. -# keep_warnings = False - -# If true, `todo` and `todoList` produce output, else they produce nothing. -todo_include_todos = True - - -# -- Options for HTML output ---------------------------------------------- - -# The theme to use for HTML and HTML Help pages. See the documentation for -# a list of builtin themes. -html_theme = "alabaster" - -# Theme options are theme-specific and customize the look and feel of a theme -# further. For a list of options available for each theme, see the -# documentation. -# html_theme_options = {} - -# Add any paths that contain custom themes here, relative to this directory. -# html_theme_path = [] - -# The name for this set of Sphinx documents. If None, it defaults to -# " v documentation". -# html_title = None - -# A shorter title for the navigation bar. Default is the same as html_title. 
-# html_short_title = None - -# The name of an image file (relative to this directory) to place at the top -# of the sidebar. -# html_logo = None - -# The name of an image file (within the static path) to use as favicon of the -# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 -# pixels large. -# html_favicon = None - -# Add any paths that contain custom static files (such as style sheets) here, -# relative to this directory. They are copied after the builtin static files, -# so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ["_static"] - -# Add any extra paths that contain custom files (such as robots.txt or -# .htaccess) here, relative to this directory. These files are copied -# directly to the root of the documentation. -# html_extra_path = [] - -# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, -# using the given strftime format. -# html_last_updated_fmt = '%b %d, %Y' - -# If true, SmartyPants will be used to convert quotes and dashes to -# typographically correct entities. -# html_use_smartypants = True - -# Custom sidebar templates, maps document names to template names. -# html_sidebars = {} - -# Additional templates that should be rendered to pages, maps page names to -# template names. -# html_additional_pages = {} - -# If false, no module index is generated. -# html_domain_indices = True - -# If false, no index is generated. -# html_use_index = True - -# If true, the index is split into individual pages for each letter. -# html_split_index = False - -# If true, links to the reST sources are added to the pages. -# html_show_sourcelink = True - -# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. -# html_show_sphinx = True - -# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. -# html_show_copyright = True - -# If true, an OpenSearch description file will be output, and all pages will -# contain a tag referring to it. The value of this option must be the -# base URL from which the finished HTML is served. -# html_use_opensearch = '' - -# This is the file name suffix for HTML files (e.g. ".xhtml"). -# html_file_suffix = None - -# Language to be used for generating the HTML full-text search index. -# Sphinx supports the following languages: -# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' -# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' -# html_search_language = 'en' - -# A dictionary with options for the search language support, empty by default. -# Now only 'ja' uses this config value -# html_search_options = {'type': 'default'} - -# The name of a javascript file (relative to the configuration directory) that -# implements a search results scorer. If empty, the default will be used. -# html_search_scorer = 'scorer.js' - -# Output file base name for HTML help builder. -htmlhelp_basename = "OpenEnergyPlatformdoc" - -# -- Options for LaTeX output --------------------------------------------- - -latex_elements = { - # # The paper size ('letterpaper' or 'a4paper'). - # 'papersize': 'letterpaper', - # # The font size ('10pt', '11pt' or '12pt'). - # 'pointsize': '10pt', - # # Additional stuff for the LaTeX preamble. - # 'preamble': '', - # # Latex figure (float) alignment - # 'figure_align': 'htbp', -} - -# Grouping the document tree into LaTeX files. List of tuples -# (source start file, target name, title, -# author, documentclass [howto, manual, or own class]). 
-latex_documents = [ - ( - master_doc, - "OpenEnergyPlatform.tex", - "OpenEnergyPlatform Documentation", - "open\\_eGo", - "manual", - ) -] - -# The name of an image file (relative to this directory) to place at the top of -# the title page. -# latex_logo = None - -# For "manual" documents, if this is true, then toplevel headings are parts, -# not chapters. -# latex_use_parts = False - -# If true, show page references after internal links. -# latex_show_pagerefs = False - -# If true, show URL addresses after external links. -# latex_show_urls = False - -# Documents to append as an appendix to all manuals. -# latex_appendices = [] - -# If false, no module index is generated. -# latex_domain_indices = True - - -# -- Options for manual page output --------------------------------------- - -# One entry per manual page. List of tuples -# (source start file, name, description, authors, manual section). -man_pages = [ - (master_doc, "openenergyplatform", "OpenEnergyPlatform Documentation", [author], 1) -] - -# If true, show URL addresses after external links. -# man_show_urls = False - - -# -- Options for Texinfo output ------------------------------------------- - -# Grouping the document tree into Texinfo files. List of tuples -# (source start file, target name, title, author, -# dir menu entry, description, category) -texinfo_documents = [ - ( - master_doc, - "OpenEnergyPlatform", - "OpenEnergyPlatform Documentation", - author, - "OpenEnergyPlatform", - "One line description of project.", - "Miscellaneous", - ) -] - -# Documents to append as an appendix to all manuals. -# texinfo_appendices = [] - -# If false, no module index is generated. -# texinfo_domain_indices = True - -# How to display URL addresses: 'footnote', 'no', or 'inline'. -# texinfo_show_urls = 'footnote' - -# If true, do not generate a @detailmenu in the "Top" node's menu. -# texinfo_no_detailmenu = False - - -# Example configuration for intersphinx: refer to the Python standard library. -intersphinx_mapping = {"https://docs.python.org/": None} - - -class Mock(MagicMock): - @classmethod - def __getattr__(cls, name): - if name == "_mock_methods": - return name._mock_methods - else: - return Mock() - - -MOCK_MODULES = ["shapely"] -sys.modules.update((mod_name, Mock()) for mod_name in MOCK_MODULES) diff --git a/docs/source/dataedit.rst b/docs/source/dataedit.rst deleted file mode 100644 index 6d379a4ea..000000000 --- a/docs/source/dataedit.rst +++ /dev/null @@ -1,6 +0,0 @@ -====================== -Database visualisation -====================== - -.. automodule:: dataedit.views - :members: diff --git a/docs/source/errors.rst b/docs/source/errors.rst deleted file mode 100644 index 80e0a2018..000000000 --- a/docs/source/errors.rst +++ /dev/null @@ -1,11 +0,0 @@ -================================ -Errors and Response Status Codes -================================ - -The OEP returns standard http status codes. Not all of them are Errors. There is a full list on Wikipedia (https://en.wikipedia.org/wiki/List_of_HTTP_status_codes ). The ones you may encounter more frequently are: - * ``200`` **OK** This is the standard response for successful HTTP requests. When using the api to query a table, the OEP will return this status if the table does indeed exist. - * ``201`` **Created** The OEP will return this e.g. when you have successfully created a table. - * ``400`` **Bad Request** The OEP cannot process the request, because of a malformed query, use of an unsupported syntax or some other faulty user input. 
- * ``403`` **Forbidden** The OEP understands the request, but the current user is not allowed to perform the action. This may be the case when you try to write to a table that is in a different schema than model_draft, you did not log in for a certain action, or you did not provide your key for a data upload. - * ``404`` **Not Found** The requested resource or page does not exist or is not available. - * ``500`` **Internal Server Error** Something unexpected happened and no more specific message is suitable. These should be rare, but if they do occur, a developer will receive a mail about it, so they can try to fix the issue. diff --git a/docs/source/index.rst b/docs/source/index.rst deleted file mode 100644 index 6bca7de19..000000000 --- a/docs/source/index.rst +++ /dev/null @@ -1,119 +0,0 @@ -.. Open Energy Platform developer documentation master file, created by - sphinx-quickstart on Fri Aug 12 20:13:24 2016. - You can adapt this file completely to your liking, but it should at least - contain the root `toctree` directive. - -Developer documentation of the Open Energy Platform (OEP) -========================================================= - -The Open Energy Platform is a website that has three main targets: - - 1. Provide a language-independent interface that is a thin layer on top of the Open Energy Database (oedb) - 2. Implement an intuitive and easy-to use web interface on top of the database - 3. Improve the visibility, communication and transparency of results from energy system modelling - -Mission statement -***************** - -The transition to renewable energy sources is one of the huge goals of the last -few decades. Whilst conventional energy generation provides a constant, generally -available source of electricity, heat and so on, our environment pays a toll. -Contrary, renewable energy generation is less environmentally demanding but more -financially expensive or just locally or inconsistently available. -Guaranteeing a steady and reliable, yet sustainable supply of energy requires -still a lot of thorough research. - -Expansion of the energy grid might imply measures that must be communicable in -a transparent way. Hence, results from research of energy system studies should -be publicly available and reproducible. This raises the need for publicly available -data sources. - -Landing page: index -******************* - -The landing page is programmed in **index.html**. -It contains a heading, the main modules in boxes, and further information. - - -App: dataedit -************* - -One aim of the Open Energy Platform is the visual and understandable presentation -of such datasets. The underlying OpenEnergyDatabase (OEDB) stores datasets of different -open-data projects. The visual presentation is implemented in the **dataedit** app. - -.. toctree:: - :maxdepth: 2 - - dataedit - - -App: api -******** - -The data stored in the OEDB is also used in several projects. In order to ease -the access to required datasets the OEP provides a RESTful HTTP-interface in -the **api** app: - -.. toctree:: - :maxdepth: 2 - - api - api/how_to - api/advanced - errors - - -App: modelview -************** - -Researchers or interested developers that just entered this field might be interested -in an overview which open energy models already exists. This data is collected in -so called fact sheets. Modellers can look through these, add their own models or -enhance existing descriptions using the forms definied in the **modelview** app - -.. 
toctree:: - :maxdepth: 2 - - modelview - -Other apps are: - -App: login -********** - -.. toctree:: - :maxdepth: 2 - - login - -App: tutorials -************** - -The OEP features should be easy to use for the user. Therefore text, video or jupyternotebook based tutorials -are offered. The tutorials app can be accessed via the front page. With the tutorials app text and -video tutorials can be created. All CRUD functions are implemented here. The tutorials are stored as -markdown and html format in the django internal database. Jupyternotebook tutorials are imported -from https://github.com/OpenEnergyPlatform/examples Editing the jupyternotebook tutorials is not possible -via the webinterface. Update and delete functionalities for updating the listed tutorials are implemented -via console commands (base/management/commands/notebooks.py) - -.. toctree:: - :maxdepth: 2 - - tutorials - -RDF Factsheets -************** - -.. toctree:: - :maxdepth: 2 - - api/scenario - -Indices and tables -================== - -* :ref:`genindex` -* :ref:`modindex` -* :ref:`search` diff --git a/docs/source/login.rst b/docs/source/login.rst deleted file mode 100644 index f1e1e11c7..000000000 --- a/docs/source/login.rst +++ /dev/null @@ -1,6 +0,0 @@ -=============== -User management -=============== - -.. automodule:: login.views - :members: diff --git a/docs/source/modelview.rst b/docs/source/modelview.rst deleted file mode 100644 index 0559d74a0..000000000 --- a/docs/source/modelview.rst +++ /dev/null @@ -1,6 +0,0 @@ -================= -Factsheet handler -================= - -.. automodule:: modelview.views - :members: From bb355e5e3e4ed222bfbec114d1a4316c72ff20e7 Mon Sep 17 00:00:00 2001 From: Jonas Huber Date: Wed, 12 Jul 2023 19:10:52 +0200 Subject: [PATCH 2/7] setup mkdocs (using super-repo) #1277 --- mkdocs.yml | 58 ++++++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 58 insertions(+) create mode 100644 mkdocs.yml diff --git a/mkdocs.yml b/mkdocs.yml new file mode 100644 index 000000000..eb6a81c1a --- /dev/null +++ b/mkdocs.yml @@ -0,0 +1,58 @@ +# Project +site_name: Open Energy Platform Documentation +site_url: +site_description: The Open Energy Platform is the Frontend of the Open Energy Family and specifically provides a Webapplication which is documented using mkdocs. 
+ +# Repository +repo_name: OpenEnergyFamily/oeplatform +repo_url: https://github.com/OpenEnergyPlatform/oeplatform +docs_dir: docs + +# Configuration +theme: + name: material + logo: + palette: + # Palette toggle for dark mode + - scheme: default + toggle: + icon: material/brightness-4 + name: Switch to light mode + # Palette toggle for light mode + - scheme: slate + toggle: + icon: material/brightness-7 + name: Switch to dark mode + icon: + repo: fontawesome/brands/github + features: + - navigation.tabs + - navigation.tabs.sticky + - navigation.sections + - navigation.indexes + - navigation.path + - navigation.tracking + - navigation.top + - toc.follow + +# extensions +markdown_extensions: + - admonition + - toc: + permalink: 💙 + +plugins: +- mkdocstrings: + +# Customization +extra: + generator: false + +extra_css: + - css/extra.css + +# Navigation +nav: + - Home: index.md + - Dataedit: + - dataedit/views.md \ No newline at end of file From 7193f7dcb388be3a7db8cfe60da6f9bba8fc256c Mon Sep 17 00:00:00 2001 From: Jonas Huber Date: Wed, 12 Jul 2023 19:11:22 +0200 Subject: [PATCH 3/7] add initial structure #1277 --- docs/css/extra.css | 7 +++++++ docs/dataedit/views.md | 3 +++ docs/index.md | 17 +++++++++++++++++ 3 files changed, 27 insertions(+) create mode 100644 docs/css/extra.css create mode 100644 docs/dataedit/views.md create mode 100644 docs/index.md diff --git a/docs/css/extra.css b/docs/css/extra.css new file mode 100644 index 000000000..a9146c82b --- /dev/null +++ b/docs/css/extra.css @@ -0,0 +1,7 @@ +:root > * { + --md-primary-fg-color: #0f3b64; + --md-primary-fg-color--dark: #5d6cc0; + --md-typeset-a-color: #5ba6ec; + /* --md-text-font-color: #ffffff; */ + /* --md-primary-fg-color--light: #ECB7B7; */ + } \ No newline at end of file diff --git a/docs/dataedit/views.md b/docs/dataedit/views.md new file mode 100644 index 000000000..3732ba478 --- /dev/null +++ b/docs/dataedit/views.md @@ -0,0 +1,3 @@ +# Test + +::: dataedit.views \ No newline at end of file diff --git a/docs/index.md b/docs/index.md new file mode 100644 index 000000000..000ea3455 --- /dev/null +++ b/docs/index.md @@ -0,0 +1,17 @@ +# Welcome to MkDocs + +For full documentation visit [mkdocs.org](https://www.mkdocs.org). + +## Commands + +* `mkdocs new [dir-name]` - Create a new project. +* `mkdocs serve` - Start the live-reloading docs server. +* `mkdocs build` - Build the documentation site. +* `mkdocs -h` - Print help message and exit. + +## Project layout + + mkdocs.yml # The configuration file. + docs/ + index.md # The documentation homepage. + ... # Other markdown pages, images and other files. 
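The `docs/dataedit/views.md` page added above holds nothing but the `::: dataedit.views` identifier: at build time the mkdocstrings plugin registered in `mkdocs.yml` resolves this dotted path and renders the docstrings of that module's members into the page. A minimal sketch of the kind of docstring it picks up — the `list_tables` function and its signature are invented for illustration and are not part of the real `dataedit.views` module:

``` python
# Hypothetical member of dataedit/views.py, shown only to illustrate what
# "::: dataedit.views" renders; the real module defines its own views.
def list_tables(schema_name):
    """Return the names of all tables in a schema.

    Arguments:
        schema_name: Name of the database schema, e.g. "model_draft".

    Returns:
        A list of table names as strings.
    """
    raise NotImplementedError("illustrative stub, not the real view")
```

The page content comes from the signature and the docstring; the function is never executed during the build.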
From e720e7ff1a2a40abb87f5318a3d494828f22265c Mon Sep 17 00:00:00 2001 From: Jonas Huber Date: Wed, 12 Jul 2023 19:17:19 +0200 Subject: [PATCH 4/7] update pull request template #1277 --- .github/pull_request_template.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md index 80663d97e..88d48b2c5 100644 --- a/.github/pull_request_template.md +++ b/.github/pull_request_template.md @@ -22,7 +22,7 @@ Closes # ### PR-Assignee - [ ] 🐙 Follow the workflow in [CONTRIBUTING.md](https://github.com/OpenEnergyPlatform/oeplatform/blob/develop/CONTRIBUTING.md) - [ ] 📝 Update the [CHANGELOG.md](https://github.com/OpenEnergyPlatform/oeplatform/blob/develop/versions/changelogs/current.md) -- [ ] 📙 Update the documentation on [Read The Docs](https://oeplatform.readthedocs.io/en/latest/?badge=latest) +- [ ] 📙 Update the documentation the [documentaion](gh-pages) ### Reviewer - [ ] 🐙 Follow the [Reviewer Guidelines](https://github.com/rl-institut/super-repo/blob/develop/CONTRIBUTING.md#40-let-someone-else-review-your-pr) From 5895e6f7662f029b3b5c79efa3b0049045b2151e Mon Sep 17 00:00:00 2001 From: Jonas Huber Date: Tue, 18 Jul 2023 23:38:31 +0200 Subject: [PATCH 5/7] add first wanky but working adaptation of the converted .rst to .md documentation files #1277 --- docs/api/api_converted.md | 313 ++++++++++++++++++++++++++ docs/base/base_converted.md | 13 ++ docs/dataedit/index.md | 1 + docs/dataedit/views.md | 2 +- docs/error/error_converted.md | 25 ++ docs/index.md | 90 +++++++- docs/login/login_converted.md | 5 + docs/modelview/modelview_converted.md | 3 + 8 files changed, 439 insertions(+), 13 deletions(-) create mode 100644 docs/api/api_converted.md create mode 100644 docs/base/base_converted.md create mode 100644 docs/dataedit/index.md create mode 100644 docs/error/error_converted.md create mode 100644 docs/login/login_converted.md create mode 100644 docs/modelview/modelview_converted.md diff --git a/docs/api/api_converted.md b/docs/api/api_converted.md new file mode 100644 index 000000000..599377032 --- /dev/null +++ b/docs/api/api_converted.md @@ -0,0 +1,313 @@ +Data interface (REST) +===================== + +Data Structures +--------------- + +### Constraint Definition + +constraint\_definition (Dictonary) + +: + + Specifies a definition of a constraint. + + : - `action` Action of constraint (e.g. ADD, DROP) + - `constraint_type` Type of constraint (e.g. UNIQUE, PRIMARY + KEY, FOREIGN KEY) + - `constraint_name` Name of constraint. + - `constraint_parameter` Parameter of constraint. + - `reference_table` Name of reference table, can be None. + - `reference_column` Name of reference column, can be None. + +### Column Definition + +column\_definition (Dictonary) + +: + + Specifies a definition of a column. + + : - `name` Name of column. + - `new_name` New name of column, can be None. + - `data_type` New datatype of column, can be None. + - `is_nullable` New null value of column, can be None. + - `character_maximum_length` New data length of column, can be + None. + +### Response Definition + +response\_dictonary (Dictonary) + +: + + Describes the result of an api action. + + : - `success (Boolean)` Result of Action + - `error (String)` Error Message + - `http_status (Integer)` HTTP status code + () + +Table (RESTful) +--------------- + +URL: /schema/{schema}/table/{table} + +### GET + +Reference needed. + +### PUT + +Creates a new table in database. 
JSON should contain a constraint +definition array and a column definition array. + +Example: + +``` json +{ + "constraints": [ + { + "constraint_type": "FOREIGN KEY", + "constraint_name": "fkey_schema_table_database_id", + "constraint_parameter": "database_id", + "reference_table": "example.table", + "reference_column": "database_id_ref" + }, + { + "constraint_type": "PRIMARY KEY", + "constraint_name": "pkey_schema_table_id", + "constraint_parameter": "id", + "reference_table": null, + "reference_column": null + } + ], + "columns": [ + { + "name": "id", + "data_type": "int", + "is_nullable": "YES", + "character_maximum_length": null + }, + { + "name": "name", + "data_type": "character varying", + "is_nullable": "NO", + "character_maximum_length": 50 + } + ] +} +``` + +### POST + +JSON should contain a column or constraint definition. Additionally +`action` and `type` should be mentioned. + +- `type` can be `constraint` or `column`. +- `action` can be `ADD` and `DROP`. +- `constraint_type` can be every constraint type supported by + Postgres. +- `reference_table` and `reference_column` can be null, if not + necessary. + +Example: + +``` json +{ + "type" : "constraint", + "action": "ADD", + "constraint_type": "FOREIGN KEY", + "constraint_name": "fkey_label", + "constraint_parameter": "changed_name", + "reference_table" : "reference.group_types", + "reference_column" : "label" +} + +{ + "type" : "column", + "name" : "test_name", + "newname" : "changed_name", + "data_type": "character varying", + "is_nullable": "NO", + "character_maximum_length": 50 +} +``` + +Rows (RESTful) +-------------- + +### GET + +URL: `/schema//tables/
/rows/` + +You can use this part to get information from the database. + +You can specify the following parameters in the url: + +: - `columns (List)` List of selected columns, e.g. `id,name` + + - + + `where (List)` List of where clauses, e.g. `id+OPERATOR+1+CONNECTOR+name+OPERATOR+georg` + + : - OPERATORS could be EQUAL, GREATER, LOWER, NOTEQUAL, + NOTGREATER, NOTLOWER + - CONNECTORS could be AND, OR + + - `orderby (List)` List of order columns, e.g. `name,code` + + - `limit (Number)` Number of displayed items, e.g. `100` + + - `offset (Number)` Number of offset from start, e.g. `10` + +Deprecated Stuff +================ + +Create a table +-------------- + +### Dictionary structure + +schema (String) + +: Specifies the schema name the table should be created in. If this + schema does not exist it will be created. + +table (String) + +: Specifies the name of the table to be created. + +fields (List) + +: List specifying the columns of the new table (see [Field + specification](#field-specification)). + +constraints (List) + +: List of additional constraints (see [Constraint + specification](#constraint-specification)). + +### Field specification + +name (String) + +: Name of the field + +type (String) + +: Name of a valid [Postgresql + type](https://www.postgresql.org/docs/8.4/static/datatype.html) + +pk (Bool) + +: Specifies whether this column is a primary key. Be aware of[^1] + +### Constraint specification + +Args: + +: + + name (String) + + : Type of constraint. Possible values: + + > - `fk` (see [Foreign key + > specification](#foreign-key-specification)) + + constraint (Dictionary) + + : Dictionary as specified by the foreign key. + +### Foreign key specification + +schema (String) + +: Name of the schema the referenced table is stored in + +table (String) + +: Name of the referenced table + +field (String) + +: Name of the referenced column + +on\_delete (String) + +: Specifies the behaviour if this field is deleted. Possible values: + + > - `cascade` + > - `no action` + > - `restrict` + > - `set null` + > - `set default` + +Insert data +----------- + +schema (String) + +: Specifies the schema name the table should be created in. If this + schema does not exist it will be created. + +table (String) + +: Specifies the name of the table to be created. + +fields (List) + +: List specifying the column names the date should be inserted in. + +values (List) + +: Each element is a list of values that should be inserted. The number + of elements must match the number of fields. + +returning (Bool) + +: An expression that is evaluated and returned as result. If this + entry is present the result of this expression is returned as in + [Select Data](#select-data). + +Select data +----------- + +all (Bool) + +: Specifies whether all rows should be returned (default) + +distinct (Bool) + +: Specifies whether only unique rows should be returned + +fields (List) + +: The list of columns that should be returned (see + [select\_field\_spec]()) + +where (List) + +: The list of condition that should be considered (see + [select\_condition\_spec]()) + +limit (Integer or \'all\') + +: Specifies how many results should be returned. If \'all\' is set all + matching rows will be returned (default). + +offset (Integer) + +: Specifies how many entries should be skipped before returning data + +Binding the API to python +------------------------- + +::: api.views + +::: api.actions + +[^1]: The OEP is currently only supporting a non-compound integer + primary key labeled \'id\'. 
Violation of this constraint might
+    render the OEP unable to display the data stored in this table.
diff --git a/docs/base/base_converted.md b/docs/base/base_converted.md
new file mode 100644
index 000000000..77ab4cf5f
--- /dev/null
+++ b/docs/base/base_converted.md
@@ -0,0 +1,13 @@
+# Base - Navigation Bar
+
+
+The **Navigation Bar** on top of the page is visible on all pages. On
+wide screens all elements are shown next to each other. On small screens
+the bar can be accessed on the \"hamburger icon\".
+
+The navigation bar is done in `base/templates/base`:
+
+- `base.html`
+- `base-wide.html`
+
+It is either a link or a submenu with individual links.
diff --git a/docs/dataedit/index.md b/docs/dataedit/index.md
new file mode 100644
index 000000000..100c873a0
--- /dev/null
+++ b/docs/dataedit/index.md
@@ -0,0 +1 @@
+# Database visualisation
diff --git a/docs/dataedit/views.md b/docs/dataedit/views.md
index 3732ba478..ca95baddf 100644
--- a/docs/dataedit/views.md
+++ b/docs/dataedit/views.md
@@ -1,3 +1,3 @@
-# Test
+# Database visualisation
 
 ::: dataedit.views
\ No newline at end of file
diff --git a/docs/error/error_converted.md b/docs/error/error_converted.md
new file mode 100644
index 000000000..cab8f8d87
--- /dev/null
+++ b/docs/error/error_converted.md
@@ -0,0 +1,25 @@
+# Errors and Response Status Codes
+
+The OEP returns standard HTTP status codes. Not all of them are errors. There is a full list on Wikipedia (https://en.wikipedia.org/wiki/List_of_HTTP_status_codes).
+The ones you may encounter more frequently are:
+
+- `200` **OK** This is the standard response for successful HTTP
+  requests. When using the API to query a table, the OEP will return
+  this status if the table does indeed exist.
+- `201` **Created** The OEP will return this e.g. when you have
+  successfully created a table.
+- `400` **Bad Request** The OEP cannot process the request, because
+  of a malformed query, use of an unsupported syntax or some other
+  faulty user input.
+- `403` **Forbidden** The OEP understands the request, but the
+  current user is not allowed to perform the action. This may be the
+  case when you try to write to a table that is in a different
+  schema than model_draft, you did not log in for a certain action,
+  or you did not provide your key for a data upload.
+- `404` **Not Found** The requested resource or page does not exist
+  or is not available.
+- `500` **Internal Server Error** Something unexpected happened and
+  no more specific message is suitable. These should be rare, but if
+  they do occur, a developer will receive a mail about it, so they
+  can try to fix the issue.
+
diff --git a/docs/index.md b/docs/index.md
index 000ea3455..9e3ac6697 100644
--- a/docs/index.md
+++ b/docs/index.md
@@ -1,17 +1,83 @@
-# Welcome to MkDocs
-
-For full documentation visit [mkdocs.org](https://www.mkdocs.org).
-
-## Commands
-
-* `mkdocs new [dir-name]` - Create a new project.
-* `mkdocs serve` - Start the live-reloading docs server.
-* `mkdocs build` - Build the documentation site.
-* `mkdocs -h` - Print help message and exit. +Mission statement +----------------- -## Project layout +The transition to renewable energy sources is one of the huge goals of +the last few decades. Whilst conventional energy generation provides a +constant, generally available source of electricity, heat and so on, our +environment pays a toll. Contrary, renewable energy generation is less +environmentally demanding but more financially expensive or just locally +or inconsistently available. Guaranteeing a steady and reliable, yet +sustainable supply of energy requires still a lot of thorough research. - mkdocs.yml # The configuration file. - docs/ - index.md # The documentation homepage. - ... # Other markdown pages, images and other files. +Expansion of the energy grid might imply measures that must be +communicable in a transparent way. Hence, results from research of +energy system studies should be publicly available and reproducible. +This raises the need for publicly available data sources. + +Landing page: index +------------------- + +The landing page is programmed in **index.html**. It contains a heading, +the main modules in boxes, and further information. + +App: dataedit +------------- + +One aim of the Open Energy Platform is the visual and understandable +presentation of such datasets. The underlying OpenEnergyDatabase (OEDB) +stores datasets of different open-data projects. The visual presentation +is implemented in the **dataedit** app. + +::: dataedit + +App: api +-------- + +The data stored in the OEDB is also used in several projects. In order +to ease the access to required datasets the OEP provides a RESTful +HTTP-interface in the **api** app: + +::: api + + +App: modelview +-------------- + +Researchers or interested developers that just entered this field might +be interested in an overview which open energy models already exists. +This data is collected in so called fact sheets. 
Modellers can look +through these, add their own models or enhance existing descriptions +using the forms definied in the **modelview** app + +::: modelview + +Other apps are: + +App: login +---------- + +::: login + +RDF Factsheets +-------------- + +::: api + +Indices and tables +================== + +- `genindex`{.interpreted-text role="ref"} +- `modindex`{.interpreted-text role="ref"} +- `search`{.interpreted-text role="ref"} diff --git a/docs/login/login_converted.md b/docs/login/login_converted.md new file mode 100644 index 000000000..7ff27de28 --- /dev/null +++ b/docs/login/login_converted.md @@ -0,0 +1,5 @@ +# User management + + +::: login.views + diff --git a/docs/modelview/modelview_converted.md b/docs/modelview/modelview_converted.md new file mode 100644 index 000000000..0b9139c49 --- /dev/null +++ b/docs/modelview/modelview_converted.md @@ -0,0 +1,3 @@ +# Factsheet handler + +::: modelview.views From 6a6b364f0db55efbf68e91b6e58b622aadb7fb59 Mon Sep 17 00:00:00 2001 From: Jonas Huber Date: Wed, 19 Jul 2023 13:46:19 +0200 Subject: [PATCH 6/7] activate search plugin an add draft navigation structure (I assume this structure will be reworked again) #1277 --- docs/api/api_converted.md | 313 -------------------------- docs/base/base_converted.md | 13 -- docs/dataedit/views.md | 2 +- docs/modelview/modelview_converted.md | 3 - mkdocs.yml | 52 ++++- 5 files changed, 50 insertions(+), 333 deletions(-) delete mode 100644 docs/api/api_converted.md delete mode 100644 docs/base/base_converted.md delete mode 100644 docs/modelview/modelview_converted.md diff --git a/docs/api/api_converted.md b/docs/api/api_converted.md deleted file mode 100644 index 599377032..000000000 --- a/docs/api/api_converted.md +++ /dev/null @@ -1,313 +0,0 @@ -Data interface (REST) -===================== - -Data Structures ---------------- - -### Constraint Definition - -constraint\_definition (Dictonary) - -: - - Specifies a definition of a constraint. - - : - `action` Action of constraint (e.g. ADD, DROP) - - `constraint_type` Type of constraint (e.g. UNIQUE, PRIMARY - KEY, FOREIGN KEY) - - `constraint_name` Name of constraint. - - `constraint_parameter` Parameter of constraint. - - `reference_table` Name of reference table, can be None. - - `reference_column` Name of reference column, can be None. - -### Column Definition - -column\_definition (Dictonary) - -: - - Specifies a definition of a column. - - : - `name` Name of column. - - `new_name` New name of column, can be None. - - `data_type` New datatype of column, can be None. - - `is_nullable` New null value of column, can be None. - - `character_maximum_length` New data length of column, can be - None. - -### Response Definition - -response\_dictonary (Dictonary) - -: - - Describes the result of an api action. - - : - `success (Boolean)` Result of Action - - `error (String)` Error Message - - `http_status (Integer)` HTTP status code - () - -Table (RESTful) ---------------- - -URL: /schema/{schema}/table/{table} - -### GET - -Reference needed. - -### PUT - -Creates a new table in database. JSON should contain a constraint -definition array and a column definition array. 
- -Example: - -``` json -{ - "constraints": [ - { - "constraint_type": "FOREIGN KEY", - "constraint_name": "fkey_schema_table_database_id", - "constraint_parameter": "database_id", - "reference_table": "example.table", - "reference_column": "database_id_ref" - }, - { - "constraint_type": "PRIMARY KEY", - "constraint_name": "pkey_schema_table_id", - "constraint_parameter": "id", - "reference_table": null, - "reference_column": null - } - ], - "columns": [ - { - "name": "id", - "data_type": "int", - "is_nullable": "YES", - "character_maximum_length": null - }, - { - "name": "name", - "data_type": "character varying", - "is_nullable": "NO", - "character_maximum_length": 50 - } - ] -} -``` - -### POST - -JSON should contain a column or constraint definition. Additionally -`action` and `type` should be mentioned. - -- `type` can be `constraint` or `column`. -- `action` can be `ADD` and `DROP`. -- `constraint_type` can be every constraint type supported by - Postgres. -- `reference_table` and `reference_column` can be null, if not - necessary. - -Example: - -``` json -{ - "type" : "constraint", - "action": "ADD", - "constraint_type": "FOREIGN KEY", - "constraint_name": "fkey_label", - "constraint_parameter": "changed_name", - "reference_table" : "reference.group_types", - "reference_column" : "label" -} - -{ - "type" : "column", - "name" : "test_name", - "newname" : "changed_name", - "data_type": "character varying", - "is_nullable": "NO", - "character_maximum_length": 50 -} -``` - -Rows (RESTful) --------------- - -### GET - -URL: `/schema//tables/
/rows/` - -You can use this part to get information from the database. - -You can specify the following parameters in the url: - -: - `columns (List)` List of selected columns, e.g. `id,name` - - - - - `where (List)` List of where clauses, e.g. `id+OPERATOR+1+CONNECTOR+name+OPERATOR+georg` - - : - OPERATORS could be EQUAL, GREATER, LOWER, NOTEQUAL, - NOTGREATER, NOTLOWER - - CONNECTORS could be AND, OR - - - `orderby (List)` List of order columns, e.g. `name,code` - - - `limit (Number)` Number of displayed items, e.g. `100` - - - `offset (Number)` Number of offset from start, e.g. `10` - -Deprecated Stuff -================ - -Create a table --------------- - -### Dictionary structure - -schema (String) - -: Specifies the schema name the table should be created in. If this - schema does not exist it will be created. - -table (String) - -: Specifies the name of the table to be created. - -fields (List) - -: List specifying the columns of the new table (see [Field - specification](#field-specification)). - -constraints (List) - -: List of additional constraints (see [Constraint - specification](#constraint-specification)). - -### Field specification - -name (String) - -: Name of the field - -type (String) - -: Name of a valid [Postgresql - type](https://www.postgresql.org/docs/8.4/static/datatype.html) - -pk (Bool) - -: Specifies whether this column is a primary key. Be aware of[^1] - -### Constraint specification - -Args: - -: - - name (String) - - : Type of constraint. Possible values: - - > - `fk` (see [Foreign key - > specification](#foreign-key-specification)) - - constraint (Dictionary) - - : Dictionary as specified by the foreign key. - -### Foreign key specification - -schema (String) - -: Name of the schema the referenced table is stored in - -table (String) - -: Name of the referenced table - -field (String) - -: Name of the referenced column - -on\_delete (String) - -: Specifies the behaviour if this field is deleted. Possible values: - - > - `cascade` - > - `no action` - > - `restrict` - > - `set null` - > - `set default` - -Insert data ------------ - -schema (String) - -: Specifies the schema name the table should be created in. If this - schema does not exist it will be created. - -table (String) - -: Specifies the name of the table to be created. - -fields (List) - -: List specifying the column names the date should be inserted in. - -values (List) - -: Each element is a list of values that should be inserted. The number - of elements must match the number of fields. - -returning (Bool) - -: An expression that is evaluated and returned as result. If this - entry is present the result of this expression is returned as in - [Select Data](#select-data). - -Select data ------------ - -all (Bool) - -: Specifies whether all rows should be returned (default) - -distinct (Bool) - -: Specifies whether only unique rows should be returned - -fields (List) - -: The list of columns that should be returned (see - [select\_field\_spec]()) - -where (List) - -: The list of condition that should be considered (see - [select\_condition\_spec]()) - -limit (Integer or \'all\') - -: Specifies how many results should be returned. If \'all\' is set all - matching rows will be returned (default). - -offset (Integer) - -: Specifies how many entries should be skipped before returning data - -Binding the API to python -------------------------- - -::: api.views - -::: api.actions - -[^1]: The OEP is currently only supporting a non-compound integer - primary key labeled \'id\'. 
Violation of this constraint might - render the OEP unable to display the data stored in this table. diff --git a/docs/base/base_converted.md b/docs/base/base_converted.md deleted file mode 100644 index 77ab4cf5f..000000000 --- a/docs/base/base_converted.md +++ /dev/null @@ -1,13 +0,0 @@ -# Base - Navigation Bar - - -The **Navigation Bar** on top of the page is visible on all pages. On -wide screens all elements are shown next to each other. On small screens -the bar can be accessed on the \"hamburger icon\". - -The navigation bar is done in `base/templates/base`: - -- `base.html` -- `base-wide.html` - -It is either a link or a submenu with individual links. diff --git a/docs/dataedit/views.md b/docs/dataedit/views.md index ca95baddf..1e0e9c1bb 100644 --- a/docs/dataedit/views.md +++ b/docs/dataedit/views.md @@ -1,3 +1,3 @@ -# Database visualisation +# Views ::: dataedit.views \ No newline at end of file diff --git a/docs/modelview/modelview_converted.md b/docs/modelview/modelview_converted.md deleted file mode 100644 index 0b9139c49..000000000 --- a/docs/modelview/modelview_converted.md +++ /dev/null @@ -1,3 +0,0 @@ -# Factsheet handler - -::: modelview.views diff --git a/mkdocs.yml b/mkdocs.yml index eb6a81c1a..a79b1f123 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -34,14 +34,17 @@ theme: - navigation.tracking - navigation.top - toc.follow + - search.suggest + - search.share # extensions markdown_extensions: - admonition - toc: - permalink: 💙 + permalink: "#" plugins: +- search: - mkdocstrings: # Customization @@ -54,5 +57,48 @@ extra_css: # Navigation nav: - Home: index.md - - Dataedit: - - dataedit/views.md \ No newline at end of file + - Installation & Setup: installation.md + - Django Application: + - Overview: django/index.md + - Base: + - base/index.md + - Views: base/views.md + - Forms: base/forms.md + - Application Programming Interface: + - api/index.md + - Views: api/views.md + - Models: api/models.md + - Actions: api/actions.md + - Connection: api/connection.md + - Encode: api/encode.md + - Parser: api/parser.md + - References: api/references.md + - Sessions: api/sessions.md + - Error: api/error.md + - Dataedit: + - dataedit/index.md + - Views: dataedit/views.md + - Models: dataedit/models.md + - Structures: dataedit/structures.md + - Forms: dataedit/forms.md + - Helper: dataedit/helper.md + - Modelview: + - modelview/index.md + - Views: modelview/views.md + - Models: modelview/models.md + - Forms: modelview/forms.md + - Login: + - login/index.md + - Open Energy Ontologie Viewer: + - oeo_viewer/index.md + - Views: oeo_viewer/views.md + - Models: oeo_viewer/models.md + - Oeplatform: + - oeplatform/index.md + - Ontology: + - ontology/index.md + - Views: ontology/views.md + - Models: ontology/models.md + - Forms: ontology/forms.md + - Theming: + - theming/index.md \ No newline at end of file From 2bac78abb2720708578de78e29d86eeab094fd5e Mon Sep 17 00:00:00 2001 From: Jonas Huber Date: Wed, 19 Jul 2023 13:47:12 +0200 Subject: [PATCH 7/7] add docs files for each django app (incomplete) #1277 --- docs/api/actions.md | 3 + docs/api/connection.md | 3 + docs/api/encode.md | 3 + docs/api/error.md | 3 + docs/api/index.md | 167 ++++++++++++++++++++++++++++++++++++ docs/api/models.md | 2 + docs/api/parser.md | 3 + docs/api/references.md | 3 + docs/api/sessions.md | 3 + docs/api/views.md | 3 + docs/base/forms.md | 3 + docs/base/index.md | 13 +++ docs/base/views.md | 3 + docs/dataedit/forms.md | 3 + docs/dataedit/helper.md | 3 + docs/dataedit/models.md | 3 + docs/dataedit/structures.md | 4 + 
docs/django/index.md | 1 + docs/docker/index.md | 1 + docs/error/index.md | 1 + docs/installation.md | 1 + docs/login/index.md | 1 + docs/modelview/forms.md | 3 + docs/modelview/index.md | 1 + docs/modelview/models.md | 3 + docs/modelview/rdf/index.md | 0 docs/modelview/views.md | 3 + docs/oeo_viewer/index.md | 1 + docs/oeplatform/index.md | 1 + docs/ontology/index.md | 1 + docs/theming/index.md | 1 + 31 files changed, 244 insertions(+) create mode 100644 docs/api/actions.md create mode 100644 docs/api/connection.md create mode 100644 docs/api/encode.md create mode 100644 docs/api/error.md create mode 100644 docs/api/index.md create mode 100644 docs/api/models.md create mode 100644 docs/api/parser.md create mode 100644 docs/api/references.md create mode 100644 docs/api/sessions.md create mode 100644 docs/api/views.md create mode 100644 docs/base/forms.md create mode 100644 docs/base/index.md create mode 100644 docs/base/views.md create mode 100644 docs/dataedit/forms.md create mode 100644 docs/dataedit/helper.md create mode 100644 docs/dataedit/models.md create mode 100644 docs/dataedit/structures.md create mode 100644 docs/django/index.md create mode 100644 docs/docker/index.md create mode 100644 docs/error/index.md create mode 100644 docs/installation.md create mode 100644 docs/login/index.md create mode 100644 docs/modelview/forms.md create mode 100644 docs/modelview/index.md create mode 100644 docs/modelview/models.md create mode 100644 docs/modelview/rdf/index.md create mode 100644 docs/modelview/views.md create mode 100644 docs/oeo_viewer/index.md create mode 100644 docs/oeplatform/index.md create mode 100644 docs/ontology/index.md create mode 100644 docs/theming/index.md diff --git a/docs/api/actions.md b/docs/api/actions.md new file mode 100644 index 000000000..de29167e3 --- /dev/null +++ b/docs/api/actions.md @@ -0,0 +1,3 @@ +# Actions + +::: api.actions \ No newline at end of file diff --git a/docs/api/connection.md b/docs/api/connection.md new file mode 100644 index 000000000..49bac0408 --- /dev/null +++ b/docs/api/connection.md @@ -0,0 +1,3 @@ +# Connection + +::: api.connection \ No newline at end of file diff --git a/docs/api/encode.md b/docs/api/encode.md new file mode 100644 index 000000000..657f30224 --- /dev/null +++ b/docs/api/encode.md @@ -0,0 +1,3 @@ +# Encoder + +::: api.encode \ No newline at end of file diff --git a/docs/api/error.md b/docs/api/error.md new file mode 100644 index 000000000..3fab0ca8c --- /dev/null +++ b/docs/api/error.md @@ -0,0 +1,3 @@ +# Error + +::: api.error \ No newline at end of file diff --git a/docs/api/index.md b/docs/api/index.md new file mode 100644 index 000000000..fcca1a2f7 --- /dev/null +++ b/docs/api/index.md @@ -0,0 +1,167 @@ +Data interface (REST) +===================== + +Data Structures +--------------- + +### Constraint Definition + +constraint\_definition (Dictonary) + +: + + Specifies a definition of a constraint. + + : - `action` Action of constraint (e.g. ADD, DROP) + - `constraint_type` Type of constraint (e.g. UNIQUE, PRIMARY + KEY, FOREIGN KEY) + - `constraint_name` Name of constraint. + - `constraint_parameter` Parameter of constraint. + - `reference_table` Name of reference table, can be None. + - `reference_column` Name of reference column, can be None. + +### Column Definition + +column\_definition (Dictonary) + +: + + Specifies a definition of a column. + + : - `name` Name of column. + - `new_name` New name of column, can be None. + - `data_type` New datatype of column, can be None. 
    - `is_nullable` New null value of column, can be None.
    - `character_maximum_length` New data length of column, can be None.

### Response Definition

`response_dictionary` (Dictionary)

:   Describes the result of an API action.

    - `success (Boolean)` Result of the action
    - `error (String)` Error message
    - `http_status (Integer)` HTTP status code

Table (RESTful)
---------------

URL: /schema/{schema}/table/{table}

### GET

Reference needed.

### PUT

Creates a new table in the database. The JSON payload should contain a constraint definition array and a column definition array.

Example:

``` json
{
    "constraints": [
        {
            "constraint_type": "FOREIGN KEY",
            "constraint_name": "fkey_schema_table_database_id",
            "constraint_parameter": "database_id",
            "reference_table": "example.table",
            "reference_column": "database_id_ref"
        },
        {
            "constraint_type": "PRIMARY KEY",
            "constraint_name": "pkey_schema_table_id",
            "constraint_parameter": "id",
            "reference_table": null,
            "reference_column": null
        }
    ],
    "columns": [
        {
            "name": "id",
            "data_type": "int",
            "is_nullable": "YES",
            "character_maximum_length": null
        },
        {
            "name": "name",
            "data_type": "character varying",
            "is_nullable": "NO",
            "character_maximum_length": 50
        }
    ]
}
```

### POST

The JSON payload should contain a column or constraint definition. In addition, `action` and `type` must be specified.

- `type` can be `constraint` or `column`.
- `action` can be `ADD` or `DROP`.
- `constraint_type` can be any constraint type supported by Postgres.
- `reference_table` and `reference_column` can be null if not needed.

Example:

``` json
{
    "type" : "constraint",
    "action": "ADD",
    "constraint_type": "FOREIGN KEY",
    "constraint_name": "fkey_label",
    "constraint_parameter": "changed_name",
    "reference_table" : "reference.group_types",
    "reference_column" : "label"
}

{
    "type" : "column",
    "name" : "test_name",
    "newname" : "changed_name",
    "data_type": "character varying",
    "is_nullable": "NO",
    "character_maximum_length": 50
}
```

Rows (RESTful)
--------------

### GET

URL: `/schema//tables//rows/`

Use this endpoint to retrieve rows from the database.

You can specify the following parameters in the URL:

- `columns (List)` List of selected columns, e.g. `id,name`
- `where (List)` List of where clauses, e.g. `id+OPERATOR+1+CONNECTOR+name+OPERATOR+georg`
    - OPERATOR can be EQUAL, GREATER, LOWER, NOTEQUAL, NOTGREATER or NOTLOWER
    - CONNECTOR can be AND or OR
- `orderby (List)` List of order columns, e.g. `name,code`
- `limit (Number)` Number of displayed items, e.g. `100`
- `offset (Number)` Number of offset from start, e.g. `10`

[^1]: The OEP currently only supports a non-compound integer primary key labeled 'id'. Violation of this constraint might render the OEP unable to display the data stored in this table.

diff --git a/docs/api/models.md b/docs/api/models.md new file mode 100644 index 000000000..5c5996efd --- /dev/null +++ b/docs/api/models.md @@ -0,0 +1,2 @@ +# Models +
diff --git a/docs/api/parser.md b/docs/api/parser.md new file mode 100644 index 000000000..7297a4081 --- /dev/null +++ b/docs/api/parser.md @@ -0,0 +1,3 @@ +# Parser + +::: api.parser \ No newline at end of file
diff --git a/docs/api/references.md b/docs/api/references.md new file mode 100644 index 000000000..d6a0aca0c --- /dev/null +++ b/docs/api/references.md @@ -0,0 +1,3 @@ +# References + +::: api.references \ No newline at end of file
diff --git a/docs/api/sessions.md b/docs/api/sessions.md new file mode 100644 index 000000000..2d2a882d8 --- /dev/null +++ b/docs/api/sessions.md @@ -0,0 +1,3 @@ +# Sessions + +::: api.sessions \ No newline at end of file
diff --git a/docs/api/views.md b/docs/api/views.md new file mode 100644 index 000000000..cbd8268df --- /dev/null +++ b/docs/api/views.md @@ -0,0 +1,3 @@ +# Views + +::: api.views
diff --git a/docs/base/forms.md b/docs/base/forms.md new file mode 100644 index 000000000..a4ca06bad --- /dev/null +++ b/docs/base/forms.md @@ -0,0 +1,3 @@ +# Forms + +::: base.forms \ No newline at end of file
diff --git a/docs/base/index.md b/docs/base/index.md new file mode 100644 index 000000000..77ab4cf5f --- /dev/null +++ b/docs/base/index.md @@ -0,0 +1,13 @@

# Base - Navigation Bar

The **Navigation Bar** on top of the page is visible on all pages. On wide screens all elements are shown next to each other; on small screens the bar can be accessed via the "hamburger icon".

The navigation bar is implemented in `base/templates/base`:

- `base.html`
- `base-wide.html`

Each entry in the navigation bar is either a link or a submenu with individual links.
diff --git a/docs/base/views.md b/docs/base/views.md new file mode 100644 index 000000000..f7f83617b --- /dev/null +++ b/docs/base/views.md @@ -0,0 +1,3 @@ +# Views + +::: base.views \ No newline at end of file
diff --git a/docs/dataedit/forms.md b/docs/dataedit/forms.md new file mode 100644 index 000000000..7e4dda8c3 --- /dev/null +++ b/docs/dataedit/forms.md @@ -0,0 +1,3 @@ +# Forms + +::: dataedit.forms \ No newline at end of file
diff --git a/docs/dataedit/helper.md b/docs/dataedit/helper.md new file mode 100644 index 000000000..bed11001d --- /dev/null +++ b/docs/dataedit/helper.md @@ -0,0 +1,3 @@ +# Helper + +::: dataedit.helper \ No newline at end of file
diff --git a/docs/dataedit/models.md b/docs/dataedit/models.md new file mode 100644 index 000000000..d9f72c3db --- /dev/null +++ b/docs/dataedit/models.md @@ -0,0 +1,3 @@ +# Models + +::: dataedit.models \ No newline at end of file
diff --git a/docs/dataedit/structures.md b/docs/dataedit/structures.md new file mode 100644 index 000000000..b9d663856 --- /dev/null +++ b/docs/dataedit/structures.md @@ -0,0 +1,4 @@ +# Structures +OEDB Datamodels + +::: dataedit.structures \ No newline at end of file
diff --git a/docs/django/index.md b/docs/django/index.md new file mode 100644 index 000000000..e9168dbb7 --- /dev/null +++ b/docs/django/index.md @@ -0,0 +1 @@ +# Hello World \ No newline at end of file
diff --git a/docs/docker/index.md b/docs/docker/index.md new file mode 100644 index 000000000..c597eaa5e --- /dev/null +++ b/docs/docker/index.md @@ -0,0 +1 @@ +# Docker
diff --git a/docs/error/index.md b/docs/error/index.md new file mode 100644 index 000000000..8a5473962 --- /dev/null +++ b/docs/error/index.md @@ -0,0 +1 @@ +# Error
diff --git a/docs/installation.md b/docs/installation.md new file mode 100644 index 000000000..b6bf8a50c --- /dev/null +++ b/docs/installation.md @@ -0,0 +1 @@ +# Install and set up the Open Energy Platform Application \ No newline at end of file
diff --git a/docs/login/index.md b/docs/login/index.md new file mode 100644 index 000000000..233487530 --- /dev/null +++ b/docs/login/index.md @@ -0,0 +1 @@ +# Login and User Management
diff --git a/docs/modelview/forms.md b/docs/modelview/forms.md new file mode 100644 index 000000000..7cb2fae39 --- /dev/null +++ b/docs/modelview/forms.md @@ -0,0 +1,3 @@ +# Forms + +::: modelview.forms \ No newline at end of file
diff --git a/docs/modelview/index.md b/docs/modelview/index.md new file mode 100644 index 000000000..dde2120ab --- /dev/null +++ b/docs/modelview/index.md @@ -0,0 +1 @@ +# Modelview
diff --git a/docs/modelview/models.md b/docs/modelview/models.md new file mode 100644 index 000000000..871796f12 --- /dev/null +++ b/docs/modelview/models.md @@ -0,0 +1,3 @@ +# Models + +::: modelview.models \ No newline at end of file
diff --git a/docs/modelview/rdf/index.md b/docs/modelview/rdf/index.md new file mode 100644 index 000000000..e69de29bb
diff --git a/docs/modelview/views.md b/docs/modelview/views.md new file mode 100644 index 000000000..0b9139c49 --- /dev/null +++ b/docs/modelview/views.md @@ -0,0 +1,3 @@ +# Factsheet handler + +::: modelview.views
diff --git a/docs/oeo_viewer/index.md b/docs/oeo_viewer/index.md new file mode 100644 index 000000000..8681f1937 --- /dev/null +++ b/docs/oeo_viewer/index.md @@ -0,0 +1 @@ +# Open Energy Ontology Viewer
diff --git a/docs/oeplatform/index.md b/docs/oeplatform/index.md new file mode 100644 index 000000000..5dc13854e --- /dev/null +++ b/docs/oeplatform/index.md @@ -0,0 +1 @@ +# Oeplatform configuration
diff --git
a/docs/ontology/index.md b/docs/ontology/index.md new file mode 100644 index 000000000..1c9617b71 --- /dev/null +++ b/docs/ontology/index.md @@ -0,0 +1 @@ +# Ontology diff --git a/docs/theming/index.md b/docs/theming/index.md new file mode 100644 index 000000000..6358a1220 --- /dev/null +++ b/docs/theming/index.md @@ -0,0 +1 @@ +# Theming