diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..7f0bfa3
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,101 @@
+# Byte-compiled / optimized / DLL files
+__pycache__/
+*.py[cod]
+*$py.class
+
+# C extensions
+*.so
+
+# Distribution / packaging
+.Python
+env/
+build/
+develop-eggs/
+dist/
+downloads/
+eggs/
+.eggs/
+lib/
+lib64/
+parts/
+sdist/
+var/
+wheels/
+*.egg-info/
+.installed.cfg
+*.egg
+
+# PyInstaller
+# Usually these files are written by a python script from a template
+# before PyInstaller builds the exe, so as to inject date/other infos into it.
+*.manifest
+*.spec
+
+# Installer logs
+pip-log.txt
+pip-delete-this-directory.txt
+
+# Unit test / coverage reports
+htmlcov/
+.tox/
+.coverage
+.coverage.*
+.cache
+nosetests.xml
+coverage.xml
+*.cover
+.hypothesis/
+
+# Translations
+*.mo
+*.pot
+
+# Django stuff:
+*.log
+local_settings.py
+
+# Flask stuff:
+instance/
+.webassets-cache
+
+# Scrapy stuff:
+.scrapy
+
+# Sphinx documentation
+docs/_build/
+
+# PyBuilder
+target/
+
+# Jupyter Notebook
+.ipynb_checkpoints
+
+# pyenv
+.python-version
+
+# celery beat schedule file
+celerybeat-schedule
+
+# SageMath parsed files
+*.sage.py
+
+# dotenv
+.env
+
+# virtualenv
+.venv
+venv/
+ENV/
+
+# Spyder project settings
+.spyderproject
+.spyproject
+
+# Rope project settings
+.ropeproject
+
+# mkdocs documentation
+/site
+
+# mypy
+.mypy_cache/
\ No newline at end of file
diff --git a/.vscode/settings.json b/.vscode/settings.json
new file mode 100644
index 0000000..f42ab7d
--- /dev/null
+++ b/.vscode/settings.json
@@ -0,0 +1,12 @@
+{
+ "python.linting.pylintEnabled": true,
+ "python.linting.flake8Enabled": false,
+ "python.linting.enabled": true,
+ "python.linting.pylintArgs": [
+ "--load-plugins",
+ "pylint_flask",
+ "pylint_flask_sqlalchemy",
+ ],
+ "python.pythonPath": "d:\\Software\\geomon\\.venv\\Scripts\\python.exe",
+ "python.envFile": "${workspaceFolder}/.env"
+}
\ No newline at end of file
diff --git a/__init__.py b/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/fb_models.py b/fb_models.py
new file mode 100644
index 0000000..f42628f
--- /dev/null
+++ b/fb_models.py
@@ -0,0 +1,66 @@
+'''
+Tutorial link: https://docs.sqlalchemy.org/en/latest/orm/tutorial.html
+Sqlalchemy version: 1.2.15
+Python version: 3.7
+'''
+import os
+import datetime
+from sqlalchemy import (create_engine, Column, Integer,
+ String, ForeignKey, Time, Date)
+from sqlalchemy.ext.declarative import declarative_base
+from sqlalchemy.orm import sessionmaker, relationship
+import sqlalchemy.orm.session
+
+Base = declarative_base()
+
+
+class Catena(Base):
+ """ catena class """
+ __tablename__ = 'CATENE'
+
+ id = Column('CHIAVE', Integer, primary_key=True)
+ name = Column('NOME', String)
+ observations = relationship('FbObservation')
+
+ def __repr__(self): # optional
+ return f'Catena {self.name}'
+
+
+class FbObservation(Base):
+ """ FbObservation class """
+ __tablename__ = 'DATI_ACQUISITI'
+
+ # id = Column(Integer, primary_key=True) # obligatory
+ pitch = Column('PITCH', String)
+ roll = Column('ROLL', String)
+ ora = Column('ORA', Time, primary_key=True)
+ sensore = Column('SENSORE', Integer, primary_key=True)
+ data = Column('DATA', Date)
+ temperature = Column('TEMPERATURA', String)
+
+ chiave_id = Column('CATENA', Integer, ForeignKey('CATENE.CHIAVE'))
+ catena = relationship("Catena", lazy="joined", foreign_keys=[chiave_id])
+
+ def __repr__(self): # optional
+ return f'FbObservation {self.roll}'
+
+ @property
+ def result_time(self):
+ ''' Create a datetime object '''
+ start_datetime = datetime.datetime.combine(self.data, self.ora)
+ return start_datetime
+
+
+def create_session() -> sqlalchemy.orm.session:
+ """Create and return a SQLAlchemy session connected to the Firebird database."""
+ # engine = create_engine('sqlite:///:memory:')
+ db_user = os.environ.get("FDB_DBUSER")
+ db_password = os.environ.get("FDB_DBPASSWORD")
+ db_url = os.environ.get("FDB_DBURL")
+ engine = create_engine(
+ "firebird+fdb://" + db_user + ":" + db_password + "@" + db_url)
+ session_maker = sessionmaker(bind=engine)
+ session = session_maker()
+
+ Base.metadata.create_all(engine)
+ return session
diff --git a/firebird_to_postgis_export.py b/firebird_to_postgis_export.py
new file mode 100644
index 0000000..32f5bec
--- /dev/null
+++ b/firebird_to_postgis_export.py
@@ -0,0 +1,206 @@
+""" import firebird, export to postgresql """
+# -*- coding: utf-8 -*-
+
+from typing import List
+import uuid
+from sqlalchemy.orm import session
+from sqlalchemy import func, desc, asc
+from fb_models import (create_session, FbObservation, Catena)
+from pg_models import (create_pg_session, Dataset, Observation, Procedure, Phenomenon, Platform)
+
+def main():
+ """
+ Main function.
+
+ """
+
+ # parameter:
+ # sensor id in firebird db:
+ sensor_id = 3
+ # name of project area in firebird db
+ feature_of_interest = 'Sibratsgfall'
+ # sensor name in postgis db
+ sensor = 'sibratsgfall_3'
+ platform = 'Sibratsgfall'
+
+ # #gschliefgraben_0
+ # sensor_id = 2
+ # # name of project area in firebird db
+ # feature_of_interest = 'GSA01A-033-0909'
+ # # sensor name in postgis db
+ # sensor = 'gschliefgraben_2'
+
+ # #laakirchen_0
+ # sensor_id = 10
+ # # name of project area in firebird db
+ # feature_of_interest = 'GSA02B-007-0911'
+ # platform = 'laakirchen'
+ # # sensor name in postgis db
+ # sensor = 'laakirchen_10'
+
+ firebird_session: session = create_session()
+ # db_observation = session.query(Observation) \
+ # .filter_by(name='John Snow').first()
+ query = firebird_session.query(FbObservation).join(FbObservation.catena) \
+ .filter(FbObservation.sensore == sensor_id) \
+ .filter(Catena.name == feature_of_interest)
+ # feature_of_interest = query.statement.compile(dialect=firebird.dialect())
+ firebird_observations: List[FbObservation] = query.all()
+ firebird_session.close()
+
+ pg_session: session = create_pg_session()
+ # pg_datasets: List[Dataset] = pg_query.all()
+ pg_query = pg_session.query(Dataset) \
+ .join(Procedure) \
+ .join(Phenomenon) \
+ .filter(Procedure.sta_identifier == sensor.lower())
+ # .join(Platform).all() \
+
+
+ # roll_dataset = [x for x in pg_datasets if x.phenomenon.sta_identifier == "Roll"]
+ roll_dataset = pg_query.filter(Phenomenon.sta_identifier == "Roll").first()
+ roll_dataset.is_published = 1
+ roll_dataset.is_hidden = 0
+ roll_dataset.dataset_type = "timeseries"
+ roll_dataset.observation_type = "simple"
+ roll_dataset.value_type = "quantity"
+ slope_dataset = pg_query.filter(
+ Phenomenon.sta_identifier == "Slope").first()
+ slope_dataset.is_published = 1
+ slope_dataset.is_hidden = 0
+ slope_dataset.dataset_type = "timeseries"
+ slope_dataset.observation_type = "simple"
+ slope_dataset.value_type = "quantity"
+ temperature_dataset = pg_query.filter(
+ Phenomenon.sta_identifier == "InSystemTemperature").first()
+ temperature_dataset.is_published = 1
+ temperature_dataset.is_hidden = 0
+ temperature_dataset.dataset_type = "timeseries"
+ temperature_dataset.observation_type = "simple"
+ temperature_dataset.value_type = "quantity"
+ pg_session.commit()
+
+ max_id = pg_session.query(func.max(Observation.id)).scalar()
+ for fb_observation in firebird_observations:
+ # print(fb_observation.catena.name)
+ if(fb_observation.roll is not None and roll_dataset is not None):
+ max_id = max_id + 1
+ pg_roll_observation = Observation(
+ id=max_id,
+ value_type='quantity',
+ sampling_time_start=fb_observation.result_time,
+ sampling_time_end=fb_observation.result_time,
+ result_time=fb_observation.result_time,
+ sta_identifier=str(uuid.uuid4()),
+ value_quantity=fb_observation.roll
+ )
+ roll_dataset.observations.append(pg_roll_observation)
+ if(fb_observation.pitch is not None and slope_dataset is not None):
+ max_id = max_id + 1
+ pg_slope_observation = Observation(
+ id=max_id,
+ value_type='quantity',
+ sampling_time_start=fb_observation.result_time,
+ sampling_time_end=fb_observation.result_time,
+ result_time=fb_observation.result_time,
+ sta_identifier=str(uuid.uuid4()),
+ value_quantity=fb_observation.pitch
+ )
+ slope_dataset.observations.append(pg_slope_observation)
+ if(fb_observation.temperature is not None and temperature_dataset is not None):
+ max_id = max_id + 1
+ pg_temperature_observation = Observation(
+ id=max_id,
+ value_type='quantity',
+ sampling_time_start=fb_observation.result_time,
+ sampling_time_end=fb_observation.result_time,
+ result_time=fb_observation.result_time,
+ sta_identifier=str(uuid.uuid4()),
+ value_quantity=fb_observation.temperature
+ )
+ temperature_dataset.observations.append(pg_temperature_observation)
+ # commit observations:
+ pg_session.commit()
+
+ last_roll_observation = pg_session.query(Observation) \
+ .filter(Observation.fk_dataset_id == roll_dataset.id) \
+ .order_by(desc('sampling_time_start')) \
+ .first()
+ if last_roll_observation is not None:
+ roll_dataset.last_time = last_roll_observation.sampling_time_start
+ roll_dataset.last_value = last_roll_observation.value_quantity
+ roll_dataset.fk_last_observation_id = last_roll_observation.id
+
+ last_slope_observation = pg_session.query(Observation) \
+ .filter(Observation.fk_dataset_id == slope_dataset.id) \
+ .order_by(desc('sampling_time_start')) \
+ .first()
+ if last_slope_observation is not None:
+ slope_dataset.last_time = last_slope_observation.sampling_time_start
+ slope_dataset.last_value = last_slope_observation.value_quantity
+ slope_dataset.fk_last_observation_id = last_slope_observation.id
+
+ last_temperature_observation = pg_session.query(Observation) \
+ .filter(Observation.fk_dataset_id == temperature_dataset.id) \
+ .order_by(desc('sampling_time_start')) \
+ .first()
+ if last_temperature_observation is not None:
+ temperature_dataset.last_time = last_temperature_observation.sampling_time_start
+ temperature_dataset.last_value = last_temperature_observation.value_quantity
+ temperature_dataset.fk_last_observation_id = last_temperature_observation.id
+
+ first_roll_observation = pg_session.query(Observation) \
+ .filter(Observation.fk_dataset_id == roll_dataset.id) \
+ .order_by(asc('sampling_time_start')) \
+ .first()
+ if first_roll_observation is not None:
+ roll_dataset.first_time = first_roll_observation.sampling_time_start
+ roll_dataset.first_value = first_roll_observation.value_quantity
+ roll_dataset.fk_first_observation_id = first_roll_observation.id
+
+ first_slope_observation = pg_session.query(Observation) \
+ .filter(Observation.fk_dataset_id == slope_dataset.id) \
+ .order_by(asc('sampling_time_start')) \
+ .first()
+ if first_slope_observation is not None:
+ slope_dataset.first_time = first_slope_observation.sampling_time_start
+ slope_dataset.first_value = first_slope_observation.value_quantity
+ slope_dataset.fk_first_observation_id = first_slope_observation.id
+
+ first_temperature_observation = pg_session.query(Observation) \
+ .filter(Observation.fk_dataset_id == temperature_dataset.id) \
+ .order_by(asc('sampling_time_start')) \
+ .first()
+ if first_temperature_observation is not None:
+ temperature_dataset.first_time = first_temperature_observation.sampling_time_start
+ temperature_dataset.first_value = first_temperature_observation.value_quantity
+ temperature_dataset.fk_first_observation_id = first_temperature_observation.id
+
+ platform_exists = pg_session.query(Platform.id).filter_by(
+ name=platform.lower()).scalar() is not None
+ if not platform_exists:
+ sensor_platform = Platform()
+ max_id = pg_session.query(func.max(Platform.id)).scalar()
+ sensor_platform.id = max_id + 1
+ sensor_platform.sta_identifier = platform.lower()
+ sensor_platform.identifier = platform.lower()
+ sensor_platform.name = platform.lower()
+ slope_dataset.platform = sensor_platform
+ roll_dataset.platform = sensor_platform
+ temperature_dataset.platform = sensor_platform
+ else:
+ sensor_platform = pg_session.query(Platform.id) \
+ .filter(Platform.name == platform.lower()) \
+ .first()
+ slope_dataset.fk_platform_id = sensor_platform.id
+ roll_dataset.fk_platform_id = sensor_platform.id
+ temperature_dataset.fk_platform_id = sensor_platform.id
+
+ # commit dataset changes:
+ pg_session.commit()
+ pg_session.close()
+
+
+# -----------------------------------------------------------------------------
+if __name__ == "__main__":
+ main()
diff --git a/insert_sensor/InsertGschliefgraben.xml b/insert_sensor/InsertGschliefgraben.xml
new file mode 100644
index 0000000..5b303bc
--- /dev/null
+++ b/insert_sensor/InsertGschliefgraben.xml
@@ -0,0 +1,93 @@
+
+ {procedure_identifier}
+
+
+
+
+ shortName
+ {procedure_name}
+
+
+
+
+
+
+
+
+ {offering_label}
+ {offering_name}
+
+
+
+
+
+
+ featuresOfInterest
+
+
+ {feature_id}
+ {feature_name}
+
+
+
+
+ {coordinates}
+
+
+
+
+
+
+
+
+
+
+ Slope
+
+
+
+
+
+ Roll
+
+
+
+
+
+ InSystemTemperature
+
+
+
+
+
+
+
+
+
+
+ {cordX}
+
+
+
+
+
+ {cordY}
+
+
+
+
+
+ {height}
+
+
+
+
+
\ No newline at end of file
diff --git a/insert_sensor/InsertSibratgfaell.xml b/insert_sensor/InsertSibratgfaell.xml
new file mode 100644
index 0000000..62723f5
--- /dev/null
+++ b/insert_sensor/InsertSibratgfaell.xml
@@ -0,0 +1,108 @@
+
+ {procedure_identifier}
+
+
+
+
+ shortName
+ {procedure_name}
+
+
+
+
+
+
+
+
+ {offering_label}
+ {offering_name}
+
+
+
+
+
+
+ featuresOfInterest
+
+
+ {feature_id}
+ {feature_name}
+
+
+
+
+ {coordinates}
+
+
+
+
+
+
+
+
+
+
+ Slope
+
+
+
+
+
+ Roll
+
+
+
+
+
+ InSystemTemperature
+
+
+
+
+
+
+
+
+
+ Test parmeter
+
+
+
+ 0.01 10.0
+
+
+
+
+
+
+
+
+
+
+
+ {cordX}
+
+
+
+
+
+ {cordY}
+
+
+
+
+
+ {height}
+
+
+
+
+
\ No newline at end of file
diff --git a/insert_sensor/__init__.py b/insert_sensor/__init__.py
new file mode 100644
index 0000000..c4f4da5
--- /dev/null
+++ b/insert_sensor/__init__.py
@@ -0,0 +1,2 @@
+# For relative imports to work in Python 3.6
+import os, sys; sys.path.append(os.path.dirname(os.path.realpath(__file__)))
\ No newline at end of file
diff --git a/insert_sensor/execute.py b/insert_sensor/execute.py
new file mode 100644
index 0000000..2f71235
--- /dev/null
+++ b/insert_sensor/execute.py
@@ -0,0 +1,83 @@
+# -*- coding: utf-8 -*-
+"""This module does blah blah."""
+
+import requests
+from transactional import insert_sensor
+from wrapper import (Offering, FoI, Procedure, SensorType)
+# import json
+
+
+class Sos():
+ """
+ A class to represent a sos service.
+ ...
+
+ Attributes
+ ----------
+ sosurl : str
+ URL of the SOS service
+ token : str
+ token to access the SOS service
+ """
+
+ def __init__(self, url, token=''):
+ self.sosurl = str(url) # url to access the SOS
+ self.token = str(token) # security token, optional
+ # Test if URL exists
+ try:
+ test = requests.get(self.sosurl)
+ test.raise_for_status()
+ except requests.HTTPError:
+ print("The URL is not valid")
+
+
+def main():
+ """
+ main function
+ """
+ sos_url = 'https://geomon.geologie.ac.at/52n-sos-webapp/service'
+
+ ######################## Sibratsgfall
+ offering = Offering(
+ "https://geomon.geologie.ac.at/52n-sos-webapp/api/offerings/",
+ "sibratsgfall_3",
+ "Inklinometer 3, Sibratsgfaell Sensor"
+ )
+ procedure = Procedure( "sibratsgfall_3","sibratsgfall-3")
+ foi = FoI("degree", "m", (47.4279288, 10.0360888, 0.0),
+ "sibratsgfall", "Sibratsgfall Beobachtung der Bodenbewegungen Test")
+
+ ######################## Gschliefgraben
+ # offering = Offering(
+ # "https://geomon.geologie.ac.at/52n-sos-webapp/api/offerings/",
+ # "gschliefgraben_2",
+ # "Inklinometer 2, Gschliefgraben Sensor"
+ # )
+ # procedure = Procedure( "gschliefgraben_2","gschliefgraben-2")
+
+ # foi = FoI("degree", "m", (47.8845629, 13.8199351, 0.0),
+ # "GSA01A-033-0909", "Geophysikalische Untersuchungen am Gschliefgraben (Gmunden)")
+
+ ######################## Laakirchen
+ # offering = Offering(
+ # "https://geomon.geologie.ac.at/52n-sos-webapp/api/offerings/",
+ # "laakirchen_10",
+ # "Inklinometer 10, Laakirchen Sensor"
+ # )
+ # procedure = Procedure( "laakirchen_10","laakirchen-10")
+
+ # foi = FoI("degree", "m", (47.9789118, 13.8141457, 0.0),
+ # "GSA02B-007-0911", "Massenbewegung Laakirchen")
+
+
+ sensor_type = SensorType("inclinometer")
+ post_data = insert_sensor(offering, procedure, foi, sensor_type)
+ print(post_data)
+ headers = {'Accept': 'application/json'}
+ request = requests.post(sos_url, headers=headers, json=post_data)
+ print(request.text)
+
+
+
+if __name__ == '__main__':
+ main()
diff --git a/insert_sensor/transactional.py b/insert_sensor/transactional.py
new file mode 100644
index 0000000..fd892ec
--- /dev/null
+++ b/insert_sensor/transactional.py
@@ -0,0 +1,93 @@
+# -*- coding: utf-8 -*-
+"""
+Function for the SOS Transactional profile.
+This set of function format requests to publish and handle data in a SOS using a RESTful API.
+Requests need to be passed as the body of a HTTP request to the SOS server.
+When more than one syntax is allowed, requests are passed using XML version 2.0
+Author: Arno Kaimbacher
+Created: 12-07-2021
+"""
+
+
+def insert_sensor(offering, procedure, foi, sensor_type):
+ """
+ Prepares the body of an InsertSensor request for JSON binding.
+ :param offering: an instance of class Offering.
+ :param procedure: an instance of class Procedure.
+ :param foi: feature of interest. Instance of FoI
+ :param sensor_type: SensorType object
+ :return: valid body for an InsertSensor request.
+ """
+
+ # shortName = offering.name # string
+ # longName = 'Sibratsgfall test' # string
+
+ # Offering values
+ off_name = '\"' + str(offering.name) + '\"' # Offering name, double quoted
+ offering_name = offering.name
+ offering_label = offering.label
+ # offID = offering.fullId # URL format of full id
+
+ # featureName = featureID = cordX = cordY = height = h_unit = z_unit = coordinates = ""
+ if foi is not None: # check if feature of interest should be declare
+ # feature_id = 'https://geomon.geologie.ac.at/52n-sos-webapp/api/features/' + \
+ # str(foi.fid) # URL format
+ cordX = str(foi.x) # longitude degrees, float
+ cordY = str(foi.y) # latitude degrees, float
+ coordinates = cordX + " " + cordY
+ height = str(foi.z) # altitude in meters, float
+ # h_unit = foi.Hunit # units for horizontal coordinates
+ # z_unit = foi.Vunit # units for altitude
+ feature_id = foi.fid # "feature location"
+ feature_name = foi.name # "feature location"
+ else:
+ pass
+
+ procedure_name = procedure.name
+ procedure_identifier = procedure.id # URL,
+ obs_types = []
+ output_list = '' # output list element for describe procedure
+ properties_list = []
+ for a in sensor_type.pattern["attributes"]:
+ ObsPropName = '\"' + a[0] + '\"' # attribute name
+ # print(ObsPropName)
+ unit_name = sensor_type.om_types[a[1]] # om type
+ # magnitud = a # ??
+
+ obs_name = ObsPropName.replace('\"', '')
+ obs_name = "".join(obs_name.split()) # observable property name
+ output = ''
+ output_list = output_list + output
+ # add property identifier to the list.
+ properties_list.append(obs_name)
+ # prepare list of measurement types
+ # A sensor can not registry duplicated sensor types.
+ this_type = "http://www.opengis.net/def/observationType/OGC-OM/2.0/"+unit_name
+ if this_type not in obs_types: # when new type appears
+ obs_types.append(this_type)
+ else:
+ continue
+
+ # Unit of measurement:
+ unit_name = '\"' + procedure.name + '\"' # double quoted string
+ # unit = omType # one of the MO measurement types
+
+ body = {
+ "request": "InsertSensor",
+ "service": "SOS",
+ "version": "2.0.0",
+ "procedureDescriptionFormat": "http://www.opengis.net/sensorml/2.0",
+ "procedureDescription": f'{procedure_identifier}shortName{procedure_name}{offering_label}{offering_name}featuresOfInterest{feature_id}{feature_name}{coordinates}SlopeRollInSystemTemperature{cordX}{cordY}{height}',
+ "observableProperty": [
+ "Slope",
+ "Roll",
+ "InSystemTemperature"
+ ],
+ "observationType": [
+ "http://www.opengis.net/def/observationType/OGC-OM/2.0/OM_Measurement"
+ ],
+ "featureOfInterestType": "http://www.opengis.net/def/samplingFeatureType/OGC-OM/2.0/SF_SamplingPoint"
+ }
+ return body
diff --git a/insert_sensor/wrapper.py b/insert_sensor/wrapper.py
new file mode 100644
index 0000000..049b188
--- /dev/null
+++ b/insert_sensor/wrapper.py
@@ -0,0 +1,97 @@
+# -*- coding: utf-8 -*-
+"""This module does blah blah."""
+
+
+class Offering:
+ """ offering class """
+ def __init__(self, rooturl, offering_name, offering_label):
+ self.url = rooturl
+ # self.identifier = offeringId
+ self.name = str(offering_name)
+ self.label = offering_label
+ #self.fullId = str(rooturl) + str(offeringId)
+
+
+class Procedure:
+ """ procedure class """
+ def __init__(self, procedure_id, procedure_name):
+ self.id = procedure_id
+ self.name = procedure_name
+
+
+# Feature of Interest
+class FoI:
+ """ foi class """
+ def __init__(self, xy_unit, z_unit, cords, feature_id, feature_name):
+ '''
+ :param xy_unit: unit for X,Y coordinates. Eg. degrees, meters, etc.
+ :param z_unit: unit for z, usually height in meters.
+ :param cords: a tuple like (X, Y, Z)
+ :param feature_id: id for the feature of interest
+ '''
+ self.x = cords[0]
+ self.y = cords[1]
+ self.z = cords[2]
+ self.Vunit = str(z_unit) # unit of the vertical dimension
+ self.Hunit = str(xy_unit) # unit of the horizontal dimensions
+ self.fid = feature_id # ID of the feature
+ self.name = feature_name
+
+
+class SensorType:
+ """ sensor type class """
+
+ # In a SOS sensors can be:
+ # (a) In-situ ('on the spot') or (b) remote (e.g. satellites, airborne)
+ # (1) stationary (with fixed location) or mobile (in movement).
+ # Classification used in this class.
+ # TODO: extend class to consider all types.
+
+ om_types = {"m": "OM_Measurement", "co": "OM_CategoryObservation", "cto": "OM_CountObservation",
+ "to": "OM_TextObservation", "go": "OM_GeometryObservation", "tho": "OM_TruthObservation",
+ "xo": "OM_ComplexObservation"}
+
+ def __init__(self, type_): # type refers to the description of phenomena observed,
+ # and the mobility of the sensor.
+ # TODO: work on an ontology to deal with different phenomena names
+ if type_ == "light": # LIGHT
+ self.pattern = {"name": "light", "type": 'fixed', "attributes": [
+ ("Luminosity", "m"), ("Battery level", "m"), ("Temperature", "m")]}
+ elif type_ == "bus": # BUS
+ self.pattern = {"name": "BUS", "type": 'mobile', "attributes": [("Speed", "m"), ("Course", "m"), ("Odometer", "m"), ("CO", "m"), (
+ "Particles", "m"), ("Ozone N02", "m"), ("N02", "m"), ("Temperature", "m"), ("Humidity", "m")]} # ("Location","go")]}
+ elif type_ == "env_station": # ENV_STATION
+ self.pattern = {"name": "env_station", "type": 'fixed', "attributes": [("Battery level", "m"), ("Temperature", "m"), ("Relative humidity", "m"), ("Soil Moisture", "m"), (
+ "Solar Radiation", "m"), ("Rainfall", "m"), ("Wind_Speed", "m"), ("Wind_Direction", "m"), ("Radiation_PAR", "m"), ("Atmospheric Pressure", "m")]}
+ elif type_ == "irrigation": # IRRIGATION
+ self.pattern = {"name": "irrigation", "type": 'fixed', "attributes": [("Battery level", "m"), (
+ "Temperature", "m"), ("Relative humidity", "m"), ("Soil Moisture", "m"), ("Soil Temperature", "m")]}
+ elif type_ == "agriculture": # AGRICULTURE
+ self.pattern = {"name": "agriculture", "type": 'fixed', "attributes": [
+ ("Battery level", "m"), ("Temperature", "m"), ("Relative humidity", "m")]}
+ elif type_ == "inclinometer": # INCLINOMETER
+ self.pattern = {"name": "inclinometer", "type": 'fixed', "attributes": [
+ ("Slope", "m"), ("Roll", "m")]}
+ elif type_ == "noise": # NOISE
+ self.pattern = {"name": "noise", "type": 'fixed', "attributes": [
+ ("Battery level", "m"), ("Noise", "m")]}
+ elif type_ == "vehicle_counter": # VEHICLE_COUNTER
+ self.pattern = {"name": "vehicle_counter", "type": 'fixed', "attributes": [
+ ("Occupancy", "m"), (" Count", "cto")]}
+ elif type_ == "vehicle_speed": # VEHICLE_SPEED
+ self.pattern = {"name": "vehicle_speed", "type": 'fixed', "attributes": [
+ ("Occupancy", "m"), (" Count", "cto"), (" Average Speed", "m"), (" Median Speed", "m")]}
+ elif type_ == 'temp': # TEMP
+ self.pattern = {"name": "temp", "type": 'fixed', "attributes": [
+ ("Battery level", "m"), ("Temperature", "m")]}
+ elif type_ == 'outdoor': # Low EMF, measuring 'electrosmog'
+ # EFM-project, http://lexnet-project.eu/
+ self.pattern = {"name": "outdoor", "type": 'fixed', "attributes": [(" EField (900 Mhz)", "m"), (
+ " EField (1800 Mhz)", "m"), (" EField (2100 Mhz)", "m"), (" EField (2400 Mhz)", "m")]}
+ elif type_ == 'waste': # WASTE COLLECTOR (Truck)
+ self.pattern = {"name": "waste", "type": "fixed", "attributes": [("temperature", "m"), (
+ "humidity", "m"), ("particles", "m"), ("CO", "m"), ("NO2", "m"), ("O3", "m"), ("Location", "go")]}
+ elif type_ == 'air': # AIR, Not currently reporting
+ self.pattern = {"name": "air", "type": "fixed"}
+ else:
+ print("Sensor type is not defined")
diff --git a/notes.txt b/notes.txt
new file mode 100644
index 0000000..03ce5a0
--- /dev/null
+++ b/notes.txt
@@ -0,0 +1,33 @@
+
+===========================================================================================
+python -m venv .venv
+d:/Software/geomon/.venv/Scripts/python.exe -m pip install -U pylint
+
+WARNING: You are using pip version 21.1.3; however, version 21.2.1 is available.
+You should consider upgrading via the 'd:\Software\geomon\.venv\Scripts\python.exe -m pip install --upgrade pip' command.
+
+
+d:/Software/geomon/.venv/Scripts/python.exe -m pip install requests
+
+
+d:/Software/geomon/.venv/Scripts/python.exe -m pip install sqlalchemy
+d:/Software/geomon/.venv/Scripts/python.exe -m pip install pylint-flask
+pip install pylint-flask-sqlalchemy
+
+pylint --load-plugins pylint_flask pylint_flask_sqlalchemy
+or:
+And in your settings.json of VisualCode:
+ "python.linting.pylintArgs": [
+ "--load-plugins",
+ "pylint_flask",
+ "pylint_flask_sqlalchemy",
+ ],
+
+
+Install python formatter:
+d:/Software/geomon/.venv/Scripts/python.exe -m pip install -U autopep8
+
+
+pip install fdb
+pip install sqlalchemy-firebird
+pip install psycopg2
\ No newline at end of file
diff --git a/pg_models.py b/pg_models.py
new file mode 100644
index 0000000..e1fe5b5
--- /dev/null
+++ b/pg_models.py
@@ -0,0 +1,169 @@
+'''
+Tutorial link: https://docs.sqlalchemy.org/en/latest/orm/tutorial.html
+Sqlalchemy version: 1.2.15
+Python version: 3.7
+'''
+
+import os
+from sqlalchemy import (create_engine, Column, Integer,
+ SmallInteger, String, ForeignKey, DateTime, Numeric)
+from sqlalchemy.ext.declarative import declarative_base
+from sqlalchemy.orm import sessionmaker, relationship
+import sqlalchemy.orm.session
+
+Base = declarative_base()
+
+
+class Platform(Base):
+ """ Platform class """
+ __tablename__ = 'platform'
+ __table_args__ = {"schema": "gba"}
+
+ id = Column('platform_id', Integer, primary_key=True)
+ identifier = Column('identifier', String)
+ sta_identifier = Column('sta_identifier', String)
+ name = Column('name', String)
+ # datasets = relationship('Dataset')
+ datasets = relationship('Dataset', back_populates="platform", lazy=True)
+
+ def __repr__(self):
+ return f'Platform {self.name}'
+
+
+class Phenomenon(Base):
+ """ phenomenon class """
+ __tablename__ = 'phenomenon'
+ __table_args__ = {"schema": "gba"}
+
+ id = Column('phenomenon_id', Integer, primary_key=True)
+ name = Column('name', String)
+ sta_identifier = Column('sta_identifier', String)
+ # datasets = relationship('Dataset')
+ datasets = relationship('Dataset', back_populates="phenomenon", lazy=True)
+
+ def __repr__(self):
+ return f'Phenomenon {self.name}'
+
+
+class Procedure(Base):
+ """ procedure class """
+ __tablename__ = 'procedure'
+ __table_args__ = {"schema": "gba"}
+
+ id = Column('procedure_id', Integer, primary_key=True)
+ name = Column('name', String)
+ sta_identifier = Column('sta_identifier', String)
+ # datasets = relationship('Dataset')
+ datasets = relationship('Dataset', back_populates="procedure", lazy=True)
+
+ def __repr__(self):
+ return f'Procedure {self.name}'
+
+
+class Dataset(Base):
+ """ dataset class """
+ __tablename__ = 'dataset'
+ __table_args__ = {"schema": "gba"}
+
+ id = Column('dataset_id', Integer, primary_key=True)
+ name = Column('name', String)
+ is_published = Column('is_published', SmallInteger)
+ is_hidden = Column('is_hidden', SmallInteger)
+ dataset_type = Column('dataset_type', String)
+ observation_type = Column('observation_type', String)
+ value_type = Column('value_type', String)
+
+ last_time = Column('last_time', DateTime)
+ last_value = Column('last_value', Numeric(20, 10))
+ fk_last_observation_id = Column(
+ 'fk_last_observation_id',
+ Integer
+ )
+ # last_observation = relationship(
+ # "Observation", foreign_keys=[fk_last_observation_id])
+
+ first_time = Column('first_time', DateTime)
+ first_value = Column('first_value', Numeric(20, 10))
+ fk_first_observation_id = Column(
+ 'fk_first_observation_id',
+ Integer
+ )
+ # first_observation = relationship("Observation", foreign_keys=[
+ # fk_first_observation_id])
+
+ observations = relationship(
+ 'Observation', back_populates='dataset', lazy=True)
+
+ fk_procedure_id = Column('fk_procedure_id', Integer, ForeignKey(
+ 'gba.procedure.procedure_id'), nullable=False)
+ # procedure = relationship("Procedure", lazy="joined")
+ procedure = relationship(
+ "Procedure", back_populates="datasets", lazy="joined")
+
+ fk_phenomenon_id = Column(
+ 'fk_phenomenon_id', Integer, ForeignKey('gba.phenomenon.phenomenon_id'), nullable=False)
+ # phenomenon = relationship("Phenomenon", lazy="joined", foreign_keys=[fk_phenomenon_id])
+ phenomenon = relationship(
+ "Phenomenon", back_populates="datasets", lazy="joined")
+
+ # fk_platform_id = Column(
+ # 'fk_platform_id', Integer, ForeignKey('gba.platform.platform_id'), nullable=True)
+ # # platform = relationship("Platform", lazy="joined", foreign_keys=[fk_platform_id])
+ fk_platform_id = Column('fk_platform_id', Integer, ForeignKey(
+ 'gba.platform.platform_id'), nullable=True)
+ platform = relationship(
+ "Platform", back_populates="datasets", lazy="joined")
+
+ def __repr__(self):
+ return f'Dataset {self.name}'
+
+
+class Observation(Base):
+ """ observation class """
+ __tablename__ = 'observation'
+ __table_args__ = {"schema": "gba"}
+
+ id = Column('observation_id', Integer, primary_key=True)
+ name = Column('name', String)
+ value_type = Column('value_type', String)
+ # pitch = Column('PITCH', String)
+ # roll = Column('ROLL', String)
+ sampling_time_start = Column('sampling_time_start', DateTime)
+ sampling_time_end = Column('sampling_time_end', DateTime)
+ result_time = Column('result_time', DateTime)
+ sta_identifier = Column('sta_identifier', String)
+ value_quantity = Column('value_quantity', Numeric(20, 10), nullable=False)
+
+ # fk_dataset_id = Column('fk_dataset_id', Integer,
+ # ForeignKey('gba.dataset.dataset_id'))
+ # dataset = relationship("Dataset", lazy="joined",
+ # foreign_keys=[fk_dataset_id])
+ fk_dataset_id = Column(Integer, ForeignKey(
+ 'gba.dataset.dataset_id'), nullable=False)
+ dataset = relationship("Dataset", back_populates="observations")
+
+ def __repr__(self):
+ return f'Observation {self.name}'
+
+ # @property
+ # def result_time(self):
+ # ''' Create a datetime object '''
+ # start_datetime = datetime.datetime.combine(self.date, self.ora)
+ # return start_datetime
+
+
+def create_pg_session() -> sqlalchemy.orm.sessionmaker:
+ """Create and return a SQLAlchemy session connected to the PostGIS database."""
+ dbschema = ''
+ db_user = os.environ.get("POSTGIS_DBUSER")
+ db_password = os.environ.get("POSTGIS_DBPASSWORD")
+ db_url = os.environ.get("POSTGIS_DBURL")
+ engine = create_engine(
+ "postgresql+psycopg2://" + db_user + ":" + db_password + "@" + db_url,
+ connect_args={'options': '-csearch_path={}'.format(dbschema)},
+ isolation_level="READ UNCOMMITTED")
+ session_maker = sessionmaker(bind=engine)
+ session = session_maker()
+
+ Base.metadata.create_all(engine)
+ return session