diff --git a/gschliefgraben_glasfaser/InsertGschliefgraben.xml b/gschliefgraben_glasfaser/InsertGschliefgraben.xml
new file mode 100644
index 0000000..cf2ffea
--- /dev/null
+++ b/gschliefgraben_glasfaser/InsertGschliefgraben.xml
@@ -0,0 +1,81 @@
+
+ {procedure_identifier}
+
+
+
+
+ shortName
+ {procedure_name}
+
+
+
+
+
+
+
+
+ {offering_label}
+ {offering_name}
+
+
+
+
+
+
+ featuresOfInterest
+
+
+ {feature_id}
+ {feature_name}
+
+
+
+
+ {coordinates}
+
+
+
+
+
+
+
+
+
+
+ Slope
+
+
+
+
+
+
+
+
+
+
+ {cord_x}
+
+
+
+
+
+ {cord_y}
+
+
+
+
+
+ {height}
+
+
+
+
+
\ No newline at end of file
diff --git a/gschliefgraben_glasfaser/import_feature_sensor.py b/gschliefgraben_glasfaser/import_feature_sensor.py
new file mode 100644
index 0000000..1df55db
--- /dev/null
+++ b/gschliefgraben_glasfaser/import_feature_sensor.py
@@ -0,0 +1,167 @@
+# -*- coding: utf-8 -*-
+"""Registers a sensor (procedure) and its feature of interest with the SOS service."""
+
+import requests
+# from insert_sensor.transactional import insert_sensor
+from insert_sensor.wrapper import (Offering, FoI, Procedure, SensorType)
+# import json
+
+
+class Sos():
+ """
+ A class to represent a sos service.
+ ...
+
+ Attributes
+ ----------
+ sosurl : str
+ url of the SOS service
+ token : str
+ token to access the SOS service
+ """
+
+ def __init__(self, url, token=''):
+ self.sosurl = str(url) # url to access the SOS
+ self.token = str(token) # security token, optional
+ # Test if URL exists
+ try:
+ test = requests.get(self.sosurl)
+ test.raise_for_status()
+ except requests.HTTPError:
+ print("The URL is not valid")
+
+
+def main():
+ """
+ main function
+ """
+ sos_url = 'https://geomon.geologie.ac.at/52n-sos-webapp/service'
+
+ # Gschliefgraben Glasfaser
+
+ # offering = Offering(
+ # "https://geomon.geologie.ac.at/52n-sos-webapp/api/offerings/",
+ # "inclino1_02",
+ # "Inklinometer inclino1_02, Gschliefgraben Glasfaser"
+ # )
+ # procedure = Procedure( "inclino1_02","inclino1_02")
+
+ # offering = Offering(
+ # "https://geomon.geologie.ac.at/52n-sos-webapp/api/offerings/",
+ # "inclino1_05",
+ # "Inklinometer inclino1_05, Gschliefgraben Glasfaser"
+ # )
+ # procedure = Procedure("inclino1_05", "inclino1_05")
+
+ offering = Offering(
+ "https://geomon.geologie.ac.at/52n-sos-webapp/api/offerings/",
+ "inclino1_06",
+ "Inklinometer inclino1_06, Gschliefgraben Glasfaser"
+ )
+ procedure = Procedure("inclino1_06", "inclino1_06")
+
+ foi = FoI("degree", "m", (47.910849, 13.774966, 0.0),
+ "FBGuard23", "Glasfaser Untersuchungen am Gschliefgraben (Gmunden)")
+
+ sensor_type = SensorType("inclinometer")
+ post_data = insert_sensor(offering, procedure, foi, sensor_type)
+ print(post_data)
+ headers = {'Accept': 'application/json'}
+ request = requests.post(sos_url, headers=headers, json=post_data)
+ print(request.text)
+
+ # {
+ # "request" : "InsertSensor",
+ # "version" : "2.0.0",
+ # "service" : "SOS",
+ # "assignedProcedure" : "inclino1_14",
+ # "assignedOffering" : "inclino1_14"
+ # }
+
+
+def insert_sensor(offering, procedure, foi, sensor_type):
+ """
+ Prepares the body of an InsertSensor request for JSON binding.
+ :param offering: an instance of class Offering.Type object.
+ :param procedure: an instance of class Procedure.
+ :param foi: feature of interest. Instance of FoI
+ :param sensor_type: SensorType object
+ :return: valid body for an InsertSensor request.
+ """
+
+ # shortName = offering.name # string
+ # longName = 'Sibratsgfall test' # string
+
+ # Offering values
+ off_name = '\"' + str(offering.name) + '\"' # Offering name, double quoted
+ offering_name = offering.name
+ offering_label = offering.label
+ # offID = offering.fullId # URL format of full id
+
+ # featureName = featureID = cordX = cordY = height = h_unit = z_unit = coordinates = ""
+ if foi is not None: # check if feature of interest should be declared
+ # feature_id = 'https://geomon.geologie.ac.at/52n-sos-webapp/api/features/' + \
+ # str(foi.fid) # URL format
+ cord_x = str(foi.x) # longitude degrees, float
+ cord_y = str(foi.y) # latitude degrees, float
+ coordinates = cord_x + " " + cord_y
+ height = str(foi.z) # altitude in meters, float
+ # h_unit = foi.Hunit # units for horizontal coordinates
+ # z_unit = foi.Vunit # units for altitude
+ feature_id = foi.fid # "feature location"
+ feature_name = foi.name # "feature location"
+ else:
+ pass
+
+ procedure_name = procedure.name
+ procedure_identifier = procedure.id # URL,
+ obs_types = []
+ output_list = '' # output list element for describe procedure
+ properties_list = []
+ for attr in sensor_type.pattern["attributes"]:
+ obs_prop_name = '\"' + attr[0] + '\"' # attribute name
+ # print(obs_prop_name)
+ unit_name = sensor_type.om_types[attr[1]] # om type
+ # magnitud = a # ??
+
+ obs_name = obs_prop_name.replace('\"', '')
+ obs_name = "".join(obs_name.split()) # observable property name
+ output = ''
+ output_list = output_list + output
+ # add property identifier to the list.
+ properties_list.append(obs_name)
+ # prepare list of measurement types
+ # A sensor can not registry duplicated sensor types.
+ this_type = "http://www.opengis.net/def/observationType/OGC-OM/2.0/"+unit_name
+ if this_type not in obs_types: # when new type appears
+ obs_types.append(this_type)
+ else:
+ continue
+
+ # Unit of measurement:
+ unit_name = '\"' + procedure.name + '\"' # double quoted string
+ # unit = omType # one of the MO measurement types
+
+ body = {
+ "request": "InsertSensor",
+ "service": "SOS",
+ "version": "2.0.0",
+ "procedureDescriptionFormat": "http://www.opengis.net/sensorml/2.0",
+ "procedureDescription": f'{procedure_identifier}shortName{procedure_name}{offering_label}{offering_name}featuresOfInterest{feature_id}{feature_name}{coordinates}Slope{cord_x}{cord_y}{height}',
+ "observableProperty": [
+ "Slope",
+ # "Roll",
+ # "InSystemTemperature"
+ ],
+ "observationType": [
+ "http://www.opengis.net/def/observationType/OGC-OM/2.0/OM_Measurement"
+ ],
+ "featureOfInterestType": "http://www.opengis.net/def/samplingFeatureType/OGC-OM/2.0/SF_SamplingPoint"
+ }
+ return body
+
+
+if __name__ == '__main__':
+ main()
diff --git a/gschliefgraben_glasfaser/main.py b/gschliefgraben_glasfaser/main.py
index 6fe0ec9..c9fa941 100644
--- a/gschliefgraben_glasfaser/main.py
+++ b/gschliefgraben_glasfaser/main.py
@@ -15,24 +15,28 @@ import json
# parentdir = os.path.dirname(currentdir)
# sys.path.insert(0, parentdir)
# import requests
+from datetime import datetime, date, timedelta
+from dotenv import load_dotenv, find_dotenv
from sqlalchemy.orm import session
from sqlalchemy import func, asc, desc
# from db.pg_models import Platform
-from gschliefgraben_glasfaser.models import ObservationSchema, Person, PersonSchema, Observation, create_pg_session, Dataset, Procedure, Phenomenon, Platform
+from gschliefgraben_glasfaser.models import (
+ ObservationSchema, Person, PersonSchema, Observation,
+ create_pg_session, Dataset, Procedure, Phenomenon, Platform)
from gschliefgraben_glasfaser.my_api import MyApi
-from datetime import datetime, date, timedelta
+
def main():
''' main method '''
pg_session: session = create_pg_session()
- platform_sta_identifier = "gschliefgraben_glasfaser"
+ platform_sta_identifier = "gschliefgraben_glasfaser"
# sensor_list = ["inclino1_14", "inclino1_02"]
#sensor_list = os.environ.get("GLASFASER_GSCHLIEFGRABEN_SENSORS")
sensor_list = json.loads(os.environ['GLASFASER_GSCHLIEFGRABEN_SENSORS'])
-
+
# this will print elements along with their index value
for sensor in sensor_list:
-
+
pg_query = pg_session.query(Dataset) \
.join(Procedure) \
.join(Phenomenon) \
@@ -49,9 +53,9 @@ def main():
slope_dataset.observation_type = "simple"
slope_dataset.value_type = "quantity"
pg_session.commit()
-
+
platform_exists: bool = pg_session.query(Platform.id).filter_by(
- sta_identifier = platform_sta_identifier).scalar() is not None
+ sta_identifier=platform_sta_identifier).scalar() is not None
if platform_exists:
sensor_platform = pg_session.query(Platform.id) \
.filter(Platform.sta_identifier == platform_sta_identifier) \
@@ -59,10 +63,10 @@ def main():
slope_dataset.fk_platform_id = sensor_platform.id
else:
exit()
-
+
# create all the observation for the given sensor names
create_observations(sensor, slope_dataset)
-
+
# update first and last observations for the dataset
first_slope_observation = pg_session.query(Observation) \
.filter(Observation.fk_dataset_id == slope_dataset.id) \
@@ -80,20 +84,21 @@ def main():
slope_dataset.last_time = last_slope_observation.sampling_time_start
slope_dataset.last_value = last_slope_observation.value_quantity
slope_dataset.fk_last_observation_id = last_slope_observation.id
-
+
pg_session.commit()
pg_session.close()
-
-def create_observations(sensor: str, slope_dataset: Dataset):
- ''' create_observations method for given sensor '''
- pg_session: session = create_pg_session()
+
+def create_observations(sensor: str, slope_dataset: Dataset):
+ ''' create_observations method for given sensor '''
+
+ pg_session: session = create_pg_session()
# The size of each step in days
# consider the start date as 2021-february 1 st
start_date = date(2022, 1, 1)
# consider the end date as 2021-march 1 st
- end_date = date(2022, 3, 3)
+ end_date = date(2022, 3, 6)
# delta time
delta = timedelta(days=7)
@@ -104,11 +109,12 @@ def create_observations(sensor: str, slope_dataset: Dataset):
while start_date <= end_date:
# print(start_date, end="\n")
query_date_start: str = start_date.strftime('%Y-%m-%d')
- end_date_temp: date = start_date + delta # (plus 7 days)
+ end_date_temp: date = start_date + delta # (plus 7 days)
if end_date_temp > end_date:
end_date_temp = end_date
query_date_end: str = end_date_temp.strftime('%Y-%m-%d')
- create_db_observations(sensor, query_date_start, query_date_end, test_api, pg_session, slope_dataset)
+ create_db_observations(
+ sensor, query_date_start, query_date_end, test_api, pg_session, slope_dataset)
# for next loop step set new start_date (1 day greate then last end_date)
start_date = end_date_temp + timedelta(days=1)
pg_session.commit()
@@ -136,7 +142,8 @@ def create_observations(sensor: str, slope_dataset: Dataset):
# pg_session.commit()
-def create_db_observations(sensor, query_date_start, query_date_end, test_api, pg_session, dataset: Dataset):
+def create_db_observations(sensor, query_date_start, query_date_end, test_api,
+ pg_session, dataset: Dataset):
''' to do '''
query_date_start_obj = datetime.strptime(query_date_start, "%Y-%m-%d")
query_date_end_obj = datetime.strptime(query_date_end, "%Y-%m-%d")
@@ -169,14 +176,16 @@ def create_db_observations(sensor, query_date_start, query_date_end, test_api,
max_id = create_observation(
observation_json, pg_session, max_id, dataset, value_identifier_db_list)
# pg_session.commit()
- print("observations for date " + query_date_start + " to " + query_date_end + " succesfully imported \n")
+ print("observations for date " + query_date_start +
+ " to " + query_date_end + " for sensor " + sensor + " succesfully imported \n")
-def create_observation(observation_json: ObservationSchema, db_session, max_id, dataset: Dataset, value_identifier_db_list):
+def create_observation(observation_json: ObservationSchema, db_session,
+ max_id, dataset: Dataset, value_identifier_db_list):
"""
This function creates a new observation in the people structure
based on the passed-in observation data
- :param observation: person to create in people structure
+ :param observation: observation to create in people structure
:return: 201 on success, observation on person exists
"""
@@ -259,4 +268,7 @@ def create(person_json: PersonSchema):
if __name__ == "__main__":
+ load_dotenv(find_dotenv())
+ print('sensors: {}'.format(os.environ.get(
+ 'GLASFASER_GSCHLIEFGRABEN_SENSORS', [])))
main()
diff --git a/gschliefgraben_glasfaser/models.py b/gschliefgraben_glasfaser/models.py
index c72f369..31e1296 100644
--- a/gschliefgraben_glasfaser/models.py
+++ b/gschliefgraben_glasfaser/models.py
@@ -10,12 +10,13 @@ from datetime import datetime
import os
from sqlalchemy import (Column, Integer,
- String, DateTime, ForeignKey, Numeric, SmallInteger, create_engine, func)
+ String, DateTime, ForeignKey, Numeric, SmallInteger, create_engine)
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import session, relationship, sessionmaker
#from marshmallow import Schema
from marshmallow_sqlalchemy import SQLAlchemySchema, SQLAlchemyAutoSchema
from marshmallow import fields
+from dotenv import load_dotenv, find_dotenv
# from db.pg_models import create_pg_session
# import sqlalchemy.orm.session
@@ -24,6 +25,7 @@ Base = declarative_base()
def create_pg_session() -> sessionmaker:
""" create postgres db session """
+ load_dotenv(find_dotenv())
dbschema = ''
db_user = os.environ.get("POSTGIS_DBUSER")
db_password = os.environ.get("POSTGIS_DBPASSWORD")
@@ -117,18 +119,18 @@ class Dataset(Base):
observations = relationship(
'Observation', back_populates='dataset', lazy=True)
-
+
fk_phenomenon_id = Column(
'fk_phenomenon_id', Integer, ForeignKey('gba.phenomenon.phenomenon_id'), nullable=False)
# phenomenon = relationship("Phenomenon", lazy="joined", foreign_keys=[fk_phenomenon_id])
phenomenon = relationship(
"Phenomenon", back_populates="datasets", lazy="joined")
-
+
fk_platform_id = Column('fk_platform_id', Integer, ForeignKey(
'gba.platform.platform_id'), nullable=True)
platform = relationship(
"Platform", back_populates="datasets", lazy="joined")
-
+
fk_procedure_id = Column('fk_procedure_id', Integer, ForeignKey(
'gba.procedure.procedure_id'), nullable=False)
# procedure = relationship("Procedure", lazy="joined")
@@ -226,6 +228,7 @@ def create_db():
# Base.metadata.drop_all(bind=engine)
# Base.metadata.create_all(engine)
""" create postgres db session """
+ load_dotenv("D:\\Software\\geomon\\.env")
dbschema = ''
db_user = os.environ.get("POSTGIS_DBUSER")
db_password = os.environ.get("POSTGIS_DBPASSWORD")
diff --git a/gschliefgraben_glasfaser/update_daily_cron.py b/gschliefgraben_glasfaser/update_daily_cron.py
index 381ec33..44615d0 100644
--- a/gschliefgraben_glasfaser/update_daily_cron.py
+++ b/gschliefgraben_glasfaser/update_daily_cron.py
@@ -5,23 +5,28 @@ Sqlalchemy version: 1.2.15
Python version: 3.10
'''
-import os, json
+import os
+import json
import uuid
+from datetime import datetime
+from dotenv import load_dotenv, find_dotenv
from sqlalchemy.orm import session
from sqlalchemy import func, asc, desc
# from db.pg_models import Platform
-from gschliefgraben_glasfaser.models import ObservationSchema, Observation, create_pg_session, Dataset, Procedure, Phenomenon, Platform
+from gschliefgraben_glasfaser.models import (
+ ObservationSchema, Observation, create_pg_session,
+ Dataset, Procedure, Phenomenon, Platform)
from gschliefgraben_glasfaser.my_api import MyApi
-from datetime import datetime
+
def main():
''' main method '''
pg_session: session = create_pg_session()
- platform_sta_identifier = "gschliefgraben_glasfaser"
+ platform_sta_identifier = "gschliefgraben_glasfaser"
# sensor_list = ["inclino1_14", "inclino1_02"]
#sensor_list = os.environ.get("GLASFASER_GSCHLIEFGRABEN_SENSORS")
sensor_list = json.loads(os.environ['GLASFASER_GSCHLIEFGRABEN_SENSORS'])
-
+
# this will print elements along with their index value
for sensor in sensor_list:
pg_query = pg_session.query(Dataset) \
@@ -32,7 +37,8 @@ def main():
Phenomenon.sta_identifier == "Slope").first()
if not slope_dataset:
print("Sensor " + sensor + " ist noch nicht angelegt!")
- exit()
+ # exit()
+ continue
if not slope_dataset.is_published:
slope_dataset.is_published = 1
slope_dataset.is_hidden = 0
@@ -40,18 +46,18 @@ def main():
slope_dataset.observation_type = "simple"
slope_dataset.value_type = "quantity"
pg_session.commit()
-
+
platform_exists: bool = pg_session.query(Platform.id).filter_by(
- sta_identifier = platform_sta_identifier).scalar() is not None
+ sta_identifier=platform_sta_identifier).scalar() is not None
if platform_exists:
sensor_platform = pg_session.query(Platform.id) \
.filter(Platform.sta_identifier == platform_sta_identifier) \
.first()
slope_dataset.fk_platform_id = sensor_platform.id
-
+
# create all the observation for the given sensor names
create_observations(sensor, slope_dataset)
-
+
first_slope_observation = pg_session.query(Observation) \
.filter(Observation.fk_dataset_id == slope_dataset.id) \
.order_by(asc('sampling_time_start')) \
@@ -68,27 +74,29 @@ def main():
slope_dataset.last_time = last_slope_observation.sampling_time_start
slope_dataset.last_value = last_slope_observation.value_quantity
slope_dataset.fk_last_observation_id = last_slope_observation.id
-
+
pg_session.commit()
pg_session.close()
-
-def create_observations(sensor: str, slope_dataset: Dataset):
- ''' create_observations method for given sensor '''
- pg_session: session = create_pg_session()
-
+def create_observations(sensor: str, slope_dataset: Dataset):
+ ''' create_observations method for given sensor '''
+
+ pg_session: session = create_pg_session()
+
# create access token
token_api = os.environ.get("TOKEN_API")
test_api = MyApi(token_api)
-
+
# The size of each step in days
# consider the start date as 2021-february 1 st
start_date = datetime.today()
query_date = start_date.strftime('%Y-%m-%d')
- create_db_observations(sensor, query_date, test_api, pg_session, slope_dataset)
+ create_db_observations(sensor, query_date, test_api,
+ pg_session, slope_dataset)
pg_session.commit()
+
def create_db_observations(sensor: str, query_date, test_api, pg_session, dataset: Dataset):
''' to do '''
query_date_obj = datetime.strptime(query_date, "%Y-%m-%d")
@@ -113,7 +121,8 @@ def create_db_observations(sensor: str, query_date, test_api, pg_session, datase
max_id = create_observation(
observation_json, pg_session, max_id, dataset)
# pg_session.commit()
- print("observations for date " + query_date + " succesfully imported \n")
+ print("observations for date " + query_date + " and sensor " + sensor +
+ " succesfully imported \n")
def create_observation(observation_json: ObservationSchema, db_session, max_id, dataset: Dataset):
@@ -142,8 +151,8 @@ def create_observation(observation_json: ObservationSchema, db_session, max_id,
new_observation: Observation = schema.load(observation_json)
new_observation.id = max_id
new_observation.sta_identifier = str(uuid.uuid4())
- new_observation.sampling_time_start=new_observation.result_time
- new_observation.sampling_time_end=new_observation.result_time
+ new_observation.sampling_time_start = new_observation.result_time
+ new_observation.sampling_time_end = new_observation.result_time
new_observation.fk_dataset_id = dataset.id
# Add the person to the database
@@ -159,6 +168,10 @@ def create_observation(observation_json: ObservationSchema, db_session, max_id,
else:
print(409, f'Observation {ob_id} exists already')
return max_id
-
+
+
if __name__ == "__main__":
+ load_dotenv(find_dotenv())
+ print('sensors: {}'.format(os.environ.get(
+ 'GLASFASER_GSCHLIEFGRABEN_SENSORS', [])))
main()
diff --git a/insert_sensor/__init__.py b/insert_sensor/__init__.py
index c4f4da5..cce2a5a 100644
--- a/insert_sensor/__init__.py
+++ b/insert_sensor/__init__.py
@@ -1,2 +1,4 @@
-# For relative imports to work in Python 3.6
-import os, sys; sys.path.append(os.path.dirname(os.path.realpath(__file__)))
\ No newline at end of file
+''' For relative imports to work in Python 3.6 '''
+import os
+import sys
+sys.path.append(os.path.dirname(os.path.realpath(__file__)))
\ No newline at end of file
diff --git a/insert_sensor/transactional.py b/insert_sensor/transactional.py
index 29803ab..481588d 100644
--- a/insert_sensor/transactional.py
+++ b/insert_sensor/transactional.py
@@ -32,9 +32,9 @@ def insert_sensor(offering, procedure, foi, sensor_type):
if foi is not None: # check if feature of interest should be declare
# feature_id = 'https://geomon.geologie.ac.at/52n-sos-webapp/api/features/' + \
# str(foi.fid) # URL format
- cordX = str(foi.x) # longitude degrees, float
- cordY = str(foi.y) # latitude degrees, float
- coordinates = cordX + " " + cordY
+ cord_x = str(foi.x) # longitude degrees, float
+ cord_y = str(foi.y) # latitude degrees, float
+ coordinates = cord_x + " " + cord_y
height = str(foi.z) # altitude in meters, float
# h_unit = foi.Hunit # units for horizontal coordinates
# z_unit = foi.Vunit # units for altitude
@@ -49,14 +49,14 @@ def insert_sensor(offering, procedure, foi, sensor_type):
output_list = '' # output list element for describe procedure
properties_list = []
for a in sensor_type.pattern["attributes"]:
- ObsPropName = '\"' + a[0] + '\"' # attribute name
- # print(ObsPropName)
+ obs_prop_name = '\"' + a[0] + '\"' # attribute name
+ # print(obs_prop_name)
unit_name = sensor_type.om_types[a[1]] # om type
# magnitud = a # ??
- obs_name = ObsPropName.replace('\"', '')
+ obs_name = obs_prop_name.replace('\"', '')
obs_name = "".join(obs_name.split()) # observable property name
- output = ''
output_list = output_list + output
@@ -79,7 +79,7 @@ def insert_sensor(offering, procedure, foi, sensor_type):
"service": "SOS",
"version": "2.0.0",
"procedureDescriptionFormat": "http://www.opengis.net/sensorml/2.0",
- "procedureDescription": f'{procedure_identifier}shortName{procedure_name}{offering_label}{offering_name}featuresOfInterest{feature_id}{feature_name}{coordinates}Slope{cordX}{cordY}{height}',
+ "procedureDescription": f'{procedure_identifier}shortName{procedure_name}{offering_label}{offering_name}featuresOfInterest{feature_id}{feature_name}{coordinates}Slope{cord_x}{cord_y}{height}',
"observableProperty": [
"Slope",
"Roll",
diff --git a/requirements.txt b/requirements.txt
index 7afca30..baf1a8f 100644
Binary files a/requirements.txt and b/requirements.txt differ