From 8db6507252384f8539ddd3e8a72fa454affca7c9 Mon Sep 17 00:00:00 2001
From: Arno Kaimbacher
Date: Mon, 14 Mar 2022 15:20:05 +0100
Subject: [PATCH] add module for importing piezometer data

---
 .../InsertPechgraben.xml                      | 102 ++++++++++
 .../import_feature_sensor.py                  | 183 ++++++++++++++++++
 .../import_piezometer_observations.py         |  98 ++++++++++
 insert_sensor/istSOS_insert.xml               |  84 --------
 .../import_image_observations.py              |   8 +-
 5 files changed, 387 insertions(+), 88 deletions(-)
 create mode 100644 gschliefgraben_piezometer/InsertPechgraben.xml
 create mode 100644 gschliefgraben_piezometer/import_feature_sensor.py
 create mode 100644 gschliefgraben_piezometer/import_piezometer_observations.py
 delete mode 100644 insert_sensor/istSOS_insert.xml

diff --git a/gschliefgraben_piezometer/InsertPechgraben.xml b/gschliefgraben_piezometer/InsertPechgraben.xml
new file mode 100644
index 0000000..8516902
--- /dev/null
+++ b/gschliefgraben_piezometer/InsertPechgraben.xml
@@ -0,0 +1,102 @@
+{procedure_identifier}
+longName
+{procedure_name}
+shortName
+{procedure_name}
+{offering_label}
+{offering_name}
+true
+false
+featuresOfInterest
+{feature_id}
+{feature_name}
+{coordinates}
+{cord_x}
+{cord_y}
+{height}
\ No newline at end of file
diff --git a/gschliefgraben_piezometer/import_feature_sensor.py b/gschliefgraben_piezometer/import_feature_sensor.py
new file mode 100644
index 0000000..95e7cc9
--- /dev/null
+++ b/gschliefgraben_piezometer/import_feature_sensor.py
@@ -0,0 +1,183 @@
+# -*- coding: utf-8 -*-
+"""Register a sensor (procedure, offering, feature of interest) at the SOS via an InsertSensor request."""
+
+import requests
+# from insert_sensor.transactional import insert_sensor
+from insert_sensor.wrapper import (Offering, FoI, Procedure, SensorType)
+# import json
+
+
+class Sos():
+    """
+    A class to represent a SOS service.
+    ...
+
+    Attributes
+    ----------
+    sosurl : str
+        URL of the SOS service
+    token : str
+        security token to access the SOS service
+    """
+
+    def __init__(self, url, token=''):
+        self.sosurl = str(url)  # url to access the SOS
+        self.token = str(token)  # security token, optional
+        # Test if the URL is reachable
+        try:
+            test = requests.get(self.sosurl)
+            test.raise_for_status()
+        except requests.HTTPError:
+            print("The URL is not valid")
+
+
+def main():
+    """
+    main function
+    """
+    sos_url = 'https://geomon.geologie.ac.at/52n-sos-webapp/service'
+
+    # Gschliefgraben Glasfaser
+
+    # offering = Offering(
+    #     "https://geomon.geologie.ac.at/52n-sos-webapp/api/offerings/",
+    #     "inclino1_02",
+    #     "Inklinometer inclino1_02, Gschliefgraben Glasfaser"
+    # )
+    # procedure = Procedure("inclino1_02", "inclino1_02")
+    # foi = FoI("degree", "m", (13.774966, 47.910849, 0.0),
+    #           "inclino1-glasfaser-gschliefgraben",
+    #           "Glasfaser Untersuchungen am Gschliefgraben (Gmunden)")
+
+    # offering = Offering(
+    #     "https://geomon.geologie.ac.at/52n-sos-webapp/api/offerings/",
+    #     "inclino1_05",
+    #     "Inklinometer inclino1_05, Gschliefgraben Glasfaser"
+    # )
+    # procedure = Procedure("inclino1_05", "inclino1_05")
+    # foi = FoI("degree", "m", (13.774966, 47.910849, 0.0),
+    #           "inclino1-glasfaser-gschliefgraben",
+    #           "Glasfaser Untersuchungen am Gschliefgraben (Gmunden)")
+
+    # offering = Offering(
+    #     "https://geomon.geologie.ac.at/52n-sos-webapp/api/offerings/",
+    #     "inclino1_14",
+    #     "Inklinometer inclino1_14, Gschliefgraben Glasfaser"
+    # )
+    # procedure = Procedure("inclino1_14", "inclino1_14")
+    # foi = FoI("degree", "m", (13.774966, 47.910849, 0.0),
+    #           "inclino1-glasfaser-gschliefgraben",
+    #           "Glasfaser Untersuchungen am Gschliefgraben (Gmunden)")
+
+    offering = Offering(
+        "https://geomon.geologie.ac.at/52n-sos-webapp/api/offerings/",
+        "inclino1_06",
+        "Inklinometer inclino1_06, Gschliefgraben Glasfaser"
+    )
+    procedure = Procedure("inclino1_06", "inclino1_06")
+    foi = FoI("degree", "m", (13.774966, 47.910849, 0.0),
+              "inclino1-glasfaser-gschliefgraben",
+              "Glasfaser Untersuchungen am Gschliefgraben (Gmunden)")
+
+    sensor_type = SensorType("inclinometer")
+    post_data = insert_sensor(offering, procedure, foi, sensor_type)
+    print(post_data)
+    headers = {'Accept': 'application/json'}
+    request = requests.post(sos_url, headers=headers, json=post_data)
+    print(request.text)
+
+    # {
+    #     "request" : "InsertSensor",
+    #     "version" : "2.0.0",
+    #     "service" : "SOS",
+    #     "assignedProcedure" : "inclino1_14",
+    #     "assignedOffering" : "inclino1_14"
+    # }
+
+
+def insert_sensor(offering, procedure, foi, sensor_type):
+    """
+    Prepares the body of an InsertSensor request for the JSON binding.
+    :param offering: instance of class Offering
+    :param procedure: instance of class Procedure
+    :param foi: feature of interest, instance of FoI
+    :param sensor_type: SensorType object
+    :return: valid body for an InsertSensor request.
+ """ + + # shortName = offering.name # string + # longName = 'Sibratsgfall test' # string + + # Offering values + off_name = '\"' + str(offering.name) + '\"' # Offering name, double quoted + offering_name = offering.name + offering_label = offering.label + # offID = offering.fullId # URL format of full id + + # featureName = featureID = cordX = cordY = height = h_unit = z_unit = coordinates = "" + if foi is not None: # check if feature of interest should be declare + # feature_id = 'https://geomon.geologie.ac.at/52n-sos-webapp/api/features/' + \ + # str(foi.fid) # URL format + cord_x = str(foi.x) # longitude degrees, float + cord_y = str(foi.y) # latitude degrees, float + coordinates = cord_x + " " + cord_y + height = str(foi.z) # altitude in meters, float + # h_unit = foi.Hunit # units for horizontal coordinates + # z_unit = foi.Vunit # units for altitude + feature_id = foi.fid # "feature location" + feature_name = foi.name # "feature location" + else: + pass + + procedure_name = procedure.name + procedure_identifier = procedure.id # URL, + obs_types = [] + output_list = '' # output list element for describe procedure + properties_list = [] + for attr in sensor_type.pattern["attributes"]: + obs_prop_name = '\"' + attr[0] + '\"' # attribute name + # print(obs_prop_name) + unit_name = sensor_type.om_types[attr[1]] # om type + # magnitud = a # ?? + + obs_name = obs_prop_name.replace('\"', '') + obs_name = "".join(obs_name.split()) # observable property name + output = '' + output_list = output_list + output + # add property identifier to the list. + properties_list.append(obs_name) + # prepare list of measurement types + # A sensor can not registry duplicated sensor types. + this_type = "http://www.opengis.net/def/observationType/OGC-OM/2.0/"+unit_name + if this_type not in obs_types: # when new type appears + obs_types.append(this_type) + else: + continue + + # Unit of measurement: + unit_name = '\"' + procedure.name + '\"' # double quoted string + # unit = omType # one of the MO measurement types + + body = { + "request": "InsertSensor", + "service": "SOS", + "version": "2.0.0", + "procedureDescriptionFormat": "http://www.opengis.net/sensorml/2.0", + "procedureDescription": f'{procedure_identifier}shortName{procedure_name}{offering_label}{offering_name}featuresOfInterest{feature_id}{feature_name}{coordinates}Slope{cord_x}{cord_y}{height}', + "observableProperty": [ + "Slope", + # "Roll", + # "InSystemTemperature" + ], + "observationType": [ + "http://www.opengis.net/def/observationType/OGC-OM/2.0/OM_Measurement" + ], + "featureOfInterestType": "http://www.opengis.net/def/samplingFeatureType/OGC-OM/2.0/SF_SamplingPoint" + } + return body + + +if __name__ == '__main__': + main() diff --git a/gschliefgraben_piezometer/import_piezometer_observations.py b/gschliefgraben_piezometer/import_piezometer_observations.py new file mode 100644 index 0000000..4bd734a --- /dev/null +++ b/gschliefgraben_piezometer/import_piezometer_observations.py @@ -0,0 +1,98 @@ +''' +Sqlalchemy version: 1.2.15 +Python version: 3.7 +''' + +import os +import uuid +from sqlalchemy.orm import session +from dotenv import load_dotenv, find_dotenv +import requests +from datetime import datetime +from db.models import ( + Observation, + create_pg_session, + Dataset, + Procedure, + Phenomenon, + Platform, + Format +) + + +def main(): + ''' main method ''' + pg_session: session = create_pg_session() + platform_sta_identifier = "pechgraben_piezometer" + sensor = "bohrloch1" + + pg_query = pg_session.query(Dataset) \ + 
+        .join(Procedure) \
+        .join(Phenomenon) \
+        .filter(Procedure.sta_identifier == sensor.lower())
+    elevation_dataset: Dataset = pg_query.filter(
+        Phenomenon.sta_identifier == "Elevation").first()
+    if not elevation_dataset:
+        print("Sensor " + sensor + " has not been created yet!")
+        exit()
+    # if not elevation_dataset.is_published:
+    #     elevation_dataset.is_published = 1
+    #     elevation_dataset.is_hidden = 0
+    #     elevation_dataset.dataset_type = "timeseries"
+    #     elevation_dataset.observation_type = "simple"
+    #     elevation_dataset.value_type = "text"
+    #     pg_session.commit()
+
+    platform_exists: bool = pg_session.query(Platform.id).filter_by(
+        sta_identifier=platform_sta_identifier).scalar() is not None
+    if platform_exists:
+        sensor_platform = pg_session.query(Platform.id) \
+            .filter(Platform.sta_identifier == platform_sta_identifier) \
+            .first()
+        elevation_dataset.fk_platform_id = sensor_platform.id
+
+    format_exists: bool = pg_session.query(Format.id).filter_by(
+        definition="http://www.opengis.net/def/observationType/OGC-OM/2.0/OM_Measurement"
+    ).scalar() is not None
+    if format_exists:
+        sensor_format = pg_session.query(Format.id) \
+            .filter(Format.definition == "http://www.opengis.net/def/observationType/OGC-OM/2.0/OM_Measurement") \
+            .first()
+        elevation_dataset.fk_format_id = sensor_format.id
+
+
+def test():
+    ''' test method '''
+    sensor_key = 'bohrloch1'
+    url = 'https://jaa5ixl2y0.execute-api.ap-southeast-2.amazonaws.com/v1/data'
+
+    params = {}
+    headers = {'content-type': 'application/json'}
+
+    resp = requests.get(url=url, params=params, headers=headers)
+    data = resp.json()  # Check the JSON Response Content documentation below
+    # sensor_data = json.dumps(data)
+    if sensor_key in data:
+        print("Sensor key exists in JSON data")
+        sensor_object = data[sensor_key]
+        zeitstempel = sensor_object["zeitstempel"]
+        abstich = sensor_object["abstich"]
+        date_obj = datetime.strptime(
+            zeitstempel, '%Y:%m:%d %H:%M:%S')
+
+        new_observation: Observation = Observation()
+        # new_observation.id = max_id
+        new_observation.sta_identifier = str(uuid.uuid4())
+        new_observation.result_time = date_obj
+        new_observation.sampling_time_start = new_observation.result_time
+        new_observation.sampling_time_end = new_observation.result_time
+        new_observation.value_type = "quantity"
+        new_observation.value_quantity = abstich
+        # new_observation.fk_dataset_id = dataset.id
+
+
+if __name__ == "__main__":
+    load_dotenv(find_dotenv())
+    sensor_list1 = os.environ.get('GLASFASER_GSCHLIEFGRABEN_SENSORS', [])
+    print(f'sensors: {sensor_list1} .')
+    test()
diff --git a/insert_sensor/istSOS_insert.xml b/insert_sensor/istSOS_insert.xml
deleted file mode 100644
index d33e214..0000000
--- a/insert_sensor/istSOS_insert.xml
+++ /dev/null
@@ -1,84 +0,0 @@
-xxxxxxxxxxxxxxxxxxxxxxxxxxx
-2014-06-03T15:08:00Z
-2014-06-03T15:48:00Z
-5
-2014-06-03T14:10:00+0200,0.000000,200,20.000000,200@
-2014-06-03T14:20:00+0200,0.000000,200,20.100000,200@
-2014-06-03T14:30:00+0200,0.000000,200,20.200000,200@
-2014-06-03T14:40:00+0200,0.000000,200,20.500000,200@
-2014-06-03T14:50:00+0200,0.000000,200,20.500000,200@
-2014-06-03T15:00:00+0200,0.000000,200,20.400000,200@
-2014-06-03T15:10:00+0200,0.000000,200,20.400000,200@
-2014-06-03T15:20:00+0200,0.100000,200,19.600000,200@
-2014-06-03T15:30:00+0200,0.100000,200,19.100000,200@
-2014-06-03T15:40:00+0200,0.000000,200,19.000000,200@
-2014-06-03T15:50:00+0200,0.000000,200,20.600000,200
-
-
-
-
-
\ No newline at end of file
diff --git a/pechgraben_images/import_image_observations.py b/pechgraben_images/import_image_observations.py
index f316b36..d08059b 100644
--- a/pechgraben_images/import_image_observations.py
+++ b/pechgraben_images/import_image_observations.py
@@ -92,10 +92,10 @@ def import_images(dataset: Dataset, pg_session):
         # print(file_path)
         img_file = open(file_path, 'rb')
         img: Image = Image(img_file)
-        if img.has_exif:
-            info = f" has the EXIF {img.exif_version}"
-        else:
-            info = "does not contain any EXIF information"
+        # if img.has_exif:
+        #     info = f" has the EXIF {img.exif_version}"
+        # else:
+        #     info = "does not contain any EXIF information"
         # print(f"Image {img_file.name}: {info}")
 
         # Original datetime that image was taken (photographed)
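
Note: test() in import_piezometer_observations.py builds an Observation from the AWS response but never links it to the elevation dataset or commits the session. The sketch below shows one way the pieces of this patch could be tied together end to end. It is a minimal, hypothetical example, not part of the patch: the function name import_one_reading is invented here, and it assumes the db.models helpers (create_pg_session, Dataset, Observation, Procedure, Phenomenon) behave exactly as they are used above and that the endpoint keeps returning one object per sensor key with "zeitstempel" and "abstich" fields.

# minimal sketch (assumptions as stated above): fetch one piezometer reading and persist it
import uuid
from datetime import datetime

import requests

from db.models import (Observation, Dataset, Procedure, Phenomenon,
                       create_pg_session)


def import_one_reading(sensor_key='bohrloch1'):
    """Fetch the latest reading for one piezometer and store it as an Observation."""
    url = 'https://jaa5ixl2y0.execute-api.ap-southeast-2.amazonaws.com/v1/data'
    data = requests.get(url, headers={'content-type': 'application/json'}).json()
    if sensor_key not in data:
        print("Sensor key " + sensor_key + " not found in JSON data")
        return

    reading = data[sensor_key]
    # same timestamp format as used in test() above (assumption)
    result_time = datetime.strptime(reading['zeitstempel'], '%Y:%m:%d %H:%M:%S')

    # look up the elevation dataset for this sensor, as main() does above
    pg_session = create_pg_session()
    elevation_dataset = (
        pg_session.query(Dataset)
        .join(Procedure)
        .join(Phenomenon)
        .filter(Procedure.sta_identifier == sensor_key.lower())
        .filter(Phenomenon.sta_identifier == "Elevation")
        .first())
    if elevation_dataset is None:
        print("Sensor " + sensor_key + " has not been created yet!")
        return

    # build the observation and attach it to the dataset
    new_observation = Observation()
    new_observation.sta_identifier = str(uuid.uuid4())
    new_observation.result_time = result_time
    new_observation.sampling_time_start = result_time
    new_observation.sampling_time_end = result_time
    new_observation.value_type = "quantity"
    new_observation.value_quantity = reading['abstich']
    new_observation.fk_dataset_id = elevation_dataset.id

    pg_session.add(new_observation)
    pg_session.commit()


if __name__ == '__main__':
    import_one_reading()

The dataset lookup deliberately repeats the join used in main() so that the observation is committed against the same Dataset row whose platform and format foreign keys main() prepares.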