diff --git a/notes.txt b/notes.txt
index 97c2c62..6992ac9 100644
--- a/notes.txt
+++ b/notes.txt
@@ -67,4 +67,17 @@ https://stackoverflow.com/questions/51737548/how-to-set-primary-key-auto-increme
 1. Pechgraben images
 2. Gschliefgraben Piezometer
 3. Gschliefgraben Glasfaser
-4. Laakirchen Inklinometer
\ No newline at end of file
+4. Laakirchen Inklinometer
+
+
+
+
+
+
+
+
+
+
+UPDATE pg_extension SET extrelocatable = TRUE WHERE extname = 'postgis';
+
+ALTER EXTENSION postgis SET SCHEMA gba;
\ No newline at end of file
diff --git a/voegelsberg/import_tachymeter_observations.py b/voegelsberg/import_tachymeter_observations.py
index 0505631..c52d308 100644
--- a/voegelsberg/import_tachymeter_observations.py
+++ b/voegelsberg/import_tachymeter_observations.py
@@ -6,6 +6,7 @@ import uuid
 from pyproj import Transformer
 # from insert_sensor.wrapper import (Offering, FoI, Procedure)
 from sqlalchemy.orm import session
+from sqlalchemy import asc, desc
 from db.models import (
     Observation,
     create_pg_session,
@@ -66,8 +67,10 @@ def main():
             .first()
         location_dataset.fk_format_id = sensor_format.id
         pg_session.commit()
-        successfully_inserted = create_observation(location_dataset, row, pg_session)
-
+        successfully_inserted = create_observation(
+            location_dataset, row, pg_session)
+
+        # commit new observations:
         if successfully_inserted:
             if not location_dataset.is_published:
                 location_dataset.is_published = 1
@@ -75,30 +78,37 @@ def main():
                 location_dataset.dataset_type = "timeseries"
                 location_dataset.observation_type = "simple"
                 location_dataset.value_type = "geometry"
+                pg_session.commit()
 
-            # if sensor_id in data:
-            #     create_observation(elevation_dataset, sensor_id, data, pg_session)
-            #     pg_session.commit()
+            last_location_observation = pg_session.query(Observation) \
+                .filter(Observation.fk_dataset_id == location_dataset.id) \
+                .order_by(desc('sampling_time_start')) \
+                .first()
+            if last_location_observation is not None:
+                location_dataset.last_time = last_location_observation.sampling_time_start
+                #location_dataset.last_value = last_location_observation.value_quantity
+                location_dataset.fk_last_observation_id = last_location_observation.id
 
-            # offering = Offering(
-            #     "https://geomon.geologie.ac.at/52n-sos-webapp/api/offerings/",
-            #     sensor_name,
-            #     "Vögelsberg Tachymeter"
-            # )
-            # procedure = Procedure(sensor_name, sensor_name)
-            # foi_name = "origin of " + sensor_name
-            # foi = FoI("degree", "m", (cord_x, cord_y, z_1),
-            #           sensor_name, foi_name)
-            # xml = get_xml(offering, procedure, foi, result_time, identifier)
-            # print(xml)
-    exit()
+            first_location_observation = pg_session.query(Observation) \
+                .filter(Observation.fk_dataset_id == location_dataset.id) \
+                .order_by(asc('sampling_time_start')) \
+                .first()
+            if first_location_observation is not None:
+                location_dataset.first_time = first_location_observation.sampling_time_start
+                # roll_dataset.first_value = first_location_observation.value_quantity
+                location_dataset.fk_first_observation_id = first_location_observation.id
+
+            pg_session.commit()
+
+    # for loop sensors end
+    pg_session.close()
 
 
 def create_observation(location_dataset: Dataset, data: dict[any, any], pg_session: session):
     ''' create observation in db'''
     # print("Sesnor key exist in JSON data")
     transprojr = Transformer.from_crs(31254, 4326, always_xy=True)
-    x_1, y_1 = (float(data['Y']), float(data['X']))
+    x_1, y_1, z_1 = (float(data['Y']), float(data['X']), float(data['H']))
     cord_x, cord_y = map(float, transprojr.transform(x_1, y_1))
     print((cord_x, cord_y))  # (11.597409730065536, 47.27196543449542)
@@ -123,8 +133,8 @@ def create_observation(location_dataset: Dataset, data: dict[any, any], pg_sess
     new_observation.result_time = date_obj
     new_observation.sampling_time_start = new_observation.result_time
     new_observation.sampling_time_end = new_observation.result_time
-    new_observation.value_type = "quantity"
-    new_observation.value_geometry = f'SRID=4326;POINT({cord_x} {cord_y})'
+    new_observation.value_type = "geometry"
+    new_observation.value_geometry = f'SRID=4326;POINTZ({cord_x} {cord_y} {z_1})'
     new_observation.fk_dataset_id = location_dataset.id
     pg_session.add(new_observation)
     print(f"new observation with result time {new_observation.result_time} "