diff --git a/voegelsberg/import_tachymeter_observations.py b/voegelsberg/import_tachymeter_observations.py
index 4ed9b35..8851712 100644
--- a/voegelsberg/import_tachymeter_observations.py
+++ b/voegelsberg/import_tachymeter_observations.py
@@ -2,6 +2,7 @@
 import csv
 # import requests
 from datetime import datetime
+from typing import List
 import uuid
 from pyproj import Transformer
 # from insert_sensor.wrapper import (Offering, FoI, Procedure)
@@ -101,10 +102,11 @@ def main():
         # pg_session.commit()
     # for loop sensors end
 
+    actualize_first_last_observations()
     pg_session.close()
 
 
-def create_observation(location_dataset: Dataset, data: dict[any, any], pg_session: session):
+def create_observation(location_dataset: Dataset, data, pg_session: session):
     ''' create observation in db'''
     # print("Sesnor key exist in JSON data")
     transprojr = Transformer.from_crs(31254, 4326, always_xy=True)
@@ -146,6 +148,46 @@ def create_observation(location_dataset: Dataset, data: dict[any, any], pg_sess
     return False
 
 
+def actualize_first_last_observations():
+    ''' iterate through all datasets of the Voegelsberg project area
+    and update the corresponding first and last observations '''
+    pg_session: session = create_pg_session()
+    platform_sta_identifier = "voegelsberg_tachymeter"
+    # sensor_platform = pg_session.query(Platform.id) \
+    #     .filter(Platform.sta_identifier == platform_sta_identifier) \
+    #     .first()
+
+    voegelsberg_datasets: List[Dataset] = []
+    voegelsberg_datasets = pg_session.query(Dataset) \
+        .join(Procedure) \
+        .join(Phenomenon) \
+        .join(Platform) \
+        .filter(Platform.sta_identifier == platform_sta_identifier).all()
+
+    for location_dataset in voegelsberg_datasets:
+        # look up the newest and oldest observation of this dataset
+        # and store their references on the dataset
+        last_location_observation = pg_session.query(Observation) \
+            .filter(Observation.fk_dataset_id == location_dataset.id) \
+            .order_by(desc('sampling_time_start')) \
+            .first()
+        if last_location_observation is not None:
+            location_dataset.last_time = last_location_observation.sampling_time_start
+            # location_dataset.last_value = last_location_observation.value_quantity
+            location_dataset.fk_last_observation_id = last_location_observation.id
+
+        first_location_observation = pg_session.query(Observation) \
+            .filter(Observation.fk_dataset_id == location_dataset.id) \
+            .order_by(asc('sampling_time_start')) \
+            .first()
+        if first_location_observation is not None:
+            location_dataset.first_time = first_location_observation.sampling_time_start
+            # location_dataset.first_value = first_location_observation.value_quantity
+            location_dataset.fk_first_observation_id = first_location_observation.id
+
+    pg_session.commit()
+
+
 def get_xml(offering, procedure, foi, result_time, identifier):
     ''' """ Prepares the body of a InsertSensor request for JSON biding.
 
@@ -178,4 +220,4 @@ def get_xml(offering, procedure, foi, result_time, identifier):
 
 
 if __name__ == '__main__':
-    main()
+    actualize_first_last_observations()