diff --git a/gschliefgraben_glasfaser/main.py b/gschliefgraben_glasfaser/main.py
index 3689b72..6fe0ec9 100644
--- a/gschliefgraben_glasfaser/main.py
+++ b/gschliefgraben_glasfaser/main.py
@@ -9,33 +9,29 @@
 import os
 import uuid
 from typing import List
 from itertools import chain
+import json
 # import sys, inspect
 # currentdir = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))
 # parentdir = os.path.dirname(currentdir)
 # sys.path.insert(0, parentdir)
 # import requests
 from sqlalchemy.orm import session
-from sqlalchemy import func
+from sqlalchemy import func, asc, desc
 # from db.pg_models import Platform
 from gschliefgraben_glasfaser.models import ObservationSchema, Person, PersonSchema, Observation, create_pg_session, Dataset, Procedure, Phenomenon, Platform
 from gschliefgraben_glasfaser.my_api import MyApi
 from datetime import datetime, date, timedelta
-# from db.pg_models import create_pg_session
-#from models import Person, PersonSchema
-# response = requests.get('https://api.com/')
-# print(response) # shows the response's HTTP status code
-# print(response.json()) # shows the response's JSON response body, if it has one
-# print(response.content) # get the data content of the response
 
 
 def main():
     ''' main method '''
     pg_session: session = create_pg_session()
     platform_sta_identifier = "gschliefgraben_glasfaser"
     # sensor_list = ["inclino1_14", "inclino1_02"]
-    sensor_list = os.environ.get("GLASFASER_GSCHLIEFGRABEN_SENSORS")
+    #sensor_list = os.environ.get("GLASFASER_GSCHLIEFGRABEN_SENSORS")
+    sensor_list = json.loads(os.environ['GLASFASER_GSCHLIEFGRABEN_SENSORS'])
     # this will print elements along with their index value
-    for sensor in enumerate(sensor_list):
+    for sensor in sensor_list:
         pg_query = pg_session.query(Dataset) \
             .join(Procedure) \
@@ -66,6 +62,27 @@ def main():
         # create all the observation for the given sensor names
         create_observations(sensor, slope_dataset)
+
+        # update first and last observations for the dataset
+        first_slope_observation = pg_session.query(Observation) \
+            .filter(Observation.fk_dataset_id == slope_dataset.id) \
+            .order_by(asc('sampling_time_start')) \
+            .first()
+        if first_slope_observation is not None:
+            slope_dataset.first_time = first_slope_observation.sampling_time_start
+            slope_dataset.first_value = first_slope_observation.value_quantity
+            slope_dataset.fk_first_observation_id = first_slope_observation.id
+        last_slope_observation = pg_session.query(Observation) \
+            .filter(Observation.fk_dataset_id == slope_dataset.id) \
+            .order_by(desc('sampling_time_start')) \
+            .first()
+        if last_slope_observation is not None:
+            slope_dataset.last_time = last_slope_observation.sampling_time_start
+            slope_dataset.last_value = last_slope_observation.value_quantity
+            slope_dataset.fk_last_observation_id = last_slope_observation.id
+
+    pg_session.commit()
+    pg_session.close()
 
 
 def create_observations(sensor: str, slope_dataset: Dataset):
     ''' create_observations method for given sensor '''
diff --git a/gschliefgraben_glasfaser/update_daily_cron.py b/gschliefgraben_glasfaser/update_daily_cron.py
index 77d2fed..381ec33 100644
--- a/gschliefgraben_glasfaser/update_daily_cron.py
+++ b/gschliefgraben_glasfaser/update_daily_cron.py
@@ -5,10 +5,10 @@
 Sqlalchemy version: 1.2.15
 Python version: 3.10
 '''
-import os
+import os, json
 import uuid
 from sqlalchemy.orm import session
-from sqlalchemy import func
+from sqlalchemy import func, asc, desc
 # from db.pg_models import Platform
 from gschliefgraben_glasfaser.models import ObservationSchema, Observation, create_pg_session, Dataset, Procedure, Phenomenon, Platform
 from gschliefgraben_glasfaser.my_api import MyApi
@@ -19,10 +19,11 @@ def main():
     pg_session: session = create_pg_session()
     platform_sta_identifier = "gschliefgraben_glasfaser"
     # sensor_list = ["inclino1_14", "inclino1_02"]
-    sensor_list = os.environ.get("GLASFASER_GSCHLIEFGRABEN_SENSORS")
-
+    #sensor_list = os.environ.get("GLASFASER_GSCHLIEFGRABEN_SENSORS")
+    sensor_list = json.loads(os.environ['GLASFASER_GSCHLIEFGRABEN_SENSORS'])
+
     # this will print elements along with their index value
-    for sensor in enumerate(sensor_list):
+    for sensor in sensor_list:
         pg_query = pg_session.query(Dataset) \
             .join(Procedure) \
             .join(Phenomenon) \
@@ -50,6 +51,27 @@ def main():
         # create all the observation for the given sensor names
         create_observations(sensor, slope_dataset)
+
+        first_slope_observation = pg_session.query(Observation) \
+            .filter(Observation.fk_dataset_id == slope_dataset.id) \
+            .order_by(asc('sampling_time_start')) \
+            .first()
+        if first_slope_observation is not None:
+            slope_dataset.first_time = first_slope_observation.sampling_time_start
+            slope_dataset.first_value = first_slope_observation.value_quantity
+            slope_dataset.fk_first_observation_id = first_slope_observation.id
+        last_slope_observation = pg_session.query(Observation) \
+            .filter(Observation.fk_dataset_id == slope_dataset.id) \
+            .order_by(desc('sampling_time_start')) \
+            .first()
+        if last_slope_observation is not None:
+            slope_dataset.last_time = last_slope_observation.sampling_time_start
+            slope_dataset.last_value = last_slope_observation.value_quantity
+            slope_dataset.fk_last_observation_id = last_slope_observation.id
+
+    pg_session.commit()
+    pg_session.close()
+
 
 
 def create_observations(sensor: str, slope_dataset: Dataset):
     ''' create_observations method for given sensor '''
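Note on the changed sensor_list handling in both files: json.loads(os.environ['GLASFASER_GSCHLIEFGRABEN_SENSORS']) assumes the environment variable holds a JSON-encoded array of sensor names. A minimal sketch of the expected format, reusing the sensor names from the commented-out example list (the real deployment values may differ):

import json
import os

# Assumption for illustration only: the variable contains a JSON array of sensor names.
os.environ["GLASFASER_GSCHLIEFGRABEN_SENSORS"] = '["inclino1_14", "inclino1_02"]'

sensor_list = json.loads(os.environ["GLASFASER_GSCHLIEFGRABEN_SENSORS"])
for sensor in sensor_list:
    print(sensor)  # each element is a plain string such as "inclino1_14"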