- update dataset with first and last values of the associated observations

This commit is contained in:
Arno Kaimbacher 2022-03-04 17:32:01 +01:00
parent fdc5da7373
commit 87cb78af65
2 changed files with 53 additions and 14 deletions

View File

@@ -9,33 +9,29 @@ import os
 import uuid
 from typing import List
 from itertools import chain
+import json
 # import sys, inspect
 # currentdir = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))
 # parentdir = os.path.dirname(currentdir)
 # sys.path.insert(0, parentdir)
 # import requests
 from sqlalchemy.orm import session
-from sqlalchemy import func
+from sqlalchemy import func, asc, desc
 # from db.pg_models import Platform
 from gschliefgraben_glasfaser.models import ObservationSchema, Person, PersonSchema, Observation, create_pg_session, Dataset, Procedure, Phenomenon, Platform
 from gschliefgraben_glasfaser.my_api import MyApi
 from datetime import datetime, date, timedelta
-# from db.pg_models import create_pg_session
-#from models import Person, PersonSchema
-# response = requests.get('https://api.com/')
-# print(response) # shows the response's HTTP status code
-# print(response.json()) # shows the response's JSON response body, if it has one
-# print(response.content) # get the data content of the response


 def main():
     ''' main method '''
     pg_session: session = create_pg_session()
     platform_sta_identifier = "gschliefgraben_glasfaser"
     # sensor_list = ["inclino1_14", "inclino1_02"]
-    sensor_list = os.environ.get("GLASFASER_GSCHLIEFGRABEN_SENSORS")
+    #sensor_list = os.environ.get("GLASFASER_GSCHLIEFGRABEN_SENSORS")
+    sensor_list = json.loads(os.environ['GLASFASER_GSCHLIEFGRABEN_SENSORS'])

     # this will print elements along with their index value
-    for sensor in enumerate(sensor_list):
+    for sensor in sensor_list:
         pg_query = pg_session.query(Dataset) \
             .join(Procedure) \
@@ -67,6 +63,27 @@ def main():
         # create all the observation for the given sensor names
         create_observations(sensor, slope_dataset)

+        # update first and last observations for the dataset
+        first_slope_observation = pg_session.query(Observation) \
+            .filter(Observation.fk_dataset_id == slope_dataset.id) \
+            .order_by(asc('sampling_time_start')) \
+            .first()
+        if first_slope_observation is not None:
+            slope_dataset.first_time = first_slope_observation.sampling_time_start
+            slope_dataset.first_value = first_slope_observation.value_quantity
+            slope_dataset.fk_first_observation_id = first_slope_observation.id
+
+        last_slope_observation = pg_session.query(Observation) \
+            .filter(Observation.fk_dataset_id == slope_dataset.id) \
+            .order_by(desc('sampling_time_start')) \
+            .first()
+        if last_slope_observation is not None:
+            slope_dataset.last_time = last_slope_observation.sampling_time_start
+            slope_dataset.last_value = last_slope_observation.value_quantity
+            slope_dataset.fk_last_observation_id = last_slope_observation.id
+
+    pg_session.commit()
+    pg_session.close()


 def create_observations(sensor: str, slope_dataset: Dataset):
     ''' create_observations method for given sensor '''
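The same first/last bookkeeping is added verbatim to both scripts in this commit, so it could be factored into a shared helper. Below is a minimal sketch (not part of the commit) that wraps the two ordered queries from the diff above; the helper name update_dataset_bounds is hypothetical, and it assumes the project's Observation and Dataset models as imported in the scripts.

    # Hypothetical helper, not part of the commit: copies the earliest and
    # latest observation of a dataset onto the dataset row itself.
    from sqlalchemy import asc, desc
    from sqlalchemy.orm import Session

    from gschliefgraben_glasfaser.models import Dataset, Observation


    def update_dataset_bounds(pg_session: Session, dataset: Dataset) -> None:
        ''' update first/last time, value and observation id of a dataset '''
        # earliest observation for this dataset
        first_obs = pg_session.query(Observation) \
            .filter(Observation.fk_dataset_id == dataset.id) \
            .order_by(asc('sampling_time_start')) \
            .first()
        if first_obs is not None:
            dataset.first_time = first_obs.sampling_time_start
            dataset.first_value = first_obs.value_quantity
            dataset.fk_first_observation_id = first_obs.id

        # latest observation for this dataset
        last_obs = pg_session.query(Observation) \
            .filter(Observation.fk_dataset_id == dataset.id) \
            .order_by(desc('sampling_time_start')) \
            .first()
        if last_obs is not None:
            dataset.last_time = last_obs.sampling_time_start
            dataset.last_value = last_obs.value_quantity
            dataset.fk_last_observation_id = last_obs.id

Each script's main loop could then call update_dataset_bounds(pg_session, slope_dataset) right after create_observations() and commit once after the loop, exactly as the diff does.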

View File

@@ -5,10 +5,10 @@ Sqlalchemy version: 1.2.15
 Python version: 3.10
 '''
-import os
+import os, json
 import uuid
 from sqlalchemy.orm import session
-from sqlalchemy import func
+from sqlalchemy import func, asc, desc
 # from db.pg_models import Platform
 from gschliefgraben_glasfaser.models import ObservationSchema, Observation, create_pg_session, Dataset, Procedure, Phenomenon, Platform
 from gschliefgraben_glasfaser.my_api import MyApi
@@ -19,10 +19,11 @@ def main():
     pg_session: session = create_pg_session()
     platform_sta_identifier = "gschliefgraben_glasfaser"
     # sensor_list = ["inclino1_14", "inclino1_02"]
-    sensor_list = os.environ.get("GLASFASER_GSCHLIEFGRABEN_SENSORS")
+    #sensor_list = os.environ.get("GLASFASER_GSCHLIEFGRABEN_SENSORS")
+    sensor_list = json.loads(os.environ['GLASFASER_GSCHLIEFGRABEN_SENSORS'])

     # this will print elements along with their index value
-    for sensor in enumerate(sensor_list):
+    for sensor in sensor_list:
         pg_query = pg_session.query(Dataset) \
             .join(Procedure) \
             .join(Phenomenon) \
@@ -51,6 +52,27 @@ def main():
         # create all the observation for the given sensor names
         create_observations(sensor, slope_dataset)

+        first_slope_observation = pg_session.query(Observation) \
+            .filter(Observation.fk_dataset_id == slope_dataset.id) \
+            .order_by(asc('sampling_time_start')) \
+            .first()
+        if first_slope_observation is not None:
+            slope_dataset.first_time = first_slope_observation.sampling_time_start
+            slope_dataset.first_value = first_slope_observation.value_quantity
+            slope_dataset.fk_first_observation_id = first_slope_observation.id
+
+        last_slope_observation = pg_session.query(Observation) \
+            .filter(Observation.fk_dataset_id == slope_dataset.id) \
+            .order_by(desc('sampling_time_start')) \
+            .first()
+        if last_slope_observation is not None:
+            slope_dataset.last_time = last_slope_observation.sampling_time_start
+            slope_dataset.last_value = last_slope_observation.value_quantity
+            slope_dataset.fk_last_observation_id = last_slope_observation.id
+
+    pg_session.commit()
+    pg_session.close()


 def create_observations(sensor: str, slope_dataset: Dataset):
     ''' create_observations method for given sensor '''
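Both scripts now expect GLASFASER_GSCHLIEFGRABEN_SENSORS to contain a JSON array rather than a plain string: with the old os.environ.get() call, sensor_list was a raw string, so for sensor in enumerate(sensor_list) yielded (index, character) tuples instead of sensor names. The standalone sketch below (not part of the commit) shows the environment-variable format the updated code assumes; the example sensor names are taken from the commented-out list in the scripts.

    # Standalone sketch of the environment-variable format the updated scripts assume.
    import json
    import os

    # In the shell this would be set as, e.g.:
    #   export GLASFASER_GSCHLIEFGRABEN_SENSORS='["inclino1_14", "inclino1_02"]'
    os.environ.setdefault('GLASFASER_GSCHLIEFGRABEN_SENSORS',
                          '["inclino1_14", "inclino1_02"]')

    # Same parsing as in the updated scripts: json.loads turns the JSON array
    # into a Python list, so the loop iterates over plain sensor names.
    sensor_list = json.loads(os.environ['GLASFASER_GSCHLIEFGRABEN_SENSORS'])
    for sensor in sensor_list:
        print(sensor)  # "inclino1_14", then "inclino1_02"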