'''
Tutorial link: https://realpython.com/flask-connexion-rest-api-part-2/
https://github.com/realpython/materials/blob/master/flask-connexion-rest-part-2/version_1/people.py
SQLAlchemy version: 1.2.15
Python version: 3.10
'''
import os
import json
import uuid
from datetime import datetime
from dotenv import load_dotenv, find_dotenv
from sqlalchemy.orm import Session
from sqlalchemy import func, asc, desc
from db.models import (
    ObservationSchema, Observation, create_pg_session,
    Dataset, Procedure, Phenomenon, Platform)
from gschliefgraben_glasfaser.my_api import MyApi


def main():
    ''' main method '''
    pg_session: Session = create_pg_session()
    platform_sta_identifier = "gschliefgraben_glasfaser"
    # GLASFASER_GSCHLIEFGRABEN_SENSORS holds a JSON array of sensor names
    sensor_list = json.loads(os.environ['GLASFASER_GSCHLIEFGRABEN_SENSORS'])

    # import the slope observations for every configured sensor
    for sensor in sensor_list:
        pg_query = pg_session.query(Dataset) \
            .join(Procedure) \
            .join(Phenomenon) \
            .filter(Procedure.sta_identifier == sensor.lower())
        slope_dataset: Dataset = pg_query.filter(
            Phenomenon.sta_identifier == "Slope").first()
        if not slope_dataset:
            print(f"Sensor {sensor} has not been created yet!")
            continue
        if not slope_dataset.is_published:
            slope_dataset.is_published = 1
            slope_dataset.is_hidden = 0
            slope_dataset.dataset_type = "timeseries"
            slope_dataset.observation_type = "simple"
            slope_dataset.value_type = "quantity"
            pg_session.commit()

        # link the dataset to the platform, if the platform exists
        sensor_platform = pg_session.query(Platform.id) \
            .filter(Platform.sta_identifier == platform_sta_identifier) \
            .first()
        if sensor_platform is not None:
            slope_dataset.fk_platform_id = sensor_platform.id

        # create all the observations for the given sensor
        create_observations(sensor, slope_dataset)

        # update the dataset's first/last observation metadata
        first_slope_observation = pg_session.query(Observation) \
            .filter(Observation.fk_dataset_id == slope_dataset.id) \
            .order_by(asc('sampling_time_start')) \
            .first()
        if first_slope_observation is not None:
            slope_dataset.first_time = first_slope_observation.sampling_time_start
            slope_dataset.first_value = first_slope_observation.value_quantity
            slope_dataset.fk_first_observation_id = first_slope_observation.id
        last_slope_observation = pg_session.query(Observation) \
            .filter(Observation.fk_dataset_id == slope_dataset.id) \
            .order_by(desc('sampling_time_start')) \
            .first()
        if last_slope_observation is not None:
            slope_dataset.last_time = last_slope_observation.sampling_time_start
            slope_dataset.last_value = last_slope_observation.value_quantity
            slope_dataset.fk_last_observation_id = last_slope_observation.id

    pg_session.commit()
    pg_session.close()


def create_observations(sensor: str, slope_dataset: Dataset):
    ''' create the observations for the given sensor '''
    pg_session: Session = create_pg_session()
    # access token for the sensor data API
    token_api = os.environ.get("TOKEN_API")
    test_api = MyApi(token_api)
    # import only today's data
    query_date = datetime.today().strftime('%Y-%m-%d')
    create_db_observations(sensor, query_date, test_api,
                           pg_session, slope_dataset)
    pg_session.commit()
    pg_session.close()
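
# Note: the response structure below is inferred from the parsing code in
# create_db_observations(); the field names come from that code, while the
# concrete values are illustrative only. getSensorData(sensor, from_date,
# to_date) is assumed to return JSON roughly shaped like:
#
#   {
#       "FeatureCollection": {
#           "Features": [{
#               "geometry": {
#                   "properties": [[
#                       {"id": 1,
#                        "DateTime": "2022-02-01T00:00:00.000Z",
#                        "Value": 0.42},
#                       ...
#                   ]]
#               }
#           }]
#       }
#   }
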
def create_db_observations(sensor: str, query_date: str, test_api: MyApi,
                           pg_session: Session, dataset: Dataset):
    ''' parse and store each observation returned for the given day '''
    query_date_obj = datetime.strptime(query_date, "%Y-%m-%d")
    data = test_api.getSensorData(sensor, query_date, query_date)
    observation_array = (data['FeatureCollection']
                         ['Features'][0]['geometry']['properties'][0])

    max_id = pg_session.query(func.max(Observation.id)).scalar()
    if max_id is None:
        max_id = -1
    for observation_json in observation_array:
        ob_date_time = observation_json.get('DateTime')
        if ob_date_time is None:
            continue
        datetime_obj = datetime.strptime(ob_date_time,
                                         "%Y-%m-%dT%H:%M:%S.%fZ")
        # skip values that do not belong to the queried day
        if datetime_obj.date() != query_date_obj.date():
            continue
        ob_value = observation_json.get('Value')
        if ob_value is None:
            continue
        max_id = create_observation(
            observation_json, pg_session, max_id, dataset)
    print("observations for date " + query_date + " and sensor " +
          sensor + " successfully imported\n")


def create_observation(observation_json: dict, db_session,
                       max_id, dataset: Dataset):
    """
    Create a new observation for the given dataset, unless an observation
    with the same value_identifier already exists.
    :param observation_json: observation data as returned by the API
    :return: the highest observation id handled so far
    """
    ob_id: str = str(observation_json.get('id'))
    existing_observation = (
        db_session.query(Observation)
        .filter(Observation.value_identifier == ob_id,
                Observation.fk_dataset_id == dataset.id)
        .one_or_none()
    )
    # can we insert this observation?
    if existing_observation is None:
        max_id += 1
        # deserialize the JSON payload to an Observation instance
        schema = ObservationSchema()
        new_observation: Observation = schema.load(observation_json)
        new_observation.sta_identifier = str(uuid.uuid4())
        new_observation.sampling_time_start = new_observation.result_time
        new_observation.sampling_time_end = new_observation.result_time
        new_observation.fk_dataset_id = dataset.id
        # add the observation to the database
        db_session.add(new_observation)
        return max_id
    # otherwise the observation exists already
    print(409, f'Observation {ob_id} exists already')
    return max_id


if __name__ == "__main__":
    load_dotenv(find_dotenv())
    sensor_list1 = os.environ.get('GLASFASER_GSCHLIEFGRABEN_SENSORS', '[]')
    print(f'sensors: {sensor_list1}')
    main()
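
# Example .env entries this script expects; the sensor names shown are
# illustrative only, and TOKEN_API must hold a valid token for the sensor
# data API:
#
#   GLASFASER_GSCHLIEFGRABEN_SENSORS='["inclino1_14", "inclino1_02"]'
#   TOKEN_API=<your-api-token>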