From f104e9e74bb8ab37f39923d333115175fd6d1487 Mon Sep 17 00:00:00 2001
From: Arno Kaimbacher
Date: Thu, 3 Mar 2022 15:55:40 +0100
Subject: [PATCH] - now adding observations to dataset
 - additional notes for pip in notes.txt
 - pip requirements.txt

---
 gschliefgraben_glasfaser/main.py   | 77 ++++++++++++++++++-----------
 gschliefgraben_glasfaser/models.py | 74 ++++++++++++++++++++++++---
 notes.txt                          |  3 +-
 requirements.txt                   | Bin 0 -> 1114 bytes
 4 files changed, 117 insertions(+), 37 deletions(-)
 create mode 100644 requirements.txt

diff --git a/gschliefgraben_glasfaser/main.py b/gschliefgraben_glasfaser/main.py
index 09d852b..fcdf65a 100644
--- a/gschliefgraben_glasfaser/main.py
+++ b/gschliefgraben_glasfaser/main.py
@@ -6,7 +6,6 @@
 Python version: 3.7
 '''
 import os
-from tokenize import String
 import uuid
 # import sys, inspect
 # currentdir = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))
@@ -15,7 +14,8 @@ import uuid
 # import requests
 from sqlalchemy.orm import session
 from sqlalchemy import func
-from gschliefgraben_glasfaser.models import ObservationSchema, Person, PersonSchema, Observation, create_pg_session
+# from db.pg_models import Platform
+from gschliefgraben_glasfaser.models import ObservationSchema, Person, PersonSchema, Observation, create_pg_session, Dataset, Procedure, Phenomenon, Platform
 from gschliefgraben_glasfaser.my_api import MyApi
 from datetime import datetime, date, timedelta
 # from db.pg_models import create_pg_session
@@ -41,13 +41,9 @@ def main():
     # dump_data = person_schema.dump(pg_person)
     # print(dump_data)
     # serialize db data to json
-    observation_schema = ObservationSchema()
-    dump_data = observation_schema.dump(observation)
-    print(dump_data)
-
-    # # deserialize to db model
-    # load_data: Person = person_schema.load(dump_data)
-    # print(load_data)
+    # observation_schema = ObservationSchema()
+    # dump_data = observation_schema.dump(observation)
+    # print(dump_data)
 
     # request ortmann api
     # response =
@@ -61,10 +57,24 @@ def main():
     #     data='grant_type=client_credentials&scope=gschliefgraben')
     # print(response)
 
+    sensor: str = "inclino1_14"
+    pg_query = pg_session.query(Dataset) \
+        .join(Procedure) \
+        .join(Phenomenon) \
+        .filter(Procedure.sta_identifier == sensor.lower())
+    slope_dataset: Dataset = pg_query.filter(
+        Phenomenon.sta_identifier == "Slope").first()
+    if not slope_dataset.is_published:
+        slope_dataset.is_published = 1
+        slope_dataset.is_hidden = 0
+        slope_dataset.dataset_type = "timeseries"
+        slope_dataset.observation_type = "simple"
+        slope_dataset.value_type = "quantity"
+        pg_session.commit()
+
     # The size of each step in days
-    # consider the start date as 2021-february 1 st
-    start_date = date(2021, 2, 28)
+    start_date = date(2022, 1, 1)
     # consider the end date as 2021-march 1 st
     end_date = date(2022, 3, 1)
 
@@ -77,13 +87,13 @@ def main():
     while start_date <= end_date:
         # print(start_date, end="\n")
         query_date = start_date.strftime('%Y-%m-%d')
-        create_db_observations(query_date, test_api, pg_session)
+        create_db_observations(query_date, test_api, pg_session, slope_dataset)
         start_date += delta
+    pg_session.commit()
 
     # for i in rrule(DAILY , dtstart=start_date,until=end_date):
     #    print(i.strftime('%Y%b%d'),sep='\n')
 
-
     # query_date = "2022-02-28"
     # create_db_observations(query_date, test_api, pg_session)
     # query_date_obj = datetime.strptime(query_date, "%Y-%m-%d")
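
A caveat on the dataset lookup above: `pg_query.filter(...).first()` returns `None` when no row matches, so the following `if not slope_dataset.is_published` would raise an AttributeError for an unknown sensor. A minimal sketch of a defensive variant, reusing the names this patch introduces (not part of the patch itself):

```python
# Sketch only: same query shape as in main.py above, with a None guard.
# Dataset, Procedure, Phenomenon, create_pg_session come from
# gschliefgraben_glasfaser.models as extended in this patch.
from gschliefgraben_glasfaser.models import (
    Dataset, Phenomenon, Procedure, create_pg_session)

pg_session = create_pg_session()
sensor = "inclino1_14"
slope_dataset = (
    pg_session.query(Dataset)
    .join(Procedure)
    .join(Phenomenon)
    .filter(Procedure.sta_identifier == sensor.lower())
    .filter(Phenomenon.sta_identifier == "Slope")
    .first()
)
if slope_dataset is None:
    raise SystemExit(f"no Slope dataset found for sensor {sensor}")
if not slope_dataset.is_published:
    slope_dataset.is_published = 1
    slope_dataset.is_hidden = 0
    pg_session.commit()
```
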
@@ -92,14 +102,12 @@ def main():
     # ['Features'][0]['geometry']['properties'][0])
     # print(observation_array)
 
-
-
     # max_id = pg_session.query(func.max(Observation.id)).scalar()
     # if max_id is None:
     #     max_id = -1
     # pg_session.bulk_save_objects(observations)
     # for observation_json in observation_array:
-    #     ob_date_time = observation_json.get('DateTime')
+    #    ob_date_time = observation_json.get('DateTime')
     #     datetime_obj = datetime.strptime(ob_date_time, "%Y-%m-%dT%H:%M:%S.%fZ")
     #     if datetime_obj.date() != query_date_obj.date():
     #         continue
@@ -108,7 +116,8 @@ def main():
     #     print(observation_json)
     #     create_observation(observation_json, db_session)
     # pg_session.commit()
-def create_db_observations(query_date, test_api, pg_session):
+
+def create_db_observations(query_date, test_api, pg_session, dataset: Dataset):
     ''' to do '''
     query_date_obj = datetime.strptime(query_date, "%Y-%m-%d")
     data = test_api.getSensorData("inclino1_14", query_date)
@@ -121,20 +130,21 @@ def create_db_observations(query_date, test_api, pg_session):
         max_id = -1
     # pg_session.bulk_save_objects(observations)
     for observation_json in observation_array:
-        ob_date_time = observation_json.get('DateTime')
+        ob_date_time = observation_json.get('DateTime')
         datetime_obj = datetime.strptime(ob_date_time, "%Y-%m-%dT%H:%M:%S.%fZ")
         if datetime_obj.date() != query_date_obj.date():
             continue
-        ob_value = observation_json.get('Value')
+        ob_value = observation_json.get('Value')
         if ob_value is None:
             continue
-        max_id = max_id + 1
-        create_observation(observation_json, pg_session, max_id)
-    pg_session.commit()
-    print("observations for date " +query_date+ "succesfully imported \n")
+        # max_id = max_id + 1
+        max_id = create_observation(
+            observation_json, pg_session, max_id, dataset)
+    # pg_session.commit()
+    print("observations for date " + query_date + " successfully imported\n")
 
 
-def create_observation(observation_json: ObservationSchema, db_session, max_id):
+def create_observation(observation_json: ObservationSchema, db_session, max_id, dataset: Dataset):
     """
     This function creates a new observation in the people structure
     based on the passed-in observation data
 
     :param observation:  person to create in people structure
     :return:        201 on success, observation on person exists
     """
-    ob_id = observation_json.get('id')
+    ob_id: str = str(observation_json.get('id'))
     # db_session = create_pg_session()
     existing_observation: bool = (
         db_session.query(Observation)
-        .filter(Observation.id == ob_id)
+        .filter(Observation.value_identifier == ob_id)
         .one_or_none()
     )
 
     # Can we insert this observation?
     if existing_observation is None:
+        max_id += 1
         # Create a person instance using the schema and the passed in person
         schema = ObservationSchema()
         # deserialize to python object
         new_observation: Observation = schema.load(observation_json)
-        new_observation.id = max_id + 1
+        new_observation.id = max_id
         new_observation.sta_identifier = str(uuid.uuid4())
+        new_observation.sampling_time_start = new_observation.result_time
+        new_observation.sampling_time_end = new_observation.result_time
+        new_observation.fk_dataset_id = dataset.id
         # Add the person to the database
         db_session.add(new_observation)
+        # dataset.observations.append(new_observation)
         # db_session.commit()
         # Serialize and return the newly created person in the response
-        data = schema.dump(new_observation)
-        return data, 201
+        # data = schema.dump(new_observation)
+        # return data, 201
+        return max_id
     # Otherwise, nope, person exists already
     else:
         print(409, f'Observation {ob_id} exists already')
+        return max_id
 
 
 def create(person_json: PersonSchema):
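
The reworked loop threads `max_id` through `create_observation`, which now returns the highest primary key it has used; duplicates (matched on `value_identifier`) leave the counter untouched, and the session is committed once per day rather than per observation. The pattern in isolation, as a runnable sketch with stand-ins (no database involved):

```python
# Stand-in model of the id threading in create_db_observations above:
# the creator returns the running max id, the caller re-feeds it.
def create_one(store: dict, key: str, max_id: int) -> int:
    if key in store:          # duplicate: insert nothing, keep the counter
        return max_id
    max_id += 1               # allocate the next manual primary key
    store[key] = max_id
    return max_id

store: dict = {}
max_id = -1                   # same sentinel as when the table is empty
for key in ["a", "b", "a", "c"]:
    max_id = create_one(store, key, max_id)
assert store == {"a": 0, "b": 1, "c": 2} and max_id == 2
```
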
diff --git a/gschliefgraben_glasfaser/models.py b/gschliefgraben_glasfaser/models.py
index 1405794..c72f369 100644
--- a/gschliefgraben_glasfaser/models.py
+++ b/gschliefgraben_glasfaser/models.py
@@ -38,6 +38,51 @@ def create_pg_session() -> sessionmaker:
     # Base.metadata.create_all(engine)
     return _session
 
+
+class Platform(Base):
+    """ Platform class """
+    __tablename__ = 'platform'
+    __table_args__ = {"schema": "gba"}
+
+    id = Column('platform_id', Integer, primary_key=True)
+    identifier = Column('identifier', String)
+    sta_identifier = Column('sta_identifier', String)
+    name = Column('name', String)
+    # datasets = relationship('Dataset')
+    datasets = relationship('Dataset', back_populates="platform", lazy=True)
+
+    def __repr__(self):
+        return f'Platform {self.name}'
+
+
+class Phenomenon(Base):
+    """ phenomenon class """
+    __tablename__ = 'phenomenon'
+    __table_args__ = {"schema": "gba"}
+
+    id = Column('phenomenon_id', Integer, primary_key=True)
+    name = Column('name', String)
+    sta_identifier = Column('sta_identifier', String)
+    # datasets = relationship('Dataset')
+    datasets = relationship('Dataset', back_populates="phenomenon", lazy=True)
+
+    def __repr__(self):
+        return f'Phenomenon {self.name}'
+
+
+class Procedure(Base):
+    """ procedure class """
+    __tablename__ = 'procedure'
+    __table_args__ = {"schema": "gba"}
+
+    id = Column('procedure_id', Integer, primary_key=True)
+    name = Column('name', String)
+    sta_identifier = Column('sta_identifier', String)
+    # datasets = relationship('Dataset')
+    datasets = relationship('Dataset', back_populates="procedure", lazy=True)
+
+    def __repr__(self):
+        return f'Procedure {self.name}'
 
 class Dataset(Base):
     """ dataset class """
@@ -70,8 +115,25 @@ class Dataset(Base):
     # first_observation = relationship("Observation", foreign_keys=[
     #     fk_first_observation_id])
 
-    # observations = relationship(
-    #     'Observation', back_populates='dataset', lazy=True)
+    observations = relationship(
+        'Observation', back_populates='dataset', lazy=True)
+
+    fk_phenomenon_id = Column(
+        'fk_phenomenon_id', Integer, ForeignKey('gba.phenomenon.phenomenon_id'), nullable=False)
+    # phenomenon = relationship("Phenomenon", lazy="joined", foreign_keys=[fk_phenomenon_id])
+    phenomenon = relationship(
+        "Phenomenon", back_populates="datasets", lazy="joined")
+
+    fk_platform_id = Column('fk_platform_id', Integer, ForeignKey(
+        'gba.platform.platform_id'), nullable=True)
+    platform = relationship(
+        "Platform", back_populates="datasets", lazy="joined")
+
+    fk_procedure_id = Column('fk_procedure_id', Integer, ForeignKey(
+        'gba.procedure.procedure_id'), nullable=False)
+    # procedure = relationship("Procedure", lazy="joined")
+    procedure = relationship(
+        "Procedure", back_populates="datasets", lazy="joined")
 
 
 def new_id_factory():
     ''' test '''
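
With the `back_populates` pairs above, the two sides of each link stay in sync inside the session: appending an `Observation` to `Dataset.observations` also sets `Observation.dataset` (and fills the foreign key on flush), and vice versa. A minimal sketch, assuming a session from `create_pg_session()` and at least one existing dataset row:

```python
# Sketch only: demonstrates the bidirectional Dataset <-> Observation link.
from gschliefgraben_glasfaser.models import (
    Dataset, Observation, create_pg_session)

session = create_pg_session()
dataset = session.query(Dataset).first()
if dataset is not None:
    obs = Observation()
    obs.value_identifier = "example-1"    # illustrative value only
    dataset.observations.append(obs)      # back_populates syncs both sides
    assert obs.dataset is dataset         # FK is written when flushed
    # session.add(obs); session.commit()  # omitted: id etc. still unset
```
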
@@ -108,9 +170,9 @@ class Observation(Base):
     value_identifier = Column('value_identifier', String)
     value_quantity = Column('value_quantity', Numeric(20, 10), nullable=False)
 
-    # fk_dataset_id = Column(Integer, ForeignKey(
-    #     'gba.dataset.dataset_id'), nullable=False)
-    # dataset = relationship("Dataset", back_populates="observations")
+    fk_dataset_id = Column(Integer, ForeignKey(
+        'gba.dataset.dataset_id'), nullable=False)
+    dataset = relationship("Dataset", back_populates="observations")
 
 
 class ObservationSchema(SQLAlchemySchema):
@@ -175,7 +237,7 @@ def create_db():
     # session_maker = sessionmaker(bind=engine)
     # session = session_maker()
     Base.metadata.drop_all(bind=engine)
-    Base.metadata.create_all(engine)
+    # Base.metadata.create_all(engine)
 
 
 if __name__ == "__main__":
diff --git a/notes.txt b/notes.txt
index c406f62..9864bda 100644
--- a/notes.txt
+++ b/notes.txt
@@ -1,4 +1,5 @@
-
+pip freeze > requirements.txt
+pip install -r ./requirements.txt
 ===========================================================================================
 python -m venv .venv
 d:/Software/geomon/.venv/Scripts/python.exe -m pip install -U pylint
diff --git a/requirements.txt b/requirements.txt
new file mode 100644
index 0000000000000000000000000000000000000000..2718fd8329c956344f72a111a240150a9cf03239
GIT binary patch
literal 1114
zcmZvb%}&EW41|3~;vJAUq7>3lN|BJ@&IR!VNxCgX{i6gyd3fL(JE0(|Rhq^gkL|Jd
z>$9{MYiwgHo7vc|IeqKdh^MeaJFwDT@vm%ciRaQ0Io=DNGe^{THehBp;R%K_ScJ;K
z^oSBuZI7^R$d^E8wuE(K3v6@ZKAfe?uc#c3h+BaWwsf>G@}K)7#DvPLHnXbgwUHCA
z{gue%-eA4_@n_hNY)baVPN=Totf}wHPB~#uWbN@kQGu$EHDM{~N573D!&~vKIDIX6
zmL4hg8*F_d5_L+GA^%650&9tu6YLdsVFp--eAS#WUDkLD-W6}10?Wvb;hJ);|KyVz
zQ|~vCeGaDa&SMsxE}Ha`yi&5D#+dYs&ZTlh46w)aa-YH+{D*Cs+loq_V6H)C=%w9x
z$0Ii78G58D(QGyE$9Brn@9igR?h_35{m6{iXS&daXnPVoktb%XOtFIt{57}=Ot&A}
z&|qIXM}u~;absqvx`foWN3W__)8E3ReV^j%5WD-ZZvA62>E>)*sqXax
j_D->wcicMNnz?sSkw*w+?fbot`}_u?M5o^yw%ev}Y{jQL

literal 0
HcmV?d00001
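
Taken together, the patch wires a raw API record into a dataset-scoped row: a manually allocated id, an STA UUID, sampling times mirrored from the result time, and the new `fk_dataset_id` column. A condensed sketch of that insert path, assuming the `Observation` columns referenced above (`result_time`, `sampling_time_*`); the helper itself is hypothetical:

```python
# Sketch of the post-patch insert path; build_observation is illustrative.
import uuid
from datetime import datetime

from gschliefgraben_glasfaser.models import Observation

def build_observation(dataset_id: int, next_id: int,
                      value: float, result_time: datetime) -> Observation:
    obs = Observation()
    obs.id = next_id                        # manual, monotonically increasing
    obs.sta_identifier = str(uuid.uuid4())  # STA identity per observation
    obs.result_time = result_time
    obs.sampling_time_start = result_time   # patch mirrors result_time
    obs.sampling_time_end = result_time
    obs.value_quantity = value
    obs.fk_dataset_id = dataset_id          # ties the row to its dataset
    return obs
```
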