- now adding observations to dataset

- additional notes for pip in notes.txt
- add pip requirements.txt
This commit is contained in:
Arno Kaimbacher 2022-03-03 15:55:40 +01:00
parent e2ceb107c9
commit f104e9e74b
4 changed files with 117 additions and 37 deletions

View File

@@ -6,7 +6,6 @@ Python version: 3.7
 '''
 import os
-from tokenize import String
 import uuid
 # import sys, inspect
 # currentdir = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))
@@ -15,7 +14,8 @@ import uuid
 # import requests
 from sqlalchemy.orm import session
 from sqlalchemy import func
-from gschliefgraben_glasfaser.models import ObservationSchema, Person, PersonSchema, Observation, create_pg_session
+# from db.pg_models import Platform
+from gschliefgraben_glasfaser.models import ObservationSchema, Person, PersonSchema, Observation, create_pg_session, Dataset, Procedure, Phenomenon, Platform
 from gschliefgraben_glasfaser.my_api import MyApi
 from datetime import datetime, date, timedelta
 # from db.pg_models import create_pg_session
@@ -41,13 +41,9 @@ def main():
     # dump_data = person_schema.dump(pg_person)
     # print(dump_data)
     # serialize db data to json
-    observation_schema = ObservationSchema()
-    dump_data = observation_schema.dump(observation)
-    print(dump_data)
-    # # deserialize to db model
-    # load_data: Person = person_schema.load(dump_data)
-    # print(load_data)
+    # observation_schema = ObservationSchema()
+    # dump_data = observation_schema.dump(observation)
+    # print(dump_data)
     # request ortmann api
     # response =
@@ -61,10 +57,24 @@ def main():
     # data='grant_type=client_credentials&scope=gschliefgraben')
     # print(response)
+    sensor: str = "inclino1_14"
+    pg_query = pg_session.query(Dataset) \
+        .join(Procedure) \
+        .join(Phenomenon) \
+        .filter(Procedure.sta_identifier == sensor.lower())
+    slope_dataset: Dataset = pg_query.filter(
+        Phenomenon.sta_identifier == "Slope").first()
+    if not slope_dataset.is_published:
+        slope_dataset.is_published = 1
+        slope_dataset.is_hidden = 0
+        slope_dataset.dataset_type = "timeseries"
+        slope_dataset.observation_type = "simple"
+        slope_dataset.value_type = "quantity"
+        pg_session.commit()
     # The size of each step in days
     # consider the start date as 2021-february 1 st
-    start_date = date(2021, 2, 28)
+    start_date = date(2022, 1, 1)
     # consider the end date as 2021-march 1 st
     end_date = date(2022, 3, 1)
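The block added above resolves the target Dataset by joining through Procedure and Phenomenon, then publishes it on first use. A minimal standalone sketch of the same lookup pattern, assuming the models and create_pg_session from gschliefgraben_glasfaser.models (identifiers are the ones used in this commit); unlike the diff, the sketch guards against first() returning no match:

    from gschliefgraben_glasfaser.models import (
        Dataset, Phenomenon, Procedure, create_pg_session)

    pg_session = create_pg_session()
    # join Dataset -> Procedure -> Phenomenon via the foreign keys
    # declared in models.py below, then narrow to one sensor/phenomenon
    slope_dataset = (
        pg_session.query(Dataset)
        .join(Procedure)
        .join(Phenomenon)
        .filter(Procedure.sta_identifier == "inclino1_14")
        .filter(Phenomenon.sta_identifier == "Slope")
        .first())
    if slope_dataset is not None and not slope_dataset.is_published:
        slope_dataset.is_published = 1  # expose the series
        pg_session.commit()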
@@ -77,13 +87,13 @@ def main():
     while start_date <= end_date:
         # print(start_date, end="\n")
         query_date = start_date.strftime('%Y-%m-%d')
-        create_db_observations(query_date, test_api, pg_session)
+        create_db_observations(query_date, test_api, pg_session, slope_dataset)
         start_date += delta
+    pg_session.commit()
     # for i in rrule(DAILY , dtstart=start_date,until=end_date):
     #     print(i.strftime('%Y%b%d'),sep='\n')
     # query_date = "2022-02-28"
     # create_db_observations(query_date, test_api, pg_session)
     # query_date_obj = datetime.strptime(query_date, "%Y-%m-%d")
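The commented rrule lines above hint at an alternative to the manual timedelta loop; here is what that would look like with python-dateutil (an assumed extra dependency, not shown in this commit):

    from datetime import date
    from dateutil.rrule import rrule, DAILY

    # one ISO date string per day, inclusive of both endpoints
    for day in rrule(DAILY, dtstart=date(2022, 1, 1), until=date(2022, 3, 1)):
        query_date = day.strftime('%Y-%m-%d')
        # create_db_observations(query_date, test_api, pg_session, slope_dataset)
        print(query_date)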
@@ -92,14 +102,12 @@ def main():
     # ['Features'][0]['geometry']['properties'][0])
     # print(observation_array)
     # max_id = pg_session.query(func.max(Observation.id)).scalar()
     # if max_id is None:
     #     max_id = -1
     # # pg_session.bulk_save_objects(observations)
     # for observation_json in observation_array:
     #     ob_date_time = observation_json.get('DateTime')
     #     datetime_obj = datetime.strptime(ob_date_time, "%Y-%m-%dT%H:%M:%S.%fZ")
     #     if datetime_obj.date() != query_date_obj.date():
     #         continue
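The commented parsing code above implies the sensor payload nests its readings under Features[0].geometry.properties[0], each entry carrying DateTime and Value keys. A hedged sketch of walking that structure — the payload shape is inferred from these comments, not documented in the commit, and the literal below is a stand-in for the decoded getSensorData(...) JSON:

    # minimal stand-in for the decoded API response (shape assumed)
    data = {'Features': [{'geometry': {'properties': [[
        {'DateTime': '2022-02-28T00:10:00.000Z', 'Value': 1.23}]]}}]}

    observation_array = data['Features'][0]['geometry']['properties'][0]
    for item in observation_array:
        ts = item.get('DateTime')
        value = item.get('Value')
        if ts is None or value is None:
            continue  # skip incomplete readings, as create_db_observations does
        print(ts, value)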
@@ -108,7 +116,8 @@ def main():
     # pg_session.commit()


-def create_db_observations(query_date, test_api, pg_session):
+def create_db_observations(query_date, test_api, pg_session, dataset: Dataset):
     ''' to do '''
     query_date_obj = datetime.strptime(query_date, "%Y-%m-%d")
     data = test_api.getSensorData("inclino1_14", query_date)
@@ -121,20 +130,21 @@ def create_db_observations(query_date, test_api, pg_session):
         max_id = -1
     # pg_session.bulk_save_objects(observations)
     for observation_json in observation_array:
         ob_date_time = observation_json.get('DateTime')
         datetime_obj = datetime.strptime(ob_date_time, "%Y-%m-%dT%H:%M:%S.%fZ")
         if datetime_obj.date() != query_date_obj.date():
             continue
         ob_value = observation_json.get('Value')
         if ob_value is None:
             continue
-        max_id = max_id + 1
-        create_observation(observation_json, pg_session, max_id)
-    pg_session.commit()
-    print("observations for date " + query_date + " succesfully imported \n")
+        # max_id = max_id + 1
+        max_id = create_observation(
+            observation_json, pg_session, max_id, dataset)
+    # pg_session.commit()
+    print("observations for date " + query_date + " successfully imported\n")


-def create_observation(observation_json: ObservationSchema, db_session, max_id):
+def create_observation(observation_json: ObservationSchema, db_session, max_id, dataset: Dataset):
     """
     This function creates a new observation in the people structure
     based on the passed-in observation data
@@ -142,34 +152,41 @@ def create_observation(observation_json: ObservationSchema, db_session, max_id):
     :return: 201 on success, observation on person exists
     """
-    ob_id = observation_json.get('id')
+    ob_id: str = str(observation_json.get('id'))
     # db_session = create_pg_session()
     existing_observation: bool = (
         db_session.query(Observation)
-        .filter(Observation.id == ob_id)
+        .filter(Observation.value_identifier == ob_id)
         .one_or_none()
     )
     # Can we insert this observation?
     if existing_observation is None:
+        max_id += 1
         # Create a person instance using the schema and the passed in person
         schema = ObservationSchema()
-        # deserialize to object
+        # deserialize to python object
         new_observation: Observation = schema.load(observation_json)
-        new_observation.id = max_id + 1
+        new_observation.id = max_id
         new_observation.sta_identifier = str(uuid.uuid4())
+        new_observation.sampling_time_start = new_observation.result_time
+        new_observation.sampling_time_end = new_observation.result_time
+        new_observation.fk_dataset_id = dataset.id
         # Add the person to the database
         db_session.add(new_observation)
+        # dataset.observations.append(new_observation)
         # db_session.commit()
         # Serialize and return the newly created person in the response
-        data = schema.dump(new_observation)
-        return data, 201
+        # data = schema.dump(new_observation)
+        # return data, 201
+        return max_id
     # Otherwise, nope, person exists already
     else:
         print(409, f'Observation {ob_id} exists already')
+        return max_id


 def create(person_json: PersonSchema):
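create_observation now deduplicates on value_identifier and threads the running primary key back to the caller instead of committing per row; the single commit per imported day happens up in main(). A reduced sketch of that contract, with insert_row as a hypothetical stand-in for create_observation (Observation and ObservationSchema as declared in models.py):

    def insert_row(session, row_json, max_id, dataset):
        """Insert one observation unless its source id was already imported;
        return the (possibly advanced) running primary key."""
        ob_id = str(row_json.get('id'))
        exists = (session.query(Observation)
                  .filter(Observation.value_identifier == ob_id)
                  .one_or_none())
        if exists is not None:
            return max_id                  # duplicate: id unchanged
        max_id += 1
        obs: Observation = ObservationSchema().load(row_json)
        obs.id = max_id                    # app-side key, seeded from max(Observation.id)
        obs.fk_dataset_id = dataset.id
        session.add(obs)                   # caller commits once per batch
        return max_id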

View File

@@ -38,6 +38,51 @@ def create_pg_session() -> sessionmaker:
     # Base.metadata.create_all(engine)
     return _session


+class Platform(Base):
+    """ Platform class """
+    __tablename__ = 'platform'
+    __table_args__ = {"schema": "gba"}
+
+    id = Column('platform_id', Integer, primary_key=True)
+    identifier = Column('identifier', String)
+    sta_identifier = Column('sta_identifier', String)
+    name = Column('name', String)
+    # datasets = relationship('Dataset')
+    datasets = relationship('Dataset', back_populates="platform", lazy=True)
+
+    def __repr__(self):
+        return f'Platform {self.name}'
+
+
+class Phenomenon(Base):
+    """ phenomenon class """
+    __tablename__ = 'phenomenon'
+    __table_args__ = {"schema": "gba"}
+
+    id = Column('phenomenon_id', Integer, primary_key=True)
+    name = Column('name', String)
+    sta_identifier = Column('sta_identifier', String)
+    # datasets = relationship('Dataset')
+    datasets = relationship('Dataset', back_populates="phenomenon", lazy=True)
+
+    def __repr__(self):
+        return f'Phenomenon {self.name}'
+
+
+class Procedure(Base):
+    """ procedure class """
+    __tablename__ = 'procedure'
+    __table_args__ = {"schema": "gba"}
+
+    id = Column('procedure_id', Integer, primary_key=True)
+    name = Column('name', String)
+    sta_identifier = Column('sta_identifier', String)
+    # datasets = relationship('Dataset')
+    datasets = relationship('Dataset', back_populates="procedure", lazy=True)
+
+    def __repr__(self):
+        return f'Procedure {self.name}'
+

 class Dataset(Base):
     """ dataset class """
@@ -70,8 +115,25 @@ class Dataset(Base):
     # first_observation = relationship("Observation", foreign_keys=[
     #     fk_first_observation_id])
-    # observations = relationship(
-    #     'Observation', back_populates='dataset', lazy=True)
+    observations = relationship(
+        'Observation', back_populates='dataset', lazy=True)
+
+    fk_phenomenon_id = Column(
+        'fk_phenomenon_id', Integer, ForeignKey('gba.phenomenon.phenomenon_id'), nullable=False)
+    # phenomenon = relationship("Phenomenon", lazy="joined", foreign_keys=[fk_phenomenon_id])
+    phenomenon = relationship(
+        "Phenomenon", back_populates="datasets", lazy="joined")
+
+    fk_platform_id = Column('fk_platform_id', Integer, ForeignKey(
+        'gba.platform.platform_id'), nullable=True)
+    platform = relationship(
+        "Platform", back_populates="datasets", lazy="joined")
+
+    fk_procedure_id = Column('fk_procedure_id', Integer, ForeignKey(
+        'gba.procedure.procedure_id'), nullable=False)
+    # procedure = relationship("Procedure", lazy="joined")
+    procedure = relationship(
+        "Procedure", back_populates="datasets", lazy="joined")


 def new_id_factory():
     ''' test '''
@@ -108,9 +170,9 @@ class Observation(Base):
     value_identifier = Column('value_identifier', String)
     value_quantity = Column('value_quantity', Numeric(20, 10), nullable=False)
-    # fk_dataset_id = Column(Integer, ForeignKey(
-    #     'gba.dataset.dataset_id'), nullable=False)
-    # dataset = relationship("Dataset", back_populates="observations")
+    fk_dataset_id = Column(Integer, ForeignKey(
+        'gba.dataset.dataset_id'), nullable=False)
+    dataset = relationship("Dataset", back_populates="observations")


 class ObservationSchema(SQLAlchemySchema):
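With fk_dataset_id live and the two declarations paired through back_populates, assigning either side of the link keeps the other in sync in memory. A small sketch, assuming a session from create_pg_session and at least one existing dataset row:

    from gschliefgraben_glasfaser.models import (
        Dataset, Observation, create_pg_session)

    session = create_pg_session()
    dataset = session.query(Dataset).first()  # assumes one row exists
    obs = Observation()
    obs.dataset = dataset                # child side: fills fk_dataset_id on flush
    assert obs in dataset.observations   # parent collection sees it immediately
    # equivalently, from the parent side (as the commented line in
    # create_observation hints):
    # dataset.observations.append(obs)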
@@ -175,7 +237,7 @@ def create_db():
     # session_maker = sessionmaker(bind=engine)
     # session = session_maker()
     Base.metadata.drop_all(bind=engine)
-    Base.metadata.create_all(engine)
+    # Base.metadata.create_all(engine)


 if __name__ == "__main__":

View File

@@ -1,4 +1,5 @@
+pip freeze > requirements.txt
+pip install -r requirements.txt
 ===========================================================================================
 python -m venv .venv
 d:/Software/geomon/.venv/Scripts/python.exe -m pip install -U pylint
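Usage note on the two added lines: pip freeze > requirements.txt pins every package in the active environment, and pip install -r requirements.txt replays those pins into a fresh one. pip's -f/--find-links flag serves a different purpose (pointing pip at a local directory or URL of package archives) and does not read a requirements file, so -r is the flag that belongs here.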

BIN  requirements.txt  (new file)

Binary file not shown.