- check if observation already exists in DB, then insert
This commit is contained in:
parent
095cfdfe24
commit
e2ceb107c9
|
@ -6,16 +6,19 @@ Python version: 3.7
|
|||
'''
|
||||
|
||||
import os
|
||||
from tokenize import String
|
||||
import uuid
|
||||
# import sys, inspect
|
||||
# currentdir = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))
|
||||
# parentdir = os.path.dirname(currentdir)
|
||||
# sys.path.insert(0, parentdir)
|
||||
# import requests
|
||||
from sqlalchemy.orm import session
|
||||
|
||||
from gschliefgraben_glasfaser.models import ObservationSchema, Person, PersonSchema, Observation
|
||||
from sqlalchemy import func
|
||||
from gschliefgraben_glasfaser.models import ObservationSchema, Person, PersonSchema, Observation, create_pg_session
|
||||
from gschliefgraben_glasfaser.my_api import MyApi
|
||||
from db.pg_models import create_pg_session
|
||||
from datetime import datetime, date, timedelta
|
||||
# from db.pg_models import create_pg_session
|
||||
#from models import Person, PersonSchema
|
||||
# response = requests.get('https://api.com/')
|
||||
# print(response) # shows the response's HTTP status code
|
||||
|
@ -47,8 +50,8 @@ def main():
|
|||
# print(load_data)
|
||||
|
||||
# request ortmann api
|
||||
# token = 'eyJraWQiOiJlakFmX1MwMTBMU3doS0Zod05wZDQtQkZPYTM4cDRYRE1zU1hFa0lrRlhFIiwiYWxnIjoiUlMyNTYifQ.eyJ2ZXIiOjEsImp0aSI6IkFULkZRUHNCOWh5Snd6eEM5d3ZWelRvaTNpZVlMWlJiT3U4YzFCbWJWRGM1SFkiLCJpc3MiOiJodHRwczovL2Rldi01MjUwMDA2Lm9rdGEuY29tL29hdXRoMi9kZWZhdWx0IiwiYXVkIjoiYXBpOi8vZGVmYXVsdCIsImlhdCI6MTY0NTc4Mjg0NSwiZXhwIjoxNjQ1Nzg2NDQ1LCJjaWQiOiIwb2EyOWhzdGZ3RnFya1BrUDVkNyIsInNjcCI6WyJnc2NobGllZmdyYWJlbiJdLCJzdWIiOiIwb2EyOWhzdGZ3RnFya1BrUDVkNyJ9.c-pTs-3VJMnFO2SOqxOvsABAloprUmOjk6SO9J71NrgLj7claKZOMLZxRyUeSBLWCJFFNI3A6xMd4twEexjJdUR8UEM4U50srxr2p_enaMm1_jZTSt_76u6H05kwV-A2AOQPkx-Fxxaj_PDjT7w43Zlg6SUEoT11uGKR6KtxVYbclGtWgOR7wvH4NZav-P_EDjHwHxbk2kQSf7tBU1JbWl74Xt58gzv1t8VNtLYLICabRsuTNQUNiO7Y1rtUEav4ugf7WZMIY1cP_4rCupZrAFbxrnyprAuXA2x01Z9hbFmiaK0QDlrwHcCHL_1fKvj9uIbO5JeI1x81X6g7eAxQdA'
|
||||
# response = requests.get('https://api.dgnss-sensors.com/gschliefgraben?sensors=("inclino1_14")',
|
||||
# response =
|
||||
# requests.get('https://api.dgnss-sensors.com/gschliefgraben?sensors=("inclino1_14")',
|
||||
# headers={
|
||||
# 'Authorization': 'Bearer' + token,
|
||||
# 'cache-control': 'no-cache',
|
||||
|
@ -57,16 +60,116 @@ def main():
|
|||
# },
|
||||
# data='grant_type=client_credentials&scope=gschliefgraben')
|
||||
# print(response)
|
||||
|
||||
# The size of each step in days
|
||||
|
||||
# consider the start date as 2021-february 1 st
|
||||
start_date = date(2021, 2, 28)
|
||||
# consider the end date as 2021-march 1 st
|
||||
end_date = date(2022, 3, 1)
|
||||
|
||||
# delta time
|
||||
delta = timedelta(days=1)
|
||||
token_api = os.environ.get("TOKEN_API")
|
||||
test_api = MyApi(token_api)
|
||||
data = test_api.getSensorData("inclino1_14")
|
||||
observation_array = (data['FeatureCollection']['Features'][0]['geometry']['properties'][0])
|
||||
print(observation_array)
|
||||
# create(dump_data)
|
||||
# # deserialize to db model
|
||||
observation_schema = ObservationSchema(many=True)
|
||||
observations: Observation = observation_schema.load(observation_array)
|
||||
print(observations)
|
||||
|
||||
# iterate over range of dates
|
||||
while start_date <= end_date:
|
||||
# print(start_date, end="\n")
|
||||
query_date = start_date.strftime('%Y-%m-%d')
|
||||
create_db_observations(query_date, test_api, pg_session)
|
||||
start_date += delta
|
||||
|
||||
# for i in rrule(DAILY , dtstart=start_date,until=end_date):
|
||||
# print(i.strftime('%Y%b%d'),sep='\n')
|
||||
|
||||
|
||||
# query_date = "2022-02-28"
|
||||
# create_db_observations(query_date, test_api, pg_session)
|
||||
# query_date_obj = datetime.strptime(query_date, "%Y-%m-%d")
|
||||
# data = test_api.getSensorData("inclino1_14", query_date)
|
||||
# observation_array = (data['FeatureCollection']
|
||||
# ['Features'][0]['geometry']['properties'][0])
|
||||
# print(observation_array)
|
||||
|
||||
|
||||
|
||||
# max_id = pg_session.query(func.max(Observation.id)).scalar()
|
||||
# if max_id is None:
|
||||
# max_id = -1
|
||||
# # pg_session.bulk_save_objects(observations)
|
||||
# for observation_json in observation_array:
|
||||
# ob_date_time = observation_json.get('DateTime')
|
||||
# datetime_obj = datetime.strptime(ob_date_time, "%Y-%m-%dT%H:%M:%S.%fZ")
|
||||
# if datetime_obj.date() != query_date_obj.date():
|
||||
# continue
|
||||
# max_id = max_id + 1
|
||||
# create_observation(observation_json, pg_session, max_id)
|
||||
|
||||
# pg_session.commit()
|
||||
|
||||
def create_db_observations(query_date, test_api, pg_session):
    """Fetch sensor data for one day and insert the new observations.

    :param query_date: day to import, formatted 'YYYY-MM-DD'
    :param test_api: MyApi instance used to query the sensor service
    :param pg_session: open SQLAlchemy session; committed here on success
    """
    query_date_obj = datetime.strptime(query_date, "%Y-%m-%d")
    data = test_api.getSensorData("inclino1_14", query_date)
    observation_array = (data['FeatureCollection']
                         ['Features'][0]['geometry']['properties'][0])

    # Continue numbering after the highest existing id (-1 means empty table,
    # so the first assigned id becomes 0).
    max_id = pg_session.query(func.max(Observation.id)).scalar()
    if max_id is None:
        max_id = -1
    for observation_json in observation_array:
        ob_date_time = observation_json.get('DateTime')
        datetime_obj = datetime.strptime(ob_date_time, "%Y-%m-%dT%H:%M:%S.%fZ")
        # The API may return records outside the requested day; skip those.
        if datetime_obj.date() != query_date_obj.date():
            continue
        # Skip records that carry no measured value.
        ob_value = observation_json.get('Value')
        if ob_value is None:
            continue
        max_id = max_id + 1
        create_observation(observation_json, pg_session, max_id)
    pg_session.commit()
    # Fixed message: was "succesfully" with missing surrounding spaces.
    print("observations for date " + query_date + " successfully imported\n")
|
||||
|
||||
|
||||
def create_observation(observation_json: ObservationSchema, db_session, max_id):
    """
    Insert a single observation unless one with the same id already exists.

    :param observation_json: raw observation dict as returned by the sensor API
    :param db_session: open SQLAlchemy session (commit is left to the caller)
    :param max_id: highest observation id assigned so far
    :return: (serialized observation, 201) on insert; None when it exists
    """
    ob_id = observation_json.get('id')

    # one_or_none() returns the matching row or None, so the result is
    # Observation | None — the previous `bool` annotation was wrong.
    existing_observation: Observation = (
        db_session.query(Observation)
        .filter(Observation.id == ob_id)
        .one_or_none()
    )

    # Can we insert this observation?
    if existing_observation is None:
        # Deserialize the JSON payload into an Observation model instance.
        schema = ObservationSchema()
        new_observation: Observation = schema.load(observation_json)
        new_observation.id = max_id + 1
        new_observation.sta_identifier = str(uuid.uuid4())

        # Add the observation to the session; commit is deferred to the
        # caller so a whole day of records imports atomically.
        db_session.add(new_observation)

        # Serialize and return the newly created observation.
        data = schema.dump(new_observation)
        return data, 201
    # Otherwise the observation exists already; report and skip it.
    print(409, f'Observation {ob_id} exists already')
    return None
||||
|
||||
|
||||
def create(person_json: PersonSchema):
|
||||
|
|
|
@ -10,25 +10,19 @@ from datetime import datetime
|
|||
|
||||
import os
|
||||
from sqlalchemy import (Column, Integer,
|
||||
String, DateTime, ForeignKey, Numeric, SmallInteger, create_engine)
|
||||
String, DateTime, ForeignKey, Numeric, SmallInteger, create_engine, func)
|
||||
from sqlalchemy.ext.declarative import declarative_base
|
||||
from sqlalchemy.orm import session, relationship
|
||||
from sqlalchemy.orm import session, relationship, sessionmaker
|
||||
#from marshmallow import Schema
|
||||
from marshmallow_sqlalchemy import SQLAlchemySchema, SQLAlchemyAutoSchema
|
||||
from marshmallow import fields
|
||||
# from db.pg_models import create_pg_session
|
||||
from sqlalchemy import func, desc, asc
|
||||
import sqlalchemy.orm.session
|
||||
from sqlalchemy.orm import sessionmaker, relationship
|
||||
# import sqlalchemy.orm.session
|
||||
|
||||
Base = declarative_base()
|
||||
def new_id_factory():
    """Return the next free Observation id (max existing id + 1).

    func.max() yields None for an empty table, which previously raised
    TypeError on `None += 1`; fall back to 0 so the first id is 1.
    """
    pg_session = create_pg_session()
    try:
        current_max = pg_session.query(func.max(Observation.id)).scalar()
    finally:
        # The original leaked this session; close it deterministically.
        pg_session.close()
    if current_max is None:
        current_max = 0
    return current_max + 1
|
||||
|
||||
def create_pg_session() -> sqlalchemy.orm.sessionmaker:
|
||||
|
||||
def create_pg_session() -> sessionmaker:
|
||||
""" create postgres db session """
|
||||
dbschema = ''
|
||||
db_user = os.environ.get("POSTGIS_DBUSER")
|
||||
|
@ -39,10 +33,11 @@ def create_pg_session() -> sqlalchemy.orm.sessionmaker:
|
|||
connect_args={'options': '-csearch_path={}'.format(dbschema)},
|
||||
isolation_level="READ UNCOMMITTED")
|
||||
session_maker = sessionmaker(bind=engine)
|
||||
session = session_maker()
|
||||
_session = session_maker()
|
||||
|
||||
# Base.metadata.create_all(engine)
|
||||
return session
|
||||
return _session
|
||||
|
||||
|
||||
class Dataset(Base):
|
||||
""" dataset class """
|
||||
|
@ -75,29 +70,47 @@ class Dataset(Base):
|
|||
# first_observation = relationship("Observation", foreign_keys=[
|
||||
# fk_first_observation_id])
|
||||
|
||||
observations = relationship(
|
||||
'Observation', back_populates='dataset', lazy=True)
|
||||
|
||||
# observations = relationship(
|
||||
# 'Observation', back_populates='dataset', lazy=True)
|
||||
|
||||
def new_id_factory():
    """Return the next free observation id by querying gba.observation directly.

    Builds a short-lived engine from the POSTGIS_* environment variables,
    reads MAX(observation_id) and returns it incremented by one
    (1 when the table is empty).
    """
    dbschema = ''
    db_user = os.environ.get("POSTGIS_DBUSER")
    db_password = os.environ.get("POSTGIS_DBPASSWORD")
    db_url = os.environ.get("POSTGIS_DBURL")
    engine = create_engine(
        "postgresql+psycopg2://" + db_user + ":" + db_password + "@" + db_url,
        connect_args={'options': '-csearch_path={}'.format(dbschema)},
        isolation_level="READ UNCOMMITTED")
    try:
        result = engine.execute('SELECT MAX(observation_id) FROM gba.observation')
        mytable_max_id = result.first().max
    finally:
        # The original never disposed the engine, leaking a connection
        # pool on every call.
        engine.dispose()
    if mytable_max_id is None:
        mytable_max_id = 0
    mytable_max_id += 1
    return mytable_max_id
|
||||
|
||||
|
||||
class Observation(Base):
    """ORM model for table gba.observation (one sensor reading)."""
    __tablename__ = 'observation'
    __table_args__ = {"schema": "gba"}

    # NOTE(review): the source showed duplicate, conflicting definitions of
    # `id` and `value_type` (diff residue); the newer variants — single-line
    # id and a "quantity" default — are kept here. Confirm against history.
    id = Column('observation_id', Integer, primary_key=True)
    name = Column('name', String)
    value_type = Column('value_type', String, default="quantity")
    sampling_time_start = Column('sampling_time_start', DateTime)
    sampling_time_end = Column('sampling_time_end', DateTime)
    result_time = Column('result_time', DateTime)
    sta_identifier = Column('sta_identifier', String)
    value_identifier = Column('value_identifier', String)
    value_quantity = Column('value_quantity', Numeric(20, 10), nullable=False)

    # Link to the owning dataset; pairs with Dataset.observations
    # (back_populates on both sides).
    fk_dataset_id = Column(Integer, ForeignKey(
        'gba.dataset.dataset_id'), nullable=False)
    dataset = relationship("Dataset", back_populates="observations")
|
||||
|
||||
|
||||
class ObservationSchema(SQLAlchemySchema):
|
||||
|
@ -106,7 +119,7 @@ class ObservationSchema(SQLAlchemySchema):
|
|||
# value_quantity = fields.Integer(attribute='Value')
|
||||
# id = fields.Integer(attribute='id')
|
||||
Value = fields.Integer(attribute='value_quantity')
|
||||
id = fields.Integer(attribute='sta_identifier')
|
||||
id = fields.Integer(attribute='value_identifier')
|
||||
# sta_identifier= fields.String(default=uuid.uuid4()),
|
||||
|
||||
class Meta:
|
||||
|
@ -134,7 +147,7 @@ class Person(Base):
|
|||
self.login, self.lname)
|
||||
|
||||
|
||||
class PersonSchema(SQLAlchemyAutoSchema):
|
||||
class PersonSchema(SQLAlchemyAutoSchema):
|
||||
""" Platform class """
|
||||
class Meta:
|
||||
""" Platform class """
|
||||
|
@ -144,12 +157,12 @@ class PersonSchema(SQLAlchemyAutoSchema):
|
|||
#pg_session: session = create_pg_session()
|
||||
sqla_session: session = create_pg_session()
|
||||
|
||||
|
||||
def create_db():
|
||||
# db_url = 'sqlite:///db.sqlite'
|
||||
# engine = create_engine(db_url, echo = True )
|
||||
# db_url = 'sqlite:///db.sqlite'
|
||||
# engine = create_engine(db_url, echo = True )
|
||||
# Base.metadata.drop_all(bind=engine)
|
||||
# Base.metadata.create_all(engine)
|
||||
|
||||
""" create postgres db session """
|
||||
dbschema = ''
|
||||
db_user = os.environ.get("POSTGIS_DBUSER")
|
||||
|
@ -158,11 +171,12 @@ def create_db():
|
|||
engine = create_engine(
|
||||
"postgresql+psycopg2://" + db_user + ":" + db_password + "@" + db_url,
|
||||
connect_args={'options': '-csearch_path={}'.format(dbschema)},
|
||||
isolation_level="READ UNCOMMITTED", echo = True)
|
||||
isolation_level="READ UNCOMMITTED", echo=True)
|
||||
# session_maker = sessionmaker(bind=engine)
|
||||
# session = session_maker()
|
||||
Base.metadata.drop_all(bind=engine)
|
||||
Base.metadata.create_all(engine)
|
||||
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
create_db()
|
||||
create_db()
|
||||
|
|
|
@ -65,10 +65,10 @@ class MyApi():
|
|||
# self.access_token = res.json()['access_token']
|
||||
# else:
|
||||
# # Token expired -> re-authenticate
|
||||
def getSensorData(self, sensor: string):
|
||||
def getSensorData(self, sensor: string, date):
|
||||
''' request observations'''
|
||||
try:
|
||||
request = self.session.get('https://api.dgnss-sensors.com/gschliefgraben?sensors=(\''+sensor+ '\')&start=2022-02-28&end=2022-02-28',
|
||||
request = self.session.get('https://api.dgnss-sensors.com/gschliefgraben?sensors=(\''+sensor+ '\')&start='+date+'&end='+date,
|
||||
headers={
|
||||
'cache-control': 'no-cache',
|
||||
'Content-Type': 'application/x-www-form-urlencoded',
|
||||
|
|
|
@ -38,14 +38,25 @@ def main():
|
|||
sos_url = 'https://geomon.geologie.ac.at/52n-sos-webapp/service'
|
||||
|
||||
######################## Sibratsgfall
|
||||
# offering = Offering(
|
||||
# "https://geomon.geologie.ac.at/52n-sos-webapp/api/offerings/",
|
||||
# "sibratsgfall_3",
|
||||
# "Inklinometer 3, Sibratsgfaell Sensor"
|
||||
# )
|
||||
# procedure = Procedure( "sibratsgfall_3","sibratsgfall-3")
|
||||
# foi = FoI("degree", "m", (47.4279288, 10.0360888, 0.0),
|
||||
# "sibratsgfall", "Sibratsgfall Beobachtung der Bodenbewegungen Test")
|
||||
|
||||
####################### Gschliefgraben Glasfaser
|
||||
offering = Offering(
|
||||
"https://geomon.geologie.ac.at/52n-sos-webapp/api/offerings/",
|
||||
"sibratsgfall_3",
|
||||
"Inklinometer 3, Sibratsgfaell Sensor"
|
||||
"inclino1_14",
|
||||
"Inklinometer inclino1_14, Gschliefgraben Glasfaser"
|
||||
)
|
||||
procedure = Procedure( "sibratsgfall_3","sibratsgfall-3")
|
||||
foi = FoI("degree", "m", (47.4279288, 10.0360888, 0.0),
|
||||
"sibratsgfall", "Sibratsgfall Beobachtung der Bodenbewegungen Test")
|
||||
procedure = Procedure( "inclino1_14","inclino1_14")
|
||||
|
||||
foi = FoI("degree", "m", (47.910849, 13.774966, 0.0),
|
||||
"FBGuard23", "Glasfaser Untersuchungen am Gschliefgraben (Gmunden)")
|
||||
|
||||
######################## Gschliefgraben
|
||||
# offering = Offering(
|
||||
|
@ -77,6 +88,13 @@ def main():
|
|||
request = requests.post(sos_url, headers=headers, json=post_data)
|
||||
print(request.text)
|
||||
|
||||
# {
|
||||
# "request" : "InsertSensor",
|
||||
# "version" : "2.0.0",
|
||||
# "service" : "SOS",
|
||||
# "assignedProcedure" : "inclino1_14",
|
||||
# "assignedOffering" : "inclino1_14"
|
||||
# }
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
|
|
Loading…
Reference in New Issue
Block a user