- add Piezometer data via json (Gschliefgraben)
parent 941d2006a1
commit 095cfdfe24

.gitignore (vendored) | 3
@@ -98,4 +98,5 @@ ENV/
 /site

 # mypy
 .mypy_cache/
+/db.sqlite
@@ -153,7 +153,7 @@ class Observation(Base):


 def create_pg_session() -> sqlalchemy.orm.sessionmaker:
-    """Return the sum of x and y."""
+    """ create postgres db session """
     dbschema = ''
     db_user = os.environ.get("POSTGIS_DBUSER")
     db_password = os.environ.get("POSTGIS_DBPASSWORD")
@@ -165,5 +165,5 @@ def create_pg_session() -> sqlalchemy.orm.sessionmaker:
     session_maker = sessionmaker(bind=engine)
     session = session_maker()

-    Base.metadata.create_all(engine)
+    # Base.metadata.create_all(engine)
     return session
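Side note: a minimal usage sketch of the session factory touched above, assuming the POSTGIS_* environment variables are set and that create_pg_session() is importable from db.pg_models (as the import in the models file further down suggests); the queried model is hypothetical and therefore commented out:

    from db.pg_models import create_pg_session

    pg_session = create_pg_session()                 # opens a session against the configured PostGIS db
    # first = pg_session.query(SomeModel).first()    # hypothetical model query
    pg_session.close()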

gschliefgraben_glasfaser/create_db.py (new file) | 26
@@ -0,0 +1,26 @@
+
+from sqlalchemy import create_engine
+from sqlalchemy import MetaData
+from sqlalchemy import Table
+from sqlalchemy import Column
+from sqlalchemy import Integer, String
+
+db_url = 'sqlite:///db.sqlite'
+engine = create_engine(db_url)
+
+# Create a metadata instance
+metadata = MetaData(engine)
+# Declare a table
+table = Table('Example', metadata,
+              Column('id', Integer, primary_key=True),
+              Column('name', String))
+# Create all tables
+students = Table(
+    'students', metadata,
+    Column('id', Integer, primary_key=True),
+    Column('name', String),
+    Column('lastname', String),
+)
+metadata.create_all(engine)
+
+
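For context, a short sketch of how the tables this new script creates could be exercised afterwards (the inserted row values are hypothetical; assumes the same db.sqlite file and SQLAlchemy 1.4+):

    from sqlalchemy import create_engine, MetaData, Table

    engine = create_engine('sqlite:///db.sqlite')
    metadata = MetaData()
    # reflect the 'students' table declared in create_db.py
    students = Table('students', metadata, autoload_with=engine)

    with engine.begin() as conn:   # commits on exit
        conn.execute(students.insert().values(name='Ada', lastname='Lovelace'))
        print(list(conn.execute(students.select())))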
@@ -42,7 +42,7 @@ def main():
     dump_data = observation_schema.dump(observation)
     print(dump_data)

-    # # deserialize
+    # # deserialize to db model
     # load_data: Person = person_schema.load(dump_data)
     # print(load_data)

@@ -60,9 +60,13 @@ def main():
     token_api = os.environ.get("TOKEN_API")
     test_api = MyApi(token_api)
     data = test_api.getSensorData("inclino1_14")
-    print(data)
+    observation_array = (data['FeatureCollection']['Features'][0]['geometry']['properties'][0])
+    print(observation_array)
     # create(dump_data)
+    # # deserialize to db model
+    observation_schema = ObservationSchema(many=True)
+    observations: Observation = observation_schema.load(observation_array)
+    print(observations)


 def create(person_json: PersonSchema):
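For orientation, a sketch of the JSON navigation the new lines in main() perform; the payload below is hypothetical and only mirrors the keys used in the diff, so the real API response will differ:

    # hypothetical payload shaped like the keys accessed above
    data = {
        "FeatureCollection": {
            "Features": [
                {"geometry": {"properties": [
                    [{"DateTime": "2022-02-28T00:00:00Z", "Value": 42, "id": 14}]
                ]}}
            ]
        }
    }
    observation_array = data['FeatureCollection']['Features'][0]['geometry']['properties'][0]
    print(observation_array)   # the list of raw observation dicts handed to ObservationSchema(many=True)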
@@ -8,25 +8,83 @@ Python version: 3.10
 from datetime import datetime
 # from config import db, ma

-# import os
+import os
 from sqlalchemy import (Column, Integer,
-                        String, DateTime, ForeignKey, Numeric)
+                        String, DateTime, ForeignKey, Numeric, SmallInteger, create_engine)
 from sqlalchemy.ext.declarative import declarative_base
 from sqlalchemy.orm import session, relationship
 #from marshmallow import Schema
-from marshmallow_sqlalchemy import SQLAlchemyAutoSchema
-from db.pg_models import create_pg_session
+from marshmallow_sqlalchemy import SQLAlchemySchema, SQLAlchemyAutoSchema
+from marshmallow import fields
+# from db.pg_models import create_pg_session
+from sqlalchemy import func, desc, asc
+import sqlalchemy.orm.session
+from sqlalchemy.orm import sessionmaker, relationship

 Base = declarative_base()

+def new_id_factory():
+    pg_session = create_pg_session()
+    _MYTABLE_ID_ = pg_session.query(func.max(Observation.id)).scalar()
+    _MYTABLE_ID_ += 1
+    return _MYTABLE_ID_
+
+def create_pg_session() -> sqlalchemy.orm.sessionmaker:
+    """ create postgres db session """
+    dbschema = ''
+    db_user = os.environ.get("POSTGIS_DBUSER")
+    db_password = os.environ.get("POSTGIS_DBPASSWORD")
+    db_url = os.environ.get("POSTGIS_DBURL")
+    engine = create_engine(
+        "postgresql+psycopg2://" + db_user + ":" + db_password + "@" + db_url,
+        connect_args={'options': '-csearch_path={}'.format(dbschema)},
+        isolation_level="READ UNCOMMITTED")
+    session_maker = sessionmaker(bind=engine)
+    session = session_maker()
+
+    # Base.metadata.create_all(engine)
+    return session
+
+
+class Dataset(Base):
+    """ dataset class """
+    __tablename__ = 'dataset'
+    __table_args__ = {"schema": "gba"}
+
+    id = Column('dataset_id', Integer, primary_key=True)
+    name = Column('name', String)
+    is_published = Column('is_published', SmallInteger)
+    is_hidden = Column('is_hidden', SmallInteger)
+    dataset_type = Column('dataset_type', String)
+    observation_type = Column('observation_type', String)
+    value_type = Column('value_type', String)
+
+    last_time = Column('last_time', DateTime)
+    last_value = Column('last_value', Numeric(20, 10))
+    fk_last_observation_id = Column(
+        'fk_last_observation_id',
+        Integer
+    )
+    # last_observation = relationship(
+    #     "Observation", foreign_keys=[fk_last_observation_id])
+
+    first_time = Column('first_time', DateTime)
+    first_value = Column('first_value', Numeric(20, 10))
+    fk_first_observation_id = Column(
+        'fk_first_observation_id',
+        Integer
+    )
+    # first_observation = relationship("Observation", foreign_keys=[
+    #     fk_first_observation_id])
+
+    observations = relationship(
+        'Observation', back_populates='dataset', lazy=True)
+
+
 class Observation(Base):
     """ observation class """
     __tablename__ = 'observation'
     __table_args__ = {"schema": "gba"}

-    id = Column('observation_id', Integer, primary_key=True)
+    id = Column('observation_id', Integer,
+                primary_key=True)
     name = Column('name', String)
     value_type = Column('value_type', String)
     # pitch = Column('PITCH', String)
@@ -37,16 +95,20 @@ class Observation(Base):
     sta_identifier = Column('sta_identifier', String)
     value_quantity = Column('value_quantity', Numeric(20, 10), nullable=False)

-    # fk_dataset_id = Column('fk_dataset_id', Integer,
-    #                        ForeignKey('gba.dataset.dataset_id'))
-    # dataset = relationship("Dataset", lazy="joined",
-    #                        foreign_keys=[fk_dataset_id])
     fk_dataset_id = Column(Integer, ForeignKey(
         'gba.dataset.dataset_id'), nullable=False)
-    # dataset = relationship("Dataset", back_populates="observations")
+    dataset = relationship("Dataset", back_populates="observations")


-class ObservationSchema(SQLAlchemyAutoSchema):
+class ObservationSchema(SQLAlchemySchema):
     """ Platform class """
+    DateTime = fields.DateTime(attribute='result_time')  # Or vice-versa
+    # value_quantity = fields.Integer(attribute='Value')
+    # id = fields.Integer(attribute='id')
+    Value = fields.Integer(attribute='value_quantity')
+    id = fields.Integer(attribute='sta_identifier')
+    # sta_identifier= fields.String(default=uuid.uuid4()),

     class Meta:
         """ Platform class """
         model = Observation
@@ -72,7 +134,7 @@ class Person(Base):
             self.login, self.lname)


 class PersonSchema(SQLAlchemyAutoSchema):
     """ Platform class """
     class Meta:
         """ Platform class """
@@ -81,3 +143,26 @@ class PersonSchema(SQLAlchemyAutoSchema):
         load_instance = True
         #pg_session: session = create_pg_session()
         sqla_session: session = create_pg_session()
+
+def create_db():
+    # db_url = 'sqlite:///db.sqlite'
+    # engine = create_engine(db_url, echo = True )
+    # Base.metadata.drop_all(bind=engine)
+    # Base.metadata.create_all(engine)
+
+    """ create postgres db session """
+    dbschema = ''
+    db_user = os.environ.get("POSTGIS_DBUSER")
+    db_password = os.environ.get("POSTGIS_DBPASSWORD")
+    db_url = os.environ.get("POSTGIS_DBURL")
+    engine = create_engine(
+        "postgresql+psycopg2://" + db_user + ":" + db_password + "@" + db_url,
+        connect_args={'options': '-csearch_path={}'.format(dbschema)},
+        isolation_level="READ UNCOMMITTED", echo=True)
+    # session_maker = sessionmaker(bind=engine)
+    # session = session_maker()
+    Base.metadata.drop_all(bind=engine)
+    Base.metadata.create_all(engine)
+
+
+if __name__ == "__main__":
+    create_db()
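The interesting part of the ObservationSchema change is the field renaming between the API payload and the model columns. A plain-marshmallow sketch of that renaming idea (not the exact marshmallow-sqlalchemy schema above; the payload values are hypothetical):

    from marshmallow import Schema, fields

    class ObservationSketchSchema(Schema):
        # API keys on the left, model attribute names as the load targets
        Value = fields.Integer(attribute='value_quantity')
        id = fields.Integer(attribute='sta_identifier')

    payload = [{"Value": 42, "id": 14}]
    print(ObservationSketchSchema(many=True).load(payload))
    # -> [{'value_quantity': 42, 'sta_identifier': 14}]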
@@ -68,13 +68,13 @@ class MyApi():
     def getSensorData(self, sensor: string):
         ''' request observations'''
         try:
-            request = self.session.get('https://api.dgnss-sensors.com/gschliefgraben',
+            request = self.session.get('https://api.dgnss-sensors.com/gschliefgraben?sensors=(\''+sensor+ '\')&start=2022-02-28&end=2022-02-28',
                 headers={
                     'cache-control': 'no-cache',
                     'Content-Type': 'application/x-www-form-urlencoded',
                     'accept': 'application/json'
                 },
-                data="grant_type=client_credentials&scope=gschliefgraben&sensors=('\"inclino1_14'\")"
+                data="grant_type=client_credentials&scope=gschliefgraben"
             )
             # optional: raise exception for status code
             request.raise_for_status()
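For reference, what the concatenated URL in getSensorData() evaluates to for the sensor queried in main() (sensor name and dates are the ones hard-coded in the diff):

    sensor = "inclino1_14"
    url = ('https://api.dgnss-sensors.com/gschliefgraben?sensors=(\'' + sensor
           + '\')&start=2022-02-28&end=2022-02-28')
    print(url)
    # https://api.dgnss-sensors.com/gschliefgraben?sensors=('inclino1_14')&start=2022-02-28&end=2022-02-28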
@@ -33,4 +33,11 @@ python -m pip install sqlalchemy-firebird
 python -m pip uninstall psycopg2

 Marshmallow provides functionality to serialize and deserialize Python objects as they flow out of and into our JSON-based REST API. Marshmallow converts Python class instances to objects that can be converted to JSON.
 python -m pip install marshmallow-sqlalchemy marshmallow
+
+
+https://medium.com/dataexplorations/sqlalchemy-orm-a-more-pythonic-way-of-interacting-with-your-database-935b57fd2d4d
+https://stackoverflow.com/questions/51737548/how-to-set-primary-key-auto-increment-in-sqlalchemy-orm
+
+
+
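As a quick illustration of the serialize/deserialize round trip the note above describes (a minimal sketch with a hypothetical schema, not one of the project's models):

    from marshmallow import Schema, fields

    class PersonSketchSchema(Schema):
        login = fields.String()
        lname = fields.String()

    person = {"login": "jdoe", "lname": "Doe"}        # hypothetical input object
    json_ready = PersonSketchSchema().dump(person)    # serialize: instance/dict -> JSON-ready dict
    restored = PersonSketchSchema().load(json_ready)  # deserialize: validated data back in
    print(json_ready, restored)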