2022-03-11 15:10:52 +00:00
|
|
|
'''
|
|
|
|
Tutorial link: https://docs.sqlalchemy.org/en/latest/orm/tutorial.html
|
|
|
|
SQLAlchemy version: 1.4.31
|
|
|
|
Python version: 3.10
|
|
|
|
'''
|
|
|
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
|
|
|
|
|
|
|
|
from datetime import datetime
|
|
|
|
# from config import db, ma
|
|
|
|
|
|
|
|
import os
|
|
|
|
from sqlalchemy import (Column, Integer, Sequence,
|
|
|
|
String, DateTime, ForeignKey, Numeric, SmallInteger, create_engine)
|
|
|
|
from sqlalchemy.ext.declarative import declarative_base
|
|
|
|
from sqlalchemy.orm import session, relationship, sessionmaker
|
2022-03-25 15:37:57 +00:00
|
|
|
# from geoalchemy2 import Geometry
|
|
|
|
from geoalchemy2.types import Geometry
|
2022-03-11 15:10:52 +00:00
|
|
|
#from marshmallow import Schema
|
|
|
|
from marshmallow_sqlalchemy import SQLAlchemySchema, SQLAlchemyAutoSchema
|
|
|
|
from marshmallow import fields
|
|
|
|
from dotenv import load_dotenv, find_dotenv
|
|
|
|
# from db.pg_models import create_pg_session
|
|
|
|
# import sqlalchemy.orm.session
|
|
|
|
|
|
|
|
# Shared declarative base: every ORM model class in this module derives from it.
Base = declarative_base()
|
|
|
|
|
|
|
|
|
|
|
|
def create_pg_session() -> session.Session:
    """Create and return a new PostgreSQL ORM session.

    Loads a ``.env`` file if one can be found, reads the POSTGIS_DBUSER,
    POSTGIS_DBPASSWORD and POSTGIS_DBURL variables, builds a psycopg2
    engine and returns a freshly created Session.

    :returns: a new SQLAlchemy ``Session`` bound to the engine
        (note: a Session instance, not a sessionmaker — the previous
        ``-> sessionmaker`` annotation was incorrect).
    :raises RuntimeError: if any required environment variable is unset
        (previously this surfaced as an opaque TypeError during string
        concatenation).
    """
    load_dotenv(find_dotenv())
    dbschema = 'gba'  # kept for reference; the search_path option is commented out below
    db_user = os.environ.get("POSTGIS_DBUSER")
    db_password = os.environ.get("POSTGIS_DBPASSWORD")
    db_url = os.environ.get("POSTGIS_DBURL")
    if not (db_user and db_password and db_url):
        raise RuntimeError(
            "POSTGIS_DBUSER, POSTGIS_DBPASSWORD and POSTGIS_DBURL "
            "environment variables must be set")
    engine = create_engine(
        "postgresql+psycopg2://" + db_user + ":" + db_password + "@" + db_url,
        # connect_args={'options': f'-csearch_path={dbschema}'},
        isolation_level="READ UNCOMMITTED")
    session_maker = sessionmaker(bind=engine)
    _session = session_maker()

    # Base.metadata.create_all(engine)
    return _session
|
|
|
|
|
2022-03-16 15:37:32 +00:00
|
|
|
# Explicit DB sequence used as the server-side default for Platform.id.
platform_seq = Sequence('platform_seq', schema="gba")  # define sequence explicitly
|
2022-03-11 15:10:52 +00:00
|
|
|
class Platform(Base):
    """ORM model for the ``gba.platform`` table.

    A platform (e.g. a measurement station) owns any number of datasets.
    """
    __tablename__ = 'platform'
    __table_args__ = {"schema": "gba"}

    # id = Column('platform_id', Integer, primary_key=True)
    # Primary key backed by the explicit gba.platform_seq sequence; the
    # database assigns ids on insert via the server_default.
    id = Column('platform_id',
                Integer,
                platform_seq,
                primary_key=True,
                server_default=platform_seq.next_value())
    identifier = Column('identifier', String)
    sta_identifier = Column('sta_identifier', String)
    name = Column('name', String)
    # datasets = relationship('Dataset')
    # One-to-many: mirrored by Dataset.platform (back_populates).
    datasets = relationship('Dataset', back_populates="platform", lazy=True)

    def __repr__(self):
        return f'Platform {self.name}'
|
|
|
|
|
|
|
|
|
|
|
|
class Phenomenon(Base):
    """ORM model for the ``gba.phenomenon`` table (the observed property)."""
    __tablename__ = 'phenomenon'
    __table_args__ = {"schema": "gba"}

    id = Column('phenomenon_id', Integer, primary_key=True)
    name = Column('name', String)
    sta_identifier = Column('sta_identifier', String)
    # datasets = relationship('Dataset')
    # One-to-many: mirrored by Dataset.phenomenon (back_populates).
    datasets = relationship('Dataset', back_populates="phenomenon", lazy=True)

    def __repr__(self):
        return f'Phenomenon {self.name}'
|
|
|
|
|
|
|
|
|
|
|
|
class Procedure(Base):
    """ORM model for the ``gba.procedure`` table (the measuring procedure)."""
    __tablename__ = 'procedure'
    __table_args__ = {"schema": "gba"}

    id = Column('procedure_id', Integer, primary_key=True)
    name = Column('name', String)
    sta_identifier = Column('sta_identifier', String)
    # datasets = relationship('Dataset')
    # One-to-many: mirrored by Dataset.procedure (back_populates).
    datasets = relationship('Dataset', back_populates="procedure", lazy=True)

    def __repr__(self):
        return f'Procedure {self.name}'
|
|
|
|
|
|
|
|
|
|
|
|
class Dataset(Base):
    """ORM model for the ``gba.dataset`` table.

    A dataset represents one time series: it links a phenomenon, a
    procedure, an optional platform and format, and owns its
    observations.  The first_*/last_* columns denormalise the series
    boundaries for fast access.
    """
    __tablename__ = 'dataset'
    __table_args__ = {"schema": "gba"}

    id = Column('dataset_id', Integer, primary_key=True)
    name = Column('name', String)
    is_published = Column('is_published', SmallInteger)  # presumably a 0/1 flag — confirm with writers
    is_hidden = Column('is_hidden', SmallInteger)        # presumably a 0/1 flag — confirm with writers
    dataset_type = Column('dataset_type', String)
    observation_type = Column('observation_type', String)
    value_type = Column('value_type', String)

    # Cached newest observation of the series (time/value/id).
    last_time = Column('last_time', DateTime)
    last_value = Column('last_value', Numeric(20, 10))
    # Plain integer, no ForeignKey: avoids a circular FK with observation.
    fk_last_observation_id = Column(
        'fk_last_observation_id',
        Integer
    )
    # last_observation = relationship(
    #     "Observation", foreign_keys=[fk_last_observation_id])

    # Cached oldest observation of the series.
    first_time = Column('first_time', DateTime)
    first_value = Column('first_value', Numeric(20, 10))
    fk_first_observation_id = Column(
        'fk_first_observation_id',
        Integer
    )
    # first_observation = relationship("Observation", foreign_keys=[
    #     fk_first_observation_id])

    # One-to-many: mirrored by Observation.dataset (back_populates).
    observations = relationship(
        'Observation', back_populates='dataset', lazy=True)

    fk_phenomenon_id = Column(
        'fk_phenomenon_id', Integer, ForeignKey('gba.phenomenon.phenomenon_id'), nullable=False)
    # phenomenon = relationship("Phenomenon", lazy="joined", foreign_keys=[fk_phenomenon_id])
    phenomenon = relationship(
        "Phenomenon", back_populates="datasets", lazy="joined")

    fk_platform_id = Column('fk_platform_id', Integer, ForeignKey(
        'gba.platform.platform_id'), nullable=True)
    platform = relationship(
        "Platform", back_populates="datasets", lazy="joined")

    fk_format_id = Column('fk_format_id', Integer, ForeignKey(
        'gba.format.format_id'), nullable=True)
    format = relationship(
        "Format", back_populates="datasets", lazy="joined")

    fk_procedure_id = Column('fk_procedure_id', Integer, ForeignKey(
        'gba.procedure.procedure_id'), nullable=False)
    # procedure = relationship("Procedure", lazy="joined")
    procedure = relationship(
        "Procedure", back_populates="datasets", lazy="joined")
|
|
|
|
|
|
|
|
def new_id_factory():
    """Return the next free observation id (``MAX(observation_id) + 1``).

    NOTE(review): this read-then-increment scheme is racy under
    concurrent writers; the ``observation_seq`` sequence defined below
    is the safer id source.

    :returns: int — 1 if the table is empty, otherwise max id + 1.
    """
    dbschema = ''
    db_user = os.environ.get("POSTGIS_DBUSER")
    db_password = os.environ.get("POSTGIS_DBPASSWORD")
    db_url = os.environ.get("POSTGIS_DBURL")
    engine = create_engine(
        "postgresql+psycopg2://" + db_user + ":" + db_password + "@" + db_url,
        connect_args={'options': f'-csearch_path={dbschema}'},
        isolation_level="READ UNCOMMITTED")
    try:
        # Engine.execute is SQLAlchemy 1.x "connectionless" execution;
        # valid for the pinned 1.4 series but removed in 2.0.
        result = engine.execute('SELECT MAX(observation_id) FROM gba.observation')
        mytable_max_id = result.first().max
    finally:
        # Previously the per-call engine was never disposed, leaking the
        # pooled connection on every invocation.
        engine.dispose()
    if mytable_max_id is None:
        mytable_max_id = 0
    mytable_max_id += 1
    return mytable_max_id
|
|
|
|
|
|
|
|
# Explicit DB sequence used as the server-side default for Observation.id.
observation_seq = Sequence('observation_seq', schema="gba")  # define sequence explicitly
|
|
|
|
class Observation(Base):
    """ORM model for the ``gba.observation`` table (one measured value)."""
    __tablename__ = 'observation'
    __table_args__ = {"schema": "gba"}

    # id = Column('observation_id', Integer, primary_key=True)
    # Primary key backed by gba.observation_seq; the database assigns
    # ids on insert via the server_default.
    id = Column('observation_id',
                Integer,
                observation_seq,
                primary_key=True,
                server_default=observation_seq.next_value())
    name = Column('name', String)
    value_type = Column('value_type', String, default="quantity")
    # pitch = Column('PITCH', String)
    # roll = Column('ROLL', String)
    sampling_time_start = Column('sampling_time_start', DateTime)
    sampling_time_end = Column('sampling_time_end', DateTime)
    result_time = Column('result_time', DateTime)
    sta_identifier = Column('sta_identifier', String)
    value_identifier = Column('value_identifier', String)
    # The value_* columns are all nullable; presumably the one matching
    # value_type is filled — TODO confirm with the writers.
    value_quantity = Column('value_quantity', Numeric(20, 10), nullable=True)
    value_text = Column('value_text', String, nullable=True)
    # 3-D point geometry, SRID 4326; the column name defaults to the
    # attribute name 'value_geometry' (no explicit name given).
    value_geometry = Column(Geometry(geometry_type='POINT', srid=4326, dimension=3), nullable=True)

    fk_dataset_id = Column(Integer, ForeignKey(
        'gba.dataset.dataset_id'), nullable=False)
    # Many-to-one: mirrored by Dataset.observations (back_populates).
    dataset = relationship("Dataset", back_populates="observations")
|
|
|
|
|
|
|
|
|
|
|
|
class ObservationSchema(SQLAlchemySchema):
    """Marshmallow schema serialising Observation rows.

    The serialised field names (``DateTime``, ``Value``, ``id``) differ
    from the ORM attribute names; ``attribute=`` maps each one back.
    """
    # Serialised as "DateTime", sourced from Observation.result_time.
    DateTime = fields.DateTime(attribute='result_time')  # Or vice-versa
    # value_quantity = fields.Integer(attribute='Value')
    # id = fields.Integer(attribute='id')
    Value = fields.Integer(attribute='value_quantity')
    id = fields.Integer(attribute='value_identifier')
    # sta_identifier= fields.String(default=uuid.uuid4()),

    class Meta:
        """Schema options binding this schema to the Observation model."""
        model = Observation
        include_relationships = True
        load_instance = True
        # pg_session: session = create_pg_session()
        # NOTE(review): this opens a DB session at import time of the
        # module; consider injecting the session instead.
        sqla_session: session = create_pg_session()
|
|
|
|
|
|
|
|
class Format(Base):
    """ORM model for the ``gba.format`` table (dataset format definition)."""
    __tablename__ = 'format'
    __table_args__ = {"schema": "gba"}
    id = Column('format_id', Integer, primary_key=True)
    definition = Column('definition', String(255), index=True)

    # One-to-many: mirrored by Dataset.format (back_populates).
    datasets = relationship('Dataset', back_populates="format", lazy=True)
|
|
|
|
|
|
|
|
class Person(Base):
    """ORM model for the ``gba.accounts`` table (tutorial-style user record)."""
    __tablename__ = 'accounts'
    __table_args__ = {"schema": "gba"}
    person_id = Column('id', Integer, primary_key=True)
    lname = Column('last_name', String(255), index=True)
    fname = Column('first_name', String(255))
    # No explicit column name: the DB column is named 'login' as well.
    login = Column(String(255))
    # Maintained automatically: set on insert, refreshed on every update.
    timestamp = Column('updated_at', DateTime, default=datetime.utcnow,
                       onupdate=datetime.utcnow)

    def __repr__(self):
        return f"<User(name='{self.login}', lastname='{self.lname}')>"
|
|
|
|
|
|
|
|
|
|
|
|
class PersonSchema(SQLAlchemyAutoSchema):
    """Auto-generated marshmallow schema for the Person model."""
    class Meta:
        """Schema options binding this schema to the Person model."""
        model = Person
        include_relationships = True
        load_instance = True
        # pg_session: session = create_pg_session()
        # NOTE(review): this opens a DB session at import time of the
        # module; consider injecting the session instead.
        sqla_session: session = create_pg_session()
|
|
|
|
|
|
|
|
|
|
|
|
def create_db():
    """Create all mapped tables in the PostgreSQL database.

    Builds an engine from the POSTGIS_* environment variables and runs
    ``Base.metadata.create_all`` (existing tables are left untouched;
    the drop_all call remains commented out on purpose).
    """
    # db_url = 'sqlite:///db.sqlite'
    # engine = create_engine(db_url, echo = True )
    # Base.metadata.drop_all(bind=engine)
    # Base.metadata.create_all(engine)
    # Locate the .env file by searching upward from the working tree,
    # consistent with create_pg_session(), instead of the previous
    # hard-coded absolute Windows path ("D:\Software\geomon\.env"),
    # which broke on every other machine.
    load_dotenv(find_dotenv())
    dbschema = ''
    db_user = os.environ.get("POSTGIS_DBUSER")
    db_password = os.environ.get("POSTGIS_DBPASSWORD")
    db_url = os.environ.get("POSTGIS_DBURL")
    engine = create_engine(
        "postgresql+psycopg2://" + db_user + ":" + db_password + "@" + db_url,
        connect_args={'options': f'-csearch_path={dbschema}'},
        isolation_level="READ UNCOMMITTED", echo=True)
    # session_maker = sessionmaker(bind=engine)
    # session = session_maker()
    # Base.metadata.drop_all(bind=engine)
    Base.metadata.create_all(engine)
|
2022-03-11 15:10:52 +00:00
|
|
|
|
|
|
|
|
|
|
|
if __name__ == "__main__":
    # Allow running this module directly to (re)create the DB schema.
    create_db()
|