geomon/db/pg_models.py
'''
Tutorial link: https://docs.sqlalchemy.org/en/latest/orm/tutorial.html
Sqlalchemy version: 1.2.15
Python version: 3.7
'''
import os

from sqlalchemy import (create_engine, Column, Integer,
                        SmallInteger, String, ForeignKey, DateTime, Numeric)
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker, relationship
import sqlalchemy.orm.session

Base = declarative_base()


class Platform(Base):
    """ Platform class """
    __tablename__ = 'platform'
    __table_args__ = {"schema": "gba"}

    id = Column('platform_id', Integer, primary_key=True)
    identifier = Column('identifier', String)
    sta_identifier = Column('sta_identifier', String)
    name = Column('name', String)
    # datasets = relationship('Dataset')
    datasets = relationship('Dataset', back_populates="platform", lazy=True)

    def __repr__(self):
        return f'Platform {self.name}'


class Phenomenon(Base):
    """ phenomenon class """
    __tablename__ = 'phenomenon'
    __table_args__ = {"schema": "gba"}

    id = Column('phenomenon_id', Integer, primary_key=True)
    name = Column('name', String)
    sta_identifier = Column('sta_identifier', String)
    # datasets = relationship('Dataset')
    datasets = relationship('Dataset', back_populates="phenomenon", lazy=True)

    def __repr__(self):
        return f'Phenomenon {self.name}'


class Procedure(Base):
    """ procedure class """
    __tablename__ = 'procedure'
    __table_args__ = {"schema": "gba"}

    id = Column('procedure_id', Integer, primary_key=True)
    name = Column('name', String)
    sta_identifier = Column('sta_identifier', String)
    # datasets = relationship('Dataset')
    datasets = relationship('Dataset', back_populates="procedure", lazy=True)

    def __repr__(self):
        return f'Procedure {self.name}'


class Dataset(Base):
    """ dataset class """
    __tablename__ = 'dataset'
    __table_args__ = {"schema": "gba"}

    id = Column('dataset_id', Integer, primary_key=True)
    name = Column('name', String)
    is_published = Column('is_published', SmallInteger)
    is_hidden = Column('is_hidden', SmallInteger)
    dataset_type = Column('dataset_type', String)
    observation_type = Column('observation_type', String)
    value_type = Column('value_type', String)

    last_time = Column('last_time', DateTime)
    last_value = Column('last_value', Numeric(20, 10))
    fk_last_observation_id = Column(
        'fk_last_observation_id', Integer)
    # last_observation = relationship(
    #     "Observation", foreign_keys=[fk_last_observation_id])

    first_time = Column('first_time', DateTime)
    first_value = Column('first_value', Numeric(20, 10))
    fk_first_observation_id = Column(
        'fk_first_observation_id', Integer)
    # first_observation = relationship("Observation", foreign_keys=[
    #     fk_first_observation_id])

    observations = relationship(
        'Observation', back_populates='dataset', lazy=True)

    fk_procedure_id = Column('fk_procedure_id', Integer, ForeignKey(
        'gba.procedure.procedure_id'), nullable=False)
    # procedure = relationship("Procedure", lazy="joined")
    procedure = relationship(
        "Procedure", back_populates="datasets", lazy="joined")

    fk_phenomenon_id = Column(
        'fk_phenomenon_id', Integer,
        ForeignKey('gba.phenomenon.phenomenon_id'), nullable=False)
    # phenomenon = relationship("Phenomenon", lazy="joined", foreign_keys=[fk_phenomenon_id])
    phenomenon = relationship(
        "Phenomenon", back_populates="datasets", lazy="joined")

    # fk_platform_id = Column(
    #     'fk_platform_id', Integer, ForeignKey('gba.platform.platform_id'), nullable=True)
    # # platform = relationship("Platform", lazy="joined", foreign_keys=[fk_platform_id])
    fk_platform_id = Column('fk_platform_id', Integer, ForeignKey(
        'gba.platform.platform_id'), nullable=True)
    platform = relationship(
        "Platform", back_populates="datasets", lazy="joined")

    def __repr__(self):
        return f'Dataset {self.name}'


class Observation(Base):
    """ observation class """
    __tablename__ = 'observation'
    __table_args__ = {"schema": "gba"}

    id = Column('observation_id', Integer, primary_key=True)
    name = Column('name', String)
    value_type = Column('value_type', String)
    # pitch = Column('PITCH', String)
    # roll = Column('ROLL', String)
    sampling_time_start = Column('sampling_time_start', DateTime)
    sampling_time_end = Column('sampling_time_end', DateTime)
    result_time = Column('result_time', DateTime)
    sta_identifier = Column('sta_identifier', String)
    value_quantity = Column('value_quantity', Numeric(20, 10), nullable=False)

    # fk_dataset_id = Column('fk_dataset_id', Integer,
    #                        ForeignKey('gba.dataset.dataset_id'))
    # dataset = relationship("Dataset", lazy="joined",
    #                        foreign_keys=[fk_dataset_id])
    fk_dataset_id = Column(Integer, ForeignKey(
        'gba.dataset.dataset_id'), nullable=False)
    dataset = relationship("Dataset", back_populates="observations")

    def __repr__(self):
        return f'Observation {self.name}'

    # @property
    # def result_time(self):
    #     ''' Create a datetime object '''
    #     start_datetime = datetime.datetime.combine(self.date, self.ora)
    #     return start_datetime


def create_pg_session() -> sqlalchemy.orm.session.Session:
    """Create and return a new SQLAlchemy session bound to the PostGIS database.

    Connection parameters are read from the POSTGIS_DBUSER, POSTGIS_DBPASSWORD
    and POSTGIS_DBURL environment variables.
    """
    dbschema = ''
    db_user = os.environ.get("POSTGIS_DBUSER")
    db_password = os.environ.get("POSTGIS_DBPASSWORD")
    db_url = os.environ.get("POSTGIS_DBURL")
    engine = create_engine(
        "postgresql+psycopg2://" + db_user + ":" + db_password + "@" + db_url,
        connect_args={'options': '-csearch_path={}'.format(dbschema)},
        isolation_level="READ UNCOMMITTED")
    session_maker = sessionmaker(bind=engine)
    session = session_maker()

    # Create any missing tables for the declarative models defined above.
    Base.metadata.create_all(engine)
    return session
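

# The block below is an illustrative usage sketch, not part of the original
# module: it assumes the POSTGIS_DBUSER, POSTGIS_DBPASSWORD and POSTGIS_DBURL
# environment variables point at a reachable PostgreSQL/PostGIS instance and
# that the 'gba' schema exists. It opens a session via create_pg_session()
# and prints published datasets together with their eagerly joined relations.
if __name__ == '__main__':
    pg_session = create_pg_session()
    try:
        published = pg_session.query(Dataset).filter(Dataset.is_published == 1)
        for ds in published:
            # procedure, phenomenon and platform are loaded via lazy="joined"
            print(ds, ds.procedure, ds.phenomenon, ds.platform)
    finally:
        pg_session.close()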