#!/usr/bin/python
# -*- coding: utf-8 -*-
"""SQLAlchemy ORM models and marshmallow schemas for the ``gba`` schema.

Tutorial link: https://docs.sqlalchemy.org/en/latest/orm/tutorial.html
Sqlalchemy version: 1.4.31
Python version: 3.10
"""
import os
from datetime import datetime

from marshmallow import fields
from marshmallow_sqlalchemy import SQLAlchemyAutoSchema, SQLAlchemySchema
from sqlalchemy import (Column, DateTime, ForeignKey, Integer, Numeric,
                        SmallInteger, String, create_engine, func, text)
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import relationship, session, sessionmaker

Base = declarative_base()


def create_pg_session() -> session.Session:
    """Create and return a new postgres ORM session.

    Connection parameters are read from the ``POSTGIS_DBUSER``,
    ``POSTGIS_DBPASSWORD`` and ``POSTGIS_DBURL`` environment variables.
    The search_path is set to an empty schema, so all table references
    must be schema-qualified (as done via ``__table_args__`` below).

    :return: a live ``Session`` bound to a freshly created engine
    """
    dbschema = ''
    db_user = os.environ.get("POSTGIS_DBUSER")
    db_password = os.environ.get("POSTGIS_DBPASSWORD")
    db_url = os.environ.get("POSTGIS_DBURL")
    engine = create_engine(
        "postgresql+psycopg2://" + db_user + ":" + db_password + "@" + db_url,
        connect_args={'options': '-csearch_path={}'.format(dbschema)},
        isolation_level="READ UNCOMMITTED")
    session_maker = sessionmaker(bind=engine)
    _session = session_maker()
    return _session


class Platform(Base):
    """ORM model for the ``gba.platform`` table."""
    __tablename__ = 'platform'
    __table_args__ = {"schema": "gba"}

    id = Column('platform_id', Integer, primary_key=True)
    identifier = Column('identifier', String)
    sta_identifier = Column('sta_identifier', String)
    name = Column('name', String)
    # One platform owns many datasets (see Dataset.fk_platform_id).
    datasets = relationship('Dataset', back_populates="platform", lazy=True)

    def __repr__(self):
        return f'Platform {self.name}'


class Phenomenon(Base):
    """ORM model for the ``gba.phenomenon`` table."""
    __tablename__ = 'phenomenon'
    __table_args__ = {"schema": "gba"}

    id = Column('phenomenon_id', Integer, primary_key=True)
    name = Column('name', String)
    sta_identifier = Column('sta_identifier', String)
    # One phenomenon is observed by many datasets.
    datasets = relationship('Dataset', back_populates="phenomenon", lazy=True)

    def __repr__(self):
        return f'Phenomenon {self.name}'


class Procedure(Base):
    """ORM model for the ``gba.procedure`` table."""
    __tablename__ = 'procedure'
    __table_args__ = {"schema": "gba"}

    id = Column('procedure_id', Integer, primary_key=True)
    name = Column('name', String)
    sta_identifier = Column('sta_identifier', String)
    # One procedure produces many datasets.
    datasets = relationship('Dataset', back_populates="procedure", lazy=True)

    def __repr__(self):
        return f'Procedure {self.name}'


class Dataset(Base):
    """ORM model for the ``gba.dataset`` table.

    A dataset ties a platform, a phenomenon and a procedure together and
    caches its first/last observation values for quick lookup.
    """
    __tablename__ = 'dataset'
    __table_args__ = {"schema": "gba"}

    id = Column('dataset_id', Integer, primary_key=True)
    name = Column('name', String)
    is_published = Column('is_published', SmallInteger)
    is_hidden = Column('is_hidden', SmallInteger)
    dataset_type = Column('dataset_type', String)
    observation_type = Column('observation_type', String)
    value_type = Column('value_type', String)

    # Denormalized pointers to the newest observation of this dataset.
    last_time = Column('last_time', DateTime)
    last_value = Column('last_value', Numeric(20, 10))
    fk_last_observation_id = Column(
        'fk_last_observation_id',
        Integer
    )

    # Denormalized pointers to the oldest observation of this dataset.
    first_time = Column('first_time', DateTime)
    first_value = Column('first_value', Numeric(20, 10))
    fk_first_observation_id = Column(
        'fk_first_observation_id',
        Integer
    )

    observations = relationship(
        'Observation', back_populates='dataset', lazy=True)

    fk_phenomenon_id = Column(
        'fk_phenomenon_id', Integer,
        ForeignKey('gba.phenomenon.phenomenon_id'), nullable=False)
    phenomenon = relationship(
        "Phenomenon", back_populates="datasets", lazy="joined")

    fk_platform_id = Column('fk_platform_id', Integer, ForeignKey(
        'gba.platform.platform_id'), nullable=True)
    platform = relationship(
        "Platform", back_populates="datasets", lazy="joined")

    fk_procedure_id = Column('fk_procedure_id', Integer, ForeignKey(
        'gba.procedure.procedure_id'), nullable=False)
    procedure = relationship(
        "Procedure", back_populates="datasets", lazy="joined")


def new_id_factory() -> int:
    """Return the next free ``observation_id`` for ``gba.observation``.

    Queries ``MAX(observation_id)`` and returns it incremented by one
    (1 when the table is empty).

    NOTE(review): this is race-prone under concurrent inserts — a DB
    sequence would be safer; kept as-is to preserve behavior.
    """
    dbschema = ''
    db_user = os.environ.get("POSTGIS_DBUSER")
    db_password = os.environ.get("POSTGIS_DBPASSWORD")
    db_url = os.environ.get("POSTGIS_DBURL")
    engine = create_engine(
        "postgresql+psycopg2://" + db_user + ":" + db_password + "@" + db_url,
        connect_args={'options': '-csearch_path={}'.format(dbschema)},
        isolation_level="READ UNCOMMITTED")
    # Use an explicit connection context so the connection is returned to
    # the pool; plain ``engine.execute(str)`` is deprecated in 1.4.
    with engine.connect() as connection:
        result = connection.execute(
            text('SELECT MAX(observation_id) FROM gba.observation'))
        mytable_max_id = result.first().max
    if mytable_max_id is None:
        mytable_max_id = 0
    mytable_max_id += 1
    return mytable_max_id


class Observation(Base):
    """ORM model for the ``gba.observation`` table."""
    __tablename__ = 'observation'
    __table_args__ = {"schema": "gba"}

    id = Column('observation_id', Integer, primary_key=True)
    name = Column('name', String)
    value_type = Column('value_type', String, default="quantity")
    sampling_time_start = Column('sampling_time_start', DateTime)
    sampling_time_end = Column('sampling_time_end', DateTime)
    result_time = Column('result_time', DateTime)
    sta_identifier = Column('sta_identifier', String)
    value_identifier = Column('value_identifier', String)
    value_quantity = Column('value_quantity', Numeric(20, 10), nullable=False)

    fk_dataset_id = Column(Integer, ForeignKey(
        'gba.dataset.dataset_id'), nullable=False)
    dataset = relationship("Dataset", back_populates="observations")


class ObservationSchema(SQLAlchemySchema):
    """Marshmallow schema mapping ``Observation`` to external field names."""

    # External field names differ from the model attributes on purpose:
    # the consumer expects 'DateTime', 'Value' and 'id'.
    DateTime = fields.DateTime(attribute='result_time')
    Value = fields.Integer(attribute='value_quantity')
    id = fields.Integer(attribute='value_identifier')

    class Meta:
        """Schema configuration binding it to the Observation model."""
        model = Observation
        include_relationships = True
        load_instance = True
        # NOTE(review): opens a DB session at import time — consider
        # injecting the session instead.
        sqla_session: session = create_pg_session()


class Person(Base):
    """ORM model for the ``gba.accounts`` table."""
    __tablename__ = 'accounts'
    __table_args__ = {"schema": "gba"}

    person_id = Column('id', Integer, primary_key=True)
    lname = Column('last_name', String(255), index=True)
    fname = Column('first_name', String(255))
    login = Column(String(255))
    timestamp = Column('updated_at', DateTime, default=datetime.utcnow,
                       onupdate=datetime.utcnow)

    def __repr__(self):
        # Bug fix: the original returned '"" % (self.login, self.lname)',
        # which raises TypeError ("not all arguments converted") at runtime.
        return f"<Person login={self.login} lname={self.lname}>"


class PersonSchema(SQLAlchemyAutoSchema):
    """Marshmallow auto-schema for the ``Person`` model."""

    class Meta:
        """Schema configuration binding it to the Person model."""
        model = Person
        include_relationships = True
        load_instance = True
        # NOTE(review): opens a DB session at import time — consider
        # injecting the session instead.
        sqla_session: session = create_pg_session()


def create_db():
    """Drop all mapped tables from the postgres database.

    WARNING: destructive — this executes ``DROP TABLE`` for every table
    registered on ``Base.metadata``. Connection parameters come from the
    same POSTGIS_* environment variables as ``create_pg_session``.
    """
    dbschema = ''
    db_user = os.environ.get("POSTGIS_DBUSER")
    db_password = os.environ.get("POSTGIS_DBPASSWORD")
    db_url = os.environ.get("POSTGIS_DBURL")
    engine = create_engine(
        "postgresql+psycopg2://" + db_user + ":" + db_password + "@" + db_url,
        connect_args={'options': '-csearch_path={}'.format(dbschema)},
        isolation_level="READ UNCOMMITTED", echo=True)
    Base.metadata.drop_all(bind=engine)
    # Base.metadata.create_all(engine)


if __name__ == "__main__":
    create_db()