- add observations for laakirchen with check if timestamp already in db
parent 49d348678f
commit e068773eec

automatic_inclinometer/import_observations_laakirchen.py (new file, 300 lines)
@@ -0,0 +1,300 @@
""" import firebird, export to postgresql """
|
||||||
|
#!/usr/bin/python# -*- coding: utf-8 -*-
|
||||||
|
|
||||||
|
import os
import time
from typing import List
from itertools import chain
import uuid
import json
from dotenv import load_dotenv, find_dotenv
from sqlalchemy.orm import session
from sqlalchemy import asc, desc
# from sqlalchemy.dialects import firebird
from sqlalchemy.sql import or_
from db.fb_models import (create_session, FbObservation, Catena)
from db.models import (create_pg_session, Dataset,
                       Observation, Procedure, Phenomenon, Platform, Format)


def main():
    """
    Main function.
    """

    # parameter:
    # sensor id in firebird db:
    # sensor_id = 1
    # # name of project area in firebird db
    # feature_of_interest = 'TAC003-020-0517'  # Wolfsegg KB1
    # # sensor name in postgis db
    # sensor = 'wolfsegg_kb1_1'
    # platform = 'wolfsegg'

    # sensor_id = 0
    # name of project area in firebird db
    feature_of_interest = 'GSA02B-007-0911'  # Laakirchen
    # sensor name in postgis db
    # sensor = 'wolfsegg_kb1_0'
    platform = 'laakirchen_inclinometer'

    sensor_env_list = os.getenv('LAAKIRCHEN_SENSORS').replace('\n', '')
    sensor_list = json.loads(sensor_env_list)
    # print(sensor_list)
    firebird_session: session = create_session()
    # iterate over the sensors together with their index value
    for sensor_id, sensor in enumerate(sensor_list):

        # db_observation = session.query(Observation) \
        #     .filter_by(name='John Snow').first()
        query_count = firebird_session.query(FbObservation).join(FbObservation.catena) \
            .filter(FbObservation.sensore == sensor_id) \
            .filter(Catena.name == feature_of_interest) \
            .filter(
                or_(
                    FbObservation.temperature != None,
                    FbObservation.pitch != None  # this is used to check NULL values
                )) \
            .count()
        if query_count == 0:
            print(f"sensor {sensor} "
                  f"doesn't have any observations with measured values in firebird database!")
            # hop to next for iteration, next sensor in list
            continue
        # feature_of_interest = query.statement.compile(dialect=firebird.dialect())

        query = firebird_session.query(FbObservation).join(FbObservation.catena) \
            .filter(FbObservation.sensore == sensor_id) \
            .filter(Catena.name == feature_of_interest)
        # print(query.statement.compile(dialect=firebird.dialect()))
        firebird_observations: List[FbObservation] = query.all()
        # firebird_session.close()

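        # look up the postgis datasets (roll, slope, in-system temperature) registered for this sensor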
        pg_session: session = create_pg_session()
        # pg_datasets: List[Dataset] = pg_query.all()
        pg_query = pg_session.query(Dataset) \
            .join(Procedure) \
            .join(Phenomenon) \
            .filter(Procedure.sta_identifier == sensor.lower())
        # .join(Platform).all() \

        roll_dataset: Dataset = pg_query.filter(
            Phenomenon.sta_identifier == "Roll").first()

        slope_dataset: Dataset = pg_query.filter(
            Phenomenon.sta_identifier == "Slope").first()

        temperature_dataset: Dataset = pg_query.filter(
            Phenomenon.sta_identifier == "InSystemTemperature").first()

        platform_exists = pg_session.query(Platform.id).filter_by(
            name=platform.lower()).scalar() is not None
        if not platform_exists:
            sensor_platform = Platform()
            sensor_platform.sta_identifier = platform.lower()
            sensor_platform.identifier = platform.lower()
            sensor_platform.name = platform.lower()
            slope_dataset.platform = sensor_platform
            roll_dataset.platform = sensor_platform
            temperature_dataset.platform = sensor_platform
        else:
            sensor_platform = pg_session.query(Platform.id) \
                .filter(Platform.name == platform.lower()) \
                .first()
            slope_dataset.fk_platform_id = sensor_platform.id
            roll_dataset.fk_platform_id = sensor_platform.id
            temperature_dataset.fk_platform_id = sensor_platform.id

        # commit dataset changes:
        pg_session.commit()

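        # link the datasets to the OM_Measurement observation type if that format is registered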
        format_exists: bool = pg_session.query(Format.id).filter_by(
            definition="http://www.opengis.net/def/observationType/OGC-OM/2.0/OM_Measurement"
        ).scalar() is not None
        if format_exists:
            sensor_format = pg_session.query(Format.id) \
                .filter(Format.definition ==
                        "http://www.opengis.net/def/observationType/OGC-OM/2.0/OM_Measurement") \
                .first()
            slope_dataset.fk_format_id = sensor_format.id
            roll_dataset.fk_format_id = sensor_format.id
            temperature_dataset.fk_format_id = sensor_format.id
            pg_session.commit()

        create_db_observations(firebird_observations, roll_dataset, slope_dataset,
                               temperature_dataset, pg_session)

        # commit new observations:
        pg_session.commit()

        if len(roll_dataset.observations) > 0:
            # if not published yet, publish the roll dataset
            if not roll_dataset.is_published:
                roll_dataset.is_published = 1
                roll_dataset.is_hidden = 0
                roll_dataset.dataset_type = "timeseries"
                roll_dataset.observation_type = "simple"
                roll_dataset.value_type = "quantity"

        if len(slope_dataset.observations) > 0:
            # if not published yet, publish the slope dataset
            if not slope_dataset.is_published:
                slope_dataset.is_published = 1
                slope_dataset.is_hidden = 0
                slope_dataset.dataset_type = "timeseries"
                slope_dataset.observation_type = "simple"
                slope_dataset.value_type = "quantity"

        if len(temperature_dataset.observations) > 0:
            # if not published yet, publish the temperature dataset
            if not temperature_dataset.is_published:
                temperature_dataset.is_published = 1
                temperature_dataset.is_hidden = 0
                temperature_dataset.dataset_type = "timeseries"
                temperature_dataset.observation_type = "simple"
                temperature_dataset.value_type = "quantity"
        pg_session.commit()

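        # refresh the first/last observation metadata cached on each dataset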
        last_roll_observation = pg_session.query(Observation) \
            .filter(Observation.fk_dataset_id == roll_dataset.id) \
            .order_by(desc('sampling_time_start')) \
            .first()
        if last_roll_observation is not None:
            roll_dataset.last_time = last_roll_observation.sampling_time_start
            roll_dataset.last_value = last_roll_observation.value_quantity
            roll_dataset.fk_last_observation_id = last_roll_observation.id

        last_slope_observation = pg_session.query(Observation) \
            .filter(Observation.fk_dataset_id == slope_dataset.id) \
            .order_by(desc('sampling_time_start')) \
            .first()
        if last_slope_observation is not None:
            slope_dataset.last_time = last_slope_observation.sampling_time_start
            slope_dataset.last_value = last_slope_observation.value_quantity
            slope_dataset.fk_last_observation_id = last_slope_observation.id

        last_temperature_observation = pg_session.query(Observation) \
            .filter(Observation.fk_dataset_id == temperature_dataset.id) \
            .order_by(desc('sampling_time_start')) \
            .first()
        if last_temperature_observation is not None:
            temperature_dataset.last_time = last_temperature_observation.sampling_time_start
            temperature_dataset.last_value = last_temperature_observation.value_quantity
            temperature_dataset.fk_last_observation_id = last_temperature_observation.id

        first_roll_observation = pg_session.query(Observation) \
            .filter(Observation.fk_dataset_id == roll_dataset.id) \
            .order_by(asc('sampling_time_start')) \
            .first()
        if first_roll_observation is not None:
            roll_dataset.first_time = first_roll_observation.sampling_time_start
            roll_dataset.first_value = first_roll_observation.value_quantity
            roll_dataset.fk_first_observation_id = first_roll_observation.id

        first_slope_observation = pg_session.query(Observation) \
            .filter(Observation.fk_dataset_id == slope_dataset.id) \
            .order_by(asc('sampling_time_start')) \
            .first()
        if first_slope_observation is not None:
            slope_dataset.first_time = first_slope_observation.sampling_time_start
            slope_dataset.first_value = first_slope_observation.value_quantity
            slope_dataset.fk_first_observation_id = first_slope_observation.id

        first_temperature_observation = pg_session.query(Observation) \
            .filter(Observation.fk_dataset_id == temperature_dataset.id) \
            .order_by(asc('sampling_time_start')) \
            .first()
        if first_temperature_observation is not None:
            temperature_dataset.first_time = first_temperature_observation.sampling_time_start
            temperature_dataset.first_value = first_temperature_observation.value_quantity
            temperature_dataset.fk_first_observation_id = first_temperature_observation.id
        pg_session.commit()

    # for loop sensors end
    pg_session.close()
    firebird_session.close()


def create_db_observations(firebird_observations: List[FbObservation],
                           roll_dataset: Dataset,
                           slope_dataset: Dataset,
                           temperature_dataset: Dataset,
                           pg_session: session):
    ''' insert new observations into db '''
    roll_result = (
        pg_session.query(Observation.result_time)
        .filter(Observation.fk_dataset_id == roll_dataset.id)
        .all()
    )
    roll_result_time_db_list1: List[str] = list(chain(*roll_result))
    roll_result_time_db_list: List[float] = [
        time.mktime(date_obj.timetuple()) for date_obj in roll_result_time_db_list1]

    slope_result = (
        pg_session.query(Observation.result_time)
        .filter(Observation.fk_dataset_id == slope_dataset.id)
        .all()
    )
    slope_result_time_db_list1: List[str] = list(chain(*slope_result))
    slope_result_time_db_list: List[float] = [
        time.mktime(date_obj.timetuple()) for date_obj in slope_result_time_db_list1]

    temperature_result = (
        pg_session.query(Observation.result_time)
        .filter(Observation.fk_dataset_id == temperature_dataset.id)
        .all()
    )
    temperature_result_time_db_list1: List[str] = list(chain(*temperature_result))
    temperature_result_time_db_list: List[float] = [
        time.mktime(date_obj.timetuple()) for date_obj in temperature_result_time_db_list1]

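    # insert only those firebird observations whose result time is not yet stored in the db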
    for fb_observation in firebird_observations:
        # print(fb_observation.catena.name)
        if fb_observation.roll is not None and roll_dataset is not None:
            value = fb_observation.roll
            add_observation(roll_dataset, fb_observation, value, roll_result_time_db_list)

        if fb_observation.pitch is not None and slope_dataset is not None:
            # max_id = max_id + 1
            value = fb_observation.pitch
            add_observation(slope_dataset, fb_observation, value, slope_result_time_db_list)

        if fb_observation.temperature is not None and temperature_dataset is not None:
            # max_id = max_id + 1
            value = fb_observation.temperature
            add_observation(temperature_dataset, fb_observation, value, temperature_result_time_db_list)


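# helper shared by the roll, slope and temperature inserts above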
def add_observation(dataset: Dataset, fb_observation: FbObservation, value: str,
                    value_identifier_db_list: List[float]):
    ''' check if the observation already exists in the db,
    otherwise add it to the db '''
    # ob_id: str = str(observation_json.get('id'))

    # existing_observation: bool = (
    #     db_session.query(Observation)
    #     .filter(Observation.result_time == fb_observation.result_time, Observation.fk_dataset_id == dataset.id)
    #     .one_or_none()
    # )
    existing_observation: bool = time.mktime(
        fb_observation.result_time.timetuple()) in value_identifier_db_list
    # Can we insert this observation?
    if existing_observation is False:
        # insert new observation
        new_observation: Observation = Observation(
            # id=max_id,
            value_type='quantity',
            sampling_time_start=fb_observation.result_time,
            sampling_time_end=fb_observation.result_time,
            result_time=fb_observation.result_time,
            sta_identifier=str(uuid.uuid4()),
            value_identifier=str(time.mktime(fb_observation.result_time.timetuple())),
            value_quantity=value
        )
        dataset.observations.append(new_observation)
        print(f"new observation with result time {new_observation.result_time} "
              f"for inclinometer {dataset.procedure.name} successfully imported!")
    else:
        print(f"observation with result time {fb_observation.result_time} "
              f"for inclinometer {dataset.procedure.name} already exists!")

# -----------------------------------------------------------------------------
if __name__ == "__main__":
    load_dotenv(find_dotenv())
    main()
@@ -66,45 +66,45 @@ def main():
     # appending instances to list
     sensor_list.append(
-        Sensor('laakirchen_0', 13.8141457, 47.9789118))
+        Sensor('laakirchen_inclino_0', 13.8141457, 47.9789118))
     sensor_list.append(
-        Sensor('laakirchen_1', 13.8141457, 47.9789118))
+        Sensor('laakirchen_inclino_1', 13.8141457, 47.9789118))
     sensor_list.append(
-        Sensor('laakirchen_2', 13.8141457, 47.9789118))
+        Sensor('laakirchen_inclino_2', 13.8141457, 47.9789118))
     sensor_list.append(
-        Sensor('laakirchen_3', 13.8141457, 47.9789118))
+        Sensor('laakirchen_inclino_3', 13.8141457, 47.9789118))
     sensor_list.append(
-        Sensor('laakirchen_4', 13.8141457, 47.9789118))
+        Sensor('laakirchen_inclino_4', 13.8141457, 47.9789118))
     sensor_list.append(
-        Sensor('laakirchen_5', 13.8141457, 47.9789118))
+        Sensor('laakirchen_inclino_5', 13.8141457, 47.9789118))
     sensor_list.append(
-        Sensor('laakirchen_6', 13.8141457, 47.9789118))
+        Sensor('laakirchen_inclino_6', 13.8141457, 47.9789118))
     sensor_list.append(
-        Sensor('laakirchen_7', 13.8141457, 47.9789118))
+        Sensor('laakirchen_inclino_7', 13.8141457, 47.9789118))
     sensor_list.append(
-        Sensor('laakirchen_8', 13.8141457, 47.9789118))
+        Sensor('laakirchen_inclino_8', 13.8141457, 47.9789118))
     sensor_list.append(
-        Sensor('laakirchen_9', 13.8141457, 47.9789118))
+        Sensor('laakirchen_inclino_9', 13.8141457, 47.9789118))
     sensor_list.append(
-        Sensor('laakirchen_10', 13.8141457, 47.9789118))
+        Sensor('laakirchen_inclino_10', 13.8141457, 47.9789118))
     sensor_list.append(
-        Sensor('laakirchen_11', 13.8141457, 47.9789118))
+        Sensor('laakirchen_inclino_11', 13.8141457, 47.9789118))
     sensor_list.append(
-        Sensor('laakirchen_12', 13.8141457, 47.9789118))
+        Sensor('laakirchen_inclino_12', 13.8141457, 47.9789118))
     sensor_list.append(
-        Sensor('laakirchen_13', 13.8141457, 47.9789118))
+        Sensor('laakirchen_inclino_13', 13.8141457, 47.9789118))
     sensor_list.append(
-        Sensor('laakirchen_14', 13.8141457, 47.9789118))
+        Sensor('laakirchen_inclino_14', 13.8141457, 47.9789118))
     sensor_list.append(
-        Sensor('laakirchen_15', 13.8141457, 47.9789118))
+        Sensor('laakirchen_inclino_15', 13.8141457, 47.9789118))
     sensor_list.append(
-        Sensor('laakirchen_16', 13.8141457, 47.9789118))
+        Sensor('laakirchen_inclino_16', 13.8141457, 47.9789118))
     sensor_list.append(
-        Sensor('laakirchen_17', 13.8141457, 47.9789118))
+        Sensor('laakirchen_inclino_17', 13.8141457, 47.9789118))
     sensor_list.append(
-        Sensor('laakirchen_18', 13.8141457, 47.9789118))
+        Sensor('laakirchen_inclino_18', 13.8141457, 47.9789118))
     sensor_list.append(
-        Sensor('laakirchen_19', 13.8141457, 47.9789118))
+        Sensor('laakirchen_inclino_19', 13.8141457, 47.9789118))
 
     sensor: Sensor
     for sensor in sensor_list:
@@ -136,17 +136,6 @@ def main():
     request=requests.post(sos_url, headers = headers, json = post_data)
     print(request.text)
 
-    # # platform Wolfsegg
-    # offering = Offering(
-    #     "https://geomon.geologie.ac.at/52n-sos-webapp/api/offerings/",
-    #     "wolfsegg_1",
-    #     "Bohrloch, Wolfsegg Inklinometer"
-    # )
-    # procedure = Procedure("wolfsegg_1", "wolfsegg_1")
-    # foi = FoI("degree", "m", (13.6731638, 48.1064354, 0.0),
-    #           "TAC003-020-0517", "Wolfsegg KB1")
-
-
 
 def insert_sensor(offering, procedure, foi, sensor_type):
     """
@@ -20,7 +20,8 @@ class Catena(Base):
     id = Column('CHIAVE', Integer, primary_key=True)
     name = Column('NOME', String)
-    observations = relationship('FbObservation')
+    # observations = relationship('FbObservation')
+    observations = relationship('FbObservation', back_populates="catena", lazy=True)
 
     def __repr__(self):  # optional
         return f'Catena {self.name}'
@@ -38,8 +39,12 @@ class FbObservation(Base):
     data = Column('DATA', Date)
     temperature = Column('TEMPERATURA', String)
 
-    chiave_id = Column('CATENA', Integer, ForeignKey('CATENE.CHIAVE'))
-    catena = relationship("Catena", lazy="joined", foreign_keys=[chiave_id])
+    chiave_id = Column('CATENA', Integer, ForeignKey('CATENE.CHIAVE'), nullable=True)
+    catena = relationship(
+        "Catena", back_populates="observations", lazy="joined")
+
+    # chiave_id = Column('CATENA', Integer, ForeignKey('CATENE.CHIAVE'))
+    # catena = relationship("Catena", lazy="joined", foreign_keys=[chiave_id])
 
     def __repr__(self):  # optional
         return f'FbObservation {self.roll}'
@@ -62,5 +67,5 @@ def create_session() -> sqlalchemy.orm.session:
     session_maker = sessionmaker(bind=engine)
     session = session_maker()
 
-    Base.metadata.create_all(engine)
+    # Base.metadata.create_all(engine)
     return session