- add geometry observations (with the `-csearch_path` connect option disabled in model.py)

This commit is contained in:
Arno Kaimbacher 2022-04-07 11:24:43 +02:00
parent 5b78dede0b
commit 86c3c6eb32
3 changed files with 22 additions and 21 deletions

View File

@ -34,7 +34,7 @@ def create_pg_session() -> sessionmaker:
db_url = os.environ.get("POSTGIS_DBURL")
engine = create_engine(
"postgresql+psycopg2://" + db_user + ":" + db_password + "@" + db_url,
connect_args={'options': f'-csearch_path={dbschema}'},
# connect_args={'options': f'-csearch_path={dbschema}'},
isolation_level="READ UNCOMMITTED")
session_maker = sessionmaker(bind=engine)
_session = session_maker()
@ -190,7 +190,7 @@ class Observation(Base):
value_identifier = Column('value_identifier', String)
value_quantity = Column('value_quantity', Numeric(20, 10), nullable=True)
value_text = Column('value_text', String, nullable=True)
value_geometry = Column(Geometry(geometry_type='POLYGON', srid=4326, dimension=3), nullable=True)
value_geometry = Column(Geometry(geometry_type='POINTZ', srid=4326, dimension=3), nullable=True)
fk_dataset_id = Column(Integer, ForeignKey(
'gba.dataset.dataset_id'), nullable=False)

View File

@ -79,5 +79,6 @@ https://stackoverflow.com/questions/51737548/how-to-set-primary-key-auto-increme
UPDATE pg_extension SET extrelocatable = TRUE WHERE extname = 'postgis';
ALTER EXTENSION postgis SET SCHEMA gba;
ALTER EXTENSION postgis SET SCHEMA gba;
ALTER DATABASE sos_db SET search_path TO gba, public;

View File

@ -75,30 +75,30 @@ def main():
if not location_dataset.is_published:
location_dataset.is_published = 1
location_dataset.is_hidden = 0
location_dataset.dataset_type = "timeseries"
location_dataset.dataset_type = "trajectory"
location_dataset.observation_type = "simple"
location_dataset.value_type = "geometry"
pg_session.commit()
last_location_observation = pg_session.query(Observation) \
.filter(Observation.fk_dataset_id == location_dataset.id) \
.order_by(desc('sampling_time_start')) \
.first()
if last_location_observation is not None:
location_dataset.last_time = last_location_observation.sampling_time_start
#location_dataset.last_value = last_location_observation.value_quantity
location_dataset.fk_last_observation_id = last_location_observation.id
# last_location_observation = pg_session.query(Observation) \
# .filter(Observation.fk_dataset_id == location_dataset.id) \
# .order_by(desc('sampling_time_start')) \
# .first()
# if last_location_observation is not None:
# location_dataset.last_time = last_location_observation.sampling_time_start
# #location_dataset.last_value = last_location_observation.value_quantity
# location_dataset.fk_last_observation_id = last_location_observation.id
first_location_observation = pg_session.query(Observation) \
.filter(Observation.fk_dataset_id == location_dataset.id) \
.order_by(asc('sampling_time_start')) \
.first()
if first_location_observation is not None:
location_dataset.first_time = first_location_observation.sampling_time_start
# roll_dataset.first_value = first_location_observation.value_quantity
location_dataset.fk_first_observation_id = first_location_observation.id
# first_location_observation = pg_session.query(Observation) \
# .filter(Observation.fk_dataset_id == location_dataset.id) \
# .order_by(asc('sampling_time_start')) \
# .first()
# if first_location_observation is not None:
# location_dataset.first_time = first_location_observation.sampling_time_start
# # roll_dataset.first_value = first_location_observation.value_quantity
# location_dataset.fk_first_observation_id = first_location_observation.id
pg_session.commit()
# pg_session.commit()
# for loop sensors end
pg_session.close()