- add additional project areas for the automatic inclinometer import
- beginning with Voegelsberg (FTP download, moving objects)
This commit is contained in:
parent e068773eec
commit 7f08225b40
@@ -68,7 +68,7 @@ CHIAVE NOME NUMERO_SENSORI SITO x y
 5 GSA002-017-0510 17 Rosano 43.7685394,11.4232703
 6 GSA02A-010-1210 10 Ampflwang - KB1 48.0889892,13.5583703
 7 GSA02B-007-1210 17 Ampflwang - KB2 48.088,13.5583
-8 GSA02B-007-0911 19 Laakirchen 47.9789118,13.8141457
+8 GSA02B-007-0911 19 Laakirchen 47.9789118,13.8141457 erledigt
 9 Copy of GSA002-017-0510 17 Rosano 43.7685394,11.4232703
 10 GSA02B-007-0613 17 Pechgraben Haus 47.9193704,14.5242307
 11 Copy of GSA02B-007-0911 19 Laakirchen 47.9789118,13.8141457
@@ -77,7 +77,7 @@ CHIAVE NOME NUMERO_SENSORI SITO x y
 14 TAC003-020-1213 20 Pechgraben KB1 47.9193704,14.5242307
 15 GSA02A-010-1213 10 Pechgraben KB2 47.9193704,14.5242307
 16 TAC003-020-0414 20 Pechgraben KB1 47.9193704,14.5242307
-17 TAC003-020-0517 20 Wolfsegg KB1 48.1064354,13.6731638
+17 TAC003-020-0517 20 Wolfsegg KB1 48.1064354,13.6731638 erledigt
 18 GSA02A-010-0517 10 Wolfsegg KB3 48.1064354,13.6731638
 19 TAC005-013-0517 14 Wolfsegg KB2 48.1064354,13.6731638
 20 GSA003-020-0517 34 Wolfsegg KB5 48.1064354,13.6731638
automatic_inclinometer/import_observations_ampfwang_kb1.py (new file, 314 lines)
@@ -0,0 +1,314 @@
""" import firebird, export to postgresql """
#!/usr/bin/python
# -*- coding: utf-8 -*-

import os
import time
from typing import List
from itertools import chain
import uuid
import json
from dotenv import load_dotenv, find_dotenv
from sqlalchemy.orm import session
from sqlalchemy import asc, desc
# from sqlalchemy.dialects import firebird
from sqlalchemy.sql import or_
from db.fb_models import (create_session, FbObservation, Catena)
from db.models import (create_pg_session, Dataset,
                       Observation, Procedure, Phenomenon, Platform, Format)


def main():
    """
    Main function.
    """

    # sensor_id = 0
    # name of project area in firebird db
    feature_of_interest = 'GSA02A-010-1210'  # Ampflwang KB1
    # sensor name in postgis db
    # sensor = 'wolfsegg_kb1_0'
    platform = 'ampflwang_kb1_inclinometer'

    sensor_env_list = os.getenv('AMPFLWANG_KB1_SENSORS').replace('\n', '')
    sensor_list = json.loads(sensor_env_list)
    # print(sensor_list)
    firebird_session: session = create_session()
    # this will print elements along with their index value
    for sensor_id, sensor in enumerate(sensor_list):

        # db_observation = session.query(Observation) \
        #     .filter_by(name='John Snow').first()
        query_count = firebird_session.query(FbObservation).join(FbObservation.catena) \
            .filter(FbObservation.sensore == sensor_id) \
            .filter(Catena.name == feature_of_interest) \
            .filter(
                or_(
                    FbObservation.temperature != None,
                    FbObservation.pitch != None  # this is used to check NULL values
                )) \
            .count()
        # if query_count == 0:
        #     print(f"sensor {sensor} "
        #           f"doesn't have any observations with measured values in firebird database!")
        #     # hop to next for iteration, next sensor in list
        #     continue
        # test = query_count.statement.compile(dialect=firebird.dialect())

        firebird_observations: List[FbObservation] = []
        if query_count > 0:
            query = firebird_session.query(FbObservation).join(FbObservation.catena) \
                .filter(FbObservation.sensore == sensor_id) \
                .filter(Catena.name == feature_of_interest)
            # print (query.statement.compile(dialect=firebird.dialect()))
            firebird_observations: List[FbObservation] = query.all()
            firebird_session.close()

        pg_session: session = create_pg_session()
        # pg_datasets: List[Dataset] = pg_query.all()
        pg_query = pg_session.query(Dataset) \
            .join(Procedure) \
            .join(Phenomenon) \
            .filter(Procedure.sta_identifier == sensor.lower())
        # .join(Platform).all() \

        roll_dataset: Dataset = pg_query.filter(
            Phenomenon.sta_identifier == "Roll").first()

        slope_dataset: Dataset = pg_query.filter(
            Phenomenon.sta_identifier == "Slope").first()

        temperature_dataset: Dataset = pg_query.filter(
            Phenomenon.sta_identifier == "InSystemTemperature").first()

        platform_exists = pg_session.query(Platform.id).filter_by(
            name=platform.lower()).scalar() is not None
        if not platform_exists:
            sensor_platform = Platform()
            sensor_platform.sta_identifier = platform.lower()
            sensor_platform.identifier = platform.lower()
            sensor_platform.name = platform.lower()
            slope_dataset.platform = sensor_platform
            roll_dataset.platform = sensor_platform
            temperature_dataset.platform = sensor_platform
        else:
            sensor_platform = pg_session.query(Platform.id) \
                .filter(Platform.name == platform.lower()) \
                .first()
            slope_dataset.fk_platform_id = sensor_platform.id
            roll_dataset.fk_platform_id = sensor_platform.id
            temperature_dataset.fk_platform_id = sensor_platform.id

        # commit dataset changes:
        pg_session.commit()

        format_exists: bool = pg_session.query(Format.id).filter_by(
            definition="http://www.opengis.net/def/observationType/OGC-OM/2.0/OM_Measurement"
        ).scalar() is not None
        if format_exists:
            sensor_format = pg_session.query(Format.id) \
                .filter(Format.definition ==
                        "http://www.opengis.net/def/observationType/OGC-OM/2.0/OM_Measurement") \
                .first()
            slope_dataset.fk_format_id = sensor_format.id
            roll_dataset.fk_format_id = sensor_format.id
            temperature_dataset.fk_format_id = sensor_format.id
            pg_session.commit()

        if query_count == 0:
            print(f"sensor {sensor} "
                  f"doesn't have any observations with measured values in firebird database!")
            # hop to next for iteration, next sensor in list, don't insert any observations
            continue

        create_db_observations(firebird_observations, roll_dataset,
                               slope_dataset, temperature_dataset, pg_session)

        # commit new observations:
        pg_session.commit()

        if len(roll_dataset.observations) > 0:
            # if not published yet, publish the roll dataset
            if not roll_dataset.is_published:
                roll_dataset.is_published = 1
                roll_dataset.is_hidden = 0
                roll_dataset.dataset_type = "timeseries"
                roll_dataset.observation_type = "simple"
                roll_dataset.value_type = "quantity"

        if len(slope_dataset.observations) > 0:
            # if not published yet, publish the slope dataset
            if not slope_dataset.is_published:
                slope_dataset.is_published = 1
                slope_dataset.is_hidden = 0
                slope_dataset.dataset_type = "timeseries"
                slope_dataset.observation_type = "simple"
                slope_dataset.value_type = "quantity"

        if len(temperature_dataset.observations) > 0:
            # if not published yet, publish the temperature dataset
            if not temperature_dataset.is_published:
                temperature_dataset.is_published = 1
                temperature_dataset.is_hidden = 0
                temperature_dataset.dataset_type = "timeseries"
                temperature_dataset.observation_type = "simple"
                temperature_dataset.value_type = "quantity"
        pg_session.commit()
        last_roll_observation = pg_session.query(Observation) \
            .filter(Observation.fk_dataset_id == roll_dataset.id) \
            .order_by(desc('sampling_time_start')) \
            .first()
        if last_roll_observation is not None:
            roll_dataset.last_time = last_roll_observation.sampling_time_start
            roll_dataset.last_value = last_roll_observation.value_quantity
            roll_dataset.fk_last_observation_id = last_roll_observation.id

        last_slope_observation = pg_session.query(Observation) \
            .filter(Observation.fk_dataset_id == slope_dataset.id) \
            .order_by(desc('sampling_time_start')) \
            .first()
        if last_slope_observation is not None:
            slope_dataset.last_time = last_slope_observation.sampling_time_start
            slope_dataset.last_value = last_slope_observation.value_quantity
            slope_dataset.fk_last_observation_id = last_slope_observation.id

        last_temperature_observation = pg_session.query(Observation) \
            .filter(Observation.fk_dataset_id == temperature_dataset.id) \
            .order_by(desc('sampling_time_start')) \
            .first()
        if last_temperature_observation is not None:
            temperature_dataset.last_time = last_temperature_observation.sampling_time_start
            temperature_dataset.last_value = last_temperature_observation.value_quantity
            temperature_dataset.fk_last_observation_id = last_temperature_observation.id

        first_roll_observation = pg_session.query(Observation) \
            .filter(Observation.fk_dataset_id == roll_dataset.id) \
            .order_by(asc('sampling_time_start')) \
            .first()
        if first_roll_observation is not None:
            roll_dataset.first_time = first_roll_observation.sampling_time_start
            roll_dataset.first_value = first_roll_observation.value_quantity
            roll_dataset.fk_first_observation_id = first_roll_observation.id

        first_slope_observation = pg_session.query(Observation) \
            .filter(Observation.fk_dataset_id == slope_dataset.id) \
            .order_by(asc('sampling_time_start')) \
            .first()
        if first_slope_observation is not None:
            slope_dataset.first_time = first_slope_observation.sampling_time_start
            slope_dataset.first_value = first_slope_observation.value_quantity
            slope_dataset.fk_first_observation_id = first_slope_observation.id

        first_temperature_observation = pg_session.query(Observation) \
            .filter(Observation.fk_dataset_id == temperature_dataset.id) \
            .order_by(asc('sampling_time_start')) \
            .first()
        if first_temperature_observation is not None:
            temperature_dataset.first_time = first_temperature_observation.sampling_time_start
            temperature_dataset.first_value = first_temperature_observation.value_quantity
            temperature_dataset.fk_first_observation_id = first_temperature_observation.id
        pg_session.commit()

    # for loop sensors end
    pg_session.close()
    # firebird_session.close()


def create_db_observations(firebird_observations: List[FbObservation],
                           roll_dataset: Dataset,
                           slope_dataset: Dataset,
                           temperature_dataset: Dataset,
                           pg_session: session):
    ''' insert new observations into db '''
    roll_result = (
        pg_session.query(Observation.result_time)
        .filter(Observation.fk_dataset_id == roll_dataset.id)
        .all()
    )
    roll_result_time_db_list1: List[str] = list(chain(*roll_result))
    roll_result_time_db_list: List[float] = [time.mktime(
        date_obj.timetuple()) for date_obj in roll_result_time_db_list1]

    slope_result = (
        pg_session.query(Observation.result_time)
        .filter(Observation.fk_dataset_id == slope_dataset.id)
        .all()
    )
    slope_result_time_db_list1: List[str] = list(chain(*slope_result))
    slope_result_time_db_list: List[float] = [time.mktime(
        date_obj.timetuple()) for date_obj in slope_result_time_db_list1]

    temperature_result = (
        pg_session.query(Observation.result_time)
        .filter(Observation.fk_dataset_id == temperature_dataset.id)
        .all()
    )
    temperature_result_time_db_list1: List[str] = list(
        chain(*temperature_result))
    temperature_result_time_db_list: List[float] = [time.mktime(
        date_obj.timetuple()) for date_obj in temperature_result_time_db_list1]

    for fb_observation in firebird_observations:
        # print(fb_observation.catena.name)
        if(fb_observation.roll is not None and roll_dataset is not None):
            value = fb_observation.roll
            add_observation(roll_dataset, fb_observation,
                            value, roll_result_time_db_list)

        if(fb_observation.pitch is not None and slope_dataset is not None):
            # max_id = max_id + 1
            value = fb_observation.pitch
            add_observation(slope_dataset, fb_observation,
                            value, slope_result_time_db_list)

        if(fb_observation.temperature is not None and temperature_dataset is not None):
            # max_id = max_id + 1
            value = fb_observation.temperature
            add_observation(temperature_dataset, fb_observation,
                            value, temperature_result_time_db_list)


def add_observation(
        dataset: Dataset,
        fb_observation: FbObservation,
        value: str,
        value_identifier_db_list: List[float]):
    ''' check if the observation already exists in db,
    otherwise add it '''
    # ob_id: str = str(observation_json.get('id'))

    # existing_observation: bool = (
    #     db_session.query(Observation)
    #     .filter(Observation.result_time == fb_observation.result_time,
    #             Observation.fk_dataset_id == dataset.id)
    #     .one_or_none()
    # )
    existing_observation: bool = time.mktime(
        fb_observation.result_time.timetuple()) in value_identifier_db_list
    # Can we insert this observation?
    if existing_observation is False:
        # insert new observation
        new_observation: Observation = Observation()
        new_observation = Observation(
            # id=max_id,
            value_type='quantity',
            sampling_time_start=fb_observation.result_time,
            sampling_time_end=fb_observation.result_time,
            result_time=fb_observation.result_time,
            sta_identifier=str(uuid.uuid4()),
            value_identifier=str(time.mktime(
                fb_observation.result_time.timetuple())),
            value_quantity=value
        )
        dataset.observations.append(new_observation)
        print(f"new observation with result time {new_observation.result_time} "
              f"for inclinometer {dataset.procedure.name} successfully imported!")
    else:
        print(f"observation with result time {fb_observation.result_time} "
              f"for inclinometer {dataset.procedure.name} already exists!")


# -----------------------------------------------------------------------------
if __name__ == "__main__":
    load_dotenv(find_dotenv())
    main()
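The sensor lists are read from the environment via python-dotenv and parsed with json.loads, so each *_SENSORS variable is expected to hold a JSON array of procedure identifiers (this format is an assumption inferred from the parsing code; the real names live in the project's .env). A minimal sketch of how such an entry is consumed, with made-up sensor names:

# hypothetical .env entry (single line):
# AMPFLWANG_KB1_SENSORS=["ampflwang_kb1_0", "ampflwang_kb1_1", "ampflwang_kb1_2"]
import json
import os

sensor_list = json.loads(os.getenv('AMPFLWANG_KB1_SENSORS').replace('\n', ''))
for sensor_id, sensor in enumerate(sensor_list):
    # sensor_id is used as the Firebird "sensore" index, sensor as the PostGIS procedure name
    print(sensor_id, sensor.lower())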
@@ -37,14 +37,14 @@ def main():
     # sensor name in postgis db
     # sensor = 'wolfsegg_kb1_0'
     platform = 'laakirchen_inclinometer'
 
     sensor_env_list = os.getenv('LAAKIRCHEN_SENSORS').replace('\n', '')
     sensor_list = json.loads(sensor_env_list)
     # print(sensor_list)
     firebird_session: session = create_session()
     # this will print elements along with their index value
     for sensor_id, sensor in enumerate(sensor_list):
 
         # db_observation = session.query(Observation) \
         #     .filter_by(name='John Snow').first()
         query_count = firebird_session.query(FbObservation).join(FbObservation.catena) \
@@ -53,22 +53,24 @@ def main():
             .filter(
                 or_(
                     FbObservation.temperature != None,
-                    FbObservation.pitch != None #this is used to check NULL values
+                    FbObservation.pitch != None  # this is used to check NULL values
                 )) \
             .count()
-        if query_count == 0:
-            print(f"sensor {sensor} "
-                  f"doesn't have any observations with measured values in firebird database!")
-            # hop to next for iteration, next sensor in list
-            continue
-        # feature_of_interest = query.statement.compile(dialect=firebird.dialect())
+        # if query_count == 0:
+        #     print(f"sensor {sensor} "
+        #           f"doesn't have any observations with measured values in firebird database!")
+        #     # hop to next for iteration, next sensor in list
+        #     continue
+        # test = query_count.statement.compile(dialect=firebird.dialect())
 
-        query = firebird_session.query(FbObservation).join(FbObservation.catena) \
-            .filter(FbObservation.sensore == sensor_id) \
-            .filter(Catena.name == feature_of_interest)
-        # print (query.statement.compile(dialect=firebird.dialect()))
-        firebird_observations: List[FbObservation] = query.all()
-        # firebird_session.close()
+        firebird_observations: List[FbObservation] = []
+        if query_count > 0:
+            query = firebird_session.query(FbObservation).join(FbObservation.catena) \
+                .filter(FbObservation.sensore == sensor_id) \
+                .filter(Catena.name == feature_of_interest)
+            # print (query.statement.compile(dialect=firebird.dialect()))
+            firebird_observations: List[FbObservation] = query.all()
+            firebird_session.close()
 
         pg_session: session = create_pg_session()
         # pg_datasets: List[Dataset] = pg_query.all()
@@ -77,20 +79,20 @@ def main():
             .join(Phenomenon) \
             .filter(Procedure.sta_identifier == sensor.lower())
         # .join(Platform).all() \
 
         roll_dataset: Dataset = pg_query.filter(
             Phenomenon.sta_identifier == "Roll").first()
 
         slope_dataset: Dataset = pg_query.filter(
             Phenomenon.sta_identifier == "Slope").first()
 
         temperature_dataset: Dataset = pg_query.filter(
             Phenomenon.sta_identifier == "InSystemTemperature").first()
 
         platform_exists = pg_session.query(Platform.id).filter_by(
             name=platform.lower()).scalar() is not None
         if not platform_exists:
             sensor_platform = Platform()
             sensor_platform.sta_identifier = platform.lower()
             sensor_platform.identifier = platform.lower()
             sensor_platform.name = platform.lower()
@@ -107,8 +109,7 @@ def main():
 
         # commit dataset changes:
         pg_session.commit()
 
-
         format_exists: bool = pg_session.query(Format.id).filter_by(
             definition="http://www.opengis.net/def/observationType/OGC-OM/2.0/OM_Measurement"
         ).scalar() is not None
@@ -121,12 +122,19 @@ def main():
             roll_dataset.fk_format_id = sensor_format.id
             temperature_dataset.fk_format_id = sensor_format.id
             pg_session.commit()
 
-        create_db_observations(firebird_observations, roll_dataset, slope_dataset, temperature_dataset, pg_session)
+        if query_count == 0:
+            print(f"sensor {sensor} "
+                  f"doesn't have any observations with measured values in firebird database!")
+            # hop to next for iteration, next sensor in list, don't insert any observations
+            continue
+
+        create_db_observations(firebird_observations, roll_dataset,
+                               slope_dataset, temperature_dataset, pg_session)
 
         # commit new observations:
         pg_session.commit()
 
         if len(roll_dataset.observations) > 0:
             # if not published yet, publish the roll dataset
             if not roll_dataset.is_published:
@@ -135,7 +143,7 @@ def main():
                 roll_dataset.dataset_type = "timeseries"
                 roll_dataset.observation_type = "simple"
                 roll_dataset.value_type = "quantity"
 
         if len(slope_dataset.observations) > 0:
             # if not published yet, publish the slope dataset
             if not slope_dataset.is_published:
@@ -144,7 +152,7 @@ def main():
                 slope_dataset.dataset_type = "timeseries"
                 slope_dataset.observation_type = "simple"
                 slope_dataset.value_type = "quantity"
 
         if len(temperature_dataset.observations) > 0:
             # if not published yet, publish the temperature dataset
             if not temperature_dataset.is_published:
@@ -154,7 +162,6 @@ def main():
                 temperature_dataset.observation_type = "simple"
                 temperature_dataset.value_type = "quantity"
         pg_session.commit()
 
-
         last_roll_observation = pg_session.query(Observation) \
             .filter(Observation.fk_dataset_id == roll_dataset.id) \
@@ -213,80 +220,95 @@ def main():
 
     # for loop sensors end
     pg_session.close()
-    firebird_session.close()
+    # firebird_session.close()
 
 
 def create_db_observations(firebird_observations: List[FbObservation],
                            roll_dataset: Dataset,
                            slope_dataset: Dataset,
                            temperature_dataset: Dataset,
                            pg_session: session):
     ''' insert new observations into db '''
     roll_result = (
         pg_session.query(Observation.result_time)
         .filter(Observation.fk_dataset_id == roll_dataset.id)
         .all()
     )
     roll_result_time_db_list1: List[str] = list(chain(*roll_result))
-    roll_result_time_db_list : List[float]= [time.mktime(date_obj.timetuple()) for date_obj in roll_result_time_db_list1]
+    roll_result_time_db_list: List[float] = [time.mktime(
+        date_obj.timetuple()) for date_obj in roll_result_time_db_list1]
 
     slope_result = (
         pg_session.query(Observation.result_time)
         .filter(Observation.fk_dataset_id == slope_dataset.id)
         .all()
     )
     slope_result_time_db_list1: List[str] = list(chain(*slope_result))
-    slope_result_time_db_list : List[float]= [time.mktime(date_obj.timetuple()) for date_obj in slope_result_time_db_list1]
+    slope_result_time_db_list: List[float] = [time.mktime(
+        date_obj.timetuple()) for date_obj in slope_result_time_db_list1]
 
     temperature_result = (
         pg_session.query(Observation.result_time)
         .filter(Observation.fk_dataset_id == temperature_dataset.id)
         .all()
     )
-    temperature_result_time_db_list1: List[str] = list(chain(*temperature_result))
-    temperature_result_time_db_list : List[float]= [time.mktime(date_obj.timetuple()) for date_obj in temperature_result_time_db_list1]
+    temperature_result_time_db_list1: List[str] = list(
+        chain(*temperature_result))
+    temperature_result_time_db_list: List[float] = [time.mktime(
+        date_obj.timetuple()) for date_obj in temperature_result_time_db_list1]
 
     for fb_observation in firebird_observations:
         # print(fb_observation.catena.name)
         if(fb_observation.roll is not None and roll_dataset is not None):
             value = fb_observation.roll
-            add_observation(roll_dataset, fb_observation, value, roll_result_time_db_list)
+            add_observation(roll_dataset, fb_observation,
+                            value, roll_result_time_db_list)
 
         if(fb_observation.pitch is not None and slope_dataset is not None):
             # max_id = max_id + 1
             value = fb_observation.pitch
-            add_observation(slope_dataset, fb_observation, value, slope_result_time_db_list)
+            add_observation(slope_dataset, fb_observation,
+                            value, slope_result_time_db_list)
 
         if(fb_observation.temperature is not None and temperature_dataset is not None):
             # max_id = max_id + 1
             value = fb_observation.temperature
-            add_observation(temperature_dataset, fb_observation, value, temperature_result_time_db_list)
+            add_observation(temperature_dataset, fb_observation,
+                            value, temperature_result_time_db_list)
 
 
-def add_observation(dataset: Dataset, fb_observation: FbObservation, value: str, value_identifier_db_list: List[float]):
+def add_observation(
+        dataset: Dataset,
+        fb_observation: FbObservation,
+        value: str,
+        value_identifier_db_list: List[float]):
     ''' check if the observation already exists in db,
     otherwise add it '''
     # ob_id: str = str(observation_json.get('id'))
 
     # existing_observation: bool = (
     #     db_session.query(Observation)
-    #     .filter(Observation.result_time == fb_observation.result_time, Observation.fk_dataset_id == dataset.id)
+    #     .filter(Observation.result_time == fb_observation.result_time,
+    #             Observation.fk_dataset_id == dataset.id)
     #     .one_or_none()
     # )
-    existing_observation: bool =time.mktime(fb_observation.result_time.timetuple()) in value_identifier_db_list
+    existing_observation: bool = time.mktime(
+        fb_observation.result_time.timetuple()) in value_identifier_db_list
     # Can we insert this observation?
     if existing_observation is False:
         # insert new observation
         new_observation: Observation = Observation()
         new_observation = Observation(
             # id=max_id,
             value_type='quantity',
             sampling_time_start=fb_observation.result_time,
             sampling_time_end=fb_observation.result_time,
             result_time=fb_observation.result_time,
             sta_identifier=str(uuid.uuid4()),
-            value_identifier = str(time.mktime(fb_observation.result_time.timetuple())),
+            value_identifier=str(time.mktime(
+                fb_observation.result_time.timetuple())),
             value_quantity=value
         )
         dataset.observations.append(new_observation)
         print(f"new observation with result time {new_observation.result_time} "
               f"for inclinometer {dataset.procedure.name} successfully imported!")
@@ -294,6 +316,7 @@ def add_observation(dataset: Dataset, fb_observation: FbObservation, value: str,
         print(f"observation with result time {fb_observation.result_time} "
               f"for inclinometer {dataset.procedure.name} already exists!")
 
 
 # -----------------------------------------------------------------------------
 if __name__ == "__main__":
+    load_dotenv(find_dotenv())
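The duplicate check introduced in these scripts keys observations by their result time converted to epoch seconds: a dataset's existing result_time values are flattened into a list once, and time.mktime(result_time.timetuple()) serves both for the membership test and as the stored value_identifier. A small self-contained sketch of that idea (the datetimes are made up for illustration):

import time
from datetime import datetime

existing = [datetime(2021, 5, 1, 10, 0), datetime(2021, 5, 1, 11, 0)]
# same conversion the import scripts use for value_identifier
existing_keys = [time.mktime(dt.timetuple()) for dt in existing]

candidate = datetime(2021, 5, 1, 11, 0)
if time.mktime(candidate.timetuple()) in existing_keys:
    print("observation already exists, skip insert")
else:
    print("new observation, insert it")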
@@ -1,197 +1,314 @@
 """ import firebird, export to postgresql """
 #!/usr/bin/python
 # -*- coding: utf-8 -*-
 
+import os
+import time
 from typing import List
+from itertools import chain
 import uuid
+import json
+from dotenv import load_dotenv, find_dotenv
 from sqlalchemy.orm import session
-from sqlalchemy import desc, asc
+from sqlalchemy import asc, desc
+# from sqlalchemy.dialects import firebird
+from sqlalchemy.sql import or_
 from db.fb_models import (create_session, FbObservation, Catena)
-from db.models import (create_pg_session, Dataset, Observation, Procedure, Phenomenon, Platform)
+from db.models import (create_pg_session, Dataset,
+                       Observation, Procedure, Phenomenon, Platform, Format)
 
 
 def main():
     """
     Main function.
     """
 
-    # parameter:
-    # sensor id in firebird db:
-    # sensor_id = 1
-    # # name of project area in firebird db
-    # feature_of_interest = 'TAC003-020-0517' # Wolfsegg KB1
-    # # sensor name in postgis db
-    # sensor = 'wolfsegg_kb1_1'
-    # platform = 'wolfsegg'
-
-    sensor_id = 0
+    # sensor_id = 0
     # name of project area in firebird db
     feature_of_interest = 'TAC003-020-0517'  # Wolfsegg KB1
     # sensor name in postgis db
-    sensor = 'wolfsegg_kb1_0'
-    platform = 'wolfsegg_inclinometer'
+    # sensor = 'wolfsegg_kb1_0'
+    platform = 'wolfsegg_kb1_inclinometer'
 
+    sensor_env_list = os.getenv('WOLFSEGG_KB1_SENSORS').replace('\n', '')
+    sensor_list = json.loads(sensor_env_list)
+    # print(sensor_list)
     firebird_session: session = create_session()
-    # db_observation = session.query(Observation) \
-    #     .filter_by(name='John Snow').first()
-    query = firebird_session.query(FbObservation).join(FbObservation.catena) \
-        .filter(FbObservation.sensore == sensor_id) \
-        .filter(Catena.name == feature_of_interest)
-    # feature_of_interest = query.statement.compile(dialect=firebird.dialect())
-    firebird_observations: List[FbObservation] = query.all()
-    firebird_session.close()
-
-    pg_session: session = create_pg_session()
-    # pg_datasets: List[Dataset] = pg_query.all()
-    pg_query = pg_session.query(Dataset) \
-        .join(Procedure) \
-        .join(Phenomenon) \
-        .filter(Procedure.sta_identifier == sensor.lower())
-    # .join(Platform).all() \
-
-    # roll_dataset = [x for x in pg_datasets if x.phenomenon.sta_identifier == "Roll"]
-    roll_dataset = pg_query.filter(Phenomenon.sta_identifier == "Roll").first()
-    roll_dataset.is_published = 1
-    roll_dataset.is_hidden = 0
-    roll_dataset.dataset_type = "timeseries"
-    roll_dataset.observation_type = "simple"
-    roll_dataset.value_type = "quantity"
-    slope_dataset = pg_query.filter(
-        Phenomenon.sta_identifier == "Slope").first()
-    slope_dataset.is_published = 1
-    slope_dataset.is_hidden = 0
-    slope_dataset.dataset_type = "timeseries"
-    slope_dataset.observation_type = "simple"
-    slope_dataset.value_type = "quantity"
-    temperature_dataset = pg_query.filter(
-        Phenomenon.sta_identifier == "InSystemTemperature").first()
-    temperature_dataset.is_published = 1
-    temperature_dataset.is_hidden = 0
-    temperature_dataset.dataset_type = "timeseries"
-    temperature_dataset.observation_type = "simple"
-    temperature_dataset.value_type = "quantity"
-    pg_session.commit()
-
-    # max_id = pg_session.query(func.max(Observation.id)).scalar()
-    for fb_observation in firebird_observations:
-        # print(fb_observation.catena.name)
-        if(fb_observation.roll is not None and roll_dataset is not None):
-            # max_id = max_id + 1
-            pg_roll_observation = Observation(
-                # id=max_id,
-                value_type='quantity',
-                sampling_time_start=fb_observation.result_time,
-                sampling_time_end=fb_observation.result_time,
-                result_time=fb_observation.result_time,
-                sta_identifier=str(uuid.uuid4()),
-                value_quantity=fb_observation.roll
-            )
-            roll_dataset.observations.append(pg_roll_observation)
-        if(fb_observation.pitch is not None and slope_dataset is not None):
-            # max_id = max_id + 1
-            pg_slope_observation = Observation(
-                # id=max_id,
-                value_type='quantity',
-                sampling_time_start=fb_observation.result_time,
-                sampling_time_end=fb_observation.result_time,
-                result_time=fb_observation.result_time,
-                sta_identifier=str(uuid.uuid4()),
-                value_quantity=fb_observation.pitch
-            )
-            slope_dataset.observations.append(pg_slope_observation)
-        if(fb_observation.temperature is not None and temperature_dataset is not None):
-            # max_id = max_id + 1
-            pg_temperature_observation = Observation(
-                # id=max_id,
-                value_type='quantity',
-                sampling_time_start=fb_observation.result_time,
-                sampling_time_end=fb_observation.result_time,
-                result_time=fb_observation.result_time,
-                sta_identifier=str(uuid.uuid4()),
-                value_quantity=fb_observation.temperature
-            )
-            temperature_dataset.observations.append(pg_temperature_observation)
-    # commit observations:
-    pg_session.commit()
-
-    last_roll_observation = pg_session.query(Observation) \
-        .filter(Observation.fk_dataset_id == roll_dataset.id) \
-        .order_by(desc('sampling_time_start')) \
-        .first()
-    if last_roll_observation is not None:
-        roll_dataset.last_time = last_roll_observation.sampling_time_start
-        roll_dataset.last_value = last_roll_observation.value_quantity
-        roll_dataset.fk_last_observation_id = last_roll_observation.id
-
-    last_slope_observation = pg_session.query(Observation) \
-        .filter(Observation.fk_dataset_id == slope_dataset.id) \
-        .order_by(desc('sampling_time_start')) \
-        .first()
-    if last_slope_observation is not None:
-        slope_dataset.last_time = last_slope_observation.sampling_time_start
-        slope_dataset.last_value = last_slope_observation.value_quantity
-        slope_dataset.fk_last_observation_id = last_slope_observation.id
-
-    last_temperature_observation = pg_session.query(Observation) \
-        .filter(Observation.fk_dataset_id == temperature_dataset.id) \
-        .order_by(desc('sampling_time_start')) \
-        .first()
-    if last_temperature_observation is not None:
-        temperature_dataset.last_time = last_temperature_observation.sampling_time_start
-        temperature_dataset.last_value = last_temperature_observation.value_quantity
-        temperature_dataset.fk_last_observation_id = last_temperature_observation.id
-
-    first_roll_observation = pg_session.query(Observation) \
-        .filter(Observation.fk_dataset_id == roll_dataset.id) \
-        .order_by(asc('sampling_time_start')) \
-        .first()
-    if first_roll_observation is not None:
-        roll_dataset.first_time = first_roll_observation.sampling_time_start
-        roll_dataset.first_value = first_roll_observation.value_quantity
-        roll_dataset.fk_first_observation_id = first_roll_observation.id
-
-    first_slope_observation = pg_session.query(Observation) \
-        .filter(Observation.fk_dataset_id == slope_dataset.id) \
-        .order_by(asc('sampling_time_start')) \
-        .first()
-    if first_slope_observation is not None:
-        slope_dataset.first_time = first_slope_observation.sampling_time_start
-        slope_dataset.first_value = first_slope_observation.value_quantity
-        slope_dataset.fk_first_observation_id = first_slope_observation.id
-
-    first_temperature_observation = pg_session.query(Observation) \
-        .filter(Observation.fk_dataset_id == temperature_dataset.id) \
-        .order_by(asc('sampling_time_start')) \
-        .first()
-    if first_temperature_observation is not None:
-        temperature_dataset.first_time = first_temperature_observation.sampling_time_start
-        temperature_dataset.first_value = first_temperature_observation.value_quantity
-        temperature_dataset.fk_first_observation_id = first_temperature_observation.id
-
-    platform_exists = pg_session.query(Platform.id).filter_by(
-        name=platform.lower()).scalar() is not None
-    if not platform_exists:
-        sensor_platform = Platform()
-        # max_id = pg_session.query(func.max(Platform.id)).scalar()
-        # sensor_platform.id = max_id + 1
-        sensor_platform.sta_identifier = platform.lower()
-        sensor_platform.identifier = platform.lower()
-        sensor_platform.name = platform.lower()
-        slope_dataset.platform = sensor_platform
-        roll_dataset.platform = sensor_platform
-        temperature_dataset.platform = sensor_platform
-    else:
-        sensor_platform = pg_session.query(Platform.id) \
-            .filter(Platform.name == platform.lower()) \
-            .first()
-        slope_dataset.fk_platform_id = sensor_platform.id
-        roll_dataset.fk_platform_id = sensor_platform.id
-        temperature_dataset.fk_platform_id = sensor_platform.id
-
-    # commit dataset changes:
-    pg_session.commit()
-    pg_session.close()
+    # this will print elements along with their index value
+    for sensor_id, sensor in enumerate(sensor_list):
+
+        # db_observation = session.query(Observation) \
+        #     .filter_by(name='John Snow').first()
+        query_count = firebird_session.query(FbObservation).join(FbObservation.catena) \
+            .filter(FbObservation.sensore == sensor_id) \
+            .filter(Catena.name == feature_of_interest) \
+            .filter(
+                or_(
+                    FbObservation.temperature != None,
+                    FbObservation.pitch != None  # this is used to check NULL values
+                )) \
+            .count()
+        # if query_count == 0:
+        #     print(f"sensor {sensor} "
+        #           f"doesn't have any observations with measured values in firebird database!")
+        #     # hop to next for iteration, next sensor in list
+        #     continue
+        # test = query_count.statement.compile(dialect=firebird.dialect())
+
+        firebird_observations: List[FbObservation] = []
+        if query_count > 0:
+            query = firebird_session.query(FbObservation).join(FbObservation.catena) \
+                .filter(FbObservation.sensore == sensor_id) \
+                .filter(Catena.name == feature_of_interest)
+            # print (query.statement.compile(dialect=firebird.dialect()))
+            firebird_observations: List[FbObservation] = query.all()
+            firebird_session.close()
+
+        pg_session: session = create_pg_session()
+        # pg_datasets: List[Dataset] = pg_query.all()
+        pg_query = pg_session.query(Dataset) \
+            .join(Procedure) \
+            .join(Phenomenon) \
+            .filter(Procedure.sta_identifier == sensor.lower())
+        # .join(Platform).all() \
+
+        roll_dataset: Dataset = pg_query.filter(
+            Phenomenon.sta_identifier == "Roll").first()
+
+        slope_dataset: Dataset = pg_query.filter(
+            Phenomenon.sta_identifier == "Slope").first()
+
+        temperature_dataset: Dataset = pg_query.filter(
+            Phenomenon.sta_identifier == "InSystemTemperature").first()
+
+        platform_exists = pg_session.query(Platform.id).filter_by(
+            name=platform.lower()).scalar() is not None
+        if not platform_exists:
+            sensor_platform = Platform()
+            sensor_platform.sta_identifier = platform.lower()
+            sensor_platform.identifier = platform.lower()
+            sensor_platform.name = platform.lower()
+            slope_dataset.platform = sensor_platform
+            roll_dataset.platform = sensor_platform
+            temperature_dataset.platform = sensor_platform
+        else:
+            sensor_platform = pg_session.query(Platform.id) \
+                .filter(Platform.name == platform.lower()) \
+                .first()
+            slope_dataset.fk_platform_id = sensor_platform.id
+            roll_dataset.fk_platform_id = sensor_platform.id
+            temperature_dataset.fk_platform_id = sensor_platform.id
+
+        # commit dataset changes:
+        pg_session.commit()
+
+        format_exists: bool = pg_session.query(Format.id).filter_by(
+            definition="http://www.opengis.net/def/observationType/OGC-OM/2.0/OM_Measurement"
+        ).scalar() is not None
+        if format_exists:
+            sensor_format = pg_session.query(Format.id) \
+                .filter(Format.definition ==
+                        "http://www.opengis.net/def/observationType/OGC-OM/2.0/OM_Measurement") \
+                .first()
+            slope_dataset.fk_format_id = sensor_format.id
+            roll_dataset.fk_format_id = sensor_format.id
+            temperature_dataset.fk_format_id = sensor_format.id
+            pg_session.commit()
+
+        if query_count == 0:
+            print(f"sensor {sensor} "
+                  f"doesn't have any observations with measured values in firebird database!")
+            # hop to next for iteration, next sensor in list, don't insert any observations
+            continue
+
+        create_db_observations(firebird_observations, roll_dataset,
+                               slope_dataset, temperature_dataset, pg_session)
+
+        # commit new observations:
+        pg_session.commit()
+
+        if len(roll_dataset.observations) > 0:
+            # if not published yet, publish the roll dataset
+            if not roll_dataset.is_published:
+                roll_dataset.is_published = 1
+                roll_dataset.is_hidden = 0
+                roll_dataset.dataset_type = "timeseries"
+                roll_dataset.observation_type = "simple"
+                roll_dataset.value_type = "quantity"
+
+        if len(slope_dataset.observations) > 0:
+            # if not published yet, publish the slope dataset
+            if not slope_dataset.is_published:
+                slope_dataset.is_published = 1
+                slope_dataset.is_hidden = 0
+                slope_dataset.dataset_type = "timeseries"
+                slope_dataset.observation_type = "simple"
+                slope_dataset.value_type = "quantity"
+
+        if len(temperature_dataset.observations) > 0:
+            # if not published yet, publish the temperature dataset
+            if not temperature_dataset.is_published:
+                temperature_dataset.is_published = 1
+                temperature_dataset.is_hidden = 0
+                temperature_dataset.dataset_type = "timeseries"
+                temperature_dataset.observation_type = "simple"
+                temperature_dataset.value_type = "quantity"
+        pg_session.commit()
+
+        last_roll_observation = pg_session.query(Observation) \
+            .filter(Observation.fk_dataset_id == roll_dataset.id) \
+            .order_by(desc('sampling_time_start')) \
+            .first()
+        if last_roll_observation is not None:
+            roll_dataset.last_time = last_roll_observation.sampling_time_start
+            roll_dataset.last_value = last_roll_observation.value_quantity
+            roll_dataset.fk_last_observation_id = last_roll_observation.id
+
+        last_slope_observation = pg_session.query(Observation) \
+            .filter(Observation.fk_dataset_id == slope_dataset.id) \
+            .order_by(desc('sampling_time_start')) \
+            .first()
+        if last_slope_observation is not None:
+            slope_dataset.last_time = last_slope_observation.sampling_time_start
+            slope_dataset.last_value = last_slope_observation.value_quantity
+            slope_dataset.fk_last_observation_id = last_slope_observation.id
+
+        last_temperature_observation = pg_session.query(Observation) \
+            .filter(Observation.fk_dataset_id == temperature_dataset.id) \
+            .order_by(desc('sampling_time_start')) \
+            .first()
+        if last_temperature_observation is not None:
+            temperature_dataset.last_time = last_temperature_observation.sampling_time_start
+            temperature_dataset.last_value = last_temperature_observation.value_quantity
+            temperature_dataset.fk_last_observation_id = last_temperature_observation.id
+
+        first_roll_observation = pg_session.query(Observation) \
+            .filter(Observation.fk_dataset_id == roll_dataset.id) \
+            .order_by(asc('sampling_time_start')) \
+            .first()
+        if first_roll_observation is not None:
+            roll_dataset.first_time = first_roll_observation.sampling_time_start
+            roll_dataset.first_value = first_roll_observation.value_quantity
+            roll_dataset.fk_first_observation_id = first_roll_observation.id
+
+        first_slope_observation = pg_session.query(Observation) \
+            .filter(Observation.fk_dataset_id == slope_dataset.id) \
+            .order_by(asc('sampling_time_start')) \
+            .first()
+        if first_slope_observation is not None:
+            slope_dataset.first_time = first_slope_observation.sampling_time_start
+            slope_dataset.first_value = first_slope_observation.value_quantity
+            slope_dataset.fk_first_observation_id = first_slope_observation.id
+
+        first_temperature_observation = pg_session.query(Observation) \
+            .filter(Observation.fk_dataset_id == temperature_dataset.id) \
+            .order_by(asc('sampling_time_start')) \
+            .first()
+        if first_temperature_observation is not None:
+            temperature_dataset.first_time = first_temperature_observation.sampling_time_start
+            temperature_dataset.first_value = first_temperature_observation.value_quantity
+            temperature_dataset.fk_first_observation_id = first_temperature_observation.id
+        pg_session.commit()
+
+    # for loop sensors end
+    pg_session.close()
+    # firebird_session.close()
+
+
+def create_db_observations(firebird_observations: List[FbObservation],
+                           roll_dataset: Dataset,
+                           slope_dataset: Dataset,
+                           temperature_dataset: Dataset,
+                           pg_session: session):
+    ''' insert new observations into db '''
+    roll_result = (
+        pg_session.query(Observation.result_time)
+        .filter(Observation.fk_dataset_id == roll_dataset.id)
+        .all()
+    )
+    roll_result_time_db_list1: List[str] = list(chain(*roll_result))
+    roll_result_time_db_list: List[float] = [time.mktime(
+        date_obj.timetuple()) for date_obj in roll_result_time_db_list1]
+
+    slope_result = (
+        pg_session.query(Observation.result_time)
+        .filter(Observation.fk_dataset_id == slope_dataset.id)
+        .all()
+    )
+    slope_result_time_db_list1: List[str] = list(chain(*slope_result))
+    slope_result_time_db_list: List[float] = [time.mktime(
+        date_obj.timetuple()) for date_obj in slope_result_time_db_list1]
+
+    temperature_result = (
+        pg_session.query(Observation.result_time)
+        .filter(Observation.fk_dataset_id == temperature_dataset.id)
+        .all()
+    )
+    temperature_result_time_db_list1: List[str] = list(
+        chain(*temperature_result))
+    temperature_result_time_db_list: List[float] = [time.mktime(
+        date_obj.timetuple()) for date_obj in temperature_result_time_db_list1]
+
+    for fb_observation in firebird_observations:
+        # print(fb_observation.catena.name)
+        if(fb_observation.roll is not None and roll_dataset is not None):
+            value = fb_observation.roll
+            add_observation(roll_dataset, fb_observation,
+                            value, roll_result_time_db_list)
+
+        if(fb_observation.pitch is not None and slope_dataset is not None):
+            # max_id = max_id + 1
+            value = fb_observation.pitch
+            add_observation(slope_dataset, fb_observation,
+                            value, slope_result_time_db_list)
+
+        if(fb_observation.temperature is not None and temperature_dataset is not None):
+            # max_id = max_id + 1
+            value = fb_observation.temperature
+            add_observation(temperature_dataset, fb_observation,
+                            value, temperature_result_time_db_list)
+
+
+def add_observation(
+        dataset: Dataset,
+        fb_observation: FbObservation,
+        value: str,
+        value_identifier_db_list: List[float]):
+    ''' check if the observation already exists in db,
+    otherwise add it '''
+    # ob_id: str = str(observation_json.get('id'))
+
+    # existing_observation: bool = (
+    #     db_session.query(Observation)
+    #     .filter(Observation.result_time == fb_observation.result_time,
+    #             Observation.fk_dataset_id == dataset.id)
+    #     .one_or_none()
+    # )
+    existing_observation: bool = time.mktime(
+        fb_observation.result_time.timetuple()) in value_identifier_db_list
+    # Can we insert this observation?
+    if existing_observation is False:
+        # insert new observation
+        new_observation: Observation = Observation()
+        new_observation = Observation(
+            # id=max_id,
+            value_type='quantity',
+            sampling_time_start=fb_observation.result_time,
+            sampling_time_end=fb_observation.result_time,
+            result_time=fb_observation.result_time,
+            sta_identifier=str(uuid.uuid4()),
+            value_identifier=str(time.mktime(
+                fb_observation.result_time.timetuple())),
+            value_quantity=value
+        )
+        dataset.observations.append(new_observation)
+        print(f"new observation with result time {new_observation.result_time} "
+              f"for inclinometer {dataset.procedure.name} successfully imported!")
+    else:
+        print(f"observation with result time {fb_observation.result_time} "
+              f"for inclinometer {dataset.procedure.name} already exists!")
 
 
 # -----------------------------------------------------------------------------
 if __name__ == "__main__":
+    load_dotenv(find_dotenv())
     main()
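The platform handling added above is a get-or-create pattern against the Platform table. A compact, hedged equivalent, assuming the same SQLAlchemy Platform model and session as in the scripts (the helper name is hypothetical, not part of the commit):

def get_or_create_platform(pg_session, name: str):
    """ return the existing Platform row, or a new, not yet committed one """
    existing = pg_session.query(Platform).filter(Platform.name == name.lower()).first()
    if existing is not None:
        return existing
    platform = Platform()
    platform.sta_identifier = name.lower()
    platform.identifier = name.lower()
    platform.name = name.lower()
    return platform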
automatic_inclinometer/import_observations_wolfsegg_kb1_old.py (new file, 197 lines)
@ -0,0 +1,197 @@
|
||||||
""" import firebird, export to postgresql """
#!/usr/bin/python# -*- coding: utf-8 -*-

from typing import List
import uuid
from sqlalchemy.orm import session
from sqlalchemy import desc, asc
from db.fb_models import (create_session, FbObservation, Catena)
from db.models import (create_pg_session, Dataset, Observation, Procedure, Phenomenon, Platform)


def main():
    """
    Main function.
    """

    # parameter:
    # sensor id in firebird db:
    # sensor_id = 1
    # # name of project area in firebird db
    # feature_of_interest = 'TAC003-020-0517' # Wolfsegg KB1
    # # sensor name in postgis db
    # sensor = 'wolfsegg_kb1_1'
    # platform = 'wolfsegg'

    sensor_id = 0
    # name of project area in firebird db
    feature_of_interest = 'TAC003-020-0517'  # Wolfsegg KB1
    # sensor name in postgis db
    sensor = 'wolfsegg_kb1_0'
    platform = 'wolfsegg_kb1_inclinometer'

    firebird_session: session = create_session()
    # db_observation = session.query(Observation) \
    #     .filter_by(name='John Snow').first()
    query = firebird_session.query(FbObservation).join(FbObservation.catena) \
        .filter(FbObservation.sensore == sensor_id) \
        .filter(Catena.name == feature_of_interest)
    # feature_of_interest = query.statement.compile(dialect=firebird.dialect())
    firebird_observations: List[FbObservation] = query.all()
    firebird_session.close()

    pg_session: session = create_pg_session()
    # pg_datasets: List[Dataset] = pg_query.all()
    pg_query = pg_session.query(Dataset) \
        .join(Procedure) \
        .join(Phenomenon) \
        .filter(Procedure.sta_identifier == sensor.lower())
    # .join(Platform).all() \

    # roll_dataset = [x for x in pg_datasets if x.phenomenon.sta_identifier == "Roll"]
    roll_dataset = pg_query.filter(Phenomenon.sta_identifier == "Roll").first()
    roll_dataset.is_published = 1
    roll_dataset.is_hidden = 0
    roll_dataset.dataset_type = "timeseries"
    roll_dataset.observation_type = "simple"
    roll_dataset.value_type = "quantity"
    slope_dataset = pg_query.filter(
        Phenomenon.sta_identifier == "Slope").first()
    slope_dataset.is_published = 1
    slope_dataset.is_hidden = 0
    slope_dataset.dataset_type = "timeseries"
    slope_dataset.observation_type = "simple"
    slope_dataset.value_type = "quantity"
    temperature_dataset = pg_query.filter(
        Phenomenon.sta_identifier == "InSystemTemperature").first()
    temperature_dataset.is_published = 1
    temperature_dataset.is_hidden = 0
    temperature_dataset.dataset_type = "timeseries"
    temperature_dataset.observation_type = "simple"
    temperature_dataset.value_type = "quantity"
    pg_session.commit()

    # max_id = pg_session.query(func.max(Observation.id)).scalar()
    for fb_observation in firebird_observations:
        # print(fb_observation.catena.name)
        if(fb_observation.roll is not None and roll_dataset is not None):
            # max_id = max_id + 1
            pg_roll_observation = Observation(
                # id=max_id,
                value_type='quantity',
                sampling_time_start=fb_observation.result_time,
                sampling_time_end=fb_observation.result_time,
                result_time=fb_observation.result_time,
                sta_identifier=str(uuid.uuid4()),
                value_quantity=fb_observation.roll
            )
            roll_dataset.observations.append(pg_roll_observation)
        if(fb_observation.pitch is not None and slope_dataset is not None):
            # max_id = max_id + 1
            pg_slope_observation = Observation(
                # id=max_id,
                value_type='quantity',
                sampling_time_start=fb_observation.result_time,
                sampling_time_end=fb_observation.result_time,
                result_time=fb_observation.result_time,
                sta_identifier=str(uuid.uuid4()),
                value_quantity=fb_observation.pitch
            )
            slope_dataset.observations.append(pg_slope_observation)
        if(fb_observation.temperature is not None and temperature_dataset is not None):
            # max_id = max_id + 1
            pg_temperature_observation = Observation(
                # id=max_id,
                value_type='quantity',
                sampling_time_start=fb_observation.result_time,
                sampling_time_end=fb_observation.result_time,
                result_time=fb_observation.result_time,
                sta_identifier=str(uuid.uuid4()),
                value_quantity=fb_observation.temperature
            )
            temperature_dataset.observations.append(pg_temperature_observation)
    # commit observations:
    pg_session.commit()

    last_roll_observation = pg_session.query(Observation) \
        .filter(Observation.fk_dataset_id == roll_dataset.id) \
        .order_by(desc('sampling_time_start')) \
        .first()
    if last_roll_observation is not None:
        roll_dataset.last_time = last_roll_observation.sampling_time_start
        roll_dataset.last_value = last_roll_observation.value_quantity
        roll_dataset.fk_last_observation_id = last_roll_observation.id

    last_slope_observation = pg_session.query(Observation) \
        .filter(Observation.fk_dataset_id == slope_dataset.id) \
        .order_by(desc('sampling_time_start')) \
        .first()
    if last_slope_observation is not None:
        slope_dataset.last_time = last_slope_observation.sampling_time_start
        slope_dataset.last_value = last_slope_observation.value_quantity
        slope_dataset.fk_last_observation_id = last_slope_observation.id

    last_temperature_observation = pg_session.query(Observation) \
        .filter(Observation.fk_dataset_id == temperature_dataset.id) \
        .order_by(desc('sampling_time_start')) \
        .first()
    if last_temperature_observation is not None:
        temperature_dataset.last_time = last_temperature_observation.sampling_time_start
        temperature_dataset.last_value = last_temperature_observation.value_quantity
        temperature_dataset.fk_last_observation_id = last_temperature_observation.id

    first_roll_observation = pg_session.query(Observation) \
        .filter(Observation.fk_dataset_id == roll_dataset.id) \
        .order_by(asc('sampling_time_start')) \
        .first()
    if first_roll_observation is not None:
        roll_dataset.first_time = first_roll_observation.sampling_time_start
        roll_dataset.first_value = first_roll_observation.value_quantity
        roll_dataset.fk_first_observation_id = first_roll_observation.id

    first_slope_observation = pg_session.query(Observation) \
        .filter(Observation.fk_dataset_id == slope_dataset.id) \
        .order_by(asc('sampling_time_start')) \
        .first()
    if first_slope_observation is not None:
        slope_dataset.first_time = first_slope_observation.sampling_time_start
        slope_dataset.first_value = first_slope_observation.value_quantity
        slope_dataset.fk_first_observation_id = first_slope_observation.id

    first_temperature_observation = pg_session.query(Observation) \
        .filter(Observation.fk_dataset_id == temperature_dataset.id) \
        .order_by(asc('sampling_time_start')) \
        .first()
    if first_temperature_observation is not None:
        temperature_dataset.first_time = first_temperature_observation.sampling_time_start
        temperature_dataset.first_value = first_temperature_observation.value_quantity
        temperature_dataset.fk_first_observation_id = first_temperature_observation.id

    platform_exists = pg_session.query(Platform.id).filter_by(
        name=platform.lower()).scalar() is not None
    if not platform_exists:
        sensor_platform = Platform()
        # max_id = pg_session.query(func.max(Platform.id)).scalar()
        # sensor_platform.id = max_id + 1
        sensor_platform.sta_identifier = platform.lower()
        sensor_platform.identifier = platform.lower()
        sensor_platform.name = platform.lower()
        slope_dataset.platform = sensor_platform
        roll_dataset.platform = sensor_platform
        temperature_dataset.platform = sensor_platform
    else:
        sensor_platform = pg_session.query(Platform.id) \
            .filter(Platform.name == platform.lower()) \
            .first()
        slope_dataset.fk_platform_id = sensor_platform.id
        roll_dataset.fk_platform_id = sensor_platform.id
        temperature_dataset.fk_platform_id = sensor_platform.id

    # commit dataset changes:
    pg_session.commit()
    pg_session.close()


# -----------------------------------------------------------------------------
if __name__ == "__main__":
    main()
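The first/last bookkeeping above repeats the same pair of queries for the roll, slope and temperature datasets. A sketch of an equivalent helper (hypothetical, not part of the commit) that could be called once per dataset:

def update_dataset_bounds(pg_session, dataset):
    # hypothetical helper: set first/last time, value and observation id on one dataset
    first = pg_session.query(Observation) \
        .filter(Observation.fk_dataset_id == dataset.id) \
        .order_by(asc('sampling_time_start')).first()
    last = pg_session.query(Observation) \
        .filter(Observation.fk_dataset_id == dataset.id) \
        .order_by(desc('sampling_time_start')).first()
    if first is not None:
        dataset.first_time = first.sampling_time_start
        dataset.first_value = first.value_quantity
        dataset.fk_first_observation_id = first.id
    if last is not None:
        dataset.last_time = last.sampling_time_start
        dataset.last_value = last.value_quantity
        dataset.fk_last_observation_id = last.id

It would be called as update_dataset_bounds(pg_session, roll_dataset) and likewise for the slope and temperature datasets.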
@@ -0,0 +1,195 @@
# -*- coding: utf-8 -*-
"""Insert the Ampflwang KB1 inclinometer sensors into the SOS web service."""

from typing import List
import requests
# from insert_sensor.transactional import insert_sensor
from insert_sensor.wrapper import (Offering, FoI, Procedure, SensorType)
# import json


class Sos():
    """
    A class to represent a sos service.
    ...

    Attributes
    ----------
    sosurl : str
        url of the sos service
    token : str
        token to access the sos service
    """

    def __init__(self, url, token=''):
        self.sosurl = str(url)  # url to access the SOS
        self.token = str(token)  # security token, optional
        # Test if URL exists
        try:
            test = requests.get(self.sosurl)
            test.raise_for_status()
        except requests.HTTPError:
            print("The URL is not valid")


# Python3 code here creating class
class Sensor:
    """
    A class to represent an input sensor.
    ...

    Attributes
    ----------
    name : str
        name of the sensor
    x : float
        longitude of the sensor position (degrees)
    y : float
        latitude of the sensor position (degrees)
    """

    def __init__(self, name: str, x_coord: float, y_coord: float):
        self.name = name
        self.x_coord = x_coord
        self.y_coord = y_coord


def main():
    """
    main function
    """
    sos_url = 'https://geomon.geologie.ac.at/52n-sos-webapp/service'

    # creating list
    sensor_list: List[Sensor] = []

    # appending instances to list 48.0889892,13.5583703
    sensor_list.append(
        Sensor('ampflwang_kb1_0', 13.5583703, 48.0889892))
    sensor_list.append(
        Sensor('ampflwang_kb1_1', 13.5583703, 48.0889892))
    sensor_list.append(
        Sensor('ampflwang_kb1_2', 13.5583703, 48.0889892))
    sensor_list.append(
        Sensor('ampflwang_kb1_3', 13.5583703, 48.0889892))
    sensor_list.append(
        Sensor('ampflwang_kb1_4', 13.5583703, 48.0889892))
    sensor_list.append(
        Sensor('ampflwang_kb1_5', 13.5583703, 48.0889892))
    sensor_list.append(
        Sensor('ampflwang_kb1_6', 13.5583703, 48.0889892))
    sensor_list.append(
        Sensor('ampflwang_kb1_7', 13.5583703, 48.0889892))
    sensor_list.append(
        Sensor('ampflwang_kb1_8', 13.5583703, 48.0889892))
    sensor_list.append(
        Sensor('ampflwang_kb1_9', 13.5583703, 48.0889892))

    sensor: Sensor
    for sensor in sensor_list:
        # platform ampflwang_kb1_inclinometer
        offering = Offering(
            "https://geomon.geologie.ac.at/52n-sos-webapp/api/offerings/",
            sensor.name,
            "Bohrloch, Ampflwang Inklinometer"
        )
        procedure = Procedure(sensor.name, sensor.name)
        foi = FoI("degree", "m", (sensor.x_coord, sensor.y_coord, 0.0),
                  "GSA02A-010-1210", "Ampflwang KB1")
        # now insert sensor via rest service:
        sensor_type=SensorType("inclinometer")
        post_data=insert_sensor(offering, procedure, foi, sensor_type)
        # print(post_data)
        headers={'Accept': 'application/json'}
        request=requests.post(sos_url, headers = headers, json = post_data)
        print(request.text)


def insert_sensor(offering, procedure, foi, sensor_type):
    """
    Prepares the body of an InsertSensor request for JSON binding.
    :param offering: an instance of class Offering.
    :param procedure: an instance of class Procedure.
    :param foi: feature of interest, an instance of FoI.
    :param sensor_type: SensorType object
    :return: valid body for an InsertSensor request.
    """

    # shortName = offering.name  # string
    # longName = 'Sibratsgfall test'  # string

    # Offering values
    gml_id='\"' + str(procedure.id) + '\"'  # Offering name, double quoted
    offering_name=offering.name
    offering_label=offering.label
    # offID = offering.fullId # URL format of full id

    # featureName = featureID = cordX = cordY = height = h_unit = z_unit = coordinates = ""
    # check if feature of interest should be declared
    if foi is not None:
        # feature_id = 'https://geomon.geologie.ac.at/52n-sos-webapp/api/features/' + \
        #     str(foi.fid)  # URL format
        cord_x=str(foi.x)  # longitude degrees, float
        cord_y=str(foi.y)  # latitude degrees, float
        coordinates=cord_x + " " + cord_y
        height=str(foi.z)  # altitude in meters, float
        # h_unit = foi.Hunit  # units for horizontal coordinates
        # z_unit = foi.Vunit  # units for altitude
        feature_id=foi.fid  # "feature location"
        feature_name=foi.name  # "feature location"
    else:
        pass

    procedure_name=procedure.name
    procedure_identifier=procedure.id  # URL,
    obs_types=[]
    output_list=''  # output list element for describe procedure
    properties_list=[]
    for attr in sensor_type.pattern["attributes"]:
        obs_prop_name='\"' + attr[0] + '\"'  # attribute name
        # print(obs_prop_name)
        unit_name=sensor_type.om_types[attr[1]]  # om type
        # magnitud = a # ??

        obs_name=obs_prop_name.replace('\"', '')
        obs_name="".join(obs_name.split())  # observable property name
        output='<sml:output name=' + obs_prop_name + '><swe:Quantity definition=' + \
            '\"' + (obs_name) + '\"' + \
            '></swe:Quantity></sml:output>'
        output_list=output_list + output
        # add property identifier to the list.
        properties_list.append(obs_name)
        # prepare list of measurement types
        # A sensor can not register duplicated observation types.
        this_type="http://www.opengis.net/def/observationType/OGC-OM/2.0/"+unit_name
        if this_type not in obs_types:  # when new type appears
            obs_types.append(this_type)
        else:
            continue

    # Unit of measurement:
    unit_name='\"' + procedure.name + '\"'  # double quoted string
    # unit = omType  # one of the MO measurement types

    body={
        "request": "InsertSensor",
        "service": "SOS",
        "version": "2.0.0",
        "procedureDescriptionFormat": "http://www.opengis.net/sensorml/2.0",
"procedureDescription": f'<sml:PhysicalSystem gml:id={gml_id} xmlns:swes=\"http://www.opengis.net/swes/2.0\" xmlns:sos=\"http://www.opengis.net/sos/2.0\" xmlns:swe=\"http://www.opengis.net/swe/2.0\" xmlns:sml=\"http://www.opengis.net/sensorml/2.0\" xmlns:gml=\"http://www.opengis.net/gml/3.2\" xmlns:xlink=\"http://www.w3.org/1999/xlink\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xmlns:gco=\"http://www.isotc211.org/2005/gco\" xmlns:gmd=\"http://www.isotc211.org/2005/gmd\"><gml:identifier codeSpace=\"uniqueID\">{procedure_identifier}</gml:identifier><sml:identification><sml:IdentifierList><sml:identifier><sml:Term definition=\"urn:ogc:def:identifier:OGC:1.0:longName\"><sml:label>longName</sml:label><sml:value>{procedure_name}</sml:value></sml:Term></sml:identifier><sml:identifier><sml:Term definition=\"urn:ogc:def:identifier:OGC:1.0:shortName\"><sml:label>shortName</sml:label><sml:value>{procedure_name}</sml:value></sml:Term></sml:identifier></sml:IdentifierList></sml:identification><sml:capabilities name=\"offerings\"><sml:CapabilityList><sml:capability name=\"offeringID\"><swe:Text definition=\"urn:ogc:def:identifier:OGC:offeringID\"><swe:label>{offering_label}</swe:label><swe:value>{offering_name}</swe:value></swe:Text></sml:capability></sml:CapabilityList></sml:capabilities><sml:capabilities name=\"metadata\"><sml:CapabilityList><!-- status indicates, whether sensor is insitu (true) or remote (false) --><sml:capability name=\"insitu\"><swe:Boolean definition=\"insitu\"><swe:value>true</swe:value></swe:Boolean></sml:capability><!-- status indicates, whether sensor is mobile (true) or fixed/stationary (false) --><sml:capability name=\"mobile\"><swe:Boolean definition=\"mobile\"><swe:value>false</swe:value></swe:Boolean></sml:capability></sml:CapabilityList></sml:capabilities><sml:featuresOfInterest><sml:FeatureList definition=\"http://www.opengis.net/def/featureOfInterest/identifier\"><swe:label>featuresOfInterest</swe:label><sml:feature><sams:SF_SpatialSamplingFeature xmlns:sams=\"http://www.opengis.net/samplingSpatial/2.0\" gml:id=\"ssf_b3a826dd44012201b01323232323041f7a92e0cc47260eb9888f6a4e9f747\"><gml:identifier codeSpace=\"http://www.opengis.net/def/nil/OGC/0/unknown\">{feature_id}</gml:identifier><gml:name codeSpace=\"http://www.opengis.net/def/nil/OGC/0/unknown\">{feature_name}</gml:name><sf:type xmlns:sf=\"http://www.opengis.net/sampling/2.0\" xlink:href=\"http://www.opengis.net/def/samplingFeatureType/OGC-OM/2.0/SF_SamplingPoint\"/><sf:sampledFeature xmlns:sf=\"http://www.opengis.net/sampling/2.0\" xlink:href=\"http://www.opengis.net/def/nil/OGC/0/unknown\"/><sams:shape><ns:Point xmlns:ns=\"http://www.opengis.net/gml/3.2\" ns:id=\"Point_ssf_b3a826dd44012201b013c90c51da28c041f7a92e0cc47260eb9888f6a4e9f747\"><ns:pos srsName=\"http://www.opengis.net/def/crs/EPSG/0/4326\">{coordinates}</ns:pos></ns:Point></sams:shape></sams:SF_SpatialSamplingFeature></sml:feature></sml:FeatureList></sml:featuresOfInterest><sml:outputs><sml:OutputList><sml:output name=\"Slope\"><swe:Quantity definition=\"Slope\"><swe:label>Slope</swe:label><swe:uom code=\"deg\"/></swe:Quantity></sml:output><sml:output name=\"Roll\"><swe:Quantity definition=\"Roll\"><swe:label>Roll</swe:label><swe:uom code=\"deg\"/></swe:Quantity></sml:output><sml:output name=\"InSystemTemperature\"><swe:Quantity definition=\"InSystemTemperature\"><swe:label>InSystemTemperature</swe:label><swe:uom code=\"degC\"/></swe:Quantity></sml:output></sml:OutputList></sml:outputs><sml:position><swe:Vector 
referenceFrame=\"urn:ogc:def:crs:EPSG::4326\"><swe:coordinate name=\"easting\"><swe:Quantity axisID=\"x\"><swe:uom code=\"degree\"/><swe:value>{cord_x}</swe:value></swe:Quantity></swe:coordinate><swe:coordinate name=\"northing\"><swe:Quantity axisID=\"y\"><swe:uom code=\"degree\"/><swe:value>{cord_y}</swe:value></swe:Quantity></swe:coordinate><swe:coordinate name=\"altitude\"><swe:Quantity axisID=\"z\"><swe:uom code=\"m\"/><swe:value>{height}</swe:value></swe:Quantity></swe:coordinate></swe:Vector></sml:position></sml:PhysicalSystem>',
        "observableProperty": [
            "Slope",
            "Roll",
            "InSystemTemperature"
        ],
        "observationType": [
            "http://www.opengis.net/def/observationType/OGC-OM/2.0/OM_Measurement"
        ],
        "featureOfInterestType":
            "http://www.opengis.net/def/samplingFeatureType/OGC-OM/2.0/SF_SamplingPoint"
    }
    return body


if __name__ == '__main__':
    main()
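For a single sensor the script reduces to building the four wrapper objects and posting the generated body to the SOS endpoint. A minimal standalone sketch using the same wrapper signatures and URLs as above (the sensor name and coordinates are simply the first entry of the list):

offering = Offering(
    "https://geomon.geologie.ac.at/52n-sos-webapp/api/offerings/",
    "ampflwang_kb1_0",
    "Bohrloch, Ampflwang Inklinometer")
procedure = Procedure("ampflwang_kb1_0", "ampflwang_kb1_0")
foi = FoI("degree", "m", (13.5583703, 48.0889892, 0.0),
          "GSA02A-010-1210", "Ampflwang KB1")
body = insert_sensor(offering, procedure, foi, SensorType("inclinometer"))
response = requests.post('https://geomon.geologie.ac.at/52n-sos-webapp/service',
                         headers={'Accept': 'application/json'}, json=body)
print(response.status_code, response.text)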
@@ -127,7 +127,7 @@ def main():

        foi = FoI("degree", "m", (sensor.x_coord, sensor.y_coord, 0.0),
                  "GSA02B-007-0911", "Massenbewegung Laakirchen")

        # now insert sensor via rest service:
        sensor_type=SensorType("inclinometer")
        post_data=insert_sensor(offering, procedure, foi, sensor_type)
@@ -64,47 +64,47 @@ def main():
    # creating list
    sensor_list: List[Sensor] = []

    # appending instances to list
    # appending instances to list 48.1064354,13.6731638
    sensor_list.append(
        Sensor('wolfsegg_kb1_0', 13.808378638676, 47.882871028831))
        Sensor('wolfsegg_kb1_0', 13.6731638, 48.1064354))
    # sensor_list.append(
    #     Sensor('wolfsegg_kb1_1', 13.808378638676, 47.882871028831))
    sensor_list.append(
        Sensor('wolfsegg_kb1_2', 13.808378638676, 47.882871028831))
        Sensor('wolfsegg_kb1_1', 13.6731638, 48.1064354))
    sensor_list.append(
        Sensor('wolfsegg_kb1_3', 13.808378638676, 47.882871028831))
        Sensor('wolfsegg_kb1_2', 13.6731638, 48.1064354))
    sensor_list.append(
        Sensor('wolfsegg_kb1_4', 13.808378638676, 47.882871028831))
        Sensor('wolfsegg_kb1_3', 13.6731638, 48.1064354))
    sensor_list.append(
        Sensor('wolfsegg_kb1_5', 13.808378638676, 47.882871028831))
        Sensor('wolfsegg_kb1_4', 13.6731638, 48.1064354))
    sensor_list.append(
        Sensor('wolfsegg_kb1_6', 13.808378638676, 47.882871028831))
        Sensor('wolfsegg_kb1_5', 13.6731638, 48.1064354))
    sensor_list.append(
        Sensor('wolfsegg_kb1_7', 13.808378638676, 47.882871028831))
        Sensor('wolfsegg_kb1_6', 13.6731638, 48.1064354))
    sensor_list.append(
        Sensor('wolfsegg_kb1_8', 13.808378638676, 47.882871028831))
        Sensor('wolfsegg_kb1_7',13.6731638, 48.1064354))
    sensor_list.append(
        Sensor('wolfsegg_kb1_9', 13.808378638676, 47.882871028831))
        Sensor('wolfsegg_kb1_8', 13.6731638, 48.1064354))
    sensor_list.append(
        Sensor('wolfsegg_kb1_10', 13.808378638676, 47.882871028831))
        Sensor('wolfsegg_kb1_9', 13.6731638, 48.1064354))
    sensor_list.append(
        Sensor('wolfsegg_kb1_11', 13.808378638676, 47.882871028831))
        Sensor('wolfsegg_kb1_10', 13.6731638, 48.1064354))
    sensor_list.append(
        Sensor('wolfsegg_kb1_12', 13.808378638676, 47.882871028831))
        Sensor('wolfsegg_kb1_11', 13.6731638, 48.1064354))
    sensor_list.append(
        Sensor('wolfsegg_kb1_13', 13.808378638676, 47.882871028831))
        Sensor('wolfsegg_kb1_12', 13.6731638, 48.1064354))
    sensor_list.append(
        Sensor('wolfsegg_kb1_14', 13.808378638676, 47.882871028831))
        Sensor('wolfsegg_kb1_13', 13.6731638, 48.1064354))
    sensor_list.append(
        Sensor('wolfsegg_kb1_15', 13.808378638676, 47.882871028831))
        Sensor('wolfsegg_kb1_14', 13.6731638, 48.1064354))
    sensor_list.append(
        Sensor('wolfsegg_kb1_16', 13.808378638676, 47.882871028831))
        Sensor('wolfsegg_kb1_15', 13.6731638, 48.1064354))
    sensor_list.append(
        Sensor('wolfsegg_kb1_17', 13.808378638676, 47.882871028831))
        Sensor('wolfsegg_kb1_16', 13.6731638, 48.1064354))
    sensor_list.append(
        Sensor('wolfsegg_kb1_18', 13.808378638676, 47.882871028831))
        Sensor('wolfsegg_kb1_17', 13.6731638, 48.1064354))
    sensor_list.append(
        Sensor('wolfsegg_kb1_19', 13.808378638676, 47.882871028831))
        Sensor('wolfsegg_kb1_18', 13.6731638, 48.1064354))
    sensor_list.append(
        Sensor('wolfsegg_kb1_19', 13.6731638, 48.1064354))

    sensor: Sensor
    for sensor in sensor_list:
@@ -87,153 +87,153 @@ def main():
    sensor_list.append(
        Sensor('inclino1_01', 13.816940062459931, 47.883893347112163,
               "bohrloch1-glasfaser-gschliefgraben",
               "Glasfaser Untersuchungen am Gschliefgraben (Gmunden)"))
               "Glasfaser Untersuchungen am Gschliefgraben (Bohrloch1)"))
    sensor_list.append(
        Sensor('inclino1_02', 13.816940062459931, 47.883893347112163,
               "bohrloch1-glasfaser-gschliefgraben",
               "Glasfaser Untersuchungen am Gschliefgraben (Gmunden)"))
               "Glasfaser Untersuchungen am Gschliefgraben (Bohrloch1)"))
    sensor_list.append(
        Sensor('inclino1_03',13.816940062459931, 47.883893347112163,
               "bohrloch1-glasfaser-gschliefgraben",
               "Glasfaser Untersuchungen am Gschliefgraben (Gmunden)"))
               "Glasfaser Untersuchungen am Gschliefgraben (Bohrloch1)"))
    sensor_list.append(
        Sensor('inclino1_04', 13.816940062459931, 47.883893347112163,
               "bohrloch1-glasfaser-gschliefgraben",
               "Glasfaser Untersuchungen am Gschliefgraben (Gmunden)"))
               "Glasfaser Untersuchungen am Gschliefgraben (Bohrloch1)"))
    sensor_list.append(
        Sensor('inclino1_05', 13.816940062459931, 47.883893347112163,
               "bohrloch1-glasfaser-gschliefgraben",
               "Glasfaser Untersuchungen am Gschliefgraben (Gmunden)"))
               "Glasfaser Untersuchungen am Gschliefgraben (Bohrloch1)"))
    sensor_list.append(
        Sensor('inclino1_06', 13.816940062459931, 47.883893347112163,
               "bohrloch1-glasfaser-gschliefgraben",
               "Glasfaser Untersuchungen am Gschliefgraben (Gmunden)"))
               "Glasfaser Untersuchungen am Gschliefgraben (Bohrloch1)"))
    sensor_list.append(
        Sensor('inclino1_07', 13.816940062459931, 47.883893347112163,
               "bohrloch1-glasfaser-gschliefgraben",
               "Glasfaser Untersuchungen am Gschliefgraben (Gmunden)"))
               "Glasfaser Untersuchungen am Gschliefgraben (Bohrloch1)"))
    sensor_list.append(
        Sensor('inclino1_08', 13.816940062459931, 47.883893347112163,
               "bohrloch1-glasfaser-gschliefgraben",
               "Glasfaser Untersuchungen am Gschliefgraben (Gmunden)"))
               "Glasfaser Untersuchungen am Gschliefgraben (Bohrloch1)"))
    sensor_list.append(
        Sensor('inclino1_09', 13.816940062459931, 47.883893347112163,
               "bohrloch1-glasfaser-gschliefgraben",
               "Glasfaser Untersuchungen am Gschliefgraben (Gmunden)"))
               "Glasfaser Untersuchungen am Gschliefgraben (Bohrloch1)"))
    sensor_list.append(
        Sensor('inclino1_10', 13.816940062459931, 47.883893347112163,
               "bohrloch1-glasfaser-gschliefgraben",
               "Glasfaser Untersuchungen am Gschliefgraben (Gmunden)"))
               "Glasfaser Untersuchungen am Gschliefgraben (Bohrloch1)"))
    sensor_list.append(
        Sensor('inclino1_11', 13.816940062459931, 47.883893347112163,
               "bohrloch1-glasfaser-gschliefgraben",
               "Glasfaser Untersuchungen am Gschliefgraben (Gmunden)"))
               "Glasfaser Untersuchungen am Gschliefgraben (Bohrloch1)"))
    sensor_list.append(
        Sensor('inclino1_12', 13.816940062459931, 47.883893347112163,
               "bohrloch1-glasfaser-gschliefgraben",
               "Glasfaser Untersuchungen am Gschliefgraben (Gmunden)"))
               "Glasfaser Untersuchungen am Gschliefgraben (Bohrloch1)"))
    sensor_list.append(
        Sensor('inclino1_13', 13.816940062459931, 47.883893347112163,
               "bohrloch1-glasfaser-gschliefgraben",
               "Glasfaser Untersuchungen am Gschliefgraben (Gmunden)"))
               "Glasfaser Untersuchungen am Gschliefgraben (Bohrloch1)"))
    sensor_list.append(
        Sensor('inclino1_14', 13.816940062459931, 47.883893347112163,
               "bohrloch1-glasfaser-gschliefgraben",
               "Glasfaser Untersuchungen am Gschliefgraben (Gmunden)"))
               "Glasfaser Untersuchungen am Gschliefgraben (Bohrloch1)"))
    sensor_list.append(
        Sensor('inclino1_15', 13.816940062459931, 47.883893347112163,
               "bohrloch1-glasfaser-gschliefgraben",
               "Glasfaser Untersuchungen am Gschliefgraben (Gmunden)"))
               "Glasfaser Untersuchungen am Gschliefgraben (Bohrloch1)"))
    sensor_list.append(
        Sensor('inclino1_16', 13.816940062459931, 47.883893347112163,
               "bohrloch1-glasfaser-gschliefgraben",
               "Glasfaser Untersuchungen am Gschliefgraben (Gmunden)"))
               "Glasfaser Untersuchungen am Gschliefgraben (Bohrloch1)"))
    sensor_list.append(
        Sensor('inclino1_17', 13.816940062459931, 47.883893347112163,
               "bohrloch1-glasfaser-gschliefgraben",
               "Glasfaser Untersuchungen am Gschliefgraben (Gmunden)"))
               "Glasfaser Untersuchungen am Gschliefgraben (Bohrloch1)"))
    sensor_list.append(
        Sensor('inclino1_18', 13.816940062459931, 47.883893347112163,
               "bohrloch1-glasfaser-gschliefgraben",
               "Glasfaser Untersuchungen am Gschliefgraben (Gmunden)"))
               "Glasfaser Untersuchungen am Gschliefgraben (Bohrloch1)"))
    sensor_list.append(
        Sensor('inclino1_19', 13.816940062459931, 47.883893347112163,
               "bohrloch1-glasfaser-gschliefgraben",
               "Glasfaser Untersuchungen am Gschliefgraben (Gmunden)"))
               "Glasfaser Untersuchungen am Gschliefgraben (Bohrloch1)"))
    sensor_list.append(
        Sensor('inclino1_20', 13.816940062459931, 47.883893347112163,
               "bohrloch1-glasfaser-gschliefgraben",
               "Glasfaser Untersuchungen am Gschliefgraben (Gmunden)"))
               "Glasfaser Untersuchungen am Gschliefgraben (Bohrloch1)"))

    ## inclino2_04 bis inclino2_22
    sensor_list.append(
        Sensor('inclino2_04', 13.817740197926463, 47.883901327648893,
               "bohrloch2-glasfaser-gschliefgraben",
               "Glasfaser Untersuchungen am Gschliefgraben (Gmunden)"))
               "Glasfaser Untersuchungen am Gschliefgraben (Bohrloch2)"))
    sensor_list.append(
        Sensor('inclino2_05', 13.817740197926463, 47.883901327648893,
               "bohrloch2-glasfaser-gschliefgraben",
               "Glasfaser Untersuchungen am Gschliefgraben (Gmunden)"))
               "Glasfaser Untersuchungen am Gschliefgraben (Bohrloch2)"))
    sensor_list.append(
        Sensor('inclino2_06', 13.817740197926463, 47.883901327648893,
               "bohrloch2-glasfaser-gschliefgraben",
               "Glasfaser Untersuchungen am Gschliefgraben (Gmunden)"))
               "Glasfaser Untersuchungen am Gschliefgraben (Bohrloch2)"))
    sensor_list.append(
        Sensor('inclino2_07', 13.817740197926463, 47.883901327648893,
               "bohrloch2-glasfaser-gschliefgraben",
               "Glasfaser Untersuchungen am Gschliefgraben (Gmunden)"))
               "Glasfaser Untersuchungen am Gschliefgraben (Bohrloch2)"))
    sensor_list.append(
        Sensor('inclino2_08', 13.817740197926463, 47.883901327648893,
               "bohrloch2-glasfaser-gschliefgraben",
               "Glasfaser Untersuchungen am Gschliefgraben (Gmunden)"))
               "Glasfaser Untersuchungen am Gschliefgraben (Bohrloch2)"))
    sensor_list.append(
        Sensor('inclino2_09', 13.817740197926463, 47.883901327648893,
               "bohrloch2-glasfaser-gschliefgraben",
               "Glasfaser Untersuchungen am Gschliefgraben (Gmunden)"))
               "Glasfaser Untersuchungen am Gschliefgraben (Bohrloch2)"))
    sensor_list.append(
        Sensor('inclino2_10', 13.817740197926463, 47.883901327648893,
               "bohrloch2-glasfaser-gschliefgraben",
               "Glasfaser Untersuchungen am Gschliefgraben (Gmunden)"))
               "Glasfaser Untersuchungen am Gschliefgraben (Bohrloch2)"))
    sensor_list.append(
        Sensor('inclino2_11', 13.817740197926463, 47.883901327648893,
               "bohrloch2-glasfaser-gschliefgraben",
               "Glasfaser Untersuchungen am Gschliefgraben (Gmunden)"))
               "Glasfaser Untersuchungen am Gschliefgraben (Bohrloch2)"))
    sensor_list.append(
        Sensor('inclino2_12', 13.817740197926463, 47.883901327648893,
               "bohrloch2-glasfaser-gschliefgraben",
               "Glasfaser Untersuchungen am Gschliefgraben (Gmunden)"))
               "Glasfaser Untersuchungen am Gschliefgraben (Bohrloch2)"))
    sensor_list.append(
        Sensor('inclino2_13', 13.817740197926463, 47.883901327648893,
               "bohrloch2-glasfaser-gschliefgraben",
               "Glasfaser Untersuchungen am Gschliefgraben (Gmunden)"))
               "Glasfaser Untersuchungen am Gschliefgraben (Bohrloch2)"))
    sensor_list.append(
        Sensor('inclino2_14', 13.817740197926463, 47.883901327648893,
               "bohrloch2-glasfaser-gschliefgraben",
               "Glasfaser Untersuchungen am Gschliefgraben (Gmunden)"))
               "Glasfaser Untersuchungen am Gschliefgraben (Bohrloch2)"))
    sensor_list.append(
        Sensor('inclino2_15', 13.817740197926463, 47.883901327648893,
               "bohrloch2-glasfaser-gschliefgraben",
               "Glasfaser Untersuchungen am Gschliefgraben (Gmunden)"))
               "Glasfaser Untersuchungen am Gschliefgraben (Bohrloch2)"))
    sensor_list.append(
        Sensor('inclino2_16',13.817740197926463, 47.883901327648893,
               "bohrloch1-glasfaser-gschliefgraben",
               "Glasfaser Untersuchungen am Gschliefgraben (Gmunden)"))
               "Glasfaser Untersuchungen am Gschliefgraben (Bohrloch2)"))
    sensor_list.append(
        Sensor('inclino2_17', 13.817740197926463, 47.883901327648893,
               "bohrloch2-glasfaser-gschliefgraben",
               "Glasfaser Untersuchungen am Gschliefgraben (Gmunden)"))
               "Glasfaser Untersuchungen am Gschliefgraben (Bohrloch2)"))
    sensor_list.append(
        Sensor('inclino2_18',13.817740197926463, 47.883901327648893,
               "bohrloch2-glasfaser-gschliefgraben",
               "Glasfaser Untersuchungen am Gschliefgraben (Gmunden)"))
               "Glasfaser Untersuchungen am Gschliefgraben (Bohrloch2)"))
    sensor_list.append(
        Sensor('inclino2_19', 13.817740197926463, 47.883901327648893,
               "bohrloch2-glasfaser-gschliefgraben",
               "Glasfaser Untersuchungen am Gschliefgraben (Gmunden)"))
               "Glasfaser Untersuchungen am Gschliefgraben (Bohrloch2)"))
    sensor_list.append(
        Sensor('inclino2_20', 13.817740197926463, 47.883901327648893,
               "bohrloch2-glasfaser-gschliefgraben",
               "Glasfaser Untersuchungen am Gschliefgraben (Gmunden)"))
               "Glasfaser Untersuchungen am Gschliefgraben (Bohrloch2)"))

    sensor: Sensor
    for sensor in sensor_list:
@@ -25,7 +25,7 @@ from db.models import (
def main():
    ''' main method '''
    pg_session: session = create_pg_session()
    platform_sta_identifier = "pechgraben_piezometer"
    platform_sta_identifier = "gschliefgraben_piezometer"
    # sensor = "bohrloch1"
    # sensor_list = os.environ.get('PIEZOMETER_GSCHLIEFGRABEN_SENSORS', [])
    sensor_list = json.loads(os.environ['PIEZOMETER_GSCHLIEFGRABEN_SENSORS'])
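The piezometer import reads its sensor list from the PIEZOMETER_GSCHLIEFGRABEN_SENSORS environment variable; presumably that variable holds a JSON array of sensor names. A small illustration (the value shown is made up):

import json
import os

os.environ['PIEZOMETER_GSCHLIEFGRABEN_SENSORS'] = '["bohrloch1", "bohrloch2"]'  # illustrative value only
sensor_list = json.loads(os.environ['PIEZOMETER_GSCHLIEFGRABEN_SENSORS'])
print(sensor_list)  # ['bohrloch1', 'bohrloch2']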
1
voegelsberg/import_feature_sensor.py
Normal file

@@ -0,0 +1 @@
# https://lists.ogc.org/pipermail/sensorml/2008-September/000573.html