diff --git a/automatic_inclinometer/firebird_queries.sql b/automatic_inclinometer/firebird_queries.sql
index 86f1f2c..c105c9f 100644
--- a/automatic_inclinometer/firebird_queries.sql
+++ b/automatic_inclinometer/firebird_queries.sql
@@ -68,7 +68,7 @@ CHIAVE NOME NUMERO_SENSORI SITO x y
5 GSA002-017-0510 17 Rosano 43.7685394,11.4232703
6 GSA02A-010-1210 10 Ampflwang - KB1 48.0889892,13.5583703
7 GSA02B-007-1210 17 Ampflwang - KB2 48.088,13.5583
-8 GSA02B-007-0911 19 Laakirchen 47.9789118,13.8141457
+8 GSA02B-007-0911 19 Laakirchen 47.9789118,13.8141457 erledigt
9 Copy of GSA002-017-0510 17 Rosano 43.7685394,11.4232703
10 GSA02B-007-0613 17 Pechgraben Haus 47.9193704,14.5242307
11 Copy of GSA02B-007-0911 19 Laakirchen 47.9789118,13.8141457
@@ -77,7 +77,7 @@ CHIAVE NOME NUMERO_SENSORI SITO x y
14 TAC003-020-1213 20 Pechgraben KB1 47.9193704,14.5242307
15 GSA02A-010-1213 10 Pechgraben KB2 47.9193704,14.5242307
16 TAC003-020-0414 20 Pechgraben KB1 47.9193704,14.5242307
-17 TAC003-020-0517 20 Wolfsegg KB1 48.1064354,13.6731638
+17 TAC003-020-0517 20 Wolfsegg KB1 48.1064354,13.6731638 erledigt
18 GSA02A-010-0517 10 Wolfsegg KB3 48.1064354,13.6731638
19 TAC005-013-0517 14 Wolfsegg KB2 48.1064354,13.6731638
20 GSA003-020-0517 34 Wolfsegg KB5 48.1064354,13.6731638
diff --git a/automatic_inclinometer/import_observations_ampfwang_kb1.py b/automatic_inclinometer/import_observations_ampfwang_kb1.py
new file mode 100644
index 0000000..da24603
--- /dev/null
+++ b/automatic_inclinometer/import_observations_ampfwang_kb1.py
@@ -0,0 +1,314 @@
+""" import firebird, export to postgresql """
+#!/usr/bin/python# -*- coding: utf-8 -*-
+
+import os
+import time
+from typing import List
+from itertools import chain
+import uuid
+import json
+from dotenv import load_dotenv, find_dotenv
+from sqlalchemy.orm import session
+from sqlalchemy import asc, desc
+# from sqlalchemy.dialects import firebird
+from sqlalchemy.sql import or_
+from db.fb_models import (create_session, FbObservation, Catena)
+from db.models import (create_pg_session, Dataset,
+ Observation, Procedure, Phenomenon, Platform, Format)
+
+
+def main():
+ """
+ Main function.
+ """
+
+ #sensor_id = 0
+ # name of project area in firebird db
+ feature_of_interest = 'GSA02A-010-1210' # Ampflwang KB1
+ # sensor name in postgis db
+ # sensor = 'wolfsegg_kb1_0'
+ platform = 'ampflwang_kb1_inclinometer'
+
+ sensor_env_list = os.getenv('AMPFLWANG_KB1_SENSORS').replace('\n', '')
+ sensor_list = json.loads(sensor_env_list)
+ # print(sensor_list)
+ firebird_session: session = create_session()
+ # this will print elements along with their index value
+ for sensor_id, sensor in enumerate(sensor_list):
+
+ # db_observation = session.query(Observation) \
+ # .filter_by(name='John Snow').first()
+ query_count = firebird_session.query(FbObservation).join(FbObservation.catena) \
+ .filter(FbObservation.sensore == sensor_id) \
+ .filter(Catena.name == feature_of_interest) \
+ .filter(
+ or_(
+ FbObservation.temperature != None,
+ FbObservation.pitch != None # this is used to check NULL values
+ )) \
+ .count()
+ # if query_count == 0:
+ # print(f"sensor {sensor} "
+ # f"doesn't have any observations with measured values in firebird database!")
+ # # hop to next for iteration, next sensor in list
+ # continue
+ # test = query_count.statement.compile(dialect=firebird.dialect())
+
+ firebird_observations: List[FbObservation] = []
+ if query_count > 0:
+ query = firebird_session.query(FbObservation).join(FbObservation.catena) \
+ .filter(FbObservation.sensore == sensor_id) \
+ .filter(Catena.name == feature_of_interest)
+ # print (query.statement.compile(dialect=firebird.dialect()))
+ firebird_observations: List[FbObservation] = query.all()
+ firebird_session.close()
+
+ pg_session: session = create_pg_session()
+ # pg_datasets: List[Dataset] = pg_query.all()
+ pg_query = pg_session.query(Dataset) \
+ .join(Procedure) \
+ .join(Phenomenon) \
+ .filter(Procedure.sta_identifier == sensor.lower())
+ # .join(Platform).all() \
+
+ roll_dataset: Dataset = pg_query.filter(
+ Phenomenon.sta_identifier == "Roll").first()
+
+ slope_dataset: Dataset = pg_query.filter(
+ Phenomenon.sta_identifier == "Slope").first()
+
+ temperature_dataset: Dataset = pg_query.filter(
+ Phenomenon.sta_identifier == "InSystemTemperature").first()
+
+ platform_exists = pg_session.query(Platform.id).filter_by(
+ name=platform.lower()).scalar() is not None
+ if not platform_exists:
+ sensor_platform = Platform()
+ sensor_platform.sta_identifier = platform.lower()
+ sensor_platform.identifier = platform.lower()
+ sensor_platform.name = platform.lower()
+ slope_dataset.platform = sensor_platform
+ roll_dataset.platform = sensor_platform
+ temperature_dataset.platform = sensor_platform
+ else:
+ sensor_platform = pg_session.query(Platform.id) \
+ .filter(Platform.name == platform.lower()) \
+ .first()
+ slope_dataset.fk_platform_id = sensor_platform.id
+ roll_dataset.fk_platform_id = sensor_platform.id
+ temperature_dataset.fk_platform_id = sensor_platform.id
+
+ # commit dataset changes:
+ pg_session.commit()
+
+ format_exists: bool = pg_session.query(Format.id).filter_by(
+ definition="http://www.opengis.net/def/observationType/OGC-OM/2.0/OM_Measurement"
+ ).scalar() is not None
+ if format_exists:
+ sensor_format = pg_session.query(Format.id) \
+ .filter(Format.definition ==
+ "http://www.opengis.net/def/observationType/OGC-OM/2.0/OM_Measurement") \
+ .first()
+ slope_dataset.fk_format_id = sensor_format.id
+ roll_dataset.fk_format_id = sensor_format.id
+ temperature_dataset.fk_format_id = sensor_format.id
+ pg_session.commit()
+
+ if query_count == 0:
+ print(f"sensor {sensor} "
+ f"doesn't have any observations with measured values in firebird database!")
+ # hop to next for iteration, next sensor in list, don't insert any observations
+ continue
+
+ create_db_observations(firebird_observations, roll_dataset,
+ slope_dataset, temperature_dataset, pg_session)
+
+ # commit new observations:
+ pg_session.commit()
+
+ if len(roll_dataset.observations) > 0:
+ # if not published yet, publish the roll dataset
+ if not roll_dataset.is_published:
+ roll_dataset.is_published = 1
+ roll_dataset.is_hidden = 0
+ roll_dataset.dataset_type = "timeseries"
+ roll_dataset.observation_type = "simple"
+ roll_dataset.value_type = "quantity"
+
+ if len(slope_dataset.observations) > 0:
+            # if not published yet, publish the slope dataset
+ if not slope_dataset.is_published:
+ slope_dataset.is_published = 1
+ slope_dataset.is_hidden = 0
+ slope_dataset.dataset_type = "timeseries"
+ slope_dataset.observation_type = "simple"
+ slope_dataset.value_type = "quantity"
+
+ if len(temperature_dataset.observations) > 0:
+ # if not published yet, publish the temperature dataset
+ if not temperature_dataset.is_published:
+ temperature_dataset.is_published = 1
+ temperature_dataset.is_hidden = 0
+ temperature_dataset.dataset_type = "timeseries"
+ temperature_dataset.observation_type = "simple"
+ temperature_dataset.value_type = "quantity"
+ pg_session.commit()
+
+ last_roll_observation = pg_session.query(Observation) \
+ .filter(Observation.fk_dataset_id == roll_dataset.id) \
+ .order_by(desc('sampling_time_start')) \
+ .first()
+ if last_roll_observation is not None:
+ roll_dataset.last_time = last_roll_observation.sampling_time_start
+ roll_dataset.last_value = last_roll_observation.value_quantity
+ roll_dataset.fk_last_observation_id = last_roll_observation.id
+
+ last_slope_observation = pg_session.query(Observation) \
+ .filter(Observation.fk_dataset_id == slope_dataset.id) \
+ .order_by(desc('sampling_time_start')) \
+ .first()
+ if last_slope_observation is not None:
+ slope_dataset.last_time = last_slope_observation.sampling_time_start
+ slope_dataset.last_value = last_slope_observation.value_quantity
+ slope_dataset.fk_last_observation_id = last_slope_observation.id
+
+ last_temperature_observation = pg_session.query(Observation) \
+ .filter(Observation.fk_dataset_id == temperature_dataset.id) \
+ .order_by(desc('sampling_time_start')) \
+ .first()
+ if last_temperature_observation is not None:
+ temperature_dataset.last_time = last_temperature_observation.sampling_time_start
+ temperature_dataset.last_value = last_temperature_observation.value_quantity
+ temperature_dataset.fk_last_observation_id = last_temperature_observation.id
+
+ first_roll_observation = pg_session.query(Observation) \
+ .filter(Observation.fk_dataset_id == roll_dataset.id) \
+ .order_by(asc('sampling_time_start')) \
+ .first()
+ if first_roll_observation is not None:
+ roll_dataset.first_time = first_roll_observation.sampling_time_start
+ roll_dataset.first_value = first_roll_observation.value_quantity
+ roll_dataset.fk_first_observation_id = first_roll_observation.id
+
+ first_slope_observation = pg_session.query(Observation) \
+ .filter(Observation.fk_dataset_id == slope_dataset.id) \
+ .order_by(asc('sampling_time_start')) \
+ .first()
+ if first_slope_observation is not None:
+ slope_dataset.first_time = first_slope_observation.sampling_time_start
+ slope_dataset.first_value = first_slope_observation.value_quantity
+ slope_dataset.fk_first_observation_id = first_slope_observation.id
+
+ first_temperature_observation = pg_session.query(Observation) \
+ .filter(Observation.fk_dataset_id == temperature_dataset.id) \
+ .order_by(asc('sampling_time_start')) \
+ .first()
+ if first_temperature_observation is not None:
+ temperature_dataset.first_time = first_temperature_observation.sampling_time_start
+ temperature_dataset.first_value = first_temperature_observation.value_quantity
+ temperature_dataset.fk_first_observation_id = first_temperature_observation.id
+ pg_session.commit()
+
+ # for loop sensors end
+ pg_session.close()
+ # firebird_session.close()
+
+
+def create_db_observations(firebird_observations: List[FbObservation],
+ roll_dataset: Dataset,
+ slope_dataset: Dataset,
+ temperature_dataset: Dataset,
+ pg_session: session):
+    ''' insert new observations into db '''
+ roll_result = (
+ pg_session.query(Observation.result_time)
+ .filter(Observation.fk_dataset_id == roll_dataset.id)
+ .all()
+ )
+ roll_result_time_db_list1: List[str] = list(chain(*roll_result))
+ roll_result_time_db_list: List[float] = [time.mktime(
+ date_obj.timetuple()) for date_obj in roll_result_time_db_list1]
+
+ slope_result = (
+ pg_session.query(Observation.result_time)
+ .filter(Observation.fk_dataset_id == slope_dataset.id)
+ .all()
+ )
+ slope_result_time_db_list1: List[str] = list(chain(*slope_result))
+ slope_result_time_db_list: List[float] = [time.mktime(
+ date_obj.timetuple()) for date_obj in slope_result_time_db_list1]
+
+ temperature_result = (
+ pg_session.query(Observation.result_time)
+ .filter(Observation.fk_dataset_id == temperature_dataset.id)
+ .all()
+ )
+ temperature_result_time_db_list1: List[str] = list(
+ chain(*temperature_result))
+ temperature_result_time_db_list: List[float] = [time.mktime(
+ date_obj.timetuple()) for date_obj in temperature_result_time_db_list1]
+
+ for fb_observation in firebird_observations:
+ # print(fb_observation.catena.name)
+ if(fb_observation.roll is not None and roll_dataset is not None):
+ value = fb_observation.roll
+ add_observation(roll_dataset, fb_observation,
+ value, roll_result_time_db_list)
+
+ if(fb_observation.pitch is not None and slope_dataset is not None):
+ # max_id = max_id + 1
+ value = fb_observation.pitch
+ add_observation(slope_dataset, fb_observation,
+ value, slope_result_time_db_list)
+
+ if(fb_observation.temperature is not None and temperature_dataset is not None):
+ # max_id = max_id + 1
+ value = fb_observation.temperature
+ add_observation(temperature_dataset, fb_observation,
+ value, temperature_result_time_db_list)
+
+
+def add_observation(
+ dataset: Dataset,
+ fb_observation: FbObservation,
+ value: str,
+ value_identifier_db_list: List[float]):
+    ''' check if observation still exists in db,
+ otherwise add it to fb'''
+ # ob_id: str = str(observation_json.get('id'))
+
+ # existing_observation: bool = (
+ # db_session.query(Observation)
+ # .filter(Observation.result_time == fb_observation.result_time,
+ # Observation.fk_dataset_id == dataset.id)
+ # .one_or_none()
+ # )
+ existing_observation: bool = time.mktime(
+ fb_observation.result_time.timetuple()) in value_identifier_db_list
+ # Can we insert this observation?
+ if existing_observation is False:
+        # insert new observation
+ new_observation: Observation = Observation()
+ new_observation = Observation(
+ # id=max_id,
+ value_type='quantity',
+ sampling_time_start=fb_observation.result_time,
+ sampling_time_end=fb_observation.result_time,
+ result_time=fb_observation.result_time,
+ sta_identifier=str(uuid.uuid4()),
+ value_identifier=str(time.mktime(
+ fb_observation.result_time.timetuple())),
+ value_quantity=value
+ )
+ dataset.observations.append(new_observation)
+ print(f"new observation with result time {new_observation.result_time} "
+ f"for inclinometer {dataset.procedure.name} succesfully imported!")
+ else:
+ print(f"observation with result time {fb_observation.result_time} "
+ f"for inclinometer {dataset.procedure.name} already exists!")
+
+
+# -----------------------------------------------------------------------------
+if __name__ == "__main__":
+ load_dotenv(find_dotenv())
+ main()
diff --git a/automatic_inclinometer/import_observations_laakirchen.py b/automatic_inclinometer/import_observations_laakirchen.py
index 52588e8..e65b971 100644
--- a/automatic_inclinometer/import_observations_laakirchen.py
+++ b/automatic_inclinometer/import_observations_laakirchen.py
@@ -37,14 +37,14 @@ def main():
# sensor name in postgis db
# sensor = 'wolfsegg_kb1_0'
platform = 'laakirchen_inclinometer'
-
- sensor_env_list = os.getenv('LAAKIRCHEN_SENSORS').replace('\n', '')
+
+ sensor_env_list = os.getenv('LAAKIRCHEN_SENSORS').replace('\n', '')
sensor_list = json.loads(sensor_env_list)
# print(sensor_list)
- firebird_session: session = create_session()
+ firebird_session: session = create_session()
# this will print elements along with their index value
for sensor_id, sensor in enumerate(sensor_list):
-
+
# db_observation = session.query(Observation) \
# .filter_by(name='John Snow').first()
query_count = firebird_session.query(FbObservation).join(FbObservation.catena) \
@@ -53,22 +53,24 @@ def main():
.filter(
or_(
FbObservation.temperature != None,
- FbObservation.pitch != None #this is used to check NULL values
+ FbObservation.pitch != None # this is used to check NULL values
)) \
.count()
- if query_count == 0:
- print(f"sensor {sensor} "
- f"doesn't have any observations with measured values in firebird database!")
- # hop to next for iteration, next sensor in list
- continue
- # feature_of_interest = query.statement.compile(dialect=firebird.dialect())
-
- query = firebird_session.query(FbObservation).join(FbObservation.catena) \
- .filter(FbObservation.sensore == sensor_id) \
- .filter(Catena.name == feature_of_interest)
- # print (query.statement.compile(dialect=firebird.dialect()))
- firebird_observations: List[FbObservation] = query.all()
- # firebird_session.close()
+ # if query_count == 0:
+ # print(f"sensor {sensor} "
+ # f"doesn't have any observations with measured values in firebird database!")
+ # # hop to next for iteration, next sensor in list
+ # continue
+ # test = query_count.statement.compile(dialect=firebird.dialect())
+
+ firebird_observations: List[FbObservation] = []
+ if query_count > 0:
+ query = firebird_session.query(FbObservation).join(FbObservation.catena) \
+ .filter(FbObservation.sensore == sensor_id) \
+ .filter(Catena.name == feature_of_interest)
+ # print (query.statement.compile(dialect=firebird.dialect()))
+ firebird_observations: List[FbObservation] = query.all()
+ firebird_session.close()
pg_session: session = create_pg_session()
# pg_datasets: List[Dataset] = pg_query.all()
@@ -77,20 +79,20 @@ def main():
.join(Phenomenon) \
.filter(Procedure.sta_identifier == sensor.lower())
# .join(Platform).all() \
-
+
roll_dataset: Dataset = pg_query.filter(
- Phenomenon.sta_identifier == "Roll").first()
+ Phenomenon.sta_identifier == "Roll").first()
slope_dataset: Dataset = pg_query.filter(
Phenomenon.sta_identifier == "Slope").first()
-
+
temperature_dataset: Dataset = pg_query.filter(
Phenomenon.sta_identifier == "InSystemTemperature").first()
-
+
platform_exists = pg_session.query(Platform.id).filter_by(
name=platform.lower()).scalar() is not None
if not platform_exists:
- sensor_platform = Platform()
+ sensor_platform = Platform()
sensor_platform.sta_identifier = platform.lower()
sensor_platform.identifier = platform.lower()
sensor_platform.name = platform.lower()
@@ -107,8 +109,7 @@ def main():
# commit dataset changes:
pg_session.commit()
-
-
+
format_exists: bool = pg_session.query(Format.id).filter_by(
definition="http://www.opengis.net/def/observationType/OGC-OM/2.0/OM_Measurement"
).scalar() is not None
@@ -121,12 +122,19 @@ def main():
roll_dataset.fk_format_id = sensor_format.id
temperature_dataset.fk_format_id = sensor_format.id
pg_session.commit()
-
- create_db_observations(firebird_observations, roll_dataset, slope_dataset, temperature_dataset, pg_session)
-
+
+ if query_count == 0:
+ print(f"sensor {sensor} "
+ f"doesn't have any observations with measured values in firebird database!")
+ # hop to next for iteration, next sensor in list, don't insert any observations
+ continue
+
+ create_db_observations(firebird_observations, roll_dataset,
+ slope_dataset, temperature_dataset, pg_session)
+
# commit new observations:
pg_session.commit()
-
+
if len(roll_dataset.observations) > 0:
# if not published yet, publish the roll dataset
if not roll_dataset.is_published:
@@ -135,7 +143,7 @@ def main():
roll_dataset.dataset_type = "timeseries"
roll_dataset.observation_type = "simple"
roll_dataset.value_type = "quantity"
-
+
if len(slope_dataset.observations) > 0:
# if not published yet, publish the roll dataset
if not slope_dataset.is_published:
@@ -144,7 +152,7 @@ def main():
slope_dataset.dataset_type = "timeseries"
slope_dataset.observation_type = "simple"
slope_dataset.value_type = "quantity"
-
+
if len(temperature_dataset.observations) > 0:
# if not published yet, publish the temperature dataset
if not temperature_dataset.is_published:
@@ -154,7 +162,6 @@ def main():
temperature_dataset.observation_type = "simple"
temperature_dataset.value_type = "quantity"
pg_session.commit()
-
last_roll_observation = pg_session.query(Observation) \
.filter(Observation.fk_dataset_id == roll_dataset.id) \
@@ -213,80 +220,95 @@ def main():
# for loop sensors end
pg_session.close()
- firebird_session.close()
+ # firebird_session.close()
+
def create_db_observations(firebird_observations: List[FbObservation],
- roll_dataset: Dataset,
- slope_dataset: Dataset,
- temperature_dataset: Dataset,
- pg_session: session):
+ roll_dataset: Dataset,
+ slope_dataset: Dataset,
+ temperature_dataset: Dataset,
+ pg_session: session):
''' insert new observations ito db '''
roll_result = (
- pg_session.query(Observation.result_time)
- .filter(Observation.fk_dataset_id == roll_dataset.id)
- .all()
- )
+ pg_session.query(Observation.result_time)
+ .filter(Observation.fk_dataset_id == roll_dataset.id)
+ .all()
+ )
roll_result_time_db_list1: List[str] = list(chain(*roll_result))
- roll_result_time_db_list : List[float]= [time.mktime(date_obj.timetuple()) for date_obj in roll_result_time_db_list1]
-
-
+ roll_result_time_db_list: List[float] = [time.mktime(
+ date_obj.timetuple()) for date_obj in roll_result_time_db_list1]
+
slope_result = (
- pg_session.query(Observation.result_time)
- .filter(Observation.fk_dataset_id == slope_dataset.id)
- .all()
- )
+ pg_session.query(Observation.result_time)
+ .filter(Observation.fk_dataset_id == slope_dataset.id)
+ .all()
+ )
slope_result_time_db_list1: List[str] = list(chain(*slope_result))
- slope_result_time_db_list : List[float]= [time.mktime(date_obj.timetuple()) for date_obj in slope_result_time_db_list1]
-
+ slope_result_time_db_list: List[float] = [time.mktime(
+ date_obj.timetuple()) for date_obj in slope_result_time_db_list1]
+
temperature_result = (
- pg_session.query(Observation.result_time)
- .filter(Observation.fk_dataset_id == temperature_dataset.id)
- .all()
- )
- temperature_result_time_db_list1: List[str] = list(chain(*temperature_result))
- temperature_result_time_db_list : List[float]= [time.mktime(date_obj.timetuple()) for date_obj in temperature_result_time_db_list1]
-
+ pg_session.query(Observation.result_time)
+ .filter(Observation.fk_dataset_id == temperature_dataset.id)
+ .all()
+ )
+ temperature_result_time_db_list1: List[str] = list(
+ chain(*temperature_result))
+ temperature_result_time_db_list: List[float] = [time.mktime(
+ date_obj.timetuple()) for date_obj in temperature_result_time_db_list1]
+
for fb_observation in firebird_observations:
# print(fb_observation.catena.name)
if(fb_observation.roll is not None and roll_dataset is not None):
value = fb_observation.roll
- add_observation(roll_dataset, fb_observation, value, roll_result_time_db_list)
-
+ add_observation(roll_dataset, fb_observation,
+ value, roll_result_time_db_list)
+
if(fb_observation.pitch is not None and slope_dataset is not None):
# max_id = max_id + 1
- value = fb_observation.pitch
- add_observation(slope_dataset, fb_observation, value, slope_result_time_db_list)
-
+ value = fb_observation.pitch
+ add_observation(slope_dataset, fb_observation,
+ value, slope_result_time_db_list)
+
if(fb_observation.temperature is not None and temperature_dataset is not None):
# max_id = max_id + 1
value = fb_observation.temperature
- add_observation(temperature_dataset, fb_observation, value, temperature_result_time_db_list)
+ add_observation(temperature_dataset, fb_observation,
+ value, temperature_result_time_db_list)
-def add_observation(dataset: Dataset, fb_observation: FbObservation, value: str, value_identifier_db_list: List[float]):
+
+def add_observation(
+ dataset: Dataset,
+ fb_observation: FbObservation,
+ value: str,
+ value_identifier_db_list: List[float]):
''' check if observation still extists in db,
otherwise add it to fb'''
# ob_id: str = str(observation_json.get('id'))
-
+
# existing_observation: bool = (
# db_session.query(Observation)
- # .filter(Observation.result_time == fb_observation.result_time, Observation.fk_dataset_id == dataset.id)
+ # .filter(Observation.result_time == fb_observation.result_time,
+ # Observation.fk_dataset_id == dataset.id)
# .one_or_none()
# )
- existing_observation: bool =time.mktime(fb_observation.result_time.timetuple()) in value_identifier_db_list
- # Can we insert this observation?
+ existing_observation: bool = time.mktime(
+ fb_observation.result_time.timetuple()) in value_identifier_db_list
+ # Can we insert this observation?
if existing_observation is False:
# insert bew observation
new_observation: Observation = Observation()
new_observation = Observation(
- # id=max_id,
- value_type='quantity',
- sampling_time_start=fb_observation.result_time,
- sampling_time_end=fb_observation.result_time,
- result_time=fb_observation.result_time,
- sta_identifier=str(uuid.uuid4()),
- value_identifier = str(time.mktime(fb_observation.result_time.timetuple())),
- value_quantity=value
- )
+ # id=max_id,
+ value_type='quantity',
+ sampling_time_start=fb_observation.result_time,
+ sampling_time_end=fb_observation.result_time,
+ result_time=fb_observation.result_time,
+ sta_identifier=str(uuid.uuid4()),
+ value_identifier=str(time.mktime(
+ fb_observation.result_time.timetuple())),
+ value_quantity=value
+ )
dataset.observations.append(new_observation)
print(f"new observation with result time {new_observation.result_time} "
f"for inclinometer {dataset.procedure.name} succesfully imported!")
@@ -294,6 +316,7 @@ def add_observation(dataset: Dataset, fb_observation: FbObservation, value: str,
print(f"observation with result time {fb_observation.result_time} "
f"for inclinometer {dataset.procedure.name} already exists!")
+
# -----------------------------------------------------------------------------
if __name__ == "__main__":
load_dotenv(find_dotenv())
diff --git a/automatic_inclinometer/import_observations_wolfsegg_kb1.py b/automatic_inclinometer/import_observations_wolfsegg_kb1.py
index 2c145de..d453d3b 100644
--- a/automatic_inclinometer/import_observations_wolfsegg_kb1.py
+++ b/automatic_inclinometer/import_observations_wolfsegg_kb1.py
@@ -1,197 +1,314 @@
""" import firebird, export to postgresql """
#!/usr/bin/python# -*- coding: utf-8 -*-
+import os
+import time
from typing import List
+from itertools import chain
import uuid
+import json
+from dotenv import load_dotenv, find_dotenv
from sqlalchemy.orm import session
-from sqlalchemy import desc, asc
+from sqlalchemy import asc, desc
+# from sqlalchemy.dialects import firebird
+from sqlalchemy.sql import or_
from db.fb_models import (create_session, FbObservation, Catena)
-from db.models import (create_pg_session, Dataset, Observation, Procedure, Phenomenon, Platform)
+from db.models import (create_pg_session, Dataset,
+ Observation, Procedure, Phenomenon, Platform, Format)
+
def main():
"""
Main function.
"""
- # parameter:
- # sensor id in firebird db:
- # sensor_id = 1
- # # name of project area in firebird db
- # feature_of_interest = 'TAC003-020-0517' # Wolfsegg KB1
- # # sensor name in postgis db
- # sensor = 'wolfsegg_kb1_1'
- # platform = 'wolfsegg'
-
- sensor_id = 0
+ #sensor_id = 0
# name of project area in firebird db
feature_of_interest = 'TAC003-020-0517' # Wolfsegg KB1
# sensor name in postgis db
- sensor = 'wolfsegg_kb1_0'
- platform = 'wolfsegg_inclinometer'
+ # sensor = 'wolfsegg_kb1_0'
+ platform = 'wolfsegg_kb1_inclinometer'
+ sensor_env_list = os.getenv('WOLFSEGG_KB1_SENSORS').replace('\n', '')
+ sensor_list = json.loads(sensor_env_list)
+ # print(sensor_list)
firebird_session: session = create_session()
- # db_observation = session.query(Observation) \
- # .filter_by(name='John Snow').first()
- query = firebird_session.query(FbObservation).join(FbObservation.catena) \
- .filter(FbObservation.sensore == sensor_id) \
- .filter(Catena.name == feature_of_interest)
- # feature_of_interest = query.statement.compile(dialect=firebird.dialect())
- firebird_observations: List[FbObservation] = query.all()
- firebird_session.close()
+ # this will print elements along with their index value
+ for sensor_id, sensor in enumerate(sensor_list):
- pg_session: session = create_pg_session()
- # pg_datasets: List[Dataset] = pg_query.all()
- pg_query = pg_session.query(Dataset) \
- .join(Procedure) \
- .join(Phenomenon) \
- .filter(Procedure.sta_identifier == sensor.lower())
+ # db_observation = session.query(Observation) \
+ # .filter_by(name='John Snow').first()
+ query_count = firebird_session.query(FbObservation).join(FbObservation.catena) \
+ .filter(FbObservation.sensore == sensor_id) \
+ .filter(Catena.name == feature_of_interest) \
+ .filter(
+ or_(
+ FbObservation.temperature != None,
+ FbObservation.pitch != None # this is used to check NULL values
+ )) \
+ .count()
+ # if query_count == 0:
+ # print(f"sensor {sensor} "
+ # f"doesn't have any observations with measured values in firebird database!")
+ # # hop to next for iteration, next sensor in list
+ # continue
+ # test = query_count.statement.compile(dialect=firebird.dialect())
+
+ firebird_observations: List[FbObservation] = []
+ if query_count > 0:
+ query = firebird_session.query(FbObservation).join(FbObservation.catena) \
+ .filter(FbObservation.sensore == sensor_id) \
+ .filter(Catena.name == feature_of_interest)
+ # print (query.statement.compile(dialect=firebird.dialect()))
+ firebird_observations: List[FbObservation] = query.all()
+ firebird_session.close()
+
+ pg_session: session = create_pg_session()
+ # pg_datasets: List[Dataset] = pg_query.all()
+ pg_query = pg_session.query(Dataset) \
+ .join(Procedure) \
+ .join(Phenomenon) \
+ .filter(Procedure.sta_identifier == sensor.lower())
# .join(Platform).all() \
+ roll_dataset: Dataset = pg_query.filter(
+ Phenomenon.sta_identifier == "Roll").first()
- # roll_dataset = [x for x in pg_datasets if x.phenomenon.sta_identifier == "Roll"]
- roll_dataset = pg_query.filter(Phenomenon.sta_identifier == "Roll").first()
- roll_dataset.is_published = 1
- roll_dataset.is_hidden = 0
- roll_dataset.dataset_type = "timeseries"
- roll_dataset.observation_type = "simple"
- roll_dataset.value_type = "quantity"
- slope_dataset = pg_query.filter(
- Phenomenon.sta_identifier == "Slope").first()
- slope_dataset.is_published = 1
- slope_dataset.is_hidden = 0
- slope_dataset.dataset_type = "timeseries"
- slope_dataset.observation_type = "simple"
- slope_dataset.value_type = "quantity"
- temperature_dataset = pg_query.filter(
- Phenomenon.sta_identifier == "InSystemTemperature").first()
- temperature_dataset.is_published = 1
- temperature_dataset.is_hidden = 0
- temperature_dataset.dataset_type = "timeseries"
- temperature_dataset.observation_type = "simple"
- temperature_dataset.value_type = "quantity"
- pg_session.commit()
+ slope_dataset: Dataset = pg_query.filter(
+ Phenomenon.sta_identifier == "Slope").first()
+
+ temperature_dataset: Dataset = pg_query.filter(
+ Phenomenon.sta_identifier == "InSystemTemperature").first()
+
+ platform_exists = pg_session.query(Platform.id).filter_by(
+ name=platform.lower()).scalar() is not None
+ if not platform_exists:
+ sensor_platform = Platform()
+ sensor_platform.sta_identifier = platform.lower()
+ sensor_platform.identifier = platform.lower()
+ sensor_platform.name = platform.lower()
+ slope_dataset.platform = sensor_platform
+ roll_dataset.platform = sensor_platform
+ temperature_dataset.platform = sensor_platform
+ else:
+ sensor_platform = pg_session.query(Platform.id) \
+ .filter(Platform.name == platform.lower()) \
+ .first()
+ slope_dataset.fk_platform_id = sensor_platform.id
+ roll_dataset.fk_platform_id = sensor_platform.id
+ temperature_dataset.fk_platform_id = sensor_platform.id
+
+ # commit dataset changes:
+ pg_session.commit()
+
+ format_exists: bool = pg_session.query(Format.id).filter_by(
+ definition="http://www.opengis.net/def/observationType/OGC-OM/2.0/OM_Measurement"
+ ).scalar() is not None
+ if format_exists:
+ sensor_format = pg_session.query(Format.id) \
+ .filter(Format.definition ==
+ "http://www.opengis.net/def/observationType/OGC-OM/2.0/OM_Measurement") \
+ .first()
+ slope_dataset.fk_format_id = sensor_format.id
+ roll_dataset.fk_format_id = sensor_format.id
+ temperature_dataset.fk_format_id = sensor_format.id
+ pg_session.commit()
+
+ if query_count == 0:
+ print(f"sensor {sensor} "
+ f"doesn't have any observations with measured values in firebird database!")
+ # hop to next for iteration, next sensor in list, don't insert any observations
+ continue
+
+ create_db_observations(firebird_observations, roll_dataset,
+ slope_dataset, temperature_dataset, pg_session)
+
+ # commit new observations:
+ pg_session.commit()
+
+ if len(roll_dataset.observations) > 0:
+ # if not published yet, publish the roll dataset
+ if not roll_dataset.is_published:
+ roll_dataset.is_published = 1
+ roll_dataset.is_hidden = 0
+ roll_dataset.dataset_type = "timeseries"
+ roll_dataset.observation_type = "simple"
+ roll_dataset.value_type = "quantity"
+
+ if len(slope_dataset.observations) > 0:
+            # if not published yet, publish the slope dataset
+ if not slope_dataset.is_published:
+ slope_dataset.is_published = 1
+ slope_dataset.is_hidden = 0
+ slope_dataset.dataset_type = "timeseries"
+ slope_dataset.observation_type = "simple"
+ slope_dataset.value_type = "quantity"
+
+ if len(temperature_dataset.observations) > 0:
+ # if not published yet, publish the temperature dataset
+ if not temperature_dataset.is_published:
+ temperature_dataset.is_published = 1
+ temperature_dataset.is_hidden = 0
+ temperature_dataset.dataset_type = "timeseries"
+ temperature_dataset.observation_type = "simple"
+ temperature_dataset.value_type = "quantity"
+ pg_session.commit()
+
+ last_roll_observation = pg_session.query(Observation) \
+ .filter(Observation.fk_dataset_id == roll_dataset.id) \
+ .order_by(desc('sampling_time_start')) \
+ .first()
+ if last_roll_observation is not None:
+ roll_dataset.last_time = last_roll_observation.sampling_time_start
+ roll_dataset.last_value = last_roll_observation.value_quantity
+ roll_dataset.fk_last_observation_id = last_roll_observation.id
+
+ last_slope_observation = pg_session.query(Observation) \
+ .filter(Observation.fk_dataset_id == slope_dataset.id) \
+ .order_by(desc('sampling_time_start')) \
+ .first()
+ if last_slope_observation is not None:
+ slope_dataset.last_time = last_slope_observation.sampling_time_start
+ slope_dataset.last_value = last_slope_observation.value_quantity
+ slope_dataset.fk_last_observation_id = last_slope_observation.id
+
+ last_temperature_observation = pg_session.query(Observation) \
+ .filter(Observation.fk_dataset_id == temperature_dataset.id) \
+ .order_by(desc('sampling_time_start')) \
+ .first()
+ if last_temperature_observation is not None:
+ temperature_dataset.last_time = last_temperature_observation.sampling_time_start
+ temperature_dataset.last_value = last_temperature_observation.value_quantity
+ temperature_dataset.fk_last_observation_id = last_temperature_observation.id
+
+ first_roll_observation = pg_session.query(Observation) \
+ .filter(Observation.fk_dataset_id == roll_dataset.id) \
+ .order_by(asc('sampling_time_start')) \
+ .first()
+ if first_roll_observation is not None:
+ roll_dataset.first_time = first_roll_observation.sampling_time_start
+ roll_dataset.first_value = first_roll_observation.value_quantity
+ roll_dataset.fk_first_observation_id = first_roll_observation.id
+
+ first_slope_observation = pg_session.query(Observation) \
+ .filter(Observation.fk_dataset_id == slope_dataset.id) \
+ .order_by(asc('sampling_time_start')) \
+ .first()
+ if first_slope_observation is not None:
+ slope_dataset.first_time = first_slope_observation.sampling_time_start
+ slope_dataset.first_value = first_slope_observation.value_quantity
+ slope_dataset.fk_first_observation_id = first_slope_observation.id
+
+ first_temperature_observation = pg_session.query(Observation) \
+ .filter(Observation.fk_dataset_id == temperature_dataset.id) \
+ .order_by(asc('sampling_time_start')) \
+ .first()
+ if first_temperature_observation is not None:
+ temperature_dataset.first_time = first_temperature_observation.sampling_time_start
+ temperature_dataset.first_value = first_temperature_observation.value_quantity
+ temperature_dataset.fk_first_observation_id = first_temperature_observation.id
+ pg_session.commit()
+
+ # for loop sensors end
+ pg_session.close()
+ # firebird_session.close()
+
+
+def create_db_observations(firebird_observations: List[FbObservation],
+ roll_dataset: Dataset,
+ slope_dataset: Dataset,
+ temperature_dataset: Dataset,
+ pg_session: session):
+ ''' insert new observations into db '''
+ roll_result = (
+ pg_session.query(Observation.result_time)
+ .filter(Observation.fk_dataset_id == roll_dataset.id)
+ .all()
+ )
+ roll_result_time_db_list1: List[str] = list(chain(*roll_result))
+ roll_result_time_db_list: List[float] = [time.mktime(
+ date_obj.timetuple()) for date_obj in roll_result_time_db_list1]
+
+ slope_result = (
+ pg_session.query(Observation.result_time)
+ .filter(Observation.fk_dataset_id == slope_dataset.id)
+ .all()
+ )
+ slope_result_time_db_list1: List[str] = list(chain(*slope_result))
+ slope_result_time_db_list: List[float] = [time.mktime(
+ date_obj.timetuple()) for date_obj in slope_result_time_db_list1]
+
+ temperature_result = (
+ pg_session.query(Observation.result_time)
+ .filter(Observation.fk_dataset_id == temperature_dataset.id)
+ .all()
+ )
+ temperature_result_time_db_list1: List[str] = list(
+ chain(*temperature_result))
+ temperature_result_time_db_list: List[float] = [time.mktime(
+ date_obj.timetuple()) for date_obj in temperature_result_time_db_list1]
- # max_id = pg_session.query(func.max(Observation.id)).scalar()
for fb_observation in firebird_observations:
# print(fb_observation.catena.name)
if(fb_observation.roll is not None and roll_dataset is not None):
- # max_id = max_id + 1
- pg_roll_observation = Observation(
- # id=max_id,
- value_type='quantity',
- sampling_time_start=fb_observation.result_time,
- sampling_time_end=fb_observation.result_time,
- result_time=fb_observation.result_time,
- sta_identifier=str(uuid.uuid4()),
- value_quantity=fb_observation.roll
- )
- roll_dataset.observations.append(pg_roll_observation)
+ value = fb_observation.roll
+ add_observation(roll_dataset, fb_observation,
+ value, roll_result_time_db_list)
+
if(fb_observation.pitch is not None and slope_dataset is not None):
# max_id = max_id + 1
- pg_slope_observation = Observation(
- # id=max_id,
- value_type='quantity',
- sampling_time_start=fb_observation.result_time,
- sampling_time_end=fb_observation.result_time,
- result_time=fb_observation.result_time,
- sta_identifier=str(uuid.uuid4()),
- value_quantity=fb_observation.pitch
- )
- slope_dataset.observations.append(pg_slope_observation)
+ value = fb_observation.pitch
+ add_observation(slope_dataset, fb_observation,
+ value, slope_result_time_db_list)
+
if(fb_observation.temperature is not None and temperature_dataset is not None):
# max_id = max_id + 1
- pg_temperature_observation = Observation(
- # id=max_id,
- value_type='quantity',
- sampling_time_start=fb_observation.result_time,
- sampling_time_end=fb_observation.result_time,
- result_time=fb_observation.result_time,
- sta_identifier=str(uuid.uuid4()),
- value_quantity=fb_observation.temperature
- )
- temperature_dataset.observations.append(pg_temperature_observation)
- # commit observations:
- pg_session.commit()
+ value = fb_observation.temperature
+ add_observation(temperature_dataset, fb_observation,
+ value, temperature_result_time_db_list)
- last_roll_observation = pg_session.query(Observation) \
- .filter(Observation.fk_dataset_id == roll_dataset.id) \
- .order_by(desc('sampling_time_start')) \
- .first()
- if last_roll_observation is not None:
- roll_dataset.last_time = last_roll_observation.sampling_time_start
- roll_dataset.last_value = last_roll_observation.value_quantity
- roll_dataset.fk_last_observation_id = last_roll_observation.id
- last_slope_observation = pg_session.query(Observation) \
- .filter(Observation.fk_dataset_id == slope_dataset.id) \
- .order_by(desc('sampling_time_start')) \
- .first()
- if last_slope_observation is not None:
- slope_dataset.last_time = last_slope_observation.sampling_time_start
- slope_dataset.last_value = last_slope_observation.value_quantity
- slope_dataset.fk_last_observation_id = last_slope_observation.id
+def add_observation(
+ dataset: Dataset,
+ fb_observation: FbObservation,
+ value: str,
+ value_identifier_db_list: List[float]):
+ ''' check if observation still exists in db,
+ otherwise add it to the db'''
+ # ob_id: str = str(observation_json.get('id'))
- last_temperature_observation = pg_session.query(Observation) \
- .filter(Observation.fk_dataset_id == temperature_dataset.id) \
- .order_by(desc('sampling_time_start')) \
- .first()
- if last_temperature_observation is not None:
- temperature_dataset.last_time = last_temperature_observation.sampling_time_start
- temperature_dataset.last_value = last_temperature_observation.value_quantity
- temperature_dataset.fk_last_observation_id = last_temperature_observation.id
-
- first_roll_observation = pg_session.query(Observation) \
- .filter(Observation.fk_dataset_id == roll_dataset.id) \
- .order_by(asc('sampling_time_start')) \
- .first()
- if first_roll_observation is not None:
- roll_dataset.first_time = first_roll_observation.sampling_time_start
- roll_dataset.first_value = first_roll_observation.value_quantity
- roll_dataset.fk_first_observation_id = first_roll_observation.id
-
- first_slope_observation = pg_session.query(Observation) \
- .filter(Observation.fk_dataset_id == slope_dataset.id) \
- .order_by(asc('sampling_time_start')) \
- .first()
- if first_slope_observation is not None:
- slope_dataset.first_time = first_slope_observation.sampling_time_start
- slope_dataset.first_value = first_slope_observation.value_quantity
- slope_dataset.fk_first_observation_id = first_slope_observation.id
-
- first_temperature_observation = pg_session.query(Observation) \
- .filter(Observation.fk_dataset_id == temperature_dataset.id) \
- .order_by(asc('sampling_time_start')) \
- .first()
- if first_temperature_observation is not None:
- temperature_dataset.first_time = first_temperature_observation.sampling_time_start
- temperature_dataset.first_value = first_temperature_observation.value_quantity
- temperature_dataset.fk_first_observation_id = first_temperature_observation.id
-
- platform_exists = pg_session.query(Platform.id).filter_by(
- name=platform.lower()).scalar() is not None
- if not platform_exists:
- sensor_platform = Platform()
- # max_id = pg_session.query(func.max(Platform.id)).scalar()
- # sensor_platform.id = max_id + 1
- sensor_platform.sta_identifier = platform.lower()
- sensor_platform.identifier = platform.lower()
- sensor_platform.name = platform.lower()
- slope_dataset.platform = sensor_platform
- roll_dataset.platform = sensor_platform
- temperature_dataset.platform = sensor_platform
+ # existing_observation: bool = (
+ # db_session.query(Observation)
+ # .filter(Observation.result_time == fb_observation.result_time,
+ # Observation.fk_dataset_id == dataset.id)
+ # .one_or_none()
+ # )
+ existing_observation: bool = time.mktime(
+ fb_observation.result_time.timetuple()) in value_identifier_db_list
+ # Can we insert this observation?
+ if existing_observation is False:
+ # insert new observation
+ new_observation: Observation = Observation()
+ new_observation = Observation(
+ # id=max_id,
+ value_type='quantity',
+ sampling_time_start=fb_observation.result_time,
+ sampling_time_end=fb_observation.result_time,
+ result_time=fb_observation.result_time,
+ sta_identifier=str(uuid.uuid4()),
+ value_identifier=str(time.mktime(
+ fb_observation.result_time.timetuple())),
+ value_quantity=value
+ )
+ dataset.observations.append(new_observation)
+ print(f"new observation with result time {new_observation.result_time} "
+ f"for inclinometer {dataset.procedure.name} succesfully imported!")
else:
- sensor_platform = pg_session.query(Platform.id) \
- .filter(Platform.name == platform.lower()) \
- .first()
- slope_dataset.fk_platform_id = sensor_platform.id
- roll_dataset.fk_platform_id = sensor_platform.id
- temperature_dataset.fk_platform_id = sensor_platform.id
-
- # commit dataset changes:
- pg_session.commit()
- pg_session.close()
+ print(f"observation with result time {fb_observation.result_time} "
+ f"for inclinometer {dataset.procedure.name} already exists!")
# -----------------------------------------------------------------------------
if __name__ == "__main__":
+ load_dotenv(find_dotenv())
main()
diff --git a/automatic_inclinometer/import_observations_wolfsegg_kb1_old.py b/automatic_inclinometer/import_observations_wolfsegg_kb1_old.py
new file mode 100644
index 0000000..9bdc086
--- /dev/null
+++ b/automatic_inclinometer/import_observations_wolfsegg_kb1_old.py
@@ -0,0 +1,197 @@
+""" import firebird, export to postgresql """
+#!/usr/bin/python# -*- coding: utf-8 -*-
+
+from typing import List
+import uuid
+from sqlalchemy.orm import session
+from sqlalchemy import desc, asc
+from db.fb_models import (create_session, FbObservation, Catena)
+from db.models import (create_pg_session, Dataset, Observation, Procedure, Phenomenon, Platform)
+
+def main():
+ """
+ Main function.
+ """
+
+ # parameter:
+ # sensor id in firebird db:
+ # sensor_id = 1
+ # # name of project area in firebird db
+ # feature_of_interest = 'TAC003-020-0517' # Wolfsegg KB1
+ # # sensor name in postgis db
+ # sensor = 'wolfsegg_kb1_1'
+ # platform = 'wolfsegg'
+
+ sensor_id = 0
+ # name of project area in firebird db
+ feature_of_interest = 'TAC003-020-0517' # Wolfsegg KB1
+ # sensor name in postgis db
+ sensor = 'wolfsegg_kb1_0'
+ platform = 'wolfsegg_kb1_inclinometer'
+
+ firebird_session: session = create_session()
+ # db_observation = session.query(Observation) \
+ # .filter_by(name='John Snow').first()
+ query = firebird_session.query(FbObservation).join(FbObservation.catena) \
+ .filter(FbObservation.sensore == sensor_id) \
+ .filter(Catena.name == feature_of_interest)
+ # feature_of_interest = query.statement.compile(dialect=firebird.dialect())
+ firebird_observations: List[FbObservation] = query.all()
+ firebird_session.close()
+
+ pg_session: session = create_pg_session()
+ # pg_datasets: List[Dataset] = pg_query.all()
+ pg_query = pg_session.query(Dataset) \
+ .join(Procedure) \
+ .join(Phenomenon) \
+ .filter(Procedure.sta_identifier == sensor.lower())
+ # .join(Platform).all() \
+
+
+ # roll_dataset = [x for x in pg_datasets if x.phenomenon.sta_identifier == "Roll"]
+ roll_dataset = pg_query.filter(Phenomenon.sta_identifier == "Roll").first()
+ roll_dataset.is_published = 1
+ roll_dataset.is_hidden = 0
+ roll_dataset.dataset_type = "timeseries"
+ roll_dataset.observation_type = "simple"
+ roll_dataset.value_type = "quantity"
+ slope_dataset = pg_query.filter(
+ Phenomenon.sta_identifier == "Slope").first()
+ slope_dataset.is_published = 1
+ slope_dataset.is_hidden = 0
+ slope_dataset.dataset_type = "timeseries"
+ slope_dataset.observation_type = "simple"
+ slope_dataset.value_type = "quantity"
+ temperature_dataset = pg_query.filter(
+ Phenomenon.sta_identifier == "InSystemTemperature").first()
+ temperature_dataset.is_published = 1
+ temperature_dataset.is_hidden = 0
+ temperature_dataset.dataset_type = "timeseries"
+ temperature_dataset.observation_type = "simple"
+ temperature_dataset.value_type = "quantity"
+ pg_session.commit()
+
+ # max_id = pg_session.query(func.max(Observation.id)).scalar()
+ for fb_observation in firebird_observations:
+ # print(fb_observation.catena.name)
+ if(fb_observation.roll is not None and roll_dataset is not None):
+ # max_id = max_id + 1
+ pg_roll_observation = Observation(
+ # id=max_id,
+ value_type='quantity',
+ sampling_time_start=fb_observation.result_time,
+ sampling_time_end=fb_observation.result_time,
+ result_time=fb_observation.result_time,
+ sta_identifier=str(uuid.uuid4()),
+ value_quantity=fb_observation.roll
+ )
+ roll_dataset.observations.append(pg_roll_observation)
+ if(fb_observation.pitch is not None and slope_dataset is not None):
+ # max_id = max_id + 1
+ pg_slope_observation = Observation(
+ # id=max_id,
+ value_type='quantity',
+ sampling_time_start=fb_observation.result_time,
+ sampling_time_end=fb_observation.result_time,
+ result_time=fb_observation.result_time,
+ sta_identifier=str(uuid.uuid4()),
+ value_quantity=fb_observation.pitch
+ )
+ slope_dataset.observations.append(pg_slope_observation)
+ if(fb_observation.temperature is not None and temperature_dataset is not None):
+ # max_id = max_id + 1
+ pg_temperature_observation = Observation(
+ # id=max_id,
+ value_type='quantity',
+ sampling_time_start=fb_observation.result_time,
+ sampling_time_end=fb_observation.result_time,
+ result_time=fb_observation.result_time,
+ sta_identifier=str(uuid.uuid4()),
+ value_quantity=fb_observation.temperature
+ )
+ temperature_dataset.observations.append(pg_temperature_observation)
+ # commit observations:
+ pg_session.commit()
+
+ last_roll_observation = pg_session.query(Observation) \
+ .filter(Observation.fk_dataset_id == roll_dataset.id) \
+ .order_by(desc('sampling_time_start')) \
+ .first()
+ if last_roll_observation is not None:
+ roll_dataset.last_time = last_roll_observation.sampling_time_start
+ roll_dataset.last_value = last_roll_observation.value_quantity
+ roll_dataset.fk_last_observation_id = last_roll_observation.id
+
+ last_slope_observation = pg_session.query(Observation) \
+ .filter(Observation.fk_dataset_id == slope_dataset.id) \
+ .order_by(desc('sampling_time_start')) \
+ .first()
+ if last_slope_observation is not None:
+ slope_dataset.last_time = last_slope_observation.sampling_time_start
+ slope_dataset.last_value = last_slope_observation.value_quantity
+ slope_dataset.fk_last_observation_id = last_slope_observation.id
+
+ last_temperature_observation = pg_session.query(Observation) \
+ .filter(Observation.fk_dataset_id == temperature_dataset.id) \
+ .order_by(desc('sampling_time_start')) \
+ .first()
+ if last_temperature_observation is not None:
+ temperature_dataset.last_time = last_temperature_observation.sampling_time_start
+ temperature_dataset.last_value = last_temperature_observation.value_quantity
+ temperature_dataset.fk_last_observation_id = last_temperature_observation.id
+
+ first_roll_observation = pg_session.query(Observation) \
+ .filter(Observation.fk_dataset_id == roll_dataset.id) \
+ .order_by(asc('sampling_time_start')) \
+ .first()
+ if first_roll_observation is not None:
+ roll_dataset.first_time = first_roll_observation.sampling_time_start
+ roll_dataset.first_value = first_roll_observation.value_quantity
+ roll_dataset.fk_first_observation_id = first_roll_observation.id
+
+ first_slope_observation = pg_session.query(Observation) \
+ .filter(Observation.fk_dataset_id == slope_dataset.id) \
+ .order_by(asc('sampling_time_start')) \
+ .first()
+ if first_slope_observation is not None:
+ slope_dataset.first_time = first_slope_observation.sampling_time_start
+ slope_dataset.first_value = first_slope_observation.value_quantity
+ slope_dataset.fk_first_observation_id = first_slope_observation.id
+
+ first_temperature_observation = pg_session.query(Observation) \
+ .filter(Observation.fk_dataset_id == temperature_dataset.id) \
+ .order_by(asc('sampling_time_start')) \
+ .first()
+ if first_temperature_observation is not None:
+ temperature_dataset.first_time = first_temperature_observation.sampling_time_start
+ temperature_dataset.first_value = first_temperature_observation.value_quantity
+ temperature_dataset.fk_first_observation_id = first_temperature_observation.id
+
+ platform_exists = pg_session.query(Platform.id).filter_by(
+ name=platform.lower()).scalar() is not None
+ if not platform_exists:
+ sensor_platform = Platform()
+ # max_id = pg_session.query(func.max(Platform.id)).scalar()
+ # sensor_platform.id = max_id + 1
+ sensor_platform.sta_identifier = platform.lower()
+ sensor_platform.identifier = platform.lower()
+ sensor_platform.name = platform.lower()
+ slope_dataset.platform = sensor_platform
+ roll_dataset.platform = sensor_platform
+ temperature_dataset.platform = sensor_platform
+ else:
+ sensor_platform = pg_session.query(Platform.id) \
+ .filter(Platform.name == platform.lower()) \
+ .first()
+ slope_dataset.fk_platform_id = sensor_platform.id
+ roll_dataset.fk_platform_id = sensor_platform.id
+ temperature_dataset.fk_platform_id = sensor_platform.id
+
+ # commit dataset changes:
+ pg_session.commit()
+ pg_session.close()
+
+
+# -----------------------------------------------------------------------------
+if __name__ == "__main__":
+ main()
diff --git a/automatic_inclinometer/insert_sensors/import_sensors_ampflwang.py b/automatic_inclinometer/insert_sensors/import_sensors_ampflwang.py
new file mode 100644
index 0000000..13d9926
--- /dev/null
+++ b/automatic_inclinometer/insert_sensors/import_sensors_ampflwang.py
@@ -0,0 +1,195 @@
+# -*- coding: utf-8 -*-
+"""This module does blah blah."""
+
+from ast import List
+import requests
+# from insert_sensor.transactional import insert_sensor
+from insert_sensor.wrapper import (Offering, FoI, Procedure, SensorType)
+# import json
+
+
+class Sos():
+ """
+ A class to represent a sos service.
+ ...
+
+ Attributes
+ ----------
+ sosurl : str
+ url of the sos service
+ token : str
+ token to access the sos service
+ """
+
+ def __init__(self, url, token=''):
+ self.sosurl = str(url) # url to access the SOS
+ self.token = str(token) # security token, optional
+ # Test if URL exists
+ try:
+ test = requests.get(self.sosurl)
+ test.raise_for_status()
+ except requests.HTTPError:
+ print("The URL is not valid")
+
+# Python3 code here creating class
+
+
+class Sensor:
+ """
+ A class to represent an input sensor.
+ ...
+
+ Attributes
+ ----------
+ name : str
+ name of the sensor
+ x : float
+ longitude coordinate of the sensor
+ y : float
+ latitude coordinate of the sensor
+ """
+
+ def __init__(self, name: str, x_coord: float, y_coord: float):
+ self.name = name
+ self.x_coord = x_coord
+ self.y_coord = y_coord
+
+
+def main():
+ """
+ main function
+ """
+ sos_url = 'https://geomon.geologie.ac.at/52n-sos-webapp/service'
+
+ # creating list
+ sensor_list: List[Sensor] = []
+
+ # appending instances to list 48.0889892,13.5583703
+ sensor_list.append(
+ Sensor('ampflwang_kb1_0', 13.5583703, 48.0889892))
+ sensor_list.append(
+ Sensor('ampflwang_kb1_1', 13.5583703, 48.0889892))
+ sensor_list.append(
+ Sensor('ampflwang_kb1_2', 13.5583703, 48.0889892))
+ sensor_list.append(
+ Sensor('ampflwang_kb1_3', 13.5583703, 48.0889892))
+ sensor_list.append(
+ Sensor('ampflwang_kb1_4', 13.5583703, 48.0889892))
+ sensor_list.append(
+ Sensor('ampflwang_kb1_5', 13.5583703, 48.0889892))
+ sensor_list.append(
+ Sensor('ampflwang_kb1_6', 13.5583703, 48.0889892))
+ sensor_list.append(
+ Sensor('ampflwang_kb1_7', 13.5583703, 48.0889892))
+ sensor_list.append(
+ Sensor('ampflwang_kb1_8', 13.5583703, 48.0889892))
+ sensor_list.append(
+ Sensor('ampflwang_kb1_9', 13.5583703, 48.0889892))
+
+ sensor: Sensor
+ for sensor in sensor_list:
+ # platform ampflwang_kb1_inclinometer
+ offering = Offering(
+ "https://geomon.geologie.ac.at/52n-sos-webapp/api/offerings/",
+ sensor.name,
+ "Bohrloch, Ampflwang Inklinometer"
+ )
+ procedure = Procedure(sensor.name, sensor.name)
+ foi = FoI("degree", "m", (sensor.x_coord, sensor.y_coord, 0.0),
+ "GSA02A-010-1210", "Ampflwang KB1")
+ # now insert sensor via rest service:
+ sensor_type=SensorType("inclinometer")
+ post_data=insert_sensor(offering, procedure, foi, sensor_type)
+ # print(post_data)
+ headers={'Accept': 'application/json'}
+ request=requests.post(sos_url, headers = headers, json = post_data)
+ print(request.text)
+
+def insert_sensor(offering, procedure, foi, sensor_type):
+ """
+ Prepares the body of a InsertSensor request for JSON binding.
+ :param offering: an instance of class Offering.Type object.
+ :param procedure: an instance of class Procedure. Type object.
+ :param foi: feature of interest. Instance of FoI
+ :param sensor_type: SensorType object
+ :return: valid body for an InsertSensor request.
+ """
+
+ # shortName = offering.name # string
+ # longName = 'Sibratsgfall test' # string
+
+ # Offering values
+ gml_id='\"' + str(procedure.id) + '\"' # Offering name, double quoted
+ offering_name=offering.name
+ offering_label=offering.label
+ # offID = offering.fullId # URL format of full id
+
+ # featureName = featureID = cordX = cordY = height = h_unit = z_unit = coordinates = ""
+ # check if feature of interest should be declare
+ if foi is not None:
+ # feature_id = 'https://geomon.geologie.ac.at/52n-sos-webapp/api/features/' + \
+ # str(foi.fid) # URL format
+ cord_x=str(foi.x) # longitude degrees, float
+ cord_y=str(foi.y) # latitude degrees, float
+ coordinates=cord_x + " " + cord_y
+ height=str(foi.z) # altitude in meters, float
+ # h_unit = foi.Hunit # units for horizontal coordinates
+ # z_unit = foi.Vunit # units for altitude
+ feature_id=foi.fid # "feature location"
+ feature_name=foi.name # "feature location"
+ else:
+ pass
+
+ procedure_name=procedure.name
+ procedure_identifier=procedure.id # URL,
+ obs_types=[]
+ output_list='' # output list element for describe procedure
+ properties_list=[]
+ for attr in sensor_type.pattern["attributes"]:
+ obs_prop_name='\"' + attr[0] + '\"' # attribute name
+ # print(obs_prop_name)
+ unit_name=sensor_type.om_types[attr[1]] # om type
+ # magnitud = a # ??
+
+ obs_name=obs_prop_name.replace('\"', '')
+ obs_name="".join(obs_name.split()) # observable property name
+ output=''
+ output_list=output_list + output
+ # add property identifier to the list.
+ properties_list.append(obs_name)
+ # prepare list of measurement types
+ # A sensor can not registry duplicated sensor types.
+ this_type="http://www.opengis.net/def/observationType/OGC-OM/2.0/"+unit_name
+ if this_type not in obs_types: # when new type appears
+ obs_types.append(this_type)
+ else:
+ continue
+
+ # Unit of measurement:
+ unit_name='\"' + procedure.name + '\"' # double quoted string
+ # unit = omType # one of the MO measurement types
+
+ body={
+ "request": "InsertSensor",
+ "service": "SOS",
+ "version": "2.0.0",
+ "procedureDescriptionFormat": "http://www.opengis.net/sensorml/2.0",
+ "procedureDescription": f'{procedure_identifier}longName{procedure_name}shortName{procedure_name}{offering_label}{offering_name}truefalsefeaturesOfInterest{feature_id}{feature_name}{coordinates}SlopeRollInSystemTemperature{cord_x}{cord_y}{height}',
+ "observableProperty": [
+ "Slope",
+ "Roll",
+ "InSystemTemperature"
+ ],
+ "observationType": [
+ "http://www.opengis.net/def/observationType/OGC-OM/2.0/OM_Measurement"
+ ],
+ "featureOfInterestType":
+ "http://www.opengis.net/def/samplingFeatureType/OGC-OM/2.0/SF_SamplingPoint"
+ }
+ return body
+
+
+if __name__ == '__main__':
+ main()
diff --git a/automatic_inclinometer/insert_sensors/import_sensors_laakirchen.py b/automatic_inclinometer/insert_sensors/import_sensors_laakirchen.py
index 6f22cba..ca89ba2 100644
--- a/automatic_inclinometer/insert_sensors/import_sensors_laakirchen.py
+++ b/automatic_inclinometer/insert_sensors/import_sensors_laakirchen.py
@@ -127,7 +127,7 @@ def main():
foi = FoI("degree", "m", (sensor.x_coord, sensor.y_coord, 0.0),
"GSA02B-007-0911", "Massenbewegung Laakirchen")
-
+
# now insert sensor via rest service:
sensor_type=SensorType("inclinometer")
post_data=insert_sensor(offering, procedure, foi, sensor_type)
diff --git a/automatic_inclinometer/insert_sensors/import_sensors_wolfsegg.py b/automatic_inclinometer/insert_sensors/import_sensors_wolfsegg.py
index 0b9f8c7..f0aa274 100644
--- a/automatic_inclinometer/insert_sensors/import_sensors_wolfsegg.py
+++ b/automatic_inclinometer/insert_sensors/import_sensors_wolfsegg.py
@@ -64,47 +64,47 @@ def main():
# creating list
sensor_list: List[Sensor] = []
- # appending instances to list
+ # appending instances to list 48.1064354,13.6731638
sensor_list.append(
- Sensor('wolfsegg_kb1_0', 13.808378638676, 47.882871028831))
- # sensor_list.append(
- # Sensor('wolfsegg_kb1_1', 13.808378638676, 47.882871028831))
+ Sensor('wolfsegg_kb1_0', 13.6731638, 48.1064354))
sensor_list.append(
- Sensor('wolfsegg_kb1_2', 13.808378638676, 47.882871028831))
+ Sensor('wolfsegg_kb1_1', 13.6731638, 48.1064354))
sensor_list.append(
- Sensor('wolfsegg_kb1_3', 13.808378638676, 47.882871028831))
+ Sensor('wolfsegg_kb1_2', 13.6731638, 48.1064354))
sensor_list.append(
- Sensor('wolfsegg_kb1_4', 13.808378638676, 47.882871028831))
+ Sensor('wolfsegg_kb1_3', 13.6731638, 48.1064354))
sensor_list.append(
- Sensor('wolfsegg_kb1_5', 13.808378638676, 47.882871028831))
+ Sensor('wolfsegg_kb1_4', 13.6731638, 48.1064354))
sensor_list.append(
- Sensor('wolfsegg_kb1_6', 13.808378638676, 47.882871028831))
+ Sensor('wolfsegg_kb1_5', 13.6731638, 48.1064354))
sensor_list.append(
- Sensor('wolfsegg_kb1_7', 13.808378638676, 47.882871028831))
+ Sensor('wolfsegg_kb1_6', 13.6731638, 48.1064354))
sensor_list.append(
- Sensor('wolfsegg_kb1_8', 13.808378638676, 47.882871028831))
+ Sensor('wolfsegg_kb1_7',13.6731638, 48.1064354))
sensor_list.append(
- Sensor('wolfsegg_kb1_9', 13.808378638676, 47.882871028831))
+ Sensor('wolfsegg_kb1_8', 13.6731638, 48.1064354))
sensor_list.append(
- Sensor('wolfsegg_kb1_10', 13.808378638676, 47.882871028831))
+ Sensor('wolfsegg_kb1_9', 13.6731638, 48.1064354))
sensor_list.append(
- Sensor('wolfsegg_kb1_11', 13.808378638676, 47.882871028831))
+ Sensor('wolfsegg_kb1_10', 13.6731638, 48.1064354))
sensor_list.append(
- Sensor('wolfsegg_kb1_12', 13.808378638676, 47.882871028831))
+ Sensor('wolfsegg_kb1_11', 13.6731638, 48.1064354))
sensor_list.append(
- Sensor('wolfsegg_kb1_13', 13.808378638676, 47.882871028831))
+ Sensor('wolfsegg_kb1_12', 13.6731638, 48.1064354))
sensor_list.append(
- Sensor('wolfsegg_kb1_14', 13.808378638676, 47.882871028831))
+ Sensor('wolfsegg_kb1_13', 13.6731638, 48.1064354))
sensor_list.append(
- Sensor('wolfsegg_kb1_15', 13.808378638676, 47.882871028831))
+ Sensor('wolfsegg_kb1_14', 13.6731638, 48.1064354))
sensor_list.append(
- Sensor('wolfsegg_kb1_16', 13.808378638676, 47.882871028831))
+ Sensor('wolfsegg_kb1_15', 13.6731638, 48.1064354))
sensor_list.append(
- Sensor('wolfsegg_kb1_17', 13.808378638676, 47.882871028831))
+ Sensor('wolfsegg_kb1_16', 13.6731638, 48.1064354))
sensor_list.append(
- Sensor('wolfsegg_kb1_18', 13.808378638676, 47.882871028831))
+ Sensor('wolfsegg_kb1_17', 13.6731638, 48.1064354))
sensor_list.append(
- Sensor('wolfsegg_kb1_19', 13.808378638676, 47.882871028831))
+ Sensor('wolfsegg_kb1_18', 13.6731638, 48.1064354))
+ sensor_list.append(
+ Sensor('wolfsegg_kb1_19', 13.6731638, 48.1064354))
sensor: Sensor
for sensor in sensor_list:
diff --git a/db/insert_initial_values.txt b/db/insert_initial_db_values.txt
similarity index 100%
rename from db/insert_initial_values.txt
rename to db/insert_initial_db_values.txt
diff --git a/gschliefgraben_glasfaser/import_feature_sensor.py b/gschliefgraben_glasfaser/import_feature_sensor.py
index 278d620..61ec792 100644
--- a/gschliefgraben_glasfaser/import_feature_sensor.py
+++ b/gschliefgraben_glasfaser/import_feature_sensor.py
@@ -87,153 +87,153 @@ def main():
sensor_list.append(
Sensor('inclino1_01', 13.816940062459931, 47.883893347112163,
"bohrloch1-glasfaser-gschliefgraben",
- "Glasfaser Untersuchungen am Gschliefgraben (Gmunden)"))
+ "Glasfaser Untersuchungen am Gschliefgraben (Bohrloch1)"))
sensor_list.append(
Sensor('inclino1_02', 13.816940062459931, 47.883893347112163,
"bohrloch1-glasfaser-gschliefgraben",
- "Glasfaser Untersuchungen am Gschliefgraben (Gmunden)"))
+ "Glasfaser Untersuchungen am Gschliefgraben (Bohrloch1)"))
sensor_list.append(
Sensor('inclino1_03',13.816940062459931, 47.883893347112163,
"bohrloch1-glasfaser-gschliefgraben",
- "Glasfaser Untersuchungen am Gschliefgraben (Gmunden)"))
+ "Glasfaser Untersuchungen am Gschliefgraben (Bohrloch1)"))
sensor_list.append(
Sensor('inclino1_04', 13.816940062459931, 47.883893347112163,
"bohrloch1-glasfaser-gschliefgraben",
- "Glasfaser Untersuchungen am Gschliefgraben (Gmunden)"))
+ "Glasfaser Untersuchungen am Gschliefgraben (Bohrloch1)"))
sensor_list.append(
Sensor('inclino1_05', 13.816940062459931, 47.883893347112163,
"bohrloch1-glasfaser-gschliefgraben",
- "Glasfaser Untersuchungen am Gschliefgraben (Gmunden)"))
+ "Glasfaser Untersuchungen am Gschliefgraben (Bohrloch1)"))
sensor_list.append(
Sensor('inclino1_06', 13.816940062459931, 47.883893347112163,
"bohrloch1-glasfaser-gschliefgraben",
- "Glasfaser Untersuchungen am Gschliefgraben (Gmunden)"))
+ "Glasfaser Untersuchungen am Gschliefgraben (Bohrloch1)"))
sensor_list.append(
Sensor('inclino1_07', 13.816940062459931, 47.883893347112163,
"bohrloch1-glasfaser-gschliefgraben",
- "Glasfaser Untersuchungen am Gschliefgraben (Gmunden)"))
+ "Glasfaser Untersuchungen am Gschliefgraben (Bohrloch1)"))
sensor_list.append(
Sensor('inclino1_08', 13.816940062459931, 47.883893347112163,
"bohrloch1-glasfaser-gschliefgraben",
- "Glasfaser Untersuchungen am Gschliefgraben (Gmunden)"))
+ "Glasfaser Untersuchungen am Gschliefgraben (Bohrloch1)"))
sensor_list.append(
Sensor('inclino1_09', 13.816940062459931, 47.883893347112163,
"bohrloch1-glasfaser-gschliefgraben",
- "Glasfaser Untersuchungen am Gschliefgraben (Gmunden)"))
+ "Glasfaser Untersuchungen am Gschliefgraben (Bohrloch1)"))
sensor_list.append(
Sensor('inclino1_10', 13.816940062459931, 47.883893347112163,
"bohrloch1-glasfaser-gschliefgraben",
- "Glasfaser Untersuchungen am Gschliefgraben (Gmunden)"))
+ "Glasfaser Untersuchungen am Gschliefgraben (Bohrloch1)"))
sensor_list.append(
Sensor('inclino1_11', 13.816940062459931, 47.883893347112163,
"bohrloch1-glasfaser-gschliefgraben",
- "Glasfaser Untersuchungen am Gschliefgraben (Gmunden)"))
+ "Glasfaser Untersuchungen am Gschliefgraben (Bohrloch1)"))
sensor_list.append(
Sensor('inclino1_12', 13.816940062459931, 47.883893347112163,
"bohrloch1-glasfaser-gschliefgraben",
- "Glasfaser Untersuchungen am Gschliefgraben (Gmunden)"))
+ "Glasfaser Untersuchungen am Gschliefgraben (Bohrloch1)"))
sensor_list.append(
Sensor('inclino1_13', 13.816940062459931, 47.883893347112163,
"bohrloch1-glasfaser-gschliefgraben",
- "Glasfaser Untersuchungen am Gschliefgraben (Gmunden)"))
+ "Glasfaser Untersuchungen am Gschliefgraben (Bohrloch1)"))
sensor_list.append(
Sensor('inclino1_14', 13.816940062459931, 47.883893347112163,
"bohrloch1-glasfaser-gschliefgraben",
- "Glasfaser Untersuchungen am Gschliefgraben (Gmunden)"))
+ "Glasfaser Untersuchungen am Gschliefgraben (Bohrloch1)"))
sensor_list.append(
Sensor('inclino1_15', 13.816940062459931, 47.883893347112163,
"bohrloch1-glasfaser-gschliefgraben",
- "Glasfaser Untersuchungen am Gschliefgraben (Gmunden)"))
+ "Glasfaser Untersuchungen am Gschliefgraben (Bohrloch1)"))
sensor_list.append(
Sensor('inclino1_16', 13.816940062459931, 47.883893347112163,
"bohrloch1-glasfaser-gschliefgraben",
- "Glasfaser Untersuchungen am Gschliefgraben (Gmunden)"))
+ "Glasfaser Untersuchungen am Gschliefgraben (Bohrloch1)"))
sensor_list.append(
Sensor('inclino1_17', 13.816940062459931, 47.883893347112163,
"bohrloch1-glasfaser-gschliefgraben",
- "Glasfaser Untersuchungen am Gschliefgraben (Gmunden)"))
+ "Glasfaser Untersuchungen am Gschliefgraben (Bohrloch1)"))
sensor_list.append(
Sensor('inclino1_18', 13.816940062459931, 47.883893347112163,
"bohrloch1-glasfaser-gschliefgraben",
- "Glasfaser Untersuchungen am Gschliefgraben (Gmunden)"))
+ "Glasfaser Untersuchungen am Gschliefgraben (Bohrloch1)"))
sensor_list.append(
Sensor('inclino1_19', 13.816940062459931, 47.883893347112163,
"bohrloch1-glasfaser-gschliefgraben",
- "Glasfaser Untersuchungen am Gschliefgraben (Gmunden)"))
+ "Glasfaser Untersuchungen am Gschliefgraben (Bohrloch1)"))
sensor_list.append(
Sensor('inclino1_20', 13.816940062459931, 47.883893347112163,
"bohrloch1-glasfaser-gschliefgraben",
- "Glasfaser Untersuchungen am Gschliefgraben (Gmunden)"))
+ "Glasfaser Untersuchungen am Gschliefgraben (Bohrloch1)"))
## inclino2_04 bis inclino2_22
sensor_list.append(
Sensor('inclino2_04', 13.817740197926463, 47.883901327648893,
"bohrloch2-glasfaser-gschliefgraben",
- "Glasfaser Untersuchungen am Gschliefgraben (Gmunden)"))
+ "Glasfaser Untersuchungen am Gschliefgraben (Bohrloch2)"))
sensor_list.append(
Sensor('inclino2_05', 13.817740197926463, 47.883901327648893,
"bohrloch2-glasfaser-gschliefgraben",
- "Glasfaser Untersuchungen am Gschliefgraben (Gmunden)"))
+ "Glasfaser Untersuchungen am Gschliefgraben (Bohrloch2)"))
sensor_list.append(
Sensor('inclino2_06', 13.817740197926463, 47.883901327648893,
"bohrloch2-glasfaser-gschliefgraben",
- "Glasfaser Untersuchungen am Gschliefgraben (Gmunden)"))
+ "Glasfaser Untersuchungen am Gschliefgraben (Bohrloch2)"))
sensor_list.append(
Sensor('inclino2_07', 13.817740197926463, 47.883901327648893,
"bohrloch2-glasfaser-gschliefgraben",
- "Glasfaser Untersuchungen am Gschliefgraben (Gmunden)"))
+ "Glasfaser Untersuchungen am Gschliefgraben (Bohrloch2)"))
sensor_list.append(
Sensor('inclino2_08', 13.817740197926463, 47.883901327648893,
"bohrloch2-glasfaser-gschliefgraben",
- "Glasfaser Untersuchungen am Gschliefgraben (Gmunden)"))
+ "Glasfaser Untersuchungen am Gschliefgraben (Bohrloch2)"))
sensor_list.append(
Sensor('inclino2_09', 13.817740197926463, 47.883901327648893,
"bohrloch2-glasfaser-gschliefgraben",
- "Glasfaser Untersuchungen am Gschliefgraben (Gmunden)"))
+ "Glasfaser Untersuchungen am Gschliefgraben (Bohrloch2)"))
sensor_list.append(
Sensor('inclino2_10', 13.817740197926463, 47.883901327648893,
"bohrloch2-glasfaser-gschliefgraben",
- "Glasfaser Untersuchungen am Gschliefgraben (Gmunden)"))
+ "Glasfaser Untersuchungen am Gschliefgraben (Bohrloch2)"))
sensor_list.append(
Sensor('inclino2_11', 13.817740197926463, 47.883901327648893,
"bohrloch2-glasfaser-gschliefgraben",
- "Glasfaser Untersuchungen am Gschliefgraben (Gmunden)"))
+ "Glasfaser Untersuchungen am Gschliefgraben (Bohrloch2)"))
sensor_list.append(
Sensor('inclino2_12', 13.817740197926463, 47.883901327648893,
"bohrloch2-glasfaser-gschliefgraben",
- "Glasfaser Untersuchungen am Gschliefgraben (Gmunden)"))
+ "Glasfaser Untersuchungen am Gschliefgraben (Bohrloch2)"))
sensor_list.append(
Sensor('inclino2_13', 13.817740197926463, 47.883901327648893,
"bohrloch2-glasfaser-gschliefgraben",
- "Glasfaser Untersuchungen am Gschliefgraben (Gmunden)"))
+ "Glasfaser Untersuchungen am Gschliefgraben (Bohrloch2)"))
sensor_list.append(
Sensor('inclino2_14', 13.817740197926463, 47.883901327648893,
"bohrloch2-glasfaser-gschliefgraben",
- "Glasfaser Untersuchungen am Gschliefgraben (Gmunden)"))
+ "Glasfaser Untersuchungen am Gschliefgraben (Bohrloch2)"))
sensor_list.append(
Sensor('inclino2_15', 13.817740197926463, 47.883901327648893,
"bohrloch2-glasfaser-gschliefgraben",
- "Glasfaser Untersuchungen am Gschliefgraben (Gmunden)"))
+ "Glasfaser Untersuchungen am Gschliefgraben (Bohrloch2)"))
sensor_list.append(
Sensor('inclino2_16',13.817740197926463, 47.883901327648893,
"bohrloch1-glasfaser-gschliefgraben",
- "Glasfaser Untersuchungen am Gschliefgraben (Gmunden)"))
+ "Glasfaser Untersuchungen am Gschliefgraben (Bohrloch2)"))
sensor_list.append(
Sensor('inclino2_17', 13.817740197926463, 47.883901327648893,
"bohrloch2-glasfaser-gschliefgraben",
- "Glasfaser Untersuchungen am Gschliefgraben (Gmunden)"))
+ "Glasfaser Untersuchungen am Gschliefgraben (Bohrloch2)"))
sensor_list.append(
Sensor('inclino2_18',13.817740197926463, 47.883901327648893,
"bohrloch2-glasfaser-gschliefgraben",
- "Glasfaser Untersuchungen am Gschliefgraben (Gmunden)"))
+ "Glasfaser Untersuchungen am Gschliefgraben (Bohrloch2)"))
sensor_list.append(
Sensor('inclino2_19', 13.817740197926463, 47.883901327648893,
"bohrloch2-glasfaser-gschliefgraben",
- "Glasfaser Untersuchungen am Gschliefgraben (Gmunden)"))
+ "Glasfaser Untersuchungen am Gschliefgraben (Bohrloch2)"))
sensor_list.append(
Sensor('inclino2_20', 13.817740197926463, 47.883901327648893,
"bohrloch2-glasfaser-gschliefgraben",
- "Glasfaser Untersuchungen am Gschliefgraben (Gmunden)"))
+ "Glasfaser Untersuchungen am Gschliefgraben (Bohrloch2)"))
sensor: Sensor
for sensor in sensor_list:
diff --git a/gschliefgraben_piezometer/import_piezometer_observations_hourly.py b/gschliefgraben_piezometer/import_piezometer_observations_hourly.py
index 6dde3a3..adfad31 100644
--- a/gschliefgraben_piezometer/import_piezometer_observations_hourly.py
+++ b/gschliefgraben_piezometer/import_piezometer_observations_hourly.py
@@ -25,7 +25,7 @@ from db.models import (
def main():
''' main method '''
pg_session: session = create_pg_session()
- platform_sta_identifier = "pechgraben_piezometer"
+ platform_sta_identifier = "gschliefgraben_piezometer"
# sensor = "bohrloch1"
# sensor_list = os.environ.get('PIEZOMETER_GSCHLIEFGRABEN_SENSORS', [])
sensor_list = json.loads(os.environ['PIEZOMETER_GSCHLIEFGRABEN_SENSORS'])
diff --git a/voegelsberg/import_feature_sensor.py b/voegelsberg/import_feature_sensor.py
new file mode 100644
index 0000000..1dbafd7
--- /dev/null
+++ b/voegelsberg/import_feature_sensor.py
@@ -0,0 +1 @@
+# https://lists.ogc.org/pipermail/sensorml/2008-September/000573.html
\ No newline at end of file