- insert tachymeter observations now as TextObservation

This commit is contained in:
Arno Kaimbacher 2022-04-11 16:14:01 +02:00
parent baca212600
commit 29790fdd18
4 changed files with 84 additions and 16 deletions

View File

@ -190,7 +190,7 @@ class Observation(Base):
value_identifier = Column('value_identifier', String) value_identifier = Column('value_identifier', String)
value_quantity = Column('value_quantity', Numeric(20, 10), nullable=True) value_quantity = Column('value_quantity', Numeric(20, 10), nullable=True)
value_text = Column('value_text', String, nullable=True) value_text = Column('value_text', String, nullable=True)
value_geometry = Column(Geometry(geometry_type='POINTZ', srid=4326, dimension=3), nullable=True) value_geometry = Column(Geometry(geometry_type='POINT', srid=4326, dimension=3), nullable=True)
fk_dataset_id = Column(Integer, ForeignKey( fk_dataset_id = Column(Integer, ForeignKey(
'gba.dataset.dataset_id'), nullable=False) 'gba.dataset.dataset_id'), nullable=False)
@ -267,7 +267,7 @@ def create_db():
# session_maker = sessionmaker(bind=engine) # session_maker = sessionmaker(bind=engine)
# session = session_maker() # session = session_maker()
# Base.metadata.drop_all(bind=engine) # Base.metadata.drop_all(bind=engine)
# Base.metadata.create_all(engine) Base.metadata.create_all(engine)
if __name__ == "__main__": if __name__ == "__main__":

View File

@ -81,4 +81,6 @@ https://stackoverflow.com/questions/51737548/how-to-set-primary-key-auto-increme
UPDATE pg_extension SET extrelocatable = TRUE WHERE extname = 'postgis'; UPDATE pg_extension SET extrelocatable = TRUE WHERE extname = 'postgis';
ALTER EXTENSION postgis SET SCHEMA gba; ALTER EXTENSION postgis SET SCHEMA gba;
ALTER DATABASE sos_db SET search_path TO gba, public; ALTER DATABASE sos_db SET search_path TO gba, public;
or alternatively:
alter role sos_admin set search_path = "$user", public, gba;

View File

@ -15,11 +15,11 @@
xmlns:xs="http://www.w3.org/2001/XMLSchema" xsi:schemaLocation="http://www.opengis.net/sos/2.0 http://schemas.opengis.net/sos/2.0/sos.xsd http://www.opengis.net/samplingSpatial/2.0 http://schemas.opengis.net/samplingSpatial/2.0/spatialSamplingFeature.xsd"> <!-- multiple offerings are possible --> xmlns:xs="http://www.w3.org/2001/XMLSchema" xsi:schemaLocation="http://www.opengis.net/sos/2.0 http://schemas.opengis.net/sos/2.0/sos.xsd http://www.opengis.net/samplingSpatial/2.0 http://schemas.opengis.net/samplingSpatial/2.0/spatialSamplingFeature.xsd"> <!-- multiple offerings are possible -->
<sos:offering>D5_2</sos:offering> <sos:offering>D5_2</sos:offering>
<sos:observation> <sos:observation>
<om:OM_Observation gml:id="voegelsberg_0"> <om:OM_Observation gml:id="voegelsberg_o1">
<om:type xlink:href="http://www.opengis.net/def/observationType/OGC-OM/2.0/OM_GeometryObservation"/> <om:type xlink:href="http://www.opengis.net/def/observationType/OGC-OM/2.0/OM_GeometryObservation"/>
<om:phenomenonTime> <om:phenomenonTime>
<gml:TimeInstant gml:id="phenomenonTime"> <gml:TimeInstant gml:id="phenomenonTime">
<gml:timePosition>2021-09-01T00:00:00</gml:timePosition> <gml:timePosition>2021-08-01T00:00:00</gml:timePosition>
</gml:TimeInstant> </gml:TimeInstant>
</om:phenomenonTime> </om:phenomenonTime>
<om:resultTime xlink:href="#phenomenonTime"/> <om:resultTime xlink:href="#phenomenonTime"/>
@ -42,8 +42,8 @@
<sams:SF_SpatialSamplingFeature gml:id="ssf_instance"> <sams:SF_SpatialSamplingFeature gml:id="ssf_instance">
<gml:identifier codeSpace="">D5_2</gml:identifier> <gml:identifier codeSpace="">D5_2</gml:identifier>
<gml:name>origin of D5_2</gml:name> <gml:name>origin of D5_2</gml:name>
<sf:type xlink:href="http://www.opengis.net/def/samplingFeatureType/OGC-OM/2.0/SF_SamplingPoint"/> <sf:type xlink:href="http://www.opengis.net/def/samplingFeatureType/OGC-OM/2.0/SF_SamplingPoint"/>
<sf:sampledFeature xlink:href="http://www.opengis.net/def/nil/OGC/0/unknown"/> <sf:sampledFeature xmlns:sf="http://www.opengis.net/sampling/2.0" xlink:href="http://www.opengis.net/def/nil/OGC/0/unknown"/>
<sams:shape> <sams:shape>
<ns:Point xmlns:ns="http://www.opengis.net/gml/3.2" ns:id="Point_ssf_b3a826dd44012201b013c90c51da28c041f7a92e0cc47260eb9888f6a4e9f747"> <ns:Point xmlns:ns="http://www.opengis.net/gml/3.2" ns:id="Point_ssf_b3a826dd44012201b013c90c51da28c041f7a92e0cc47260eb9888f6a4e9f747">
<ns:pos srsName="http://www.opengis.net/def/crs/EPSG/0/4326">11.597409730065536 47.27196543449542</ns:pos> <ns:pos srsName="http://www.opengis.net/def/crs/EPSG/0/4326">11.597409730065536 47.27196543449542</ns:pos>
@ -60,4 +60,55 @@
</sos:observation> </sos:observation>
</sos:InsertObservation> </sos:InsertObservation>
</env:Body> </env:Body>
</env:Envelope> </env:Envelope>
{
"request": "InsertObservation",
"service": "SOS",
"version": "2.0.0",
"offering": "D5_2",
"observation": {
"type": "http://www.opengis.net/def/observationType/OGC-OM/2.0/OM_GeometryObservation",
"procedure": "D5_2",
"observedProperty": "TachymeterLocation",
"featureOfInterest": {
"identifier": {
"value": "D5_2",
"codespace": "http://www.opengis.net/def/nil/OGC/0/unknown"
},
"name": [
{
"value": "origin of D5_2",
"codespace": "http://www.opengis.net/def/nil/OGC/0/unknown"
}
],
"sampledFeature": [
"http://www.52north.org/test/featureOfInterest/world"
],
"geometry": {
"type": "Point",
"coordinates": [
10.874314927293595,
44.48931950733285
],
"crs": {
"type": "name",
"properties": {
"name": "EPSG:4326"
}
}
}
},
"phenomenonTime" : "2021-08-16T15:18:30.738Z",
"resultTime" : "2021-08-16T15:18:30.738Z",
"result": {
"type" : "Point",
"coordinates" : [
11.597688540227727,
47.271865827824854,
909.7036
]
}
}

View File

@ -68,6 +68,19 @@ def main():
.first() .first()
location_dataset.fk_format_id = sensor_format.id location_dataset.fk_format_id = sensor_format.id
pg_session.commit() pg_session.commit()
# offering = Offering(
# "https://geomon.geologie.ac.at/52n-sos-webapp/api/offerings/",
# sensor,
# "Vögelsberg Tachymeter"
# )
# procedure = Procedure(sensor, sensor)
# foi_name = "origin of " + sensor
# foi = FoI("degree", "m", (cord_x, cord_y, z_1),
# sensor, foi_name)
# xml = get_xml(offering, procedure, foi, result_time, identifier)
# print(xml)
successfully_inserted = create_observation( successfully_inserted = create_observation(
location_dataset, row, pg_session) location_dataset, row, pg_session)
@ -76,9 +89,11 @@ def main():
if not location_dataset.is_published: if not location_dataset.is_published:
location_dataset.is_published = 1 location_dataset.is_published = 1
location_dataset.is_hidden = 0 location_dataset.is_hidden = 0
location_dataset.dataset_type = "trajectory" location_dataset.dataset_type = "timeseries"
# location_dataset.dataset_type = "trajectory"
location_dataset.observation_type = "simple" location_dataset.observation_type = "simple"
location_dataset.value_type = "geometry" # location_dataset.value_type = "geometry"
location_dataset.value_type = "text"
pg_session.commit() pg_session.commit()
# last_location_observation = pg_session.query(Observation) \ # last_location_observation = pg_session.query(Observation) \
@ -106,6 +121,7 @@ def main():
pg_session.close() pg_session.close()
def create_observation(location_dataset: Dataset, data, pg_session: session): def create_observation(location_dataset: Dataset, data, pg_session: session):
''' create observation in db''' ''' create observation in db'''
# print("Sesnor key exist in JSON data") # print("Sesnor key exist in JSON data")
@ -135,8 +151,9 @@ def create_observation(location_dataset: Dataset, data, pg_session: session):
new_observation.result_time = date_obj new_observation.result_time = date_obj
new_observation.sampling_time_start = new_observation.result_time new_observation.sampling_time_start = new_observation.result_time
new_observation.sampling_time_end = new_observation.result_time new_observation.sampling_time_end = new_observation.result_time
new_observation.value_type = "geometry" new_observation.value_type = "text"
new_observation.value_geometry = f'SRID=4326;POINTZ({cord_x} {cord_y} {z_1})' new_observation.value_geometry = f'POINT({cord_x} {cord_y} {z_1})'
new_observation.value_text = '{"type":"Point","coordinates":['+ str(cord_x) +',' + str(cord_y) + ',' + str(z_1) + ']}'
new_observation.fk_dataset_id = location_dataset.id new_observation.fk_dataset_id = location_dataset.id
pg_session.add(new_observation) pg_session.add(new_observation)
print(f"new observation with result time {new_observation.result_time} " print(f"new observation with result time {new_observation.result_time} "
@ -149,7 +166,7 @@ def create_observation(location_dataset: Dataset, data, pg_session: session):
def actualize_first_last_observations(): def actualize_first_last_observations():
''' iterate throug all datasets of Voregelsberg project area ''' iterate through all datasets of Voregelsberg project area
and actualize last and first corresponding observations''' and actualize last and first corresponding observations'''
pg_session: session = create_pg_session() pg_session: session = create_pg_session()
platform_sta_identifier = "voegelsberg_tachymeter" platform_sta_identifier = "voegelsberg_tachymeter"
@ -165,8 +182,6 @@ def actualize_first_last_observations():
.filter(Platform.sta_identifier == platform_sta_identifier).all() .filter(Platform.sta_identifier == platform_sta_identifier).all()
for location_dataset in voegelsberg_datasets: for location_dataset in voegelsberg_datasets:
''' iterate throug all datasets of Voregelsberg project area
and actualize last and first corresponding observations'''
last_location_observation = pg_session.query(Observation) \ last_location_observation = pg_session.query(Observation) \
.filter(Observation.fk_dataset_id == location_dataset.id) \ .filter(Observation.fk_dataset_id == location_dataset.id) \
.order_by(desc('sampling_time_start')) \ .order_by(desc('sampling_time_start')) \
@ -175,7 +190,7 @@ def actualize_first_last_observations():
location_dataset.last_time = last_location_observation.sampling_time_start location_dataset.last_time = last_location_observation.sampling_time_start
# location_dataset.last_value = last_location_observation.value_quantity # location_dataset.last_value = last_location_observation.value_quantity
location_dataset.fk_last_observation_id = last_location_observation.id location_dataset.fk_last_observation_id = last_location_observation.id
first_location_observation = pg_session.query(Observation) \ first_location_observation = pg_session.query(Observation) \
.filter(Observation.fk_dataset_id == location_dataset.id) \ .filter(Observation.fk_dataset_id == location_dataset.id) \
.order_by(asc('sampling_time_start')) \ .order_by(asc('sampling_time_start')) \