2022-02-21 15:07:04 +00:00
|
|
|
'''
Tutorial link: https://realpython.com/flask-connexion-rest-api-part-2/
https://github.com/realpython/materials/blob/master/flask-connexion-rest-part-2/version_1/people.py

SQLAlchemy version: 1.2.15
Python version: 3.7
'''
|
|
|
|
|
|
|
|
import os
|
2022-03-02 16:17:38 +00:00
|
|
|
import uuid
|
2022-02-22 15:36:49 +00:00
|
|
|
# import sys, inspect
|
|
|
|
# currentdir = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))
|
|
|
|
# parentdir = os.path.dirname(currentdir)
|
|
|
|
# sys.path.insert(0, parentdir)
|
2022-02-28 16:25:48 +00:00
|
|
|
# import requests
|
2022-02-22 15:36:49 +00:00
|
|
|
from sqlalchemy.orm import session
|
2022-03-02 16:17:38 +00:00
|
|
|
from sqlalchemy import func
|
2022-03-03 14:55:40 +00:00
|
|
|
# from db.pg_models import Platform
|
|
|
|
from gschliefgraben_glasfaser.models import ObservationSchema, Person, PersonSchema, Observation, create_pg_session, Dataset, Procedure, Phenomenon, Platform
|
2022-02-28 16:25:48 +00:00
|
|
|
from gschliefgraben_glasfaser.my_api import MyApi
|
2022-03-02 16:17:38 +00:00
|
|
|
from datetime import datetime, date, timedelta
|
|
|
|
# from db.pg_models import create_pg_session
|
2022-02-23 15:46:47 +00:00
|
|
|
#from models import Person, PersonSchema
|
2022-02-21 15:07:04 +00:00
|
|
|
# response = requests.get('https://api.com/')
|
|
|
|
# print(response) # shows the response's HTTP status code
|
|
|
|
# print(response.json()) # shows the response's JSON response body, if it has one
|
|
|
|
# print(response.content) # get the data content of the response
|
2022-02-22 15:36:49 +00:00
|
|
|
|
|
|
|
|
|
|
|
def main():
    ''' Import all observations of the "inclino1_14" sensor for a fixed
    date range (2022-01-01 .. 2022-03-01) into the database and make sure
    the matching "Slope" dataset is published. '''

    db_user = os.environ.get("POSTGIS_DBUSER")
    print(db_user)

    pg_session: session = create_pg_session()
    observation: Observation = pg_session.query(Observation).first()

    # Find the dataset of phenomenon "Slope" belonging to the sensor's
    # procedure (sta_identifier is stored lower-case in the DB).
    sensor: str = "inclino1_14"
    pg_query = pg_session.query(Dataset) \
        .join(Procedure) \
        .join(Phenomenon) \
        .filter(Procedure.sta_identifier == sensor.lower())
    slope_dataset: Dataset = pg_query.filter(
        Phenomenon.sta_identifier == "Slope").first()
    if slope_dataset is None:
        # nothing to import into — bail out instead of crashing below
        print(f"No 'Slope' dataset found for sensor {sensor}")
        return
    if not slope_dataset.is_published:
        slope_dataset.is_published = 1
        slope_dataset.is_hidden = 0
        slope_dataset.dataset_type = "timeseries"
        slope_dataset.observation_type = "simple"
        slope_dataset.value_type = "quantity"
        pg_session.commit()

    # Import period: iterate day by day from start_date to end_date.
    start_date = date(2022, 1, 1)
    end_date = date(2022, 3, 1)
    # step size: one day per API request
    delta = timedelta(days=1)

    token_api = os.environ.get("TOKEN_API")
    test_api = MyApi(token_api)

    # iterate over the range of dates, one commit per imported day
    while start_date <= end_date:
        query_date = start_date.strftime('%Y-%m-%d')
        create_db_observations(query_date, test_api, pg_session, slope_dataset)
        start_date += delta
        pg_session.commit()
|
|
|
def create_db_observations(query_date, test_api, pg_session, dataset: Dataset,
                           sensor: str = "inclino1_14"):
    ''' Fetch the sensor data for *query_date* from the remote API and
    insert every observation of that day into the database.

    :param query_date: day to import, formatted "%Y-%m-%d"
    :param test_api: MyApi client used to fetch the sensor data
    :param pg_session: open SQLAlchemy session (caller commits)
    :param dataset: Dataset the new observations belong to
    :param sensor: sensor identifier queried on the remote API
    '''
    query_date_obj = datetime.strptime(query_date, "%Y-%m-%d")
    data = test_api.getSensorData(sensor, query_date)
    observation_array = (data['FeatureCollection']
                         ['Features'][0]['geometry']['properties'][0])
    # print(observation_array)

    # continue numbering after the highest existing primary key
    max_id = pg_session.query(func.max(Observation.id)).scalar()
    if max_id is None:
        max_id = -1

    for observation_json in observation_array:
        ob_date_time = observation_json.get('DateTime')
        datetime_obj = datetime.strptime(ob_date_time, "%Y-%m-%dT%H:%M:%S.%fZ")
        # the API may return readings outside the requested day — skip them
        if datetime_obj.date() != query_date_obj.date():
            continue
        ob_value = observation_json.get('Value')
        if ob_value is None:
            continue
        max_id = create_observation(
            observation_json, pg_session, max_id, dataset)
    # pg_session.commit()
    print("observations for date " + query_date + " successfully imported\n")
2022-03-02 16:17:38 +00:00
|
|
|
|
|
|
|
|
2022-03-03 14:55:40 +00:00
|
|
|
def create_observation(observation_json: ObservationSchema, db_session, max_id, dataset: Dataset):
    """
    Insert a single observation into the database unless an observation
    with the same value_identifier already exists.

    :param observation_json: raw observation dict as delivered by the API
    :param db_session: open SQLAlchemy session (caller commits)
    :param max_id: highest Observation.id assigned so far
    :param dataset: Dataset the new observation belongs to
    :return: the (possibly incremented) highest assigned Observation.id
    """
    ob_id: str = str(observation_json.get('id'))

    # Observation instance or None — one_or_none() never raises for "not found"
    existing_observation = (
        db_session.query(Observation)
        .filter(Observation.value_identifier == ob_id)
        .one_or_none()
    )

    # Can we insert this observation?
    if existing_observation is None:
        max_id += 1
        # deserialize the JSON payload into an Observation instance
        schema = ObservationSchema()
        new_observation: Observation = schema.load(observation_json)
        new_observation.id = max_id
        new_observation.sta_identifier = str(uuid.uuid4())
        # sampling interval collapses to the single result instant
        new_observation.sampling_time_start = new_observation.result_time
        new_observation.sampling_time_end = new_observation.result_time
        new_observation.fk_dataset_id = dataset.id

        # stage the observation; the caller is responsible for committing
        db_session.add(new_observation)
        return max_id
    else:
        # observation exists already — report and keep max_id unchanged
        print(409, f'Observation {ob_id} exists already')
        return max_id
|
2022-02-22 15:36:49 +00:00
|
|
|
|
|
|
|
|
|
|
|
def create(person_json: PersonSchema):
    """
    Create a new person in the people structure based on the
    passed-in person data.

    :param person_json: person payload to deserialize and persist
    :return: (data, 201) on success, None when the person exists already
    """
    login = person_json.get('login')
    db_session = create_pg_session()

    # Person instance or None — one_or_none() never raises for "not found"
    existing_person = (
        db_session.query(Person)
        .filter(Person.login == login)
        .one_or_none()
    )

    # Can we insert this person?
    if existing_person is None:
        # Create a person instance using the schema and the passed-in data
        schema = PersonSchema()
        # deserialize to a Person object
        new_person: Person = schema.load(person_json)

        # Add the person to the database
        db_session.add(new_person)
        db_session.commit()

        # Serialize and return the newly created person in the response
        data = schema.dump(new_person)
        return data, 201
    else:
        # person exists already — nothing is created
        print(409, f'Person {login} exists already')
        return None
|
|
|
|
|
|
|
# Script entry point: run the import when executed directly.
if __name__ == "__main__":
    main()
|