diff --git a/db/models.py b/db/models.py
new file mode 100644
index 0000000..5054af4
--- /dev/null
+++ b/db/models.py
@@ -0,0 +1,265 @@
+'''
+Tutorial link: https://docs.sqlalchemy.org/en/latest/orm/tutorial.html
+Sqlalchemy version: 1.4.31
+Python version: 3.10
+'''
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+from datetime import datetime
+# from config import db, ma
+
+import os
+from sqlalchemy import (Column, Integer, Sequence,
+                        String, DateTime, ForeignKey, Numeric, SmallInteger, create_engine)
+from sqlalchemy.ext.declarative import declarative_base
+from sqlalchemy.orm import session, relationship, sessionmaker
+# from marshmallow import Schema
+from marshmallow_sqlalchemy import SQLAlchemySchema, SQLAlchemyAutoSchema
+from marshmallow import fields
+from dotenv import load_dotenv, find_dotenv
+# from db.pg_models import create_pg_session
+# import sqlalchemy.orm.session
+
+Base = declarative_base()
+
+
+def create_pg_session() -> session.Session:
+    """ create a postgres db session """
+    load_dotenv(find_dotenv())
+    dbschema = ''
+    db_user = os.environ.get("POSTGIS_DBUSER")
+    db_password = os.environ.get("POSTGIS_DBPASSWORD")
+    db_url = os.environ.get("POSTGIS_DBURL")
+    engine = create_engine(
+        "postgresql+psycopg2://" + db_user + ":" + db_password + "@" + db_url,
+        connect_args={'options': f'-csearch_path={dbschema}'},
+        isolation_level="READ UNCOMMITTED")
+    session_maker = sessionmaker(bind=engine)
+    _session = session_maker()
+
+    # Base.metadata.create_all(engine)
+    return _session
+
+class Platform(Base):
+    """ Platform class """
+    __tablename__ = 'platform'
+    __table_args__ = {"schema": "gba"}
+
+    id = Column('platform_id', Integer, primary_key=True)
+    identifier = Column('identifier', String)
+    sta_identifier = Column('sta_identifier', String)
+    name = Column('name', String)
+    # datasets = relationship('Dataset')
+    datasets = relationship('Dataset', back_populates="platform", lazy=True)
+
+    def __repr__(self):
+        return f'Platform {self.name}'
+
+
+class Phenomenon(Base):
+    """ Phenomenon class """
+    __tablename__ = 'phenomenon'
+    __table_args__ = {"schema": "gba"}
+
+    id = Column('phenomenon_id', Integer, primary_key=True)
+    name = Column('name', String)
+    sta_identifier = Column('sta_identifier', String)
+    # datasets = relationship('Dataset')
+    datasets = relationship('Dataset', back_populates="phenomenon", lazy=True)
+
+    def __repr__(self):
+        return f'Phenomenon {self.name}'
+
+
+class Procedure(Base):
+    """ Procedure class """
+    __tablename__ = 'procedure'
+    __table_args__ = {"schema": "gba"}
+
+    id = Column('procedure_id', Integer, primary_key=True)
+    name = Column('name', String)
+    sta_identifier = Column('sta_identifier', String)
+    # datasets = relationship('Dataset')
+    datasets = relationship('Dataset', back_populates="procedure", lazy=True)
+
+    def __repr__(self):
+        return f'Procedure {self.name}'
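
A minimal usage sketch for the session factory above, assuming the `POSTGIS_*` variables in `.env` point at a reachable database with the `gba` schema; this is not part of the patch, just an illustration:

```python
# Sketch: open a session via create_pg_session() and walk the ORM relationships.
from db.models import create_pg_session, Platform

pg_session = create_pg_session()
try:
    # back_populates on Platform.datasets lets us go from a platform
    # to its datasets without writing an explicit join
    for platform in pg_session.query(Platform).all():
        print(platform.identifier, [d.name for d in platform.datasets])
finally:
    pg_session.close()
```
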
+
+
+class Dataset(Base):
+    """ Dataset class """
+    __tablename__ = 'dataset'
+    __table_args__ = {"schema": "gba"}
+
+    id = Column('dataset_id', Integer, primary_key=True)
+    name = Column('name', String)
+    is_published = Column('is_published', SmallInteger)
+    is_hidden = Column('is_hidden', SmallInteger)
+    dataset_type = Column('dataset_type', String)
+    observation_type = Column('observation_type', String)
+    value_type = Column('value_type', String)
+
+    last_time = Column('last_time', DateTime)
+    last_value = Column('last_value', Numeric(20, 10))
+    fk_last_observation_id = Column(
+        'fk_last_observation_id',
+        Integer
+    )
+    # last_observation = relationship(
+    #     "Observation", foreign_keys=[fk_last_observation_id])
+
+    first_time = Column('first_time', DateTime)
+    first_value = Column('first_value', Numeric(20, 10))
+    fk_first_observation_id = Column(
+        'fk_first_observation_id',
+        Integer
+    )
+    # first_observation = relationship("Observation", foreign_keys=[
+    #     fk_first_observation_id])
+
+    observations = relationship(
+        'Observation', back_populates='dataset', lazy=True)
+
+    fk_phenomenon_id = Column(
+        'fk_phenomenon_id', Integer, ForeignKey('gba.phenomenon.phenomenon_id'), nullable=False)
+    # phenomenon = relationship("Phenomenon", lazy="joined", foreign_keys=[fk_phenomenon_id])
+    phenomenon = relationship(
+        "Phenomenon", back_populates="datasets", lazy="joined")
+
+    fk_platform_id = Column('fk_platform_id', Integer, ForeignKey(
+        'gba.platform.platform_id'), nullable=True)
+    platform = relationship(
+        "Platform", back_populates="datasets", lazy="joined")
+
+    fk_format_id = Column('fk_format_id', Integer, ForeignKey(
+        'gba.format.format_id'), nullable=True)
+    format = relationship(
+        "Format", back_populates="datasets", lazy="joined")
+
+    fk_procedure_id = Column('fk_procedure_id', Integer, ForeignKey(
+        'gba.procedure.procedure_id'), nullable=False)
+    # procedure = relationship("Procedure", lazy="joined")
+    procedure = relationship(
+        "Procedure", back_populates="datasets", lazy="joined")
+
+def new_id_factory():
+    ''' compute the next observation id as MAX(observation_id) + 1 '''
+    dbschema = ''
+    db_user = os.environ.get("POSTGIS_DBUSER")
+    db_password = os.environ.get("POSTGIS_DBPASSWORD")
+    db_url = os.environ.get("POSTGIS_DBURL")
+    engine = create_engine(
+        "postgresql+psycopg2://" + db_user + ":" + db_password + "@" + db_url,
+        connect_args={'options': f'-csearch_path={dbschema}'},
+        isolation_level="READ UNCOMMITTED")
+    result = engine.execute('SELECT MAX(observation_id) FROM gba.observation')
+    mytable_max_id = result.first().max
+    if mytable_max_id is None:
+        mytable_max_id = 0
+    mytable_max_id += 1
+    return mytable_max_id
+
+observation_seq = Sequence('observation_seq', schema="gba")  # define sequence explicitly
+class Observation(Base):
+    """ Observation class """
+    __tablename__ = 'observation'
+    __table_args__ = {"schema": "gba"}
+
+    # id = Column('observation_id', Integer, primary_key=True)
+    id = Column('observation_id',
+                Integer,
+                observation_seq,
+                primary_key=True,
+                server_default=observation_seq.next_value())
+    name = Column('name', String)
+    value_type = Column('value_type', String, default="quantity")
+    # pitch = Column('PITCH', String)
+    # roll = Column('ROLL', String)
+    sampling_time_start = Column('sampling_time_start', DateTime)
+    sampling_time_end = Column('sampling_time_end', DateTime)
+    result_time = Column('result_time', DateTime)
+    sta_identifier = Column('sta_identifier', String)
+    value_identifier = Column('value_identifier', String)
+    value_quantity = Column('value_quantity', Numeric(20, 10))
+    value_text = Column('value_text', String)
+
+    fk_dataset_id = Column(Integer, ForeignKey(
+        'gba.dataset.dataset_id'), nullable=False)
+    dataset = relationship("Dataset", back_populates="observations")
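
The sequence-backed primary key is the interesting part here: unlike `new_id_factory()`, whose `SELECT MAX(observation_id) + 1` can race under concurrent writers, `server_default=observation_seq.next_value()` lets Postgres assign ids atomically. A minimal sketch of how that behaves, assuming `gba.observation_seq` exists in the database and at least one dataset row is present:

```python
# Sketch: insert an Observation without an id; Postgres fills it from the sequence.
from db.models import Observation, Dataset, create_pg_session

pg_session = create_pg_session()
dataset = pg_session.query(Dataset).first()  # any existing dataset
obs = Observation(sta_identifier="demo-obs", value_type="text",
                  value_text="demo", fk_dataset_id=dataset.id)
pg_session.add(obs)
pg_session.commit()   # INSERT omits observation_id; the server default applies
print(obs.id)         # value drawn from nextval('gba.observation_seq')
pg_session.close()
```
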
+
+
+class ObservationSchema(SQLAlchemySchema):
+    """ Observation schema """
+    DateTime = fields.DateTime(attribute='result_time')  # Or vice-versa
+    # value_quantity = fields.Integer(attribute='Value')
+    # id = fields.Integer(attribute='id')
+    Value = fields.Integer(attribute='value_quantity')
+    id = fields.Integer(attribute='value_identifier')
+    # sta_identifier = fields.String(default=uuid.uuid4()),
+
+    class Meta:
+        """ Meta options for ObservationSchema """
+        model = Observation
+        include_relationships = True
+        load_instance = True
+        # pg_session: session = create_pg_session()
+        sqla_session: session.Session = create_pg_session()
+
+class Format(Base):
+    """ Format class """
+    __tablename__ = 'format'
+    __table_args__ = {"schema": "gba"}
+    id = Column('format_id', Integer, primary_key=True)
+    definition = Column('definition', String(255), index=True)
+
+    datasets = relationship('Dataset', back_populates="format", lazy=True)
+
+class Person(Base):
+    """ Person class """
+    __tablename__ = 'accounts'
+    __table_args__ = {"schema": "gba"}
+    person_id = Column('id', Integer, primary_key=True)
+    lname = Column('last_name', String(255), index=True)
+    fname = Column('first_name', String(255))
+    login = Column(String(255))
+    timestamp = Column('updated_at', DateTime, default=datetime.utcnow,
+                       onupdate=datetime.utcnow)
+
+    def __repr__(self):
+        return f"<Person {self.login}>"
+
+
+class PersonSchema(SQLAlchemyAutoSchema):
+    """ Person schema """
+    class Meta:
+        """ Meta options for PersonSchema """
+        model = Person
+        include_relationships = True
+        load_instance = True
+        # pg_session: session = create_pg_session()
+        sqla_session: session.Session = create_pg_session()
+
+
+def create_db():
+    """ drop (and optionally re-create) the postgres tables """
+    # db_url = 'sqlite:///db.sqlite'
+    # engine = create_engine(db_url, echo = True )
+    # Base.metadata.drop_all(bind=engine)
+    # Base.metadata.create_all(engine)
+    load_dotenv("D:\\Software\\geomon\\.env")
+    dbschema = ''
+    db_user = os.environ.get("POSTGIS_DBUSER")
+    db_password = os.environ.get("POSTGIS_DBPASSWORD")
+    db_url = os.environ.get("POSTGIS_DBURL")
+    engine = create_engine(
+        "postgresql+psycopg2://" + db_user + ":" + db_password + "@" + db_url,
+        connect_args={'options': f'-csearch_path={dbschema}'},
+        isolation_level="READ UNCOMMITTED", echo=True)
+    # session_maker = sessionmaker(bind=engine)
+    # session = session_maker()
+    Base.metadata.drop_all(bind=engine)
+    # Base.metadata.create_all(engine)
+
+
+if __name__ == "__main__":
+    create_db()
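
With `load_instance = True`, `ObservationSchema.load()` returns a mapped `Observation` rather than a plain dict. A sketch of the round trip, assuming the module-level `sqla_session` could be created (the database is reachable at import time); the payload keys follow the schema's field names above:

```python
# Sketch: deserialize JSON into an Observation and serialize it back.
from db.models import Observation, ObservationSchema

schema = ObservationSchema()
payload = {"DateTime": "2022-03-01T12:00:00", "Value": 42, "id": "7"}

# 'DateTime', 'Value' and 'id' map onto result_time, value_quantity
# and value_identifier via the attribute= arguments of the fields
observation: Observation = schema.load(payload)
print(observation.result_time, observation.value_quantity)

# dump() applies the same mapping in the other direction
print(schema.dump(observation))
```
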
diff --git a/gschliefgraben_glasfaser/main.py b/gschliefgraben_glasfaser/main.py
index c9fa941..7a552c3 100644
--- a/gschliefgraben_glasfaser/main.py
+++ b/gschliefgraben_glasfaser/main.py
@@ -20,7 +20,7 @@ from dotenv import load_dotenv, find_dotenv
 from sqlalchemy.orm import session
 from sqlalchemy import func, asc, desc
 # from db.pg_models import Platform
-from gschliefgraben_glasfaser.models import (
+from db.models import (
     ObservationSchema, Person, PersonSchema, Observation,
     create_pg_session, Dataset, Procedure, Phenomenon, Platform)
 from gschliefgraben_glasfaser.my_api import MyApi
@@ -206,7 +206,7 @@ def create_observation(observation_json: ObservationSchema, db_session,
     schema = ObservationSchema()
     # deserialize to python object
     new_observation: Observation = schema.load(observation_json)
-    new_observation.id = max_id
+    # new_observation.id = max_id
     new_observation.sta_identifier = str(uuid.uuid4())
     new_observation.sampling_time_start = new_observation.result_time
     new_observation.sampling_time_end = new_observation.result_time
@@ -223,7 +223,7 @@ def create_observation(observation_json: ObservationSchema, db_session,
         return max_id
     # Otherwise, nope, person exists already
     else:
-        print(409, f'Observation {ob_id} exists already')
+        # print(409, f'Observation {ob_id} exists already')
         return max_id
 
 
@@ -269,6 +269,6 @@ def create(person_json: PersonSchema):
 
 if __name__ == "__main__":
     load_dotenv(find_dotenv())
-    print('sensors: {}'.format(os.environ.get(
-        'GLASFASER_GSCHLIEFGRABEN_SENSORS', [])))
+    sensor_list1 = os.environ.get('GLASFASER_GSCHLIEFGRABEN_SENSORS', [])
+    print(f'sensors: {sensor_list1} .')
     main()
diff --git a/gschliefgraben_glasfaser/update_daily_cron.py b/gschliefgraben_glasfaser/update_daily_cron.py
index 44615d0..b53bff7 100644
--- a/gschliefgraben_glasfaser/update_daily_cron.py
+++ b/gschliefgraben_glasfaser/update_daily_cron.py
@@ -13,7 +13,7 @@ from dotenv import load_dotenv, find_dotenv
 from sqlalchemy.orm import session
 from sqlalchemy import func, asc, desc
 # from db.pg_models import Platform
-from gschliefgraben_glasfaser.models import (
+from db.models import (
     ObservationSchema, Observation,
     create_pg_session, Dataset, Procedure, Phenomenon, Platform)
 from gschliefgraben_glasfaser.my_api import MyApi
@@ -149,7 +149,7 @@ def create_observation(observation_json: ObservationSchema, db_session, max_id,
     schema = ObservationSchema()
     # deserialize to python object
     new_observation: Observation = schema.load(observation_json)
-    new_observation.id = max_id
+    # new_observation.id = max_id
     new_observation.sta_identifier = str(uuid.uuid4())
     new_observation.sampling_time_start = new_observation.result_time
     new_observation.sampling_time_end = new_observation.result_time
@@ -172,6 +172,6 @@ def create_observation(observation_json: ObservationSchema, db_session, max_id,
 
 if __name__ == "__main__":
     load_dotenv(find_dotenv())
-    print('sensors: {}'.format(os.environ.get(
-        'GLASFASER_GSCHLIEFGRABEN_SENSORS', [])))
+    sensor_list1 = os.environ.get('GLASFASER_GSCHLIEFGRABEN_SENSORS', [])
+    print(f'sensors: {sensor_list1} .')
     main()
diff --git a/notes.txt b/notes.txt
index 3a1da10..68f0214 100644
--- a/notes.txt
+++ b/notes.txt
@@ -42,3 +42,20 @@ https://medium.com/dataexplorations/sqlalchemy-orm-a-more-pythonic-way-of-intera
 
 https://stackoverflow.com/questions/51737548/how-to-set-primary-key-auto-increment-in-sqlalchemy-orm
 
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+-->
\ No newline at end of file
diff --git a/pechgraben_images/InsertPechgraben.xml b/pechgraben_images/InsertPechgraben.xml
index 41a72e5..8516902 100644
--- a/pechgraben_images/InsertPechgraben.xml
+++ b/pechgraben_images/InsertPechgraben.xml
@@ -10,6 +10,12 @@
 {procedure_identifier}
+
+
+longName
+{procedure_name}
+
+
 
 shortName
 {procedure_name}
 
 
@@ -28,6 +34,20 @@
 
+
+
+
+true
+
+
+
+
+false
+
+
+
+
+
 featuresOfInterest
 
 
 
 
@@ -48,10 +68,10 @@
 
 
-
+
 
-
-
+
+
 
 
 
 
@@ -61,19 +81,19 @@
 
 
-
+
 {cord_x}
 
 
-
+
 {cord_y}
 
 
-
+
 {height}
 
 
 
 
diff --git a/pechgraben_images/InsertSensorTest.xml b/pechgraben_images/InsertSensorSoap.xml
similarity index 97%
rename from pechgraben_images/InsertSensorTest.xml
rename to pechgraben_images/InsertSensorSoap.xml
index c536174..423f4c6 100644
--- a/pechgraben_images/InsertSensorTest.xml
+++ b/pechgraben_images/InsertSensorSoap.xml
@@ -111,8 +111,8 @@
 
 
 http://www.opengis.net/def/property/humanVisualPerception
-http://www.opengis.net/def/observationType/OGCOM/2.0/OM_CategoryObservation
-http://www.opengis.net/def/samplingFeatureType/OGCOM/2.0/SF_SamplingPoint
+http://www.opengis.net/def/observationType/OGC-OM/2.0/OM_CategoryObservation
+http://www.opengis.net/def/samplingFeatureType/OGC-OM/2.0/SF_SamplingPoint
 
 
 
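
The `main.py` and `update_daily_cron.py` changes above read `GLASFASER_GSCHLIEFGRABEN_SENSORS` straight from the environment, which yields a single string. A sketch of turning it into a list, assuming the variable holds comma-separated sensor names (the format is an assumption; the example names are placeholders):

```python
# Sketch: parse the sensor env var into a list of names.
import os
from dotenv import load_dotenv, find_dotenv

load_dotenv(find_dotenv())
raw = os.environ.get('GLASFASER_GSCHLIEFGRABEN_SENSORS', '')
# e.g. "inclino1_02,inclino1_05" -> ['inclino1_02', 'inclino1_05']
sensor_list = [name.strip() for name in raw.split(',') if name.strip()]
print(f'sensors: {sensor_list}')
```
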
= Procedure("camera2", "Procedure camera2 am Pechgraben") foi = FoI("degree", "m", (14.54621, 47.92861, 0.0), - "pechgraben", "Katastropheneinsatz in Pechgraben") + "pechgraben2", "Katastropheneinsatz in Pechgraben2") sensor_type = SensorType("camera") - # post_data = insert_sensor(offering, procedure, foi, sensor_type) - # print(post_data) - # headers = {'Accept': 'application/json'} - # request = requests.post(sos_url, headers=headers, json=post_data) - # print(request.text) - headers = {'Content-Type': 'application/soap+xml'} # set what your server accepts - xml = get_xml(offering, procedure, foi, sensor_type) - # print(xml) - request = requests.post(sos_url, data = xml, headers=headers) + post_data = insert_sensor(offering, procedure, foi, sensor_type) + print(post_data) + headers = {'Accept': 'application/json'} + request = requests.post(sos_url, headers=headers, json=post_data) print(request.text) + + + # headers = {'Content-Type': 'application/soap+xml'} # set what your server accepts + # xml = get_xml(offering, procedure, foi, sensor_type) + # request = requests.post(sos_url, data = xml, headers=headers) + # print(request.text) + # { # "request" : "InsertSensor", # "version" : "2.0.0", @@ -147,7 +149,7 @@ def get_xml(offering, procedure, foi, sensor_type): unit_name = '\"' + procedure.name + '\"' # double quoted string # unit = omType # one of the MO measurement types xml = f'http://www.opengis.net/sensorML/2.0{procedure_identifier}longName{procedure_name}shortName{procedure_name}{offering_label}{offering_name}truefalsefeaturesOfInterest{feature_id}{feature_name}{coordinates}{cord_x}{cord_y}{height}http://www.opengis.net/def/property/humanVisualPerceptionhttp://www.opengis.net/def/observationType/OGCOM/2.0/OM_CategoryObservationhttp://www.opengis.net/def/samplingFeatureType/OGCOM/2.0/SF_SamplingPoint' - return xml + return xml def insert_sensor(offering, procedure, foi, sensor_type): @@ -220,12 +222,12 @@ def insert_sensor(offering, procedure, foi, sensor_type): "service": "SOS", "version": "2.0.0", "procedureDescriptionFormat": "http://www.opengis.net/sensorml/2.0", - "procedureDescription": f'{procedure_identifier}{procedure_identifier} Procedure {procedure_name} {procedure_name}{offering_label}{offering_name}featuresOfInterest{feature_id}{feature_name}{coordinates}{cord_x}{cord_y}{height}', + "procedureDescription": f'{procedure_identifier}longName{procedure_name}shortName{procedure_name}{offering_label}{offering_name}truefalsefeaturesOfInterest{feature_id}{feature_name}{coordinates}{cord_x}{cord_y}{height}', "observableProperty": [ - "http://www.opengis.net/def/property/humanVisualPerception" + "HumanVisualPerception" ], "observationType": [ - "http://www.opengis.net/def/observationType/OGC-OM/2.0/OM_CategoryObservation" + "http://www.opengis.net/def/observationType/OGC-OM/2.0/OM_TextObservation" ], "featureOfInterestType": "http://www.opengis.net/def/samplingFeatureType/OGC-OM/2.0/SF_SamplingPoint" } diff --git a/pechgraben_images/import_image_observations.py b/pechgraben_images/import_image_observations.py new file mode 100644 index 0000000..f316b36 --- /dev/null +++ b/pechgraben_images/import_image_observations.py @@ -0,0 +1,165 @@ +''' +Sqlalchemy version: 1.2.15 +Python version: 3.7 +''' + +import os +import uuid +from datetime import datetime +from sqlalchemy.orm import session +from sqlalchemy import asc, desc +from exif import Image +from db.models import ( + create_pg_session, Observation, + Dataset, Procedure, Phenomenon, Platform, Format) + +def main(): + ''' main 
diff --git a/pechgraben_images/import_image_observations.py b/pechgraben_images/import_image_observations.py
new file mode 100644
index 0000000..f316b36
--- /dev/null
+++ b/pechgraben_images/import_image_observations.py
@@ -0,0 +1,165 @@
+'''
+Sqlalchemy version: 1.2.15
+Python version: 3.7
+'''
+
+import os
+import uuid
+from datetime import datetime
+from sqlalchemy.orm import session
+from sqlalchemy import asc, desc
+from exif import Image
+from db.models import (
+    create_pg_session, Observation,
+    Dataset, Procedure, Phenomenon, Platform, Format)
+
+def main():
+    ''' main method '''
+    pg_session: session.Session = create_pg_session()
+    platform_sta_identifier = "pechgraben_images"
+    sensor = "camera2"
+
+    pg_query = pg_session.query(Dataset) \
+        .join(Procedure) \
+        .join(Phenomenon) \
+        .filter(Procedure.sta_identifier == sensor.lower())
+    visual_perception_dataset: Dataset = pg_query.filter(
+        Phenomenon.sta_identifier == "HumanVisualPerception").first()
+    if not visual_perception_dataset:
+        print("Sensor " + sensor + " has not been created yet!")
+        exit()
+    if not visual_perception_dataset.is_published:
+        visual_perception_dataset.is_published = 1
+        visual_perception_dataset.is_hidden = 0
+        visual_perception_dataset.dataset_type = "timeseries"
+        visual_perception_dataset.observation_type = "simple"
+        visual_perception_dataset.value_type = "text"
+        pg_session.commit()
+
+    platform_exists: bool = pg_session.query(Platform.id).filter_by(
+        sta_identifier=platform_sta_identifier).scalar() is not None
+    if platform_exists:
+        sensor_platform = pg_session.query(Platform.id) \
+            .filter(Platform.sta_identifier == platform_sta_identifier) \
+            .first()
+        visual_perception_dataset.fk_platform_id = sensor_platform.id
+
+    format_exists: bool = pg_session.query(Format.id).filter_by(
+        definition="http://www.opengis.net/def/observationType/OGC-OM/2.0/OM_TextObservation"
+    ).scalar() is not None
+    if format_exists:
+        sensor_format = pg_session.query(Format.id) \
+            .filter(Format.definition == "http://www.opengis.net/def/observationType/OGC-OM/2.0/OM_TextObservation") \
+            .first()
+        visual_perception_dataset.fk_format_id = sensor_format.id
+
+    # import all the images for the given sensor names
+    import_images(visual_perception_dataset, pg_session)
+
+    # save first and last values of all the observations
+    first_observation: Observation = pg_session.query(Observation) \
+        .filter(Observation.fk_dataset_id == visual_perception_dataset.id) \
+        .order_by(asc('sampling_time_start')) \
+        .first()
+    if first_observation is not None:
+        visual_perception_dataset.first_time = first_observation.sampling_time_start
+        # visual_perception_dataset.first_value = first_observation.value_quantity
+        visual_perception_dataset.fk_first_observation_id = first_observation.id
+
+    last_observation: Observation = pg_session.query(Observation) \
+        .filter(Observation.fk_dataset_id == visual_perception_dataset.id) \
+        .order_by(desc('sampling_time_start')) \
+        .first()
+    if last_observation is not None:
+        visual_perception_dataset.last_time = last_observation.sampling_time_start
+        # visual_perception_dataset.last_value = last_observation.value_quantity
+        visual_perception_dataset.fk_last_observation_id = last_observation.id
+
+    pg_session.commit()
+    pg_session.close()
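
The first/last queries above pass string column names to `asc()`/`desc()`, which works but defers typo detection to SQL execution. The same query written against the mapped attribute, shown here for the first observation only:

```python
# Equivalent to .order_by(asc('sampling_time_start')) above, but a typo in the
# attribute name would fail immediately in Python instead of at SQL time.
first_observation = pg_session.query(Observation) \
    .filter(Observation.fk_dataset_id == visual_perception_dataset.id) \
    .order_by(Observation.sampling_time_start.asc()) \
    .first()
```
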
+
+def import_images(dataset: Dataset, pg_session):
+    ''' import all images from the folder and create one observation each '''
+    folder_path = 'C:/Users/kaiarn/Documents/Fotos'
+    # img_filename = '_DSC9548.JPG'
+    # img_path = f'{folder_path}/{img_filename}'
+
+    # Get the list of image files in the directory that exifread supports
+    directory = os.listdir(folder_path)
+    for file_name in directory:
+        if file_name.endswith(('jpg', 'JPG', 'png', 'PNG', 'tiff', 'TIFF')):
+            file_path = os.path.join(folder_path, file_name)
+            # print(file_path)
+            with open(file_path, 'rb') as img_file:
+                img: Image = Image(img_file)
+            if img.has_exif:
+                info = f" has the EXIF {img.exif_version}"
+            else:
+                info = "does not contain any EXIF information"
+            # print(f"Image {img_file.name}: {info}")
+
+            # Original datetime that image was taken (photographed)
+            # print(f'DateTime (Original): {img.get("datetime_original")}')
+            datetime_original = img.get("datetime_original")
+            # Grab the date
+            date_obj = datetime.strptime(
+                datetime_original, '%Y:%m:%d %H:%M:%S')
+            # print(date_obj)
+            create_observation(dataset, date_obj, file_name)
+
+    pg_session.commit()
+
+def create_observation(dataset: Dataset, datetime_original, file_name):
+    """
+    This function creates a new observation for the given dataset
+    based on the passed-in image timestamp and file name
+    :param dataset: dataset the observation belongs to
+    :param datetime_original: EXIF timestamp of the image
+    :param file_name: image file name, used to build the observation URL
+    """
+
+    new_observation: Observation = Observation()
+    # new_observation.id = max_id
+    new_observation.sta_identifier = str(uuid.uuid4())
+    new_observation.result_time = datetime_original
+    new_observation.sampling_time_start = new_observation.result_time
+    new_observation.sampling_time_end = new_observation.result_time
+    new_observation.value_type = "text"
+    new_observation.value_text = "https://geomon.geologie.ac.at/images/" + file_name
+    new_observation.fk_dataset_id = dataset.id
+
+    # Add the observation to the dataset; it is flushed on commit
+    dataset.observations.append(new_observation)
+    # db_session.commit()
+
+
+if __name__ == "__main__":
+    # load_dotenv(find_dotenv())
+    # print('sensors: {}'.format(os.environ.get(
+    #     'GLASFASER_GSCHLIEFGRABEN_SENSORS', [])))
+    main()
+
+# print(img.list_all())
+# print(img.has_exif)
+# # Make of device which captured image: NIKON CORPORATION
+# print(f'Make: {img.get("make")}')
+
+# # Model of device: NIKON D7000
+# print(f'Model: {img.get("model")}')
+
+# # Software involved in uploading and digitizing image: Ver.1.04
+# print(f'Software: {img.get("software")}')
+
+# # Name of photographer who took the image: not defined
+# print(f'Artist: {img.get("artist")}')
+
+# # Original datetime that image was taken (photographed)
+# print(f'DateTime (Original): {img.get("datetime_original")}')
+
+# # Details of flash function
+# print(f'Flash Details: {img.get("flash")}')
+
+# print(f"Coordinates - Image")
+# print("---------------------")
+# print(f"Latitude: {img.copyright} {img.get('gps_latitude_ref')}")
+# print(f"Longitude: {img.get('gps_longitude')} {img.get('gps_longitude_ref')}\n")
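
Note that `create_observation()` never touches the session directly: appending to `dataset.observations` stages the row through the `back_populates` relationship, and the single `pg_session.commit()` in `import_images()` flushes every staged observation at once. A sketch of the two equivalent ways to stage a row, using the names from the code above with a placeholder file name:

```python
# Sketch: staging a new Observation; the relationship append is what the
# importer uses, session.add() would work too if fk_dataset_id is set.
new_observation = Observation(sta_identifier=str(uuid.uuid4()),
                              value_type="text",
                              value_text="https://geomon.geologie.ac.at/images/demo.jpg",
                              fk_dataset_id=dataset.id)
dataset.observations.append(new_observation)   # staged via the relationship
# pg_session.add(new_observation)              # equivalent alternative
pg_session.commit()                            # one flush for all staged rows
```
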
diff --git a/pechgraben_images/import_images.py b/pechgraben_images/import_images.py
deleted file mode 100644
index 767f2f0..0000000
--- a/pechgraben_images/import_images.py
+++ /dev/null
@@ -1,78 +0,0 @@
-'''
-Sqlalchemy version: 1.2.15
-Python version: 3.7
-'''
-
-import os
-from datetime import datetime
-from exif import Image
-
-
-def main():
-    ''' main method '''
-    folder_path = 'C:/Users/kaiarn/Documents/Fotos'
-    # img_filename = '_DSC9548.JPG'
-    # img_path = f'{folder_path}/{img_filename}'
-
-    # Get the list of image files in the directory that exifread supports
-    directory = os.listdir(folder_path)
-
-    for files in directory:
-        if files.endswith(('jpg', 'JPG', 'png', 'PNG', 'tiff', 'TIFF')):
-            file_path = os.path.join(folder_path, files)
-            # print(file_path)
-            img_file = open(file_path, 'rb')
-            img: Image = Image(img_file)
-            if img.has_exif:
-                info = f" has the EXIF {img.exif_version}"
-            else:
-                info = "does not contain any EXIF information"
-            print(f"Image {img_file.name}: {info}")
-
-            # Original datetime that image was taken (photographed)
-            # print(f'DateTime (Original): {img.get("datetime_original")}')
-            datetime_original = img.get("datetime_original")
-            # print(datetime_original)
-            # Grab the date
-            date_obj = datetime.strptime(
-                datetime_original, '%Y:%m:%d %H:%M:%S')
-            print(date_obj)
-            # print(f"Longitude: {img.get('gps_longitude')} {img.get('gps_longitude_ref')}\n")
-
-    # with open(img_path, 'rb') as img_file:
-    #     img = Image(img_file)
-    #     if img.has_exif:
-    #         info = f" has the EXIF {img.exif_version}"
-    #     else:
-    #         info = "does not contain any EXIF information"
-    #     print(f"Image {img_file.name}: {info}")
-
-    # print(img.list_all())
-    # print(img.has_exif)
-    # # Make of device which captured image: NIKON CORPORATION
-    # print(f'Make: {img.get("make")}')
-
-    # # Model of device: NIKON D7000
-    # print(f'Model: {img.get("model")}')
-
-    # # Software involved in uploading and digitizing image: Ver.1.04
-    # print(f'Software: {img.get("software")}')
-
-    # # Name of photographer who took the image: not defined
-    # print(f'Artist: {img.get("artist")}')
-
-    # # Original datetime that image was taken (photographed)
-    # print(f'DateTime (Original): {img.get("datetime_original")}')
-
-    # # Details of flash function
-    # print(f'Flash Details: {img.get("flash")}')
-
-    # print(f"Coordinates - Image")
-    # print("---------------------")
-    # print(f"Latitude: {img.copyright} {img.get('gps_latitude_ref')}")
-    # print(f"Longitude: {img.get('gps_longitude')} {img.get('gps_longitude_ref')}\n")
-if __name__ == "__main__":
-    # load_dotenv(find_dotenv())
-    # print('sensors: {}'.format(os.environ.get(
-    #     'GLASFASER_GSCHLIEFGRABEN_SENSORS', [])))
-    main()
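
All of the scripts in this patch resolve database credentials through `load_dotenv()`. A hypothetical `.env` matching the variables read in `db/models.py`; the values are placeholders, and the host:port/dbname shape of `POSTGIS_DBURL` follows the string concatenation in `create_pg_session()`:

```
# Hypothetical .env consumed by load_dotenv(); names come from db/models.py,
# values are placeholders only.
POSTGIS_DBUSER=geomon
POSTGIS_DBPASSWORD=change-me
POSTGIS_DBURL=localhost:5432/geomon
# comma-separated sensor names (format assumption, see main.py)
GLASFASER_GSCHLIEFGRABEN_SENSORS=inclino1_02,inclino1_05
```
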