import type { HttpContextContract } from '@ioc:Adonis/Core/HttpContext';
import { Client } from '@opensearch-project/opensearch';
import User from 'App/Models/User';
import Dataset from 'App/Models/Dataset';
import DatasetIdentifier from 'App/Models/DatasetIdentifier';
import XmlModel from 'App/Library/XmlModel';
import { XMLBuilder } from 'xmlbuilder2/lib/interfaces';
import { create } from 'xmlbuilder2';
import { readFileSync } from 'fs';
import { transform } from 'saxon-js';
import type { ModelQueryBuilderContract } from '@ioc:Adonis/Lucid/Orm';
import { schema, CustomMessages } from '@ioc:Adonis/Core/Validator';
import { DateTime } from 'luxon';
import Index from 'App/Library/Utils/Index';
import { getDomain } from 'App/Utils/utility-functions';
import { DoiClient } from 'App/Library/Doi/DoiClient';
import DoiClientException from 'App/Exceptions/DoiClientException';
import Logger from '@ioc:Adonis/Core/Logger';
import { HttpException } from 'node-exceptions';

// Create a new instance of the OpenSearch client
const client = new Client({ node: 'http://localhost:9200' }); // replace with your OpenSearch endpoint

export default class DatasetsController {
    private proc: string;

    public messages: CustomMessages = {
        // 'required': '{{ field }} is required',
        // 'licenses.minLength': 'at least {{ options.minLength }} permission must be defined',
        'reviewer_id.required': 'reviewer_id must be defined',
        'publisher_name.required': 'publisher name must be defined',
    };

    constructor() {
        // Load the compiled XSLT stylesheet (saxon-js expects the SEF JSON as a string)
        this.proc = readFileSync('public/assets2/solr.sef.json', 'utf8');
        // this.proc = readFileSync('public/assets2/datasetxml2oai.sef.json', 'utf8');
    }

    // public async index({}: HttpContextContract) {}
    public async index({ auth, request, inertia }: HttpContextContract) {
        const user = (await User.find(auth.user?.id)) as User;
        const page = request.input('page', 1);
        let datasets: ModelQueryBuilderContract<typeof Dataset, Dataset> = Dataset.query();

        // if (request.input('search')) {
        //     // users = users.whereRaw('name like %?%', [request.input('search')])
        //     const searchTerm = request.input('search');
        //     datasets.where('name', 'ilike', `%${searchTerm}%`);
        // }

        if (request.input('sort')) {
            type SortOrder = 'asc' | 'desc' | undefined;
            let attribute = request.input('sort');
            let sortOrder: SortOrder = 'asc';
            // a leading '-' requests a descending sort, e.g. '-id'
            if (attribute.substr(0, 1) === '-') {
                sortOrder = 'desc';
                // attribute = substr(attribute, 1);
                attribute = attribute.substr(1);
            }
            datasets.orderBy(attribute, sortOrder);
        } else {
            // users.orderBy('created_at', 'desc');
            datasets.orderBy('id', 'asc');
        }

        // const users = await User.query().orderBy('login').paginate(page, limit);
        const myDatasets = await datasets
            .where('server_state', 'released')
            .orWhere((dQuery) => {
                dQuery
                    .whereIn('server_state', ['editor_accepted', 'rejected_reviewer', 'reviewed', 'published'])
                    .where('editor_id', user.id)
                    .doesntHave('identifier', 'and');
            })
            // .preload('identifier')
            .preload('titles')
            .preload('user', (query) => query.select('id', 'login'))
            .preload('editor', (query) => query.select('id', 'login'))
            .paginate(page, 10);

        return inertia.render('Editor/Dataset/Index', {
            datasets: myDatasets.serialize(),
            filters: request.all(),
            can: {
                receive: await auth.user?.can(['dataset-receive']),
                approve: await auth.user?.can(['dataset-approve']),
                edit: await auth.user?.can(['dataset-editor-update']),
                delete: await auth.user?.can(['dataset-editor-delete']),
                publish: await auth.user?.can(['dataset-publish']),
            },
        });
    }
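
    // Render the "receive" confirmation page for a released dataset (titles, descriptions and submitting user are preloaded).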
    public async receive({ request, inertia, response }: HttpContextContract) {
        const id = request.param('id');
        const dataset = await Dataset.query()
            .where('id', id)
            .preload('titles')
            .preload('descriptions')
            .preload('user', (builder) => {
                builder.select('id', 'login');
            })
            .firstOrFail();

        const validStates = ['released'];
        if (!validStates.includes(dataset.server_state)) {
            // session.flash('errors', 'Invalid server state!');
            return response
                .flash(
                    'warning',
                    `Invalid server state. Dataset with id ${id} cannot be received. Dataset has server state ${dataset.server_state}.`,
                )
                .redirect()
                .back();
        }

        return inertia.render('Editor/Dataset/Receive', {
            dataset,
        });
    }

    // Accept a released dataset: assign the current user as editor and set the state to 'editor_accepted'.
    public async receiveUpdate({ auth, request, response }) {
        const id = request.param('id');
        // const { id } = params;
        const dataset = await Dataset.findOrFail(id);

        const validStates = ['released'];
        if (!validStates.includes(dataset.server_state)) {
            // throw new Error('Invalid server state!');
            // return response.flash('warning', 'Invalid server state. Dataset cannot be released to editor').redirect().back();
            return response
                .flash(
                    'warning',
                    `Invalid server state. Dataset with id ${id} cannot be received by editor. Dataset has server state ${dataset.server_state}.`,
                )
                .redirect()
                .toRoute('editor.dataset.list');
        }

        dataset.server_state = 'editor_accepted';
        const user = (await User.find(auth.user?.id)) as User;
        // dataset.editor().associate(user).save();
        try {
            await dataset.related('editor').associate(user); // associate() already persists the dataset
            // await dataset.save();
            return response.toRoute('editor.dataset.list').flash('message', `You have accepted dataset ${dataset.id}!`);
        } catch (error) {
            // Handle any errors
            console.error(error);
            return response.status(500).json({ error: 'An error occurred while accepting the data.' });
        }
    }

    // Render the "approve" page where the editor picks a reviewer for the dataset.
    public async approve({ request, inertia, response }) {
        const id = request.param('id');
        // $dataset = Dataset::with('user:id,login')->findOrFail($id);
        const dataset = await Dataset.findOrFail(id);

        const validStates = ['editor_accepted', 'rejected_reviewer'];
        if (!validStates.includes(dataset.server_state)) {
            // session.flash('errors', 'Invalid server state!');
            return response
                .flash(
                    'warning',
                    `Invalid server state. Dataset with id ${id} cannot be approved. Dataset has server state ${dataset.server_state}.`,
                )
                .redirect()
                .back();
        }

        const reviewers = await User.query()
            .whereHas('roles', (builder) => {
                builder.where('name', 'reviewer');
            })
            .pluck('login', 'id');

        return inertia.render('Editor/Dataset/Approve', {
            dataset,
            reviewers,
        });
    }
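
    // Validate the selected reviewer, clear any previous reviewer rejection note and move the dataset to the 'approved' state.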
    public async approveUpdate({ request, response }) {
        const approveDatasetSchema = schema.create({
            reviewer_id: schema.number(),
        });
        try {
            await request.validate({ schema: approveDatasetSchema, messages: this.messages });
        } catch (error) {
            // return response.badRequest(error.messages);
            throw error;
        }

        const id = request.param('id');
        const dataset = await Dataset.findOrFail(id);

        const validStates = ['editor_accepted', 'rejected_reviewer'];
        if (!validStates.includes(dataset.server_state)) {
            // session.flash('errors', 'Invalid server state!');
            return response
                .flash(
                    'warning',
                    `Invalid server state. Dataset with id ${id} cannot be approved. Dataset has server state ${dataset.server_state}.`,
                )
                .redirect()
                .back();
        }

        dataset.server_state = 'approved';
        if (dataset.reject_reviewer_note != null) {
            dataset.reject_reviewer_note = null;
        }

        // assign the selected reviewer
        const reviewer_id = request.input('reviewer_id', null);
        dataset.reviewer_id = reviewer_id;

        if (await dataset.save()) {
            return response.toRoute('editor.dataset.list').flash('message', 'You have approved one dataset!');
        }
    }

    // Render the "publish" page for a reviewed dataset.
    public async publish({ request, inertia, response }) {
        const id = request.param('id');

        const dataset = await Dataset.query()
            .where('id', id)
            .preload('titles')
            .preload('authors')
            // .preload('persons', (builder) => {
            //     builder.wherePivot('role', 'author')
            // })
            .firstOrFail();

        const validStates = ['reviewed'];
        if (!validStates.includes(dataset.server_state)) {
            // session.flash('errors', 'Invalid server state!');
            return response
                .flash(
                    'warning',
                    `Invalid server state. Dataset with id ${id} cannot be published. Dataset has server state ${dataset.server_state}.`,
                )
                .redirect()
                .back();
        }

        return inertia.render('Editor/Dataset/Publish', {
            dataset,
        });
    }

    // Assign the next free publish_id, mark the dataset as published and index it in OpenSearch.
    public async publishUpdate({ request, response }) {
        const publishDatasetSchema = schema.create({
            publisher_name: schema.string({ trim: true }),
        });
        try {
            await request.validate({ schema: publishDatasetSchema, messages: this.messages });
        } catch (error) {
            // return response.badRequest(error.messages);
            throw error;
        }

        const id = request.param('id');
        const dataset = await Dataset.findOrFail(id);

        // let test = await Dataset.getMax('publish_id');
        // const maxPublishId = await Database.from('documents').max('publish_id as max_publish_id').first();
        // const max = maxPublishId.max_publish_id;
        const max = await Dataset.getMax('publish_id');
        let publish_id = 0;
        if (max != null) {
            publish_id = max + 1;
        } else {
            publish_id = publish_id + 1;
        }
        dataset.publish_id = publish_id;
        dataset.server_state = 'published';
        dataset.server_date_published = DateTime.now();

        const publisherName = request.input('publisher_name', 'Tethys');
        dataset.publisher_name = publisherName;

        if (await dataset.save()) {
            const index_name = 'tethys-records';
            await Index.indexDocument(dataset, index_name);
            return response.toRoute('editor.dataset.list').flash('message', 'You have successfully published the dataset!');
        }
    }

    // Render the DOI creation page for a dataset.
    public async doiCreate({ request, inertia }: HttpContextContract) {
        const id = request.param('id');
        const dataset = await Dataset.query()
            .where('id', id)
            .preload('titles')
            .preload('descriptions')
            // .preload('identifier')
            .preload('authors')
            .firstOrFail();

        return inertia.render('Editor/Dataset/Doi', {
            dataset,
        });
    }
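
    // Register a DOI at DataCite for a published dataset, store the resulting identifier and re-index the dataset in OpenSearch.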
    public async doiStore({ request, response }: HttpContextContract) {
        const dataId = request.param('publish_id');
        const dataset = await Dataset.query()
            // .preload('xmlCache')
            .where('publish_id', dataId)
            .firstOrFail();
        const xmlMeta = (await Index.getDoiRegisterString(dataset)) as string;

        // const datacite_environment = process.env.DATACITE_ENVIRONMENT || 'debug';
        const prefix = process.env.DATACITE_PREFIX || '';
        const base_domain = process.env.BASE_DOMAIN || '';

        // register the DOI:
        const doiValue = prefix + '/tethys.' + dataset.publish_id; // e.g. '10.21388/tethys.213'
        const landingPageUrl = 'https://doi.' + getDomain(base_domain) + '/' + prefix + '/tethys.' + dataset.publish_id; // e.g. https://doi.dev.tethys.at/10.21388/tethys.213
        const doiClient = new DoiClient();
        const dataciteResponse = await doiClient.registerDoi(doiValue, xmlMeta, landingPageUrl);

        if (dataciteResponse?.status === 201) {
            // response status 201: save the identifier value into the database
            const doiIdentifier = new DatasetIdentifier();
            doiIdentifier.value = doiValue;
            doiIdentifier.dataset_id = dataset.id;
            doiIdentifier.type = 'doi';
            doiIdentifier.status = 'findable';
            // update the modified date of the dataset for re-caching the model in the database and updating the search index
            dataset.server_date_modified = DateTime.now();

            // save the updated dataset to the database and index it to OpenSearch
            try {
                await dataset.related('identifier').save(doiIdentifier);
                const index_name = 'tethys-records';
                await Index.indexDocument(dataset, index_name);
            } catch (error) {
                Logger.error(`${__filename}: Indexing document ${dataset.id} failed: ${error.message}`);
                // Log the error or handle it as needed
                throw new HttpException(error.message);
            }
            return response.toRoute('editor.dataset.list').flash('message', 'You have successfully created a DOI for the dataset!');
        } else {
            const message = `Unexpected DataCite MDS response code ${dataciteResponse?.status}`;
            // Log the error or handle it as needed
            throw new DoiClientException(dataciteResponse?.status, message);
        }
        // return response.toRoute('editor.dataset.list').flash('message', xmlMeta);
    }

    public async show({}: HttpContextContract) {}

    public async edit({}: HttpContextContract) {}

    // public async update({}: HttpContextContract) {}
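
    // Experimental: transform the dataset XML with the compiled XSLT stylesheet (saxon-js) into a JSON document
    // and index it into the 'tethys-features' OpenSearch index; the dataset id is currently hardcoded.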
    public async update({ response }) {
        const id = 273; // request.param('id');
        const dataset = await Dataset.query().preload('xmlCache').where('id', id).firstOrFail();

        // add xml elements; seed the document with a <root> element so that root() below succeeds
        let xml = create({ version: '1.0', encoding: 'UTF-8', standalone: true }, '<root></root>');
        const datasetNode = xml.root().ele('Dataset');
        await this.createXmlRecord(dataset, datasetNode);
        // const domNode = await this.getDatasetXmlDomNode(dataset);
        // const xmlString = xml.end({ prettyPrint: true });

        // const data = request.only(['field1', 'field2']);
        // get it from xslt
        // Create an index with non-default settings.
        const index_name = 'tethys-features';
        const xmlString = xml.end({ prettyPrint: false });
        let doc = '';
        try {
            const result = await transform({
                // stylesheetFileName: `${config.TMP_BASE_DIR}/data-quality/rules/iati.sef.json`,
                stylesheetText: this.proc,
                destination: 'serialized',
                // sourceFileName: sourceFile,
                sourceText: xmlString,
                // stylesheetParams: xsltParameter,
                // logLevel: 10,
            });
            doc = result.principalResult;
        } catch (error) {
            return response.status(500).json({
                message: 'An error occurred while transforming the dataset XML',
                error: error.message,
            });
        }

        // var settings = {
        //     settings: {
        //         index: {
        //             number_of_shards: 4,
        //             number_of_replicas: 3,
        //         },
        //     },
        // };
        // var test = await client.indices.create({
        //     index: index_name,
        //     body: settings,
        // });

        // var document = {
        //     title: 'Sample Document',
        //     authors: [
        //         {
        //             first_name: 'John',
        //             last_name: 'Doe',
        //         },
        //         {
        //             first_name: 'Jane',
        //             last_name: 'Smith',
        //         },
        //     ],
        //     year: '2018',
        //     genre: 'Crime fiction',
        // };
        // http://localhost:9200/datasets/_doc/1
        // var id = '1';

        try {
            // console.log(doc);
            const document = JSON.parse(`${doc}`);

            // https://opensearch.org/docs/2.1/opensearch/supported-field-types/geo-shape/
            // Define the new document
            // const document = {
            //     title: 'Your Document Name',
            //     id: dataset.publish_id,
            //     doctype: 'GIS',
            //     // "location" : {
            //     //     "type" : "point",
            //     //     "coordinates" : [74.00, 40.71]
            //     // },
            //     geo_location: {
            //         type: 'linestring',
            //         coordinates: [
            //             [-77.03653, 38.897676],
            //             [-77.009051, 38.889939],
            //         ],
            //     },
            //     // geo_location: 'BBOX (71.0589, 74.0060, 42.3601, 40.7128)'
            //     // geo_location: {
            //     //     type: 'envelope',
            //     //     coordinates: [
            //     //         [13.0, 53.0],
            //     //         [14.0, 52.0],
            //     //     ], // Define your BBOX coordinates
            //     // },
            // };

            // Index (upsert) the document under the dataset's publish_id
            const result = await client.index({
                id: dataset.publish_id?.toString(),
                index: index_name,
                body: document,
                refresh: true,
            });

            // Return the result
            return response.json(result.body);
        } catch (error) {
            // Handle any errors
            console.error(error);
            return response.status(500).json({ error: 'An error occurred while updating the data.' });
        }
    }

    public async destroy({}: HttpContextContract) {}

    // Import the cached (or freshly generated) dataset XML into the given parent node.
    private async createXmlRecord(dataset: Dataset, datasetNode: XMLBuilder) {
        const domNode = await this.getDatasetXmlDomNode(dataset);
        if (domNode) {
            datasetNode.import(domNode);
        }
    }

    private async getDatasetXmlDomNode(dataset: Dataset) {
        const xmlModel = new XmlModel(dataset);
        // xmlModel.setModel(dataset);
        xmlModel.excludeEmptyFields();
        xmlModel.caching = true;
        // const cache = dataset.xmlCache ? dataset.xmlCache : null;
        // dataset.load('xmlCache');
        if (dataset.xmlCache) {
            xmlModel.xmlCache = dataset.xmlCache;
        }

        // return cache.getDomDocument();
        const domDocument: XMLBuilder | null = await xmlModel.getDomDocument();
        return domDocument;
    }
}