// tethys.backend/app/Controllers/Http/Editor/DatasetController.ts
// Commit 18635f77b3 (Arno Kaimbacher, 2024-01-04):
// - npm updates
// - added views and controller code for reviewer role
// - added program logic for publishing a dataset by editor
// - added reviewer menu
// - adapted routes.ts for additional routes

import type { HttpContextContract } from '@ioc:Adonis/Core/HttpContext';
import { Client } from '@opensearch-project/opensearch';
import User from 'App/Models/User';
import Dataset from 'App/Models/Dataset';
import XmlModel from 'App/Library/XmlModel';
import { XMLBuilder } from 'xmlbuilder2/lib/interfaces';
import { create } from 'xmlbuilder2';
import { readFileSync } from 'fs';
import { transform } from 'saxon-js';
import type { ModelQueryBuilderContract } from '@ioc:Adonis/Lucid/Orm';
import { schema, CustomMessages } from '@ioc:Adonis/Core/Validator';
import { DateTime } from 'luxon';
// Create a new instance of the client, shared by update() for indexing datasets.
// NOTE(review): endpoint is hardcoded — presumably this should come from Env/config; verify before deploying.
const client = new Client({ node: 'http://localhost:9200' }); // replace with your OpenSearch endpoint
/**
 * Editor workflow for datasets: listing, receiving, approving (assigning a
 * reviewer), publishing, and pushing the dataset XML into the OpenSearch index.
 *
 * Workflow states used here: released -> editor_accepted -> approved ->
 * reviewed -> published (with rejected_reviewer as a side path).
 */
export default class DatasetsController {
    // Pre-compiled XSLT stylesheet (SEF JSON) used by update() to transform
    // the dataset XML into a JSON document for OpenSearch.
    private proc;

    // Custom validator messages shared by approveUpdate() and publishUpdate().
    public messages: CustomMessages = {
        'reviewer_id.required': 'reviewer_id must be defined',
        'publisher_name.required': 'publisher name must be defined',
    };

    constructor() {
        // Load the compiled stylesheet once per controller instance.
        this.proc = readFileSync('public/assets2/solr.sef.json');
        // Alternative stylesheet (OAI output), kept for reference:
        // this.proc = readFileSync('public/assets2/datasetxml2oai.sef.json');
    }

    /**
     * List datasets relevant to this editor: every 'released' dataset, plus
     * datasets already claimed by this editor (in a post-release state) that
     * do not have a registered identifier yet. Supports a "sort" query
     * parameter where a leading '-' selects descending order.
     */
    public async index({ auth, request, inertia }: HttpContextContract) {
        const user = (await User.find(auth.user?.id)) as User;
        const page = request.input('page', 1);
        const datasets: ModelQueryBuilderContract<typeof Dataset, Dataset> = Dataset.query();

        if (request.input('sort')) {
            type SortOrder = 'asc' | 'desc' | undefined;
            let attribute: string = request.input('sort');
            let sortOrder: SortOrder = 'asc';
            // '-created_at' means "order by created_at descending".
            if (attribute.startsWith('-')) {
                sortOrder = 'desc';
                attribute = attribute.slice(1);
            }
            datasets.orderBy(attribute, sortOrder);
        } else {
            datasets.orderBy('id', 'asc');
        }

        const myDatasets = await datasets
            .where('server_state', 'released')
            .orWhere((dQuery) => {
                dQuery
                    .whereIn('server_state', ['editor_accepted', 'rejected_reviewer', 'reviewed', 'published'])
                    .where('editor_id', user.id)
                    // only datasets without a registered identifier (e.g. DOI)
                    .doesntHave('identifier', 'and');
            })
            .preload('titles')
            .preload('user', (query) => query.select('id', 'login'))
            .preload('editor', (query) => query.select('id', 'login'))
            .paginate(page, 10);

        return inertia.render('Editor/Dataset/Index', {
            datasets: myDatasets.serialize(),
            filters: request.all(),
            // Per-action permissions for the current user, evaluated server-side.
            can: {
                receive: await auth.user?.can(['dataset-receive']),
                approve: await auth.user?.can(['dataset-approve']),
                edit: await auth.user?.can(['dataset-editor-update']),
                delete: await auth.user?.can(['dataset-editor-delete']),
                publish: await auth.user?.can(['dataset-publish']),
            },
        });
    }

    /**
     * Show the "receive dataset" confirmation page. Only datasets in state
     * 'released' may be received; otherwise redirect back with a warning.
     */
    public async receive({ request, inertia, response }: HttpContextContract) {
        const id = request.param('id');
        const dataset = await Dataset.query()
            .where('id', id)
            .preload('titles')
            .preload('descriptions')
            .preload('user', (builder) => {
                builder.select('id', 'login');
            })
            .firstOrFail();

        const validStates = ['released'];
        if (!validStates.includes(dataset.server_state)) {
            return response
                .flash(
                    'warning',
                    `Invalid server state. Dataset with id ${id} cannot be received. Dataset has server state ${dataset.server_state}.`,
                )
                .redirect()
                .back();
        }
        return inertia.render('Editor/Dataset/Receive', {
            dataset,
        });
    }

    /**
     * Accept a 'released' dataset: set state to 'editor_accepted' and attach
     * the current user as editor.
     * NOTE(review): context param is untyped — presumably because
     * response.flash() is added by a provider and missing from the typed
     * contract; confirm before adding HttpContextContract here.
     */
    public async receiveUpdate({ auth, request, response }) {
        const id = request.param('id');
        const dataset = await Dataset.findOrFail(id);

        const validStates = ['released'];
        if (!validStates.includes(dataset.server_state)) {
            return response
                .flash(
                    'warning',
                    `Invalid server state. Dataset with id ${id} cannot be received by editor. Dataset has server state ${dataset.server_state}.`,
                )
                .redirect()
                .toRoute('editor.dataset.list');
        }

        dataset.server_state = 'editor_accepted';
        const user = (await User.find(auth.user?.id)) as User;
        try {
            // associate() persists the dataset, including the state change above.
            await dataset.related('editor').associate(user);
            return response.toRoute('editor.dataset.list').flash('message', `You have accepted dataset ${dataset.id}!`);
        } catch (error) {
            console.error(error);
            return response.status(500).json({ error: 'An error occurred while accepting the data.' });
        }
    }

    /**
     * Show the "approve dataset" page with a reviewer selection. Valid for
     * datasets in state 'editor_accepted' or 'rejected_reviewer'.
     */
    public async approve({ request, inertia, response }) {
        const id = request.param('id');
        const dataset = await Dataset.findOrFail(id);

        const validStates = ['editor_accepted', 'rejected_reviewer'];
        if (!validStates.includes(dataset.server_state)) {
            return response
                .flash(
                    'warning',
                    `Invalid server state. Dataset with id ${id} cannot be approved. Dataset has server state ${dataset.server_state}.`,
                )
                .redirect()
                .back();
        }

        // Map of id -> login for every user holding the 'reviewer' role.
        const reviewers = await User.query()
            .whereHas('roles', (builder) => {
                builder.where('name', 'reviewer');
            })
            .pluck('login', 'id');

        return inertia.render('Editor/Dataset/Approve', {
            dataset,
            reviewers,
        });
    }

    /**
     * Approve a dataset for review: validate the chosen reviewer, clear any
     * previous reviewer rejection note, and set state to 'approved'.
     */
    public async approveUpdate({ request, response }) {
        const approveDatasetSchema = schema.create({
            reviewer_id: schema.number(),
        });
        // Let validation errors propagate so the framework renders them.
        await request.validate({ schema: approveDatasetSchema, messages: this.messages });

        const id = request.param('id');
        const dataset = await Dataset.findOrFail(id);

        const validStates = ['editor_accepted', 'rejected_reviewer'];
        if (!validStates.includes(dataset.server_state)) {
            return response
                .flash(
                    'warning',
                    `Invalid server state. Dataset with id ${id} cannot be approved. Dataset has server state ${dataset.server_state}.`,
                )
                .redirect()
                .back();
        }

        dataset.server_state = 'approved';
        // A re-approval after rejection clears the reviewer's rejection note.
        if (dataset.reject_reviewer_note != null) {
            dataset.reject_reviewer_note = null;
        }
        dataset.reviewer_id = request.input('reviewer_id', null);

        if (await dataset.save()) {
            return response.toRoute('editor.dataset.list').flash('message', 'You have approved one dataset!');
        }
    }

    /**
     * Show the "publish dataset" page. Only datasets in state 'reviewed'
     * may be published.
     */
    public async publish({ request, inertia, response }) {
        const id = request.param('id');
        const dataset = await Dataset.query()
            .where('id', id)
            .preload('titles')
            .preload('authors')
            .firstOrFail();

        const validStates = ['reviewed'];
        if (!validStates.includes(dataset.server_state)) {
            return response
                .flash(
                    'warning',
                    `Invalid server state. Dataset with id ${id} cannot be published. Dataset has server state ${dataset.server_state}.`,
                )
                .redirect()
                .back();
        }
        return inertia.render('Editor/Dataset/Publish', {
            dataset,
        });
    }

    /**
     * Publish a dataset: assign the next free publish_id, stamp the publish
     * date, store the publisher name, and set state to 'published'.
     */
    public async publishUpdate({ request, response }) {
        const publishDatasetSchema = schema.create({
            publisher_name: schema.string({ trim: true }),
        });
        // Let validation errors propagate so the framework renders them.
        await request.validate({ schema: publishDatasetSchema, messages: this.messages });

        const id = request.param('id');
        const dataset = await Dataset.findOrFail(id);

        // Next publish_id: one past the current maximum, or 1 for the first dataset.
        const max = await Dataset.getMax('publish_id');
        const publish_id = max != null ? max + 1 : 1;

        dataset.publish_id = publish_id;
        dataset.server_state = 'published';
        dataset.server_date_published = DateTime.now();
        dataset.publisher_name = request.input('publisher_name', 'Tethys');

        if (await dataset.save()) {
            return response.toRoute('editor.dataset.list').flash('message', 'You have successfully published the dataset!');
        }
    }

    public async create({}: HttpContextContract) {}

    public async store({}: HttpContextContract) {}

    public async show({}: HttpContextContract) {}

    public async edit({}: HttpContextContract) {}

    /**
     * Experimental: build the dataset XML, transform it via XSLT into a JSON
     * document, and index it into OpenSearch under the dataset's publish_id.
     */
    public async update({ response }) {
        // TODO(review): dataset id is hardcoded for testing — should come from request.param('id').
        const id = 273;
        const dataset = await Dataset.query().preload('xmlCache').where('id', id).firstOrFail();

        // Build the <root><Dataset>…</Dataset></root> XML document.
        const xml = create({ version: '1.0', encoding: 'UTF-8', standalone: true }, '<root></root>');
        const datasetNode = xml.root().ele('Dataset');
        await this.createXmlRecord(dataset, datasetNode);

        const index_name = 'tethys-features';
        const xmlString = xml.end({ prettyPrint: false });

        // Transform the XML into a serialized JSON document via saxon-js.
        let doc = '';
        try {
            const result = await transform({
                stylesheetText: this.proc,
                destination: 'serialized',
                sourceText: xmlString,
            });
            doc = result.principalResult;
        } catch (error) {
            return response.status(500).json({
                message: 'An error occurred while transforming the dataset XML.',
                error: error.message,
            });
        }

        try {
            const document = JSON.parse(`${doc}`);
            // Index (upsert) the document; refresh so it is searchable immediately.
            // https://opensearch.org/docs/2.1/opensearch/supported-field-types/geo-shape/
            const result = await client.index({
                id: dataset.publish_id?.toString(),
                index: index_name,
                body: document,
                refresh: true,
            });
            return response.json(result.body);
        } catch (error) {
            console.error(error);
            return response.status(500).json({ error: 'An error occurred while updating the data.' });
        }
    }

    public async destroy({}: HttpContextContract) {}

    public async syncOpensearch({}: HttpContextContract) {}

    // Import the dataset's cached XML DOM (if any) into the given builder node.
    private async createXmlRecord(dataset: Dataset, datasetNode: XMLBuilder) {
        const domNode = await this.getDatasetXmlDomNode(dataset);
        if (domNode) {
            datasetNode.import(domNode);
        }
    }

    // Build (or reuse from xmlCache) the XML DOM document for a dataset.
    private async getDatasetXmlDomNode(dataset: Dataset) {
        const xmlModel = new XmlModel(dataset);
        xmlModel.excludeEmptyFields();
        xmlModel.caching = true;
        if (dataset.xmlCache) {
            xmlModel.xmlCache = dataset.xmlCache;
        }
        const domDocument: XMLBuilder | null = await xmlModel.getDomDocument();
        return domDocument;
    }
}