2024-03-14 19:25:27 +00:00
|
|
|
import type { HttpContext } from '@adonisjs/core/http';
|
2023-10-17 13:45:41 +00:00
|
|
|
import { Client } from '@opensearch-project/opensearch';
|
2024-03-14 19:25:27 +00:00
|
|
|
import User from '#app/Models/User';
|
|
|
|
import Dataset from '#app/Models/Dataset';
|
|
|
|
import DatasetIdentifier from '#app/Models/DatasetIdentifier';
|
|
|
|
import XmlModel from '#app/Library/XmlModel';
|
|
|
|
import { XMLBuilder } from 'xmlbuilder2/lib/interfaces.js';
|
2023-10-17 13:45:41 +00:00
|
|
|
import { create } from 'xmlbuilder2';
|
|
|
|
import { readFileSync } from 'fs';
|
2024-03-14 19:25:27 +00:00
|
|
|
import SaxonJS from 'saxon-js';
|
|
|
|
import { schema } from '@adonisjs/validator';
|
2024-01-04 15:40:05 +00:00
|
|
|
import { DateTime } from 'luxon';
|
2024-03-14 19:25:27 +00:00
|
|
|
import Index from '#app/Library/Utils/Index';
|
|
|
|
import { getDomain } from '#app/Utils/utility-functions';
|
|
|
|
import { DoiClient } from '#app/Library/Doi/DoiClient';
|
|
|
|
import DoiClientException from '#app/Exceptions/DoiClientException';
|
|
|
|
import logger from '@adonisjs/core/services/logger';
|
2024-01-26 08:39:03 +00:00
|
|
|
import { HttpException } from 'node-exceptions';
|
2024-03-14 19:25:27 +00:00
|
|
|
import { ModelQueryBuilderContract } from "@adonisjs/lucid/types/model";
|
|
|
|
import { CustomMessages } from "@adonisjs/validator/types";
|
2023-10-17 13:45:41 +00:00
|
|
|
|
|
|
|
// Create a new instance of the OpenSearch client, shared by all controller actions.
// NOTE(review): endpoint is hard-coded; presumably this should come from env/config — confirm.
const client = new Client({ node: 'http://localhost:9200' }); // replace with your OpenSearch endpoint
|
|
|
|
|
|
|
|
export default class DatasetsController {
|
|
|
|
// Pre-compiled XSLT stylesheet (SEF JSON) loaded once and reused by update().
// Typed explicitly: readFileSync without an encoding returns a byte buffer.
private proc: Uint8Array;

// Custom validator messages used by approveUpdate() and publishUpdate().
public messages: CustomMessages = {
    // 'required': '{{ field }} is required',
    // 'licenses.minLength': 'at least {{ options.minLength }} permission must be defined',
    'reviewer_id.required': 'reviewer_id must be defined',
    'publisher_name.required': 'publisher name must be defined',
};

constructor() {
    // Load the compiled XSLT file once per controller instance.
    this.proc = readFileSync('public/assets2/solr.sef.json');
    // this.proc = readFileSync('public/assets2/datasetxml2oai.sef.json');
}
|
|
|
|
|
2023-11-30 12:40:32 +00:00
|
|
|
// public async index({}: HttpContextContract) {}
|
2024-03-14 19:25:27 +00:00
|
|
|
/**
 * List datasets relevant to the current editor.
 *
 * Shows all 'released' datasets plus datasets in post-release states
 * ('editor_accepted', 'rejected_reviewer', 'reviewed', 'published') that this
 * editor accepted and that do not yet have a DOI identifier. Supports a
 * '?sort=' query parameter where a leading '-' selects descending order
 * (e.g. 'sort=-id'). Results are paginated 10 per page.
 */
public async index({ auth, request, inertia }: HttpContext) {
    const user = (await User.find(auth.user?.id)) as User;
    const page = request.input('page', 1);

    let datasets: ModelQueryBuilderContract<typeof Dataset, Dataset> = Dataset.query();

    if (request.input('sort')) {
        type SortOrder = 'asc' | 'desc' | undefined;
        let attribute: string = request.input('sort');
        let sortOrder: SortOrder = 'asc';

        // A leading '-' flips the sort direction; strip it to get the column name.
        // (slice/startsWith replace the deprecated String.prototype.substr.)
        if (attribute.startsWith('-')) {
            sortOrder = 'desc';
            attribute = attribute.slice(1);
        }
        datasets.orderBy(attribute, sortOrder);
    } else {
        datasets.orderBy('id', 'asc');
    }

    const myDatasets = await datasets
        .where('server_state', 'released')
        .orWhere((dQuery) => {
            dQuery
                .whereIn('server_state', ['editor_accepted', 'rejected_reviewer', 'reviewed', 'published'])
                .where('editor_id', user.id)
                .doesntHave('identifier', 'and');
        })
        // .preload('identifier')
        .preload('titles')
        .preload('user', (query) => query.select('id', 'login'))
        .preload('editor', (query) => query.select('id', 'login'))
        .paginate(page, 10);

    return inertia.render('Editor/Dataset/Index', {
        datasets: myDatasets.serialize(),
        filters: request.all(),
        // Per-action permissions for the current user, consumed by the frontend.
        can: {
            receive: await auth.user?.can(['dataset-receive']),
            approve: await auth.user?.can(['dataset-approve']),
            edit: await auth.user?.can(['dataset-editor-update']),
            delete: await auth.user?.can(['dataset-editor-delete']),
            publish: await auth.user?.can(['dataset-publish']),
        },
    });
}
|
|
|
|
|
2024-03-14 19:25:27 +00:00
|
|
|
/**
 * Show the "receive dataset" confirmation page for one dataset.
 *
 * Only datasets in server_state 'released' may be received; otherwise the
 * user is redirected back with a warning flash message.
 */
public async receive({ request, inertia, response }: HttpContext) {
    const id = request.param('id');
    const dataset = await Dataset.query()
        .where('id', id)
        .preload('titles')
        .preload('descriptions')
        .preload('user', (builder) => {
            builder.select('id', 'login');
        })
        .firstOrFail();

    const validStates = ['released'];
    if (!validStates.includes(dataset.server_state)) {
        // Fixed typo in user-facing message: "Datset" -> "Dataset".
        return response
            .flash(
                'warning',
                `Invalid server state. Dataset with id ${id} cannot be received. Dataset has server state ${dataset.server_state}.`,
            )
            .redirect()
            .back();
    }

    return inertia.render('Editor/Dataset/Receive', {
        dataset,
    });
}
|
2023-10-17 13:45:41 +00:00
|
|
|
|
2024-03-14 19:25:27 +00:00
|
|
|
/**
 * Accept a released dataset: sets server_state to 'editor_accepted' and
 * assigns the current user as its editor.
 *
 * Redirects to the editor dataset list on success, or back with a warning
 * flash if the dataset is not in a receivable state.
 */
public async receiveUpdate({ auth, request, response }: HttpContext) {
    const id = request.param('id');
    const dataset = await Dataset.findOrFail(id);

    const validStates = ['released'];
    if (!validStates.includes(dataset.server_state)) {
        // Fixed typo in user-facing message: "Datset" -> "Dataset".
        return response
            .flash(
                'warning',
                `Invalid server state. Dataset with id ${id} cannot be received by editor. Dataset has server state ${dataset.server_state}.`,
            )
            .redirect()
            .toRoute('editor.dataset.list');
    }

    dataset.server_state = 'editor_accepted';
    const user = (await User.find(auth.user?.id)) as User;
    try {
        // associate() persists the dataset immediately; no explicit save() needed.
        await dataset.related('editor').associate(user);
        return response.toRoute('editor.dataset.list').flash('message', `You have accepted dataset ${dataset.id}!`);
    } catch (error) {
        console.error(error);
        return response.status(500).json({ error: 'An error occurred while accepting the data.' });
    }
}
|
|
|
|
|
2024-03-14 19:25:27 +00:00
|
|
|
/**
 * Show the "approve dataset" page, offering all users with the 'reviewer'
 * role as reviewer candidates.
 *
 * A dataset can only be approved after an editor accepted it
 * ('editor_accepted') or after a reviewer rejected it ('rejected_reviewer').
 */
public async approve({ request, inertia, response }: HttpContext) {
    const id = request.param('id');
    const dataset = await Dataset.findOrFail(id);

    const validStates = ['editor_accepted', 'rejected_reviewer'];
    if (!validStates.includes(dataset.server_state)) {
        // Fixed typo in user-facing message: "Datset" -> "Dataset".
        return response
            .flash(
                'warning',
                `Invalid server state. Dataset with id ${id} cannot be approved. Dataset has server state ${dataset.server_state}.`,
            )
            .redirect()
            .back();
    }

    // id -> login map of every user holding the 'reviewer' role.
    const reviewers = await User.query()
        .whereHas('roles', (builder) => {
            builder.where('name', 'reviewer');
        })
        .pluck('login', 'id');

    return inertia.render('Editor/Dataset/Approve', {
        dataset,
        reviewers,
    });
}
|
|
|
|
|
2024-03-14 19:25:27 +00:00
|
|
|
/**
 * Approve a dataset and hand it to a reviewer.
 *
 * Validates that reviewer_id is present, moves the dataset to 'approved',
 * clears any note left by a previous reviewer rejection, and stores the
 * chosen reviewer. Validation failures propagate to the framework handler.
 */
public async approveUpdate({ request, response }: HttpContext) {
    const approveDatasetSchema = schema.create({
        reviewer_id: schema.number(),
    });
    // Validation errors bubble up unchanged (previous try/rethrow was a no-op).
    await request.validate({ schema: approveDatasetSchema, messages: this.messages });

    const id = request.param('id');
    const dataset = await Dataset.findOrFail(id);

    const validStates = ['editor_accepted', 'rejected_reviewer'];
    if (!validStates.includes(dataset.server_state)) {
        // Fixed typo in user-facing message: "Datset" -> "Dataset".
        return response
            .flash(
                'warning',
                `Invalid server state. Dataset with id ${id} cannot be approved. Dataset has server state ${dataset.server_state}.`,
            )
            .redirect()
            .back();
    }

    dataset.server_state = 'approved';
    // Clear the rejection note from a previous review round, if any.
    if (dataset.reject_reviewer_note != null) {
        dataset.reject_reviewer_note = null;
    }

    const reviewer_id = request.input('reviewer_id', null);
    dataset.reviewer_id = reviewer_id;

    if (await dataset.save()) {
        return response.toRoute('editor.dataset.list').flash('message', 'You have approved one dataset!');
    }
    // Defensive fallback: previously this path returned undefined silently.
    return response.flash('warning', `Dataset with id ${id} could not be saved.`).redirect().back();
}
|
|
|
|
|
2024-03-14 19:25:27 +00:00
|
|
|
/**
 * Show the "publish dataset" page for one dataset.
 *
 * Only datasets in server_state 'reviewed' may be published; otherwise the
 * user is redirected back with a warning flash message.
 */
public async publish({ request, inertia, response }: HttpContext) {
    const id = request.param('id');

    const dataset = await Dataset.query()
        .where('id', id)
        .preload('titles')
        .preload('authors')
        // .preload('persons', (builder) => {
        //     builder.wherePivot('role', 'author')
        // })
        .firstOrFail();

    const validStates = ['reviewed'];
    if (!validStates.includes(dataset.server_state)) {
        // Fixed typo in user-facing message: "Datset" -> "Dataset".
        return response
            .flash(
                'warning',
                `Invalid server state. Dataset with id ${id} cannot be published. Dataset has server state ${dataset.server_state}.`,
            )
            .redirect()
            .back();
    }

    return inertia.render('Editor/Dataset/Publish', {
        dataset,
    });
}
|
|
|
|
|
2024-03-14 19:25:27 +00:00
|
|
|
/**
 * Publish a dataset: assigns the next free publish_id, sets server_state to
 * 'published', stamps the publication date, stores the publisher name and
 * indexes the dataset into the 'tethys-records' OpenSearch index.
 *
 * Validates that publisher_name is present; validation failures propagate to
 * the framework handler.
 */
public async publishUpdate({ request, response }: HttpContext) {
    const publishDatasetSchema = schema.create({
        publisher_name: schema.string({ trim: true }),
    });
    // Validation errors bubble up unchanged (previous try/rethrow was a no-op).
    await request.validate({ schema: publishDatasetSchema, messages: this.messages });

    const id = request.param('id');
    const dataset = await Dataset.findOrFail(id);

    // Next publish_id is one past the current maximum; 1 for the very first dataset.
    const max = await Dataset.getMax('publish_id');
    const publish_id = (max ?? 0) + 1;

    dataset.publish_id = publish_id;
    dataset.server_state = 'published';
    dataset.server_date_published = DateTime.now();

    const publisherName = request.input('publisher_name', 'Tethys');
    dataset.publisher_name = publisherName;

    if (await dataset.save()) {
        const index_name = 'tethys-records';
        await Index.indexDocument(dataset, index_name);
        return response.toRoute('editor.dataset.list').flash('message', 'You have successfully published the dataset!');
    }
    // Defensive fallback: previously this path returned undefined silently.
    return response.flash('warning', `Dataset with id ${id} could not be saved.`).redirect().back();
}
|
|
|
|
|
2024-03-14 19:25:27 +00:00
|
|
|
/**
 * Render the DOI-creation page for one dataset, with titles, descriptions
 * and authors preloaded.
 */
public async doiCreate({ request, inertia }: HttpContext) {
    const datasetId = request.param('id');

    const dataset = await Dataset.query()
        .where('id', datasetId)
        .preload('titles')
        .preload('descriptions')
        // .preload('identifier')
        .preload('authors')
        .firstOrFail();

    return inertia.render('Editor/Dataset/Doi', { dataset });
}
|
2023-10-17 13:45:41 +00:00
|
|
|
|
2024-03-14 19:25:27 +00:00
|
|
|
/**
 * Register a DOI for a published dataset at DataCite and persist it.
 *
 * Looks the dataset up by its publish_id route param, builds the DataCite
 * registration XML, registers "<prefix>/tethys.<publish_id>" pointing at the
 * DOI landing page, and on HTTP 201 stores a 'findable' DOI identifier row,
 * bumps server_date_modified and re-indexes the dataset in OpenSearch.
 *
 * Throws HttpException if saving/indexing fails after registration, and
 * DoiClientException on any non-201 DataCite response.
 */
public async doiStore({ request, response }: HttpContext) {
    const dataId = request.param('publish_id');
    const dataset = await Dataset.query()
        // .preload('xmlCache')
        .where('publish_id', dataId)
        .firstOrFail();
    // DataCite metadata XML for this dataset.
    const xmlMeta = (await Index.getDoiRegisterString(dataset)) as string;

    let prefix = '';
    let base_domain = '';
    // const datacite_environment = process.env.DATACITE_ENVIRONMENT || 'debug';
    prefix = process.env.DATACITE_PREFIX || '';
    base_domain = process.env.BASE_DOMAIN || '';

    // register DOI:
    const doiValue = prefix + '/tethys.' + dataset.publish_id; //'10.21388/tethys.213'
    const landingPageUrl = 'https://doi.' + getDomain(base_domain) + '/' + prefix + '/tethys.' + dataset.publish_id; //https://doi.dev.tethys.at/10.21388/tethys.213
    const doiClient = new DoiClient();
    const dataciteResponse = await doiClient.registerDoi(doiValue, xmlMeta, landingPageUrl);

    if (dataciteResponse?.status === 201) {
        // if response OK 201; save the Identifier value into db
        const doiIdentifier = new DatasetIdentifier();
        doiIdentifier.value = doiValue;
        doiIdentifier.dataset_id = dataset.id;
        doiIdentifier.type = 'doi';
        doiIdentifier.status = 'findable';
        // save modified date of datset for re-caching model in db an update the search index
        dataset.server_date_modified = DateTime.now();

        // save updated dataset to db an index to OpenSearch
        try {
            await dataset.related('identifier').save(doiIdentifier);
            const index_name = 'tethys-records';
            await Index.indexDocument(dataset, index_name);
        } catch (error) {
            // NOTE: the DOI is already registered at DataCite at this point; only
            // the local persistence/indexing failed.
            logger.error(`${__filename}: Indexing document ${dataset.id} failed: ${error.message}`);
            // Log the error or handle it as needed
            throw new HttpException(error.message);
        }
        return response.toRoute('editor.dataset.list').flash('message', 'You have successfully created a DOI for the dataset!');
    } else {
        const message = `Unexpected DataCite MDS response code ${dataciteResponse?.status}`;
        // Log the error or handle it as needed
        throw new DoiClientException(dataciteResponse?.status, message);
    }
    // return response.toRoute('editor.dataset.list').flash('message', xmlMeta);
}
|
2023-10-17 13:45:41 +00:00
|
|
|
|
2024-03-14 19:25:27 +00:00
|
|
|
// Placeholder for the resource route; intentionally not implemented yet.
public async show({}: HttpContext) {}
|
2023-10-17 13:45:41 +00:00
|
|
|
|
2024-03-14 19:25:27 +00:00
|
|
|
// Placeholder for the resource route; intentionally not implemented yet.
public async edit({}: HttpContext) {}
|
2023-10-17 13:45:41 +00:00
|
|
|
|
|
|
|
// public async update({}: HttpContextContract) {}
|
2024-03-14 19:25:27 +00:00
|
|
|
/**
 * DEV/DEBUG endpoint: builds the XML record of one dataset, transforms it to a
 * JSON document via the pre-compiled XSLT stylesheet (this.proc) and indexes
 * the result into the 'tethys-features' OpenSearch index.
 *
 * NOTE(review): the dataset id is hard-coded to 273 — should come from
 * request.param('id') before this endpoint goes productive.
 */
public async update({ response }: HttpContext) {
    const id = 273; //request.param('id');
    const dataset = await Dataset.query().preload('xmlCache').where('id', id).firstOrFail();

    // Build the intermediate XML document: <root><Dataset>…</Dataset></root>.
    const xml = create({ version: '1.0', encoding: 'UTF-8', standalone: true }, '<root></root>');
    const datasetNode = xml.root().ele('Dataset');
    await this.createXmlRecord(dataset, datasetNode);

    const index_name = 'tethys-features';
    const xmlString = xml.end({ prettyPrint: false });

    // Transform the dataset XML into a serialized JSON document.
    let doc = '';
    try {
        const result = await SaxonJS.transform({
            stylesheetText: this.proc,
            destination: 'serialized',
            sourceText: xmlString,
            // stylesheetParams: xsltParameter,
            // logLevel: 10,
        });
        doc = result.principalResult;
    } catch (error) {
        // Fixed misleading error message (previously said "creating the user").
        return response.status(500).json({
            message: 'An error occurred while transforming the dataset XML',
            error: error.message,
        });
    }

    try {
        // Geo fields follow the OpenSearch geo-shape mapping:
        // https://opensearch.org/docs/2.1/opensearch/supported-field-types/geo-shape/
        const document = JSON.parse(`${doc}`);

        // Index (upsert) the document under the dataset's publish_id.
        const result = await client.index({
            id: dataset.publish_id?.toString(),
            index: index_name,
            body: document,
            refresh: true,
        });

        // Return the raw OpenSearch response body for inspection.
        return response.json(result.body);
    } catch (error) {
        console.error(error);
        return response.status(500).json({ error: 'An error occurred while updating the data.' });
    }
}
|
|
|
|
|
2024-03-14 19:25:27 +00:00
|
|
|
// Placeholder for the resource route; intentionally not implemented yet.
public async destroy({}: HttpContext) {}
|
2023-10-17 13:45:41 +00:00
|
|
|
|
|
|
|
/**
 * Append the dataset's XML DOM representation (cached or freshly generated)
 * under the given parent node. Does nothing when no DOM node is available.
 */
private async createXmlRecord(dataset: Dataset, datasetNode: XMLBuilder) {
    const domNode = await this.getDatasetXmlDomNode(dataset);
    if (!domNode) {
        return;
    }
    datasetNode.import(domNode);
}
|
|
|
|
|
|
|
|
/**
 * Build (or fetch from cache) the XML DOM document for a dataset.
 *
 * Empty fields are excluded from the serialization, and the dataset's
 * xmlCache row — when already loaded — is handed to the serializer so it can
 * skip regeneration.
 */
private async getDatasetXmlDomNode(dataset: Dataset) {
    const serializer = new XmlModel(dataset);
    serializer.excludeEmptyFields();
    serializer.caching = true;
    if (dataset.xmlCache) {
        serializer.xmlCache = dataset.xmlCache;
    }
    const domDocument: XMLBuilder | null = await serializer.getDomDocument();
    return domDocument;
}
|
|
|
|
}
|