tethys.backend/commands/IndexDatasets.ts
Arno Kaimbacher a7142f694f - prettier formatting
- npm updates
- new SearchMap.vue component
2023-10-31 15:38:43 +01:00

117 lines
4.4 KiB
TypeScript

import { BaseCommand, flags } from '@adonisjs/core/build/standalone';
import Logger from '@ioc:Adonis/Core/Logger';
import { XMLBuilder } from 'xmlbuilder2/lib/interfaces';
import { create } from 'xmlbuilder2';
import Dataset from 'App/Models/Dataset';
import XmlModel from 'App/Library/XmlModel';
import { readFileSync } from 'fs';
import { transform } from 'saxon-js';
import { Client } from '@opensearch-project/opensearch';
// OpenSearch node address, overridable via environment; falls back to a local dev instance.
const opensearchNode = process.env.OPENSEARCH_HOST || 'localhost';
// Module-level singleton client shared by all command invocations.
// NOTE(review): assumes plain HTTP (no TLS/auth) — confirm for production deployments.
const client = new Client({ node: `http://${opensearchNode}` }); // replace with your OpenSearch endpoint
/**
 * Ace command that (re-)indexes published datasets into OpenSearch.
 *
 * For every dataset with server_state = 'published' (optionally restricted to a
 * single `--publish_id`), the cached dataset XML is serialized, transformed to a
 * Solr/OpenSearch JSON document via a compiled SEF stylesheet, and indexed into
 * the 'tethys-records' index using the dataset's publish_id as document id.
 */
export default class IndexDatasets extends BaseCommand {
    public static commandName = 'index:datasets';
    public static description = 'Index datasets based on publish_id';

    // Optional filter: only index the dataset with this publish_id (flag: -p).
    @flags.number({ alias: 'p' })
    public publish_id: number;

    public static settings = {
        /**
         * Set the following value to true, if you want to load the application
         * before running the command. Don't forget to call `node ace generate:manifest`
         * afterwards.
         */
        loadApp: true,

        /**
         * Set the following value to true, if you want this command to keep running until
         * you manually decide to exit the process. Don't forget to call
         * `node ace generate:manifest` afterwards.
         */
        stayAlive: false,
    };

    public async run() {
        this.logger.info('Indexing published datasets into OpenSearch...');
        const datasets = await this.getDatasets();
        // readFileSync without an encoding returns a Buffer; saxon-js expects the
        // compiled stylesheet (stylesheetText) as a string, so decode as UTF-8.
        const proc = readFileSync('public/assets2/solr.sef.json', 'utf-8');
        const index_name = 'tethys-records';
        for (const dataset of datasets) {
            const jsonString = await this.getJsonString(dataset, proc);
            await this.indexDocument(dataset, index_name, jsonString);
        }
    }

    /**
     * Load all published datasets (with their XML cache preloaded), optionally
     * restricted to a single publish_id when the -p flag was given.
     */
    private async getDatasets(): Promise<Dataset[]> {
        const query = Dataset.query().preload('xmlCache').where('server_state', 'published');
        if (this.publish_id) {
            query.where('publish_id', this.publish_id);
        }
        return await query;
    }

    /**
     * Serialize the dataset to XML and transform it to a JSON string using the
     * compiled SEF stylesheet. Returns an empty string when the transform fails.
     *
     * @param dataset The dataset to serialize.
     * @param proc    Compiled SEF stylesheet content (JSON text).
     */
    private async getJsonString(dataset: Dataset, proc: string): Promise<string> {
        const xml = create({ version: '1.0', encoding: 'UTF-8', standalone: true }, '<root></root>');
        const datasetNode = xml.root().ele('Dataset');
        await this.createXmlRecord(dataset, datasetNode);
        const xmlString = xml.end({ prettyPrint: false });
        try {
            const result = await transform({
                stylesheetText: proc,
                destination: 'serialized',
                sourceText: xmlString,
            });
            return result.principalResult;
        } catch (error) {
            // Fixed copy-paste error message (was "while creating the user").
            Logger.error(`An error occurred while transforming dataset with publish_id ${dataset.publish_id}, error: ${error.message}`);
            return '';
        }
    }

    /**
     * Index a single JSON document into OpenSearch, using the dataset's
     * publish_id as the document id. Failures are logged, not rethrown, so one
     * bad dataset does not abort the whole run.
     */
    private async indexDocument(dataset: Dataset, index_name: string, doc: string): Promise<void> {
        try {
            const document = JSON.parse(doc);
            await client.index({
                id: dataset.publish_id?.toString(),
                index: index_name,
                body: document,
                refresh: true, // make the document searchable immediately
            });
            Logger.info(`dataset with publish_id ${dataset.publish_id} successfully indexed`);
        } catch (error) {
            // Include the underlying error so failures are diagnosable (also fixes "datsaet" typo).
            Logger.error(`An error occurred while indexing dataset with publish_id ${dataset.publish_id}, error: ${error.message}`);
        }
    }

    /**
     * Wrap the dataset's XML DOM node inside the given <Dataset> element.
     */
    private async createXmlRecord(dataset: Dataset, datasetNode: XMLBuilder): Promise<void> {
        const domNode = await this.getDatasetXmlDomNode(dataset);
        if (domNode) {
            datasetNode.import(domNode);
        }
    }

    /**
     * Build the XML DOM for a dataset via XmlModel, reusing the preloaded
     * xmlCache when available to avoid regenerating the document.
     */
    private async getDatasetXmlDomNode(dataset: Dataset): Promise<XMLBuilder | null> {
        const xmlModel = new XmlModel(dataset);
        xmlModel.excludeEmptyFields();
        xmlModel.caching = true;
        if (dataset.xmlCache) {
            xmlModel.xmlCache = dataset.xmlCache;
        }
        const domDocument: XMLBuilder | null = await xmlModel.getDomDocument();
        return domDocument;
    }
}