forked from geolba/tethys.backend
Arno Kaimbacher
ebb24cc75c
- added model DocumentXmlCache.ts - npm updates - changed all models inside app/Models to use corrected BaseModel.ts - added extra extension class DatasetExtension.ts for app/dataset.ts for caching internal and external fields
import Title from 'App/Models/Title';
import Description from 'App/Models/Description';
import License from 'App/Models/License';
import Person from 'App/Models/Person';
import DatasetReference from 'App/Models/DatasetReference';
import DatasetIdentifier from 'App/Models/DatasetIdentifier';
import Subject from 'App/Models/Subject';
import File from 'App/Models/File';
import Coverage from 'App/Models/Coverage';
import Collection from 'App/Models/Collection';
import { BaseModel as LucidBaseModel } from '@ioc:Adonis/Lucid/Orm';
import Field from 'App/Library/Field';
import { DateTime } from 'luxon';

// @StaticImplements<LucidModel>()
// class LucidDatasetModel extends BaseModel{
//     @belongsTo(() => Dataset, {
//         foreignKey: 'dataset_id',
//     })
//     dataset: BelongsTo<typeof Dataset>;
// }

export type DatasetRelatedModel =
    | typeof Title
    | typeof Description
    | typeof Coverage
    | typeof DatasetIdentifier
    | typeof DatasetReference
    | typeof File;

export default abstract class DatasetExtension extends LucidBaseModel {
    public abstract id;
    public externalFields: Record<string, any> = this.getExternalFields();
    // fields that should not be published
    protected internalFields: Record<string, any> = {};
    protected fields: Record<string, any> = {};

    private getExternalFields(): Record<string, any> {
        // External fields definition
        return {
            TitleMain: {
                model: Title,
                options: { type: ['Main'] },
                fetch: 'eager',
            },
            TitleAdditional: {
                model: Title,
                options: { type: ['Alternative', 'Sub', 'Translated', 'Other'] },
                fetch: 'eager',
            },
            TitleAbstract: {
                model: Description,
                options: { type: ['Abstract', 'Translated'] },
                fetch: 'eager',
            },
            TitleAbstractAdditional: {
                model: Description,
                options: { type: ['Methods', 'Technical_info', 'Series_information', 'Other'] },
                fetch: 'eager',
            },
            Licence: {
                model: License,
                through: 'link_documents_licences',
                relation: 'licenses',
                fetch: 'eager',
            },
            PersonAuthor: {
                model: Person,
                through: 'link_documents_persons',
                pivot: { role: 'author', sort_order: 'sort_order', allow_email_contact: 'allow_email_contact' },
                relation: 'persons',
                fetch: 'eager',
            },
            PersonContributor: {
                model: Person,
                through: 'link_documents_persons',
                pivot: {
                    role: 'contributor',
                    contributor_type: 'contributor_type',
                    sort_order: 'sort_order',
                    allow_email_contact: 'allow_email_contact',
                },
                relation: 'persons',
                fetch: 'eager',
            },
            Reference: {
                model: DatasetReference,
                relation: 'references',
                fetch: 'eager',
            },
            Identifier: {
                model: DatasetIdentifier,
                relation: 'identifier',
                fetch: 'eager',
            },
            Subject: {
                model: Subject,
                through: 'link_dataset_subjects',
                relation: 'subjects',
                fetch: 'eager',
            },
            File: {
                model: File,
                relation: 'files',
                fetch: 'eager',
            },
            Coverage: {
                model: Coverage,
                relation: 'coverage',
                fetch: 'eager',
            },
            Collection: {
                model: Collection,
                through: 'link_documents_collections',
                relation: 'collections',
                fetch: 'eager',
                // 'include': { 'model': CollectionRole, 'relation': 'collectionrole' }
            },
        };
    }

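    // For reference, each entry returned by getExternalFields() follows roughly the shape
    // sketched below. This is a descriptive comment only; the property itself stays typed
    // as Record<string, any>, and the interface name is purely illustrative.
    //
    // interface ExternalFieldDefinition {
    //     model: typeof LucidBaseModel;     // related model class (Title, Description, Person, ...)
    //     options?: Record<string, any>;    // query constraints, e.g. { type: ['Main'] }
    //     through?: string;                 // pivot table name for many-to-many relations
    //     pivot?: Record<string, string>;   // pivot columns to expose; 'role' is used for filtering
    //     relation?: string;                // relation name defined on the concrete dataset model
    //     fetch: 'eager';
    // }
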
    public initFields(): void {
        // Initialize internal fields
        let fields = new Array<string>(
            'Id',
            'PublisherName',
            'PublishId',
            'ContributingCorporation',
            'CreatingCorporation',
            'Language',
            'PublishedDate',
            // 'PublishedYear',
            // 'PublisherPlace',
            'PublicationState',
            'EmbargoDate',
            'CreatedAt',
            'ServerDateModified',
            'ServerDatePublished',
            'ServerDateDeleted',
            'ServerState',
            'Type',
            'BelongsToBibliography',
        );
        fields.forEach((fieldname) => {
            let field = new Field(fieldname);
            this.addField(field);
        });

        // Initialize external fields
        const fieldNames = Object.keys(this.externalFields);
        for (const fieldName of fieldNames) {
            // const field = this.externalFields[fieldName];
            let field = new Field(fieldName);
            field.setMultiplicity('*');
            this.addField(field);
        }

        // Initialize the available date fields and set up the date value
        // if the particular field is present
        let dateFields = new Array<string>('EmbargoDate', 'CreatedAt', 'ServerDatePublished', 'ServerDateDeleted');
        dateFields.forEach((fieldname) => {
            let dateField = this.getField(fieldname);
            dateField instanceof Field && dateField.setValueModelClass(DateTime.now());
        });
    }

    public async describe(): Promise<Array<string>> {
        let length: number = Object.keys(this.fields).length;
        if (length === 0) {
            await this.fetchValues();
        }
        // Get an array of all field names in the 'fields' object
        const allFields = Object.keys(this.fields);
        // Get an array of all field names in the 'internalFields' object
        const internalFields = Object.keys(this.internalFields);
        // Use `filter` to keep only field names that are not in 'internalFields'
        const filteredFields = allFields.filter((fieldName) => !internalFields.includes(fieldName));
        return filteredFields;
    }

    private addField(field: Field): void {
        // Add field
        const fieldName = field.getName();
        if (fieldName && this.externalFields[fieldName]) {
            const options = this.externalFields[fieldName];

            // Set ValueModelClass if a model option is given
            if (options.model) {
                field.setValueModelClass(options.model);
            }
        }

        this.fields[field.getName()] = field;
        // field.setOwningModelClass(this.constructor.name);
    }

    public getField(name: string): Field | null {
        // Get field
        return this.fields[name] !== undefined ? this.fields[name] : null;
    }

    public async fetchValues(): Promise<void> {
        this.initFields();
        await this.loadFieldValues();
    }

    private async loadFieldValues(): Promise<void> {
        for (const [fieldname, field] of Object.entries(this.fields)) {
            if (this.externalFields.hasOwnProperty(fieldname)) {
                // external fields are loaded via their model relation
                await this.loadExternal(fieldname);
            } else {
                // dataset attributes themselves: the field is not external and is read
                // directly from the model attribute (field name converted to snake_case)
                const property_name = this.convertFieldnameToColumn(fieldname);
                const fieldVal = this[property_name];

                // Explicitly set null if the field represents a model, except for dates.
                if (field.getValueModelClass() !== null) {
                    field.setValue(fieldVal === undefined || fieldVal === null ? null : fieldVal);
                } else {
                    field.setValue(fieldVal);
                }
            }
        }
    }

    private async loadExternal(fieldname: string): Promise<void> {
        const field = this.fields[fieldname];

        // let modelclass: typeof Title | typeof Description;
        let modelclass: DatasetRelatedModel = field.getValueModelClass();
        let modelInstance = new modelclass();

        // Create a query builder
        const select = modelclass.query();

        // If any constraints are declared, add them to the query
        if (this.externalFields[fieldname]?.options) {
            const options: Record<string, any> = this.externalFields[fieldname].options;
            for (const [column, value] of Object.entries(options)) {
                if (Array.isArray(value)) {
                    select.whereIn(column, value);
                } else {
                    select.where(column, value);
                }
            }
        }

        // Get dependent rows
        const result: Record<string, any>[] = [];
        const datasetId = this.id;
        let rows: any[] = [];

        if (this.externalFields[fieldname]?.through) {
            const relation = this.externalFields[fieldname].relation;
            // rows = this[relation];
            rows = await this.related(relation).query();

            if (this.externalFields[fieldname].pivot) {
                const pivotArray = this.externalFields[fieldname].pivot;
                const pivotValue = pivotArray.role;
                // rows = this[relation]().wherePivot('role', pivotValue).get();
                rows = await this.related(relation).query().wherePivot('role', pivotValue);
            }
        } else if (modelInstance.hasOwnProperty('dataset')) {
            rows = await select
                .whereHas('dataset', (q) => {
                    q.where('id', datasetId);
                })
                .orderBy('id')
                .select();
        }

        // 1..n relations
        for (const row of rows) {
            const attributes = Object.keys(row.$attributes);

            if (this.externalFields[fieldname]?.pivot) {
                const pivotArray = this.externalFields[fieldname].pivot;
                const arrayKeys = Object.keys(pivotArray);
                const extendedArrayKeys = arrayKeys.map((pivotAttribute) => {
                    return `pivot_${pivotAttribute}`;
                });
                attributes.push(...extendedArrayKeys);
            }

            const objArray: Record<string, any> = {};

            for (const property_name of attributes) {
                let fieldName = this.convertColumnToFieldname(property_name);
                let fieldval: any = '';

                if (property_name.startsWith('pivot_')) {
                    const str = property_name.replace('pivot_', '');
                    fieldName = this.convertColumnToFieldname(str);
                    // fieldval = row.$pivot[str];
                    fieldval = row.$extras[property_name];
                } else if (fieldName === 'Type') {
                    fieldval = row[property_name]?.charAt(0).toUpperCase() + row[property_name]?.slice(1);
                } else {
                    fieldval = row[property_name];
                }

                objArray[fieldName] = fieldval;
            }

            result.push(objArray);
        }

        // Set the field value
        field.setValue(result);
    }

    // convert a field name to its snake_case column name
    private convertFieldnameToColumn(fieldname: string): string {
        return fieldname.replace(/([a-z\d])([A-Z])/g, '$1_$2').toLowerCase();
    }

    // convert a snake_case column name to its PascalCase field name
    private convertColumnToFieldname(columnName: string): string {
        return columnName
            .split(/[-_]/)
            .map((word) => word.charAt(0).toUpperCase() + word.slice(1))
            .join('');
    }
}
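
// Illustrative usage sketch (not part of this module): a concrete Dataset model is
// expected to extend DatasetExtension, and a caller (e.g. an XML cache builder) could
// collect the publishable fields roughly as below. The Dataset import path and a
// Field.getValue() accessor (the counterpart of setValue()) are assumptions here.
//
// import Dataset from 'App/Models/Dataset';
//
// async function collectPublicFields(dataset: Dataset): Promise<Record<string, any>> {
//     await dataset.fetchValues(); // runs initFields() and loadFieldValues()
//     const publicFieldNames = await dataset.describe(); // all fields minus internalFields
//     const output: Record<string, any> = {};
//     for (const name of publicFieldNames) {
//         const field = dataset.getField(name);
//         if (field !== null) {
//             output[name] = field.getValue();
//         }
//     }
//     return output;
// }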