import type { HttpContextContract } from '@ioc:Adonis/Core/HttpContext';
import User from 'App/Models/User';
import Dataset from 'App/Models/Dataset';
import License from 'App/Models/License';
import Project from 'App/Models/Project';
import Title from 'App/Models/Title';
import Description from 'App/Models/Description';
import Language from 'App/Models/Language';
import Coverage from 'App/Models/Coverage';
import Collection from 'App/Models/Collection';
import { schema, CustomMessages, rules } from '@ioc:Adonis/Core/Validator';
import dayjs from 'dayjs';
import Person from 'App/Models/Person';
import Database from '@ioc:Adonis/Lucid/Database';
import { TransactionClientContract } from '@ioc:Adonis/Lucid/Database';
import Subject from 'App/Models/Subject';
import CreateDatasetValidator from 'App/Validators/CreateDatasetValidator';
import UpdateDatasetValidator from 'App/Validators/UpdateDatasetValidator';
import {
    TitleTypes,
    DescriptionTypes,
    ContributorTypes,
    PersonNameTypes,
    ReferenceIdentifierTypes,
    RelationTypes,
    DatasetTypes,
    SubjectTypes,
} from 'Contracts/enums';
import type { ModelQueryBuilderContract } from '@ioc:Adonis/Lucid/Orm';
import DatasetReference from 'App/Models/DatasetReference';
import { cuid } from '@ioc:Adonis/Core/Helpers';
import File from 'App/Models/File';
import ClamScan from 'clamscan';
import { ValidationException } from '@ioc:Adonis/Core/Validator';
import Drive from '@ioc:Adonis/Core/Drive';
import { Exception } from '@adonisjs/core/build/standalone';
import { MultipartFileContract } from '@ioc:Adonis/Core/BodyParser';

export default class DatasetController {
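
    /**
     * Lists the datasets of the currently authenticated user (paginated, optionally
     * sorted via the "sort" query parameter) and renders the submitter index page.
     */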
    public async index({ auth, request, inertia }: HttpContextContract) {
        const user = (await User.find(auth.user?.id)) as User;
        const page = request.input('page', 1);
        let datasets: ModelQueryBuilderContract<typeof Dataset, Dataset> = Dataset.query();

        // if (request.input('search')) {
        //     // users = users.whereRaw('name like %?%', [request.input('search')])
        //     const searchTerm = request.input('search');
        //     datasets.where('name', 'ilike', `%${searchTerm}%`);
        // }

        if (request.input('sort')) {
            type SortOrder = 'asc' | 'desc' | undefined;
            let attribute = request.input('sort');
            let sortOrder: SortOrder = 'asc';

            if (attribute.substr(0, 1) === '-') {
                sortOrder = 'desc';
                // attribute = substr(attribute, 1);
                attribute = attribute.substr(1);
            }
            datasets.orderBy(attribute, sortOrder);
        } else {
            // users.orderBy('created_at', 'desc');
            datasets.orderBy('id', 'asc');
        }

        // const results = await Database
        //     .query()
        //     .select(Database.raw("CONCAT('https://doi.org/', b.value) AS concatenated_value"))
        //     .from('documents as doc')
        //     .innerJoin('dataset_identifiers as b', 'doc.id', 'b.dataset_id')
        //     .groupBy('a.id').toQuery();

        // const users = await User.query().orderBy('login').paginate(page, limit);
        const myDatasets = await datasets
            .whereIn('server_state', [
                'inprogress',
                'released',
                'editor_accepted',
                'approved',
                'reviewed',
                'rejected_editor',
                'rejected_reviewer',
            ])
            .where('account_id', user.id)
            .preload('titles')
            .preload('user', (query) => query.select('id', 'login'))
            // .preload('titles', (builder) => {
            //     // pull the actual preload data
            //     builder.where('type', 'Main');
            // })
            .paginate(page, 10);

        return inertia.render('Submitter/Dataset/Index', {
            // testing: 'this is a test',
            datasets: myDatasets.serialize(),
            filters: request.all(),
            can: {
                // create: await auth.user?.can(['dataset-submit']),
                edit: await auth.user?.can(['dataset-edit']),
                delete: await auth.user?.can(['dataset-delete']),
            },
        });
    }
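
    /**
     * Renders the dataset creation form together with the select options it needs
     * (licenses, projects, doctypes, title/description types, reference and
     * relation types, contributor and subject types).
     */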
    public async create({ inertia }: HttpContextContract) {
        const licenses = await License.query().select('id', 'name_long').where('active', 'true').pluck('name_long', 'id');

        const projects = await Project.query().pluck('label', 'id');

        // const doctypes = {
        //     analysisdata: { label: 'Analysis', value: 'analysisdata' },
        //     measurementdata: { label: 'Measurements', value: 'measurementdata' },
        //     monitoring: 'Monitoring',
        //     remotesensing: 'Remote Sensing',
        //     gis: 'GIS',
        //     models: 'Models',
        //     mixedtype: 'Mixed Type',
        //     vocabulary: 'Vocabulary',
        // };

        // const languages = await Database.from('languages').select('*').where('active', true);
        return inertia.render('Submitter/Dataset/Create', {
            licenses: licenses,
            doctypes: DatasetTypes,
            titletypes: Object.entries(TitleTypes)
                .filter(([key]) => key !== 'Main')
                .map(([key, value]) => ({ value: key, label: value })),
            descriptiontypes: Object.entries(DescriptionTypes)
                .filter(([key]) => key !== 'Abstract')
                .map(([key, value]) => ({ value: key, label: value })),
            // descriptiontypes: DescriptionTypes
            projects: projects,
            referenceIdentifierTypes: Object.entries(ReferenceIdentifierTypes).map(([key, value]) => ({ value: key, label: value })),
            relationTypes: Object.entries(RelationTypes).map(([key, value]) => ({ value: key, label: value })),
            contributorTypes: ContributorTypes,
            subjectTypes: SubjectTypes,
        });
    }
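
    /**
     * Validates the inputs of the first wizard step (language, licenses, rights
     * statement) and redirects back to the form.
     */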
    public async firstStep({ request, response }: HttpContextContract) {
        const newDatasetSchema = schema.create({
            language: schema.string({ trim: true }, [
                rules.regex(/^[a-zA-Z0-9-_]+$/), // Must be alphanumeric with hyphens or underscores
            ]),
            licenses: schema.array([rules.minLength(1)]).members(schema.number()), // define at least one license for the new dataset
            rights: schema.string([rules.equalTo('true')]),
        });

        try {
            // Step 2 - Validate request body against the schema
            await request.validate({ schema: newDatasetSchema, messages: this.messages });
            // console.log({ payload });
        } catch (error) {
            // Step 3 - Handle errors
            // return response.badRequest(error.messages);
            throw error;
        }
        return response.redirect().back();
    }
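
    /**
     * Validates the inputs of the first two wizard steps (step one plus titles,
     * descriptions, authors and optional contributors) and redirects back.
     */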
    public async secondStep({ request, response }: HttpContextContract) {
        const newDatasetSchema = schema.create({
            // first step
            language: schema.string({ trim: true }, [
                rules.regex(/^[a-zA-Z0-9-_]+$/), // Must be alphanumeric with hyphens or underscores
            ]),
            licenses: schema.array([rules.minLength(1)]).members(schema.number()), // define at least one license for the new dataset
            rights: schema.string([rules.equalTo('true')]),
            // second step
            type: schema.string({ trim: true }, [rules.minLength(3), rules.maxLength(255)]),
            creating_corporation: schema.string({ trim: true }, [rules.minLength(3), rules.maxLength(255)]),
            titles: schema.array([rules.minLength(1)]).members(
                schema.object().members({
                    value: schema.string({ trim: true }, [rules.minLength(3), rules.maxLength(255)]),
                    type: schema.enum(Object.values(TitleTypes)),
                    language: schema.string({ trim: true }, [
                        rules.minLength(2),
                        rules.maxLength(255),
                        rules.translatedLanguage('/language', 'type'),
                    ]),
                }),
            ),
            descriptions: schema.array([rules.minLength(1)]).members(
                schema.object().members({
                    value: schema.string({ trim: true }, [rules.minLength(3), rules.maxLength(255)]),
                    type: schema.enum(Object.values(DescriptionTypes)),
                    language: schema.string({ trim: true }, [
                        rules.minLength(2),
                        rules.maxLength(255),
                        rules.translatedLanguage('/language', 'type'),
                    ]),
                }),
            ),
            authors: schema.array([rules.minLength(1)]).members(schema.object().members({ email: schema.string({ trim: true }) })),
            contributors: schema.array.optional().members(
                schema.object().members({
                    email: schema.string({ trim: true }),
                    pivot_contributor_type: schema.enum(Object.keys(ContributorTypes)),
                }),
            ),
            // project_id: schema.number(),
        });

        try {
            // Step 2 - Validate request body against the schema
            await request.validate({ schema: newDatasetSchema, messages: this.messages });
            // console.log({ payload });
        } catch (error) {
            // Step 3 - Handle errors
            // return response.badRequest(error.messages);
            throw error;
        }
        return response.redirect().back();
    }
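
    /**
     * Validates the complete wizard payload (steps one and two plus project,
     * embargo date, coverage, references and subjects) and redirects back.
     */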
    public async thirdStep({ request, response }: HttpContextContract) {
        const newDatasetSchema = schema.create({
            // first step
            language: schema.string({ trim: true }, [
                rules.regex(/^[a-zA-Z0-9-_]+$/), // Must be alphanumeric with hyphens or underscores
            ]),
            licenses: schema.array([rules.minLength(1)]).members(schema.number()), // define at least one license for the new dataset
            rights: schema.string([rules.equalTo('true')]),
            // second step
            type: schema.string({ trim: true }, [rules.minLength(3), rules.maxLength(255)]),
            creating_corporation: schema.string({ trim: true }, [rules.minLength(3), rules.maxLength(255)]),
            titles: schema.array([rules.minLength(1)]).members(
                schema.object().members({
                    value: schema.string({ trim: true }, [rules.minLength(3), rules.maxLength(255)]),
                    type: schema.enum(Object.values(TitleTypes)),
                    language: schema.string({ trim: true }, [
                        rules.minLength(2),
                        rules.maxLength(255),
                        rules.translatedLanguage('/language', 'type'),
                    ]),
                }),
            ),
            descriptions: schema.array([rules.minLength(1)]).members(
                schema.object().members({
                    value: schema.string({ trim: true }, [rules.minLength(3), rules.maxLength(255)]),
                    type: schema.enum(Object.values(DescriptionTypes)),
                    language: schema.string({ trim: true }, [
                        rules.minLength(2),
                        rules.maxLength(255),
                        rules.translatedLanguage('/language', 'type'),
                    ]),
                }),
            ),
            authors: schema.array([rules.minLength(1)]).members(schema.object().members({ email: schema.string({ trim: true }) })),
            contributors: schema.array.optional().members(
                schema.object().members({
                    email: schema.string({ trim: true }),
                    pivot_contributor_type: schema.enum(Object.keys(ContributorTypes)),
                }),
            ),
            // third step
            project_id: schema.number.optional(),
            embargo_date: schema.date.optional({ format: 'yyyy-MM-dd' }, [rules.after(10, 'days')]),
            coverage: schema.object().members({
                x_min: schema.number(),
                x_max: schema.number(),
                y_min: schema.number(),
                y_max: schema.number(),
                elevation_absolut: schema.number.optional(),
                elevation_min: schema.number.optional([rules.requiredIfExists('elevation_max')]),
                elevation_max: schema.number.optional([rules.requiredIfExists('elevation_min')]),
                depth_absolut: schema.number.optional(),
                depth_min: schema.number.optional([rules.requiredIfExists('depth_max')]),
                depth_max: schema.number.optional([rules.requiredIfExists('depth_min')]),
            }),
            references: schema.array.optional([rules.uniqueArray('value')]).members(
                schema.object().members({
                    value: schema.string({ trim: true }, [rules.minLength(3), rules.maxLength(255)]),
                    type: schema.enum(Object.values(ReferenceIdentifierTypes)),
                    relation: schema.enum(Object.values(RelationTypes)),
                    label: schema.string({ trim: true }, [rules.minLength(2), rules.maxLength(255)]),
                }),
            ),
            subjects: schema.array([rules.minLength(3), rules.uniqueArray('value')]).members(
                schema.object().members({
                    value: schema.string({ trim: true }, [
                        rules.minLength(3),
                        rules.maxLength(255),
                        // rules.unique({ table: 'dataset_subjects', column: 'value' }),
                    ]),
                    // type: schema.enum(Object.values(TitleTypes)),
                    language: schema.string({ trim: true }, [rules.minLength(2), rules.maxLength(255)]),
                }),
            ),
        });

        try {
            // Step 2 - Validate request body against the schema
            await request.validate({ schema: newDatasetSchema, messages: this.messages });
            // console.log({ payload });
        } catch (error) {
            // Step 3 - Handle errors
            // return response.badRequest(error.messages);
            throw error;
        }
        return response.redirect().back();
    }
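
    /**
     * Validates the submitted dataset via CreateDatasetValidator and persists it
     * together with all related models inside a single database transaction.
     */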
    public async store({ auth, request, response, session }: HttpContextContract) {
        // node ace make:validator CreateDataset
        try {
            // Step 2 - Validate request body against the schema
            // await request.validate({ schema: newDatasetSchema, messages: this.messages });
            await request.validate(CreateDatasetValidator);
            // console.log({ payload });
        } catch (error) {
            // Step 3 - Handle errors
            // return response.badRequest(error.messages);
            throw error;
        }

        let trx: TransactionClientContract | null = null;
        try {
            trx = await Database.transaction();
            const user = (await User.find(auth.user?.id)) as User;

            await this.createDatasetAndAssociations(user, request, trx);

            await trx.commit();
            console.log('Dataset and related models created successfully');
        } catch (error) {
            if (trx !== null) {
                await trx.rollback();
            }
            console.error('Failed to create dataset and related models:', error);
            // throw new ValidationException(true, { 'upload error': `failed to create dataset and related models. ${error}` });
            throw error;
        }

        session.flash('message', 'Dataset has been created successfully');
        return response.redirect().toRoute('dataset.list');
        // return response.redirect().back();
    }
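
    /**
     * Creates the dataset and all of its associations (licenses, persons, titles,
     * descriptions, references, subjects, collection, coverage and uploaded files)
     * using the given transaction client.
     */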
    private async createDatasetAndAssociations(user: User, request: HttpContextContract['request'], trx: TransactionClientContract) {
        // Create a new instance of the Dataset model:
        const dataset = new Dataset();
        dataset.type = request.input('type');
        dataset.creating_corporation = request.input('creating_corporation');
        dataset.language = request.input('language');
        dataset.embargo_date = request.input('embargo_date');
        // await dataset.related('user').associate(user); // already persists the dataset
        // Dataset.$getRelation('user').boot();
        // Dataset.$getRelation('user').setRelated(dataset, user);
        // dataset.$setRelated('user', user);
        await user.useTransaction(trx).related('datasets').save(dataset);

        // store licenses:
        const licenses: number[] = request.input('licenses', []);
        await dataset.useTransaction(trx).related('licenses').sync(licenses);

        // save authors and contributors
        await this.savePersons(dataset, request.input('authors', []), 'author', trx);
        await this.savePersons(dataset, request.input('contributors', []), 'contributor', trx);

        // save main and additional titles
        const titles = request.input('titles', []);
        for (const titleData of titles) {
            const title = new Title();
            title.value = titleData.value;
            title.language = titleData.language;
            title.type = titleData.type;
            await dataset.useTransaction(trx).related('titles').save(title);
        }

        // save descriptions
        const descriptions = request.input('descriptions', []);
        for (const descriptionData of descriptions) {
            const description = new Description();
            description.value = descriptionData.value;
            description.language = descriptionData.language;
            description.type = descriptionData.type;
            await dataset.useTransaction(trx).related('descriptions').save(description);
        }

        // save references
        const references = request.input('references', []);
        for (const referencePayload of references) {
            const dataReference = new DatasetReference();
            dataReference.fill(referencePayload);
            // $dataReference = new DatasetReference($reference);
            await dataset.useTransaction(trx).related('references').save(dataReference);
        }

        // save keywords
        const keywords = request.input('subjects', []);
        for (const keywordData of keywords) {
            // $dataKeyword = new Subject($keyword);
            // $dataset->subjects()->save($dataKeyword);
            const keyword = await Subject.firstOrNew({ value: keywordData.value, type: keywordData.type }, keywordData);
            if (keyword.$isNew === true) {
                await dataset.useTransaction(trx).related('subjects').save(keyword);
            } else {
                await dataset.useTransaction(trx).related('subjects').attach([keyword.id]);
            }
        }

        // save collection
        const collection: Collection | null = await Collection.query().where('id', 21).first();
        collection && (await dataset.useTransaction(trx).related('collections').attach([collection.id]));

        // save coverage
        const coverageData = request.input('coverage');
        if (coverageData) {
            // const formCoverage = request.input('coverage');
            const coverage = new Coverage();
            coverage.fill(coverageData);
            // await dataset.coverage().save(coverageData);
            await dataset.useTransaction(trx).related('coverage').save(coverage);
            // Alternatively, you can associate the dataset with the coverage and then save it:
            // await coverage.dataset().associate(dataset).save();
            // await coverage.useTransaction(trx).related('dataset').associate(dataset);
        }

        // save data files
        const uploadedFiles = request.files('files');
        for (const [index, file] of uploadedFiles.entries()) {
            try {
                await this.scanFileForViruses(file.tmpPath); //, 'gitea.lan', 3310);
                // await this.scanFileForViruses("/tmp/testfile.txt");
            } catch (error) {
                // If the file is infected or there's an error scanning the file, throw a validation exception
                throw error;
            }
            // clientName: 'Gehaltsschema.png'
            // extname: 'png'
            // fieldName: 'file'
            const fileName = `file-${cuid()}.${file.extname}`;
            const mimeType = file.headers['content-type'] || 'application/octet-stream'; // Fallback to a default MIME type
            const datasetFolder = `files/${dataset.id}`;
            // const size = file.size;
            await file.moveToDisk(
                datasetFolder,
                {
                    name: fileName,
                    overwrite: true, // overwrite in case of conflict
                },
                'local',
            );
            // save file metadata into db
            const newFile = new File();
            newFile.pathName = `${datasetFolder}/${fileName}`;
            newFile.fileSize = file.size;
            newFile.mimeType = mimeType;
            newFile.label = file.clientName;
            newFile.sortOrder = index;
            newFile.visibleInFrontdoor = true;
            newFile.visibleInOai = true;
            // let path = coverImage.filePath;
            await dataset.useTransaction(trx).related('files').save(newFile);
            await newFile.createHashValues(trx);
        }
    }
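
    /**
     * Scans a file with ClamAV and rejects with a ValidationException when the
     * file is infected or the scan fails. A clamdscan daemon is preferred; host
     * and port are optional and, when omitted, the clamscan library's defaults apply.
     */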
    private async scanFileForViruses(filePath, host?: string, port?: number): Promise<void> {
        // const clamscan = await (new ClamScan().init());
        const opts: ClamScan.Options = {
            removeInfected: true, // If true, removes infected files
            debugMode: false, // Whether or not to log info/debug/error msgs to the console
            scanRecursively: true, // If true, deep scan folders recursively
            clamdscan: {
                active: true, // If true, this module will consider using the clamdscan binary
                host,
                port,
                multiscan: true, // Scan using all available cores! Yay!
            },
            preference: 'clamdscan', // If clamdscan is found and active, it will be used by default
        };

        return new Promise(async (resolve, reject) => {
            try {
                const clamscan = await new ClamScan().init(opts);
                // You can re-use the `clamscan` object as many times as you want
                // const version = await clamscan.getVersion();
                // console.log(`ClamAV Version: ${version}`);
                const { file, isInfected, viruses } = await clamscan.isInfected(filePath);
                if (isInfected) {
                    console.log(`${file} is infected with ${viruses}!`);
                    reject(new ValidationException(true, { 'upload error': `File ${file} is infected!` }));
                } else {
                    resolve();
                }
            } catch (error) {
                // If there's an error scanning the file, throw a validation exception
                reject(new ValidationException(true, { 'upload error': `${error.message}` }));
            }
        });
    }
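
    /**
     * Links persons to the dataset with the given role: payload entries with an id
     * are attached, new ones are created and saved. Pivot columns (role, sort_order,
     * allow_email_contact) are merged with any `pivot_`-prefixed attributes of the
     * payload, e.g. pivot_contributor_type.
     */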
    private async savePersons(dataset: Dataset, persons: any[], role: string, trx: TransactionClientContract) {
        for (const [key, person] of persons.entries()) {
            const pivotData = {
                role: role,
                sort_order: key + 1,
                allow_email_contact: false,
                ...this.extractPivotAttributes(person), // Merge pivot attributes here
            };

            if (person.id !== undefined) {
                await dataset
                    .useTransaction(trx)
                    .related('persons')
                    .attach({
                        [person.id]: pivotData,
                    });
            } else {
                const dataPerson = new Person();
                dataPerson.fill(person);
                await dataset.useTransaction(trx).related('persons').save(dataPerson, false, pivotData);
            }
        }
    }
    // Helper function to extract pivot attributes from a person object
    private extractPivotAttributes(person: any) {
        const pivotAttributes = {};
        for (const key in person) {
            if (key.startsWith('pivot_')) {
                // pivotAttributes[key] = person[key];
                const cleanKey = key.replace('pivot_', ''); // Remove 'pivot_' prefix
                pivotAttributes[cleanKey] = person[key];
            }
        }
        return pivotAttributes;
    }
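
    /**
     * Custom validation messages shared by the inline validation schemas in this controller.
     */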
    public messages: CustomMessages = {
        'minLength': '{{ field }} must be at least {{ options.minLength }} characters long',
        'maxLength': '{{ field }} must be less than {{ options.maxLength }} characters long',
        'required': '{{ field }} is required',
        'unique': '{{ field }} must be unique, and this value is already taken',
        // 'confirmed': '{{ field }} is not correct',
        'licenses.minLength': 'at least {{ options.minLength }} license must be defined',
        'licenses.*.number': 'licenses must be defined as valid numbers',
        'rights.equalTo': 'you must agree to continue',

        'titles.0.value.minLength': 'Main Title must be at least {{ options.minLength }} characters long',
        'titles.0.value.required': 'Main Title is required',
        'titles.*.value.required': 'Additional title is required, if defined',
        'titles.*.type.required': 'Additional title type is required',
        'titles.*.language.required': 'Additional title language is required',
        'titles.*.language.translatedLanguage': 'The language of the translated title must be different from the language of the dataset',

        'descriptions.0.value.minLength': 'Main Abstract must be at least {{ options.minLength }} characters long',
        'descriptions.0.value.required': 'Main Abstract is required',
        'descriptions.*.value.required': 'Additional description is required, if defined',
        'descriptions.*.type.required': 'Additional description type is required',
        'descriptions.*.language.required': 'Additional description language is required',
        'descriptions.*.language.translatedLanguage':
            'The language of the translated description must be different from the language of the dataset',

        'authors.minLength': 'at least {{ options.minLength }} author must be defined',
        'contributors.*.pivot_contributor_type.required': 'contributor type is required, if defined',

        'after': `{{ field }} must be after ${dayjs().add(10, 'day')}`,

        'subjects.minLength': 'at least {{ options.minLength }} keywords must be defined',
        'subjects.uniqueArray': 'The {{ options.array }} array must have unique values based on the {{ options.field }} attribute.',
        'subjects.*.value.required': 'keyword value is required',
        'subjects.*.value.minLength': 'keyword value must be at least {{ options.minLength }} characters long',
        'subjects.*.type.required': 'keyword type is required',
        'subjects.*.language.required': 'language of keyword is required',

        'references.*.value.required': 'Additional reference value is required, if defined',
        'references.*.type.required': 'Additional reference identifier type is required',
        'references.*.relation.required': 'Additional reference relation type is required',
        'references.*.label.required': 'Additional reference label is required',

        'files.minLength': 'At least {{ options.minLength }} file upload is required.',
        'files.*.size': 'file size is too big',
        'files.extnames': 'file extension is not supported',
    };
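
    /**
     * Shows the release confirmation page for a dataset that is still in an
     * editable state ('inprogress' or 'rejected_editor').
     */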
    // public async release({ params, view }) {
    public async release({ request, inertia, response }: HttpContextContract) {
        const id = request.param('id');

        const dataset = await Dataset.query()
            .preload('user', (builder) => {
                builder.select('id', 'login');
            })
            .where('id', id)
            .firstOrFail();

        const validStates = ['inprogress', 'rejected_editor'];
        if (!validStates.includes(dataset.server_state)) {
            // session.flash('errors', 'Invalid server state!');
            return response
                .flash(
                    'warning',
                    `Invalid server state. Dataset with id ${id} cannot be released to editor. Dataset has server state ${dataset.server_state}.`,
                )
                .redirect()
                .back();
        }

        return inertia.render('Submitter/Dataset/Release', {
            dataset,
        });
    }
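
    /**
     * Releases a dataset to the editors: validates the optional preferred reviewer,
     * clears previous editor/reviewer assignments and rejection notes, and sets the
     * server state to 'released'.
     */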
    public async releaseUpdate({ request, response }: HttpContextContract) {
        const id = request.param('id');
        const dataset = await Dataset.query().preload('files').where('id', id).firstOrFail();

        const validStates = ['inprogress', 'rejected_editor'];
        if (!validStates.includes(dataset.server_state)) {
            // throw new Error('Invalid server state!');
            // return response.flash('warning', 'Invalid server state. Dataset cannot be released to editor').redirect().back();
            return response
                .flash(
                    'warning',
                    `Invalid server state. Dataset with id ${id} cannot be released to editor. Dataset has server state ${dataset.server_state}.`,
                )
                .redirect()
                .toRoute('dataset.list');
        }

        if (dataset.files.length === 0) {
            return response.flash('warning', 'At least one file is required.').redirect().back();
        }

        const preferation = request.input('preferation', '');
        const preferredReviewer = request.input('preferred_reviewer');
        const preferredReviewerEmail = request.input('preferred_reviewer_email');

        if (preferation === 'yes_preferation') {
            const newSchema = schema.create({
                preferred_reviewer: schema.string({ trim: true }, [rules.minLength(3), rules.maxLength(255)]),
                preferred_reviewer_email: schema.string([rules.email()]),
            });

            try {
                await request.validate({ schema: newSchema });
            } catch (error) {
                // return response.badRequest(error.messages);
                throw error;
            }
        }

        const input = {
            preferred_reviewer: preferredReviewer || null,
            preferred_reviewer_email: preferredReviewerEmail || null,
            server_state: 'released',
            editor_id: null,
            reviewer_id: null,
            reject_editor_note: null,
            reject_reviewer_note: null,
        };

        // Clear editor_id if it exists
        if (dataset.editor_id !== null) {
            input.editor_id = null;
        }

        // Clear reject_editor_note if it exists
        if (dataset.reject_editor_note !== null) {
            input.reject_editor_note = null;
        }

        // Clear reviewer_id if it exists
        if (dataset.reviewer_id !== null) {
            input.reviewer_id = null;
        }

        // Clear reject_reviewer_note if it exists
        if (dataset.reject_reviewer_note !== null) {
            input.reject_reviewer_note = null;
        }

        if (await dataset.merge(input).save()) {
            return response.flash('message', 'You have released your dataset!').redirect().toRoute('dataset.list');
        }
        // throw new GeneralException(trans('exceptions.publish.release.update_error'));
    }
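
    /**
     * Renders the edit form for a dataset in an editable state, preloading all
     * related models and the select options required by the form.
     */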
    public async edit({ request, inertia, response }: HttpContextContract) {
        const id = request.param('id');
        const datasetQuery = Dataset.query().where('id', id);
        datasetQuery
            .preload('titles', (query) => query.orderBy('id', 'asc'))
            .preload('descriptions', (query) => query.orderBy('id', 'asc'))
            .preload('coverage')
            .preload('licenses')
            .preload('authors')
            .preload('contributors')
            // .preload('subjects')
            .preload('subjects', (builder) => {
                builder.orderBy('id', 'asc').withCount('datasets');
            })
            .preload('references')
            .preload('files');

        const dataset = await datasetQuery.firstOrFail();
        const validStates = ['inprogress', 'rejected_editor'];
        if (!validStates.includes(dataset.server_state)) {
            // session.flash('errors', 'Invalid server state!');
            return response
                .flash(
                    'warning',
                    `Invalid server state. Dataset with id ${id} cannot be edited. Dataset has server state ${dataset.server_state}.`,
                )
                .redirect()
                .toRoute('dataset.list');
        }

        const titleTypes = Object.entries(TitleTypes)
            .filter(([key]) => key !== 'Main')
            .map(([key, value]) => ({ value: key, label: value }));

        const descriptionTypes = Object.entries(DescriptionTypes)
            .filter(([key]) => key !== 'Abstract')
            .map(([key, value]) => ({ value: key, label: value }));

        const languages = await Language.query().where('active', true).pluck('part1', 'part1');

        // const contributorTypes = Config.get('enums.contributor_types');
        const contributorTypes = Object.entries(ContributorTypes).map(([key, value]) => ({ value: key, label: value }));

        // const nameTypes = Config.get('enums.name_types');
        const nameTypes = Object.entries(PersonNameTypes).map(([key, value]) => ({ value: key, label: value }));

        // const messages = await Database.table('messages')
        //     .pluck('help_text', 'metadata_element');

        const projects = await Project.query().pluck('label', 'id');

        const currentDate = new Date();
        const currentYear = currentDate.getFullYear();
        const years = Array.from({ length: currentYear - 1990 + 1 }, (_, index) => 1990 + index);

        const licenses = await License.query().select('id', 'name_long').where('active', 'true').pluck('name_long', 'id');
        // const userHasRoles = user.roles;
        // const datasetHasLicenses = await dataset.related('licenses').query().pluck('id');
        // const checkeds = dataset.licenses.first().id;

        const doctypes = {
            analysisdata: { label: 'Analysis', value: 'analysisdata' },
            measurementdata: { label: 'Measurements', value: 'measurementdata' },
            monitoring: 'Monitoring',
            remotesensing: 'Remote Sensing',
            gis: 'GIS',
            models: 'Models',
            mixedtype: 'Mixed Type',
        };

        return inertia.render('Submitter/Dataset/Edit', {
            dataset,
            titletypes: titleTypes,
            descriptiontypes: descriptionTypes,
            contributorTypes,
            nameTypes,
            languages,
            // messages,
            projects,
            licenses,
            // datasetHasLicenses: Object.keys(datasetHasLicenses).map((key) => datasetHasLicenses[key]), //convert object to array with license ids
            // checkeds,
            years,
            // languages,
            subjectTypes: SubjectTypes,
            referenceIdentifierTypes: Object.entries(ReferenceIdentifierTypes).map(([key, value]) => ({ value: key, label: value })),
            relationTypes: Object.entries(RelationTypes).map(([key, value]) => ({ value: key, label: value })),
            doctypes,
        });
    }
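
    /**
     * Validates the submitted changes via UpdateDatasetValidator and updates the
     * dataset and its related models (licenses, persons, titles, descriptions,
     * subjects and files) inside a single database transaction.
     */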
    public async update({ request, response, session }: HttpContextContract) {
        try {
            // await request.validate({ schema: newDatasetSchema, messages: this.messages });
            await request.validate(UpdateDatasetValidator);
        } catch (error) {
            // Handle errors
            // return response.badRequest(error.messages);
            throw error;
        }
        // await request.validate(UpdateDatasetValidator);
        const id = request.param('id');

        let trx: TransactionClientContract | null = null;
        try {
            trx = await Database.transaction();
            // const user = (await User.find(auth.user?.id)) as User;
            // await this.createDatasetAndAssociations(user, request, trx);
            const dataset = await Dataset.findOrFail(id);

            // save the licenses
            const licenses: number[] = request.input('licenses', []);
            await dataset.useTransaction(trx).related('licenses').sync(licenses);

            // save authors and contributors
            await dataset.useTransaction(trx).related('authors').sync([]);
            await dataset.useTransaction(trx).related('contributors').sync([]);
            await this.savePersons(dataset, request.input('authors', []), 'author', trx);
            await this.savePersons(dataset, request.input('contributors', []), 'contributor', trx);

            // save the titles:
            const titles = request.input('titles', []);
            // const savedTitles:Array<Title> = [];
            for (const titleData of titles) {
                if (titleData.id) {
                    const title = await Title.findOrFail(titleData.id);
                    title.value = titleData.value;
                    title.language = titleData.language;
                    title.type = titleData.type;
                    if (title.$isDirty) {
                        await title.useTransaction(trx).save();
                        // await dataset.useTransaction(trx).related('titles').save(title);
                        // savedTitles.push(title);
                    }
                } else {
                    const title = new Title();
                    title.fill(titleData);
                    // savedTitles.push(title);
                    await dataset.useTransaction(trx).related('titles').save(title);
                }
            }

            // save the abstracts
            const descriptions = request.input('descriptions', []);
            // const savedTitles:Array<Title> = [];
            for (const descriptionData of descriptions) {
                if (descriptionData.id) {
                    const description = await Description.findOrFail(descriptionData.id);
                    description.value = descriptionData.value;
                    description.language = descriptionData.language;
                    description.type = descriptionData.type;
                    if (description.$isDirty) {
                        await description.useTransaction(trx).save();
                        // await dataset.useTransaction(trx).related('titles').save(title);
                        // savedTitles.push(title);
                    }
                } else {
                    const description = new Description();
                    description.fill(descriptionData);
                    // savedTitles.push(title);
                    await dataset.useTransaction(trx).related('descriptions').save(description);
                }
            }

            // await dataset.useTransaction(trx).related('subjects').sync([]);
            const keywords = request.input('subjects', []);
            for (const keywordData of keywords) {
                if (keywordData.id) {
                    const subject = await Subject.findOrFail(keywordData.id);
                    // await dataset.useTransaction(trx).related('subjects').attach([keywordData.id]);
                    subject.value = keywordData.value;
                    subject.type = keywordData.type;
                    subject.external_key = keywordData.external_key;
                    if (subject.$isDirty) {
                        await subject.save();
                    }
                } else {
                    const keyword = new Subject();
                    keyword.fill(keywordData);
                    await dataset.useTransaction(trx).related('subjects').save(keyword, false);
                }
            }

            // Save already existing files
            const files = request.input('fileInputs', []);
            for (const fileData of files) {
                if (fileData.id) {
                    const file = await File.findOrFail(fileData.id);
                    file.label = fileData.label;
                    file.sortOrder = fileData.sort_order;
                    if (file.$isDirty) {
                        await file.useTransaction(trx).save();
                    }
                }
            }

            // handle new uploaded files:
            const uploadedFiles: MultipartFileContract[] = request.files('files');
            if (Array.isArray(uploadedFiles) && uploadedFiles.length > 0) {
                for (const [index, fileData] of uploadedFiles.entries()) {
                    try {
                        await this.scanFileForViruses(fileData.tmpPath); //, 'gitea.lan', 3310);
                        // await this.scanFileForViruses("/tmp/testfile.txt");
                    } catch (error) {
                        // If the file is infected or there's an error scanning the file, throw a validation exception
                        throw error;
                    }

                    // move to disk:
                    const fileName = `file-${cuid()}.${fileData.extname}`;
                    const datasetFolder = `files/${dataset.id}`;
                    await fileData.moveToDisk(datasetFolder, { name: fileName, overwrite: true }, 'local');
                    // let path = coverImage.filePath;

                    // save to db:
                    const { clientFileName, sortOrder } = this.extractVariableNameAndSortOrder(fileData.clientName);
                    const mimeType = fileData.headers['content-type'] || 'application/octet-stream'; // Fallback to a default MIME type
                    const newFile = await dataset
                        .useTransaction(trx)
                        .related('files')
                        .create({
                            pathName: `${datasetFolder}/${fileName}`,
                            fileSize: fileData.size,
                            mimeType,
                            label: clientFileName,
                            sortOrder: sortOrder || index,
                            visibleInFrontdoor: true,
                            visibleInOai: true,
                        });

                    // save many related HashValue instances to the file:
                    await newFile.createHashValues(trx);
                }
            }

            // save collection
            // const collection: Collection | null = await Collection.query().where('id', 21).first();
            // collection && (await dataset.useTransaction(trx).related('collections').attach([collection.id]));

            // // Save coverage
            // if (data.coverage && !this.containsOnlyNull(data.coverage)) {
            //     const formCoverage = request.input('coverage');
            //     const coverage = await dataset.related('coverage').updateOrCreate({ dataset_id: dataset.id }, formCoverage);
            // } else if (data.coverage && this.containsOnlyNull(data.coverage) && !dataset.coverage) {
            //     await dataset.coverage().delete();
            // }

            const input = request.only(['project_id', 'embargo_date', 'language', 'type', 'creating_corporation']);
            // dataset.type = request.input('type');
            dataset.merge(input);
            // let test: boolean = dataset.$isDirty;
            await dataset.useTransaction(trx).save();

            await trx.commit();
            console.log('Dataset and related models updated successfully');
        } catch (error) {
            if (trx !== null) {
                await trx.rollback();
            }
            console.error('Failed to update dataset and related models:', error);
            // throw new ValidationException(true, { 'upload error': `failed to update dataset and related models. ${error}` });
            throw error;
        }

        session.flash('message', 'Dataset has been updated successfully');
        // return response.redirect().toRoute('user.index');
        return response.redirect().back();
    }
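
    /**
     * Splits a client file name of the form "name?sortOrder=n" into the plain file
     * name and its numeric sort order, e.g. "data.csv?sortOrder=2" (illustrative
     * value) yields { clientFileName: 'data.csv', sortOrder: 2 }.
     */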
    private extractVariableNameAndSortOrder(inputString: string): { clientFileName: string; sortOrder?: number } {
        const regex = /^([^?]+)(?:\?([^=]+)=([^&]+))?/;
        const match = inputString.match(regex);

        if (match) {
            const clientFileName = match[1];

            const param = match[2];
            let sortOrder;
            if (param && param.toLowerCase() === 'sortorder') {
                sortOrder = parseInt(match[3], 10);
            }

            return { clientFileName, sortOrder };
        } else {
            return { clientFileName: '', sortOrder: undefined }; // Or handle as needed for no match
        }
    }
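
    /**
     * Shows the delete confirmation page for a dataset, provided it is still in a
     * deletable state ('inprogress' or 'rejected_editor').
     */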
    public async delete({ request, inertia, response, session }: HttpContextContract) {
        const id = request.param('id');
        try {
            const dataset = await Dataset.query()
                .preload('user', (builder) => {
                    builder.select('id', 'login');
                })
                .where('id', id)
                .preload('files')
                .firstOrFail();
            const validStates = ['inprogress', 'rejected_editor'];
            if (!validStates.includes(dataset.server_state)) {
                // session.flash('errors', 'Invalid server state!');
                return response
                    .flash(
                        'warning',
                        `Invalid server state. Dataset with id ${id} cannot be deleted. Dataset has server state ${dataset.server_state}.`,
                    )
                    .redirect()
                    .toRoute('dataset.list');
            }
            return inertia.render('Submitter/Dataset/Delete', {
                dataset,
            });
        } catch (error) {
            if (error.code == 'E_ROW_NOT_FOUND') {
                session.flash({ warning: 'Dataset is not found in database' });
            } else {
                session.flash({ warning: 'a general error occurred, you cannot delete the dataset' });
            }
            return response.redirect().toRoute('dataset.list');
        }
    }
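
    /**
     * Deletes a dataset in a deletable state together with its files (on disk and
     * in the database) and removes the dataset folder when it is empty.
     */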
    public async deleteUpdate({ params, session, response }: HttpContextContract) {
        try {
            const dataset = await Dataset.query().where('id', params.id).preload('files').firstOrFail();

            const validStates = ['inprogress', 'rejected_editor'];
            if (validStates.includes(dataset.server_state)) {
                if (dataset.files && dataset.files.length > 0) {
                    for (const file of dataset.files) {
                        // the overwritten delete method also deletes the file from the file system
                        await file.delete();
                    }
                }
                const datasetFolder = `files/${params.id}`;
                const folderExists = await Drive.exists(datasetFolder);
                if (folderExists) {
                    const folderContents = await Drive.list(datasetFolder).toArray();
                    if (folderContents.length === 0) {
                        await Drive.delete(datasetFolder);
                    }
                    // delete dataset with its relations from the db
                    await dataset.delete();
                    session.flash({ message: 'You have deleted 1 dataset!' });
                    return response.redirect().toRoute('dataset.list');
                } else {
                    session.flash({
                        warning: `You cannot delete this dataset! Invalid server_state: "${dataset.server_state}"!`,
                    });
                    return response.status(400).redirect().back();
                }
            }
        } catch (error) {
            if (error instanceof ValidationException) {
                // Validation exception handling
                throw error;
            } else if (error instanceof Exception) {
                // General exception handling
                return response
                    .flash({ errors: { error: error.message } })
                    .redirect()
                    .back();
            } else {
                session.flash({ error: 'An error occurred while deleting the dataset.' });
                return response.redirect().back();
            }
        }
    }
}