import type { HttpContextContract } from '@ioc:Adonis/Core/HttpContext';
import User from 'App/Models/User';
import Dataset from 'App/Models/Dataset';
import License from 'App/Models/License';
import Project from 'App/Models/Project';
import Title from 'App/Models/Title';
import Description from 'App/Models/Description';
import Language from 'App/Models/Language';
import Coverage from 'App/Models/Coverage';
import Collection from 'App/Models/Collection';
import { schema, CustomMessages, rules } from '@ioc:Adonis/Core/Validator';
import dayjs from 'dayjs';
import Person from 'App/Models/Person';
import Database from '@ioc:Adonis/Lucid/Database';
import { TransactionClientContract } from '@ioc:Adonis/Lucid/Database';
import Subject from 'App/Models/Subject';
import CreateDatasetValidator from 'App/Validators/CreateDatasetValidator';
import UpdateDatasetValidator from 'App/Validators/UpdateDatasetValidator';
import {
    TitleTypes,
    DescriptionTypes,
    ContributorTypes,
    PersonNameTypes,
    ReferenceIdentifierTypes,
    RelationTypes,
    DatasetTypes,
    SubjectTypes,
} from 'Contracts/enums';
import type { ModelQueryBuilderContract } from '@ioc:Adonis/Lucid/Orm';
import DatasetReference from 'App/Models/DatasetReference';
import { cuid } from '@ioc:Adonis/Core/Helpers';
import File from 'App/Models/File';
import ClamScan from 'clamscan';
import { ValidationException } from '@ioc:Adonis/Core/Validator';
import Drive from '@ioc:Adonis/Core/Drive';
import { Exception } from '@adonisjs/core/build/standalone';

export default class DatasetController {
    public async index({ auth, request, inertia }: HttpContextContract) {
        const user = (await User.find(auth.user?.id)) as User;
        const page = request.input('page', 1);
        let datasets: ModelQueryBuilderContract<typeof Dataset, Dataset> = Dataset.query();

        // if (request.input('search')) {
        //     // users = users.whereRaw('name like %?%', [request.input('search')])
        //     const searchTerm = request.input('search');
        //     datasets.where('name', 'ilike', `%${searchTerm}%`);
        // }

        if (request.input('sort')) {
            type SortOrder = 'asc' | 'desc' | undefined;
            let attribute = request.input('sort');
            let sortOrder: SortOrder = 'asc';
            if (attribute.startsWith('-')) {
                sortOrder = 'desc';
                attribute = attribute.slice(1);
            }
            datasets.orderBy(attribute, sortOrder);
        } else {
            // users.orderBy('created_at', 'desc');
            datasets.orderBy('id', 'asc');
        }

        // const results = await Database
        //     .query()
        //     .select(Database.raw("CONCAT('https://doi.org/', b.value) AS concatenated_value"))
        //     .from('documents as doc')
        //     .innerJoin('dataset_identifiers as b', 'doc.id', 'b.dataset_id')
        //     .groupBy('a.id').toQuery();

        // const users = await User.query().orderBy('login').paginate(page, limit);
        const myDatasets = await datasets
            .whereIn('server_state', [
                'inprogress',
                'released',
                'editor_accepted',
                'approved',
                'reviewed',
                'rejected_editor',
                'rejected_reviewer',
            ])
            .where('account_id', user.id)
            .preload('titles')
            .preload('user', (query) => query.select('id', 'login'))
            // .preload('titles', (builder) => {
            //     // pull the actual preload data
            //     builder.where('type', 'Main');
            // })
            .paginate(page, 10);

        return inertia.render('Submitter/Dataset/Index', {
            // testing: 'this is a test',
            datasets: myDatasets.serialize(),
            filters: request.all(),
            can: {
                // create: await auth.user?.can(['dataset-submit']),
                edit: await auth.user?.can(['dataset-edit']),
                delete: await auth.user?.can(['dataset-delete']),
            },
        });
    }
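    // Renders the dataset creation form with select options built from the License and Project
    // tables and the enums in Contracts/enums. 'Main' titles and 'Abstract' descriptions are
    // filtered out, presumably because the form renders those two entries as fixed fields.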
    public async create({ inertia }: HttpContextContract) {
        const licenses = await License.query().select('id', 'name_long').where('active', 'true').pluck('name_long', 'id');
        const projects = await Project.query().pluck('label', 'id');

        // const doctypes = {
        //     analysisdata: { label: 'Analysis', value: 'analysisdata' },
        //     measurementdata: { label: 'Measurements', value: 'measurementdata' },
        //     monitoring: 'Monitoring',
        //     remotesensing: 'Remote Sensing',
        //     gis: 'GIS',
        //     models: 'Models',
        //     mixedtype: 'Mixed Type',
        //     vocabulary: 'Vocabulary',
        // };
        // const languages = await Database.from('languages').select('*').where('active', true);

        return inertia.render('Submitter/Dataset/Create', {
            licenses: licenses,
            doctypes: DatasetTypes,
            titletypes: Object.entries(TitleTypes)
                .filter(([key]) => key !== 'Main')
                .map(([key, value]) => ({ value: key, label: value })),
            descriptiontypes: Object.entries(DescriptionTypes)
                .filter(([key]) => key !== 'Abstract')
                .map(([key, value]) => ({ value: key, label: value })),
            // descriptiontypes: DescriptionTypes
            projects: projects,
            referenceIdentifierTypes: Object.entries(ReferenceIdentifierTypes).map(([key, value]) => ({ value: key, label: value })),
            relationTypes: Object.entries(RelationTypes).map(([key, value]) => ({ value: key, label: value })),
            contributorTypes: ContributorTypes,
            subjectTypes: SubjectTypes,
        });
    }

    public async firstStep({ request, response }: HttpContextContract) {
        const newDatasetSchema = schema.create({
            language: schema.string({ trim: true }, [
                rules.regex(/^[a-zA-Z0-9-_]+$/), // Must be alphanumeric with hyphens or underscores
            ]),
            licenses: schema.array([rules.minLength(1)]).members(schema.number()), // define at least one license for the new dataset
            rights: schema.string([rules.equalTo('true')]),
        });
        try {
            // Step 2 - Validate request body against the schema
            await request.validate({ schema: newDatasetSchema, messages: this.messages });
            // console.log({ payload });
        } catch (error) {
            // Step 3 - Handle errors
            // return response.badRequest(error.messages);
            throw error;
        }
        return response.redirect().back();
    }
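    // Validates the second step of the multi-step form: it re-validates the first-step fields and
    // additionally checks dataset type, creating corporation, titles, descriptions, authors and
    // optional contributors.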
    public async secondStep({ request, response }: HttpContextContract) {
        const newDatasetSchema = schema.create({
            // first step
            language: schema.string({ trim: true }, [
                rules.regex(/^[a-zA-Z0-9-_]+$/), // Must be alphanumeric with hyphens or underscores
            ]),
            licenses: schema.array([rules.minLength(1)]).members(schema.number()), // define at least one license for the new dataset
            rights: schema.string([rules.equalTo('true')]),
            // second step
            type: schema.string({ trim: true }, [rules.minLength(3), rules.maxLength(255)]),
            creating_corporation: schema.string({ trim: true }, [rules.minLength(3), rules.maxLength(255)]),
            titles: schema.array([rules.minLength(1)]).members(
                schema.object().members({
                    value: schema.string({ trim: true }, [rules.minLength(3), rules.maxLength(255)]),
                    type: schema.enum(Object.values(TitleTypes)),
                    language: schema.string({ trim: true }, [
                        rules.minLength(2),
                        rules.maxLength(255),
                        rules.translatedLanguage('/language', 'type'),
                    ]),
                }),
            ),
            descriptions: schema.array([rules.minLength(1)]).members(
                schema.object().members({
                    value: schema.string({ trim: true }, [rules.minLength(3), rules.maxLength(255)]),
                    type: schema.enum(Object.values(DescriptionTypes)),
                    language: schema.string({ trim: true }, [
                        rules.minLength(2),
                        rules.maxLength(255),
                        rules.translatedLanguage('/language', 'type'),
                    ]),
                }),
            ),
            authors: schema.array([rules.minLength(1)]).members(schema.object().members({ email: schema.string({ trim: true }) })),
            contributors: schema.array.optional().members(
                schema.object().members({
                    email: schema.string({ trim: true }),
                    pivot_contributor_type: schema.enum(Object.keys(ContributorTypes)),
                }),
            ),
            // project_id: schema.number(),
        });
        try {
            // Step 2 - Validate request body against the schema
            await request.validate({ schema: newDatasetSchema, messages: this.messages });
            // console.log({ payload });
        } catch (error) {
            // Step 3 - Handle errors
            // return response.badRequest(error.messages);
            throw error;
        }
        return response.redirect().back();
    }
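    // Validates the third step: everything from the previous steps plus optional project,
    // embargo date (at least 10 days in the future), geographic coverage, references and keywords.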
    public async thirdStep({ request, response }: HttpContextContract) {
        const newDatasetSchema = schema.create({
            // first step
            language: schema.string({ trim: true }, [
                rules.regex(/^[a-zA-Z0-9-_]+$/), // Must be alphanumeric with hyphens or underscores
            ]),
            licenses: schema.array([rules.minLength(1)]).members(schema.number()), // define at least one license for the new dataset
            rights: schema.string([rules.equalTo('true')]),
            // second step
            type: schema.string({ trim: true }, [rules.minLength(3), rules.maxLength(255)]),
            creating_corporation: schema.string({ trim: true }, [rules.minLength(3), rules.maxLength(255)]),
            titles: schema.array([rules.minLength(1)]).members(
                schema.object().members({
                    value: schema.string({ trim: true }, [rules.minLength(3), rules.maxLength(255)]),
                    type: schema.enum(Object.values(TitleTypes)),
                    language: schema.string({ trim: true }, [
                        rules.minLength(2),
                        rules.maxLength(255),
                        rules.translatedLanguage('/language', 'type'),
                    ]),
                }),
            ),
            descriptions: schema.array([rules.minLength(1)]).members(
                schema.object().members({
                    value: schema.string({ trim: true }, [rules.minLength(3), rules.maxLength(255)]),
                    type: schema.enum(Object.values(DescriptionTypes)),
                    language: schema.string({ trim: true }, [
                        rules.minLength(2),
                        rules.maxLength(255),
                        rules.translatedLanguage('/language', 'type'),
                    ]),
                }),
            ),
            authors: schema.array([rules.minLength(1)]).members(schema.object().members({ email: schema.string({ trim: true }) })),
            contributors: schema.array.optional().members(
                schema.object().members({
                    email: schema.string({ trim: true }),
                    pivot_contributor_type: schema.enum(Object.keys(ContributorTypes)),
                }),
            ),
            // third step
            project_id: schema.number.optional(),
            embargo_date: schema.date.optional({ format: 'yyyy-MM-dd' }, [rules.after(10, 'days')]),
            coverage: schema.object().members({
                x_min: schema.number(),
                x_max: schema.number(),
                y_min: schema.number(),
                y_max: schema.number(),
                elevation_absolut: schema.number.optional(),
                elevation_min: schema.number.optional([rules.requiredIfExists('elevation_max')]),
                elevation_max: schema.number.optional([rules.requiredIfExists('elevation_min')]),
                depth_absolut: schema.number.optional(),
                depth_min: schema.number.optional([rules.requiredIfExists('depth_max')]),
                depth_max: schema.number.optional([rules.requiredIfExists('depth_min')]),
            }),
            references: schema.array.optional([rules.uniqueArray('value')]).members(
                schema.object().members({
                    value: schema.string({ trim: true }, [rules.minLength(3), rules.maxLength(255)]),
                    type: schema.enum(Object.values(ReferenceIdentifierTypes)),
                    relation: schema.enum(Object.values(RelationTypes)),
                    label: schema.string({ trim: true }, [rules.minLength(2), rules.maxLength(255)]),
                }),
            ),
            subjects: schema.array([rules.minLength(3), rules.uniqueArray('value')]).members(
                schema.object().members({
                    value: schema.string({ trim: true }, [
                        rules.minLength(3),
                        rules.maxLength(255),
                        // rules.unique({ table: 'dataset_subjects', column: 'value' }),
                    ]),
                    // type: schema.enum(Object.values(TitleTypes)),
                    language: schema.string({ trim: true }, [rules.minLength(2), rules.maxLength(255)]),
                }),
            ),
        });
        try {
            // Step 2 - Validate request body against the schema
            await request.validate({ schema: newDatasetSchema, messages: this.messages });
            // console.log({ payload });
        } catch (error) {
            // Step 3 - Handle errors
            // return response.badRequest(error.messages);
            throw error;
        }
        return response.redirect().back();
    }

    public async store({ auth, request, response, session }: HttpContextContract) {
        // node ace make:validator CreateDataset
        try {
            // Step 2 - Validate request body against the schema
            // await request.validate({ schema: newDatasetSchema, messages: this.messages });
            await request.validate(CreateDatasetValidator);
            // console.log({ payload });
        } catch (error) {
            // Step 3 - Handle errors
            // return response.badRequest(error.messages);
            throw error;
        }

        let trx: TransactionClientContract | null = null;
        try {
            trx = await Database.transaction();
            const user = (await User.find(auth.user?.id)) as User;
            await this.createDatasetAndAssociations(user, request, trx);
            await trx.commit();
            console.log('Dataset and related models created successfully');
        } catch (error) {
            if (trx !== null) {
                await trx.rollback();
            }
            console.error('Failed to create dataset and related models:', error);
            // throw new ValidationException(true, { 'upload error': `failed to create dataset and related models. ${error}` });
            throw error;
        }

        session.flash('message', 'Dataset has been created successfully');
        // return response.redirect().toRoute('user.index');
        return response.redirect().back();
    }
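    // Creates the dataset and all of its related records (licenses, persons, titles, descriptions,
    // references, subjects, coverage, files) inside the given transaction. Uploaded files are
    // virus-scanned before they are moved into the 'files/<dataset id>' folder on the local disk.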
    private async createDatasetAndAssociations(user: User, request: HttpContextContract['request'], trx: TransactionClientContract) {
        // Create a new instance of the Dataset model:
        const dataset = new Dataset();
        dataset.type = request.input('type');
        dataset.creating_corporation = request.input('creating_corporation');
        dataset.language = request.input('language');
        dataset.embargo_date = request.input('embargo_date');
        // await dataset.related('user').associate(user); // already persists the dataset
        // Dataset.$getRelation('user').boot();
        // Dataset.$getRelation('user').setRelated(dataset, user);
        // dataset.$setRelated('user', user);
        await user.useTransaction(trx).related('datasets').save(dataset);

        // store licenses:
        const licenses: number[] = request.input('licenses', []);
        await dataset.useTransaction(trx).related('licenses').sync(licenses);

        // save authors and contributors
        await this.savePersons(dataset, request.input('authors', []), 'author', trx);
        await this.savePersons(dataset, request.input('contributors', []), 'contributor', trx);

        // save main and additional titles
        const titles = request.input('titles', []);
        for (const titleData of titles) {
            const title = new Title();
            title.value = titleData.value;
            title.language = titleData.language;
            title.type = titleData.type;
            await dataset.useTransaction(trx).related('titles').save(title);
        }

        // save descriptions
        const descriptions = request.input('descriptions', []);
        for (const descriptionData of descriptions) {
            const description = new Description();
            description.value = descriptionData.value;
            description.language = descriptionData.language;
            description.type = descriptionData.type;
            await dataset.useTransaction(trx).related('descriptions').save(description);
        }

        // save references
        const references = request.input('references', []);
        for (const referencePayload of references) {
            const dataReference = new DatasetReference();
            dataReference.fill(referencePayload);
            // $dataReference = new DatasetReference($reference);
            await dataset.useTransaction(trx).related('references').save(dataReference);
        }

        // save keywords
        const keywords = request.input('subjects', []);
        for (const keywordData of keywords) {
            // $dataKeyword = new Subject($keyword);
            // $dataset->subjects()->save($dataKeyword);
            const keyword = await Subject.firstOrNew({ value: keywordData.value, type: keywordData.type }, keywordData);
            if (keyword.$isNew === true) {
                await dataset.useTransaction(trx).related('subjects').save(keyword);
            } else {
                await dataset.useTransaction(trx).related('subjects').attach([keyword.id]);
            }
        }

        // save collection
        const collection: Collection | null = await Collection.query().where('id', 21).first();
        collection && (await dataset.useTransaction(trx).related('collections').attach([collection.id]));

        // save coverage
        const coverageData = request.input('coverage');
        if (coverageData) {
            // const formCoverage = request.input('coverage');
            const coverage = new Coverage();
            coverage.fill(coverageData);
            // await dataset.coverage().save(coverageData);
            await dataset.useTransaction(trx).related('coverage').save(coverage);
            // Alternatively, you can associate the dataset with the coverage and then save it:
            // await coverage.dataset().associate(dataset).save();
            // await coverage.useTransaction(trx).related('dataset').associate(dataset);
        }

        // save data files
        const uploadedFiles = request.files('files');
        for (const [index, file] of uploadedFiles.entries()) {
            try {
                await this.scanFileForViruses(file.tmpPath); //, 'gitea.lan', 3310);
                // await this.scanFileForViruses("/tmp/testfile.txt");
            } catch (error) {
                // If the file is infected or there's an error scanning the file, throw a validation exception
                throw error;
            }
            // clientName: 'Gehaltsschema.png'
            // extname: 'png'
            // fieldName: 'file'
            const fileName = `file-${cuid()}.${file.extname}`;
            const mimeType = file.headers['content-type'] || 'application/octet-stream'; // Fallback to a default MIME type
            const datasetFolder = `files/${dataset.id}`;
            // const size = file.size;
            await file.moveToDisk(
                datasetFolder,
                {
                    name: fileName,
                    overwrite: true, // overwrite in case of conflict
                },
                'local',
            );
            // save file metadata into db
            const newFile = new File();
            newFile.pathName = `${datasetFolder}/${fileName}`;
            newFile.fileSize = file.size;
            newFile.mimeType = mimeType;
            newFile.label = file.clientName;
            newFile.sortOrder = index;
            newFile.visibleInFrontdoor = true;
            newFile.visibleInOai = true;
            // let path = coverImage.filePath;
            await dataset.useTransaction(trx).related('files').save(newFile);
            await newFile.createHashValues(trx);
        }
    }
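    // Scans a single uploaded file with ClamAV (via the clamscan package) and rejects with a
    // ValidationException if the file is infected or if the scan itself fails. host/port are
    // only needed when a remote clamd daemon should be contacted.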
    private async scanFileForViruses(filePath, host?: string, port?: number): Promise<void> {
        // const clamscan = await (new ClamScan().init());
        const opts: ClamScan.Options = {
            removeInfected: true, // If true, removes infected files
            debugMode: false, // Whether or not to log info/debug/error msgs to the console
            scanRecursively: true, // If true, deep scan folders recursively
            clamdscan: {
                active: true, // If true, this module will consider using the clamdscan binary
                host,
                port,
                multiscan: true, // Scan using all available cores! Yay!
            },
            preference: 'clamdscan', // If clamdscan is found and active, it will be used by default
        };
        return new Promise<void>(async (resolve, reject) => {
            try {
                const clamscan = await new ClamScan().init(opts);
                // You can re-use the `clamscan` object as many times as you want
                // const version = await clamscan.getVersion();
                // console.log(`ClamAV Version: ${version}`);
                const { file, isInfected, viruses } = await clamscan.isInfected(filePath);
                if (isInfected) {
                    console.log(`${file} is infected with ${viruses}!`);
                    reject(new ValidationException(true, { 'upload error': `File ${file} is infected!` }));
                } else {
                    resolve();
                }
            } catch (error) {
                // If there's an error scanning the file, throw a validation exception
                reject(new ValidationException(true, { 'upload error': `${error.message}` }));
            }
        });
    }

    private async savePersons(dataset: Dataset, persons: any[], role: string, trx: TransactionClientContract) {
        for (const [key, person] of persons.entries()) {
            const pivotData = {
                role: role,
                sort_order: key + 1,
                allow_email_contact: false,
                ...this.extractPivotAttributes(person), // Merge pivot attributes here
            };
            if (person.id !== undefined) {
                await dataset
                    .useTransaction(trx)
                    .related('persons')
                    .attach({
                        [person.id]: pivotData,
                    });
            } else {
                const dataPerson = new Person();
                dataPerson.fill(person);
                await dataset.useTransaction(trx).related('persons').save(dataPerson, false, pivotData);
            }
        }
    }

    // Helper function to extract pivot attributes from a person object
    private extractPivotAttributes(person: any) {
        const pivotAttributes = {};
        for (const key in person) {
            if (key.startsWith('pivot_')) {
                // pivotAttributes[key] = person[key];
                const cleanKey = key.replace('pivot_', ''); // Remove 'pivot_' prefix
                pivotAttributes[cleanKey] = person[key];
            }
        }
        return pivotAttributes;
    }
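    // Custom validation messages shared by the step validators above;
    // keys follow the Adonis validator "field.rule" convention.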
    public messages: CustomMessages = {
        'minLength': '{{ field }} must be at least {{ options.minLength }} characters long',
        'maxLength': '{{ field }} must be less than {{ options.maxLength }} characters long',
        'required': '{{ field }} is required',
        'unique': '{{ field }} must be unique, and this value is already taken',
        // 'confirmed': '{{ field }} is not correct',
        'licenses.minLength': 'at least {{ options.minLength }} license must be defined',
        'licenses.*.number': 'Define licenses as valid numbers',
        'rights.equalTo': 'you must agree to continue',

        'titles.0.value.minLength': 'Main Title must be at least {{ options.minLength }} characters long',
        'titles.0.value.required': 'Main Title is required',
        'titles.*.value.required': 'Additional title is required, if defined',
        'titles.*.type.required': 'Additional title type is required',
        'titles.*.language.required': 'Additional title language is required',
        'titles.*.language.translatedLanguage': 'The language of the translated title must be different from the language of the dataset',

        'descriptions.0.value.minLength': 'Main Abstract must be at least {{ options.minLength }} characters long',
        'descriptions.0.value.required': 'Main Abstract is required',
        'descriptions.*.value.required': 'Additional description is required, if defined',
        'descriptions.*.type.required': 'Additional description type is required',
        'descriptions.*.language.required': 'Additional description language is required',
        'descriptions.*.language.translatedLanguage': 'The language of the translated description must be different from the language of the dataset',

        'authors.minLength': 'at least {{ options.minLength }} author must be defined',
        'contributors.*.pivot_contributor_type.required': 'contributor type is required, if defined',

        'after': `{{ field }} must be after ${dayjs().add(10, 'day').format('YYYY-MM-DD')}`,

        'subjects.minLength': 'at least {{ options.minLength }} keywords must be defined',
        'subjects.uniqueArray': 'The {{ options.array }} array must have unique values based on the {{ options.field }} attribute.',
        'subjects.*.value.required': 'keyword value is required',
        'subjects.*.value.minLength': 'keyword value must be at least {{ options.minLength }} characters long',
        'subjects.*.type.required': 'keyword type is required',
        'subjects.*.language.required': 'language of keyword is required',

        'references.*.value.required': 'Additional reference value is required, if defined',
        'references.*.type.required': 'Additional reference identifier type is required',
        'references.*.relation.required': 'Additional reference relation type is required',
        'references.*.label.required': 'Additional reference label is required',

        'files.minLength': 'At least {{ options.minLength }} file upload is required.',
        'files.*.size': 'file size is too big',
        'files.extnames': 'file extension is not supported',
    };

    // public async release({ params, view }) {
    public async release({ request, inertia, response }: HttpContextContract) {
        const id = request.param('id');
        const dataset = await Dataset.query()
            .preload('user', (builder) => {
                builder.select('id', 'login');
            })
            .where('id', id)
            .firstOrFail();

        const validStates = ['inprogress', 'rejected_editor'];
        if (!validStates.includes(dataset.server_state)) {
            // session.flash('errors', 'Invalid server state!');
            return response
                .flash(
                    'warning',
                    `Invalid server state. Dataset with id ${id} cannot be released to editor. Dataset has server state ${dataset.server_state}.`,
                )
                .redirect()
                .back();
        }

        return inertia.render('Submitter/Dataset/Release', {
            dataset,
        });
    }
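    // Releases a dataset to the editors: resets any previous editor/reviewer assignment and
    // rejection notes, optionally validates a preferred reviewer, and sets server_state to 'released'.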
    public async releaseUpdate({ request, response }: HttpContextContract) {
        const id = request.param('id');
        const dataset = await Dataset.query().preload('files').where('id', id).firstOrFail();

        const validStates = ['inprogress', 'rejected_editor'];
        if (!validStates.includes(dataset.server_state)) {
            // throw new Error('Invalid server state!');
            // return response.flash('warning', 'Invalid server state. Dataset cannot be released to editor').redirect().back();
            return response
                .flash(
                    'warning',
                    `Invalid server state. Dataset with id ${id} cannot be released to editor. Dataset has server state ${dataset.server_state}.`,
                )
                .redirect()
                .toRoute('dataset.list');
        }

        if (dataset.files.length === 0) {
            return response.flash('warning', 'At least one file is required.').redirect().back();
        }

        const preferation = request.input('preferation', '');
        const preferredReviewer = request.input('preferred_reviewer');
        const preferredReviewerEmail = request.input('preferred_reviewer_email');
        if (preferation === 'yes_preferation') {
            const newSchema = schema.create({
                preferred_reviewer: schema.string({ trim: true }, [rules.minLength(3), rules.maxLength(255)]),
                preferred_reviewer_email: schema.string([rules.email()]),
            });
            try {
                await request.validate({ schema: newSchema });
            } catch (error) {
                // return response.badRequest(error.messages);
                throw error;
            }
        }

        const input = {
            preferred_reviewer: preferredReviewer || null,
            preferred_reviewer_email: preferredReviewerEmail || null,
            server_state: 'released',
            editor_id: null,
            reviewer_id: null,
            reject_editor_note: null,
            reject_reviewer_note: null,
        };
        // Clear editor_id if it exists
        if (dataset.editor_id !== null) {
            input.editor_id = null;
        }
        // Clear reject_editor_note if it exists
        if (dataset.reject_editor_note !== null) {
            input.reject_editor_note = null;
        }
        // Clear reviewer_id if it exists
        if (dataset.reviewer_id !== null) {
            input.reviewer_id = null;
        }
        // Clear reject_reviewer_note if it exists
        if (dataset.reject_reviewer_note !== null) {
            input.reject_reviewer_note = null;
        }

        if (await dataset.merge(input).save()) {
            return response.flash('message', 'You have released your dataset!').redirect().toRoute('dataset.list');
        }
        // throw new GeneralException(trans('exceptions.publish.release.update_error'));
    }
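    // Renders the edit form with the fully preloaded dataset plus the same select options as create();
    // only datasets in state 'inprogress' or 'rejected_editor' can be edited.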
    public async edit({ request, inertia, response }: HttpContextContract) {
        const id = request.param('id');
        const datasetQuery = Dataset.query().where('id', id);
        datasetQuery
            .preload('titles', (query) => query.orderBy('id', 'asc'))
            .preload('descriptions', (query) => query.orderBy('id', 'asc'))
            .preload('coverage')
            .preload('licenses')
            .preload('authors')
            .preload('contributors')
            .preload('subjects')
            .preload('references')
            .preload('files');
        const dataset = await datasetQuery.firstOrFail();

        const validStates = ['inprogress', 'rejected_editor'];
        if (!validStates.includes(dataset.server_state)) {
            // session.flash('errors', 'Invalid server state!');
            return response
                .flash(
                    'warning',
                    `Invalid server state. Dataset with id ${id} cannot be edited. Dataset has server state ${dataset.server_state}.`,
                )
                .redirect()
                .toRoute('dataset.list');
        }

        const titleTypes = Object.entries(TitleTypes)
            .filter(([key]) => key !== 'Main')
            .map(([key, value]) => ({ value: key, label: value }));
        const descriptionTypes = Object.entries(DescriptionTypes)
            .filter(([key]) => key !== 'Abstract')
            .map(([key, value]) => ({ value: key, label: value }));
        const languages = await Language.query().where('active', true).pluck('part1', 'part1');

        // const contributorTypes = Config.get('enums.contributor_types');
        const contributorTypes = Object.entries(ContributorTypes).map(([key, value]) => ({ value: key, label: value }));
        // const nameTypes = Config.get('enums.name_types');
        const nameTypes = Object.entries(PersonNameTypes).map(([key, value]) => ({ value: key, label: value }));

        // const messages = await Database.table('messages')
        //     .pluck('help_text', 'metadata_element');

        const projects = await Project.query().pluck('label', 'id');

        const currentDate = new Date();
        const currentYear = currentDate.getFullYear();
        const years = Array.from({ length: currentYear - 1990 + 1 }, (_, index) => 1990 + index);

        const licenses = await License.query().select('id', 'name_long').where('active', 'true').pluck('name_long', 'id');

        // const userHasRoles = user.roles;
        // const datasetHasLicenses = await dataset.related('licenses').query().pluck('id');
        // const checkeds = dataset.licenses.first().id;

        const doctypes = {
            analysisdata: { label: 'Analysis', value: 'analysisdata' },
            measurementdata: { label: 'Measurements', value: 'measurementdata' },
            monitoring: 'Monitoring',
            remotesensing: 'Remote Sensing',
            gis: 'GIS',
            models: 'Models',
            mixedtype: 'Mixed Type',
        };

        return inertia.render('Submitter/Dataset/Edit', {
            dataset,
            titletypes: titleTypes,
            descriptiontypes: descriptionTypes,
            contributorTypes,
            nameTypes,
            languages,
            // messages,
            projects,
            licenses,
            // datasetHasLicenses: Object.keys(datasetHasLicenses).map((key) => datasetHasLicenses[key]), // convert object to array with license ids
            // checkeds,
            years,
            // languages,
            subjectTypes: SubjectTypes,
            referenceIdentifierTypes: Object.entries(ReferenceIdentifierTypes).map(([key, value]) => ({ value: key, label: value })),
            relationTypes: Object.entries(RelationTypes).map(([key, value]) => ({ value: key, label: value })),
            doctypes,
        });
    }
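    // Persists the edit form: syncs licenses and persons, updates or creates titles, descriptions
    // and file metadata, stores newly uploaded files, and merges the remaining scalar attributes,
    // all inside a single transaction.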
    public async update({ request, response, session }: HttpContextContract) {
        try {
            // await request.validate({ schema: newDatasetSchema, messages: this.messages });
            await request.validate(UpdateDatasetValidator);
        } catch (error) {
            // - Handle errors
            // return response.badRequest(error.messages);
            throw error;
            // return response.badRequest(error.messages);
        }
        const id = request.param('id');

        let trx: TransactionClientContract | null = null;
        try {
            trx = await Database.transaction();
            // const user = (await User.find(auth.user?.id)) as User;
            // await this.createDatasetAndAssociations(user, request, trx);
            const dataset = await Dataset.findOrFail(id);

            // save the licenses
            const licenses: number[] = request.input('licenses', []);
            // await dataset.useTransaction(trx).related('licenses').sync(licenses);
            await dataset.useTransaction(trx).related('licenses').sync(licenses);

            // save authors and contributors
            await dataset.useTransaction(trx).related('authors').sync([]);
            await dataset.useTransaction(trx).related('contributors').sync([]);
            await this.savePersons(dataset, request.input('authors', []), 'author', trx);
            await this.savePersons(dataset, request.input('contributors', []), 'contributor', trx);

            // save the titles:
            const titles = request.input('titles', []);
            // const savedTitles: Array = [];
            for (const titleData of titles) {
                if (titleData.id) {
                    const title = await Title.findOrFail(titleData.id);
                    title.value = titleData.value;
                    title.language = titleData.language;
                    title.type = titleData.type;
                    if (title.$isDirty) {
                        await title.useTransaction(trx).save();
                        // await dataset.useTransaction(trx).related('titles').save(title);
                        // savedTitles.push(title);
                    }
                } else {
                    const title = new Title();
                    title.fill(titleData);
                    // savedTitles.push(title);
                    await dataset.useTransaction(trx).related('titles').save(title);
                }
            }

            // save the abstracts
            const descriptions = request.input('descriptions', []);
            // const savedTitles: Array<Title> = [];
            for (const descriptionData of descriptions) {
                if (descriptionData.id) {
                    const description = await Description.findOrFail(descriptionData.id);
                    description.value = descriptionData.value;
                    description.language = descriptionData.language;
                    description.type = descriptionData.type;
                    if (description.$isDirty) {
                        await description.useTransaction(trx).save();
                        // await dataset.useTransaction(trx).related('titles').save(title);
                        // savedTitles.push(title);
                    }
                } else {
                    const description = new Description();
                    description.fill(descriptionData);
                    // savedTitles.push(title);
                    await dataset.useTransaction(trx).related('descriptions').save(description);
                }
            }

            // Save already existing files
            const files = request.input('fileInputs', []);
            for (const fileData of files) {
                if (fileData.id) {
                    const file = await File.findOrFail(fileData.id);
                    file.label = fileData.label;
                    file.sortOrder = fileData.sort_order;
                    if (file.$isDirty) {
                        await file.useTransaction(trx).save();
                    }
                }
            }

            // handle new uploaded files:
            const uploadedFiles = request.files('files');
            if (Array.isArray(uploadedFiles) && uploadedFiles.length > 0) {
                // let index = 1;
                // for (const key in files) {
                //     const formFile = files[key]
                // for (const fileData of files) {
                for (const [index, fileData] of uploadedFiles.entries()) {
                    // const uploads = request.file('uploads');
                    // const fileIndex = formFile.file;
                    // const file = uploads[fileIndex];
                    const fileName = `file-${cuid()}.${fileData.extname}`;
                    const mimeType = fileData.headers['content-type'] || 'application/octet-stream'; // Fallback to a default MIME type
                    const datasetFolder = `files/${dataset.id}`;
                    await fileData.moveToDisk(
                        datasetFolder,
                        {
                            name: fileName,
                            overwrite: true, // overwrite in case of conflict
                        },
                        'local',
                    );
                    // save file metadata into db
                    const newFile = new File();
                    newFile.pathName = `${datasetFolder}/${fileName}`;
                    newFile.fileSize = fileData.size;
                    newFile.mimeType = mimeType;
                    newFile.label = fileData.clientName;
                    newFile.sortOrder = index;
                    newFile.visibleInFrontdoor = true;
                    newFile.visibleInOai = true;
                    // let path = coverImage.filePath;
                    await dataset.useTransaction(trx).related('files').save(newFile);
                    await newFile.createHashValues();
                }
            }

            const input = request.only(['project_id', 'embargo_date', 'language', 'type', 'creating_corporation']);
            // dataset.type = request.input('type');
            dataset.merge(input);
            // let test: boolean = dataset.$isDirty;
            await dataset.useTransaction(trx).save();

            await trx.commit();
            console.log('Dataset and related models updated successfully');
        } catch (error) {
            if (trx !== null) {
                await trx.rollback();
            }
            console.error('Failed to update dataset and related models:', error);
            // throw new ValidationException(true, { 'upload error': `failed to create dataset and related models. ${error}` });
            throw error;
        }

        session.flash('message', 'Dataset has been updated successfully');
        // return response.redirect().toRoute('user.index');
        return response.redirect().back();
    }
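    // Renders the delete confirmation page; the actual deletion happens in deleteUpdate(), which
    // also removes the stored files from disk before deleting the dataset row.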
    public async delete({ request, inertia, response, session }: HttpContextContract) {
        const id = request.param('id');
        try {
            const dataset = await Dataset.query()
                .preload('user', (builder) => {
                    builder.select('id', 'login');
                })
                .preload('files')
                .where('id', id)
                .firstOrFail();

            const validStates = ['inprogress', 'rejected_editor'];
            if (!validStates.includes(dataset.server_state)) {
                // session.flash('errors', 'Invalid server state!');
                return response
                    .flash(
                        'warning',
                        `Invalid server state. Dataset with id ${id} cannot be deleted. Dataset has server state ${dataset.server_state}.`,
                    )
                    .redirect()
                    .toRoute('dataset.list');
            }

            return inertia.render('Submitter/Dataset/Delete', {
                dataset,
            });
        } catch (error) {
            if (error.code === 'E_ROW_NOT_FOUND') {
                session.flash({ warning: 'Dataset is not found in database' });
            } else {
                session.flash({ warning: 'A general error occurred, you cannot delete the dataset' });
            }
            return response.redirect().toRoute('dataset.list');
        }
    }

    public async deleteUpdate({ params, session, response }: HttpContextContract) {
        try {
            const dataset = await Dataset.query().where('id', params.id).preload('files').firstOrFail();

            const validStates = ['inprogress', 'rejected_editor'];
            if (validStates.includes(dataset.server_state)) {
                if (dataset.files && dataset.files.length > 0) {
                    for (const file of dataset.files) {
                        if (file.pathName) {
                            // delete file from filesystem
                            await Drive.delete(file.pathName);
                        }
                    }
                }
                // delete dataset with its relations from db
                await dataset.delete();
                session.flash({ message: 'You have deleted 1 dataset!' });
                return response.redirect().toRoute('dataset.list');
            } else {
                session.flash({
                    warning: `You cannot delete this dataset! The status of this dataset is "${dataset.server_state}"!`,
                });
                return response.redirect().back();
            }
        } catch (error) {
            if (error instanceof ValidationException) {
                // Validation exception handling
                throw error;
            } else if (error instanceof Exception) {
                // General exception handling
                return response
                    .flash({ errors: { error: error.message } })
                    .redirect()
                    .back();
            } else {
                session.flash({ error: 'An error occurred while deleting the dataset.' });
                return response.redirect().back();
            }
        }
    }
}