feat: import resource

This commit is contained in:
Ahmed Bouhuolia
2024-03-14 22:18:12 +02:00
parent daa1e3a6bd
commit 084d9d3d10
14 changed files with 360 additions and 352 deletions

View File

@@ -62,16 +62,16 @@ export class ImportController extends BaseController {
private get importValidationSchema() {
return [
body('resource').exists(),
// body('file').custom((value, { req }) => {
// if (!value) {
// throw new Error('File is required');
// }
// if (!['xlsx', 'csv'].includes(value.split('.').pop())) {
// throw new Error('File must be in xlsx or csv format');
// }
// return true;
// }),
// ];
// body('file').custom((value, { req }) => {
// if (!value) {
// throw new Error('File is required');
// }
// if (!['xlsx', 'csv'].includes(value.split('.').pop())) {
// throw new Error('File must be in xlsx or csv format');
// }
// return true;
// }),
];
}
/**
@@ -92,7 +92,6 @@ export class ImportController extends BaseController {
const data = await this.importResourceApp.import(
tenantId,
req.body.resource,
req.file.path,
req.file.filename
);
return res.status(200).send(data);
@@ -107,18 +106,19 @@ export class ImportController extends BaseController {
* @param {Response} res
* @param {NextFunction} next
*/
private async mapping(req: Request, res: Response, next: NextFunction) {
private async mapping(req: Request, res: Response, next: NextFunction) {
const { tenantId } = req;
const { import_id: importId } = req.params;
const body = this.matchedBodyData(req);
try {
await this.importResourceApp.mapping(tenantId, importId, body?.mapping);
const mapping = await this.importResourceApp.mapping(
tenantId,
importId,
body?.mapping
);
return res.status(200).send({
id: importId,
message: 'The given import sheet has mapped successfully.'
})
return res.status(200).send(mapping);
} catch (error) {
next(error);
}
@@ -135,7 +135,7 @@ export class ImportController extends BaseController {
const { import_id: importId } = req.params;
try {
const preview = await this.importResourceApp.preview(tenantId, importId);
const preview = await this.importResourceApp.preview(tenantId, importId);
return res.status(200).send(preview);
} catch (error) {
@@ -158,7 +158,7 @@ export class ImportController extends BaseController {
return res.status(200).send({
id: importId,
message: 'Importing the uploaded file has started.'
message: 'Importing the uploaded file has started.',
});
} catch (error) {
next(error);
@@ -181,18 +181,18 @@ export class ImportController extends BaseController {
if (error instanceof ServiceError) {
if (error.errorType === 'INVALID_MAP_ATTRS') {
return res.status(400).send({
errors: [{ type: 'INVALID_MAP_ATTRS' }]
errors: [{ type: 'INVALID_MAP_ATTRS' }],
});
}
if (error.errorType === 'DUPLICATED_FROM_MAP_ATTR') {
return res.status(400).send({
errors: [{ type: 'DUPLICATED_FROM_MAP_ATTR' }],
});
};
}
if (error.errorType === 'DUPLICATED_TO_MAP_ATTR') {
return res.status(400).send({
errors: [{ type: 'DUPLICATED_TO_MAP_ATTR' }],
})
});
}
}
next(error);
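
For context, a hedged sketch of how the commented-out file validation above could be re-enabled against req.file (as populated by the upload middleware) instead of body('file'); this is illustrative only and not part of the commit:

import { body } from 'express-validator';

// Hypothetical variant of the commented-out validator: it inspects req.file
// (set by the upload middleware) rather than a raw body field.
const importValidationSchema = [
  body('resource').exists(),
  body('file').custom((_value, { req }) => {
    if (!req.file) {
      throw new Error('File is required');
    }
    const extension = req.file.originalname.split('.').pop() ?? '';
    if (!['xlsx', 'csv'].includes(extension)) {
      throw new Error('File must be in xlsx or csv format');
    }
    return true;
  }),
];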

View File

@@ -1,7 +1,6 @@
import { Inject, Service } from 'typedi';
import { Knex } from 'knex';
import { IAccountCreateDTO } from '@/interfaces';
import { AccountsApplication } from '../Accounts/AccountsApplication';
import { CreateAccount } from '../Accounts/CreateAccount';
@Service()

View File

@@ -1,20 +1,36 @@
import fs from 'fs/promises';
import XLSX from 'xlsx';
import bluebird from 'bluebird';
import * as R from 'ramda';
import { Inject, Service } from 'typedi';
import { first } from 'lodash';
import { ImportFileDataValidator } from './ImportFileDataValidator';
import { Knex } from 'knex';
import { ImportInsertError } from './interfaces';
import {
ImportInsertError,
ImportOperError,
ImportOperSuccess,
} from './interfaces';
import { AccountsImportable } from './AccountsImportable';
import { ServiceError } from '@/exceptions';
import { trimObject } from './_utils';
import { ImportableResources } from './ImportableResources';
import ResourceService from '../Resource/ResourceService';
import HasTenancyService from '../Tenancy/TenancyService';
@Service()
export class ImportFileCommon {
@Inject()
private tenancy: HasTenancyService;
@Inject()
private importFileValidator: ImportFileDataValidator;
@Inject()
private importable: AccountsImportable;
private importable: ImportableResources;
@Inject()
private resource: ResourceService;
/**
* Parses the given xlsx file buffer into sheet JSON data.
@@ -22,7 +38,7 @@ export class ImportFileCommon {
* @param {Buffer} buffer - The xlsx file buffer.
* @returns {Record<string, unknown>[]} - The parsed sheet rows.
*/
public parseXlsxSheet(buffer) {
public parseXlsxSheet(buffer: Buffer): Record<string, unknown>[] {
const workbook = XLSX.read(buffer, { type: 'buffer' });
const firstSheetName = workbook.SheetNames[0];
@@ -43,45 +59,81 @@ export class ImportFileCommon {
/**
* Imports the given parsed rows of the resource.
* @param {number} tenantId - Tenant id.
* @param {Record<string, any>} importableFields
* @param {Record<string, any>} parsedData
* @param {string} resourceName - Resource name.
* @param {Record<string, any>[]} parsedData - Parsed sheet rows.
* @param {Knex.Transaction} trx
* @returns {Promise<[ImportOperSuccess[], ImportOperError[]]>}
*/
public import(
public async import(
tenantId: number,
importableFields,
parsedData: Record<string, any>,
resourceName: string,
parsedData: Record<string, any>[],
trx?: Knex.Transaction
): Promise<(void | ImportInsertError[])[]> {
return bluebird.map(
parsedData,
async (objectDTO, index: number): Promise<true | ImportInsertError[]> => {
try {
// Validate the DTO object before passing it to the service layer.
await this.importFileValidator.validateData(
importableFields,
objectDTO
);
try {
// Run the importable function and listen to the errors.
await this.importable.importable(tenantId, objectDTO, trx);
} catch (error) {
if (error instanceof ServiceError) {
return [
{
errorCode: 'ValidationError',
errorMessage: error.message || error.errorType,
rowNumber: index + 1,
},
];
}
}
} catch (errors) {
return errors.map((er) => ({ ...er, rowNumber: index + 1 }));
}
},
{ concurrency: 2 }
): Promise<[ImportOperSuccess[], ImportOperError[]]> {
const importableFields = this.resource.getResourceImportableFields(
tenantId,
resourceName
);
const ImportableRegistry = this.importable.registry;
const importable = ImportableRegistry.getImportable(resourceName);
const success: ImportOperSuccess[] = [];
const failed: ImportOperError[] = [];
const importAsync = async (objectDTO, index: number): Promise<void> => {
try {
// Validate the DTO object before passing it to the service layer.
await this.importFileValidator.validateData(
importableFields,
objectDTO
);
try {
// Run the importable function and listen to the errors.
const data = await importable.importable(tenantId, objectDTO, trx);
success.push({ index, data });
} catch (err) {
if (err instanceof ServiceError) {
const error = [
{
errorCode: 'ValidationError',
errorMessage: err.message || err.errorType,
rowNumber: index + 1,
},
];
failed.push({ index, error });
}
}
} catch (errors) {
const error = errors.map((er) => ({ ...er, rowNumber: index + 1 }));
failed.push({ index, error });
}
};
await bluebird.map(parsedData, importAsync, { concurrency: 2 });
return [success, failed];
}
/**
* Retrieves the sheet columns from the given sheet data.
* @param {unknown[]} json
* @returns {string[]}
*/
public parseSheetColumns(json: unknown[]): string[] {
return R.compose(Object.keys, trimObject, first)(json);
}
/**
* Deletes the imported file from the storage and database.
* @param {number} tenantId
* @param {any} importFile - Import file model.
*/
private async deleteImportFile(tenantId: number, importFile: any) {
const { Import } = this.tenancy.models(tenantId);
// Deletes the import row.
await Import.query().findById(importFile.id).delete();
// Deletes the imported file.
await fs.unlink(`public/imports/${importFile.filename}`);
}
}
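
A short usage sketch of the reworked import() above; the caller now receives a [success, failed] tuple instead of a flat array of per-row errors. Variable names (importFileCommon, tenantId, parsedData, trx) are assumed to be in scope:

// Illustrative caller of ImportFileCommon.import().
const [success, failed] = await importFileCommon.import(
  tenantId,
  'Account', // resource name, resolved through the ImportableResources registry
  parsedData,
  trx
);
const createdCount = success.length; // ImportOperSuccess[]
const errors = failed.flatMap((oper) => oper.error); // flattened ImportInsertError entries
console.log(`${createdCount} rows imported, ${failed.length} rows failed`);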

View File

@@ -8,14 +8,12 @@ import ResourceService from '../Resource/ResourceService';
@Service()
export class ImportFileDataTransformer {
@Inject()
private resource: ResourceService;
/**
* Parses the given sheet data based on the importable fields of the resource.
* @param {any} importFile - Import file model.
* @param {any} importableFields - Importable fields of the resource.
* @param {Record<string, unknown>[]} data - Parsed sheet rows.
*/
public transformSheetData(
public parseSheetData(
importFile: any,
importableFields: any,
data: Record<string, unknown>[]

View File

@@ -1,13 +1,26 @@
import { Service } from 'typedi';
import { ImportInsertError, ResourceMetaFieldsMap } from './interfaces';
import { convertFieldsToYupValidation } from './_utils';
import { ERRORS, convertFieldsToYupValidation } from './_utils';
import { IModelMeta } from '@/interfaces';
import { ServiceError } from '@/exceptions';
@Service()
export class ImportFileDataValidator {
/**
* Validates the given resource is importable.
* @param {IModelMeta} resourceMeta
*/
public validateResourceImportable(resourceMeta: IModelMeta) {
// Throw service error if the resource does not support importing.
if (!resourceMeta.importable) {
throw new ServiceError(ERRORS.RESOURCE_NOT_IMPORTABLE);
}
}
/**
* Validates the given mapped DTOs and returns errors with their index.
* @param {Record<string, any>} mappedDTOs
* @returns {Promise<ImportValidationError[][]>}
* @returns {Promise<void | ImportInsertError[]>}
*/
public async validateData(
importableFields: ResourceMetaFieldsMap,

View File

@@ -1,10 +1,10 @@
import { fromPairs } from 'lodash';
import { Inject, Service } from 'typedi';
import HasTenancyService from '../Tenancy/TenancyService';
import { ImportMappingAttr } from './interfaces';
import { ImportFileMapPOJO, ImportMappingAttr } from './interfaces';
import ResourceService from '../Resource/ResourceService';
import { ServiceError } from '@/exceptions';
import { ERRORS } from './_utils';
import { fromPairs } from 'lodash';
@Service()
export class ImportFileMapping {
@@ -24,7 +24,7 @@ export class ImportFileMapping {
tenantId: number,
importId: number,
maps: ImportMappingAttr[]
) {
): Promise<ImportFileMapPOJO> {
const { Import } = this.tenancy.models(tenantId);
const importFile = await Import.query()
@@ -42,6 +42,13 @@ export class ImportFileMapping {
await Import.query().findById(importFile.id).patch({
mapping: mappingStringified,
});
return {
import: {
importId: importFile.importId,
resource: importFile.resource,
},
};
}
/**
@@ -80,7 +87,7 @@ export class ImportFileMapping {
/**
* Validates that the map attrs relation is one-to-one only.
* @param {ImportMappingAttr[]} maps
* @param {ImportMappingAttr[]} maps
*/
private validateDuplicatedMapAttrs(maps: ImportMappingAttr[]) {
const fromMap = {};

View File

@@ -1,11 +1,7 @@
import { Inject, Service } from 'typedi';
import { first, omit } from 'lodash';
import { ServiceError } from '@/exceptions';
import { ERRORS, getUnmappedSheetColumns } from './_utils';
import HasTenancyService from '../Tenancy/TenancyService';
import { ImportFileCommon } from './ImportFileCommon';
import { ImportFileDataTransformer } from './ImportFileDataTransformer';
import ResourceService from '../Resource/ResourceService';
import { ImportFilePreviewPOJO } from './interfaces';
import { ImportFileProcess } from './ImportFileProcess';
@Service()
export class ImportFilePreview {
@@ -13,86 +9,26 @@ export class ImportFilePreview {
private tenancy: HasTenancyService;
@Inject()
private resource: ResourceService;
@Inject()
private importFileCommon: ImportFileCommon;
@Inject()
private importFileParser: ImportFileDataTransformer;
private importFile: ImportFileProcess;
/**
*
* - Returns the passed rows that will be inserted.
* - Returns the passed rows that will be overwritten.
* - Returns the rows errors from the validation.
* - Returns the unmapped fields.
*
* Preview the imported file results before committing the transactions.
* @param {number} tenantId
* @param {number} importId
* @returns {Promise<ImportFilePreviewPOJO>}
*/
public async preview(tenantId: number, importId: number) {
const { Import } = this.tenancy.models(tenantId);
const importFile = await Import.query()
.findOne('importId', importId)
.throwIfNotFound();
// Throw error if the import file is not mapped yet.
if (!importFile.isMapped) {
throw new ServiceError(ERRORS.IMPORT_FILE_NOT_MAPPED);
}
const buffer = await this.importFileCommon.readImportFile(
importFile.filename
);
const jsonData = this.importFileCommon.parseXlsxSheet(buffer);
const importableFields = this.resource.getResourceImportableFields(
tenantId,
importFile.resource
);
// Parses the sheet JSON data.
const parsedData = this.importFileParser.transformSheetData(
importFile,
importableFields,
jsonData
);
public async preview(
tenantId: number,
importId: number
): Promise<ImportFilePreviewPOJO> {
const knex = this.tenancy.knex(tenantId);
const trx = await knex.transaction({ isolationLevel: 'read uncommitted' });
// Runs the import operation, collecting any errors that occur.
const asyncOpers = await this.importFileCommon.import(
tenantId,
importableFields,
parsedData,
trx
);
// Filter out the operations that have succeeded.
const successAsyncOpers = asyncOpers.filter((oper) => !oper);
const errors = asyncOpers.filter((oper) => oper);
const meta = await this.importFile.import(tenantId, importId, trx);
// Roll back all the succeeded transactions.
// Roll back the succeeded transaction.
await trx.rollback();
const header = Object.keys(first(jsonData));
const mapping = importFile.mappingParsed;
const unmappedColumns = getUnmappedSheetColumns(header, mapping);
const totalCount = parsedData.length;
const createdCount = successAsyncOpers.length;
const errorsCount = errors.length;
const skippedCount = errorsCount;
return {
createdCount,
skippedCount,
totalCount,
errorsCount,
errors,
unmappedColumns: unmappedColumns,
unmappedColumnsCount: unmappedColumns.length,
};
return meta;
}
}
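
The preview now delegates to ImportFileProcess.import() inside a throwaway transaction; a condensed sketch of that dry-run pattern (same names as above, error handling simplified):

// Dry run: execute the real import inside a transaction that is always
// rolled back, so per-row errors can be reported without persisting rows.
const knex = this.tenancy.knex(tenantId);
const trx = await knex.transaction({ isolationLevel: 'read uncommitted' });
try {
  return await this.importFile.import(tenantId, importId, trx);
} finally {
  await trx.rollback(); // never committed during preview
}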

View File

@@ -1,18 +1,14 @@
import { Inject, Service } from 'typedi';
import * as R from 'ramda';
import XLSX from 'xlsx';
import { first, isUndefined } from 'lodash';
import bluebird from 'bluebird';
import fs from 'fs/promises';
import { chain } from 'lodash';
import { Knex } from 'knex';
import HasTenancyService from '../Tenancy/TenancyService';
import { ERRORS, convertFieldsToYupValidation, trimObject } from './_utils';
import { ImportMappingAttr, ImportValidationError } from './interfaces';
import { AccountsImportable } from './AccountsImportable';
import UnitOfWork from '../UnitOfWork';
import { ServiceError } from '@/exceptions';
import { ERRORS, getSheetColumns, getUnmappedSheetColumns } from './_utils';
import HasTenancyService from '../Tenancy/TenancyService';
import { ImportFileCommon } from './ImportFileCommon';
import { ImportFileDataTransformer } from './ImportFileDataTransformer';
import ResourceService from '../Resource/ResourceService';
import UnitOfWork from '../UnitOfWork';
import { ImportFilePreviewPOJO } from './interfaces';
@Service()
export class ImportFileProcess {
@@ -20,121 +16,28 @@ export class ImportFileProcess {
private tenancy: HasTenancyService;
@Inject()
private importable: AccountsImportable;
private resource: ResourceService;
@Inject()
private importCommon: ImportFileCommon;
@Inject()
private importParser: ImportFileDataTransformer;
@Inject()
private uow: UnitOfWork;
@Inject()
private resourceService: ResourceService;
/**
* Reads the import file.
* @param {string} filename
* @returns {Promise<Buffer>}
*/
public readImportFile(filename: string) {
return fs.readFile(`public/imports/${filename}`);
}
/**
* Maps the columns of the imported data based on the provided mapping attributes.
* @param {Record<string, any>[]} body - The array of data objects to map.
* @param {ImportMappingAttr[]} map - The mapping attributes.
* @returns {Record<string, any>[]} - The mapped data objects.
*/
public parseXlsxSheet(buffer) {
const workbook = XLSX.read(buffer, { type: 'buffer' });
const firstSheetName = workbook.SheetNames[0];
const worksheet = workbook.Sheets[firstSheetName];
return XLSX.utils.sheet_to_json(worksheet);
}
/**
* Sanitizes the data in the imported sheet by trimming object keys.
* @param json - The JSON data representing the imported sheet.
* @returns {string[][]} - The sanitized data with trimmed object keys.
*/
public sanitizeSheetData(json) {
return R.compose(R.map(Object.keys), R.map(trimObject))(json);
}
/**
* Maps the columns of the imported data based on the provided mapping attributes.
* @param {Record<string, any>[]} body - The array of data objects to map.
* @param {ImportMappingAttr[]} map - The mapping attributes.
* @returns {Record<string, any>[]} - The mapped data objects.
*/
private mapSheetColumns(
body: Record<string, any>[],
map: ImportMappingAttr[]
): Record<string, any>[] {
return body.map((item) => {
const newItem = {};
map
.filter((mapping) => !isUndefined(item[mapping.from]))
.forEach((mapping) => {
newItem[mapping.to] = item[mapping.from];
});
return newItem;
});
}
/**
* Validates the given mapped DTOs and returns errors with their index.
* @param {Record<string, any>} mappedDTOs
* @returns {Promise<ImportValidationError[][]>}
*/
private async validateData(
tenantId: number,
resource: string,
mappedDTOs: Record<string, any>
): Promise<ImportValidationError[][]> {
const importableFields = this.resourceService.getResourceImportableFields(
tenantId,
resource
);
const YupSchema = convertFieldsToYupValidation(importableFields);
const validateData = async (data, index: number) => {
const _data = { ...data };
try {
await YupSchema.validate(_data, { abortEarly: false });
return { index, data: _data, errors: [] };
} catch (validationError) {
const errors = validationError.inner.map((error) => ({
path: error.params.path,
label: error.params.label,
message: error.errors,
}));
return { index, data: _data, errors };
}
};
const errors = await bluebird.map(mappedDTOs, validateData, {
concurrency: 20,
});
return errors.filter((error) => error !== false);
}
/**
* Transforms the mapped DTOs.
* @param DTOs
* @returns
*/
private transformDTOs(DTOs) {
return DTOs.map((DTO) => this.importable.transform(DTO));
}
/**
* Processes the import file sheet through the resource service.
* Preview the imported file results before committing the transactions.
* @param {number} tenantId
* @param {number} importId
* @returns {Promise<void>}
* @returns {Promise<ImportFilePreviewPOJO>}
*/
public async process(tenantId: number, importId: number) {
public async import(
tenantId: number,
importId: number,
trx?: Knex.Transaction
): Promise<ImportFilePreviewPOJO> {
const { Import } = this.tenancy.models(tenantId);
const importFile = await Import.query()
@@ -145,55 +48,54 @@ export class ImportFileProcess {
if (!importFile.isMapped) {
throw new ServiceError(ERRORS.IMPORT_FILE_NOT_MAPPED);
}
const buffer = await this.readImportFile(importFile.filename);
const jsonData = this.parseXlsxSheet(buffer);
// Read the imported file.
const buffer = await this.importCommon.readImportFile(importFile.filename);
const sheetData = this.importCommon.parseXlsxSheet(buffer);
const header = getSheetColumns(sheetData);
const data = this.sanitizeSheetData(jsonData);
const header = first(data);
const body = jsonData;
const mappedDTOs = this.mapSheetColumns(body, importFile.mappingParsed);
const transformedDTOs = this.transformDTOs(mappedDTOs);
// Validate the mapped DTOs.
const rowsWithErrors = await this.validateData(
const importableFields = this.resource.getResourceImportableFields(
tenantId,
importFile.resource,
transformedDTOs
importFile.resource
);
// Runs the importing under the UOW environment.
await this.uow.withTransaction(tenantId, async (trx: Knex.Transaction) => {
await bluebird.map(
rowsWithErrors,
(rowWithErrors) => {
if (rowWithErrors.errors.length === 0) {
return this.importable.importable(
tenantId,
rowWithErrors.data,
trx
);
}
},
{ concurrency: 10 }
);
});
// Deletes the imported file after a successful import.
await this.deleteImportFile(tenantId, importFile)
}
// Parses the sheet JSON data.
const parsedData = this.importParser.parseSheetData(
importFile,
importableFields,
sheetData
);
// Runs the import operation, collecting any errors that occur.
const [succeededImport, failedImport] = await this.uow.withTransaction(
tenantId,
(trx: Knex.Transaction) =>
this.importCommon.import(
tenantId,
importFile.resource,
parsedData,
trx
),
trx
);
const mapping = importFile.mappingParsed;
const errors = chain(failedImport)
.map((oper) => oper.error)
.flatten()
.value();
/**
* Deletes the imported file from the storage and database.
* @param {number} tenantId
* @param {} importFile
*/
private async deleteImportFile(tenantId: number, importFile: any) {
const { Import } = this.tenancy.models(tenantId);
const unmappedColumns = getUnmappedSheetColumns(header, mapping);
const totalCount = parsedData.length;
// Deletes the import row.
await Import.query().findById(importFile.id).delete();
const createdCount = succeededImport.length;
const errorsCount = failedImport.length;
const skippedCount = errorsCount;
// Deletes the imported file.
await fs.unlink(`public/imports/${importFile.filename}`);
return {
createdCount,
skippedCount,
totalCount,
errorsCount,
errors,
unmappedColumns: unmappedColumns,
unmappedColumnsCount: unmappedColumns.length,
};
}
}
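
The refactored import() now backs both the preview and the final processing; a sketch of the two call paths (variable names illustrative, assuming uow.withTransaction() reuses an externally passed transaction instead of committing its own):

// Preview path (ImportFilePreview): pass an externally created transaction
// and roll it back afterwards, so nothing is persisted.
await importFileProcess.import(tenantId, importId, previewTrx);

// Process path (ImportResourceApplication.process): no transaction is passed,
// so the import runs inside its own committed transaction.
await importFileProcess.import(tenantId, importId);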

View File

@@ -1,14 +1,12 @@
import { first, values } from 'lodash';
import { Inject, Service } from 'typedi';
import { ServiceError } from '@/exceptions';
import XLSX from 'xlsx';
import * as R from 'ramda';
import HasTenancyService from '../Tenancy/TenancyService';
import { ERRORS, trimObject } from './_utils';
import { sanitizeResourceName } from './_utils';
import ResourceService from '../Resource/ResourceService';
import fs from 'fs/promises';
import { IModelMetaField } from '@/interfaces';
import { ImportFileCommon } from './ImportFileCommon';
import { ImportFileDataValidator } from './ImportFileDataValidator';
import { ImportFileUploadPOJO } from './interfaces';
@Service()
export class ImportFileUploadService {
@Inject()
@@ -20,6 +18,9 @@ export class ImportFileUploadService {
@Inject()
private importFileCommon: ImportFileCommon;
@Inject()
private importValidator: ImportFileDataValidator;
/**
* Reads the imported file and stores the import file meta under a unique id.
* @param {number} tenantId - Tenant id.
@@ -30,58 +31,50 @@ export class ImportFileUploadService {
*/
public async import(
tenantId: number,
resource: string,
filePath: string,
resourceName: string,
filename: string
) {
): Promise<ImportFileUploadPOJO> {
const { Import } = this.tenancy.models(tenantId);
const resourceMeta = this.resourceService.getResourceMeta(
tenantId,
resource
resourceName
);
// Throw service error if the resource does not support importing.
if (!resourceMeta.importable) {
throw new ServiceError(ERRORS.RESOURCE_NOT_IMPORTABLE);
}
this.importValidator.validateResourceImportable(resourceMeta);
// Reads the imported file into buffer.
const buffer = await this.importFileCommon.readImportFile(filename);
// Parse the buffer file to array data.
const jsonData = this.importFileCommon.parseXlsxSheet(buffer);
const sheetData = this.importFileCommon.parseXlsxSheet(buffer);
const columns = this.getColumns(jsonData);
const coumnsStringified = JSON.stringify(columns);
const sheetColumns = this.importFileCommon.parseSheetColumns(sheetData);
const coumnsStringified = JSON.stringify(sheetColumns);
// @todo validate the resource.
const _resource = this.resourceService.resourceToModelName(resource);
const _resourceName = sanitizeResourceName(resourceName);
const exportFile = await Import.query().insert({
// Store the import model with related metadata.
const importFile = await Import.query().insert({
filename,
importId: filename,
resource: _resource,
resource: _resourceName,
columns: coumnsStringified,
});
const resourceColumns = this.resourceService.getResourceImportableFields(
tenantId,
resource
_resourceName
);
const resourceColumnsTransformed = Object.entries(resourceColumns).map(
([key, { name }]: [string, IModelMetaField]) => ({ key, name })
);
return {
export: exportFile,
columns,
import: {
importId: importFile.importId,
resource: importFile.resource,
},
sheetColumns,
resourceColumns: resourceColumnsTransformed,
};
}
/**
* Retrieves the sheet columns from the given sheet data.
* @param {unknown[]} json
* @returns {string[]}
*/
private getColumns(json: unknown[]): string[] {
return R.compose(Object.keys, trimObject, first)(json);
}
}
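
Illustrative shape of the ImportFileUploadPOJO returned by the upload step; the values are invented, the field names follow the interface added in interfaces.ts below:

const uploadResult: ImportFileUploadPOJO = {
  import: {
    importId: '1710447492-accounts.xlsx', // hypothetical stored filename used as id
    resource: 'Account',
  },
  sheetColumns: ['Account Name', 'Type', 'Currency'],
  resourceColumns: [
    { key: 'name', name: 'Account Name' },
    { key: 'accountType', name: 'Account Type' },
  ],
};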

View File

@@ -22,22 +22,16 @@ export class ImportResourceApplication {
/**
* Reads the imported file and stores the import file meta under a unique id.
* @param {number} tenantId -
* @param {string} filePath -
* @param {string} resource -
* @param {string} fileName -
* @returns
* @returns {Promise<ImportFileUploadPOJO>}
*/
public async import(
tenantId: number,
resource: string,
filePath: string,
filename: string
) {
return this.importFileService.import(
tenantId,
resource,
filePath,
filename
);
return this.importFileService.import(tenantId, resource, filename);
}
/**
@@ -71,6 +65,6 @@ export class ImportResourceApplication {
* @returns {Promise<void>}
*/
public async process(tenantId: number, importId: number) {
return this.importProcessService.process(tenantId, importId);
return this.importProcessService.import(tenantId, importId);
}
}

View File

@@ -0,0 +1,31 @@
import { camelCase, upperFirst } from 'lodash';
export class ImportableRegistry {
private static instance: ImportableRegistry;
private importables: Record<string, any>;
private constructor() {
this.importables = {};
}
public static getInstance(): ImportableRegistry {
if (!ImportableRegistry.instance) {
ImportableRegistry.instance = new ImportableRegistry();
}
return ImportableRegistry.instance;
}
public registerImportable(resource: string, importable: any): void {
const _resource = this.sanitizeResourceName(resource);
this.importables[_resource] = importable;
}
public getImportable(name: string): any {
const _name = this.sanitizeResourceName(name);
return this.importables[_name];
}
private sanitizeResourceName(resource: string) {
return upperFirst(camelCase(resource));
}
}
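
A small usage sketch of the registry above; the accountsImportable instance is assumed:

// Register once, then look up by resource name; keys are normalized with
// upperFirst(camelCase(...)), so 'account', 'ACCOUNT' and 'Account' all
// resolve to the same 'Account' entry.
const registry = ImportableRegistry.getInstance();
registry.registerImportable('Account', accountsImportable);
const importable = registry.getImportable('account');
// importable.importable(tenantId, dto, trx) performs the actual insert.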

View File

@@ -0,0 +1,39 @@
import Container, { Service } from 'typedi';
import { AccountsImportable } from './AccountsImportable';
import { ImportableRegistry } from './ImportableRegistry';
@Service()
export class ImportableResources {
private static registry: ImportableRegistry;
constructor() {
this.boot();
}
/**
* Importable instances.
*/
private importables = [
{ resource: 'Account', importable: AccountsImportable },
];
public get registry() {
return ImportableResources.registry;
}
/**
* Boots all the registered importables.
*/
public boot() {
if (!ImportableResources.registry) {
const instance = ImportableRegistry.getInstance();
this.importables.forEach((importable) => {
const importableInstance = Container.get(importable.importable);
instance.registerImportable(importable.resource, importableInstance);
});
ImportableResources.registry = instance;
}
}
}
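
How the rest of the pipeline consumes this service, based on ImportFileCommon above (sketch; tenantId, dto and trx are assumed to be in scope):

import Container from 'typedi';

// Resolve the booted registry through the DI container and pick the
// importable that matches the sheet's resource name.
const importableResources = Container.get(ImportableResources);
const accountsImportable = importableResources.registry.getImportable('Account');
await accountsImportable.importable(tenantId, dto, trx);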

View File

@@ -1,4 +1,6 @@
import * as Yup from 'yup';
import { upperFirst, camelCase, first } from 'lodash';
import pluralize from 'pluralize';
import { ResourceMetaFieldsMap } from './interfaces';
import { IModelMetaField } from '@/interfaces';
@@ -62,11 +64,16 @@ export const ERRORS = {
IMPORT_FILE_NOT_MAPPED: 'IMPORT_FILE_NOT_MAPPED',
};
/**
* Retrieves the sheet columns that have no entry in the given mapping.
*/
export const getUnmappedSheetColumns = (columns, mapping) => {
return columns.filter(
(column) => !mapping.some((map) => map.from === column)
);
};
export const sanitizeResourceName = (resourceName: string) => {
return upperFirst(camelCase(pluralize.singular(resourceName)));
};
export const getSheetColumns = (sheetData: unknown[]) => {
return Object.keys(first(sheetData));
};
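
Expected behaviour of the two new helpers, assuming standard lodash and pluralize semantics:

// sanitizeResourceName singularizes and pascal-cases the resource name.
sanitizeResourceName('accounts'); // => 'Account'
sanitizeResourceName('credit_notes'); // => 'CreditNote'

// getSheetColumns reads the keys of the first parsed sheet row.
getSheetColumns([{ 'Account Name': 'Cash', Type: 'asset' }]);
// => ['Account Name', 'Type']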

View File

@@ -18,3 +18,40 @@ export interface ImportInsertError {
errorCode: string;
errorMessage: string;
}
export interface ImportFileUploadPOJO {
import: {
importId: string;
resource: string;
};
sheetColumns: string[];
resourceColumns: { key: string; name: string }[];
}
export interface ImportFileMapPOJO {
import: {
importId: string;
resource: string;
};
}
export interface ImportFilePreviewPOJO {
createdCount: number;
skippedCount: number;
totalCount: number;
errorsCount: number;
errors: ImportInsertError[];
unmappedColumns: string[];
unmappedColumnsCount: number;
}
export interface ImportOperSuccess {
data: unknown;
index: number;
}
export interface ImportOperError {
error: ImportInsertError[];
index: number;
}
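
For reference, an illustrative ImportFilePreviewPOJO payload as returned by the preview and process endpoints (values invented for the example):

const preview: ImportFilePreviewPOJO = {
  createdCount: 18,
  skippedCount: 1,
  totalCount: 19,
  errorsCount: 1,
  errors: [
    {
      errorCode: 'ValidationError',
      errorMessage: 'The account name is required.',
      rowNumber: 4,
    },
  ],
  unmappedColumns: ['Notes'],
  unmappedColumnsCount: 1,
};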