feat(nestjs): migrate to NestJS

This commit is contained in:
Ahmed Bouhuolia
2025-04-07 11:51:24 +02:00
parent f068218a16
commit 55fcc908ef
3779 changed files with 631 additions and 195332 deletions

View File

@@ -0,0 +1,8 @@
import { Module } from '@nestjs/common';
import { ImportAls } from './ImportALS';
@Module({
providers: [ImportAls],
exports: [ImportAls],
})
export class ImportModule {}

View File

@@ -0,0 +1,105 @@
import { AsyncLocalStorage } from 'async_hooks';
import { Injectable } from '@nestjs/common';
@Injectable()
export class ImportAls {
private als: AsyncLocalStorage<Map<string, any>>;
constructor() {
this.als = new AsyncLocalStorage();
}
/**
* Runs a callback function within the context of a new AsyncLocalStorage store.
* @param callback The function to be executed within the AsyncLocalStorage context.
* @returns The result of the callback function.
*/
public run<T>(callback: () => T): T {
return this.als.run<T, any>(new Map(), callback);
}
/**
* Runs a callback function in preview mode within the AsyncLocalStorage context.
* @param callback The function to be executed in preview mode.
* @returns The result of the callback function.
*/
public runPreview<T>(callback: () => T): T {
return this.run(() => {
this.markAsImport();
this.markAsImportPreview();
return callback();
});
}
/**
* Runs a callback function in commit mode within the AsyncLocalStorage context.
* @param {() => T} callback - The function to be executed in commit mode.
* @returns {T} The result of the callback function.
*/
public runCommit<T>(callback: () => T): T {
return this.run(() => {
this.markAsImport();
this.markAsImportCommit();
return callback();
});
}
/**
* Retrieves the current AsyncLocalStorage store.
* @returns The current store or undefined if not in a valid context.
*/
public getStore(): Map<string, any> | undefined {
return this.als.getStore();
}
/**
* Marks the current context as an import operation.
* @param flag Boolean flag to set or unset the import status. Defaults to true.
*/
public markAsImport(flag: boolean = true): void {
const store = this.getStore();
store?.set('isImport', flag);
}
/**
* Marks the current context as an import commit operation.
* @param flag Boolean flag to set or unset the import commit status. Defaults to true.
*/
public markAsImportCommit(flag: boolean = true): void {
const store = this.getStore();
store?.set('isImportCommit', flag);
}
/**
* Marks the current context as an import preview operation.
* @param {boolean} flag - Boolean flag to set or unset the import preview status. Defaults to true.
*/
public markAsImportPreview(flag: boolean = true): void {
const store = this.getStore();
store?.set('isImportPreview', flag);
}
/**
* Checks if the current context is an import operation.
* @returns {boolean} True if the context is an import operation, false otherwise.
*/
public get isImport(): boolean {
return !!this.getStore()?.get('isImport');
}
/**
* Checks if the current context is an import commit operation.
* @returns {boolean} True if the context is an import commit operation, false otherwise.
*/
public get isImportCommit(): boolean {
return !!this.getStore()?.get('isImportCommit');
}
/**
* Checks if the current context is an import preview operation.
* @returns {boolean} True if the context is an import preview operation, false otherwise.
*/
public get isImportPreview(): boolean {
return !!this.getStore()?.get('isImportPreview');
}
}
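
A minimal usage sketch (not part of this commit): a downstream service resolved inside the same ALS context could branch on these flags to suppress side effects during a preview run. `ImportAwareEntityService` and its methods are hypothetical names.

import { Injectable } from '@nestjs/common';
import { ImportAls } from './ImportALS';

@Injectable()
export class ImportAwareEntityService {
constructor(private readonly importAls: ImportAls) {}

public async saveEntity(dto: Record<string, any>) {
// Skip notifications/webhooks while the import runs in preview mode (hypothetical behavior).
if (this.importAls.isImport && this.importAls.isImportPreview) {
return this.persistOnly(dto);
}
return this.persistAndNotify(dto);
}

private async persistOnly(dto: Record<string, any>) {}
private async persistAndNotify(dto: Record<string, any>) {}
}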

View File

@@ -0,0 +1,169 @@
import bluebird from 'bluebird';
import * as R from 'ramda';
import { first } from 'lodash';
import { ImportFileDataValidator } from './ImportFileDataValidator';
import { Knex } from 'knex';
import {
ImportInsertError,
ImportOperError,
ImportOperSuccess,
ImportableContext,
} from './interfaces';
import { getUniqueImportableValue, trimObject } from './_utils';
import { ImportableResources } from './ImportableResources';
import { ResourceService } from '../Resource/ResourceService';
import { Import } from '@/system/models';
import { Injectable } from '@nestjs/common';
import { ServiceError } from '../Items/ServiceError';
@Injectable()
export class ImportFileCommon {
constructor(
private readonly importFileValidator: ImportFileDataValidator,
private readonly importable: ImportableResources,
private readonly resource: ResourceService,
) {}
/**
* Imports the given parsed data to the resource storage through the registered importable service.
* @param {Import} importFile - Import file model.
* @param {Record<string, any>[]} parsedData - Parsed data.
* @param {Knex.Transaction} trx - Knex transaction.
* @returns {Promise<[ImportOperSuccess[], ImportOperError[]]>}
*/
public async import(
importFile: Import,
parsedData: Record<string, any>[],
trx?: Knex.Transaction,
): Promise<[ImportOperSuccess[], ImportOperError[]]> {
const resourceFields = this.resource.getResourceFields2(
importFile.resource,
);
const ImportableRegistry = this.importable.registry;
const importable = ImportableRegistry.getImportable(importFile.resource);
const concurrency = importable.concurrency || 10;
const success: ImportOperSuccess[] = [];
const failed: ImportOperError[] = [];
const importAsync = async (objectDTO, index: number): Promise<void> => {
const context: ImportableContext = {
rowIndex: index,
import: importFile,
};
const transformedDTO = importable.transform(objectDTO, context);
const rowNumber = index + 1;
const uniqueValue = getUniqueImportableValue(resourceFields, objectDTO);
const errorContext = {
rowNumber,
uniqueValue,
};
try {
// Validate the DTO object before passing it to the service layer.
await this.importFileValidator.validateData(
resourceFields,
transformedDTO,
);
try {
// Run the importable function and collect any errors.
const data = await importable.importable(transformedDTO, trx);
success.push({ index, data });
} catch (err) {
if (err instanceof ServiceError) {
const error: ImportInsertError[] = [
{
errorCode: 'ServiceError',
errorMessage: err.message || err.errorType,
...errorContext,
},
];
failed.push({ index, error });
} else {
const error: ImportInsertError[] = [
{
errorCode: 'UnknownError',
errorMessage: 'Unknown error occurred',
...errorContext,
},
];
failed.push({ index, error });
}
}
} catch (errors) {
const error = errors.map((er) => ({ ...er, ...errorContext }));
failed.push({ index, error });
}
};
await bluebird.map(parsedData, importAsync, { concurrency });
return [success, failed];
}
/**
* Validates the import params against the importable params Yup schema.
* @param {string} resourceName
* @param {Record<string, any>} params
*/
public async validateParamsSchema(
resourceName: string,
params: Record<string, any>,
) {
const ImportableRegistry = this.importable.registry;
const importable = ImportableRegistry.getImportable(resourceName);
const yupSchema = importable.paramsValidationSchema();
try {
await yupSchema.validate(params, { abortEarly: false });
} catch (validationError) {
const errors = validationError.inner.map((error) => ({
errorCode: 'ParamsValidationError',
errorMessage: error.errors,
}));
throw errors;
}
}
/**
* Validates the importable-specific params asynchronously.
* @param {string} resourceName
* @param {Record<string, any>} params
*/
public async validateParams(
resourceName: string,
params: Record<string, any>,
) {
const ImportableRegistry = this.importable.registry;
const importable = ImportableRegistry.getImportable(resourceName);
await importable.validateParams(params);
}
/**
* Transforms the import params through the importable service before storing them.
* @param {string} resourceName
* @param {Record<string, any>} params
* @returns {Record<string, any>}
*/
public transformParams(resourceName: string, params: Record<string, any>) {
const ImportableRegistry = this.importable.registry;
const importable = ImportableRegistry.getImportable(resourceName);
return importable.transformParams(params);
}
/**
* Retrieves the sheet columns from the given sheet data.
* @param {unknown[]} json
* @returns {string[]}
*/
public parseSheetColumns(json: unknown[]): string[] {
return R.compose(Object.keys, trimObject, first)(json);
}
}
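
A hedged sketch of consuming the `[success, failed]` tuple returned by `import()` above; the wrapper function and its caller are illustrative only.

import { Import } from '@/system/models';
import { ImportFileCommon } from './ImportFileCommon';

async function runImportExample(
importFileCommon: ImportFileCommon,
importFile: Import,
parsedData: Record<string, any>[],
) {
const [succeeded, failed] = await importFileCommon.import(importFile, parsedData);
// Flatten the per-row error lists into one list for the preview payload.
const errors = failed.flatMap((oper) => oper.error);
return { createdCount: succeeded.length, errorsCount: failed.length, errors };
}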

View File

@@ -0,0 +1,143 @@
import bluebird from 'bluebird';
import { isUndefined, pickBy, set } from 'lodash';
import { Knex } from 'knex';
import { ImportMappingAttr, ResourceMetaFieldsMap } from './interfaces';
import {
valueParser,
parseKey,
getFieldKey,
aggregate,
sanitizeSheetData,
getMapToPath,
} from './_utils';
import { ResourceService } from '../Resource/ResourceService';
import { CurrencyParsingDTOs } from './_constants';
import { Injectable } from '@nestjs/common';
@Injectable()
export class ImportFileDataTransformer {
constructor(
private readonly resource: ResourceService,
) {}
/**
* Parses the given sheet data before passing it to the service layer,
* based on the mapped fields and each field type.
* @param {Import} importFile - Import file model.
* @param {ResourceMetaFieldsMap} importableFields - Importable resource fields map.
* @param {Record<string, unknown>[]} data - Sheet data.
* @param {Knex.Transaction} trx - Knex transaction.
* @returns {Promise<Record<string, any>[]>}
*/
public async parseSheetData(
importFile: any,
importableFields: ResourceMetaFieldsMap,
data: Record<string, unknown>[],
trx?: Knex.Transaction
): Promise<Record<string, any>[]> {
// Sanitize the sheet data.
const sanitizedData = sanitizeSheetData(data);
// Map the sheet columns key with the given map.
const mappedDTOs = this.mapSheetColumns(
sanitizedData,
importFile.mappingParsed
);
// Parse the mapped sheet values.
const parsedValues = await this.parseExcelValues(
importableFields,
mappedDTOs,
trx
);
const aggregateValues = this.aggregateParsedValues(
importFile.resource,
parsedValues
);
return aggregateValues;
}
/**
* Aggregates parsed data based on resource metadata configuration.
* @param {string} resourceName
* @param {Record<string, any>[]} parsedData
* @returns {Record<string, any>[]}
*/
public aggregateParsedValues(
resourceName: string,
parsedData: Record<string, any>[]
): Record<string, any>[] {
let _value = parsedData;
const meta = this.resource.getResourceMeta(resourceName);
if (meta.importAggregator === 'group') {
_value = aggregate(
_value,
meta.importAggregateBy,
meta.importAggregateOn
);
}
return _value;
};
/**
* Maps the columns of the imported data based on the provided mapping attributes.
* @param {Record<string, any>[]} body - The array of data objects to map.
* @param {ImportMappingAttr[]} map - The mapping attributes.
* @returns {Record<string, any>[]} - The mapped data objects.
*/
public mapSheetColumns(
body: Record<string, any>[],
map: ImportMappingAttr[]
): Record<string, any>[] {
return body.map((item) => {
const newItem = {};
map
.filter((mapping) => !isUndefined(item[mapping.from]))
.forEach((mapping) => {
const toPath = getMapToPath(mapping.to, mapping.group);
newItem[toPath] = item[mapping.from];
});
return newItem;
});
}
/**
* Parses the mapped sheet values before passing them to the service layer.
* @param {ResourceMetaFieldsMap} fields - Importable resource fields map.
* @param {Record<string, any>[]} valueDTOs - Mapped values DTOs.
* @param {Knex.Transaction} trx - Knex transaction.
* @returns {Promise<Record<string, any>[]>}
*/
public async parseExcelValues(
fields: ResourceMetaFieldsMap,
valueDTOs: Record<string, any>[],
trx?: Knex.Transaction
): Promise<Record<string, any>[]> {
// const tenantModels = this.tenancy.models(tenantId);
const _valueParser = valueParser(fields, {}, trx);
const _keyParser = parseKey(fields);
const parseAsync = async (valueDTO) => {
// Clean up the undefined keys that not exist in resource fields.
const _valueDTO = pickBy(
valueDTO,
(value, key) => !isUndefined(fields[getFieldKey(key)])
);
// Keys of mapped values. key structure: `group.key` or `key`.
const keys = Object.keys(_valueDTO);
// Map the object values.
return bluebird.reduce(
keys,
async (acc, key) => {
const parsedValue = await _valueParser(_valueDTO[key], key);
const parsedKey = await _keyParser(key);
set(acc, parsedKey, parsedValue);
return acc;
},
{}
);
};
return bluebird.map(valueDTOs, parseAsync, {
concurrency: CurrencyParsingDTOs,
});
}
}
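
A small worked example of `mapSheetColumns` (sheet headers and field keys are illustrative): `group` nests the target path via `getMapToPath`, so grouped targets end up as flat `group.key` keys that `parseExcelValues` later expands.

import { ImportMappingAttr } from './interfaces';

const rows = [{ 'Item Name': 'Keyboard', 'Unit Price': '25.00' }];
const mapping: ImportMappingAttr[] = [
{ from: 'Item Name', to: 'name' },
{ from: 'Unit Price', to: 'rate', group: 'entries' },
];
// importFileDataTransformer.mapSheetColumns(rows, mapping)
// => [{ name: 'Keyboard', 'entries.rate': '25.00' }]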

View File

@@ -0,0 +1,47 @@
import { ImportInsertError, ResourceMetaFieldsMap } from './interfaces';
import { ERRORS, convertFieldsToYupValidation } from './_utils';
import { Injectable } from '@nestjs/common';
import { IModelMeta } from '@/interfaces/Model';
import { ServiceError } from '../Items/ServiceError';
@Injectable()
export class ImportFileDataValidator {
/**
* Validates the given resource is importable.
* @param {IModelMeta} resourceMeta
*/
public validateResourceImportable(resourceMeta: IModelMeta) {
// Throw service error if the resource does not support importing.
if (!resourceMeta.importable) {
throw new ServiceError(ERRORS.RESOURCE_NOT_IMPORTABLE);
}
}
/**
* Validates the given mapped DTO against the importable fields schema.
* @param {ResourceMetaFieldsMap} importableFields - Importable resource fields map.
* @param {Record<string, any>} data - Mapped DTO to validate.
* @returns {Promise<void | ImportInsertError[]>}
*/
public async validateData(
importableFields: ResourceMetaFieldsMap,
data: Record<string, any>
): Promise<void | ImportInsertError[]> {
const YupSchema = convertFieldsToYupValidation(importableFields);
const _data = { ...data };
try {
await YupSchema.validate(_data, { abortEarly: false });
} catch (validationError) {
const errors = validationError.inner.reduce((errors, error) => {
const newErrors = error.errors.map((errMsg) => ({
errorCode: 'ValidationError',
errorMessage: errMsg,
}));
return [...errors, ...newErrors];
}, []);
throw errors;
}
}
}

View File

@@ -0,0 +1,143 @@
import { Injectable } from '@nestjs/common';
import { fromPairs, isUndefined } from 'lodash';
import {
ImportDateFormats,
ImportFileMapPOJO,
ImportMappingAttr,
} from './interfaces';
import { ResourceService } from '../Resource/ResourceService';
import { ServiceError } from '../Items/ServiceError';
import { ERRORS } from './_utils';
import { Import } from './models/Import';
@Injectable()
export class ImportFileMapping {
constructor(private readonly resource: ResourceService) {}
/**
* Maps the excel sheet columns to the resource fields.
* @param {string} importId - Import id.
* @param {ImportMappingAttr[]} maps - Mapping attributes.
* @returns {Promise<ImportFileMapPOJO>}
*/
public async mapping(
importId: string,
maps: ImportMappingAttr[],
): Promise<ImportFileMapPOJO> {
const importFile = await Import.query()
.findOne('filename', importId)
.throwIfNotFound();
// Validate the from/to map attributes.
this.validateMapsAttrs(importFile, maps);
// @todo validate the required fields.
// Validate the duplicated relations of map attrs.
this.validateDuplicatedMapAttrs(maps);
// Validate the date format mapping.
this.validateDateFormatMapping(importFile.resource, maps);
const mappingStringified = JSON.stringify(maps);
await Import.query().findById(importFile.id).patch({
mapping: mappingStringified,
});
return {
import: {
importId: importFile.importId,
resource: importFile.resource,
},
};
}
/**
* Validates the mapping attributes.
* @param {Import} importFile - Import file model.
* @param {ImportMappingAttr[]} maps - Mapping attributes.
* @throws {ServiceError(ERRORS.INVALID_MAP_ATTRS)}
*/
private validateMapsAttrs(importFile: any, maps: ImportMappingAttr[]) {
const fields = this.resource.getResourceFields2(importFile.resource);
const columnsMap = fromPairs(
importFile.columnsParsed.map((field) => [field, '']),
);
const invalid = [];
// is not empty, is not undefined or map.group
maps.forEach((map) => {
let _invalid = true;
if (!map.group && fields[map.to]) {
_invalid = false;
}
if (map.group && fields[map.group] && fields[map.group]?.fields[map.to]) {
_invalid = false;
}
if (columnsMap[map.from]) {
_invalid = false;
}
if (_invalid) {
invalid.push(map);
}
});
if (invalid.length > 0) {
throw new ServiceError(ERRORS.INVALID_MAP_ATTRS);
}
}
/**
* Validate the map attrs relation should be one-to-one relation only.
* @param {ImportMappingAttr[]} maps
*/
private validateDuplicatedMapAttrs(maps: ImportMappingAttr[]) {
const fromMap = {};
const toMap = {};
maps.forEach((map) => {
if (fromMap[map.from]) {
throw new ServiceError(ERRORS.DUPLICATED_FROM_MAP_ATTR);
} else {
fromMap[map.from] = true;
}
const toPath = !isUndefined(map?.group)
? `${map.group}.${map.to}`
: map.to;
if (toMap[toPath]) {
throw new ServiceError(ERRORS.DUPLICATED_TO_MAP_ATTR);
} else {
toMap[toPath] = true;
}
});
}
/**
* Validates the date format mapping.
* @param {string} resource
* @param {ImportMappingAttr[]} maps
*/
private validateDateFormatMapping(
resource: string,
maps: ImportMappingAttr[],
) {
const fields = this.resource.getResourceImportableFields(resource);
// @todo Validate date type of the nested fields.
maps.forEach((map) => {
if (
typeof fields[map.to] !== 'undefined' &&
fields[map.to].fieldType === 'date'
) {
if (
typeof map.dateFormat !== 'undefined' &&
ImportDateFormats.indexOf(map.dateFormat) === -1
) {
throw new ServiceError(ERRORS.INVALID_MAP_DATE_FORMAT);
}
}
});
}
}
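
For illustration, the shape of a `maps` payload this service accepts (column and field names are hypothetical). The checks above reject targets that match neither a top-level nor a grouped resource field, duplicated `from` columns, duplicated targets, and unsupported date formats.

import { ImportMappingAttr } from './interfaces';

const maps: ImportMappingAttr[] = [
{ from: 'Account Name', to: 'name' },
{ from: 'Opening Date', to: 'openingDate', dateFormat: 'yyyy-MM-dd' },
{ from: 'Line Amount', to: 'amount', group: 'entries' },
];
// await importFileMapping.mapping('<import id / filename>', maps);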

View File

@@ -0,0 +1,27 @@
import { Import } from './models/Import';
import { ImportFileMetaTransformer } from './ImportFileMetaTransformer';
import { Injectable } from '@nestjs/common';
import { TransformerInjectable } from '../Transformer/TransformerInjectable.service';
@Injectable()
export class ImportFileMeta {
constructor(private readonly transformer: TransformerInjectable) {}
/**
* Retrieves the import meta of the given import id.
* @param {string} importId - Import id.
* @returns {Promise<any>}
*/
async getImportMeta(importId: string) {
const importFile = await Import.query().findOne('importId', importId);
// Retrieves the transformed import file meta.
return this.transformer.transform(
importFile,
new ImportFileMetaTransformer(),
);
}
}

View File

@@ -0,0 +1,19 @@
import { Transformer } from '../Transformer/Transformer';
export class ImportFileMetaTransformer extends Transformer {
/**
* Included attributes of the import meta object.
* @returns {Array}
*/
public includeAttributes = (): string[] => {
return ['map'];
};
public excludeAttributes = (): string[] => {
return ['id', 'filename', 'columns', 'mappingParsed', 'mapping'];
};
map(importFile) {
return importFile.mappingParsed;
}
}

View File

@@ -0,0 +1,40 @@
import { ImportFilePreviewPOJO } from './interfaces';
import { ImportFileProcess } from './ImportFileProcess';
import { ImportAls } from './ImportALS';
import { Injectable } from '@nestjs/common';
@Injectable()
export class ImportFilePreview {
constructor(
private readonly importFile: ImportFileProcess,
private readonly importAls: ImportAls,
) {}
/**
* Previews the imported file results before committing the transactions.
* @param {string} importId - Import id.
* @returns {Promise<ImportFilePreviewPOJO>}
*/
public async preview(importId: string): Promise<ImportFilePreviewPOJO> {
return this.importAls.runPreview<Promise<ImportFilePreviewPOJO>>(() =>
this.previewAlsRun(importId),
);
}
/**
* Runs the preview under the ALS import context: imports inside a transaction, then rolls it back.
* @param {string} importId - Import id.
* @returns {Promise<ImportFilePreviewPOJO>}
*/
public async previewAlsRun(importId: string): Promise<ImportFilePreviewPOJO> {
const knex = this.tenancy.knex(tenantId);
const trx = await knex.transaction({ isolationLevel: 'read uncommitted' });
const meta = await this.importFile.import(importId, trx);
// Roll back the transaction so the preview leaves no side effects.
await trx.rollback();
return meta;
}
}

View File

@@ -0,0 +1,97 @@
import { chain } from 'lodash';
import { Knex } from 'knex';
import { ERRORS, getUnmappedSheetColumns, readImportFile } from './_utils';
import { ImportFileCommon } from './ImportFileCommon';
import { ImportFileDataTransformer } from './ImportFileDataTransformer';
import { ImportFilePreviewPOJO } from './interfaces';
import { parseSheetData } from './sheet_utils';
import { Injectable } from '@nestjs/common';
import { ResourceService } from '../Resource/ResourceService';
import { UnitOfWork } from '../Tenancy/TenancyDB/UnitOfWork.service';
import { ServiceError } from '../Items/ServiceError';
import { Import } from './models/Import';
@Injectable()
export class ImportFileProcess {
constructor(
private readonly resource: ResourceService,
private readonly importCommon: ImportFileCommon,
private readonly importParser: ImportFileDataTransformer,
private readonly uow: UnitOfWork,
) {}
/**
* Imports the mapped file rows to the resource storage under the given transaction.
* @param {string} importId - Import id.
* @param {Knex.Transaction} trx - Knex transaction.
* @returns {Promise<ImportFilePreviewPOJO>}
*/
public async import(
importId: string,
trx?: Knex.Transaction,
): Promise<ImportFilePreviewPOJO> {
const importFile = await Import.query()
.findOne('importId', importId)
.throwIfNotFound();
// Throw error if the import file is not mapped yet.
if (!importFile.isMapped) {
throw new ServiceError(ERRORS.IMPORT_FILE_NOT_MAPPED);
}
// Read the imported file and parse the given buffer to get columns
// and sheet data in json format.
const buffer = await readImportFile(importFile.filename);
const [sheetData, sheetColumns] = parseSheetData(buffer);
const resource = importFile.resource;
const resourceFields = this.resource.getResourceFields2(resource);
// Runs the importing operation and collects the errors that occur.
const [successedImport, failedImport, allData] =
await this.uow.withTransaction(async (trx: Knex.Transaction) => {
// Parses the sheet json data.
const parsedData = await this.importParser.parseSheetData(
importFile,
resourceFields,
sheetData,
trx,
);
const [successedImport, failedImport] = await this.importCommon.import(
importFile,
parsedData,
trx,
);
return [successedImport, failedImport, parsedData];
}, trx);
const mapping = importFile.mappingParsed;
const errors = chain(failedImport)
.map((oper) => oper.error)
.flatten()
.value();
const unmappedColumns = getUnmappedSheetColumns(sheetColumns, mapping);
const totalCount = allData.length;
const createdCount = successedImport.length;
const errorsCount = failedImport.length;
const skippedCount = errorsCount;
return {
resource,
createdCount,
skippedCount,
totalCount,
errorsCount,
errors,
unmappedColumns: unmappedColumns,
unmappedColumnsCount: unmappedColumns.length,
};
}
}

View File

@@ -0,0 +1,48 @@
import { ImportFilePreviewPOJO } from './interfaces';
import { ImportFileProcess } from './ImportFileProcess';
import { ImportAls } from './ImportALS';
import { Injectable } from '@nestjs/common';
import { EventEmitter2 } from '@nestjs/event-emitter';
import { events } from '@/common/events/events';
@Injectable()
export class ImportFileProcessCommit {
constructor(
private readonly importFile: ImportFileProcess,
private readonly importAls: ImportAls,
private readonly eventEmitter: EventEmitter2,
) {}
/**
* Commits the imported file under ALS.
* @param {string} importId - The import ID.
* @returns {Promise<ImportFilePreviewPOJO>}
*/
public commit(importId: string): Promise<ImportFilePreviewPOJO> {
return this.importAls.runCommit<Promise<ImportFilePreviewPOJO>>(() =>
this.commitAlsRun(importId),
);
}
/**
* Commits the imported file.
* @param {string} importId - The import ID.
* @returns {Promise<ImportFilePreviewPOJO>}
*/
public async commitAlsRun(importId: string): Promise<ImportFilePreviewPOJO> {
const trx = await knex.transaction({ isolationLevel: 'read uncommitted' });
const meta = await this.importFile.import(importId, trx);
// Commit the transaction.
await trx.commit();
// Triggers `onImportCommitted` event.
await this.eventEmitter.emitAsync(events.import.onImportCommitted, {
meta,
importId,
});
return meta;
}
}

View File

@@ -0,0 +1,115 @@
import {
deleteImportFile,
getResourceColumns,
readImportFile,
sanitizeResourceName,
validateSheetEmpty,
} from './_utils';
import { ResourceService } from '../Resource/ResourceService';
import { ImportFileCommon } from './ImportFileCommon';
import { ImportFileDataValidator } from './ImportFileDataValidator';
import { ImportFileUploadPOJO } from './interfaces';
import { Import } from '@/system/models';
import { parseSheetData } from './sheet_utils';
import { Injectable } from '@nestjs/common';
@Injectable()
export class ImportFileUploadService {
constructor(
private resourceService: ResourceService,
private importFileCommon: ImportFileCommon,
private importValidator: ImportFileDataValidator,
) {}
/**
* Imports the specified file for the given resource.
* Deletes the file if an error occurs during the import process.
* @param {string} resourceName - Resource name.
* @param {string} filename - File name.
* @param {Record<string, number | string>} params - Import params.
* @returns {Promise<ImportFileUploadPOJO>}
*/
public async import(
resourceName: string,
filename: string,
params: Record<string, number | string>,
): Promise<ImportFileUploadPOJO> {
try {
return await this.importUnhandled(resourceName, filename, params);
} catch (err) {
deleteImportFile(filename);
throw err;
}
}
/**
* Reads the imported file and stores the import file meta under a unique id.
* @param {string} resourceName - Resource name.
* @param {string} filename - File name.
* @param {Record<string, number | string>} params - Import params.
* @returns {Promise<ImportFileUploadPOJO>}
*/
public async importUnhandled(
resourceName: string,
filename: string,
params: Record<string, number | string>,
): Promise<ImportFileUploadPOJO> {
const resource = sanitizeResourceName(resourceName);
const resourceMeta = this.resourceService.getResourceMeta(resource);
// Throw service error if the resource does not support importing.
this.importValidator.validateResourceImportable(resourceMeta);
// Reads the imported file into buffer.
const buffer = await readImportFile(filename);
// Parse the buffer file to array data.
const [sheetData, sheetColumns] = parseSheetData(buffer);
const columnsStringified = JSON.stringify(sheetColumns);
// Throws service error if the sheet data is empty.
validateSheetEmpty(sheetData);
// Validates the params Yup schema.
await this.importFileCommon.validateParamsSchema(resource, params);
// Validates importable params asynchronously.
await this.importFileCommon.validateParams(resource, params);
const _params = this.importFileCommon.transformParams(resource, params);
const paramsStringified = JSON.stringify(_params);
// Store the import model with related metadata.
const importFile = await Import.query().insert({
filename,
resource,
importId: filename,
columns: columnsStringified,
params: paramsStringified,
});
const resourceColumnsMap = this.resourceService.getResourceFields2(resource);
const resourceColumns = getResourceColumns(resourceColumnsMap);
return {
import: {
importId: importFile.importId,
resource: importFile.resource,
},
sheetColumns,
resourceColumns,
};
}
}

View File

@@ -0,0 +1,34 @@
import * as moment from 'moment';
import bluebird from 'bluebird';
import { deleteImportFile } from './_utils';
import { Injectable } from '@nestjs/common';
import { Import } from './models/Import';
@Injectable()
export class ImportDeleteExpiredFiles {
/**
* Deletes the expired import files and their records.
*/
async deleteExpiredFiles() {
// Import files older than one hour are considered expired.
const expiredAt = moment().subtract(1, 'hour').format('YYYY-MM-DD HH:mm');
const expiredImports = await Import.query().where(
'createdAt',
'<',
expiredAt,
);
await bluebird.map(
expiredImports,
async (expiredImport) => {
await deleteImportFile(expiredImport.filename);
},
{ concurrency: 10 }
);
const expiredImportsIds = expiredImports.map(
(expiredImport) => expiredImport.id
);
if (expiredImportsIds.length > 0) {
await Import.query().whereIn('id', expiredImportsIds).delete();
}
}
}

View File

@@ -0,0 +1,25 @@
import { Module } from '@nestjs/common';
import { ImportResourceApplication } from './ImportResourceApplication';
import { ImportFileUploadService } from './ImportFileUpload';
import { ImportFileMapping } from './ImportFileMapping';
import { ImportFileProcess } from './ImportFileProcess';
import { ImportFilePreview } from './ImportFilePreview';
import { ImportFileProcessCommit } from './ImportFileProcessCommit';
import { ImportFileMeta } from './ImportFileMeta';
import { ImportSampleService } from './ImportSample';
@Module({
imports: [],
providers: [
ImportResourceApplication,
ImportFileUploadService,
ImportFileMapping,
ImportFileProcess,
ImportFilePreview,
ImportSampleService,
ImportFileMeta,
ImportFileProcessCommit,
],
exports: [ImportResourceApplication],
})
export class ImportResourceModule {}

View File

@@ -0,0 +1,80 @@
import { ImportFileUploadService } from './ImportFileUpload';
import { ImportFileMapping } from './ImportFileMapping';
import { ImportMappingAttr } from './interfaces';
import { ImportFileProcess } from './ImportFileProcess';
import { ImportFilePreview } from './ImportFilePreview';
import { ImportSampleService } from './ImportSample';
import { ImportFileMeta } from './ImportFileMeta';
import { ImportFileProcessCommit } from './ImportFileProcessCommit';
import { Injectable } from '@nestjs/common';
@Injectable()
export class ImportResourceApplication {
constructor(
private readonly importFileService: ImportFileUploadService,
private readonly importMappingService: ImportFileMapping,
private readonly importProcessService: ImportFileProcess,
private readonly importFilePreviewService: ImportFilePreview,
private readonly importSampleService: ImportSampleService,
private readonly importMetaService: ImportFileMeta,
private readonly importProcessCommit: ImportFileProcessCommit,
) {}
/**
* Reads the imported file and stores the import file meta under a unique id.
* @param {string} resource - Resource name.
* @param {string} filename - File name.
* @param {Record<string, any>} params - Import params.
* @returns {Promise<ImportFileUploadPOJO>}
*/
public async import(
resource: string,
filename: string,
params: Record<string, any>,
) {
return this.importFileService.import(resource, filename, params);
}
/**
* Maps the excel sheet columns to the resource fields.
* @param {string} importId - Import id.
* @param {ImportMappingAttr[]} maps - Mapping attributes.
*/
public async mapping(importId: string, maps: ImportMappingAttr[]) {
return this.importMappingService.mapping(importId, maps);
}
/**
* Previews the mapped results before processing the import.
* @param {string} importId - Import id.
* @returns {Promise<ImportFilePreviewPOJO>}
*/
public async preview(importId: string) {
return this.importFilePreviewService.preview(importId);
}
/**
* Processes the import file sheet through the importable service to create entities.
* @param {string} importId - Import id.
* @returns {Promise<ImportFilePreviewPOJO>}
*/
public async process(importId: string) {
return this.importProcessCommit.commit(importId);
}
/**
* Retrieves the import meta of the given import id.
* @param {string} importId - Import id.
* @returns {Promise<any>}
*/
public importMeta(importId: string) {
return this.importMetaService.getImportMeta(importId);
}
/**
* Retrieves the csv/xlsx sample sheet of the given resource.
* @param {string} resource - Resource name.
* @param {'csv' | 'xlsx'} format - Sample sheet format.
*/
public sample(resource: string, format: 'csv' | 'xlsx' = 'csv') {
return this.importSampleService.sample(resource, format);
}
}
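
A hedged end-to-end sketch of the facade above; the resource, file name, and mapping are illustrative.

import { ImportResourceApplication } from './ImportResourceApplication';

async function importItemsExample(importApp: ImportResourceApplication) {
// 1. Upload/registration: parses the sheet and stores the import metadata.
const uploaded = await importApp.import('items', 'import-3f2a.xlsx', {});
// 2. Map the sheet headers onto resource fields.
await importApp.mapping(uploaded.import.importId, [{ from: 'Item Name', to: 'name' }]);
// 3. Dry run: rows are imported inside a transaction that is rolled back.
const preview = await importApp.preview(uploaded.import.importId);
// 4. Commit for real once the preview reports no errors.
if (preview.errorsCount === 0) {
await importApp.process(uploaded.import.importId);
}
}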

View File

@@ -0,0 +1,45 @@
import XLSX from 'xlsx';
import { ImportableResources } from './ImportableResources';
import { sanitizeResourceName } from './_utils';
import { Injectable } from '@nestjs/common';
@Injectable()
export class ImportSampleService {
constructor(
private readonly importable: ImportableResources,
) {}
/**
* Retrieves the sample sheet of the given resource.
* @param {string} resource
* @param {string} format
* @returns {Buffer | string}
*/
public sample(
resource: string,
format: 'csv' | 'xlsx'
): Buffer | string {
const _resource = sanitizeResourceName(resource);
const ImportableRegistry = this.importable.registry;
const importable = ImportableRegistry.getImportable(_resource);
const data = importable.sampleData();
const workbook = XLSX.utils.book_new();
const worksheet = XLSX.utils.json_to_sheet(data);
XLSX.utils.book_append_sheet(workbook, worksheet, 'Sheet1');
// Determine the output format
if (format === 'csv') {
const csvOutput = XLSX.utils.sheet_to_csv(worksheet);
return csvOutput;
} else {
const xlsxOutput = XLSX.write(workbook, {
bookType: 'xlsx',
type: 'buffer',
});
return xlsxOutput;
}
}
}
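
Usage sketch, given an injected `ImportSampleService` instance; the resource name is illustrative. The return type depends on the requested format.

const csvSample = importSampleService.sample('items', 'csv'); // CSV string
const xlsxSample = importSampleService.sample('items', 'xlsx'); // XLSX Buffer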

View File

@@ -0,0 +1,71 @@
import { Knex } from 'knex';
import * as Yup from 'yup';
import { ImportableContext } from './interfaces';
export abstract class Importable {
/**
* Imports the given DTO to the resource storage; concrete importables must override this.
* @param {any} createDTO
* @param {Knex.Transaction} trx
*/
public importable(createDTO: any, trx?: Knex.Transaction) {
throw new Error(
'The `importable` function is not defined in service importable.'
);
}
/**
* Transforms the DTO before passing it to the importable handler and validation.
* @param {Record<string, any>} createDTO
* @param {ImportableContext} context
* @returns {Record<string, any>}
*/
public transform(createDTO: Record<string, any>, context: ImportableContext) {
return createDTO;
}
/**
* Concurrency limit of the importing process.
* @returns {number}
*/
public get concurrency() {
return 10;
}
/**
* Retrieves the sample data of importable.
* @returns {Array<any>}
*/
public sampleData(): Array<any> {
return [];
}
// ------------------
// # Params
// ------------------
/**
* Params Yup validation schema.
* @returns {Yup.ObjectSchema<object, object>}
*/
public paramsValidationSchema(): Yup.ObjectSchema<object, object> {
return Yup.object().nullable();
}
/**
* Validates the params of the importable service.
* @param {Record<string, any>} params
* @returns {Promise<void>} - Resolves when valid; should throw on invalid params.
*/
public async validateParams(
params: Record<string, any>
): Promise<void> {}
/**
* Transforms the import params before storing them.
* @param {Record<string, any>} params
*/
public transformParams(params: Record<string, any>) {
return params;
}
}
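
A minimal hypothetical subclass to show the contract; `createItem` stands in for a real domain-service method and is not part of this commit.

import { Knex } from 'knex';
import { Injectable } from '@nestjs/common';
import { Importable } from './Importable';

// Hypothetical persistence call; a real importable would delegate to its domain service.
declare function createItem(dto: Record<string, any>, trx?: Knex.Transaction): Promise<unknown>;

@Injectable()
export class ItemsImportableSketch extends Importable {
// Persists one parsed row; errors thrown here are collected per row by ImportFileCommon.
public importable(createDTO: Record<string, any>, trx?: Knex.Transaction) {
return createItem(createDTO, trx);
}

// Sample rows used by ImportSampleService to build the downloadable template.
public sampleData() {
return [{ name: 'Keyboard', sellPrice: 25 }];
}
}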

View File

@@ -0,0 +1,46 @@
import { camelCase, upperFirst } from 'lodash';
import { Importable } from './Importable';
export class ImportableRegistry {
private static instance: ImportableRegistry;
private importables: Record<string, Importable>;
constructor() {
this.importables = {};
}
/**
* Gets singleton instance of registry.
* @returns {ImportableRegistry}
*/
public static getInstance(): ImportableRegistry {
if (!ImportableRegistry.instance) {
ImportableRegistry.instance = new ImportableRegistry();
}
return ImportableRegistry.instance;
}
/**
* Registers the given importable service.
* @param {string} resource
* @param {Importable} importable
*/
public registerImportable(resource: string, importable: Importable): void {
const _resource = this.sanitizeResourceName(resource);
this.importables[_resource] = importable;
}
/**
* Retrieves the importable service instance of the given resource name.
* @param {string} name
* @returns {Importable}
*/
public getImportable(name: string): Importable {
const _name = this.sanitizeResourceName(name);
return this.importables[_name];
}
private sanitizeResourceName(resource: string) {
return upperFirst(camelCase(resource));
}
}

View File

@@ -0,0 +1,73 @@
// import { AccountsImportable } from '../Accounts/AccountsImportable';
import { Injectable } from '@nestjs/common';
import { ImportableRegistry } from './ImportableRegistry';
// import { UncategorizedTransactionsImportable } from '../BankingCategorize/commands/UncategorizedTransactionsImportable';
// import { CustomersImportable } from '../Contacts/Customers/CustomersImportable';
// import { VendorsImportable } from '../Contacts/Vendors/VendorsImportable';
// import { ItemsImportable } from '../Items/ItemsImportable';
// import { ItemCategoriesImportable } from '../ItemCategories/ItemCategoriesImportable';
// import { ManualJournalImportable } from '../ManualJournals/commands/ManualJournalsImport';
// import { BillsImportable } from '../Purchases/Bills/BillsImportable';
// import { ExpensesImportable } from '../Expenses/ExpensesImportable';
// import { SaleInvoicesImportable } from '../Sales/Invoices/SaleInvoicesImportable';
// import { SaleEstimatesImportable } from '../Sales/Estimates/SaleEstimatesImportable';
// import { BillPaymentsImportable } from '../Purchases/BillPayments/BillPaymentsImportable';
// import { VendorCreditsImportable } from '../Purchases/VendorCredits/VendorCreditsImportable';
// import { PaymentsReceivedImportable } from '../Sales/PaymentReceived/PaymentsReceivedImportable';
// import { CreditNotesImportable } from '../CreditNotes/commands/CreditNotesImportable';
// import { SaleReceiptsImportable } from '../Sales/Receipts/SaleReceiptsImportable';
// import { TaxRatesImportable } from '../TaxRates/TaxRatesImportable';
@Injectable()
export class ImportableResources {
private static registry: ImportableRegistry;
constructor() {
this.boot();
}
/**
* Importable instances.
*/
private importables = [
// { resource: 'Account', importable: AccountsImportable },
// {
// resource: 'UncategorizedCashflowTransaction',
// importable: UncategorizedTransactionsImportable,
// },
// { resource: 'Customer', importable: CustomersImportable },
// { resource: 'Vendor', importable: VendorsImportable },
// { resource: 'Item', importable: ItemsImportable },
// { resource: 'ItemCategory', importable: ItemCategoriesImportable },
// { resource: 'ManualJournal', importable: ManualJournalImportable },
// { resource: 'Bill', importable: BillsImportable },
// { resource: 'Expense', importable: ExpensesImportable },
// { resource: 'SaleInvoice', importable: SaleInvoicesImportable },
// { resource: 'SaleEstimate', importable: SaleEstimatesImportable },
// { resource: 'BillPayment', importable: BillPaymentsImportable },
// { resource: 'PaymentReceive', importable: PaymentsReceivedImportable },
// { resource: 'VendorCredit', importable: VendorCreditsImportable },
// { resource: 'CreditNote', importable: CreditNotesImportable },
// { resource: 'SaleReceipt', importable: SaleReceiptsImportable },
// { resource: 'TaxRate', importable: TaxRatesImportable },
];
public get registry() {
return ImportableResources.registry;
}
/**
* Boots all the registered importables.
*/
public boot() {
if (!ImportableResources.registry) {
const instance = ImportableRegistry.getInstance();
this.importables.forEach((importable) => {
// const importableInstance = Container.get(importable.importable);
// instance.registerImportable(importable.resource, importableInstance);
});
ImportableResources.registry = instance;
}
}
}

View File

@@ -0,0 +1,3 @@
// Concurrency limit used when parsing the sheet row DTOs.
export const ConcurrencyParsingDTOs = 10;

View File

@@ -0,0 +1,459 @@
import * as Yup from 'yup';
import * as moment from 'moment';
import * as R from 'ramda';
import { Knex } from 'knex';
import fs from 'fs/promises';
import path from 'path';
import {
defaultTo,
upperFirst,
camelCase,
first,
isUndefined,
pickBy,
isEmpty,
castArray,
get,
head,
split,
last,
} from 'lodash';
import pluralize from 'pluralize';
import { ResourceMetaFieldsMap } from './interfaces';
import { IModelMetaField, IModelMetaField2 } from '@/interfaces';
import { ServiceError } from '@/exceptions';
import { multiNumberParse } from '@/utils/multi-number-parse';
export const ERRORS = {
RESOURCE_NOT_IMPORTABLE: 'RESOURCE_NOT_IMPORTABLE',
INVALID_MAP_ATTRS: 'INVALID_MAP_ATTRS',
DUPLICATED_FROM_MAP_ATTR: 'DUPLICATED_FROM_MAP_ATTR',
DUPLICATED_TO_MAP_ATTR: 'DUPLICATED_TO_MAP_ATTR',
IMPORT_FILE_NOT_MAPPED: 'IMPORT_FILE_NOT_MAPPED',
INVALID_MAP_DATE_FORMAT: 'INVALID_MAP_DATE_FORMAT',
MAP_DATE_FORMAT_NOT_DEFINED: 'MAP_DATE_FORMAT_NOT_DEFINED',
IMPORTED_SHEET_EMPTY: 'IMPORTED_SHEET_EMPTY',
};
/**
* Trims the imported object keys and string values before parsing.
* @param {Record<string, string | number>} obj
* @returns {Record<string, string | number>}
*/
export function trimObject(obj: Record<string, string | number>) {
return Object.entries(obj).reduce((acc, [key, value]) => {
// Trim the key
const trimmedKey = key.trim();
// Trim the value if it's a string, otherwise leave it as is
const trimmedValue = typeof value === 'string' ? value.trim() : value;
// Assign the trimmed key and value to the accumulator object
return { ...acc, [trimmedKey]: trimmedValue };
}, {});
}
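// A quick worked example (values are illustrative):
//   trimObject({ ' Item Name ': ' Keyboard ', Quantity: 3 })
//   // => { 'Item Name': 'Keyboard', Quantity: 3 }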
/**
* Generates the Yup validation schema based on the given resource fields.
* @param {ResourceMetaFieldsMap} fields
* @returns {Yup.ObjectSchema}
*/
export const convertFieldsToYupValidation = (fields: ResourceMetaFieldsMap) => {
const yupSchema = {};
Object.keys(fields).forEach((fieldName: string) => {
const field = fields[fieldName] as IModelMetaField;
let fieldSchema;
fieldSchema = Yup.string().label(field.name);
if (field.fieldType === 'text') {
if (!isUndefined(field.minLength)) {
fieldSchema = fieldSchema.min(
field.minLength,
`Minimum length is ${field.minLength} characters`
);
}
if (!isUndefined(field.maxLength)) {
fieldSchema = fieldSchema.max(
field.maxLength,
`Maximum length is ${field.maxLength} characters`
);
}
} else if (field.fieldType === 'number') {
fieldSchema = Yup.number().label(field.name);
if (!isUndefined(field.max)) {
fieldSchema = fieldSchema.max(field.max);
}
if (!isUndefined(field.min)) {
fieldSchema = fieldSchema.min(field.min);
}
} else if (field.fieldType === 'boolean') {
fieldSchema = Yup.boolean().label(field.name);
} else if (field.fieldType === 'enumeration') {
const options = field.options.reduce((acc, option) => {
acc[option.key] = option.label;
return acc;
}, {});
fieldSchema = Yup.string().oneOf(Object.keys(options)).label(field.name);
// Validate date field type.
} else if (field.fieldType === 'date') {
fieldSchema = fieldSchema.test(
'date validation',
'Invalid date or format. The string should be a valid YYYY-MM-DD format.',
(val) => {
if (!val) {
return true;
}
return moment(val, 'YYYY-MM-DD', true).isValid();
}
);
} else if (field.fieldType === 'url') {
fieldSchema = fieldSchema.url();
} else if (field.fieldType === 'collection') {
const nestedFieldSchema = convertFieldsToYupValidation(field.fields);
fieldSchema = Yup.array().label(field.name);
if (!isUndefined(field.collectionMaxLength)) {
fieldSchema = fieldSchema.max(field.collectionMaxLength);
}
if (!isUndefined(field.collectionMinLength)) {
fieldSchema = fieldSchema.min(field.collectionMinLength);
}
fieldSchema = fieldSchema.of(nestedFieldSchema);
}
if (field.required) {
fieldSchema = fieldSchema.required();
}
const _fieldName = parseFieldName(fieldName, field);
yupSchema[_fieldName] = fieldSchema;
});
return Yup.object().shape(yupSchema);
};
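// Usage sketch (the field metadata below is illustrative, not from a real resource):
//
//   const schema = convertFieldsToYupValidation({
//     name: { name: 'Item Name', fieldType: 'text', required: true, maxLength: 255 },
//     sellPrice: { name: 'Sell Price', fieldType: 'number', min: 0 },
//   } as ResourceMetaFieldsMap);
//
//   await schema.validate({ name: 'Keyboard', sellPrice: 25 }, { abortEarly: false });
//
// A valid row resolves; an invalid row rejects with the per-field messages collected in `inner`.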
const parseFieldName = (fieldName: string, field: IModelMetaField) => {
let _key = fieldName;
if (field.dataTransferObjectKey) {
_key = field.dataTransferObjectKey;
}
return _key;
};
/**
* Retrieves the sheet columns that have no mapping attribute.
* @param {string[]} columns - Sheet columns.
* @param {ImportMappingAttr[]} mapping - Mapping attributes.
* @returns {string[]}
*/
export const getUnmappedSheetColumns = (columns, mapping) => {
return columns.filter(
(column) => !mapping.some((map) => map.from === column)
);
};
export const sanitizeResourceName = (resourceName: string) => {
return upperFirst(camelCase(pluralize.singular(resourceName)));
};
export const getSheetColumns = (sheetData: unknown[]) => {
return Object.keys(first(sheetData));
};
/**
* Retrieves the unique value from the given imported object DTO based on the
* configured unique resource field.
* @param {{ [key: string]: IModelMetaField2 }} importableFields - Importable fields map.
* @param {Record<string, any>} objectDTO - Imported object DTO.
* @returns {string}
*/
export const getUniqueImportableValue = (
importableFields: { [key: string]: IModelMetaField2 },
objectDTO: Record<string, any>
) => {
const uniqueImportableValue = pickBy(
importableFields,
(field) => field.unique
);
const uniqueImportableKeys = Object.keys(uniqueImportableValue);
const uniqueImportableKey = first(uniqueImportableKeys);
return defaultTo(objectDTO[uniqueImportableKey], '');
};
/**
* Throws a service error if the given sheet is empty.
* @param {Array<any>} sheetData
*/
export const validateSheetEmpty = (sheetData: Array<any>) => {
if (isEmpty(sheetData)) {
throw new ServiceError(ERRORS.IMPORTED_SHEET_EMPTY);
}
};
const booleanValuesRepresentingTrue: string[] = ['true', 'yes', 'y', 't', '1'];
const booleanValuesRepresentingFalse: string[] = ['false', 'no', 'n', 'f', '0'];
/**
* Parses the given string value to boolean.
* @param {string} value
* @returns {boolean|null}
*/
export const parseBoolean = (value: string): boolean | null => {
const normalizeValue = (value: string): string =>
value.toString().trim().toLowerCase();
const normalizedValue = normalizeValue(value);
const valuesRepresentingTrue =
booleanValuesRepresentingTrue.map(normalizeValue);
const valueRepresentingFalse =
booleanValuesRepresentingFalse.map(normalizeValue);
if (valuesRepresentingTrue.includes(normalizedValue)) {
return true;
} else if (valueRepresentingFalse.includes(normalizedValue)) {
return false;
}
return null;
};
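// For example:
//   parseBoolean('Yes')   // => true
//   parseBoolean(' F ')   // => false
//   parseBoolean('maybe') // => null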
export const transformInputToGroupedFields = (input) => {
const output = [];
// Group for non-nested fields
const mainGroup = {
groupLabel: '',
groupKey: '',
fields: [],
};
input.forEach((item) => {
if (!item.fields) {
// If the item does not have nested fields, add it to the main group
mainGroup.fields.push(item);
} else {
// If the item has nested fields, create a new group for these fields
output.push({
groupLabel: item.name,
groupKey: item.key,
fields: item.fields,
});
}
});
// Add the main group to the output if it contains any fields
if (mainGroup.fields.length > 0) {
output.unshift(mainGroup); // Add the main group at the beginning
}
return output;
};
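// A small worked example (field shapes are illustrative):
//   transformInputToGroupedFields([
//     { key: 'name', name: 'Item Name' },
//     { key: 'entries', name: 'Entries', fields: [{ key: 'rate', name: 'Rate' }] },
//   ]);
//   // => [
//   //   { groupLabel: '', groupKey: '', fields: [{ key: 'name', name: 'Item Name' }] },
//   //   { groupLabel: 'Entries', groupKey: 'entries', fields: [{ key: 'rate', name: 'Rate' }] },
//   // ]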
export const getResourceColumns = (resourceColumns: {
[key: string]: IModelMetaField2;
}) => {
const mapColumn =
(group: string) =>
([fieldKey, { name, importHint, required, order, ...field }]: [
string,
IModelMetaField2
]) => {
const extra: Record<string, any> = {};
const key = fieldKey;
if (group) {
extra.group = group;
}
if (field.fieldType === 'collection') {
extra.fields = mapColumns(field.fields, key);
}
return {
key,
name,
required,
hint: importHint,
order,
...extra,
};
};
const sortColumn = (a, b) =>
a.order && b.order ? a.order - b.order : a.order ? -1 : b.order ? 1 : 0;
const mapColumns = (columns, parentKey = '') =>
Object.entries(columns).map(mapColumn(parentKey)).sort(sortColumn);
return R.compose(transformInputToGroupedFields, mapColumns)(resourceColumns);
};
// Parses the given object value based on the field key type.
export const valueParser =
(fields: ResourceMetaFieldsMap, tenantModels: any, trx?: Knex.Transaction) =>
async (value: any, key: string, group = '') => {
let _value = value;
const fieldKey = key.includes('.') ? key.split('.')[0] : key;
const field = group ? fields[group]?.fields[fieldKey] : fields[fieldKey];
// Parses the boolean value.
if (field.fieldType === 'boolean') {
_value = parseBoolean(value);
// Parses the enumeration value.
} else if (field.fieldType === 'enumeration') {
const option = get(field, 'options', []).find(
(option) => option.label?.toLowerCase() === value?.toLowerCase()
);
_value = get(option, 'key');
// Parses the numeric value.
} else if (field.fieldType === 'number') {
_value = multiNumberParse(value);
// Parses the relation value.
} else if (field.fieldType === 'relation') {
const RelationModel = tenantModels[field.relationModel];
if (!RelationModel) {
throw new Error(`The relation model of ${key} field is not exist.`);
}
const relationQuery = RelationModel.query(trx);
const relationKeys = castArray(field?.relationImportMatch);
relationQuery.where(function () {
relationKeys.forEach((relationKey: string) => {
this.orWhereRaw('LOWER(??) = LOWER(?)', [relationKey, value]);
});
});
const result = await relationQuery.first();
_value = get(result, 'id');
} else if (field.fieldType === 'collection') {
const ObjectFieldKey = key.includes('.') ? key.split('.')[1] : key;
const _valueParser = valueParser(fields, tenantModels);
_value = await _valueParser(value, ObjectFieldKey, fieldKey);
}
return _value;
};
/**
* Parses the field key and determines the key path.
* @param {{ [key: string]: IModelMetaField2 }} fields
* @param {string} key - Mapped key path. formats: `group.key` or `key`.
* @returns {string}
*/
export const parseKey: R.Curry<string> = R.curry(
(fields: { [key: string]: IModelMetaField2 }, key: string) => {
const fieldKey = getFieldKey(key);
const field = fields[fieldKey];
let _key = key;
if (field.fieldType === 'collection') {
if (field.collectionOf === 'object') {
const nestedFieldKey = last(key.split('.'));
_key = `${fieldKey}[0].${nestedFieldKey}`;
} else if (
field.collectionOf === 'string' ||
field.collectionOf === 'numeric'
) {
_key = `${fieldKey}`;
}
}
return _key;
}
);
/**
* Retrieves the field root key, e.g. input `entries.itemId` gives output `entries`.
* @param {string} input
* @returns {string}
*/
export const getFieldKey = (input: string) => {
const keys = split(input, '.');
const firstKey = head(keys).split('[')[0]; // Split by "[" in case of array notation
return firstKey;
};
/**
* Aggregates the input array of objects based on a comparator attribute and groups the entries.
* This function is useful for combining multiple entries into a single entry based on a specific attribute,
* while aggregating other attributes into an array.
*
* @param {Array} input - The array of objects to be aggregated.
* @param {string} comparatorAttr - The attribute of the objects used for comparison to aggregate.
* @param {string} groupOn - The attribute of the objects where the grouped entries will be pushed.
* @returns {Array} - The aggregated array of objects.
*
* @example
* // Example input:
* const input = [
* { id: 1, name: 'John', entries: ['entry1'] },
* { id: 2, name: 'Jane', entries: ['entry2'] },
* { id: 1, name: 'John', entries: ['entry3'] },
* ];
* const comparatorAttr = 'id';
* const groupOn = 'entries';
*
* // Example output:
* const output = [
* { id: 1, name: 'John', entries: ['entry1', 'entry3'] },
* { id: 2, name: 'Jane', entries: ['entry2'] },
* ];
*/
export function aggregate(
input: Array<any>,
comparatorAttr: string,
groupOn: string
): Array<Record<string, any>> {
return input.reduce((acc, curr) => {
const existingEntry = acc.find(
(entry) => entry[comparatorAttr] === curr[comparatorAttr]
);
if (existingEntry) {
existingEntry[groupOn].push(...curr[groupOn]);
} else {
acc.push({ ...curr });
}
return acc;
}, []);
}
/**
* Sanitizes the imported sheet data by trimming object keys and string values.
* @param json - The JSON data representing the imported sheet.
* @returns {Record<string, string | number>[]} - The sanitized sheet rows.
*/
export const sanitizeSheetData = (json) => {
return R.compose(R.map(trimObject))(json);
};
/**
* Returns the path to map a value to based on the 'to' and 'group' parameters.
* @param {string} to - The target key to map the value to.
* @param {string} group - The group key to nest the target key under.
* @returns {string} - The path to map the value to.
*/
export const getMapToPath = (to: string, group = '') =>
group ? `${group}.${to}` : to;
export const getImportsStoragePath = () => {
return path.join(global.__storage_dir, `/imports`);
};
/**
* Deletes the imported file from the storage and database.
* @param {string} filename
*/
export const deleteImportFile = async (filename: string) => {
const filePath = getImportsStoragePath();
// Deletes the imported file.
await fs.unlink(`${filePath}/${filename}`);
};
/**
* Reads the import file.
* @param {string} filename
* @returns {Promise<Buffer>}
*/
export const readImportFile = (filename: string) => {
const filePath = getImportsStoragePath();
return fs.readFile(`${filePath}/${filename}`);
};

View File

@@ -0,0 +1,77 @@
import { IModelMetaField2 } from "@/interfaces/Model";
import { Import } from "./models/Import";
export interface ImportMappingAttr {
from: string;
to: string;
group?: string;
dateFormat?: string;
}
export interface ImportValidationError {
index: number;
property: string;
constraints: Record<string, string>;
}
export type ResourceMetaFieldsMap = { [key: string]: IModelMetaField2 };
export interface ImportInsertError {
rowNumber: number;
errorCode: string;
errorMessage: string;
}
export interface ImportFileUploadPOJO {
import: {
importId: string;
resource: string;
};
sheetColumns: string[];
resourceColumns: {
key: string;
name: string;
required?: boolean;
hint?: string;
}[];
}
export interface ImportFileMapPOJO {
import: {
importId: string;
resource: string;
};
}
export interface ImportFilePreviewPOJO {
resource: string;
createdCount: number;
skippedCount: number;
totalCount: number;
errorsCount: number;
errors: ImportInsertError[];
unmappedColumns: string[];
unmappedColumnsCount: number;
}
export interface ImportOperSuccess {
data: unknown;
index: number;
}
export interface ImportOperError {
error: ImportInsertError[];
index: number;
}
export interface ImportableContext {
import: Import;
rowIndex: number;
}
export const ImportDateFormats = [
'yyyy-MM-dd',
'dd.MM.yy',
'MM/dd/yy',
'dd/MMM/yyyy',
];

View File

@@ -0,0 +1,28 @@
// import Container, { Service } from 'typedi';
// import { ImportDeleteExpiredFiles } from '../ImportRemoveExpiredFiles';
// @Service()
// export class ImportDeleteExpiredFilesJobs {
// /**
// * Constructor method.
// */
// constructor(agenda) {
// agenda.define('delete-expired-imported-files', this.handler);
// }
// /**
// * Triggers deleting the expired imported files.
// */
// private handler = async (job, done: Function) => {
// const importDeleteExpiredFiles = Container.get(ImportDeleteExpiredFiles);
// try {
// console.log('Delete expired import files has started.');
// await importDeleteExpiredFiles.deleteExpiredFiles();
// done();
// } catch (error) {
// console.log(error);
// done(error);
// }
// };
// }

View File

@@ -0,0 +1,87 @@
import { Model, ModelObject } from 'objection';
// import SystemModel from './SystemModel';
import { BaseModel } from '@/models/Model';
export class Import extends BaseModel {
filename!: string;
importId!: string;
resource: string;
tenantId: number;
mapping!: string;
columns!: string;
params!: string;
/**
* Table name.
*/
static get tableName() {
return 'imports';
}
/**
* Virtual attributes.
*/
static get virtualAttributes() {
return ['mappingParsed'];
}
/**
* Timestamps columns.
*/
get timestamps() {
return ['createdAt', 'updatedAt'];
}
/**
* Determines whether the import is mapped.
* @returns {boolean}
*/
public get isMapped() {
return Boolean(this.mapping);
}
public get columnsParsed() {
try {
return JSON.parse(this.columns);
} catch {
return [];
}
}
public get paramsParsed() {
try {
return JSON.parse(this.params);
} catch {
return [];
}
}
public get mappingParsed() {
try {
return JSON.parse(this.mapping);
} catch {
return [];
}
}
/**
* Relationship mapping.
*/
static get relationMappings() {
const Tenant = require('system/models/Tenant');
return {
/**
* The import may belong to the tenant model.
*/
tenant: {
relation: Model.BelongsToOneRelation,
modelClass: Tenant.default,
join: {
from: 'imports.tenantId',
to: 'tenants.id',
},
},
};
}
}
export type ImportShape = ModelObject<Import>;

View File

@@ -0,0 +1,56 @@
import XLSX from 'xlsx';
import { first } from 'lodash';
/**
* Parses the given sheet buffer to worksheet.
* @param {Buffer} buffer
* @returns {XLSX.WorkSheet}
*/
export function parseFirstSheet(buffer: Buffer): XLSX.WorkSheet {
const workbook = XLSX.read(buffer, { type: 'buffer', raw: true });
const firstSheetName = workbook.SheetNames[0];
const worksheet = workbook.Sheets[firstSheetName];
return worksheet;
}
/**
* Extracts the given worksheet to columns.
* @param {XLSX.WorkSheet} worksheet
* @returns {Array<string>}
*/
export function extractSheetColumns(worksheet: XLSX.WorkSheet): Array<string> {
// By default, sheet_to_json scans the first row and uses the values as headers.
// With the header: 1 option, the function exports an array of arrays of values.
const sheetCells = XLSX.utils.sheet_to_json(worksheet, { header: 1 });
const sheetCols = first(sheetCells) as Array<string>;
return sheetCols.filter((col) => col);
}
/**
* Parses the given worksheet to JSON values; the keys are the column labels.
* @param {XLSX.WorkSheet} worksheet
* @returns {Array<Record<string, string>>}
*/
export function parseSheetToJson(
worksheet: XLSX.WorkSheet
): Array<Record<string, string>> {
return XLSX.utils.sheet_to_json(worksheet, {});
}
/**
* Parses the given sheet buffer then retrieves the sheet data and columns.
* @param {Buffer} buffer
*/
export function parseSheetData(
buffer: Buffer
): [Array<Record<string, string>>, string[]] {
const worksheet = parseFirstSheet(buffer);
const columns = extractSheetColumns(worksheet);
const data = parseSheetToJson(worksheet);
return [data, columns];
}
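
Usage sketch mirroring how the upload and process services above consume these helpers; the file name is illustrative.

import { readImportFile } from './_utils';
import { parseSheetData } from './sheet_utils';

async function previewUploadedSheet(filename: string) {
const buffer = await readImportFile(filename);
const [rows, columns] = parseSheetData(buffer);
// columns: e.g. ['Item Name', 'Quantity']; rows: e.g. [{ 'Item Name': 'Keyboard', Quantity: '3' }]
return { rows, columns };
}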