Mirror of https://github.com/bigcapitalhq/bigcapital.git (synced 2026-02-17 21:30:31 +00:00)
Compare commits

3 Commits: v0.19.11...fix-gettin

| Author | SHA1 | Date |
|---|---|---|
|  | f92acbcbe0 |  |
|  | c986585cd9 |  |
|  | 250f0a30ef |  |
@@ -146,6 +146,7 @@ export default {
     name: 'vendor.field.opening_balance_at',
     type: 'date',
     printable: false,
+    accessor: 'formattedOpeningBalanceAt'
   },
   currencyCode: {
     name: 'vendor.field.currency',
@@ -1,4 +1,3 @@
-import XLSX from 'xlsx';
 import bluebird from 'bluebird';
 import * as R from 'ramda';
 import { Inject, Service } from 'typedi';
@@ -27,23 +26,7 @@ export class ImportFileCommon {
 
   @Inject()
   private resource: ResourceService;
 
-  /**
-   * Maps the columns of the imported data based on the provided mapping attributes.
-   * @param {Record<string, any>[]} body - The array of data objects to map.
-   * @param {ImportMappingAttr[]} map - The mapping attributes.
-   * @returns {Record<string, any>[]} - The mapped data objects.
-   */
-  public parseXlsxSheet(buffer: Buffer): Record<string, unknown>[] {
-    const workbook = XLSX.read(buffer, { type: 'buffer', raw: true });
-
-    const firstSheetName = workbook.SheetNames[0];
-    const worksheet = workbook.Sheets[firstSheetName];
-
-    return XLSX.utils.sheet_to_json(worksheet, {});
-  }
-
-
   /**
    * Imports the given parsed data to the resource storage through registered importable service.
    * @param {number} tenantId -
@@ -2,18 +2,14 @@ import { Inject, Service } from 'typedi';
 import { chain } from 'lodash';
 import { Knex } from 'knex';
 import { ServiceError } from '@/exceptions';
-import {
-  ERRORS,
-  getSheetColumns,
-  getUnmappedSheetColumns,
-  readImportFile,
-} from './_utils';
+import { ERRORS, getUnmappedSheetColumns, readImportFile } from './_utils';
 import { ImportFileCommon } from './ImportFileCommon';
 import { ImportFileDataTransformer } from './ImportFileDataTransformer';
 import ResourceService from '../Resource/ResourceService';
 import UnitOfWork from '../UnitOfWork';
 import { ImportFilePreviewPOJO } from './interfaces';
 import { Import } from '@/system/models';
+import { parseSheetData } from './sheet_utils';
 
 @Service()
 export class ImportFileProcess {
@@ -49,10 +45,10 @@ export class ImportFileProcess {
     if (!importFile.isMapped) {
       throw new ServiceError(ERRORS.IMPORT_FILE_NOT_MAPPED);
     }
-    // Read the imported file.
+    // Read the imported file and parse the given buffer to get columns
+    // and sheet data in json format.
     const buffer = await readImportFile(importFile.filename);
-    const sheetData = this.importCommon.parseXlsxSheet(buffer);
-    const header = getSheetColumns(sheetData);
+    const [sheetData, sheetColumns] = parseSheetData(buffer);
 
     const resource = importFile.resource;
     const resourceFields = this.resource.getResourceFields2(tenantId, resource);
@@ -87,7 +83,7 @@ export class ImportFileProcess {
       .flatten()
       .value();
 
-    const unmappedColumns = getUnmappedSheetColumns(header, mapping);
+    const unmappedColumns = getUnmappedSheetColumns(sheetColumns, mapping);
     const totalCount = allData.length;
 
     const createdCount = successedImport.length;
@@ -11,6 +11,7 @@ import { ImportFileCommon } from './ImportFileCommon';
 import { ImportFileDataValidator } from './ImportFileDataValidator';
 import { ImportFileUploadPOJO } from './interfaces';
 import { Import } from '@/system/models';
+import { parseSheetData } from './sheet_utils';
 
 @Service()
 export class ImportFileUploadService {
@@ -77,14 +78,12 @@ export class ImportFileUploadService {
     const buffer = await readImportFile(filename);
 
     // Parse the buffer file to array data.
-    const sheetData = this.importFileCommon.parseXlsxSheet(buffer);
+    const [sheetData, sheetColumns] = parseSheetData(buffer);
+    const coumnsStringified = JSON.stringify(sheetColumns);
 
     // Throws service error if the sheet data is empty.
     validateSheetEmpty(sheetData);
 
-    const sheetColumns = this.importFileCommon.parseSheetColumns(sheetData);
-    const coumnsStringified = JSON.stringify(sheetColumns);
-
     try {
       // Validates the params Yup schema.
       await this.importFileCommon.validateParamsSchema(resource, params);
packages/server/src/services/Import/sheet_utils.ts (new file, 56 lines)

@@ -0,0 +1,56 @@
+import XLSX from 'xlsx';
+import { first } from 'lodash';
+
+/**
+ * Parses the given sheet buffer to worksheet.
+ * @param {Buffer} buffer
+ * @returns {XLSX.WorkSheet}
+ */
+export function parseFirstSheet(buffer: Buffer): XLSX.WorkSheet {
+  const workbook = XLSX.read(buffer, { type: 'buffer', raw: true });
+
+  const firstSheetName = workbook.SheetNames[0];
+  const worksheet = workbook.Sheets[firstSheetName];
+
+  return worksheet;
+}
+
+/**
+ * Extracts the given worksheet to columns.
+ * @param {XLSX.WorkSheet} worksheet
+ * @returns {Array<string>}
+ */
+export function extractSheetColumns(worksheet: XLSX.WorkSheet): Array<string> {
+  // By default, sheet_to_json scans the first row and uses the values as headers.
+  // With the header: 1 option, the function exports an array of arrays of values.
+  const sheetCells = XLSX.utils.sheet_to_json(worksheet, { header: 1 });
+  const sheetCols = first(sheetCells) as Array<string>;
+
+  return sheetCols.filter((col) => col);
+}
+
+/**
+ * Parses the given worksheet to json values. the keys are columns labels.
+ * @param {XLSX.WorkSheet} worksheet
+ * @returns {Array<Record<string, string>>}
+ */
+export function parseSheetToJson(
+  worksheet: XLSX.WorkSheet
+): Array<Record<string, string>> {
+  return XLSX.utils.sheet_to_json(worksheet, {});
+}
+
+/**
+ * Parses the given sheet buffer then retrieves the sheet data and columns.
+ * @param {Buffer} buffer
+ */
+export function parseSheetData(
+  buffer: Buffer
+): [Array<Record<string, string>>, string[]] {
+  const worksheet = parseFirstSheet(buffer);
+
+  const columns = extractSheetColumns(worksheet);
+  const data = parseSheetToJson(worksheet);
+
+  return [data, columns];
+}
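For context, here is a minimal sketch of how the new `parseSheetData` helper is meant to be consumed by the import services above. The file path and the surrounding function are illustrative assumptions, not part of the diff; only `parseSheetData` and its `[rows, columns]` return shape come from `sheet_utils.ts`.

```ts
import { readFile } from 'fs/promises';
import { parseSheetData } from './sheet_utils';

// Illustrative only: read an .xlsx/.csv file into a buffer and parse it.
async function previewImportFile(path: string) {
  const buffer = await readFile(path);

  // parseSheetData returns [rows, columns]: rows are keyed by the header
  // labels, columns are taken from the first (header) row of the first sheet.
  const [sheetData, sheetColumns] = parseSheetData(buffer);

  console.log('Columns:', sheetColumns);
  console.log('First row:', sheetData[0]);
}
```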
@@ -73,8 +73,9 @@ export function OneClickDemoPageContent() {
           )}
           {running && (
             <Text className={style.waitingText}>
-              We're preparing temporary environment for trial, It typically
-              take few seconds. Do not close or refresh the page.
+              We're preparing the temporary environment for trial. It
+              typically takes a few seconds. Do not close or refresh the
+              page.
             </Text>
           )}
         </Stack>