Mirror of https://github.com/bigcapitalhq/bigcapital.git (synced 2026-02-16 12:50:38 +00:00)

Commit: add server to monorepo.
packages/server/src/lib/AccountTypes/index.ts (new file, 101 lines)
@@ -0,0 +1,101 @@
import { get } from 'lodash';
import { ACCOUNT_TYPES } from '@/data/AccountTypes';

export default class AccountTypesUtils {
  /**
   * Retrieve account types list.
   */
  static getList() {
    return ACCOUNT_TYPES;
  }

  /**
   * Retrieve account types by the given root type.
   * @param {string} rootType -
   * @return {string}
   */
  static getTypesByRootType(rootType: string) {
    return ACCOUNT_TYPES.filter((type) => type.rootType === rootType);
  }

  /**
   * Retrieve account type by the given account type key.
   * @param {string} key
   * @param {string} accessor
   */
  static getType(key: string, accessor?: string) {
    const type = ACCOUNT_TYPES.find((type) => type.key === key);

    if (accessor) {
      return get(type, accessor);
    }
    return type;
  }

  /**
   * Retrieve account types by the parent account type.
   * @param {string} parentType
   */
  static getTypesByParentType(parentType: string) {
    return ACCOUNT_TYPES.filter((type) => type.parentType === parentType);
  }

  /**
   * Retrieve account types by the given account normal.
   * @param {string} normal
   */
  static getTypesByNormal(normal: string) {
    return ACCOUNT_TYPES.filter((type) => type.normal === normal);
  }

  /**
   * Determines whether the root type equals the account type.
   * @param {string} key
   * @param {string} rootType
   */
  static isRootTypeEqualsKey(key: string, rootType: string): boolean {
    return ACCOUNT_TYPES.some((type) => {
      const isType = type.key === key;
      const isRootType = type.rootType === rootType;

      return isType && isRootType;
    });
  }

  /**
   * Determines whether the parent account type equals the account type key.
   * @param {string} key - Account type key.
   * @param {string} parentType - Account parent type.
   */
  static isParentTypeEqualsKey(key: string, parentType: string): boolean {
    return ACCOUNT_TYPES.some((type) => {
      const isType = type.key === key;
      const isParentType = type.parentType === parentType;

      return isType && isParentType;
    });
  }

  /**
   * Determines whether the account type appears on the balance sheet.
   * @param {string} key - Account type key.
   */
  static isTypeBalanceSheet(key: string): boolean {
    return ACCOUNT_TYPES.some((type) => {
      const isType = type.key === key;
      return isType && type.balanceSheet;
    });
  }

  /**
   * Determines whether the account type appears on the profit/loss sheet.
   * @param {string} key - Account type key.
   */
  static isTypePLSheet(key: string): boolean {
    return ACCOUNT_TYPES.some((type) => {
      const isType = type.key === key;
      return isType && type.incomeSheet;
    });
  }
}
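A quick usage sketch of the utility above; the 'fixed_asset' and 'asset' keys and the shape of the ACCOUNT_TYPES entries are assumptions for illustration, not values confirmed by this diff:

import AccountTypesUtils from '@/lib/AccountTypes';

// Look up one type definition by its key.
const type = AccountTypesUtils.getType('fixed_asset');

// Pass an accessor path to read a single property of the matched type.
const normal = AccountTypesUtils.getType('fixed_asset', 'normal');

// Group types by root type or by parent type.
const assetTypes = AccountTypesUtils.getTypesByRootType('asset');
const fixedAssetChildren = AccountTypesUtils.getTypesByParentType('fixed_asset');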
packages/server/src/lib/Cachable/CachableModel.js (new file, 16 lines)
@@ -0,0 +1,16 @@
import BaseModel from 'models/Model';
import CacheService from '@/services/Cache';

export default (Model) => {
  return class CachableModel extends Model {
    static flushCache(key) {
      const modelName = this.name;

      if (key) {
        CacheService.del(`${modelName}.${key}`);
      } else {
        CacheService.delStartWith(modelName);
      }
    }
  };
};
packages/server/src/lib/Cachable/CachableQueryBuilder.js (new file, 69 lines)
@@ -0,0 +1,69 @@
import { QueryBuilder } from 'objection';
import crypto from 'crypto';
import CacheService from '@/services/Cache';

export default class CachableQueryBuilder extends QueryBuilder {

  async then(...args) {
    // Flush the model cache after insert, delete or update transactions.
    if (this.isInsert() || this.isDelete() || this.isUpdate()) {
      this.modelClass().flushCache();
    }
    if (this.cacheTag && this.isFind()) {
      this.setCacheKey();
      return this.getOrStoreCache().then(...args);
    } else {
      const promise = this.execute();

      return promise.then((result) => {
        this.setCache(result);
        return result;
      }).then(...args);
    }
  }

  getOrStoreCache() {
    const storeFunction = () => this.execute();

    return new Promise((resolve, reject) => {
      CacheService.get(this.cacheKey, storeFunction)
        .then((result) => { resolve(result); });
    });
  }

  setCache(results) {
    CacheService.set(`${this.cacheKey}`, results, this.cacheSeconds);
  }

  generateCacheKey() {
    const knexSql = this.toKnexQuery().toSQL();
    const hashedQuery = crypto.createHash('md5').update(knexSql.sql).digest('hex');

    return hashedQuery;
  }

  remember(key, seconds) {
    const modelName = this.modelClass().name;

    this.cacheSeconds = seconds;
    this.cacheTag = (key) ? `${modelName}.${key}` : modelName;

    return this;
  }

  withGraphFetched(relation, settings) {
    if (!this.graphAppends) {
      this.graphAppends = [relation];
    } else {
      this.graphAppends.push(relation);
    }
    return super.withGraphFetched(relation, settings);
  }

  setCacheKey() {
    const hashedQuery = this.generateCacheKey();
    // Array#join takes only a separator argument.
    const appends = (this.graphAppends || []).join(',');

    this.cacheKey = `${this.cacheTag}.${hashedQuery}.${appends}`;
  }
}
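A sketch of how the two Cachable pieces above could be wired into an Objection model; the Account model, its table name, and the cache tag are hypothetical:

import { Model } from 'objection';
import CachableModel from '@/lib/Cachable/CachableModel';
import CachableQueryBuilder from '@/lib/Cachable/CachableQueryBuilder';

class Account extends CachableModel(Model) {
  static get tableName() {
    return 'accounts';
  }
  // Objection picks up the custom query builder from this static getter.
  static get QueryBuilder() {
    return CachableQueryBuilder;
  }
}

// Cache the result of this find query for 60 seconds under the `Account.list` tag;
// any insert/update/delete through the same builder flushes the model's cache.
const accounts = await Account.query().remember('list', 60);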
packages/server/src/lib/DependencyGraph/index.js (new file, 350 lines)
@@ -0,0 +1,350 @@
|
||||
/**
|
||||
* A simple dependency graph
|
||||
*/
|
||||
|
||||
/**
|
||||
* Helper for creating a Topological Sort using Depth-First-Search on a set of edges.
|
||||
*
|
||||
* Detects cycles and throws an Error if one is detected (unless the "circular"
|
||||
* parameter is "true" in which case it ignores them).
|
||||
*
|
||||
* @param edges The set of edges to DFS through
|
||||
* @param leavesOnly Whether to only return "leaf" nodes (ones who have no edges)
|
||||
* @param result An array in which the results will be populated
|
||||
* @param circular A boolean to allow circular dependencies
|
||||
*/
|
||||
function createDFS(edges, leavesOnly, result, circular) {
|
||||
var visited = {};
|
||||
return function (start) {
|
||||
if (visited[start]) {
|
||||
return;
|
||||
}
|
||||
var inCurrentPath = {};
|
||||
var currentPath = [];
|
||||
var todo = []; // used as a stack
|
||||
todo.push({ node: start, processed: false });
|
||||
while (todo.length > 0) {
|
||||
var current = todo[todo.length - 1]; // peek at the todo stack
|
||||
var processed = current.processed;
|
||||
var node = current.node;
|
||||
if (!processed) {
|
||||
// Haven't visited edges yet (visiting phase)
|
||||
if (visited[node]) {
|
||||
todo.pop();
|
||||
continue;
|
||||
} else if (inCurrentPath[node]) {
|
||||
// It's not a DAG
|
||||
if (circular) {
|
||||
todo.pop();
|
||||
// If we're tolerating cycles, don't revisit the node
|
||||
continue;
|
||||
}
|
||||
currentPath.push(node);
|
||||
throw new DepGraphCycleError(currentPath);
|
||||
}
|
||||
|
||||
inCurrentPath[node] = true;
|
||||
currentPath.push(node);
|
||||
var nodeEdges = edges[node];
|
||||
// (push edges onto the todo stack in reverse order to be order-compatible with the old DFS implementation)
|
||||
for (var i = nodeEdges.length - 1; i >= 0; i--) {
|
||||
todo.push({ node: nodeEdges[i], processed: false });
|
||||
}
|
||||
current.processed = true;
|
||||
} else {
|
||||
// Have visited edges (stack unrolling phase)
|
||||
todo.pop();
|
||||
currentPath.pop();
|
||||
inCurrentPath[node] = false;
|
||||
visited[node] = true;
|
||||
if (!leavesOnly || edges[node].length === 0) {
|
||||
result.push(node);
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Simple Dependency Graph
|
||||
*/
|
||||
var DepGraph = (DepGraph = function DepGraph(opts) {
|
||||
this.nodes = {}; // Node -> Node/Data (treated like a Set)
|
||||
this.outgoingEdges = {}; // Node -> [Dependency Node]
|
||||
this.incomingEdges = {}; // Node -> [Dependant Node]
|
||||
this.circular = opts && !!opts.circular; // Allows circular deps
|
||||
});
|
||||
|
||||
DepGraph.fromArray = (
|
||||
items,
|
||||
options = { itemId: 'id', parentItemId: 'parent_id' }
|
||||
) => {
|
||||
const depGraph = new DepGraph();
|
||||
|
||||
items.forEach((item) => {
|
||||
depGraph.addNode(item[options.itemId], item);
|
||||
});
|
||||
items.forEach((item) => {
|
||||
if (item[options.parentItemId]) {
|
||||
depGraph.addDependency(item[options.parentItemId], item[options.itemId]);
|
||||
}
|
||||
});
|
||||
return depGraph;
|
||||
};
|
||||
|
||||
DepGraph.prototype = {
|
||||
/**
|
||||
* The number of nodes in the graph.
|
||||
*/
|
||||
size: function () {
|
||||
return Object.keys(this.nodes).length;
|
||||
},
|
||||
/**
|
||||
* Add a node to the dependency graph. If a node already exists, this method will do nothing.
|
||||
*/
|
||||
addNode: function (node, data) {
|
||||
if (!this.hasNode(node)) {
|
||||
// Checking the arguments length allows the user to add a node with undefined data
|
||||
if (arguments.length === 2) {
|
||||
this.nodes[node] = data;
|
||||
} else {
|
||||
this.nodes[node] = node;
|
||||
}
|
||||
this.outgoingEdges[node] = [];
|
||||
this.incomingEdges[node] = [];
|
||||
}
|
||||
},
|
||||
/**
|
||||
* Remove a node from the dependency graph. If a node does not exist, this method will do nothing.
|
||||
*/
|
||||
removeNode: function (node) {
|
||||
if (this.hasNode(node)) {
|
||||
delete this.nodes[node];
|
||||
delete this.outgoingEdges[node];
|
||||
delete this.incomingEdges[node];
|
||||
[this.incomingEdges, this.outgoingEdges].forEach(function (edgeList) {
|
||||
Object.keys(edgeList).forEach(function (key) {
|
||||
var idx = edgeList[key].indexOf(node);
|
||||
if (idx >= 0) {
|
||||
edgeList[key].splice(idx, 1);
|
||||
}
|
||||
}, this);
|
||||
});
|
||||
}
|
||||
},
|
||||
/**
|
||||
* Check if a node exists in the graph
|
||||
*/
|
||||
hasNode: function (node) {
|
||||
return this.nodes.hasOwnProperty(node);
|
||||
},
|
||||
/**
|
||||
* Get the data associated with a node name
|
||||
*/
|
||||
getNodeData: function (node) {
|
||||
if (this.hasNode(node)) {
|
||||
return this.nodes[node];
|
||||
} else {
|
||||
throw new Error('Node does not exist: ' + node);
|
||||
}
|
||||
},
|
||||
|
||||
/**
|
||||
* Set the associated data for a given node name. If the node does not exist, this method will throw an error
|
||||
*/
|
||||
setNodeData: function (node, data) {
|
||||
if (this.hasNode(node)) {
|
||||
this.nodes[node] = data;
|
||||
} else {
|
||||
throw new Error('Node does not exist: ' + node);
|
||||
}
|
||||
},
|
||||
/**
|
||||
* Add a dependency between two nodes. If either of the nodes does not exist,
|
||||
* an Error will be thrown.
|
||||
*/
|
||||
addDependency: function (from, to) {
|
||||
if (!this.hasNode(from)) {
|
||||
throw new Error('Node does not exist: ' + from);
|
||||
}
|
||||
if (!this.hasNode(to)) {
|
||||
throw new Error('Node does not exist: ' + to);
|
||||
}
|
||||
if (this.outgoingEdges[from].indexOf(to) === -1) {
|
||||
this.outgoingEdges[from].push(to);
|
||||
}
|
||||
if (this.incomingEdges[to].indexOf(from) === -1) {
|
||||
this.incomingEdges[to].push(from);
|
||||
}
|
||||
return true;
|
||||
},
|
||||
/**
|
||||
* Remove a dependency between two nodes.
|
||||
*/
|
||||
removeDependency: function (from, to) {
|
||||
var idx;
|
||||
if (this.hasNode(from)) {
|
||||
idx = this.outgoingEdges[from].indexOf(to);
|
||||
if (idx >= 0) {
|
||||
this.outgoingEdges[from].splice(idx, 1);
|
||||
}
|
||||
}
|
||||
|
||||
if (this.hasNode(to)) {
|
||||
idx = this.incomingEdges[to].indexOf(from);
|
||||
if (idx >= 0) {
|
||||
this.incomingEdges[to].splice(idx, 1);
|
||||
}
|
||||
}
|
||||
},
|
||||
/**
|
||||
* Return a clone of the dependency graph. If any custom data is attached
|
||||
* to the nodes, it will only be shallow copied.
|
||||
*/
|
||||
clone: function () {
|
||||
var source = this;
|
||||
var result = new DepGraph();
|
||||
var keys = Object.keys(source.nodes);
|
||||
keys.forEach(function (n) {
|
||||
result.nodes[n] = source.nodes[n];
|
||||
result.outgoingEdges[n] = source.outgoingEdges[n].slice(0);
|
||||
result.incomingEdges[n] = source.incomingEdges[n].slice(0);
|
||||
});
|
||||
return result;
|
||||
},
|
||||
/**
|
||||
* Get an array containing the nodes that the specified node depends on (transitively).
|
||||
*
|
||||
* Throws an Error if the graph has a cycle, or the specified node does not exist.
|
||||
*
|
||||
* If `leavesOnly` is true, only nodes that do not depend on any other nodes will be returned
|
||||
* in the array.
|
||||
*/
|
||||
dependenciesOf: function (node, leavesOnly) {
|
||||
if (this.hasNode(node)) {
|
||||
var result = [];
|
||||
var DFS = createDFS(
|
||||
this.outgoingEdges,
|
||||
leavesOnly,
|
||||
result,
|
||||
this.circular
|
||||
);
|
||||
DFS(node);
|
||||
var idx = result.indexOf(node);
|
||||
if (idx >= 0) {
|
||||
result.splice(idx, 1);
|
||||
}
|
||||
return result;
|
||||
} else {
|
||||
throw new Error('Node does not exist: ' + node);
|
||||
}
|
||||
},
|
||||
/**
|
||||
* Get an array containing the nodes that depend on the specified node (transitively).
|
||||
*
|
||||
* Throws an Error if the graph has a cycle, or the specified node does not exist.
|
||||
*
|
||||
* If `leavesOnly` is true, only nodes that do not have any dependants will be returned in the array.
|
||||
*/
|
||||
dependantsOf: function (node, leavesOnly) {
|
||||
if (this.hasNode(node)) {
|
||||
var result = [];
|
||||
var DFS = createDFS(
|
||||
this.incomingEdges,
|
||||
leavesOnly,
|
||||
result,
|
||||
this.circular
|
||||
);
|
||||
DFS(node);
|
||||
var idx = result.indexOf(node);
|
||||
if (idx >= 0) {
|
||||
result.splice(idx, 1);
|
||||
}
|
||||
return result;
|
||||
} else {
|
||||
throw new Error('Node does not exist: ' + node);
|
||||
}
|
||||
},
|
||||
/**
|
||||
* Construct the overall processing order for the dependency graph.
|
||||
*
|
||||
* Throws an Error if the graph has a cycle.
|
||||
*
|
||||
* If `leavesOnly` is true, only nodes that do not depend on any other nodes will be returned.
|
||||
*/
|
||||
overallOrder: function (leavesOnly) {
|
||||
var self = this;
|
||||
var result = [];
|
||||
var keys = Object.keys(this.nodes);
|
||||
if (keys.length === 0) {
|
||||
return result; // Empty graph
|
||||
} else {
|
||||
if (!this.circular) {
|
||||
// Look for cycles - we run the DFS starting at all the nodes in case there
|
||||
// are several disconnected subgraphs inside this dependency graph.
|
||||
var CycleDFS = createDFS(this.outgoingEdges, false, [], this.circular);
|
||||
keys.forEach(function (n) {
|
||||
CycleDFS(n);
|
||||
});
|
||||
}
|
||||
|
||||
var DFS = createDFS(
|
||||
this.outgoingEdges,
|
||||
leavesOnly,
|
||||
result,
|
||||
this.circular
|
||||
);
|
||||
// Find all potential starting points (nodes with nothing depending on them) and
|
||||
// run a DFS starting at these points to get the order
|
||||
keys
|
||||
.filter(function (node) {
|
||||
return self.incomingEdges[node].length === 0;
|
||||
})
|
||||
.forEach(function (n) {
|
||||
DFS(n);
|
||||
});
|
||||
|
||||
// If we're allowing cycles - we need to run the DFS against any remaining
|
||||
// nodes that did not end up in the initial result (as they are part of a
|
||||
// subgraph that does not have a clear starting point)
|
||||
if (this.circular) {
|
||||
keys
|
||||
.filter(function (node) {
|
||||
return result.indexOf(node) === -1;
|
||||
})
|
||||
.forEach(function (n) {
|
||||
DFS(n);
|
||||
});
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
},
|
||||
|
||||
mapNodes(mapper) {},
|
||||
};
|
||||
|
||||
/**
|
||||
* Cycle error, including the path of the cycle.
|
||||
*/
|
||||
var DepGraphCycleError = (exports.DepGraphCycleError = function (cyclePath) {
|
||||
var message = 'Dependency Cycle Found: ' + cyclePath.join(' -> ');
|
||||
var instance = new Error(message);
|
||||
instance.cyclePath = cyclePath;
|
||||
Object.setPrototypeOf(instance, Object.getPrototypeOf(this));
|
||||
if (Error.captureStackTrace) {
|
||||
Error.captureStackTrace(instance, DepGraphCycleError);
|
||||
}
|
||||
return instance;
|
||||
});
|
||||
DepGraphCycleError.prototype = Object.create(Error.prototype, {
|
||||
constructor: {
|
||||
value: Error,
|
||||
enumerable: false,
|
||||
writable: true,
|
||||
configurable: true,
|
||||
},
|
||||
});
|
||||
Object.setPrototypeOf(DepGraphCycleError, Error);
|
||||
|
||||
export default DepGraph;
|
||||
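A minimal sketch of the dependency graph above applied to a flat, parent-linked list; the account rows are made up for illustration:

import DepGraph from '@/lib/DependencyGraph';

const accounts = [
  { id: 1, parent_id: null, name: 'Assets' },
  { id: 2, parent_id: 1, name: 'Current Assets' },
  { id: 3, parent_id: 2, name: 'Bank' },
];
// fromArray() adds every item as a node, then links each child to its parent.
const graph = DepGraph.fromArray(accounts);

graph.dependenciesOf(1); // => [3, 2] (deepest children first)
graph.getNodeData(3);    // => { id: 3, parent_id: 2, name: 'Bank' }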
packages/server/src/lib/DynamicFilter/DynamicFilter.ts (new file, 91 lines)
@@ -0,0 +1,91 @@
|
||||
import { forEach, uniqBy } from 'lodash';
|
||||
import DynamicFilterAbstructor from './DynamicFilterAbstructor';
|
||||
import { IDynamicFilter, IFilterRole, IModel } from '@/interfaces';
|
||||
|
||||
export default class DynamicFilter extends DynamicFilterAbstructor {
|
||||
private model: IModel;
|
||||
private tableName: string;
|
||||
private dynamicFilters: IDynamicFilter[];
|
||||
|
||||
/**
|
||||
* Constructor.
|
||||
* @param {String} tableName -
|
||||
*/
|
||||
constructor(model) {
|
||||
super();
|
||||
|
||||
this.model = model;
|
||||
this.tableName = model.tableName;
|
||||
this.dynamicFilters = [];
|
||||
}
|
||||
|
||||
/**
|
||||
* Registers the given dynamic filter.
|
||||
* @param {IDynamicFilter} filterRole - Filter role.
|
||||
*/
|
||||
public setFilter = (dynamicFilter: IDynamicFilter) => {
|
||||
dynamicFilter.setModel(this.model);
|
||||
|
||||
dynamicFilter.onInitialize();
|
||||
|
||||
this.dynamicFilters.push(dynamicFilter);
|
||||
}
|
||||
|
||||
/**
|
||||
* Retrieve dynamic filter build queries.
|
||||
* @returns
|
||||
*/
|
||||
private dynamicFiltersBuildQuery = () => {
|
||||
return this.dynamicFilters.map((filter) => {
|
||||
return filter.buildQuery()
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Retrieve dynamic filter roles.
|
||||
* @returns {IFilterRole[]}
|
||||
*/
|
||||
private dynamicFilterTableColumns = (): IFilterRole[] => {
|
||||
const localFilterRoles = [];
|
||||
|
||||
this.dynamicFilters.forEach((dynamicFilter) => {
|
||||
const { filterRoles } = dynamicFilter;
|
||||
|
||||
localFilterRoles.push(
|
||||
...(Array.isArray(filterRoles) ? filterRoles : [filterRoles])
|
||||
);
|
||||
});
|
||||
return localFilterRoles;
|
||||
}
|
||||
|
||||
/**
|
||||
* Builds queries of filter roles.
|
||||
*/
|
||||
public buildQuery = () => {
|
||||
const buildersCallbacks = this.dynamicFiltersBuildQuery();
|
||||
const tableColumns = this.dynamicFilterTableColumns();
|
||||
|
||||
return (builder) => {
|
||||
buildersCallbacks.forEach((builderCallback) => {
|
||||
builderCallback(builder);
|
||||
});
|
||||
this.buildFilterRolesJoins(builder);
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Retrieve response metadata from all filters adapters.
|
||||
*/
|
||||
public getResponseMeta = () => {
|
||||
const responseMeta = {};
|
||||
|
||||
this.dynamicFilters.forEach((filter) => {
|
||||
const { responseMeta: filterMeta } = filter;
|
||||
|
||||
forEach(filterMeta, (value, key) => {
|
||||
responseMeta[key] = value;
|
||||
});
|
||||
});
|
||||
return responseMeta;
|
||||
}
|
||||
}
|
||||
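A sketch of composing the DynamicFilter container above with one of the filter adapters; the Account model is hypothetical and must expose the field metadata (getField, relationMappings) the adapters expect:

import { DynamicFilter, DynamicFilterSortBy } from '@/lib/DynamicFilter';

const dynamicFilter = new DynamicFilter(Account);
dynamicFilter.setFilter(new DynamicFilterSortBy('name', 'ASC'));

// buildQuery() returns a callback that applies every registered filter to a query builder.
const accounts = await Account.query().modify(dynamicFilter.buildQuery());

// Merged metadata of all registered filters.
const meta = dynamicFilter.getResponseMeta(); // => { sortOrder: 'name', sortBy: 'ASC' }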
@@ -0,0 +1,50 @@
|
||||
|
||||
export default class DynamicFilterAbstructor {
|
||||
/**
|
||||
* Extract relation table name from relation.
|
||||
* @param {String} column -
|
||||
* @return {String} - join relation table.
|
||||
*/
|
||||
protected getTableFromRelationColumn = (column: string) => {
|
||||
const splitedColumn = column.split('.');
|
||||
return splitedColumn.length > 0 ? splitedColumn[0] : '';
|
||||
};
|
||||
|
||||
/**
|
||||
* Builds view roles join queries.
|
||||
* @param {String} tableName - Table name.
|
||||
* @param {Array} roles - Roles.
|
||||
*/
|
||||
protected buildFilterRolesJoins = (builder) => {
|
||||
this.dynamicFilters.forEach((dynamicFilter) => {
|
||||
const relationsFields = dynamicFilter.relationFields;
|
||||
|
||||
this.buildFieldsJoinQueries(builder, relationsFields);
|
||||
});
|
||||
};
|
||||
|
||||
/**
|
||||
* Builds join queries of fields.
|
||||
* @param builder -
|
||||
* @param {string[]} fieldsRelations -
|
||||
*/
|
||||
private buildFieldsJoinQueries = (builder, fieldsRelations: string[]) => {
|
||||
fieldsRelations.forEach((fieldRelation) => {
|
||||
const relation = this.model.relationMappings[fieldRelation];
|
||||
|
||||
if (relation) {
|
||||
const splitToRelation = relation.join.to.split('.');
|
||||
const relationTable = splitToRelation[0] || '';
|
||||
|
||||
builder.join(relationTable, relation.join.from, '=', relation.join.to);
|
||||
}
|
||||
});
|
||||
};
|
||||
|
||||
/**
|
||||
* Retrieve the dynamic filter mode.
|
||||
*/
|
||||
protected getModel() {
|
||||
return this.model;
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,27 @@
import { IFilterRole } from '@/interfaces';
import DynamicFilterFilterRoles from './DynamicFilterFilterRoles';

export default class DynamicFilterAdvancedFilter extends DynamicFilterFilterRoles {
  private filterRoles: IFilterRole[];

  /**
   * Constructor method.
   * @param {IFilterRole[]} filterRoles -
   */
  constructor(filterRoles: IFilterRole[]) {
    super();

    this.filterRoles = filterRoles;
    this.setResponseMeta();
  }

  /**
   * Sets response meta.
   */
  private setResponseMeta() {
    this.responseMeta = {
      filterRoles: this.filterRoles,
    };
  }
}
@@ -0,0 +1,52 @@
|
||||
import DynamicFilterRoleAbstructor from './DynamicFilterRoleAbstructor';
|
||||
import { IFilterRole } from '@/interfaces';
|
||||
|
||||
export default class FilterRoles extends DynamicFilterRoleAbstructor {
|
||||
private filterRoles: IFilterRole[];
|
||||
|
||||
/**
|
||||
* On initialize filter roles.
|
||||
*/
|
||||
public onInitialize() {
|
||||
super.onInitialize();
|
||||
this.setFilterRolesRelations();
|
||||
}
|
||||
|
||||
/**
|
||||
* Builds filter roles logic expression.
|
||||
* @return {string}
|
||||
*/
|
||||
private buildLogicExpression(): string {
|
||||
let expression = '';
|
||||
|
||||
this.filterRoles.forEach((role, index) => {
|
||||
expression +=
|
||||
index === 0 ? `${role.index} ` : `${role.condition} ${role.index} `;
|
||||
});
|
||||
return expression.trim();
|
||||
}
|
||||
|
||||
/**
|
||||
* Builds the database query of filter roles.
|
||||
*/
|
||||
protected buildQuery() {
|
||||
const logicExpression = this.buildLogicExpression();
|
||||
|
||||
return (builder) => {
|
||||
this.buildFilterQuery(
|
||||
this.model,
|
||||
this.filterRoles,
|
||||
logicExpression
|
||||
)(builder);
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets filter roles relations if field was relation type.
|
||||
*/
|
||||
private setFilterRolesRelations() {
|
||||
this.filterRoles.forEach((relationRole) => {
|
||||
this.setRelationIfRelationField(relationRole.fieldKey);
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,72 @@
|
||||
import { OPERATION } from '../LogicEvaluation/Parser';
|
||||
|
||||
export default class QueryParser {
|
||||
constructor(tree, queries) {
|
||||
this.tree = tree;
|
||||
this.queries = queries;
|
||||
this.query = null;
|
||||
}
|
||||
|
||||
setQuery(query) {
|
||||
this.query = query.clone();
|
||||
}
|
||||
|
||||
parse() {
|
||||
return this.parseNode(this.tree);
|
||||
}
|
||||
|
||||
parseNode(node) {
|
||||
if (typeof node === 'string') {
|
||||
const nodeQuery = this.getQuery(node);
|
||||
return (query) => {
|
||||
nodeQuery(query);
|
||||
};
|
||||
}
|
||||
if (OPERATION[node.operation] === undefined) {
|
||||
throw new Error(`unknown expression ${node.operation}`);
|
||||
}
|
||||
const leftQuery = this.getQuery(node.left);
|
||||
const rightQuery = this.getQuery(node.right);
|
||||
|
||||
switch (node.operation) {
|
||||
case '&&':
|
||||
case 'AND':
|
||||
default:
|
||||
return (nodeQuery) =>
|
||||
nodeQuery.where((query) => {
|
||||
query.where((q) => {
|
||||
leftQuery(q);
|
||||
});
|
||||
query.andWhere((q) => {
|
||||
rightQuery(q);
|
||||
});
|
||||
});
|
||||
case '||':
|
||||
case 'OR':
|
||||
return (nodeQuery) =>
|
||||
nodeQuery.where((query) => {
|
||||
query.where((q) => {
|
||||
leftQuery(q);
|
||||
});
|
||||
query.orWhere((q) => {
|
||||
rightQuery(q);
|
||||
});
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
getQuery(node) {
|
||||
if (typeof node !== 'string' && node !== null) {
|
||||
return this.parseNode(node);
|
||||
}
|
||||
const value = parseFloat(node);
|
||||
|
||||
if (!isNaN(value)) {
|
||||
if (typeof this.queries[node] === 'undefined') {
|
||||
throw new Error(`unknown query under index ${node}`);
|
||||
}
|
||||
return this.queries[node];
|
||||
}
|
||||
return null;
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,387 @@
|
||||
import moment from 'moment';
|
||||
import * as R from 'ramda';
|
||||
import { IFilterRole, IDynamicFilter, IModel } from '@/interfaces';
|
||||
import Parser from '../LogicEvaluation/Parser';
|
||||
import DynamicFilterQueryParser from './DynamicFilterQueryParser';
|
||||
import { Lexer } from '../LogicEvaluation/Lexer';
|
||||
import { COMPARATOR_TYPE, FIELD_TYPE } from './constants';
|
||||
|
||||
export default abstract class DynamicFilterAbstructor
|
||||
implements IDynamicFilter
|
||||
{
|
||||
protected filterRoles: IFilterRole[] = [];
|
||||
protected tableName: string;
|
||||
protected model: IModel;
|
||||
protected responseMeta: { [key: string]: any } = {};
|
||||
public relationFields = [];
|
||||
|
||||
/**
|
||||
* Sets model the dynamic filter service.
|
||||
* @param {IModel} model
|
||||
*/
|
||||
public setModel(model: IModel) {
|
||||
this.model = model;
|
||||
this.tableName = model.tableName;
|
||||
}
|
||||
|
||||
/**
|
||||
* Transforms filter roles to a map keyed by index.
|
||||
* @param {IModel} model
|
||||
* @param {IFilterRole[]} roles
|
||||
* @returns
|
||||
*/
|
||||
protected convertRolesMapByIndex = (model, roles) => {
|
||||
const rolesIndexSet = {};
|
||||
|
||||
roles.forEach((role) => {
|
||||
rolesIndexSet[role.index] = this.buildRoleQuery(model, role);
|
||||
});
|
||||
return rolesIndexSet;
|
||||
};
|
||||
|
||||
/**
|
||||
* Builds database query from stored view roles.
|
||||
* @param {Array} roles -
|
||||
* @return {Function}
|
||||
*/
|
||||
protected buildFilterRolesQuery = (
|
||||
model: IModel,
|
||||
roles: IFilterRole[],
|
||||
logicExpression: string = ''
|
||||
) => {
|
||||
const rolesIndexSet = this.convertRolesMapByIndex(model, roles);
|
||||
|
||||
// Lexer for logic expression.
|
||||
const lexer = new Lexer(logicExpression);
|
||||
const tokens = lexer.getTokens();
|
||||
|
||||
// Parse the logic expression.
|
||||
const parser = new Parser(tokens);
|
||||
const parsedTree = parser.parse();
|
||||
|
||||
const queryParser = new DynamicFilterQueryParser(parsedTree, rolesIndexSet);
|
||||
|
||||
return queryParser.parse();
|
||||
};
|
||||
|
||||
/**
|
||||
* Parses the logic expression to base expression.
|
||||
* @param {string} logicExpression -
|
||||
* @return {string}
|
||||
*/
|
||||
private parseLogicExpression(logicExpression: string): string {
|
||||
return R.compose(
|
||||
R.replace(/or|OR/g, '||'),
|
||||
R.replace(/and|AND/g, '&&'),
|
||||
)(logicExpression);
|
||||
}
|
||||
|
||||
/**
|
||||
* Builds filter query for query builder.
|
||||
* @param {String} tableName - Table name.
|
||||
* @param {Array} roles - Filter roles.
|
||||
* @param {String} logicExpression - Logic expression.
|
||||
*/
|
||||
protected buildFilterQuery = (
|
||||
model: IModel,
|
||||
roles: IFilterRole[],
|
||||
logicExpression: string
|
||||
) => {
|
||||
const basicExpression = this.parseLogicExpression(logicExpression);
|
||||
|
||||
return (builder) => {
|
||||
this.buildFilterRolesQuery(model, roles, basicExpression)(builder);
|
||||
};
|
||||
};
|
||||
|
||||
/**
|
||||
* Retrieve the relation column of the comparator field.
|
||||
*/
|
||||
private getFieldComparatorRelationColumn(field) {
|
||||
const relation = this.model.relationMappings[field.relationKey];
|
||||
|
||||
if (relation) {
|
||||
const relationModel = relation.modelClass;
|
||||
const relationColumn =
|
||||
field.relationEntityKey === 'id'
|
||||
? 'id'
|
||||
: relationModel.getField(field.relationEntityKey, 'column');
|
||||
|
||||
return `${relationModel.tableName}.${relationColumn}`;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Retrieve the comparator field column.
|
||||
* @param {IModel} model -
|
||||
* @param {} -
|
||||
*/
|
||||
private getFieldComparatorColumn = (field) => {
|
||||
return field.fieldType === FIELD_TYPE.RELATION
|
||||
? this.getFieldComparatorRelationColumn(field)
|
||||
: `${this.tableName}.${field.column}`;
|
||||
};
|
||||
|
||||
/**
|
||||
* Builds roles queries.
|
||||
* @param {IModel} model -
|
||||
* @param {Object} role -
|
||||
*/
|
||||
protected buildRoleQuery = (model: IModel, role: IFilterRole) => {
|
||||
const field = model.getField(role.fieldKey);
|
||||
const comparatorColumn = this.getFieldComparatorColumn(field);
|
||||
|
||||
// Field relation custom query.
|
||||
if (typeof field.filterCustomQuery !== 'undefined') {
|
||||
return (builder) => {
|
||||
field.filterCustomQuery(builder, role);
|
||||
};
|
||||
}
|
||||
switch (field.fieldType) {
|
||||
case FIELD_TYPE.BOOLEAN:
|
||||
case FIELD_TYPE.ENUMERATION:
|
||||
return this.booleanRoleQueryBuilder(role, comparatorColumn);
|
||||
case FIELD_TYPE.NUMBER:
|
||||
return this.numberRoleQueryBuilder(role, comparatorColumn);
|
||||
case FIELD_TYPE.DATE:
|
||||
return this.dateQueryBuilder(role, comparatorColumn);
|
||||
case FIELD_TYPE.TEXT:
|
||||
default:
|
||||
return this.textRoleQueryBuilder(role, comparatorColumn);
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Boolean column query builder.
|
||||
* @param {IFilterRole} role
|
||||
* @param {string} comparatorColumn
|
||||
* @returns
|
||||
*/
|
||||
protected booleanRoleQueryBuilder = (
|
||||
role: IFilterRole,
|
||||
comparatorColumn: string
|
||||
) => {
|
||||
switch (role.comparator) {
|
||||
case COMPARATOR_TYPE.EQUALS:
|
||||
case COMPARATOR_TYPE.EQUAL:
|
||||
case COMPARATOR_TYPE.IS:
|
||||
default:
|
||||
return (builder) => {
|
||||
builder.where(comparatorColumn, '=', role.value);
|
||||
};
|
||||
case COMPARATOR_TYPE.NOT_EQUAL:
|
||||
case COMPARATOR_TYPE.NOT_EQUALS:
|
||||
case COMPARATOR_TYPE.IS_NOT:
|
||||
return (builder) => {
|
||||
builder.where(comparatorColumn, '<>', role.value);
|
||||
};
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Numeric column query builder.
|
||||
* @param {IFilterRole} role
|
||||
* @param {string} comparatorColumn
|
||||
* @returns
|
||||
*/
|
||||
protected numberRoleQueryBuilder = (
|
||||
role: IFilterRole,
|
||||
comparatorColumn: string
|
||||
) => {
|
||||
switch (role.comparator) {
|
||||
case COMPARATOR_TYPE.EQUALS:
|
||||
case COMPARATOR_TYPE.EQUAL:
|
||||
default:
|
||||
return (builder) => {
|
||||
builder.where(comparatorColumn, '=', role.value);
|
||||
};
|
||||
case COMPARATOR_TYPE.NOT_EQUAL:
|
||||
case COMPARATOR_TYPE.NOT_EQUALS:
|
||||
return (builder) => {
|
||||
builder.whereNot(comparatorColumn, role.value);
|
||||
};
|
||||
case COMPARATOR_TYPE.BIGGER_THAN:
|
||||
case COMPARATOR_TYPE.BIGGER:
|
||||
return (builder) => {
|
||||
builder.where(comparatorColumn, '>', role.value);
|
||||
};
|
||||
case COMPARATOR_TYPE.BIGGER_OR_EQUALS:
|
||||
return (builder) => {
|
||||
builder.where(comparatorColumn, '>=', role.value);
|
||||
};
|
||||
case COMPARATOR_TYPE.SMALLER_THAN:
|
||||
case COMPARATOR_TYPE.SMALLER:
|
||||
return (builder) => {
|
||||
builder.where(comparatorColumn, '<', role.value);
|
||||
};
|
||||
case COMPARATOR_TYPE.SMALLER_OR_EQUALS:
|
||||
return (builder) => {
|
||||
builder.where(comparatorColumn, '<=', role.value);
|
||||
};
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Text column query builder.
|
||||
* @param {IFilterRole} role
|
||||
* @param {string} comparatorColumn
|
||||
* @returns {Function}
|
||||
*/
|
||||
protected textRoleQueryBuilder = (
|
||||
role: IFilterRole,
|
||||
comparatorColumn: string
|
||||
) => {
|
||||
switch (role.comparator) {
|
||||
case COMPARATOR_TYPE.EQUAL:
|
||||
case COMPARATOR_TYPE.EQUALS:
|
||||
case COMPARATOR_TYPE.IS:
|
||||
default:
|
||||
return (builder) => {
|
||||
builder.where(comparatorColumn, role.value);
|
||||
};
|
||||
case COMPARATOR_TYPE.NOT_EQUALS:
|
||||
case COMPARATOR_TYPE.NOT_EQUAL:
|
||||
case COMPARATOR_TYPE.IS_NOT:
|
||||
return (builder) => {
|
||||
builder.whereNot(comparatorColumn, role.value);
|
||||
};
|
||||
case COMPARATOR_TYPE.CONTAIN:
|
||||
case COMPARATOR_TYPE.CONTAINS:
|
||||
return (builder) => {
|
||||
builder.where(comparatorColumn, 'LIKE', `%${role.value}%`);
|
||||
};
|
||||
case COMPARATOR_TYPE.NOT_CONTAIN:
|
||||
case COMPARATOR_TYPE.NOT_CONTAINS:
|
||||
return (builder) => {
|
||||
builder.whereNot(comparatorColumn, 'LIKE', `%${role.value}%`);
|
||||
};
|
||||
case COMPARATOR_TYPE.STARTS_WITH:
|
||||
case COMPARATOR_TYPE.START_WITH:
|
||||
return (builder) => {
|
||||
builder.where(comparatorColumn, 'LIKE', `${role.value}%`);
|
||||
};
|
||||
case COMPARATOR_TYPE.ENDS_WITH:
|
||||
case COMPARATOR_TYPE.END_WITH:
|
||||
return (builder) => {
|
||||
builder.where(comparatorColumn, 'LIKE', `%${role.value}`);
|
||||
};
|
||||
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Date column query builder.
|
||||
* @param {IFilterRole} role
|
||||
* @param {string} comparatorColumn
|
||||
* @returns {Function}
|
||||
*/
|
||||
protected dateQueryBuilder = (
|
||||
role: IFilterRole,
|
||||
comparatorColumn: string
|
||||
) => {
|
||||
switch (role.comparator) {
|
||||
case COMPARATOR_TYPE.AFTER:
|
||||
case COMPARATOR_TYPE.BEFORE:
|
||||
return (builder) => {
|
||||
this.dateQueryAfterBeforeComparator(role, comparatorColumn, builder);
|
||||
};
|
||||
case COMPARATOR_TYPE.IN:
|
||||
return (builder) => {
|
||||
this.dateQueryInComparator(role, comparatorColumn, builder);
|
||||
};
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Date query 'IN' comparator type.
|
||||
* @param {IFilterRole} role
|
||||
* @param {string} comparatorColumn
|
||||
* @param builder
|
||||
*/
|
||||
protected dateQueryInComparator = (
|
||||
role: IFilterRole,
|
||||
comparatorColumn: string,
|
||||
builder
|
||||
) => {
|
||||
const hasTimeFormat = moment(
|
||||
role.value,
|
||||
'YYYY-MM-DD HH:MM',
|
||||
true
|
||||
).isValid();
|
||||
const dateFormat = 'YYYY-MM-DD HH:MM:SS';
|
||||
|
||||
if (hasTimeFormat) {
|
||||
const targetDateTime = moment(role.value).format(dateFormat);
|
||||
builder.where(comparatorColumn, '=', targetDateTime);
|
||||
} else {
|
||||
const startDate = moment(role.value).startOf('day');
|
||||
const endDate = moment(role.value).endOf('day');
|
||||
|
||||
builder.where(comparatorColumn, '>=', startDate.format(dateFormat));
|
||||
builder.where(comparatorColumn, '<=', endDate.format(dateFormat));
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Date query after/before comparator type.
|
||||
* @param {IFilterRole} role
|
||||
* @param {string} comparatorColumn - Column.
|
||||
* @param builder
|
||||
*/
|
||||
protected dateQueryAfterBeforeComparator = (
|
||||
role: IFilterRole,
|
||||
comparatorColumn: string,
|
||||
builder
|
||||
) => {
|
||||
const comparator = role.comparator === COMPARATOR_TYPE.BEFORE ? '<' : '>';
|
||||
const hasTimeFormat = moment(
|
||||
role.value,
|
||||
'YYYY-MM-DD HH:MM',
|
||||
true
|
||||
).isValid();
|
||||
const targetDate = moment(role.value);
|
||||
const dateFormat = 'YYYY-MM-DD HH:MM:SS';
|
||||
|
||||
if (!hasTimeFormat) {
|
||||
if (role.comparator === COMPARATOR_TYPE.BEFORE) {
|
||||
targetDate.startOf('day');
|
||||
} else {
|
||||
targetDate.endOf('day');
|
||||
}
|
||||
}
|
||||
const comparatorValue = targetDate.format(dateFormat);
|
||||
builder.where(comparatorColumn, comparator, comparatorValue);
|
||||
};
|
||||
|
||||
/**
|
||||
* Registers relation field if the given field was relation type
|
||||
* and not registered.
|
||||
* @param {string} fieldKey - Field key.
|
||||
*/
|
||||
protected setRelationIfRelationField = (fieldKey: string): void => {
|
||||
const field = this.model.getField(fieldKey);
|
||||
const isAlreadyRegistered = this.relationFields.some(
|
||||
(field) => field === fieldKey
|
||||
);
|
||||
|
||||
if (
|
||||
!isAlreadyRegistered &&
|
||||
field &&
|
||||
field.fieldType === FIELD_TYPE.RELATION
|
||||
) {
|
||||
this.relationFields.push(field.relationKey);
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Retrieve the model.
|
||||
*/
|
||||
getModel() {
|
||||
return this.model;
|
||||
}
|
||||
|
||||
/**
|
||||
* On initialize the registered dynamic filter.
|
||||
*/
|
||||
onInitialize() {}
|
||||
}
|
||||
packages/server/src/lib/DynamicFilter/DynamicFilterSearch.ts (new file, 48 lines)
@@ -0,0 +1,48 @@
import { IFilterRole } from '@/interfaces';
import DynamicFilterFilterRoles from './DynamicFilterFilterRoles';

export default class DynamicFilterSearch extends DynamicFilterFilterRoles {
  private searchKeyword: string;
  private filterRoles: IFilterRole[];

  /**
   * Constructor method.
   * @param {string} searchKeyword - Search keyword.
   */
  constructor(searchKeyword: string) {
    super();
    this.searchKeyword = searchKeyword;
  }

  /**
   * On initialize the dynamic filter.
   */
  public onInitialize() {
    super.onInitialize();
    this.filterRoles = this.getModelSearchFilterRoles(this.searchKeyword);
  }

  /**
   * Retrieve the filter roles from model search roles.
   * @param {string} searchKeyword
   * @returns {IFilterRole[]}
   */
  private getModelSearchFilterRoles(searchKeyword: string): IFilterRole[] {
    const model = this.getModel();

    return model.searchRoles.map((searchRole, index) => ({
      ...searchRole,
      value: searchKeyword,
      index: index + 1,
    }));
  }

  /**
   * Sets response meta.
   */
  setResponseMeta() {
    this.responseMeta = {
      searchKeyword: this.searchKeyword,
    };
  }
}
packages/server/src/lib/DynamicFilter/DynamicFilterSortBy.ts (new file, 92 lines)
@@ -0,0 +1,92 @@
|
||||
import DynamicFilterRoleAbstructor from '@/lib/DynamicFilter/DynamicFilterRoleAbstructor';
|
||||
import { FIELD_TYPE } from './constants';
|
||||
|
||||
interface ISortRole {
|
||||
fieldKey: string;
|
||||
order: string;
|
||||
}
|
||||
|
||||
export default class DynamicFilterSortBy extends DynamicFilterRoleAbstructor {
|
||||
private sortRole: ISortRole = {};
|
||||
|
||||
/**
|
||||
* Constructor method.
|
||||
* @param {string} sortByFieldKey
|
||||
* @param {string} sortDirection
|
||||
*/
|
||||
constructor(sortByFieldKey: string, sortDirection: string) {
|
||||
super();
|
||||
|
||||
this.sortRole = {
|
||||
fieldKey: sortByFieldKey,
|
||||
order: sortDirection,
|
||||
};
|
||||
this.setResponseMeta();
|
||||
}
|
||||
|
||||
/**
|
||||
* On initialize the dynamic sort by.
|
||||
*/
|
||||
public onInitialize() {
|
||||
this.setRelationIfRelationField(this.sortRole.fieldKey);
|
||||
}
|
||||
|
||||
/**
|
||||
* Retrieve the field comparator relation column.
|
||||
* @param field
|
||||
* @returns {string}
|
||||
*/
|
||||
private getFieldComparatorRelationColumn = (field): string => {
|
||||
const relation = this.model.relationMappings[field.relationKey];
|
||||
|
||||
if (relation) {
|
||||
const relationModel = relation.modelClass;
|
||||
const relationField = relationModel.getField(field.relationEntityLabel);
|
||||
|
||||
return `${relationModel.tableName}.${relationField.column}`;
|
||||
}
|
||||
return '';
|
||||
};
|
||||
|
||||
/**
|
||||
* Retrieve the comparator field column.
|
||||
* @param {IModel} field
|
||||
* @returns {string}
|
||||
*/
|
||||
private getFieldComparatorColumn = (field): string => {
|
||||
return field.fieldType === FIELD_TYPE.RELATION
|
||||
? this.getFieldComparatorRelationColumn(field)
|
||||
: `${this.tableName}.${field.column}`;
|
||||
};
|
||||
|
||||
/**
|
||||
* Builds database query of sort by column on the given direction.
|
||||
*/
|
||||
public buildQuery = () => {
|
||||
const field = this.model.getField(this.sortRole.fieldKey);
|
||||
const comparatorColumn = this.getFieldComparatorColumn(field);
|
||||
|
||||
// Sort custom query.
|
||||
if (typeof field.sortCustomQuery !== 'undefined') {
|
||||
return (builder) => {
|
||||
field.sortCustomQuery(builder, this.sortRole);
|
||||
};
|
||||
}
|
||||
|
||||
return (builder) => {
|
||||
if (this.sortRole.fieldKey) {
|
||||
builder.orderBy(`${comparatorColumn}`, this.sortRole.order);
|
||||
}
|
||||
};
|
||||
};
|
||||
|
||||
/**
|
||||
* Sets response meta.
|
||||
*/
|
||||
public setResponseMeta() {
|
||||
this.responseMeta = {
|
||||
sortOrder: this.sortRole.fieldKey,
|
||||
sortBy: this.sortRole.order,
|
||||
};
|
||||
}
|
||||
}
|
||||
packages/server/src/lib/DynamicFilter/DynamicFilterViews.ts (new file, 56 lines)
@@ -0,0 +1,56 @@
import { omit } from 'lodash';
import { IView, IViewRole } from '@/interfaces';
import DynamicFilterRoleAbstructor from './DynamicFilterRoleAbstructor';

export default class DynamicFilterViews extends DynamicFilterRoleAbstructor {
  private viewSlug: string;
  private logicExpression: string;
  private filterRoles: IViewRole[];
  private viewColumns = [];

  /**
   * Constructor method.
   * @param {IView} view -
   */
  constructor(view: IView) {
    super();

    this.viewSlug = view.slug;
    this.filterRoles = view.roles;
    this.viewColumns = view.columns;
    this.logicExpression = view.rolesLogicExpression
      .replace('AND', '&&')
      .replace('OR', '||');

    this.setResponseMeta();
  }

  /**
   * Builds database query of view roles.
   */
  public buildQuery() {
    return (builder) => {
      this.buildFilterQuery(
        this.model,
        this.filterRoles,
        this.logicExpression
      )(builder);
    };
  }

  /**
   * Sets response meta.
   */
  public setResponseMeta() {
    this.responseMeta = {
      view: {
        logicExpression: this.logicExpression,
        filterRoles: this.filterRoles.map((filterRole) => ({
          ...omit(filterRole, ['id', 'viewId']),
        })),
        viewSlug: this.viewSlug,
        viewColumns: this.viewColumns,
      },
    };
  }
}
packages/server/src/lib/DynamicFilter/constants.ts (new file, 43 lines)
@@ -0,0 +1,43 @@
export const COMPARATOR_TYPE = {
  EQUAL: 'equal',
  EQUALS: 'equals',

  NOT_EQUAL: 'not_equal',
  NOT_EQUALS: 'not_equals',

  BIGGER_THAN: 'bigger_than',
  BIGGER: 'bigger',
  BIGGER_OR_EQUALS: 'bigger_or_equals',

  SMALLER_THAN: 'smaller_than',
  SMALLER: 'smaller',
  SMALLER_OR_EQUALS: 'smaller_or_equals',

  IS: 'is',
  IS_NOT: 'is_not',

  CONTAINS: 'contains',
  CONTAIN: 'contain',
  NOT_CONTAINS: 'not_contains',
  NOT_CONTAIN: 'not_contain',

  AFTER: 'after',
  BEFORE: 'before',
  IN: 'in',

  STARTS_WITH: 'starts_with',
  START_WITH: 'start_with',

  ENDS_WITH: 'ends_with',
  END_WITH: 'end_with',
};

export const FIELD_TYPE = {
  TEXT: 'text',
  NUMBER: 'number',
  ENUMERATION: 'enumeration',
  BOOLEAN: 'boolean',
  RELATION: 'relation',
  DATE: 'date',
  COMPUTED: 'computed',
};
packages/server/src/lib/DynamicFilter/index.ts (new file, 13 lines)
@@ -0,0 +1,13 @@
import DynamicFilter from './DynamicFilter';
import DynamicFilterSortBy from './DynamicFilterSortBy';
import DynamicFilterViews from './DynamicFilterViews';
import DynamicFilterFilterRoles from './DynamicFilterFilterRoles';

export {
  DynamicFilter,
  DynamicFilterSortBy,
  DynamicFilterViews,
  DynamicFilterFilterRoles,
};
packages/server/src/lib/EventPublisher/EventPublisher.ts (new file, 66 lines)
@@ -0,0 +1,66 @@
import { Container } from 'typedi';
import { EventEmitter2 } from 'eventemitter2';

interface IEventPublisherArgs {
  subscribers: EventSubscriber[];
}
class PublishEvent {
  constructor(public id: string) {}
}

type SubscribeListenerFunction = (event: PublishEvent) => void;
type SubscribeFunction = (id: string, cb: SubscribeListenerFunction) => void;

interface IEventBus {
  subscribe: SubscribeFunction;
}

export abstract class EventSubscriber {
  abstract attach(bus: IEventBus): void;
}

export class EventPublisher {
  private emitter: EventEmitter2;

  /**
   * Constructor method.
   */
  constructor() {
    this.emitter = new EventEmitter2({ wildcard: true, delimiter: '.' });
  }

  /**
   * Attaches the given subscribers to the internal event bus.
   * @param {EventSubscriber[]} subscribers
   */
  loadSubscribers(subscribers: EventSubscriber[]) {
    const bus: IEventBus = {
      subscribe: (id, cb) => {
        this.emitter.on(id, cb);
      },
    };
    for (const Subscriber of subscribers) {
      const subscriberInstance = Container.get(Subscriber);
      subscriberInstance.attach(bus);
    }
  }

  /**
   * Emits the given event with the given payload.
   * @param event
   * @param payload
   */
  emit(event: string, payload) {
    return this.emitter.emit(event, payload);
  }

  /**
   * Emits the given event and awaits async listeners.
   * @param event
   * @param payload
   */
  emitAsync(event: string, payload) {
    return this.emitter.emitAsync(event, payload);
  }
}
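A sketch of the publisher/subscriber flow above; the subscriber class and event name are hypothetical, and loadSubscribers is given subscriber classes that typedi's Container instantiates:

import { Service } from 'typedi';
import { EventPublisher, EventSubscriber } from '@/lib/EventPublisher/EventPublisher';

@Service()
class SaleInvoiceSubscriber extends EventSubscriber {
  attach(bus) {
    // The emitter is configured with wildcard: true and '.' as delimiter,
    // so patterns such as 'saleInvoice.*' would also work here.
    bus.subscribe('saleInvoice.created', (payload) => {
      console.log('Sale invoice created:', payload);
    });
  }
}

const publisher = new EventPublisher();
publisher.loadSubscribers([SaleInvoiceSubscriber]);
await publisher.emitAsync('saleInvoice.created', { saleInvoiceId: 1 });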
packages/server/src/lib/KnexFactory/index.js (new file, 55 lines)
@@ -0,0 +1,55 @@
const { extend, isFunction, isObject } = require('lodash');

export default class KnexFactory {

  constructor(knex) {
    this.knex = knex;

    this.factories = [];
  }

  define(name, tableName, defaultAttributes) {
    this.factories[name] = { tableName, defaultAttributes };
  }

  async build(factoryName, attributes) {
    const factory = this.factories[factoryName];

    if (!factory) {
      throw new Error(`Unknown factory: ${factoryName}`);
    }
    let { defaultAttributes } = factory;
    const insertData = {};

    if (typeof defaultAttributes === 'function') {
      defaultAttributes = await defaultAttributes();
    }
    extend(insertData, defaultAttributes, attributes);

    for (const k in insertData) {
      const v = insertData[k];

      if (isFunction(v)) {
        insertData[k] = await v();
      } else {
        insertData[k] = await v;
      }
      if (isObject(insertData[k]) && insertData[k].id) {
        insertData[k] = insertData[k].id;
      }
    }

    return insertData;
  }

  async create(factoryName, attributes) {
    const factory = this.factories[factoryName];
    const insertData = await this.build(factoryName, attributes);
    const { tableName } = factory;

    const [id] = await this.knex(tableName).insert(insertData);
    const record = await this.knex(tableName).where({ id }).first();

    return record;
  }
}
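A sketch of the factory above in use; the knex connection, the 'account' factory name and the 'accounts' table are assumptions for illustration:

import Knex from 'knex';
import KnexFactory from '@/lib/KnexFactory';

const knex = Knex({ client: 'mysql2', connection: { /* ... */ } });
const factory = new KnexFactory(knex);

// Default attributes may be a plain object or an (async) function; individual
// values may also be functions or promises and are resolved at build time.
factory.define('account', 'accounts', () => ({
  name: 'Cash',
  created_at: () => new Date(),
}));

// build() only resolves the attributes; create() also inserts the row and reads it back.
const account = await factory.create('account', { name: 'Bank' });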
packages/server/src/lib/LogicEvaluation/Lexer.js (new file, 172 lines)
@@ -0,0 +1,172 @@
|
||||
|
||||
const OperationType = {
|
||||
LOGIC: 'LOGIC',
|
||||
STRING: 'STRING',
|
||||
COMPARISON: 'COMPARISON',
|
||||
MATH: 'MATH',
|
||||
};
|
||||
|
||||
export class Lexer {
|
||||
// operation table
|
||||
static get optable() {
|
||||
return {
|
||||
'=': OperationType.LOGIC,
|
||||
'&': OperationType.LOGIC,
|
||||
'|': OperationType.LOGIC,
|
||||
'?': OperationType.LOGIC,
|
||||
':': OperationType.LOGIC,
|
||||
|
||||
'\'': OperationType.STRING,
|
||||
'"': OperationType.STRING,
|
||||
|
||||
'!': OperationType.COMPARISON,
|
||||
'>': OperationType.COMPARISON,
|
||||
'<': OperationType.COMPARISON,
|
||||
|
||||
'(': OperationType.MATH,
|
||||
')': OperationType.MATH,
|
||||
'+': OperationType.MATH,
|
||||
'-': OperationType.MATH,
|
||||
'*': OperationType.MATH,
|
||||
'/': OperationType.MATH,
|
||||
'%': OperationType.MATH,
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Constructor
|
||||
* @param {*} expression -
|
||||
*/
|
||||
constructor(expression) {
|
||||
this.currentIndex = 0;
|
||||
this.input = expression;
|
||||
this.tokenList = [];
|
||||
}
|
||||
|
||||
getTokens() {
|
||||
let tok;
|
||||
do {
|
||||
// read current token, so step should be -1
|
||||
tok = this.pickNext(-1);
|
||||
const pos = this.currentIndex;
|
||||
switch (Lexer.optable[tok]) {
|
||||
case OperationType.LOGIC:
|
||||
// == && || ===
|
||||
this.readLogicOpt(tok);
|
||||
break;
|
||||
|
||||
case OperationType.STRING:
|
||||
this.readString(tok);
|
||||
break;
|
||||
|
||||
case OperationType.COMPARISON:
|
||||
this.readCompare(tok);
|
||||
break;
|
||||
|
||||
case OperationType.MATH:
|
||||
this.receiveToken();
|
||||
break;
|
||||
|
||||
default:
|
||||
this.readValue(tok);
|
||||
}
|
||||
|
||||
// If the pos has not changed, this loop would become infinite; every step of the while loop
|
||||
// we must move the pos forward
|
||||
// so we throw an error here, for example `1 & 2`
|
||||
if (pos === this.currentIndex && tok !== undefined) {
|
||||
const err = new Error(`unknown token ${tok} from input string ${this.input}`);
|
||||
err.name = 'UnknownToken';
|
||||
throw err;
|
||||
}
|
||||
} while (tok !== undefined)
|
||||
|
||||
return this.tokenList;
|
||||
}
|
||||
|
||||
/**
|
||||
* Read the next token; the index param sets the step and defaults to moving forward 1 step.
|
||||
*
|
||||
* @param index - next position
|
||||
*/
|
||||
pickNext(index = 0) {
|
||||
return this.input[index + this.currentIndex + 1];
|
||||
}
|
||||
|
||||
/**
|
||||
* Store token into result tokenList, and move the pos index
|
||||
*
|
||||
* @param index
|
||||
*/
|
||||
receiveToken(index = 1) {
|
||||
const tok = this.input.slice(this.currentIndex, this.currentIndex + index).trim();
|
||||
// skip empty string
|
||||
if (tok) {
|
||||
this.tokenList.push(tok);
|
||||
}
|
||||
|
||||
this.currentIndex += index;
|
||||
}
|
||||
|
||||
// ' or "
|
||||
readString(tok) {
|
||||
let next;
|
||||
let index = 0;
|
||||
do {
|
||||
next = this.pickNext(index);
|
||||
index += 1;
|
||||
} while (next !== tok && next !== undefined);
|
||||
this.receiveToken(index + 1);
|
||||
}
|
||||
|
||||
// > or < or >= or <= or !==
|
||||
// tok in (>, <, !)
|
||||
readCompare(tok) {
|
||||
if (this.pickNext() !== '=') {
|
||||
this.receiveToken(1);
|
||||
return;
|
||||
}
|
||||
// !==
|
||||
if (tok === '!' && this.pickNext(1) === '=') {
|
||||
this.receiveToken(3);
|
||||
return;
|
||||
}
|
||||
this.receiveToken(2);
|
||||
}
|
||||
|
||||
// === or ==
|
||||
// && ||
|
||||
readLogicOpt(tok) {
|
||||
if (this.pickNext() === tok) {
|
||||
// ===
|
||||
if (tok === '=' && this.pickNext(1) === tok) {
|
||||
return this.receiveToken(3);
|
||||
}
|
||||
// == && ||
|
||||
return this.receiveToken(2);
|
||||
}
|
||||
// handle as &&
|
||||
// a ? b : c is equal to a && b || c
|
||||
if (tok === '?' || tok === ':') {
|
||||
return this.receiveToken(1);
|
||||
}
|
||||
}
|
||||
|
||||
readValue(tok) {
|
||||
if (!tok) {
|
||||
return;
|
||||
}
|
||||
|
||||
let index = 0;
|
||||
while (!Lexer.optable[tok] && tok !== undefined) {
|
||||
tok = this.pickNext(index);
|
||||
index += 1;
|
||||
}
|
||||
this.receiveToken(index);
|
||||
}
|
||||
}
|
||||
|
||||
export default function token(expression) {
|
||||
const lexer = new Lexer(expression);
|
||||
return lexer.getTokens();
|
||||
}
|
||||
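A small sketch of the lexer above on a logic expression; the numeric operands stand in for the filter-role indexes used elsewhere in this commit:

import tokenize, { Lexer } from '@/lib/LogicEvaluation/Lexer';

// Either the default helper or the class can be used.
tokenize('1 && (2 || 3)');              // => ['1', '&&', '(', '2', '||', '3', ')']
new Lexer('1 && (2 || 3)').getTokens(); // same result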
packages/server/src/lib/LogicEvaluation/Parser.js (new file, 159 lines)
@@ -0,0 +1,159 @@
|
||||
export const OPERATION = {
|
||||
'!': 5,
|
||||
'*': 4,
|
||||
'/': 4,
|
||||
'%': 4,
|
||||
'+': 3,
|
||||
'-': 3,
|
||||
'>': 2,
|
||||
'<': 2,
|
||||
'>=': 2,
|
||||
'<=': 2,
|
||||
'===': 2,
|
||||
'!==': 2,
|
||||
'==': 2,
|
||||
'!=': 2,
|
||||
'&&': 1,
|
||||
'||': 1,
|
||||
'?': 1,
|
||||
':': 1,
|
||||
};
|
||||
|
||||
// export interface Node {
|
||||
// left: Node | string | null;
|
||||
// right: Node | string | null;
|
||||
// operation: string;
|
||||
// grouped?: boolean;
|
||||
// };
|
||||
|
||||
export default class Parser {
|
||||
|
||||
constructor(token) {
|
||||
this.index = -1;
|
||||
this.blockLevel = 0;
|
||||
this.token = token;
|
||||
}
|
||||
|
||||
/**
|
||||
*
|
||||
* @return {Node | string} =-
|
||||
*/
|
||||
parse() {
|
||||
let tok;
|
||||
let root = {
|
||||
left: null,
|
||||
right: null,
|
||||
operation: null,
|
||||
};
|
||||
|
||||
do {
|
||||
tok = this.parseStatement();
|
||||
|
||||
if (tok === null || tok === undefined) {
|
||||
break;
|
||||
}
|
||||
|
||||
if (root.left === null) {
|
||||
root.left = tok;
|
||||
root.operation = this.nextToken();
|
||||
|
||||
if (!root.operation) {
|
||||
return tok;
|
||||
}
|
||||
|
||||
root.right = this.parseStatement();
|
||||
} else {
|
||||
if (typeof tok !== 'string') {
|
||||
throw new Error('operation must be a string, but got ' + JSON.stringify(tok));
|
||||
}
|
||||
root = this.addNode(tok, this.parseStatement(), root);
|
||||
}
|
||||
} while (tok);
|
||||
|
||||
return root;
|
||||
}
|
||||
|
||||
nextToken() {
|
||||
this.index += 1;
|
||||
return this.token[this.index];
|
||||
}
|
||||
|
||||
prevToken() {
|
||||
return this.token[this.index - 1];
|
||||
}
|
||||
|
||||
/**
|
||||
*
|
||||
* @param {string} operation
|
||||
* @param {Node|String|null} right
|
||||
* @param {Node} root
|
||||
*/
|
||||
addNode(operation, right, root) {
|
||||
let pre = root;
|
||||
|
||||
if (this.compare(pre.operation, operation) < 0 && !pre.grouped) {
|
||||
|
||||
while (pre.right !== null &&
|
||||
typeof pre.right !== 'string' &&
|
||||
this.compare(pre.right.operation, operation) < 0 && !pre.right.grouped) {
|
||||
pre = pre.right;
|
||||
}
|
||||
|
||||
pre.right = {
|
||||
operation,
|
||||
left: pre.right,
|
||||
right,
|
||||
};
|
||||
return root;
|
||||
}
|
||||
return {
|
||||
left: pre,
|
||||
right,
|
||||
operation,
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
*
|
||||
* @param {String} a
|
||||
* @param {String} b
|
||||
*/
|
||||
compare(a, b) {
|
||||
if (!OPERATION.hasOwnProperty(a) || !OPERATION.hasOwnProperty(b)) {
|
||||
throw new Error(`unknown operation ${a} or ${b}`);
|
||||
}
|
||||
return OPERATION[a] - OPERATION[b];
|
||||
}
|
||||
|
||||
/**
|
||||
* @return string | Node | null
|
||||
*/
|
||||
parseStatement() {
|
||||
const token = this.nextToken();
|
||||
if (token === '(') {
|
||||
this.blockLevel += 1;
|
||||
const node = this.parse();
|
||||
this.blockLevel -= 1;
|
||||
|
||||
if (typeof node !== 'string') {
|
||||
node.grouped = true;
|
||||
}
|
||||
return node;
|
||||
}
|
||||
|
||||
if (token === ')') {
|
||||
return null;
|
||||
}
|
||||
|
||||
if (token === '!') {
|
||||
return { left: null, operation: token, right: this.parseStatement() }
|
||||
}
|
||||
|
||||
// 3 > -12 or -12 + 10
|
||||
if (token === '-' && (OPERATION[this.prevToken()] > 0 || this.prevToken() === undefined)) {
|
||||
return { left: '0', operation: token, right: this.parseStatement(), grouped: true };
|
||||
}
|
||||
|
||||
return token;
|
||||
}
|
||||
}
|
||||
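A minimal usage sketch (not part of the commit) combining the tokenizer above with this parser; the './Tokenizer' path is an assumption, and precedence comes from the OPERATION table:

import token from './Tokenizer'; // hypothetical path to the tokenizer file above
import Parser from './Parser';

// '*' has higher precedence than '+', so the multiplication nests under the
// right branch of the addition node.
const tree = new Parser(token('1 + 2 * 3')).parse();
// tree => { left: '1', operation: '+', right: { left: '2', operation: '*', right: '3' } }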
61
packages/server/src/lib/LogicEvaluation/QueryParser.js
Normal file
61
packages/server/src/lib/LogicEvaluation/QueryParser.js
Normal file
@@ -0,0 +1,61 @@
import { OPERATION } from './Parser';

export default class QueryParser {
  constructor(tree, queries) {
    this.tree = tree;
    this.queries = queries;
    this.query = null;
  }

  setQuery(query) {
    this.query = query.clone();
  }

  parse() {
    return this.parseNode(this.tree);
  }

  parseNode(node) {
    if (typeof node === 'string') {
      const nodeQuery = this.getQuery(node);
      return (query) => { nodeQuery(query); };
    }
    if (OPERATION[node.operation] === undefined) {
      throw new Error(`unknown expression ${node.operation}`);
    }
    const leftQuery = this.getQuery(node.left);
    const rightQuery = this.getQuery(node.right);

    switch (node.operation) {
      case '||':
      case 'OR':
        return (nodeQuery) => nodeQuery.where((query) => {
          query.where((q) => { leftQuery(q); });
          query.orWhere((q) => { rightQuery(q); });
        });
      case '&&':
      case 'AND':
      default:
        return (nodeQuery) => nodeQuery.where((query) => {
          query.where((q) => { leftQuery(q); });
          query.andWhere((q) => { rightQuery(q); });
        });
    }
  }

  getQuery(node) {
    if (typeof node !== 'string' && node !== null) {
      return this.parseNode(node);
    }
    const value = parseFloat(node);

    if (!isNaN(value)) {
      if (typeof this.queries[node] === 'undefined') {
        throw new Error(`unknown query under index ${node}`);
      }
      return this.queries[node];
    }
    return null;
  }
}
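A usage sketch, under the assumption that the tree leaves are numeric indexes and that `queries` maps each index to a callback constraining an Objection/Knex query builder:

import Parser from './Parser';
import QueryParser from './QueryParser';

// '1 && 2' means: apply query #1 AND query #2.
const tree = new Parser(['1', '&&', '2']).parse();
const queries = {
  1: (q) => q.where('status', 'published'),
  2: (q) => q.where('amount', '>', 100),
};
const applyFilter = new QueryParser(tree, queries).parse();
// applyFilter(SomeModel.query()) nests both conditions under a single WHERE group.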
115
packages/server/src/lib/Mail/index.ts
Normal file
115
packages/server/src/lib/Mail/index.ts
Normal file
@@ -0,0 +1,115 @@
import fs from 'fs';
import Mustache from 'mustache';
import { Container } from 'typedi';
import path from 'path';
import { IMailable } from '@/interfaces';

interface IMailAttachment {
  filename: string;
  path: string;
  cid: string;
}

export default class Mail {
  view: string;
  subject: string;
  to: string;
  from: string = `${process.env.MAIL_FROM_NAME} ${process.env.MAIL_FROM_ADDRESS}`;
  data: { [key: string]: string | number };
  attachments: IMailAttachment[];

  /**
   * Mail options.
   */
  private get mailOptions() {
    return {
      to: this.to,
      from: this.from,
      subject: this.subject,
      html: this.render(this.data),
      attachments: this.attachments,
    };
  }

  /**
   * Sends the given mail to the target address.
   */
  public send() {
    return new Promise((resolve, reject) => {
      const mailInstance = Container.get('mail');

      mailInstance.sendMail(this.mailOptions, (error) => {
        if (error) {
          reject(error);
          return;
        }
        resolve(true);
      });
    });
  }

  /**
   * Sets the send-to address of the mail.
   * @param {string} to - Target address.
   */
  setTo(to: string) {
    this.to = to;
    return this;
  }

  /**
   * Sets the from address of the mail.
   * @param {string} from
   * @return {Mail}
   */
  private setFrom(from: string) {
    this.from = from;
    return this;
  }

  /**
   * Sets the mail attachments.
   * @param {IMailAttachment[]} attachments
   */
  setAttachments(attachments: IMailAttachment[]) {
    this.attachments = attachments;
    return this;
  }

  /**
   * Sets the mail subject.
   * @param {string} subject
   */
  setSubject(subject: string) {
    this.subject = subject;
    return this;
  }

  /**
   * Sets the view template path under the views directory.
   * @param {string} view
   */
  setView(view: string) {
    this.view = view;
    return this;
  }

  /**
   * Sets the view template data.
   */
  setData(data) {
    this.data = data;
    return this;
  }

  /**
   * Renders the view template with the given data.
   * @param {object} data
   * @return {string}
   */
  render(data): string {
    const viewContent = this.getViewContent();
    return Mustache.render(viewContent, data);
  }

  /**
   * Retrieves the view content from the views directory.
   */
  private getViewContent(): string {
    const filePath = path.join(global.__root, `../views/${this.view}`);
    return fs.readFileSync(filePath, 'utf8');
  }
}
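A sketch of a concrete mailable, assuming a 'welcome.html' Mustache template exists under the views directory and that a nodemailer-like transport is registered in the container under the 'mail' key, as send() expects; class, template and data names are illustrative:

class WelcomeMail extends Mail {
  constructor(to: string, userName: string) {
    super();
    this.setTo(to)
      .setSubject('Welcome to Bigcapital')
      .setView('welcome.html') // hypothetical template name
      .setData({ userName });
  }
}
// new WelcomeMail('user@example.com', 'Ahmed').send();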
40
packages/server/src/lib/Metable/MetableConfig.ts
Normal file
40
packages/server/src/lib/Metable/MetableConfig.ts
Normal file
@@ -0,0 +1,40 @@
import { get } from 'lodash';

export default class MetableConfig {
  config: any;

  constructor(config) {
    this.setConfig(config);
  }

  /**
   * Sets the config.
   */
  setConfig(config) {
    this.config = config;
  }

  /**
   * Retrieves the meta config of the given key under the given group.
   * @param {string} key
   * @param {string} group
   * @param {string} accessor
   * @returns {object|string}
   */
  getMetaConfig(key: string, group?: string, accessor?: string) {
    const configGroup = get(this.config, group);
    const config = get(configGroup, key);

    return accessor ? get(config, accessor) : config;
  }

  /**
   * Retrieves the meta value type of the given key under the given group.
   * @param {string} key
   * @param {string} group
   * @returns {string}
   */
  getMetaType(key: string, group?: string) {
    return this.getMetaConfig(key, group, 'type');
  }
}
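A small usage sketch; the config shape (groups of keys, each with a `type`) is inferred from how the DB-backed store below reads it, and the key names are illustrative:

const metaConfig = new MetableConfig({
  organization: { base_currency: { type: 'string' }, accounts_closed: { type: 'boolean' } },
});
metaConfig.getMetaType('accounts_closed', 'organization'); // 'boolean'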
12
packages/server/src/lib/Metable/MetableModel.js
Normal file
12
packages/server/src/lib/Metable/MetableModel.js
Normal file
@@ -0,0 +1,12 @@
export default class Metable {
  static get modifiers() {
    return {
      whereKey(builder, key) {
        builder.where('key', key);
      },
    };
  }
}
215
packages/server/src/lib/Metable/MetableStore.ts
Normal file
215
packages/server/src/lib/Metable/MetableStore.ts
Normal file
@@ -0,0 +1,215 @@
import { Model } from 'objection';
import { omit, isEmpty } from 'lodash';
import { IMetadata, IMetaQuery, IMetableStore } from '@/interfaces';
import { itemsStartWith } from 'utils';

export default class MetableStore implements IMetableStore {
  metadata: IMetadata[];
  model: Model;
  extraColumns: string[];

  /**
   * Constructor method.
   */
  constructor() {
    this.metadata = [];
    this.model = null;
    this.extraColumns = [];
  }

  /**
   * Sets the extra columns.
   * @param {string[]} columns - Extra columns next to key/value.
   */
  setExtraColumns(columns: string[]): void {
    this.extraColumns = columns;
  }

  /**
   * Finds the metadata of the given key.
   * @param {string|IMetaQuery} query - Metadata key or query.
   * @returns {IMetadata} - Metadata object.
   */
  find(query: string | IMetaQuery): IMetadata {
    const { key, value, ...extraColumns } = this.parseQuery(query);

    return this.metadata.find((meta: IMetadata) => {
      const isSameKey = meta.key === key;
      const sameExtraColumns = this.extraColumns.some(
        (extraColumn: string) => extraColumns[extraColumn] === meta[extraColumn]
      );

      const isSameExtraColumns = sameExtraColumns || isEmpty(extraColumns);

      return isSameKey && isSameExtraColumns;
    });
  }

  /**
   * Retrieves all metadata that is not marked as deleted.
   * @returns {IMetadata[]}
   */
  all(): IMetadata[] {
    return this.metadata
      .filter((meta: IMetadata) => !meta._markAsDeleted)
      .map((meta: IMetadata) =>
        omit(meta, itemsStartWith(Object.keys(meta), '_'))
      );
  }

  /**
   * Retrieves the metadata value of the given key.
   * @param {string|IMetaQuery} query - Metadata key or query.
   * @param {any} defaultValue - Fallback when the key is not found.
   */
  get(query: string | IMetaQuery, defaultValue: any): any | false {
    const metadata = this.find(query);
    return metadata
      ? metadata.value
      : typeof defaultValue !== 'undefined'
      ? defaultValue
      : false;
  }

  /**
   * Marks the metadata of the given key as deleted.
   * @param {string|IMetaQuery} query - Metadata key or query.
   */
  remove(query: string | IMetaQuery): void {
    const metadata: IMetadata = this.find(query);

    if (metadata) {
      metadata._markAsDeleted = true;
    }
  }

  /**
   * Marks all metadata as deleted.
   * @param {string} group
   */
  removeAll(group: string = 'default'): void {
    this.metadata = this.metadata.map((meta) => ({
      ...meta,
      _markAsDeleted: true,
    }));
  }

  /**
   * Sets the metadata to the stack.
   * @param {IMetaQuery|IMetadata[]|string} query - Metadata key, query or list of entries.
   * @param {any} metaValue - Metadata value.
   */
  set(query: IMetaQuery | IMetadata[] | string, metaValue?: any): void {
    if (Array.isArray(query)) {
      const metadata = query;

      metadata.forEach((meta: IMetadata) => {
        this.set(meta);
      });
      return;
    }
    const { key, value, ...extraColumns } = this.parseQuery(query);
    const metadata = this.find(query);
    const newValue = metaValue || value;

    if (metadata) {
      metadata.value = newValue;
      metadata._markAsUpdated = true;
    } else {
      this.metadata.push({
        value: newValue,
        key,
        ...extraColumns,
        _markAsInserted: true,
      });
    }
  }

  /**
   * Parses the given query to a metadata query object.
   * @param {string|IMetaQuery} query
   */
  parseQuery(query: string | IMetaQuery): IMetaQuery {
    return typeof query !== 'object' ? { key: query } : { ...query };
  }

  /**
   * Formats the metadata value before saving it to the database.
   * @param {string|number|boolean} value -
   * @param {string} valueType -
   * @return {string|number|boolean} -
   */
  static formatMetaValue(
    value: string | boolean | number,
    valueType: string
  ): string | number | boolean {
    let parsedValue;

    switch (valueType) {
      case 'number':
        parsedValue = `${value}`;
        break;
      case 'boolean':
        parsedValue = value ? '1' : '0';
        break;
      case 'json':
        parsedValue = JSON.stringify(value);
        break;
      default:
        parsedValue = value;
        break;
    }
    return parsedValue;
  }

  /**
   * Maps the metadata to the collection.
   * @param {IMetadata[]} metadata -
   * @param {string} parseType -
   */
  mapMetadataToCollection(metadata: IMetadata[], parseType: string = 'parse') {
    return metadata.map((model) =>
      this.mapMetadataToCollection(model, parseType)
    );
  }

  /**
   * Loads metadata to the metable collection.
   * @param {IMetadata|IMetadata[]} meta -
   */
  from(meta: []) {
    if (Array.isArray(meta)) {
      meta.forEach((m) => {
        this.from(m);
      });
      return;
    }
    this.metadata.push(meta);
  }

  /**
   * Converts the stored metadata to a plain array.
   * @returns {IMetadata[]}
   */
  toArray(): IMetadata[] {
    return this.metadata;
  }

  /**
   * Static method to load metadata to a new collection.
   * @param {IMetadata[]} meta
   */
  static from(meta) {
    const collection = new MetableStore();
    collection.from(meta);

    return collection;
  }

  /**
   * Resets the memorized metadata.
   */
  resetMetadata() {
    this.metadata = [];
  }
}
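An in-memory usage sketch of the store API above; key and group names are illustrative:

const store = new MetableStore();
store.setExtraColumns(['group']);

store.set({ key: 'base_currency', group: 'organization' }, 'USD');
store.set('date_format', 'YYYY-MM-DD');

store.get('date_format', null);                                   // 'YYYY-MM-DD'
store.get({ key: 'base_currency', group: 'organization' }, null); // 'USD'
store.remove('date_format'); // marks the entry as _markAsDeleted
store.all();                 // deleted entries and underscore-prefixed marks are filtered out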
243
packages/server/src/lib/Metable/MetableStoreDB.ts
Normal file
243
packages/server/src/lib/Metable/MetableStoreDB.ts
Normal file
@@ -0,0 +1,243 @@
import { IMetadata, IMetableStoreStorage } from '@/interfaces';
import MetableStore from './MetableStore';
import { isBlank, parseBoolean } from 'utils';
import MetableConfig from './MetableConfig';
import config from '@/data/options';

export default class MetableDBStore
  extends MetableStore
  implements IMetableStoreStorage {
  repository: any;
  KEY_COLUMN: string;
  VALUE_COLUMN: string;
  TYPE_COLUMN: string;
  extraQuery: Function;
  loaded: boolean;
  config: MetableConfig;

  /**
   * Constructor method.
   */
  constructor() {
    super();

    this.loaded = false;
    this.KEY_COLUMN = 'key';
    this.VALUE_COLUMN = 'value';
    this.TYPE_COLUMN = 'type';
    this.repository = null;

    this.extraQuery = (meta) => {
      return {
        key: meta[this.KEY_COLUMN],
        ...this.transfromMetaExtraColumns(meta),
      };
    };
    this.config = new MetableConfig(config);
  }

  /**
   * Transforms the meta extra columns to a query object.
   * @param {IMetadata} meta
   */
  private transfromMetaExtraColumns(meta: IMetadata) {
    return this.extraColumns.reduce((obj, column) => {
      const metaValue = meta[column];

      if (!isBlank(metaValue)) {
        obj[column] = metaValue;
      }
      return obj;
    }, {});
  }

  /**
   * Sets the repository entity of this metadata collection.
   * @param {Object} repository -
   */
  setRepository(repository) {
    this.repository = repository;
  }

  /**
   * Sets an extra query callback.
   * @param callback
   */
  setExtraQuery(callback) {
    this.extraQuery = callback;
  }

  /**
   * Saves the modified, deleted and inserted metadata.
   */
  save() {
    this.validateStoreIsLoaded();

    return Promise.all([
      this.saveUpdated(this.metadata),
      this.saveDeleted(this.metadata),
      this.saveInserted(this.metadata),
    ]);
  }

  /**
   * Saves the updated metadata.
   * @param {IMetadata[]} metadata -
   * @returns {Promise}
   */
  saveUpdated(metadata: IMetadata[]) {
    const updated = metadata.filter((m) => m._markAsUpdated === true);
    const opers = [];

    updated.forEach((meta) => {
      const updateOper = this.repository
        .update(
          { [this.VALUE_COLUMN]: meta.value },
          { ...this.extraQuery(meta) }
        )
        .then(() => {
          meta._markAsUpdated = false;
        });
      opers.push(updateOper);
    });
    return Promise.all(opers);
  }

  /**
   * Saves the deleted metadata.
   * @param {IMetadata[]} metadata -
   * @returns {Promise}
   */
  saveDeleted(metadata: IMetadata[]) {
    const deleted = metadata.filter(
      (m: IMetadata) => m._markAsDeleted === true
    );
    const opers: Promise<void>[] = [];

    if (deleted.length > 0) {
      deleted.forEach((meta) => {
        const deleteOper = this.repository
          .deleteBy({
            ...this.extraQuery(meta),
          })
          .then(() => {
            meta._markAsDeleted = false;
          });
        opers.push(deleteOper);
      });
    }
    return Promise.all(opers);
  }

  /**
   * Saves the inserted metadata.
   * @param {IMetadata[]} metadata -
   * @returns {Promise}
   */
  saveInserted(metadata: IMetadata[]) {
    const inserted = metadata.filter(
      (m: IMetadata) => m._markAsInserted === true
    );
    const opers: Promise<void>[] = [];

    inserted.forEach((meta) => {
      const insertData = {
        [this.KEY_COLUMN]: meta.key,
        [this.VALUE_COLUMN]: meta.value,
        ...this.transfromMetaExtraColumns(meta),
      };
      const insertOper = this.repository.create(insertData).then(() => {
        meta._markAsInserted = false;
      });
      opers.push(insertOper);
    });
    return Promise.all(opers);
  }

  /**
   * Loads the metadata from the storage.
   */
  async load() {
    const metadata = await this.repository.all();
    const mappedMetadata = this.mapMetadataCollection(metadata);

    this.resetMetadata();

    mappedMetadata.forEach((meta: IMetadata) => {
      this.metadata.push(meta);
    });
    this.loaded = true;
  }

  /**
   * Parses the metadata value after fetching it from the storage.
   * @param {string} value -
   * @param {string|false} valueType -
   * @return {string|number|boolean} -
   */
  static parseMetaValue(
    value: string,
    valueType: string | false
  ): string | boolean | number {
    let parsedValue: string | number | boolean;

    switch (valueType) {
      case 'number':
        parsedValue = parseFloat(value);
        break;
      case 'boolean':
        parsedValue = parseBoolean(value, false);
        break;
      case 'json':
        parsedValue = JSON.parse(value);
        break;
      default:
        parsedValue = value;
        break;
    }
    return parsedValue;
  }

  /**
   * Maps and parses a metadata entry to a collection entry.
   * @param {IMetadata} metadata -
   */
  mapMetadata(metadata: IMetadata) {
    const metaType = this.config.getMetaType(
      metadata[this.KEY_COLUMN],
      metadata['group'],
    );
    return {
      key: metadata[this.KEY_COLUMN],
      value: MetableDBStore.parseMetaValue(
        metadata[this.VALUE_COLUMN],
        metaType
      ),
      ...this.extraColumns.reduce((obj, extraCol: string) => {
        obj[extraCol] = metadata[extraCol] || null;
        return obj;
      }, {}),
    };
  }

  /**
   * Parses the metadata to the collection.
   * @param {IMetadata[]} metadata -
   */
  mapMetadataCollection(metadata: IMetadata[]) {
    return metadata.map((model) => this.mapMetadata(model));
  }

  /**
   * Throws an error in case the store is not loaded yet.
   */
  private validateStoreIsLoaded() {
    if (!this.loaded) {
      throw new Error(
        'You could not save the store before it is loaded from the storage.'
      );
    }
  }
}
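A wiring sketch for the DB-backed store; the repository shape (all/create/update/deleteBy) is inferred from the calls above, and the instance names are illustrative:

const settings = new MetableDBStore();
settings.setExtraColumns(['group']);
settings.setRepository(settingsRepository); // hypothetical repository instance

await settings.load(); // fetches rows and parses values by their configured type
settings.set({ key: 'base_currency', group: 'organization' }, 'USD');
await settings.save(); // persists the inserted/updated/deleted marks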
48
packages/server/src/lib/MomentFormats/index.ts
Normal file
48
packages/server/src/lib/MomentFormats/index.ts
Normal file
@@ -0,0 +1,48 @@
import moment from 'moment';

moment.prototype.toMySqlDateTime = function () {
  return this.format('YYYY-MM-DD HH:mm:ss');
};

// moment.fn.businessDiff = function (param) {
//   param = moment(param);
//   var signal = param.unix() < this.unix() ? 1 : -1;
//   var start = moment.min(param, this).clone();
//   var end = moment.max(param, this).clone();
//   var start_offset = start.day() - 7;
//   var end_offset = end.day();

//   var end_sunday = end.clone().subtract('d', end_offset);
//   var start_sunday = start.clone().subtract('d', start_offset);
//   var weeks = end_sunday.diff(start_sunday, 'days') / 7;

//   start_offset = Math.abs(start_offset);
//   if (start_offset == 7)
//     start_offset = 5;
//   else if (start_offset == 1)
//     start_offset = 0;
//   else
//     start_offset -= 2;

//   if (end_offset == 6)
//     end_offset--;

//   return signal * (weeks * 5 + start_offset + end_offset);
// };

// moment.fn.businessAdd = function (days) {
//   var signal = days < 0 ? -1 : 1;
//   days = Math.abs(days);
//   var d = this.clone().add(Math.floor(days / 5) * 7 * signal, 'd');
//   var remaining = days % 5;
//   while (remaining) {
//     d.add(signal, 'd');
//     if (d.day() !== 0 && d.day() !== 6)
//       remaining--;
//   }
//   return d;
// };

// moment.fn.businessSubtract = function (days) {
//   return this.businessAdd(-days);
// };
9
packages/server/src/lib/NestedSet/NestedSetNode.js
Normal file
9
packages/server/src/lib/NestedSet/NestedSetNode.js
Normal file
@@ -0,0 +1,9 @@
class NestedSetNode {
  // Saves the node under the given parent node.
  appendToNode($parent) {

  }
}
@@ -0,0 +1,27 @@
import { QueryBuilder } from 'objection';

export default class BulkOperationsQueryBuilder extends QueryBuilder {
  bulkInsert(collection) {
    const opers = [];

    collection.forEach((dataset) => {
      const insertOper = this.insert({ ...dataset });
      opers.push(insertOper);
    });
    return Promise.all(opers);
  }

  bulkDelete(rowsIds) {

  }

  bulkUpdate(dataset, whereColumn) {

  }

  bulkPatch(newDataset, oldDataset) {

  }
}
100
packages/server/src/lib/Seeder/FsMigrations.ts
Normal file
100
packages/server/src/lib/Seeder/FsMigrations.ts
Normal file
@@ -0,0 +1,100 @@
import path from 'path';
import { sortBy } from 'lodash';
import fs from 'fs';
import { promisify } from 'util';
import { MigrateItem } from './interfaces';
import { importWebpackSeedModule } from './Utils';
import { DEFAULT_LOAD_EXTENSIONS } from './constants';
import { filterMigrations } from './MigrateUtils';

const readdir = promisify(fs.readdir);

class FsMigrations {
  private sortDirsSeparately: boolean;
  private migrationsPaths: string[];
  private loadExtensions: string[];

  /**
   * Constructor method.
   * @param {string[]} migrationDirectories
   * @param {boolean} sortDirsSeparately
   * @param {string[]} loadExtensions
   */
  constructor(
    migrationDirectories: string[],
    sortDirsSeparately: boolean,
    loadExtensions: string[]
  ) {
    this.sortDirsSeparately = sortDirsSeparately;

    if (!Array.isArray(migrationDirectories)) {
      migrationDirectories = [migrationDirectories];
    }
    this.migrationsPaths = migrationDirectories;
    this.loadExtensions = loadExtensions || DEFAULT_LOAD_EXTENSIONS;
  }

  /**
   * Gets the migration names.
   * @returns {Promise<MigrateItem[]>}
   */
  public getMigrations(loadExtensions = null): Promise<MigrateItem[]> {
    // Get a list of files in all specified migration directories.
    const readMigrationsPromises = this.migrationsPaths.map((configDir) => {
      const absoluteDir = path.resolve(process.cwd(), configDir);
      return readdir(absoluteDir).then((files) => ({
        files,
        configDir,
        absoluteDir,
      }));
    });

    return Promise.all(readMigrationsPromises).then((allMigrations) => {
      const migrations = allMigrations.reduce((acc, migrationDirectory) => {
        // When true, files inside the folder should be sorted.
        if (this.sortDirsSeparately) {
          migrationDirectory.files = migrationDirectory.files.sort();
        }
        migrationDirectory.files.forEach((file) =>
          acc.push({ file, directory: migrationDirectory.configDir })
        );
        return acc;
      }, []);

      // If true we have already sorted the migrations inside the folders,
      // return the migrations fully qualified.
      if (this.sortDirsSeparately) {
        return filterMigrations(
          this,
          migrations,
          loadExtensions || this.loadExtensions
        );
      }
      return filterMigrations(
        this,
        sortBy(migrations, 'file'),
        loadExtensions || this.loadExtensions
      );
    });
  }

  /**
   * Retrieves the file name of the given migrate item.
   * @param {MigrateItem} migration
   * @returns {string}
   */
  public getMigrationName(migration: MigrateItem): string {
    return migration.file;
  }

  /**
   * Retrieves the migration module of the given migrate item.
   * @param {MigrateItem} migration
   * @returns {Promise<any>}
   */
  public getMigration(migration: MigrateItem): Promise<any> {
    return importWebpackSeedModule(migration.file);
  }
}

export { DEFAULT_LOAD_EXTENSIONS, FsMigrations };
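A listing sketch; the directory and file names are illustrative:

const source = new FsMigrations(['./src/database/seeds/core'], false, DEFAULT_LOAD_EXTENSIONS);

source.getMigrations().then((items) => {
  // items: [{ file: '001_seed_accounts.js', directory: './src/database/seeds/core' }, ...]
  items.forEach((item) => console.log(source.getMigrationName(item)));
});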
192
packages/server/src/lib/Seeder/MigrateUtils.ts
Normal file
192
packages/server/src/lib/Seeder/MigrateUtils.ts
Normal file
@@ -0,0 +1,192 @@
import { differenceWith } from 'lodash';
import path from 'path';
import { FsMigrations } from './FsMigrations';
import {
  getTable,
  getTableName,
  getLockTableName,
  getLockTableNameWithSchema,
} from './TableUtils';
import { ISeederConfig, MigrateItem } from './interfaces';

/**
 * Gets a schema-aware schema builder for a given schema name.
 * @param trxOrKnex
 * @param {string} schemaName
 * @returns
 */
function getSchemaBuilder(trxOrKnex, schemaName: string | null = null) {
  return schemaName
    ? trxOrKnex.schema.withSchema(schemaName)
    : trxOrKnex.schema;
}

/**
 * Creates the migration table of the given table name.
 * @param {string} tableName
 * @param {string} schemaName
 * @param trxOrKnex
 * @returns
 */
function createMigrationTable(
  tableName: string,
  schemaName: string,
  trxOrKnex
) {
  return getSchemaBuilder(trxOrKnex, schemaName).createTable(
    getTableName(tableName),
    (t) => {
      t.increments();
      t.string('name');
      t.integer('batch');
      t.timestamp('migration_time');
    }
  );
}

/**
 * Creates a migration lock table of the given table name.
 * @param {string} tableName
 * @param {string} schemaName
 * @param trxOrKnex
 * @returns
 */
function createMigrationLockTable(
  tableName: string,
  schemaName: string,
  trxOrKnex
) {
  return getSchemaBuilder(trxOrKnex, schemaName).createTable(tableName, (t) => {
    t.increments('index').primary();
    t.integer('is_locked');
  });
}

/**
 * Ensures the migration and migration lock tables exist and the lock row is seeded.
 * @param {string} tableName
 * @param {string} schemaName
 * @param trxOrKnex
 * @returns
 */
export function ensureMigrationTables(
  tableName: string,
  schemaName: string,
  trxOrKnex
) {
  const lockTable = getLockTableName(tableName);
  const lockTableWithSchema = getLockTableNameWithSchema(tableName, schemaName);

  return getSchemaBuilder(trxOrKnex, schemaName)
    .hasTable(tableName)
    .then((exists) => {
      return !exists && createMigrationTable(tableName, schemaName, trxOrKnex);
    })
    .then(() => {
      return getSchemaBuilder(trxOrKnex, schemaName).hasTable(lockTable);
    })
    .then((exists) => {
      return (
        !exists && createMigrationLockTable(lockTable, schemaName, trxOrKnex)
      );
    })
    .then(() => {
      return getTable(trxOrKnex, lockTable, schemaName).select('*');
    })
    .then((data) => {
      return (
        !data.length &&
        trxOrKnex.into(lockTableWithSchema).insert({ is_locked: 0 })
      );
    });
}

/**
 * Lists all available migration versions, as a sorted array.
 * @param migrationSource
 * @param loadExtensions
 * @returns
 */
function listAll(
  migrationSource: FsMigrations,
  loadExtensions
): Promise<MigrateItem[]> {
  return migrationSource.getMigrations(loadExtensions);
}

/**
 * Lists all migrations that have been completed for the current db, as an array.
 * @param {string} tableName
 * @param {string} schemaName
 * @param trxOrKnex
 * @returns {Promise<string[]>}
 */
export async function listCompleted(
  tableName: string,
  schemaName: string,
  trxOrKnex
): Promise<string[]> {
  const completedMigrations = await trxOrKnex
    .from(getTableName(tableName, schemaName))
    .orderBy('id')
    .select('name');

  return completedMigrations.map((migration) => {
    return migration.name;
  });
}

/**
 * Gets the migration list from the migration directory specified in config, as well as
 * the list of completed migrations to check what should be run.
 */
export function listAllAndCompleted(config: ISeederConfig, trxOrKnex) {
  return Promise.all([
    listAll(config.migrationSource, config.loadExtensions),
    listCompleted(config.tableName, config.schemaName, trxOrKnex),
  ]);
}

/**
 * Gets the migrations that have not been completed yet.
 * @param migrationSource
 * @param all
 * @param completed
 * @returns
 */
export function getNewMigrations(
  migrationSource: FsMigrations,
  all: MigrateItem[],
  completed: string[]
): MigrateItem[] {
  return differenceWith(all, completed, (allMigration, completedMigration) => {
    return (
      completedMigration === migrationSource.getMigrationName(allMigration)
    );
  });
}

function startsWithNumber(str) {
  return /^\d/.test(str);
}

/**
 * Filters the migration files to the supported extensions whose names start with a number.
 * @param {FsMigrations} migrationSource -
 * @param {MigrateItem[]} migrations -
 * @param {string[]} loadExtensions -
 * @returns
 */
export function filterMigrations(
  migrationSource: FsMigrations,
  migrations: MigrateItem[],
  loadExtensions: string[]
) {
  return migrations.filter((migration) => {
    const migrationName = migrationSource.getMigrationName(migration);
    const extension = path.extname(migrationName);

    return (
      loadExtensions.includes(extension) && startsWithNumber(migrationName)
    );
  });
}
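For example, diffing the discovered seed files against the names already recorded in the migrations table yields what still has to run (file names illustrative):

const pending = getNewMigrations(
  migrationSource, // an FsMigrations instance
  [
    { file: '001_seed_accounts.js', directory: './seeds' },
    { file: '002_seed_settings.js', directory: './seeds' },
  ],
  ['001_seed_accounts.js']
);
// pending => [{ file: '002_seed_settings.js', directory: './seeds' }]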
222
packages/server/src/lib/Seeder/SeedMigration.ts
Normal file
222
packages/server/src/lib/Seeder/SeedMigration.ts
Normal file
@@ -0,0 +1,222 @@
import { Knex } from 'knex';
import Bluebird from 'bluebird';
import { getTable, getTableName, getLockTableName } from './TableUtils';
import getMergedConfig from './SeederConfig';
import {
  listAllAndCompleted,
  getNewMigrations,
  listCompleted,
  ensureMigrationTables,
} from './MigrateUtils';
import { MigrateItem, SeedMigrationContext, ISeederConfig } from './interfaces';
import { FsMigrations } from './FsMigrations';

export class SeedMigration {
  knex: Knex;
  config: ISeederConfig;
  migrationSource: FsMigrations;
  context: SeedMigrationContext;

  /**
   * Constructor method.
   * @param {Knex} knex - Knex instance.
   * @param {SeedMigrationContext} context - Tenant and i18n context of the migration.
   */
  constructor(knex: Knex, context: SeedMigrationContext) {
    this.knex = knex;
    this.config = getMergedConfig(this.knex.client.config.seeds, undefined);
    this.migrationSource = this.config.migrationSource;
    this.context = context;
  }

  /**
   * Runs the latest migrations.
   * @returns {Promise<void>}
   */
  async latest(config = null): Promise<void> {
    // Merges the configuration.
    this.config = getMergedConfig(config, this.config);

    // Ensure the migration tables exist.
    await ensureMigrationTables(this.config.tableName, null, this.knex);

    // Retrieve all and completed migrations.
    const [all, completed] = await listAllAndCompleted(this.config, this.knex);

    // Retrieve the new migrations.
    const migrations = getNewMigrations(this.migrationSource, all, completed);

    // Run the latest migrations in one batch.
    return this.knex.transaction((trx: Knex.Transaction) => {
      return this.runBatch(migrations, 'up', trx);
    });
  }

  /**
   * Adds the migration lock flag.
   * @param {Knex.Transaction} trx
   * @returns
   */
  private migrateLockTable(trx: Knex.Transaction) {
    const tableName = getLockTableName(this.config.tableName);
    return getTable(this.knex, tableName, this.config.schemaName)
      .transacting(trx)
      .where('is_locked', '=', 0)
      .update({ is_locked: 1 })
      .then((rowCount) => {
        if (rowCount !== 1) {
          throw new Error('Migration table is already locked');
        }
      });
  }

  /**
   * Adds the migration lock flag.
   * @param {Knex.Transaction} trx
   * @returns
   */
  private migrationLock(trx: Knex.Transaction) {
    return this.migrateLockTable(trx);
  }

  /**
   * Frees the migration lock flag.
   * @param {Knex.Transaction} trx
   * @returns
   */
  private freeLock(trx = this.knex): Promise<void> {
    const tableName = getLockTableName(this.config.tableName);

    return getTable(trx, tableName, this.config.schemaName).update({
      is_locked: 0,
    });
  }

  /**
   * Returns the latest batch number.
   * @param trx
   * @returns {Promise<number>}
   */
  private latestBatchNumber(trx = this.knex): Promise<number> {
    return trx
      .from(getTableName(this.config.tableName, this.config.schemaName))
      .max('batch as max_batch')
      .then((obj) => obj[0].max_batch || 0);
  }

  /**
   * Runs a batch of `migrations` in a specified `direction`, saving the
   * appropriate database information as the migrations are run.
   * @param {number} batchNo
   * @param {MigrateItem[]} migrations
   * @param {string} direction
   * @param {Knex.Transaction} trx
   * @returns {Promise<void>}
   */
  private waterfallBatch(
    batchNo: number,
    migrations: MigrateItem[],
    direction: string,
    trx: Knex.Transaction
  ): Promise<void> {
    const { tableName } = this.config;

    return Bluebird.each(migrations, (migration) => {
      const name = this.migrationSource.getMigrationName(migration);

      return this.migrationSource
        .getMigration(migration)
        .then((migrationContent) =>
          this.runMigrationContent(migrationContent.default, direction, trx)
        )
        .then(() => {
          if (direction === 'up') {
            return trx.into(getTableName(tableName)).insert({
              name,
              batch: batchNo,
              migration_time: new Date(),
            });
          }
          if (direction === 'down') {
            return trx.from(getTableName(tableName)).where({ name }).del();
          }
        });
    });
  }

  /**
   * Builds and runs the given migration class.
   */
  private runMigrationContent(Migration, direction, trx) {
    const instance = new Migration(trx);

    if (this.context.i18n) {
      instance.setI18n(this.context.i18n);
    }
    instance.setTenant(this.context.tenant);

    return instance[direction](trx);
  }

  /**
   * Validates a migration by requiring it and checking for `up` and `down` functions.
   * @param {MigrateItem} migration
   * @returns {Promise<MigrateItem>}
   */
  async validateMigrationStructure(migration: MigrateItem): Promise<MigrateItem> {
    const migrationName = this.migrationSource.getMigrationName(migration);

    // The migration content may be a promise.
    const migrationContent = await this.migrationSource.getMigration(migration);

    if (
      typeof migrationContent.up !== 'function' ||
      typeof migrationContent.down !== 'function'
    ) {
      throw new Error(
        `Invalid migration: ${migrationName} must have both an up and down function`
      );
    }
    return migration;
  }

  /**
   * Runs a batch of the current migrations, in sequence.
   * @param {MigrateItem[]} migrations
   * @param {string} direction
   * @param {Knex.Transaction} trx
   * @returns {Promise<void>}
   */
  private async runBatch(
    migrations: MigrateItem[],
    direction: string,
    trx: Knex.Transaction
  ): Promise<void> {
    // Adds the migration lock flag.
    await this.migrationLock(trx);

    // When there is a wrapping transaction, some migrations
    // could have been done while waiting for the lock:
    const completed = await listCompleted(
      this.config.tableName,
      this.config.schemaName,
      trx
    );
    // Differentiate between all and completed to get new migrations.
    const newMigrations = getNewMigrations(
      this.config.migrationSource,
      migrations,
      completed
    );
    // Retrieve the latest batch number.
    const batchNo = await this.latestBatchNumber(trx);

    // Increment the next batch number.
    const newBatchNo = direction === 'up' ? batchNo + 1 : batchNo;

    // Run all migration files in waterfall.
    await this.waterfallBatch(newBatchNo, newMigrations, direction, trx);

    // Free the migration lock flag.
    await this.freeLock(trx);
  }
}
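A run sketch, assuming a tenant-scoped knex instance whose `client.config.seeds` carries the seeder config read in the constructor; variable names are illustrative:

const seedMigration = new SeedMigration(tenantKnex, {
  tenant,           // ITenant of the organization being seeded
  i18n: tenantI18n, // optional translator injected into each seed
});
await seedMigration.latest(); // runs the pending seeds in a single batch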
11
packages/server/src/lib/Seeder/Seeder.ts
Normal file
11
packages/server/src/lib/Seeder/Seeder.ts
Normal file
@@ -0,0 +1,11 @@
export class Seeder {
  knex: any;

  constructor(knex) {
    this.knex = knex;
  }

  up(knex) {}

  down(knex) {}
}
44
packages/server/src/lib/Seeder/SeederConfig.ts
Normal file
44
packages/server/src/lib/Seeder/SeederConfig.ts
Normal file
@@ -0,0 +1,44 @@
import { DEFAULT_LOAD_EXTENSIONS, FsMigrations } from './FsMigrations';

const CONFIG_DEFAULT = Object.freeze({
  extension: 'js',
  loadExtensions: DEFAULT_LOAD_EXTENSIONS,
  tableName: 'knex_migrations',
  schemaName: null,
  directory: './migrations',
  disableTransactions: false,
  disableMigrationsListValidation: false,
  sortDirsSeparately: false,
});

export default function getMergedConfig(config, currentConfig) {
  // config is the user specified config, mergedConfig has defaults and current config
  // applied to it.
  const mergedConfig = {
    ...CONFIG_DEFAULT,
    ...(currentConfig || {}),
    ...config,
  };

  if (
    config &&
    // If user specifies any FS related config,
    // clear specified migrationSource to avoid ambiguity.
    (config.directory ||
      config.sortDirsSeparately !== undefined ||
      config.loadExtensions)
  ) {
    mergedConfig.migrationSource = null;
  }

  // If the user has not specified any configs, we need to
  // default to fs migrations to maintain compatibility.
  if (!mergedConfig.migrationSource) {
    mergedConfig.migrationSource = new FsMigrations(
      mergedConfig.directory,
      mergedConfig.sortDirsSeparately,
      mergedConfig.loadExtensions
    );
  }
  return mergedConfig;
}
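For example, merging a user config over the defaults; passing `directory` clears any inherited `migrationSource`, so a fresh FsMigrations is built from it (the directory path is illustrative):

const config = getMergedConfig({ directory: './src/database/seeds/core' }, undefined);
config.tableName;       // 'knex_migrations'
config.migrationSource; // FsMigrations over './src/database/seeds/core'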
43
packages/server/src/lib/Seeder/TableUtils.ts
Normal file
43
packages/server/src/lib/Seeder/TableUtils.ts
Normal file
@@ -0,0 +1,43 @@
/**
 * Gets a schema-aware query builder for a given table and schema name.
 * @param {Knex} trx -
 * @param {string} tableName -
 * @param {string} schemaName -
 * @returns {Knex.QueryBuilder}
 */
export function getTable(trx, tableName: string, schemaName = null) {
  return schemaName ? trx(tableName).withSchema(schemaName) : trx(tableName);
}

/**
 * Gets the schema-aware table name.
 * @param {string} tableName -
 * @param {string} schemaName -
 * @returns {string}
 */
export function getTableName(tableName: string, schemaName = null): string {
  return schemaName ? `${schemaName}.${tableName}` : tableName;
}

/**
 * Retrieves the lock table name of the given migration table name.
 * @param {string} tableName
 * @returns {string}
 */
export function getLockTableName(tableName: string): string {
  return `${tableName}_lock`;
}

/**
 * Retrieves the schema-aware lock table name of the given migration table name.
 * @param {string} tableName
 * @param {string} schemaName
 * @returns {string}
 */
export function getLockTableNameWithSchema(
  tableName: string,
  schemaName = null
): string {
  return schemaName
    ? `${schemaName}.${getLockTableName(tableName)}`
    : getLockTableName(tableName);
}
25
packages/server/src/lib/Seeder/TenantSeeder.ts
Normal file
25
packages/server/src/lib/Seeder/TenantSeeder.ts
Normal file
@@ -0,0 +1,25 @@
import { Seeder } from './Seeder';

export class TenantSeeder extends Seeder {
  public knex: any;
  public i18n: i18nAPI;
  public models: any;
  public tenant: any;

  constructor(knex) {
    super(knex);
    this.knex = knex;
  }

  setI18n(i18n) {
    this.i18n = i18n;
  }

  setModels(models) {
    this.models = models;
  }

  setTenant(tenant) {
    this.tenant = tenant;
  }
}
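A sketch of a concrete seed class in the shape SeedMigration expects: constructed with a transaction and exposing up/down. The class, table and label names are illustrative, and the `__` translation call assumes an i18n API that provides it:

export default class SeedAccounts extends TenantSeeder {
  up(knex) {
    // this.i18n and this.tenant are injected by SeedMigration before running.
    return knex('accounts').insert([{ name: this.i18n.__('Bank Account') }]);
  }

  down(knex) {
    return knex('accounts').del();
  }
}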
42
packages/server/src/lib/Seeder/Utils.ts
Normal file
42
packages/server/src/lib/Seeder/Utils.ts
Normal file
@@ -0,0 +1,42 @@
import fs from 'fs';
import { promisify } from 'util';

const readFile = promisify(fs.readFile);

/**
 * Determines whether the given file path belongs to an ES module.
 * @param {string} filepath
 * @returns {Promise<boolean>}
 */
async function isModuleType(filepath: string): Promise<boolean> {
  if (process.env.npm_package_json) {
    // npm >= 7.0.0
    const packageJson = JSON.parse(
      await readFile(process.env.npm_package_json, 'utf-8')
    );
    if (packageJson.type === 'module') {
      return true;
    }
  }
  return process.env.npm_package_type === 'module' || filepath.endsWith('.mjs');
}

/**
 * Imports the content of the given file path.
 * @param {string} filepath
 * @returns {Promise<any>}
 */
export async function importFile(filepath: string): Promise<any> {
  return (await isModuleType(filepath))
    ? import(require('url').pathToFileURL(filepath))
    : require(filepath);
}

/**
 * Imports the seed module of the given name from the bundled seeds directory.
 * @param {string} moduleName
 * @returns {Promise<any>}
 */
export async function importWebpackSeedModule(moduleName: string): Promise<any> {
  return import(`@/database/seeds/core/${moduleName}`);
}
12
packages/server/src/lib/Seeder/constants.ts
Normal file
12
packages/server/src/lib/Seeder/constants.ts
Normal file
@@ -0,0 +1,12 @@
// Default load extensions.
export const DEFAULT_LOAD_EXTENSIONS = [
  '.co',
  '.coffee',
  '.eg',
  '.iced',
  '.js',
  '.cjs',
  '.litcoffee',
  '.ls',
  '.ts',
];
20
packages/server/src/lib/Seeder/interfaces.ts
Normal file
20
packages/server/src/lib/Seeder/interfaces.ts
Normal file
@@ -0,0 +1,20 @@
import { ITenant } from 'interfaces';

export interface FsMigrations {}

export interface ISeederConfig {
  tableName: string;
  migrationSource: FsMigrations;
  schemaName?: string;
  loadExtensions: string[];
}

export interface MigrateItem {
  file: string;
  directory: string;
}

export interface SeedMigrationContext {
  i18n: i18nAPI;
  tenant: ITenant;
}
197
packages/server/src/lib/Transformer/Transformer.ts
Normal file
197
packages/server/src/lib/Transformer/Transformer.ts
Normal file
@@ -0,0 +1,197 @@
import moment from 'moment';
import * as R from 'ramda';
import { includes, isFunction, isObject, isUndefined, omit } from 'lodash';
import { formatNumber } from 'utils';

export class Transformer {
  public context: any;
  public options: Record<string, any>;

  /**
   * Included (virtual) attributes.
   * @returns {string[]}
   */
  public includeAttributes = (): string[] => {
    return [];
  };

  /**
   * Excluded attributes.
   * @returns {string[]}
   */
  public excludeAttributes = (): string[] => {
    return [];
  };

  /**
   * Determines whether to exclude all attributes except the included ones.
   * @returns {boolean}
   */
  public isExcludeAllAttributes = () => {
    return includes(this.excludeAttributes(), '*');
  };

  /**
   * Transforms the given object; the default is a pass-through.
   * @param object
   */
  transform = (object: any) => {
    return object;
  };

  /**
   * Applies the transformation to a single object or an array of objects.
   */
  public work = (object: any) => {
    if (Array.isArray(object)) {
      return object.map(this.getTransformation);
    } else if (isObject(object)) {
      return this.getTransformation(object);
    }
    return object;
  };

  /**
   * Transforms the given item to the desired output.
   * @param item
   * @returns
   */
  protected getTransformation = (item) => {
    const normalizedItem = this.normalizeModelItem(item);

    return R.compose(
      this.transform,
      R.when(this.hasExcludeAttributes, this.excludeAttributesTransformed),
      this.includeAttributesTransformed
    )(normalizedItem);
  };

  /**
   * Normalizes the given model item to a plain object.
   * @param item
   * @returns
   */
  protected normalizeModelItem = (item) => {
    return !isUndefined(item.toJSON) ? item.toJSON() : item;
  };

  /**
   * Excludes attributes from the given item.
   */
  protected excludeAttributesTransformed = (item) => {
    const exclude = this.excludeAttributes();

    return omit(item, exclude);
  };

  /**
   * Includes the virtual attributes.
   */
  protected getIncludeAttributesTransformed = (item) => {
    const attributes = this.includeAttributes();

    return attributes
      .filter(
        (attribute) =>
          isFunction(this[attribute]) || !isUndefined(item[attribute])
      )
      .reduce((acc, attribute: string) => {
        acc[attribute] = isFunction(this[attribute])
          ? this[attribute](item)
          : item[attribute];

        return acc;
      }, {});
  };

  /**
   * Merges the included virtual attributes with the item attributes.
   * @param item
   * @returns
   */
  protected includeAttributesTransformed = (item) => {
    const excludeAll = this.isExcludeAllAttributes();
    const virtualAttrs = this.getIncludeAttributesTransformed(item);

    return {
      ...(!excludeAll ? item : {}),
      ...virtualAttrs,
    };
  };

  /**
   * Determines whether the transformer has exclude attributes.
   * @returns
   */
  private hasExcludeAttributes = () => {
    return this.excludeAttributes().length > 0;
  };

  /**
   * Formats the given date.
   * @param date
   * @returns
   */
  protected formatDate(date) {
    return date ? moment(date).format('YYYY/MM/DD') : '';
  }

  /**
   * Formats the given number.
   * @param number
   * @returns
   */
  protected formatNumber(number) {
    return formatNumber(number, { money: false });
  }

  /**
   * Formats the given money amount in the organization base currency.
   * @param money
   * @param options
   * @returns
   */
  protected formatMoney(money, options?) {
    return formatNumber(money, {
      currencyCode: this.context.organization.baseCurrency,
      ...options,
    });
  }

  /**
   * Transforms the given object with another transformer.
   * @param obj
   * @param transformer
   * @param options
   */
  public item(
    obj: Record<string, any>,
    transformer: Transformer,
    options?: any
  ) {
    transformer.setOptions(options);
    transformer.setContext(this.context);

    return transformer.work(obj);
  }

  /**
   * Sets custom options to the transformer.
   * @param options
   * @returns {Transformer}
   */
  public setOptions(options) {
    this.options = options;
    return this;
  }

  /**
   * Sets the application context to the transformer.
   * @param context
   * @returns {Transformer}
   */
  public setContext(context) {
    this.context = context;
    return this;
  }
}
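A sketch of a concrete transformer (field names illustrative): it exposes a virtual `formattedAmount` attribute and hides the raw `amount` column; the virtual is resolved because the method name matches the included attribute, and the tenant context used by formatMoney is injected by the TransformerInjectable service defined next:

class ExpenseTransformer extends Transformer {
  public includeAttributes = (): string[] => ['formattedAmount'];
  public excludeAttributes = (): string[] => ['amount'];

  protected formattedAmount = (expense) => {
    return this.formatMoney(expense.amount);
  };
}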
49
packages/server/src/lib/Transformer/TransformerInjectable.ts
Normal file
49
packages/server/src/lib/Transformer/TransformerInjectable.ts
Normal file
@@ -0,0 +1,49 @@
import { Service, Inject } from 'typedi';
import { isNull } from 'lodash';
import HasTenancyService from '@/services/Tenancy/TenancyService';
import { TenantMetadata } from '@/system/models';
import { Transformer } from './Transformer';

@Service()
export class TransformerInjectable {
  @Inject()
  private tenancy: HasTenancyService;

  /**
   * Retrieves the application context of all tenant transformers.
   * @param {number} tenantId
   * @returns {Promise<{ organization, i18n }>}
   */
  async getApplicationContext(tenantId: number) {
    const i18n = this.tenancy.i18n(tenantId);
    const organization = await TenantMetadata.query().findOne({ tenantId });

    return {
      organization,
      i18n,
    };
  }

  /**
   * Transforms the given object after injecting the tenant context into the transformer.
   * @param {number} tenantId
   * @param {Record<string, any> | Record<string, any>[]} object
   * @param {Transformer} transformer
   * @param {Record<string, any>} options
   * @returns {Record<string, any>}
   */
  async transform(
    tenantId: number,
    object: Record<string, any> | Record<string, any>[],
    transformer: Transformer,
    options?: Record<string, any>
  ) {
    if (!isNull(tenantId)) {
      const context = await this.getApplicationContext(tenantId);
      transformer.setContext(context);
    }
    transformer.setOptions(options);

    return transformer.work(object);
  }
}
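A typical service-side usage sketch (service and transformer names illustrative, assuming typedi's Service/Inject as imported above), reusing the ExpenseTransformer sketched earlier:

@Service()
class ExpensesService {
  @Inject()
  private transformer: TransformerInjectable;

  async getExpenses(tenantId: number, expenses) {
    return this.transformer.transform(tenantId, expenses, new ExpenseTransformer());
  }
}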
@@ -0,0 +1,44 @@
import DynamicFilterRoleAbstructor from '@/lib/DynamicFilter/DynamicFilterRoleAbstructor';
import {
  validateViewRoles,
  buildFilterQuery,
} from '@/lib/ViewRolesBuilder';

export default class ViewRolesDynamicFilter extends DynamicFilterRoleAbstructor {
  /**
   * Constructor method.
   * @param {*} filterRoles -
   * @param {*} logicExpression -
   */
  constructor(filterRoles, logicExpression) {
    super();

    this.filterRoles = filterRoles;
    this.logicExpression = logicExpression;

    this.tableName = '';
  }

  /**
   * Retrieves the logic expression.
   */
  buildLogicExpression() {
    return this.logicExpression;
  }

  /**
   * Validates the filter roles.
   */
  validateFilterRoles() {
    return validateViewRoles(this.filterRoles, this.logicExpression);
  }

  /**
   * Builds the database query of the view roles.
   */
  buildQuery() {
    return (builder) => {
      buildFilterQuery(this.tableName, this.filterRoles, this.logicExpression)(builder);
    };
  }
}
129
packages/server/src/lib/ViewRolesBuilder/index.ts
Normal file
129
packages/server/src/lib/ViewRolesBuilder/index.ts
Normal file
@@ -0,0 +1,129 @@
import { difference } from 'lodash';

import { IFilterRole, IModel } from '@/interfaces';

/**
 * Gets the field column metadata and its relation with other tables.
 * @param {IModel} model - Model of the target column.
 * @param {String} fieldKey - Target column key that is stored in the resource field.
 */
export function getRoleFieldColumn(model: IModel, fieldKey: string) {
  const tableFields = model.fields;
  return tableFields[fieldKey] ? tableFields[fieldKey] : null;
}

/**
 * Builds the join clause of the given sort column when it targets a related table.
 */
export function buildSortColumnJoin(model: IModel, sortColumnKey: string) {
  return (builder) => {
    const fieldColumn = getRoleFieldColumn(model, sortColumnKey);

    if (fieldColumn.relation) {
      const joinTable = getTableFromRelationColumn(fieldColumn.relation);
      builder.join(
        joinTable,
        `${model.tableName}.${fieldColumn.column}`,
        '=',
        fieldColumn.relation
      );
    }
  };
}

/**
 * Maps the view roles to view conditionals.
 * @param {Array} viewRoles -
 * @return {Array}
 */
export function mapViewRolesToConditionals(viewRoles) {
  return viewRoles.map((viewRole) => ({
    comparator: viewRole.comparator,
    value: viewRole.value,
    index: viewRole.index,

    columnKey: viewRole.field.key,
    slug: viewRole.field.slug,
  }));
}

export function mapFilterRolesToDynamicFilter(roles) {
  return roles.map((role) => ({
    ...role,
    columnKey: role.fieldKey,
  }));
}

/**
 * Builds the sort column query.
 * @param {IModel} model -
 * @param {String} columnKey -
 * @param {String} sortDirection -
 */
export function buildSortColumnQuery(
  model: IModel,
  columnKey: string,
  sortDirection: string
) {
  const fieldRelation = getRoleFieldColumn(model, columnKey);
  const sortColumn =
    fieldRelation.relation || `${model.tableName}.${fieldRelation.column}`;

  return (builder) => {
    builder.orderBy(sortColumn, sortDirection);
    buildSortColumnJoin(model, columnKey)(builder);
  };
}

/**
 * Validates that every index referenced in the logic expression exists in the given indexes.
 */
export function validateFilterLogicExpression(
  logicExpression: string,
  indexes
) {
  const logicExpIndexes = logicExpression.match(/\d+/g) || [];
  const diff = difference(logicExpIndexes.map(Number), indexes);

  return diff.length === 0;
}

/**
 * Validates the roles logic expression against the given filter roles.
 */
export function validateRolesLogicExpression(
  logicExpression: string,
  roles: IFilterRole[]
) {
  return validateFilterLogicExpression(
    logicExpression,
    roles.map((r) => r.index)
  );
}

export function validateFieldKeyExistance(model: any, fieldKey: string) {
  return model?.fields?.[fieldKey] || false;
}

/**
 * Retrieves the model fields keys, sorted alphabetically.
 * @param {IModel} Model
 * @return {string[]}
 */
export function getModelFieldsKeys(Model: IModel) {
  const fields = Object.keys(Model.fields);

  return fields.sort((a, b) => {
    if (a < b) {
      return -1;
    }
    if (a > b) {
      return 1;
    }
    return 0;
  });
}

/**
 * Retrieves the model fields with their keys.
 */
export function getModelFields(Model: IModel) {
  const fieldsKey = getModelFieldsKeys(Model);

  return fieldsKey.map((fieldKey) => {
    const field = Model.fields[fieldKey];
    return {
      ...field,
      key: fieldKey,
    };
  });
}
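For example, the logic expression references filter roles by index, and validation checks that every referenced index exists among the given roles:

validateFilterLogicExpression('1 && (2 || 3)', [1, 2, 3]); // true
validateFilterLogicExpression('1 && 4', [1, 2, 3]);        // false (index 4 has no role)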