Compare commits

...

5 Commits

Author SHA1 Message Date
Beto Dealmeida
32563ffb1d Add coverage 2026-05-02 00:05:49 -04:00
Beto Dealmeida
f79c7aca9d Add license 2026-05-01 23:24:44 -04:00
Beto Dealmeida
80cf2648f2 Fix lint 2026-05-01 22:58:12 -04:00
Beto Dealmeida
230c903e6b Fix lint 2026-05-01 19:28:31 -04:00
Beto Dealmeida
229917b9b0 feat: nodejs sidecar 2026-05-01 19:16:27 -04:00
34 changed files with 3435 additions and 73 deletions

View File

@@ -104,6 +104,8 @@ services:
depends_on:
superset-init:
condition: service_completed_successfully
query-context-sidecar:
condition: service_started
volumes: *superset-volumes
superset-websocket:
@@ -138,6 +140,19 @@ services:
- REDIS_PORT=6379
- REDIS_SSL=false
query-context-sidecar:
build:
context: .
dockerfile: query-context-sidecar/Dockerfile
restart: unless-stopped
ports:
- "127.0.0.1:${QUERY_CONTEXT_SIDECAR_PORT:-3030}:3030"
environment:
- PORT=3030
- QUERY_CONTEXT_MAX_BODY_BYTES=10485760
depends_on:
- superset-node
superset-init:
build:
<<: *common-build
@@ -152,6 +167,8 @@ services:
condition: service_started
redis:
condition: service_started
query-context-sidecar:
condition: service_started
user: *superset-user
volumes: *superset-volumes
healthcheck:

View File

@@ -26,6 +26,7 @@ DEV_MODE=true
# SUPERSET_PORT=8088
# NODE_PORT=9000
# WEBSOCKET_PORT=8080
# QUERY_CONTEXT_SIDECAR_PORT=3030
# CYPRESS_PORT=8081
# DATABASE_PORT=5432
# REDIS_PORT=6379
@@ -74,6 +75,7 @@ SUPERSET_LOAD_EXAMPLES=yes
CYPRESS_CONFIG=false
SUPERSET_PORT=8088
MAPBOX_API_KEY=''
QUERY_CONTEXT_SIDECAR_URL=http://query-context-sidecar:3030
# Make sure you set this to a unique secure random value on production
SUPERSET_SECRET_KEY=TEST_NON_DEV_SECRET

2
query-context-sidecar/.gitignore vendored Normal file
View File

@@ -0,0 +1,2 @@
node_modules/
dist/

View File

@@ -0,0 +1,55 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# Stage 1: Install superset-frontend dependencies
FROM node:20-alpine AS deps
WORKDIR /app
# Copy full superset-frontend tree so workspace dependency resolution stays consistent
COPY superset-frontend/ ./superset-frontend/
WORKDIR /app/superset-frontend
# --ignore-scripts skips postinstall hooks; only the node_modules contents are needed here
RUN npm ci --ignore-scripts
# Stage 2: Build the webpack bundle
FROM node:20-alpine AS builder
WORKDIR /app
# Copy installed node_modules from deps stage
COPY --from=deps /app/superset-frontend/node_modules ./superset-frontend/node_modules
# Copy superset-frontend source
# NOTE(review): assumes node_modules is excluded by .dockerignore, otherwise this
# COPY would overlay the tree installed above — confirm.
COPY superset-frontend/ ./superset-frontend/
# Copy sidecar source and config
COPY query-context-sidecar/package.json query-context-sidecar/package-lock.json* ./query-context-sidecar/
COPY query-context-sidecar/webpack.config.js query-context-sidecar/tsconfig.json ./query-context-sidecar/
COPY query-context-sidecar/src/ ./query-context-sidecar/src/
WORKDIR /app/query-context-sidecar
RUN npm ci
# Produces dist/index.js ("build" script runs webpack --mode production)
RUN npm run build
# Stage 3: Minimal runtime
FROM node:20-alpine
ENV NODE_ENV=production
WORKDIR /app
# Only the bundled output is needed at runtime
COPY --from=builder /app/query-context-sidecar/dist ./dist
# Run as the unprivileged 'node' user provided by the base image
USER node
CMD ["node", "dist/index.js"]

2130
query-context-sidecar/package-lock.json generated Normal file

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,21 @@
{
"name": "query-context-sidecar",
"version": "1.0.0",
"description": "Node.js sidecar that converts form_data to query_context using Superset frontend buildQuery functions",
"private": true,
"scripts": {
"build": "webpack --mode production",
"build:dev": "webpack --mode development",
"start": "node dist/index.js",
"dev": "webpack --mode development --watch"
},
"devDependencies": {
"css-loader": "^6.8.1",
"null-loader": "^4.0.1",
"style-loader": "^3.3.3",
"ts-loader": "^9.5.1",
"typescript": "^5.3.3",
"webpack": "^5.89.0",
"webpack-cli": "^5.1.4"
}
}

View File

@@ -0,0 +1,55 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import { QueryFormData } from '@superset-ui/core';
import { getBuildQuery } from '../runtimeRegistry';
/**
 * Build the query context for a cartodiagram chart by delegating to the
 * buildQuery of the chart embedded in `selected_chart`, after prepending the
 * geometry column to groupby and merging extra_form_data.
 *
 * Throws an Error (surfaced by the server's 500 handler) when selected_chart
 * is missing/malformed or the embedded viz_type has no registered buildQuery.
 */
export default function buildCartodiagramQuery(formData: QueryFormData) {
  const {
    selected_chart: selectedChartString,
    geom_column: geometryColumn,
    extra_form_data: extraFormData,
  } = formData as QueryFormData & {
    selected_chart: string;
    geom_column: string;
    extra_form_data?: Record<string, unknown>;
  };
  if (!selectedChartString) {
    throw new Error('cartodiagram form_data is missing selected_chart');
  }
  let selectedChart: { viz_type?: unknown; params?: unknown };
  try {
    selectedChart = JSON.parse(selectedChartString);
  } catch {
    throw new Error('selected_chart is not valid JSON');
  }
  if (!selectedChart || typeof selectedChart !== 'object') {
    throw new Error('selected_chart must be a JSON object');
  }
  const vizType = selectedChart.viz_type as string;
  let chartFormData: Record<string, unknown>;
  try {
    chartFormData = JSON.parse(selectedChart.params as string) as Record<
      string,
      unknown
    >;
  } catch {
    throw new Error('selected_chart.params is not valid JSON');
  }
  // Merge the outer extra_form_data over the embedded chart's own
  // (outer values win on key collisions — spread order below).
  chartFormData.extra_form_data = {
    ...(chartFormData.extra_form_data as Record<string, unknown>),
    ...(extraFormData || {}),
  };
  // Prepend the geometry column so the embedded chart groups by location first.
  const groupby = Array.isArray(chartFormData.groupby)
    ? (chartFormData.groupby as string[])
    : [];
  chartFormData.groupby = [geometryColumn, ...groupby];
  const buildQuery = getBuildQuery(vizType);
  if (!buildQuery) {
    throw new Error(`Unsupported selected chart viz_type: ${vizType}`);
  }
  return buildQuery(chartFormData);
}

View File

@@ -0,0 +1,26 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import './polyfills';
import { registerAllBuildQueries } from './registry';
import { startServer } from './server';
// Populate the viz_type -> buildQuery registry before serving any requests,
// then start the HTTP server.
registerAllBuildQueries();
startServer();

View File

@@ -0,0 +1,87 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/**
 * Minimal browser-global shims so Superset frontend modules can be loaded
 * under Node. Only globals that some imported module touches at load time
 * are stubbed; each is installed only when not already defined.
 */
const globalRef = globalThis as any;

/** Install `value` under the global `key` only when it is not already defined. */
const shim = (key: string, value: unknown) => {
  if (typeof globalRef[key] === 'undefined') {
    globalRef[key] = value;
  }
};

const noop = () => {};

shim('window', globalRef);
// Superset plugins read feature flags from window; default to none enabled.
globalRef.window.featureFlags = {};

shim('document', {
  getElementById: () => null,
  createElement: () => ({
    setAttribute: noop,
    style: {},
    appendChild: noop,
  }),
  createTextNode: () => ({}),
  head: { appendChild: noop },
  body: { appendChild: noop },
  addEventListener: noop,
  removeEventListener: noop,
  querySelectorAll: () => [],
  querySelector: () => null,
});
shim('navigator', {
  userAgent: 'node.js',
  language: 'en',
});
shim('HTMLElement', class HTMLElement {});
shim('location', {
  href: '',
  origin: '',
  protocol: 'http:',
  host: 'localhost',
  hostname: 'localhost',
  port: '',
  pathname: '/',
  search: '',
  hash: '',
});
shim('getComputedStyle', () => ({}));
shim('requestAnimationFrame', (cb: () => void) => setTimeout(cb, 0));
shim('matchMedia', () => ({
  matches: false,
  addListener: noop,
  removeListener: noop,
  addEventListener: noop,
  removeEventListener: noop,
}));

View File

@@ -0,0 +1,114 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import bigNumberBuildQuery from '@superset-ui/plugin-chart-echarts/BigNumber/BigNumberWithTrendline/buildQuery';
import bigNumberPoPBuildQuery from '@superset-ui/plugin-chart-echarts/BigNumber/BigNumberPeriodOverPeriod/buildQuery';
import bigNumberTotalBuildQuery from '@superset-ui/plugin-chart-echarts/BigNumber/BigNumberTotal/buildQuery';
import boxPlotBuildQuery from '@superset-ui/plugin-chart-echarts/BoxPlot/buildQuery';
import bubbleBuildQuery from '@superset-ui/plugin-chart-echarts/Bubble/buildQuery';
import funnelBuildQuery from '@superset-ui/plugin-chart-echarts/Funnel/buildQuery';
import ganttBuildQuery from '@superset-ui/plugin-chart-echarts/Gantt/buildQuery';
import gaugeBuildQuery from '@superset-ui/plugin-chart-echarts/Gauge/buildQuery';
import graphBuildQuery from '@superset-ui/plugin-chart-echarts/Graph/buildQuery';
import heatmapBuildQuery from '@superset-ui/plugin-chart-echarts/Heatmap/buildQuery';
import histogramBuildQuery from '@superset-ui/plugin-chart-echarts/Histogram/buildQuery';
import mixedTimeseriesBuildQuery from '@superset-ui/plugin-chart-echarts/MixedTimeseries/buildQuery';
import pieBuildQuery from '@superset-ui/plugin-chart-echarts/Pie/buildQuery';
import radarBuildQuery from '@superset-ui/plugin-chart-echarts/Radar/buildQuery';
import sankeyBuildQuery from '@superset-ui/plugin-chart-echarts/Sankey/buildQuery';
import sunburstBuildQuery from '@superset-ui/plugin-chart-echarts/Sunburst/buildQuery';
import timeseriesBuildQuery from '@superset-ui/plugin-chart-echarts/Timeseries/buildQuery';
import treeBuildQuery from '@superset-ui/plugin-chart-echarts/Tree/buildQuery';
import treemapBuildQuery from '@superset-ui/plugin-chart-echarts/Treemap/buildQuery';
import waterfallBuildQuery from '@superset-ui/plugin-chart-echarts/Waterfall/buildQuery';
import handlebarsBuildQuery from '@superset-ui/plugin-chart-handlebars/plugin/buildQuery';
import pivotTableBuildQuery from '@superset-ui/plugin-chart-pivot-table/plugin/buildQuery';
import wordCloudBuildQuery from '@superset-ui/plugin-chart-word-cloud/plugin/buildQuery';
import tableBuildQuery from '@superset-ui/plugin-chart-table/buildQuery';
import agGridTableBuildQuery from '@superset-ui/plugin-chart-ag-grid-table/buildQuery';
import pointClusterMapBuildQuery from '@superset-ui/plugin-chart-point-cluster-map/buildQuery';
import deckArcBuildQuery from '@superset-ui/preset-chart-deckgl/layers/Arc/buildQuery';
import deckContourBuildQuery from '@superset-ui/preset-chart-deckgl/layers/Contour/buildQuery';
import deckGridBuildQuery from '@superset-ui/preset-chart-deckgl/layers/Grid/buildQuery';
import deckHeatmapBuildQuery from '@superset-ui/preset-chart-deckgl/layers/Heatmap/buildQuery';
import deckHexBuildQuery from '@superset-ui/preset-chart-deckgl/layers/Hex/buildQuery';
import deckPathBuildQuery from '@superset-ui/preset-chart-deckgl/layers/Path/buildQuery';
import deckPolygonBuildQuery from '@superset-ui/preset-chart-deckgl/layers/Polygon/buildQuery';
import deckScatterBuildQuery from '@superset-ui/preset-chart-deckgl/layers/Scatter/buildQuery';
import deckScreengridBuildQuery from '@superset-ui/preset-chart-deckgl/layers/Screengrid/buildQuery';
import filterRangeBuildQuery from 'src/filters/components/Range/buildQuery';
import filterSelectBuildQuery from 'src/filters/components/Select/buildQuery';
import filterTimeColumnBuildQuery from 'src/filters/components/TimeColumn/buildQuery';
import filterTimeGrainBuildQuery from 'src/filters/components/TimeGrain/buildQuery';
import cartodiagramBuildQuery from './buildQuery/cartodiagram';
import { registerBuildQuery } from './runtimeRegistry';
/**
 * Register every supported viz_type with its buildQuery implementation.
 * All echarts timeseries variants share the generic timeseries buildQuery.
 * Insertion order matches the historical one-call-per-line registration.
 */
export function registerAllBuildQueries(): void {
  const buildQueries: Record<string, unknown> = {
    big_number: bigNumberBuildQuery,
    big_number_total: bigNumberTotalBuildQuery,
    pop_kpi: bigNumberPoPBuildQuery,
    box_plot: boxPlotBuildQuery,
    bubble_v2: bubbleBuildQuery,
    funnel: funnelBuildQuery,
    gantt_chart: ganttBuildQuery,
    gauge_chart: gaugeBuildQuery,
    graph_chart: graphBuildQuery,
    heatmap_v2: heatmapBuildQuery,
    histogram_v2: histogramBuildQuery,
    mixed_timeseries: mixedTimeseriesBuildQuery,
    pie: pieBuildQuery,
    radar: radarBuildQuery,
    sankey_v2: sankeyBuildQuery,
    sunburst_v2: sunburstBuildQuery,
    tree_chart: treeBuildQuery,
    treemap_v2: treemapBuildQuery,
    waterfall: waterfallBuildQuery,
    echarts_timeseries: timeseriesBuildQuery,
    echarts_area: timeseriesBuildQuery,
    echarts_timeseries_bar: timeseriesBuildQuery,
    echarts_timeseries_line: timeseriesBuildQuery,
    echarts_timeseries_smooth: timeseriesBuildQuery,
    echarts_timeseries_scatter: timeseriesBuildQuery,
    echarts_timeseries_step: timeseriesBuildQuery,
    pivot_table_v2: pivotTableBuildQuery,
    table: tableBuildQuery,
    'ag-grid-table': agGridTableBuildQuery,
    point_cluster: pointClusterMapBuildQuery,
    handlebars: handlebarsBuildQuery,
    word_cloud: wordCloudBuildQuery,
    cartodiagram: cartodiagramBuildQuery,
    deck_arc: deckArcBuildQuery,
    deck_contour: deckContourBuildQuery,
    deck_grid: deckGridBuildQuery,
    deck_heatmap: deckHeatmapBuildQuery,
    deck_hex: deckHexBuildQuery,
    deck_path: deckPathBuildQuery,
    deck_polygon: deckPolygonBuildQuery,
    deck_scatter: deckScatterBuildQuery,
    deck_screengrid: deckScreengridBuildQuery,
    filter_select: filterSelectBuildQuery,
    filter_range: filterRangeBuildQuery,
    filter_timecolumn: filterTimeColumnBuildQuery,
    filter_timegrain: filterTimeGrainBuildQuery,
  };
  Object.entries(buildQueries).forEach(([vizType, fn]) => {
    registerBuildQuery(vizType, fn as any);
  });
}

View File

@@ -0,0 +1,34 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/** Signature of a chart buildQuery function, keyed by viz_type. */
export type BuildQueryFn = (formData: Record<string, unknown>) => unknown;

// viz_type -> buildQuery lookup table shared across the sidecar.
const buildQueryByVizType = new Map<string, BuildQueryFn>();

/** Associate a viz_type with its buildQuery implementation (last write wins). */
export function registerBuildQuery(vizType: string, fn: BuildQueryFn): void {
  buildQueryByVizType.set(vizType, fn);
}

/** Look up the buildQuery for a viz_type; undefined when unregistered. */
export function getBuildQuery(vizType: string): BuildQueryFn | undefined {
  return buildQueryByVizType.get(vizType);
}

/** All registered viz_types, sorted alphabetically. */
export function listVizTypes(): string[] {
  return [...buildQueryByVizType.keys()].sort();
}

View File

@@ -0,0 +1,28 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import { getBuildQuery } from './runtimeRegistry';
/**
 * Adapter exposing the sidecar's runtime registry through the same
 * `get(vizType)` shape as Superset's chart buildQuery registry singleton.
 */
export default function getChartBuildQueryRegistry() {
  return {
    get: (vizType: string) => getBuildQuery(vizType),
  };
}

View File

@@ -0,0 +1,166 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import http from 'http';
import { URL } from 'url';
import buildQueryContext from './stubs/buildQueryContext';
import { getBuildQuery, listVizTypes } from './runtimeRegistry';
// HTTP port the sidecar binds to; falls back to 3030 when the PORT env var
// is unset or not a number (parseInt would otherwise yield NaN and
// server.listen(NaN) would fail at startup).
const PORT = (() => {
  const parsed = parseInt(process.env.PORT || '3030', 10);
  return Number.isNaN(parsed) ? 3030 : parsed;
})();

// Upper bound on accepted request-body size, in bytes (default 10 MiB);
// malformed env values fall back to the default instead of NaN (which would
// make every size comparison false and disable the limit).
const DEFAULT_MAX_BODY_BYTES = 10 * 1024 * 1024;
const MAX_BODY_BYTES = (() => {
  const parsed = parseInt(
    process.env.QUERY_CONTEXT_MAX_BODY_BYTES || `${DEFAULT_MAX_BODY_BYTES}`,
    10,
  );
  return Number.isNaN(parsed) ? DEFAULT_MAX_BODY_BYTES : parsed;
})();

// Optional comma-separated Origin allow-list; an empty set means all origins
// are accepted (see isAllowedOrigin).
const ALLOWED_ORIGINS = new Set(
  (process.env.QUERY_CONTEXT_ALLOWED_ORIGINS || '')
    .split(',')
    .map(origin => origin.trim())
    .filter(Boolean),
);
/** Error carrying an HTTP status code so handlers can map failures to responses. */
class HttpRequestError extends Error {
  constructor(
    public statusCode: number,
    message: string,
  ) {
    super(message);
  }
}
/**
 * Buffer the entire request body and resolve it as a UTF-8 string.
 * Once the cumulative size exceeds MAX_BODY_BYTES the socket is destroyed
 * and the promise rejects with HttpRequestError(413).
 */
function readBody(req: http.IncomingMessage): Promise<string> {
  return new Promise((resolve, reject) => {
    const pieces: Buffer[] = [];
    let received = 0;
    req.on('data', (piece: Buffer) => {
      received += piece.length;
      if (received > MAX_BODY_BYTES) {
        // Stop reading immediately; 'end' will not fire after destroy.
        req.destroy();
        reject(new HttpRequestError(413, 'Request body too large'));
      } else {
        pieces.push(piece);
      }
    });
    req.on('end', () => {
      resolve(Buffer.concat(pieces).toString());
    });
    req.on('error', reject);
  });
}
/**
 * Decide whether a request Origin may use the API. Requests without an
 * Origin header, and deployments with no configured allow-list, are always
 * accepted; otherwise the origin must be in ALLOWED_ORIGINS.
 */
function isAllowedOrigin(origin?: string): boolean {
  if (origin && ALLOWED_ORIGINS.size > 0) {
    return ALLOWED_ORIGINS.has(origin);
  }
  return true;
}
/** Serialize `data` as JSON and finish the response with the given status. */
function jsonResponse(res: http.ServerResponse, status: number, data: unknown): void {
  const payload = JSON.stringify(data);
  res.writeHead(status, { 'Content-Type': 'application/json' });
  res.end(payload);
}
/**
 * POST /api/v1/build-query-context
 * Converts a chart form_data payload into a query_context, using the
 * registered buildQuery for the viz_type when available and falling back to
 * the generic buildQueryContext otherwise.
 *
 * Responds 403 on disallowed Origin, 413 on oversized body, 400 on
 * malformed/missing input, 500 when the buildQuery itself throws.
 */
async function handleBuildQueryContext(
  req: http.IncomingMessage,
  res: http.ServerResponse,
): Promise<void> {
  if (!isAllowedOrigin(req.headers.origin)) {
    jsonResponse(res, 403, { error: 'Origin not allowed' });
    return;
  }
  let body: string;
  try {
    body = await readBody(req);
  } catch (err: any) {
    if (err instanceof HttpRequestError) {
      jsonResponse(res, err.statusCode, { error: err.message });
      return;
    }
    throw err;
  }
  let parsed: any;
  try {
    parsed = JSON.parse(body);
  } catch {
    jsonResponse(res, 400, { error: 'Invalid JSON body' });
    return;
  }
  // JSON.parse accepts top-level null and scalars; reject anything that is
  // not an object so the form_data access below cannot throw a TypeError
  // (which would previously surface as a generic 500).
  if (parsed === null || typeof parsed !== 'object') {
    jsonResponse(res, 400, { error: 'Request body must be a JSON object' });
    return;
  }
  const formData = parsed.form_data;
  if (!formData || !formData.viz_type) {
    jsonResponse(res, 400, {
      error: 'Missing form_data or form_data.viz_type',
    });
    return;
  }
  try {
    const buildQuery = getBuildQuery(formData.viz_type);
    const queryContext = buildQuery
      ? buildQuery(formData)
      : buildQueryContext(formData);
    jsonResponse(res, 200, { query_context: queryContext });
  } catch (err: any) {
    console.error('Error building query context for %s:', formData.viz_type, err);
    jsonResponse(res, 500, {
      error: `Failed to build query context: ${err.message}`,
    });
  }
}
function handleVizTypes(res: http.ServerResponse): void {
const vizTypes = listVizTypes();
jsonResponse(res, 200, { viz_types: vizTypes, count: vizTypes.length });
}
/** GET/HEAD /health — plain-text liveness probe. */
function handleHealth(res: http.ServerResponse): void {
  const headers = { 'Content-Type': 'text/plain' };
  res.writeHead(200, headers);
  res.end('OK');
}
/**
 * Start the sidecar HTTP server and route requests:
 *   GET/HEAD /health                      -> liveness probe
 *   GET      /api/v1/viz-types            -> registered viz_types
 *   POST     /api/v1/build-query-context  -> form_data -> query_context
 * Unknown routes receive a JSON 404; unexpected handler errors a JSON 500.
 */
export function startServer(): void {
  const server = http.createServer(async (req, res) => {
    const url = req.url ? new URL(req.url, `http://localhost:${PORT}`).pathname : '';
    const method = req.method || '';
    try {
      if (url === '/health' && (method === 'GET' || method === 'HEAD')) {
        handleHealth(res);
      } else if (url === '/api/v1/viz-types' && method === 'GET') {
        handleVizTypes(res);
      } else if (url === '/api/v1/build-query-context' && method === 'POST') {
        await handleBuildQueryContext(req, res);
      } else {
        jsonResponse(res, 404, { error: 'Not found' });
      }
    } catch (err) {
      console.error('Unhandled error:', err);
      // Only attempt a JSON 500 when nothing has been written yet: calling
      // writeHead after headers were sent would itself throw, turning this
      // recovery path into an unhandled rejection inside the async handler.
      if (!res.headersSent) {
        jsonResponse(res, 500, { error: 'Internal server error' });
      } else {
        res.end();
      }
    }
  });
  server.listen(PORT, () => {
    console.log(`Query context sidecar listening on port ${PORT}`);
  });
}

View File

@@ -0,0 +1,68 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import buildQueryObject from '@superset-ui/core/query/buildQueryObject';
import DatasourceKey from '@superset-ui/core/query/DatasourceKey';
import { normalizeTimeColumn } from '@superset-ui/core/query/normalizeTimeColumn';
import { isXAxisSet } from '@superset-ui/core/query/getXAxis';
import {
QueryFieldAliases,
QueryFormData,
} from '@superset-ui/core/query/types/QueryFormData';
import { QueryContext, QueryObject } from '@superset-ui/core/query/types/Query';
// Default expansion: a single base query object becomes a one-element array.
const WRAP_IN_ARRAY = (baseQueryObject: QueryObject) => [baseQueryObject];

type BuildFinalQueryObjects = (baseQueryObject: QueryObject) => QueryObject[];

/**
 * Node-side stand-in for @superset-ui/core's buildQueryContext.
 *
 * Accepts either an options object ({ buildQuery, queryFields }) or a bare
 * buildQuery function. Produces the base query object from form_data, lets
 * buildQuery expand it into one or more queries, drops falsy post_processing
 * entries, normalizes the time column when an X axis is set, and wraps the
 * result in the QueryContext envelope.
 */
export default function buildQueryContext(
  formData: QueryFormData,
  options?:
    | {
        buildQuery?: BuildFinalQueryObjects;
        queryFields?: QueryFieldAliases;
      }
    | BuildFinalQueryObjects,
): QueryContext {
  let buildQuery: BuildFinalQueryObjects = WRAP_IN_ARRAY;
  let queryFields: QueryFieldAliases | undefined;
  if (typeof options === 'function') {
    buildQuery = options;
    queryFields = {};
  } else if (options) {
    queryFields = options.queryFields;
    if (options.buildQuery !== undefined) {
      buildQuery = options.buildQuery;
    }
  }
  let queries = buildQuery(buildQueryObject(formData, queryFields));
  for (const query of queries) {
    if (Array.isArray(query.post_processing)) {
      query.post_processing = query.post_processing.filter(Boolean);
    }
  }
  if (isXAxisSet(formData)) {
    queries = queries.map(query => normalizeTimeColumn(formData, query));
  }
  return {
    datasource: new DatasourceKey(formData.datasource).toObject(),
    force: formData.force || false,
    queries,
    form_data: formData,
    result_format: formData.result_format || 'json',
    result_type: formData.result_type || 'full',
  };
}

View File

@@ -0,0 +1,20 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
// Empty placeholder module — presumably substituted for browser-only imports
// by the sidecar bundler config; confirm against webpack.config.js aliases.
export default {};

View File

@@ -0,0 +1,35 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
// Re-export the individual chart-controls operator modules one by one.
// NOTE(review): this file appears to stand in for the chart-controls
// operators barrel so the sidecar bundle pulls only query-building code —
// confirm against the webpack alias configuration.
export { aggregationOperator } from '@superset-ui/chart-controls/operators/aggregateOperator';
export { boxplotOperator } from '@superset-ui/chart-controls/operators/boxplotOperator';
export { contributionOperator } from '@superset-ui/chart-controls/operators/contributionOperator';
export { flattenOperator } from '@superset-ui/chart-controls/operators/flattenOperator';
export { histogramOperator } from '@superset-ui/chart-controls/operators/histogramOperator';
export { pivotOperator } from '@superset-ui/chart-controls/operators/pivotOperator';
export { prophetOperator } from '@superset-ui/chart-controls/operators/prophetOperator';
export { rankOperator } from '@superset-ui/chart-controls/operators/rankOperator';
export { renameOperator } from '@superset-ui/chart-controls/operators/renameOperator';
export { resampleOperator } from '@superset-ui/chart-controls/operators/resampleOperator';
export { rollingWindowOperator } from '@superset-ui/chart-controls/operators/rollingWindowOperator';
export { sortOperator } from '@superset-ui/chart-controls/operators/sortOperator';
export { timeCompareOperator } from '@superset-ui/chart-controls/operators/timeCompareOperator';
export { timeComparePivotOperator } from '@superset-ui/chart-controls/operators/timeComparePivotOperator';
export { extractExtraMetrics } from '@superset-ui/chart-controls/operators/utils/extractExtraMetrics';
export { isTimeComparison } from '@superset-ui/chart-controls/operators/utils/isTimeComparison';

View File

@@ -0,0 +1,28 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
// Slimmed stand-in barrel exposing what buildQuery functions need: the
// Node-side buildQueryContext, a registry adapter backed by the sidecar's
// runtime registry, and the query/utils/validator/color submodules from the
// real package sources. NOTE(review): presumably aliased in place of the
// @superset-ui/core barrel — confirm against the webpack configuration.
export { default as buildQueryContext } from './buildQueryContext';
export { default as getChartBuildQueryRegistry } from '../runtimeRegistryAdapter';
export type { BuildQuery } from '@superset-ui/core/chart/registries/ChartBuildQueryRegistrySingleton';
export * from '@superset-ui/core/query';
export * from '@superset-ui/core/utils';
export * from '@superset-ui/core/validator';
export * from '@superset-ui/core/color';

View File

@@ -0,0 +1,35 @@
{
"compilerOptions": {
"target": "ES2019",
"module": "ESNext",
"moduleResolution": "node",
"esModuleInterop": true,
"allowSyntheticDefaultImports": true,
"strict": false,
"skipLibCheck": true,
"resolveJsonModule": true,
"jsx": "react",
"outDir": "dist",
"baseUrl": ".",
"paths": {
"@superset-ui/core": ["../superset-frontend/packages/superset-ui-core/src"],
"@superset-ui/core/*": ["../superset-frontend/packages/superset-ui-core/src/*"],
"@apache-superset/core": ["../superset-frontend/packages/superset-core/src"],
"@apache-superset/core/*": ["../superset-frontend/packages/superset-core/src/*"],
"@superset-ui/chart-controls": ["../superset-frontend/packages/superset-ui-chart-controls/src"],
"@superset-ui/plugin-chart-echarts/*": ["../superset-frontend/plugins/plugin-chart-echarts/src/*"],
"@superset-ui/plugin-chart-table/*": ["../superset-frontend/plugins/plugin-chart-table/src/*"],
"@superset-ui/plugin-chart-pivot-table/*": ["../superset-frontend/plugins/plugin-chart-pivot-table/src/*"],
"@superset-ui/plugin-chart-handlebars/*": ["../superset-frontend/plugins/plugin-chart-handlebars/src/*"],
"@superset-ui/plugin-chart-word-cloud/*": ["../superset-frontend/plugins/plugin-chart-word-cloud/src/*"],
"@superset-ui/plugin-chart-cartodiagram/*": ["../superset-frontend/plugins/plugin-chart-cartodiagram/src/*"],
"@superset-ui/plugin-chart-ag-grid-table/*": ["../superset-frontend/plugins/plugin-chart-ag-grid-table/src/*"],
"@superset-ui/plugin-chart-point-cluster-map/*": ["../superset-frontend/plugins/plugin-chart-point-cluster-map/src/*"],
"@superset-ui/preset-chart-deckgl/*": ["../superset-frontend/plugins/preset-chart-deckgl/src/*"],
"@superset-ui/legacy-preset-chart-nvd3/*": ["../superset-frontend/plugins/legacy-preset-chart-nvd3/src/*"],
"src/*": ["../superset-frontend/src/*"]
}
},
"include": ["src/**/*"],
"exclude": ["node_modules", "dist"]
}

View File

@@ -0,0 +1,137 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
const path = require('path');
const webpack = require('webpack');
const FRONTEND_DIR = path.resolve(__dirname, '../superset-frontend');
module.exports = {
target: 'node',
mode: 'production',
entry: './src/index.ts',
output: {
filename: 'index.js',
path: path.resolve(__dirname, 'dist'),
libraryTarget: 'commonjs2',
},
resolve: {
extensions: ['.ts', '.tsx', '.js', '.jsx', '.json'],
modules: [path.join(FRONTEND_DIR, 'node_modules'), FRONTEND_DIR, 'node_modules'],
alias: {
'@superset-ui/core': path.join(FRONTEND_DIR, 'packages/superset-ui-core/src'),
'@superset-ui/chart-controls': path.join(
FRONTEND_DIR,
'packages/superset-ui-chart-controls/src',
),
'@superset-ui/switchboard': path.join(
FRONTEND_DIR,
'packages/superset-ui-switchboard/src',
),
'@apache-superset/core': path.join(FRONTEND_DIR, 'packages/superset-core/src'),
'@superset-ui/plugin-chart-echarts': path.join(
FRONTEND_DIR,
'plugins/plugin-chart-echarts/src',
),
'@superset-ui/plugin-chart-table': path.join(
FRONTEND_DIR,
'plugins/plugin-chart-table/src',
),
'@superset-ui/plugin-chart-pivot-table': path.join(
FRONTEND_DIR,
'plugins/plugin-chart-pivot-table/src',
),
'@superset-ui/plugin-chart-handlebars': path.join(
FRONTEND_DIR,
'plugins/plugin-chart-handlebars/src',
),
'@superset-ui/plugin-chart-word-cloud': path.join(
FRONTEND_DIR,
'plugins/plugin-chart-word-cloud/src',
),
'@superset-ui/plugin-chart-cartodiagram': path.join(
FRONTEND_DIR,
'plugins/plugin-chart-cartodiagram/src',
),
'@superset-ui/plugin-chart-ag-grid-table': path.join(
FRONTEND_DIR,
'plugins/plugin-chart-ag-grid-table/src',
),
'@superset-ui/plugin-chart-point-cluster-map': path.join(
FRONTEND_DIR,
'plugins/plugin-chart-point-cluster-map/src',
),
'@superset-ui/preset-chart-deckgl': path.join(
FRONTEND_DIR,
'plugins/preset-chart-deckgl/src',
),
},
},
module: {
rules: [
{
test: /\.tsx?$/,
use: {
loader: 'ts-loader',
options: {
transpileOnly: true,
configFile: path.resolve(__dirname, 'tsconfig.json'),
},
},
exclude: /node_modules/,
},
{
test: /\.(png|jpe?g|gif|svg|ico)$/i,
use: 'null-loader',
},
{
test: /\.(css|less|scss|sass)$/i,
use: 'null-loader',
},
],
},
plugins: [
new webpack.NormalModuleReplacementPlugin(
/^@superset-ui\/core$/,
path.resolve(__dirname, 'src/stubs/superset-ui-core.ts'),
),
new webpack.NormalModuleReplacementPlugin(
/^@superset-ui\/chart-controls$/,
path.resolve(__dirname, 'src/stubs/superset-ui-chart-controls.ts'),
),
new webpack.NormalModuleReplacementPlugin(
/react-markdown/,
path.resolve(__dirname, 'src/stubs/empty.ts'),
),
new webpack.NormalModuleReplacementPlugin(
/remark-rehype/,
path.resolve(__dirname, 'src/stubs/empty.ts'),
),
new webpack.NormalModuleReplacementPlugin(
/remark-gfm/,
path.resolve(__dirname, 'src/stubs/empty.ts'),
),
new webpack.DefinePlugin({
'process.env.NODE_ENV': JSON.stringify('production'),
}),
],
optimization: {
minimize: false,
},
};

View File

@@ -31,7 +31,7 @@ describe('SupersetClient', () => {
afterEach(() => SupersetClient.reset());
test('exposes configure, init, get, post, postForm, delete, put, request, reset, getGuestToken, getCSRFToken, getUrl, isAuthenticated, and reAuthenticate methods', () => {
test('exposes configure, init, get, post, postForm, delete, put, request, reset, getGuestToken, getCSRFToken, isAuthenticated, and reAuthenticate methods', () => {
expect(typeof SupersetClient.configure).toBe('function');
expect(typeof SupersetClient.init).toBe('function');
expect(typeof SupersetClient.get).toBe('function');
@@ -43,12 +43,11 @@ describe('SupersetClient', () => {
expect(typeof SupersetClient.reset).toBe('function');
expect(typeof SupersetClient.getGuestToken).toBe('function');
expect(typeof SupersetClient.getCSRFToken).toBe('function');
expect(typeof SupersetClient.getUrl).toBe('function');
expect(typeof SupersetClient.isAuthenticated).toBe('function');
expect(typeof SupersetClient.reAuthenticate).toBe('function');
});
test('throws if you call init, get, post, postForm, delete, put, request, getGuestToken, getCSRFToken, getUrl, isAuthenticated, or reAuthenticate before configure', () => {
test('throws if you call init, get, post, postForm, delete, put, request, getGuestToken, getCSRFToken, isAuthenticated, or reAuthenticate before configure', () => {
expect(SupersetClient.init).toThrow();
expect(SupersetClient.get).toThrow();
expect(SupersetClient.post).toThrow();
@@ -58,7 +57,6 @@ describe('SupersetClient', () => {
expect(SupersetClient.request).toThrow();
expect(SupersetClient.getGuestToken).toThrow();
expect(SupersetClient.getCSRFToken).toThrow();
expect(SupersetClient.getUrl).toThrow();
expect(SupersetClient.isAuthenticated).toThrow();
expect(SupersetClient.reAuthenticate).toThrow();
expect(SupersetClient.configure).not.toThrow();
@@ -66,7 +64,7 @@ describe('SupersetClient', () => {
// this also tests that the ^above doesn't throw if configure is called appropriately
test('calls appropriate SupersetClient methods when configured', async () => {
expect.assertions(18);
expect.assertions(16);
const mockGetUrl = '/mock/get/url';
const mockPostUrl = '/mock/post/url';
const mockRequestUrl = '/mock/request/url';
@@ -97,14 +95,6 @@ describe('SupersetClient', () => {
SupersetClientClass.prototype,
'getGuestToken',
);
const getUrlSpy = jest.spyOn(SupersetClientClass.prototype, 'getUrl');
SupersetClient.configure({ appRoot: '/app' });
expect(SupersetClient.getUrl({ endpoint: '/some/path' })).toContain(
'/app/some/path',
);
expect(getUrlSpy).toHaveBeenCalledTimes(1);
SupersetClient.configure({});
await SupersetClient.init();
@@ -157,8 +147,6 @@ describe('SupersetClient', () => {
postSpy.mockRestore();
authenticatedSpy.mockRestore();
csrfSpy.mockRestore();
getUrlSpy.mockRestore();
fetchMock.clearHistory().removeRoutes();
});

View File

@@ -71,10 +71,16 @@ describe('TimeFormatter', () => {
// PivotData.processRecord coerces values with String(), turning numeric
// timestamps into strings.
const timestamp = PREVIEW_TIME.getTime().toString();
expect(formatter.format(timestamp)).toEqual('2017');
expect(formatter.format(timestamp as unknown as number | Date)).toEqual(
'2017',
);
});
test('handles ISO-8601 string without misinterpreting it as a number', () => {
expect(formatter.format('2017-02-14T11:22:33.000Z')).toEqual('2017');
expect(
formatter.format(
'2017-02-14T11:22:33.000Z' as unknown as number | Date,
),
).toEqual('2017');
});
test('otherwise returns formatted value', () => {
expect(formatter.format(PREVIEW_TIME)).toEqual('2017');

View File

@@ -1400,25 +1400,6 @@ test('getAxisType with forced categorical', () => {
);
});
test('getAxisType treats numeric as category for bar charts', () => {
expect(
getAxisType(
false,
false,
GenericDataType.Numeric,
EchartsTimeseriesSeriesType.Bar,
),
).toEqual(AxisType.Category);
expect(
getAxisType(
false,
false,
GenericDataType.Numeric,
EchartsTimeseriesSeriesType.Line,
),
).toEqual(AxisType.Value);
});
test('getMinAndMaxFromBounds returns empty object when not truncating', () => {
expect(
getMinAndMaxFromBounds(

View File

@@ -533,7 +533,12 @@ export default function TableChart<D extends DataRecord = DataRecord>(
// so that cross-filters work on the receiving chart
const resolvedCol = columnLabelToNameMap[col] ?? col;
const val = ensureIsArray(updatedFilters?.[col]);
if (!val.length || val[0] === null || (val[0] instanceof DateWithFormatter && val[0].input === null))
if (
!val.length ||
val[0] === null ||
(val[0] instanceof DateWithFormatter &&
val[0].input === null)
)
return {
col: resolvedCol,
op: 'IS NULL' as const,
@@ -646,24 +651,22 @@ export default function TableChart<D extends DataRecord = DataRecord>(
// DateWithFormatter objects wrap nulls, so we must check both
if (
dataRecordValue == null ||
(dataRecordValue instanceof DateWithFormatter && dataRecordValue.input == null)
(dataRecordValue instanceof DateWithFormatter &&
dataRecordValue.input == null)
) {
drillToDetailFilters.push({
col: col.key,
op: 'IS NULL' as any,
val: null,
});
} else if (col.dataType === GenericDataType.Temporal && timeGrain) {
const startTime =
dataRecordValue instanceof Date
? dataRecordValue
: new Date(dataRecordValue as string | number);
const [rangeStartTime, rangeEndTime] = getTimeRangeFromGranularity(
startTime,
timeGrain,
);
const [rangeStartTime, rangeEndTime] =
getTimeRangeFromGranularity(startTime, timeGrain);
const timeRangeValue = `${rangeStartTime.toISOString()} : ${rangeEndTime.toISOString()}`;
drillToDetailFilters.push({
@@ -696,7 +699,11 @@ export default function TableChart<D extends DataRecord = DataRecord>(
filters: [
{
col: cellPoint.key,
op: (cellPoint.value == null || (cellPoint.value instanceof DateWithFormatter && cellPoint.value.input == null) ? 'IS NULL' : '==') as any,
op: (cellPoint.value == null ||
(cellPoint.value instanceof DateWithFormatter &&
cellPoint.value.input == null)
? 'IS NULL'
: '==') as any,
val: extractTextFromHTML(cellPoint.value),
},
],

View File

@@ -19,15 +19,13 @@
import { SHARED_COLUMN_CONFIG_PROPS } from './constants';
const tokenSeparators =
SHARED_COLUMN_CONFIG_PROPS.d3NumberFormat.tokenSeparators;
const { d3NumberFormat } = SHARED_COLUMN_CONFIG_PROPS;
test('should allow commas in D3 format inputs', () => {
expect(tokenSeparators).toBeDefined();
expect(tokenSeparators).not.toContain(',');
test('should keep D3 format input creatable', () => {
expect(d3NumberFormat.creatable).toBe(true);
});
test('should have correct default token separators', () => {
const expectedSeparators = ['\r\n', '\n', '\t', ';'];
expect(tokenSeparators).toEqual(expectedSeparators);
test('should expose expected D3 format options', () => {
expect(Array.isArray(d3NumberFormat.options)).toBe(true);
expect((d3NumberFormat.options ?? []).length).toBeGreaterThan(0);
});

View File

@@ -58,8 +58,6 @@ const d3NumberFormat: ControlFormItemSpec<'Select'> = {
creatable: true,
minWidth: '14em',
debounceDelay: 500,
// default value tokenSeparators in superset-frontend/packages/superset-ui-core/src/components/Select/constants.ts
tokenSeparators: ['\r\n', '\n', '\t', ';'],
};
const d3TimeFormat: ControlFormItemSpec<'Select'> = {

View File

@@ -36,6 +36,10 @@ from superset.charts.data.dashboard_filter_context import (
get_dashboard_filter_context,
)
from superset.charts.data.query_context_cache_loader import QueryContextCacheLoader
from superset.charts.data.query_context_sidecar import (
fetch_query_context_from_sidecar,
QueryContextSidecarError,
)
from superset.charts.schemas import ChartDataQueryContextSchema
from superset.commands.chart.data.create_async_job_command import (
CreateAsyncChartDataJobCommand,
@@ -57,7 +61,7 @@ from superset.constants import (
)
from superset.daos.exceptions import DatasourceNotFound
from superset.exceptions import QueryObjectValidationError, SupersetSecurityException
from superset.extensions import event_logger
from superset.extensions import db, event_logger
from superset.models.sql_lab import Query
from superset.utils import json
from superset.utils.core import (
@@ -65,7 +69,7 @@ from superset.utils.core import (
DatasourceType,
get_user_id,
)
from superset.utils.decorators import logs_context
from superset.utils.decorators import logs_context, transaction
from superset.views.base import CsvResponse, generate_download_headers, XlsxResponse
from superset.views.base_api import statsd_metrics
@@ -74,12 +78,18 @@ if TYPE_CHECKING:
logger = logging.getLogger(__name__)
DEFAULT_QUERY_CONTEXT_SIDECAR_TIMEOUT = 30
MISSING_QUERY_CONTEXT_MESSAGE = (
"Chart has no query context saved. Please save the chart again."
)
class ChartDataRestApi(ChartRestApi):
include_route_methods = {"get_data", "data", "data_from_cache"}
@expose("/<int:pk>/data/", methods=("GET",))
@protect()
@transaction()
@statsd_metrics
@event_logger.log_this_with_context(
action=lambda self, *args, **kwargs: f"{self.__class__.__name__}.data",
@@ -161,24 +171,50 @@ class ChartDataRestApi(ChartRestApi):
if not chart:
return self.response_404()
try:
json_body = json.loads(chart.query_context)
except (TypeError, json.JSONDecodeError):
json_body = None
force_refresh = self._is_force_refresh_requested()
sidecar_url = app.config.get("QUERY_CONTEXT_SIDECAR_URL")
should_refresh_query_context = force_refresh and bool(sidecar_url)
json_body = (
None
if should_refresh_query_context
else self._load_saved_query_context(chart)
)
if json_body is None:
return self.response_400(
message=_(
"Chart has no query context saved. Please save the chart again."
)
if not chart.params:
return self.response_400(message=_(MISSING_QUERY_CONTEXT_MESSAGE))
if not sidecar_url:
return self.response_400(message=_(MISSING_QUERY_CONTEXT_MESSAGE))
try:
form_data = json.loads(chart.params)
except (TypeError, json.JSONDecodeError):
return self.response_400(message=_(MISSING_QUERY_CONTEXT_MESSAGE))
timeout = app.config.get(
"QUERY_CONTEXT_SIDECAR_TIMEOUT",
DEFAULT_QUERY_CONTEXT_SIDECAR_TIMEOUT,
)
try:
json_body = fetch_query_context_from_sidecar(
sidecar_url=sidecar_url,
form_data=form_data,
timeout=timeout,
)
except QueryContextSidecarError as ex:
return self.response_502(message=str(ex))
chart.query_context = json.dumps(json_body)
chart.last_saved_at = datetime.now()
db.session.flush()
# override saved query context
json_body["result_format"] = request.args.get(
"format", ChartDataResultFormat.JSON
)
json_body["result_type"] = request.args.get("type", ChartDataResultType.FULL)
json_body["force"] = request.args.get("force")
json_body["force"] = force_refresh
# Apply dashboard filter context when filters_dashboard_id is provided
dashboard_filter_context: DashboardFilterContext | None = None
@@ -282,6 +318,18 @@ class ChartDataRestApi(ChartRestApi):
dashboard_filter_context=dashboard_filter_context,
)
    def _is_force_refresh_requested(self) -> bool:
        """Return True when the request's ``force`` query arg asks for a refresh."""
        # Accept the same truthy spellings clients send for `force`.
        return request.args.get("force") in {"1", "true", "True", "force"}
    def _load_saved_query_context(self, chart: Any) -> dict[str, Any] | None:
        """Parse and return the chart's persisted query context, if usable.

        Returns ``None`` when ``chart.query_context`` is missing (TypeError
        from ``json.loads(None)``), is not valid JSON, or decodes to
        something other than a dict.
        """
        try:
            json_body = json.loads(chart.query_context)
        except (TypeError, json.JSONDecodeError):
            return None
        if isinstance(json_body, dict):
            return json_body
        return None
@expose("/data", methods=("POST",))
@protect()
@statsd_metrics

View File

@@ -0,0 +1,61 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from typing import Any
import requests
class QueryContextSidecarError(Exception):
    """Raised when query context cannot be generated via sidecar.

    Covers sidecar network failures, non-200 responses, and payloads that
    do not contain a ``query_context`` dict.
    """
def fetch_query_context_from_sidecar(
    *,
    sidecar_url: str,
    form_data: dict[str, Any],
    timeout: int,
) -> dict[str, Any]:
    """POST chart ``form_data`` to the sidecar and return the query context.

    :param sidecar_url: base URL of the query-context sidecar service
    :param form_data: chart form data used to build the query context
    :param timeout: request timeout in seconds
    :raises QueryContextSidecarError: when the sidecar is unreachable,
        responds with a non-200 status, or returns a payload without a
        valid ``query_context`` dict
    """
    url = f"{sidecar_url.rstrip('/')}/api/v1/build-query-context"
    try:
        resp = requests.post(url, json={"form_data": form_data}, timeout=timeout)
    except requests.RequestException as ex:
        raise QueryContextSidecarError("Query context sidecar unavailable") from ex

    if resp.status_code != 200:
        raise QueryContextSidecarError("Query context sidecar error")

    try:
        body = resp.json()
    except ValueError as ex:
        raise QueryContextSidecarError(
            "Query context sidecar returned invalid response"
        ) from ex

    context = body.get("query_context")
    if isinstance(context, dict):
        return context
    raise QueryContextSidecarError("Query context sidecar returned invalid response")

View File

@@ -2322,6 +2322,11 @@ GLOBAL_ASYNC_QUERIES_POLLING_DELAY = int(
)
GLOBAL_ASYNC_QUERIES_WEBSOCKET_URL = "ws://127.0.0.1:8080/"
# Optional internal service URL used to generate chart query_context from form_data
# when query_context is missing (or refresh is explicitly forced).
QUERY_CONTEXT_SIDECAR_URL: str | None = None
QUERY_CONTEXT_SIDECAR_TIMEOUT = 30
# Global async queries cache backend configuration options:
# - Set 'CACHE_TYPE' to 'RedisCache' for RedisCacheBackend.
# - Set 'CACHE_TYPE' to 'RedisSentinelCache' for RedisSentinelCacheBackend.

View File

@@ -1180,6 +1180,107 @@ class TestGetChartDataApi(BaseTestChartDataApi):
"message": "Chart has no query context saved. Please save the chart again."
}
    @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
    @with_config({"QUERY_CONTEXT_SIDECAR_URL": "http://sidecar.internal"})
    @mock.patch("superset.charts.data.api.ChartDataRestApi._get_data_response")
    @mock.patch("superset.charts.data.api.ChartDataCommand.validate")
    @mock.patch(
        "superset.charts.data.api.ChartDataRestApi._create_query_context_from_form"
    )
    @mock.patch("superset.charts.data.api.fetch_query_context_from_sidecar")
    def test_get_data_fetches_missing_query_context_from_sidecar(
        self,
        mock_fetch_query_context_from_sidecar,
        mock_create_query_context_from_form,
        mock_validate,
        mock_get_data_response,
    ):
        # When a chart has no saved query_context, GET /data should fall back
        # to the sidecar, use its payload, and persist it on the chart.
        chart = db.session.query(Slice).filter_by(slice_name="Genders").one()
        # Simulate a chart with no saved query context.
        chart.query_context = None
        db.session.commit()
        sidecar_query_context = {
            "datasource": {"id": chart.table.id, "type": "table"},
            "force": False,
            "queries": [],
            "form_data": chart.form_data,
            "result_format": "json",
            "result_type": "full",
        }
        mock_fetch_query_context_from_sidecar.return_value = sidecar_query_context
        mock_create_query_context_from_form.return_value = mock.MagicMock()
        mock_validate.return_value = None
        mock_get_data_response.return_value = Response(
            response="{}",
            status=200,
            mimetype="application/json",
        )
        rv = self.get_assert_metric(f"api/v1/chart/{chart.id}/data/", "get_data")
        assert rv.status_code == 200
        mock_fetch_query_context_from_sidecar.assert_called_once()
        # The sidecar-built query context must be written back to the chart.
        db.session.refresh(chart)
        assert json.loads(chart.query_context or "{}").get("datasource") == {
            "id": chart.table.id,
            "type": "table",
        }
    @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
    @with_config({"QUERY_CONTEXT_SIDECAR_URL": "http://sidecar.internal"})
    @mock.patch("superset.charts.data.api.ChartDataRestApi._get_data_response")
    @mock.patch("superset.charts.data.api.ChartDataCommand.validate")
    @mock.patch(
        "superset.charts.data.api.ChartDataRestApi._create_query_context_from_form"
    )
    @mock.patch("superset.charts.data.api.fetch_query_context_from_sidecar")
    def test_get_data_force_refreshes_query_context_from_sidecar(
        self,
        mock_fetch_query_context_from_sidecar,
        mock_create_query_context_from_form,
        mock_validate,
        mock_get_data_response,
    ):
        # With ?force=true and a sidecar configured, the saved query_context
        # should be ignored, regenerated via the sidecar, and persisted.
        chart = db.session.query(Slice).filter_by(slice_name="Genders").one()
        # Seed a stale saved query context that the refresh must replace.
        chart.query_context = json.dumps(
            {
                "datasource": {"id": chart.table.id, "type": "table"},
                "force": False,
                "queries": [{"metrics": ["sum__num"]}],
                "result_format": "json",
                "result_type": "full",
            }
        )
        db.session.commit()
        refreshed_query_context = {
            "datasource": {"id": chart.table.id, "type": "table"},
            "force": False,
            "queries": [{"metrics": ["count"]}],
            "form_data": chart.form_data,
            "result_format": "json",
            "result_type": "full",
        }
        mock_fetch_query_context_from_sidecar.return_value = refreshed_query_context
        mock_create_query_context_from_form.return_value = mock.MagicMock()
        mock_validate.return_value = None
        mock_get_data_response.return_value = Response(
            response="{}",
            status=200,
            mimetype="application/json",
        )
        rv = self.get_assert_metric(
            f"api/v1/chart/{chart.id}/data/?force=true",
            "get_data",
        )
        assert rv.status_code == 200
        mock_fetch_query_context_from_sidecar.assert_called_once()
        # The refreshed queries (not the stale sum__num ones) must persist.
        db.session.refresh(chart)
        persisted = json.loads(chart.query_context or "{}")
        assert persisted.get("queries") == [{"metrics": ["count"]}]
@pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
def test_chart_data_get(self):
"""

View File

@@ -0,0 +1,87 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from unittest import mock
import pytest
import requests
from superset.charts.data.query_context_sidecar import (
fetch_query_context_from_sidecar,
QueryContextSidecarError,
)
@mock.patch("superset.charts.data.query_context_sidecar.requests.post")
def test_fetch_query_context_from_sidecar_success(mock_post: mock.MagicMock) -> None:
    """A 200 response carrying a query_context dict is unwrapped and returned."""
    response = mock_post.return_value
    response.status_code = 200
    response.json.return_value = {"query_context": {"foo": "bar"}}

    result = fetch_query_context_from_sidecar(
        sidecar_url="http://sidecar.internal",
        form_data={"viz_type": "pie"},
        timeout=15,
    )

    mock_post.assert_called_once_with(
        "http://sidecar.internal/api/v1/build-query-context",
        json={"form_data": {"viz_type": "pie"}},
        timeout=15,
    )
    assert result == {"foo": "bar"}
@mock.patch("superset.charts.data.query_context_sidecar.requests.post")
def test_fetch_query_context_from_sidecar_connection_error(
    mock_post: mock.MagicMock,
) -> None:
    """Network-level failures must surface as QueryContextSidecarError."""
    mock_post.side_effect = requests.RequestException()
    sidecar_kwargs = {
        "sidecar_url": "http://sidecar.internal",
        "form_data": {"viz_type": "pie"},
        "timeout": 15,
    }

    with pytest.raises(QueryContextSidecarError, match="sidecar unavailable"):
        fetch_query_context_from_sidecar(**sidecar_kwargs)
@mock.patch("superset.charts.data.query_context_sidecar.requests.post")
def test_fetch_query_context_from_sidecar_bad_status(mock_post: mock.MagicMock) -> None:
    """Any non-200 sidecar response must raise QueryContextSidecarError."""
    mock_post.return_value.status_code = 500
    sidecar_kwargs = {
        "sidecar_url": "http://sidecar.internal",
        "form_data": {"viz_type": "pie"},
        "timeout": 15,
    }

    with pytest.raises(QueryContextSidecarError, match="sidecar error"):
        fetch_query_context_from_sidecar(**sidecar_kwargs)
@mock.patch("superset.charts.data.query_context_sidecar.requests.post")
def test_fetch_query_context_from_sidecar_invalid_payload(
    mock_post: mock.MagicMock,
) -> None:
    """A 200 response without a ``query_context`` dict must be rejected."""
    response = mock_post.return_value
    response.status_code = 200
    response.json.return_value = {"not_query_context": {}}
    sidecar_kwargs = {
        "sidecar_url": "http://sidecar.internal",
        "form_data": {"viz_type": "pie"},
        "timeout": 15,
    }

    with pytest.raises(QueryContextSidecarError, match="invalid response"):
        fetch_query_context_from_sidecar(**sidecar_kwargs)

View File

@@ -16,6 +16,8 @@
# under the License.
import copy
from typing import Any, cast
from uuid import UUID
import yaml
from pytest_mock import MockerFixture
@@ -153,8 +155,10 @@ def test_import_assets_imports_tags(mocker: MockerFixture, session: Session) ->
ImportAssetsCommand._import(configs, contents=contents)
chart_uuids = {config["uuid"] for config in charts_with_tags.values()}
imported_charts = db.session.query(Slice).filter(Slice.uuid.in_(chart_uuids)).all()
chart_uuids = {UUID(str(config["uuid"])) for config in charts_with_tags.values()}
imported_charts = (
db.session.query(Slice).filter(cast(Any, Slice.uuid).in_(chart_uuids)).all()
)
assert len(imported_charts) == len(chart_uuids)
for chart in imported_charts:
assocs = (
@@ -165,9 +169,13 @@ def test_import_assets_imports_tags(mocker: MockerFixture, session: Session) ->
assert len(assocs) == 1
assert assocs[0].tag.name == "chart_tag"
dashboard_uuids = {config["uuid"] for config in dashboards_with_tags.values()}
dashboard_uuids = {
UUID(str(config["uuid"])) for config in dashboards_with_tags.values()
}
imported_dashboards = (
db.session.query(Dashboard).filter(Dashboard.uuid.in_(dashboard_uuids)).all()
db.session.query(Dashboard)
.filter(cast(Any, Dashboard.uuid).in_(dashboard_uuids))
.all()
)
assert len(imported_dashboards) == len(dashboard_uuids)
for dashboard in imported_dashboards:

View File

@@ -236,6 +236,7 @@ class TestNormalizeColumnNames:
normalized = DatasetValidator.normalize_column_names(config, dataset_id=18)
assert normalized.x is not None
assert normalized.x.name == "OrderDate"
assert normalized.y[0].name == "Sales"
assert normalized.filters is not None
@@ -278,6 +279,7 @@ class TestNormalizeColumnNames:
normalized = DatasetValidator.normalize_column_names(config, dataset_id=999)
# Should return original config unchanged
assert normalized.x is not None
assert normalized.x.name == "orderdate"
assert normalized.y[0].name == "sales"
@@ -318,11 +320,13 @@ class TestTimeSeriesFilterPromptFix:
normalized = DatasetValidator.normalize_column_names(config, dataset_id=18)
# After normalization, x.name should match the filter column exactly
assert normalized.x is not None
assert normalized.x.name == "OrderDate"
assert normalized.filters is not None
assert normalized.filters[0].column == "OrderDate"
# This equality is what the frontend checks - now they match!
assert normalized.x is not None
assert normalized.x.name == normalized.filters[0].column
@@ -394,6 +398,7 @@ class TestNormalizeUppercaseDataset:
normalized = DatasetValidator.normalize_column_names(config, dataset_id=24)
assert normalized.x is not None
assert normalized.x.name == "ds"
assert normalized.y[0].name == "DISTANCE"
assert normalized.group_by is not None
@@ -417,6 +422,7 @@ class TestNormalizeUppercaseDataset:
normalized = DatasetValidator.normalize_column_names(config, dataset_id=24)
assert normalized.x is not None
assert normalized.x.name == "ds"
assert normalized.y[0].name == "DEPARTURE_DELAY"
@@ -459,6 +465,7 @@ class TestNormalizeEdgeCases:
normalized = DatasetValidator.normalize_column_names(config, dataset_id=18)
assert normalized.x is not None
assert normalized.x.name == "OrderDate"
assert normalized.y[0].name == "Sales"
assert normalized.filters is None
@@ -480,6 +487,7 @@ class TestNormalizeEdgeCases:
normalized = DatasetValidator.normalize_column_names(config, dataset_id=18)
assert normalized.x is not None
assert normalized.x.name == "OrderDate"
assert normalized.filters is not None
assert len(normalized.filters) == 0
@@ -500,6 +508,7 @@ class TestNormalizeEdgeCases:
normalized = DatasetValidator.normalize_column_names(config, dataset_id=18)
assert normalized.x is not None
assert normalized.x.name == "OrderDate"
assert normalized.group_by is None
@@ -527,6 +536,7 @@ class TestNormalizeEdgeCases:
normalized = DatasetValidator.normalize_column_names(config, dataset_id=18)
assert normalized.x is not None
assert normalized.x.name == "OrderDate"
assert normalized.y[0].name == "Sales"
assert normalized.y[1].name == "quantity_ordered"
@@ -554,6 +564,8 @@ class TestNormalizeEdgeCases:
first = DatasetValidator.normalize_column_names(config, dataset_id=18)
second = DatasetValidator.normalize_column_names(first, dataset_id=18)
assert first.x is not None
assert second.x is not None
assert first.x.name == second.x.name == "OrderDate"
assert first.y[0].name == second.y[0].name == "Sales"
assert first.filters is not None
@@ -636,6 +648,7 @@ class TestNormalizeXAxisFilterConsistency:
normalized = DatasetValidator.normalize_column_names(config, dataset_id=18)
assert normalized.filters is not None
assert normalized.x is not None
assert normalized.x.name == normalized.filters[0].column == "OrderDate"
@patch.object(DatasetValidator, "_get_dataset_context")
@@ -656,6 +669,7 @@ class TestNormalizeXAxisFilterConsistency:
normalized = DatasetValidator.normalize_column_names(config, dataset_id=24)
assert normalized.filters is not None
assert normalized.x is not None
assert normalized.x.name == normalized.filters[0].column == "ds"
@patch.object(DatasetValidator, "_get_dataset_context")

View File

@@ -774,7 +774,7 @@ def test_raw_connection_oauth_engine(mocker: MockerFixture) -> None:
sqlalchemy_uri="sqlite://",
encrypted_extra=json.dumps(oauth2_client_info),
)
database.db_engine_spec.oauth2_exception = OAuth2Error # type: ignore
database.db_engine_spec.oauth2_exception = OAuth2Error
_get_sqla_engine = mocker.patch.object(database, "_get_sqla_engine")
_get_sqla_engine.side_effect = OAuth2Error("OAuth2 required")
@@ -805,7 +805,7 @@ def test_raw_connection_oauth_connection(mocker: MockerFixture) -> None:
sqlalchemy_uri="sqlite://",
encrypted_extra=json.dumps(oauth2_client_info),
)
database.db_engine_spec.oauth2_exception = OAuth2Error # type: ignore
database.db_engine_spec.oauth2_exception = OAuth2Error
get_sqla_engine = mocker.patch.object(database, "get_sqla_engine")
get_sqla_engine().__enter__().raw_connection.side_effect = OAuth2Error(
"OAuth2 required"
@@ -838,7 +838,7 @@ def test_raw_connection_oauth_execute(mocker: MockerFixture) -> None:
sqlalchemy_uri="sqlite://",
encrypted_extra=json.dumps(oauth2_client_info),
)
database.db_engine_spec.oauth2_exception = OAuth2Error # type: ignore
database.db_engine_spec.oauth2_exception = OAuth2Error
get_sqla_engine = mocker.patch.object(database, "get_sqla_engine")
get_sqla_engine().__enter__().raw_connection().cursor().execute.side_effect = (
OAuth2Error("OAuth2 required")

View File

@@ -220,7 +220,7 @@ def test_get_sql_results_oauth2(mocker: MockerFixture, app) -> None:
sqlalchemy_uri="sqlite://",
encrypted_extra=json.dumps(oauth2_client_info),
)
database.db_engine_spec.oauth2_exception = OAuth2Error # type: ignore
database.db_engine_spec.oauth2_exception = OAuth2Error
get_sqla_engine = mocker.patch.object(database, "get_sqla_engine")
get_sqla_engine().__enter__().raw_connection.side_effect = OAuth2Error(
"OAuth2 required"