Compare commits

..

4 Commits

Author SHA1 Message Date
Enzo Martellucci
bec3d94a5b Merge branch master into enxdev/refactor/typescript-migration-ChartRenderer 2025-03-15 17:07:39 +01:00
Enzo Martellucci
c805c96f5a Merge branch 'master' into enxdev/refactor/typescript-migration-ChartRenderer 2025-02-25 09:29:36 +01:00
Enzo Martellucci
a3ec4080e6 wip(ChartRenderer) 2025-02-10 11:01:19 +01:00
Enzo Martellucci
3f6e511048 wip(ChartRenderer): migrates ChartRenderer to Ts 2025-02-06 14:41:31 +01:00
915 changed files with 30602 additions and 61294 deletions

View File

@@ -17,12 +17,6 @@
# https://cwiki.apache.org/confluence/display/INFRA/.asf.yaml+features+for+git+repositories
---
notifications:
commits: commits@superset.apache.org
issues: notifications@superset.apache.org
pullrequests: notifications@superset.apache.org
discussions: notifications@superset.apache.org
github:
del_branch_on_merge: true
description: "Apache Superset is a Data Visualization and Data Exploration Platform"
@@ -54,8 +48,6 @@ github:
projects: true
# Enable wiki for documentation
wiki: true
# Enable discussions
discussions: true
enabled_merge_buttons:
squash: true

View File

@@ -41,7 +41,7 @@ body:
label: Superset version
options:
- master / latest-dev
- "4.1.2"
- "4.1.1"
- "4.0.2"
validations:
required: true

5
.github/labeler.yml vendored
View File

@@ -127,11 +127,6 @@
- any-glob-to-any-file:
- 'superset/translations/es/**'
"i18n:persian":
- changed-files:
- any-glob-to-any-file:
- 'superset/translations/fa/**'
############################################
# Sub-projects and monorepo packages
############################################

View File

@@ -145,7 +145,6 @@ cypress-install() {
cypress-run-all() {
local USE_DASHBOARD=$1
local APP_ROOT=$2
cd "$GITHUB_WORKSPACE/superset-frontend/cypress-base"
# Start Flask and run it in background
@@ -153,12 +152,7 @@ cypress-run-all() {
# so errors can print to stderr.
local flasklog="${HOME}/flask.log"
local port=8081
CYPRESS_BASE_URL="http://localhost:${port}"
if [ -n "$APP_ROOT" ]; then
export SUPERSET_APP_ROOT=$APP_ROOT
CYPRESS_BASE_URL=${CYPRESS_BASE_URL}${APP_ROOT}
fi
export CYPRESS_BASE_URL
export CYPRESS_BASE_URL="http://localhost:${port}"
nohup flask run --no-debugger -p $port >"$flasklog" 2>&1 </dev/null &
local flaskProcessId=$!

View File

@@ -17,12 +17,13 @@ jobs:
check-python-deps:
runs-on: ubuntu-22.04
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v4
with:
persist-credentials: false
submodules: recursive
fetch-depth: 1
depth: 1
- name: Setup Python
if: steps.check.outputs.python

View File

@@ -42,7 +42,6 @@ jobs:
matrix:
parallel_id: [0, 1, 2, 3, 4, 5]
browser: ["chrome"]
app_root: ["", "/app/prefix"]
env:
SUPERSET_ENV: development
SUPERSET_CONFIG: tests.integration_tests.superset_test_config
@@ -50,8 +49,8 @@ jobs:
PYTHONPATH: ${{ github.workspace }}
REDIS_PORT: 16379
GITHUB_TOKEN: ${{ github.token }}
# Only use dashboard when explicitly requested via workflow_dispatch
USE_DASHBOARD: ${{ github.event.inputs.use_dashboard == 'true' || 'false' }}
# use the dashboard feature when running manually OR merging to master
USE_DASHBOARD: ${{ github.event.inputs.use_dashboard == 'true'|| (github.ref == 'refs/heads/master' && 'true') || 'false' }}
services:
postgres:
image: postgres:16-alpine
@@ -136,7 +135,7 @@ jobs:
CYPRESS_RECORD_KEY: ${{ secrets.CYPRESS_RECORD_KEY }}
NODE_OPTIONS: "--max-old-space-size=4096"
with:
run: cypress-run-all ${{ env.USE_DASHBOARD }} ${{ matrix.app_root }}
run: cypress-run-all ${{ env.USE_DASHBOARD }}
- name: Upload Artifacts
uses: actions/upload-artifact@v4
if: failure()

View File

@@ -44,7 +44,7 @@ jobs:
SUPERSET_TESTENV: true
SUPERSET_SECRET_KEY: not-a-secret
run: |
pytest --durations-min=0.5 --cov-report= --cov=superset ./tests/common ./tests/unit_tests --cache-clear --maxfail=50
pytest --durations-min=0.5 --cov-report= --cov=superset ./tests/common ./tests/unit_tests --cache-clear
- name: Upload code coverage
uses: codecov/codecov-action@v5
with:

1
.gitignore vendored
View File

@@ -107,7 +107,6 @@ ghostdriver.log
testCSV.csv
.terser-plugin-cache/
apache-superset-*.tar.gz*
apache_superset-*.tar.gz*
release.json
# Translation-related files

View File

@@ -57,7 +57,7 @@ repos:
hooks:
- id: prettier
additional_dependencies:
- prettier@3.5.3
- prettier@3.3.3
args: ["--ignore-path=./superset-frontend/.prettierignore"]
files: "superset-frontend"
- repo: local

View File

@@ -1,50 +0,0 @@
<!--
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
-->
## Change Log
### 4.1 (Fri Nov 15 22:13:57 2024 +0530)
**Database Migrations**
**Features**
**Fixes**
- [#30886](https://github.com/apache/superset/pull/30886) fix: blocks UI elements on right side (@samarsrivastav)
- [#30859](https://github.com/apache/superset/pull/30859) fix(package.json): Pin luxon version to unblock master (@geido)
- [#30588](https://github.com/apache/superset/pull/30588) fix(explore): column data type tooltip format (@mistercrunch)
- [#29911](https://github.com/apache/superset/pull/29911) fix: Rename database from 'couchbasedb' to 'couchbase' in documentation and db_engine_specs (@ayush-couchbase)
- [#30828](https://github.com/apache/superset/pull/30828) fix(TimezoneSelector): Failing unit tests due to timezone change (@geido)
- [#30875](https://github.com/apache/superset/pull/30875) fix: don't show metadata for embedded dashboards (@sadpandajoe)
- [#30851](https://github.com/apache/superset/pull/30851) fix: Graph chart colors (@michael-s-molina)
- [#29867](https://github.com/apache/superset/pull/29867) fix(capitalization): Capitalizing a button. (@rusackas)
- [#29782](https://github.com/apache/superset/pull/29782) fix(translations): Translate embedded errors (@rusackas)
- [#29772](https://github.com/apache/superset/pull/29772) fix: Fixing incomplete string escaping. (@rusackas)
- [#29725](https://github.com/apache/superset/pull/29725) fix(frontend/docker, ci): fix borked Docker build due to Lerna v8 uplift (@hainenber)
**Others**
- [#30576](https://github.com/apache/superset/pull/30576) chore: add link to Superset when report error (@eschutho)
- [#29786](https://github.com/apache/superset/pull/29786) refactor(Slider): Upgrade Slider to Antd 5 (@geido)
- [#29674](https://github.com/apache/superset/pull/29674) refactor(ChartCreation): Migrate tests to RTL (@rtexelm)
- [#29843](https://github.com/apache/superset/pull/29843) refactor(controls): Migrate AdhocMetricOption.test to RTL (@rtexelm)
- [#29845](https://github.com/apache/superset/pull/29845) refactor(controls): Migrate MetricDefinitionValue.test to RTL (@rtexelm)
- [#28424](https://github.com/apache/superset/pull/28424) docs: Check markdown files for bad links using linkinator (@rusackas)
- [#29768](https://github.com/apache/superset/pull/29768) docs(contributing): fix broken link to translations sub-section (@sfirke)

View File

@@ -1,83 +0,0 @@
<!--
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
-->
## Change Log
### 4.1.2 (Fri Mar 7 13:28:05 2025 -0800)
**Database Migrations**
- [#32538](https://github.com/apache/superset/pull/32538) fix(migrations): Handle comparator None in old time comparison migration (@Antonio-RiveroMartnez)
- [#32155](https://github.com/apache/superset/pull/32155) fix(migrations): Handle no params in time comparison migration (@Antonio-RiveroMartnez)
- [#31185](https://github.com/apache/superset/pull/31185) fix: check for column before adding in migrations (@betodealmeida)
**Features**
- [#29974](https://github.com/apache/superset/pull/29974) feat(sqllab): Adds refresh button to table metadata in SQL Lab (@Usiel)
**Fixes**
- [#32515](https://github.com/apache/superset/pull/32515) fix(sqllab): Allow clear on schema and catalog (@justinpark)
- [#32500](https://github.com/apache/superset/pull/32500) fix: dashboard, chart and dataset import validation (@dpgaspar)
- [#31353](https://github.com/apache/superset/pull/31353) fix(sqllab): duplicate error message (@betodealmeida)
- [#31407](https://github.com/apache/superset/pull/31407) fix: Big Number side cut fixed (@fardin-developer)
- [#31480](https://github.com/apache/superset/pull/31480) fix(sunburst): Use metric label from verbose map (@gerbermichi)
- [#31427](https://github.com/apache/superset/pull/31427) fix(tags): clean up bulk create api and schema (@villebro)
- [#31334](https://github.com/apache/superset/pull/31334) fix(docs): add custom editUrl path for intro page (@dwgrossberg)
- [#31353](https://github.com/apache/superset/pull/31353) fix(sqllab): duplicate error message (@betodealmeida)
- [#31323](https://github.com/apache/superset/pull/31323) fix: Use clickhouse sqlglot dialect for YDB (@vgvoleg)
- [#31198](https://github.com/apache/superset/pull/31198) fix: add more clickhouse disallowed functions on config (@dpgaspar)
- [#31194](https://github.com/apache/superset/pull/31194) fix(embedded): Hide anchor links in embedded mode (@Vitor-Avila)
- [#31960](https://github.com/apache/superset/pull/31960) fix(sqllab): Missing allowHTML props in ResultTableExtension (@justinpark)
- [#31332](https://github.com/apache/superset/pull/31332) fix: prevent multiple pvm errors on migration (@eschutho)
- [#31437](https://github.com/apache/superset/pull/31437) fix(database import): Gracefully handle error to get catalog schemas (@Vitor-Avila)
- [#31173](https://github.com/apache/superset/pull/31173) fix: cache-warmup fails (@nsivarajan)
- [#30442](https://github.com/apache/superset/pull/30442) fix(fe/src/dashboard): optional chaining for possibly nullable parent attribute in LayoutItem type (@hainenber)
- [#31639](https://github.com/apache/superset/pull/31639) fix(sqllab): unable to update saved queries (@DamianPendrak)
- [#29898](https://github.com/apache/superset/pull/29898) fix: parse pandas pivot null values (@eschutho)
- [#31414](https://github.com/apache/superset/pull/31414) fix(Pivot Table): Fix column width to respect currency config (@Vitor-Avila)
- [#31335](https://github.com/apache/superset/pull/31335) fix(histogram): axis margin padding consistent with other graphs (@tatiana-cherne)
- [#31301](https://github.com/apache/superset/pull/31301) fix(AllEntitiesTable): show Tags (@alexandrusoare)
- [#31329](https://github.com/apache/superset/pull/31329) fix: pass string to `process_template` (@betodealmeida)
- [#31341](https://github.com/apache/superset/pull/31341) fix(pinot): remove query aliases from SELECT and ORDER BY clauses in Pinot (@yuribogomolov)
- [#31308](https://github.com/apache/superset/pull/31308) fix: annotations on horizontal bar chart (@DamianPendrak)
- [#31294](https://github.com/apache/superset/pull/31294) fix(sqllab): Remove update_saved_query_exec_info to reduce lag (@justinpark)
- [#30897](https://github.com/apache/superset/pull/30897) fix: Exception handling for SQL Lab views (@michael-s-molina)
- [#31199](https://github.com/apache/superset/pull/31199) fix(Databricks): Escape catalog and schema names in pre-queries (@Vitor-Avila)
- [#31265](https://github.com/apache/superset/pull/31265) fix(trino): db session error in handle cursor (@justinpark)
- [#31024](https://github.com/apache/superset/pull/31024) fix(dataset): use sqlglot for DML check (@betodealmeida)
- [#29885](https://github.com/apache/superset/pull/29885) fix: add mutator to get_columns_description (@eschutho)
- [#30821](https://github.com/apache/superset/pull/30821) fix: x axis title disappears when editing bar chart (@DamianPendrak)
- [#31181](https://github.com/apache/superset/pull/31181) fix: Time-series Line Chart Display unnecessary total (@michael-s-molina)
- [#31163](https://github.com/apache/superset/pull/31163) fix(Dashboard): Backward compatible shared_label_colors field (@geido)
- [#31156](https://github.com/apache/superset/pull/31156) fix: check orderby (@betodealmeida)
- [#31154](https://github.com/apache/superset/pull/31154) fix: Remove unwanted commit on Trino's handle_cursor (@michael-s-molina)
- [#31151](https://github.com/apache/superset/pull/31151) fix: Revert "feat(trino): Add functionality to upload data (#29164)" (@michael-s-molina)
- [#31031](https://github.com/apache/superset/pull/31031) fix(Dashboard): Ensure shared label colors are updated (@geido)
- [#30967](https://github.com/apache/superset/pull/30967) fix(release validation): scripts now support RSA and EDDSA keys. (@rusackas)
- [#30881](https://github.com/apache/superset/pull/30881) fix(Dashboard): Native & Cross-Filters Scoping Performance (@geido)
- [#30887](https://github.com/apache/superset/pull/30887) fix(imports): import query_context for imports with charts (@lindenh)
- [#31008](https://github.com/apache/superset/pull/31008) fix(explore): verified props is not updated (@justinpark)
- [#30646](https://github.com/apache/superset/pull/30646) fix(Dashboard): Retain colors when color scheme not set (@geido)
- [#30962](https://github.com/apache/superset/pull/30962) fix(Dashboard): Exclude edit param in async screenshot (@geido)
**Others**
- [#32043](https://github.com/apache/superset/pull/32043) chore: Skip the creation of secondary perms during catalog migrations (@Vitor-Avila)
- [#30865](https://github.com/apache/superset/pull/30865) docs: Updating 4.1 Release Notes (@yousoph)

View File

@@ -208,7 +208,7 @@ RUN rm superset/translations/*/*/*.po
COPY --from=superset-node /app/superset/translations superset/translations
COPY --from=python-translation-compiler /app/translations_mo superset/translations
HEALTHCHECK CMD /app/docker/docker-healthcheck.sh
HEALTHCHECK CMD curl -f "http://localhost:${SUPERSET_PORT}/health"
CMD ["/app/docker/entrypoints/run-server.sh"]
EXPOSE ${SUPERSET_PORT}

View File

@@ -20,11 +20,11 @@ under the License.
# Superset
[![License](https://img.shields.io/badge/License-Apache%202.0-blue.svg)](https://opensource.org/license/apache-2-0)
[![Latest Release on Github](https://img.shields.io/github/v/release/apache/superset?sort=semver)](https://github.com/apache/superset/releases/latest)
[![Build Status](https://github.com/apache/superset/actions/workflows/superset-python-unittest.yml/badge.svg)](https://github.com/apache/superset/actions)
[![PyPI version](https://badge.fury.io/py/apache_superset.svg)](https://badge.fury.io/py/apache_superset)
[![GitHub release (latest SemVer)](https://img.shields.io/github/v/release/apache/superset?sort=semver)](https://github.com/apache/superset/tree/latest)
[![Build Status](https://github.com/apache/superset/workflows/Python/badge.svg)](https://github.com/apache/superset/actions)
[![PyPI version](https://badge.fury.io/py/apache-superset.svg)](https://badge.fury.io/py/apache-superset)
[![Coverage Status](https://codecov.io/github/apache/superset/coverage.svg?branch=master)](https://codecov.io/github/apache/superset)
[![PyPI](https://img.shields.io/pypi/pyversions/apache_superset.svg?maxAge=2592000)](https://pypi.python.org/pypi/apache_superset)
[![PyPI](https://img.shields.io/pypi/pyversions/apache-superset.svg?maxAge=2592000)](https://pypi.python.org/pypi/apache-superset)
[![Get on Slack](https://img.shields.io/badge/slack-join-orange.svg)](http://bit.ly/join-superset-slack)
[![Documentation](https://img.shields.io/badge/docs-apache.org-blue.svg)](https://superset.apache.org)
@@ -72,11 +72,10 @@ Superset provides:
## Screenshots & Gifs
**Video Overview**
<!-- File hosted here https://github.com/apache/superset-site/raw/lfs/superset-video-4k.mp4 -->
[superset-video-1080p.webm](https://github.com/user-attachments/assets/b37388f7-a971-409c-96a7-90c4e31322e6)
<br/>
**Large Gallery of Visualizations**
@@ -157,7 +156,7 @@ Try out Superset's [quickstart](https://superset.apache.org/docs/quickstart/) gu
and please read our [Slack Community Guidelines](https://github.com/apache/superset/blob/master/CODE_OF_CONDUCT.md#slack-community-guidelines)
- [Join our dev@superset.apache.org Mailing list](https://lists.apache.org/list.html?dev@superset.apache.org). To join, simply send an email to [dev-subscribe@superset.apache.org](mailto:dev-subscribe@superset.apache.org)
- If you want to help troubleshoot GitHub Issues involving the numerous database drivers that Superset supports, please consider adding your name and the databases you have access to on the [Superset Database Familiarity Rolodex](https://docs.google.com/spreadsheets/d/1U1qxiLvOX0kBTUGME1AHHi6Ywel6ECF8xk_Qy-V9R8c/edit#gid=0)
- Join Superset's Town Hall and [Operational Model](https://preset.io/blog/the-superset-operational-model-wants-you/) recurring meetings. Meeting info is available on the [Superset Community Calendar](https://superset.apache.org/community)
- Join Superset's Town Hall and [Operational Model](https://preset.io/blog/the-superset-operational-model-wants-you/) recurring meetings. Meeting info is available on the [Superset Community Calendar](https://superset.apache.org/community)
## Contributor Guide
@@ -185,16 +184,14 @@ Understanding the Superset Points of View
- [Building New Database Connectors](https://preset.io/blog/building-database-connector/)
- [Create Your First Dashboard](https://superset.apache.org/docs/using-superset/creating-your-first-dashboard/)
- [Comprehensive Tutorial for Contributing Code to Apache Superset
](https://preset.io/blog/tutorial-contributing-code-to-apache-superset/)
](https://preset.io/blog/tutorial-contributing-code-to-apache-superset/)
- [Resources to master Superset by Preset](https://preset.io/resources/)
- Deploying Superset
- [Official Docker image](https://hub.docker.com/r/apache/superset)
- [Helm Chart](https://github.com/apache/superset/tree/master/helm/superset)
- Recordings of Past [Superset Community Events](https://preset.io/events)
- [Mixed Time Series Charts](https://preset.io/events/mixed-time-series-visualization-in-superset-workshop/)
- [How the Bing Team Customized Superset for the Internal Self-Serve Data & Analytics Platform](https://preset.io/events/how-the-bing-team-heavily-customized-superset-for-their-internal-data/)
- [Live Demo: Visualizing MongoDB and Pinot Data using Trino](https://preset.io/events/2021-04-13-visualizing-mongodb-and-pinot-data-using-trino/)
@@ -202,7 +199,6 @@ Understanding the Superset Points of View
- [Building a Database Connector for Superset](https://preset.io/events/2021-02-16-building-a-database-connector-for-superset/)
- Visualizations
- [Creating Viz Plugins](https://superset.apache.org/docs/contributing/creating-viz-plugins/)
- [Managing and Deploying Custom Viz Plugins](https://medium.com/nmc-techblog/apache-superset-manage-custom-viz-plugins-in-production-9fde1a708e55)
- [Why Apache Superset is Betting on Apache ECharts](https://preset.io/blog/2021-4-1-why-echarts/)

View File

@@ -20,7 +20,7 @@ RUN useradd --user-group --create-home --no-log-init --shell /bin/bash superset
# Configure environment
ENV LANG=C.UTF-8 \
LC_ALL=C.UTF-8
LC_ALL=C.UTF-8
RUN apt-get update -y
@@ -30,14 +30,14 @@ RUN apt-get install -y apt-transport-https apt-utils
# Install superset dependencies
# https://superset.apache.org/docs/installation/installing-superset-from-scratch
RUN apt-get install -y build-essential libssl-dev \
libffi-dev python3-dev libsasl2-dev libldap2-dev libxi-dev chromium zstd
libffi-dev python3-dev libsasl2-dev libldap2-dev libxi-dev chromium zstd
# Install nodejs for custom build
# https://nodejs.org/en/download/package-manager/
RUN set -eux; \
curl -sL https://deb.nodesource.com/setup_20.x | bash -; \
apt-get install -y nodejs; \
node --version;
curl -sL https://deb.nodesource.com/setup_20.x | bash -; \
apt-get install -y nodejs; \
node --version;
RUN if ! which npm; then apt-get install -y npm; fi
RUN mkdir -p /home/superset
@@ -50,21 +50,21 @@ ARG SUPERSET_RELEASE_RC_TARBALL
# Can fetch source from svn or copy tarball from local mounted directory
COPY $SUPERSET_RELEASE_RC_TARBALL ./
RUN tar -xvf *.tar.gz
WORKDIR /home/superset/apache_superset-$VERSION/superset-frontend
WORKDIR /home/superset/apache-superset-$VERSION/superset-frontend
RUN npm ci \
&& npm run build \
&& rm -rf node_modules
&& npm run build \
&& rm -rf node_modules
WORKDIR /home/superset/apache_superset-$VERSION
WORKDIR /home/superset/apache-superset-$VERSION
RUN pip install --upgrade setuptools pip \
&& pip install -r requirements/base.txt \
&& pip install --no-cache-dir .
&& pip install -r requirements/base.txt \
&& pip install --no-cache-dir .
RUN flask fab babel-compile --target superset/translations
ENV PATH=/home/superset/superset/bin:$PATH \
PYTHONPATH=/home/superset/superset/ \
SUPERSET_TESTENV=true
PYTHONPATH=/home/superset/superset/ \
SUPERSET_TESTENV=true
COPY from_tarball_entrypoint.sh /entrypoint.sh
ENTRYPOINT ["/entrypoint.sh"]

View File

@@ -20,7 +20,7 @@ RUN useradd --user-group --create-home --no-log-init --shell /bin/bash superset
# Configure environment
ENV LANG=C.UTF-8 \
LC_ALL=C.UTF-8
LC_ALL=C.UTF-8
RUN apt-get update -y
@@ -30,14 +30,14 @@ RUN apt-get install -y apt-transport-https apt-utils
# Install superset dependencies
# https://superset.apache.org/docs/installation/installing-superset-from-scratch
RUN apt-get install -y subversion build-essential libssl-dev \
libffi-dev python3-dev libsasl2-dev libldap2-dev libxi-dev chromium zstd
libffi-dev python3-dev libsasl2-dev libldap2-dev libxi-dev chromium zstd
# Install nodejs for custom build
# https://nodejs.org/en/download/package-manager/
RUN set -eux; \
curl -sL https://deb.nodesource.com/setup_20.x | bash -; \
apt-get install -y nodejs; \
node --version;
curl -sL https://deb.nodesource.com/setup_20.x | bash -; \
apt-get install -y nodejs; \
node --version;
RUN if ! which npm; then apt-get install -y npm; fi
RUN mkdir -p /home/superset
@@ -49,20 +49,20 @@ ARG VERSION
# Can fetch source from svn or copy tarball from local mounted directory
RUN svn co https://dist.apache.org/repos/dist/dev/superset/$VERSION ./
RUN tar -xvf *.tar.gz
WORKDIR /home/superset/apache_superset-$VERSION/superset-frontend
WORKDIR /home/superset/apache-superset-$VERSION/superset-frontend
RUN npm ci \
&& npm run build \
&& rm -rf node_modules
&& npm run build \
&& rm -rf node_modules
WORKDIR /home/superset/apache_superset-$VERSION
WORKDIR /home/superset/apache-superset-$VERSION
RUN pip install --upgrade setuptools pip \
&& pip install -r requirements/base.txt \
&& pip install --no-cache-dir .
&& pip install -r requirements/base.txt \
&& pip install --no-cache-dir .
RUN flask fab babel-compile --target superset/translations
ENV PATH=/home/superset/superset/bin:$PATH \
PYTHONPATH=/home/superset/superset/
PYTHONPATH=/home/superset/superset/
COPY from_tarball_entrypoint.sh /entrypoint.sh
ENTRYPOINT ["/entrypoint.sh"]

View File

@@ -123,10 +123,10 @@ SUPERSET_RC=1
SUPERSET_GITHUB_BRANCH=1.5
SUPERSET_PGP_FULLNAME=villebro@apache.org
SUPERSET_VERSION_RC=1.5.1rc1
SUPERSET_RELEASE=apache_superset-1.5.1
SUPERSET_RELEASE_RC=apache_superset-1.5.1rc1
SUPERSET_RELEASE_TARBALL=apache_superset-1.5.1-source.tar.gz
SUPERSET_RELEASE_RC_TARBALL=apache_superset-1.5.1rc1-source.tar.gz
SUPERSET_RELEASE=apache-superset-1.5.1
SUPERSET_RELEASE_RC=apache-superset-1.5.1rc1
SUPERSET_RELEASE_TARBALL=apache-superset-1.5.1-source.tar.gz
SUPERSET_RELEASE_RC_TARBALL=apache-superset-1.5.1rc1-source.tar.gz
SUPERSET_TMP_ASF_SITE_PATH=/tmp/incubator-superset-site-1.5.1
-------------------------------
```
@@ -380,7 +380,7 @@ Official instructions:
https://www.apache.org/info/verification.html
We now have a handy script for anyone validating a release to use. The core of it is in this very folder, `verify_release.py`. Just make sure you have all three release files in the same directory (`{some version}.tar.gz`, `{some version}.tar.gz.asc` and `{some version}tar.gz.sha512`). Then you can pass this script the path to the `.gz` file like so:
`python verify_release.py ~/path/tp/apache_superset-{version/candidate}-source.tar.gz`
`python verify_release.py ~/path/tp/apache-superset-{version/candidate}-source.tar.gz`
If all goes well, you will see this result in your terminal:
@@ -470,7 +470,7 @@ while requesting access to push packages.
```bash
twine upload dist/apache_superset-${SUPERSET_VERSION}-py3-none-any.whl
twine upload dist/apache_superset-${SUPERSET_VERSION}.tar.gz
twine upload dist/apache-superset-${SUPERSET_VERSION}.tar.gz
```
Set your username to `__token__`

View File

@@ -31,7 +31,7 @@ The official source release:
https://downloads.apache.org/{{ project_module }}/{{ version }}
The PyPI package:
https://pypi.org/project/apache_superset/{{ version }}
https://pypi.org/project/apache-superset/{{ version }}
The CHANGELOG for the release:
https://github.com/apache/{{ project_module }}/blob/{{ version }}/CHANGELOG/{{ version }}.md

View File

@@ -32,7 +32,7 @@ else
SUPERSET_VERSION="${1}"
SUPERSET_RC="${2}"
SUPERSET_PGP_FULLNAME="${3}"
SUPERSET_RELEASE_RC_TARBALL="apache_superset-${SUPERSET_VERSION_RC}-source.tar.gz"
SUPERSET_RELEASE_RC_TARBALL="apache-superset-${SUPERSET_VERSION_RC}-source.tar.gz"
fi
SUPERSET_VERSION_RC="${SUPERSET_VERSION}rc${SUPERSET_RC}"

View File

@@ -22,7 +22,7 @@ if [ -z "${SUPERSET_VERSION_RC}" ] || [ -z "${SUPERSET_SVN_DEV_PATH}" ] || [ -z
exit 1
fi
SUPERSET_RELEASE_RC=apache_superset-"${SUPERSET_VERSION_RC}"
SUPERSET_RELEASE_RC=apache-superset-"${SUPERSET_VERSION_RC}"
SUPERSET_RELEASE_RC_TARBALL="${SUPERSET_RELEASE_RC}"-source.tar.gz
SUPERSET_RELEASE_RC_BASE_PATH="${SUPERSET_SVN_DEV_PATH}"/"${SUPERSET_VERSION_RC}"
SUPERSET_RELEASE_RC_TARBALL_PATH="${SUPERSET_RELEASE_RC_BASE_PATH}"/"${SUPERSET_RELEASE_RC_TARBALL}"

View File

@@ -50,8 +50,8 @@ else
export SUPERSET_GITHUB_BRANCH="${VERSION_MAJOR}.${VERSION_MINOR}"
export SUPERSET_PGP_FULLNAME="${2}"
export SUPERSET_VERSION_RC="${SUPERSET_VERSION}rc${VERSION_RC}"
export SUPERSET_RELEASE=apache_superset-"${SUPERSET_VERSION}"
export SUPERSET_RELEASE_RC=apache_superset-"${SUPERSET_VERSION_RC}"
export SUPERSET_RELEASE=apache-superset-"${SUPERSET_VERSION}"
export SUPERSET_RELEASE_RC=apache-superset-"${SUPERSET_VERSION_RC}"
export SUPERSET_RELEASE_TARBALL="${SUPERSET_RELEASE}"-source.tar.gz
export SUPERSET_RELEASE_RC_TARBALL="${SUPERSET_RELEASE_RC}"-source.tar.gz
export SUPERSET_TMP_ASF_SITE_PATH="/tmp/incubator-superset-site-${SUPERSET_VERSION}"

View File

@@ -27,7 +27,7 @@ if [ -z "${SUPERSET_SVN_DEV_PATH}" ]; then
fi
if [[ -n ${1} ]] && [[ ${1} == "local" ]]; then
SUPERSET_RELEASE_RC=apache_superset-"${SUPERSET_VERSION_RC}"
SUPERSET_RELEASE_RC=apache-superset-"${SUPERSET_VERSION_RC}"
SUPERSET_RELEASE_RC_TARBALL="${SUPERSET_RELEASE_RC}"-source.tar.gz
SUPERSET_TARBALL_PATH="${SUPERSET_SVN_DEV_PATH}"/${SUPERSET_VERSION_RC}/${SUPERSET_RELEASE_RC_TARBALL}
SUPERSET_TMP_TARBALL_FILENAME=_tmp_"${SUPERSET_VERSION_RC}".tar.gz

View File

@@ -38,7 +38,7 @@ get_pip_command() {
PYTHON=$(get_python_command)
PIP=$(get_pip_command)
# Get the release directory's path. If you unzip an Apache release and just run the npm script to validate the release, this will be a file name like `apache_superset-x.x.xrcx-source.tar.gz`
# Get the release directory's path. If you unzip an Apache release and just run the npm script to validate the release, this will be a file name like `apache-superset-x.x.xrcx-source.tar.gz`
RELEASE_ZIP_PATH="../../$(basename "$(dirname "$(pwd)")")-source.tar.gz"
# Install dependencies from requirements.txt if the file exists

View File

@@ -101,7 +101,6 @@ Join our growing community!
- [ELMO Cloud HR & Payroll](https://elmosoftware.com.au/)
- [Endress+Hauser](https://www.endress.com/) [@rumbin]
- [FBK - ICT center](https://ict.fbk.eu)
- [Formbricks](https://formbricks.com)
- [Gavagai](https://gavagai.io) [@gavagai-corp]
- [GfK Data Lab](https://www.gfk.com/home) [@mherr]
- [Hydrolix](https://www.hydrolix.io/)
@@ -115,7 +114,6 @@ Join our growing community!
- [Ona](https://ona.io) [@pld]
- [Orange](https://www.orange.com) [@icsu]
- [Oslandia](https://oslandia.com)
- [Oxylabs](https://oxylabs.io/) [@rytis-ulys]
- [Peak AI](https://www.peak.ai/) [@azhar22k]
- [PeopleDoc](https://www.people-doc.com) [@rodo]
- [PlaidCloud](https://www.plaidcloud.com)
@@ -138,7 +136,6 @@ Join our growing community!
- [Virtuoso QA](https://www.virtuosoqa.com)
- [Whale](https://whale.im)
- [Windsor.ai](https://www.windsor.ai/) [@octaviancorlade]
- [WinWin Network马上赢](https://brandct.cn/) [@wenbinye]
- [Zeta](https://www.zeta.tech/) [@shaikidris]
### Media & Entertainment
@@ -217,7 +214,6 @@ Join our growing community!
- [Increff](https://www.increff.com/) [@ishansinghania]
- [komoot](https://www.komoot.com/) [@christophlingg]
- [Let's Roam](https://www.letsroam.com/)
- [Machrent SA](https://www.machrent.com/)
- [Onebeat](https://1beat.com/) [@GuyAttia]
- [X](https://x.com/)
- [VLMedia](https://www.vlmedia.com.tr/) [@ibotheperfect]

View File

@@ -23,8 +23,7 @@ This file documents any backwards-incompatible changes in Superset and
assists people when migrating to a new version.
## Next
- [33116](https://github.com/apache/superset/pull/33116) In Echarts Series charts (e.g. Line, Area, Bar, etc.) charts, the `x_axis_sort_series` and `x_axis_sort_series_ascending` form data items have been renamed with `x_axis_sort` and `x_axis_sort_asc`.
There's a migration added that can potentially affect a significant number of existing charts.
- [32317](https://github.com/apache/superset/pull/32317) The horizontal filter bar feature is now out of testing/beta development and its feature flag `HORIZONTAL_FILTER_BAR` has been removed.
- [31976](https://github.com/apache/superset/pull/31976) Removed the `DISABLE_LEGACY_DATASOURCE_EDITOR` feature flag. The previous value of the feature flag was `True` and now the feature is permanently removed.
- [31959](https://github.com/apache/superset/pull/32000) Removes CSV_UPLOAD_MAX_SIZE config, use your web server to control file upload size.
@@ -34,10 +33,12 @@ There's a migration added that can potentially affect a significant number of ex
- [31794](https://github.com/apache/superset/pull/31794) Removed the previously deprecated `DASHBOARD_CROSS_FILTERS` feature flag
- [31774](https://github.com/apache/superset/pull/31774): Fixes the spelling of the `USE-ANALAGOUS-COLORS` feature flag. Please update any scripts/configuration item to use the new/corrected `USE-ANALOGOUS-COLORS` flag spelling.
- [31582](https://github.com/apache/superset/pull/31582) Removed the legacy Area, Bar, Event Flow, Heatmap, Histogram, Line, Sankey, and Sankey Loop charts. They were all automatically migrated to their ECharts counterparts with the exception of the Event Flow and Sankey Loop charts which were removed as they were not actively maintained and not widely used. If you were using the Event Flow or Sankey Loop charts, you will need to find an alternative solution.
- [31198](https://github.com/apache/superset/pull/31198) Disallows by default the use of the following ClickHouse functions: "version", "currentDatabase", "hostName".
- [29798](https://github.com/apache/superset/pull/29798) Since 3.1.0, the initial schedule for an alert or report was mistakenly offset by the specified timezone's relation to UTC. The initial schedule should now begin at the correct time.
- [30021](https://github.com/apache/superset/pull/30021) The `dev` layer in our Dockerfile no longer includes firefox binaries, only Chromium to reduce bloat/docker-build-time.
- [30099](https://github.com/apache/superset/pull/30099) Translations are no longer included in the default docker image builds. If your environment requires translations, you'll want to set the docker build arg `BUILD_TRANSLATIONS=true`.
- [31262](https://github.com/apache/superset/pull/31262) NOTE: deprecated `pylint` in favor of `ruff` as our only python linter. Only affects development workflows positively (not the release itself). It should cover the most important rules and be much faster, but some linting rules that were enforced before may not be enforced in exactly the same way as before.
- [31173](https://github.com/apache/superset/pull/31173) Modified `fetch_csrf_token` to align with HTTP standards, particularly regarding how cookies are handled. If you encounter any issues related to CSRF functionality, please report them as a new issue and reference this PR for context.
- [31413](https://github.com/apache/superset/pull/31413) Enable the DATE_FORMAT_IN_EMAIL_SUBJECT feature flag to allow users to specify a date format for the email subject, which will then be replaced with the actual date.
- [31385](https://github.com/apache/superset/pull/31385) Significant docker refactor, reducing access levels for the `superset` user, streamlining layer building, ...
- [31503](https://github.com/apache/superset/pull/31503) Deprecating python 3.9.x support, 3.11 is now the recommended version and 3.10 is still supported over the Superset 5.0 lifecycle.
@@ -47,15 +48,9 @@ There's a migration added that can potentially affect a significant number of ex
- [30284](https://github.com/apache/superset/pull/30284) Deprecated GLOBAL_ASYNC_QUERIES_REDIS_CONFIG in favor of the new GLOBAL_ASYNC_QUERIES_CACHE_BACKEND configuration. To leverage Redis Sentinel, set CACHE_TYPE to RedisSentinelCache, or use RedisCache for standalone Redis
- [31961](https://github.com/apache/superset/pull/31961) Upgraded React from version 16.13.1 to 17.0.2. If you are using custom frontend extensions or plugins, you may need to update them to be compatible with React 17.
- [31260](https://github.com/apache/superset/pull/31260) Docker images now use `uv pip install` instead of `pip install` to manage the python environment. Most docker-based deployments will be affected, whether you derive one of the published images, or have custom bootstrap script that install python libraries (drivers)
- [32432](https://github.com/apache/superset/pull/31260) Moves the List Roles FAB view to the frontend and requires `FAB_ADD_SECURITY_API` to be enabled in the configuration and `superset init` to be executed.
### Potential Downtime
## 4.1.2
- [31198](https://github.com/apache/superset/pull/31198) Disallows by default the use of the following ClickHouse functions: "version", "currentDatabase", "hostName".
- [31173](https://github.com/apache/superset/pull/31173) Modified `fetch_csrf_token` to align with HTTP standards, particularly regarding how cookies are handled. If you encounter any issues related to CSRF functionality, please report them as a new issue and reference this PR for context.
## 4.1.0
- [29274](https://github.com/apache/superset/pull/29274): We made it easier to trigger CI on your

View File

@@ -29,6 +29,7 @@ x-superset-volumes: &superset-volumes
- ./superset-frontend:/app/superset-frontend
- superset_home:/app/superset_home
- ./tests:/app/tests
x-common-build: &common-build
context: .
target: ${SUPERSET_BUILD_TARGET:-dev} # can use `dev` (default) or `lean`
@@ -42,11 +43,6 @@ x-common-build: &common-build
services:
nginx:
env_file:
- path: docker/.env # default
required: true
- path: docker/.env-local # optional override
required: false
image: nginx:latest
container_name: superset_nginx
restart: unless-stopped
@@ -56,8 +52,6 @@ services:
- "host.docker.internal:host-gateway"
volumes:
- ./docker/nginx/nginx.conf:/etc/nginx/nginx.conf:ro
- ./docker/nginx/templates:/etc/nginx/templates:ro
redis:
image: redis:7
container_name: superset_cache

View File

@@ -54,7 +54,6 @@ REDIS_HOST=redis
REDIS_PORT=6379
FLASK_DEBUG=true
SUPERSET_APP_ROOT="/"
SUPERSET_ENV=development
SUPERSET_LOAD_EXAMPLES=yes
CYPRESS_CONFIG=false
@@ -63,6 +62,7 @@ MAPBOX_API_KEY=''
# Make sure you set this to a unique secure random value on production
SUPERSET_SECRET_KEY=TEST_NON_DEV_SECRET
ENABLE_PLAYWRIGHT=false
PUPPETEER_SKIP_CHROMIUM_DOWNLOAD=true
BUILD_SUPERSET_FRONTEND_IN_DOCKER=true

View File

@@ -50,11 +50,7 @@ fi
#
if [ -f "${REQUIREMENTS_LOCAL}" ]; then
echo "Installing local overrides at ${REQUIREMENTS_LOCAL}"
if command -v uv > /dev/null 2>&1; then
uv pip install --no-cache-dir -r "${REQUIREMENTS_LOCAL}"
else
pip install --no-cache-dir -r "${REQUIREMENTS_LOCAL}"
fi
uv pip install --no-cache-dir -r "${REQUIREMENTS_LOCAL}"
else
echo "Skipping local overrides"
fi

View File

@@ -1,19 +0,0 @@
#!/usr/bin/env bash
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
curl -f "http://localhost:${SUPERSET_PORT}/${SUPERSET_APP_ROOT/\//}/health" || exit 1

View File

@@ -90,5 +90,44 @@ http {
client_max_body_size 10m;
include /etc/nginx/conf.d/superset.conf;
upstream superset_app {
server host.docker.internal:8088;
keepalive 100;
}
upstream superset_websocket {
server host.docker.internal:8080;
keepalive 100;
}
server {
listen 80 default_server;
server_name _;
location /ws {
proxy_pass http://superset_websocket;
proxy_http_version 1.1;
proxy_set_header Upgrade $http_upgrade;
proxy_set_header Connection "Upgrade";
proxy_set_header Host $host;
}
location /static {
proxy_pass http://host.docker.internal:9000; # Proxy to superset-node
proxy_http_version 1.1;
proxy_set_header Host $host;
}
location / {
proxy_pass http://superset_app;
proxy_set_header Host $host;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $remote_addr;
proxy_set_header X-Forwarded-Host $host;
proxy_set_header X-Forwarded-Proto $scheme;
proxy_http_version 1.1;
port_in_redirect off;
proxy_connect_timeout 300;
}
}
}

View File

@@ -1,57 +0,0 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
upstream superset_app {
server host.docker.internal:8088;
keepalive 100;
}
upstream superset_websocket {
server host.docker.internal:8080;
keepalive 100;
}
server {
listen 80 default_server;
server_name _;
location /ws {
proxy_pass http://superset_websocket;
proxy_http_version 1.1;
proxy_set_header Upgrade $http_upgrade;
proxy_set_header Connection "Upgrade";
proxy_set_header Host $host;
}
location ${SUPERSET_APP_ROOT}/static {
proxy_pass http://host.docker.internal:9000; # Proxy to superset-node
proxy_http_version 1.1;
proxy_set_header Host $host;
}
location ${SUPERSET_APP_ROOT} {
proxy_pass http://superset_app;
proxy_set_header Host $http_host;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto $scheme;
proxy_http_version 1.1;
port_in_redirect off;
proxy_connect_timeout 300;
}
}

View File

@@ -71,7 +71,6 @@ CACHE_CONFIG = {
"CACHE_REDIS_DB": REDIS_RESULTS_DB,
}
DATA_CACHE_CONFIG = CACHE_CONFIG
THUMBNAIL_CACHE_CONFIG = CACHE_CONFIG
class CeleryConfig:
@@ -101,11 +100,9 @@ CELERY_CONFIG = CeleryConfig
FEATURE_FLAGS = {"ALERT_REPORTS": True}
ALERT_REPORTS_NOTIFICATION_DRY_RUN = True
WEBDRIVER_BASEURL = f"http://superset_app{os.environ.get('SUPERSET_APP_ROOT', '/')}/" # When using docker compose baseurl should be http://superset_nginx{ENV{BASEPATH}}/ # noqa: E501
WEBDRIVER_BASEURL = "http://superset:8088/" # When using docker compose baseurl should be http://superset_app:8088/ # noqa: E501
# The base URL for the email report hyperlinks.
WEBDRIVER_BASEURL_USER_FRIENDLY = (
f"http://localhost:8888/{os.environ.get('SUPERSET_APP_ROOT', '/')}/"
)
WEBDRIVER_BASEURL_USER_FRIENDLY = WEBDRIVER_BASEURL
SQLLAB_CTAS_NO_LIMIT = True
log_level_text = os.getenv("SUPERSET_LOG_LEVEL", "INFO")

View File

@@ -86,7 +86,6 @@
"Israel",
"Italy",
"Italy (regions)",
"Ivory Coast",
"Japan",
"Jordan",
"Kazakhstan",
@@ -144,7 +143,6 @@
"Poland",
"Portugal",
"Qatar",
"Republic Of Serbia",
"Romania",
"Russia",
"Rwanda",

View File

@@ -215,45 +215,6 @@ In case the reverse proxy is used for providing SSL encryption, an explicit defi
RequestHeader set X-Forwarded-Proto "https"
```
## Configuring the application root
*Please be advised that this feature is in BETA.*
Superset supports running the application under a non-root path. The root path
prefix can be specified in one of two ways:
- Setting the `SUPERSET_APP_ROOT` environment variable to the desired prefix.
- Customizing the [Flask entrypoint](https://github.com/apache/superset/blob/master/superset/app.py#L29)
by passing the `superset_app_root` variable.
Note, the prefix should start with a `/`.
### Customizing the Flask entrypoint
To configure a prefix, e.g `/analytics`, pass the `superset_app_root` argument to
`create_app` when calling flask run either through the `FLASK_APP`
environment variable:
```sh
FLASK_APP="superset:create_app(superset_app_root='/analytics')"
```
or as part of the `--app` argument to `flask run`:
```sh
flask --app "superset.app:create_app(superset_app_root='/analytics')"
```
### Docker builds
The [docker compose](/docs/installation/docker-compose#configuring-further) developer
configuration includes an additional environmental variable,
[`SUPERSET_APP_ROOT`](https://github.com/apache/superset/blob/master/docker/.env),
to simplify the process of setting up a non-default root path across the services.
In `docker/.env-local` set `SUPERSET_APP_ROOT` to the desired prefix and then bring the
services up with `docker compose up --detach`.
## Custom OAuth2 Configuration
Superset is built on Flask-AppBuilder (FAB), which supports many providers out of the box

View File

@@ -72,7 +72,7 @@ are compatible with Superset.
| [PostgreSQL](/docs/configuration/databases#postgres) | `pip install psycopg2` | `postgresql://<UserName>:<DBPassword>@<Database Host>/<Database Name>` |
| [Presto](/docs/configuration/databases#presto) | `pip install pyhive` | `presto://{username}:{password}@{hostname}:{port}/{database}` |
| [Rockset](/docs/configuration/databases#rockset) | `pip install rockset-sqlalchemy` | `rockset://<api_key>:@<api_server>` |
| [SAP Hana](/docs/configuration/databases#hana) | `pip install hdbcli sqlalchemy-hana` or `pip install apache_superset[hana]` | `hana://{username}:{password}@{host}:{port}` |
| [SAP Hana](/docs/configuration/databases#hana) | `pip install hdbcli sqlalchemy-hana` or `pip install apache-superset[hana]` | `hana://{username}:{password}@{host}:{port}` |
| [StarRocks](/docs/configuration/databases#starrocks) | `pip install starrocks` | `starrocks://<User>:<Password>@<Host>:<Port>/<Catalog>.<Database>` |
| [Snowflake](/docs/configuration/databases#snowflake) | `pip install snowflake-sqlalchemy` | `snowflake://{user}:{password}@{account}.{region}/{database}?role={role}&warehouse={warehouse}` |
| SQLite | No additional library needed | `sqlite://path/to/file.db?check_same_thread=false` |
@@ -1293,13 +1293,6 @@ The connection string for SQL Server looks like this:
mssql+pyodbc:///?odbc_connect=Driver%3D%7BODBC+Driver+17+for+SQL+Server%7D%3BServer%3Dtcp%3A%3Cmy_server%3E%2C1433%3BDatabase%3Dmy_database%3BUid%3Dmy_user_name%3BPwd%3Dmy_password%3BEncrypt%3Dyes%3BConnection+Timeout%3D30
```
:::note
You might have noticed that some special characters are used in the above connection string. For example see the `odbc_connect` parameter. The value is `Driver%3D%7BODBC+Driver+17+for+SQL+Server%7D%3B` which is a URL-encoded form of `Driver={ODBC+Driver+17+for+SQL+Server};`. It's important that the connection string is URL encoded.
For more information about this, check the [sqlalchemy documentation](https://docs.sqlalchemy.org/en/20/core/engines.html#escaping-special-characters-such-as-signs-in-passwords), which says: `When constructing a fully formed URL string to pass to create_engine(), special characters such as those that may be used in the user and password need to be URL encoded to be parsed correctly. This includes the @ sign.`
:::
#### StarRocks
The [sqlalchemy-starrocks](https://pypi.org/project/starrocks/) library is the recommended

View File

@@ -11,7 +11,7 @@ version: 1
To configure CORS, or cross-origin resource sharing, the following dependency must be installed:
```python
pip install apache_superset[cors]
pip install apache-superset[cors]
```
The following keys in `superset_config.py` can be specified to configure CORS:
@@ -138,4 +138,4 @@ of your additional middleware classes.
For example, to use `AUTH_REMOTE_USER` from behind a proxy server like nginx, you have to add a
simple middleware class to add the value of `HTTP_X_PROXY_REMOTE_USER` (or any other custom header
from the proxy) to Gunicorns `REMOTE_USER` environment variable.
from the proxy) to Gunicorns `REMOTE_USER` environment variable:

View File

@@ -220,36 +220,6 @@ cache key by adding the following parameter to your Jinja code:
{{ current_user_email(add_to_cache_keys=False) }}
```
**Current User Roles**
The `{{ current_user_roles() }}` macro returns an array of roles for the logged in user.
If you have caching enabled in your Superset configuration, then by default the roles value will be used
by Superset when calculating the cache key. A cache key is a unique identifier that determines if there's a
cache hit in the future and Superset can retrieve cached data.
You can disable the inclusion of the roles value in the calculation of the
cache key by adding the following parameter to your Jinja code:
```python
{{ current_user_roles(add_to_cache_keys=False) }}
```
You can json-stringify the array by adding `|tojson` to your Jinja code:
```python
{{ current_user_roles()|tojson }}
```
You can use the `|where_in` filter to use your roles in a SQL statement. For example, if `current_user_roles()` returns `['admin', 'viewer']`, the following template:
```python
SELECT * FROM users WHERE role IN {{ current_user_roles()|where_in }}
```
Will be rendered as:
```sql
SELECT * FROM users WHERE role IN ('admin', 'viewer')
```
**Custom URL Parameters**
The `{{ url_param('custom_variable') }}` macro lets you define arbitrary URL
@@ -491,37 +461,3 @@ This macro avoids copy/paste, allowing users to centralize the metric definition
The `dataset_id` parameter is optional, and if not provided Superset will use the current dataset from context (for example, when using this macro in the Chart Builder, by default the `macro_key` will be searched in the dataset powering the chart).
The parameter can be used in SQL Lab, or when fetching a metric from another dataset.
## Available Filters
Superset supports [builtin filters from the Jinja2 templating package](https://jinja.palletsprojects.com/en/stable/templates/#builtin-filters). Custom filters have also been implemented:
**Where In**
Parses a list into a SQL-compatible statement. This is useful with macros that return an array (for example the `filter_values` macro):
```
Dashboard filter with "First", "Second" and "Third" options selected
{{ filter_values('column') }} => ["First", "Second", "Third"]
{{ filter_values('column')|where_in }} => ('First', 'Second', 'Third')
```
By default, this filter returns `()` (as a string) in case the value is null. The `default_to_none` parameter can be set to `True` to return null in this case:
```
Dashboard filter without any value applied
{{ filter_values('column') }} => ()
{{ filter_values('column')|where_in(default_to_none=True) }} => None
```
**To Datetime**
Loads a string as a `datetime` object. This is useful when performing date operations. For example:
```
{% set from_expr = get_time_filter("dttm", strftime="%Y-%m-%d").from_expr %}
{% set to_expr = get_time_filter("dttm", strftime="%Y-%m-%d").to_expr %}
{% if (to_expr|to_datetime(format="%Y-%m-%d") - from_expr|to_datetime(format="%Y-%m-%d")).days > 100 %}
do something
{% else %}
do something else
{% endif %}
```

View File

@@ -26,9 +26,9 @@ More references:
Here's a list of repositories that contain Superset-related packages:
- [apache/superset](https://github.com/apache/superset)
is the main repository containing the `apache_superset` Python package
is the main repository containing the `apache-superset` Python package
distributed on
[pypi](https://pypi.org/project/apache_superset/). This repository
[pypi](https://pypi.org/project/apache-superset/). This repository
also includes Superset's main TypeScript/JavaScript bundles and react apps under
the [superset-frontend](https://github.com/apache/superset/tree/master/superset-frontend)
folder.

View File

@@ -4,95 +4,9 @@ version: 1
---
import InteractiveSVG from '../../src/components/InteractiveERDSVG';
import Mermaid from '@theme/Mermaid';
# Resources
## High Level Architecture
<div style={{ maxWidth: "600px", margin: "0 auto", marginLeft: 0, marginRight: "auto" }}>
```mermaid
flowchart TD
%% Top Level
LB["<b>Load Balancer(s)</b><br/>(optional)"]
LB -.-> WebServers
%% Web Servers
subgraph WebServers ["<b>Web Server(s)</b>"]
WS1["<b>Frontend</b><br/>(React, AntD, ECharts, AGGrid)"]
WS2["<b>Backend</b><br/>(Python, Flask, SQLAlchemy, Pandas, ...)"]
end
%% Infra
subgraph InfraServices ["<b>Infra</b>"]
DB[("<b>Metadata Database</b><br/>(Postgres / MySQL)")]
subgraph Caching ["<b>Caching Subservices<br/></b>(Redis, memcache, S3, ...)"]
direction LR
DummySpace[" "]:::invisible
QueryCache["<b>Query Results Cache</b><br/>(Accelerated Dashboards)"]
CsvCache["<b>CSV Exports Cache</b>"]
ThumbnailCache["<b>Thumbnails Cache</b>"]
AlertImageCache["<b>Alert/Report Images Cache</b>"]
QueryCache -- " " --> CsvCache
linkStyle 1 stroke:transparent;
ThumbnailCache -- " " --> AlertImageCache
linkStyle 2 stroke:transparent;
end
Broker(("<b>Message Queue</b><br/>(Redis / RabbitMQ / SQS)"))
end
AsyncBackend["<b>Async Workers (Celery)</b><br>required for Alerts & Reports, thumbnails, CSV exports, long-running workloads, ..."]
%% External DBs
subgraph ExternalDatabases ["<b>Analytics Databases</b>"]
direction LR
BigQuery[(BigQuery)]
Snowflake[(Snowflake)]
Redshift[(Redshift)]
Postgres[(Postgres)]
Postgres[(... any ...)]
end
%% Connections
LB -.-> WebServers
WebServers --> DB
WebServers -.-> Caching
WebServers -.-> Broker
WebServers -.-> ExternalDatabases
Broker -.-> AsyncBackend
AsyncBackend -.-> ExternalDatabases
AsyncBackend -.-> Caching
%% Legend styling
classDef requiredNode stroke-width:2px,stroke:black;
class Required requiredNode;
class Optional optionalNode;
%% Hide real arrow
linkStyle 0 stroke:transparent;
%% Styling
classDef optionalNode stroke-dasharray: 5 5, opacity:0.9;
class LB optionalNode;
class Caching optionalNode;
class AsyncBackend optionalNode;
class Broker optionalNode;
class QueryCache optionalNode;
class CsvCache optionalNode;
class ThumbnailCache optionalNode;
class AlertImageCache optionalNode;
class Celery optionalNode;
classDef invisible fill:transparent,stroke:transparent;
```
</div>
## Entity-Relationship Diagram
Here is our interactive ERD:

View File

@@ -1,7 +1,7 @@
---
title: Docker Builds
hide_title: true
sidebar_position: 7
sidebar_position: 6
version: 1
---

View File

@@ -1,7 +1,7 @@
---
title: Docker Compose
hide_title: true
sidebar_position: 5
sidebar_position: 4
version: 1
---
@@ -112,15 +112,7 @@ docker compose -f docker-compose-non-dev.yml up
### Option #3 - boot up an official release
```bash
# Set the version you want to run
export TAG=3.1.1
# Fetch the tag you're about to check out (assuming you shallow-cloned the repo)
git fetch --depth=1 origin tag $TAG
# Could also fetch all tags too if you've got bandwidth to spare
# git fetch --tags
# Checkout the corresponding git ref
git checkout $TAG
# Fire up docker compose
docker compose -f docker-compose-image-tag.yml up
```

View File

@@ -1,58 +0,0 @@
---
title: Installation Methods
hide_title: true
sidebar_position: 2
version: 1
---
import useBaseUrl from "@docusaurus/useBaseUrl";
# Installation Methods
How should you install Superset? Here's a comparison of the different options. It will help if you've first read the [Architecture](/docs/installation/architecture.mdx) page to understand Superset's different components.
The fundamental trade-off is between you needing to do more of the detail work yourself vs. using a more complex deployment route that handles those details.
## [Docker Compose](/docs/installation/docker-compose.mdx)
**Summary:** This takes advantage of containerization while remaining simpler than Kubernetes. This is the best way to try out Superset; it's also useful for developing & contributing back to Superset.
If you're not just demoing the software, you'll need a moderate understanding of Docker to customize your deployment and avoid a few risks. Even when fully-optimized this is not as robust a method as Kubernetes when it comes to large-scale production deployments.
You manage a superset-config.py file and a docker-compose.yml file. Docker Compose brings up all the needed services - the Superset application, a Postgres metadata DB, Redis cache, Celery worker and beat. They are automatically connected to each other.
**Responsibilities**
You will need to back up your metadata DB. That could mean backing up the service running as a Docker container and its volume; ideally you are running Postgres as a service outside of that container and backing up that service.
You will also need to extend the Superset docker image. The default `lean` images do not contain drivers needed to access your metadata database (Postgres or MySQL), nor to access your data warehouse, nor the headless browser needed for Alerts & Reports. You could run a `-dev` image while demoing Superset, which has some of this, but you'll still need to install the driver for your data warehouse. The `-dev` images run as root, which is not recommended for production.
Ideally you will build your own image of Superset that extends `lean`, adding what your deployment needs.
See [Docker Build Presets](/docs/installation/docker-builds/#build-presets) for more information about the different image versions you can extend.
## [Kubernetes (K8s)](/docs/installation/kubernetes.mdx)
**Summary:** This is the best-practice way to deploy a production instance of Superset, but has the steepest skill requirement - someone who knows Kubernetes.
You will deploy Superset into a K8s cluster. The most common method is using the community-maintained Helm chart, though work is now underway to implement [SIP-149 - a Kubernetes Operator for Superset](https://github.com/apache/superset/issues/31408).
A K8s deployment can scale up and down based on usage and deploy rolling updates with zero downtime - features that big deployments appreciate.
**Responsibilities**
You will need to build your own Docker image, and back up your metadata DB, both as described in Docker Compose above. You'll also need to customize your Helm chart values and deploy and maintain your Kubernetes cluster.
## [PyPI (Python)](/docs/installation/pypi.mdx)
**Summary:** This is the only method that requires no knowledge of containers. It requires the most hands-on work to deploy, connect, and maintain each component.
You install Superset as a Python package and run it that way, providing your own metadata database. Superset has documentation on how to install this way, but it is updated infrequently.
If you want caching, you'll set up Redis or RabbitMQ. If you want Alerts & Reports, you'll set up Celery.
**Responsibilities**
You will need to get the component services running and communicating with each other. You'll need to arrange backups of your metadata database.
When upgrading, you'll need to manage the system environment and packages and ensure all components have functional dependencies.

View File

@@ -1,7 +1,7 @@
---
title: Kubernetes
hide_title: true
sidebar_position: 3
sidebar_position: 2
version: 1
---

View File

@@ -1,7 +1,7 @@
---
title: PyPI
hide_title: true
sidebar_position: 4
sidebar_position: 3
version: 1
---
@@ -12,7 +12,7 @@ import useBaseUrl from "@docusaurus/useBaseUrl";
<img src={useBaseUrl("/img/pypi.png" )} width="150" />
<br /><br />
This page describes how to install Superset using the `apache_superset` package [published on PyPI](https://pypi.org/project/apache_superset/).
This page describes how to install Superset using the `apache-superset` package [published on PyPI](https://pypi.org/project/apache-superset/).
## OS Dependencies
@@ -124,10 +124,10 @@ command line.
### Installing and Initializing Superset
First, start by installing `apache_superset`:
First, start by installing `apache-superset`:
```bash
pip install apache_superset
pip install apache-superset
```
Then, define mandatory configurations, SECRET_KEY and FLASK_APP:

View File

@@ -1,7 +1,7 @@
---
title: Upgrading Superset
hide_title: true
sidebar_position: 6
sidebar_position: 5
version: 1
---
@@ -32,7 +32,7 @@ docker compose up
To upgrade superset in a native installation, run the following commands:
```bash
pip install apache_superset --upgrade
pip install apache-superset --upgrade
```
## Upgrading the Metadata Database

View File

@@ -32,7 +32,7 @@ git clone https://github.com/apache/superset
$ cd superset
# Set the repo to the state associated with the latest official version
$ git checkout tags/4.1.2
$ git checkout tags/4.1.1
# Fire up Superset using Docker Compose
$ docker compose -f docker-compose-image-tag.yml up

View File

@@ -64,26 +64,6 @@ tables in the **Permissions** dropdown. To select the data sources you want to a
You can then confirm with users assigned to the **Gamma** role that they see the
objects (dashboards and slices) associated with the tables you just extended them.
### SQL Execution Security Considerations
Apache Superset includes features designed to provide safeguards when interacting with connected databases, such as the `DISALLOWED_SQL_FUNCTIONS` configuration setting. This aims to prevent the execution of potentially harmful database functions or system variables directly from Superset interfaces like SQL Lab.
However, it is crucial to understand the following:
**Superset is Not a Database Firewall**: Superset's built-in checks, like `DISALLOWED_SQL_FUNCTIONS`, provide a layer of protection but cannot guarantee complete security against all database-level threats or advanced bypass techniques (like specific comment injection methods). They should be viewed as a supplement to, not a replacement for, robust database security.
**Configuration is Key**: The effectiveness of Superset's safeguards heavily depends on proper configuration by the Superset administrator. This includes maintaining the `DISALLOWED_SQL_FUNCTIONS` list, carefully managing feature flags (like `ENABLE_TEMPLATE_PROCESSING`), and configuring other security settings appropriately.
**Database Security is Paramount**: The ultimate responsibility for securing database access, controlling permissions, and preventing unauthorized function execution lies with the database administrators (DBAs) and security teams managing the underlying database instance.
**Recommended Database Practices**: We strongly recommend implementing security best practices at the database level, including:
* **Least Privilege**: Connecting Superset using dedicated database user accounts with the minimum permissions required for Superset's operation (typically read-only access to necessary schemas/tables).
* **Database Roles & Permissions**: Utilizing database-native roles and permissions to restrict access to sensitive functions, system variables (like `@@hostname`), schemas, or tables.
* **Network Security**: Employing network-level controls like database firewalls or proxies to restrict connections.
* **Auditing**: Enabling database-level auditing to monitor executed queries and access patterns.
By combining Superset's configurable safeguards with strong database-level security practices, you can achieve a more robust and layered security posture.
### REST API for user & role management
Flask-AppBuilder supports a REST API for user CRUD,
@@ -300,49 +280,6 @@ TALISMAN_CONFIG = {
"content_security_policy": { ...
```
#### Configuring Talisman in Superset
Talisman settings in Superset can be modified using superset_config.py. If you need to adjust security policies, you can override the default configuration.
Example: Overriding Talisman Configuration in superset_config.py for loading images form s3 or other external sources.
```python
TALISMAN_CONFIG = {
"content_security_policy": {
"base-uri": ["'self'"],
"default-src": ["'self'"],
"img-src": [
"'self'",
"blob:",
"data:",
"https://apachesuperset.gateway.scarf.sh",
"https://static.scarf.sh/",
# "https://cdn.brandfolder.io", # Uncomment when SLACK_ENABLE_AVATARS is True # noqa: E501
"ows.terrestris.de",
"aws.s3.com", # Add Your Bucket or external data source
],
"worker-src": ["'self'", "blob:"],
"connect-src": [
"'self'",
"https://api.mapbox.com",
"https://events.mapbox.com",
],
"object-src": "'none'",
"style-src": [
"'self'",
"'unsafe-inline'",
],
"script-src": ["'self'", "'strict-dynamic'"],
},
"content_security_policy_nonce_in": ["script-src"],
"force_https": False,
"session_cookie_secure": False,
}
```
# For more information on setting up Talisman, please refer to
https://superset.apache.org/docs/configuration/networking-settings/#changing-flask-talisman-csp
### Reporting Security Vulnerabilities
Apache Software Foundation takes a rigorous standpoint in annihilating the security issues in its

View File

@@ -12,12 +12,8 @@ import useBaseUrl from "@docusaurus/useBaseUrl";
This section is focused on documentation for end-users who will be using Superset
for the data analysis and exploration workflow
(data analysts, business analysts, data
scientists, etc).
:::tip
In addition to this site, [Preset.io](http://preset.io/) maintains an updated set of end-user
scientists, etc). In addition to this site, [Preset.io](http://preset.io/) maintains an updated set of end-user
documentation at [docs.preset.io](https://docs.preset.io/).
:::
This tutorial targets someone who wants to create charts and dashboards in Superset. Well show you
how to connect Superset to a new database and configure a table in that database for analysis.
@@ -179,36 +175,23 @@ into a position you like onto the underlying grid.
Congrats! Youve successfully linked, analyzed, and visualized data in Superset. There are a wealth
of other table configuration and visualization options, so please start exploring and creating
slices and dashboards of your own.
slices and dashboards of your own
ֿ
### Manage access to Dashboards
Access to dashboards is managed via owners (users that have edit permissions to the dashboard).
Access to dashboards is managed via owners (users that have edit permissions to the dashboard)
Non-owner users access can be managed in two different ways. The dashboard needs to be published to be visible to other users.
Non-owner users access can be managed two different ways:
1. Dataset permissions - if you add to the relevant role permissions to datasets it automatically grants implicit access to all dashboards that uses those permitted datasets.
2. Dashboard roles - if you enable [**DASHBOARD_RBAC** feature flag](/docs/configuration/configuring-superset#feature-flags) then you will be able to manage which roles can access the dashboard
1. Dataset permissions - if you add to the relevant role permissions to datasets it automatically grants implicit access to all dashboards that uses those permitted datasets
2. Dashboard roles - if you enable **DASHBOARD_RBAC** [feature flag](/docs/configuration/configuring-superset#feature-flags) then you be able to manage which roles can access the dashboard
- Granting a role access to a dashboard will bypass dataset level checks. Having dashboard access implicitly grants read access to all the featured charts in the dashboard, and thereby also all the associated datasets.
- If no roles are specified for a dashboard, regular **Dataset permissions** will apply.
<img src={useBaseUrl("/img/tutorial/tutorial_dashboard_access.png" )} />
### Publishing a Dashboard
If you would like to make your dashboard available to other users, click on the `Draft` button next to the
title of your dashboard.
<img src={useBaseUrl("/img/tutorial/publish_button_dashboard.png" )} />
:::warning
Draft dashboards are only visible to the dashboard owners and admins. Published dashboards are visible to all users with access to the underlying datasets or if RBAC is enabled, to the roles that have been granted access to the dashboard.
:::
### Mark a Dashboard as Favorite
You can mark a dashboard as a favorite by clicking on the star icon next to the title of your dashboard. This makes it easier to find it in the list of dashboards or on the home page.
### Customizing dashboard
The following URL parameters can be used to modify how the dashboard is rendered:

View File

@@ -31,13 +31,10 @@ const config: Config = {
baseUrl: '/',
onBrokenLinks: 'throw',
onBrokenMarkdownLinks: 'throw',
markdown: {
mermaid: true,
},
favicon: '/img/favicon.ico',
organizationName: 'apache',
projectName: 'superset',
themes: ['@saucelabs/theme-github-codeblock', '@docusaurus/theme-mermaid'],
themes: ['@saucelabs/theme-github-codeblock'],
plugins: [
[
'docusaurus-plugin-less',
@@ -342,19 +339,20 @@ const config: Config = {
async: true,
'data-website-id': 'c6a8a8b8-3127-48f9-97a7-51e9e10d20d0',
'data-project-name': 'Apache Superset',
'data-project-color': '#FFFFFF',
'data-project-color': '#1AA1C2',
'data-project-logo':
'https://images.seeklogo.com/logo-png/50/2/superset-icon-logo-png_seeklogo-500354.png',
'data-modal-override-open-id': 'ask-ai-input',
'data-modal-override-open-class': 'search-input',
'data-modal-open-by-default': 'true',
'data-modal-disclaimer':
'This is a custom LLM for Apache Superset with access to all [documentation](superset.apache.org/docs/intro/), [GitHub Open Issues, PRs and READMEs](github.com/apache/superset).&#10;&#10;Companies deploy assistants like this ([built by kapa.ai](https://kapa.ai)) on docs via [website widget](https://docs.kapa.ai/integrations/website-widget) (Docker, Reddit), in [support forms](https://docs.kapa.ai/integrations/support-form-deflector) for ticket deflection (Monday.com, Mapbox), or as [Slack bots](https://docs.kapa.ai/integrations/slack-bot) with private sources.',
'data-modal-example-questions':
'How do I install Superset?,How can I contribute to Superset?',
'data-button-text-color': 'rgb(81,166,197)',
'data-modal-header-bg-color': '#ffffff',
'data-modal-title-color': 'rgb(81,166,197)',
'data-modal-title': 'Apache Superset AI',
'How do I use Docker Compose?,How to run Supersets on kubernetes?',
'data-button-text-color': '#FFFFFF',
'data-modal-header-bg-color': '#1AA1C2',
'data-modal-title-color': '#FFFFFF',
'data-modal-title': 'Superset Ask AI',
'data-modal-disclaimer-text-color': '#000000',
'data-consent-required': 'true',
'data-consent-screen-disclaimer':

View File

@@ -19,14 +19,13 @@
},
"dependencies": {
"@ant-design/icons": "^5.5.2",
"@docusaurus/core": "3.7.0",
"@docusaurus/plugin-client-redirects": "3.7.0",
"@docusaurus/preset-classic": "3.7.0",
"@docusaurus/theme-mermaid": "3.7.0",
"@docusaurus/core": "^3.5.2",
"@docusaurus/plugin-client-redirects": "^3.5.2",
"@docusaurus/preset-classic": "^3.5.2",
"@emotion/styled": "^10.0.27",
"@saucelabs/theme-github-codeblock": "^0.3.0",
"@superset-ui/style": "^0.14.23",
"antd": "^5.24.5",
"antd": "^5.24.2",
"docusaurus-plugin-less": "^2.0.2",
"less": "^4.2.2",
"less-loader": "^11.0.0",
@@ -35,7 +34,7 @@
"react-dom": "^18.3.1",
"react-github-btn": "^1.4.0",
"react-svg-pan-zoom": "^3.13.1",
"swagger-ui-react": "^5.20.2"
"swagger-ui-react": "^5.20.0"
},
"devDependencies": {
"@docusaurus/module-type-aliases": "^3.7.0",
@@ -44,7 +43,7 @@
"@typescript-eslint/eslint-plugin": "^5.0.0",
"@typescript-eslint/parser": "^5.0.0",
"eslint": "^8.0.0",
"eslint-config-prettier": "^10.1.1",
"eslint-config-prettier": "^10.0.2",
"eslint-plugin-prettier": "^4.0.0",
"eslint-plugin-react": "^7.0.0",
"prettier": "^2.0.0",

View File

@@ -111,7 +111,7 @@ const StyledTitleContainer = styled('div')`
}
`;
const StyledButton = styled(Link as React.ComponentType<any>)`
const StyledButton = styled(Link)`
border-radius: 10px;
font-size: 20px;
font-weight: bold;

View File

@@ -58,6 +58,7 @@ ul.dropdown__menu svg {
--ifm-code-font-size: 95%;
--ifm-menu-link-padding-vertical: 12px;
--doc-sidebar-width: 350px !important;
--ifm-navbar-height: none;
--ifm-font-family-base: Roboto;
--ifm-footer-background-color: #173036;
--ifm-footer-color: #87939a;

View File

@@ -22,7 +22,7 @@ RewriteRule ^(.*)$ https://superset.apache.org/$1 [R,L]
RewriteCond %{HTTP_HOST} ^superset.incubator.apache.org$ [NC]
RewriteRule ^(.*)$ https://superset.apache.org/$1 [R=301,L]
Header set Content-Security-Policy "default-src data: blob: 'self' *.apache.org widget.kapa.ai *.githubusercontent.com *.scarf.sh *.googleapis.com *.google.com *.run.app *.gstatic.com *.github.com *.algolia.net *.algolianet.com 'unsafe-inline' 'unsafe-eval'; frame-src *; frame-ancestors 'self' *.google.com https://sidebar.bugherd.com; form-action 'self'; worker-src blob:; img-src 'self' blob: data: https:; font-src 'self'; object-src 'none'"
Header set Content-Security-Policy "default-src data: blob: 'self' *.apache.org *.githubusercontent.com *.scarf.sh *.googleapis.com *.github.com *.algolia.net *.algolianet.com 'unsafe-inline' 'unsafe-eval'; frame-src *; frame-ancestors 'self' *.google.com https://sidebar.bugherd.com; form-action 'self'; worker-src blob:; img-src 'self' blob: data: https:; font-src 'self'; object-src 'none'"
# REDIRECTS

Binary file not shown.

Before

Width:  |  Height:  |  Size: 84 KiB

View File

@@ -18736,7 +18736,7 @@
{
"description": "Table name",
"in": "query",
"name": "name",
"name": "table",
"required": true,
"schema": {
"type": "string"

File diff suppressed because it is too large Load Diff

View File

@@ -5,5 +5,5 @@ dependencies:
- name: redis
repository: oci://registry-1.docker.io/bitnamicharts
version: 17.9.4
digest: sha256:c6290bb7e8ce9c694c06b3f5e9b9d01401943b0943c515d3a7a3a8dc1e6492ea
generated: "2025-03-16T00:52:41.47139769+09:00"
digest: sha256:9588e2a9f15d875a95763ed7da8e92b5b48a8d13cbacd66b775eacba3e8cebcd
generated: "2024-12-29T12:19:15.365763+09:00"

View File

@@ -15,7 +15,7 @@
# limitations under the License.
#
apiVersion: v2
appVersion: "4.1.2"
appVersion: "4.1.1"
description: Apache Superset is a modern, enterprise-ready business intelligence web application
name: superset
icon: https://artifacthub.io/image/68c1d717-0e97-491f-b046-754e46f46922@2x
@@ -29,7 +29,7 @@ maintainers:
- name: craig-rueda
email: craig@craigrueda.com
url: https://github.com/craig-rueda
version: 0.14.2
version: 0.14.0
dependencies:
- name: postgresql
version: 13.4.4

View File

@@ -23,7 +23,7 @@ NOTE: This file is generated by helm-docs: https://github.com/norwoodj/helm-docs
# superset
![Version: 0.14.2](https://img.shields.io/badge/Version-0.14.2-informational?style=flat-square)
![Version: 0.14.0](https://img.shields.io/badge/Version-0.14.0-informational?style=flat-square)
Apache Superset is a modern, enterprise-ready business intelligence web application

View File

@@ -812,7 +812,7 @@ postgresql:
database: superset
image:
tag: "14.17.0-debian-12-r3"
tag: "14.6.0-debian-11-r13"
## PostgreSQL Primary parameters
primary:

BIN
null_byte.csv Normal file

Binary file not shown.
1 A
2

View File

@@ -20,7 +20,7 @@ requires = ["setuptools>=40.9.0", "wheel"]
build-backend = "setuptools.build_meta"
[project]
name = "apache_superset"
name = "apache-superset"
description = "A modern, enterprise-ready business intelligence web application"
readme = "README.md"
dynamic = ["version", "scripts", "entry-points"]
@@ -44,7 +44,7 @@ dependencies = [
"cryptography>=42.0.4, <45.0.0",
"deprecation>=2.1.0, <2.2.0",
"flask>=2.2.5, <3.0.0",
"flask-appbuilder>=4.6.1, <5.0.0",
"flask-appbuilder>=4.6.0, <5.0.0",
"flask-caching>=2.1.0, <3",
"flask-compress>=1.13, <2.0",
"flask-talisman>=1.0.0, <2.0",
@@ -125,7 +125,7 @@ denodo = ["denodo-sqlalchemy~=1.0.6"]
dremio = ["sqlalchemy-dremio>=1.2.1, <4"]
drill = ["sqlalchemy-drill>=1.1.4, <2"]
druid = ["pydruid>=0.6.5,<0.7"]
duckdb = ["duckdb-engine>=0.12.1, <0.13"]
duckdb = ["duckdb-engine>=0.10", "duckdb>=1.1.0"]
dynamodb = ["pydynamodb>=0.4.2"]
solr = ["sqlalchemy-solr >= 0.2.0"]
elasticsearch = ["elasticsearch-dbapi>=0.2.9, <0.3.0"]
@@ -146,7 +146,6 @@ hive = [
impala = ["impyla>0.16.2, <0.17"]
kusto = ["sqlalchemy-kusto>=3.0.0, <4"]
kylin = ["kylinpy>=2.8.1, <2.9"]
motherduck = ["duckdb==0.10.2", "duckdb-engine>=0.12.1, <0.13"]
mssql = ["pymssql>=2.2.8, <3"]
mysql = ["mysqlclient>=2.1.0, <3"]
ocient = [

View File

@@ -23,13 +23,3 @@ numexpr>=2.9.0
# 5.0.0 has a sensitive deprecation used in other libs
# -> https://github.com/aio-libs/async-timeout/blob/master/CHANGES.rst#500-2024-10-31
async_timeout>=4.0.0,<5.0.0
# Known issue with 6.7.0 breaking a unit test, probably easy to fix, but will require
# a bit of attention to bump.
apispec>=6.0.0,<6.7.0
# 1.4.1 appears to use much more memory, where the python test suite runs out of memory
# causing CI to fail. 1.4.0 is the last version that works.
# https://marshmallow-sqlalchemy.readthedocs.io/en/latest/changelog.html#id3
# Opened this issue https://github.com/marshmallow-code/marshmallow-sqlalchemy/issues/665
marshmallow-sqlalchemy>=1.3.0,<1.4.1

View File

@@ -4,25 +4,22 @@ alembic==1.15.1
# via flask-migrate
amqp==5.3.1
# via kombu
apispec==6.6.1
# via
# -r requirements/base.in
# flask-appbuilder
apsw==3.49.1.0
apispec==6.3.0
# via flask-appbuilder
apsw==3.46.0.0
# via shillelagh
async-timeout==4.0.3
# via
# -r requirements/base.in
# redis
attrs==25.3.0
attrs==24.2.0
# via
# cattrs
# jsonschema
# outcome
# referencing
# requests-cache
# trio
babel==2.17.0
babel==2.16.0
# via flask-babel
backoff==2.2.1
# via apache-superset (pyproject.toml)
@@ -40,13 +37,13 @@ cachelib==0.13.0
# via
# flask-caching
# flask-session
cachetools==5.5.2
cachetools==5.5.0
# via google-auth
cattrs==24.1.2
# via requests-cache
celery==5.5.2
celery==5.4.0
# via apache-superset (pyproject.toml)
certifi==2025.1.31
certifi==2024.8.30
# via
# requests
# selenium
@@ -54,7 +51,7 @@ cffi==1.17.1
# via
# cryptography
# pynacl
charset-normalizer==3.4.1
charset-normalizer==3.4.0
# via requests
click==8.1.8
# via
@@ -68,7 +65,7 @@ click==8.1.8
# flask-appbuilder
click-didyoumean==0.3.1
# via celery
click-option-group==0.5.7
click-option-group==0.5.6
# via apache-superset (pyproject.toml)
click-plugins==1.1.1
# via celery
@@ -89,7 +86,7 @@ cryptography==44.0.2
# pyopenssl
defusedxml==0.7.1
# via odfpy
deprecated==1.2.18
deprecated==1.2.15
# via limits
deprecation==2.1.0
# via apache-superset (pyproject.toml)
@@ -118,7 +115,7 @@ flask==2.3.3
# flask-session
# flask-sqlalchemy
# flask-wtf
flask-appbuilder==4.6.1
flask-appbuilder==4.6.0
# via apache-superset (pyproject.toml)
flask-babel==2.0.0
# via flask-appbuilder
@@ -128,7 +125,7 @@ flask-compress==1.17
# via apache-superset (pyproject.toml)
flask-jwt-extended==4.7.1
# via flask-appbuilder
flask-limiter==3.12
flask-limiter==3.8.0
# via flask-appbuilder
flask-login==0.6.3
# via
@@ -152,7 +149,7 @@ geographiclib==2.0
# via geopy
geopy==2.4.1
# via apache-superset (pyproject.toml)
google-auth==2.38.0
google-auth==2.36.0
# via shillelagh
greenlet==3.1.1
# via
@@ -167,15 +164,17 @@ hashids==1.3.1
# via apache-superset (pyproject.toml)
holidays==0.25
# via apache-superset (pyproject.toml)
humanize==4.12.2
humanize==4.12.1
# via apache-superset (pyproject.toml)
idna==3.10
# via
# email-validator
# requests
# trio
importlib-metadata==8.7.0
importlib-metadata==8.6.1
# via apache-superset (pyproject.toml)
importlib-resources==6.4.5
# via limits
isodate==0.7.2
# via apache-superset (pyproject.toml)
itsdangerous==2.2.0
@@ -188,17 +187,15 @@ jinja2==3.1.6
# flask-babel
jsonpath-ng==1.7.0
# via apache-superset (pyproject.toml)
jsonschema==4.23.0
jsonschema==4.17.3
# via flask-appbuilder
jsonschema-specifications==2024.10.1
# via jsonschema
kombu==5.5.3
kombu==5.4.2
# via celery
korean-lunar-calendar==0.3.1
# via holidays
limits==5.1.0
limits==3.13.0
# via flask-limiter
mako==1.3.10
mako==1.3.9
# via
# apache-superset (pyproject.toml)
# alembic
@@ -212,14 +209,12 @@ markupsafe==3.0.2
# mako
# werkzeug
# wtforms
marshmallow==3.26.1
marshmallow==3.23.1
# via
# flask-appbuilder
# marshmallow-sqlalchemy
marshmallow-sqlalchemy==1.4.0
# via
# -r requirements/base.in
# flask-appbuilder
marshmallow-sqlalchemy==0.28.2
# via flask-appbuilder
mdurl==0.1.2
# via markdown-it-py
msgpack==1.0.8
@@ -245,7 +240,7 @@ ordered-set==4.1.0
# via flask-limiter
outcome==1.3.0.post0
# via trio
packaging==25.0
packaging==24.2
# via
# apache-superset (pyproject.toml)
# apispec
@@ -253,6 +248,7 @@ packaging==25.0
# gunicorn
# limits
# marshmallow
# marshmallow-sqlalchemy
# shillelagh
pandas==2.0.3
# via apache-superset (pyproject.toml)
@@ -264,7 +260,7 @@ parsedatetime==2.6
# via apache-superset (pyproject.toml)
pgsanity==0.2.9
# via apache-superset (pyproject.toml)
platformdirs==4.3.7
platformdirs==3.9.1
# via requests-cache
ply==3.11
# via jsonpath-ng
@@ -272,7 +268,7 @@ polyline==2.0.2
# via apache-superset (pyproject.toml)
prison==0.2.1
# via flask-appbuilder
prompt-toolkit==3.0.51
prompt-toolkit==3.0.50
# via click-repl
pyarrow==14.0.2
# via apache-superset (pyproject.toml)
@@ -284,7 +280,7 @@ pyasn1-modules==0.4.1
# via google-auth
pycparser==2.22
# via cffi
pygments==2.19.1
pygments==2.18.0
# via rich
pyjwt==2.10.1
# via
@@ -295,8 +291,10 @@ pynacl==1.5.0
# via paramiko
pyopenssl==25.0.0
# via shillelagh
pyparsing==3.2.3
pyparsing==3.2.1
# via apache-superset (pyproject.toml)
pyrsistent==0.20.0
# via jsonschema
pysocks==1.7.1
# via urllib3
python-dateutil==2.9.0.post0
@@ -308,11 +306,11 @@ python-dateutil==2.9.0.post0
# holidays
# pandas
# shillelagh
python-dotenv==1.1.0
python-dotenv==1.0.1
# via apache-superset (pyproject.toml)
python-geohash==0.8.5
# via apache-superset (pyproject.toml)
pytz==2025.2
pytz==2025.1
# via
# croniter
# flask-babel
@@ -325,27 +323,19 @@ pyyaml==6.0.2
# apispec
redis==4.6.0
# via apache-superset (pyproject.toml)
referencing==0.36.2
# via
# jsonschema
# jsonschema-specifications
requests==2.32.3
requests==2.32.2
# via
# requests-cache
# shillelagh
requests-cache==1.2.1
requests-cache==1.2.0
# via shillelagh
rich==13.9.4
# via flask-limiter
rpds-py==0.23.1
# via
# jsonschema
# referencing
rsa==4.9
# via google-auth
selenium==4.27.1
# via apache-superset (pyproject.toml)
shillelagh==1.3.5
shillelagh==1.2.18
# via apache-superset (pyproject.toml)
simplejson==3.20.1
# via apache-superset (pyproject.toml)
@@ -355,7 +345,7 @@ six==1.17.0
# python-dateutil
# url-normalize
# wtforms-json
slack-sdk==3.35.0
slack-sdk==3.34.0
# via apache-superset (pyproject.toml)
sniffio==1.3.1
# via trio
@@ -374,7 +364,7 @@ sqlalchemy-utils==0.38.3
# via
# apache-superset (pyproject.toml)
# flask-appbuilder
sqlglot==26.16.2
sqlglot==26.1.3
# via apache-superset (pyproject.toml)
sqlparse==0.5.3
# via apache-superset (pyproject.toml)
@@ -393,14 +383,15 @@ typing-extensions==4.12.2
# apache-superset (pyproject.toml)
# alembic
# cattrs
# flask-limiter
# limits
# pyopenssl
# referencing
# rich
# selenium
# shillelagh
tzdata==2025.2
tzdata==2025.1
# via
# celery
# kombu
# pandas
url-normalize==1.4.3
@@ -427,7 +418,7 @@ werkzeug==3.1.3
# flask-appbuilder
# flask-jwt-extended
# flask-login
wrapt==1.17.2
wrapt==1.17.0
# via deprecated
wsproto==1.2.0
# via trio-websocket

View File

@@ -10,11 +10,11 @@ amqp==5.3.1
# via
# -c requirements/base.txt
# kombu
apispec==6.6.1
apispec==6.3.0
# via
# -c requirements/base.txt
# flask-appbuilder
apsw==3.49.1.0
apsw==3.46.0.0
# via
# -c requirements/base.txt
# shillelagh
@@ -22,16 +22,15 @@ async-timeout==4.0.3
# via
# -c requirements/base.txt
# redis
attrs==25.3.0
attrs==24.2.0
# via
# -c requirements/base.txt
# cattrs
# jsonschema
# outcome
# referencing
# requests-cache
# trio
babel==2.17.0
babel==2.16.0
# via
# -c requirements/base.txt
# flask-babel
@@ -64,7 +63,7 @@ cachelib==0.13.0
# -c requirements/base.txt
# flask-caching
# flask-session
cachetools==5.5.2
cachetools==5.5.0
# via
# -c requirements/base.txt
# google-auth
@@ -72,11 +71,11 @@ cattrs==24.1.2
# via
# -c requirements/base.txt
# requests-cache
celery==5.5.2
celery==5.4.0
# via
# -c requirements/base.txt
# apache-superset
certifi==2025.1.31
certifi==2024.8.30
# via
# -c requirements/base.txt
# requests
@@ -88,7 +87,7 @@ cffi==1.17.1
# pynacl
cfgv==3.4.0
# via pre-commit
charset-normalizer==3.4.1
charset-normalizer==3.4.0
# via
# -c requirements/base.txt
# requests
@@ -107,7 +106,7 @@ click-didyoumean==0.3.1
# via
# -c requirements/base.txt
# celery
click-option-group==0.5.7
click-option-group==0.5.6
# via
# -c requirements/base.txt
# apache-superset
@@ -152,7 +151,7 @@ defusedxml==0.7.1
# via
# -c requirements/base.txt
# odfpy
deprecated==1.2.18
deprecated==1.2.15
# via
# -c requirements/base.txt
# limits
@@ -202,7 +201,7 @@ flask==2.3.3
# flask-sqlalchemy
# flask-testing
# flask-wtf
flask-appbuilder==4.6.1
flask-appbuilder==4.6.0
# via
# -c requirements/base.txt
# apache-superset
@@ -224,7 +223,7 @@ flask-jwt-extended==4.7.1
# via
# -c requirements/base.txt
# flask-appbuilder
flask-limiter==3.12
flask-limiter==3.8.0
# via
# -c requirements/base.txt
# flask-appbuilder
@@ -280,7 +279,7 @@ google-api-core==2.23.0
# google-cloud-core
# pandas-gbq
# sqlalchemy-bigquery
google-auth==2.38.0
google-auth==2.36.0
# via
# -c requirements/base.txt
# google-api-core
@@ -319,7 +318,7 @@ greenlet==3.1.1
# gevent
# shillelagh
# sqlalchemy
grpcio==1.71.0
grpcio==1.68.0
# via
# apache-superset
# google-api-core
@@ -343,7 +342,7 @@ holidays==0.25
# -c requirements/base.txt
# apache-superset
# prophet
humanize==4.12.2
humanize==4.12.1
# via
# -c requirements/base.txt
# apache-superset
@@ -355,12 +354,15 @@ idna==3.10
# email-validator
# requests
# trio
importlib-metadata==8.7.0
importlib-metadata==8.6.1
# via
# -c requirements/base.txt
# apache-superset
importlib-resources==6.5.2
# via prophet
importlib-resources==6.4.5
# via
# -c requirements/base.txt
# limits
# prophet
iniconfig==2.0.0
# via pytest
isodate==0.7.2
@@ -381,22 +383,18 @@ jsonpath-ng==1.7.0
# via
# -c requirements/base.txt
# apache-superset
jsonschema==4.23.0
jsonschema==4.17.3
# via
# -c requirements/base.txt
# flask-appbuilder
# jsonschema-spec
# openapi-schema-validator
# openapi-spec-validator
jsonschema-path==0.3.4
jsonschema-spec==0.1.6
# via openapi-spec-validator
jsonschema-specifications==2024.10.1
# via
# -c requirements/base.txt
# jsonschema
# openapi-schema-validator
kiwisolver==1.4.7
# via matplotlib
kombu==5.5.3
kombu==5.4.2
# via
# -c requirements/base.txt
# celery
@@ -406,11 +404,11 @@ korean-lunar-calendar==0.3.1
# holidays
lazy-object-proxy==1.10.0
# via openapi-spec-validator
limits==5.1.0
limits==3.13.0
# via
# -c requirements/base.txt
# flask-limiter
mako==1.3.10
mako==1.3.9
# via
# -c requirements/base.txt
# alembic
@@ -430,12 +428,12 @@ markupsafe==3.0.2
# mako
# werkzeug
# wtforms
marshmallow==3.26.1
marshmallow==3.23.1
# via
# -c requirements/base.txt
# flask-appbuilder
# marshmallow-sqlalchemy
marshmallow-sqlalchemy==1.4.0
marshmallow-sqlalchemy==0.28.2
# via
# -c requirements/base.txt
# flask-appbuilder
@@ -480,9 +478,9 @@ odfpy==1.4.1
# via
# -c requirements/base.txt
# pandas
openapi-schema-validator==0.6.3
openapi-schema-validator==0.4.4
# via openapi-spec-validator
openapi-spec-validator==0.7.1
openapi-spec-validator==0.5.6
# via apache-superset
openpyxl==3.1.5
# via
@@ -496,7 +494,7 @@ outcome==1.3.0.post0
# via
# -c requirements/base.txt
# trio
packaging==25.0
packaging==24.2
# via
# -c requirements/base.txt
# apache-superset
@@ -508,6 +506,7 @@ packaging==25.0
# gunicorn
# limits
# marshmallow
# marshmallow-sqlalchemy
# matplotlib
# pytest
# shillelagh
@@ -534,7 +533,7 @@ parsedatetime==2.6
# -c requirements/base.txt
# apache-superset
pathable==0.4.3
# via jsonschema-path
# via jsonschema-spec
pgsanity==0.2.9
# via
# -c requirements/base.txt
@@ -543,7 +542,7 @@ pillow==10.3.0
# via
# apache-superset
# matplotlib
platformdirs==4.3.7
platformdirs==3.9.1
# via
# -c requirements/base.txt
# requests-cache
@@ -566,7 +565,7 @@ prison==0.2.1
# flask-appbuilder
progress==1.6
# via apache-superset
prompt-toolkit==3.0.51
prompt-toolkit==3.0.50
# via
# -c requirements/base.txt
# click-repl
@@ -614,7 +613,7 @@ pydruid==0.6.9
# via apache-superset
pyfakefs==5.3.5
# via apache-superset
pygments==2.19.1
pygments==2.18.0
# via
# -c requirements/base.txt
# rich
@@ -636,11 +635,15 @@ pyopenssl==25.0.0
# via
# -c requirements/base.txt
# shillelagh
pyparsing==3.2.3
pyparsing==3.2.1
# via
# -c requirements/base.txt
# apache-superset
# matplotlib
pyrsistent==0.20.0
# via
# -c requirements/base.txt
# jsonschema
pysocks==1.7.1
# via
# -c requirements/base.txt
@@ -669,7 +672,7 @@ python-dateutil==2.9.0.post0
# pyhive
# shillelagh
# trino
python-dotenv==1.1.0
python-dotenv==1.0.1
# via
# -c requirements/base.txt
# apache-superset
@@ -679,7 +682,7 @@ python-geohash==0.8.5
# apache-superset
python-ldap==3.4.4
# via apache-superset
pytz==2025.2
pytz==2025.1
# via
# -c requirements/base.txt
# croniter
@@ -695,32 +698,26 @@ pyyaml==6.0.2
# -c requirements/base.txt
# apache-superset
# apispec
# jsonschema-path
# jsonschema-spec
# pre-commit
redis==4.6.0
# via
# -c requirements/base.txt
# apache-superset
referencing==0.36.2
# via
# -c requirements/base.txt
# jsonschema
# jsonschema-path
# jsonschema-specifications
requests==2.32.3
requests==2.32.2
# via
# -c requirements/base.txt
# docker
# google-api-core
# google-cloud-bigquery
# jsonschema-path
# jsonschema-spec
# pydruid
# pyhive
# requests-cache
# requests-oauthlib
# shillelagh
# trino
requests-cache==1.2.1
requests-cache==1.2.0
# via
# -c requirements/base.txt
# shillelagh
@@ -732,11 +729,6 @@ rich==13.9.4
# via
# -c requirements/base.txt
# flask-limiter
rpds-py==0.23.1
# via
# -c requirements/base.txt
# jsonschema
# referencing
rsa==4.9
# via
# -c requirements/base.txt
@@ -754,7 +746,7 @@ setuptools==75.6.0
# pydata-google-auth
# zope-event
# zope-interface
shillelagh==1.3.5
shillelagh==1.2.18
# via
# -c requirements/base.txt
# apache-superset
@@ -770,7 +762,7 @@ six==1.17.0
# rfc3339-validator
# url-normalize
# wtforms-json
slack-sdk==3.35.0
slack-sdk==3.34.0
# via
# -c requirements/base.txt
# apache-superset
@@ -800,7 +792,7 @@ sqlalchemy-utils==0.38.3
# -c requirements/base.txt
# apache-superset
# flask-appbuilder
sqlglot==26.16.2
sqlglot==26.1.3
# via
# -c requirements/base.txt
# apache-superset
@@ -845,15 +837,16 @@ typing-extensions==4.12.2
# alembic
# apache-superset
# cattrs
# flask-limiter
# limits
# pyopenssl
# referencing
# rich
# selenium
# shillelagh
tzdata==2025.2
tzdata==2025.1
# via
# -c requirements/base.txt
# celery
# kombu
# pandas
tzlocal==5.2
@@ -892,7 +885,7 @@ werkzeug==3.1.3
# flask-appbuilder
# flask-jwt-extended
# flask-login
wrapt==1.17.2
wrapt==1.17.0
# via
# -c requirements/base.txt
# deprecated

View File

@@ -1,4 +1,4 @@
# This file was autogenerated by uv via the following command:
# uv pip compile requirements/translations.in -o requirements/translations.txt
babel==2.17.0
babel==2.16.0
# via -r requirements/translations.in

View File

@@ -63,10 +63,7 @@ def fetch_files_github_api(url: str): # type: ignore
def fetch_changed_files_pr(repo: str, pr_number: str) -> List[str]:
"""Fetches files changed in a PR using the GitHub API."""
# NOTE: limited to 100 files ideally should page-through but instead resorting
# to assuming we should trigger when 100 files have been touched
url = f"https://api.github.com/repos/{repo}/pulls/{pr_number}/files?per_page=100"
url = f"https://api.github.com/repos/{repo}/pulls/{pr_number}/files"
files = fetch_files_github_api(url)
return [file_info["filename"] for file_info in files]
@@ -106,7 +103,7 @@ def main(event_type: str, sha: str, repo: str) -> None:
"""Main function to check for file changes based on event context."""
print("SHA:", sha)
print("EVENT_TYPE", event_type)
files = []
files = None
if event_type == "pull_request":
pr_number = os.getenv("GITHUB_REF", "").split("/")[-2]
if is_int(pr_number):
@@ -136,11 +133,8 @@ def main(event_type: str, sha: str, repo: str) -> None:
output_path = os.getenv("GITHUB_OUTPUT") or "/tmp/GITHUB_OUTPUT.txt" # noqa: S108
with open(output_path, "a") as f:
for check, changed in changes_detected.items():
# NOTE: as noted above, we assume that if 100 files are touched, we should
# trigger all checks. This is a workaround for the GitHub API limit of 100
# files. Using >= 99 because off-by-one errors are not uncommon
if changed or len(files) >= 99:
print(f"{check}=true", file=f)
if changed:
print(f"{check}={str(changed).lower()}", file=f)
print(f"Triggering group: {check}")

View File

@@ -52,7 +52,6 @@ with open(VERSION_INFO_FILE, "w") as version_file:
version_string = version_string.replace("-dev", ".dev0")
setup(
name="apache_superset",
version=version_string,
packages=find_packages(),
include_package_data=True,

View File

@@ -60,9 +60,7 @@ embedDashboard({
}
},
// optional additional iframe sandbox attributes
iframeSandboxExtras: ['allow-top-navigation', 'allow-popups-to-escape-sandbox'],
// optional config to enforce a particular referrerPolicy
referrerPolicy: "same-origin"
iframeSandboxExtras: ['allow-top-navigation', 'allow-popups-to-escape-sandbox']
});
```
@@ -148,11 +146,3 @@ To pass additional sandbox attributes you can use `iframeSandboxExtras`:
// optional additional iframe sandbox attributes
iframeSandboxExtras: ['allow-top-navigation', 'allow-popups-to-escape-sandbox']
```
### Enforcing a ReferrerPolicy on the request triggered by the iframe
By default, the Embedded SDK creates an `iframe` element without a `referrerPolicy` value enforced. This means that a policy defined for `iframe` elements at the host app level would reflect to it.
This can be an issue as during the embedded enablement for a dashboard it's possible to specify which domain(s) are allowed to embed the dashboard, and this validation happens throuth the `Referrer` header. That said, in case the hosting app has a more restrictive policy that would omit this header, this validation would fail.
Use the `referrerPolicy` parameter in the `embedDashboard` method to specify [a particular policy](https://developer.mozilla.org/en-US/docs/Web/HTTP/Reference/Headers/Referrer-Policy) that works for your implementation.

View File

@@ -19,7 +19,7 @@
import {
DASHBOARD_UI_FILTER_CONFIG_URL_PARAM_KEY,
IFRAME_COMMS_MESSAGE_TYPE,
IFRAME_COMMS_MESSAGE_TYPE
} from './const';
// We can swap this out for the actual switchboard package once it gets published
@@ -34,62 +34,48 @@ import { getGuestTokenRefreshTiming } from './guestTokenRefresh';
export type GuestTokenFetchFn = () => Promise<string>;
export type UiConfigType = {
hideTitle?: boolean;
hideTab?: boolean;
hideChartControls?: boolean;
emitDataMasks?: boolean;
hideTitle?: boolean
hideTab?: boolean
hideChartControls?: boolean
filters?: {
[key: string]: boolean | undefined;
visible?: boolean;
expanded?: boolean;
};
[key: string]: boolean | undefined
visible?: boolean
expanded?: boolean
}
urlParams?: {
[key: string]: any;
};
};
[key: string]: any
}
}
export type EmbedDashboardParams = {
/** The id provided by the embed configuration UI in Superset */
id: string;
id: string
/** The domain where Superset can be located, with protocol, such as: https://superset.example.com */
supersetDomain: string;
supersetDomain: string
/** The html element within which to mount the iframe */
mountPoint: HTMLElement;
mountPoint: HTMLElement
/** A function to fetch a guest token from the Host App's backend server */
fetchGuestToken: GuestTokenFetchFn;
fetchGuestToken: GuestTokenFetchFn
/** The dashboard UI config: hideTitle, hideTab, hideChartControls, filters.visible, filters.expanded **/
dashboardUiConfig?: UiConfigType;
dashboardUiConfig?: UiConfigType
/** Are we in debug mode? */
debug?: boolean;
debug?: boolean
/** The iframe title attribute */
iframeTitle?: string;
iframeTitle?: string
/** additional iframe sandbox attributes ex (allow-top-navigation, allow-popups-to-escape-sandbox) **/
iframeSandboxExtras?: string[];
/** force a specific refererPolicy to be used in the iframe request **/
referrerPolicy?: ReferrerPolicy;
};
iframeSandboxExtras?: string[]
}
export type Size = {
width: number;
height: number;
};
width: number, height: number
}
export type ObserveDataMaskCallbackFn = (
dataMask: Record<string, any> & {
crossFiltersChanged: boolean;
nativeFiltersChanged: boolean;
},
) => void;
export type EmbeddedDashboard = {
getScrollSize: () => Promise<Size>;
unmount: () => void;
getDashboardPermalink: (anchor: string) => Promise<string>;
getActiveTabs: () => Promise<string[]>;
observeDataMask: (
callbackFn: ObserveDataMaskCallbackFn,
) => void;
getDataMask: () => Record<string, any>;
};
getScrollSize: () => Promise<Size>
unmount: () => void
getDashboardPermalink: (anchor: string) => Promise<string>
getActiveTabs: () => Promise<string[]>
}
/**
* Embeds a Superset dashboard into the page using an iframe.
@@ -101,9 +87,8 @@ export async function embedDashboard({
fetchGuestToken,
dashboardUiConfig,
debug = false,
iframeTitle = 'Embedded Dashboard',
iframeSandboxExtras = [],
referrerPolicy,
iframeTitle = "Embedded Dashboard",
iframeSandboxExtras = []
}: EmbedDashboardParams): Promise<EmbeddedDashboard> {
function log(...info: unknown[]) {
if (debug) {
@@ -113,69 +98,50 @@ export async function embedDashboard({
log('embedding');
if (supersetDomain.endsWith('/')) {
if (supersetDomain.endsWith("/")) {
supersetDomain = supersetDomain.slice(0, -1);
}
function calculateConfig() {
let configNumber = 0;
if (dashboardUiConfig) {
if (dashboardUiConfig.hideTitle) {
configNumber += 1;
let configNumber = 0
if(dashboardUiConfig) {
if(dashboardUiConfig.hideTitle) {
configNumber += 1
}
if (dashboardUiConfig.hideTab) {
configNumber += 2;
if(dashboardUiConfig.hideTab) {
configNumber += 2
}
if (dashboardUiConfig.hideChartControls) {
configNumber += 8;
}
if (dashboardUiConfig.emitDataMasks) {
configNumber += 16;
if(dashboardUiConfig.hideChartControls) {
configNumber += 8
}
}
return configNumber;
return configNumber
}
async function mountIframe(): Promise<Switchboard> {
return new Promise(resolve => {
const iframe = document.createElement('iframe');
const dashboardConfigUrlParams = dashboardUiConfig
? { uiConfig: `${calculateConfig()}` }
: undefined;
const filterConfig = dashboardUiConfig?.filters || {};
const filterConfigKeys = Object.keys(filterConfig);
const filterConfigUrlParams = Object.fromEntries(
filterConfigKeys.map(key => [
DASHBOARD_UI_FILTER_CONFIG_URL_PARAM_KEY[key],
filterConfig[key],
]),
);
const dashboardConfigUrlParams = dashboardUiConfig ? {uiConfig: `${calculateConfig()}`} : undefined;
const filterConfig = dashboardUiConfig?.filters || {}
const filterConfigKeys = Object.keys(filterConfig)
const filterConfigUrlParams = Object.fromEntries(filterConfigKeys.map(
key => [DASHBOARD_UI_FILTER_CONFIG_URL_PARAM_KEY[key], filterConfig[key]]))
// Allow url query parameters from dashboardUiConfig.urlParams to override the ones from filterConfig
const urlParams = {
...dashboardConfigUrlParams,
...filterConfigUrlParams,
...dashboardUiConfig?.urlParams,
};
const urlParamsString = Object.keys(urlParams).length
? '?' + new URLSearchParams(urlParams).toString()
: '';
const urlParams = {...dashboardConfigUrlParams, ...filterConfigUrlParams, ...dashboardUiConfig?.urlParams}
const urlParamsString = Object.keys(urlParams).length ? '?' + new URLSearchParams(urlParams).toString() : ''
// set up the iframe's sandbox configuration
iframe.sandbox.add('allow-same-origin'); // needed for postMessage to work
iframe.sandbox.add('allow-scripts'); // obviously the iframe needs scripts
iframe.sandbox.add('allow-presentation'); // for fullscreen charts
iframe.sandbox.add('allow-downloads'); // for downloading charts as image
iframe.sandbox.add('allow-forms'); // for forms to submit
iframe.sandbox.add('allow-popups'); // for exporting charts as csv
iframe.sandbox.add("allow-same-origin"); // needed for postMessage to work
iframe.sandbox.add("allow-scripts"); // obviously the iframe needs scripts
iframe.sandbox.add("allow-presentation"); // for fullscreen charts
iframe.sandbox.add("allow-downloads"); // for downloading charts as image
iframe.sandbox.add("allow-forms"); // for forms to submit
iframe.sandbox.add("allow-popups"); // for exporting charts as csv
// additional sandbox props
iframeSandboxExtras.forEach((key: string) => {
iframe.sandbox.add(key);
});
// force a specific refererPolicy to be used in the iframe request
if (referrerPolicy) {
iframe.referrerPolicy = referrerPolicy;
}
// add the event listener before setting src, to be 100% sure that we capture the load event
iframe.addEventListener('load', () => {
@@ -189,26 +155,20 @@ export async function embedDashboard({
// See https://developer.mozilla.org/en-US/docs/Web/API/Window/postMessage
// we know the content window isn't null because we are in the load event handler.
iframe.contentWindow!.postMessage(
{ type: IFRAME_COMMS_MESSAGE_TYPE, handshake: 'port transfer' },
{ type: IFRAME_COMMS_MESSAGE_TYPE, handshake: "port transfer" },
supersetDomain,
[theirPort],
);
)
log('sent message channel to the iframe');
// return our port from the promise
resolve(
new Switchboard({
port: ourPort,
name: 'superset-embedded-sdk',
debug,
}),
);
resolve(new Switchboard({ port: ourPort, name: 'superset-embedded-sdk', debug }));
});
iframe.src = `${supersetDomain}/embedded/${id}${urlParamsString}`;
iframe.title = iframeTitle;
//@ts-ignore
mountPoint.replaceChildren(iframe);
log('placed the iframe');
log('placed the iframe')
});
}
@@ -237,21 +197,12 @@ export async function embedDashboard({
const getScrollSize = () => ourPort.get<Size>('getScrollSize');
const getDashboardPermalink = (anchor: string) =>
ourPort.get<string>('getDashboardPermalink', { anchor });
const getActiveTabs = () => ourPort.get<string[]>('getActiveTabs');
const getDataMask = () => ourPort.get<Record<string, any>>('getDataMask');
const observeDataMask = (
callbackFn: ObserveDataMaskCallbackFn,
) => {
ourPort.start();
ourPort.defineMethod('observeDataMask', callbackFn);
};
const getActiveTabs = () => ourPort.get<string[]>('getActiveTabs')
return {
getScrollSize,
unmount,
getDashboardPermalink,
getActiveTabs,
observeDataMask,
getDataMask,
};
}

View File

@@ -36,45 +36,6 @@ if (process.env.NODE_ENV === 'production') {
];
}
const restrictedImportsRules = {
'no-design-icons': {
name: '@ant-design/icons',
message:
'Avoid importing icons directly from @ant-design/icons. Use the src/components/Icons component instead.',
},
'no-moment': {
name: 'moment',
message:
'Please use the dayjs library instead of moment.js. See https://day.js.org',
},
'no-lodash-memoize': {
name: 'lodash/memoize',
message: 'Lodash Memoize is unsafe! Please use memoize-one instead',
},
'no-testing-library-react': {
name: '@testing-library/react',
message: 'Please use spec/helpers/testing-library instead',
},
'no-testing-library-react-dom-utils': {
name: '@testing-library/react-dom-utils',
message: 'Please use spec/helpers/testing-library instead',
},
'no-antd': {
name: 'antd',
message: 'Please import Ant components from the index of src/components',
},
'no-antd-v5': {
name: 'antd-v5',
message: 'Please import Ant v5 components from the index of src/components',
},
'no-superset-theme': {
name: '@superset-ui/core',
importNames: ['supersetTheme'],
message:
'Please use the theme directly from the ThemeProvider rather than importing supersetTheme.',
},
};
module.exports = {
extends: [
'airbnb',
@@ -113,7 +74,6 @@ module.exports = {
'file-progress',
'lodash',
'theme-colors',
'icons',
'i18n-strings',
'react-prefer-function-component',
'prettier',
@@ -240,13 +200,6 @@ module.exports = {
message: 'Wildcard imports are not allowed',
},
],
'no-restricted-imports': [
'error',
{
paths: Object.values(restrictedImportsRules).filter(Boolean),
patterns: ['antd/*'],
},
],
},
settings: {
'import/resolver': {
@@ -257,51 +210,6 @@ module.exports = {
},
},
},
{
files: ['packages/**'],
rules: {
'no-restricted-imports': [
'error',
{
paths: [
restrictedImportsRules['no-moment'],
restrictedImportsRules['no-lodash-memoize'],
restrictedImportsRules['no-superset-theme'],
],
patterns: [],
},
],
},
},
{
files: ['plugins/**'],
rules: {
'no-restricted-imports': [
'error',
{
paths: [
restrictedImportsRules['no-moment'],
restrictedImportsRules['no-lodash-memoize'],
],
patterns: [],
},
],
},
},
{
files: ['src/components/**', 'src/theme/**'],
rules: {
'no-restricted-imports': [
'error',
{
paths: Object.values(restrictedImportsRules).filter(
r => r.name !== 'antd-v5',
),
patterns: ['antd/*'],
},
],
},
},
{
files: [
'*.test.ts',
@@ -359,7 +267,6 @@ module.exports = {
'Default React import is not required due to automatic JSX runtime in React 16.4',
},
],
'no-restricted-imports': 0,
},
},
{
@@ -377,7 +284,6 @@ module.exports = {
],
rules: {
'theme-colors/no-literal-colors': 0,
'icons/no-fa-icons-usage': 0,
'i18n-strings/no-template-vars': 0,
'no-restricted-imports': 0,
'react/no-void-elements': 0,
@@ -386,7 +292,6 @@ module.exports = {
],
rules: {
'theme-colors/no-literal-colors': 'error',
'icons/no-fa-icons-usage': 'error',
'i18n-strings/no-template-vars': ['error', true],
camelcase: [
'error',
@@ -425,6 +330,42 @@ module.exports = {
'no-nested-ternary': 0,
'no-prototype-builtins': 0,
'no-restricted-properties': 0,
'no-restricted-imports': [
'error',
{
paths: [
{
name: 'antd',
message:
'Please import Ant components from the index of src/components',
},
{
name: 'antd-v5',
message:
'Please import Ant v5 components from the index of src/components',
},
{
name: '@superset-ui/core',
importNames: ['supersetTheme'],
message:
'Please use the theme directly from the ThemeProvider rather than importing supersetTheme.',
},
{
name: 'lodash/memoize',
message: 'Lodash Memoize is unsafe! Please use memoize-one instead',
},
{
name: '@testing-library/react',
message: 'Please use spec/helpers/testing-library instead',
},
{
name: '@testing-library/react-dom-utils',
message: 'Please use spec/helpers/testing-library instead',
},
],
patterns: ['antd/*'],
},
],
'no-shadow': 0, // re-enable up for discussion
'padded-blocks': 0,
'prefer-arrow-callback': 0,
@@ -465,13 +406,6 @@ module.exports = {
'no-promise-executor-return': 0,
'react/no-unused-class-component-methods': 0,
'react/react-in-jsx-scope': 0,
'no-restricted-imports': [
'error',
{
paths: Object.values(restrictedImportsRules).filter(Boolean),
patterns: ['antd/*'],
},
],
},
ignorePatterns,
};

View File

@@ -28,7 +28,7 @@ describe('charts list view', () => {
});
it('should load the Charts list', () => {
cy.get('[aria-label="unordered-list"]').click();
cy.get('[aria-label="list-view"]').click();
cy.eyesOpen({
testName: 'Charts list-view',
});
@@ -36,7 +36,7 @@ describe('charts list view', () => {
});
it('should load the Charts card list', () => {
cy.get('[aria-label="appstore"]').click();
cy.get('[aria-label="card-view"]').click();
cy.eyesOpen({
testName: 'Charts card-view',
});

View File

@@ -28,7 +28,7 @@ describe('dashboard list view', () => {
});
it('should load the Dashboards list', () => {
cy.get('[aria-label="unordered-list"]').click();
cy.get('[aria-label="list-view"]').click();
cy.eyesOpen({
testName: 'Dashboards list-view',
});
@@ -36,7 +36,7 @@ describe('dashboard list view', () => {
});
it('should load the Dashboards card list', () => {
cy.get('[aria-label="appstore"]').click();
cy.get('[aria-label="card-view"]').click();
cy.eyesOpen({
testName: 'Dashboards card-view',
});

View File

@@ -23,7 +23,7 @@ import {
describe('explore view', () => {
beforeEach(() => {
cy.intercept('POST', '**/superset/explore_json/**').as('getJson');
cy.intercept('POST', '/superset/explore_json/**').as('getJson');
});
afterEach(() => {

View File

@@ -35,12 +35,12 @@ function orderAlphabetical() {
}
function openProperties() {
cy.get('[aria-label="more"]').eq(1).click();
cy.get('[aria-label="more-vert"]').eq(1).click();
cy.getBySel('chart-list-edit-option').click();
}
function openMenu() {
cy.get('[aria-label="more"]').eq(1).click();
cy.get('[aria-label="more-vert"]').eq(1).click();
}
function confirmDelete() {
@@ -184,13 +184,12 @@ describe('Charts list', () => {
});
it('should allow to favorite/unfavorite', () => {
cy.intercept({ url: `**/api/v1/chart/*/favorites/`, method: 'POST' }).as(
cy.intercept({ url: `/api/v1/chart/*/favorites/`, method: 'POST' }).as(
'select',
);
cy.intercept({
url: `**/api/v1/chart/*/favorites/`,
method: 'DELETE',
}).as('unselect');
cy.intercept({ url: `/api/v1/chart/*/favorites/`, method: 'DELETE' }).as(
'unselect',
);
setGridMode('card');
orderAlphabetical();
@@ -264,7 +263,7 @@ describe('Charts list', () => {
// deletes in list-view
setGridMode('list');
cy.getBySel('table-row').eq(1).contains('2 - Sample chart');
cy.getBySel('delete').eq(1).click();
cy.getBySel('trash').eq(1).click();
confirmDelete();
cy.wait('@delete');
cy.getBySel('table-row').eq(1).should('not.contain', '2 - Sample chart');

View File

@@ -62,7 +62,7 @@ describe.skip('Dashboard top-level controls', () => {
// should allow force refresh
WORLD_HEALTH_CHARTS.forEach(waitForChartLoad);
getChartAliasesBySpec(WORLD_HEALTH_CHARTS).then(aliases => {
cy.get('[aria-label="ellipsis"]').click();
cy.get('[aria-label="more-horiz"]').click();
cy.get('[data-test="refresh-dashboard-menu-item"]').should(
'not.have.class',
'antd5-dropdown-menu-item-disabled',
@@ -91,7 +91,7 @@ describe.skip('Dashboard top-level controls', () => {
});
});
});
cy.get('[aria-label="ellipsis"]').click();
cy.get('[aria-label="more-horiz"]').click();
cy.get('[data-test="refresh-dashboard-menu-item"]').and(
'not.have.class',
'antd5-dropdown-menu-item-disabled',

View File

@@ -27,13 +27,13 @@ describe.skip('Dashboard form data', () => {
});
it('should apply url params to slice requests', () => {
cy.intercept('**/api/v1/chart/data?*', request => {
cy.intercept('/api/v1/chart/data?*', request => {
// TODO: export url params to chart data API
request.body.queries.forEach((query: { url_params: JsonObject }) => {
expect(query.url_params).deep.eq(urlParams);
});
});
cy.intercept('**/superset/explore_json/*', request => {
cy.intercept('/superset/explore_json/*', request => {
const requestParams = JSON.parse(
parsePostForm(request.body).form_data as string,
);

View File

@@ -24,44 +24,21 @@ describe('Dashboard actions', () => {
cy.createSampleDashboards([0]);
cy.visit(SAMPLE_DASHBOARD_1);
});
it('should allow to favorite/unfavorite dashboard', () => {
interceptFav();
interceptUnfav();
// Find and click StarOutlined (adds to favorites)
cy.getBySel('dashboard-header-container')
.find("[aria-label='unstarred']")
.as('starIconOutlined')
.should('exist')
.find("[aria-label='favorite-unselected']")
.click();
cy.wait('@select');
// After clicking, StarFilled should appear
cy.getBySel('dashboard-header-container')
.find("[aria-label='starred']")
.as('starIconFilled')
.should('exist');
// Verify the color of the filled star (gold)
cy.get('@starIconFilled')
.should('have.css', 'color')
.and('eq', 'rgb(252, 199, 0)');
// Click on StarFilled (removes from favorites)
cy.get('@starIconFilled').click();
.find("[aria-label='favorite-selected']")
.click();
cy.wait('@unselect');
// After clicking, StarOutlined should reappear
cy.getBySel('dashboard-header-container')
.find("[aria-label='unstarred']")
.as('starIconOutlinedAfter')
.should('exist');
// Verify the color of the outlined star (gray)
cy.get('@starIconOutlinedAfter')
.should('have.css', 'color')
.and('eq', 'rgb(178, 178, 178)');
.find("[aria-label='favorite-selected']")
.should('not.exist');
});
});

View File

@@ -510,29 +510,29 @@ describe('Drill by modal', () => {
it('Line chart', () => {
testEchart('echarts_timeseries_line', 'Line Chart', [
[85, 93],
[85, 93],
[70, 93],
[70, 93],
]);
});
it('Area Chart', () => {
testEchart('echarts_area', 'Area Chart', [
[85, 93],
[85, 93],
[70, 93],
[70, 93],
]);
});
it('Scatter Chart', () => {
testEchart('echarts_timeseries_scatter', 'Scatter Chart', [
[85, 93],
[85, 93],
[70, 93],
[70, 93],
]);
});
it('Bar Chart', () => {
testEchart('echarts_timeseries_bar', 'Bar Chart', [
[85, 94],
[490, 68],
[70, 94],
[362, 68],
]);
});
@@ -565,22 +565,22 @@ describe('Drill by modal', () => {
it('Generic Chart', () => {
testEchart('echarts_timeseries', 'Generic Chart', [
[85, 93],
[85, 93],
[70, 93],
[70, 93],
]);
});
it('Smooth Line Chart', () => {
testEchart('echarts_timeseries_smooth', 'Smooth Line Chart', [
[85, 93],
[85, 93],
[70, 93],
[70, 93],
]);
});
it('Step Line Chart', () => {
testEchart('echarts_timeseries_step', 'Step Line Chart', [
[85, 93],
[85, 93],
[70, 93],
[70, 93],
]);
});
@@ -616,8 +616,8 @@ describe('Drill by modal', () => {
cy.get('[data-test-viz-type="mixed_timeseries"] canvas').then($canvas => {
// click 'boy'
cy.wrap($canvas).scrollIntoView();
cy.wrap($canvas).trigger('mouseover', 85, 93);
cy.wrap($canvas).rightclick(85, 93);
cy.wrap($canvas).trigger('mouseover', 70, 93);
cy.wrap($canvas).rightclick(70, 93);
drillBy('name').then(intercepted => {
const { queries } = intercepted.request.body;
@@ -650,8 +650,8 @@ describe('Drill by modal', () => {
cy.get(`[data-test="drill-by-chart"] canvas`).then($canvas => {
// click second query
cy.wrap($canvas).scrollIntoView();
cy.wrap($canvas).trigger('mouseover', 261, 114);
cy.wrap($canvas).rightclick(261, 114);
cy.wrap($canvas).trigger('mouseover', 246, 114);
cy.wrap($canvas).rightclick(246, 114);
drillBy('ds').then(intercepted => {
const { queries } = intercepted.request.body;

View File

@@ -25,7 +25,7 @@ import {
} from './utils';
function interceptSamples() {
cy.intercept(`**/datasource/samples*`).as('samples');
cy.intercept(`/datasource/samples*`).as('samples');
}
function openModalFromMenu(chartType: string) {
@@ -95,24 +95,24 @@ function testTimeChart(vizType: string) {
cy.get(`[data-test-viz-type='${vizType}'] canvas`).then($canvas => {
cy.wrap($canvas).scrollIntoView();
cy.wrap($canvas).trigger('mousemove', 85, 93);
cy.wrap($canvas).rightclick(85, 93);
cy.wrap($canvas).trigger('mousemove', 70, 93);
cy.wrap($canvas).rightclick(70, 93);
drillToDetailBy('Drill to detail by 1965');
cy.getBySel('filter-val').should('contain', '1965');
closeModal();
cy.wrap($canvas).scrollIntoView();
cy.wrap($canvas).trigger('mousemove', 85, 93);
cy.wrap($canvas).rightclick(85, 93);
cy.wrap($canvas).trigger('mousemove', 70, 93);
cy.wrap($canvas).rightclick(70, 93);
drillToDetailBy('Drill to detail by boy');
cy.getBySel('filter-val').should('contain', 'boy');
closeModal();
cy.wrap($canvas).scrollIntoView();
cy.wrap($canvas).trigger('mousemove', 85, 93);
cy.wrap($canvas).rightclick(85, 93);
cy.wrap($canvas).trigger('mousemove', 70, 93);
cy.wrap($canvas).rightclick(70, 93);
drillToDetailBy('Drill to detail by all');
cy.getBySel('filter-val').first().should('contain', '1965');
@@ -151,7 +151,7 @@ describe('Drill to detail modal', () => {
cy.on('uncaught:exception', () => false);
cy.wait('@samples');
// reload
cy.get("[aria-label='Reload']").click();
cy.get("[aria-label='reload']").click();
cy.wait('@samples');
// make sure it started back from first page
cy.get('.ant-pagination-item-active').should('contain', '1');
@@ -442,7 +442,7 @@ describe('Drill to detail modal', () => {
cy.get("[data-test-viz-type='box_plot'] canvas").then($canvas => {
const canvasWidth = $canvas.width() || 0;
const canvasHeight = $canvas.height() || 0;
const canvasCenterX = canvasWidth / 3 + 15;
const canvasCenterX = canvasWidth / 3;
const canvasCenterY = (canvasHeight * 5) / 6;
cy.wrap($canvas).scrollIntoView();

View File

@@ -177,7 +177,7 @@ describe('Horizontal FilterBar', () => {
});
it.skip('should spot changes in "more filters" and apply their values', () => {
cy.intercept(`**/api/v1/chart/data?form_data=**`).as('chart');
cy.intercept(`/api/v1/chart/data?form_data=**`).as('chart');
prepareDashboardFilters([
{ name: 'test_1', column: 'country_name', datasetId: 2 },
{ name: 'test_2', column: 'country_code', datasetId: 2 },

View File

@@ -170,7 +170,7 @@ describe('Native filters', () => {
testItems.datasetForNativeFilter,
);
saveNativeFilterSettings(WORLD_HEALTH_CHARTS);
cy.intercept(`**/api/v1/chart/data?form_data=**`).as('chart');
cy.intercept(`/api/v1/chart/data?form_data=**`).as('chart');
cy.get(nativeFilters.modal.container).should('not.exist');
// assert that native filter is created
validateFilterNameOnDashboard(testItems.filterType.timeColumn);

View File

@@ -55,7 +55,6 @@ export function prepareDashboardFilters(
controlValues: {
enableEmptyFilter: false,
defaultToFirstItem: false,
creatable: true,
multiSelect: true,
searchAllOptions: false,
inverseSelection: false,

View File

@@ -116,7 +116,7 @@ describe('Dashboard tabs', () => {
});
});
cy.intercept('**/superset/explore_json/?*').as('legacyChartData');
cy.intercept('/superset/explore_json/?*').as('legacyChartData');
// click row level tab, send 1 more query
cy.get('.ant-tabs-tab').contains('row tab 2').click();
@@ -131,7 +131,7 @@ describe('Dashboard tabs', () => {
expect(requestParams.viz_type).eq(LINE_CHART.viz);
});
cy.intercept('POST', '**/api/v1/chart/data?*').as('v1ChartData');
cy.intercept('POST', '/api/v1/chart/data?*').as('v1ChartData');
// click top level tab, send 1 more query
cy.get('.ant-tabs-tab').contains('Tab B').click();

View File

@@ -125,63 +125,63 @@ export const valueNativeFilterOptions = [
];
export function interceptGet() {
cy.intercept('GET', '**/api/v1/dashboard/*').as('get');
cy.intercept('GET', '/api/v1/dashboard/*').as('get');
}
export function interceptFiltering() {
cy.intercept('GET', `**/api/v1/dashboard/?q=*`).as('filtering');
cy.intercept('GET', `/api/v1/dashboard/?q=*`).as('filtering');
}
export function interceptBulkDelete() {
cy.intercept('DELETE', `**/api/v1/dashboard/?q=*`).as('bulkDelete');
cy.intercept('DELETE', `/api/v1/dashboard/?q=*`).as('bulkDelete');
}
export function interceptDelete() {
cy.intercept('DELETE', `**/api/v1/dashboard/*`).as('delete');
cy.intercept('DELETE', `/api/v1/dashboard/*`).as('delete');
}
export function interceptUpdate() {
cy.intercept('PUT', `**/api/v1/dashboard/*`).as('update');
cy.intercept('PUT', `/api/v1/dashboard/*`).as('update');
}
export function interceptExploreUpdate() {
cy.intercept('PUT', `**/api/v1/chart/*`).as('chartUpdate');
cy.intercept('PUT', `/api/v1/chart/*`).as('chartUpdate');
}
export function interceptPost() {
cy.intercept('POST', `**/api/v1/dashboard/`).as('post');
cy.intercept('POST', `/api/v1/dashboard/`).as('post');
}
export function interceptLog() {
cy.intercept('**/superset/log/?explode=events&dashboard_id=*').as('logs');
cy.intercept('/superset/log/?explode=events&dashboard_id=*').as('logs');
}
export function interceptFav() {
cy.intercept({ url: `**/api/v1/dashboard/*/favorites/`, method: 'POST' }).as(
cy.intercept({ url: `/api/v1/dashboard/*/favorites/`, method: 'POST' }).as(
'select',
);
}
export function interceptUnfav() {
cy.intercept({ url: `**/api/v1/dashboard/*/favorites/`, method: 'POST' }).as(
cy.intercept({ url: `/api/v1/dashboard/*/favorites/`, method: 'POST' }).as(
'unselect',
);
}
export function interceptDataset() {
cy.intercept('GET', `**/api/v1/dataset/*`).as('getDataset');
cy.intercept('GET', `/api/v1/dataset/*`).as('getDataset');
}
export function interceptCharts() {
cy.intercept('GET', `**/api/v1/dashboard/*/charts`).as('getCharts');
cy.intercept('GET', `/api/v1/dashboard/*/charts`).as('getCharts');
}
export function interceptDatasets() {
cy.intercept('GET', `**/api/v1/dashboard/*/datasets`).as('getDatasets');
cy.intercept('GET', `/api/v1/dashboard/*/datasets`).as('getDatasets');
}
export function interceptFilterState() {
cy.intercept('POST', `**/api/v1/dashboard/*/filter_state*`).as(
cy.intercept('POST', `/api/v1/dashboard/*/filter_state*`).as(
'postFilterState',
);
}

View File

@@ -32,12 +32,12 @@ function orderAlphabetical() {
}
function openProperties() {
cy.get('[aria-label="more"]').first().click();
cy.get('[aria-label="more-vert"]').first().click();
cy.getBySel('dashboard-card-option-edit-button').click();
}
function openMenu() {
cy.get('[aria-label="more"]').first().click();
cy.get('[aria-label="more-vert"]').first().click();
}
function confirmDelete(bulk = false) {
@@ -158,14 +158,17 @@ describe('Dashboards list', () => {
cy.getBySel('styled-card').first().contains('1 - Sample dashboard');
cy.getBySel('styled-card')
.first()
.find("[aria-label='unstarred']")
.find("[aria-label='favorite-unselected']")
.click();
cy.wait('@select');
cy.getBySel('styled-card').first().find("[aria-label='starred']").click();
cy.getBySel('styled-card')
.first()
.find("[aria-label='favorite-selected']")
.click();
cy.wait('@unselect');
cy.getBySel('styled-card')
.first()
.find("[aria-label='starred']")
.find("[aria-label='favorite-selected']")
.should('not.exist');
});

View File

@@ -18,11 +18,11 @@
*/
describe.skip('AdhocFilters', () => {
beforeEach(() => {
cy.intercept('GET', '**/api/v1/datasource/table/*/column/name/values').as(
cy.intercept('GET', '/api/v1/datasource/table/*/column/name/values').as(
'filterValues',
);
cy.intercept('POST', '**/superset/explore_json/**').as('postJson');
cy.intercept('GET', '**/superset/explore_json/**').as('getJson');
cy.intercept('POST', '/superset/explore_json/**').as('postJson');
cy.intercept('GET', '/superset/explore_json/**').as('getJson');
cy.visitChartByName('Boys'); // a table chart
cy.verifySliceSuccess({ waitAlias: '@postJson' });
});

View File

@@ -21,8 +21,8 @@ import { interceptV1ChartData } from './utils';
describe('Advanced analytics', () => {
beforeEach(() => {
interceptV1ChartData();
cy.intercept('PUT', '**/api/v1/explore/**').as('putExplore');
cy.intercept('GET', '**/explore/**').as('getExplore');
cy.intercept('PUT', '/api/v1/explore/**').as('putExplore');
cy.intercept('GET', '/explore/**').as('getExplore');
});
it('Create custom time compare', () => {

View File

@@ -146,7 +146,7 @@ describe('Test datatable', () => {
});
it('Datapane loads view samples', () => {
cy.intercept(
'**/datasource/samples?force=false&datasource_type=table&datasource_id=*',
'datasource/samples?force=false&datasource_type=table&datasource_id=*',
).as('Samples');
cy.contains('Samples').click();
cy.wait('@Samples');

View File

@@ -20,41 +20,41 @@
import { interceptGet as interceptDashboardGet } from '../dashboard/utils';
export function interceptFiltering() {
cy.intercept('GET', `**/api/v1/chart/?q=*`).as('filtering');
cy.intercept('GET', `/api/v1/chart/?q=*`).as('filtering');
}
export function interceptBulkDelete() {
cy.intercept('DELETE', `**/api/v1/chart/?q=*`).as('bulkDelete');
cy.intercept('DELETE', `/api/v1/chart/?q=*`).as('bulkDelete');
}
export function interceptDelete() {
cy.intercept('DELETE', `**/api/v1/chart/*`).as('delete');
cy.intercept('DELETE', `/api/v1/chart/*`).as('delete');
}
export function interceptFavoriteStatus() {
cy.intercept('GET', '**/api/v1/chart/favorite_status/*').as('favoriteStatus');
cy.intercept('GET', '/api/v1/chart/favorite_status/*').as('favoriteStatus');
}
export function interceptUpdate() {
cy.intercept('PUT', `**/api/v1/chart/*`).as('update');
cy.intercept('PUT', `/api/v1/chart/*`).as('update');
}
export const interceptV1ChartData = (alias = 'v1Data') => {
cy.intercept('**/api/v1/chart/data*').as(alias);
cy.intercept('/api/v1/chart/data*').as(alias);
};
export function interceptExploreJson(alias = 'getJson') {
cy.intercept('POST', `**/superset/explore_json/**`).as(alias);
cy.intercept('POST', `/superset/explore_json/**`).as(alias);
}
export const interceptFormDataKey = () => {
cy.intercept('POST', '**/api/v1/explore/form_data').as('formDataKey');
cy.intercept('POST', '/api/v1/explore/form_data').as('formDataKey');
};
export function interceptExploreGet() {
cy.intercept({
method: 'GET',
url: /.*\/api\/v1\/explore\/\?(form_data_key|dashboard_page_id|slice_id)=.*/,
url: /api\/v1\/explore\/\?(form_data_key|dashboard_page_id|slice_id)=.*/,
}).as('getExplore');
}
@@ -96,5 +96,5 @@ export function saveChartToDashboard(dashboardName: string) {
export function visitSampleChartFromList(chartName: string) {
cy.getBySel('table-row').contains(chartName).click();
cy.intercept('POST', '**/superset/explore_json/**').as('getJson');
cy.intercept('POST', '/superset/explore_json/**').as('getJson');
}

View File

@@ -56,6 +56,7 @@ describe('Visualization > Big Number with Trendline', () => {
it('should work', () => {
verify(BIG_NUMBER_FORM_DATA);
cy.get('.chart-container .header-line');
cy.get('.chart-container .subheader-line');
cy.get('.chart-container canvas');
});
@@ -65,7 +66,7 @@ describe('Visualization > Big Number with Trendline', () => {
compare_lag: null,
});
cy.get('.chart-container .header-line');
cy.get('.chart-container .subtitle-line').should('not.exist');
cy.get('.chart-container .subheader-line').should('not.exist');
cy.get('.chart-container canvas');
});
@@ -75,6 +76,7 @@ describe('Visualization > Big Number with Trendline', () => {
show_trend_line: false,
});
cy.get('[data-test="chart-container"] .header-line');
cy.get('[data-test="chart-container"] .subheader-line');
cy.get('[data-test="chart-container"] canvas').should('not.exist');
});
});

View File

@@ -18,7 +18,7 @@
*/
describe('Visualization > Box Plot', () => {
beforeEach(() => {
cy.intercept('POST', '**/api/v1/chart/data*').as('getJson');
cy.intercept('POST', '/api/v1/chart/data*').as('getJson');
});
const BOX_PLOT_FORM_DATA = {

View File

@@ -18,7 +18,7 @@
*/
describe('Visualization > Bubble', () => {
beforeEach(() => {
cy.intercept('POST', '**/superset/explore_json/**').as('getJson');
cy.intercept('POST', '/superset/explore_json/**').as('getJson');
});
const BUBBLE_FORM_DATA = {

View File

@@ -18,7 +18,7 @@
*/
describe('Visualization > Compare', () => {
beforeEach(() => {
cy.intercept('POST', '**/superset/explore_json/**').as('getJson');
cy.intercept('POST', '/superset/explore_json/**').as('getJson');
});
const COMPARE_FORM_DATA = {

View File

@@ -25,7 +25,7 @@ describe('Download Chart > Bar chart', () => {
};
beforeEach(() => {
cy.intercept('POST', '**/superset/explore_json/**').as('getJson');
cy.intercept('POST', '/superset/explore_json/**').as('getJson');
});
it('download chart with image works', () => {

View File

@@ -19,7 +19,7 @@
describe('Visualization > Gauge', () => {
beforeEach(() => {
cy.intercept('POST', '**/api/v1/chart/data*').as('getJson');
cy.intercept('POST', '/api/v1/chart/data*').as('getJson');
});
const GAUGE_FORM_DATA = {

View File

@@ -28,7 +28,7 @@ type adhocFilter = {
describe('Visualization > Graph', () => {
beforeEach(() => {
cy.intercept('POST', '**/api/v1/chart/data*').as('getJson');
cy.intercept('POST', '/api/v1/chart/data*').as('getJson');
});
const GRAPH_FORM_DATA = {

View File

@@ -18,7 +18,7 @@
*/
describe('Visualization > Pie', () => {
beforeEach(() => {
cy.intercept('POST', '**/api/v1/chart/data*').as('getJson');
cy.intercept('POST', '/api/v1/chart/data*').as('getJson');
});
const PIE_FORM_DATA = {

View File

@@ -18,7 +18,7 @@
*/
describe('Visualization > Pivot Table', () => {
beforeEach(() => {
cy.intercept('POST', '**/api/v1/chart/data**').as('chartData');
cy.intercept('POST', '/api/v1/chart/data**').as('chartData');
});
const PIVOT_TABLE_FORM_DATA = {

View File

@@ -18,7 +18,7 @@
*/
describe('Visualization > Sunburst', () => {
beforeEach(() => {
cy.intercept('POST', '**/api/v1/chart/data**').as('chartData');
cy.intercept('POST', '/api/v1/chart/data**').as('chartData');
});
const SUNBURST_FORM_DATA = {

View File

@@ -20,7 +20,7 @@ import { FORM_DATA_DEFAULTS, NUM_METRIC } from './shared.helper';
describe('Visualization > Time TableViz', () => {
beforeEach(() => {
cy.intercept('POST', '**/superset/explore_json/**').as('getJson');
cy.intercept('POST', '/superset/explore_json/**').as('getJson');
});
const VIZ_DEFAULTS = { ...FORM_DATA_DEFAULTS, viz_type: 'time_table' };

Some files were not shown because too many files have changed in this diff Show More