diff --git a/.gitignore b/.gitignore index b3ef8b6db11..2f649d941f8 100644 --- a/.gitignore +++ b/.gitignore @@ -107,6 +107,7 @@ ghostdriver.log testCSV.csv .terser-plugin-cache/ apache-superset-*.tar.gz* +apache_superset-*.tar.gz* release.json # Translation-related files diff --git a/README.md b/README.md index c23fd3fdbe3..1cc4c44a6d0 100644 --- a/README.md +++ b/README.md @@ -22,9 +22,9 @@ under the License. [![License](https://img.shields.io/badge/License-Apache%202.0-blue.svg)](https://opensource.org/license/apache-2-0) [![Latest Release on Github](https://img.shields.io/github/v/release/apache/superset?sort=semver)](https://github.com/apache/superset/releases/latest) [![Build Status](https://github.com/apache/superset/actions/workflows/superset-python-unittest.yml/badge.svg)](https://github.com/apache/superset/actions) -[![PyPI version](https://badge.fury.io/py/apache-superset.svg)](https://badge.fury.io/py/apache-superset) +[![PyPI version](https://badge.fury.io/py/apache_superset.svg)](https://badge.fury.io/py/apache_superset) [![Coverage Status](https://codecov.io/github/apache/superset/coverage.svg?branch=master)](https://codecov.io/github/apache/superset) -[![PyPI](https://img.shields.io/pypi/pyversions/apache-superset.svg?maxAge=2592000)](https://pypi.python.org/pypi/apache-superset) +[![PyPI](https://img.shields.io/pypi/pyversions/apache_superset.svg?maxAge=2592000)](https://pypi.python.org/pypi/apache_superset) [![Get on Slack](https://img.shields.io/badge/slack-join-orange.svg)](http://bit.ly/join-superset-slack) [![Documentation](https://img.shields.io/badge/docs-apache.org-blue.svg)](https://superset.apache.org) @@ -72,9 +72,10 @@ Superset provides: ## Screenshots & Gifs **Video Overview** - -[superset-video-1080p.webm](https://github.com/user-attachments/assets/b37388f7-a971-409c-96a7-90c4e31322e6) + + +[superset-video-1080p.webm](https://github.com/user-attachments/assets/b37388f7-a971-409c-96a7-90c4e31322e6)
@@ -156,7 +157,7 @@ Try out Superset's [quickstart](https://superset.apache.org/docs/quickstart/) gu and please read our [Slack Community Guidelines](https://github.com/apache/superset/blob/master/CODE_OF_CONDUCT.md#slack-community-guidelines) - [Join our dev@superset.apache.org Mailing list](https://lists.apache.org/list.html?dev@superset.apache.org). To join, simply send an email to [dev-subscribe@superset.apache.org](mailto:dev-subscribe@superset.apache.org) - If you want to help troubleshoot GitHub Issues involving the numerous database drivers that Superset supports, please consider adding your name and the databases you have access to on the [Superset Database Familiarity Rolodex](https://docs.google.com/spreadsheets/d/1U1qxiLvOX0kBTUGME1AHHi6Ywel6ECF8xk_Qy-V9R8c/edit#gid=0) -- Join Superset's Town Hall and [Operational Model](https://preset.io/blog/the-superset-operational-model-wants-you/) recurring meetings. Meeting info is available on the [Superset Community Calendar](https://superset.apache.org/community) +- Join Superset's Town Hall and [Operational Model](https://preset.io/blog/the-superset-operational-model-wants-you/) recurring meetings. 
Meeting info is available on the [Superset Community Calendar](https://superset.apache.org/community) ## Contributor Guide @@ -184,14 +185,16 @@ Understanding the Superset Points of View - [Building New Database Connectors](https://preset.io/blog/building-database-connector/) - [Create Your First Dashboard](https://superset.apache.org/docs/using-superset/creating-your-first-dashboard/) - [Comprehensive Tutorial for Contributing Code to Apache Superset - ](https://preset.io/blog/tutorial-contributing-code-to-apache-superset/) + ](https://preset.io/blog/tutorial-contributing-code-to-apache-superset/) - [Resources to master Superset by Preset](https://preset.io/resources/) - Deploying Superset + - [Official Docker image](https://hub.docker.com/r/apache/superset) - [Helm Chart](https://github.com/apache/superset/tree/master/helm/superset) - Recordings of Past [Superset Community Events](https://preset.io/events) + - [Mixed Time Series Charts](https://preset.io/events/mixed-time-series-visualization-in-superset-workshop/) - [How the Bing Team Customized Superset for the Internal Self-Serve Data & Analytics Platform](https://preset.io/events/how-the-bing-team-heavily-customized-superset-for-their-internal-data/) - [Live Demo: Visualizing MongoDB and Pinot Data using Trino](https://preset.io/events/2021-04-13-visualizing-mongodb-and-pinot-data-using-trino/) @@ -199,6 +202,7 @@ Understanding the Superset Points of View - [Building a Database Connector for Superset](https://preset.io/events/2021-02-16-building-a-database-connector-for-superset/) - Visualizations + - [Creating Viz Plugins](https://superset.apache.org/docs/contributing/creating-viz-plugins/) - [Managing and Deploying Custom Viz Plugins](https://medium.com/nmc-techblog/apache-superset-manage-custom-viz-plugins-in-production-9fde1a708e55) - [Why Apache Superset is Betting on Apache ECharts](https://preset.io/blog/2021-4-1-why-echarts/) diff --git a/RELEASING/Dockerfile.from_local_tarball 
b/RELEASING/Dockerfile.from_local_tarball index 6240439050a..3794ed4c80a 100644 --- a/RELEASING/Dockerfile.from_local_tarball +++ b/RELEASING/Dockerfile.from_local_tarball @@ -20,7 +20,7 @@ RUN useradd --user-group --create-home --no-log-init --shell /bin/bash superset # Configure environment ENV LANG=C.UTF-8 \ - LC_ALL=C.UTF-8 + LC_ALL=C.UTF-8 RUN apt-get update -y @@ -30,14 +30,14 @@ RUN apt-get install -y apt-transport-https apt-utils # Install superset dependencies # https://superset.apache.org/docs/installation/installing-superset-from-scratch RUN apt-get install -y build-essential libssl-dev \ - libffi-dev python3-dev libsasl2-dev libldap2-dev libxi-dev chromium zstd + libffi-dev python3-dev libsasl2-dev libldap2-dev libxi-dev chromium zstd # Install nodejs for custom build # https://nodejs.org/en/download/package-manager/ RUN set -eux; \ - curl -sL https://deb.nodesource.com/setup_20.x | bash -; \ - apt-get install -y nodejs; \ - node --version; + curl -sL https://deb.nodesource.com/setup_20.x | bash -; \ + apt-get install -y nodejs; \ + node --version; RUN if ! which npm; then apt-get install -y npm; fi RUN mkdir -p /home/superset @@ -50,21 +50,21 @@ ARG SUPERSET_RELEASE_RC_TARBALL # Can fetch source from svn or copy tarball from local mounted directory COPY $SUPERSET_RELEASE_RC_TARBALL ./ RUN tar -xvf *.tar.gz -WORKDIR /home/superset/apache-superset-$VERSION/superset-frontend +WORKDIR /home/superset/apache_superset-$VERSION/superset-frontend RUN npm ci \ - && npm run build \ - && rm -rf node_modules + && npm run build \ + && rm -rf node_modules -WORKDIR /home/superset/apache-superset-$VERSION +WORKDIR /home/superset/apache_superset-$VERSION RUN pip install --upgrade setuptools pip \ - && pip install -r requirements/base.txt \ - && pip install --no-cache-dir . + && pip install -r requirements/base.txt \ + && pip install --no-cache-dir . 
RUN flask fab babel-compile --target superset/translations ENV PATH=/home/superset/superset/bin:$PATH \ - PYTHONPATH=/home/superset/superset/ \ - SUPERSET_TESTENV=true + PYTHONPATH=/home/superset/superset/ \ + SUPERSET_TESTENV=true COPY from_tarball_entrypoint.sh /entrypoint.sh ENTRYPOINT ["/entrypoint.sh"] diff --git a/RELEASING/Dockerfile.from_svn_tarball b/RELEASING/Dockerfile.from_svn_tarball index f14754c6901..33d0e9451b0 100644 --- a/RELEASING/Dockerfile.from_svn_tarball +++ b/RELEASING/Dockerfile.from_svn_tarball @@ -20,7 +20,7 @@ RUN useradd --user-group --create-home --no-log-init --shell /bin/bash superset # Configure environment ENV LANG=C.UTF-8 \ - LC_ALL=C.UTF-8 + LC_ALL=C.UTF-8 RUN apt-get update -y @@ -30,14 +30,14 @@ RUN apt-get install -y apt-transport-https apt-utils # Install superset dependencies # https://superset.apache.org/docs/installation/installing-superset-from-scratch RUN apt-get install -y subversion build-essential libssl-dev \ - libffi-dev python3-dev libsasl2-dev libldap2-dev libxi-dev chromium zstd + libffi-dev python3-dev libsasl2-dev libldap2-dev libxi-dev chromium zstd # Install nodejs for custom build # https://nodejs.org/en/download/package-manager/ RUN set -eux; \ - curl -sL https://deb.nodesource.com/setup_20.x | bash -; \ - apt-get install -y nodejs; \ - node --version; + curl -sL https://deb.nodesource.com/setup_20.x | bash -; \ + apt-get install -y nodejs; \ + node --version; RUN if ! 
which npm; then apt-get install -y npm; fi RUN mkdir -p /home/superset @@ -49,20 +49,20 @@ ARG VERSION # Can fetch source from svn or copy tarball from local mounted directory RUN svn co https://dist.apache.org/repos/dist/dev/superset/$VERSION ./ RUN tar -xvf *.tar.gz -WORKDIR /home/superset/apache-superset-$VERSION/superset-frontend +WORKDIR /home/superset/apache_superset-$VERSION/superset-frontend RUN npm ci \ - && npm run build \ - && rm -rf node_modules + && npm run build \ + && rm -rf node_modules -WORKDIR /home/superset/apache-superset-$VERSION +WORKDIR /home/superset/apache_superset-$VERSION RUN pip install --upgrade setuptools pip \ - && pip install -r requirements/base.txt \ - && pip install --no-cache-dir . + && pip install -r requirements/base.txt \ + && pip install --no-cache-dir . RUN flask fab babel-compile --target superset/translations ENV PATH=/home/superset/superset/bin:$PATH \ - PYTHONPATH=/home/superset/superset/ + PYTHONPATH=/home/superset/superset/ COPY from_tarball_entrypoint.sh /entrypoint.sh ENTRYPOINT ["/entrypoint.sh"] diff --git a/RELEASING/README.md b/RELEASING/README.md index 7030571be62..4468c9d2783 100644 --- a/RELEASING/README.md +++ b/RELEASING/README.md @@ -123,10 +123,10 @@ SUPERSET_RC=1 SUPERSET_GITHUB_BRANCH=1.5 SUPERSET_PGP_FULLNAME=villebro@apache.org SUPERSET_VERSION_RC=1.5.1rc1 -SUPERSET_RELEASE=apache-superset-1.5.1 -SUPERSET_RELEASE_RC=apache-superset-1.5.1rc1 -SUPERSET_RELEASE_TARBALL=apache-superset-1.5.1-source.tar.gz -SUPERSET_RELEASE_RC_TARBALL=apache-superset-1.5.1rc1-source.tar.gz +SUPERSET_RELEASE=apache_superset-1.5.1 +SUPERSET_RELEASE_RC=apache_superset-1.5.1rc1 +SUPERSET_RELEASE_TARBALL=apache_superset-1.5.1-source.tar.gz +SUPERSET_RELEASE_RC_TARBALL=apache_superset-1.5.1rc1-source.tar.gz SUPERSET_TMP_ASF_SITE_PATH=/tmp/incubator-superset-site-1.5.1 ------------------------------- ``` @@ -380,7 +380,7 @@ Official instructions: https://www.apache.org/info/verification.html We now have a handy script for anyone 
validating a release to use. The core of it is in this very folder, `verify_release.py`. Just make sure you have all three release files in the same directory (`{some version}.tar.gz`, `{some version}.tar.gz.asc` and `{some version}tar.gz.sha512`). Then you can pass this script the path to the `.gz` file like so: -`python verify_release.py ~/path/tp/apache-superset-{version/candidate}-source.tar.gz` +`python verify_release.py ~/path/tp/apache_superset-{version/candidate}-source.tar.gz` If all goes well, you will see this result in your terminal: @@ -470,7 +470,7 @@ while requesting access to push packages. ```bash twine upload dist/apache_superset-${SUPERSET_VERSION}-py3-none-any.whl -twine upload dist/apache-superset-${SUPERSET_VERSION}.tar.gz +twine upload dist/apache_superset-${SUPERSET_VERSION}.tar.gz ``` Set your username to `__token__` diff --git a/RELEASING/email_templates/announce.j2 b/RELEASING/email_templates/announce.j2 index b12a2ee5b03..01b6893fb30 100644 --- a/RELEASING/email_templates/announce.j2 +++ b/RELEASING/email_templates/announce.j2 @@ -31,7 +31,7 @@ The official source release: https://downloads.apache.org/{{ project_module }}/{{ version }} The PyPI package: -https://pypi.org/project/apache-superset/{{ version }} +https://pypi.org/project/apache_superset/{{ version }} The CHANGELOG for the release: https://github.com/apache/{{ project_module }}/blob/{{ version }}/CHANGELOG/{{ version }}.md diff --git a/RELEASING/make_tarball.sh b/RELEASING/make_tarball.sh index 47686d44022..c4c53f979e3 100755 --- a/RELEASING/make_tarball.sh +++ b/RELEASING/make_tarball.sh @@ -32,7 +32,7 @@ else SUPERSET_VERSION="${1}" SUPERSET_RC="${2}" SUPERSET_PGP_FULLNAME="${3}" - SUPERSET_RELEASE_RC_TARBALL="apache-superset-${SUPERSET_VERSION_RC}-source.tar.gz" + SUPERSET_RELEASE_RC_TARBALL="apache_superset-${SUPERSET_VERSION_RC}-source.tar.gz" fi SUPERSET_VERSION_RC="${SUPERSET_VERSION}rc${SUPERSET_RC}" diff --git a/RELEASING/make_tarball_entrypoint.sh 
b/RELEASING/make_tarball_entrypoint.sh index ffbc0ac33ca..022fca294c2 100755 --- a/RELEASING/make_tarball_entrypoint.sh +++ b/RELEASING/make_tarball_entrypoint.sh @@ -22,7 +22,7 @@ if [ -z "${SUPERSET_VERSION_RC}" ] || [ -z "${SUPERSET_SVN_DEV_PATH}" ] || [ -z exit 1 fi -SUPERSET_RELEASE_RC=apache-superset-"${SUPERSET_VERSION_RC}" +SUPERSET_RELEASE_RC=apache_superset-"${SUPERSET_VERSION_RC}" SUPERSET_RELEASE_RC_TARBALL="${SUPERSET_RELEASE_RC}"-source.tar.gz SUPERSET_RELEASE_RC_BASE_PATH="${SUPERSET_SVN_DEV_PATH}"/"${SUPERSET_VERSION_RC}" SUPERSET_RELEASE_RC_TARBALL_PATH="${SUPERSET_RELEASE_RC_BASE_PATH}"/"${SUPERSET_RELEASE_RC_TARBALL}" diff --git a/RELEASING/set_release_env.sh b/RELEASING/set_release_env.sh index 3d04a76d786..7b297395775 100755 --- a/RELEASING/set_release_env.sh +++ b/RELEASING/set_release_env.sh @@ -50,8 +50,8 @@ else export SUPERSET_GITHUB_BRANCH="${VERSION_MAJOR}.${VERSION_MINOR}" export SUPERSET_PGP_FULLNAME="${2}" export SUPERSET_VERSION_RC="${SUPERSET_VERSION}rc${VERSION_RC}" - export SUPERSET_RELEASE=apache-superset-"${SUPERSET_VERSION}" - export SUPERSET_RELEASE_RC=apache-superset-"${SUPERSET_VERSION_RC}" + export SUPERSET_RELEASE=apache_superset-"${SUPERSET_VERSION}" + export SUPERSET_RELEASE_RC=apache_superset-"${SUPERSET_VERSION_RC}" export SUPERSET_RELEASE_TARBALL="${SUPERSET_RELEASE}"-source.tar.gz export SUPERSET_RELEASE_RC_TARBALL="${SUPERSET_RELEASE_RC}"-source.tar.gz export SUPERSET_TMP_ASF_SITE_PATH="/tmp/incubator-superset-site-${SUPERSET_VERSION}" diff --git a/RELEASING/test_run_tarball.sh b/RELEASING/test_run_tarball.sh index d4c8a9c706a..d28c7226bda 100755 --- a/RELEASING/test_run_tarball.sh +++ b/RELEASING/test_run_tarball.sh @@ -27,7 +27,7 @@ if [ -z "${SUPERSET_SVN_DEV_PATH}" ]; then fi if [[ -n ${1} ]] && [[ ${1} == "local" ]]; then - SUPERSET_RELEASE_RC=apache-superset-"${SUPERSET_VERSION_RC}" + SUPERSET_RELEASE_RC=apache_superset-"${SUPERSET_VERSION_RC}" SUPERSET_RELEASE_RC_TARBALL="${SUPERSET_RELEASE_RC}"-source.tar.gz 
SUPERSET_TARBALL_PATH="${SUPERSET_SVN_DEV_PATH}"/${SUPERSET_VERSION_RC}/${SUPERSET_RELEASE_RC_TARBALL} SUPERSET_TMP_TARBALL_FILENAME=_tmp_"${SUPERSET_VERSION_RC}".tar.gz diff --git a/RELEASING/validate_this_release.sh b/RELEASING/validate_this_release.sh index 98c502be2a2..4942803702c 100755 --- a/RELEASING/validate_this_release.sh +++ b/RELEASING/validate_this_release.sh @@ -38,7 +38,7 @@ get_pip_command() { PYTHON=$(get_python_command) PIP=$(get_pip_command) -# Get the release directory's path. If you unzip an Apache release and just run the npm script to validate the release, this will be a file name like `apache-superset-x.x.xrcx-source.tar.gz` +# Get the release directory's path. If you unzip an Apache release and just run the npm script to validate the release, this will be a file name like `apache_superset-x.x.xrcx-source.tar.gz` RELEASE_ZIP_PATH="../../$(basename "$(dirname "$(pwd)")")-source.tar.gz" # Install dependencies from requirements.txt if the file exists diff --git a/docs/docs/configuration/databases.mdx b/docs/docs/configuration/databases.mdx index d30d4c2e39c..e74c1e535a8 100644 --- a/docs/docs/configuration/databases.mdx +++ b/docs/docs/configuration/databases.mdx @@ -72,7 +72,7 @@ are compatible with Superset. 
| [PostgreSQL](/docs/configuration/databases#postgres) | `pip install psycopg2` | `postgresql://:@/` | | [Presto](/docs/configuration/databases#presto) | `pip install pyhive` | `presto://{username}:{password}@{hostname}:{port}/{database}` | | [Rockset](/docs/configuration/databases#rockset) | `pip install rockset-sqlalchemy` | `rockset://:@` | -| [SAP Hana](/docs/configuration/databases#hana) | `pip install hdbcli sqlalchemy-hana` or `pip install apache-superset[hana]` | `hana://{username}:{password}@{host}:{port}` | +| [SAP Hana](/docs/configuration/databases#hana) | `pip install hdbcli sqlalchemy-hana` or `pip install apache_superset[hana]` | `hana://{username}:{password}@{host}:{port}` | | [StarRocks](/docs/configuration/databases#starrocks) | `pip install starrocks` | `starrocks://:@:/.` | | [Snowflake](/docs/configuration/databases#snowflake) | `pip install snowflake-sqlalchemy` | `snowflake://{user}:{password}@{account}.{region}/{database}?role={role}&warehouse={warehouse}` | | SQLite | No additional library needed | `sqlite://path/to/file.db?check_same_thread=false` | diff --git a/docs/docs/configuration/networking-settings.mdx b/docs/docs/configuration/networking-settings.mdx index 03b2b981fe4..4921a277d48 100644 --- a/docs/docs/configuration/networking-settings.mdx +++ b/docs/docs/configuration/networking-settings.mdx @@ -11,7 +11,7 @@ version: 1 To configure CORS, or cross-origin resource sharing, the following dependency must be installed: ```python -pip install apache-superset[cors] +pip install apache_superset[cors] ``` The following keys in `superset_config.py` can be specified to configure CORS: diff --git a/docs/docs/contributing/contributing.mdx b/docs/docs/contributing/contributing.mdx index 6ae7901874e..109a3692df9 100644 --- a/docs/docs/contributing/contributing.mdx +++ b/docs/docs/contributing/contributing.mdx @@ -26,9 +26,9 @@ More references: Here's a list of repositories that contain Superset-related packages: - 
[apache/superset](https://github.com/apache/superset) - is the main repository containing the `apache-superset` Python package + is the main repository containing the `apache_superset` Python package distributed on - [pypi](https://pypi.org/project/apache-superset/). This repository + [pypi](https://pypi.org/project/apache_superset/). This repository also includes Superset's main TypeScript/JavaScript bundles and react apps under the [superset-frontend](https://github.com/apache/superset/tree/master/superset-frontend) folder. diff --git a/docs/docs/installation/pypi.mdx b/docs/docs/installation/pypi.mdx index 3e01986f5cd..da2aabcf0b1 100644 --- a/docs/docs/installation/pypi.mdx +++ b/docs/docs/installation/pypi.mdx @@ -12,7 +12,7 @@ import useBaseUrl from "@docusaurus/useBaseUrl";

-This page describes how to install Superset using the `apache-superset` package [published on PyPI](https://pypi.org/project/apache-superset/). +This page describes how to install Superset using the `apache_superset` package [published on PyPI](https://pypi.org/project/apache_superset/). ## OS Dependencies @@ -124,10 +124,10 @@ command line. ### Installing and Initializing Superset -First, start by installing `apache-superset`: +First, start by installing `apache_superset`: ```bash -pip install apache-superset +pip install apache_superset ``` Then, define mandatory configurations, SECRET_KEY and FLASK_APP: diff --git a/docs/docs/installation/upgrading-superset.mdx b/docs/docs/installation/upgrading-superset.mdx index 459223385c6..38e03822dd4 100644 --- a/docs/docs/installation/upgrading-superset.mdx +++ b/docs/docs/installation/upgrading-superset.mdx @@ -32,7 +32,7 @@ docker compose up To upgrade superset in a native installation, run the following commands: ```bash -pip install apache-superset --upgrade +pip install apache_superset --upgrade ``` ## Upgrading the Metadata Database diff --git a/pyproject.toml b/pyproject.toml index 2885418397a..55364db11f2 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -20,7 +20,7 @@ requires = ["setuptools>=40.9.0", "wheel"] build-backend = "setuptools.build_meta" [project] -name = "apache-superset" +name = "apache_superset" description = "A modern, enterprise-ready business intelligence web application" readme = "README.md" dynamic = ["version", "scripts", "entry-points"] diff --git a/requirements/development.txt b/requirements/development.txt index 8e563829a52..04daa92de1c 100644 --- a/requirements/development.txt +++ b/requirements/development.txt @@ -319,7 +319,7 @@ greenlet==3.1.1 # gevent # shillelagh # sqlalchemy -grpcio==1.68.0 +grpcio==1.71.0 # via # apache-superset # google-api-core diff --git a/setup.py b/setup.py index b89288f7605..9b834cc2417 100644 --- a/setup.py +++ b/setup.py @@ -52,6 +52,7 @@ with 
open(VERSION_INFO_FILE, "w") as version_file: version_string = version_string.replace("-dev", ".dev0") setup( + name="apache_superset", version=version_string, packages=find_packages(), include_package_data=True, diff --git a/superset-frontend/packages/superset-ui-chart-controls/src/operators/aggregateOperator.ts b/superset-frontend/packages/superset-ui-chart-controls/src/operators/aggregateOperator.ts new file mode 100644 index 00000000000..aa3c518ad92 --- /dev/null +++ b/superset-frontend/packages/superset-ui-chart-controls/src/operators/aggregateOperator.ts @@ -0,0 +1,58 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +import { + getMetricLabel, + ensureIsArray, + PostProcessingAggregation, + QueryFormData, + Aggregates, +} from '@superset-ui/core'; +import { PostProcessingFactory } from './types'; + +export const aggregationOperator: PostProcessingFactory< + PostProcessingAggregation +> = (formData: QueryFormData, queryObject) => { + const { aggregation = 'LAST_VALUE' } = formData; + + if (aggregation === 'LAST_VALUE') { + return undefined; + } + + const metrics = ensureIsArray(queryObject.metrics); + if (metrics.length === 0) { + return undefined; + } + + const aggregates: Aggregates = {}; + metrics.forEach(metric => { + const metricLabel = getMetricLabel(metric); + aggregates[metricLabel] = { + operator: aggregation, + column: metricLabel, + }; + }); + + return { + operation: 'aggregate', + options: { + groupby: [], + aggregates, + }, + }; +}; diff --git a/superset-frontend/packages/superset-ui-chart-controls/src/operators/index.ts b/superset-frontend/packages/superset-ui-chart-controls/src/operators/index.ts index cac7088a775..0f6a01ee127 100644 --- a/superset-frontend/packages/superset-ui-chart-controls/src/operators/index.ts +++ b/superset-frontend/packages/superset-ui-chart-controls/src/operators/index.ts @@ -21,6 +21,7 @@ export { rollingWindowOperator } from './rollingWindowOperator'; export { timeCompareOperator } from './timeCompareOperator'; export { timeComparePivotOperator } from './timeComparePivotOperator'; export { sortOperator } from './sortOperator'; +export { aggregationOperator } from './aggregateOperator'; export { histogramOperator } from './histogramOperator'; export { pivotOperator } from './pivotOperator'; export { resampleOperator } from './resampleOperator'; diff --git a/superset-frontend/packages/superset-ui-chart-controls/src/shared-controls/customControls.tsx b/superset-frontend/packages/superset-ui-chart-controls/src/shared-controls/customControls.tsx index d25273c08e9..bdd6d1b82cc 100644 --- 
a/superset-frontend/packages/superset-ui-chart-controls/src/shared-controls/customControls.tsx +++ b/superset-frontend/packages/superset-ui-chart-controls/src/shared-controls/customControls.tsx @@ -61,6 +61,32 @@ const xAxisSortVisibility = ({ controls }: { controls: ControlStateMapping }) => ensureIsArray(controls?.groupby?.value).length === 0 && ensureIsArray(controls?.metrics?.value).length === 1; +// TODO: Expand this aggregation options list to include all backend-supported aggregations. +// TODO: Migrate existing chart types (Pivot Table, etc.) to use this shared control. +export const aggregationControl = { + name: 'aggregation', + config: { + type: 'SelectControl', + label: t('Aggregation Method'), + default: 'LAST_VALUE', + clearable: false, + renderTrigger: false, + choices: [ + ['LAST_VALUE', t('Last Value')], + ['sum', t('Total (Sum)')], + ['mean', t('Average (Mean)')], + ['min', t('Minimum')], + ['max', t('Maximum')], + ['median', t('Median')], + ], + description: t('Select an aggregation method to apply to the metric.'), + provideFormDataToProps: true, + mapStateToProps: ({ form_data }: ControlPanelState) => ({ + value: form_data.aggregation || 'LAST_VALUE', + }), + }, +}; + const xAxisMultiSortVisibility = ({ controls, }: { diff --git a/superset-frontend/packages/superset-ui-chart-controls/src/shared-controls/index.ts b/superset-frontend/packages/superset-ui-chart-controls/src/shared-controls/index.ts index 8ff85439bfa..0deb6b39862 100644 --- a/superset-frontend/packages/superset-ui-chart-controls/src/shared-controls/index.ts +++ b/superset-frontend/packages/superset-ui-chart-controls/src/shared-controls/index.ts @@ -19,6 +19,7 @@ export { default as sharedControls } from './sharedControls'; // React control components export { default as sharedControlComponents } from './components'; +export { aggregationControl } from './customControls'; export * from './components'; export * from './customControls'; export * from './mixins'; diff --git 
a/superset-frontend/packages/superset-ui-chart-controls/src/utils/D3Formatting.ts b/superset-frontend/packages/superset-ui-chart-controls/src/utils/D3Formatting.ts index 8295c60d9cd..3ff8fb9c1e2 100644 --- a/superset-frontend/packages/superset-ui-chart-controls/src/utils/D3Formatting.ts +++ b/superset-frontend/packages/superset-ui-chart-controls/src/utils/D3Formatting.ts @@ -78,6 +78,7 @@ export const D3_TIME_FORMAT_OPTIONS: [string, string][] = [ [SMART_DATE_ID, t('Adaptive formatting')], ['%d/%m/%Y', '%d/%m/%Y | 14/01/2019'], ['%m/%d/%Y', '%m/%d/%Y | 01/14/2019'], + ['%d.%m.%Y', '%d.%m.%Y | 14.01.2019'], ['%Y-%m-%d', '%Y-%m-%d | 2019-01-14'], ['%Y-%m-%d %H:%M:%S', '%Y-%m-%d %H:%M:%S | 2019-01-14 01:32:10'], ['%d-%m-%Y %H:%M:%S', '%d-%m-%Y %H:%M:%S | 14-01-2019 01:32:10'], diff --git a/superset-frontend/packages/superset-ui-chart-controls/test/operators/aggregateOperator.test.ts b/superset-frontend/packages/superset-ui-chart-controls/test/operators/aggregateOperator.test.ts new file mode 100644 index 00000000000..52e3e454078 --- /dev/null +++ b/superset-frontend/packages/superset-ui-chart-controls/test/operators/aggregateOperator.test.ts @@ -0,0 +1,121 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +import { QueryObject, SqlaFormData, VizType } from '@superset-ui/core'; +import { aggregationOperator } from '@superset-ui/chart-controls'; + +describe('aggregationOperator', () => { + const formData: SqlaFormData = { + metrics: [ + 'count(*)', + { label: 'sum(val)', expressionType: 'SQL', sqlExpression: 'sum(val)' }, + ], + time_range: '2015 : 2016', + granularity: 'month', + datasource: 'foo', + viz_type: VizType.Table, + }; + + const queryObject: QueryObject = { + metrics: [ + 'count(*)', + { label: 'sum(val)', expressionType: 'SQL', sqlExpression: 'sum(val)' }, + ], + time_range: '2015 : 2016', + granularity: 'month', + }; + + test('should return undefined for LAST_VALUE aggregation', () => { + const formDataWithLastValue = { + ...formData, + aggregation: 'LAST_VALUE', + }; + + expect( + aggregationOperator(formDataWithLastValue, queryObject), + ).toBeUndefined(); + }); + + test('should return undefined when metrics is empty', () => { + const queryObjectWithoutMetrics = { + ...queryObject, + metrics: [], + }; + + const formDataWithSum = { + ...formData, + aggregation: 'sum', + }; + + expect( + aggregationOperator(formDataWithSum, queryObjectWithoutMetrics), + ).toBeUndefined(); + }); + + test('should apply sum aggregation to all metrics', () => { + const formDataWithSum = { + ...formData, + aggregation: 'sum', + }; + + expect(aggregationOperator(formDataWithSum, queryObject)).toEqual({ + operation: 'aggregate', + options: { + groupby: [], + aggregates: { + 'count(*)': { + operator: 'sum', + column: 'count(*)', + }, + 'sum(val)': { + operator: 'sum', + column: 'sum(val)', + }, + }, + }, + }); + }); + + test('should apply mean aggregation to all metrics', () => { + const formDataWithMean = { + ...formData, + aggregation: 'mean', + }; + + expect(aggregationOperator(formDataWithMean, queryObject)).toEqual({ + operation: 'aggregate', + options: { + groupby: [], + aggregates: { + 'count(*)': { + operator: 'mean', + column: 'count(*)', + }, + 'sum(val)': { + 
operator: 'mean', + column: 'sum(val)', + }, + }, + }, + }); + }); + + test('should use default aggregation when not specified', () => { + expect(aggregationOperator(formData, queryObject)).toBeUndefined(); + }); +}); diff --git a/superset-frontend/packages/superset-ui-chart-controls/test/operators/timeCompareOperator.test.ts b/superset-frontend/packages/superset-ui-chart-controls/test/operators/timeCompareOperator.test.ts index c7861af2ee5..437d3064bf5 100644 --- a/superset-frontend/packages/superset-ui-chart-controls/test/operators/timeCompareOperator.test.ts +++ b/superset-frontend/packages/superset-ui-chart-controls/test/operators/timeCompareOperator.test.ts @@ -54,7 +54,7 @@ const queryObject: QueryObject = { }, }, { - operation: 'aggregation', + operation: 'aggregate', options: { groupby: ['col1'], aggregates: {}, diff --git a/superset-frontend/packages/superset-ui-core/src/query/types/PostProcessing.ts b/superset-frontend/packages/superset-ui-core/src/query/types/PostProcessing.ts index a70d0111f2e..79bcabdaff1 100644 --- a/superset-frontend/packages/superset-ui-core/src/query/types/PostProcessing.ts +++ b/superset-frontend/packages/superset-ui-core/src/query/types/PostProcessing.ts @@ -67,7 +67,7 @@ export interface Aggregates { export type DefaultPostProcessing = undefined; interface _PostProcessingAggregation { - operation: 'aggregation'; + operation: 'aggregate'; options: { groupby: string[]; aggregates: Aggregates; @@ -271,7 +271,7 @@ export type PostProcessingRule = export function isPostProcessingAggregation( rule?: PostProcessingRule, ): rule is PostProcessingAggregation { - return rule?.operation === 'aggregation'; + return rule?.operation === 'aggregate'; } export function isPostProcessingBoxplot( diff --git a/superset-frontend/packages/superset-ui-core/test/query/types/PostProcessing.test.ts b/superset-frontend/packages/superset-ui-core/test/query/types/PostProcessing.test.ts index 05c385fb4e6..4e4ff949cbd 100644 --- 
a/superset-frontend/packages/superset-ui-core/test/query/types/PostProcessing.test.ts +++ b/superset-frontend/packages/superset-ui-core/test/query/types/PostProcessing.test.ts @@ -61,7 +61,7 @@ const AGGREGATES_OPTION: Aggregates = { }; const AGGREGATE_RULE: PostProcessingAggregation = { - operation: 'aggregation', + operation: 'aggregate', options: { groupby: ['foo'], aggregates: AGGREGATES_OPTION, diff --git a/superset-frontend/packages/superset-ui-core/test/time-comparison/getComparisonInfo.test.ts b/superset-frontend/packages/superset-ui-core/test/time-comparison/getComparisonInfo.test.ts index 9540badf334..ffaa58f48f7 100644 --- a/superset-frontend/packages/superset-ui-core/test/time-comparison/getComparisonInfo.test.ts +++ b/superset-frontend/packages/superset-ui-core/test/time-comparison/getComparisonInfo.test.ts @@ -63,6 +63,11 @@ const form_data = { header_font_size: 60, subheader_font_size: 26, comparison_color_enabled: true, + column_config: { + name: { + visible: true, + }, + }, extra_form_data: {}, force: false, result_format: 'json', @@ -142,7 +147,7 @@ describe('getComparisonInfo', () => { expect(resultFormData.adhoc_filters?.[0]).toEqual(expectedFilters[0]); }); - it('If adhoc_filter is undefrined the code wont break', () => { + it('If adhoc_filter is undefined the code wont break', () => { const resultFormData = getComparisonInfo( { ...form_data, @@ -175,4 +180,21 @@ describe('getComparisonInfo', () => { expect(resultFormData.adhoc_filters?.length).toEqual(1); expect(resultFormData.adhoc_filters).toEqual(expectedFilters); }); + + it('Updates comparison display values when toggled', () => { + const resultFormData = getComparisonInfo( + { + ...form_data, + column_config: { + name: { + visible: false, + }, + }, + }, + ComparisonTimeRangeType.Year, + {}, + ); + + expect(resultFormData.column_config.name.visible).toEqual(false); + }); }); diff --git a/superset-frontend/plugins/plugin-chart-echarts/src/BigNumber/BigNumberPeriodOverPeriod/PopKPI.tsx 
b/superset-frontend/plugins/plugin-chart-echarts/src/BigNumber/BigNumberPeriodOverPeriod/PopKPI.tsx index d4e5b2de0fe..63fef6432a5 100644 --- a/superset-frontend/plugins/plugin-chart-echarts/src/BigNumber/BigNumberPeriodOverPeriod/PopKPI.tsx +++ b/superset-frontend/plugins/plugin-chart-echarts/src/BigNumber/BigNumberPeriodOverPeriod/PopKPI.tsx @@ -198,16 +198,19 @@ export default function PopKPI(props: PopKPIProps) { symbol: '#', value: prevNumber, tooltipText: t('Data for %s', comparisonRange || 'previous range'), + columnKey: 'Previous value', }, { symbol: '△', value: valueDifference, tooltipText: t('Value difference between the time periods'), + columnKey: 'Delta', }, { symbol: '%', value: percentDifferenceFormattedString, tooltipText: t('Percentage difference between the time periods'), + columnKey: 'Percent change', }, ], [ @@ -218,6 +221,10 @@ export default function PopKPI(props: PopKPIProps) { ], ); + const visibleSymbols = SYMBOLS_WITH_VALUES.filter( + symbol => props.columnConfig?.[symbol.columnKey]?.visible !== false, + ); + const { isOverflowing, symbolContainerRef, wrapperRef } = useOverflowDetection(flexGap); @@ -242,51 +249,53 @@ export default function PopKPI(props: PopKPIProps) { )} -
- {SYMBOLS_WITH_VALUES.map((symbol_with_value, index) => ( - - 0 && ( +
+ {visibleSymbols.map((symbol_with_value, index) => ( + - 0 ? backgroundColor : defaultBackgroundColor - } - textColor={index > 0 ? textColor : defaultTextColor} + - {symbol_with_value.symbol} - - {symbol_with_value.value} - - - ))} -
+ 0 ? backgroundColor : defaultBackgroundColor + } + textColor={index > 0 ? textColor : defaultTextColor} + > + {symbol_with_value.symbol} + + {symbol_with_value.value} +
+
+ ))} +
+ )} ); diff --git a/superset-frontend/plugins/plugin-chart-echarts/src/BigNumber/BigNumberPeriodOverPeriod/controlPanel.ts b/superset-frontend/plugins/plugin-chart-echarts/src/BigNumber/BigNumberPeriodOverPeriod/controlPanel.ts index ce934c43360..bb285a70b0c 100644 --- a/superset-frontend/plugins/plugin-chart-echarts/src/BigNumber/BigNumberPeriodOverPeriod/controlPanel.ts +++ b/superset-frontend/plugins/plugin-chart-echarts/src/BigNumber/BigNumberPeriodOverPeriod/controlPanel.ts @@ -16,7 +16,7 @@ * specific language governing permissions and limitations * under the License. */ -import { t } from '@superset-ui/core'; +import { t, GenericDataType } from '@superset-ui/core'; import { ControlPanelConfig, getStandardizedControls, @@ -106,6 +106,42 @@ const config: ControlPanelConfig = { }, }, ], + [ + { + name: 'column_config', + config: { + type: 'ColumnConfigControl', + label: t('Customize columns'), + description: t('Further customize how to display each column'), + width: 400, + height: 320, + renderTrigger: true, + configFormLayout: { + [GenericDataType.Numeric]: [ + { + tab: t('General'), + children: [['visible']], + }, + ], + }, + shouldMapStateToProps() { + return true; + }, + mapStateToProps(explore, _, chart) { + return { + columnsPropsObject: { + colnames: ['Previous value', 'Delta', 'Percent change'], + coltypes: [ + GenericDataType.Numeric, + GenericDataType.Numeric, + GenericDataType.Numeric, + ], + }, + }; + }, + }, + }, + ], ], }, sections.timeComparisonControls({ diff --git a/superset-frontend/plugins/plugin-chart-echarts/src/BigNumber/BigNumberPeriodOverPeriod/transformProps.ts b/superset-frontend/plugins/plugin-chart-echarts/src/BigNumber/BigNumberPeriodOverPeriod/transformProps.ts index b434fbbc58e..9adf3e1fba7 100644 --- a/superset-frontend/plugins/plugin-chart-echarts/src/BigNumber/BigNumberPeriodOverPeriod/transformProps.ts +++ b/superset-frontend/plugins/plugin-chart-echarts/src/BigNumber/BigNumberPeriodOverPeriod/transformProps.ts @@ -89,6 
+89,7 @@ export default function transformProps(chartProps: ChartProps) { comparisonColorScheme, comparisonColorEnabled, percentDifferenceFormat, + columnConfig, } = formData; const { data: dataA = [] } = queriesData[0]; const data = dataA; @@ -193,5 +194,6 @@ export default function transformProps(chartProps: ChartProps) { startDateOffset, shift: timeComparison, dashboardTimeRange: formData?.extraFormData?.time_range, + columnConfig, }; } diff --git a/superset-frontend/plugins/plugin-chart-echarts/src/BigNumber/BigNumberPeriodOverPeriod/types.ts b/superset-frontend/plugins/plugin-chart-echarts/src/BigNumber/BigNumberPeriodOverPeriod/types.ts index a2dbb1f29a2..8036447aa83 100644 --- a/superset-frontend/plugins/plugin-chart-echarts/src/BigNumber/BigNumberPeriodOverPeriod/types.ts +++ b/superset-frontend/plugins/plugin-chart-echarts/src/BigNumber/BigNumberPeriodOverPeriod/types.ts @@ -34,6 +34,10 @@ export interface PopKPIStylesProps { comparisonColorEnabled: boolean; } +export type TableColumnConfig = { + visible?: boolean; +}; + interface PopKPICustomizeProps { headerText: string; } @@ -67,6 +71,7 @@ export type PopKPIProps = PopKPIStylesProps & startDateOffset?: string; shift: string; dashboardTimeRange?: string; + columnConfig?: Record; }; export enum ColorSchemeEnum { diff --git a/superset-frontend/plugins/plugin-chart-echarts/src/BigNumber/BigNumberWithTrendline/buildQuery.ts b/superset-frontend/plugins/plugin-chart-echarts/src/BigNumber/BigNumberWithTrendline/buildQuery.ts index 7a0ba462b88..398125719b1 100644 --- a/superset-frontend/plugins/plugin-chart-echarts/src/BigNumber/BigNumberWithTrendline/buildQuery.ts +++ b/superset-frontend/plugins/plugin-chart-echarts/src/BigNumber/BigNumberWithTrendline/buildQuery.ts @@ -24,6 +24,7 @@ import { QueryFormData, } from '@superset-ui/core'; import { + aggregationOperator, flattenOperator, pivotOperator, resampleOperator, @@ -47,5 +48,19 @@ export default function buildQuery(formData: QueryFormData) { 
flattenOperator(formData, baseQueryObject), ], }, + + { + ...baseQueryObject, + columns: [ + ...(isXAxisSet(formData) + ? ensureIsArray(getXAxisColumn(formData)) + : []), + ], + ...(isXAxisSet(formData) ? {} : { is_timeseries: true }), + post_processing: [ + pivotOperator(formData, baseQueryObject), + aggregationOperator(formData, baseQueryObject), + ], + }, ]); } diff --git a/superset-frontend/plugins/plugin-chart-echarts/src/BigNumber/BigNumberWithTrendline/controlPanel.tsx b/superset-frontend/plugins/plugin-chart-echarts/src/BigNumber/BigNumberWithTrendline/controlPanel.tsx index 83cf915c7ce..ea8f9c66f48 100644 --- a/superset-frontend/plugins/plugin-chart-echarts/src/BigNumber/BigNumberWithTrendline/controlPanel.tsx +++ b/superset-frontend/plugins/plugin-chart-echarts/src/BigNumber/BigNumberWithTrendline/controlPanel.tsx @@ -18,6 +18,7 @@ */ import { SMART_DATE_ID, t } from '@superset-ui/core'; import { + aggregationControl, ControlPanelConfig, ControlSubSectionHeader, D3_FORMAT_DOCS, @@ -35,6 +36,7 @@ const config: ControlPanelConfig = { controlSetRows: [ ['x_axis'], ['time_grain_sqla'], + [aggregationControl], ['metric'], ['adhoc_filters'], ], diff --git a/superset-frontend/plugins/plugin-chart-echarts/src/BigNumber/BigNumberWithTrendline/transformProps.ts b/superset-frontend/plugins/plugin-chart-echarts/src/BigNumber/BigNumberWithTrendline/transformProps.ts index d285a551b13..53a44d9e3b0 100644 --- a/superset-frontend/plugins/plugin-chart-echarts/src/BigNumber/BigNumberWithTrendline/transformProps.ts +++ b/superset-frontend/plugins/plugin-chart-echarts/src/BigNumber/BigNumberWithTrendline/transformProps.ts @@ -66,6 +66,7 @@ export default function transformProps( metric = 'value', showTimestamp, showTrendLine, + aggregation, startYAxisAtZero, subheader = '', subheaderFontSize, @@ -82,6 +83,15 @@ export default function transformProps( from_dttm: fromDatetime, to_dttm: toDatetime, } = queriesData[0]; + + const aggregatedQueryData = queriesData.length > 1 ? 
queriesData[1] : null; + + const hasAggregatedData = + aggregatedQueryData?.data && + aggregatedQueryData.data.length > 0 && + aggregation !== 'LAST_VALUE'; + + const aggregatedData = hasAggregatedData ? aggregatedQueryData.data[0] : null; const refs: Refs = {}; const metricName = getMetricLabel(metric); const compareLag = Number(compareLag_) || 0; @@ -95,18 +105,39 @@ export default function transformProps( let percentChange = 0; let bigNumber = data.length === 0 ? null : data[0][metricName]; let timestamp = data.length === 0 ? null : data[0][xAxisLabel]; - let bigNumberFallback; - - const metricColtypeIndex = colnames.findIndex(name => name === metricName); - const metricColtype = - metricColtypeIndex > -1 ? coltypes[metricColtypeIndex] : null; + let bigNumberFallback = null; + let sortedData: [number | null, number | null][] = []; if (data.length > 0) { - const sortedData = (data as BigNumberDatum[]) - .map(d => [d[xAxisLabel], parseMetricValue(d[metricName])]) + sortedData = (data as BigNumberDatum[]) + .map( + d => + [d[xAxisLabel], parseMetricValue(d[metricName])] as [ + number | null, + number | null, + ], + ) // sort in time descending order .sort((a, b) => (a[0] !== null && b[0] !== null ? b[0] - a[0] : 0)); + } + if (hasAggregatedData && aggregatedData) { + if ( + aggregatedData[metricName] !== null && + aggregatedData[metricName] !== undefined + ) { + bigNumber = aggregatedData[metricName]; + } else { + const metricKeys = Object.keys(aggregatedData).filter( + key => + key !== xAxisLabel && + aggregatedData[key] !== null && + typeof aggregatedData[key] === 'number', + ); + bigNumber = metricKeys.length > 0 ? aggregatedData[metricKeys[0]] : null; + } + timestamp = sortedData.length > 0 ? sortedData[0][0] : null; + } else if (sortedData.length > 0) { bigNumber = sortedData[0][1]; timestamp = sortedData[0][0]; @@ -115,25 +146,28 @@ export default function transformProps( bigNumber = bigNumberFallback ? 
bigNumberFallback[1] : null; timestamp = bigNumberFallback ? bigNumberFallback[0] : null; } + } - if (compareLag > 0) { - const compareIndex = compareLag; - if (compareIndex < sortedData.length) { - const compareValue = sortedData[compareIndex][1]; - // compare values must both be non-nulls - if (bigNumber !== null && compareValue !== null) { - percentChange = compareValue - ? (bigNumber - compareValue) / Math.abs(compareValue) - : 0; - formattedSubheader = `${formatPercentChange( - percentChange, - )} ${compareSuffix}`; - } + if (compareLag > 0 && sortedData.length > 0) { + const compareIndex = compareLag; + if (compareIndex < sortedData.length) { + const compareValue = sortedData[compareIndex][1]; + // compare values must both be non-nulls + if (bigNumber !== null && compareValue !== null) { + percentChange = compareValue + ? (Number(bigNumber) - compareValue) / Math.abs(compareValue) + : 0; + formattedSubheader = `${formatPercentChange( + percentChange, + )} ${compareSuffix}`; } } - sortedData.reverse(); + } + + if (data.length > 0) { + const reversedData = [...sortedData].reverse(); // @ts-ignore - trendLineData = showTrendLine ? sortedData : undefined; + trendLineData = showTrendLine ? reversedData : undefined; } let className = ''; @@ -143,6 +177,10 @@ export default function transformProps( className = 'negative'; } + const metricColtypeIndex = colnames.findIndex(name => name === metricName); + const metricColtype = + metricColtypeIndex > -1 ? 
coltypes[metricColtypeIndex] : null; + let metricEntry: Metric | undefined; if (chartProps.datasource?.metrics) { metricEntry = chartProps.datasource.metrics.find( diff --git a/superset-frontend/plugins/plugin-chart-echarts/test/BigNumber/transformProps.test.ts b/superset-frontend/plugins/plugin-chart-echarts/test/BigNumber/transformProps.test.ts index 8b0bf355258..4ccedd1e7f2 100644 --- a/superset-frontend/plugins/plugin-chart-echarts/test/BigNumber/transformProps.test.ts +++ b/superset-frontend/plugins/plugin-chart-echarts/test/BigNumber/transformProps.test.ts @@ -186,3 +186,188 @@ describe('BigNumberWithTrendline', () => { }); }); }); + +describe('BigNumberWithTrendline - Aggregation Tests', () => { + const baseProps = { + width: 800, + height: 600, + formData: { + colorPicker: { r: 0, g: 0, b: 0, a: 1 }, + metric: 'metric', + aggregation: 'LAST_VALUE', + }, + queriesData: [ + { + data: [ + { __timestamp: 1607558400000, metric: 10 }, + { __timestamp: 1607558500000, metric: 30 }, + { __timestamp: 1607558600000, metric: 50 }, + { __timestamp: 1607558700000, metric: 60 }, + ], + colnames: ['__timestamp', 'metric'], + coltypes: ['TIMESTAMP', 'BIGINT'], + }, + ], + hooks: {}, + filterState: {}, + datasource: { + columnFormats: {}, + currencyFormats: {}, + }, + rawDatasource: {}, + rawFormData: {}, + theme: { + colors: { + grayscale: { + light5: '#fafafa', + }, + }, + }, + } as unknown as BigNumberWithTrendlineChartProps; + + const propsWithEvenData = { + ...baseProps, + queriesData: [ + { + data: [ + { __timestamp: 1607558400000, metric: 10 }, + { __timestamp: 1607558500000, metric: 20 }, + { __timestamp: 1607558600000, metric: 30 }, + { __timestamp: 1607558700000, metric: 40 }, + ], + colnames: ['__timestamp', 'metric'], + coltypes: ['TIMESTAMP', 'BIGINT'], + }, + ], + } as unknown as BigNumberWithTrendlineChartProps; + + it('should correctly calculate SUM', () => { + const props = { + ...baseProps, + formData: { ...baseProps.formData, aggregation: 'sum' }, + 
queriesData: [ + baseProps.queriesData[0], + { + data: [{ metric: 150 }], + colnames: ['metric'], + coltypes: ['BIGINT'], + }, + ], + } as unknown as BigNumberWithTrendlineChartProps; + + const transformed = transformProps(props); + expect(transformed.bigNumber).toStrictEqual(150); + }); + + it('should correctly calculate AVG', () => { + const props = { + ...baseProps, + formData: { ...baseProps.formData, aggregation: 'mean' }, + queriesData: [ + baseProps.queriesData[0], + { + data: [{ metric: 37.5 }], + colnames: ['metric'], + coltypes: ['BIGINT'], + }, + ], + } as unknown as BigNumberWithTrendlineChartProps; + + const transformed = transformProps(props); + expect(transformed.bigNumber).toStrictEqual(37.5); + }); + + it('should correctly calculate MIN', () => { + const props = { + ...baseProps, + formData: { ...baseProps.formData, aggregation: 'min' }, + queriesData: [ + baseProps.queriesData[0], + { + data: [{ metric: 10 }], + colnames: ['metric'], + coltypes: ['BIGINT'], + }, + ], + } as unknown as BigNumberWithTrendlineChartProps; + + const transformed = transformProps(props); + expect(transformed.bigNumber).toStrictEqual(10); + }); + + it('should correctly calculate MAX', () => { + const props = { + ...baseProps, + formData: { ...baseProps.formData, aggregation: 'max' }, + queriesData: [ + baseProps.queriesData[0], + { + data: [{ metric: 60 }], + colnames: ['metric'], + coltypes: ['BIGINT'], + }, + ], + } as unknown as BigNumberWithTrendlineChartProps; + + const transformed = transformProps(props); + expect(transformed.bigNumber).toStrictEqual(60); + }); + + it('should correctly calculate MEDIAN (odd count)', () => { + const oddCountProps = { + ...baseProps, + queriesData: [ + { + data: [ + { __timestamp: 1607558300000, metric: 10 }, + { __timestamp: 1607558400000, metric: 20 }, + { __timestamp: 1607558500000, metric: 30 }, + { __timestamp: 1607558600000, metric: 40 }, + { __timestamp: 1607558700000, metric: 50 }, + ], + colnames: ['__timestamp', 'metric'], + 
coltypes: ['TIMESTAMP', 'BIGINT'], + }, + ], + } as unknown as BigNumberWithTrendlineChartProps; + + const props = { + ...oddCountProps, + formData: { ...oddCountProps.formData, aggregation: 'median' }, + queriesData: [ + oddCountProps.queriesData[0], + { + data: [{ metric: 30 }], + colnames: ['metric'], + coltypes: ['BIGINT'], + }, + ], + } as unknown as BigNumberWithTrendlineChartProps; + + const transformed = transformProps(props); + expect(transformed.bigNumber).toStrictEqual(30); + }); + + it('should correctly calculate MEDIAN (even count)', () => { + const props = { + ...propsWithEvenData, + formData: { ...propsWithEvenData.formData, aggregation: 'median' }, + queriesData: [ + propsWithEvenData.queriesData[0], + { + data: [{ metric: 25 }], + colnames: ['metric'], + coltypes: ['BIGINT'], + }, + ], + } as unknown as BigNumberWithTrendlineChartProps; + + const transformed = transformProps(props); + expect(transformed.bigNumber).toStrictEqual(25); + }); + + it('should return the LAST_VALUE correctly', () => { + const transformed = transformProps(baseProps); + expect(transformed.bigNumber).toStrictEqual(10); + }); +}); diff --git a/superset-frontend/src/components/JsonModal/JsonModal.test.tsx b/superset-frontend/src/components/JsonModal/JsonModal.test.tsx index f0e2230f5a8..44f01404a3b 100644 --- a/superset-frontend/src/components/JsonModal/JsonModal.test.tsx +++ b/superset-frontend/src/components/JsonModal/JsonModal.test.tsx @@ -42,6 +42,21 @@ test('renders JSON object in a tree view in a modal', () => { expect(getByTestId('mock-json-tree')).toBeInTheDocument(); }); +test('renders an object in a tree view in a modal', () => { + const jsonData = { a: 1 }; + const expected = JSON.stringify(jsonData); + const { getByText, getByTestId, queryByTestId } = render( + , + { + useRedux: true, + }, + ); + expect(queryByTestId('mock-json-tree')).not.toBeInTheDocument(); + const link = getByText(expected); + fireEvent.click(link); + 
expect(getByTestId('mock-json-tree')).toBeInTheDocument(); +}); + test('renders bigInt value in a number format', () => { expect(convertBigIntStrToNumber('123')).toBe('123'); expect(convertBigIntStrToNumber('some string value')).toBe( diff --git a/superset-frontend/src/components/JsonModal/index.tsx b/superset-frontend/src/components/JsonModal/index.tsx index e599f483dcd..79ff25ef5af 100644 --- a/superset-frontend/src/components/JsonModal/index.tsx +++ b/superset-frontend/src/components/JsonModal/index.tsx @@ -36,7 +36,7 @@ * under the License. */ import JSONbig from 'json-bigint'; -import { FC } from 'react'; +import { FC, useMemo } from 'react'; import { JSONTree } from 'react-json-tree'; import { useJsonTreeTheme } from 'src/hooks/useJsonTreeTheme'; import Button from '../Button'; @@ -46,6 +46,10 @@ import ModalTrigger from '../ModalTrigger'; export function safeJsonObjectParse( data: unknown, ): null | unknown[] | Record { + if (typeof data === 'object') { + return data as null | unknown[] | Record; + } + // First perform a cheap proxy to avoid calling JSON.parse on data that is clearly not a // JSON object or array if ( @@ -78,7 +82,7 @@ function renderBigIntStrToNumber(value: string | number) { return <>{convertBigIntStrToNumber(value)}; } -type CellDataType = string | number | null; +type CellDataType = string | number | null | object; export interface Props { modalTitle: string; @@ -88,6 +92,11 @@ export interface Props { export const JsonModal: FC = ({ modalTitle, jsonObject, jsonValue }) => { const jsonTreeTheme = useJsonTreeTheme(); + const content = useMemo( + () => + typeof jsonValue === 'object' ? 
JSON.stringify(jsonValue) : jsonValue, + [jsonValue], + ); return ( = ({ modalTitle, jsonObject, jsonValue }) => { } modalFooter={ } modalTitle={modalTitle} - triggerNode={<>{jsonValue}} + triggerNode={<>{content}} /> ); }; diff --git a/superset-frontend/src/explore/components/controls/ColumnConfigControl/constants.tsx b/superset-frontend/src/explore/components/controls/ColumnConfigControl/constants.tsx index ea58e4777d8..2d98f1bb434 100644 --- a/superset-frontend/src/explore/components/controls/ColumnConfigControl/constants.tsx +++ b/superset-frontend/src/explore/components/controls/ColumnConfigControl/constants.tsx @@ -38,7 +38,8 @@ export type SharedColumnConfigProp = | 'horizontalAlign' | 'truncateLongCells' | 'showCellBars' - | 'currencyFormat'; + | 'currencyFormat' + | 'visible'; const d3NumberFormat: ControlFormItemSpec<'Select'> = { allowNewOptions: true, @@ -152,6 +153,14 @@ const currencyFormat: ControlFormItemSpec<'CurrencyControl'> = { ), debounceDelay: 200, }; + +const visible: ControlFormItemSpec<'Checkbox'> = { + controlType: 'Checkbox', + label: t('Display in chart'), + description: t('Whether to display in the chart'), + defaultValue: true, + debounceDelay: 200, +}; /** * All configurable column formatting properties. 
*/ @@ -174,6 +183,7 @@ export const SHARED_COLUMN_CONFIG_PROPS = { alignPositiveNegative, colorPositiveNegative, currencyFormat, + visible, }; export const DEFAULT_CONFIG_FORM_LAYOUT: ColumnConfigFormLayout = { diff --git a/superset/config.py b/superset/config.py index b604094bfec..fc586d2bb9f 100644 --- a/superset/config.py +++ b/superset/config.py @@ -807,7 +807,7 @@ STORE_CACHE_KEYS_IN_METADATA_DB = False # CORS Options # NOTE: enabling this requires installing the cors-related python dependencies -# `pip install .[cors]` or `pip install apache-superset[cors]`, depending +# `pip install .[cors]` or `pip install apache_superset[cors]`, depending ENABLE_CORS = False CORS_OPTIONS: dict[Any, Any] = {} @@ -1006,6 +1006,7 @@ class CeleryConfig: # pylint: disable=too-few-public-methods "superset.tasks.scheduler", "superset.tasks.thumbnails", "superset.tasks.cache", + "superset.tasks.slack", ) result_backend = "db+sqlite:///celery_results.sqlite" worker_prefetch_multiplier = 1 @@ -1037,6 +1038,11 @@ class CeleryConfig: # pylint: disable=too-few-public-methods # "schedule": crontab(minute="*", hour="*"), # "kwargs": {"retention_period_days": 180}, # }, + # Uncomment to enable Slack channel cache warm-up + # "slack.cache_channels": { + # "task": "slack.cache_channels", + # "schedule": crontab(minute="0", hour="*"), + # }, } @@ -1479,6 +1485,7 @@ EMAIL_REPORTS_CTA = "Explore in Superset" # Slack API token for the superset reports, either string or callable SLACK_API_TOKEN: Callable[[], str] | str | None = None SLACK_PROXY = None +SLACK_CACHE_TIMEOUT = int(timedelta(days=1).total_seconds()) # The webdriver to use for generating reports. 
Use one of the following # firefox diff --git a/superset/dashboards/api.py b/superset/dashboards/api.py index c7becfe8a16..b578ee081d0 100644 --- a/superset/dashboards/api.py +++ b/superset/dashboards/api.py @@ -1087,16 +1087,19 @@ class DashboardRestApi(BaseSupersetModelRestApi): "urlParams": payload.get("urlParams", []), } - permalink_key = CreateDashboardPermalinkCommand( - dashboard_id=str(dashboard.id), - state=dashboard_state, - ).run() + # if the permalink key is provided, dashboard_state will be ignored + # else, create a permalink key from the dashboard_state + permalink_key = ( + payload.get("permalinkKey", None) + or CreateDashboardPermalinkCommand( + dashboard_id=str(dashboard.id), + state=dashboard_state, + ).run() + ) dashboard_url = get_url_path("Superset.dashboard_permalink", key=permalink_key) screenshot_obj = DashboardScreenshot(dashboard_url, dashboard.digest) - cache_key = screenshot_obj.get_cache_key( - window_size, thumb_size, dashboard_state - ) + cache_key = screenshot_obj.get_cache_key(window_size, thumb_size, permalink_key) image_url = get_url_path( "DashboardRestApi.screenshot", pk=dashboard.id, digest=cache_key ) diff --git a/superset/dashboards/schemas.py b/superset/dashboards/schemas.py index ded83246691..88ad279855a 100644 --- a/superset/dashboards/schemas.py +++ b/superset/dashboards/schemas.py @@ -521,3 +521,4 @@ class CacheScreenshotSchema(Schema): urlParams = fields.List( # noqa: N815 fields.List(fields.Str(), validate=lambda x: len(x) == 2), required=False ) + permalinkKey = fields.Str(required=False) # noqa: N815 diff --git a/superset/jinja_context.py b/superset/jinja_context.py index c32d097b5bf..c4182b136eb 100644 --- a/superset/jinja_context.py +++ b/superset/jinja_context.py @@ -33,6 +33,7 @@ from sqlalchemy.engine.interfaces import Dialect from sqlalchemy.sql.expression import bindparam from sqlalchemy.types import String +from superset import security_manager from superset.commands.dataset.exceptions import 
DatasetNotFoundError from superset.common.utils.time_range_utils import get_since_until_from_time_range from superset.constants import LRU_CACHE_MAX_SIZE, NO_TIME_RANGE @@ -46,7 +47,6 @@ from superset.utils.core import ( FilterOperator, get_user_email, get_user_id, - get_user_roles, get_username, merge_extra_filters, ) @@ -176,17 +176,22 @@ class ExtraCache: def current_user_roles(self, add_to_cache_keys: bool = True) -> list[str] | None: """ - Return the list of roles of the user who is currently logged in. + Return the sorted list of roles of the user who is currently logged in. :param add_to_cache_keys: Whether the value should be included in the cache key :returns: List of role names """ - - if user_roles := get_user_roles(): + try: + user_roles = sorted( + [role.name for role in security_manager.get_user_roles()] + ) + if not user_roles: + return None if add_to_cache_keys: self.cache_key_wrapper(json.dumps(user_roles)) return user_roles - return None + except Exception: # pylint: disable=broad-except + return None def cache_key_wrapper(self, key: Any) -> Any: """ diff --git a/superset/tasks/slack.py b/superset/tasks/slack.py new file mode 100644 index 00000000000..0b35a721bb5 --- /dev/null +++ b/superset/tasks/slack.py @@ -0,0 +1,35 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. 
See the License for the +# specific language governing permissions and limitations +# under the License. +import logging + +from flask import current_app + +from superset.extensions import celery_app +from superset.utils.slack import get_channels + +logger = logging.getLogger(__name__) + + +@celery_app.task(name="slack.cache_channels") +def cache_channels() -> None: + try: + get_channels( + force=True, cache_timeout=current_app.config["SLACK_CACHE_TIMEOUT"] + ) + except Exception as ex: + logger.exception("An error occurred while caching Slack channels: %s", ex) + raise diff --git a/superset/utils/core.py b/superset/utils/core.py index 6caea19b1e2..2b80c89f612 100644 --- a/superset/utils/core.py +++ b/superset/utils/core.py @@ -1292,19 +1292,6 @@ def get_user_email() -> str | None: return None -def get_user_roles() -> list[str] | None: - """ - Get the roles (if defined) associated with the current user. - - :returns: The sorted list of roles - """ - - try: - return sorted([role.name for role in g.user.roles]) - except Exception: # pylint: disable=broad-except - return None - - @contextmanager def override_user(user: User | None, force: bool = True) -> Iterator[Any]: """ diff --git a/superset/utils/screenshots.py b/superset/utils/screenshots.py index 74a1f0746fe..cf28dcf916c 100644 --- a/superset/utils/screenshots.py +++ b/superset/utils/screenshots.py @@ -26,7 +26,6 @@ from typing import cast, TYPE_CHECKING, TypedDict from flask import current_app from superset import app, feature_flag_manager, thumbnail_cache -from superset.dashboards.permalink.types import DashboardPermalinkState from superset.extensions import event_logger from superset.utils.hashing import md5_sha_from_dict from superset.utils.urls import modify_url_query @@ -349,7 +348,7 @@ class DashboardScreenshot(BaseScreenshot): self, window_size: bool | WindowSize | None = None, thumb_size: bool | WindowSize | None = None, - dashboard_state: DashboardPermalinkState | None = None, + permalink_key: str | 
None = None, ) -> str: window_size = window_size or self.window_size thumb_size = thumb_size or self.thumb_size @@ -359,6 +358,6 @@ class DashboardScreenshot(BaseScreenshot): "type": "thumb", "window_size": window_size, "thumb_size": thumb_size, - "dashboard_state": dashboard_state, + "permalink_key": permalink_key, } return md5_sha_from_dict(args) diff --git a/superset/utils/slack.py b/superset/utils/slack.py index 8125a3ac401..34d48bef21b 100644 --- a/superset/utils/slack.py +++ b/superset/utils/slack.py @@ -17,7 +17,7 @@ import logging -from typing import Any, Optional +from typing import Callable, Optional from flask import current_app from slack_sdk import WebClient @@ -60,7 +60,7 @@ def get_slack_client() -> WebClient: key="slack_conversations_list", cache=cache_manager.cache, ) -def get_channels(limit: int, extra_params: dict[str, Any]) -> list[SlackChannelSchema]: +def get_channels() -> list[SlackChannelSchema]: """ Retrieves a list of all conversations accessible by the bot from the Slack API, and caches results (to avoid rate limits). 
@@ -71,11 +71,12 @@ def get_channels(limit: int, extra_params: dict[str, Any]) -> list[SlackChannelS client = get_slack_client() channel_schema = SlackChannelSchema() channels: list[SlackChannelSchema] = [] + extra_params = {"types": ",".join(SlackChannelTypes)} cursor = None while True: response = client.conversations_list( - limit=limit, cursor=cursor, exclude_archived=True, **extra_params + limit=999, cursor=cursor, exclude_archived=True, **extra_params ) channels.extend( channel_schema.load(channel) for channel in response.data["channels"] @@ -89,7 +90,6 @@ def get_channels(limit: int, extra_params: dict[str, Any]) -> list[SlackChannelS def get_channels_with_search( search_string: str = "", - limit: int = 999, types: Optional[list[SlackChannelTypes]] = None, exact_match: bool = False, force: bool = False, @@ -99,18 +99,25 @@ def get_channels_with_search( all channels and filter them ourselves This will search by slack name or id """ - extra_params = {} - extra_params["types"] = ",".join(types) if types else None try: channels = get_channels( - limit=limit, - extra_params=extra_params, force=force, - cache_timeout=86400, + cache_timeout=current_app.config["SLACK_CACHE_TIMEOUT"], ) except (SlackClientError, SlackApiError) as ex: raise SupersetException(f"Failed to list channels: {ex}") from ex + if types and not len(types) == len(SlackChannelTypes): + conditions: list[Callable[[SlackChannelSchema], bool]] = [] + if SlackChannelTypes.PUBLIC in types: + conditions.append(lambda channel: not channel["is_private"]) + if SlackChannelTypes.PRIVATE in types: + conditions.append(lambda channel: channel["is_private"]) + + channels = [ + channel for channel in channels if any(cond(channel) for cond in conditions) + ] + # The search string can be multiple channels separated by commas if search_string: search_array = recipients_string_to_list(search_string) diff --git a/tests/integration_tests/dashboards/api_tests.py b/tests/integration_tests/dashboards/api_tests.py index 
56062dc8527..291096deb69 100644 --- a/tests/integration_tests/dashboards/api_tests.py +++ b/tests/integration_tests/dashboards/api_tests.py @@ -3038,6 +3038,18 @@ class TestDashboardApi(ApiOwnersTestCaseMixin, InsertChartMixin, SupersetTestCas response = self._cache_screenshot(dashboard.id) assert response.status_code == 202 + @with_feature_flags(THUMBNAILS=True, ENABLE_DASHBOARD_SCREENSHOT_ENDPOINTS=True) + @pytest.mark.usefixtures("create_dashboard_with_tag") + def test_cache_dashboard_screenshot_success_permalink_payload(self): + self.login(ADMIN_USERNAME) + dashboard = ( + db.session.query(Dashboard) + .filter(Dashboard.dashboard_title == "dash with tag") + .first() + ) + response = self._cache_screenshot(dashboard.id, {"permalinkKey": "1234"}) + assert response.status_code == 202 + @with_feature_flags(THUMBNAILS=True, ENABLE_DASHBOARD_SCREENSHOT_ENDPOINTS=True) @pytest.mark.usefixtures("create_dashboard_with_tag") def test_cache_dashboard_screenshot_dashboard_validation(self): diff --git a/tests/integration_tests/sqla_models_tests.py b/tests/integration_tests/sqla_models_tests.py index f19d495fd87..80ada22e0aa 100644 --- a/tests/integration_tests/sqla_models_tests.py +++ b/tests/integration_tests/sqla_models_tests.py @@ -27,6 +27,7 @@ import pytest import numpy as np import pandas as pd from flask.ctx import AppContext +from flask_appbuilder.security.sqla.models import Role from pytest_mock import MockerFixture from sqlalchemy.sql import text from sqlalchemy.sql.elements import TextClause @@ -844,7 +845,10 @@ def test_none_operand_in_filter(login_as_admin, physical_dataset): @patch("superset.jinja_context.get_user_id", return_value=1) @patch("superset.jinja_context.get_username", return_value="abc") @patch("superset.jinja_context.get_user_email", return_value="abc@test.com") -@patch("superset.jinja_context.get_user_roles", return_value=["role1", "role2"]) +@patch( + "superset.jinja_context.security_manager.get_user_roles", + return_value=[Role(name="role1"), 
Role(name="role2")], +) def test_extra_cache_keys( mock_get_user_roles, mock_user_email, @@ -888,7 +892,10 @@ def test_extra_cache_keys( @patch("superset.jinja_context.get_user_id", return_value=1) @patch("superset.jinja_context.get_username", return_value="abc") @patch("superset.jinja_context.get_user_email", return_value="abc@test.com") -@patch("superset.jinja_context.get_user_roles", return_value=["role1", "role2"]) +@patch( + "superset.jinja_context.security_manager.get_user_roles", + return_value=[Role(name="role1"), Role(name="role2")], +) def test_extra_cache_keys_in_sql_expression( mock_get_user_roles, mock_user_email, diff --git a/tests/unit_tests/jinja_context_test.py b/tests/unit_tests/jinja_context_test.py index 1654e267814..fa79cc04936 100644 --- a/tests/unit_tests/jinja_context_test.py +++ b/tests/unit_tests/jinja_context_test.py @@ -364,13 +364,14 @@ def test_user_macros(mocker: MockerFixture): - ``current_user_roles`` """ mock_g = mocker.patch("superset.utils.core.g") + mock_get_user_roles = mocker.patch("superset.security_manager.get_user_roles") mock_cache_key_wrapper = mocker.patch( "superset.jinja_context.ExtraCache.cache_key_wrapper" ) mock_g.user.id = 1 mock_g.user.username = "my_username" mock_g.user.email = "my_email@test.com" - mock_g.user.roles = [Role(name="my_role1"), Role(name="my_role2")] + mock_get_user_roles.return_value = [Role(name="my_role1"), Role(name="my_role2")] cache = ExtraCache() assert cache.current_user_id() == 1 assert cache.current_username() == "my_username" @@ -378,19 +379,23 @@ def test_user_macros(mocker: MockerFixture): assert cache.current_user_roles() == ["my_role1", "my_role2"] assert mock_cache_key_wrapper.call_count == 4 + mock_get_user_roles.return_value = [] + assert cache.current_user_roles() is None + def test_user_macros_without_cache_key_inclusion(mocker: MockerFixture): """ Test all user macros with ``add_to_cache_keys`` set to ``False``. 
""" mock_g = mocker.patch("superset.utils.core.g") + mock_get_user_roles = mocker.patch("superset.security_manager.get_user_roles") mock_cache_key_wrapper = mocker.patch( "superset.jinja_context.ExtraCache.cache_key_wrapper" ) mock_g.user.id = 1 mock_g.user.username = "my_username" mock_g.user.email = "my_email@test.com" - mock_g.user.roles = [Role(name="my_role1"), Role(name="my_role2")] + mock_get_user_roles.return_value = [Role(name="my_role1"), Role(name="my_role2")] cache = ExtraCache() assert cache.current_user_id(False) == 1 assert cache.current_username(False) == "my_username" diff --git a/tests/unit_tests/utils/slack_test.py b/tests/unit_tests/utils/slack_test.py index ed7a82c220c..024d6cf96ee 100644 --- a/tests/unit_tests/utils/slack_test.py +++ b/tests/unit_tests/utils/slack_test.py @@ -17,7 +17,7 @@ import pytest -from superset.utils.slack import get_channels_with_search +from superset.utils.slack import get_channels_with_search, SlackChannelTypes class MockResponse: @@ -150,15 +150,35 @@ class TestGetChannelsWithSearch: The server responded with: missing scope: channels:read""" ) - def test_filter_channels_by_specified_types(self, mocker): + @pytest.mark.parametrize( + "types, expected_channel_ids", + [ + ([SlackChannelTypes.PUBLIC], {"public_channel_id"}), + ([SlackChannelTypes.PRIVATE], {"private_channel_id"}), + ( + [SlackChannelTypes.PUBLIC, SlackChannelTypes.PRIVATE], + {"public_channel_id", "private_channel_id"}, + ), + ([], {"public_channel_id", "private_channel_id"}), + ], + ) + def test_filter_channels_by_specified_types( + self, types: list[SlackChannelTypes], expected_channel_ids: set[str], mocker + ): mock_data = { "channels": [ { - "id": "C12345", - "name": "general", + "id": "public_channel_id", + "name": "open", "is_member": False, "is_private": False, }, + { + "id": "private_channel_id", + "name": "secret", + "is_member": False, + "is_private": True, + }, ], "response_metadata": {"next_cursor": None}, } @@ -168,15 +188,8 @@ The server 
responded with: missing scope: channels:read""" mock_client.conversations_list.return_value = mock_response_instance mocker.patch("superset.utils.slack.get_slack_client", return_value=mock_client) - result = get_channels_with_search(types=["public"]) - assert result == [ - { - "id": "C12345", - "name": "general", - "is_member": False, - "is_private": False, - } - ] + result = get_channels_with_search(types=types) + assert {channel["id"] for channel in result} == expected_channel_ids def test_handle_pagination_multiple_pages(self, mocker): mock_data_page1 = {