Compare commits


1 commit

| Author | SHA1 | Message | Date |
| --- | --- | --- | --- |
| Beto Dealmeida | 4a03e80fcb | fix: pass request context to async queries | 2024-07-01 17:11:11 -04:00 |

1017 changed files with 205472 additions and 64008 deletions


@@ -70,9 +70,8 @@ github:
- cypress-matrix (4, chrome)
- cypress-matrix (5, chrome)
- frontend-build
- pre-commit (current)
- pre-commit (next)
- pre-commit (previous)
- pre-commit
- python-lint
- test-mysql
- test-postgres (current)
- test-postgres (next)

.gitattributes

@@ -1,2 +1 @@
docker/**/*.sh text eol=lf
*.svg binary

.github/CODEOWNERS

@@ -2,7 +2,7 @@
# https://github.com/apache/superset/issues/13351
/superset/migrations/ @mistercrunch @michael-s-molina @betodealmeida @eschutho
/superset/migrations/ @apache/superset-committers
# Notify some committers of changes in the components


@@ -46,7 +46,7 @@ body:
label: Superset version
options:
- master / latest-dev
- "4.0.2"
- "4.0.1"
- "3.1.3"
validations:
required: true


@@ -8,9 +8,8 @@ updates:
- package-ecosystem: "npm"
ignore:
# not until React >= 18.0.0
- dependency-name: "storybook"
- dependency-name: "@storybook*"
# not until node >= 18.12.0
- dependency-name: "css-minimizer-webpack-plugin"
directory: "/superset-frontend/"
schedule:
interval: "monthly"


@@ -31,7 +31,7 @@ jobs:
- uses: actions/checkout@v4
- uses: actions/setup-node@v4
with:
node-version: "20"
node-version: "18"
registry-url: 'https://registry.npmjs.org'
- run: npm ci
- run: npm run ci:release


@@ -21,7 +21,7 @@ jobs:
- uses: actions/checkout@v4
- uses: actions/setup-node@v4
with:
node-version: "20"
node-version: "18"
registry-url: 'https://registry.npmjs.org'
- run: npm ci
- run: npm test


@@ -233,7 +233,7 @@ jobs:
- name: Deploy Amazon ECS task definition
id: deploy-task
uses: aws-actions/amazon-ecs-deploy-task-definition@v2
uses: aws-actions/amazon-ecs-deploy-task-definition@v1
with:
task-definition: ${{ steps.task-def.outputs.task-definition }}
service: pr-${{ github.event.issue.number }}-service


@@ -19,7 +19,7 @@ jobs:
- name: Set up Node.js
uses: actions/setup-node@v4
with:
node-version: '20'
node-version: '18'
- name: Install Dependencies
run: npm install -g @action-validator/core @action-validator/cli --save-dev


@@ -16,9 +16,6 @@ concurrency:
jobs:
pre-commit:
runs-on: ubuntu-22.04
strategy:
matrix:
python-version: ["current", "next", "previous"]
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v4
@@ -27,8 +24,6 @@ jobs:
submodules: recursive
- name: Setup Python
uses: ./.github/actions/setup-backend/
with:
python-version: ${{ matrix.python-version }}
- name: Enable brew and helm-docs
# Add brew to the path - see https://github.com/actions/runner-images/issues/6283
run: |
@@ -40,11 +35,8 @@ jobs:
brew install norwoodj/tap/helm-docs
- name: pre-commit
run: |
set +e # Don't exit immediately on failure
pre-commit run --all-files
if [ $? -ne 0 ] || ! git diff --quiet --exit-code; then
echo "❌ Pre-commit check failed."
echo "🚒 To prevent/address this CI issue, please install/use pre-commit locally."
echo "📖 More details here: https://superset.apache.org/docs/contributing/development#git-hooks"
if ! pre-commit run --all-files; then
git status
git diff
exit 1
fi
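
For contributors hitting this check: the message above points at running pre-commit locally. A minimal sketch of that workflow, assuming a stock pre-commit setup:

```bash
# One-time install, then the same check the CI job above runs
pip install pre-commit
pre-commit install            # register the git hook for future commits
pre-commit run --all-files    # run every configured hook across the repo
```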


@@ -29,7 +29,7 @@ jobs:
strategy:
matrix:
node-version: [20]
node-version: [18]
steps:
- uses: actions/checkout@v4


@@ -26,7 +26,7 @@ jobs:
fail-fast: false
matrix:
browser: ["chrome"]
node: [20]
node: [18]
env:
SUPERSET_ENV: development
SUPERSET_CONFIG: tests.integration_tests.superset_test_config


@@ -30,7 +30,7 @@ jobs:
runs-on: ubuntu-22.04
strategy:
matrix:
node: [20]
node: [18]
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v4


@@ -35,10 +35,10 @@ jobs:
with:
persist-credentials: false
submodules: recursive
- name: Set up Node.js 20
- name: Set up Node.js 18
uses: actions/setup-node@v4
with:
node-version: '20'
node-version: '18'
- name: Setup Python
uses: ./.github/actions/setup-backend/
- uses: actions/setup-java@v4


@@ -12,34 +12,6 @@ concurrency:
cancel-in-progress: true
jobs:
linkinator:
# See docs here: https://github.com/marketplace/actions/linkinator
name: Link Checking
runs-on: ubuntu-latest
continue-on-error: true # This will make the job advisory (non-blocking, no red X)
steps:
- uses: actions/checkout@v4
- uses: JustinBeckwith/linkinator-action@v1.10.4
with:
paths: "**/*.md, **/*.mdx"
linksToSkip: >-
^https://github.com/apache/(superset|incubator-superset)/(pull|issue)/\d+,
http://localhost:8088/,
docker/.env-non-dev,
http://127.0.0.1:3000/,
http://localhost:9001/,
https://charts.bitnami.com/bitnami,
https://www.li.me/,
https://www.fanatics.com/,
https://tails.com/gb/,
https://www.techaudit.info/,
https://avetilearning.com/,
https://www.udemy.com/,
https://trustmedis.com/,
http://theiconic.com.au/,
https://dev.mysql.com/doc/refman/5.7/en/innodb-limits.html,
^https://img\.shields\.io/.*,
https://vkusvill.ru/
build-deploy:
name: Build & Deploy
runs-on: ubuntu-22.04


@@ -66,20 +66,20 @@ jobs:
# Conditional checkout based on context
- name: Checkout for push or pull_request event
if: github.event_name == 'push' || github.event_name == 'pull_request'
uses: actions/checkout@v4
uses: actions/checkout@v2
with:
persist-credentials: false
submodules: recursive
- name: Checkout using ref (workflow_dispatch)
if: github.event_name == 'workflow_dispatch' && github.event.inputs.ref != ''
uses: actions/checkout@v4
uses: actions/checkout@v2
with:
persist-credentials: false
ref: ${{ github.event.inputs.ref }}
submodules: recursive
- name: Checkout using PR ID (workflow_dispatch)
if: github.event_name == 'workflow_dispatch' && github.event.inputs.pr_id != ''
uses: actions/checkout@v4
uses: actions/checkout@v2
with:
persist-credentials: false
ref: refs/pull/${{ github.event.inputs.pr_id }}/merge
@@ -107,7 +107,7 @@ jobs:
if: steps.check.outputs.python || steps.check.outputs.frontend
uses: actions/setup-node@v4
with:
node-version: "20"
node-version: "18"
- name: Install npm dependencies
if: steps.check.outputs.python || steps.check.outputs.frontend
uses: ./.github/actions/cached-dependencies


@@ -33,7 +33,7 @@ jobs:
if: steps.check.outputs.frontend
uses: actions/setup-node@v4
with:
node-version: "20"
node-version: "18"
- name: Install dependencies
if: steps.check.outputs.frontend
uses: ./.github/actions/cached-dependencies
@@ -49,6 +49,11 @@ jobs:
working-directory: ./superset-frontend
run: |
npm run type
- name: prettier
if: steps.check.outputs.frontend
working-directory: ./superset-frontend
run: |
npm run prettier-check
- name: Build plugins packages
if: steps.check.outputs.frontend
working-directory: ./superset-frontend
@@ -71,7 +76,7 @@ jobs:
- name: generator-superset unit tests
if: steps.check.outputs.frontend
working-directory: ./superset-frontend/packages/generator-superset
run: npm run test
run: npx jest
- name: Upload code coverage
uses: codecov/codecov-action@v4
with:


@@ -0,0 +1,53 @@
# Python Misc unit tests
name: Python Misc
on:
push:
branches:
- "master"
- "[0-9].[0-9]*"
pull_request:
types: [synchronize, opened, reopened, ready_for_review]
# cancel previous workflow jobs for PRs
concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.run_id }}
cancel-in-progress: true
jobs:
python-lint:
runs-on: ubuntu-22.04
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v4
with:
persist-credentials: false
submodules: recursive
- name: Check for file changes
id: check
uses: ./.github/actions/change-detector/
with:
token: ${{ secrets.GITHUB_TOKEN }}
- name: Setup Python
uses: ./.github/actions/setup-backend/
if: steps.check.outputs.python
babel-extract:
runs-on: ubuntu-22.04
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v4
with:
persist-credentials: false
submodules: recursive
- name: Check for file changes
id: check
uses: ./.github/actions/change-detector/
with:
token: ${{ secrets.GITHUB_TOKEN }}
- name: Setup Python
if: steps.check.outputs.python
uses: ./.github/actions/setup-backend/
- name: Test babel extraction
if: steps.check.outputs.python
run: scripts/translations/babel_update.sh


@@ -54,13 +54,10 @@ jobs:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v4
with:
persist-credentials: false
tags: true
fetch-depth: 0
- name: Use Node.js 20
uses: actions/setup-node@v4
with:
node-version: 20
- name: Setup supersetbot
uses: ./.github/actions/setup-supersetbot/
@@ -97,38 +94,16 @@ jobs:
--platform "linux/arm64" \
--platform "linux/amd64"
# Returning to master to support closing setup-supersetbot
git checkout master
update-prs-with-release-info:
needs: config
if: needs.config.outputs.has-secrets
runs-on: ubuntu-22.04
permissions:
contents: read
pull-requests: write
steps:
# Going back on original branch to allow "post" GHA operations
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Use Node.js 20
uses: actions/setup-node@v4
with:
node-version: 20
- name: Setup supersetbot
uses: ./.github/actions/setup-supersetbot/
persist-credentials: false
- name: Label the PRs with the right release-related labels
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
export GITHUB_ACTOR=""
git fetch --all --tags
git checkout master
RELEASE="${{ github.event.release.tag_name }}"
if [ "${{ github.event_name }}" = "workflow_dispatch" ]; then
# in the case of a manually-triggered run, read release from input


@@ -32,7 +32,7 @@ jobs:
- name: Set up Node.js
uses: actions/setup-node@v4
with:
node-version: '20'
node-version: '18'
- name: Install Dependencies
run: npm install

.gitignore

@@ -121,4 +121,3 @@ docker/*local*
# Jest test report
test-report.html
superset/static/stats/statistics.html


@@ -53,14 +53,11 @@ repos:
- id: debug-statements
- id: end-of-file-fixer
- id: trailing-whitespace
exclude: ^.*\.(snap)
args: ["--markdown-linebreak-ext=md"]
- repo: https://github.com/pre-commit/mirrors-prettier
rev: v3.1.0 # Use the sha or tag you want to point at
hooks:
- id: prettier
additional_dependencies:
- prettier@3.3.3
args: ["--ignore-path=./superset-frontend/.prettierignore"]
files: "superset-frontend"
# blacklist unsafe functions like make_url (see #19526)


@@ -61,9 +61,6 @@ tsconfig.tsbuildinfo
generator-superset/*
temporary_superset_ui/*
# skip license checks for auto-generated test snapshots
.*snap
# docs overrides for third party logos we don't have the rights to
google-big-query.svg
google-sheets.svg


@@ -42,4 +42,3 @@ under the License.
- [3.1.3](./CHANGELOG/3.1.3.md)
- [4.0.0](./CHANGELOG/4.0.0.md)
- [4.0.1](./CHANGELOG/4.0.1.md)
- [4.0.2](./CHANGELOG/4.0.2.md)


@@ -1,78 +0,0 @@
<!--
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
-->
## Change Log
### 4.0.2 (Wed Jun 26 14:26:58 2024 -0300)
**Fixes**
- [#28639](https://github.com/apache/superset/pull/28639) fix: adds the ability to disallow SQL functions per engine (@dpgaspar)
- [#29367](https://github.com/apache/superset/pull/29367) fix(explore): don't respect y-axis formatting (@justinpark)
- [#29345](https://github.com/apache/superset/pull/29345) fix(revert 27883): Excess padding in horizontal Bar charts (@michael-s-molina)
- [#29349](https://github.com/apache/superset/pull/29349) fix(explore): restored hidden field values has discarded (@justinpark)
- [#29346](https://github.com/apache/superset/pull/29346) fix: Cannot delete empty column inside a tab using the dashboard editor (@michael-s-molina)
- [#29314](https://github.com/apache/superset/pull/29314) fix: Remove recursive repr call (@jessie-ross)
- [#29301](https://github.com/apache/superset/pull/29301) fix(metastore-cache): prune before add (@villebro)
- [#29278](https://github.com/apache/superset/pull/29278) fix(sqllab): invalid empty state on switch tab (@justinpark)
- [#29291](https://github.com/apache/superset/pull/29291) fix: filters not updating with force update when caching is enabled (@ka-weihe)
- [#28744](https://github.com/apache/superset/pull/28744) fix(permalink): adding anchor to dashboard permalink generation (@fisjac)
- [#29260](https://github.com/apache/superset/pull/29260) fix: Custom SQL filter control (@michael-s-molina)
- [#29248](https://github.com/apache/superset/pull/29248) fix(sqllab): Do not strip comments when executing SQL statements (@john-bodley)
- [#28755](https://github.com/apache/superset/pull/28755) fix: Workaround for Pandas.DataFrame.to_csv bug (@john-bodley)
- [#29234](https://github.com/apache/superset/pull/29234) fix(Explore): Keep necessary form data to allow query mode switching (@rtexelm)
- [#29230](https://github.com/apache/superset/pull/29230) fix(sqllab): run previous state query (@justinpark)
- [#29119](https://github.com/apache/superset/pull/29119) fix(mixed-timeseries-plugin): Second query stacks stacked on top of first query series (@kgabryje)
- [#28932](https://github.com/apache/superset/pull/28932) fix(embedded): add missing GUEST_TOKEN_HEADER_NAME to bootstrap data (@hexcafe)
- [#29084](https://github.com/apache/superset/pull/29084) fix: Remove BASE_AXIS from pre-query (@john-bodley)
- [#29081](https://github.com/apache/superset/pull/29081) fix(explore): Drill to detail truncates int64 IDs (@justinpark)
- [#28771](https://github.com/apache/superset/pull/28771) fix(Mixed Chart Filter Control): Allow delete condition for `adhoc_filters_b` (@rtexelm)
- [#28772](https://github.com/apache/superset/pull/28772) fix(dashboard): unable to resize due to the overlapped droptarget (@justinpark)
- [#28750](https://github.com/apache/superset/pull/28750) fix: do not close database modal on mask click (@eschutho)
- [#28745](https://github.com/apache/superset/pull/28745) fix(reports): Update the element class to wait for when taking a screenshot (@Vitor-Avila)
- [#28749](https://github.com/apache/superset/pull/28749) fix(sqllab): Sort db selector options by the API order (@justinpark)
- [#28653](https://github.com/apache/superset/pull/28653) fix: Handling of column types for Presto, Trino, et al. (@john-bodley)
- [#28422](https://github.com/apache/superset/pull/28422) fix: Update migration logic in #27119 (@john-bodley)
- [#28349](https://github.com/apache/superset/pull/28349) fix: Add back description column to saved queries #12431 (@imancrsrk)
- [#28512](https://github.com/apache/superset/pull/28512) fix: improve df to records performance (@dpgaspar)
- [#28613](https://github.com/apache/superset/pull/28613) fix: revert fix(presto preview): re-enable schema previsualization for Trino/Presto table/schemas" (@john-bodley)
- [#28567](https://github.com/apache/superset/pull/28567) fix: Revert "fix: don't strip SQL comments in Explore (#28363)" (@michael-s-molina)
- [#28555](https://github.com/apache/superset/pull/28555) fix(explore): hide a control wrapped with StashFormDataContainer correctly (@justinpark)
- [#28507](https://github.com/apache/superset/pull/28507) fix(dashboard): invalid drop item on a tab (@justinpark)
- [#28432](https://github.com/apache/superset/pull/28432) fix: Time shifts calculation for ECharts plugins (@michael-s-molina)
- [#26782](https://github.com/apache/superset/pull/26782) fix(presto preview): re-enable schema previsualization for Trino/Presto table/schemas (@brouberol)
- [#28409](https://github.com/apache/superset/pull/28409) fix(ar-modal): updateNotificationSettings not updating state (@fisjac)
- [#28395](https://github.com/apache/superset/pull/28395) fix(dashboard): Change class name on last Droppable in a column (@rtexelm)
- [#28396](https://github.com/apache/superset/pull/28396) fix: type annotation breaking on py3.9 (@dpgaspar)
- [#28368](https://github.com/apache/superset/pull/28368) fix: Contribution percentages for ECharts plugins (@michael-s-molina)
- [#28386](https://github.com/apache/superset/pull/28386) fix: Scroll to top when selecting a global dashboard tab (@michael-s-molina)
- [#28312](https://github.com/apache/superset/pull/28312) fix(explore): hide advanced analytics for non temporal xaxis (@justinpark)
- [#28363](https://github.com/apache/superset/pull/28363) fix: don't strip SQL comments in Explore (@mistercrunch)
- [#28341](https://github.com/apache/superset/pull/28341) fix: Remedy logic for UpdateDatasetCommand uniqueness check (@john-bodley)
- [#28334](https://github.com/apache/superset/pull/28334) fix: Small tweaks for Line and Area chart migrations (ECharts) (@michael-s-molina)
- [#28266](https://github.com/apache/superset/pull/28266) fix: use pessimistic json encoder in SQL Lab (@mistercrunch)
- [#28113](https://github.com/apache/superset/pull/28113) fix: Rename legacy line and area charts (@john-bodley)
- [#28279](https://github.com/apache/superset/pull/28279) fix(sql_parse): Ignore USE SQL keyword when determining SELECT statement (@john-bodley)
- [#28322](https://github.com/apache/superset/pull/28322) fix(sql_parse): Add Apache Spark to SQLGlot dialect mapping (@john-bodley)
**Others**
- [#29360](https://github.com/apache/superset/pull/29360) chore: Rename Totals to Summary in table chart (@michael-s-molina)
- [#29249](https://github.com/apache/superset/pull/29249) test(Explorer): Fix minor errors in ExploreViewContainer syntax, add tests (@rtexelm)
- [#28876](https://github.com/apache/superset/pull/28876) chore(sqllab): Add logging for actions (@justinpark)


@@ -16,6 +16,7 @@
specific language governing permissions and limitations
under the License.
-->
# CODE OF CONDUCT
*The following is copied for your convenience from <https://www.apache.org/foundation/policies/conduct.html>. If there's a discrepancy between the two, let us know or submit a PR to fix it.*
@@ -94,9 +95,9 @@ This statement thanks the following, on which it draws for content and inspirati
* [CouchDB Project Code of conduct](http://couchdb.apache.org/conduct.html)
* [Fedora Project Code of Conduct](http://fedoraproject.org/code-of-conduct)
* [Speak Up! Code of Conduct](http://web.archive.org/web/20141109123859/http://speakup.io/coc.html)
* [Speak Up! Code of Conduct](http://speakup.io/coc.html)
* [Django Code of Conduct](https://www.djangoproject.com/conduct/)
* [Debian Code of Conduct](https://www.debian.org/vote/2014/vote_002)
* [Debian Code of Conduct](http://www.debian.org/vote/2014/vote_002)
* [Twitter Open Source Code of Conduct](https://github.com/twitter/code-of-conduct/blob/master/code-of-conduct.md)
* [Mozilla Code of Conduct/Draft](https://wiki.mozilla.org/Code_of_Conduct/Draft#Conflicts_of_Interest)
* [Python Diversity Appendix](https://www.python.org/community/diversity/)


@@ -22,29 +22,16 @@ ARG PY_VER=3.10-slim-bookworm
# if BUILDPLATFORM is null, set it to 'amd64' (or leave as is otherwise).
ARG BUILDPLATFORM=${BUILDPLATFORM:-amd64}
FROM --platform=${BUILDPLATFORM} node:20-bullseye-slim AS superset-node
FROM --platform=${BUILDPLATFORM} node:18-bullseye-slim AS superset-node
ARG NPM_BUILD_CMD="build"
# Include translations in the final build. The default supports en only to
# reduce complexity and weight for those only using en
ARG BUILD_TRANSLATIONS="false"
# Used by docker-compose to skip the frontend build,
# in dev we mount the repo and build the frontend inside docker
ARG DEV_MODE="false"
# Include headless browsers? Allows for alerts, reports & thumbnails, but bloats the images
ARG INCLUDE_CHROMIUM="true"
ARG INCLUDE_FIREFOX="false"
# Somehow we need python3 + build-essential on this side of the house to install node-gyp
RUN apt-get update -qq \
&& apt-get install \
-yqq --no-install-recommends \
build-essential \
python3 \
zstd
python3
ENV BUILD_CMD=${NPM_BUILD_CMD} \
PUPPETEER_SKIP_CHROMIUM_DOWNLOAD=true
@@ -54,34 +41,19 @@ RUN --mount=type=bind,target=/frontend-mem-nag.sh,src=./docker/frontend-mem-nag.
/frontend-mem-nag.sh
WORKDIR /app/superset-frontend
# Creating empty folders to avoid errors when running COPY later on
RUN mkdir -p /app/superset/static/assets
RUN --mount=type=bind,target=./package.json,src=./superset-frontend/package.json \
--mount=type=bind,target=./package-lock.json,src=./superset-frontend/package-lock.json \
if [ "$DEV_MODE" = "false" ]; then \
npm ci; \
else \
echo "Skipping 'npm ci' in dev mode"; \
fi
npm ci
# Runs the webpack build process
COPY superset-frontend /app/superset-frontend
RUN npm run ${BUILD_CMD}
# This copies the .po files needed for translation
RUN mkdir -p /app/superset/translations
COPY superset/translations /app/superset/translations
RUN if [ "$DEV_MODE" = "false" ]; then \
BUILD_TRANSLATIONS=$BUILD_TRANSLATIONS npm run ${BUILD_CMD}; \
else \
echo "Skipping 'npm run ${BUILD_CMD}' in dev mode"; \
fi
# Compiles .json files from the .po files, then deletes the .po files
RUN if [ "$BUILD_TRANSLATIONS" = "true" ]; then \
npm run build-translation; \
else \
echo "Skipping translations as requested by build flag"; \
fi
RUN npm run build-translation
RUN rm /app/superset/translations/*/LC_MESSAGES/*.po
RUN rm /app/superset/translations/messages.pot
@@ -90,10 +62,6 @@ RUN rm /app/superset/translations/messages.pot
######################################################################
FROM python:${PY_VER} AS lean
# Include translations in the final build. The default supports en only to
# reduce complexity and weight for those only using en
ARG BUILD_TRANSLATIONS="false"
WORKDIR /app
ENV LANG=C.UTF-8 \
LC_ALL=C.UTF-8 \
@@ -107,6 +75,7 @@ RUN mkdir -p ${PYTHONPATH} superset/static requirements superset-frontend apache
&& useradd --user-group -d ${SUPERSET_HOME} -m --no-log-init --shell /bin/bash superset \
&& apt-get update -qq && apt-get install -yqq --no-install-recommends \
curl \
default-libmysqlclient-dev \
libsasl2-dev \
libsasl2-modules-gssapi-mit \
libpq-dev \
@@ -123,8 +92,8 @@ COPY --chown=superset:superset requirements/base.txt requirements/
RUN --mount=type=cache,target=/root/.cache/pip \
apt-get update -qq && apt-get install -yqq --no-install-recommends \
build-essential \
&& pip install --no-cache-dir --upgrade setuptools pip \
&& pip install --no-cache-dir -r requirements/base.txt \
&& pip install --upgrade setuptools pip \
&& pip install -r requirements/base.txt \
&& apt-get autoremove -yqq --purge build-essential \
&& rm -rf /var/lib/apt/lists/*
@@ -134,21 +103,17 @@ COPY --chown=superset:superset --from=superset-node /app/superset/static/assets
## Lastly, let's install superset itself
COPY --chown=superset:superset superset superset
RUN --mount=type=cache,target=/root/.cache/pip \
pip install --no-cache-dir -e .
pip install -e .
# Copy the .json translations from the frontend layer
COPY --chown=superset:superset --from=superset-node /app/superset/translations superset/translations
# Compile translations for the backend - this generates .mo files, then deletes the .po files
COPY ./scripts/translations/generate_mo_files.sh ./scripts/translations/
RUN if [ "$BUILD_TRANSLATIONS" = "true" ]; then \
./scripts/translations/generate_mo_files.sh \
&& chown -R superset:superset superset/translations \
&& rm superset/translations/messages.pot \
&& rm superset/translations/*/LC_MESSAGES/*.po; \
else \
echo "Skipping translations as requested by build flag"; \
fi
RUN ./scripts/translations/generate_mo_files.sh \
&& chown -R superset:superset superset/translations \
&& rm superset/translations/messages.pot \
&& rm superset/translations/*/LC_MESSAGES/*.po
COPY --chmod=755 ./docker/run-server.sh /usr/bin/
USER superset
@@ -178,38 +143,28 @@ RUN apt-get update -qq \
&& rm -rf /var/lib/apt/lists/*
RUN --mount=type=cache,target=/root/.cache/pip \
pip install --no-cache-dir playwright
pip install playwright
RUN playwright install-deps
RUN if [ "$INCLUDE_CHROMIUM" = "true" ]; then \
playwright install chromium; \
else \
echo "Skipping translations in dev mode"; \
fi
RUN playwright install chromium
# Install GeckoDriver WebDriver
ARG GECKODRIVER_VERSION=v0.34.0 \
FIREFOX_VERSION=125.0.3
RUN if [ "$INCLUDE_FIREFOX" = "true" ]; then \
apt-get update -qq \
&& apt-get install -yqq --no-install-recommends wget bzip2 \
&& wget -q https://github.com/mozilla/geckodriver/releases/download/${GECKODRIVER_VERSION}/geckodriver-${GECKODRIVER_VERSION}-linux64.tar.gz -O - | tar xfz - -C /usr/local/bin \
&& wget -q https://download-installer.cdn.mozilla.net/pub/firefox/releases/${FIREFOX_VERSION}/linux-x86_64/en-US/firefox-${FIREFOX_VERSION}.tar.bz2 -O - | tar xfj - -C /opt \
&& ln -s /opt/firefox/firefox /usr/local/bin/firefox \
&& apt-get autoremove -yqq --purge wget bzip2 && rm -rf /var/[log,tmp]/* /tmp/* /var/lib/apt/lists/*; \
fi
# Installing mysql client os-level dependencies in dev image only because GPL
RUN apt-get install -yqq --no-install-recommends \
default-libmysqlclient-dev \
&& rm -rf /var/lib/apt/lists/*
RUN apt-get update -qq \
&& apt-get install -yqq --no-install-recommends wget bzip2 \
&& wget -q https://github.com/mozilla/geckodriver/releases/download/${GECKODRIVER_VERSION}/geckodriver-${GECKODRIVER_VERSION}-linux64.tar.gz -O - | tar xfz - -C /usr/local/bin \
# Install Firefox
&& wget -q https://download-installer.cdn.mozilla.net/pub/firefox/releases/${FIREFOX_VERSION}/linux-x86_64/en-US/firefox-${FIREFOX_VERSION}.tar.bz2 -O - | tar xfj - -C /opt \
&& ln -s /opt/firefox/firefox /usr/local/bin/firefox \
&& apt-get autoremove -yqq --purge wget bzip2 && rm -rf /var/[log,tmp]/* /tmp/* /var/lib/apt/lists/*
# Cache everything for dev purposes...
COPY --chown=superset:superset requirements/development.txt requirements/
RUN --mount=type=cache,target=/root/.cache/pip \
apt-get update -qq && apt-get install -yqq --no-install-recommends \
build-essential \
&& pip install --no-cache-dir -r requirements/development.txt \
&& pip install -r requirements/development.txt \
&& apt-get autoremove -yqq --purge build-essential \
&& rm -rf /var/lib/apt/lists/*
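
For reference, a hedged sketch of a build exercising the args defined in this Dockerfile (`BUILD_TRANSLATIONS`, `INCLUDE_CHROMIUM`, `INCLUDE_FIREFOX`); the image tag is illustrative:

```bash
# Build from the repo root, opting into translations and Firefox over Chromium
docker build \
  --build-arg BUILD_TRANSLATIONS=true \
  --build-arg INCLUDE_CHROMIUM=false \
  --build-arg INCLUDE_FIREFOX=true \
  -t superset-custom .
```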


@@ -133,7 +133,6 @@ Here are some of the major database solutions that are supported:
<img src="https://superset.apache.org/img/databases/databend.png" alt="databend" border="0" width="200" />
<img src="https://superset.apache.org/img/databases/starrocks.png" alt="starrocks" border="0" width="200" />
<img src="https://superset.apache.org/img/databases/doris.png" alt="doris" border="0" width="200" />
<img src="https://superset.apache.org/img/databases/oceanbase.svg" alt="oceanbase" border="0" width="220" />
</p>
**A more comprehensive list of supported databases** along with the configuration instructions can be found [here](https://superset.apache.org/docs/configuration/databases).
@@ -169,10 +168,10 @@ how to set up a development environment.
- [Superset SIPs](https://github.com/orgs/apache/projects/170) - The status of Superset's SIPs (Superset Improvement Proposals) for both consensus and implementation status.
Understanding the Superset Points of View
- [The Case for Dataset-Centric Visualization](https://preset.io/blog/dataset-centric-visualization/)
- [Understanding the Superset Semantic Layer](https://preset.io/blog/understanding-superset-semantic-layer/)
- Getting Started with Superset
- [Superset in 2 Minutes using Docker Compose](https://superset.apache.org/docs/installation/docker-compose#installing-superset-locally-using-docker-compose)
- [Installing Database Drivers](https://superset.apache.org/docs/configuration/databases#installing-database-drivers)
@@ -190,8 +189,8 @@ Understanding the Superset Points of View
- [Mixed Time Series Charts](https://preset.io/events/mixed-time-series-visualization-in-superset-workshop/)
- [How the Bing Team Customized Superset for the Internal Self-Serve Data & Analytics Platform](https://preset.io/events/how-the-bing-team-heavily-customized-superset-for-their-internal-data/)
- [Live Demo: Visualizing MongoDB and Pinot Data using Trino](https://preset.io/events/2021-04-13-visualizing-mongodb-and-pinot-data-using-trino/)
- [Introduction to the Superset API](https://preset.io/events/introduction-to-the-superset-api/)
- [Building a Database Connector for Superset](https://preset.io/events/2021-02-16-building-a-database-connector-for-superset/)
- [Introduction to the Superset API](https://preset.io/events/introduction-to-the-superset-api/)
- [Building a Database Connector for Superset](https://preset.io/events/2021-02-16-building-a-database-connector-for-superset/)
- Visualizations
- [Creating Viz Plugins](https://superset.apache.org/docs/contributing/creating-viz-plugins/)
@@ -201,7 +200,6 @@ Understanding the Superset Points of View
- [Superset API](https://superset.apache.org/docs/rest-api)
## Repo Activity
<a href="https://next.ossinsight.io/widgets/official/compose-last-28-days-stats?repo_id=39464018" target="_blank" align="center">
<picture>
<source media="(prefers-color-scheme: dark)" srcset="https://next.ossinsight.io/widgets/official/compose-last-28-days-stats/thumbnail.png?repo_id=39464018&image_size=auto&color_scheme=dark" width="655" height="auto" />


@@ -34,11 +34,8 @@ RUN apt-get install -y build-essential libssl-dev \
# Install nodejs for custom build
# https://nodejs.org/en/download/package-manager/
RUN set -eux; \
curl -sL https://deb.nodesource.com/setup_18.x | bash -; \
apt-get install -y nodejs; \
node --version;
RUN if ! which npm; then apt-get install -y npm; fi
RUN curl -sL https://deb.nodesource.com/setup_16.x | bash - \
&& apt-get install -y nodejs
RUN mkdir -p /home/superset
RUN chown superset /home/superset


@@ -17,9 +17,7 @@
FROM python:3.10-slim-bookworm
RUN apt-get update -y
RUN apt-get install -y \
git \
jq
RUN apt-get install -y jq
COPY make_tarball_entrypoint.sh /entrypoint.sh
ENTRYPOINT ["/entrypoint.sh"]


@@ -115,7 +115,7 @@ source set_release_env.sh 1.5.1rc1 myid@apache.org
The script will output the exported variables. Here's an example for 1.5.1rc1:
```env
```
-------------------------------
Set Release env variables
SUPERSET_VERSION=1.5.1
@@ -264,13 +264,13 @@ python changelog.py --previous_version 1.5.0 --current_version ${SUPERSET_GITHUB
Finally, bump the version number on `superset-frontend/package.json` (replace with whichever version is being released excluding the RC version):
```json
```
"version": "0.38.0"
```
Commit the change with the version number, then git tag the version with the release candidate and push to the branch:
```bash
```
# add changed files and commit
git add ...
git commit ...
@@ -366,7 +366,7 @@ The script will interactively ask for extra information needed to fill out the e
voting description, it will generate a passing, non passing or non conclusive email.
Here's an example:
```text
```
A List of people with +1 binding vote (ex: Max,Grace,Krist): Daniel,Alan,Max,Grace
A List of people with +1 non binding vote (ex: Ville): Ville
A List of people with -1 vote (ex: John):
@@ -505,7 +505,7 @@ We also need to update the Environment section of [ISSUE_TEMPLATE/bug-report.yml
Docker release with proper tags should happen automatically as version
tags get pushed to the `apache/superset` GitHub repository through this
[GitHub action](https://github.com/apache/superset/blob/master/.github/workflows/docker.yml)
[GitHub action](https://github.com/apache/superset/blob/master/.github/workflows/docker-release.yml)
Note that this GH action implements a `workflow_dispatch` trigger,
meaning that it can be triggered manually from the GitHub UI. If anything
@@ -516,22 +516,16 @@ reference), and whether to force the `latest` Docker tag on the
generated images.
### Npm Release
You might want to publish the latest @superset-ui release to npm
```bash
cd superset/superset-frontend
```
An automated GitHub action will run and generate a new tag, which will contain a version number provided as a parameter.
```bash
export GH_TOKEN={GITHUB_TOKEN}
npx lerna version {VERSION} --conventional-commits --create-release github --no-private --yes --message {COMMIT_MESSAGE}
```
This action will publish the specified version to npm registry.
```bash
npx lerna publish from-package --yes
```


@@ -102,7 +102,7 @@ Some of the new features in this release are disabled by default. Each has a fea
This release includes **hundreds** of bugfixes and stability enhancements. Future major releases will have a continued emphasis on providing a stable and bug-free experience for the user.
# PR Highlights
Below is a highlight of the PRs included in this update. The full list is much longer, and can be found [here](https://github.com/apache/superset/blob/master/CHANGELOG.md).
Below is a highlight of the PRs included in this update. The full list is much longer, and can be found [here](apache/incubator-superset/CHANGELOG.md).
## User Experience
- Revert "refactor: Remove usages of reactable from TimeTable (#11046)" (#[11150](https://github.com/apache/incubator-superset/pull/11150))
@@ -222,4 +222,4 @@ Below is a highlight of the PRs included in this update. The full list is much l
## Complete Changelog
Backwards incompatible changes and can be found [here](../../UPDATING.md).
To see the complete changelog, see [apache/superset/CHANGELOG.md](https://github.com/apache/superset/blob/master/CHANGELOG.md)
To see the complete changelog, see [apache/incubator-superset/CHANGELOG.md](https://github.com/apache/superset/blob/master/CHANGELOG.md)


@@ -137,6 +137,6 @@ when available.
**Changelog**
To see the complete changelog in this release, head to
[CHANGELOG.MD](https://github.com/apache/superset/blob/master/CHANGELOG/1.5.0.md).
[CHANGELOG.MD](https://github.com/apache/superset/blob/1.5/CHANGELOG/1.5.0.md).
As mentioned earlier, this release has a MASSIVE amount of bug fixes. The full
changelog lists all of them!


@@ -1,140 +0,0 @@
<!--
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
-->
# Release Notes for Superset 4.1.0
Superset 4.1.0 brings a range of new features and quality of life improvements. This release is a minor version, meaning it doesn't include any breaking changes. However, users of basic Superset docker images like `4.1.0` should see the note at the bottom of this file about [changes to those builds](/RELEASING/release-notes-4-1/README.md#change-to-docker-image-builds).
## Highlights
Here are some of the highlights of this release.
### Big Number With Time Period Updates
We released a [Big Number with Time Period Comparison](https://github.com/apache/superset/pull/26908) chart as part of Superset 4.0. With the latest update, there are now [color options](https://github.com/apache/superset/pull/27524) for comparisons. The chart now also uses [standardized controls](https://github.com/apache/superset/pull/27193), so switching charts will maintain the selected metrics. To enable the new chart, you'll need to enable the `CHART_PLUGINS_EXPERIMENTAL` feature flag.
<div>
<image src="media/big_number_chart.png" alt="Image" width="100%">
</div>
### Table with Time Comparison
Added functionality to do [table time comparisons](https://github.com/apache/superset/pull/28057) behind the `CHART_PLUGINS_EXPERIMENTAL` feature flag. This will help facilitate efficient data analysis.
<div>
<image src="media/table_with_time.png" alt="Image" width="100%">
</div>
### New ECharts Versions
The new ECharts [Heatmap](https://github.com/apache/superset/pull/25353) has been added. Compared to the legacy Heatmap, it has more accurate percentage calculations, server side sorting to respect row limits, and a more interactive legend control that allows selecting a subset of values.
<div>
<image src="media/heatmap.png" alt="Image" width="100%">
</div>
We also added a new ECharts [Histogram](https://github.com/apache/superset/pull/28652) chart. The new chart will help visualize patterns, clusters, and outliers in the data and provides insights into its shape, central tendency, and spread.
<div>
<image src="media/histogram.png" alt="Image" width="100%">
</div>
A new ECharts [Sankey](https://github.com/apache/superset/pull/29329) chart is also available. The chart visually tracks the movement and transformation of values across system stages.
<div>
<image src="media/sankey.png" alt="Image" width="100%">
</div>
You can use the CLI command to migrate Area, Bubble, Line, Sankey, [Heatmap](https://github.com/apache/superset/pull/27771), and [Histogram](https://github.com/apache/superset/pull/28780) chart types, but we'll add more as the ECharts migrations continue. Note that migrations for deprecated charts may be forced in upcoming major versions when the code is removed. Running migrations earlier will allow you to de-risk future upgrades while improving user experience.
```bash
Usage: superset viz-migrations [OPTIONS] COMMAND [ARGS]...
Migrates a viz from one type to another.
Commands:
downgrade Downgrades a viz to the previous version.
upgrade Upgrade a viz to the latest version.
```
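The subcommand options aren't shown in the usage text above; as a sketch, inspect them before migrating anything (the `superset` CLI is click-based, so `--help` is available at each level):

```bash
# Discover the exact flags before running a migration
superset viz-migrations --help
superset viz-migrations upgrade --help
```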
Note: When migrating dashboards from one Superset instance to another (using import/export features or the Superset CLI), or restoring a backup of prior charts and dashboards, Superset will apply the existing migrations that are used during version upgrades. This will ensure that your charts and dashboards are using the latest and greatest charts that Superset officially supports. For any migration issues, feel free to [open a new issue](https://github.com/apache/superset/issues/new?assignees=&labels=bug&projects=&template=bug-report.yml) in the repo.
### Improved Upload Forms
We've made design changes to the [CSV](https://github.com/apache/superset/pull/27840), [Excel](https://github.com/apache/superset/pull/28105), and [Columnar](https://github.com/apache/superset/pull/28192) upload modals to improve user experience and to be more performant. The new designs have the following goals:
- Improved error handling.
- Better backend parameter validation.
- More aligned with our other modal dialogs.
#### CSV
<div>
<img src="media/csv_modal_1.png" alt="Image" width="25%">
<img src="media/csv_modal_2.png" alt="Image" width="25%">
<img src="media/csv_modal_3.png" alt="Image" width="25%">
<img src="media/csv_modal_4.png" alt="Image" width="25%">
</div>
#### Excel
<div>
<img src="media/excel_modal_1.png" alt="Image" width="25%">
<img src="media/excel_modal_2.png" alt="Image" width="25%">
<img src="media/excel_modal_3.png" alt="Image" width="25%">
<img src="media/excel_modal_4.png" alt="Image" width="25%">
</div>
#### Columnar
<div>
<img src="media/columnar_modal_1.png" alt="Image" width="33%">
<img src="media/columnar_modal_2.png" alt="Image" width="33%">
<img src="media/columnar_modal_3.png" alt="Image" width="33%">
</div>
### OAuth2 For Databases
You now have the ability to enable [OAuth2](https://github.com/apache/superset/pull/27631) for databases like BigQuery, Snowflake, Dremio, Databricks, Google Sheets, etc. When enabled, it will allow users to connect to OAuth2-enabled databases with their own credentials.
### Catalog Support For Databases
Added support for the [catalog hierarchy](https://github.com/apache/superset/pull/28317) for databases that support it, such as [BigQuery (projects), Databricks, Presto, Snowflake, and Trino](https://github.com/apache/superset/pull/28416). Once enabled, users will see catalogs when selecting tables in [SQL Lab, datasets](https://github.com/apache/superset/pull/28376), and when setting up Data Access Roles.
### Slack Upload Files V2 API Updates
As part of [[SIP-138] Proposal for Slack file upload V2 integration for Alerts and Reports](https://github.com/apache/superset/issues/29263), we now support the Slack file upload v2 API. This feature is behind the `ALERT_REPORT_SLACK_V2` feature flag and also changes the Slack channel field to a selector. You may also need to add the following scopes (`channels:read`, `group:read`) to your Slack bot for it to work.
<div>
<image src="media/slack_modal.png" alt="Image" width="100%">
</div>
### Total and Percentages In Tooltips For ECharts
Users can now see both the [total and percentage in tooltips](https://github.com/apache/superset/pull/27950) for ECharts.
<div>
<image src="media/tooltips.png" alt="Image" width="100%">
</div>
### Additional Metadata Bar To Dashboards
There is now a [metadata bar](https://github.com/apache/superset/pull/27857) added to the header of dashboards. This shows viewers both the owners and the last modified time of the dashboard.
## Change to Docker image builds
Starting in 4.1.0, the release's docker image does not ship with drivers needed to operate Superset. Users may need to install a driver for their metadata database (MySQL or Postgres) as well as the driver for their data warehouse. This is a result of changes to the `lean` docker image that official releases come from; see [Docker Build Presets](/docs/installation/docker-builds/#build-presets) for more details.
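
In practice that means extending the official image. A minimal sketch, assuming a Postgres metadata database and the common `psycopg2-binary` driver package (image tag and names are illustrative):

```bash
# Extend the release image with the drivers your deployment needs
cat > Dockerfile.custom <<'EOF'
FROM apache/superset:4.1.0
USER root
RUN pip install --no-cache-dir psycopg2-binary
USER superset
EOF
docker build -f Dockerfile.custom -t superset-with-drivers .
```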

(18 binary image files removed; previews not shown.)


@@ -45,7 +45,7 @@ These features are **finished** but currently being tested. They are usable, but
- CACHE_IMPERSONATION
- CONFIRM_DASHBOARD_DIFF
- DRILL_TO_DETAIL
- DYNAMIC_PLUGINS
- DYNAMIC_PLUGINS: [(docs)](https://superset.apache.org/docs/configuration/running-on-kubernetes)
- ENABLE_SUPERSET_META_DB: [(docs)](https://superset.apache.org/docs/configuration/databases/#querying-across-databases)
- ESTIMATE_QUERY_COST
- GLOBAL_ASYNC_QUERIES [(docs)](https://github.com/apache/superset/blob/master/CONTRIBUTING.md#async-chart-queries)
@@ -68,16 +68,9 @@ These features flags are **safe for production**. They have been tested and will
- DISABLE_LEGACY_DATASOURCE_EDITOR
### Flags retained for runtime configuration
Currently some of our feature flags act as dynamic configurations that can be changed
on the fly. This contradicts the typical ephemeral feature flag use case,
where the flag is used to mature a feature and is eventually deprecated once the feature is
solid. Eventually we'll likely refactor these under a more formal "dynamic configurations" framework, managed
independently. This new framework will also allow for non-boolean configurations.
- ALERTS_ATTACH_REPORTS
- ALLOW_ADHOC_SUBQUERY
- DASHBOARD_RBAC [(docs)](https://superset.apache.org/docs/using-superset/creating-your-first-dashboard#manage-access-to-dashboards)
- DASHBOARD_RBAC [(docs)](https://superset.apache.org/docs/using-superset/first-dashboard#manage-access-to-dashboards)
- DATAPANEL_CLOSED_BY_DEFAULT
- DRUID_JOINS
- EMBEDDABLE_CHARTS
@@ -86,7 +79,6 @@ independently. This new framework will also allow for non-boolean configurations
- ESCAPE_MARKDOWN_HTML
- LISTVIEWS_DEFAULT_CARD_VIEW
- SCHEDULED_QUERIES [(docs)](https://superset.apache.org/docs/configuration/alerts-reports)
- SLACK_ENABLE_AVATARS (see `superset/config.py` for more information)
- SQLLAB_BACKEND_PERSISTENCE
- SQL_VALIDATORS_BY_ENGINE [(docs)](https://superset.apache.org/docs/configuration/sql-templating)
- THUMBNAILS [(docs)](https://superset.apache.org/docs/configuration/cache)
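
For context, flags like these are normally toggled through the `FEATURE_FLAGS` dict in `superset_config.py`; a minimal sketch (the flag picks are illustrative):

```bash
# Append a FEATURE_FLAGS override to a local superset_config.py
cat >> superset_config.py <<'EOF'
FEATURE_FLAGS = {
    "DRILL_TO_DETAIL": True,  # listed above as in testing
    "THUMBNAILS": True,       # see the cache docs linked above
}
EOF
```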


@@ -28,26 +28,25 @@ Join our growing community!
### Sharing Economy
- [Airbnb](https://github.com/airbnb)
- [Faasos](https://faasos.com/) [@shashanksingh]
- [Faasos](http://faasos.com/) [@shashanksingh]
- [Hostnfly](https://www.hostnfly.com/) [@alexisrosuel]
- [Lime](https://www.li.me/) [@cxmcc]
- [Lime](https://www.limebike.com/) [@cxmcc]
- [Lyft](https://www.lyft.com/)
- [Ontruck](https://www.ontruck.com/)
### Financial Services
- [Aktia Bank plc](https://www.aktia.com)
- [Aktia Bank plc](https://www.aktia.com) [@villebro]
- [American Express](https://www.americanexpress.com) [@TheLastSultan]
- [bumper](https://www.bumper.co/) [@vasu-ram, @JamiePercival]
- [Cape Crypto](https://capecrypto.com)
- [Capital Service S.A.](https://capitalservice.pl) [@pkonarzewski]
- [Clark.de](https://clark.de/)
- [Capital Service S.A.](http://capitalservice.pl) [@pkonarzewski]
- [Clark.de](http://clark.de/)
- [KarrotPay](https://www.daangnpay.com/)
- [Taveo](https://www.taveo.com) [@codek]
- [Unit](https://www.unit.co/about-us) [@amitmiran137]
- [Wise](https://wise.com) [@koszti]
- [Xendit](https://xendit.co/) [@LieAlbertTriAdrian]
- [Xendit](http://xendit.co/) [@LieAlbertTriAdrian]
### Gaming
- [Digit Game Studios](https://www.digitgaming.com/)
- [Popoko VM Games Studio](https://popoko.live)
### E-Commerce
@@ -55,19 +54,19 @@ Join our growing community!
- [Bazaar Technologies](https://www.bazaartech.com) [@umair-abro]
- [Dragonpass](https://www.dragonpass.com.cn/) [@zhxjdwh]
- [Dropit Shopping](https://www.dropit.shop/) [@dropit-dev]
- [Fanatics](https://www.fanatics.com/) [@coderfender]
- [Fordeal](https://www.fordeal.com) [@Renkai]
- [Fanatics](https://www.fanatics.com) [@coderfender]
- [Fordeal](http://www.fordeal.com) [@Renkai]
- [GFG - Global Fashion Group](https://global-fashion-group.com) [@ksaagariconic]
- [HuiShouBao](https://www.huishoubao.com/) [@Yukinoshita-Yukino]
- [HuiShouBao](http://www.huishoubao.com/) [@Yukinoshita-Yukino]
- [Now](https://www.now.vn/) [@davidkohcw]
- [Qunar](https://www.qunar.com/) [@flametest]
- [Rakuten Viki](https://www.viki.com)
- [Shopee](https://shopee.sg) [@xiaohanyu]
- [Shopkick](https://www.shopkick.com) [@LAlbertalli]
- [Tails.com](https://tails.com/gb/) [@alanmcruickshank]
- [THE ICONIC](https://theiconic.com.au/) [@ksaagariconic]
- [Tails.com](https://tails.com) [@alanmcruickshank]
- [THE ICONIC](http://theiconic.com.au/) [@ksaagariconic]
- [Utair](https://www.utair.ru) [@utair-digital]
- [VkusVill](https://vkusvill.ru/) [@ETselikov]
- [VkusVill](https://www.vkusvill.ru) [@ETselikov]
- [Zalando](https://www.zalando.com) [@dmigo]
- [Zalora](https://www.zalora.com) [@ksaagariconic]
@@ -80,44 +79,47 @@ Join our growing community!
- [Caizin](https://caizin.com/) [@tejaskatariya]
- [Careem](https://www.careem.com/) [@SamraHanifCareem]
- [Cloudsmith](https://cloudsmith.io) [@alancarson]
- [CnOvit](https://www.cnovit.com/) [@xieshaohu]
- [CnOvit](http://www.cnovit.com/) [@xieshaohu]
- [Cyberhaven](https://www.cyberhaven.com/) [@toliver-ch]
- [Deepomatic](https://deepomatic.com/) [@Zanoellia]
- [Dial Once](https://www.dial-once.com/)
- [Dremio](https://dremio.com) [@narendrans]
- [Elestio](https://elest.io/) [@kaiwalyakoparkar]
- [ELMO Cloud HR & Payroll](https://elmosoftware.com.au/)
- [Endress+Hauser](https://www.endress.com/) [@rumbin]
- [FBK - ICT center](https://ict.fbk.eu)
- [Endress+Hauser](http://www.endress.com/) [@rumbin]
- [FBK - ICT center](http://ict.fbk.eu)
- [Gavagai](https://gavagai.io) [@gavagai-corp]
- [GfK Data Lab](https://www.gfk.com/home) [@mherr]
- [GfK Data Lab](http://datalab.gfk.com) [@mherr]
- [GrowthSimple](https://growthsimple.ai/)
- [Hydrolix](https://www.hydrolix.io/)
- [Intercom](https://www.intercom.com/) [@kate-gallo]
- [jampp](https://jampp.com/)
- [Konfío](https://konfio.mx) [@uis-rodriguez]
- [Konfío](http://konfio.mx) [@uis-rodriguez]
- [Mainstrat](https://mainstrat.com/)
- [mishmash io](https://mishmash.io/)[@mishmash-io]
- [Myra Labs](https://www.myralabs.com/) [@viksit]
- [Nielsen](https://www.nielsen.com/) [@amitNielsen]
- [Myra Labs](http://www.myralabs.com/) [@viksit]
- [Nielsen](http://www.nielsen.com/) [@amitNielsen]
- [Ona](https://ona.io) [@pld]
- [Orange](https://www.orange.com) [@icsu]
- [Oslandia](https://oslandia.com)
- [Peak AI](https://www.peak.ai/) [@azhar22k]
- [PeopleDoc](https://www.people-doc.com) [@rodo]
- [Preset, Inc.](https://preset.io)
- [Pronto Tools](http://www.prontotools.io) [@zkan]
- [PubNub](https://pubnub.com) [@jzucker2]
- [ReadyTech](https://www.readytech.io)
- [Reward Gateway](https://www.rewardgateway.com)
- [ScopeAI](https://www.getscopeai.com) [@iloveluce]
- [Showmax](https://showmax.com) [@bobek]
- [Showmax](https://tech.showmax.com) [@bobek]
- [source{d}](https://www.sourced.tech) [@marnovo]
- [Steamroot](https://streamroot.io/)
- [TechAudit](https://www.techaudit.info) [@ETselikov]
- [Tenable](https://www.tenable.com) [@dflionis]
- [Tentacle](https://www.linkedin.com/company/tentacle-cmi/) [@jdclarke5]
- [Tentacle](https://public.tentaclecmi.com) [@jdclarke5]
- [timbr.ai](https://timbr.ai/) [@semantiDan]
- [Tobii](https://www.tobii.com/) [@dwa]
- [Tobii](http://www.tobii.com/) [@dwa]
- [Tooploox](https://www.tooploox.com/) [@jakubczaplicki]
- [Unvired](https://unvired.com)[@srinisubramanian]
- [Whale](https://whale.im)
- [Whale](http://whale.im)
- [Windsor.ai](https://www.windsor.ai/) [@octaviancorlade]
- [Zeta](https://www.zeta.tech/) [@shaikidris]
@@ -131,7 +133,7 @@ Join our growing community!
- [Prensa Iberica](https://www.prensaiberica.es/) [@zamar-roura]
- [TME QQMUSIC/WESING](https://www.tencentmusic.com/)[@shenyuanli,@marklaw]
- [Xite](https://xite.com/) [@shashankkoppar]
- [Zaihang](https://www.zaih.com/)
- [Zaihang](http://www.zaih.com/)
### Education
- [Aveti Learning](https://avetilearning.com/) [@TheShubhendra]
@@ -152,12 +154,14 @@ Join our growing community!
### Healthcare
- [Amino](https://amino.com) [@shkr]
- [Beans](https://www.beans.fi) [@kakoni]
- [Bluesquare](https://www.bluesquarehub.com/) [@madewulf]
- [Care](https://www.getcare.io/)[@alandao2021]
- [Living Goods](https://www.livinggoods.org) [@chelule]
- [Maieutical Labs](https://maieuticallabs.it) [@xrmx]
- [QPID Health](http://www.qpidhealth.com/)
- [REDCap Cloud](https://www.redcapcloud.com/)
- [TrustMedis](https://trustmedis.com/) [@famasya]
- [TrustMedis](https://trustmedis.com) [@famasya]
- [WeSure](https://www.wesure.cn/)
### HR / Staffing


@@ -32,9 +32,9 @@ assists people when migrating to a new version.
`requirements/` folder. If you use these files for your builds you may want to double
check that your builds are not affected. `base.txt` should be the same as before, though
`development.txt` becomes a bigger set, incorporating the now defunct local, testing, integration, and docker
- [27434](https://github.com/apache/superset/pull/27434/files): DO NOT USE our docker compose.\*
- [27434](https://github.com/apache/superset/pull/27434/files): DO NOT USE our docker-compose.\*
files for production use cases! While we never really supported
or should have tried to support docker compose for production use cases, we now actively
or should have tried to support docker-compose for production use cases, we now actively
have taken a stance against supporting it. See the PR for details.
- [24112](https://github.com/apache/superset/pull/24112): Python 3.10 is now the recommended python version to use, 3.9 still
supported but getting deprecated in the nearish future. CI/CD runs on py310 so you probably want to align. If you
@@ -57,11 +57,6 @@ assists people when migrating to a new version.
translations inside the python package. This includes the .mo files needed by pybabel on the
backend, as well as the .json files used by the frontend. If you were doing anything before
as part of your bundling to expose translation packages, it's probably not needed anymore.
- [29264](https://github.com/apache/superset/pull/29264) Slack has updated its file upload API, and we now support this new API in Superset, although the Slack API is not backward compatible. The original Slack integration is deprecated and we will require a new Slack scope `channels:read` to be added to Slack workspaces in order to use this new API. In an upcoming release, we will make this new Slack scope mandatory and remove the old Slack functionality.
- [29798](https://github.com/apache/superset/pull/29798) Since 3.1.0, the initial schedule for an alert or report was mistakenly offset by the specified timezone's relation to UTC. The initial schedule should now begin at the correct time.
- [30021](https://github.com/apache/superset/pull/30021) The `dev` layer in our Dockerfile no longer includes Firefox binaries, only Chromium, to reduce bloat/docker build time.
- [30274](https://github.com/apache/superset/pull/30274) Moved SLACK_ENABLE_AVATAR from config.py to the feature flag framework; please adapt your configs.
- [30099](https://github.com/apache/superset/pull/30099) Translations are no longer included in the default docker image builds. If your environment requires translations, you'll want to set the docker build arg `BUILD_TRANSLATIONS=true`.
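
A one-line sketch of the build that last entry refers to, assuming you build from the repo root (see the Dockerfile's `BUILD_TRANSLATIONS` arg earlier in this diff):

```bash
docker build --build-arg BUILD_TRANSLATIONS=true -t superset-i18n .
```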
### Potential Downtime
@@ -119,7 +114,7 @@ assists people when migrating to a new version.
- [24911](https://github.com/apache/superset/pull/24911): Changes the column type from `TEXT` to `MediumText` in table `logs`, potentially requiring a table lock on MySQL dbs or taking some time to complete on large deployments.
- [24939](https://github.com/apache/superset/pull/24939): Augments the foreign key constraints for the `embedded_dashboards` table to include an explicit CASCADE ON DELETE to ensure the relevant records are deleted when a dashboard is deleted. Scheduled downtime may be advised.
- [24938](https://github.com/apache/superset/pull/24938): Augments the foreign key constraints for the `dashboard_slices` table to include an explicit CASCADE ON DELETE to ensure the relevant records are deleted when a dashboard or slice is deleted. Scheduled downtime may be advised.
- [24628](https://github.com/apache/superset/pull/24628): Augments the foreign key constraints for the `dashboard_owner`, `report_schedule_owner`, and `slice_owner` tables to include an explicit CASCADE ON DELETE to ensure the relevant ownership records are deleted when a dataset is deleted. Scheduled downtime may be advised.
- [24628]https://github.com/apache/superset/pull/24628): Augments the foreign key constraints for the `dashboard_owner`, `report_schedule_owner`, and `slice_owner` tables to include an explicit CASCADE ON DELETE to ensure the relevant ownership records are deleted when a dataset is deleted. Scheduled downtime may be advised.
- [24488](https://github.com/apache/superset/pull/24488): Augments the foreign key constraints for the `sql_metrics`, `sqlatable_user`, and `table_columns` tables which reference the `tables` table to include an explicit CASCADE ON DELETE to ensure the relevant records are deleted when a dataset is deleted. Scheduled downtime may be advised.
- [24232](https://github.com/apache/superset/pull/24232): Enables ENABLE_TEMPLATE_REMOVE_FILTERS, DRILL_TO_DETAIL, DASHBOARD_CROSS_FILTERS by default, marks VERSIONED_EXPORT and ENABLE_TEMPLATE_REMOVE_FILTERS as deprecated.
- [23652](https://github.com/apache/superset/pull/23652): Enables GENERIC_CHART_AXES feature flag by default.
@@ -135,7 +130,7 @@ assists people when migrating to a new version.
### Breaking Changes
- [24686](https://github.com/apache/superset/pull/24686): All dataset's custom explore_url are handled as relative URLs on the frontend, behaviour controlled by PREVENT_UNSAFE_DEFAULT_URLS_ON_DATASET.
- [24262](https://github.com/apache/superset/pull/24262): Enabled `TALISMAN_ENABLED` flag by default and provided stricter default Content Security Policy
- [24415](https://github.com/apache/superset/pull/24415): Removed the obsolete Druid NoSQL REGEX operator.
- [24423](https://github.com/apache/superset/pull/24423): Removed deprecated APIs `/superset/slice_json/...`, `/superset/annotation_json/...`
@@ -231,8 +226,7 @@ assists people when migrating to a new version.
- [19273](https://github.com/apache/superset/pull/19273): The `SUPERSET_CELERY_WORKERS` and `SUPERSET_WORKERS` config keys have been removed. Configure Celery directly using `CELERY_CONFIG` on Superset.
- [19231](https://github.com/apache/superset/pull/19231): The `ENABLE_REACT_CRUD_VIEWS` feature flag has been removed (permanently enabled). Any deployments which had set this flag to false will need to verify that the React views support their use case.
- [19230](https://github.com/apache/superset/pull/19230): The `ROW_LEVEL_SECURITY` feature flag has been removed (permanently enabled). Any deployments which had set this flag to false will need to verify that the presence of the Row Level Security feature does not interfere with their use case.
- [19168](https://github.com/apache/superset/pull/19168): Celery upgrade to 5.X resulted in breaking changes to its command line invocation. Please follow [these](https://docs.celeryq.dev/en/stable/whatsnew-5.2.html#step-1-adjust-your-command-line-invocation) instructions for adjustments. Also consider migrating your Celery config per [here](https://docs.celeryq.dev/en/stable/userguide/configuration.html#conf-old-settings-map).
- [19142](https://github.com/apache/superset/pull/19142): The `VERSIONED_EXPORT` config key is now `True` by default.
- [19113](https://github.com/apache/superset/pull/19113): The `ENABLE_JAVASCRIPT_CONTROLS` config key has moved from an app config to a feature flag. Any deployments who overrode this setting will now need to override the feature flag from here onward.
- [19107](https://github.com/apache/superset/pull/19107): The `SQLLAB_BACKEND_PERSISTENCE` feature flag is now `True` by default, which enables persisting SQL Lab tabs in the backend instead of the browser's `localStorage`.
@@ -320,7 +314,8 @@ html#step-1-adjust-your-command-line-invocation) instructions for adjustments. A
### Potential Downtime
- [14234](https://github.com/apache/superset/pull/14234): Adds the `limiting_factor` column to the `query` table. Given that the migration includes a DDL operation on a heavily trafficked table, potential service downtime may be required.
- [16454](https://github.com/apache/superset/pull/16454): Adds the `extra` column to the `table_columns` table. Users using MySQL will either need to schedule downtime or use the percona toolkit (or similar) to perform the migration.
## 1.2.0
@@ -542,7 +537,7 @@ html#step-1-adjust-your-command-line-invocation) instructions for adjustments. A
- [8117](https://github.com/apache/superset/pull/8117): If you are
using `ENABLE_PROXY_FIX = True`, review the newly-introduced variable,
`PROXY_FIX_CONFIG`, which changes the proxy behavior in accordance with
Werkzeug.
[Werkzeug](https://werkzeug.palletsprojects.com/en/0.15.x/middleware/proxy_fix/)
- [8069](https://github.com/apache/superset/pull/8069): introduces
[MessagePack](https://github.com/msgpack/msgpack-python) and

View File

@@ -16,12 +16,12 @@
#
# -----------------------------------------------------------------------
# We don't support docker compose for production environments.
# We don't support docker-compose for production environments.
# If you choose to use this type of deployment make sure to
# create your own docker environment file (docker/.env) with your own
# unique random secure passwords and SECRET_KEY.
# -----------------------------------------------------------------------
x-superset-image: &superset-image apachesuperset.docker.scarf.sh/apache/superset:${TAG:-latest-dev}
x-superset-image: &superset-image apachesuperset.docker.scarf.sh/apache/superset:${TAG:-latest}
x-superset-depends-on: &superset-depends-on
- db
- redis
@@ -30,6 +30,7 @@ x-superset-volumes:
- ./docker:/app/docker
- superset_home:/app/superset_home
version: "3.7"
services:
redis:
image: redis:7

View File

@@ -16,7 +16,7 @@
#
# -----------------------------------------------------------------------
# We don't support docker compose for production environments.
# We don't support docker-compose for production environments.
# If you choose to use this type of deployment make sure to
# create your own docker environment file (docker/.env) with your own
# unique random secure passwords and SECRET_KEY.
@@ -35,6 +35,7 @@ x-common-build: &common-build
cache_from:
- apache/superset-cache:3.10-slim-bookworm
version: "4.0"
services:
redis:
image: redis:7

View File

@@ -16,7 +16,7 @@
#
# -----------------------------------------------------------------------
# We don't support docker compose for production environments.
# We don't support docker-compose for production environments.
# If you choose to use this type of deployment make sure to
# create your own docker environment file (docker/.env) with your own
# unique random secure passwords and SECRET_KEY.
@@ -38,8 +38,6 @@ x-common-build: &common-build
target: dev
cache_from:
- apache/superset-cache:3.10-slim-bookworm
args:
DEV_MODE: "true"
services:
nginx:
@@ -122,7 +120,7 @@ services:
- /home/superset-websocket/dist
# Mounting a config file that contains a dummy secret required to boot up.
# do not use this docker compose in production
# do not use this docker-compose in production
- ./docker/superset-websocket/config.json:/home/superset-websocket/config.json
environment:
- PORT=8080
@@ -149,18 +147,10 @@ services:
disable: true
superset-node:
build:
context: .
target: superset-node
args:
# This prevents building the frontend bundle since we'll mount local folder
# and build it on startup while firing docker-frontend.sh in dev mode, where
# it'll mount and watch local files and rebuild as you update them
DEV_MODE: "true"
image: node:18
environment:
# set this to false if you have perf issues running the npm i; npm run dev in-docker
# if you do so, you have to run this manually on the host, which should perform better!
BUILD_SUPERSET_FRONTEND_IN_DOCKER: true
SCARF_ANALYTICS: "${SCARF_ANALYTICS:-}"
container_name: superset_node
command: ["/app/docker/docker-frontend.sh"]

View File

@@ -24,16 +24,12 @@ if [ "$PUPPETEER_SKIP_CHROMIUM_DOWNLOAD" = "false" ]; then
fi
if [ "$BUILD_SUPERSET_FRONTEND_IN_DOCKER" = "true" ]; then
echo "Building Superset frontend in dev mode inside docker container"
cd /app/superset-frontend
echo "Running `npm install`"
npm install
npm install -f --no-optional --global webpack webpack-cli
npm install -f --no-optional
echo "Running frontend"
npm run dev
else
echo "Skipping frontend build steps - YOU NEED TO RUN IT MANUALLY ON THE HOST!"
echo "https://superset.apache.org/docs/contributing/development/#webpack-dev-server"
echo "Skipping frontend build steps - YOU RUN IT MANUALLY ON THE HOST!"
fi

View File

@@ -37,7 +37,7 @@ Init Step ${1}/${STEP_CNT} [${2}] -- ${3}
EOF
}
ADMIN_PASSWORD="${ADMIN_PASSWORD:-admin}"
ADMIN_PASSWORD="admin"
# If Cypress run overwrite the password for admin and export env variables
if [ "$CYPRESS_CONFIG" == "true" ]; then
ADMIN_PASSWORD="general"
@@ -57,7 +57,7 @@ superset fab create-admin \
--firstname Superset \
--lastname Admin \
--email admin@superset.com \
--password "$ADMIN_PASSWORD"
--password $ADMIN_PASSWORD
echo_step "2" "Complete" "Setting up admin user"
# Create default roles and permissions
echo_step "3" "Starting" "Setting up roles and perms"

View File

@@ -74,12 +74,7 @@ DATA_CACHE_CONFIG = CACHE_CONFIG
class CeleryConfig:
broker_url = f"redis://{REDIS_HOST}:{REDIS_PORT}/{REDIS_CELERY_DB}"
imports = (
"superset.sql_lab",
"superset.tasks.scheduler",
"superset.tasks.thumbnails",
"superset.tasks.cache",
)
imports = ("superset.sql_lab",)
result_backend = f"redis://{REDIS_HOST}:{REDIS_PORT}/{REDIS_RESULTS_DB}"
worker_prefetch_multiplier = 1
task_acks_late = False

View File

@@ -18,7 +18,7 @@
#
# This is an example "local" configuration file. In order to set/override config
# options that ONLY apply to your local environment, simply copy/rename this file
# to docker/pythonpath_dev/superset_config_docker.py
# to docker/pythonpath/superset_config_docker.py
# It ends up being imported by docker/superset_config.py which is loaded by
# superset/config.py
#

View File

@@ -26,7 +26,6 @@ gunicorn \
--workers ${SERVER_WORKER_AMOUNT:-1} \
--worker-class ${SERVER_WORKER_CLASS:-gthread} \
--threads ${SERVER_THREADS_AMOUNT:-20} \
--log-level "${GUNICORN_LOGLEVEL:-info}" \
--timeout ${GUNICORN_TIMEOUT:-60} \
--keep-alive ${GUNICORN_KEEPALIVE:-2} \
--max-requests ${WORKER_MAX_REQUESTS:-0} \

View File

@@ -1 +1 @@
v20.16.0
v20.12.2

View File

@@ -77,7 +77,6 @@
"Guyana",
"Haiti",
"Honduras",
"Hungary",
"Iceland",
"India",
"Indonesia",

View File

@@ -251,18 +251,15 @@ FROM apache/superset:3.1.0
USER root
RUN apt-get update && \
apt-get install -y wget zip libaio1
RUN export CHROMEDRIVER_VERSION=$(curl --silent https://googlechromelabs.github.io/chrome-for-testing/LATEST_RELEASE_116) && \
wget -O google-chrome-stable_current_amd64.deb -q http://dl.google.com/linux/chrome/deb/pool/main/g/google-chrome-stable/google-chrome-stable_${CHROMEDRIVER_VERSION}-1_amd64.deb && \
wget -q https://dl.google.com/linux/direct/google-chrome-stable_current_amd64.deb && \
apt-get install -y --no-install-recommends ./google-chrome-stable_current_amd64.deb && \
rm -f google-chrome-stable_current_amd64.deb
RUN export CHROMEDRIVER_VERSION=$(curl --silent https://googlechromelabs.github.io/chrome-for-testing/LATEST_RELEASE_116) && \
wget -q https://storage.googleapis.com/chrome-for-testing-public/${CHROMEDRIVER_VERSION}/linux64/chromedriver-linux64.zip && \
unzip -j chromedriver-linux64.zip -d /usr/bin && \
RUN export CHROMEDRIVER_VERSION=$(curl --silent https://chromedriver.storage.googleapis.com/LATEST_RELEASE_102) && \
wget -q https://chromedriver.storage.googleapis.com/${CHROMEDRIVER_VERSION}/chromedriver_linux64.zip && \
unzip chromedriver_linux64.zip -d /usr/bin && \
chmod 755 /usr/bin/chromedriver && \
rm -f chromedriver-linux64.zip
rm -f chromedriver_linux64.zip
RUN pip install --no-cache gevent psycopg2 redis

View File

@@ -14,7 +14,7 @@ SimpleCache (in-memory), or the local filesystem.
are also supported.
Caching can be configured by providing dictionaries in
`superset_config.py` that comply with [the Flask-Caching config specifications](https://flask-caching.readthedocs.io/en/latest/#configuring-flask-caching).
`superset_config.py` that comply with[the Flask-Caching config specifications](https://flask-caching.readthedocs.io/en/latest/#configuring-flask-caching).
The following cache configurations can be customized in this way:
- Dashboard filter state (required): `FILTER_STATE_CACHE_CONFIG`.
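As a hedged sketch, a Redis-backed filter state cache might look like this (the backend choice and URL are illustrative assumptions, not requirements):

```python
# superset_config.py -- one such Flask-Caching dictionary (placeholder values)
FILTER_STATE_CACHE_CONFIG = {
    "CACHE_TYPE": "RedisCache",           # any Flask-Caching backend works
    "CACHE_DEFAULT_TIMEOUT": 86400,       # keep dashboard filter state for a day
    "CACHE_KEY_PREFIX": "superset_filter_",
    "CACHE_REDIS_URL": "redis://localhost:6379/2",
}
```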

View File

@@ -15,9 +15,9 @@ variables and objects exposed act as a public interface of the bulk of what you
to configure, alter and interface with. In this python module, you'll find all these
parameters, sensible defaults, as well as rich documentation in the form of comments
To configure your application, you need to create your own configuration module, which
To configure your application, you need to create you own configuration module, which
will allow you to override few or many of these parameters. Instead of altering the core module,
you'll want to define your own module (typically a file named `superset_config.py`).
You'll want to define your own module (typically a file named `superset_config.py`.
Add this file to your `PYTHONPATH` or create an environment variable
`SUPERSET_CONFIG_PATH` specifying the full path of the `superset_config.py`.
@@ -28,7 +28,7 @@ For example, if deploying on Superset directly on a Linux-based system where you
export SUPERSET_CONFIG_PATH=/app/superset_config.py
```
If you are using your own custom Dockerfile with the official Superset image as base image,
If you are using your own custom Dockerfile with official Superset image as base image,
then you can add your overrides as shown below:
```bash
@@ -36,8 +36,8 @@ COPY --chown=superset superset_config.py /app/
ENV SUPERSET_CONFIG_PATH /app/superset_config.py
```
Docker compose deployments handle application configuration differently using specific conventions.
Refer to the [docker compose tips & configuration](/docs/installation/docker-compose#docker-compose-tips--configuration)
Docker compose deployments handle application configuration differently using specific conventions..
Refer to the [docker-compose tips & configuration](/docs/installation/docker-compose#docker-compose-tips--configuration)
for details.
The following is an example of just a few of the parameters you can set in your `superset_config.py` file:
@@ -87,7 +87,7 @@ can be altered in your local `superset_config.py`. Administrators will want to r
to understand what can be configured locally as well as the default values in place.
Since `superset_config.py` acts as a Flask configuration module, it can be used to alter the
settings of Flask itself, as well as Flask extensions that Superset bundles like
settings Flask itself, as well as Flask extensions that Superset bundles like
`flask-wtf`, `flask-caching`, `flask-migrate`,
and `flask-appbuilder`. Each one of these extensions offers intricate configurability.
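For illustration, a couple of such overrides might look like this sketch (both keys are standard Flask / flask-wtf settings; the values are illustrative, not recommendations):

```python
# superset_config.py -- sketch: tuning Flask itself and a bundled extension
SESSION_COOKIE_SECURE = True        # plain Flask setting: send cookies over HTTPS only
WTF_CSRF_TIME_LIMIT = 60 * 60 * 24  # flask-wtf: CSRF token lifetime, in seconds
```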
Flask App Builder, the web framework used by Superset, also offers many
@@ -95,7 +95,15 @@ configuration settings. Please consult the
[Flask App Builder Documentation](https://flask-appbuilder.readthedocs.org/en/latest/config.html)
for more information on how to configure it.
At the very least, you'll want to change `SECRET_KEY` and `SQLALCHEMY_DATABASE_URI`. Continue reading for more about each of these.
You'll want to change:
- `SECRET_KEY`: to a long random string
- `SQLALCHEMY_DATABASE_URI`: that by default points to sqlite database located at
~/.superset/superset.db
```
WTF_CSRF_EXEMPT_LIST = []
```
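A minimal sketch of those two overrides, assuming a local Postgres metadata database (credentials and host are placeholders):

```python
# superset_config.py -- minimal sketch (placeholder values)
SECRET_KEY = "CHANGE_ME_TO_A_LONG_RANDOM_STRING"
# Move the metadata DB off the default on-disk SQLite file
SQLALCHEMY_DATABASE_URI = "postgresql://superset:superset@localhost:5432/superset"
```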
## Specifying a SECRET_KEY
@@ -313,103 +321,7 @@ CUSTOM_SECURITY_MANAGER = CustomSsoSecurityManager
}
]
```
### Keycloak-Specific Configuration using Flask-OIDC
If you are using Keycloak as an OpenID Connect 1.0 provider, the above configuration based on [`Authlib`](https://authlib.org/) might not work. In this case, using [`Flask-OIDC`](https://pypi.org/project/flask-oidc/) is a viable option.
Make sure the pip package [`Flask-OIDC`](https://pypi.org/project/flask-oidc/) is installed on the webserver. This was successfully tested using version 2.2.0. This package requires [`Flask-OpenID`](https://pypi.org/project/Flask-OpenID/) as a dependency.
The following code defines a new security manager. Add it to a new file named `keycloak_security_manager.py`, placed in the same directory as your `superset_config.py` file.
```python
from flask_appbuilder.security.manager import AUTH_OID
from superset.security import SupersetSecurityManager
from flask_oidc import OpenIDConnect
from flask_appbuilder.security.views import AuthOIDView
from flask_login import login_user
from urllib.parse import quote
from flask_appbuilder.views import ModelView, SimpleFormView, expose
from flask import (
redirect,
request
)
import logging
class OIDCSecurityManager(SupersetSecurityManager):
def __init__(self, appbuilder):
super(OIDCSecurityManager, self).__init__(appbuilder)
if self.auth_type == AUTH_OID:
self.oid = OpenIDConnect(self.appbuilder.get_app)
self.authoidview = AuthOIDCView
class AuthOIDCView(AuthOIDView):
@expose('/login/', methods=['GET', 'POST'])
def login(self, flag=True):
sm = self.appbuilder.sm
oidc = sm.oid
@self.appbuilder.sm.oid.require_login
def handle_login():
user = sm.auth_user_oid(oidc.user_getfield('email'))
if user is None:
info = oidc.user_getinfo(['preferred_username', 'given_name', 'family_name', 'email'])
user = sm.add_user(info.get('preferred_username'), info.get('given_name'), info.get('family_name'),
info.get('email'), sm.find_role('Gamma'))
login_user(user, remember=False)
return redirect(self.appbuilder.get_url_for_index)
return handle_login()
@expose('/logout/', methods=['GET', 'POST'])
def logout(self):
oidc = self.appbuilder.sm.oid
oidc.logout()
super(AuthOIDCView, self).logout()
redirect_url = request.url_root.strip('/') + self.appbuilder.get_url_for_login
return redirect(
oidc.client_secrets.get('issuer') + '/protocol/openid-connect/logout?redirect_uri=' + quote(redirect_url))
```
Then add to your `superset_config.py` file:
```python
from keycloak_security_manager import OIDCSecurityManager
from flask_appbuilder.security.manager import AUTH_OID, AUTH_REMOTE_USER, AUTH_DB, AUTH_LDAP, AUTH_OAUTH
import os
AUTH_TYPE = AUTH_OID
SECRET_KEY = 'SomethingNotEntirelySecret'
OIDC_CLIENT_SECRETS = '/path/to/client_secret.json'
OIDC_ID_TOKEN_COOKIE_SECURE = False
OIDC_OPENID_REALM = '<myRealm>'
OIDC_INTROSPECTION_AUTH_METHOD = 'client_secret_post'
CUSTOM_SECURITY_MANAGER = OIDCSecurityManager
# Allow user self-registration, creating Flask users from the authorized user
AUTH_USER_REGISTRATION = True
# The default user self registration role
AUTH_USER_REGISTRATION_ROLE = 'Public'
```
Store your client-specific OpenID information in a file called `client_secret.json`. Create this file in the same directory as `superset_config.py`:
```json
{
"<myOpenIDProvider>": {
"issuer": "https://<myKeycloakDomain>/realms/<myRealm>",
"auth_uri": "https://<myKeycloakDomain>/realms/<myRealm>/protocol/openid-connect/auth",
"client_id": "https://<myKeycloakDomain>",
"client_secret": "<myClientSecret>",
"redirect_uris": [
"https://<SupersetWebserver>/oauth-authorized/<myOpenIDProvider>"
],
"userinfo_uri": "https://<myKeycloakDomain>/realms/<myRealm>/protocol/openid-connect/userinfo",
"token_uri": "https://<myKeycloakDomain>/realms/<myRealm>/protocol/openid-connect/token",
"token_introspection_uri": "https://<myKeycloakDomain>/realms/<myRealm>/protocol/openid-connect/token/introspect"
}
}
```
## LDAP Authentication
FAB supports authenticating user credentials against an LDAP server.
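As a hedged sketch (the server details are placeholders; the keys are standard FAB LDAP settings):

```python
# superset_config.py -- sketch of FAB LDAP authentication (placeholder server)
from flask_appbuilder.security.manager import AUTH_LDAP

AUTH_TYPE = AUTH_LDAP
AUTH_LDAP_SERVER = "ldap://ldap.example.com"
AUTH_LDAP_SEARCH = "ou=people,dc=example,dc=com"
AUTH_LDAP_UID_FIELD = "uid"
AUTH_USER_REGISTRATION = True          # create Flask users on first successful login
AUTH_USER_REGISTRATION_ROLE = "Gamma"  # default role for self-registered users
```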

View File

@@ -54,8 +54,7 @@ are compatible with Superset.
| [Azure MS SQL](/docs/configuration/databases#sql-server) | `pip install pymssql` | `mssql+pymssql://UserName@presetSQL:TestPassword@presetSQL.database.windows.net:1433/TestSchema` |
| [ClickHouse](/docs/configuration/databases#clickhouse) | `pip install clickhouse-connect` | `clickhousedb://{username}:{password}@{hostname}:{port}/{database}` |
| [CockroachDB](/docs/configuration/databases#cockroachdb) | `pip install cockroachdb` | `cockroachdb://root@{hostname}:{port}/{database}?sslmode=disable` |
| [Couchbase](/docs/configuration/databases#couchbase) | `pip install couchbase-sqlalchemy` | `couchbase://{username}:{password}@{hostname}:{port}?truststorepath={ssl certificate path}` |
| [Dremio](/docs/configuration/databases#dremio) | `pip install sqlalchemy_dremio` |`dremio+flight://{username}:{password}@{host}:31010, For ODBC dremio+pyodbc://{username}:{password}@{host}:32010` |
| [Dremio](/docs/configuration/databases#dremio) | `pip install sqlalchemy_dremio` | `dremio://user:pwd@host:31010/` |
| [Elasticsearch](/docs/configuration/databases#elasticsearch) | `pip install elasticsearch-dbapi` | `elasticsearch+http://{user}:{password}@{host}:9200/` |
| [Exasol](/docs/configuration/databases#exasol) | `pip install sqlalchemy-exasol` | `exa+pyodbc://{username}:{password}@{hostname}:{port}/my_schema?CONNECTIONLCALL=en_US.UTF-8&driver=EXAODBC` |
| [Google BigQuery](/docs/configuration/databases#google-bigquery) | `pip install sqlalchemy-bigquery` | `bigquery://{project_id}` |
@@ -65,7 +64,6 @@ are compatible with Superset.
| [IBM Db2](/docs/configuration/databases#ibm-db2) | `pip install ibm_db_sa` | `db2+ibm_db://` |
| [IBM Netezza Performance Server](/docs/configuration/databases#ibm-netezza-performance-server) | `pip install nzalchemy` | `netezza+nzpy://<UserName>:<DBPassword>@<Database Host>/<Database Name>` |
| [MySQL](/docs/configuration/databases#mysql) | `pip install mysqlclient` | `mysql://<UserName>:<DBPassword>@<Database Host>/<Database Name>` |
| [OceanBase](/docs/configuration/databases#oceanbase) | `pip install oceanbase_py` | `oceanbase://<UserName>:<DBPassword>@<Database Host>/<Database Name>` |
| [Oracle](/docs/configuration/databases#oracle) | `pip install cx_Oracle` | `oracle://` |
| [PostgreSQL](/docs/configuration/databases#postgres) | `pip install psycopg2` | `postgresql://<UserName>:<DBPassword>@<Database Host>/<Database Name>` |
| [Presto](/docs/configuration/databases#presto) | `pip install pyhive` | `presto://` |
@@ -374,23 +372,6 @@ cockroachdb://root@{hostname}:{port}/{database}?sslmode=disable
```
#### Couchbase
Superset's Couchbase connection is designed to support two services: Couchbase Analytics and Couchbase Columnar.
The recommended connector library for couchbase is
[couchbase-sqlalchemy](https://github.com/couchbase/couchbase-sqlalchemy).
```
pip install couchbase-sqlalchemy
```
The expected connection string is formatted as follows:
```
couchbase://{username}:{password}@{hostname}:{port}?truststorepath={certificate path}?ssl={true/false}
```
#### CrateDB
The recommended connector library for CrateDB is
@@ -522,7 +503,7 @@ The recommended connector library for Dremio is
The expected connection string for ODBC (Default port is 31010) is formatted as follows:
```
dremio+pyodbc://{username}:{password}@{host}:{port}/{database_name}/dremio?SSL=1
dremio://{username}:{password}@{host}:{port}/{database_name}/dremio?SSL=1
```
The expected connection string for Arrow Flight (Dremio 4.9.1+. Default port is 32010) is formatted as follows:
@@ -1007,19 +988,6 @@ Here's the recommended connection string:
netezza+nzpy://{username}:{password}@{hostname}:{port}/{database}
```
#### OceanBase
The [sqlalchemy-oceanbase](https://pypi.org/project/oceanbase_py/) library is the recommended
way to connect to OceanBase through SQLAlchemy.
The connection string for OceanBase looks like this:
```
oceanbase://<User>:<Password>@<Host>:<Port>/<Database>
```
#### Ocient DB
The recommended connector library for Ocient is [sqlalchemy-ocient](https://pypi.org/project/sqlalchemy-ocient).
@@ -1067,11 +1035,6 @@ The expected connection string using username and password is formatted as follo
pinot://<username>:<password>@<pinot-broker-host>:<pinot-broker-port>/query/sql?controller=http://<pinot-controller-host>:<pinot-controller-port>/verify_ssl=true
```
If you want to use explore view or joins, window functions, etc. then enable [multi-stage query engine](https://docs.pinot.apache.org/reference/multi-stage-engine).
Add the argument below when creating the database connection, under Advanced -> Other -> ENGINE PARAMETERS
```
{"connect_args":{"use_multistage_engine":"true"}}
```
#### Postgres

View File

@@ -1,4 +1,3 @@
---
title: Network and Security Settings
sidebar_position: 7
@@ -25,65 +24,9 @@ The following keys in `superset_config.py` can be specified to configure CORS:
## HTTP headers
Note that Superset bundles [flask-talisman](https://pypi.org/project/talisman/)
Self-described as a small Flask extension that handles setting HTTP headers that can help
Self-descried as a small Flask extension that handles setting HTTP headers that can help
protect against a few common web application security issues.
## HTML Embedding of Dashboards and Charts
There are two ways to embed a dashboard: Using the [SDK](https://www.npmjs.com/package/@superset-ui/embedded-sdk) or embedding a direct link. Note that in the latter case everybody who knows the link is able to access the dashboard.
### Embedding a Public Direct Link to a Dashboard
This works by first changing the content security policy (CSP) of [flask-talisman](https://github.com/GoogleCloudPlatform/flask-talisman) to allow for certain domains to display Superset content. Then a dashboard can be made publicly accessible, i.e. **bypassing authentication**. Once made public, the dashboard's URL can be added to an iframe in another website's HTML code.
#### Changing flask-talisman CSP
Add to `superset_config.py` the entire `TALISMAN_CONFIG` section from `config.py` and include a `frame-ancestors` section:
```python
TALISMAN_ENABLED = True
TALISMAN_CONFIG = {
"content_security_policy": {
...
"frame-ancestors": ["*.my-domain.com", "*.another-domain.com"],
...
```
Restart Superset for this configuration change to take effect.
#### Making a Dashboard Public
1. Add the `'DASHBOARD_RBAC': True` [Feature Flag](https://github.com/apache/superset/blob/master/RESOURCES/FEATURE_FLAGS.md) to `superset_config.py` (as shown in the sketch after this list)
2. Add the `Public` role to your dashboard as described [here](https://superset.apache.org/docs/using-superset/creating-your-first-dashboard/#manage-access-to-dashboards)
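Step 1 above in `superset_config.py` would look like this sketch:

```python
# superset_config.py -- enable dashboard role-based access control
FEATURE_FLAGS = {
    "DASHBOARD_RBAC": True,
}
```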
#### Embedding a Public Dashboard
Now anybody can directly access the dashboard's URL. You can embed it in an iframe like so:
```html
<iframe
width="600"
height="400"
seamless
frameBorder="0"
scrolling="no"
src="https://superset.my-domain.com/superset/dashboard/10/?standalone=1&height=400"
>
</iframe>
```
#### Embedding a Chart
A chart's embed code can be generated by going to a chart's edit view and then clicking at the top right on `...` > `Share` > `Embed code`
### Enabling Embedding via the SDK
Clicking on `...` next to `EDIT DASHBOARD` on the top right of the dashboard's overview page should yield a drop-down menu including the entry "Embed dashboard".
To enable this entry, add the following line to the `.env` file:
```text
SUPERSET_FEATURE_EMBEDDED_SUPERSET=true
```
## CSRF settings
Similarly, [flask-wtf](https://flask-wtf.readthedocs.io/en/0.15.x/config/) is used to manage

View File

@@ -17,8 +17,8 @@ made available in the Jinja context:
- `columns`: columns which to group by in the query
- `filter`: filters applied in the query
- `from_dttm`: start `datetime` value from the selected time range (`None` if undefined) (deprecated beginning in version 5.0, use `get_time_filter` instead)
- `to_dttm`: end `datetime` value from the selected time range (`None` if undefined). (deprecated beginning in version 5.0, use `get_time_filter` instead)
- `from_dttm`: start `datetime` value from the selected time range (`None` if undefined)
- `to_dttm`: end `datetime` value from the selected time range (`None` if undefined)
- `groupby`: columns which to group by in the query (deprecated)
- `metrics`: aggregate expressions in the query
- `row_limit`: row limit of the query
@@ -94,7 +94,7 @@ There is a special ``_filters`` parameter which can be used to test filters used
```sql
SELECT action, count(*) as times
FROM logs
WHERE action in {{ filter_values('action_type')|where_in }}
WHERE action in {{ filter_values('action_type'))|where_in }}
GROUP BY action
```
@@ -346,78 +346,6 @@ Here's a concrete example:
order by lineage, level
```
**Time Filter**
The `{{ get_time_filter() }}` macro returns the time filter applied to a specific column. This is useful if you want
to handle time filters inside the virtual dataset, as by default the time filter is placed on the outer query. This can
considerably improve performance, as many databases and query engines are able to optimize the query better
if the temporal filter is placed on the inner query, as opposed to the outer query.
The macro takes the following parameters:
- `column`: Name of the temporal column. Leave undefined to reference the time range from a Dashboard Native Time Range
filter (when present).
- `default`: The default value to fall back to if the time filter is not present, or has the value `No filter`
- `target_type`: The target temporal type as recognized by the target database (e.g. `TIMESTAMP`, `DATE` or
`DATETIME`). If `column` is defined, the format will default to the type of the column. This is used to produce
the format of the `from_expr` and `to_expr` properties of the returned `TimeFilter` object.
- `strftime`: format using the `strftime` method of `datetime` for custom time formatting.
([see docs for valid format codes](https://docs.python.org/3/library/datetime.html#strftime-and-strptime-format-codes)).
When defined, `target_type` will be ignored.
- `remove_filter`: When set to true, mark the filter as processed, removing it from the outer query. Useful when a
filter should only apply to the inner query.
The return type has the following properties:
- `from_expr`: the start of the time filter (if any)
- `to_expr`: the end of the time filter (if any)
- `time_range`: The applied time range
Here's a concrete example using the `logs` table from the Superset metastore:
```
{% set time_filter = get_time_filter("dttm", remove_filter=True) %}
{% set from_expr = time_filter.from_expr %}
{% set to_expr = time_filter.to_expr %}
{% set time_range = time_filter.time_range %}
SELECT
*,
'{{ time_range }}' as time_range
FROM logs
{% if from_expr or to_expr %}WHERE 1 = 1
{% if from_expr %}AND dttm >= {{ from_expr }}{% endif %}
{% if to_expr %}AND dttm < {{ to_expr }}{% endif %}
{% endif %}
```
Assuming we are creating a table chart with a simple `COUNT(*)` as the metric with a time filter `Last week` on the
`dttm` column, this would render the following query on Postgres (note the formatting of the temporal filters, and
the absence of time filters on the outer query):
```
SELECT COUNT(*) AS count
FROM
(SELECT *,
'Last week' AS time_range
FROM public.logs
WHERE 1 = 1
AND dttm >= TO_TIMESTAMP('2024-08-27 00:00:00.000000', 'YYYY-MM-DD HH24:MI:SS.US')
AND dttm < TO_TIMESTAMP('2024-09-03 00:00:00.000000', 'YYYY-MM-DD HH24:MI:SS.US')) AS virtual_table
ORDER BY count DESC
LIMIT 1000;
```
When using the `default` parameter, the templated query can be simplified, as the endpoints will always be defined
(to use a fixed time range, you can also use something like `default="2024-08-27 : 2024-09-03"`)
```
{% set time_filter = get_time_filter("dttm", default="Last week", remove_filter=True) %}
SELECT
*,
'{{ time_filter.time_range }}' as time_range
FROM logs
WHERE
dttm >= {{ time_filter.from_expr }}
AND dttm < {{ time_filter.to_expr }}
```
**Datasets**
It's possible to query physical and virtual datasets using the `dataset` macro. This is useful if you've defined computed columns and metrics on your datasets, and want to reuse the definition in adhoc SQL Lab queries.

View File

@@ -18,9 +18,10 @@ which can be joined by anyone):
- [Superset Community Calendar](https://superset.apache.org/community)
More references:
- [Comprehensive Tutorial for Contributing Code to Apache Superset](https://preset.io/blog/tutorial-contributing-code-to-apache-superset/)
- [Superset Wiki (code guidelines and additional resources)](https://github.com/apache/superset/wiki)
## Orientation
Here's a list of repositories that contain Superset-related packages:
@@ -36,6 +37,7 @@ Here's a list of repositories that contain Superset-related packages:
GitHub organization under which we manage Superset-related
small tools, forks and Superset-related experimental ideas.
## Types of Contributions
### Report Bug
@@ -86,7 +88,7 @@ text strings from Superset's UI. You can jump into the existing
language dictionaries at
`superset/translations/<language_code>/LC_MESSAGES/messages.po`, or
even create a dictionary for a new language altogether.
See [Translating](howtos#contributing-translations) for more details.
See [Translating](howtos#contribute-translations) for more details.
### Ask Questions

View File

@@ -6,13 +6,13 @@ version: 1
# Setting up a Development Environment
The documentation in this section is a bit of a patchwork of knowledge representing the
multitude of ways that exist to run Superset (`docker compose`, just "docker", on "metal", using
multitude of ways that exist to run Superset (`docker-compose`, just "docker", on "metal", using
a Makefile).
:::note
Now we have evolved to recommend and support `docker compose` more actively as the main way
Now we have evolved to recommend and support `docker-compose` more actively as the main way
to run Superset for development and preserve your sanity. **Most people should stick to
the first few sections - ("Fork & Clone", "docker compose" and "Installing Dev Tools")**
the first few sections - ("Fork & Clone", "docker-compose" and "Installing Dev Tools")**
:::
## Fork and Clone
@@ -27,16 +27,14 @@ git clone git@github.com:your-username/superset.git
cd superset
```
## docker compose (recommended!)
## docker-compose (recommended!)
Setting things up to squeeze a "hello world" into any part of Superset should be as simple as
```bash
docker compose up
docker-compose up
```
Note that:
- this will pull/build docker images and run a cluster of services, including:
- A Superset **Flask web server**, mounting the local python repo/code
- A Superset **Celery worker**, also mounting the local python repo/code
@@ -54,9 +52,9 @@ Note that:
- You can login with admin/admin
:::caution
Since `docker compose` is primarily designed to run a set of containers on **a single host**
Since `docker-compose` is primarily designed to run a set of containers on **a single host**
and can't credibly support **high availability** as a result, we do not support nor recommend
using our `docker compose` constructs to support production-type use-cases. For single host
using our `docker-compose` constructs to support production-type use-cases. For single host
environments, we recommend using [minikube](https://minikube.sigs.k8s.io/docs/start/) along
our [installing on k8s](https://superset.apache.org/docs/installation/running-on-kubernetes)
documentation.
@@ -66,10 +64,10 @@ configured to be secure.
## Installing Development Tools
:::note
While `docker compose` simplifies a lot of the setup, there are still
While docker-compose simplifies a lot of the setup, there are still
many things you'll want to set up locally to power your IDE, and things like
**commit hooks**, **linters**, and **test-runners**. Note that you can do these
things inside docker images with commands like `docker compose exec superset_app bash` for
things inside docker images with commands like `docker-compose exec superset_app bash` for
instance, but many people like to run that tooling from their host.
:::
@@ -92,56 +90,13 @@ To install run the following:
pre-commit install
```
This will install the hooks in your local repository. From now on, a series of checks will
automatically run whenever you make a Git commit.
A series of checks will now run when you make a git commit.
#### Running Pre-commit Manually
You can also run the pre-commit checks manually in various ways:
- **Run pre-commit on all files (same as CI):**
To run the pre-commit checks across all files in your repository, use the following command:
```bash
pre-commit run --all-files
```
This is the same set of checks that will run during CI, ensuring your changes meet the project's standards.
- **Run pre-commit on a specific file:**
If you want to check or fix a specific file, you can do so by specifying the file path:
```bash
pre-commit run --files path/to/your/file.py
```
This will only run the checks on the file(s) you specify.
- **Run a specific pre-commit check:**
To run a specific check (hook) across all files or a particular file, use the following command:
```bash
pre-commit run <hook_id> --all-files
```
Or for a specific file:
```bash
pre-commit run <hook_id> --files path/to/your/file.py
```
Replace `<hook_id>` with the ID of the specific hook you want to run. You can find the list
of available hooks in the `.pre-commit-config.yaml` file.
## Alternatives to `docker compose`
## Alternatives to docker-compose
:::caution
This part of the documentation is a patchwork of information related to setting up
development environments without `docker compose` and are documented/supported to varying
development environments without `docker-compose` and are documented/supported to varying
degrees. It's been difficult to maintain this wide array of methods and ensure they're
functioning across environments.
:::
@@ -151,7 +106,7 @@ functioning across environments.
#### OS Dependencies
Make sure your machine meets the [OS dependencies](https://superset.apache.org/docs/installation/pypi#os-dependencies) before following these steps.
You also need to install MySQL.
You also need to install MySQL or [MariaDB](https://mariadb.com/downloads).
Ensure that you are using Python version 3.9, 3.10 or 3.11, then proceed with:
@@ -268,7 +223,7 @@ Frontend assets (TypeScript, JavaScript, CSS, and images) must be compiled in or
First, be sure you are using the following versions of Node.js and npm:
- `Node.js`: Version 20
- `Node.js`: Version 18
- `npm`: Version 10
We recommend using [nvm](https://github.com/nvm-sh/nvm) to manage your node environment:
@@ -332,28 +287,22 @@ If while using the above commands you encounter an error related to the limit of
```bash
Error: ENOSPC: System limit for number of file watchers reached
```
The error is thrown because the number of files monitored by the system has reached the limit.
You can address this error by increasing the number of inotify watchers.
The current value of max watches can be checked with:
```bash
cat /proc/sys/fs/inotify/max_user_watches
```
Edit the file `/etc/sysctl.conf` to increase this value.
Edit the file /etc/sysctl.conf to increase this value.
The value needs to be decided based on the system memory [(see this StackOverflow answer for more context)](https://stackoverflow.com/questions/535768/what-is-a-reasonable-amount-of-inotify-watches-with-linux).
Open the file in an editor and add a line at the bottom specifying the max watches value.
```bash
fs.inotify.max_user_watches=524288
```
Save the file and exit the editor.
To confirm that the change succeeded, run the following command to load the updated value of max_user_watches from `sysctl.conf`:
To confirm that the change succeeded, run the following command to load the updated value of max_user_watches from sysctl.conf:
```bash
sudo sysctl -p
```
@@ -365,18 +314,14 @@ The dev server by default starts at `http://localhost:9000` and proxies the back
So a typical development workflow is the following:
1. [run Superset locally](#flask-server) using Flask, on port `8088` — but don't access it directly,<br/>
```bash
# Install Superset and dependencies, plus load your virtual environment first, as detailed above.
superset run -p 8088 --with-threads --reload --debugger --debug
```
2. in parallel, run the Webpack dev server locally on port `9000`,<br/>
```bash
npm run dev-server
```
3. access `http://localhost:9000` (the Webpack server, _not_ Flask) in your web browser. This will use the hot-reloading front-end assets from the Webpack development server while redirecting back-end queries to Flask/Superset: your changes on Superset codebase — either front or back-end — will then be reflected live in the browser.
It's possible to change the Webpack server settings:
@@ -652,7 +597,7 @@ For debugging locally using VSCode, you can configure a launch configuration fil
}
```
#### Raw Docker (without `docker compose`)
#### Raw Docker (without docker-compose)
Follow these instructions to debug the Flask app running inside a docker container. Note that
this will run a barebones Superset web server,
@@ -759,9 +704,9 @@ VSCode will not stop on breakpoints right away. We've attached to PID 6 however
### Debugging Server App in Kubernetes Environment
To debug Flask running in a POD inside a kubernetes cluster, you'll need to make sure the pod runs as root and is granted the SYS_PTRACE capability. These settings should not be used in production environments.
To debug Flask running in POD inside kubernetes cluster. You'll need to make sure the pod runs as root and is granted the SYS_TRACE capability.These settings should not be used in production environments.
```yaml
```
securityContext:
capabilities:
add: ["SYS_PTRACE"]
@@ -771,11 +716,11 @@ See (set capabilities for a container)[https://kubernetes.io/docs/tasks/configur
Once the pod is running as root and has the SYS_PTRACE capability it will be able to debug the Flask app.
You can follow the same instructions as in `docker compose`. Enter the pod and install the required library and packages; gdb, netstat and debugpy.
You can follow the same instructions as in the docker-compose. Enter the pod and install the required library and packages; gdb, netstat and debugpy.
Often in a Kubernetes environment nodes are not addressable from outside the cluster. VSCode will thus be unable to remotely connect to port 5678 on a Kubernetes node. In order to do this you need to create a tunnel that port forwards 5678 to your local machine.
```bash
```
kubectl port-forward pod/superset-<some random id> 5678:5678
```
@@ -856,7 +801,7 @@ Submissions will be considered for submission (or removal) on a case-by-case bas
The output should look like this:
```log
```
INFO [alembic.runtime.migration] Context impl SQLiteImpl.
INFO [alembic.runtime.migration] Will assume transactional DDL.
INFO [alembic.runtime.migration] Running upgrade 1a1d627ebd8e -> 40a0a483dd12, add_metadata_column_to_annotation_model.py
@@ -876,7 +821,7 @@ Submissions will be considered for submission (or removal) on a case-by-case bas
The output should look like this:
```log
```
INFO [alembic.runtime.migration] Context impl SQLiteImpl.
INFO [alembic.runtime.migration] Will assume transactional DDL.
INFO [alembic.runtime.migration] Running downgrade 40a0a483dd12 -> 1a1d627ebd8e, add_metadata_column_to_annotation_model.py

View File

@@ -12,7 +12,7 @@ A philosophy we would like to strongly encourage is
The purpose is to separate the problem from possible solutions.
**Bug fixes:** If youre only fixing a small bug, its fine to submit a pull request right away but we highly recommend filing an issue detailing what youre fixing. This is helpful in case we dont accept that specific fix but want to keep track of the issue. Please keep in mind that the project maintainers reserve the rights to accept or reject incoming PRs, so it is better to separate the issue and the code to fix it from each other. In some cases, project maintainers may request you to create a separate issue from PR before proceeding.
**Bug fixes:** If youre only fixing a small bug, its fine to submit a pull request right away but we highly recommend to file an issue detailing what youre fixing. This is helpful in case we dont accept that specific fix but want to keep track of the issue. Please keep in mind that the project maintainers reserve the rights to accept or reject incoming PRs, so it is better to separate the issue and the code to fix it from each other. In some cases, project maintainers may request you to create a separate issue from PR before proceeding.
**Refactor:** For small refactors, it can be a standalone PR itself detailing what you are refactoring and why. If there are concerns, project maintainers may request you to create a `#SIP` for the PR before proceeding.
@@ -96,6 +96,7 @@ Finally, never submit a PR that will put master branch in broken state. If the P
- Project maintainers may contact the PR author if new issues are introduced by the PR.
- Project maintainers may revert your changes if a critical issue is found, such as breaking master branch CI.
## Managing Issues and PRs
To handle issues and PRs that are coming in, committers read issues/PRs and flag them with labels to categorize them and help contributors spot where to take action, as contributors usually have different areas of expertise.
@@ -151,8 +152,10 @@ Should you decide that reverting is desirable, it is the responsibility of the C
- **Provide concise reproduction steps:** Ensure that the issue can be clearly understood and duplicated by the original author of the PR.
- **Put the revert through code review:** The revert must be approved by another committer.
## Design Guidelines
### Capitalization guidelines
#### Sentence case
@@ -194,11 +197,12 @@ Often a product page will have the same title as the objects it contains. In thi
- Queries that you save will appear on the Saved queries page
- Create custom queries in SQL Lab then create dashboards
#### \*\*Exceptions to sentence case
#### \*\*Exceptions to sentence case:
- Input labels, buttons and UI tabs are all caps
- User input values (e.g. column names, SQL Lab tab names) should be in their original case
## Programming Language Conventions
### Python

View File

@@ -88,7 +88,7 @@ yo @superset-ui/superset
After that the generator will ask a few questions (the defaults should be fine):
```bash
```
$ yo @superset-ui/superset
_-----_ ╭──────────────────────────╮
| | │ Welcome to the │
@@ -125,7 +125,7 @@ $ yo @superset-ui/superset
To build the viz plugin, run the following commands:
```bash
```
npm i --force
npm run build
```
@@ -133,7 +133,7 @@ npm run build
Alternatively, to run the viz plugin in development mode (=rebuilding whenever changes
are made), start the dev server with the following command:
```bash
```
npm run dev
```
@@ -399,7 +399,7 @@ tcp 0 0 0.0.0.0:8088 0.0.0.0:* LISTEN
You are now ready to attach a debugger to the process. Using VSCode you can configure a launch configuration file .vscode/launch.json like so.
```json
```
{
"version": "0.2.0",
"configurations": [
@@ -426,23 +426,23 @@ VSCode will not stop on breakpoints right away. We've attached to PID 6 however
### Debugging Server App in Kubernetes Environment
To debug Flask running in a POD inside a kubernetes cluster, you'll need to make sure the pod runs as root and is granted the `SYS_PTRACE` capability. These settings should not be used in production environments.
To debug Flask running in POD inside kubernetes cluster. You'll need to make sure the pod runs as root and is granted the SYS_TRACE capability.These settings should not be used in production environments.
```yaml
```
securityContext:
capabilities:
add: ["SYS_PTRACE"]
```
See [set capabilities for a container](https://kubernetes.io/docs/tasks/configure-pod-container/security-context/#set-capabilities-for-a-container) for more details.
See (set capabilities for a container)[https://kubernetes.io/docs/tasks/configure-pod-container/security-context/#set-capabilities-for-a-container] for more details.
Once the pod is running as root and has the `SYS_PTRACE` capability it will be able to debug the Flask app.
Once the pod is running as root and has the SYS_PTRACE capability it will be able to debug the Flask app.
You can follow the same instructions as in `docker compose`. Enter the pod and install the required library and packages; gdb, netstat and debugpy.
You can follow the same instructions as in the docker-compose. Enter the pod and install the required library and packages; gdb, netstat and debugpy.
Often in a Kubernetes environment nodes are not addressable from outside the cluster. VSCode will thus be unable to remotely connect to port 5678 on a Kubernetes node. In order to do this you need to create a tunnel that port forwards 5678 to your local machine.
```bash
```
kubectl port-forward pod/superset-<some random id> 5678:5678
```
@@ -465,7 +465,6 @@ We use [Flask-Babel](https://python-babel.github.io/flask-babel/) to translate S
In Python files, we use the following
[translation functions](https://python-babel.github.io/flask-babel/#using-translations)
from `Flask-Babel`:
- `gettext` and `lazy_gettext` (usually aliased to `_`): for translating singular
strings.
- `ngettext`: for translating strings that might become plural.
@@ -520,7 +519,6 @@ easier to translate the application to your target language:
[Flask AppBuilder i18n documentation](https://flask-appbuilder.readthedocs.io/en/latest/i18n.html)
To create a dictionary for a new language, first make sure the necessary dependencies are installed:
```bash
pip install -r superset/translations/requirements.txt
```
@@ -577,20 +575,16 @@ case of the Finnish translation, this would be `superset/translations/fi/LC_MESS
To make the translations available on the frontend, we need to convert the PO file into
a collection of JSON files. To convert all PO files to formatted JSON files you can use
the `build-translation` script
the build-translation script
```bash
# Install dependencies if you haven't already
cd superset-frontend/ && npm ci
# Compile translations for the frontend
npm run build-translation
```
Finally, for the translations to take effect we need to compile translation catalogs into
binary MO files for the backend using `pybabel`.
binary MO files for the backend using pybabel.
```bash
# inside the project root
pybabel compile -d superset/translations
```

View File

@@ -4,6 +4,7 @@ sidebar_position: 9
# FAQ
## How big of a dataset can Superset handle?
Superset can work with even gigantic databases! Superset acts as a thin layer above your underlying
@@ -27,6 +28,7 @@ to occur in spikes, e.g., if everyone in a meeting loads the same dashboard at o
Superset's application metadata does not require a very large database to store it, though
the log file grows over time.
## Can I join / query multiple tables at one time?
Not in the Explore or Visualization UI. A Superset SQLAlchemy datasource can only be a single table
@@ -176,7 +178,7 @@ You can take a look at this Flask-AppBuilder
It is possible on a per-dashboard basis by providing a mapping of labels to colors in the JSON
Metadata attribute using the `label_colors` key.
```json
```
{
"label_colors": {
"Girls": "#FF69B4",

View File

@@ -1,68 +0,0 @@
---
title: Architecture
hide_title: true
sidebar_position: 1
version: 1
---
import useBaseUrl from "@docusaurus/useBaseUrl";
# Architecture
This page is meant to give new administrators an understanding of Superset's components.
## Components
A Superset installation is made up of these components:
1. The Superset application itself
2. A metadata database
3. A caching layer (optional, but necessary for some features)
4. A worker & beat (optional, but necessary for some features)
### Optional components and associated features
The optional components above are necessary to enable these features:
- [Alerts and Reports](/docs/configuration/alerts-reports)
- [Caching](/docs/configuration/cache)
- [Async Queries](/docs/configuration/async-queries-celery/)
- [Dashboard Thumbnails](/docs/configuration/cache/#caching-thumbnails)
If you install with Kubernetes or Docker Compose, all of these components will be created.
However, installing from PyPI only creates the application itself. Users installing from PyPI will need to configure a caching layer, worker, and beat on their own if they wish to enable the above features. Configuration of those components for a PyPI install is not currently covered in this documentation.
Here are further details on each component.
### The Superset Application
This is the core application. Superset operates like this:
- A user visits a chart or dashboard
- That triggers a SQL query to the data warehouse holding the underlying dataset
- The resulting data is served up in a data visualization
- The Superset application comprises the Python (Flask) backend application (server and API layer), the React frontend built via Webpack, and the static assets needed for the application to work
### Metadata Database
This is where chart and dashboard definitions, user information, logs, etc. are stored. Superset is tested to work with PostgreSQL and MySQL databases as the metadata database (not to be confused with a data source like your data warehouse, which could be one of a much greater variety of options like Snowflake, Redshift, etc.).
Some installation methods like our Quickstart and PyPI come configured by default to use a SQLite on-disk database. And in a Docker Compose installation, the data would be stored in a PostgreSQL container volume. Neither of these cases is recommended for production instances of Superset.
For production, a properly-configured, managed, standalone database is recommended. No matter what database you use, you should plan to back it up regularly.
### Caching Layer
The caching layer serves two main functions:
- Store the results of queries to your data warehouse so that when a chart is loaded twice, it pulls from the cache the second time, speeding up the application and reducing load on your data warehouse.
- Act as a message broker for the worker, enabling the Alerts & Reports, async queries, and thumbnail caching features.
Most people use Redis for their cache, but Superset supports other options too. See the [cache docs](/docs/configuration/cache/) for more.
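As a hedged sketch, a Redis-backed setup in `superset_config.py` might look like this (URLs and timeouts are placeholders, and any Flask-Caching backend could be substituted):

```python
# superset_config.py -- sketch of a Redis-backed cache (placeholder values)
CACHE_CONFIG = {
    "CACHE_TYPE": "RedisCache",
    "CACHE_DEFAULT_TIMEOUT": 300,      # seconds before cached chart data expires
    "CACHE_KEY_PREFIX": "superset_",
    "CACHE_REDIS_URL": "redis://localhost:6379/0",
}
DATA_CACHE_CONFIG = CACHE_CONFIG       # reuse the same backend for query results
```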
### Worker and Beat
This is one or more workers who execute tasks like run async queries or take snapshots of reports and send emails, and a "beat" that acts as the scheduler and tells workers when to perform their tasks. Most installations use Celery for these components.
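A minimal sketch of wiring these up in `superset_config.py`, assuming Celery with a Redis broker (the URLs are placeholders):

```python
# superset_config.py -- sketch of a minimal Celery worker/beat configuration
class CeleryConfig:
    broker_url = "redis://localhost:6379/0"   # the caching layer often doubles as broker
    result_backend = "redis://localhost:6379/1"
    imports = ("superset.sql_lab", "superset.tasks.scheduler")
    beat_schedule = {}  # filled in to schedule reports, cache warm-up, etc.

CELERY_CONFIG = CeleryConfig
```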
## Other components
Other components can be incorporated into Superset. The best place to learn about additional configurations is the [Configuration page](/docs/configuration/configuring-superset). For instance, you could set up a load balancer or reverse proxy to implement HTTPS in front of your Superset application, or specify a Mapbox URL to enable geospatial charts, etc.
Superset won't even start without certain configuration settings established, so it's essential to review that page.
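For instance, enabling geospatial charts as mentioned above is a one-line override (the token value is a placeholder):

```python
# superset_config.py -- sketch (hypothetical token)
MAPBOX_API_KEY = "pk.your-mapbox-token"
```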

View File

@@ -1,7 +1,7 @@
---
title: Docker Builds
hide_title: true
sidebar_position: 6
sidebar_position: 5
version: 1
---
@@ -23,24 +23,16 @@ Different sets of images are built and/or published at different times:
- **Merges to the main branch** (`push`): resulting in new SHAs, with tags
prefixed with `master` for the latest `master` version.
## Build presets
# Build presets
We have a set of build "presets" that each represent a combination of
parameters for the build, mostly pointing to a different target layer
for the build and/or a different base image.
Here are the build presets that are exposed through the `build_docker.py` script:
- `lean`: The default Docker image, including both frontend and backend. Tags
without a build_preset are lean builds (ie: `latest`, `4.0.0`, `3.0.0`, ...). `lean`
builds do not contain database
drivers, meaning you need to install your own. That applies to analytics databases **AND
the metadata database**. You'll likely want to layer either `mysqlclient` or `psycopg2-binary`
depending on the metadata database you choose for your installation, plus the required
drivers to connect to your analytics database(s).
- `dev`: For development, with a headless browser, dev-related utilities and root access. This
includes some commonly used database drivers like `mysqlclient`, `psycopg2-binary` and
some other used for development/CI
without a build_preset are lean builds, e.g., `latest`.
- `dev`: For development, with a headless browser, dev-related utilities and root access.
- `py311`, e.g., Py311: Similar to lean but with a different Python version (in this example, 3.11).
- `ci`: For certain CI workloads.
- `websocket`: For Superset clusters supporting advanced features.
@@ -59,29 +51,11 @@ Here are the build presets that are exposed through the `build_docker.py` script
this specific SHA, which could be from a `master` merge, or release.
- `websocket-latest`: The WebSocket image for use in a Superset cluster.
For insights or modifications to the build matrix and tagging conventions,
check the [build_docker.py](https://github.com/apache/superset/blob/master/scripts/build_docker.py)
script and the [docker.yml](https://github.com/apache/superset/blob/master/.github/workflows/docker.yml)
GitHub action.
## Key ARGs in Dockerfile
- `BUILD_TRANSLATIONS`: whether to build the translations into the image. For the
frontend build this tells webpack to strip out all locales other than `en` from
the `moment-timezone` library. For the backend, this skips compiling the
`*.po` translation files
- `DEV_MODE`: whether to skip the frontend build; this is used by our `docker-compose` dev setup
where we mount the local volume and build using `webpack` in `--watch` mode, meaning as you
alter the code in the local file system, webpack, from within a docker image used for this
purpose, will constantly rebuild the frontend as you go. This ARG enables the initial
`docker-compose` build to take much less time and resources
- `INCLUDE_CHROMIUM`: whether to include chromium in the backend build so that it can be
used as a headless browser for workloads related to "Alerts & Reports" and thumbnail generation
- `INCLUDE_FIREFOX`: same as above, but for firefox
- `PY_VER`: specifies the base image for the Python backend; we don't recommend altering
this setting unless you're working on forward or backward compatibility.
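For illustration, a local build overriding a couple of these ARGs might look like this (the tag name is arbitrary):

```bash
docker build \
  --build-arg BUILD_TRANSLATIONS=true \
  --build-arg INCLUDE_CHROMIUM=true \
  -t superset-custom .
```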
## Caching
To accelerate builds, we follow Docker best practices and use `apache/superset-cache`.
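As a rough, unverified sketch of tapping that cache in a local build (the exact cache tag naming varies; check the build scripts referenced above for the current scheme):

```bash
docker buildx build \
  --cache-from=type=registry,ref=apache/superset-cache:latest \
  -t superset-local .
```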
@@ -101,7 +75,7 @@ add database support for the database you need.
Currently all automated builds are multi-platform, supporting both `linux/arm64`
and `linux/amd64`. This enables higher level constructs like `helm` and
`docker compose` to point to these images and effectively be multi-platform
docker-compose to point to these images and effectively be multi-platform
as well.
Pull requests and master builds
@@ -118,7 +92,7 @@ configured in that way). Setting the environment
variable `DOCKER_DEFAULT_PLATFORM` to `linux/amd64` seems to work in
terms of leveraging, and building upon, the Superset builds provided here.
```bash
```
export DOCKER_DEFAULT_PLATFORM=linux/amd64
```


@@ -1,7 +1,7 @@
---
title: Docker Compose
hide_title: true
sidebar_position: 4
sidebar_position: 3
version: 1
---
@@ -9,25 +9,26 @@ import useBaseUrl from "@docusaurus/useBaseUrl";
# Using Docker Compose
<img src={useBaseUrl("/img/docker-compose.webp" )} width="150" />
<br /><br />
:::caution
Since `docker compose` is primarily designed to run a set of containers on **a single host**
Since `docker-compose` is primarily designed to run a set of containers on **a single host**
and can't support requirements for **high availability**, we do not support nor recommend
using our `docker compose` constructs to support production-type use-cases. For single host
using our `docker-compose` constructs to support production-type use-cases. For single host
environments, we recommend using [minikube](https://minikube.sigs.k8s.io/docs/start/) along with
our [installing on k8s](https://superset.apache.org/docs/installation/running-on-kubernetes)
documentation.
:::
As mentioned in our [quickstart guide](/docs/quickstart), the fastest way to try
Superset locally is using Docker Compose on a Linux or Mac OSX
computer. Superset does not have official support for Windows. It's also the easiest
way to launch a fully functioning **development environment** quickly.
Note that there are 3 major ways we support to run `docker compose`:
Note that there are 3 major ways we support to run docker-compose:
1. **docker-compose.yml:** for interactive development, where we mount your local folder with the
frontend/backend files that you can edit and experience the changes you
make in the app in real time
@@ -38,20 +39,16 @@ Note that there are 3 major ways we support to run `docker compose`:
1. **docker-compose-image-tag.yml** where we fetch an image from Docker Hub, say for the
`3.0.0` release for instance, and fire it up so you can try it. Here what's in
the local branch has no effect on what's running; we just fetch and run
pre-built images from Docker Hub. For `docker compose` to work along with the
Postgres image it boots up, you'll want to point to a `-dev`-suffixed TAG, as in
`export TAG=4.0.0-dev` or `export TAG=3.0.0-dev`, with `latest-dev` being the default.
That's because the `dev` builds happen to package the `psycopg2-binary` driver required to connect
to the Postgres database launched as part of the `docker compose` builds.
pre-built images from docker-hub
More on these two approaches after setting up the requirements for either.
## Requirements
Note that this documentation assumes that you have [Docker](https://www.docker.com) and
[git](https://git-scm.com/) installed. Note also that we used to use `docker-compose` but that
is on the path to deprecation so we now use `docker compose` instead.
Note that this documentation assumes that you have [Docker](https://www.docker.com),
[docker-compose](https://docs.docker.com/compose/), and
[git](https://git-scm.com/) installed.
## 1. Clone Superset's GitHub repository
@@ -67,7 +64,7 @@ current directory.
## 2. Launch Superset Through Docker Compose
First let's assume you're familiar with `docker compose` mechanics. Here we'll refer generally
First let's assume you're familiar with docker-compose mechanics. Here we'll refer generally
to `docker compose up` even though in some cases you may want to force a check for newer remote
images using `docker compose pull`, force a build with `docker compose build` or force a build
on latest base images using `docker compose build --pull`. In most cases though, the simple
@@ -112,7 +109,7 @@ Here various release tags, github SHA, and latest `master` can be referenced by
Refer to the docker-related documentation to learn more about existing tags you can point to
from Docker Hub.
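For example, to pin the image-tag compose file to a published release tag as described earlier (the tag value is illustrative; `-dev` variants include the Postgres driver):

```bash
export TAG=4.0.0-dev
docker compose -f docker-compose-image-tag.yml up
```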
## `docker compose` tips & configuration
## docker-compose tips & configuration
:::caution
All of the content belonging to a Superset instance - charts, dashboards, users, etc. - is stored in
@@ -137,7 +134,7 @@ You can install additional python packages and apply config overrides by followi
mentioned in [docker/README.md](https://github.com/apache/superset/tree/master/docker#configuration)
Note that `docker/.env` sets the default environment variables for all the docker images
used by `docker compose`, and that `docker/.env-local` can be used to override those defaults.
used by `docker-compose`, and that `docker/.env-local` can be used to override those defaults.
Also note that `docker/.env-local` is referenced in our `.gitignore`,
preventing developers from risking committing potentially sensitive configuration to the repository.
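As an illustration, `docker/.env-local` might hold overrides like the following (variable names are examples; the authoritative list lives in `docker/.env`):

```bash
# docker/.env-local -- gitignored local overrides of docker/.env defaults
SUPERSET_LOAD_EXAMPLES=no
DATABASE_PASSWORD=change-me
```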
@@ -154,6 +151,7 @@ located in your `PYTHONPATH`, note that it can be done by providing a
The mechanics of this are in `docker/pythonpath_dev/superset_config.py` where you can see
that the logic runs a `from superset_config_docker import *`
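For instance, a sketch of what `docker/pythonpath_dev/superset_config_docker.py` might contain (the settings shown are arbitrary examples):

```python
# Imported via `from superset_config_docker import *` at the end of
# docker/pythonpath_dev/superset_config.py
FEATURE_FLAGS = {"ALERT_REPORTS": True}
ROW_LIMIT = 5000
```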
:::note
Users often want to connect to other databases from Superset. Currently, the easiest way to
do this is to modify the `docker-compose-non-dev.yml` file and add your database as a service that
@@ -216,14 +214,13 @@ connections from the Docker involves making one-line changes to the files `postg
`pg_hba.conf`; you can find helpful links tailored to your OS / PG version on the web easily for
this task. For Docker it suffices to only whitelist IPs `172.0.0.0/8` instead of `*`, but in any
case you are _warned_ that doing this in a production database _may_ have disastrous consequences as
you are opening your database to the public internet.
1. Instead of `localhost`, try using `host.docker.internal` (Mac users, Ubuntu) or `172.18.0.1`
(Linux users) as the hostname when attempting to connect to the database. This is a Docker internal
detail -- what is happening is that, in Mac systems, Docker Desktop creates a dns entry for the
hostname `host.docker.internal` which resolves to the correct address for the host machine, whereas
in Linux this is not the case (at least by default). If neither of these 2 hostnames work then you
may want to find the exact hostname you want to use, for that you can do `ifconfig` or
`ip addr show` and look at the IP address of `docker0` interface that must have been created by
Docker for you. Alternately if you don't even see the `docker0` interface try (if needed with sudo)
`docker network inspect bridge` and see if there is an entry for `"Gateway"` and note the IP
address.
you are opening your database to the public internet. 2. Instead of `localhost`, try using
`host.docker.internal` (Mac users, Ubuntu) or `172.18.0.1` (Linux users) as the hostname when
attempting to connect to the database. This is a Docker internal detail -- what is happening is
that, in Mac systems, Docker Desktop creates a dns entry for the hostname `host.docker.internal`
which resolves to the correct address for the host machine, whereas in Linux this is not the case
(at least by default). If neither of these 2 hostnames work then you may want to find the exact
hostname you want to use, for that you can do `ifconfig` or `ip addr show` and look at the IP
address of `docker0` interface that must have been created by Docker for you. Alternately if you
don't even see the `docker0` interface try (if needed with sudo) `docker network inspect bridge` and
see if there is an entry for `"Gateway"` and note the IP address.


@@ -1,7 +1,7 @@
---
title: Kubernetes
hide_title: true
sidebar_position: 2
sidebar_position: 1
version: 1
---
@@ -9,6 +9,7 @@ import useBaseUrl from "@docusaurus/useBaseUrl";
# Installing on Kubernetes
<img src={useBaseUrl("/img/k8s.png" )} width="150" />
<br /><br />
@@ -26,6 +27,7 @@ For simpler, single host environments, we recommend using
and works fantastically well with the Helm chart referenced here.
:::
## Running
1. Add the Superset helm repository
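That step boils down to something like the following (the chart repo URL matches the project's published Helm repo; verify it against the current docs):

```bash
helm repo add superset https://apache.github.io/superset
helm repo update
```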
@@ -153,7 +155,9 @@ See [Install Database Drivers](/docs/configuration/databases) for more informati
:::
The following example installs the drivers for BigQuery and Elasticsearch, allowing you to connect to these data sources within your Superset setup:
The following example installs the Big Query and Elasticsearch database drivers so that you can
connect to those datasources in your Superset installation:
```yaml
bootstrapScript: |
#!/bin/bash
@@ -430,12 +434,9 @@ configOverrides:
"--disable-extensions",
]
```
### Load the Examples data and dashboards
If you are trying Superset out and want some data and dashboards to explore, you can load some examples by creating a `my_values.yaml` and deploying it as described above in the **Configure your setting overrides** step of the **Running** section.
To load the examples, add the following to the `my_values.yaml` file:
```yaml
init:
loadExamples: true


@@ -1,7 +1,7 @@
---
title: PyPI
hide_title: true
sidebar_position: 3
sidebar_position: 2
version: 1
---
@@ -24,13 +24,13 @@ level dependencies.
The following command will ensure that the required dependencies are installed:
```bash
```
sudo apt-get install build-essential libssl-dev libffi-dev python-dev python-pip libsasl2-dev libldap2-dev default-libmysqlclient-dev
```
In Ubuntu 20.04 the following command will ensure that the required dependencies are installed:
```bash
```
sudo apt-get install build-essential libssl-dev libffi-dev python3-dev python3-pip libsasl2-dev libldap2-dev default-libmysqlclient-dev
```
@@ -38,19 +38,19 @@ sudo apt-get install build-essential libssl-dev libffi-dev python3-dev python3-p
Install the following packages using the `yum` package manager:
```bash
```
sudo yum install gcc gcc-c++ libffi-devel python-devel python-pip python-wheel openssl-devel cyrus-sasl-devel openldap-devel
```
In more recent versions of CentOS and Fedora, you may need to install a slightly different set of packages using `dnf`:
```bash
```
sudo dnf install gcc gcc-c++ libffi-devel python3-devel python3-pip python3-wheel openssl-devel cyrus-sasl-devel openldap-devel
```
Also, on CentOS, you may need to upgrade pip for the install to work:
```bash
```
pip3 install --upgrade pip
```
@@ -60,14 +60,14 @@ If you're not on the latest version of OS X, we recommend upgrading because we'v
issues people have run into are linked to older versions of Mac OS X. After updating, install the
latest version of XCode command line tools:
```bash
```
xcode-select --install
```
We don't recommend using the system installed Python. Instead, first install the
[homebrew](https://brew.sh/) manager and then run the following commands:
```bash
```
brew install readline pkg-config libffi openssl mysql postgresql@14
```
@@ -83,13 +83,13 @@ To identify the Python version used by the official docker image, see the [Docke
Let's also make sure we have the latest version of `pip` and `setuptools`:
```bash
```
pip install --upgrade setuptools pip
```
Lastly, you may need to set LDFLAGS and CFLAGS for certain Python packages to properly build. You can export these variables with:
```bash
```
export LDFLAGS="-L$(brew --prefix openssl)/lib"
export CFLAGS="-I$(brew --prefix openssl)/include"
```
@@ -101,13 +101,13 @@ These will now be available when pip installing requirements.
We highly recommend installing Superset inside of a virtual environment. Python ships with
`virtualenv` out of the box. If you're using [pyenv](https://github.com/pyenv/pyenv), you can install [pyenv-virtualenv](https://github.com/pyenv/pyenv-virtualenv). Or you can install it with `pip`:
```bash
```
pip install virtualenv
```
You can create and activate a virtual environment using:
```bash
```
# virtualenv is shipped in Python 3.6+ as venv instead of pyvenv.
# See https://docs.python.org/3.6/library/venv.html
python3 -m venv venv
@@ -116,7 +116,7 @@ python3 -m venv venv
Or with pyenv-virtualenv:
```bash
```
# Here we name the virtual env 'superset'
pyenv virtualenv superset
pyenv activate superset
@@ -130,13 +130,13 @@ command line.
First, start by installing `apache-superset`:
```bash
```
pip install apache-superset
```
Then, you need to initialize the database:
```bash
```
superset db upgrade
```
@@ -146,7 +146,7 @@ Note that some configuration is mandatory for production instances of Superset.
Finish installing by running through the following commands:
```bash
```
# Create an admin user in your metadata database (use `admin` as username to be able to load the examples)
export FLASK_APP=superset
superset fab create-admin


@@ -1,7 +1,7 @@
---
title: Upgrading Superset
hide_title: true
sidebar_position: 5
sidebar_position: 4
version: 1
---


@@ -34,18 +34,14 @@ $ cd superset
# Fire up Superset using Docker Compose
$ docker compose -f docker-compose-image-tag.yml up
```
This may take a moment as Docker Compose will fetch the underlying
container images and will load up some examples. Once all containers
are downloaded and the output settles, you're ready to log in.
⚠️ If you get an error message like `validating superset\docker-compose-image-tag.yml: services.superset-worker-beat.env_file.0 must be a string`, you need to update your version of `docker-compose`.
Note that `docker-compose` is on the path to deprecation and you should now use `docker compose` instead.
### 3. Log into Superset
Now head over to [http://localhost:8088](http://localhost:8088) and log in with the default created account:
```bash
username: admin
password: admin
@@ -54,13 +50,10 @@ password: admin
#### 🎉 Congratulations! Superset is now up and running on your machine! 🎉
### Wrapping Up
Once you're done with Superset, you can stop and delete just like any other container environment:
```bash
$ docker compose down
```
:::tip
You can use the same environment more than once, as Superset will persist data locally. However, make sure to properly stop all
processes by running the Docker Compose `stop` command. By doing so, you can avoid data corruption and/or loss of data.
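In other words, a quick sketch of the difference:

```bash
# `stop` halts containers but keeps them and their volumes;
# `down` (shown earlier) removes the containers as well.
docker compose stop
```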
@@ -69,7 +62,6 @@ processes by running Docker Compose `stop` command. By doing so, you can avoid d
## What's next?
From this point on, you can head on to:
- [Create your first Dashboard](/docs/using-superset/creating-your-first-dashboard)
- [Connect to a Database](/docs/configuration/databases)
- [Using Docker Compose](/docs/installation/docker-compose)


@@ -2,12 +2,6 @@
title: CVEs fixed by release
sidebar_position: 2
---
#### Version 4.0.2
| CVE | Title | Affected |
|:---------------|:----------------------------|---------:|
| CVE-2024-39887 | Improper SQL authorization | < 4.0.1 |
#### Version 3.1.3, 4.0.1
| CVE | Title | Affected |
@@ -44,6 +38,7 @@ sidebar_position: 2
| CVE-2023-49736 | SQL Injection on where_in JINJA macro | < 2.1.3, >= 3.0.0, < 3.0.2 |
| CVE-2023-49734 | Privilege Escalation Vulnerability | < 2.1.3, >= 3.0.0, < 3.0.2 |
#### Version 3.0.0
| CVE | Title | Affected |
@@ -51,12 +46,14 @@ sidebar_position: 2
| CVE-2023-42502 | Open Redirect Vulnerability | < 3.0.0 |
| CVE-2023-42505 | Sensitive information disclosure on db connection details | < 3.0.0 |
#### Version 2.1.3
| CVE | Title | Affected |
|:---------------|:------------------------------------------------------------------------|---------:|
| CVE-2023-42504 | Lack of rate limiting allows for possible denial of service | < 2.1.3 |
#### Version 2.1.2
| CVE | Title | Affected |
@@ -65,6 +62,7 @@ sidebar_position: 2
| CVE-2023-42501 | Unnecessary read permissions within the Gamma role | < 2.1.2 |
| CVE-2023-43701 | Stored XSS on API endpoint | < 2.1.2 |
#### Version 2.1.1
| CVE | Title | Affected |
@@ -78,6 +76,7 @@ sidebar_position: 2
| CVE-2023-37941 | Metadata db write access can lead to remote code execution | < 2.1.1 |
| CVE-2023-32672 | SQL parser edge case bypasses data access authorization | < 2.1.1 |
#### Version 2.1.0
| CVE | Title | Affected |
@@ -87,6 +86,7 @@ sidebar_position: 2
| CVE-2023-27525 | Incorrect default permissions for Gamma role | < 2.1.0 |
| CVE-2023-30776 | Database connection password leak | < 2.1.0 |
#### Version 2.0.1
| CVE | Title | Affected |


@@ -44,8 +44,6 @@ or for the purposes of this walkthrough, you can click the link below all these
<img src={useBaseUrl("/img/tutorial/tutorial_03a_database_connection_string_link.png" )} width="600" />{" "} <br/><br/>
Please note, if you are trying to connect to another locally running database (whether on host or another container), and you get the message `The port is closed.`, then you need to adjust the HOST to `host.docker.internal`
Once you've clicked that link you only need to specify two things (the database name and SQLAlchemy URI):
@@ -106,7 +104,7 @@ You can also certify metrics if you'd like for your team in this view.
2. Virtual calculated columns: you can write SQL queries that
customize the appearance and behavior
of a specific column (e.g. `CAST(recovery_rate as float)`).
of a specific column (e.g. `CAST(recovery_rate) as float`).
Aggregate functions aren't allowed in calculated columns.
<img src={useBaseUrl("/img/tutorial/tutorial_calculated_column.png" )} />


@@ -187,10 +187,6 @@ const config = {
to: '/docs/configuration/event-logging',
from: '/docs/installation/event-logging/',
},
{
to: '/docs/contributing/howtos',
from: '/docs/contributing/translations/',
},
],
},
],


@@ -6,51 +6,51 @@
"scripts": {
"docusaurus": "docusaurus",
"_init": "cat src/intro_header.txt ../README.md > docs/intro.md",
"start": "yarn run _init && docusaurus start",
"build": "yarn run _init && DEBUG=docusaurus:* docusaurus build",
"start": "npm run _init && docusaurus start",
"build": "npm run _init && docusaurus build",
"swizzle": "docusaurus swizzle",
"deploy": "docusaurus deploy",
"clear": "docusaurus clear",
"serve": "yarn run _init && docusaurus serve",
"serve": "npm run _init && docusaurus serve",
"write-translations": "docusaurus write-translations",
"write-heading-ids": "docusaurus write-heading-ids",
"typecheck": "tsc"
},
"dependencies": {
"@algolia/client-search": "^4.24.0",
"@ant-design/icons": "^5.4.0",
"@docsearch/react": "^3.6.1",
"@docusaurus/core": "^3.5.2",
"@docusaurus/plugin-client-redirects": "^3.5.2",
"@docusaurus/preset-classic": "^3.5.2",
"@ant-design/icons": "^5.3.7",
"@docsearch/react": "^3.6.0",
"@docusaurus/core": "^3.3.2",
"@docusaurus/plugin-client-redirects": "^3.3.2",
"@docusaurus/preset-classic": "^3.3.2",
"@emotion/core": "^10.1.1",
"@emotion/styled": "^10.0.27",
"@mdx-js/react": "^3.0.0",
"@saucelabs/theme-github-codeblock": "^0.2.3",
"@superset-ui/style": "^0.14.23",
"@svgr/webpack": "^8.1.0",
"antd": "^5.20.5",
"antd": "^4.19.3",
"buffer": "^6.0.3",
"clsx": "^2.1.1",
"docusaurus-plugin-less": "^2.0.2",
"file-loader": "^6.2.0",
"less": "^4.2.0",
"less-loader": "^11.0.0",
"prism-react-renderer": "^2.4.0",
"prism-react-renderer": "^2.3.1",
"react": "^18.3.1",
"react-dom": "^18.3.1",
"react-github-btn": "^1.4.0",
"react-svg-pan-zoom": "^3.13.1",
"react-svg-pan-zoom": "^3.12.1",
"stream": "^0.0.3",
"swagger-ui-react": "^5.17.14",
"url-loader": "^4.1.1"
},
"devDependencies": {
"@docusaurus/module-type-aliases": "^3.5.2",
"@docusaurus/tsconfig": "^3.5.2",
"@types/react": "^18.3.5",
"typescript": "^5.5.4",
"webpack": "^5.94.0"
"@docusaurus/module-type-aliases": "^3.4.0",
"@docusaurus/tsconfig": "^3.4.0",
"@types/react": "^18.3.3",
"typescript": "^5.5.2",
"webpack": "^5.92.1"
},
"browserslist": {
"production": [


@@ -122,14 +122,4 @@ export const Databases = [
href: 'https://doris.apache.org/',
imgName: 'doris.png',
},
{
title: 'OceanBase',
href: 'https://www.oceanbase.com/',
imgName: 'oceanbase.svg',
},
{
title: 'Couchbase',
href: 'https://www.couchbase.com/',
imgName: 'couchbase.svg',
},
];


@@ -16,6 +16,8 @@
* specific language governing permissions and limitations
* under the License.
*/
@import '~antd/lib/style/themes/default.less';
@import '~antd/dist/antd.less'; // Import Ant Design styles by less entry
@import 'antd-theme.less';
body {


@@ -1,19 +0,0 @@
<!--
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
-->
<svg xmlns="http://www.w3.org/2000/svg" width="2500" height="575" viewBox="0.6 0.1 575.509 132.4"><title>logo</title><path d="M199.3 96.9c-20.3 0-30.5-14.601-30.5-30.8 0-16.1 10.6-30.6 30.7-30.6 7.7 0 13.2 1.7 17.9 4.7l-5.8 9.5c-3.3-2.1-6.9-3.4-12.3-3.4-10.9 0-16.7 8.7-16.7 19.5 0 11.101 5.6 20.3 16.9 20.3 6.4 0 10.2-2.199 13.3-4.5l5.3 9.101c-3 2.699-10.1 6.199-18.8 6.199zm43.1-36.4c-6.5 0-8.6 5.5-8.6 13.5s2.6 13.7 9.1 13.7c6.6 0 8.8-5.4 8.8-13.4-.1-8-2.7-13.8-9.3-13.8zm.2 36.4c-15.2 0-21.8-11.4-21.8-22.601 0-11.2 6.6-22.9 21.8-22.9 15.2 0 22.1 11.3 22.1 22.5C264.6 85 257.9 96.9 242.6 96.9zm41.2-44.4v27.8c0 4.3 1.5 6.4 5.601 6.4 4.399 0 7.699-4.2 8.6-5.2v-29h12.2v30.6c0 5.7.6 10.101 1.2 12.7H299.3c-.399-1.2-.8-4-.899-5.8-2.301 3-6.801 6.8-14 6.8-9.6 0-13-6.1-13-14.1V52.4h12.4v.1h-.001zm56.5 44.4c-14 0-22.6-9.101-22.6-22.7 0-14.6 9.7-22.8 23-22.8 7 0 11.2 2.1 13.3 3.4l-3.9 8.4c-1.899-1.2-4.699-2.5-9-2.5-6.8 0-10.399 5.3-10.399 13.2 0 7.899 3.399 13.6 10.7 13.6 5 0 7.899-1.8 9.199-2.6l3.7 8.199c-2 1.301-6 3.801-14 3.801zm46.8-1V68c0-4.3-1.5-6.4-5.6-6.4-4.4 0-7.8 4.1-8.7 5.2v29h-12.2v-64h12.2v25.8c2.2-2.4 6.4-6.2 13.5-6.2 9.601 0 13 6.1 13 14.1v30.3h-12.2v.1zm34.4-9.5c.8.3 2.6.899 5.8.899 6.3 0 10.2-4.6 10.2-13.5 0-8-2.7-12.8-8.9-12.8-3.6 0-6.399 2.3-7.1 3.2V86.4zm0-54.5v24c2-2 5.8-4.5 10.6-4.5 10.801 0 18.4 7.4 18.4 22.4 0 14.9-10 23.101-23.2 23.101-9.899 0-15.899-2.5-18.1-3.5V31.8h12.3v.1zm58.1 44.9h-1.5c-6.699 0-11.399 2-11.399 6.9 0 3.1 2.5 4.399 5.2 4.399 4.1 0 6.5-2.399 7.699-3.6V76.8zm1.3 19.1c-.4-1.101-.601-3.301-.7-4.601-1.9 2.3-6 5.7-12.101 5.7C460.7 97 455 92.6 455 84.8c0-11.3 11.6-15.399 23.1-15.399h1.5V67c0-3.6-1.5-5.8-6.899-5.8-5.601 0-9.4 2.9-11 4l-5.3-7.4c2.6-2.4 8.399-6.3 17.699-6.3 12 0 17.7 4.6 17.7 16.2v15.2c0 6 .601 10.199 1.2 13h-12.1zm32.9 1c-7.8 0-12.8-2.301-15.5-4.101l4.101-8.5C504.1 85.5 508.3 87.9 513.1 87.9c4.5 0 7-1.301 7-3.7 0-2.8-4.899-3.601-11.1-6.601-6-2.899-9.5-6.199-9.5-12.8 0-8.2 6.5-13.4 16.4-13.4 7.399 0 12 2.3 14 3.4l-4.2 8.3c-1.7-1-5.101-2.7-9.2-2.7s-5.9 1.4-5.9 3.7c0 2.8 4 3.5 9 5.5 6.801 2.801 11.7 6 11.7 13.2-.2 9.301-6.8 14.101-17.5 14.101zm50.3-28c-.1-4.8-2-8.7-7.199-8.7-4.801 0-7.601 3.1-8.4 8.7H564.1zm-4.6 19c5.9 0 9.1-1.9 11.3-3l3.9 7.8c-2.9 1.6-7.3 4.2-16.3 4.2-14.601 0-22.9-9.101-22.9-23 0-13.8 9.5-22.5 21.6-22.5 13.7 0 20.301 9.5 18.801 25.7H548.6c.5 6.5 3.9 10.8 10.9 10.8z"/><path d="M66.8.1C30.3.1.6 29.7.6 66.3c0 36.5 29.6 66.2 66.2 66.2 36.5 0 66.2-29.6 66.2-66.2S103.3.1 66.8.1zm44.7 77.8c0 4-2.3 7.5-6.8 8.3-7.8 1.399-24.2 2.2-37.9 2.2s-30.1-.801-37.9-2.2c-4.5-.8-6.8-4.3-6.8-8.3V52.1c0-4 3.1-7.7 6.8-8.3 2.3-.4 7.7-.8 11.9-.8 1.6 0 2.9 1.2 2.9 3.1v18.1l23.2-.5 23.2.5V46.1c0-1.9 1.3-3.1 2.9-3.1 4.2 0 9.6.4 11.9.8 3.8.6 6.8 4.3 6.8 8.3-.2 8.5-.2 17.2-.2 25.8z" fill="#ed2226"/></svg>


@@ -1,67 +0,0 @@
<!--
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
-->
<svg id="_图层_1" data-name="图层 1" xmlns="http://www.w3.org/2000/svg" viewBox="0 0 95.26 13.73">
<defs>
<style>
.cls-1 {
fill: #0181fd;
}
.cls-2 {
fill: #fff;
}
.cls-3 {
fill: #ffa005;
}
.cls-4 {
fill: #181818;
}
.cls-5 {
fill: #07c846;
}
</style>
</defs>
<rect class="cls-2" width="95.26" height="13.73"/>
<g>
<g>
<path class="cls-4" d="M51,2.66l-4.58,8.49h2.28l.74-1.48h3.05l.24,1.48h2.28l-1.68-8.49h-2.33Zm-.7,5.34l1.44-2.83,.48,2.83h-1.92Z"/>
<path class="cls-4" d="M31.98,3.71c-.33-.41-.76-.72-1.28-.95-.52-.23-1.11-.34-1.77-.34s-1.3,.11-1.9,.34c-.6,.23-1.13,.55-1.6,.95-.47,.41-.86,.88-1.18,1.42-.32,.54-.53,1.13-.64,1.76-.11,.63-.1,1.22,.04,1.76,.13,.54,.36,1.02,.69,1.42,.33,.41,.75,.72,1.27,.95,.52,.23,1.11,.34,1.78,.34s1.29-.11,1.89-.34c.6-.23,1.13-.55,1.61-.95,.47-.41,.87-.88,1.19-1.42,.32-.54,.53-1.13,.64-1.76,.11-.63,.1-1.22-.04-1.76-.13-.54-.37-1.02-.7-1.42m-1.84,4.12c-.17,.29-.38,.53-.63,.74-.25,.21-.53,.37-.83,.49-.3,.12-.61,.17-.92,.17s-.6-.06-.86-.17c-.26-.12-.48-.28-.65-.49-.17-.21-.3-.46-.37-.74-.07-.29-.08-.6-.03-.93,.06-.34,.17-.65,.35-.93,.17-.29,.38-.53,.63-.74,.25-.21,.52-.37,.82-.49,.3-.12,.61-.17,.92-.17s.6,.06,.86,.17c.26,.12,.48,.28,.66,.49,.18,.21,.31,.46,.38,.74,.08,.29,.08,.6,.03,.93-.06,.34-.17,.65-.34,.93"/>
<polygon class="cls-4" points="46.4 9.28 42.07 9.28 42.32 7.81 46.51 7.81 46.82 5.94 42.64 5.94 42.88 4.52 47.21 4.52 47.53 2.66 41.06 2.66 39.61 11.15 46.08 11.15 46.4 9.28"/>
<polygon class="cls-4" points="61.29 7.75 58.79 2.66 56.65 2.66 55.2 11.15 57.34 11.15 58.23 6.05 60.72 11.15 62.85 11.15 64.3 2.66 62.17 2.66 61.29 7.75"/>
<path class="cls-4" d="M70.83,8.68c.04-.26,.06-.5,.04-.72-.02-.23-.08-.43-.19-.61s-.25-.33-.45-.46c-.19-.12-.44-.22-.73-.28,.34-.2,.61-.43,.8-.72,.2-.28,.33-.64,.41-1.06,.12-.71,.02-1.25-.31-1.62-.33-.38-.87-.56-1.63-.56h-3.47l-1.45,8.49h3.72c.44,0,.84-.05,1.2-.16,.37-.1,.69-.26,.97-.47,.28-.21,.51-.47,.7-.78,.19-.31,.31-.66,.38-1.06m-3.67-4.34h.67c.67,0,.96,.27,.86,.82-.09,.55-.47,.82-1.14,.82h-.67l.28-1.64Zm1.07,4.88c-.26,.14-.65,.21-1.19,.21h-.75l.31-1.8h.75c.54,0,.91,.07,1.12,.21,.21,.14,.29,.37,.23,.69-.05,.32-.21,.54-.47,.69"/>
<path class="cls-4" d="M83.57,5.97c-.16-.06-.33-.12-.5-.17-.17-.05-.32-.11-.45-.19-.13-.07-.23-.16-.3-.25-.07-.1-.09-.22-.07-.37,.04-.22,.15-.39,.35-.53,.2-.14,.43-.2,.71-.2,.22,0,.44,.05,.65,.14,.22,.09,.42,.24,.62,.43l1.14-1.72c-.37-.23-.77-.4-1.19-.51-.42-.12-.84-.17-1.25-.17-.44,0-.84,.07-1.22,.2-.38,.13-.71,.32-.99,.57-.29,.24-.52,.54-.72,.88-.19,.34-.33,.72-.4,1.14-.07,.42-.07,.77,.01,1.04,.08,.27,.21,.5,.39,.68,.18,.18,.39,.32,.63,.43,.25,.11,.5,.21,.76,.3,.22,.08,.4,.16,.56,.23,.15,.07,.27,.14,.36,.22,.09,.08,.15,.17,.17,.26,.03,.09,.03,.2,0,.33-.04,.21-.15,.4-.35,.56-.2,.17-.47,.25-.82,.25-.31,0-.6-.07-.89-.21-.29-.14-.57-.36-.84-.65l-1.22,1.78c.78,.64,1.69,.96,2.73,.96,.5,0,.96-.07,1.38-.21,.42-.14,.79-.33,1.1-.59,.32-.25,.57-.55,.78-.91,.2-.35,.34-.75,.41-1.18,.11-.65,.04-1.17-.22-1.57-.26-.4-.71-.72-1.37-.96"/>
<path class="cls-4" d="M38.16,2.42c-.57,0-1.14,.11-1.7,.33-.56,.22-1.07,.53-1.54,.92-.46,.39-.86,.86-1.19,1.41-.33,.54-.55,1.14-.66,1.8-.11,.65-.09,1.26,.05,1.81,.15,.55,.38,1.03,.71,1.43,.33,.4,.73,.71,1.21,.93,.48,.22,1.01,.33,1.57,.33,.28,0,.56-.03,.85-.08,.29-.06,.61-.14,.96-.26l.23-.08,.45-2.61c-.65,.6-1.31,.9-2,.9-.31,0-.59-.06-.84-.18-.25-.12-.45-.29-.62-.5-.16-.21-.28-.46-.34-.74-.06-.29-.07-.6,0-.93,.06-.33,.17-.64,.33-.92,.16-.29,.36-.53,.59-.74,.23-.21,.49-.37,.79-.49,.29-.12,.6-.18,.92-.18,.73,0,1.28,.31,1.67,.93l.45-2.64c-.32-.15-.64-.26-.94-.33-.3-.07-.62-.11-.95-.11"/>
<path class="cls-4" d="M78.31,9.36c-.35-.07-1.02-.11-2.18,.17l.26,1.65h2.28l-.36-1.81Z"/>
<path class="cls-4" d="M78.22,8.87l-.37-1.87c-.55,0-1.24,.09-2.1,.31-.09,.02-.19,.05-.29,.08-.47,.13-.91,.22-1.31,.28l1.25-2.46,.27,1.65c.86-.22,1.55-.3,2.1-.31l-.77-3.85h-2.33l-4.58,8.49h2.28l.82-1.62c.69-.05,1.52-.17,2.46-.43,.14-.04,.27-.07,.39-.1,1.16-.27,1.83-.24,2.18-.17"/>
<g>
<path class="cls-4" d="M89.66,6.01c-1.7,.44-2.99,.4-3.55,.35l-.3,1.83c.16,.01,.35,.02,.56,.03,.84,.02,2.06-.06,3.54-.45,1.4-.37,2.23-.34,2.65-.27l.3-1.83c-.71-.07-1.75-.03-3.21,.35"/>
<path class="cls-4" d="M90.1,2.73c-1.62,.42-2.87,.41-3.47,.36l-.45,2.8c.4,.04,1.15,.07,2.15-.07l.16-.98c.56-.07,1.19-.18,1.87-.36,1.5-.39,2.34-.34,2.73-.26l.3-1.84c-.71-.09-1.77-.05-3.29,.34"/>
<path class="cls-4" d="M89.25,9.18c-.54,.14-1.03,.23-1.48,.29l.15-.9c-.62,.07-1.16,.09-1.59,.08-.21,0-.39-.01-.55-.03l-.46,2.72c.18,.01,.39,.03,.64,.03,.84,.02,2.06-.06,3.54-.45,1.32-.35,2.13-.35,2.58-.29l.3-1.83c-.71-.06-1.72,0-3.13,.36"/>
</g>
</g>
<g>
<path class="cls-5" d="M11.91,12.89c.36-.15,.72-.29,1.1-.42,.17-.06,.35-.12,.52-.17,.14-.04,.28-.08,.42-.12,2.12-.59,4.27-.75,6.35-.52v-2.17c-2.3-.23-4.67-.03-7.02,.63-.14,.04-.28,.08-.42,.12-.18,.06-.37,.12-.55,.18-.36,.12-.72,.26-1.07,.4l.67,2.06Z"/>
<path class="cls-1" d="M20.29,2.46v6.49c-2.39-.23-4.85-.02-7.27,.69-.3,.09-.59,.18-.89,.28-.36,.12-.72,.24-1.07,.34-3.06,.9-6.16,1.22-9.18,1V4.77c2.36,.23,4.79,.02,7.18-.66,.33-.09,.65-.2,.98-.31,.33-.11,.66-.22,.98-.32,3.09-.92,6.22-1.24,9.27-1.03Z"/>
<path class="cls-3" d="M10.26,.84c-.36,.15-.72,.29-1.1,.42-.17,.06-.35,.12-.52,.17-.14,.04-.28,.08-.42,.12-2.12,.59-4.27,.75-6.35,.52v2.17c2.3,.23,4.67,.03,7.02-.63,.14-.04,.28-.08,.42-.12,.18-.06,.37-.12,.55-.18,.36-.12,.72-.26,1.07-.4l-.67-2.06Z"/>
</g>
</g>
</svg>

Binary file not shown.

File diff suppressed because it is too large.

@@ -29,7 +29,7 @@ maintainers:
- name: craig-rueda
email: craig@craigrueda.com
url: https://github.com/craig-rueda
version: 0.12.13
version: 0.12.12
dependencies:
- name: postgresql
version: 12.1.6


@@ -23,7 +23,7 @@ NOTE: This file is generated by helm-docs: https://github.com/norwoodj/helm-docs
# superset
![Version: 0.12.13](https://img.shields.io/badge/Version-0.12.13-informational?style=flat-square)
![Version: 0.12.12](https://img.shields.io/badge/Version-0.12.12-informational?style=flat-square)
Apache Superset is a modern, enterprise-ready business intelligence web application


@@ -867,7 +867,7 @@ redis:
## docker registry secret names (list)
# pullSecrets: nil
##
## Configure persistence
## Configure persistance
persistence:
##
## Use a PVC to persist data.


@@ -42,7 +42,7 @@ dependencies = [
"colorama",
"croniter>=0.3.28",
"cron-descriptor",
"cryptography>=42.0.4, <44.0.0",
"cryptography>=42.0.4, <43.0.0",
"deprecation>=2.1.0, <2.2.0",
"flask>=2.2.5, <3.0.0",
"flask-appbuilder>=4.5.0, <5.0.0",
@@ -61,7 +61,6 @@ dependencies = [
"humanize",
"importlib_metadata",
"isodate",
"jsonpath-ng>=1.6.1, <2",
"Mako>=1.2.2",
"markdown>=3.0",
"msgpack>=1.0.0, <1.1",
@@ -118,7 +117,7 @@ databricks = [
"sqlalchemy-databricks>=0.2.0",
]
db2 = ["ibm-db-sa>0.3.8, <=0.4.0"]
dremio = ["sqlalchemy-dremio>=1.2.1, <4"]
dremio = ["sqlalchemy-dremio>=1.1.5, <1.3"]
drill = ["sqlalchemy-drill>=1.1.4, <2"]
druid = ["pydruid>=0.6.5,<0.7"]
duckdb = ["duckdb-engine>=0.9.5, <0.10"]
@@ -152,7 +151,7 @@ ocient = [
"geojson",
]
oracle = ["cx-Oracle>8.0.0, <8.1"]
pinot = ["pinotdb>=5.0.0, <6.0.0"]
pinot = ["pinotdb>=0.3.3, <0.4"]
playwright = ["playwright>=1.37.0, <2"]
postgres = ["psycopg2-binary==2.9.6"]
presto = ["pyhive[presto]>=0.6.5"]
@@ -174,7 +173,6 @@ vertica = ["sqlalchemy-vertica-python>=0.5.9, < 0.6"]
netezza = ["nzalchemy>=11.0.2"]
starrocks = ["starrocks>=1.0.0"]
doris = ["pydoris>=1.0.0, <2.0.0"]
oceanbase = ["oceanbase_py>=0.0.1"]
development = [
"docker",
"flask-testing",
@@ -231,7 +229,6 @@ module = "tests.*"
check_untyped_defs = false
disallow_untyped_calls = false
disallow_untyped_defs = false
disable_error_code = "annotation-unchecked"
[tool.tox]
legacy_tox_ini = """


@@ -15,8 +15,6 @@ apispec[yaml]==6.3.0
# via flask-appbuilder
apsw==3.46.0.0
# via shillelagh
async-timeout==4.0.3
# via redis
attrs==23.2.0
# via
# cattrs
@@ -80,7 +78,7 @@ cron-descriptor==1.4.3
# via apache-superset
croniter==2.0.5
# via apache-superset
cryptography==42.0.8
cryptography==42.0.7
# via
# apache-superset
# paramiko
@@ -93,8 +91,6 @@ dnspython==2.6.1
# via email-validator
email-validator==2.1.1
# via flask-appbuilder
exceptiongroup==1.2.2
# via cattrs
flask==2.3.3
# via
# apache-superset
@@ -148,7 +144,9 @@ geopy==2.4.1
google-auth==2.29.0
# via shillelagh
greenlet==3.0.3
# via shillelagh
# via
# shillelagh
# sqlalchemy
gunicorn==22.0.0
# via apache-superset
hashids==1.3.1
@@ -175,8 +173,6 @@ jinja2==3.1.4
# via
# flask
# flask-babel
jsonpath-ng==1.6.1
# via apache-superset
jsonschema==4.17.3
# via flask-appbuilder
kombu==5.3.7
@@ -253,8 +249,6 @@ pgsanity==0.2.9
# via apache-superset
platformdirs==3.8.1
# via requests-cache
ply==3.11
# via jsonpath-ng
polyline==2.0.2
# via apache-superset
prison==0.2.1
@@ -362,7 +356,6 @@ typing-extensions==4.12.0
# via
# alembic
# apache-superset
# cattrs
# flask-limiter
# limits
# shillelagh


@@ -10,12 +10,12 @@
# via
# -r requirements/base.in
# -r requirements/development.in
appnope==0.1.4
# via ipython
astroid==3.1.0
# via pylint
boto3==1.34.112
# via
# apache-superset
# dataflows-tabulator
# via dataflows-tabulator
botocore==1.34.112
# via
# boto3
@@ -177,7 +177,9 @@ protobuf==4.23.0
psycopg2-binary==2.9.6
# via apache-superset
pure-sasl==0.6.2
# via thrift-sasl
# via
# pyhive
# thrift-sasl
pydata-google-auth==1.7.0
# via pandas-gbq
pydruid==0.6.9
@@ -230,9 +232,18 @@ tableschema==1.20.10
thrift==0.16.0
# via
# apache-superset
# pyhive
# thrift-sasl
thrift-sasl==0.4.3
# via apache-superset
# via
# build
# coverage
# pip-tools
# pylint
# pyproject-api
# pyproject-hooks
# pytest
# tox
tomlkit==0.12.5
# via pylint
toposort==1.10
@@ -243,6 +254,9 @@ tqdm==4.66.4
# via
# cmdstanpy
# prophet
traitlets==5.14.3
# via
# matplotlib-inline
trino==0.328.0
# via apache-superset
tzlocal==5.2


@@ -52,7 +52,7 @@ GITHUB_TOKEN = os.environ.get("GITHUB_TOKEN")
def fetch_files_github_api(url: str): # type: ignore
"""Fetches data using GitHub API."""
req = Request(url)
req.add_header("Authorization", f"Bearer {GITHUB_TOKEN}")
req.add_header("Authorization", f"token {GITHUB_TOKEN}")
req.add_header("Accept", "application/vnd.github.v3+json")
print(f"Fetching from {url}")

Some files were not shown because too many files have changed in this diff.