Compare commits

...

5 Commits

Author SHA1 Message Date
GitHub Action
41000962ad chore(🦾): bump python isort 5.12.0 -> 5.13.2 2024-11-24 18:04:34 +00:00
Geido
91301bcd5b fix(Dashboard): Ensure shared label colors are updated (#31031) 2024-11-23 16:39:40 +02:00
Maxime Beauchemin
67ad7da5cc fix: ephemeral environments missing env var (#31035) 2024-11-22 17:39:34 -08:00
Maxime Beauchemin
e0deb704f9 feat: make ephemeral env use supersetbot + deprecate build_docker.py (#30870) 2024-11-22 14:19:08 -08:00
Kamil Gabryjelski
abf3790ea6 chore: Cleanup code related to MetadataBar, fix types (#31030) 2024-11-22 16:02:13 +01:00
30 changed files with 383 additions and 1112 deletions

.gitattributes vendored
View File

@@ -1,2 +1,3 @@
docker/**/*.sh text eol=lf
*.svg binary
*.ipynb binary

View File

@@ -1,30 +1,25 @@
name: Ephemeral env workflow
# Example manual trigger: gh workflow run ephemeral-env.yml --ref fix_ephemerals --field comment_body="/testenv up" --field issue_number=666
on:
issue_comment:
types: [created]
workflow_dispatch:
inputs:
comment_body:
description: 'Comment body to simulate /testenv command'
required: true
default: '/testenv up'
issue_number:
description: 'Issue or PR number'
required: true
jobs:
config:
runs-on: "ubuntu-22.04"
if: github.event.issue.pull_request
outputs:
has-secrets: ${{ steps.check.outputs.has-secrets }}
steps:
- name: "Check for secrets"
id: check
shell: bash
run: |
if [ -n "${{ (secrets.AWS_ACCESS_KEY_ID != '' && secrets.AWS_SECRET_ACCESS_KEY != '') || '' }}" ]; then
echo "has-secrets=1" >> "$GITHUB_OUTPUT"
fi
ephemeral-env-comment:
concurrency:
group: ${{ github.workflow }}-${{ github.event.issue.number || github.run_id }}-comment
group: ${{ github.workflow }}-${{ github.event.inputs.issue_number || github.event.issue.number || github.run_id }}-comment
cancel-in-progress: true
needs: config
if: needs.config.outputs.has-secrets
name: Evaluate ephemeral env comment trigger (/testenv)
runs-on: ubuntu-22.04
permissions:
@@ -44,18 +39,18 @@ jobs:
with:
result-encoding: string
script: |
const pattern = /^\/testenv (up|down)/
const result = pattern.exec(context.payload.comment.body)
return result === null ? 'noop' : result[1]
const pattern = /^\/testenv (up|down)/;
const result = pattern.exec('${{ github.event.inputs.comment_body || github.event.comment.body }}');
return result === null ? 'noop' : result[1];
- name: Eval comment body for feature flags
- name: Looking for feature flags
uses: actions/github-script@v7
id: eval-feature-flags
with:
script: |
const pattern = /FEATURE_(\w+)=(\w+)/g;
let results = [];
[...context.payload.comment.body.matchAll(pattern)].forEach(match => {
[...'${{ github.event.inputs.comment_body || github.event.comment.body }}'.matchAll(pattern)].forEach(match => {
const config = {
name: `SUPERSET_FEATURE_${match[1]}`,
value: match[2],
@@ -67,24 +62,47 @@ jobs:
- name: Limit to committers
if: >
steps.eval-body.outputs.result != 'noop' &&
github.event_name == 'issue_comment' &&
github.event.comment.author_association != 'MEMBER' &&
github.event.comment.author_association != 'OWNER'
uses: actions/github-script@v7
with:
github-token: ${{github.token}}
github-token: ${{ github.token }}
script: |
const errMsg = '@${{ github.event.comment.user.login }} Ephemeral environment creation is currently limited to committers.'
const errMsg = '@${{ github.event.comment.user.login }} Ephemeral environment creation is currently limited to committers.';
github.rest.issues.createComment({
issue_number: ${{ github.event.issue.number }},
owner: context.repo.owner,
repo: context.repo.repo,
body: errMsg
})
core.setFailed(errMsg)
});
core.setFailed(errMsg);
- name: Reply with confirmation comment
uses: actions/github-script@v7
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
script: |
const issueNumber = ${{ github.event.inputs.issue_number || github.event.issue.number }};
const user = '${{ github.event.comment.user.login || github.actor }}';
const action = '${{ steps.eval-body.outputs.result }}';
const runId = context.runId;
const workflowUrl = `${context.serverUrl}/${context.repo.owner}/${context.repo.repo}/actions/runs/${runId}`;
const body = action === 'noop'
? `@${user} No ephemeral environment action detected. Please use '/testenv up' or '/testenv down'. [View workflow run](${workflowUrl}).`
: `@${user} Processing your ephemeral environment request [here](${workflowUrl}).`;
await github.rest.issues.createComment({
owner: context.repo.owner,
repo: context.repo.repo,
issue_number: issueNumber,
body,
});
ephemeral-docker-build:
concurrency:
group: ${{ github.workflow }}-${{ github.event.issue.number || github.run_id }}-build
group: ${{ github.workflow }}-${{ github.event.inputs.issue_number || github.event.issue.number || github.run_id }}-build
cancel-in-progress: true
needs: ephemeral-env-comment
name: ephemeral-docker-build
@@ -98,9 +116,9 @@ jobs:
const request = {
owner: context.repo.owner,
repo: context.repo.repo,
pull_number: ${{ github.event.issue.number }},
}
core.info(`Getting PR #${request.pull_number} from ${request.owner}/${request.repo}`)
pull_number: ${{ github.event.inputs.issue_number || github.event.issue.number }},
};
core.info(`Getting PR #${request.pull_number} from ${request.owner}/${request.repo}`);
const pr = await github.rest.pulls.get(request);
return pr.data;
@@ -121,12 +139,17 @@ jobs:
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Setup supersetbot
uses: ./.github/actions/setup-supersetbot/
- name: Build ephemeral env image
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
./scripts/build_docker.py \
"ci" \
"pull_request" \
--build_context_ref ${{ github.event.issue.number }}
supersetbot docker \
--preset ci \
--platform linux/amd64 \
--context-ref "$RELEASE"
- name: Configure AWS credentials
uses: aws-actions/configure-aws-credentials@v4
@@ -146,7 +169,7 @@ jobs:
ECR_REPOSITORY: superset-ci
IMAGE_TAG: apache/superset:${{ steps.get-sha.outputs.sha }}-ci
run: |
docker tag $IMAGE_TAG $ECR_REGISTRY/$ECR_REPOSITORY:pr-${{ github.event.issue.number }}-ci
docker tag $IMAGE_TAG $ECR_REGISTRY/$ECR_REPOSITORY:pr-${{ github.event.inputs.issue_number || github.event.issue.number }}-ci
docker push -a $ECR_REGISTRY/$ECR_REPOSITORY
ephemeral-env-up:
@@ -181,22 +204,22 @@ jobs:
aws ecr describe-images \
--registry-id $(echo "${{ steps.login-ecr.outputs.registry }}" | grep -Eo "^[0-9]+") \
--repository-name superset-ci \
--image-ids imageTag=pr-${{ github.event.issue.number }}-ci
--image-ids imageTag=pr-${{ github.event.inputs.issue_number || github.event.issue.number }}-ci
- name: Fail on missing container image
if: steps.check-image.outcome == 'failure'
uses: actions/github-script@v7
with:
github-token: ${{github.token}}
github-token: ${{ github.token }}
script: |
const errMsg = '@${{ github.event.comment.user.login }} Container image not yet published for this PR. Please try again when build is complete.'
const errMsg = '@${{ github.event.comment.user.login }} Container image not yet published for this PR. Please try again when build is complete.';
github.rest.issues.createComment({
issue_number: ${{ github.event.issue.number }},
issue_number: ${{ github.event.inputs.issue_number || github.event.issue.number }},
owner: context.repo.owner,
repo: context.repo.repo,
body: errMsg
})
core.setFailed(errMsg)
});
core.setFailed(errMsg);
- name: Fill in the new image ID in the Amazon ECS task definition
id: task-def
@@ -204,7 +227,7 @@ jobs:
with:
task-definition: .github/workflows/ecs-task-definition.json
container-name: superset-ci
image: ${{ steps.login-ecr.outputs.registry }}/superset-ci:pr-${{ github.event.issue.number }}-ci
image: ${{ steps.login-ecr.outputs.registry }}/superset-ci:pr-${{ github.event.inputs.issue_number || github.event.issue.number }}-ci
- name: Update env vars in the Amazon ECS task definition
run: |
@@ -213,30 +236,29 @@ jobs:
- name: Describe ECS service
id: describe-services
run: |
echo "active=$(aws ecs describe-services --cluster superset-ci --services pr-${{ github.event.issue.number }}-service | jq '.services[] | select(.status == "ACTIVE") | any')" >> $GITHUB_OUTPUT
echo "active=$(aws ecs describe-services --cluster superset-ci --services pr-${{ github.event.inputs.issue_number || github.event.issue.number }}-service | jq '.services[] | select(.status == "ACTIVE") | any')" >> $GITHUB_OUTPUT
- name: Create ECS service
if: steps.describe-services.outputs.active != 'true'
id: create-service
if: steps.describe-services.outputs.active != 'true'
env:
ECR_SUBNETS: subnet-0e15a5034b4121710,subnet-0e8efef4a72224974
ECR_SECURITY_GROUP: sg-092ff3a6ae0574d91
run: |
aws ecs create-service \
--cluster superset-ci \
--service-name pr-${{ github.event.issue.number }}-service \
--service-name pr-${{ github.event.inputs.issue_number || github.event.issue.number }}-service \
--task-definition superset-ci \
--launch-type FARGATE \
--desired-count 1 \
--platform-version LATEST \
--network-configuration "awsvpcConfiguration={subnets=[$ECR_SUBNETS],securityGroups=[$ECR_SECURITY_GROUP],assignPublicIp=ENABLED}" \
--tags key=pr,value=${{ github.event.issue.number }} key=github_user,value=${{ github.actor }}
--tags key=pr,value=${{ github.event.inputs.issue_number || github.event.issue.number }} key=github_user,value=${{ github.actor }}
- name: Deploy Amazon ECS task definition
id: deploy-task
uses: aws-actions/amazon-ecs-deploy-task-definition@v2
with:
task-definition: ${{ steps.task-def.outputs.task-definition }}
service: pr-${{ github.event.issue.number }}-service
service: pr-${{ github.event.inputs.issue_number || github.event.issue.number }}-service
cluster: superset-ci
wait-for-service-stability: true
wait-for-minutes: 10
@@ -244,18 +266,15 @@ jobs:
- name: List tasks
id: list-tasks
run: |
echo "task=$(aws ecs list-tasks --cluster superset-ci --service-name pr-${{ github.event.issue.number }}-service | jq '.taskArns | first')" >> $GITHUB_OUTPUT
echo "task=$(aws ecs list-tasks --cluster superset-ci --service-name pr-${{ github.event.inputs.issue_number || github.event.issue.number }}-service | jq '.taskArns | first')" >> $GITHUB_OUTPUT
- name: Get network interface
id: get-eni
run: |
echo "eni=$(aws ecs describe-tasks --cluster superset-ci --tasks ${{ steps.list-tasks.outputs.task }} | jq '.tasks | .[0] | .attachments | .[0] | .details | map(select(.name=="networkInterfaceId")) | .[0] | .value')" >> $GITHUB_OUTPUT
echo "eni=$(aws ecs describe-tasks --cluster superset-ci --tasks ${{ steps.list-tasks.outputs.task }} | jq '.tasks | .[0] | .attachments | .[0] | .details | map(select(.name==\"networkInterfaceId\")) | .[0] | .value')" >> $GITHUB_OUTPUT
- name: Get public IP
id: get-ip
run: |
echo "ip=$(aws ec2 describe-network-interfaces --network-interface-ids ${{ steps.get-eni.outputs.eni }} | jq -r '.NetworkInterfaces | first | .Association.PublicIp')" >> $GITHUB_OUTPUT
- name: Comment (success)
if: ${{ success() }}
uses: actions/github-script@v7
@@ -263,12 +282,11 @@ jobs:
github-token: ${{github.token}}
script: |
github.rest.issues.createComment({
issue_number: ${{ github.event.issue.number }},
issue_number: ${{ github.event.inputs.issue_number || github.event.issue.number }},
owner: context.repo.owner,
repo: context.repo.repo,
body: '@${{ github.event.comment.user.login }} Ephemeral environment spinning up at http://${{ steps.get-ip.outputs.ip }}:8080. Credentials are `admin`/`admin`. Please allow several minutes for bootstrapping and startup.'
body: '@${{ github.event.inputs.user_login || github.event.comment.user.login }} Ephemeral environment spinning up at http://${{ steps.get-ip.outputs.ip }}:8080. Credentials are `admin`/`admin`. Please allow several minutes for bootstrapping and startup.'
})
- name: Comment (failure)
if: ${{ failure() }}
uses: actions/github-script@v7
@@ -276,8 +294,8 @@ jobs:
github-token: ${{github.token}}
script: |
github.rest.issues.createComment({
issue_number: ${{ github.event.issue.number }},
issue_number: ${{ github.event.inputs.issue_number || github.event.issue.number }},
owner: context.repo.owner,
repo: context.repo.repo,
body: '@${{ github.event.comment.user.login }} Ephemeral environment creation failed. Please check the Actions logs for details.'
body: '@${{ github.event.inputs.user_login || github.event.comment.user.login }} Ephemeral environment creation failed. Please check the Actions logs for details.'
})

View File

@@ -115,7 +115,7 @@ RUN mkdir -p ${PYTHONPATH} superset/static requirements superset-frontend apache
libldap2-dev \
&& touch superset/static/version_info.json \
&& chown -R superset:superset ./* \
&& rm -rf /var/lib/apt/lists/*
&& rm -rf /var/lib/apt/lists/* /var/cache/apt/archives/*
COPY --chown=superset:superset pyproject.toml setup.py MANIFEST.in README.md ./
# setup.py uses the version information in package.json
@@ -128,7 +128,7 @@ RUN --mount=type=cache,target=/root/.cache/pip \
&& pip install --no-cache-dir --upgrade setuptools pip \
&& pip install --no-cache-dir -r requirements/base.txt \
&& apt-get autoremove -yqq --purge build-essential \
&& rm -rf /var/lib/apt/lists/*
&& rm -rf /var/lib/apt/lists/* /var/cache/apt/archives/*
# Copy the compiled frontend assets
COPY --chown=superset:superset --from=superset-node /app/superset/static/assets superset/static/assets
@@ -177,7 +177,7 @@ RUN apt-get update -qq \
libxtst6 \
git \
pkg-config \
&& rm -rf /var/lib/apt/lists/*
&& rm -rf /var/cache/apt/archives/* /var/lib/apt/lists/*
RUN --mount=type=cache,target=/root/.cache/pip \
pip install --no-cache-dir playwright
@@ -199,13 +199,13 @@ RUN if [ "$INCLUDE_FIREFOX" = "true" ]; then \
&& wget -q https://github.com/mozilla/geckodriver/releases/download/${GECKODRIVER_VERSION}/geckodriver-${GECKODRIVER_VERSION}-linux64.tar.gz -O - | tar xfz - -C /usr/local/bin \
&& wget -q https://download-installer.cdn.mozilla.net/pub/firefox/releases/${FIREFOX_VERSION}/linux-x86_64/en-US/firefox-${FIREFOX_VERSION}.tar.bz2 -O - | tar xfj - -C /opt \
&& ln -s /opt/firefox/firefox /usr/local/bin/firefox \
&& apt-get autoremove -yqq --purge wget bzip2 && rm -rf /var/[log,tmp]/* /tmp/* /var/lib/apt/lists/*; \
&& apt-get autoremove -yqq --purge wget bzip2 && rm -rf /var/[log,tmp]/* /tmp/* /var/lib/apt/lists/* /var/cache/apt/archives/*; \
fi
# Installing mysql client os-level dependencies in dev image only because GPL
RUN apt-get install -yqq --no-install-recommends \
default-libmysqlclient-dev \
&& rm -rf /var/lib/apt/lists/*
&& rm -rf /var/lib/apt/lists/* /var/cache/apt/archives/*
COPY --chown=superset:superset requirements/development.txt requirements/
RUN --mount=type=cache,target=/root/.cache/pip \
@@ -213,7 +213,7 @@ RUN --mount=type=cache,target=/root/.cache/pip \
build-essential \
&& pip install --no-cache-dir -r requirements/development.txt \
&& apt-get autoremove -yqq --purge build-essential \
&& rm -rf /var/lib/apt/lists/*
&& rm -rf /var/lib/apt/lists/* /var/cache/apt/archives/*
USER superset
######################################################################

View File

@@ -25,7 +25,6 @@ x-superset-user: &superset-user root
x-superset-depends-on: &superset-depends-on
- db
- redis
- superset-checks
x-superset-volumes: &superset-volumes
# /app/pythonpath_docker will be appended to the PYTHONPATH in the final container
- ./docker:/app/docker
@@ -131,23 +130,6 @@ services:
- REDIS_PORT=6379
- REDIS_SSL=false
superset-checks:
build:
context: .
target: python-base
cache_from:
- apache/superset-cache:3.10-slim-bookworm
container_name: superset_checks
command: ["/app/scripts/check-env.py"]
env_file:
- path: docker/.env # default
required: true
- path: docker/.env-local # optional override
required: false
user: *superset-user
healthcheck:
disable: true
superset-init:
build:
<<: *common-build
@@ -179,6 +161,7 @@ services:
# set this to false if you have perf issues running the npm i; npm run dev in-docker
# if you do so, you have to run this manually on the host, which should perform better!
BUILD_SUPERSET_FRONTEND_IN_DOCKER: true
NPM_RUN_PRUNE: false
SCARF_ANALYTICS: "${SCARF_ANALYTICS:-}"
container_name: superset_node
command: ["/app/docker/docker-frontend.sh"]

View File

@@ -27,6 +27,11 @@ if [ "$BUILD_SUPERSET_FRONTEND_IN_DOCKER" = "true" ]; then
echo "Building Superset frontend in dev mode inside docker container"
cd /app/superset-frontend
if [ "$NPM_RUN_PRUNE" = "true" ]; then
echo "Running `npm run prune`"
npm run prune
fi
echo "Running `npm install`"
npm install

View File

@@ -29,7 +29,7 @@ We have a set of build "presets" that each represent a combination of
parameters for the build, mostly pointing to either a different target layer
for the build and/or a different base image.
Here are the build presets that are exposed through the `build_docker.py` script:
Here are the build presets that are exposed through the `supersetbot docker` utility:
- `lean`: The default Docker image, including both frontend and backend. Tags
without a build_preset are lean builds (ie: `latest`, `4.0.0`, `3.0.0`, ...). `lean`
@@ -62,8 +62,8 @@ Here are the build presets that are exposed through the `build_docker.py` script
For insights or modifications to the build matrix and tagging conventions,
check the [build_docker.py](https://github.com/apache/superset/blob/master/scripts/build_docker.py)
script and the [docker.yml](https://github.com/apache/superset/blob/master/.github/workflows/docker.yml)
check the [supersetbot docker](https://github.com/apache-superset/supersetbot)
subcommand and the [docker.yml](https://github.com/apache/superset/blob/master/.github/workflows/docker.yml)
GitHub action.
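As a rough sketch of how a preset maps onto a direct invocation, here is a hypothetical local run using the same flags the ephemeral-env workflow in this changeset passes to `supersetbot docker` (the branch reference is illustrative; check the supersetbot repo for the authoritative CLI surface):

```bash
# Assumes supersetbot is installed and a GITHUB_TOKEN is exported,
# mirroring the ephemeral-env workflow in this changeset.
export GITHUB_TOKEN="<token>"
supersetbot docker \
  --preset lean \
  --platform linux/amd64 \
  --context-ref "my-branch"   # illustrative PR/branch reference
```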
## Key ARGs in Dockerfile

View File

@@ -95,6 +95,14 @@ perform those operations. In this case, we recommend you set the env var
Simply trigger `npm i && npm run dev`; this should be MUCH faster.
:::
:::tip
Sometimes your npm-related state can get out of whack. Running `npm run prune` from
the `superset-frontend/` folder will nuke the various packages' `node_modules/` folders
and help you start fresh. In the context of `docker compose`, setting
`export NPM_RUN_PRUNE=true` prior to running `docker compose up` will trigger that
from within docker. This will slow down the startup, but will fix various npm-related issues.
:::
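For example, with the `docker compose` setup referenced in the tip above (both commands come straight from the tip; nothing else is required):

```bash
# Runs `npm run prune` inside the superset_node container on the next startup
export NPM_RUN_PRUNE=true
docker compose up
```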
### Option #2 - build a set of immutable images from the local branch
```bash

View File

@@ -77,10 +77,6 @@ versions officially supported by Superset. We'd recommend using a Python version manager
like [pyenv](https://github.com/pyenv/pyenv)
(and also [pyenv-virtualenv](https://github.com/pyenv/pyenv-virtualenv)).
:::tip
To identify the Python version used by the official docker image, see the [Dockerfile](https://github.com/apache/superset/blob/master/Dockerfile). Additional docker images published for newer versions of Python can be found in [this file](https://github.com/apache/superset/blob/master/scripts/build_docker.py).
:::
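A minimal sketch of the pyenv flow recommended above (the patch version is illustrative; pick any Python release officially supported by Superset):

```bash
# assumes pyenv and pyenv-virtualenv are installed
pyenv install 3.10.14             # illustrative patch release
pyenv virtualenv 3.10.14 superset
pyenv local superset              # selects/activates the virtualenv for this directory
```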
Let's also make sure we have the latest version of `pip` and `setuptools`:
```bash

View File

@@ -32,7 +32,7 @@ billiard==4.2.0
# via celery
blinker==1.9.0
# via flask
bottleneck==1.3.8
bottleneck==1.4.2
# via pandas
brotli==1.1.0
# via flask-compress
@@ -148,9 +148,7 @@ geopy==2.4.1
google-auth==2.29.0
# via shillelagh
greenlet==3.0.3
# via
# shillelagh
# sqlalchemy
# via shillelagh
gunicorn==22.0.0
# via apache-superset
hashids==1.3.1

View File

@@ -8,7 +8,7 @@
-r base.txt
-e file:.
# via
# -r requirements/base.in
# -r /home/runner/work/superset/superset/requirements/base.in
# -r requirements/development.in
astroid==3.1.0
# via pylint
@@ -107,7 +107,7 @@ ijson==3.2.3
# via dataflows-tabulator
iniconfig==2.0.0
# via pytest
isort==5.12.0
isort==5.13.2
# via pylint
jmespath==1.0.1
# via
@@ -188,7 +188,7 @@ pyee==11.0.1
# via playwright
pyfakefs==5.3.5
# via apache-superset
pyhive[presto]==0.7.0
pyhive[hive]==0.7.0
# via apache-superset
pyinstrument==4.4.0
# via apache-superset
@@ -221,6 +221,8 @@ ruff==0.4.5
# via apache-superset
s3transfer==0.10.1
# via boto3
sasl==0.3.1
# via pyhive
sqlalchemy-bigquery==1.11.0
# via apache-superset
sqloxide==0.1.43
@@ -232,9 +234,12 @@ tableschema==1.20.10
thrift==0.16.0
# via
# apache-superset
# pyhive
# thrift-sasl
thrift-sasl==0.4.3
# via apache-superset
# via
# apache-superset
# pyhive
tomli==2.0.1
# via
# build

View File

@@ -1,294 +0,0 @@
#!/usr/bin/env python3
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import os
import re
import subprocess
from textwrap import dedent
import click
REPO = "apache/superset"
CACHE_REPO = f"{REPO}-cache"
BASE_PY_IMAGE = "3.10-slim-bookworm"
def run_cmd(command: str, raise_on_failure: bool = True) -> str:
process = subprocess.Popen(
command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, text=True
)
output = ""
if process.stdout is not None:
for line in iter(process.stdout.readline, ""):
print(line.strip()) # Print the line to stdout in real-time
output += line
process.wait() # Wait for the subprocess to finish
if process.returncode != 0 and raise_on_failure:
raise subprocess.CalledProcessError(process.returncode, command, output)
return output
def get_git_sha() -> str:
return run_cmd("git rev-parse HEAD").strip()
def get_build_context_ref(build_context: str) -> str:
"""
Given a context, return a ref:
- if context is pull_request, return the PR's id
- if context is push, return the branch
- if context is release, return the release ref
"""
event = os.getenv("GITHUB_EVENT_NAME")
github_ref = os.getenv("GITHUB_REF", "")
if event == "pull_request":
github_head_ref = os.getenv("GITHUB_HEAD_REF", "")
return re.sub("[^a-zA-Z0-9]", "-", github_head_ref)[:40]
elif event == "release":
return re.sub("refs/tags/", "", github_ref)[:40]
elif event == "push":
return re.sub("[^a-zA-Z0-9]", "-", re.sub("refs/heads/", "", github_ref))[:40]
return ""
def is_latest_release(release: str) -> bool:
output = (
run_cmd(
f"./scripts/tag_latest_release.sh {release} --dry-run",
raise_on_failure=False,
)
or ""
)
return "SKIP_TAG::false" in output
def make_docker_tag(l: list[str]) -> str: # noqa: E741
return f"{REPO}:" + "-".join([o for o in l if o])
def get_docker_tags(
build_preset: str,
build_platforms: list[str],
sha: str,
build_context: str,
build_context_ref: str,
force_latest: bool = False,
) -> set[str]:
"""
Return a set of tags given a given build context
"""
tags: set[str] = set()
tag_chunks: list[str] = []
is_latest = is_latest_release(build_context_ref)
if build_preset != "lean":
# Always add the preset_build name if different from default (lean)
tag_chunks += [build_preset]
if len(build_platforms) == 1:
build_platform = build_platforms[0]
short_build_platform = build_platform.replace("linux/", "").replace("64", "")
if short_build_platform != "amd":
# Always a platform indicator if different from default (amd)
tag_chunks += [short_build_platform]
# Always craft a tag for the SHA
tags.add(make_docker_tag([sha] + tag_chunks))
# also a short SHA, cause it's nice
tags.add(make_docker_tag([sha[:7]] + tag_chunks))
if build_context == "release":
# add a release tag
tags.add(make_docker_tag([build_context_ref] + tag_chunks))
if is_latest or force_latest:
# add a latest tag
tags.add(make_docker_tag(["latest"] + tag_chunks))
elif build_context == "push" and build_context_ref == "master":
tags.add(make_docker_tag(["master"] + tag_chunks))
elif build_context == "pull_request":
tags.add(make_docker_tag([f"pr-{build_context_ref}"] + tag_chunks))
return tags
def get_docker_command(
build_preset: str,
build_platforms: list[str],
is_authenticated: bool,
sha: str,
build_context: str,
build_context_ref: str,
force_latest: bool = False,
) -> str:
tag = "" # noqa: F841
build_target = ""
py_ver = BASE_PY_IMAGE
docker_context = "."
if build_preset == "dev":
build_target = "dev"
elif build_preset == "lean":
build_target = "lean"
elif build_preset == "py311":
build_target = "lean"
py_ver = "3.11-slim-bookworm"
elif build_preset == "websocket":
build_target = ""
docker_context = "superset-websocket"
elif build_preset == "ci":
build_target = "ci"
elif build_preset == "dockerize":
build_target = ""
docker_context = "-f dockerize.Dockerfile ."
else:
print(f"Invalid build preset: {build_preset}")
exit(1)
# Try to get context reference if missing
if not build_context_ref:
build_context_ref = get_build_context_ref(build_context)
tags = get_docker_tags(
build_preset,
build_platforms,
sha,
build_context,
build_context_ref,
force_latest,
)
docker_tags = ("\\\n" + 8 * " ").join([f"-t {s} " for s in tags])
docker_args = "--load" if not is_authenticated else "--push"
target_argument = f"--target {build_target}" if build_target else ""
cache_ref = f"{CACHE_REPO}:{py_ver}"
if len(build_platforms) == 1:
build_platform = build_platforms[0]
short_build_platform = build_platform.replace("linux/", "").replace("64", "")
cache_ref = f"{CACHE_REPO}:{py_ver}-{short_build_platform}"
platform_arg = "--platform " + ",".join(build_platforms)
cache_from_arg = f"--cache-from=type=registry,ref={cache_ref}"
cache_to_arg = (
f"--cache-to=type=registry,mode=max,ref={cache_ref}" if is_authenticated else ""
)
build_arg = f"--build-arg PY_VER={py_ver}" if py_ver else ""
actor = os.getenv("GITHUB_ACTOR")
return dedent(
f"""\
docker buildx build \\
{docker_args} \\
{docker_tags} \\
{cache_from_arg} \\
{cache_to_arg} \\
{build_arg} \\
{platform_arg} \\
{target_argument} \\
--label sha={sha} \\
--label target={build_target} \\
--label build_trigger={build_context} \\
--label base={py_ver} \\
--label build_actor={actor} \\
{docker_context}"""
)
@click.command()
@click.argument(
"build_preset",
type=click.Choice(["lean", "dev", "dockerize", "websocket", "py311", "ci"]),
)
@click.argument("build_context", type=click.Choice(["push", "pull_request", "release"]))
@click.option(
"--platform",
type=click.Choice(["linux/arm64", "linux/amd64"]),
default=["linux/amd64"],
multiple=True,
)
@click.option("--build_context_ref", help="a reference to the pr, release or branch")
@click.option("--dry-run", is_flag=True, help="Run the command in dry-run mode.")
@click.option("--verbose", is_flag=True, help="Print more info")
@click.option(
"--force-latest", is_flag=True, help="Force the 'latest' tag on the release"
)
def main(
build_preset: str,
build_context: str,
build_context_ref: str,
platform: list[str],
dry_run: bool,
force_latest: bool,
verbose: bool,
) -> None:
"""
This script executes docker build and push commands based on given arguments.
"""
is_authenticated = (
True if os.getenv("DOCKERHUB_TOKEN") and os.getenv("DOCKERHUB_USER") else False
)
if force_latest and build_context != "release":
print(
"--force-latest can only be applied if the build context is set to 'release'"
)
exit(1)
if build_context == "release" and not build_context_ref.strip():
print("Release number has to be provided")
exit(1)
docker_build_command = get_docker_command(
build_preset,
platform,
is_authenticated,
get_git_sha(),
build_context,
build_context_ref,
force_latest,
)
if not dry_run:
print("Executing Docker Build Command:")
print(docker_build_command)
script = ""
if os.getenv("DOCKERHUB_USER"):
script = dedent(
f"""\
docker logout
docker login --username "{os.getenv("DOCKERHUB_USER")}" --password "{os.getenv("DOCKERHUB_TOKEN")}"
DOCKER_ARGS="--push"
"""
)
script = script + docker_build_command
if verbose:
run_cmd("cat Dockerfile")
stdout = run_cmd(script) # noqa: F841
else:
print("Dry Run - Docker Build Command:")
print(docker_build_command)
if __name__ == "__main__":
main()

View File

@@ -1,13 +0,0 @@
{
"name": "@superset-ui/switchboard",
"version": "0.18.26-0",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "@superset-ui/switchboard",
"version": "0.18.26-0",
"license": "Apache-2.0"
}
}
}

View File

@@ -51,7 +51,7 @@ export type LastModified = {
export type Owner = {
type: MetadataType.Owner;
createdBy: string;
owners?: string[];
owners?: string[] | string;
createdOn: string;
onClick?: (type: string) => void;
};

View File

@@ -23,27 +23,18 @@ import { styled } from '@superset-ui/core';
import { Tooltip, TooltipPlacement } from 'src/components/Tooltip';
import { ContentType } from './ContentType';
import { config } from './ContentConfig';
export const MIN_NUMBER_ITEMS = 2;
export const MAX_NUMBER_ITEMS = 6;
const HORIZONTAL_PADDING = 12;
const VERTICAL_PADDING = 8;
const ICON_PADDING = 8;
const SPACE_BETWEEN_ITEMS = 16;
const ICON_WIDTH = 16;
const TEXT_MIN_WIDTH = 70;
const TEXT_MAX_WIDTH = 150;
const ORDER = {
dashboards: 0,
table: 1,
sql: 2,
rows: 3,
tags: 4,
description: 5,
owner: 6,
lastModified: 7,
};
import {
HORIZONTAL_PADDING,
ICON_PADDING,
ICON_WIDTH,
VERTICAL_PADDING,
TEXT_MIN_WIDTH,
TEXT_MAX_WIDTH,
SPACE_BETWEEN_ITEMS,
ORDER,
MIN_NUMBER_ITEMS,
MAX_NUMBER_ITEMS,
} from './constants';
const Bar = styled.div<{ count: number }>`
${({ theme, count }) => `

View File

@@ -0,0 +1,39 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
export const MIN_NUMBER_ITEMS = 2;
export const MAX_NUMBER_ITEMS = 6;
export const HORIZONTAL_PADDING = 12;
export const VERTICAL_PADDING = 8;
export const ICON_PADDING = 8;
export const SPACE_BETWEEN_ITEMS = 16;
export const ICON_WIDTH = 16;
export const TEXT_MIN_WIDTH = 70;
export const TEXT_MAX_WIDTH = 150;
export const ORDER = {
dashboards: 0,
table: 1,
sql: 2,
rows: 3,
tags: 4,
description: 5,
owner: 6,
lastModified: 7,
};

View File

@@ -16,7 +16,8 @@
* specific language governing permissions and limitations
* under the License.
*/
import MetadataBar, { MIN_NUMBER_ITEMS, MAX_NUMBER_ITEMS } from './MetadataBar';
import MetadataBar from './MetadataBar';
import { MIN_NUMBER_ITEMS, MAX_NUMBER_ITEMS } from './constants';
export type { MetadataBarProps } from './MetadataBar';

View File

@@ -28,6 +28,7 @@ import {
t,
getClientErrorObject,
getCategoricalSchemeRegistry,
promiseTimeout,
} from '@superset-ui/core';
import {
addChart,
@@ -737,9 +738,9 @@ export const persistDashboardLabelsColor = () => async (dispatch, getState) => {
} = getState();
if (labelsColorMapMustSync || sharedLabelsColorsMustSync) {
storeDashboardMetadata(id, metadata);
dispatch(setDashboardLabelsColorMapSynced());
dispatch(setDashboardSharedLabelsColorsSynced());
storeDashboardMetadata(id, metadata);
}
};
@@ -755,16 +756,13 @@ export const applyDashboardLabelsColorOnLoad = metadata => async dispatch => {
try {
const updatedMetadata = { ...metadata };
const customLabelsColor = metadata.label_colors || {};
const sharedLabelsColor = metadata.shared_label_colors || [];
let hasChanged = false;
// backward compatibility of shared_label_colors
const sharedLabels = metadata.shared_label_colors || [];
if (!Array.isArray(sharedLabels) && Object.keys(sharedLabels).length > 0) {
hasChanged = true;
updatedMetadata.shared_label_colors = getFreshSharedLabels(
Object.keys(sharedLabelsColor),
);
updatedMetadata.shared_label_colors = [];
}
// backward compatibility of map_label_colors
const hasMapLabelColors =
@@ -828,27 +826,28 @@ export const applyDashboardLabelsColorOnLoad = metadata => async dispatch => {
* @returns void
*/
export const ensureSyncedLabelsColorMap = metadata => (dispatch, getState) => {
const {
dashboardState: { labelsColorMapMustSync },
} = getState();
const updatedMetadata = { ...metadata };
const customLabelsColor = metadata.label_colors || {};
const isMapSynced = isLabelsColorMapSynced(metadata);
const mustSync = !isMapSynced;
const syncLabelsColorMap = () => {
const {
dashboardState: { labelsColorMapMustSync },
} = getState();
const updatedMetadata = { ...metadata };
const customLabelsColor = metadata.label_colors || {};
const isMapSynced = isLabelsColorMapSynced(metadata);
const mustSync = !isMapSynced;
if (mustSync) {
const freshestColorMapEntries = getLabelsColorMapEntries(customLabelsColor);
updatedMetadata.map_label_colors = freshestColorMapEntries;
dispatch(setDashboardMetadata(updatedMetadata));
}
if (mustSync) {
const freshestColorMapEntries =
getLabelsColorMapEntries(customLabelsColor);
updatedMetadata.map_label_colors = freshestColorMapEntries;
dispatch(setDashboardMetadata(updatedMetadata));
}
if (mustSync && !labelsColorMapMustSync) {
// prepare to persist the just applied labels color map
dispatch(setDashboardLabelsColorMapSync());
}
if (!mustSync && labelsColorMapMustSync) {
dispatch(setDashboardLabelsColorMapSynced());
}
if (mustSync && !labelsColorMapMustSync) {
// prepare to persist the just applied labels color map
dispatch(setDashboardLabelsColorMapSync());
}
};
promiseTimeout(syncLabelsColorMap, 500);
};
/**
@@ -856,18 +855,21 @@ export const ensureSyncedLabelsColorMap = metadata => (dispatch, getState) => {
* Ensure that the stored shared labels colors match current.
*
* @param {*} metadata - the dashboard metadata
* @param {*} forceFresh - when true it will use the fresh shared labels ignoring stored ones
* @returns void
*/
export const ensureSyncedSharedLabelsColors =
metadata => (dispatch, getState) => {
// using a timeout to let the rendered charts finish processing labels
setTimeout(() => {
(metadata, forceFresh = false) =>
(dispatch, getState) => {
const syncSharedLabelsColors = () => {
const {
dashboardState: { sharedLabelsColorsMustSync },
} = getState();
const updatedMetadata = { ...metadata };
const sharedLabelsColors = metadata.shared_label_colors || [];
const freshLabelsColors = getFreshSharedLabels(sharedLabelsColors);
const freshLabelsColors = getFreshSharedLabels(
forceFresh ? [] : sharedLabelsColors,
);
const isSharedLabelsColorsSynced = isEqual(
sharedLabelsColors,
freshLabelsColors,
@@ -884,10 +886,8 @@ export const ensureSyncedSharedLabelsColors =
// prepare to persist the shared labels colors
dispatch(setDashboardSharedLabelsColorsSync());
}
if (!mustSync && sharedLabelsColorsMustSync) {
dispatch(setDashboardSharedLabelsColorsSynced());
}
}, 500);
};
promiseTimeout(syncSharedLabelsColors, 500);
};
/**
@@ -909,7 +909,6 @@ export const updateDashboardLabelsColor =
const fullLabelsColors = metadata.map_label_colors || {};
const sharedLabelsColors = metadata.shared_label_colors || [];
const customLabelsColors = metadata.label_colors || {};
const updatedMetadata = { ...metadata };
// for dashboards with no color scheme, the charts should always use their individual schemes
// this logic looks for unique labels (not shared across multiple charts) of each rendered chart
@@ -965,11 +964,7 @@ export const updateDashboardLabelsColor =
const shouldGoFresh = shouldReset.length > 0 ? shouldReset : false;
const shouldMerge = !shouldGoFresh;
// re-apply the color map first to get fresh maps accordingly
applyColors(updatedMetadata, shouldGoFresh, shouldMerge);
// new data may have appeared in the map (data changes)
// or new slices may have appeared while changing tabs
dispatch(ensureSyncedLabelsColorMap(updatedMetadata));
dispatch(ensureSyncedSharedLabelsColors(updatedMetadata));
applyColors(metadata, shouldGoFresh, shouldMerge);
} catch (e) {
console.error('Failed to update colors for new charts and labels:', e);
}

View File

@@ -47,6 +47,8 @@ import {
applyDashboardLabelsColorOnLoad,
updateDashboardLabelsColor,
persistDashboardLabelsColor,
ensureSyncedSharedLabelsColors,
ensureSyncedLabelsColorMap,
} from 'src/dashboard/actions/dashboardState';
import { getColorNamespace, resetColors } from 'src/utils/colorScheme';
import { NATIVE_FILTER_DIVIDER_PREFIX } from '../nativeFilters/FiltersConfigModal/utils';
@@ -96,7 +98,6 @@ const DashboardContainer: FC<DashboardContainerProps> = ({ topLevelTabs }) => {
const [dashboardLabelsColorInitiated, setDashboardLabelsColorInitiated] =
useState(false);
const prevRenderedChartIds = useRef<number[]>([]);
const prevTabIndexRef = useRef();
const tabIndex = useMemo(() => {
const nextTabIndex = findTabIndexByComponentId({
@@ -110,6 +111,18 @@ const DashboardContainer: FC<DashboardContainerProps> = ({ topLevelTabs }) => {
prevTabIndexRef.current = nextTabIndex;
return nextTabIndex;
}, [dashboardLayout, directPathToChild]);
// when all charts have rendered, enforce fresh shared labels
const shouldForceFreshSharedLabelsColors =
dashboardLabelsColorInitiated &&
renderedChartIds.length > 0 &&
chartIds.length === renderedChartIds.length &&
prevRenderedChartIds.current.length < renderedChartIds.length;
const onBeforeUnload = useCallback(() => {
dispatch(persistDashboardLabelsColor());
resetColors(getColorNamespace(dashboardInfo?.metadata?.color_namespace));
prevRenderedChartIds.current = [];
}, [dashboardInfo?.metadata?.color_namespace, dispatch]);
useEffect(() => {
if (nativeFilterScopes.length === 0) {
@@ -148,11 +161,12 @@ const DashboardContainer: FC<DashboardContainerProps> = ({ topLevelTabs }) => {
const activeKey = min === 0 ? DASHBOARD_GRID_ID : min.toString();
const TOP_OF_PAGE_RANGE = 220;
const onBeforeUnload = useCallback(() => {
dispatch(persistDashboardLabelsColor());
resetColors(getColorNamespace(dashboardInfo?.metadata?.color_namespace));
prevRenderedChartIds.current = [];
}, [dashboardInfo?.metadata?.color_namespace, dispatch]);
useEffect(() => {
if (shouldForceFreshSharedLabelsColors) {
// all available charts have rendered, enforce freshest shared label colors
dispatch(ensureSyncedSharedLabelsColors(dashboardInfo.metadata, true));
}
}, [dashboardInfo.metadata, dispatch, shouldForceFreshSharedLabelsColors]);
useEffect(() => {
// verify freshness of color map
@@ -161,7 +175,6 @@ const DashboardContainer: FC<DashboardContainerProps> = ({ topLevelTabs }) => {
if (
dashboardLabelsColorInitiated &&
dashboardInfo?.id &&
numRenderedCharts > 0 &&
prevRenderedChartIds.current.length < numRenderedCharts
) {
@@ -170,12 +183,20 @@ const DashboardContainer: FC<DashboardContainerProps> = ({ topLevelTabs }) => {
);
prevRenderedChartIds.current = renderedChartIds;
dispatch(updateDashboardLabelsColor(newRenderedChartIds));
// new data may have appeared in the map (data changes)
// or new slices may have appeared while changing tabs
dispatch(ensureSyncedLabelsColorMap(dashboardInfo.metadata));
if (!shouldForceFreshSharedLabelsColors) {
dispatch(ensureSyncedSharedLabelsColors(dashboardInfo.metadata));
}
}
}, [
dashboardInfo?.id,
renderedChartIds,
dispatch,
dashboardLabelsColorInitiated,
dashboardInfo.metadata,
shouldForceFreshSharedLabelsColors,
]);
useEffect(() => {
@@ -183,9 +204,9 @@ const DashboardContainer: FC<DashboardContainerProps> = ({ topLevelTabs }) => {
labelsColorMap.source = LabelsColorMapSource.Dashboard;
if (dashboardInfo?.id && !dashboardLabelsColorInitiated) {
dispatch(applyDashboardLabelsColorOnLoad(dashboardInfo.metadata));
// apply labels color as dictated by stored metadata (if any)
setDashboardLabelsColorInitiated(true);
dispatch(applyDashboardLabelsColorOnLoad(dashboardInfo.metadata));
}
return () => {

View File

@@ -44,7 +44,6 @@ import ConnectedHeaderActionsDropdown from 'src/dashboard/components/Header/Head
import PublishedStatus from 'src/dashboard/components/PublishedStatus';
import UndoRedoKeyListeners from 'src/dashboard/components/UndoRedoKeyListeners';
import PropertiesModal from 'src/dashboard/components/PropertiesModal';
import getOwnerName from 'src/utils/getOwnerName';
import {
UNDO_LIMIT,
SAVE_TYPE_OVERWRITE,
@@ -55,7 +54,6 @@ import setPeriodicRunner, {
stopPeriodicRender,
} from 'src/dashboard/util/setPeriodicRunner';
import { PageHeaderWithActions } from 'src/components/PageHeaderWithActions';
import MetadataBar, { MetadataType } from 'src/components/MetadataBar';
import DashboardEmbedModal from '../EmbeddedModal';
import OverwriteConfirm from '../OverwriteConfirm';
import {
@@ -88,6 +86,7 @@ import { logEvent } from '../../../logger/actions';
import { dashboardInfoChanged } from '../../actions/dashboardInfo';
import isDashboardLoading from '../../util/isDashboardLoading';
import { useChartIds } from '../../util/charts/useChartIds';
import { useDashboardMetadataBar } from './useDashboardMetadataBar';
const extensionsRegistry = getExtensionsRegistry();
@@ -472,32 +471,7 @@ const Header = () => {
setShowingEmbedModal(false);
}, []);
const getMetadataItems = useCallback(
() => [
{
type: MetadataType.LastModified,
value: dashboardInfo.changed_on_delta_humanized,
modifiedBy:
getOwnerName(dashboardInfo.changed_by) || t('Not available'),
},
{
type: MetadataType.Owner,
createdBy: getOwnerName(dashboardInfo.created_by) || t('Not available'),
owners:
dashboardInfo.owners.length > 0
? dashboardInfo.owners.map(getOwnerName)
: t('None'),
createdOn: dashboardInfo.created_on_delta_humanized,
},
],
[
dashboardInfo.changed_by,
dashboardInfo.changed_on_delta_humanized,
dashboardInfo.created_by,
dashboardInfo.created_on_delta_humanized,
dashboardInfo.owners,
],
);
const metadataBar = useDashboardMetadataBar(dashboardInfo);
const userCanEdit =
dashboardInfo.dash_edit_perm && !dashboardInfo.is_managed_externally;
@@ -579,15 +553,13 @@ const Header = () => {
visible={!editMode}
/>
),
!editMode && !isEmbedded && (
<MetadataBar items={getMetadataItems()} tooltipPlacement="bottom" />
),
!editMode && !isEmbedded && metadataBar,
],
[
boundActionCreators.savePublished,
dashboardInfo.id,
editMode,
getMetadataItems,
metadataBar,
isEmbedded,
isPublished,
userCanEdit,

View File

@@ -0,0 +1,54 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import { useMemo } from 'react';
import { t } from '@superset-ui/core';
import { DashboardInfo } from 'src/dashboard/types';
import MetadataBar, { MetadataType } from 'src/components/MetadataBar';
import getOwnerName from 'src/utils/getOwnerName';
export const useDashboardMetadataBar = (dashboardInfo: DashboardInfo) => {
const items = useMemo(
() => [
{
type: MetadataType.LastModified as const,
value: dashboardInfo.changed_on_delta_humanized,
modifiedBy:
getOwnerName(dashboardInfo.changed_by) || t('Not available'),
},
{
type: MetadataType.Owner as const,
createdBy: getOwnerName(dashboardInfo.created_by) || t('Not available'),
owners:
dashboardInfo.owners.length > 0
? dashboardInfo.owners.map(getOwnerName)
: t('None'),
createdOn: dashboardInfo.created_on_delta_humanized,
},
],
[
dashboardInfo.changed_by,
dashboardInfo.changed_on_delta_humanized,
dashboardInfo.created_by,
dashboardInfo.created_on_delta_humanized,
dashboardInfo.owners,
],
);
return <MetadataBar items={items} tooltipPlacement="bottom" />;
};

View File

@@ -52,6 +52,9 @@ const initialState: { dashboardInfo: DashboardInfo } = {
conf: {},
},
crossFiltersEnabled: true,
created_on_delta_humanized: '',
changed_on_delta_humanized: '',
owners: [],
},
};

View File

@@ -33,6 +33,7 @@ import Database from 'src/types/Database';
import { UrlParamEntries } from 'src/utils/urlUtils';
import { UserWithPermissionsAndRoles } from 'src/types/bootstrapTypes';
import Owner from 'src/types/Owner';
import { ChartState } from '../explore/types';
export type { Dashboard } from 'src/types/Dashboard';
@@ -139,6 +140,11 @@ export type DashboardInfo = {
};
crossFiltersEnabled: boolean;
filterBarOrientation: FilterBarOrientation;
created_on_delta_humanized: string;
changed_on_delta_humanized: string;
changed_by?: Owner;
created_by?: Owner;
owners: Owner[];
};
export type ChartsState = { [key: string]: Chart };

View File

@@ -16,12 +16,12 @@
* specific language governing permissions and limitations
* under the License.
*/
import { useCallback, useEffect, useMemo, useState } from 'react';
import { useCallback, useEffect, useState } from 'react';
import { useHistory } from 'react-router-dom';
import { useDispatch } from 'react-redux';
import PropTypes from 'prop-types';
import { Tooltip } from 'src/components/Tooltip';
import { css, logging, SupersetClient, t, tn } from '@superset-ui/core';
import { css, logging, SupersetClient, t } from '@superset-ui/core';
import { chartPropShape } from 'src/dashboard/util/propShapes';
import AlteredSliceTag from 'src/components/AlteredSliceTag';
import Button from 'src/components/Button';
@@ -29,10 +29,10 @@ import Icons from 'src/components/Icons';
import PropertiesModal from 'src/explore/components/PropertiesModal';
import { sliceUpdated } from 'src/explore/actions/exploreActions';
import { PageHeaderWithActions } from 'src/components/PageHeaderWithActions';
import MetadataBar, { MetadataType } from 'src/components/MetadataBar';
import { setSaveChartModalVisibility } from 'src/explore/actions/saveModalActions';
import { applyColors, resetColors } from 'src/utils/colorScheme';
import { useExploreAdditionalActionsMenu } from '../useExploreAdditionalActionsMenu';
import { useExploreMetadataBar } from './useExploreMetadataBar';
const propTypes = {
actions: PropTypes.object.isRequired,
@@ -160,48 +160,7 @@ export const ExploreChartHeader = ({
metadata?.dashboards,
);
const metadataBar = useMemo(() => {
if (!metadata) {
return null;
}
const items = [];
items.push({
type: MetadataType.Dashboards,
title:
metadata.dashboards.length > 0
? tn(
'Added to 1 dashboard',
'Added to %s dashboards',
metadata.dashboards.length,
metadata.dashboards.length,
)
: t('Not added to any dashboard'),
description:
metadata.dashboards.length > 0
? t(
'You can preview the list of dashboards in the chart settings dropdown.',
)
: undefined,
});
items.push({
type: MetadataType.LastModified,
value: metadata.changed_on_humanized,
modifiedBy: metadata.changed_by || t('Not available'),
});
items.push({
type: MetadataType.Owner,
createdBy: metadata.created_by || t('Not available'),
owners: metadata.owners.length > 0 ? metadata.owners : t('None'),
createdOn: metadata.created_on_humanized,
});
if (slice?.description) {
items.push({
type: MetadataType.Description,
value: slice?.description,
});
}
return <MetadataBar items={items} tooltipPlacement="bottom" />;
}, [metadata, slice?.description]);
const metadataBar = useExploreMetadataBar(metadata, slice);
const oldSliceName = slice?.slice_name;
return (

View File

@@ -0,0 +1,71 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import { useMemo } from 'react';
import { t, tn } from '@superset-ui/core';
import MetadataBar, { MetadataType } from 'src/components/MetadataBar';
import { ExplorePageInitialData } from 'src/explore/types';
export const useExploreMetadataBar = (
metadata: ExplorePageInitialData['metadata'],
slice: ExplorePageInitialData['slice'],
) =>
useMemo(() => {
if (!metadata) {
return null;
}
const items = [];
if (metadata.dashboards) {
items.push({
type: MetadataType.Dashboards as const,
title:
metadata.dashboards.length > 0
? tn(
'Added to 1 dashboard',
'Added to %s dashboards',
metadata.dashboards.length,
metadata.dashboards.length,
)
: t('Not added to any dashboard'),
description:
metadata.dashboards.length > 0
? t(
'You can preview the list of dashboards in the chart settings dropdown.',
)
: undefined,
});
}
items.push({
type: MetadataType.LastModified as const,
value: metadata.changed_on_humanized,
modifiedBy: metadata.changed_by || t('Not available'),
});
items.push({
type: MetadataType.Owner as const,
createdBy: metadata.created_by || t('Not available'),
owners: metadata.owners.length > 0 ? metadata.owners : t('None'),
createdOn: metadata.created_on_humanized,
});
if (slice?.description) {
items.push({
type: MetadataType.Description as const,
value: slice?.description,
});
}
return <MetadataBar items={items} tooltipPlacement="bottom" />;
}, [metadata, slice?.description]);

View File

@@ -82,6 +82,10 @@ export interface ExplorePageInitialData {
owners: string[];
created_by?: string;
changed_by?: string;
dashboards?: {
id: number;
dashboard_title: string;
}[];
};
saveAction?: SaveActionType | null;
}

View File

@@ -35,6 +35,7 @@ export const getColorNamespace = (namespace?: string) => namespace || undefined;
* Get labels shared across all charts in a dashboard.
* Merges a fresh instance of shared label colors with a stored one.
*
* @param currentSharedLabels - existing shared labels to merge with fresh
* @returns Record<string, string>
*/
export const getFreshSharedLabels = (
@@ -74,7 +75,7 @@ export const getSharedLabelsColorMapEntries = (
* @returns all color entries except custom label colors
*/
export const getLabelsColorMapEntries = (
customLabelsColor: Record<string, string>,
customLabelsColor: Record<string, string> = {},
): Record<string, string> => {
const labelsColorMapInstance = getLabelsColorMap();
const allEntries = Object.fromEntries(labelsColorMapInstance.getColorMap());

View File

@@ -33,7 +33,6 @@ from sqlalchemy import text
from sqlalchemy.engine.reflection import Inspector
from sqlalchemy.engine.url import URL
from sqlalchemy.exc import NoSuchTableError
from trino.exceptions import HttpError
from superset import db
from superset.constants import QUERY_CANCEL_KEY, QUERY_EARLY_CANCEL_KEY, USER_AGENT
@@ -61,6 +60,12 @@ if TYPE_CHECKING:
logger = logging.getLogger(__name__)
try:
# since trino is an optional dependency, we need to handle the ImportError
from trino.exceptions import HttpError
except ImportError:
HttpError = Exception
class CustomTrinoAuthErrorMeta(type):
def __instancecheck__(cls, instance: object) -> bool:

View File

@@ -1,290 +0,0 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import os
import sys
import pytest
SHA = "22e7c602b9aa321ec7e0df4bb0033048664dcdf0"
PR_ID = "666"
OLD_REL = "2.1.0"
NEW_REL = "2.1.1"
REPO = "apache/superset"
# Add the 'scripts' directory to sys.path
scripts_dir = os.path.abspath(
os.path.join(os.path.dirname(__file__), "../../../scripts")
)
sys.path.append(scripts_dir)
import build_docker as docker_utils # Replace with the actual function name # noqa: E402
@pytest.fixture(autouse=True)
def set_env_var():
os.environ["TEST_ENV"] = "true"
yield
del os.environ["TEST_ENV"]
@pytest.mark.parametrize(
"release, expected_bool",
[
("2.1.0", False),
("2.1.1", True),
("1.0.0", False),
("3.0.0", True),
],
)
def test_is_latest_release(release, expected_bool):
assert docker_utils.is_latest_release(release) == expected_bool
@pytest.mark.parametrize(
"build_preset, build_platforms, sha, build_context, build_context_ref, expected_tags",
[
# PRs
(
"lean",
["linux/arm64"],
SHA,
"pull_request",
PR_ID,
[f"{REPO}:22e7c60-arm", f"{REPO}:{SHA}-arm", f"{REPO}:pr-{PR_ID}-arm"],
),
(
"ci",
["linux/amd64"],
SHA,
"pull_request",
PR_ID,
[f"{REPO}:22e7c60-ci", f"{REPO}:{SHA}-ci", f"{REPO}:pr-{PR_ID}-ci"],
),
(
"lean",
["linux/amd64"],
SHA,
"pull_request",
PR_ID,
[f"{REPO}:22e7c60", f"{REPO}:{SHA}", f"{REPO}:pr-{PR_ID}"],
),
(
"dev",
["linux/arm64"],
SHA,
"pull_request",
PR_ID,
[
f"{REPO}:22e7c60-dev-arm",
f"{REPO}:{SHA}-dev-arm",
f"{REPO}:pr-{PR_ID}-dev-arm",
],
),
(
"dev",
["linux/amd64"],
SHA,
"pull_request",
PR_ID,
[f"{REPO}:22e7c60-dev", f"{REPO}:{SHA}-dev", f"{REPO}:pr-{PR_ID}-dev"],
),
# old releases
(
"lean",
["linux/arm64"],
SHA,
"release",
OLD_REL,
[f"{REPO}:22e7c60-arm", f"{REPO}:{SHA}-arm", f"{REPO}:{OLD_REL}-arm"],
),
(
"lean",
["linux/amd64"],
SHA,
"release",
OLD_REL,
[f"{REPO}:22e7c60", f"{REPO}:{SHA}", f"{REPO}:{OLD_REL}"],
),
(
"dev",
["linux/arm64"],
SHA,
"release",
OLD_REL,
[
f"{REPO}:22e7c60-dev-arm",
f"{REPO}:{SHA}-dev-arm",
f"{REPO}:{OLD_REL}-dev-arm",
],
),
(
"dev",
["linux/amd64"],
SHA,
"release",
OLD_REL,
[f"{REPO}:22e7c60-dev", f"{REPO}:{SHA}-dev", f"{REPO}:{OLD_REL}-dev"],
),
# new releases
(
"lean",
["linux/arm64"],
SHA,
"release",
NEW_REL,
[
f"{REPO}:22e7c60-arm",
f"{REPO}:{SHA}-arm",
f"{REPO}:{NEW_REL}-arm",
f"{REPO}:latest-arm",
],
),
(
"lean",
["linux/amd64"],
SHA,
"release",
NEW_REL,
[f"{REPO}:22e7c60", f"{REPO}:{SHA}", f"{REPO}:{NEW_REL}", f"{REPO}:latest"],
),
(
"dev",
["linux/arm64"],
SHA,
"release",
NEW_REL,
[
f"{REPO}:22e7c60-dev-arm",
f"{REPO}:{SHA}-dev-arm",
f"{REPO}:{NEW_REL}-dev-arm",
f"{REPO}:latest-dev-arm",
],
),
(
"dev",
["linux/amd64"],
SHA,
"release",
NEW_REL,
[
f"{REPO}:22e7c60-dev",
f"{REPO}:{SHA}-dev",
f"{REPO}:{NEW_REL}-dev",
f"{REPO}:latest-dev",
],
),
# merge on master
(
"lean",
["linux/arm64"],
SHA,
"push",
"master",
[f"{REPO}:22e7c60-arm", f"{REPO}:{SHA}-arm", f"{REPO}:master-arm"],
),
(
"lean",
["linux/amd64"],
SHA,
"push",
"master",
[f"{REPO}:22e7c60", f"{REPO}:{SHA}", f"{REPO}:master"],
),
(
"dev",
["linux/arm64"],
SHA,
"push",
"master",
[
f"{REPO}:22e7c60-dev-arm",
f"{REPO}:{SHA}-dev-arm",
f"{REPO}:master-dev-arm",
],
),
(
"dev",
["linux/amd64"],
SHA,
"push",
"master",
[f"{REPO}:22e7c60-dev", f"{REPO}:{SHA}-dev", f"{REPO}:master-dev"],
),
],
)
def test_get_docker_tags(
build_preset, build_platforms, sha, build_context, build_context_ref, expected_tags
):
tags = docker_utils.get_docker_tags(
build_preset, build_platforms, sha, build_context, build_context_ref
)
for tag in expected_tags:
assert tag in tags
@pytest.mark.parametrize(
"build_preset, build_platforms, is_authenticated, sha, build_context, build_context_ref, contains",
[
(
"lean",
["linux/amd64"],
True,
SHA,
"push",
"master",
["--push", f"-t {REPO}:master "],
),
(
"dev",
["linux/amd64"],
False,
SHA,
"push",
"master",
["--load", f"-t {REPO}:master-dev ", "--target dev"],
),
# multi-platform
(
"lean",
["linux/arm64", "linux/amd64"],
True,
SHA,
"push",
"master",
["--platform linux/arm64,linux/amd64"],
),
],
)
def test_get_docker_command(
build_preset,
build_platforms,
is_authenticated,
sha,
build_context,
build_context_ref,
contains,
):
cmd = docker_utils.get_docker_command(
build_preset,
build_platforms,
is_authenticated,
sha,
build_context,
build_context_ref,
)
for s in contains:
assert s in cmd

View File

@@ -1,268 +0,0 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import os
import sys
import pytest
SHA = "22e7c602b9aa321ec7e0df4bb0033048664dcdf0"
PR_ID = "666"
OLD_REL = "2.1.0"
NEW_REL = "2.1.1"
REPO = "apache/superset"
# Add the 'scripts' directory to sys.path
scripts_dir = os.path.abspath(
os.path.join(os.path.dirname(__file__), "../../../scripts")
)
sys.path.append(scripts_dir)
import build_docker as docker_utils # Replace with the actual function name # noqa: E402
@pytest.fixture(autouse=True)
def set_env_var():
os.environ["TEST_ENV"] = "true"
yield
del os.environ["TEST_ENV"]
@pytest.mark.parametrize(
"release, expected_bool",
[
("2.1.0", False),
("2.1.1", True),
("1.0.0", False),
("3.0.0", True),
],
)
def test_is_latest_release(release, expected_bool):
assert docker_utils.is_latest_release(release) == expected_bool
@pytest.mark.parametrize(
"build_preset, build_platform, sha, build_context, build_context_ref, expected_tags",
[
# PRs
(
"lean",
"linux/arm64",
SHA,
"pull_request",
PR_ID,
[f"{REPO}:22e7c60-arm", f"{REPO}:{SHA}-arm"],
),
(
"lean",
"linux/amd64",
SHA,
"pull_request",
PR_ID,
[f"{REPO}:22e7c60", f"{REPO}:{SHA}"],
),
(
"dev",
"linux/arm64",
SHA,
"pull_request",
PR_ID,
[f"{REPO}:22e7c60-dev-arm", f"{REPO}:{SHA}-dev-arm"],
),
(
"dev",
"linux/amd64",
SHA,
"pull_request",
PR_ID,
[f"{REPO}:22e7c60-dev", f"{REPO}:{SHA}-dev"],
),
# old releases
(
"lean",
"linux/arm64",
SHA,
"release",
OLD_REL,
[f"{REPO}:22e7c60-arm", f"{REPO}:{SHA}-arm", f"{REPO}:{OLD_REL}-arm"],
),
(
"lean",
"linux/amd64",
SHA,
"release",
OLD_REL,
[f"{REPO}:22e7c60", f"{REPO}:{SHA}", f"{REPO}:{OLD_REL}"],
),
(
"dev",
"linux/arm64",
SHA,
"release",
OLD_REL,
[
f"{REPO}:22e7c60-dev-arm",
f"{REPO}:{SHA}-dev-arm",
f"{REPO}:{OLD_REL}-dev-arm",
],
),
(
"dev",
"linux/amd64",
SHA,
"release",
OLD_REL,
[f"{REPO}:22e7c60-dev", f"{REPO}:{SHA}-dev", f"{REPO}:{OLD_REL}-dev"],
),
# new releases
(
"lean",
"linux/arm64",
SHA,
"release",
NEW_REL,
[
f"{REPO}:22e7c60-arm",
f"{REPO}:{SHA}-arm",
f"{REPO}:{NEW_REL}-arm",
f"{REPO}:latest-arm",
],
),
(
"lean",
"linux/amd64",
SHA,
"release",
NEW_REL,
[f"{REPO}:22e7c60", f"{REPO}:{SHA}", f"{REPO}:{NEW_REL}", f"{REPO}:latest"],
),
(
"dev",
"linux/arm64",
SHA,
"release",
NEW_REL,
[
f"{REPO}:22e7c60-dev-arm",
f"{REPO}:{SHA}-dev-arm",
f"{REPO}:{NEW_REL}-dev-arm",
f"{REPO}:latest-dev-arm",
],
),
(
"dev",
"linux/amd64",
SHA,
"release",
NEW_REL,
[
f"{REPO}:22e7c60-dev",
f"{REPO}:{SHA}-dev",
f"{REPO}:{NEW_REL}-dev",
f"{REPO}:latest-dev",
],
),
# merge on master
(
"lean",
"linux/arm64",
SHA,
"push",
"master",
[f"{REPO}:22e7c60-arm", f"{REPO}:{SHA}-arm", f"{REPO}:master-arm"],
),
(
"lean",
"linux/amd64",
SHA,
"push",
"master",
[f"{REPO}:22e7c60", f"{REPO}:{SHA}", f"{REPO}:master"],
),
(
"dev",
"linux/arm64",
SHA,
"push",
"master",
[
f"{REPO}:22e7c60-dev-arm",
f"{REPO}:{SHA}-dev-arm",
f"{REPO}:master-dev-arm",
],
),
(
"dev",
"linux/amd64",
SHA,
"push",
"master",
[f"{REPO}:22e7c60-dev", f"{REPO}:{SHA}-dev", f"{REPO}:master-dev"],
),
],
)
def test_get_docker_tags(
build_preset, build_platform, sha, build_context, build_context_ref, expected_tags
):
tags = docker_utils.get_docker_tags(
build_preset, build_platform, sha, build_context, build_context_ref
)
for tag in expected_tags:
assert tag in tags
@pytest.mark.parametrize(
"build_preset, build_platform, is_authenticated, sha, build_context, build_context_ref, contains",
[
(
"lean",
"linux/amd64",
True,
SHA,
"push",
"master",
["--push", f"-t {REPO}:master "],
),
(
"dev",
"linux/amd64",
False,
SHA,
"push",
"master",
["--load", f"-t {REPO}:master-dev "],
),
],
)
def test_get_docker_command(
build_preset,
build_platform,
is_authenticated,
sha,
build_context,
build_context_ref,
contains,
):
cmd = docker_utils.get_docker_command(
build_preset,
build_platform,
is_authenticated,
sha,
build_context,
build_context_ref,
)
for s in contains:
assert s in cmd