mirror of
https://github.com/apache/superset.git
synced 2026-05-03 15:04:28 +00:00
Compare commits
16 Commits
standardiz
...
rison
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
58b448dfc3 | ||
|
|
c73e132369 | ||
|
|
0f9d0996a2 | ||
|
|
2f4f216e47 | ||
|
|
ab916bf199 | ||
|
|
01169bc5db | ||
|
|
5c710def85 | ||
|
|
0b0a295b83 | ||
|
|
c9b587e363 | ||
|
|
bb5660512b | ||
|
|
659db162d6 | ||
|
|
cb24737825 | ||
|
|
97b35a4640 | ||
|
|
54af1cb2c8 | ||
|
|
b8c2f7db47 | ||
|
|
4701e78f1f |
@@ -1,5 +1,5 @@
|
||||
# Keep this in sync with the base image in the main Dockerfile (ARG PY_VER)
|
||||
FROM python:3.11.13-bookworm AS base
|
||||
FROM python:3.11.13-trixie AS base
|
||||
|
||||
# Install system dependencies that Superset needs
|
||||
# This layer will be cached across Codespace sessions
|
||||
|
||||
2
.github/workflows/superset-frontend.yml
vendored
2
.github/workflows/superset-frontend.yml
vendored
@@ -47,7 +47,7 @@ jobs:
|
||||
git show -s --format=raw HEAD
|
||||
docker buildx build \
|
||||
-t $TAG \
|
||||
--cache-from=type=registry,ref=apache/superset-cache:3.10-slim-bookworm \
|
||||
--cache-from=type=registry,ref=apache/superset-cache:3.10-slim-trixie \
|
||||
--target superset-node-ci \
|
||||
.
|
||||
|
||||
|
||||
30
Dockerfile
30
Dockerfile
@@ -18,7 +18,7 @@
|
||||
######################################################################
|
||||
# Node stage to deal with static asset construction
|
||||
######################################################################
|
||||
ARG PY_VER=3.11.13-slim-bookworm
|
||||
ARG PY_VER=3.11.13-slim-trixie
|
||||
|
||||
# If BUILDPLATFORM is null, set it to 'amd64' (or leave as is otherwise).
|
||||
ARG BUILDPLATFORM=${BUILDPLATFORM:-amd64}
|
||||
@@ -29,7 +29,7 @@ ARG BUILD_TRANSLATIONS="false"
|
||||
######################################################################
|
||||
# superset-node-ci used as a base for building frontend assets and CI
|
||||
######################################################################
|
||||
FROM --platform=${BUILDPLATFORM} node:20-bookworm-slim AS superset-node-ci
|
||||
FROM --platform=${BUILDPLATFORM} node:20-trixie-slim AS superset-node-ci
|
||||
ARG BUILD_TRANSLATIONS
|
||||
ENV BUILD_TRANSLATIONS=${BUILD_TRANSLATIONS}
|
||||
ARG DEV_MODE="false" # Skip frontend build in dev mode
|
||||
@@ -64,7 +64,7 @@ RUN --mount=type=bind,source=./superset-frontend/package.json,target=./package.j
|
||||
--mount=type=bind,source=./superset-frontend/package-lock.json,target=./package-lock.json \
|
||||
--mount=type=cache,target=/root/.cache \
|
||||
--mount=type=cache,target=/root/.npm \
|
||||
if [ "$DEV_MODE" = "false" ]; then \
|
||||
if [ "${DEV_MODE}" = "false" ]; then \
|
||||
npm ci; \
|
||||
else \
|
||||
echo "Skipping 'npm ci' in dev mode"; \
|
||||
@@ -80,7 +80,7 @@ FROM superset-node-ci AS superset-node
|
||||
|
||||
# Build the frontend if not in dev mode
|
||||
RUN --mount=type=cache,target=/root/.npm \
|
||||
if [ "$DEV_MODE" = "false" ]; then \
|
||||
if [ "${DEV_MODE}" = "false" ]; then \
|
||||
echo "Running 'npm run ${BUILD_CMD}'"; \
|
||||
npm run ${BUILD_CMD}; \
|
||||
else \
|
||||
@@ -91,11 +91,10 @@ RUN --mount=type=cache,target=/root/.npm \
|
||||
COPY superset/translations /app/superset/translations
|
||||
|
||||
# Build translations if enabled, then cleanup localization files
|
||||
RUN if [ "$BUILD_TRANSLATIONS" = "true" ]; then \
|
||||
RUN if [ "${BUILD_TRANSLATIONS}" = "true" ]; then \
|
||||
npm run build-translation; \
|
||||
fi; \
|
||||
rm -rf /app/superset/translations/*/*/*.po; \
|
||||
rm -rf /app/superset/translations/*/*/*.mo;
|
||||
rm -rf /app/superset/translations/*/*/*.[po,mo];
|
||||
|
||||
|
||||
######################################################################
|
||||
@@ -106,10 +105,10 @@ FROM python:${PY_VER} AS python-base
|
||||
ARG SUPERSET_HOME="/app/superset_home"
|
||||
ENV SUPERSET_HOME=${SUPERSET_HOME}
|
||||
|
||||
RUN mkdir -p $SUPERSET_HOME
|
||||
RUN mkdir -p ${SUPERSET_HOME}
|
||||
RUN useradd --user-group -d ${SUPERSET_HOME} -m --no-log-init --shell /bin/bash superset \
|
||||
&& chmod -R 1777 $SUPERSET_HOME \
|
||||
&& chown -R superset:superset $SUPERSET_HOME
|
||||
&& chmod -R 1777 ${SUPERSET_HOME} \
|
||||
&& chown -R superset:superset ${SUPERSET_HOME}
|
||||
|
||||
# Some bash scripts needed throughout the layers
|
||||
COPY --chmod=755 docker/*.sh /app/docker/
|
||||
@@ -134,11 +133,10 @@ RUN --mount=type=cache,target=/root/.cache/uv \
|
||||
. /app/.venv/bin/activate && /app/docker/pip-install.sh --requires-build-essential -r requirements/translations.txt
|
||||
|
||||
COPY superset/translations/ /app/translations_mo/
|
||||
RUN if [ "$BUILD_TRANSLATIONS" = "true" ]; then \
|
||||
RUN if [ "${BUILD_TRANSLATIONS}" = "true" ]; then \
|
||||
pybabel compile -d /app/translations_mo | true; \
|
||||
fi; \
|
||||
rm -f /app/translations_mo/*/*/*.po; \
|
||||
rm -f /app/translations_mo/*/*/*.json;
|
||||
rm -f /app/translations_mo/*/*/*.[po,json]
|
||||
|
||||
######################################################################
|
||||
# Python APP common layer
|
||||
@@ -173,11 +171,11 @@ RUN mkdir -p \
|
||||
ARG INCLUDE_CHROMIUM="false"
|
||||
ARG INCLUDE_FIREFOX="false"
|
||||
RUN --mount=type=cache,target=${SUPERSET_HOME}/.cache/uv \
|
||||
if [ "$INCLUDE_CHROMIUM" = "true" ] || [ "$INCLUDE_FIREFOX" = "true" ]; then \
|
||||
if [ "${INCLUDE_CHROMIUM}" = "true" ] || [ "${INCLUDE_FIREFOX}" = "true" ]; then \
|
||||
uv pip install playwright && \
|
||||
playwright install-deps && \
|
||||
if [ "$INCLUDE_CHROMIUM" = "true" ]; then playwright install chromium; fi && \
|
||||
if [ "$INCLUDE_FIREFOX" = "true" ]; then playwright install firefox; fi; \
|
||||
if [ "${INCLUDE_CHROMIUM}" = "true" ]; then playwright install chromium; fi && \
|
||||
if [ "${INCLUDE_FIREFOX}" = "true" ]; then playwright install firefox; fi; \
|
||||
else \
|
||||
echo "Skipping browser installation"; \
|
||||
fi
|
||||
|
||||
2
Makefile
2
Makefile
@@ -91,7 +91,7 @@ js-format:
|
||||
cd superset-frontend; npm run prettier
|
||||
|
||||
flask-app:
|
||||
flask run -p 8088 --with-threads --reload --debugger
|
||||
flask run -p 8088 --reload --debugger
|
||||
|
||||
node-app:
|
||||
cd superset-frontend; npm run dev-server
|
||||
|
||||
@@ -14,7 +14,7 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
FROM python:3.10-slim-bookworm
|
||||
FROM python:3.10-slim-trixie
|
||||
|
||||
RUN useradd --user-group --create-home --no-log-init --shell /bin/bash superset
|
||||
|
||||
|
||||
@@ -14,7 +14,7 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
FROM python:3.10-slim-bookworm
|
||||
FROM python:3.10-slim-trixie
|
||||
|
||||
RUN useradd --user-group --create-home --no-log-init --shell /bin/bash superset
|
||||
|
||||
|
||||
@@ -14,7 +14,7 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
FROM python:3.10-slim-bookworm
|
||||
FROM python:3.10-slim-trixie
|
||||
ARG VERSION
|
||||
|
||||
RUN git clone --depth 1 --branch ${VERSION} https://github.com/apache/superset.git /superset
|
||||
|
||||
@@ -14,7 +14,7 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
FROM python:3.10-slim-bookworm
|
||||
FROM python:3.10-slim-trixie
|
||||
|
||||
RUN apt-get update -y
|
||||
RUN apt-get install -y \
|
||||
|
||||
@@ -71,7 +71,7 @@ x-common-build: &common-build
|
||||
context: .
|
||||
target: ${SUPERSET_BUILD_TARGET:-dev} # can use `dev` (default) or `lean`
|
||||
cache_from:
|
||||
- apache/superset-cache:3.10-slim-bookworm
|
||||
- apache/superset-cache:3.10-slim-trixie
|
||||
args:
|
||||
DEV_MODE: "true"
|
||||
INCLUDE_CHROMIUM: ${INCLUDE_CHROMIUM:-false}
|
||||
|
||||
@@ -33,7 +33,7 @@ x-common-build: &common-build
|
||||
context: .
|
||||
target: dev
|
||||
cache_from:
|
||||
- apache/superset-cache:3.10-slim-bookworm
|
||||
- apache/superset-cache:3.10-slim-trixie
|
||||
|
||||
services:
|
||||
redis:
|
||||
|
||||
@@ -36,7 +36,7 @@ x-common-build: &common-build
|
||||
context: .
|
||||
target: ${SUPERSET_BUILD_TARGET:-dev} # can use `dev` (default) or `lean`
|
||||
cache_from:
|
||||
- apache/superset-cache:3.10-slim-bookworm
|
||||
- apache/superset-cache:3.10-slim-trixie
|
||||
args:
|
||||
DEV_MODE: "true"
|
||||
INCLUDE_CHROMIUM: ${INCLUDE_CHROMIUM:-false}
|
||||
|
||||
@@ -18,7 +18,7 @@
|
||||
set -euo pipefail
|
||||
|
||||
# Ensure this script is run as root
|
||||
if [[ $EUID -ne 0 ]]; then
|
||||
if [[ ${EUID} -ne 0 ]]; then
|
||||
echo "This script must be run as root" >&2
|
||||
exit 1
|
||||
fi
|
||||
@@ -42,7 +42,7 @@ echo -e "${GREEN}Installing packages: $@${RESET}"
|
||||
apt-get install -yqq --no-install-recommends "$@"
|
||||
|
||||
echo -e "${GREEN}Autoremoving unnecessary packages...${RESET}"
|
||||
apt-get autoremove -y
|
||||
apt-get autoremove -yqq --purge
|
||||
|
||||
echo -e "${GREEN}Cleaning up package cache and metadata...${RESET}"
|
||||
apt-get clean
|
||||
|
||||
@@ -72,7 +72,7 @@ case "${1}" in
|
||||
;;
|
||||
app)
|
||||
echo "Starting web app (using development server)..."
|
||||
flask run -p $PORT --with-threads --reload --debugger --host=0.0.0.0
|
||||
flask run -p $PORT --reload --debugger --host=0.0.0.0
|
||||
;;
|
||||
app-gunicorn)
|
||||
echo "Starting web app..."
|
||||
|
||||
@@ -38,14 +38,14 @@ for arg in "$@"; do
|
||||
done
|
||||
|
||||
# Install build-essential if required
|
||||
if $REQUIRES_BUILD_ESSENTIAL; then
|
||||
if ${REQUIRES_BUILD_ESSENTIAL}; then
|
||||
echo "Installing build-essential for package builds..."
|
||||
apt-get update -qq \
|
||||
&& apt-get install -yqq --no-install-recommends build-essential
|
||||
fi
|
||||
|
||||
# Choose whether to use pip cache
|
||||
if $USE_CACHE; then
|
||||
if ${USE_CACHE}; then
|
||||
echo "Using pip cache..."
|
||||
uv pip install "${ARGS[@]}"
|
||||
else
|
||||
@@ -54,7 +54,7 @@ else
|
||||
fi
|
||||
|
||||
# Remove build-essential if it was installed
|
||||
if $REQUIRES_BUILD_ESSENTIAL; then
|
||||
if ${REQUIRES_BUILD_ESSENTIAL}; then
|
||||
echo "Removing build-essential to keep the image lean..."
|
||||
apt-get autoremove -yqq --purge build-essential \
|
||||
&& apt-get clean \
|
||||
|
||||
283
docs/docs/using-superset/url-filters.mdx
Normal file
283
docs/docs/using-superset/url-filters.mdx
Normal file
@@ -0,0 +1,283 @@
|
||||
---
|
||||
title: URL Filters
|
||||
hide_title: false
|
||||
sidebar_position: 4
|
||||
version: 1
|
||||
---
|
||||
|
||||
# URL Filters
|
||||
|
||||
Apply filters to dashboards and charts directly through the URL using a simple, human-readable syntax.
|
||||
|
||||
Superset URL filters use [Rison](https://github.com/Nanonid/rison), a data serialization format that is JSON-compatible but optimized for URLs — it is expressive and compact, and it keeps URLs readable by avoiding most percent-encoding clutter.
|
||||
|
||||
## Why URL Filters?
|
||||
|
||||
URL filters allow you to:
|
||||
- Share specific views of data with colleagues
|
||||
- Bookmark frequently used filter combinations
|
||||
- Create dynamic links in external applications
|
||||
- Override saved dashboard states temporarily
|
||||
- Build data-driven workflows
|
||||
|
||||
## Quick Start
|
||||
|
||||
Add the `f` parameter to any dashboard or explore URL:
|
||||
|
||||
```
|
||||
/dashboard/123?f=(country:USA)
|
||||
/explore?f=(year:2024)
|
||||
```
|
||||
|
||||
## Basic Syntax
|
||||
|
||||
### Single Filter
|
||||
|
||||
Filter by a single value:
|
||||
|
||||
```
|
||||
f=(country:USA)
|
||||
```
|
||||
|
||||
### Multiple Filters (AND)
|
||||
|
||||
Combine multiple filters with commas (AND logic):
|
||||
|
||||
```
|
||||
f=(country:USA,year:2024)
|
||||
f=(status:active,department:Sales,region:North)
|
||||
```
|
||||
|
||||
### Lists (IN Operator)
|
||||
|
||||
Use `!()` to filter by multiple values (OR within the field):
|
||||
|
||||
```
|
||||
f=(country:!(USA,Canada)) # country IN ('USA', 'Canada')
|
||||
f=(status:!(active,pending,review)) # status IN ('active', 'pending', 'review')
|
||||
```
|
||||
|
||||
## Logical Operators
|
||||
|
||||
### NOT Operator
|
||||
|
||||
Exclude specific values:
|
||||
|
||||
```
|
||||
f=(NOT:(country:USA)) # country != 'USA'
|
||||
f=(NOT:(status:deleted)) # status != 'deleted'
|
||||
```
|
||||
|
||||
Exclude multiple values (NOT IN):
|
||||
|
||||
```
|
||||
f=(NOT:(country:!(USA,Canada))) # country NOT IN ('USA', 'Canada')
|
||||
f=(NOT:(type:!(test,demo))) # type NOT IN ('test', 'demo')
|
||||
```
|
||||
|
||||
### OR Operator
|
||||
|
||||
Create OR conditions across different fields:
|
||||
|
||||
```
|
||||
f=(OR:!(status:urgent,priority:high)) # status = 'urgent' OR priority = 'high'
|
||||
f=(OR:!(region:Europe,country:USA)) # region = 'Europe' OR country = 'USA'
|
||||
```
|
||||
|
||||
## Comparison Operators
|
||||
|
||||
### Numeric Comparisons
|
||||
|
||||
Use comparison operators for numeric fields:
|
||||
|
||||
```
|
||||
f=(sales:(gt:100000)) # sales > 100000
|
||||
f=(age:(gte:18)) # age >= 18
|
||||
f=(temperature:(lt:32)) # temperature < 32
|
||||
f=(price:(lte:1000)) # price <= 1000
|
||||
```
|
||||
|
||||
### Range Queries (BETWEEN)
|
||||
|
||||
Filter values within a range:
|
||||
|
||||
```
|
||||
f=(date:(between:!(2024-01-01,2024-12-31))) # Full year 2024
|
||||
f=(age:(between:!(25,65))) # Age 25 to 65 inclusive
|
||||
f=(revenue:(between:!(10000,50000))) # Revenue range
|
||||
```
|
||||
|
||||
### Text Matching (LIKE)
|
||||
|
||||
Use SQL LIKE patterns for text fields:
|
||||
|
||||
```
|
||||
f=(name:(like:'John%')) # Names starting with John
|
||||
f=(email:(like:'%@company.com')) # Company emails
|
||||
f=(description:(like:'%urgent%')) # Contains 'urgent'
|
||||
```
|
||||
|
||||
## Complex Examples
|
||||
|
||||
### E-commerce Dashboard
|
||||
|
||||
Show high-value orders from North America, excluding test accounts:
|
||||
|
||||
```
|
||||
f=(region:!(USA,Canada,Mexico),amount:(gt:1000),NOT:(account_type:test))
|
||||
```
|
||||
|
||||
### Sales Analytics
|
||||
|
||||
Q4 data for either VIP customers or high revenue:
|
||||
|
||||
```
|
||||
f=(quarter:Q4,OR:!(customer_type:VIP,revenue:(gt:100000)))
|
||||
```
|
||||
|
||||
### User Activity
|
||||
|
||||
Active users in specific departments, excluding contractors:
|
||||
|
||||
```
|
||||
f=(status:active,department:!(Engineering,Sales),NOT:(employee_type:contractor))
|
||||
```
|
||||
|
||||
## Integration with Existing Features
|
||||
|
||||
### With Permalinks
|
||||
|
||||
Override saved permalink state:
|
||||
|
||||
```
|
||||
/dashboard/permalink/xyz789?f=(region:Europe)
|
||||
```
|
||||
|
||||
The filter will override the region saved in the permalink while preserving other settings.
|
||||
|
||||
### With Form Data Keys
|
||||
|
||||
Apply filters on top of cached explore state:
|
||||
|
||||
```
|
||||
/explore?form_data_key=abc123&f=(metric:(gt:baseline))
|
||||
```
|
||||
|
||||
### With Embedded Dashboards
|
||||
|
||||
Filter embedded dashboards:
|
||||
|
||||
```
|
||||
/dashboard/42/embedded?f=(client:ACME)
|
||||
```
|
||||
|
||||
## Combining Everything
|
||||
|
||||
Here's a complex real-world example that combines multiple features:
|
||||
|
||||
```
|
||||
f=(
|
||||
year:2024,
|
||||
quarter:!(Q3,Q4),
|
||||
region:!(North,South),
|
||||
NOT:(status:!(cancelled,refunded)),
|
||||
revenue:(gt:50000),
|
||||
OR:!(priority:urgent,escalated:!t)
|
||||
)
|
||||
```
|
||||
|
||||
This filters for:
|
||||
- Year 2024
|
||||
- Q3 or Q4
|
||||
- North or South regions
|
||||
- Excluding cancelled or refunded orders
|
||||
- Revenue greater than $50,000
|
||||
- And either urgent priority OR escalated flag is true
|
||||
|
||||
## Tips and Best Practices
|
||||
|
||||
### Keep It Simple
|
||||
Start with basic filters and add complexity only when needed:
|
||||
- ✅ Good: `f=(country:USA,year:2024)`
|
||||
- ❌ Avoid: Complex nested logic when native filters would be clearer
|
||||
|
||||
### Use Lists for Same-Field OR
|
||||
Instead of complex OR operators, use lists when filtering one field:
|
||||
- ✅ Better: `f=(status:!(active,pending,review))`
|
||||
- ❌ Avoid: `f=(OR:!(status:active,OR:!(status:pending,status:review)))`
|
||||
|
||||
### Quote Strings with Special Characters
|
||||
Use single quotes for strings containing spaces or special characters:
|
||||
- `f=(city:'New York')`
|
||||
- `f=(name:'O''Brien')` (escape single quotes inside a value by doubling them)
|
||||
|
||||
### Date Formats
|
||||
Use ISO 8601 format for dates:
|
||||
- `f=(date:2024-01-15)`
|
||||
- `f=(created:(between:!(2024-01-01,2024-12-31)))`
|
||||
|
||||
## Limitations
|
||||
|
||||
- **Complex Boolean Logic**: For nested AND/OR combinations beyond what's shown here, use Superset's native filters
|
||||
- **Column Names**: Must not conflict with reserved operators (OR, NOT)
|
||||
- **URL Length**: Browsers have URL length limits; for very complex filters, use native filters
|
||||
- **Special Characters**: Some characters may need URL encoding
|
||||
|
||||
## API Reference
|
||||
|
||||
### Logical Operators
|
||||
| Operator | Syntax | Description |
|
||||
|----------|--------|-------------|
|
||||
| AND | `,` (comma) | Default between conditions |
|
||||
| OR | `OR:!(...)` | Explicit OR across conditions |
|
||||
| NOT | `NOT:(...)` | Negation |
|
||||
|
||||
### Comparison Operators
|
||||
| Operator | Syntax | Example |
|
||||
|----------|--------|---------|
|
||||
| Equals | `:` | `country:USA` |
|
||||
| IN | `:!(...)` | `country:!(USA,Canada)` |
|
||||
| Greater Than | `(gt:n)` | `sales:(gt:1000)` |
|
||||
| Greater Than or Equal | `(gte:n)` | `age:(gte:18)` |
|
||||
| Less Than | `(lt:n)` | `temp:(lt:32)` |
|
||||
| Less Than or Equal | `(lte:n)` | `price:(lte:100)` |
|
||||
| BETWEEN | `(between:!(a,b))` | `date:(between:!(2024-01-01,2024-12-31))` |
|
||||
| LIKE | `(like:pattern)` | `name:(like:'%smith%')` |
|
||||
|
||||
### Data Types
|
||||
| Type | Example | Notes |
|
||||
|------|---------|-------|
|
||||
| String | `USA` or `'North America'` | Use quotes for spaces/special chars |
|
||||
| Number | `42` or `3.14` | No quotes needed |
|
||||
| Boolean | `!t` or `!f` | Rison boolean syntax |
|
||||
| Null | `!n` | Rison null syntax |
|
||||
| Array | `!(val1,val2)` | For IN operations |
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
### Filters Not Working?
|
||||
1. Check that you're using the `f=()` wrapper
|
||||
2. Verify column names match exactly (case-sensitive)
|
||||
3. Ensure proper Rison syntax (especially for arrays and objects)
|
||||
4. Check browser console for error messages
|
||||
|
||||
### URL Too Long?
|
||||
If your filter URL becomes too long (browsers typically limit URLs to ~2000 characters):
|
||||
1. Use dashboard native filters for complex logic
|
||||
2. Use the Superset Permalink API to store complex filter state and reference it with a key
|
||||
3. Create a permalink with base filters, then add `f` parameter for variations
|
||||
4. Consider splitting filters across multiple parameters (future feature)
|
||||
|
||||
### Special Characters Issues?
|
||||
- **Spaces in values**: Use single quotes around the entire value
|
||||
- Example: `f=(region:'North America')` for filtering on "North America"
|
||||
- Example: `f=(city:'Los Angeles')` for filtering on "Los Angeles"
|
||||
- **Single quotes in values**: Double them for escaping
|
||||
- Example: `f=(name:'O''Brien')` for filtering on "O'Brien"
|
||||
- **URL encoding**: The browser handles this automatically
|
||||
- Spaces become `%20`, special chars are encoded as needed
|
||||
- You type: `f=(region:'North America')`
|
||||
- Browser sends: `f=(region:'North%20America')`
|
||||
- **Multiple words**: Always quote multi-word values
|
||||
- Correct: `f=(status:'In Progress')`
|
||||
- Wrong: `f=(status:In Progress)` (will cause parse error)
|
||||
@@ -47,7 +47,7 @@ dependencies = [
|
||||
"cryptography>=42.0.4, <45.0.0",
|
||||
"deprecation>=2.1.0, <2.2.0",
|
||||
"flask>=2.2.5, <3.0.0",
|
||||
"flask-appbuilder>=4.8.0, <5.0.0",
|
||||
"flask-appbuilder>=4.8.1, <5.0.0",
|
||||
"flask-caching>=2.1.0, <3",
|
||||
"flask-compress>=1.13, <2.0",
|
||||
"flask-talisman>=1.0.0, <2.0",
|
||||
|
||||
@@ -114,7 +114,7 @@ flask==2.3.3
|
||||
# flask-session
|
||||
# flask-sqlalchemy
|
||||
# flask-wtf
|
||||
flask-appbuilder==4.8.0
|
||||
flask-appbuilder==4.8.1
|
||||
# via
|
||||
# apache-superset (pyproject.toml)
|
||||
# apache-superset-core
|
||||
|
||||
@@ -208,7 +208,7 @@ flask==2.3.3
|
||||
# flask-sqlalchemy
|
||||
# flask-testing
|
||||
# flask-wtf
|
||||
flask-appbuilder==4.8.0
|
||||
flask-appbuilder==4.8.1
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# apache-superset
|
||||
|
||||
@@ -34,8 +34,10 @@ const StyledEditableTitle = styled.span<{
|
||||
canEdit: boolean;
|
||||
}>`
|
||||
&.editable-title {
|
||||
display: inline-block;
|
||||
width: 100%;
|
||||
display: inline;
|
||||
&.editable-title--editing {
|
||||
width: 100%;
|
||||
}
|
||||
|
||||
input,
|
||||
textarea {
|
||||
|
||||
@@ -104,6 +104,7 @@ import {
|
||||
PicCenterOutlined,
|
||||
PlusCircleOutlined,
|
||||
PlusOutlined,
|
||||
ProfileOutlined,
|
||||
QuestionCircleOutlined,
|
||||
ReloadOutlined,
|
||||
RightOutlined,
|
||||
@@ -242,6 +243,7 @@ const AntdIcons = {
|
||||
PicCenterOutlined,
|
||||
PlusCircleOutlined,
|
||||
PlusOutlined,
|
||||
ProfileOutlined,
|
||||
ReloadOutlined,
|
||||
QuestionCircleOutlined,
|
||||
RightOutlined,
|
||||
|
||||
@@ -17,7 +17,7 @@
|
||||
* under the License.
|
||||
*/
|
||||
import { Component, cloneElement, ReactElement } from 'react';
|
||||
import { t } from '@superset-ui/core';
|
||||
import { t, css, SupersetTheme } from '@superset-ui/core';
|
||||
import copyTextToClipboard from 'src/utils/copy';
|
||||
import { Tooltip } from '@superset-ui/core/components';
|
||||
import withToasts from '../MessageToasts/withToasts';
|
||||
@@ -104,7 +104,14 @@ class CopyToClip extends Component<CopyToClipboardProps> {
|
||||
return (
|
||||
<span css={{ display: 'inline-flex', alignItems: 'center' }}>
|
||||
{this.props.shouldShowText && this.props.text && (
|
||||
<span data-test="short-url">{this.props.text}</span>
|
||||
<span
|
||||
data-test="short-url"
|
||||
css={(theme: SupersetTheme) => css`
|
||||
margin-right: ${theme.sizeUnit}px;
|
||||
`}
|
||||
>
|
||||
{this.props.text}
|
||||
</span>
|
||||
)}
|
||||
{this.renderTooltip('pointer')}
|
||||
</span>
|
||||
|
||||
@@ -29,7 +29,7 @@ import {
|
||||
} from 'spec/helpers/testing-library';
|
||||
import { api } from 'src/hooks/apiResources/queryApi';
|
||||
import fetchMock from 'fetch-mock';
|
||||
import TableSelector, { TableSelectorMultiple } from '.';
|
||||
import TableSelector, { TableSelectorMultiple, TableOption } from '.';
|
||||
|
||||
const createProps = (props = {}) => ({
|
||||
database: {
|
||||
@@ -259,3 +259,34 @@ test('table multi select retain all the values selected', async () => {
|
||||
expect(selections[0]).toHaveTextContent('table_b');
|
||||
expect(selections[1]).toHaveTextContent('table_c');
|
||||
});
|
||||
|
||||
test('TableOption renders correct icons for different table types', () => {
|
||||
// Test regular table
|
||||
const tableTable = {
|
||||
value: 'test_table',
|
||||
type: 'table',
|
||||
label: 'test_table',
|
||||
};
|
||||
const { container: tableContainer } = render(
|
||||
<TableOption table={tableTable} />,
|
||||
);
|
||||
expect(tableContainer.querySelector('.anticon')).toBeInTheDocument();
|
||||
|
||||
// Test view
|
||||
const viewTable = { value: 'test_view', type: 'view', label: 'test_view' };
|
||||
const { container: viewContainer } = render(
|
||||
<TableOption table={viewTable} />,
|
||||
);
|
||||
expect(viewContainer.querySelector('.anticon')).toBeInTheDocument();
|
||||
|
||||
// Test materialized view
|
||||
const materializedViewTable = {
|
||||
value: 'test_materialized_view',
|
||||
type: 'materialized_view',
|
||||
label: 'test_materialized_view',
|
||||
};
|
||||
const { container: mvContainer } = render(
|
||||
<TableOption table={materializedViewTable} />,
|
||||
);
|
||||
expect(mvContainer.querySelector('.anticon')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
@@ -126,9 +126,11 @@ export const TableOption = ({ table }: { table: Table }) => {
|
||||
return (
|
||||
<TableLabel title={value}>
|
||||
{type === 'view' ? (
|
||||
<Icons.EyeOutlined iconSize="m" />
|
||||
<Icons.FunctionOutlined iconSize="m" />
|
||||
) : type === 'materialized_view' ? (
|
||||
<Icons.ProfileOutlined iconSize="m" />
|
||||
) : (
|
||||
<Icons.InsertRowAboveOutlined iconSize="m" />
|
||||
<Icons.TableOutlined iconSize="m" />
|
||||
)}
|
||||
{extra?.certification && (
|
||||
<CertifiedBadge
|
||||
|
||||
@@ -75,6 +75,18 @@ const HeaderStyles = styled.div`
|
||||
font-size: ${theme.fontSizeXXL}px;
|
||||
}
|
||||
|
||||
.anchor-link-container {
|
||||
display: inline;
|
||||
line-height: 0;
|
||||
vertical-align: bottom; /* trick to align the anchor with text */
|
||||
opacity: 0;
|
||||
transition: opacity ${theme.motionDurationMid} ease-in-out;
|
||||
}
|
||||
|
||||
&:hover .anchor-link-container {
|
||||
opacity: 1;
|
||||
}
|
||||
|
||||
.dashboard--editing .dashboard-grid & {
|
||||
&:after {
|
||||
border: 1px dashed transparent;
|
||||
|
||||
@@ -79,10 +79,23 @@ const defaultProps = {
|
||||
|
||||
const TabTitleContainer = styled.div`
|
||||
${({ isHighlighted, theme: { sizeUnit, colorPrimaryBg } }) => `
|
||||
padding: ${sizeUnit}px ${sizeUnit * 8}px ${sizeUnit}px ${sizeUnit * 2}px;
|
||||
margin: ${-sizeUnit}px ${sizeUnit * -2}px;
|
||||
display: inline-flex;
|
||||
position: relative;
|
||||
align-items: center;
|
||||
margin: 0 ${sizeUnit * 2}px;
|
||||
transition: box-shadow 0.2s ease-in-out;
|
||||
${isHighlighted && `box-shadow: 0 0 ${sizeUnit}px ${colorPrimaryBg};`}
|
||||
${isHighlighted ? `box-shadow: 0 0 ${sizeUnit}px ${colorPrimaryBg};` : ''}
|
||||
|
||||
.anchor-link-container {
|
||||
position: absolute;
|
||||
left: 100%;
|
||||
opacity: 0;
|
||||
transition: opacity 0.2s ease-in-out;
|
||||
}
|
||||
|
||||
&:hover .anchor-link-container {
|
||||
opacity: 1;
|
||||
}
|
||||
`}
|
||||
`;
|
||||
|
||||
|
||||
@@ -19,11 +19,16 @@
|
||||
|
||||
import { FC, memo, useMemo } from 'react';
|
||||
import { DataMaskStateWithId, styled, t } from '@superset-ui/core';
|
||||
import { Icons } from '@superset-ui/core/components/Icons';
|
||||
import { Loading } from '@superset-ui/core/components';
|
||||
import { RootState } from 'src/dashboard/types';
|
||||
import { useChartLayoutItems } from 'src/dashboard/util/useChartLayoutItems';
|
||||
import { useChartIds } from 'src/dashboard/util/charts/useChartIds';
|
||||
import { useSelector } from 'react-redux';
|
||||
import {
|
||||
getRisonFilterParam,
|
||||
parseRisonFilters,
|
||||
} from 'src/dashboard/util/risonFilters';
|
||||
import FilterControls from './FilterControls/FilterControls';
|
||||
import { useChartsVerboseMaps, getFilterBarTestId } from './utils';
|
||||
import { HorizontalBarProps } from './types';
|
||||
@@ -63,6 +68,40 @@ const FilterBarEmptyStateContainer = styled.div`
|
||||
`}
|
||||
`;
|
||||
|
||||
const RisonFiltersContainer = styled.div`
|
||||
${({ theme }) => `
|
||||
display: flex;
|
||||
flex-direction: row;
|
||||
align-items: center;
|
||||
gap: ${theme.sizeUnit * 2}px;
|
||||
padding: 0 ${theme.sizeUnit * 2}px;
|
||||
margin-right: ${theme.sizeUnit * 2}px;
|
||||
border-right: 1px solid ${theme.colorBorder};
|
||||
`}
|
||||
`;
|
||||
|
||||
const RisonFilterTitle = styled.div`
|
||||
${({ theme }) => `
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: ${theme.sizeUnit}px;
|
||||
font-weight: ${theme.fontWeightStrong};
|
||||
font-size: ${theme.fontSizeSM}px;
|
||||
`}
|
||||
`;
|
||||
|
||||
const RisonFilterItem = styled.div`
|
||||
${({ theme }) => `
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: ${theme.sizeUnit}px;
|
||||
padding: ${theme.sizeUnit}px ${theme.sizeUnit * 2}px;
|
||||
background: ${theme.colorBgContainer};
|
||||
border-radius: ${theme.borderRadius}px;
|
||||
font-size: ${theme.fontSizeSM}px;
|
||||
`}
|
||||
`;
|
||||
|
||||
const HorizontalFilterBar: FC<HorizontalBarProps> = ({
|
||||
actions,
|
||||
dataMaskSelected,
|
||||
@@ -90,7 +129,43 @@ const HorizontalFilterBar: FC<HorizontalBarProps> = ({
|
||||
[chartIds, chartLayoutItems, dataMask, verboseMaps],
|
||||
);
|
||||
|
||||
const hasFilters = filterValues.length > 0 || selectedCrossFilters.length > 0;
|
||||
// Get active Rison filters from URL
|
||||
const activeRisonFilters = useMemo(() => {
|
||||
const risonParam = getRisonFilterParam();
|
||||
if (risonParam) {
|
||||
return parseRisonFilters(risonParam);
|
||||
}
|
||||
return [];
|
||||
}, []);
|
||||
|
||||
const risonFiltersComponent = useMemo(() => {
|
||||
if (activeRisonFilters.length === 0) return null;
|
||||
|
||||
return (
|
||||
<RisonFiltersContainer>
|
||||
<RisonFilterTitle>
|
||||
<Icons.LinkOutlined iconSize="s" />
|
||||
{t('URL Filters')}
|
||||
</RisonFilterTitle>
|
||||
{activeRisonFilters.map((filter, index) => (
|
||||
<RisonFilterItem key={`${filter.subject}-${index}`}>
|
||||
<strong>{filter.subject}</strong>
|
||||
<span>{filter.operator}</span>
|
||||
<span>
|
||||
{Array.isArray(filter.comparator)
|
||||
? filter.comparator.join(', ')
|
||||
: filter.comparator}
|
||||
</span>
|
||||
</RisonFilterItem>
|
||||
))}
|
||||
</RisonFiltersContainer>
|
||||
);
|
||||
}, [activeRisonFilters]);
|
||||
|
||||
const hasFilters =
|
||||
filterValues.length > 0 ||
|
||||
selectedCrossFilters.length > 0 ||
|
||||
activeRisonFilters.length > 0;
|
||||
|
||||
return (
|
||||
<HorizontalBar {...getFilterBarTestId()}>
|
||||
@@ -106,12 +181,15 @@ const HorizontalFilterBar: FC<HorizontalBarProps> = ({
|
||||
</FilterBarEmptyStateContainer>
|
||||
)}
|
||||
{hasFilters && (
|
||||
<FilterControls
|
||||
dataMaskSelected={dataMaskSelected}
|
||||
onFilterSelectionChange={onSelectionChange}
|
||||
clearAllTriggers={clearAllTriggers}
|
||||
onClearAllComplete={onClearAllComplete}
|
||||
/>
|
||||
<>
|
||||
{risonFiltersComponent}
|
||||
<FilterControls
|
||||
dataMaskSelected={dataMaskSelected}
|
||||
onFilterSelectionChange={onSelectionChange}
|
||||
clearAllTriggers={clearAllTriggers}
|
||||
onClearAllComplete={onClearAllComplete}
|
||||
/>
|
||||
</>
|
||||
)}
|
||||
{actions}
|
||||
</>
|
||||
|
||||
@@ -38,6 +38,10 @@ import { VerticalBarProps } from './types';
|
||||
import Header from './Header';
|
||||
import FilterControls from './FilterControls/FilterControls';
|
||||
import CrossFiltersVertical from './CrossFilters/Vertical';
|
||||
import {
|
||||
getRisonFilterParam,
|
||||
parseRisonFilters,
|
||||
} from '../../../util/risonFilters';
|
||||
|
||||
const BarWrapper = styled.div<{ width: number }>`
|
||||
width: ${({ theme }) => theme.sizeUnit * 8}px;
|
||||
@@ -111,6 +115,40 @@ const FilterControlsWrapper = styled.div`
|
||||
`}
|
||||
`;
|
||||
|
||||
const RisonFiltersContainer = styled.div`
|
||||
${({ theme }) => `
|
||||
padding: ${theme.sizeUnit * 2}px ${theme.sizeUnit * 4}px;
|
||||
border-bottom: 1px solid ${theme.colorSplit};
|
||||
background-color: ${theme.colorPrimaryBg};
|
||||
`}
|
||||
`;
|
||||
|
||||
const RisonFilterItem = styled.div`
|
||||
${({ theme }) => `
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: ${theme.sizeUnit}px;
|
||||
padding: ${theme.sizeUnit}px;
|
||||
margin: ${theme.sizeUnit / 2}px 0;
|
||||
background-color: ${theme.colorPrimaryBg};
|
||||
border-radius: ${theme.sizeUnit}px;
|
||||
font-size: ${theme.fontSizeSM}px;
|
||||
color: ${theme.colorPrimaryText};
|
||||
`}
|
||||
`;
|
||||
|
||||
const RisonFilterTitle = styled.div`
|
||||
${({ theme }) => `
|
||||
font-weight: bold;
|
||||
color: ${theme.colorPrimaryText};
|
||||
margin-bottom: ${theme.sizeUnit}px;
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: ${theme.sizeUnit}px;
|
||||
font-size: ${theme.fontSizeSM}px;
|
||||
`}
|
||||
`;
|
||||
|
||||
export const FilterBarScrollContext = createContext(false);
|
||||
const VerticalFilterBar: FC<VerticalBarProps> = ({
|
||||
actions,
|
||||
@@ -160,6 +198,39 @@ const VerticalFilterBar: FC<VerticalBarProps> = ({
|
||||
[height],
|
||||
);
|
||||
|
||||
// Get active Rison filters from URL
|
||||
const activeRisonFilters = useMemo(() => {
|
||||
const risonParam = getRisonFilterParam();
|
||||
if (risonParam) {
|
||||
return parseRisonFilters(risonParam);
|
||||
}
|
||||
return [];
|
||||
}, []);
|
||||
|
||||
const risonFiltersComponent = useMemo(() => {
|
||||
if (activeRisonFilters.length === 0) return null;
|
||||
|
||||
return (
|
||||
<RisonFiltersContainer>
|
||||
<RisonFilterTitle>
|
||||
<Icons.LinkOutlined iconSize="s" />
|
||||
{t('URL Filters')}
|
||||
</RisonFilterTitle>
|
||||
{activeRisonFilters.map((filter, index) => (
|
||||
<RisonFilterItem key={`${filter.subject}-${index}`}>
|
||||
<strong>{filter.subject}</strong>
|
||||
<span>{filter.operator}</span>
|
||||
<span>
|
||||
{Array.isArray(filter.comparator)
|
||||
? filter.comparator.join(', ')
|
||||
: filter.comparator}
|
||||
</span>
|
||||
</RisonFilterItem>
|
||||
))}
|
||||
</RisonFiltersContainer>
|
||||
);
|
||||
}, [activeRisonFilters]);
|
||||
|
||||
const filterControls = useMemo(
|
||||
() =>
|
||||
filterValues.length === 0 ? (
|
||||
@@ -228,6 +299,7 @@ const VerticalFilterBar: FC<VerticalBarProps> = ({
|
||||
) : (
|
||||
<div css={tabPaneStyle} onScroll={onScroll}>
|
||||
<>
|
||||
{risonFiltersComponent}
|
||||
<CrossFiltersVertical />
|
||||
{filterControls}
|
||||
</>
|
||||
|
||||
@@ -90,9 +90,16 @@ const publishDataMask = debounce(
|
||||
const previousParams = new URLSearchParams(search);
|
||||
const newParams = new URLSearchParams();
|
||||
let dataMaskKey: string | null;
|
||||
let risonFilterValue: string | null = null;
|
||||
|
||||
previousParams.forEach((value, key) => {
|
||||
if (!EXCLUDED_URL_PARAMS.includes(key)) {
|
||||
newParams.append(key, value);
|
||||
if (key === 'f') {
|
||||
// Preserve the original Rison filter value to avoid encoding
|
||||
risonFilterValue = value;
|
||||
} else {
|
||||
newParams.append(key, value);
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
@@ -131,8 +138,16 @@ const publishDataMask = debounce(
|
||||
replacement_pathname = replacement_pathname.substring(appRoot.length);
|
||||
}
|
||||
history.location.pathname = replacement_pathname;
|
||||
|
||||
// Manually reconstruct the search string to preserve Rison filter encoding
|
||||
let searchString = newParams.toString();
|
||||
if (risonFilterValue) {
|
||||
const separator = searchString ? '&' : '';
|
||||
searchString = `${searchString}${separator}f=${risonFilterValue}`;
|
||||
}
|
||||
|
||||
history.replace({
|
||||
search: newParams.toString(),
|
||||
search: searchString,
|
||||
});
|
||||
}
|
||||
},
|
||||
|
||||
@@ -60,6 +60,14 @@ import {
|
||||
import SyncDashboardState, {
|
||||
getDashboardContextLocalStorage,
|
||||
} from '../components/SyncDashboardState';
|
||||
import {
|
||||
parseRisonFilters,
|
||||
risonToAdhocFilters,
|
||||
getRisonFilterParam,
|
||||
prettifyRisonFilterUrl,
|
||||
injectRisonFiltersIntelligently,
|
||||
updateUrlWithUnmatchedFilters,
|
||||
} from '../util/risonFilters';
|
||||
|
||||
export const DashboardPageIdContext = createContext('');
|
||||
|
||||
@@ -186,6 +194,60 @@ export const DashboardPage: FC<PageProps> = ({ idOrSlug }: PageProps) => {
|
||||
dataMask = isOldRison;
|
||||
}
|
||||
|
||||
// Parse Rison URL filters with intelligent native filter injection
|
||||
const risonFilterParam = getRisonFilterParam();
|
||||
if (risonFilterParam) {
|
||||
const risonFilters = parseRisonFilters(risonFilterParam);
|
||||
if (risonFilters.length > 0) {
|
||||
// Try to intelligently inject into native filters first
|
||||
const nativeFilters =
|
||||
dashboard?.metadata?.native_filter_configuration || {};
|
||||
const injectionResult = injectRisonFiltersIntelligently(
|
||||
risonFilters,
|
||||
nativeFilters,
|
||||
dataMask,
|
||||
);
|
||||
|
||||
// Use the updated dataMask with native filter injections
|
||||
dataMask = injectionResult.updatedDataMask;
|
||||
|
||||
// For any unmatched filters, fall back to the old brute-force approach
|
||||
if (injectionResult.unmatchedFilters.length > 0) {
|
||||
const unmatchedAdhocFilters = risonToAdhocFilters(
|
||||
injectionResult.unmatchedFilters,
|
||||
);
|
||||
|
||||
// Store unmatched Rison filters in a virtual filter state
|
||||
const risonDataMask = {
|
||||
__rison_filters__: {
|
||||
filterState: { value: unmatchedAdhocFilters },
|
||||
ownState: {},
|
||||
},
|
||||
};
|
||||
|
||||
// Merge with existing dataMask
|
||||
dataMask = { ...dataMask, ...risonDataMask };
|
||||
}
|
||||
|
||||
// Clean up URL: remove matched filters, keep only unmatched ones
|
||||
// This prevents duplication between native filter bar and URL filters section
|
||||
const matchedCount =
|
||||
risonFilters.length - injectionResult.unmatchedFilters.length;
|
||||
if (matchedCount > 0) {
|
||||
setTimeout(
|
||||
() =>
|
||||
updateUrlWithUnmatchedFilters(injectionResult.unmatchedFilters),
|
||||
100,
|
||||
);
|
||||
}
|
||||
|
||||
// Only prettify URL if we have unmatched filters (brute-force case)
|
||||
if (injectionResult.unmatchedFilters.length > 0) {
|
||||
setTimeout(() => prettifyRisonFilterUrl(), 150);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (readyToRender) {
|
||||
if (!isDashboardHydrated.current) {
|
||||
isDashboardHydrated.current = true;
|
||||
|
||||
@@ -34,6 +34,11 @@ import { areObjectsEqual } from 'src/reduxUtils';
|
||||
import { isEqual } from 'lodash';
|
||||
import getEffectiveExtraFilters from './getEffectiveExtraFilters';
|
||||
import { getAllActiveFilters } from '../activeAllDashboardFilters';
|
||||
import {
|
||||
getRisonFilterParam,
|
||||
parseRisonFilters,
|
||||
risonToAdhocFilters,
|
||||
} from '../risonFilters';
|
||||
|
||||
interface CachedFormData {
|
||||
extra_form_data?: JsonObject;
|
||||
@@ -205,6 +210,44 @@ export default function getFormDataWithExtraFilters({
|
||||
}
|
||||
}
|
||||
|
||||
// Check for Rison filters in URL and add to adhoc_filters
|
||||
// Only add Rison filters if we're in dashboard context (not explore)
|
||||
// In explore context, Chart/index.tsx handles Rison filters to avoid duplication
|
||||
const risonFilterParam = getRisonFilterParam();
|
||||
let risonAdhocFilters: any[] = [];
|
||||
|
||||
// Check if we're in explore context by looking at the URL path
|
||||
const isExploreContext = window.location.pathname.includes('/explore');
|
||||
|
||||
// Only process Rison filters if we're NOT in explore context
|
||||
// This avoids duplication since Chart/index.tsx handles them in explore
|
||||
if (!isExploreContext && risonFilterParam) {
|
||||
const risonFilters = parseRisonFilters(risonFilterParam);
|
||||
if (risonFilters.length > 0) {
|
||||
risonAdhocFilters = risonToAdhocFilters(risonFilters);
|
||||
}
|
||||
}
|
||||
|
||||
// Also check if Rison filters were stored in dataMask from DashboardPage
|
||||
const risonDataMaskFilters =
|
||||
// eslint-disable-next-line no-underscore-dangle
|
||||
(dataMask as any)?.__rison_filters__?.filterState?.value || [];
|
||||
if (risonDataMaskFilters.length > 0 && risonAdhocFilters.length === 0) {
|
||||
risonAdhocFilters = risonDataMaskFilters;
|
||||
}
|
||||
|
||||
// Deduplicate Rison filters before adding them
|
||||
let finalAdhocFilters = chart.form_data?.adhoc_filters || [];
|
||||
if (risonAdhocFilters.length > 0) {
|
||||
// Remove any existing Rison filters from the form data
|
||||
const nonRisonFilters = finalAdhocFilters.filter(
|
||||
// eslint-disable-next-line no-underscore-dangle
|
||||
(f: any) => !f.__superset_rison_filter__,
|
||||
);
|
||||
// Add the new Rison filters
|
||||
finalAdhocFilters = [...nonRisonFilters, ...risonAdhocFilters];
|
||||
}
|
||||
|
||||
const formData: CachedFormDataWithExtraControls = {
|
||||
...chart.form_data,
|
||||
chart_id: chart.id,
|
||||
@@ -216,6 +259,10 @@ export default function getFormDataWithExtraFilters({
|
||||
own_color_scheme: ownColorScheme,
|
||||
}),
|
||||
extra_filters: getEffectiveExtraFilters(filters),
|
||||
// Use the deduplicated adhoc_filters
|
||||
...(finalAdhocFilters.length > 0 && {
|
||||
adhoc_filters: finalAdhocFilters,
|
||||
}),
|
||||
...extraData,
|
||||
...extraControls,
|
||||
...(layerFilterScope && { layer_filter_scope: layerFilterScope }),
|
||||
|
||||
145
superset-frontend/src/dashboard/util/risonFilters.test.ts
Normal file
145
superset-frontend/src/dashboard/util/risonFilters.test.ts
Normal file
@@ -0,0 +1,145 @@
|
||||
/**
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
import { PartialFilters, DataMaskStateWithId } from '@superset-ui/core';
|
||||
import {
|
||||
injectRisonFiltersIntelligently,
|
||||
RisonFilter,
|
||||
parseRisonFilters,
|
||||
} from './risonFilters';
|
||||
|
||||
describe('risonFilters intelligent injection', () => {
|
||||
const mockNativeFilters: PartialFilters = {
|
||||
filter_1: {
|
||||
id: 'filter_1',
|
||||
targets: [
|
||||
{
|
||||
column: { name: 'country' },
|
||||
datasetId: 1,
|
||||
},
|
||||
],
|
||||
filterType: 'filter_select',
|
||||
},
|
||||
filter_2: {
|
||||
id: 'filter_2',
|
||||
targets: [
|
||||
{
|
||||
column: { name: 'year' },
|
||||
datasetId: 1,
|
||||
},
|
||||
],
|
||||
filterType: 'filter_range',
|
||||
},
|
||||
};
|
||||
|
||||
const mockDataMask: DataMaskStateWithId = {
|
||||
filter_1: {
|
||||
id: 'filter_1',
|
||||
filterState: { value: undefined },
|
||||
ownState: {},
|
||||
},
|
||||
};
|
||||
|
||||
it('should match Rison filter to native filter by column name', () => {
|
||||
const risonFilters: RisonFilter[] = [
|
||||
{ subject: 'country', operator: '==', comparator: 'USA' },
|
||||
];
|
||||
|
||||
const result = injectRisonFiltersIntelligently(
|
||||
risonFilters,
|
||||
mockNativeFilters,
|
||||
mockDataMask,
|
||||
);
|
||||
|
||||
expect(result.updatedDataMask.filter_1.filterState?.value).toEqual(['USA']);
|
||||
expect(result.unmatchedFilters).toHaveLength(0);
|
||||
});
|
||||
|
||||
it('should handle unmatched filters with fallback', () => {
|
||||
const risonFilters: RisonFilter[] = [
|
||||
{ subject: 'region', operator: '==', comparator: 'North America' }, // No matching native filter
|
||||
];
|
||||
|
||||
const result = injectRisonFiltersIntelligently(
|
||||
risonFilters,
|
||||
mockNativeFilters,
|
||||
mockDataMask,
|
||||
);
|
||||
|
||||
expect(result.unmatchedFilters).toHaveLength(1);
|
||||
expect(result.unmatchedFilters[0].subject).toBe('region');
|
||||
});
|
||||
|
||||
it('should convert values correctly for different filter types', () => {
|
||||
const risonFilters: RisonFilter[] = [
|
||||
{ subject: 'country', operator: '==', comparator: 'USA' },
|
||||
{ subject: 'year', operator: 'BETWEEN', comparator: [2020, 2024] },
|
||||
];
|
||||
|
||||
const result = injectRisonFiltersIntelligently(
|
||||
risonFilters,
|
||||
mockNativeFilters,
|
||||
mockDataMask,
|
||||
);
|
||||
|
||||
// Select filter should be array
|
||||
expect(result.updatedDataMask.filter_1.filterState?.value).toEqual(['USA']);
|
||||
|
||||
// Range filter should be min/max object
|
||||
expect(result.updatedDataMask.filter_2.filterState?.value).toEqual({
|
||||
min: 2020,
|
||||
max: 2024,
|
||||
});
|
||||
|
||||
expect(result.unmatchedFilters).toHaveLength(0);
|
||||
});
|
||||
|
||||
it('should handle mixed matched and unmatched filters', () => {
|
||||
const risonFilters: RisonFilter[] = [
|
||||
{ subject: 'country', operator: '==', comparator: 'USA' }, // Should match
|
||||
{ subject: 'category', operator: '==', comparator: 'Sales' }, // No match
|
||||
];
|
||||
|
||||
const result = injectRisonFiltersIntelligently(
|
||||
risonFilters,
|
||||
mockNativeFilters,
|
||||
mockDataMask,
|
||||
);
|
||||
|
||||
expect(result.updatedDataMask.filter_1.filterState?.value).toEqual(['USA']);
|
||||
expect(result.unmatchedFilters).toHaveLength(1);
|
||||
expect(result.unmatchedFilters[0].subject).toBe('category');
|
||||
});
|
||||
|
||||
it('should parse Rison filters correctly', () => {
|
||||
const risonString = '(country:USA,year:2024)';
|
||||
const result = parseRisonFilters(risonString);
|
||||
|
||||
expect(result).toHaveLength(2);
|
||||
expect(result[0]).toEqual({
|
||||
subject: 'country',
|
||||
operator: '==',
|
||||
comparator: 'USA',
|
||||
});
|
||||
expect(result[1]).toEqual({
|
||||
subject: 'year',
|
||||
operator: '==',
|
||||
comparator: 2024,
|
||||
});
|
||||
});
|
||||
});
|
||||
453
superset-frontend/src/dashboard/util/risonFilters.ts
Normal file
453
superset-frontend/src/dashboard/util/risonFilters.ts
Normal file
@@ -0,0 +1,453 @@
|
||||
/**
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
import {
|
||||
QueryObjectFilterClause,
|
||||
PartialFilters,
|
||||
DataMaskStateWithId,
|
||||
} from '@superset-ui/core';
|
||||
import rison from 'rison';
|
||||
|
||||
export interface RisonFilter {
|
||||
subject: string;
|
||||
operator: string;
|
||||
comparator: any;
|
||||
}
|
||||
|
||||
export interface IntelligentRisonInjectionResult {
|
||||
updatedDataMask: DataMaskStateWithId;
|
||||
unmatchedFilters: RisonFilter[];
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse Rison filter syntax from URL parameter
|
||||
* Supports formats like: (country:USA,year:2024)
|
||||
*/
|
||||
export function parseRisonFilters(risonString: string): RisonFilter[] {
|
||||
try {
|
||||
const parsed = rison.decode(risonString);
|
||||
const filters: RisonFilter[] = [];
|
||||
|
||||
if (!parsed || typeof parsed !== 'object') {
|
||||
return filters;
|
||||
}
|
||||
|
||||
const parsedObj = parsed as Record<string, any>;
|
||||
|
||||
// Handle OR operator: OR:!(condition1,condition2)
|
||||
if (parsedObj.OR && Array.isArray(parsedObj.OR)) {
|
||||
parsedObj.OR.forEach((condition: any) => {
|
||||
if (typeof condition === 'object') {
|
||||
Object.entries(condition).forEach(([key, value]) => {
|
||||
filters.push(parseFilterCondition(key, value));
|
||||
});
|
||||
}
|
||||
});
|
||||
return filters;
|
||||
}
|
||||
|
||||
// Handle NOT operator: NOT:(condition)
|
||||
if (parsedObj.NOT && typeof parsedObj.NOT === 'object') {
|
||||
Object.entries(parsedObj.NOT).forEach(([key, value]) => {
|
||||
const filter = parseFilterCondition(key, value);
|
||||
// Negate the operator
|
||||
if (filter.operator === '==') {
|
||||
filter.operator = '!=';
|
||||
} else if (filter.operator === 'IN') {
|
||||
filter.operator = 'NOT IN';
|
||||
}
|
||||
filters.push(filter);
|
||||
});
|
||||
return filters;
|
||||
}
|
||||
|
||||
// Handle regular filters
|
||||
Object.entries(parsedObj).forEach(([key, value]) => {
|
||||
if (key !== 'OR' && key !== 'NOT') {
|
||||
filters.push(parseFilterCondition(key, value));
|
||||
}
|
||||
});
|
||||
|
||||
return filters;
|
||||
} catch (error) {
|
||||
console.warn('Failed to parse Rison filters:', error);
|
||||
return [];
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse individual filter condition
|
||||
*/
|
||||
function parseFilterCondition(key: string, value: any): RisonFilter {
|
||||
// Handle comparison operators: (gt:100), (between:!(1,10))
|
||||
if (typeof value === 'object' && value !== null && !Array.isArray(value)) {
|
||||
const [operator, operatorValue] = Object.entries(value)[0];
|
||||
|
||||
switch (operator) {
|
||||
case 'gt':
|
||||
return { subject: key, operator: '>', comparator: operatorValue };
|
||||
case 'gte':
|
||||
return { subject: key, operator: '>=', comparator: operatorValue };
|
||||
case 'lt':
|
||||
return { subject: key, operator: '<', comparator: operatorValue };
|
||||
case 'lte':
|
||||
return { subject: key, operator: '<=', comparator: operatorValue };
|
||||
case 'between':
|
||||
return { subject: key, operator: 'BETWEEN', comparator: operatorValue };
|
||||
case 'like':
|
||||
return { subject: key, operator: 'LIKE', comparator: operatorValue };
|
||||
default:
|
||||
return { subject: key, operator: '==', comparator: value };
|
||||
}
|
||||
}
|
||||
|
||||
// Handle IN operator: !(value1,value2)
|
||||
if (Array.isArray(value)) {
|
||||
return { subject: key, operator: 'IN', comparator: value };
|
||||
}
|
||||
|
||||
// Handle simple equality
|
||||
return { subject: key, operator: '==', comparator: value };
|
||||
}
|
||||
|
||||
/**
|
||||
* Convert Rison filters to Superset adhoc filter format
|
||||
*/
|
||||
export function risonToAdhocFilters(
|
||||
risonFilters: RisonFilter[],
|
||||
): QueryObjectFilterClause[] {
|
||||
return risonFilters.map(
|
||||
filter =>
|
||||
({
|
||||
expressionType: 'SIMPLE' as const,
|
||||
clause: 'WHERE' as const,
|
||||
subject: filter.subject,
|
||||
operator: filter.operator,
|
||||
comparator: filter.comparator,
|
||||
// eslint-disable-next-line no-underscore-dangle
|
||||
__superset_rison_filter__: true, // Metadata to identify Rison filters
|
||||
}) as any,
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if a filter was derived from Rison
|
||||
*/
|
||||
export function isRisonFilter(filter: any): boolean {
|
||||
// eslint-disable-next-line no-underscore-dangle
|
||||
return filter && filter.__superset_rison_filter__ === true;
|
||||
}
|
||||
|
||||
/**
|
||||
* Filter out Rison-derived filters from an array
|
||||
*/
|
||||
export function excludeRisonFilters(filters: any[]): any[] {
|
||||
return filters.filter(filter => !isRisonFilter(filter));
|
||||
}
|
||||
|
||||
/**
|
||||
* Prettify Rison filter URL by replacing encoded characters
|
||||
* Uses browser history API to update URL without page reload
|
||||
*/
|
||||
export function prettifyRisonFilterUrl(): void {
|
||||
try {
|
||||
const currentUrl = window.location.href;
|
||||
|
||||
// Check if URL contains Rison parameters (encoded or not)
|
||||
if (!currentUrl.includes('&f=') && !currentUrl.includes('?f=')) {
|
||||
return;
|
||||
}
|
||||
|
||||
// Extract the Rison parameter value
|
||||
const urlMatch = currentUrl.match(/([?&])f=([^&]*)/);
|
||||
if (!urlMatch) {
|
||||
return;
|
||||
}
|
||||
|
||||
const separator = urlMatch[1];
|
||||
let risonValue = urlMatch[2];
|
||||
|
||||
// Check if value needs prettification (contains encoded characters)
|
||||
if (!risonValue.includes('%') && !risonValue.includes('+')) {
|
||||
return;
|
||||
}
|
||||
|
||||
// Decode multiple times if needed (handles multiple encoding layers)
|
||||
let previousValue = '';
|
||||
let decodeAttempts = 0;
|
||||
while (risonValue !== previousValue && decodeAttempts < 5) {
|
||||
previousValue = risonValue;
|
||||
try {
|
||||
// Decode percent-encoded characters
|
||||
if (risonValue.includes('%')) {
|
||||
risonValue = decodeURIComponent(risonValue);
|
||||
}
|
||||
} catch (e) {
|
||||
// If decoding fails, stop trying
|
||||
break;
|
||||
}
|
||||
decodeAttempts += 1;
|
||||
}
|
||||
|
||||
// Clean up + signs that should be spaces
|
||||
risonValue = risonValue.replace(/\+/g, ' ');
|
||||
|
||||
// Reconstruct the URL with the clean Rison parameter
|
||||
const matchIndex = urlMatch.index ?? 0;
|
||||
const beforeRison = currentUrl.substring(0, matchIndex);
|
||||
const afterRison = currentUrl.substring(matchIndex + urlMatch[0].length);
|
||||
const prettifiedUrl = `${beforeRison}${separator}f=${risonValue}${afterRison}`;
|
||||
|
||||
// Only update if the URL actually changed
|
||||
if (prettifiedUrl !== currentUrl) {
|
||||
window.history.replaceState(window.history.state, '', prettifiedUrl);
|
||||
}
|
||||
} catch (error) {
|
||||
console.warn('Failed to prettify Rison URL:', error);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get Rison filter parameter from current URL
|
||||
*/
|
||||
export function getRisonFilterParam(): string | null {
|
||||
const params = new URLSearchParams(window.location.search);
|
||||
return params.get('f');
|
||||
}
|
||||
|
||||
/**
|
||||
* Convert an array of RisonFilter back to Rison string format
|
||||
*/
|
||||
export function risonFiltersToString(filters: RisonFilter[]): string {
|
||||
if (filters.length === 0) {
|
||||
return '';
|
||||
}
|
||||
|
||||
const risonObject: Record<string, any> = {};
|
||||
|
||||
filters.forEach(filter => {
|
||||
if (filter.operator === 'IN' && Array.isArray(filter.comparator)) {
|
||||
// Array values: !(value1,value2)
|
||||
risonObject[filter.subject] = filter.comparator;
|
||||
} else if (filter.operator === '==') {
|
||||
// Simple equality
|
||||
risonObject[filter.subject] = filter.comparator;
|
||||
} else {
|
||||
// Other operators: {gt:100}, {between:!(1,10)}
|
||||
const operatorMap: Record<string, string> = {
|
||||
'>': 'gt',
|
||||
'>=': 'gte',
|
||||
'<': 'lt',
|
||||
'<=': 'lte',
|
||||
BETWEEN: 'between',
|
||||
LIKE: 'like',
|
||||
};
|
||||
|
||||
const risonOp = operatorMap[filter.operator] || filter.operator;
|
||||
risonObject[filter.subject] = { [risonOp]: filter.comparator };
|
||||
}
|
||||
});
|
||||
|
||||
try {
|
||||
return rison.encode(risonObject);
|
||||
} catch (error) {
|
||||
console.warn('Failed to encode Rison filters:', error);
|
||||
return '';
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Update the URL to remove successfully matched filters, keeping only unmatched ones
|
||||
*/
|
||||
export function updateUrlWithUnmatchedFilters(
|
||||
unmatchedFilters: RisonFilter[],
|
||||
): void {
|
||||
try {
|
||||
const currentUrl = new URL(window.location.href);
|
||||
|
||||
if (unmatchedFilters.length === 0) {
|
||||
// No unmatched filters - remove the f parameter entirely
|
||||
currentUrl.searchParams.delete('f');
|
||||
} else {
|
||||
// Convert unmatched filters back to Rison and update URL
|
||||
const newRisonString = risonFiltersToString(unmatchedFilters);
|
||||
if (newRisonString) {
|
||||
currentUrl.searchParams.set('f', newRisonString);
|
||||
} else {
|
||||
currentUrl.searchParams.delete('f');
|
||||
}
|
||||
}
|
||||
|
||||
// Update URL without page reload
|
||||
window.history.replaceState(
|
||||
window.history.state,
|
||||
'',
|
||||
currentUrl.toString(),
|
||||
);
|
||||
} catch (error) {
|
||||
console.warn('Failed to update URL with unmatched filters:', error);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Set up automatic URL prettification
|
||||
* Watches for URL changes and prettifies Rison parameters
|
||||
*/
|
||||
export function setupRisonUrlPrettification(): void {
|
||||
// Initial prettification
|
||||
prettifyRisonFilterUrl();
|
||||
|
||||
// Watch for URL changes using a simple polling mechanism
|
||||
let lastUrl = window.location.href;
|
||||
const checkInterval = setInterval(() => {
|
||||
const currentUrl = window.location.href;
|
||||
if (currentUrl !== lastUrl) {
|
||||
lastUrl = currentUrl;
|
||||
// Small delay to let the URL settle
|
||||
setTimeout(() => prettifyRisonFilterUrl(), 10);
|
||||
}
|
||||
}, 100);
|
||||
|
||||
// Clean up on page unload
|
||||
window.addEventListener('beforeunload', () => {
|
||||
clearInterval(checkInterval);
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Find a native filter that matches a Rison filter by column name
|
||||
*/
|
||||
function findMatchingNativeFilter(
|
||||
risonFilter: RisonFilter,
|
||||
nativeFilters: PartialFilters,
|
||||
): string | null {
|
||||
for (const [filterId, nativeFilter] of Object.entries(nativeFilters)) {
|
||||
if (!nativeFilter?.targets) continue;
|
||||
|
||||
// Check if any target matches the Rison filter's subject (column)
|
||||
const hasMatchingTarget = nativeFilter.targets.some(target => {
|
||||
if (typeof target === 'object' && target && 'column' in target) {
|
||||
return target.column?.name === risonFilter.subject;
|
||||
}
|
||||
return false;
|
||||
});
|
||||
|
||||
if (hasMatchingTarget) {
|
||||
return filterId;
|
||||
}
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Convert a Rison filter value to the format expected by a native filter
|
||||
*/
|
||||
function convertRisonToNativeValue(
|
||||
risonFilter: RisonFilter,
|
||||
nativeFilter: any,
|
||||
): any {
|
||||
const { comparator, operator } = risonFilter;
|
||||
const filterType = nativeFilter?.filterType;
|
||||
|
||||
switch (filterType) {
|
||||
case 'filter_select':
|
||||
// Select filters expect arrays
|
||||
if (operator === 'IN' || Array.isArray(comparator)) {
|
||||
return Array.isArray(comparator) ? comparator : [comparator];
|
||||
}
|
||||
return [comparator];
|
||||
|
||||
case 'filter_range':
|
||||
// Range filters expect min/max object or array
|
||||
if (
|
||||
operator === 'BETWEEN' &&
|
||||
Array.isArray(comparator) &&
|
||||
comparator.length === 2
|
||||
) {
|
||||
return { min: comparator[0], max: comparator[1] };
|
||||
}
|
||||
return comparator;
|
||||
|
||||
case 'filter_time_range':
|
||||
case 'filter_timecolumn':
|
||||
// Time filters - pass through as-is for now
|
||||
// More sophisticated time parsing could be added here
|
||||
return comparator;
|
||||
|
||||
default:
|
||||
// For other filter types, use the comparator as-is
|
||||
return Array.isArray(comparator) ? comparator : [comparator];
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Intelligently inject Rison filters into native filters where possible,
|
||||
* falling back to brute-force injection for unmatched filters
|
||||
*/
|
||||
export function injectRisonFiltersIntelligently(
|
||||
risonFilters: RisonFilter[],
|
||||
nativeFilters: PartialFilters,
|
||||
currentDataMask: DataMaskStateWithId,
|
||||
): IntelligentRisonInjectionResult {
|
||||
const updatedDataMask = { ...currentDataMask };
|
||||
const unmatchedFilters: RisonFilter[] = [];
|
||||
|
||||
risonFilters.forEach(risonFilter => {
|
||||
const matchingFilterId = findMatchingNativeFilter(
|
||||
risonFilter,
|
||||
nativeFilters,
|
||||
);
|
||||
|
||||
if (
|
||||
matchingFilterId &&
|
||||
(Array.isArray(nativeFilters)
|
||||
? nativeFilters[parseInt(matchingFilterId, 10)]
|
||||
: nativeFilters[matchingFilterId])
|
||||
) {
|
||||
// Found a matching native filter - inject the value
|
||||
const matchedFilter = Array.isArray(nativeFilters)
|
||||
? nativeFilters[parseInt(matchingFilterId, 10)]
|
||||
: nativeFilters[matchingFilterId];
|
||||
const convertedValue = convertRisonToNativeValue(
|
||||
risonFilter,
|
||||
matchedFilter,
|
||||
);
|
||||
|
||||
// Update the data mask for this native filter - use the actual filter ID, not the array index
|
||||
const actualFilterId = matchedFilter.id;
|
||||
updatedDataMask[actualFilterId] = {
|
||||
...updatedDataMask[actualFilterId],
|
||||
id: actualFilterId,
|
||||
filterState: {
|
||||
value: convertedValue,
|
||||
},
|
||||
ownState: {},
|
||||
};
|
||||
} else {
|
||||
// No matching native filter found - add to unmatched list for brute-force fallback
|
||||
unmatchedFilters.push(risonFilter);
|
||||
}
|
||||
});
|
||||
|
||||
return {
|
||||
updatedDataMask,
|
||||
unmatchedFilters,
|
||||
};
|
||||
}
|
||||
@@ -66,6 +66,7 @@ import * as exploreActions from 'src/explore/actions/exploreActions';
|
||||
import * as saveModalActions from 'src/explore/actions/saveModalActions';
|
||||
import { useTabId } from 'src/hooks/useTabId';
|
||||
import withToasts from 'src/components/MessageToasts/withToasts';
|
||||
import { prettifyRisonFilterUrl } from 'src/dashboard/util/risonFilters';
|
||||
import ExploreChartPanel from '../ExploreChartPanel';
|
||||
import ConnectedControlPanelsContainer from '../ControlPanelsContainer';
|
||||
import SaveModal from '../SaveModal';
|
||||
@@ -190,6 +191,12 @@ const updateHistory = debounce(
|
||||
}
|
||||
});
|
||||
|
||||
// Preserve Rison filter parameter during URL redirects
|
||||
const risonParam = params.get('f');
|
||||
if (risonParam) {
|
||||
additionalParam.f = risonParam;
|
||||
}
|
||||
|
||||
try {
|
||||
let key;
|
||||
let stateModifier;
|
||||
@@ -225,6 +232,11 @@ const updateHistory = debounce(
|
||||
force,
|
||||
);
|
||||
window.history[stateModifier](payload, title, url);
|
||||
|
||||
// Prettify the URL after updating history to maintain human-readable Rison filters
|
||||
if (additionalParam.f) {
|
||||
setTimeout(() => prettifyRisonFilterUrl(), 50);
|
||||
}
|
||||
}
|
||||
} catch (e) {
|
||||
logging.warn('Failed at altering browser history', e);
|
||||
|
||||
@@ -94,16 +94,37 @@ export function mountExploreUrl(endpointType, extraSearch = {}, force = false) {
|
||||
const uri = new URI('/');
|
||||
const directory = getURIDirectory(endpointType);
|
||||
const search = uri.search(true);
|
||||
|
||||
// Store the Rison filter parameter separately to preserve its format
|
||||
let risonParam = null;
|
||||
|
||||
Object.keys(extraSearch).forEach(key => {
|
||||
search[key] = extraSearch[key];
|
||||
if (key === 'f' && extraSearch[key]) {
|
||||
// Don't add Rison filter to URI search params yet
|
||||
risonParam = extraSearch[key];
|
||||
} else {
|
||||
search[key] = extraSearch[key];
|
||||
}
|
||||
});
|
||||
|
||||
if (endpointType === URL_PARAMS.standalone.name) {
|
||||
if (force) {
|
||||
search.force = '1';
|
||||
}
|
||||
search.standalone = DashboardStandaloneMode.HideNav;
|
||||
}
|
||||
return uri.directory(directory).search(search).toString();
|
||||
|
||||
let url = uri.directory(directory).search(search).toString();
|
||||
|
||||
// Manually append the Rison filter parameter to preserve its human-readable format
|
||||
if (risonParam) {
|
||||
const separator = url.includes('?') ? '&' : '?';
|
||||
// Ensure we preserve the human-readable format
|
||||
const cleanRison = risonParam.replace(/%20/g, ' ').replace(/%27/g, "'");
|
||||
url = `${url}${separator}f=${cleanRison}`;
|
||||
}
|
||||
|
||||
return url;
|
||||
}
|
||||
|
||||
export function getChartDataUri({ path, qs, allowDomainSharding = false }) {
|
||||
|
||||
@@ -42,6 +42,12 @@ import { fallbackExploreInitialData } from 'src/explore/fixtures';
|
||||
import { getItem, LocalStorageKeys } from 'src/utils/localStorageHelpers';
|
||||
import { getFormDataWithDashboardContext } from 'src/explore/controlUtils/getFormDataWithDashboardContext';
|
||||
import type Chart from 'src/types/Chart';
|
||||
import {
|
||||
parseRisonFilters,
|
||||
risonToAdhocFilters,
|
||||
prettifyRisonFilterUrl,
|
||||
setupRisonUrlPrettification,
|
||||
} from 'src/dashboard/util/risonFilters';
|
||||
|
||||
const isValidResult = (rv: JsonObject): boolean =>
|
||||
rv?.result?.form_data && rv?.result?.dataset;
|
||||
@@ -132,6 +138,9 @@ export default function ExplorePage() {
|
||||
const location = useLocation();
|
||||
|
||||
useEffect(() => {
|
||||
// Set up automatic URL prettification for Rison filters
|
||||
setupRisonUrlPrettification();
|
||||
|
||||
const exploreUrlParams = getParsedExploreURLParams(location);
|
||||
const saveAction = getUrlParam(
|
||||
URL_PARAMS.saveAction,
|
||||
@@ -141,7 +150,7 @@ export default function ExplorePage() {
|
||||
if (!isExploreInitialized.current || !!saveAction) {
|
||||
fetchExploreData(exploreUrlParams)
|
||||
.then(({ result }) => {
|
||||
const formData = dashboardContextFormData
|
||||
let formData = dashboardContextFormData
|
||||
? getFormDataWithDashboardContext(
|
||||
result.form_data,
|
||||
dashboardContextFormData,
|
||||
@@ -149,6 +158,31 @@ export default function ExplorePage() {
|
||||
)
|
||||
: result.form_data;
|
||||
|
||||
// Parse Rison URL filters and add to form_data
|
||||
const params = new URLSearchParams(window.location.search);
|
||||
const risonFilterParam = params.get('f');
|
||||
if (risonFilterParam) {
|
||||
const risonFilters = parseRisonFilters(risonFilterParam);
|
||||
if (risonFilters.length > 0) {
|
||||
const risonAdhocFilters = risonToAdhocFilters(
|
||||
risonFilters,
|
||||
) as any;
|
||||
|
||||
// Simply add the Rison filters to existing filters
|
||||
// Backend no longer processes them, so no duplication
|
||||
const existingFilters = formData.adhoc_filters || [];
|
||||
formData = {
|
||||
...formData,
|
||||
adhoc_filters: [...existingFilters, ...risonAdhocFilters],
|
||||
};
|
||||
}
|
||||
|
||||
// Prettify URL after processing - try multiple times to catch all mutations
|
||||
setTimeout(() => prettifyRisonFilterUrl(), 50);
|
||||
setTimeout(() => prettifyRisonFilterUrl(), 200);
|
||||
setTimeout(() => prettifyRisonFilterUrl(), 500);
|
||||
}
|
||||
|
||||
dispatch(
|
||||
hydrateExplore({
|
||||
...result,
|
||||
|
||||
@@ -21,5 +21,33 @@ import { omit } from 'lodash';
|
||||
|
||||
const TEMPORARY_CONTROLS: string[] = ['url_params'];
|
||||
|
||||
export const sanitizeFormData = (formData: JsonObject): JsonObject =>
|
||||
omit(formData, TEMPORARY_CONTROLS);
|
||||
/**
|
||||
* Check if a filter was derived from Rison URL parameters
|
||||
*/
|
||||
function isRisonFilter(filter: any): boolean {
|
||||
// eslint-disable-next-line no-underscore-dangle
|
||||
return filter && filter.__superset_rison_filter__ === true;
|
||||
}
|
||||
|
||||
/**
|
||||
* Filter out Rison-derived filters from an array
|
||||
*/
|
||||
function excludeRisonFilters(filters: any[]): any[] {
|
||||
return filters.filter(filter => !isRisonFilter(filter));
|
||||
}
|
||||
|
||||
export const sanitizeFormData = (formData: JsonObject): JsonObject => {
|
||||
const sanitized = omit(formData, TEMPORARY_CONTROLS);
|
||||
|
||||
// Remove Rison filters from adhoc_filters to prevent them from being stored server-side
|
||||
if (
|
||||
(sanitized as any).adhoc_filters &&
|
||||
Array.isArray((sanitized as any).adhoc_filters)
|
||||
) {
|
||||
(sanitized as any).adhoc_filters = excludeRisonFilters(
|
||||
(sanitized as any).adhoc_filters,
|
||||
);
|
||||
}
|
||||
|
||||
return sanitized;
|
||||
};
|
||||
|
||||
@@ -94,6 +94,25 @@ class TablesDatabaseCommand(BaseCommand):
|
||||
),
|
||||
)
|
||||
|
||||
# Get materialized views if the database supports them
|
||||
materialized_views = security_manager.get_datasources_accessible_by_user(
|
||||
database=self._model,
|
||||
catalog=self._catalog_name,
|
||||
schema=self._schema_name,
|
||||
datasource_names=sorted(
|
||||
DatasourceName(table.table, table.schema, table.catalog)
|
||||
for table in (
|
||||
self._model.get_all_materialized_view_names_in_schema(
|
||||
catalog=self._catalog_name,
|
||||
schema=self._schema_name,
|
||||
force=self._force,
|
||||
cache=self._model.table_cache_enabled,
|
||||
cache_timeout=self._model.table_cache_timeout,
|
||||
)
|
||||
)
|
||||
),
|
||||
)
|
||||
|
||||
extra_dict_by_name = {
|
||||
table.name: table.extra_dict
|
||||
for table in (
|
||||
@@ -131,11 +150,21 @@ class TablesDatabaseCommand(BaseCommand):
|
||||
"type": "view",
|
||||
}
|
||||
for view in views
|
||||
]
|
||||
+ [
|
||||
{
|
||||
"value": mv.table,
|
||||
"type": "materialized_view",
|
||||
}
|
||||
for mv in materialized_views
|
||||
],
|
||||
key=lambda item: item["value"],
|
||||
)
|
||||
|
||||
payload = {"count": len(tables) + len(views), "result": options}
|
||||
payload = {
|
||||
"count": len(tables) + len(views) + len(materialized_views),
|
||||
"result": options,
|
||||
}
|
||||
return payload
|
||||
except SupersetException:
|
||||
raise
|
||||
|
||||
@@ -135,6 +135,12 @@ class GetExploreCommand(BaseCommand, ABC):
|
||||
utils.merge_extra_filters(form_data)
|
||||
utils.merge_request_params(form_data, request.args)
|
||||
|
||||
# Don't merge Rison filters here - let the frontend handle them
|
||||
# to avoid duplication. The frontend will process the 'f' parameter
|
||||
# and add the filters to the form_data
|
||||
# from superset.utils.rison_filters import merge_rison_filters
|
||||
# merge_rison_filters(form_data)
|
||||
|
||||
# TODO: this is a dummy placeholder - should be refactored to being just `None`
|
||||
datasource_data: dict[str, Any] = {
|
||||
"type": self._datasource_type,
|
||||
|
||||
@@ -625,7 +625,7 @@ DEFAULT_FEATURE_FLAGS: dict[str, bool] = {
|
||||
# in addition to relative timeshifts (e.g., "1 day ago")
|
||||
"DATE_RANGE_TIMESHIFTS_ENABLED": False,
|
||||
# Enable Matrixify feature for matrix-style chart layouts
|
||||
"MATRIXIFY": False,
|
||||
"MATRIXIFY": True,
|
||||
}
|
||||
|
||||
# ------------------------------
|
||||
|
||||
@@ -1493,6 +1493,18 @@ class BaseEngineSpec: # pylint: disable=too-many-public-methods
|
||||
views = {re.sub(f"^{schema}\\.", "", view) for view in views}
|
||||
return views
|
||||
|
||||
@classmethod
|
||||
def get_materialized_view_names(
|
||||
cls,
|
||||
database: Database,
|
||||
inspector: Inspector,
|
||||
schema: str | None,
|
||||
) -> set[str]:
|
||||
"""
|
||||
Get all materialized views.
|
||||
"""
|
||||
return set()
|
||||
|
||||
@classmethod
|
||||
def get_indexes(
|
||||
cls,
|
||||
|
||||
@@ -828,3 +828,97 @@ class BigQueryEngineSpec(BaseEngineSpec): # pylint: disable=too-many-public-met
|
||||
# If for some reason we get an exception, for example, no new line
|
||||
# We will return the original exception
|
||||
return exception
|
||||
|
||||
@classmethod
|
||||
def get_materialized_view_names(
|
||||
cls,
|
||||
database: Database,
|
||||
inspector: Inspector,
|
||||
schema: str | None,
|
||||
) -> set[str]:
|
||||
"""
|
||||
Get all materialized views from BigQuery.
|
||||
|
||||
BigQuery materialized views are not returned by the standard
|
||||
get_view_names() method, so we need to query INFORMATION_SCHEMA directly.
|
||||
"""
|
||||
if not schema:
|
||||
return set()
|
||||
|
||||
# Construct the query to get materialized views from INFORMATION_SCHEMA
|
||||
if catalog := database.get_default_catalog():
|
||||
information_schema = f"`{catalog}.{schema}.INFORMATION_SCHEMA.TABLES`"
|
||||
else:
|
||||
information_schema = f"`{schema}.INFORMATION_SCHEMA.TABLES`"
|
||||
|
||||
# Use string formatting for the table name since it's not user input
|
||||
# The catalog and schema are from trusted sources (database configuration)
|
||||
query = f"""
|
||||
SELECT table_name
|
||||
FROM {information_schema}
|
||||
WHERE table_type = 'MATERIALIZED VIEW'
|
||||
""" # noqa: S608
|
||||
|
||||
materialized_views = set()
|
||||
try:
|
||||
with database.get_raw_connection(catalog=catalog, schema=schema) as conn:
|
||||
cursor = conn.cursor()
|
||||
cursor.execute(query)
|
||||
materialized_views = {row[0] for row in cursor.fetchall()}
|
||||
except Exception:
|
||||
# If we can't fetch materialized views, return empty set
|
||||
logger.warning(
|
||||
"Unable to fetch materialized views for schema %s",
|
||||
schema,
|
||||
exc_info=True,
|
||||
)
|
||||
|
||||
return materialized_views
|
||||
|
||||
@classmethod
|
||||
def get_view_names(
|
||||
cls,
|
||||
database: Database,
|
||||
inspector: Inspector,
|
||||
schema: str | None,
|
||||
) -> set[str]:
|
||||
"""
|
||||
Get all views from BigQuery, excluding materialized views.
|
||||
|
||||
BigQuery's standard view discovery includes materialized views,
|
||||
but we want to separate them for proper categorization.
|
||||
"""
|
||||
if not schema:
|
||||
return set()
|
||||
|
||||
# Construct the query to get regular views from INFORMATION_SCHEMA
|
||||
catalog = database.get_default_catalog()
|
||||
if catalog:
|
||||
information_schema = f"`{catalog}.{schema}.INFORMATION_SCHEMA.TABLES`"
|
||||
else:
|
||||
information_schema = f"`{schema}.INFORMATION_SCHEMA.TABLES`"
|
||||
|
||||
# Use string formatting for the table name since it's not user input
|
||||
# The catalog and schema are from trusted sources (database configuration)
|
||||
query = f"""
|
||||
SELECT table_name
|
||||
FROM {information_schema}
|
||||
WHERE table_type = 'VIEW'
|
||||
""" # noqa: S608
|
||||
|
||||
views = set()
|
||||
try:
|
||||
with database.get_raw_connection(catalog=catalog, schema=schema) as conn:
|
||||
cursor = conn.cursor()
|
||||
cursor.execute(query)
|
||||
views = {row[0] for row in cursor.fetchall()}
|
||||
except Exception:
|
||||
# If we can't fetch views, fall back to the default implementation
|
||||
logger.warning(
|
||||
"Unable to fetch views for schema %s, falling back to default",
|
||||
schema,
|
||||
exc_info=True,
|
||||
)
|
||||
return super().get_view_names(database, inspector, schema)
|
||||
|
||||
return views
|
||||
|
||||
@@ -930,6 +930,44 @@ class Database(Model, AuditMixinNullable, ImportExportMixin): # pylint: disable
|
||||
except Exception as ex:
|
||||
raise self.db_engine_spec.get_dbapi_mapped_exception(ex) from ex
|
||||
|
||||
@cache_util.memoized_func(
|
||||
key="db:{self.id}:catalog:{catalog}:schema:{schema}:materialized_view_list",
|
||||
cache=cache_manager.cache,
|
||||
)
|
||||
def get_all_materialized_view_names_in_schema(
|
||||
self,
|
||||
catalog: str | None,
|
||||
schema: str,
|
||||
) -> set[Table]:
|
||||
"""Get all materialized views in the specified schema.
|
||||
|
||||
Parameters need to be passed as keyword arguments.
|
||||
|
||||
For unused parameters, they are referenced in
|
||||
cache_util.memoized_func decorator.
|
||||
|
||||
:param catalog: optional catalog name
|
||||
:param schema: schema name
|
||||
:param cache: whether cache is enabled for the function
|
||||
:param cache_timeout: timeout in seconds for the cache
|
||||
:param force: whether to force refresh the cache
|
||||
:return: set of materialized views
|
||||
"""
|
||||
try:
|
||||
with self.get_inspector(catalog=catalog, schema=schema) as inspector:
|
||||
return {
|
||||
Table(view, schema, catalog)
|
||||
for view in self.db_engine_spec.get_materialized_view_names(
|
||||
database=self,
|
||||
inspector=inspector,
|
||||
schema=schema,
|
||||
)
|
||||
}
|
||||
except Exception as ex:
|
||||
raise self.db_engine_spec.get_dbapi_mapped_exception(ex) from ex
|
||||
|
||||
return set()
|
||||
|
||||
@contextmanager
|
||||
def get_inspector(
|
||||
self,
|
||||
|
||||
329
superset/utils/rison_filters.py
Normal file
329
superset/utils/rison_filters.py
Normal file
@@ -0,0 +1,329 @@
|
||||
# Licensed to the Apache Software Foundation (ASF) under one
|
||||
# or more contributor license agreements. See the NOTICE file
|
||||
# distributed with this work for additional information
|
||||
# regarding copyright ownership. The ASF licenses this file
|
||||
# to you under the Apache License, Version 2.0 (the
|
||||
# "License"); you may not use this file except in compliance
|
||||
# with the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing,
|
||||
# software distributed under the License is distributed on an
|
||||
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
# KIND, either express or implied. See the License for the
|
||||
# specific language governing permissions and limitations
|
||||
# under the License.
|
||||
"""
|
||||
Parser for Rison URL filters that converts simplified filter syntax
|
||||
to Superset's adhoc_filters format.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
from typing import Any, Optional, Union
|
||||
|
||||
import prison
|
||||
from flask import request
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class RisonFilterParser:
|
||||
"""
|
||||
Parse Rison filter syntax from URL parameter 'f' and convert to adhoc_filters.
|
||||
|
||||
Supports:
|
||||
- Simple equality: f=(country:USA)
|
||||
- Lists (IN): f=(country:!(USA,Canada))
|
||||
- NOT operator: f=(NOT:(country:USA))
|
||||
- OR operator: f=(OR:!(condition1,condition2))
|
||||
- Comparison operators: f=(sales:(gt:100000))
|
||||
- BETWEEN: f=(date:(between:!(2024-01-01,2024-12-31)))
|
||||
- LIKE: f=(name:(like:'%smith%'))
|
||||
"""
|
||||
|
||||
# Comparison operators mapping to SQL operators
|
||||
OPERATORS = {
|
||||
"gt": ">",
|
||||
"gte": ">=",
|
||||
"lt": "<",
|
||||
"lte": "<=",
|
||||
"between": "BETWEEN",
|
||||
"like": "LIKE",
|
||||
"ilike": "ILIKE",
|
||||
"ne": "!=",
|
||||
"eq": "==",
|
||||
}
|
||||
|
||||
def parse(self, filter_string: Optional[str] = None) -> list[dict[str, Any]]:
|
||||
"""
|
||||
Parse Rison filter string and convert to adhoc_filters format.
|
||||
|
||||
Args:
|
||||
filter_string: Rison-encoded filter string, or None to get from request
|
||||
|
||||
Returns:
|
||||
List of adhoc_filter dictionaries
|
||||
"""
|
||||
if filter_string is None:
|
||||
# Get from request args
|
||||
filter_string = request.args.get("f")
|
||||
|
||||
if not filter_string:
|
||||
return []
|
||||
|
||||
try:
|
||||
# Parse Rison to Python object
|
||||
filters_obj = prison.loads(filter_string)
|
||||
|
||||
# Convert to adhoc_filters
|
||||
return self._convert_to_adhoc_filters(filters_obj)
|
||||
|
||||
except Exception:
|
||||
logger.warning(
|
||||
f"Failed to parse Rison filters: {filter_string}", exc_info=True
|
||||
)
|
||||
# Return empty list on parse error to not break the request
|
||||
return []
|
||||
|
||||
def _convert_to_adhoc_filters(
|
||||
self, filters_obj: Union[dict[str, Any], list[Any], Any]
|
||||
) -> list[dict[str, Any]]:
|
||||
"""
|
||||
Convert parsed Rison object to adhoc_filters format.
|
||||
|
||||
Args:
|
||||
filters_obj: Parsed Rison object
|
||||
|
||||
Returns:
|
||||
List of adhoc_filter dictionaries
|
||||
"""
|
||||
if not isinstance(filters_obj, dict):
|
||||
return []
|
||||
|
||||
adhoc_filters = []
|
||||
|
||||
for key, value in filters_obj.items():
|
||||
# Handle special operators
|
||||
if key == "OR":
|
||||
# OR operator creates a single filter with OR clause
|
||||
or_filters = self._handle_or_operator(value)
|
||||
adhoc_filters.extend(or_filters)
|
||||
elif key == "NOT":
|
||||
# NOT operator negates the contained filter
|
||||
not_filters = self._handle_not_operator(value)
|
||||
adhoc_filters.extend(not_filters)
|
||||
else:
|
||||
# Regular field filter
|
||||
filter_dict = self._create_filter(key, value)
|
||||
if filter_dict:
|
||||
adhoc_filters.append(filter_dict)
|
||||
|
||||
return adhoc_filters
|
||||
|
||||
def _create_filter(
|
||||
self, column: str, value: Any, negate: bool = False
|
||||
) -> Optional[dict[str, Any]]:
|
||||
"""
|
||||
Create a single adhoc_filter dictionary.
|
||||
|
||||
Args:
|
||||
column: Column name
|
||||
value: Filter value (can be scalar, list, or operator dict)
|
||||
negate: Whether to negate the filter
|
||||
|
||||
Returns:
|
||||
adhoc_filter dictionary or None if invalid
|
||||
"""
|
||||
# Base filter structure
|
||||
filter_dict: dict[str, Any] = {
|
||||
"expressionType": "SIMPLE",
|
||||
"clause": "WHERE",
|
||||
"subject": column,
|
||||
}
|
||||
|
||||
# Handle different value types
|
||||
if isinstance(value, list):
|
||||
# List means IN operator
|
||||
filter_dict["operator"] = "NOT IN" if negate else "IN"
|
||||
filter_dict["comparator"] = value
|
||||
|
||||
elif isinstance(value, dict):
|
||||
# Dictionary contains operator and value
|
||||
operator_info = self._parse_operator_dict(value)
|
||||
if operator_info:
|
||||
operator, comparator = operator_info
|
||||
if negate and operator == "==":
|
||||
operator = "!="
|
||||
elif negate and operator == "IN":
|
||||
operator = "NOT IN"
|
||||
filter_dict["operator"] = operator
|
||||
filter_dict["comparator"] = comparator
|
||||
else:
|
||||
return None
|
||||
|
||||
else:
|
||||
# Simple scalar value
|
||||
filter_dict["operator"] = "!=" if negate else "=="
|
||||
filter_dict["comparator"] = value
|
||||
|
||||
return filter_dict
|
||||
|
||||
def _parse_operator_dict(
|
||||
self, op_dict: dict[str, Any]
|
||||
) -> Optional[tuple[str, Any]]:
|
||||
"""
|
||||
Parse operator dictionary like {gt: 100} or {between: [1, 10]}.
|
||||
|
||||
Args:
|
||||
op_dict: Operator dictionary
|
||||
|
||||
Returns:
|
||||
Tuple of (operator, comparator) or None
|
||||
"""
|
||||
if not op_dict:
|
||||
return None
|
||||
|
||||
# Get first key-value pair (should only be one)
|
||||
for op_key, op_value in op_dict.items():
|
||||
if op_key in self.OPERATORS:
|
||||
operator = self.OPERATORS[op_key]
|
||||
|
||||
# Special handling for BETWEEN
|
||||
if (
|
||||
operator == "BETWEEN"
|
||||
and isinstance(op_value, list)
|
||||
and len(op_value) == 2
|
||||
):
|
||||
return operator, op_value
|
||||
|
||||
return operator, op_value
|
||||
elif op_key == "in":
|
||||
# Explicit IN operator
|
||||
return "IN", op_value if isinstance(op_value, list) else [op_value]
|
||||
elif op_key == "nin":
|
||||
# NOT IN operator
|
||||
return "NOT IN", op_value if isinstance(op_value, list) else [op_value]
|
||||
|
||||
return None
|
||||
|
||||
def _handle_or_operator(self, or_value: Any) -> list[dict[str, Any]]:
|
||||
"""
|
||||
Handle OR operator by creating appropriate filters.
|
||||
|
||||
Note: Superset's adhoc_filters don't directly support OR between different
|
||||
fields in SIMPLE mode. This creates a SQL expression instead.
|
||||
|
||||
Args:
|
||||
or_value: Value of OR operator (usually a list)
|
||||
|
||||
Returns:
|
||||
List containing SQL expression filter
|
||||
"""
|
||||
if not isinstance(or_value, list):
|
||||
return []
|
||||
|
||||
# Build SQL expression for OR
|
||||
sql_parts = []
|
||||
|
||||
for item in or_value:
|
||||
if isinstance(item, dict):
|
||||
for col, val in item.items():
|
||||
if col not in ["OR", "NOT"]: # Skip nested operators for now
|
||||
sql_part = self._build_sql_condition(col, val)
|
||||
if sql_part:
|
||||
sql_parts.append(sql_part)
|
||||
|
||||
if sql_parts:
|
||||
return [
|
||||
{
|
||||
"expressionType": "SQL",
|
||||
"clause": "WHERE",
|
||||
"sqlExpression": f"({' OR '.join(sql_parts)})",
|
||||
}
|
||||
]
|
||||
|
||||
return []
|
||||
|
||||
def _build_sql_condition(self, column: str, value: Any) -> Optional[str]:
|
||||
"""
|
||||
Build a SQL condition string for a single column-value pair.
|
||||
|
||||
Args:
|
||||
column: Column name
|
||||
value: Value (can be scalar, list, or operator dict)
|
||||
|
||||
Returns:
|
||||
SQL condition string or None
|
||||
"""
|
||||
if isinstance(value, list):
|
||||
# IN clause
|
||||
values_str = ", ".join(
|
||||
[f"'{v}'" if isinstance(v, str) else str(v) for v in value]
|
||||
)
|
||||
return f"{column} IN ({values_str})"
|
||||
|
||||
elif isinstance(value, dict):
|
||||
# Operator
|
||||
operator_info = self._parse_operator_dict(value)
|
||||
if operator_info:
|
||||
op, comp = operator_info
|
||||
if op == "BETWEEN" and isinstance(comp, list):
|
||||
return f"{column} BETWEEN '{comp[0]}' AND '{comp[1]}'"
|
||||
elif op == "LIKE":
|
||||
return f"{column} LIKE '{comp}'"
|
||||
else:
|
||||
comp_str = f"'{comp}'" if isinstance(comp, str) else str(comp)
|
||||
return f"{column} {op} {comp_str}"
|
||||
|
||||
else:
|
||||
# Simple equality
|
||||
val_str = f"'{value}'" if isinstance(value, str) else str(value)
|
||||
return f"{column} = {val_str}"
|
||||
|
||||
return None
|
||||
|
||||
def _handle_not_operator(self, not_value: Any) -> list[dict[str, Any]]:
|
||||
"""
|
||||
Handle NOT operator by negating the contained filter.
|
||||
|
||||
Args:
|
||||
not_value: Value to negate
|
||||
|
||||
Returns:
|
||||
List of negated filters
|
||||
"""
|
||||
if isinstance(not_value, dict):
|
||||
filters = []
|
||||
for col, val in not_value.items():
|
||||
if col not in ["OR", "NOT"]: # Skip nested operators
|
||||
filter_dict = self._create_filter(col, val, negate=True)
|
||||
if filter_dict:
|
||||
filters.append(filter_dict)
|
||||
return filters
|
||||
|
||||
return []
|
||||
|
||||
|
||||
def merge_rison_filters(form_data: dict[str, Any]) -> None:
|
||||
"""
|
||||
Merge Rison filters from 'f' parameter into form_data.
|
||||
|
||||
This function modifies form_data in place, adding parsed filters
|
||||
to the adhoc_filters list.
|
||||
|
||||
Args:
|
||||
form_data: Form data dictionary to modify
|
||||
"""
|
||||
parser = RisonFilterParser()
|
||||
|
||||
if rison_filters := parser.parse():
|
||||
# Get existing adhoc_filters or create empty list
|
||||
existing_filters = form_data.get("adhoc_filters", [])
|
||||
|
||||
# Add new filters
|
||||
form_data["adhoc_filters"] = existing_filters + rison_filters
|
||||
|
||||
logger.info(f"Added {len(rison_filters)} filters from Rison parameter")
|
||||
@@ -1224,6 +1224,9 @@ class TestTablesDatabaseCommand(SupersetTestCase):
|
||||
patch.object(
|
||||
database, "get_all_view_names_in_schema", return_value=[]
|
||||
) as mock_get_all_view_names,
|
||||
patch.object(
|
||||
database, "get_all_materialized_view_names_in_schema", return_value=[]
|
||||
) as mock_get_all_materialized_view_names,
|
||||
):
|
||||
command = TablesDatabaseCommand(database.id, None, "schema_name", False)
|
||||
command.run()
|
||||
@@ -1243,3 +1246,10 @@ class TestTablesDatabaseCommand(SupersetTestCase):
|
||||
cache=database.table_cache_enabled,
|
||||
cache_timeout=database.table_cache_timeout,
|
||||
)
|
||||
mock_get_all_materialized_view_names.assert_called_once_with(
|
||||
catalog="default_catalog",
|
||||
schema="schema_name",
|
||||
force=False,
|
||||
cache=database.table_cache_enabled,
|
||||
cache_timeout=database.table_cache_timeout,
|
||||
)
|
||||
|
||||
@@ -42,6 +42,7 @@ def database_with_catalog(mocker: MockerFixture) -> MagicMock:
|
||||
database.get_all_view_names_in_schema.return_value = {
|
||||
("view1", "schema1", "catalog1"),
|
||||
}
|
||||
database.get_all_materialized_view_names_in_schema.return_value = set()
|
||||
|
||||
DatabaseDAO = mocker.patch("superset.commands.database.tables.DatabaseDAO") # noqa: N806
|
||||
DatabaseDAO.find_by_id.return_value = database
|
||||
@@ -66,6 +67,7 @@ def database_without_catalog(mocker: MockerFixture) -> MagicMock:
|
||||
database.get_all_view_names_in_schema.return_value = {
|
||||
("view1", "schema1", None),
|
||||
}
|
||||
database.get_all_materialized_view_names_in_schema.return_value = set()
|
||||
|
||||
DatabaseDAO = mocker.patch("superset.commands.database.tables.DatabaseDAO") # noqa: N806
|
||||
DatabaseDAO.find_by_id.return_value = database
|
||||
@@ -89,6 +91,7 @@ def test_tables_with_catalog(
|
||||
DatasourceName("table2", "schema1", "catalog1"),
|
||||
},
|
||||
{DatasourceName("view1", "schema1", "catalog1")},
|
||||
set(), # Empty set for materialized views
|
||||
],
|
||||
)
|
||||
|
||||
@@ -127,6 +130,12 @@ def test_tables_with_catalog(
|
||||
DatasourceName("view1", "schema1", "catalog1"),
|
||||
],
|
||||
),
|
||||
mocker.call(
|
||||
database=database_with_catalog,
|
||||
catalog="catalog1",
|
||||
schema="schema1",
|
||||
datasource_names=[],
|
||||
),
|
||||
],
|
||||
)
|
||||
|
||||
@@ -155,6 +164,7 @@ def test_tables_without_catalog(
|
||||
DatasourceName("table2", "schema1"),
|
||||
},
|
||||
{DatasourceName("view1", "schema1")},
|
||||
set(), # Empty set for materialized views
|
||||
],
|
||||
)
|
||||
|
||||
@@ -193,6 +203,12 @@ def test_tables_without_catalog(
|
||||
DatasourceName("view1", "schema1"),
|
||||
],
|
||||
),
|
||||
mocker.call(
|
||||
database=database_without_catalog,
|
||||
catalog=None,
|
||||
schema="schema1",
|
||||
datasource_names=[],
|
||||
),
|
||||
],
|
||||
)
|
||||
|
||||
|
||||
@@ -19,6 +19,7 @@
|
||||
|
||||
from datetime import datetime
|
||||
from typing import Optional
|
||||
from unittest import mock
|
||||
|
||||
import pytest
|
||||
from pytest_mock import MockerFixture
|
||||
@@ -451,43 +452,80 @@ def test_adjust_engine_params_catalog_as_host() -> None:
|
||||
assert str(uri) == "bigquery://other-project/"
|
||||
|
||||
|
||||
def test_adjust_engine_params_catalog_as_database() -> None:
|
||||
def test_get_materialized_view_names() -> None:
|
||||
"""
|
||||
Test passing a custom catalog.
|
||||
|
||||
In this test, the original URI has the catalog as the database.
|
||||
Test get_materialized_view_names method.
|
||||
"""
|
||||
from superset.db_engine_specs.bigquery import BigQueryEngineSpec
|
||||
|
||||
url = make_url("bigquery:///project")
|
||||
database = mock.Mock()
|
||||
database.get_default_catalog.return_value = "my_project"
|
||||
|
||||
uri = BigQueryEngineSpec.adjust_engine_params(url, {})[0]
|
||||
assert str(uri) == "bigquery:///project"
|
||||
inspector = mock.Mock()
|
||||
|
||||
uri = BigQueryEngineSpec.adjust_engine_params(
|
||||
url,
|
||||
{},
|
||||
catalog="other-project",
|
||||
)[0]
|
||||
assert str(uri) == "bigquery://other-project/"
|
||||
# Mock the raw connection and cursor
|
||||
cursor_mock = mock.Mock()
|
||||
cursor_mock.fetchall.return_value = [
|
||||
("materialized_view_1",),
|
||||
("materialized_view_2",),
|
||||
]
|
||||
|
||||
connection_mock = mock.Mock()
|
||||
connection_mock.cursor.return_value = cursor_mock
|
||||
connection_mock.__enter__ = mock.Mock(return_value=connection_mock)
|
||||
connection_mock.__exit__ = mock.Mock(return_value=None)
|
||||
|
||||
database.get_raw_connection.return_value = connection_mock
|
||||
|
||||
result = BigQueryEngineSpec.get_materialized_view_names(
|
||||
database=database, inspector=inspector, schema="my_dataset"
|
||||
)
|
||||
|
||||
assert result == {"materialized_view_1", "materialized_view_2"}
|
||||
|
||||
# Verify the SQL query was correct
|
||||
cursor_mock.execute.assert_called_once()
|
||||
executed_query = cursor_mock.execute.call_args[0][0]
|
||||
assert "INFORMATION_SCHEMA.TABLES" in executed_query
|
||||
assert "table_type = 'MATERIALIZED VIEW'" in executed_query
|
||||
|
||||
|
||||
def test_adjust_engine_params_no_catalog() -> None:
|
||||
def test_get_view_names_excludes_materialized_views() -> None:
|
||||
"""
|
||||
Test passing a custom catalog.
|
||||
|
||||
In this test, the original URI has no catalog.
|
||||
Test get_view_names excludes materialized views.
|
||||
"""
|
||||
from superset.db_engine_specs.bigquery import BigQueryEngineSpec
|
||||
|
||||
url = make_url("bigquery://")
|
||||
database = mock.Mock()
|
||||
database.get_default_catalog.return_value = "my_project"
|
||||
|
||||
uri = BigQueryEngineSpec.adjust_engine_params(url, {})[0]
|
||||
assert str(uri) == "bigquery://"
|
||||
inspector = mock.Mock()
|
||||
|
||||
uri = BigQueryEngineSpec.adjust_engine_params(
|
||||
url,
|
||||
{},
|
||||
catalog="other-project",
|
||||
)[0]
|
||||
assert str(uri) == "bigquery://other-project/"
|
||||
# Mock the raw connection and cursor
|
||||
cursor_mock = mock.Mock()
|
||||
# Return only regular views, not materialized views
|
||||
cursor_mock.fetchall.return_value = [
|
||||
("regular_view_1",),
|
||||
("regular_view_2",),
|
||||
]
|
||||
|
||||
connection_mock = mock.Mock()
|
||||
connection_mock.cursor.return_value = cursor_mock
|
||||
connection_mock.__enter__ = mock.Mock(return_value=connection_mock)
|
||||
connection_mock.__exit__ = mock.Mock(return_value=None)
|
||||
|
||||
database.get_raw_connection.return_value = connection_mock
|
||||
|
||||
result = BigQueryEngineSpec.get_view_names(
|
||||
database=database, inspector=inspector, schema="my_dataset"
|
||||
)
|
||||
|
||||
assert result == {"regular_view_1", "regular_view_2"}
|
||||
|
||||
# Verify the SQL query only gets regular views
|
||||
cursor_mock.execute.assert_called_once()
|
||||
executed_query = cursor_mock.execute.call_args[0][0]
|
||||
assert "INFORMATION_SCHEMA.TABLES" in executed_query
|
||||
assert "table_type = 'VIEW'" in executed_query
|
||||
# Ensure it's not querying for materialized views
|
||||
assert "MATERIALIZED VIEW" not in executed_query
|
||||
|
||||
218
tests/unit_tests/utils/test_rison_filters.py
Normal file
218
tests/unit_tests/utils/test_rison_filters.py
Normal file
@@ -0,0 +1,218 @@
|
||||
# Licensed to the Apache Software Foundation (ASF) under one
|
||||
# or more contributor license agreements. See the NOTICE file
|
||||
# distributed with this work for additional information
|
||||
# regarding copyright ownership. The ASF licenses this file
|
||||
# to you under the Apache License, Version 2.0 (the
|
||||
# "License"); you may not use this file except in compliance
|
||||
# with the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing,
|
||||
# software distributed under the License is distributed on an
|
||||
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
# KIND, either express or implied. See the License for the
|
||||
# specific language governing permissions and limitations
|
||||
# under the License.
|
||||
"""Unit tests for Rison filter parser."""
|
||||
|
||||
from superset.utils.rison_filters import merge_rison_filters, RisonFilterParser
|
||||
from tests.integration_tests.base_tests import SupersetTestCase
|
||||
|
||||
|
||||
class TestRisonFilterParser(SupersetTestCase):
    """Tests for ``RisonFilterParser`` and ``merge_rison_filters``.

    Covers parsing of Rison filter expressions into Superset adhoc-filter
    dictionaries (SIMPLE and SQL expression types), and merging parsed
    filters into an existing ``form_data`` payload.
    """

    def test_simple_equality(self):
        """A bare key:value pair becomes a SIMPLE ``==`` WHERE filter."""
        parser = RisonFilterParser()
        result = parser.parse("(country:USA)")

        assert len(result) == 1
        assert result[0]["expressionType"] == "SIMPLE"
        assert result[0]["clause"] == "WHERE"
        assert result[0]["subject"] == "country"
        assert result[0]["operator"] == "=="
        assert result[0]["comparator"] == "USA"

    def test_multiple_filters_and(self):
        """Sibling key:value pairs produce one filter each (implicit AND)."""
        parser = RisonFilterParser()
        result = parser.parse("(country:USA,year:2024)")

        assert len(result) == 2
        assert result[0]["subject"] == "country"
        assert result[0]["comparator"] == "USA"
        assert result[1]["subject"] == "year"
        # Rison integers are decoded as Python ints, not strings.
        assert result[1]["comparator"] == 2024

    def test_list_in_operator(self):
        """A Rison list value (``!(...)``) maps to the IN operator."""
        parser = RisonFilterParser()
        result = parser.parse("(country:!(USA,Canada))")

        assert len(result) == 1
        assert result[0]["subject"] == "country"
        assert result[0]["operator"] == "IN"
        assert result[0]["comparator"] == ["USA", "Canada"]

    def test_not_operator(self):
        """A scalar wrapped in NOT negates equality to ``!=``."""
        parser = RisonFilterParser()
        result = parser.parse("(NOT:(country:USA))")

        assert len(result) == 1
        assert result[0]["subject"] == "country"
        assert result[0]["operator"] == "!="
        assert result[0]["comparator"] == "USA"

    def test_not_in_operator(self):
        """A list wrapped in NOT negates membership to NOT IN."""
        parser = RisonFilterParser()
        result = parser.parse("(NOT:(country:!(USA,Canada)))")

        assert len(result) == 1
        assert result[0]["subject"] == "country"
        assert result[0]["operator"] == "NOT IN"
        assert result[0]["comparator"] == ["USA", "Canada"]

    def test_or_operator(self):
        """OR groups cannot be expressed as SIMPLE filters; they become SQL."""
        parser = RisonFilterParser()
        result = parser.parse("(OR:!((status:active),(priority:high)))")

        assert len(result) == 1
        assert result[0]["expressionType"] == "SQL"
        assert result[0]["clause"] == "WHERE"
        assert "status = 'active' OR priority = 'high'" in result[0]["sqlExpression"]

    def test_comparison_operators(self):
        """gt/gte/lt/lte sub-objects map to >, >=, <, <= respectively."""
        parser = RisonFilterParser()

        # Greater than
        result = parser.parse("(sales:(gt:100000))")
        assert result[0]["operator"] == ">"
        assert result[0]["comparator"] == 100000

        # Greater than or equal
        result = parser.parse("(age:(gte:18))")
        assert result[0]["operator"] == ">="
        assert result[0]["comparator"] == 18

        # Less than
        result = parser.parse("(temp:(lt:32))")
        assert result[0]["operator"] == "<"
        assert result[0]["comparator"] == 32

        # Less than or equal
        result = parser.parse("(price:(lte:1000))")
        assert result[0]["operator"] == "<="
        assert result[0]["comparator"] == 1000

    def test_between_operator(self):
        """A two-element ``between`` list maps to BETWEEN with both bounds."""
        parser = RisonFilterParser()
        result = parser.parse("(date:(between:!('2024-01-01','2024-12-31')))")

        assert len(result) == 1
        assert result[0]["operator"] == "BETWEEN"
        assert result[0]["comparator"] == ["2024-01-01", "2024-12-31"]

    def test_like_operator(self):
        """A ``like`` sub-object maps to LIKE, preserving wildcards."""
        parser = RisonFilterParser()
        result = parser.parse("(name:(like:'%smith%'))")

        assert len(result) == 1
        assert result[0]["operator"] == "LIKE"
        assert result[0]["comparator"] == "%smith%"

    def test_complex_combination(self):
        """Mixed equality, IN, NOT, and comparison filters parse together."""
        parser = RisonFilterParser()
        result = parser.parse(
            "(year:2024,region:!(North,South),NOT:(status:test),revenue:(gt:100000))"
        )

        assert len(result) == 4

        # Look filters up by subject rather than position so the test does
        # not depend on the parser's output ordering.
        year_filter = next(f for f in result if f["subject"] == "year")
        assert year_filter["operator"] == "=="
        assert year_filter["comparator"] == 2024

        region_filter = next(f for f in result if f["subject"] == "region")
        assert region_filter["operator"] == "IN"
        assert region_filter["comparator"] == ["North", "South"]

        status_filter = next(f for f in result if f["subject"] == "status")
        assert status_filter["operator"] == "!="
        assert status_filter["comparator"] == "test"

        revenue_filter = next(f for f in result if f["subject"] == "revenue")
        assert revenue_filter["operator"] == ">"
        assert revenue_filter["comparator"] == 100000

    def test_empty_filter(self):
        """An empty string or empty Rison object yields no filters."""
        parser = RisonFilterParser()

        assert parser.parse("") == []
        assert parser.parse("()") == []

    def test_invalid_rison(self):
        """Malformed Rison must degrade to an empty list, not raise."""
        parser = RisonFilterParser()

        assert parser.parse("invalid rison") == []
        assert parser.parse("(unclosed") == []

    def test_parse_from_request(self):
        """With no argument, ``parse()`` reads the ``f`` query arg from the
        active Flask request."""
        # NOTE: `with self.client.get(...).request:` is not valid — a
        # werkzeug/Flask Request is not a context manager. Push a test
        # request context instead, the supported way to simulate a request.
        with self.app.test_request_context("/?f=(country:USA)"):
            parser = RisonFilterParser()
            result = parser.parse()  # no argument: read from flask.request

            assert len(result) == 1
            assert result[0]["subject"] == "country"
            assert result[0]["comparator"] == "USA"

    def test_merge_rison_filters(self):
        """Parsed Rison filters are appended after existing adhoc_filters."""
        form_data = {
            "adhoc_filters": [
                {
                    "expressionType": "SIMPLE",
                    "clause": "WHERE",
                    "subject": "existing",
                    "operator": "==",
                    "comparator": "value",
                }
            ]
        }

        # Use a test request context (Request objects are not context
        # managers) so merge_rison_filters can see the `f` query arg.
        with self.app.test_request_context("/?f=(country:USA)"):
            merge_rison_filters(form_data)

            # Existing filter is preserved; the Rison filter is appended.
            assert len(form_data["adhoc_filters"]) == 2
            assert form_data["adhoc_filters"][0]["subject"] == "existing"
            assert form_data["adhoc_filters"][1]["subject"] == "country"

    def test_merge_rison_filters_empty(self):
        """Merging with no ``f`` query arg leaves form_data untouched."""
        form_data = {"adhoc_filters": []}

        with self.app.test_request_context("/"):
            merge_rison_filters(form_data)

            # No Rison filters in the request: nothing is added.
            assert form_data["adhoc_filters"] == []
|
||||
Reference in New Issue
Block a user