fix(docs): regenerate 6.1.0 snapshots with depth-aware import rewriter

The previous import-path fixer only matched two-level relative paths
(`../../src/` and `../../data/`), missing files at deeper nesting in the
section tree. After the 6.1.0 cut for developer_docs, ~50 component MDX
files at depth 3 still referenced `../../../src/components/StorybookWrapper`
(should have been `../../../../src/...`), and the components Button page
referenced `../../../superset-frontend/...` (should have been
`../../../../superset-frontend/...`). The Docusaurus production build
failed with module-not-found errors as a result.

Replace the pattern-specific regex with a depth-aware walker that
- counts the file's nesting depth within the snapshot,
- bumps any relative import whose `../` count exceeds that depth (i.e.
  the import escapes the section root and so must compensate for the
  extra `version-X.X.X/` directory the snapshot lives under),
- skips fenced code blocks so documented sample imports (e.g. Playwright
  page-object examples in developer_docs/testing/e2e-testing.md) are not
  rewritten.

Re-cut all four sections under the new fixer. yarn build now passes
locally.
This commit is contained in:
Superset Dev
2026-05-04 08:53:25 -07:00
parent 752ebd47cb
commit 5a5a0e70fc
355 changed files with 3748 additions and 368 deletions

View File

@@ -26,6 +26,6 @@ under the License.
import { DatabasePage } from '@site/src/components/databases';
export const databaseInfo = {"time_grains":{"SECOND":true,"FIVE_SECONDS":false,"THIRTY_SECONDS":false,"MINUTE":true,"FIVE_MINUTES":false,"TEN_MINUTES":false,"FIFTEEN_MINUTES":false,"THIRTY_MINUTES":false,"HALF_HOUR":false,"HOUR":true,"SIX_HOURS":false,"DAY":true,"WEEK":true,"WEEK_STARTING_SUNDAY":false,"WEEK_STARTING_MONDAY":false,"WEEK_ENDING_SATURDAY":false,"WEEK_ENDING_SUNDAY":false,"MONTH":true,"QUARTER":true,"QUARTER_YEAR":false,"YEAR":true},"module":"superset.db_engine_specs.parseable","limit_method":1,"limit_clause":true,"joins":true,"subqueries":true,"alias_in_select":true,"alias_in_orderby":true,"time_groupby_inline":false,"alias_to_source_column":false,"order_by_not_in_select":true,"expressions_in_orderby":false,"cte_in_subquery":true,"max_column_name":null,"sql_comments":true,"escaped_colons":true,"masked_encrypted_extra":false,"column_type_mapping":false,"function_names":false,"user_impersonation":false,"file_upload":true,"get_extra_table_metadata":false,"dbapi_exception_mapping":false,"custom_errors":false,"dynamic_schema":false,"catalog":false,"dynamic_catalog":false,"ssh_tunneling":true,"query_cancelation":false,"get_metrics":false,"where_latest_partition":false,"expand_data":false,"query_cost_estimation":false,"sql_validation":false,"score":28,"max_score":201,"documentation":{"description":"Parseable is a distributed log analytics database with SQL-like query interface.","categories":["Search & NoSQL","Open Source"],"pypi_packages":["sqlalchemy-parseable"],"connection_string":"parseable://{username}:{password}@{hostname}:{port}/{stream_name}","connection_examples":[{"description":"Example connection","connection_string":"parseable://admin:admin@demo.parseable.com:443/ingress-nginx"}],"notes":"Stream name in URI represents the Parseable logstream to query. Supports HTTP (80) and HTTPS (443).","docs_url":"https://www.parseable.io","category":"Other Databases"},"engine":"parseable","engine_name":"Parseable","engine_aliases":[],"default_driver":null,"supports_file_upload":true,"supports_dynamic_schema":false,"supports_catalog":false,"supports_dynamic_catalog":false};
export const databaseInfo = {"engine":"parseable","engine_name":"Parseable","module":"parseable","documentation":{"description":"Parseable is a distributed log analytics database with SQL-like query interface.","categories":["SEARCH_NOSQL","OPEN_SOURCE"],"pypi_packages":["sqlalchemy-parseable"],"connection_string":"parseable://{username}:{password}@{hostname}:{port}/{stream_name}","connection_examples":[{"description":"Example connection","connection_string":"parseable://admin:admin@demo.parseable.com:443/ingress-nginx"}],"notes":"Stream name in URI represents the Parseable logstream to query. Supports HTTP (80) and HTTPS (443).","docs_url":"https://www.parseable.io"},"time_grains":{"SECOND":true,"FIVE_SECONDS":false,"THIRTY_SECONDS":false,"MINUTE":true,"FIVE_MINUTES":false,"TEN_MINUTES":false,"FIFTEEN_MINUTES":false,"THIRTY_MINUTES":false,"HALF_HOUR":false,"HOUR":true,"SIX_HOURS":false,"DAY":true,"WEEK":true,"WEEK_STARTING_SUNDAY":false,"WEEK_STARTING_MONDAY":false,"WEEK_ENDING_SATURDAY":false,"WEEK_ENDING_SUNDAY":false,"MONTH":true,"QUARTER":true,"QUARTER_YEAR":false,"YEAR":true},"score":28,"max_score":201,"joins":true,"subqueries":true,"supports_dynamic_schema":false,"supports_catalog":false,"supports_dynamic_catalog":false,"ssh_tunneling":true,"query_cancelation":false,"supports_file_upload":true,"user_impersonation":false,"query_cost_estimation":false,"sql_validation":false};
<DatabasePage name="Parseable" database={databaseInfo} />