fix(docs): regenerate 6.1.0 snapshots with depth-aware import rewriter

The previous import-path fixer only matched two-level relative paths
(`../../src/` and `../../data/`), missing files at deeper nesting in the
section tree. After the 6.1.0 cut for developer_docs, ~50 component MDX
files at depth 3 still referenced `../../../src/components/StorybookWrapper`
(should have been `../../../../src/...`), and the components Button page
referenced `../../../superset-frontend/...` (should have been
`../../../../superset-frontend/...`). The Docusaurus production build
failed with module-not-found errors as a result.

Replace the pattern-specific regex with a depth-aware walker that
- counts the file's nesting depth within the snapshot,
- bumps any relative import whose `../` count exceeds that depth (i.e.
  the import escapes the section root and so must compensate for the
  extra `version-X.X.X/` directory the snapshot lives under),
- skips fenced code blocks so documented sample imports (e.g. Playwright
  page-object examples in developer_docs/testing/e2e-testing.md) are not
  rewritten.

Re-cut all four sections under the new fixer. `yarn build` now passes
locally.
This commit is contained in:
Superset Dev
2026-05-04 08:53:25 -07:00
parent 752ebd47cb
commit 5a5a0e70fc
355 changed files with 3748 additions and 368 deletions

View File

@@ -26,6 +26,6 @@ under the License.
import { DatabasePage } from '@site/src/components/databases';
export const databaseInfo = {"time_grains":{"SECOND":true,"FIVE_SECONDS":false,"THIRTY_SECONDS":false,"MINUTE":true,"FIVE_MINUTES":true,"TEN_MINUTES":true,"FIFTEEN_MINUTES":true,"THIRTY_MINUTES":true,"HALF_HOUR":false,"HOUR":true,"SIX_HOURS":false,"DAY":true,"WEEK":true,"WEEK_STARTING_SUNDAY":false,"WEEK_STARTING_MONDAY":true,"WEEK_ENDING_SATURDAY":false,"WEEK_ENDING_SUNDAY":false,"MONTH":true,"QUARTER":true,"QUARTER_YEAR":false,"YEAR":true},"module":"superset.db_engine_specs.bigquery","limit_method":1,"limit_clause":true,"joins":true,"subqueries":true,"alias_in_select":true,"alias_in_orderby":true,"time_groupby_inline":false,"alias_to_source_column":false,"order_by_not_in_select":true,"expressions_in_orderby":true,"cte_in_subquery":true,"max_column_name":128,"sql_comments":true,"escaped_colons":true,"masked_encrypted_extra":false,"column_type_mapping":false,"function_names":false,"user_impersonation":false,"file_upload":true,"get_extra_table_metadata":true,"dbapi_exception_mapping":true,"custom_errors":false,"dynamic_schema":false,"catalog":true,"dynamic_catalog":true,"ssh_tunneling":false,"query_cancelation":false,"get_metrics":false,"where_latest_partition":true,"expand_data":false,"query_cost_estimation":true,"sql_validation":false,"score":83,"max_score":201,"documentation":{"description":"Google BigQuery is a serverless, highly scalable data warehouse.","logo":"google-big-query.svg","homepage_url":"https://cloud.google.com/bigquery/","categories":["Cloud - Google","Analytical Databases","Proprietary"],"pypi_packages":["sqlalchemy-bigquery"],"connection_string":"bigquery://{project_id}","install_instructions":"echo \"sqlalchemy-bigquery\" >> ./docker/requirements-local.txt","authentication_methods":[{"name":"Service Account JSON","description":"Upload service account credentials JSON or paste in Secure 
Extra","secure_extra":{"credentials_info":{"type":"service_account","project_id":"...","private_key_id":"...","private_key":"...","client_email":"...","client_id":"...","auth_uri":"...","token_uri":"..."}}}],"notes":"Create a Service Account via GCP console with access to BigQuery datasets. For CSV/Excel uploads, also install pandas_gbq.","warnings":["Google BigQuery Python SDK is not compatible with gevent. Use a worker type other than gevent when deploying with gunicorn."],"docs_url":"https://github.com/googleapis/python-bigquery-sqlalchemy","category":"Cloud - Google","custom_errors":[{"regex_name":"CONNECTION_DATABASE_PERMISSIONS_REGEX","message_template":"Unable to connect. Verify that the following roles are set on the service account: \"BigQuery Data Viewer\", \"BigQuery Metadata Viewer\", \"BigQuery Job User\" and the following permissions are set \"bigquery.readsessions.create\", \"bigquery.readsessions.getData\"","error_type":"CONNECTION_DATABASE_PERMISSIONS_ERROR","category":"Permissions","description":"Insufficient permissions","issue_codes":[1017]},{"regex_name":"TABLE_DOES_NOT_EXIST_REGEX","message_template":"The table \"%(table)s\" does not exist. A valid table must be used to run this query.","error_type":"TABLE_DOES_NOT_EXIST_ERROR","category":"Query","description":"Table not found","issue_codes":[1003,1005]},{"regex_name":"COLUMN_DOES_NOT_EXIST_REGEX","message_template":"We can't seem to resolve column \"%(column)s\" at line %(location)s.","error_type":"COLUMN_DOES_NOT_EXIST_ERROR","category":"Query","description":"Column not found","issue_codes":[1003,1004]},{"regex_name":"SCHEMA_DOES_NOT_EXIST_REGEX","message_template":"The schema \"%(schema)s\" does not exist. 
A valid schema must be used to run this query.","error_type":"SCHEMA_DOES_NOT_EXIST_ERROR","category":"Query","description":"Schema not found","issue_codes":[1003,1016]},{"regex_name":"SYNTAX_ERROR_REGEX","message_template":"Please check your query for syntax errors at or near \"%(syntax_error)s\". Then, try running your query again.","error_type":"SYNTAX_ERROR","category":"Query","description":"SQL syntax error","issue_codes":[1030]}]},"engine":"bigquery","engine_name":"Google BigQuery","engine_aliases":[],"default_driver":"bigquery","supports_file_upload":true,"supports_dynamic_schema":false,"supports_catalog":true,"supports_dynamic_catalog":true};
export const databaseInfo = {"engine":"google_bigquery","engine_name":"Google BigQuery","module":"bigquery","documentation":{"description":"Google BigQuery is a serverless, highly scalable data warehouse.","logo":"google-big-query.svg","homepage_url":"https://cloud.google.com/bigquery/","categories":["CLOUD_GCP","ANALYTICAL_DATABASES","PROPRIETARY"],"pypi_packages":["sqlalchemy-bigquery"],"connection_string":"bigquery://{project_id}","install_instructions":"echo \"sqlalchemy-bigquery\" >> ./docker/requirements-local.txt","authentication_methods":[{"name":"Service Account JSON","description":"Upload service account credentials JSON or paste in Secure Extra","secure_extra":{"credentials_info":{"type":"service_account","project_id":"...","private_key_id":"...","private_key":"...","client_email":"...","client_id":"...","auth_uri":"...","token_uri":"..."}}}],"notes":"Create a Service Account via GCP console with access to BigQuery datasets. For CSV/Excel uploads, also install pandas_gbq.","warnings":["Google BigQuery Python SDK is not compatible with gevent. Use a worker type other than gevent when deploying with gunicorn."],"docs_url":"https://github.com/googleapis/python-bigquery-sqlalchemy","custom_errors":[{"regex_name":"CONNECTION_DATABASE_PERMISSIONS_REGEX","message_template":"Unable to connect. Verify that the following roles are set on the service account: \"BigQuery Data Viewer\", \"BigQuery Metadata Viewer\", \"BigQuery Job User\" and the following permissions are set \"bigquery.readsessions.create\", \"bigquery.readsessions.getData\"","error_type":"CONNECTION_DATABASE_PERMISSIONS_ERROR","category":"Permissions","description":"Insufficient permissions","issue_codes":[1017]},{"regex_name":"TABLE_DOES_NOT_EXIST_REGEX","message_template":"The table \"%(table)s\" does not exist. 
A valid table must be used to run this query.","error_type":"TABLE_DOES_NOT_EXIST_ERROR","category":"Query","description":"Table not found","issue_codes":[1003,1005]},{"regex_name":"COLUMN_DOES_NOT_EXIST_REGEX","message_template":"We can't seem to resolve column \"%(column)s\" at line %(location)s.","error_type":"COLUMN_DOES_NOT_EXIST_ERROR","category":"Query","description":"Column not found","issue_codes":[1003,1004]},{"regex_name":"SCHEMA_DOES_NOT_EXIST_REGEX","message_template":"The schema \"%(schema)s\" does not exist. A valid schema must be used to run this query.","error_type":"SCHEMA_DOES_NOT_EXIST_ERROR","category":"Query","description":"Schema not found","issue_codes":[1003,1016]},{"regex_name":"SYNTAX_ERROR_REGEX","message_template":"Please check your query for syntax errors at or near \"%(syntax_error)s\". Then, try running your query again.","error_type":"SYNTAX_ERROR","category":"Query","description":"SQL syntax error","issue_codes":[1030]}]},"time_grains":{},"score":0,"max_score":0,"joins":true,"subqueries":true,"supports_dynamic_schema":false,"supports_catalog":false,"supports_dynamic_catalog":false,"ssh_tunneling":false,"query_cancelation":false,"supports_file_upload":false,"user_impersonation":false,"query_cost_estimation":false,"sql_validation":false};
<DatabasePage name="Google BigQuery" database={databaseInfo} />