mirror of
https://github.com/apache/superset.git
synced 2026-05-09 09:55:19 +00:00
fix(docs): regenerate 6.1.0 snapshots with depth-aware import rewriter
The previous import-path fixer only matched two-level relative paths (`../../src/` and `../../data/`), missing files at deeper nesting in the section tree. After the 6.1.0 cut for developer_docs, ~50 component MDX files at depth 3 still referenced `../../../src/components/StorybookWrapper` (should have been `../../../../src/...`), and the components Button page referenced `../../../superset-frontend/...` (should have been `../../../../superset-frontend/...`). The Docusaurus production build failed with module-not-found errors as a result. Replace the pattern-specific regex with a depth-aware walker that:
- counts the file's nesting depth within the snapshot,
- bumps any relative import whose `../` count exceeds that depth (i.e. the import escapes the section root and so must compensate for the extra `version-X.X.X/` directory the snapshot lives under),
- skips fenced code blocks so documented sample imports (e.g. Playwright page-object examples in developer_docs/testing/e2e-testing.md) are not rewritten.

Re-cut all four sections under the new fixer. `yarn build` now passes locally.
This commit is contained in:
@@ -26,6 +26,6 @@ under the License.
|
||||
|
||||
import { DatabasePage } from '@site/src/components/databases';
|
||||
|
||||
export const databaseInfo = {"time_grains":{"SECOND":true,"FIVE_SECONDS":false,"THIRTY_SECONDS":false,"MINUTE":true,"FIVE_MINUTES":false,"TEN_MINUTES":false,"FIFTEEN_MINUTES":false,"THIRTY_MINUTES":false,"HALF_HOUR":false,"HOUR":true,"SIX_HOURS":false,"DAY":true,"WEEK":true,"WEEK_STARTING_SUNDAY":true,"WEEK_STARTING_MONDAY":false,"WEEK_ENDING_SATURDAY":true,"WEEK_ENDING_SUNDAY":false,"MONTH":true,"QUARTER":true,"QUARTER_YEAR":false,"YEAR":true},"module":"superset.db_engine_specs.databricks","limit_method":1,"limit_clause":true,"joins":true,"subqueries":true,"alias_in_select":true,"alias_in_orderby":true,"time_groupby_inline":false,"alias_to_source_column":false,"order_by_not_in_select":false,"expressions_in_orderby":false,"cte_in_subquery":true,"max_column_name":767,"sql_comments":true,"escaped_colons":true,"masked_encrypted_extra":false,"column_type_mapping":true,"function_names":true,"user_impersonation":true,"file_upload":true,"get_extra_table_metadata":true,"dbapi_exception_mapping":false,"custom_errors":false,"dynamic_schema":true,"catalog":true,"dynamic_catalog":true,"ssh_tunneling":true,"query_cancelation":true,"get_metrics":false,"where_latest_partition":true,"expand_data":true,"query_cost_estimation":true,"sql_validation":false,"score":140,"max_score":201,"documentation":{"description":"Apache Hive is a data warehouse infrastructure built on Hadoop.","logo":"apache-hive.svg","homepage_url":"https://hive.apache.org/","categories":["Apache Projects","Query Engines","Open Source"],"pypi_packages":["pyhive"],"connection_string":"hive://hive@{hostname}:{port}/{database}","default_port":10000,"category":"Cloud Data Warehouses"},"engine":"databricks","engine_name":"Databricks Interactive Cluster","engine_aliases":[],"default_driver":"pyhive","supports_file_upload":true,"supports_dynamic_schema":true,"supports_catalog":true,"supports_dynamic_catalog":true};
|
||||
export const databaseInfo = {"engine":"databricks_interactive_cluster","engine_name":"Databricks Interactive Cluster","module":"databricks","documentation":{"description":"Apache Hive is a data warehouse infrastructure built on Hadoop.","logo":"apache-hive.svg","homepage_url":"https://hive.apache.org/","pypi_packages":["pyhive","pyhive"],"install_instructions":"pip install \"apache-superset[presto]\"","connection_string":"hive://hive@{hostname}:{port}/{database}","default_port":10000,"parameters":{"hostname":"Presto coordinator hostname","port":"Presto coordinator port (default 8080)","database":"Catalog name"},"drivers":[{"name":"PyHive","pypi_package":"pyhive","connection_string":"presto://{hostname}:{port}/{database}","is_recommended":true}],"categories":["APACHE_PROJECTS","QUERY_ENGINES","OPEN_SOURCE"]},"time_grains":{},"score":0,"max_score":0,"joins":true,"subqueries":true,"supports_dynamic_schema":false,"supports_catalog":false,"supports_dynamic_catalog":false,"ssh_tunneling":false,"query_cancelation":false,"supports_file_upload":false,"user_impersonation":false,"query_cost_estimation":false,"sql_validation":false};
|
||||
|
||||
<DatabasePage name="Databricks Interactive Cluster" database={databaseInfo} />
|
||||
|
||||
Reference in New Issue
Block a user