mirror of
https://github.com/apache/superset.git
synced 2026-05-11 10:55:43 +00:00
fix(docs): regenerate 6.1.0 snapshots with depth-aware import rewriter
The previous import-path fixer only matched two-level relative paths (`../../src/` and `../../data/`), missing files at deeper nesting in the section tree. After the 6.1.0 cut for developer_docs, ~50 component MDX files at depth 3 still referenced `../../../src/components/StorybookWrapper` (should have been `../../../../src/...`), and the components Button page referenced `../../../superset-frontend/...` (should have been `../../../../superset-frontend/...`). The Docusaurus production build failed with module-not-found errors as a result.

Replace the pattern-specific regex with a depth-aware walker that:
- counts the file's nesting depth within the snapshot,
- bumps any relative import whose `../` count exceeds that depth (i.e. the import escapes the section root and so must compensate for the extra `version-X.X.X/` directory the snapshot lives under),
- skips fenced code blocks so documented sample imports (e.g. Playwright page-object examples in developer_docs/testing/e2e-testing.md) are not rewritten.

Re-cut all four sections under the new fixer. `yarn build` now passes locally.
This commit is contained in:
@@ -26,6 +26,6 @@ under the License.
|
||||
|
||||
import { DatabasePage } from '@site/src/components/databases';
|
||||
|
||||
// Capability/metadata snapshot for the Apache Druid connector, consumed by the
// <DatabasePage> component below. Fix: the `custom_certificate` string literal was
// split across a raw line break (a syntax error in a JS string); rejoined onto one line.
export const databaseInfo = {"time_grains":{"SECOND":true,"FIVE_SECONDS":true,"THIRTY_SECONDS":true,"MINUTE":true,"FIVE_MINUTES":true,"TEN_MINUTES":true,"FIFTEEN_MINUTES":true,"THIRTY_MINUTES":true,"HALF_HOUR":false,"HOUR":true,"SIX_HOURS":true,"DAY":true,"WEEK":true,"WEEK_STARTING_SUNDAY":true,"WEEK_STARTING_MONDAY":false,"WEEK_ENDING_SATURDAY":true,"WEEK_ENDING_SUNDAY":false,"MONTH":true,"QUARTER":true,"QUARTER_YEAR":false,"YEAR":true},"module":"superset.db_engine_specs.druid","limit_method":1,"limit_clause":true,"joins":false,"subqueries":true,"alias_in_select":true,"alias_in_orderby":true,"time_groupby_inline":false,"alias_to_source_column":false,"order_by_not_in_select":true,"expressions_in_orderby":false,"cte_in_subquery":true,"max_column_name":null,"sql_comments":true,"escaped_colons":true,"masked_encrypted_extra":false,"column_type_mapping":false,"function_names":false,"user_impersonation":false,"file_upload":true,"get_extra_table_metadata":false,"dbapi_exception_mapping":true,"custom_errors":false,"dynamic_schema":false,"catalog":false,"dynamic_catalog":false,"ssh_tunneling":true,"query_cancelation":false,"get_metrics":false,"where_latest_partition":false,"expand_data":false,"query_cost_estimation":false,"sql_validation":false,"score":47,"max_score":201,"documentation":{"description":"Apache Druid is a high performance real-time analytics database.","logo":"druid.png","homepage_url":"https://druid.apache.org/","categories":["Apache Projects","Time Series Databases","Open Source"],"pypi_packages":["pydruid"],"connection_string":"druid://{username}:{password}@{host}:{port}/druid/v2/sql","default_port":9088,"parameters":{"username":"Database username","password":"Database password","host":"IP address or URL of the host","port":"Default 9088"},"ssl_configuration":{"custom_certificate":"Add certificate in Root Certificate field. pydruid will automatically use https.","disable_ssl_verification":{"engine_params":{"connect_args":{"scheme":"https","ssl_verify_cert":false}}}},"advanced_features":{"aggregations":"Define common aggregations in datasource edit view under List Druid Column tab.","post_aggregations":"Create metrics with postagg as Metric Type and provide valid JSON post-aggregation definition."},"notes":"A native Druid connector ships with Superset (behind DRUID_IS_ACTIVE flag) but SQLAlchemy connector via pydruid is preferred.","compatible_databases":[{"name":"Imply","description":"Imply is a fully-managed cloud platform and enterprise distribution built on Apache Druid. It provides real-time analytics with enterprise security and support.","logo":"imply.png","homepage_url":"https://imply.io/","categories":["Time Series Databases","Cloud Data Warehouses","Hosted Open Source"],"pypi_packages":["pydruid"],"connection_string":"druid://{username}:{password}@{host}/druid/v2/sql","docs_url":"https://docs.imply.io/"}],"category":"Apache Projects"},"engine":"druid","engine_name":"Apache Druid","engine_aliases":[],"default_driver":null,"supports_file_upload":true,"supports_dynamic_schema":false,"supports_catalog":false,"supports_dynamic_catalog":false};
|
||||
// Restructured capability/metadata snapshot for the Apache Druid connector (new schema:
// enum-style categories, documentation nested first, flattened capability flags).
// Fix: the Imply `description` string literal was split across a raw line break
// (a syntax error in a JS string); rejoined onto one line.
export const databaseInfo = {"engine":"apache_druid","engine_name":"Apache Druid","module":"druid","documentation":{"description":"Apache Druid is a high performance real-time analytics database.","logo":"druid.png","homepage_url":"https://druid.apache.org/","categories":["APACHE_PROJECTS","TIME_SERIES","OPEN_SOURCE"],"pypi_packages":["pydruid"],"connection_string":"druid://{username}:{password}@{host}:{port}/druid/v2/sql","default_port":9088,"parameters":{"username":"Database username","password":"Database password","host":"IP address or URL of the host","port":"Default 9088"},"ssl_configuration":{"custom_certificate":"Add certificate in Root Certificate field. pydruid will automatically use https.","disable_ssl_verification":{"engine_params":{"connect_args":{"scheme":"https","ssl_verify_cert":false}}}},"advanced_features":{"aggregations":"Define common aggregations in datasource edit view under List Druid Column tab.","post_aggregations":"Create metrics with postagg as Metric Type and provide valid JSON post-aggregation definition."},"notes":"A native Druid connector ships with Superset (behind DRUID_IS_ACTIVE flag) but SQLAlchemy connector via pydruid is preferred.","compatible_databases":[{"name":"Imply","description":"Imply is a fully-managed cloud platform and enterprise distribution built on Apache Druid. It provides real-time analytics with enterprise security and support.","logo":"imply.png","homepage_url":"https://imply.io/","categories":["TIME_SERIES","CLOUD_DATA_WAREHOUSES","HOSTED_OPEN_SOURCE"],"pypi_packages":["pydruid"],"connection_string":"druid://{username}:{password}@{host}/druid/v2/sql","docs_url":"https://docs.imply.io/"}]},"time_grains":{"SECOND":true,"FIVE_SECONDS":true,"THIRTY_SECONDS":true,"MINUTE":true,"FIVE_MINUTES":true,"TEN_MINUTES":true,"FIFTEEN_MINUTES":true,"THIRTY_MINUTES":true,"HALF_HOUR":false,"HOUR":true,"SIX_HOURS":true,"DAY":true,"WEEK":true,"WEEK_STARTING_SUNDAY":true,"WEEK_STARTING_MONDAY":false,"WEEK_ENDING_SATURDAY":true,"WEEK_ENDING_SUNDAY":false,"MONTH":true,"QUARTER":true,"QUARTER_YEAR":false,"YEAR":true},"score":47,"max_score":201,"joins":false,"subqueries":true,"supports_dynamic_schema":false,"supports_catalog":false,"supports_dynamic_catalog":false,"ssh_tunneling":true,"query_cancelation":false,"supports_file_upload":true,"user_impersonation":false,"query_cost_estimation":false,"sql_validation":false};
|
||||
|
||||
<DatabasePage name="Apache Druid" database={databaseInfo} />
|
||||
|
||||
Reference in New Issue
Block a user