---
title: Databricks
sidebar_label: Databricks
description: "Databricks is a unified analytics platform built on Apache Spark, providing data engineering, data science, and machine learning capabilities in the cloud. Use the Python Connector for SQL warehouses and clusters."
hide_title: true
---

{/*
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*/}

import { DatabasePage } from '@site/src/components/databases';
// Engine metadata rendered by the <DatabasePage/> component below.
// Shape mirrors Superset's database-connector spec: top-level engine
// identifiers, a `documentation` payload (description, install info,
// connection-string templates, per-driver notes), and feature flags.
// NOTE: the previous single-line literal contained a raw newline inside
// the ODBC driver's `notes` string, which is a JS syntax error; the
// string is rejoined here.
export const databaseInfo = {
  engine: "databricks",
  engine_name: "Databricks",
  module: "databricks",
  documentation: {
    description:
      "Databricks is a unified analytics platform built on Apache Spark, providing data engineering, data science, and machine learning capabilities in the cloud. Use the Python Connector for SQL warehouses and clusters.",
    logo: "databricks.png",
    homepage_url: "https://www.databricks.com/",
    categories: ["CLOUD_DATA_WAREHOUSES", "ANALYTICAL_DATABASES", "HOSTED_OPEN_SOURCE"],
    pypi_packages: ["apache-superset[databricks]"],
    install_instructions: "pip install apache-superset[databricks]",
    // {placeholders} are substituted by the docs UI, not by JS.
    connection_string:
      "databricks://token:{access_token}@{host}:{port}?http_path={http_path}&catalog={catalog}&schema={schema}",
    parameters: {
      access_token: "Personal access token from Settings > User Settings",
      host: "Server hostname from cluster JDBC/ODBC settings",
      port: "Port (default 443)",
      http_path: "HTTP path from cluster JDBC/ODBC settings",
    },
    drivers: [
      {
        name: "Databricks Python Connector (Recommended)",
        pypi_package: "databricks-sql-connector",
        connection_string:
          "databricks://token:{access_token}@{host}:{port}?http_path={http_path}&catalog={catalog}&schema={schema}",
        is_recommended: true,
        notes: "Official Databricks connector. Best for SQL warehouses and clusters.",
      },
      {
        name: "Hive Connector (Interactive Clusters)",
        pypi_package: "databricks-dbapi[sqlalchemy]",
        connection_string: "databricks+pyhive://token:{access_token}@{host}:{port}/{database}",
        is_recommended: false,
        notes: "For Interactive Clusters. Requires http_path in engine parameters.",
      },
      {
        name: "ODBC (SQL Endpoints)",
        pypi_package: "pyodbc",
        connection_string: "databricks+pyodbc://token:{access_token}@{host}:{port}/{database}",
        is_recommended: false,
        // Rejoined: this string was split by a literal newline in the source.
        notes: "Requires ODBC driver. For serverless SQL warehouses.",
      },
      {
        name: "databricks-dbapi (Legacy)",
        pypi_package: "databricks-dbapi[sqlalchemy]",
        connection_string: "databricks+connector://token:{access_token}@{host}:{port}/{database}",
        is_recommended: false,
        notes: "Legacy connector. Use Python Connector for new deployments.",
      },
    ],
  },
  time_grains: {},
  score: 0,
  max_score: 0,
  joins: true,
  subqueries: true,
  supports_dynamic_schema: false,
  supports_catalog: false,
  supports_dynamic_catalog: false,
  ssh_tunneling: false,
  query_cancelation: false,
  supports_file_upload: false,
  user_impersonation: false,
  query_cost_estimation: false,
  sql_validation: false,
};
<DatabasePage name="Databricks" database={databaseInfo} />