Files
superset2/docs/versioned_docs/version-6.1.0/databases/supported/google-bigquery.mdx
Superset Dev 5a5a0e70fc fix(docs): regenerate 6.1.0 snapshots with depth-aware import rewriter
The previous import-path fixer only matched two-level relative paths
(`../../src/` and `../../data/`), missing files at deeper nesting in the
section tree. After the 6.1.0 cut for developer_docs, ~50 component MDX
files at depth 3 still referenced `../../../src/components/StorybookWrapper`
(should have been `../../../../src/...`), and the components Button page
referenced `../../../superset-frontend/...` (should have been
`../../../../superset-frontend/...`). The Docusaurus production build
failed with module-not-found errors as a result.

Replace the pattern-specific regex with a depth-aware walker that
- counts the file's nesting depth within the snapshot,
- bumps any relative import whose `../` count exceeds that depth (i.e.
  the import escapes the section root and so must compensate for the
  extra `version-X.X.X/` directory the snapshot lives under),
- skips fenced code blocks so documented sample imports (e.g. Playwright
  page-object examples in developer_docs/testing/e2e-testing.md) are not
  rewritten.

Re-cut all four sections under the new fixer. yarn build now passes
locally.
2026-05-04 08:53:25 -07:00

32 lines
4.1 KiB
Plaintext

---
title: Google BigQuery
sidebar_label: Google BigQuery
description: "Google BigQuery is a serverless, highly scalable data warehouse."
hide_title: true
---
{/*
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*/}
import { DatabasePage } from '@site/src/components/databases';
{/* Generated database metadata consumed by the <DatabasePage /> template below; regenerated at snapshot time — avoid hand-editing values. NOTE(review): the TABLE_DOES_NOT_EXIST message_template previously contained a raw (unescaped) line break inside the double-quoted string, which is invalid JS/ESM and breaks MDX parsing; the message has been rejoined onto a single line. */}
export const databaseInfo = {"engine":"google_bigquery","engine_name":"Google BigQuery","module":"bigquery","documentation":{"description":"Google BigQuery is a serverless, highly scalable data warehouse.","logo":"google-big-query.svg","homepage_url":"https://cloud.google.com/bigquery/","categories":["CLOUD_GCP","ANALYTICAL_DATABASES","PROPRIETARY"],"pypi_packages":["sqlalchemy-bigquery"],"connection_string":"bigquery://{project_id}","install_instructions":"echo \"sqlalchemy-bigquery\" >> ./docker/requirements-local.txt","authentication_methods":[{"name":"Service Account JSON","description":"Upload service account credentials JSON or paste in Secure Extra","secure_extra":{"credentials_info":{"type":"service_account","project_id":"...","private_key_id":"...","private_key":"...","client_email":"...","client_id":"...","auth_uri":"...","token_uri":"..."}}}],"notes":"Create a Service Account via GCP console with access to BigQuery datasets. For CSV/Excel uploads, also install pandas_gbq.","warnings":["Google BigQuery Python SDK is not compatible with gevent. Use a worker type other than gevent when deploying with gunicorn."],"docs_url":"https://github.com/googleapis/python-bigquery-sqlalchemy","custom_errors":[{"regex_name":"CONNECTION_DATABASE_PERMISSIONS_REGEX","message_template":"Unable to connect. Verify that the following roles are set on the service account: \"BigQuery Data Viewer\", \"BigQuery Metadata Viewer\", \"BigQuery Job User\" and the following permissions are set \"bigquery.readsessions.create\", \"bigquery.readsessions.getData\"","error_type":"CONNECTION_DATABASE_PERMISSIONS_ERROR","category":"Permissions","description":"Insufficient permissions","issue_codes":[1017]},{"regex_name":"TABLE_DOES_NOT_EXIST_REGEX","message_template":"The table \"%(table)s\" does not exist. A valid table must be used to run this query.","error_type":"TABLE_DOES_NOT_EXIST_ERROR","category":"Query","description":"Table not found","issue_codes":[1003,1005]},{"regex_name":"COLUMN_DOES_NOT_EXIST_REGEX","message_template":"We can't seem to resolve column \"%(column)s\" at line %(location)s.","error_type":"COLUMN_DOES_NOT_EXIST_ERROR","category":"Query","description":"Column not found","issue_codes":[1003,1004]},{"regex_name":"SCHEMA_DOES_NOT_EXIST_REGEX","message_template":"The schema \"%(schema)s\" does not exist. A valid schema must be used to run this query.","error_type":"SCHEMA_DOES_NOT_EXIST_ERROR","category":"Query","description":"Schema not found","issue_codes":[1003,1016]},{"regex_name":"SYNTAX_ERROR_REGEX","message_template":"Please check your query for syntax errors at or near \"%(syntax_error)s\". Then, try running your query again.","error_type":"SYNTAX_ERROR","category":"Query","description":"SQL syntax error","issue_codes":[1030]}]},"time_grains":{},"score":0,"max_score":0,"joins":true,"subqueries":true,"supports_dynamic_schema":false,"supports_catalog":false,"supports_dynamic_catalog":false,"ssh_tunneling":false,"query_cancelation":false,"supports_file_upload":false,"user_impersonation":false,"query_cost_estimation":false,"sql_validation":false};
<DatabasePage name="Google BigQuery" database={databaseInfo} />