mirror of
https://github.com/apache/superset.git
synced 2026-04-09 19:35:21 +00:00
* fix(elasticsearch): cast does not take effect for time zone settings * test(elasticsearch): add test * fix(test): fix typo * docs(elasticsearch): add annotation * docs(elasticsearch): add time_zone desc * docs(elasticsearch): fix typo * refactor(db_engine): change convert_dttm signature * fix(test): fix test * fix(es): add try catch * fix(test): fix caplog * fix(test): fix typo
279 lines
9.3 KiB
Python
# Licensed to the Apache Software Foundation (ASF) under one
|
|
# or more contributor license agreements. See the NOTICE file
|
|
# distributed with this work for additional information
|
|
# regarding copyright ownership. The ASF licenses this file
|
|
# to you under the Apache License, Version 2.0 (the
|
|
# "License"); you may not use this file except in compliance
|
|
# with the License. You may obtain a copy of the License at
|
|
#
|
|
# http://www.apache.org/licenses/LICENSE-2.0
|
|
#
|
|
# Unless required by applicable law or agreed to in writing,
|
|
# software distributed under the License is distributed on an
|
|
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
|
# KIND, either express or implied. See the License for the
|
|
# specific language governing permissions and limitations
|
|
# under the License.
|
|
import json
|
|
import re
|
|
from datetime import datetime
|
|
from typing import Any, Dict, List, Optional, Pattern, Tuple, TYPE_CHECKING
|
|
from urllib import parse
|
|
|
|
from apispec import APISpec
|
|
from apispec.ext.marshmallow import MarshmallowPlugin
|
|
from flask_babel import gettext as __
|
|
from marshmallow import fields, Schema
|
|
from sqlalchemy.engine.url import make_url, URL
|
|
from typing_extensions import TypedDict
|
|
|
|
from superset.db_engine_specs.postgres import PostgresBaseEngineSpec
|
|
from superset.errors import ErrorLevel, SupersetError, SupersetErrorType
|
|
from superset.models.sql_lab import Query
|
|
from superset.utils import core as utils
|
|
|
|
if TYPE_CHECKING:
|
|
from superset.models.core import Database
|
|
|
|
# Regular expressions to catch custom errors

# Matches Snowflake's "object does not exist or not authorized" error and
# captures the offending object name in the ``object`` group so it can be
# interpolated into a friendlier Superset error message.
OBJECT_DOES_NOT_EXIST_REGEX = re.compile(
    r"Object (?P<object>.*?) does not exist or not authorized."
)

# Matches Snowflake syntax errors, capturing the line, position, and the
# unexpected token (``syntax_error`` group) reported by the database.
SYNTAX_ERROR_REGEX = re.compile(
    "syntax error line (?P<line>.+?) at position (?P<position>.+?) "
    "unexpected '(?P<syntax_error>.+?)'."
)
|
|
|
|
|
|
class SnowflakeParametersSchema(Schema):
    """
    Marshmallow schema validating the individual connection parameters
    used to build a Snowflake SQLAlchemy URI.

    All fields are required; see ``SnowflakeEngineSpec.validate_parameters``
    for the corresponding runtime check.
    """

    username = fields.Str(required=True)
    password = fields.Str(required=True)
    # Snowflake account identifier; used as the URI host component.
    account = fields.Str(required=True)
    database = fields.Str(required=True)
    role = fields.Str(required=True)
    warehouse = fields.Str(required=True)
|
|
|
|
|
|
class SnowflakeParametersType(TypedDict):
    """
    Typed dict describing the connection parameters accepted by
    ``SnowflakeEngineSpec.build_sqlalchemy_uri`` and returned by
    ``get_parameters_from_uri``. Mirrors ``SnowflakeParametersSchema``.
    """

    username: str
    password: str
    # Snowflake account identifier (becomes the URI host).
    account: str
    database: str
    role: str
    warehouse: str
|
|
|
|
|
|
class SnowflakeEngineSpec(PostgresBaseEngineSpec):
    """
    Engine spec for Snowflake.

    Inherits the Postgres-family behavior and overrides the pieces that
    differ on Snowflake: time-grain SQL, temporal literals, URI handling
    (Snowflake encodes the schema inside the ``database`` URI component as
    ``database/schema``), query cancellation, and dynamic-form parameters.
    """

    engine = "snowflake"
    engine_name = "Snowflake"
    force_column_alias_quotes = True
    max_column_name_length = 256

    parameters_schema = SnowflakeParametersSchema()
    default_driver = "snowflake"
    sqlalchemy_uri_placeholder = "snowflake://"

    _time_grain_expressions = {
        None: "{col}",
        "PT1S": "DATE_TRUNC('SECOND', {col})",
        "PT1M": "DATE_TRUNC('MINUTE', {col})",
        # For N-minute grains, snap the minute-of-hour down to the nearest
        # multiple of N and add it back onto the truncated hour.
        "PT5M": (
            "DATEADD(MINUTE, FLOOR(DATE_PART(MINUTE, {col}) / 5) * 5, "
            "DATE_TRUNC('HOUR', {col}))"
        ),
        "PT10M": (
            "DATEADD(MINUTE, FLOOR(DATE_PART(MINUTE, {col}) / 10) * 10, "
            "DATE_TRUNC('HOUR', {col}))"
        ),
        "PT15M": (
            "DATEADD(MINUTE, FLOOR(DATE_PART(MINUTE, {col}) / 15) * 15, "
            "DATE_TRUNC('HOUR', {col}))"
        ),
        "PT30M": (
            "DATEADD(MINUTE, FLOOR(DATE_PART(MINUTE, {col}) / 30) * 30, "
            "DATE_TRUNC('HOUR', {col}))"
        ),
        "PT1H": "DATE_TRUNC('HOUR', {col})",
        "P1D": "DATE_TRUNC('DAY', {col})",
        "P1W": "DATE_TRUNC('WEEK', {col})",
        "P1M": "DATE_TRUNC('MONTH', {col})",
        "P3M": "DATE_TRUNC('QUARTER', {col})",
        "P1Y": "DATE_TRUNC('YEAR', {col})",
    }

    # Maps compiled error-message patterns to (message, error type, extra)
    # used to surface friendlier errors in the UI.
    custom_errors: Dict[Pattern[str], Tuple[str, SupersetErrorType, Dict[str, Any]]] = {
        OBJECT_DOES_NOT_EXIST_REGEX: (
            __("%(object)s does not exist in this database."),
            SupersetErrorType.OBJECT_DOES_NOT_EXIST_ERROR,
            {},
        ),
        SYNTAX_ERROR_REGEX: (
            __(
                "Please check your query for syntax errors at or "
                'near "%(syntax_error)s". Then, try running your query again.'
            ),
            SupersetErrorType.SYNTAX_ERROR,
            {},
        ),
    }

    @classmethod
    def adjust_database_uri(
        cls, uri: URL, selected_schema: Optional[str] = None
    ) -> None:
        """
        Mutate the SQLAlchemy URI in place to point at the selected schema.

        Snowflake encodes the schema in the ``database`` URI component as
        ``database/schema``, so any previously selected schema is stripped
        before the new one is appended.

        :param uri: SQLAlchemy URL object to mutate
        :param selected_schema: schema to switch to; no-op if falsy
        """
        # Guard against a URI with no database component; the original code
        # raised TypeError on `"/" in None`.
        database = uri.database or ""
        if "/" in database:
            # Drop any schema already encoded in the database component.
            database = database.split("/", 1)[0]
        if selected_schema:
            # Percent-encode so special characters survive the URL round-trip.
            selected_schema = parse.quote(selected_schema, safe="")
            uri.database = database + "/" + selected_schema

    @classmethod
    def epoch_to_dttm(cls) -> str:
        """Return SQL converting epoch seconds in ``{col}`` to a timestamp."""
        return "DATEADD(S, {col}, '1970-01-01')"

    @classmethod
    def epoch_ms_to_dttm(cls) -> str:
        """Return SQL converting epoch milliseconds in ``{col}`` to a timestamp."""
        return "DATEADD(MS, {col}, '1970-01-01')"

    @classmethod
    def convert_dttm(
        cls, target_type: str, dttm: datetime, db_extra: Optional[Dict[str, Any]] = None
    ) -> Optional[str]:
        """
        Convert a Python datetime to a Snowflake SQL literal of ``target_type``.

        :param target_type: target column type name (case-insensitive)
        :param dttm: datetime value to convert
        :param db_extra: per-database extra config (unused here; part of the
            shared ``convert_dttm`` signature)
        :return: SQL expression, or None if the type is not handled
        """
        tt = target_type.upper()
        if tt == utils.TemporalType.DATE:
            return f"TO_DATE('{dttm.date().isoformat()}')"
        if tt == utils.TemporalType.DATETIME:
            return f"""CAST('{dttm.isoformat(timespec="microseconds")}' AS DATETIME)"""
        if tt == utils.TemporalType.TIMESTAMP:
            return f"""TO_TIMESTAMP('{dttm.isoformat(timespec="microseconds")}')"""
        return None

    @staticmethod
    def mutate_db_for_connection_test(database: "Database") -> None:
        """
        By default, snowflake doesn't validate if the user/role has access to
        the chosen database.

        Enabling ``validate_default_parameters`` in ``connect_args`` makes the
        "test connection" check meaningful.

        :param database: instance to be mutated
        """
        extra = json.loads(database.extra or "{}")
        engine_params = extra.get("engine_params", {})
        connect_args = engine_params.get("connect_args", {})
        connect_args["validate_default_parameters"] = True
        engine_params["connect_args"] = connect_args
        extra["engine_params"] = engine_params
        database.extra = json.dumps(extra)

    @classmethod
    def get_cancel_query_id(cls, cursor: Any, query: Query) -> Optional[str]:
        """
        Get Snowflake session ID that will be used to cancel all other running
        queries in the same session.

        :param cursor: Cursor instance in which the query will be executed
        :param query: Query instance
        :return: Snowflake Session ID
        """
        cursor.execute("SELECT CURRENT_SESSION()")
        row = cursor.fetchone()
        return row[0]

    @classmethod
    def cancel_query(cls, cursor: Any, query: Query, cancel_query_id: str) -> bool:
        """
        Cancel query in the underlying database.

        :param cursor: New cursor instance to the db of the query
        :param query: Query instance
        :param cancel_query_id: Snowflake Session ID
        :return: True if query cancelled successfully, False otherwise
        """
        try:
            # cancel_query_id is a session ID obtained from CURRENT_SESSION(),
            # not user input, so direct interpolation is safe here.
            cursor.execute(f"SELECT SYSTEM$CANCEL_ALL_QUERIES({cancel_query_id})")
        except Exception:  # pylint: disable=broad-except
            # Best-effort cancellation: any failure is reported as False
            # rather than propagated to the caller.
            return False

        return True

    @classmethod
    def build_sqlalchemy_uri(
        cls,
        parameters: SnowflakeParametersType,
        encrypted_extra: Optional[  # pylint: disable=unused-argument
            Dict[str, Any]
        ] = None,
    ) -> str:
        """
        Build a Snowflake SQLAlchemy URI from individual parameters.

        :param parameters: connection parameters (account becomes the host)
        :param encrypted_extra: unused; part of the shared spec signature
        :return: SQLAlchemy URI string
        """
        return str(
            URL(
                "snowflake",
                username=parameters.get("username"),
                password=parameters.get("password"),
                host=parameters.get("account"),
                database=parameters.get("database"),
                query={
                    "role": parameters.get("role"),
                    "warehouse": parameters.get("warehouse"),
                },
            )
        )

    @classmethod
    def get_parameters_from_uri(
        cls,
        uri: str,
        encrypted_extra: Optional[  # pylint: disable=unused-argument
            Dict[str, str]
        ] = None,
    ) -> Any:
        """
        Decompose a Snowflake SQLAlchemy URI back into individual parameters.

        Inverse of ``build_sqlalchemy_uri``.

        :param uri: SQLAlchemy URI string
        :param encrypted_extra: unused; part of the shared spec signature
        :return: dict matching ``SnowflakeParametersType``
        """
        url = make_url(uri)
        query = dict(url.query.items())
        return {
            "username": url.username,
            "password": url.password,
            "account": url.host,
            "database": url.database,
            "role": query.get("role"),
            "warehouse": query.get("warehouse"),
        }

    @classmethod
    def validate_parameters(
        cls, parameters: SnowflakeParametersType
    ) -> List[SupersetError]:
        """
        Validate that all required connection parameters are present.

        :param parameters: connection parameters to validate
        :return: list of errors (empty if all required parameters are set)
        """
        errors: List[SupersetError] = []
        required = {
            "warehouse",
            "username",
            "database",
            "account",
            "role",
            "password",
        }
        # A parameter counts as present only if its value is truthy
        # (empty strings are treated as missing).
        present = {key for key in parameters if parameters.get(key)}
        missing = sorted(required - present)

        if missing:
            errors.append(
                SupersetError(
                    message=f'One or more parameters are missing: {", ".join(missing)}',
                    error_type=SupersetErrorType.CONNECTION_MISSING_PARAMETERS_ERROR,
                    level=ErrorLevel.WARNING,
                    extra={"missing": missing},
                ),
            )
        return errors

    @classmethod
    def parameters_json_schema(cls) -> Any:
        """
        Return configuration parameters as OpenAPI.
        """
        if not cls.parameters_schema:
            return None

        ma_plugin = MarshmallowPlugin()
        spec = APISpec(
            title="Database Parameters",
            version="1.0.0",
            openapi_version="3.0.0",
            plugins=[ma_plugin],
        )

        spec.components.schema(cls.__name__, schema=cls.parameters_schema)
        return spec.to_dict()["components"]["schemas"][cls.__name__]
|