mirror of
https://github.com/apache/superset.git
synced 2026-04-19 08:04:53 +00:00
feat(docs): add auto-generated troubleshooting section to database pages (#37345)
Co-authored-by: Claude Opus 4.5 <noreply@anthropic.com>
This commit is contained in:
296
docs/scripts/extract_custom_errors.py
Normal file
296
docs/scripts/extract_custom_errors.py
Normal file
@@ -0,0 +1,296 @@
|
||||
# Licensed to the Apache Software Foundation (ASF) under one
|
||||
# or more contributor license agreements. See the NOTICE file
|
||||
# distributed with this work for additional information
|
||||
# regarding copyright ownership. The ASF licenses this file
|
||||
# to you under the Apache License, Version 2.0 (the
|
||||
# "License"); you may not use this file except in compliance
|
||||
# with the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing,
|
||||
# software distributed under the License is distributed on an
|
||||
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
# KIND, either express or implied. See the License for the
|
||||
# specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
"""
|
||||
Extract custom_errors from database engine specs for documentation.
|
||||
|
||||
This script parses engine spec files to extract error handling information
|
||||
that can be displayed on database documentation pages.
|
||||
|
||||
Usage: python scripts/extract_custom_errors.py
|
||||
Output: JSON mapping of engine spec module names to their custom errors
|
||||
"""
|
||||
|
||||
import ast
|
||||
import json # noqa: TID251 - standalone docs script, not part of superset
|
||||
import sys
|
||||
from pathlib import Path
|
||||
from typing import Any
|
||||
|
||||
# Map SupersetErrorType values to human-readable categories and issue codes
|
||||
# Keys are SupersetErrorType enum member names as they appear in engine-spec
# ASTs; values supply the display category, a short human-readable label, and
# the related Superset issue codes (linked from the docs troubleshooting UI).
ERROR_TYPE_INFO = {
    # --- Authentication errors ---
    "CONNECTION_INVALID_USERNAME_ERROR": {
        "category": "Authentication",
        "description": "Invalid username",
        "issue_codes": [1012],
    },
    "CONNECTION_INVALID_PASSWORD_ERROR": {
        "category": "Authentication",
        "description": "Invalid password",
        "issue_codes": [1013],
    },
    "CONNECTION_ACCESS_DENIED_ERROR": {
        "category": "Authentication",
        "description": "Access denied",
        "issue_codes": [1014, 1015],
    },
    # --- Connection / network errors ---
    "CONNECTION_INVALID_HOSTNAME_ERROR": {
        "category": "Connection",
        "description": "Invalid hostname",
        "issue_codes": [1007],
    },
    "CONNECTION_PORT_CLOSED_ERROR": {
        "category": "Connection",
        "description": "Port closed or refused",
        "issue_codes": [1008],
    },
    "CONNECTION_HOST_DOWN_ERROR": {
        "category": "Connection",
        "description": "Host unreachable",
        "issue_codes": [1009],
    },
    "CONNECTION_UNKNOWN_DATABASE_ERROR": {
        "category": "Connection",
        "description": "Unknown database",
        "issue_codes": [1015],
    },
    # --- Permission / configuration errors ---
    "CONNECTION_DATABASE_PERMISSIONS_ERROR": {
        "category": "Permissions",
        "description": "Insufficient permissions",
        "issue_codes": [1017],
    },
    "CONNECTION_MISSING_PARAMETERS_ERROR": {
        "category": "Configuration",
        "description": "Missing parameters",
        "issue_codes": [1018],
    },
    "CONNECTION_DATABASE_TIMEOUT": {
        "category": "Connection",
        "description": "Connection timeout",
        "issue_codes": [1001, 1009],
    },
    # --- Query-time errors ---
    "COLUMN_DOES_NOT_EXIST_ERROR": {
        "category": "Query",
        "description": "Column not found",
        "issue_codes": [1003, 1004],
    },
    "TABLE_DOES_NOT_EXIST_ERROR": {
        "category": "Query",
        "description": "Table not found",
        "issue_codes": [1003, 1005],
    },
    "SCHEMA_DOES_NOT_EXIST_ERROR": {
        "category": "Query",
        "description": "Schema not found",
        "issue_codes": [1003, 1016],
    },
    "SYNTAX_ERROR": {
        "category": "Query",
        "description": "SQL syntax error",
        "issue_codes": [1030],
    },
    "OBJECT_DOES_NOT_EXIST_ERROR": {
        "category": "Query",
        "description": "Object not found",
        "issue_codes": [1029],
    },
    # --- Catch-all ---
    "GENERIC_DB_ENGINE_ERROR": {
        "category": "General",
        "description": "Database engine error",
        "issue_codes": [1002],
    },
}
|
||||
|
||||
|
||||
def extract_string_from_call(node: ast.Call) -> str | None:
|
||||
"""Extract string from __() or _() translation calls."""
|
||||
if not node.args:
|
||||
return None
|
||||
arg = node.args[0]
|
||||
if isinstance(arg, ast.Constant) and isinstance(arg.value, str):
|
||||
return arg.value
|
||||
elif isinstance(arg, ast.JoinedStr):
|
||||
# f-string - try to reconstruct
|
||||
parts = []
|
||||
for value in arg.values:
|
||||
if isinstance(value, ast.Constant):
|
||||
parts.append(str(value.value))
|
||||
elif isinstance(value, ast.FormattedValue):
|
||||
# Just use a placeholder
|
||||
parts.append("{...}")
|
||||
return "".join(parts)
|
||||
return None
|
||||
|
||||
|
||||
def extract_custom_errors_from_file(filepath: Path) -> dict[str, list[dict[str, Any]]]:
    """
    Extract ``custom_errors`` definitions from a Python engine spec file.

    Walks the module AST looking for class-level assignments of the form
    ``custom_errors = {...}`` or ``custom_errors: SomeType = {...}`` and
    converts each dict literal into a list of error-info dicts.

    Returns a dict mapping class names to their extracted error lists.
    Read/parse failures are reported on stderr and yield whatever was
    collected so far, so one broken spec file does not abort the run.
    """
    results: dict[str, list[dict[str, Any]]] = {}

    try:
        source = filepath.read_text(encoding="utf-8")
        tree = ast.parse(source)

        for node in ast.walk(tree):
            if not isinstance(node, ast.ClassDef):
                continue

            for item in node.body:
                # Accept both annotated (``custom_errors: T = {...}``) and
                # plain (``custom_errors = {...}``) class-level assignments.
                # Previously these two cases duplicated the same extraction
                # logic; resolve the target once and share the rest.
                if isinstance(item, ast.AnnAssign) and isinstance(
                    item.target, ast.Name
                ):
                    target = item.target
                elif (
                    isinstance(item, ast.Assign)
                    and len(item.targets) == 1
                    and isinstance(item.targets[0], ast.Name)
                ):
                    target = item.targets[0]
                else:
                    continue

                # Only dict literals can be statically extracted.
                if target.id != "custom_errors" or not isinstance(
                    item.value, ast.Dict
                ):
                    continue

                errors = extract_errors_from_dict(item.value, source)
                if errors:
                    results[node.name] = errors

    except (OSError, SyntaxError, ValueError) as e:
        print(f"Error parsing {filepath}: {e}", file=sys.stderr)

    return results
|
||||
|
||||
|
||||
def extract_regex_info(key: ast.expr) -> dict[str, Any]:
    """Describe the regex used as a ``custom_errors`` dict key.

    A bare name (a module-level compiled-regex constant) yields
    ``{"regex_name": ...}``; an inline ``re.compile("...")`` call with a
    constant pattern yields ``{"regex_pattern": ...}``.  Anything else is
    unrecognized and yields an empty dict.
    """
    if isinstance(key, ast.Name):
        return {"regex_name": key.id}

    if not isinstance(key, ast.Call):
        return {}

    func = key.func
    is_compile_call = isinstance(func, ast.Attribute) and func.attr == "compile"
    if is_compile_call and key.args and isinstance(key.args[0], ast.Constant):
        return {"regex_pattern": key.args[0].value}

    return {}
|
||||
|
||||
|
||||
def extract_invalid_fields(extra_node: ast.Dict) -> list[str]:
|
||||
"""Extract invalid fields from the extra dict."""
|
||||
for k, v in zip(extra_node.keys, extra_node.values, strict=False):
|
||||
if (
|
||||
isinstance(k, ast.Constant)
|
||||
and k.value == "invalid"
|
||||
and isinstance(v, ast.List)
|
||||
):
|
||||
return [elem.value for elem in v.elts if isinstance(elem, ast.Constant)]
|
||||
return []
|
||||
|
||||
|
||||
def extract_error_tuple_info(value: ast.Tuple) -> dict[str, Any]:
    """Flatten a ``(message, error_type[, extra])`` custom-error tuple.

    Pulls out the translated message template, the ``SupersetErrorType``
    member name (enriched with category/description/issue codes when the
    type is known), and any ``"invalid"`` field list from the optional
    ``extra`` dict.  Callers guarantee at least two tuple elements.
    """
    info: dict[str, Any] = {}
    elements = value.elts

    # Element 0: the (usually translated) message template.
    message_node = elements[0]
    if isinstance(message_node, ast.Call):
        template = extract_string_from_call(message_node)
        if template:
            info["message_template"] = template
    elif isinstance(message_node, ast.Constant):
        info["message_template"] = message_node.value

    # Element 1: ``SupersetErrorType.<MEMBER>`` attribute access.
    type_node = elements[1]
    if isinstance(type_node, ast.Attribute):
        member = type_node.attr
        info["error_type"] = member
        known = ERROR_TYPE_INFO.get(member)
        if known is not None:
            info["category"] = known["category"]
            info["description"] = known["description"]
            info["issue_codes"] = known["issue_codes"]

    # Element 2 (optional): extra dict possibly naming invalid form fields.
    if len(elements) >= 3 and isinstance(elements[2], ast.Dict):
        fields = extract_invalid_fields(elements[2])
        if fields:
            info["invalid_fields"] = fields

    return info
|
||||
|
||||
|
||||
def extract_errors_from_dict(dict_node: ast.Dict, source: str) -> list[dict[str, Any]]:
    """Turn a ``custom_errors`` dict literal into a list of error-info dicts.

    Each entry combines the regex-key info with the message/type/extra tuple
    value.  Entries missing either an error type or a message template are
    dropped.  ``source`` is currently unused but kept for interface
    stability with existing callers.
    """
    extracted: list[dict[str, Any]] = []

    for regex_key, error_value in zip(
        dict_node.keys, dict_node.values, strict=False
    ):
        # ``None`` keys come from ``**`` dict unpacking — nothing to extract.
        if regex_key is None or error_value is None:
            continue

        entry = extract_regex_info(regex_key)

        if isinstance(error_value, ast.Tuple) and len(error_value.elts) >= 2:
            entry.update(extract_error_tuple_info(error_value))

        # Only keep entries complete enough to render in the docs.
        if entry.get("error_type") and entry.get("message_template"):
            extracted.append(entry)

    return extracted
|
||||
|
||||
|
||||
def main() -> None:
    """Extract custom_errors from every engine spec and print them as JSON."""
    # Resolve <repo-root>/superset/db_engine_specs relative to this script,
    # which lives two levels below the repository root (docs/scripts/).
    script_dir = Path(__file__).parent
    specs_dir = script_dir.parent.parent / "superset" / "db_engine_specs"

    if not specs_dir.exists():
        print(f"Error: Engine specs directory not found: {specs_dir}", file=sys.stderr)
        sys.exit(1)

    # Keyed by module (file stem), then by class name.  Private modules
    # (leading underscore) and modules without custom errors are skipped.
    all_errors = {
        spec_path.stem: class_errors
        for spec_path in sorted(specs_dir.glob("*.py"))
        if not spec_path.name.startswith("_")
        and (class_errors := extract_custom_errors_from_file(spec_path))
    }

    # Output as JSON on stdout for the docs build script to consume.
    print(json.dumps(all_errors, indent=2))
|
||||
|
||||
|
||||
# Entry point when run directly (e.g. from the docs build).
if __name__ == "__main__":
    main()
|
||||
@@ -675,6 +675,78 @@ function updateReadme(databases) {
|
||||
return false;
|
||||
}
|
||||
|
||||
/**
|
||||
* Extract custom_errors from engine specs for troubleshooting documentation
|
||||
* Returns a map of module names to their custom errors
|
||||
*/
|
||||
/**
 * Run extract_custom_errors.py and parse its JSON output.
 *
 * Returns a map of engine-spec module names to { className: errors[] },
 * or null when the script cannot be run or its output cannot be parsed
 * (extraction is best-effort and must not fail the docs build).
 */
function extractCustomErrors() {
  console.log('Extracting custom_errors from engine specs...');

  try {
    const extractorPath = path.join(__dirname, 'extract_custom_errors.py');
    const run = spawnSync('python3', [extractorPath], {
      cwd: ROOT_DIR,
      encoding: 'utf-8',
      timeout: 30000,
      maxBuffer: 10 * 1024 * 1024,
    });

    if (run.error) {
      throw run.error;
    }
    if (run.status !== 0) {
      throw new Error(run.stderr || 'Python script failed');
    }

    const customErrors = JSON.parse(run.stdout);

    // Count modules and total errors purely for the progress log line.
    let errorTotal = 0;
    for (const classMap of Object.values(customErrors)) {
      for (const errorList of Object.values(classMap)) {
        errorTotal += errorList.length;
      }
    }
    const moduleTotal = Object.keys(customErrors).length;
    console.log(`  Found ${errorTotal} custom errors across ${moduleTotal} modules`);
    return customErrors;
  } catch (err) {
    console.log('  Could not extract custom_errors:', err.message);
    return null;
  }
}
|
||||
|
||||
/**
|
||||
* Merge custom_errors into database documentation
|
||||
* Maps by module name since that's how both datasets are keyed
|
||||
*/
|
||||
/**
 * Attach extracted custom_errors to each matching database doc entry.
 *
 * `databases` entries carry a dotted module path (e.g.
 * superset.db_engine_specs.postgres) while `customErrors` is keyed by the
 * bare file stem (postgres), so the module name is normalized before lookup.
 * Mutates `databases` in place; a null/undefined `customErrors` is a no-op.
 */
function mergeCustomErrors(databases, customErrors) {
  if (!customErrors) return;

  let mergedCount = 0;

  for (const db of Object.values(databases)) {
    if (!db.module) continue;

    const moduleName = db.module.split('.').pop();
    const moduleErrors = customErrors[moduleName];
    if (!moduleErrors) continue;

    // Flatten every class's error list in this module into one array.
    const allErrors = Object.values(moduleErrors).flat();

    if (allErrors.length > 0) {
      db.documentation = db.documentation || {};
      db.documentation.custom_errors = allErrors;
      mergedCount += 1;
    }
  }

  if (mergedCount > 0) {
    console.log(`  Merged custom_errors into ${mergedCount} database docs`);
  }
}
|
||||
|
||||
/**
|
||||
* Load existing database data if available
|
||||
*/
|
||||
@@ -768,6 +840,10 @@ async function main() {
|
||||
databases = mergeWithExistingDiagnostics(databases, existingData);
|
||||
}
|
||||
|
||||
// Extract and merge custom_errors for troubleshooting documentation
|
||||
const customErrors = extractCustomErrors();
|
||||
mergeCustomErrors(databases, customErrors);
|
||||
|
||||
// Build statistics
|
||||
const statistics = buildStatistics(databases);
|
||||
|
||||
|
||||
@@ -26,6 +26,7 @@ import {
|
||||
KeyOutlined,
|
||||
SearchOutlined,
|
||||
LinkOutlined,
|
||||
BugOutlined,
|
||||
} from '@ant-design/icons';
|
||||
import type { DatabaseData, DatabaseInfo, TimeGrains } from './types';
|
||||
|
||||
@@ -44,6 +45,8 @@ interface TableEntry {
|
||||
hasDrivers: boolean;
|
||||
hasAuthMethods: boolean;
|
||||
hasConnectionString: boolean;
|
||||
hasCustomErrors: boolean;
|
||||
customErrorCount: number;
|
||||
joins?: boolean;
|
||||
subqueries?: boolean;
|
||||
supports_dynamic_schema?: boolean;
|
||||
@@ -223,6 +226,8 @@ const DatabaseIndex: React.FC<DatabaseIndexProps> = ({ data }) => {
|
||||
db.documentation?.connection_string ||
|
||||
(db.documentation?.drivers?.length ?? 0) > 0
|
||||
),
|
||||
hasCustomErrors: (db.documentation?.custom_errors?.length ?? 0) > 0,
|
||||
customErrorCount: db.documentation?.custom_errors?.length ?? 0,
|
||||
isCompatible: false,
|
||||
});
|
||||
|
||||
@@ -246,6 +251,8 @@ const DatabaseIndex: React.FC<DatabaseIndexProps> = ({ data }) => {
|
||||
hasDrivers: false,
|
||||
hasAuthMethods: false,
|
||||
hasConnectionString: Boolean(compat.connection_string),
|
||||
hasCustomErrors: false,
|
||||
customErrorCount: 0,
|
||||
joins: db.joins,
|
||||
subqueries: db.subqueries,
|
||||
supports_dynamic_schema: db.supports_dynamic_schema,
|
||||
@@ -457,7 +464,7 @@ const DatabaseIndex: React.FC<DatabaseIndexProps> = ({ data }) => {
|
||||
{
|
||||
title: 'Documentation',
|
||||
key: 'docs',
|
||||
width: 150,
|
||||
width: 180,
|
||||
render: (_: unknown, record: TableEntry) => (
|
||||
<div style={{ display: 'flex', gap: '4px', flexWrap: 'wrap' }}>
|
||||
{record.hasConnectionString && (
|
||||
@@ -475,6 +482,13 @@ const DatabaseIndex: React.FC<DatabaseIndexProps> = ({ data }) => {
|
||||
Auth
|
||||
</Tag>
|
||||
)}
|
||||
{record.hasCustomErrors && (
|
||||
<Tooltip title={`${record.customErrorCount} troubleshooting tips`}>
|
||||
<Tag icon={<BugOutlined />} color="volcano">
|
||||
Errors
|
||||
</Tag>
|
||||
</Tooltip>
|
||||
)}
|
||||
</div>
|
||||
),
|
||||
},
|
||||
|
||||
@@ -39,6 +39,7 @@ import {
|
||||
BookOutlined,
|
||||
EditOutlined,
|
||||
GithubOutlined,
|
||||
BugOutlined,
|
||||
} from '@ant-design/icons';
|
||||
import type { DatabaseInfo } from './types';
|
||||
|
||||
@@ -414,6 +415,132 @@ const DatabasePage: React.FC<DatabasePageProps> = ({ database, name }) => {
|
||||
);
|
||||
};
|
||||
|
||||
// Render troubleshooting / custom errors section.
// Errors come from the engine spec's custom_errors mapping (extracted at
// build time); they are grouped by category and shown in an accordion.
const renderTroubleshooting = () => {
  if (!docs?.custom_errors?.length) return null;

  // Group errors by category; errors without one fall into "General".
  const errorsByCategory: Record<string, typeof docs.custom_errors> = {};
  for (const error of docs.custom_errors) {
    const category = error.category || 'General';
    if (!errorsByCategory[category]) {
      errorsByCategory[category] = [];
    }
    errorsByCategory[category].push(error);
  }

  // Define category order for consistent display
  const categoryOrder = [
    'Authentication',
    'Connection',
    'Permissions',
    'Query',
    'Configuration',
    'General',
  ];

  // Known categories sort by the order above; unknown ones go last,
  // alphabetically.
  const sortedCategories = Object.keys(errorsByCategory).sort((a, b) => {
    const aIdx = categoryOrder.indexOf(a);
    const bIdx = categoryOrder.indexOf(b);
    if (aIdx === -1 && bIdx === -1) return a.localeCompare(b);
    if (aIdx === -1) return 1;
    if (bIdx === -1) return -1;
    return aIdx - bIdx;
  });

  // Category colors (antd Tag color presets).
  const categoryColors: Record<string, string> = {
    Authentication: 'orange',
    Connection: 'red',
    Permissions: 'purple',
    Query: 'blue',
    Configuration: 'cyan',
    General: 'default',
  };

  return (
    <Card
      title={
        <>
          <BugOutlined /> Troubleshooting
        </>
      }
      style={{ marginBottom: 16 }}
    >
      <Paragraph type="secondary">
        Common error messages you may encounter when connecting to or querying{' '}
        {name}, along with their causes and solutions.
      </Paragraph>
      <Collapse accordion>
        {sortedCategories.map((category) => (
          <Panel
            header={
              <span>
                <Tag color={categoryColors[category] || 'default'}>
                  {category}
                </Tag>
                {errorsByCategory[category].length} error
                {errorsByCategory[category].length !== 1 ? 's' : ''}
              </span>
            }
            key={category}
          >
            {errorsByCategory[category].map((error, idx) => (
              <div
                key={idx}
                style={{
                  // Separator styling between entries; the last entry in a
                  // category gets no trailing margin/border.
                  marginBottom:
                    idx < errorsByCategory[category].length - 1 ? 16 : 0,
                  paddingBottom:
                    idx < errorsByCategory[category].length - 1 ? 16 : 0,
                  borderBottom:
                    idx < errorsByCategory[category].length - 1
                      ? '1px solid var(--ifm-color-emphasis-200)'
                      : 'none',
                }}
              >
                <div style={{ marginBottom: 8 }}>
                  <Text strong>{error.description || error.error_type}</Text>
                </div>
                <Alert
                  message={error.message_template}
                  type="error"
                  style={{ marginBottom: 8 }}
                />
                {error.invalid_fields && error.invalid_fields.length > 0 && (
                  <div style={{ marginBottom: 8 }}>
                    <Text type="secondary">Check these fields: </Text>
                    {error.invalid_fields.map((field) => (
                      <Tag key={field} color="warning">
                        {field}
                      </Tag>
                    ))}
                  </div>
                )}
                {error.issue_codes && error.issue_codes.length > 0 && (
                  <div>
                    <Text type="secondary">Related issue codes: </Text>
                    {error.issue_codes.map((code) => (
                      <Tag key={code}>
                        <a
                          href={`/docs/using-superset/issue-codes#issue-${code}`}
                          style={{ color: 'inherit' }}
                        >
                          Issue {code}
                        </a>
                      </Tag>
                    ))}
                  </div>
                )}
              </div>
            ))}
          </Panel>
        ))}
      </Collapse>
    </Card>
  );
};
|
||||
|
||||
return (
|
||||
<div
|
||||
className="database-page"
|
||||
@@ -556,6 +683,9 @@ const DatabasePage: React.FC<DatabasePageProps> = ({ database, name }) => {
|
||||
{/* Time Grains */}
|
||||
{renderTimeGrains()}
|
||||
|
||||
{/* Troubleshooting / Custom Errors */}
|
||||
{renderTroubleshooting()}
|
||||
|
||||
{/* Compatible Databases */}
|
||||
{renderCompatibleDatabases()}
|
||||
|
||||
|
||||
@@ -86,6 +86,17 @@ export interface CompatibleDatabase {
|
||||
docs_url?: string;
|
||||
}
|
||||
|
||||
/**
 * A single database-specific error pattern extracted at docs build time
 * from an engine spec's `custom_errors` mapping
 * (see docs/scripts/extract_custom_errors.py).
 */
export interface CustomError {
  error_type: string; // e.g., "CONNECTION_INVALID_USERNAME_ERROR"
  message_template: string; // e.g., 'The username "%(username)s" does not exist.'
  regex_pattern?: string; // The regex pattern that matches this error (optional, for reference)
  regex_name?: string; // The name of the regex constant (e.g., "CONNECTION_INVALID_USERNAME_REGEX")
  invalid_fields?: string[]; // Fields that are invalid, e.g., ["username", "password"]
  issue_codes?: number[]; // Related issue codes from ISSUE_CODES mapping
  category?: string; // Error category: "Authentication", "Connection", "Query", etc.
  description?: string; // Human-readable short description of the error type
}
|
||||
|
||||
export interface DatabaseDocumentation {
|
||||
description?: string;
|
||||
logo?: string;
|
||||
@@ -111,6 +122,7 @@ export interface DatabaseDocumentation {
|
||||
sqlalchemy_docs_url?: string;
|
||||
advanced_features?: Record<string, string>;
|
||||
compatible_databases?: CompatibleDatabase[];
|
||||
custom_errors?: CustomError[]; // Database-specific error messages and troubleshooting info
|
||||
}
|
||||
|
||||
export interface TimeGrains {
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
Reference in New Issue
Block a user