Compare commits

...

2 Commits

Author SHA1 Message Date
Maxime Beauchemin
29fd4a3094 clarifying messages, removing some useless output 2024-12-15 23:17:33 -08:00
Maxime Beauchemin
cd6cc34735 chore: set dev env logging level to INFO (from DEBUG)
Using a higher logging default in dev mode to clean up output. Also setting some TMI messages to debug to prevent not-so-useful information from showing up on startup.

Devs can easily switch the logging level to DEBUG if/when needed.
2024-12-15 22:55:47 -08:00
6 changed files with 17 additions and 25 deletions

View File

@@ -20,7 +20,8 @@ set -eo pipefail
# Make python interactive # Make python interactive
if [ "$DEV_MODE" == "true" ]; then if [ "$DEV_MODE" == "true" ]; then
echo "Reinstalling the app in editable mode" echo "[DEV_MODE detected] Setting the superset package to be in editable mode"
echo "RUN: uv pip install -e ."
uv pip install -e . uv pip install -e .
fi fi
REQUIREMENTS_LOCAL="/app/docker/requirements-local.txt" REQUIREMENTS_LOCAL="/app/docker/requirements-local.txt"
@@ -34,10 +35,8 @@ fi
# Make sure we have dev requirements installed # Make sure we have dev requirements installed
# #
if [ -f "${REQUIREMENTS_LOCAL}" ]; then if [ -f "${REQUIREMENTS_LOCAL}" ]; then
echo "Installing local overrides at ${REQUIREMENTS_LOCAL}" echo "Installing python packages specified at ${REQUIREMENTS_LOCAL}"
pip install --no-cache-dir -r "${REQUIREMENTS_LOCAL}" uv pip install --no-cache-dir -r "${REQUIREMENTS_LOCAL}"
else
echo "Skipping local overrides"
fi fi
case "${1}" in case "${1}" in

View File

@@ -30,15 +30,9 @@ fi
echo_step() { echo_step() {
cat <<EOF cat <<EOF
###################################################################### ######################################################################
Docker Init Step ${1}/${STEP_CNT} [${2}] -- ${3}
Init Step ${1}/${STEP_CNT} [${2}] -- ${3}
###################################################################### ######################################################################
EOF EOF
} }
ADMIN_PASSWORD="${ADMIN_PASSWORD:-admin}" ADMIN_PASSWORD="${ADMIN_PASSWORD:-admin}"
@@ -52,7 +46,6 @@ fi
# Initialize the database # Initialize the database
echo_step "1" "Starting" "Applying DB migrations" echo_step "1" "Starting" "Applying DB migrations"
superset db upgrade superset db upgrade
echo_step "1" "Complete" "Applying DB migrations"
# Create an admin user # Create an admin user
echo_step "2" "Starting" "Setting up admin user ( admin / $ADMIN_PASSWORD )" echo_step "2" "Starting" "Setting up admin user ( admin / $ADMIN_PASSWORD )"
@@ -62,11 +55,9 @@ superset fab create-admin \
--lastname Admin \ --lastname Admin \
--email admin@superset.com \ --email admin@superset.com \
--password "$ADMIN_PASSWORD" --password "$ADMIN_PASSWORD"
echo_step "2" "Complete" "Setting up admin user"
# Create default roles and permissions # Create default roles and permissions
echo_step "3" "Starting" "Setting up roles and perms" echo_step "3" "Starting" "Setting up roles and perms"
superset init superset init
echo_step "3" "Complete" "Setting up roles and perms"
if [ "$SUPERSET_LOAD_EXAMPLES" = "yes" ]; then if [ "$SUPERSET_LOAD_EXAMPLES" = "yes" ]; then
# Load some data to play with # Load some data to play with
@@ -78,5 +69,4 @@ if [ "$SUPERSET_LOAD_EXAMPLES" = "yes" ]; then
else else
superset load_examples --force superset load_examples --force
fi fi
echo_step "4" "Complete" "Loading examples"
fi fi

View File

@@ -103,6 +103,7 @@ WEBDRIVER_BASEURL = "http://superset:8088/" # When using docker compose baseurl
# The base URL for the email report hyperlinks. # The base URL for the email report hyperlinks.
WEBDRIVER_BASEURL_USER_FRIENDLY = WEBDRIVER_BASEURL WEBDRIVER_BASEURL_USER_FRIENDLY = WEBDRIVER_BASEURL
SQLLAB_CTAS_NO_LIMIT = True SQLLAB_CTAS_NO_LIMIT = True
LOG_LEVEL = logging.INFO
# #
# Optionally import superset_config_docker.py (which will have been included on # Optionally import superset_config_docker.py (which will have been included on

View File

@@ -206,7 +206,7 @@ def load_data(data_uri: str, dataset: SqlaTable, database: Database) -> None:
# reuse session when loading data if possible, to make import atomic # reuse session when loading data if possible, to make import atomic
if database.sqlalchemy_uri == current_app.config.get("SQLALCHEMY_DATABASE_URI"): if database.sqlalchemy_uri == current_app.config.get("SQLALCHEMY_DATABASE_URI"):
logger.info("Loading data inside the import transaction") logger.debug("Loading data inside the import transaction")
connection = db.session.connection() connection = db.session.connection()
df.to_sql( df.to_sql(
dataset.table_name, dataset.table_name,
@@ -219,7 +219,7 @@ def load_data(data_uri: str, dataset: SqlaTable, database: Database) -> None:
method="multi", method="multi",
) )
else: else:
logger.warning("Loading data outside the import transaction") logger.debug("Loading data outside the import transaction")
with database.get_sqla_engine( with database.get_sqla_engine(
catalog=dataset.catalog, catalog=dataset.catalog,
schema=dataset.schema, schema=dataset.schema,

View File

@@ -14,6 +14,7 @@
# KIND, either express or implied. See the License for the # KIND, either express or implied. See the License for the
# specific language governing permissions and limitations # specific language governing permissions and limitations
# under the License. # under the License.
import logging
import textwrap import textwrap
from typing import Union from typing import Union
@@ -40,6 +41,8 @@ from .helpers import (
update_slice_ids, update_slice_ids,
) )
logger = logging.getLogger(__name__)
def gen_filter( def gen_filter(
subject: str, comparator: str, operator: str = "==" subject: str, comparator: str, operator: str = "=="
@@ -83,8 +86,7 @@ def load_data(tbl_name: str, database: Database, sample: bool = False) -> None:
method="multi", method="multi",
index=False, index=False,
) )
print("Done loading table!") logging.debug("Done loading table!")
print("-" * 80)
def load_birth_names( def load_birth_names(
@@ -104,7 +106,7 @@ def load_birth_names(
table = get_table_connector_registry() table = get_table_connector_registry()
obj = db.session.query(table).filter_by(table_name=tbl_name, schema=schema).first() obj = db.session.query(table).filter_by(table_name=tbl_name, schema=schema).first()
if not obj: if not obj:
print(f"Creating table [{tbl_name}] reference") logging.debug(f"Creating table [{tbl_name}] reference")
obj = table(table_name=tbl_name, schema=schema) obj = table(table_name=tbl_name, schema=schema)
db.session.add(obj) db.session.add(obj)
@@ -196,7 +198,7 @@ def create_slices(tbl: SqlaTable) -> tuple[list[Slice], list[Slice]]:
"datasource_type": DatasourceType.TABLE, "datasource_type": DatasourceType.TABLE,
} }
print("Creating some slices") logger.debug("Creating some slices")
slices = [ slices = [
Slice( Slice(
**slice_kwargs, **slice_kwargs,
@@ -561,7 +563,7 @@ def create_slices(tbl: SqlaTable) -> tuple[list[Slice], list[Slice]]:
def create_dashboard(slices: list[Slice]) -> Dashboard: def create_dashboard(slices: list[Slice]) -> Dashboard:
print("Creating a dashboard") logger.debug("Creating a dashboard")
dash = db.session.query(Dashboard).filter_by(slug="births").first() dash = db.session.query(Dashboard).filter_by(slug="births").first()
if not dash: if not dash:
dash = Dashboard() dash = Dashboard()

View File

@@ -329,13 +329,13 @@ class ImportExportMixin:
is_new_obj = True is_new_obj = True
# Create new DB object # Create new DB object
obj = cls(**dict_rep) obj = cls(**dict_rep)
logger.info("Importing new %s %s", obj.__tablename__, str(obj)) logger.debug("Importing new %s %s", obj.__tablename__, str(obj))
if cls.export_parent and parent: if cls.export_parent and parent:
setattr(obj, cls.export_parent, parent) setattr(obj, cls.export_parent, parent)
db.session.add(obj) db.session.add(obj)
else: else:
is_new_obj = False is_new_obj = False
logger.info("Updating %s %s", obj.__tablename__, str(obj)) logger.debug("Updating %s %s", obj.__tablename__, str(obj))
# Update columns # Update columns
for k, v in dict_rep.items(): for k, v in dict_rep.items():
setattr(obj, k, v) setattr(obj, k, v)