mirror of
https://github.com/apache/superset.git
synced 2026-04-21 00:54:44 +00:00
* [utils] gathering/refactoring into a "utils/" folder Moving current utils.py into utils/core.py and moving other *util* modules under this new "utils/" as well. Following steps include eroding at "utils/core.py" and breaking it down into smaller modules. * Improve tests * Make loading examples in scope for tests * Remove test class attrs examples_loaded and requires_examples
39 lines
1.2 KiB
Python
39 lines
1.2 KiB
Python
# pylint: disable=C,R,W
|
|
from __future__ import absolute_import
|
|
from __future__ import division
|
|
from __future__ import print_function
|
|
from __future__ import unicode_literals
|
|
|
|
import json
|
|
import logging
|
|
import time
|
|
|
|
from superset.models.core import Dashboard
|
|
from superset.utils.core import decode_dashboards
|
|
|
|
|
|
def import_dashboards(session, data_stream, import_time=None):
    """Imports dashboards from a stream to databases

    Reads a JSON export from ``data_stream``, reviving model objects via
    the ``decode_dashboards`` object hook, then imports the datasources
    first (dashboards reference them) and the dashboards second,
    committing after each phase.
    """
    # Stamp every imported object with a single timestamp; callers may
    # pin a specific one for reproducibility.
    if import_time is None:
        import_time = int(time.time())
    payload = json.loads(data_stream.read(), object_hook=decode_dashboards)
    # TODO: import DRUID datasources
    for datasource in payload['datasources']:
        # Dispatch on the concrete datasource class so each type runs
        # its own import logic.
        type(datasource).import_obj(datasource, import_time=import_time)
    session.commit()
    for dashboard in payload['dashboards']:
        Dashboard.import_obj(dashboard, import_time=import_time)
    session.commit()
|
|
|
|
|
|
def export_dashboards(session):
    """Returns all dashboards metadata as a json dump

    Collects the ids of every dashboard in the database and delegates
    serialization to ``Dashboard.export_dashboards``.
    """
    logging.info('Starting export')
    dashboards = session.query(Dashboard)
    # Comprehension instead of a manual append loop (idiomatic, PERF401).
    dashboard_ids = [dashboard.id for dashboard in dashboards]
    return Dashboard.export_dashboards(dashboard_ids)
|