mirror of
https://github.com/apache/superset.git
synced 2026-04-19 08:04:53 +00:00
[WIP] Deprecate Explore v1 (#2064)
* Simplifying the viz interface (#2005) * Working on dashes * Making this a collaborative branch * Fixing some bugs * Fixing bugs * More improvements * Add datasource back in bootstrap data * Decent state * Linting * Moving forward * Some more linting * Fix the timer * Triggering events through state * Linting * Put filters in an array instead of flt strings (#2090) * Put filters in an array instead of flt strings * Remove query_filter(), put opChoices into Filter * Update version_info.json * Fix migrations * More renderTrigger=true * Fixing bugs * Working on standalone * getting standalone to work * Fixed forcedHeight for standalone view * Linting * Get save slice working in v2 (#2106) * Filter bugfix * Fixing empty series limit bug * Fixed dashboard view * Fixing short urls * Only allow owners to overwrite slice (#2142) * Raise exception when date range is wrong * Only allow owner to overwrite a slice * Fix tests for deprecate v1 (#2140) * Fixed tests for control panels container and filters * Fixed python tests for explorev2 * Fix linting errors * Add in stop button during slice querying/rendering (#2121) * Add in stop button during slice querying/rendering * Abort ajax request on stop * Adding missing legacy module * Removing select2.sortable.js because of license * Allow query to display while slice is loading (#2100) * Allow query to display while slice is loading * Put latestQueryFormData in store * Reorganized query function, got rid of tuple return values * Merging migrations * Wrapping up shortener migration * Fixing tests * Add folder creation to syncBackend * Fixing edit URL in explore view * Fix look of Stop button * Adding syntax highlighting to query modal * Fix cast_form_data and false checkbox on dash * Bugfix * Going deeper * Fix filtering * Deleting invalid filters when changing datasource * Minor adjustments * Fixing calendar heatmap examples * Moving edit datasource button to header's right side * Fixing mapbox example * Show stack trace when 
clicking alert * Adding npm sync-backend command to build instruction * Bumping up JS dependencies * rm dep on select2 * Fix py3 urlparse * rm superset-select2.js * Improving migration scripts * Bugfixes on staging * Fixing Markup viz
This commit is contained in:
committed by
GitHub
parent
3b023e5eaa
commit
0cc8eff1c3
@@ -78,14 +78,22 @@ class CoreTests(SupersetTestCase):
|
||||
self.login(username='admin')
|
||||
slc = self.get_slice("Girls", db.session)
|
||||
|
||||
resp = self.get_resp(slc.viz.json_endpoint)
|
||||
json_endpoint = (
|
||||
'/superset/explore_json/{}/{}?form_data={}'
|
||||
.format(slc.datasource_type, slc.datasource_id, json.dumps(slc.viz.form_data))
|
||||
)
|
||||
resp = self.get_resp(json_endpoint)
|
||||
assert '"Jennifer"' in resp
|
||||
|
||||
def test_slice_csv_endpoint(self):
|
||||
self.login(username='admin')
|
||||
slc = self.get_slice("Girls", db.session)
|
||||
|
||||
resp = self.get_resp(slc.viz.csv_endpoint)
|
||||
csv_endpoint = (
|
||||
'/superset/explore_json/{}/{}?csv=true&form_data={}'
|
||||
.format(slc.datasource_type, slc.datasource_id, json.dumps(slc.viz.form_data))
|
||||
)
|
||||
resp = self.get_resp(csv_endpoint)
|
||||
assert 'Jennifer,' in resp
|
||||
|
||||
def test_admin_only_permissions(self):
|
||||
@@ -122,24 +130,55 @@ class CoreTests(SupersetTestCase):
|
||||
db.session.commit()
|
||||
copy_name = "Test Sankey Save"
|
||||
tbl_id = self.table_ids.get('energy_usage')
|
||||
new_slice_name = "Test Sankey Overwirte"
|
||||
|
||||
url = (
|
||||
"/superset/explore/table/{}/?viz_type=sankey&groupby=source&"
|
||||
"groupby=target&metric=sum__value&row_limit=5000&where=&having=&"
|
||||
"flt_col_0=source&flt_op_0=in&flt_eq_0=&slice_id={}&slice_name={}&"
|
||||
"collapsed_fieldsets=&action={}&datasource_name=energy_usage&"
|
||||
"datasource_id=1&datasource_type=table&previous_viz_type=sankey")
|
||||
"/superset/explore/table/{}/?slice_name={}&"
|
||||
"action={}&datasource_name=energy_usage&form_data={}")
|
||||
|
||||
# Changing name
|
||||
resp = self.get_resp(url.format(tbl_id, slice_id, copy_name, 'save'))
|
||||
assert copy_name in resp
|
||||
form_data = {
|
||||
'viz_type': 'sankey',
|
||||
'groupby': 'source',
|
||||
'groupby': 'target',
|
||||
'metric': 'sum__value',
|
||||
'row_limit': 5000,
|
||||
'slice_id': slice_id,
|
||||
}
|
||||
# Changing name and save as a new slice
|
||||
resp = self.get_resp(
|
||||
url.format(
|
||||
tbl_id,
|
||||
copy_name,
|
||||
'saveas',
|
||||
json.dumps(form_data)
|
||||
)
|
||||
)
|
||||
slices = db.session.query(models.Slice) \
|
||||
.filter_by(slice_name=copy_name).all()
|
||||
assert len(slices) == 1
|
||||
new_slice_id = slices[0].id
|
||||
|
||||
# Setting the name back to its original name
|
||||
resp = self.get_resp(url.format(tbl_id, slice_id, slice_name, 'save'))
|
||||
assert slice_name in resp
|
||||
form_data = {
|
||||
'viz_type': 'sankey',
|
||||
'groupby': 'source',
|
||||
'groupby': 'target',
|
||||
'metric': 'sum__value',
|
||||
'row_limit': 5000,
|
||||
'slice_id': new_slice_id,
|
||||
}
|
||||
# Setting the name back to its original name by overwriting new slice
|
||||
resp = self.get_resp(
|
||||
url.format(
|
||||
tbl_id,
|
||||
new_slice_name,
|
||||
'overwrite',
|
||||
json.dumps(form_data)
|
||||
)
|
||||
)
|
||||
slc = db.session.query(models.Slice).filter_by(id=new_slice_id).first()
|
||||
assert slc.slice_name == new_slice_name
|
||||
db.session.delete(slc)
|
||||
|
||||
# Doing a basic overwrite
|
||||
assert 'Energy' in self.get_resp(
|
||||
url.format(tbl_id, slice_id, copy_name, 'overwrite'))
|
||||
|
||||
def test_filter_endpoint(self):
|
||||
self.login(username='admin')
|
||||
@@ -168,8 +207,6 @@ class CoreTests(SupersetTestCase):
|
||||
for slc in db.session.query(Slc).all():
|
||||
urls += [
|
||||
(slc.slice_name, 'slice_url', slc.slice_url),
|
||||
(slc.slice_name, 'json_endpoint', slc.viz.json_endpoint),
|
||||
(slc.slice_name, 'csv_endpoint', slc.viz.csv_endpoint),
|
||||
(slc.slice_name, 'slice_id_url', slc.slice_id_url),
|
||||
]
|
||||
for name, method, url in urls:
|
||||
@@ -544,8 +581,7 @@ class CoreTests(SupersetTestCase):
|
||||
self.login(username='admin')
|
||||
url = (
|
||||
'/superset/fetch_datasource_metadata?'
|
||||
'datasource_type=table&'
|
||||
'datasource_id=1'
|
||||
+ 'datasourceKey=1__table'
|
||||
)
|
||||
resp = self.get_json_resp(url)
|
||||
keys = [
|
||||
|
||||
@@ -116,30 +116,44 @@ class DruidTests(SupersetTestCase):
|
||||
|
||||
resp = self.get_resp('/superset/explore/druid/{}/'.format(
|
||||
datasource_id))
|
||||
self.assertIn("[test_cluster].[test_datasource]", resp)
|
||||
|
||||
self.assertIn("test_datasource", resp)
|
||||
form_data = {
|
||||
'viz_type': 'table',
|
||||
'granularity': 'one+day',
|
||||
'druid_time_origin': '',
|
||||
'since': '7+days+ago',
|
||||
'until': 'now',
|
||||
'row_limit': 5000,
|
||||
'include_search': 'false',
|
||||
'metrics': ['count'],
|
||||
'groupby': ['dim1'],
|
||||
'force': 'true',
|
||||
}
|
||||
# One groupby
|
||||
url = (
|
||||
'/superset/explore_json/druid/{}/?viz_type=table&granularity=one+day&'
|
||||
'druid_time_origin=&since=7+days+ago&until=now&row_limit=5000&'
|
||||
'include_search=false&metrics=count&groupby=dim1&flt_col_0=dim1&'
|
||||
'flt_op_0=in&flt_eq_0=&slice_id=&slice_name=&collapsed_fieldsets=&'
|
||||
'action=&datasource_name=test_datasource&datasource_id={}&'
|
||||
'datasource_type=druid&previous_viz_type=table&'
|
||||
'force=true'.format(datasource_id, datasource_id))
|
||||
'/superset/explore_json/druid/{}/?form_data={}'.format(
|
||||
datasource_id, json.dumps(form_data))
|
||||
)
|
||||
resp = self.get_json_resp(url)
|
||||
self.assertEqual("Canada", resp['data']['records'][0]['dim1'])
|
||||
|
||||
form_data = {
|
||||
'viz_type': 'table',
|
||||
'granularity': 'one+day',
|
||||
'druid_time_origin': '',
|
||||
'since': '7+days+ago',
|
||||
'until': 'now',
|
||||
'row_limit': 5000,
|
||||
'include_search': 'false',
|
||||
'metrics': ['count'],
|
||||
'groupby': ['dim1', 'dim2d'],
|
||||
'force': 'true',
|
||||
}
|
||||
# two groupby
|
||||
url = (
|
||||
'/superset/explore_json/druid/{}/?viz_type=table&granularity=one+day&'
|
||||
'druid_time_origin=&since=7+days+ago&until=now&row_limit=5000&'
|
||||
'include_search=false&metrics=count&groupby=dim1&'
|
||||
'flt_col_0=dim1&groupby=dim2d&'
|
||||
'flt_op_0=in&flt_eq_0=&slice_id=&slice_name=&collapsed_fieldsets=&'
|
||||
'action=&datasource_name=test_datasource&datasource_id={}&'
|
||||
'datasource_type=druid&previous_viz_type=table&'
|
||||
'force=true'.format(datasource_id, datasource_id))
|
||||
'/superset/explore_json/druid/{}/?form_data={}'.format(
|
||||
datasource_id, json.dumps(form_data))
|
||||
)
|
||||
resp = self.get_json_resp(url)
|
||||
self.assertEqual("Canada", resp['data']['records'][0]['dim1'])
|
||||
|
||||
|
||||
Reference in New Issue
Block a user