mirror of
https://github.com/apache/superset.git
synced 2026-05-07 08:54:23 +00:00
Compare commits
1 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
e6086feb01 |
@@ -1,40 +0,0 @@
|
||||
engines:
|
||||
csslint:
|
||||
enabled: false
|
||||
duplication:
|
||||
enabled: false
|
||||
eslint:
|
||||
enabled: true
|
||||
checks:
|
||||
import/extensions:
|
||||
enabled: false
|
||||
import/no-extraneous-dependencies:
|
||||
enabled: false
|
||||
config:
|
||||
config: superset/assets/.eslintrc
|
||||
pep8:
|
||||
enabled: true
|
||||
fixme:
|
||||
enabled: false
|
||||
radon:
|
||||
enabled: true
|
||||
checks:
|
||||
Complexity:
|
||||
enabled: false
|
||||
ratings:
|
||||
paths:
|
||||
- "**.py"
|
||||
- "superset/assets/**.js"
|
||||
- "superset/assets/**.jsx"
|
||||
exclude_paths:
|
||||
- ".*"
|
||||
- "**.pyc"
|
||||
- "**.gz"
|
||||
- "env/"
|
||||
- "tests/"
|
||||
- "superset/assets/images/"
|
||||
- "superset/assets/vendor/"
|
||||
- "superset/assets/node_modules/"
|
||||
- "superset/assets/javascripts/dist/"
|
||||
- "superset/migrations"
|
||||
- "docs/"
|
||||
@@ -1 +1 @@
|
||||
repo_token: 4P9MpvLrZfJKzHdGZsdV3MzO43OZJgYFn
|
||||
repo_token: EMkVRVEKYgUESKaNN9QyOhPnFnKNqyDcJ
|
||||
|
||||
26
.gitignore
vendored
26
.gitignore
vendored
@@ -1,39 +1,23 @@
|
||||
*.pyc
|
||||
yarn-error.log
|
||||
_modules
|
||||
superset/assets/coverage/*
|
||||
changelog.sh
|
||||
babel
|
||||
.DS_Store
|
||||
.coverage
|
||||
_build
|
||||
_static
|
||||
_images
|
||||
_modules
|
||||
superset/bin/supersetc
|
||||
env_py3
|
||||
.eggs
|
||||
panoramix/bin/panoramixc
|
||||
build
|
||||
*.db
|
||||
tmp
|
||||
superset_config.py
|
||||
panoramix_config.py
|
||||
local_config.py
|
||||
env
|
||||
dist
|
||||
superset.egg-info/
|
||||
panoramix.egg-info/
|
||||
app.db
|
||||
*.bak
|
||||
.idea
|
||||
*.sqllite
|
||||
.vscode
|
||||
.python-version
|
||||
|
||||
# Node.js, webpack artifacts
|
||||
*.entry.js
|
||||
*.js.map
|
||||
node_modules
|
||||
npm-debug.log*
|
||||
yarn.lock
|
||||
superset/assets/version_info.json
|
||||
|
||||
# IntelliJ
|
||||
*.iml
|
||||
npm-debug.log
|
||||
|
||||
@@ -9,14 +9,17 @@ pylint:
|
||||
disable:
|
||||
- cyclic-import
|
||||
- invalid-name
|
||||
- logging-format-interpolation
|
||||
options:
|
||||
docstring-min-length: 10
|
||||
pep8:
|
||||
full: true
|
||||
ignore-paths:
|
||||
- docs
|
||||
- superset/migrations/env.py
|
||||
- panoramix/migrations/env.py
|
||||
- panoramix/ascii_art.py
|
||||
ignore-patterns:
|
||||
- ^example/doc_.*\.py$
|
||||
- (^|/)docs(/|$)
|
||||
python-targets:
|
||||
- 2
|
||||
- 3
|
||||
|
||||
407
.pylintrc
407
.pylintrc
@@ -1,407 +0,0 @@
|
||||
[MASTER]
|
||||
|
||||
# Specify a configuration file.
|
||||
#rcfile=
|
||||
|
||||
# Python code to execute, usually for sys.path manipulation such as
|
||||
# pygtk.require().
|
||||
#init-hook=
|
||||
|
||||
# Add files or directories to the blacklist. They should be base names, not
|
||||
# paths.
|
||||
ignore=CVS
|
||||
|
||||
# Add files or directories matching the regex patterns to the blacklist. The
|
||||
# regex matches against base names, not paths.
|
||||
ignore-patterns=
|
||||
|
||||
# Pickle collected data for later comparisons.
|
||||
persistent=yes
|
||||
|
||||
# List of plugins (as comma separated values of python modules names) to load,
|
||||
# usually to register additional checkers.
|
||||
load-plugins=
|
||||
|
||||
# Use multiple processes to speed up Pylint.
|
||||
jobs=1
|
||||
|
||||
# Allow loading of arbitrary C extensions. Extensions are imported into the
|
||||
# active Python interpreter and may run arbitrary code.
|
||||
unsafe-load-any-extension=no
|
||||
|
||||
# A comma-separated list of package or module names from where C extensions may
|
||||
# be loaded. Extensions are loading into the active Python interpreter and may
|
||||
# run arbitrary code
|
||||
extension-pkg-whitelist=
|
||||
|
||||
# Allow optimization of some AST trees. This will activate a peephole AST
|
||||
# optimizer, which will apply various small optimizations. For instance, it can
|
||||
# be used to obtain the result of joining multiple strings with the addition
|
||||
# operator. Joining a lot of strings can lead to a maximum recursion error in
|
||||
# Pylint and this flag can prevent that. It has one side effect, the resulting
|
||||
# AST will be different than the one from reality. This option is deprecated
|
||||
# and it will be removed in Pylint 2.0.
|
||||
optimize-ast=no
|
||||
|
||||
|
||||
[MESSAGES CONTROL]
|
||||
|
||||
# Only show warnings with the listed confidence levels. Leave empty to show
|
||||
# all. Valid levels: HIGH, INFERENCE, INFERENCE_FAILURE, UNDEFINED
|
||||
confidence=
|
||||
|
||||
# Enable the message, report, category or checker with the given id(s). You can
|
||||
# either give multiple identifier separated by comma (,) or put this option
|
||||
# multiple time (only on the command line, not in the configuration file where
|
||||
# it should appear only once). See also the "--disable" option for examples.
|
||||
#enable=
|
||||
|
||||
# Disable the message, report, category or checker with the given id(s). You
|
||||
# can either give multiple identifiers separated by comma (,) or put this
|
||||
# option multiple times (only on the command line, not in the configuration
|
||||
# file where it should appear only once).You can also use "--disable=all" to
|
||||
# disable everything first and then reenable specific checks. For example, if
|
||||
# you want to run only the similarities checker, you can use "--disable=all
|
||||
# --enable=similarities". If you want to run only the classes checker, but have
|
||||
# no Warning level messages displayed, use"--disable=all --enable=classes
|
||||
# --disable=W"
|
||||
disable=standarderror-builtin,long-builtin,dict-view-method,intern-builtin,suppressed-message,no-absolute-import,unpacking-in-except,apply-builtin,delslice-method,indexing-exception,old-raise-syntax,print-statement,cmp-builtin,reduce-builtin,useless-suppression,coerce-method,input-builtin,cmp-method,raw_input-builtin,nonzero-method,backtick,basestring-builtin,setslice-method,reload-builtin,oct-method,map-builtin-not-iterating,execfile-builtin,old-octal-literal,zip-builtin-not-iterating,buffer-builtin,getslice-method,metaclass-assignment,xrange-builtin,long-suffix,round-builtin,range-builtin-not-iterating,next-method-called,dict-iter-method,parameter-unpacking,unicode-builtin,unichr-builtin,import-star-module-level,raising-string,filter-builtin-not-iterating,old-ne-operator,using-cmp-argument,coerce-builtin,file-builtin,old-division,hex-method,invalid-unary-operand-type
|
||||
|
||||
|
||||
[REPORTS]
|
||||
|
||||
# Set the output format. Available formats are text, parseable, colorized, msvs
|
||||
# (visual studio) and html. You can also give a reporter class, eg
|
||||
# mypackage.mymodule.MyReporterClass.
|
||||
output-format=text
|
||||
|
||||
# Put messages in a separate file for each module / package specified on the
|
||||
# command line instead of printing them on stdout. Reports (if any) will be
|
||||
# written in a file name "pylint_global.[txt|html]". This option is deprecated
|
||||
# and it will be removed in Pylint 2.0.
|
||||
files-output=no
|
||||
|
||||
# Tells whether to display a full report or only the messages
|
||||
reports=yes
|
||||
|
||||
# Python expression which should return a note less than 10 (10 is the highest
|
||||
# note). You have access to the variables errors warning, statement which
|
||||
# respectively contain the number of errors / warnings messages and the total
|
||||
# number of statements analyzed. This is used by the global evaluation report
|
||||
# (RP0004).
|
||||
evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10)
|
||||
|
||||
# Template used to display messages. This is a python new-style format string
|
||||
# used to format the message information. See doc for all details
|
||||
#msg-template=
|
||||
|
||||
|
||||
[BASIC]
|
||||
|
||||
# Good variable names which should always be accepted, separated by a comma
|
||||
good-names=i,j,k,ex,Run,_,d,e,v,o,l,x,ts
|
||||
|
||||
# Bad variable names which should always be refused, separated by a comma
|
||||
bad-names=foo,bar,baz,toto,tutu,tata,d,fd
|
||||
|
||||
# Colon-delimited sets of names that determine each other's naming style when
|
||||
# the name regexes allow several styles.
|
||||
name-group=
|
||||
|
||||
# Include a hint for the correct naming format with invalid-name
|
||||
include-naming-hint=no
|
||||
|
||||
# List of decorators that produce properties, such as abc.abstractproperty. Add
|
||||
# to this list to register other decorators that produce valid properties.
|
||||
property-classes=abc.abstractproperty
|
||||
|
||||
# Regular expression matching correct argument names
|
||||
argument-rgx=[a-z_][a-z0-9_]{2,30}$
|
||||
|
||||
# Naming hint for argument names
|
||||
argument-name-hint=[a-z_][a-z0-9_]{2,30}$
|
||||
|
||||
# Regular expression matching correct method names
|
||||
method-rgx=[a-z_][a-z0-9_]{2,30}$
|
||||
|
||||
# Naming hint for method names
|
||||
method-name-hint=[a-z_][a-z0-9_]{2,30}$
|
||||
|
||||
# Regular expression matching correct variable names
|
||||
variable-rgx=[a-z_][a-z0-9_]{1,30}$
|
||||
|
||||
# Naming hint for variable names
|
||||
variable-name-hint=[a-z_][a-z0-9_]{2,30}$
|
||||
|
||||
# Regular expression matching correct inline iteration names
|
||||
inlinevar-rgx=[A-Za-z_][A-Za-z0-9_]*$
|
||||
|
||||
# Naming hint for inline iteration names
|
||||
inlinevar-name-hint=[A-Za-z_][A-Za-z0-9_]*$
|
||||
|
||||
# Regular expression matching correct constant names
|
||||
const-rgx=(([A-Za-z_][A-Za-z0-9_]*)|(__.*__))$
|
||||
|
||||
# Naming hint for constant names
|
||||
const-name-hint=(([A-Z_][A-Z0-9_]*)|(__.*__))$
|
||||
|
||||
# Regular expression matching correct class names
|
||||
class-rgx=[A-Z_][a-zA-Z0-9]+$
|
||||
|
||||
# Naming hint for class names
|
||||
class-name-hint=[A-Z_][a-zA-Z0-9]+$
|
||||
|
||||
# Regular expression matching correct class attribute names
|
||||
class-attribute-rgx=([A-Za-z_][A-Za-z0-9_]{2,30}|(__.*__))$
|
||||
|
||||
# Naming hint for class attribute names
|
||||
class-attribute-name-hint=([A-Za-z_][A-Za-z0-9_]{2,30}|(__.*__))$
|
||||
|
||||
# Regular expression matching correct module names
|
||||
module-rgx=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$
|
||||
|
||||
# Naming hint for module names
|
||||
module-name-hint=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$
|
||||
|
||||
# Regular expression matching correct attribute names
|
||||
attr-rgx=[a-z_][a-z0-9_]{2,30}$
|
||||
|
||||
# Naming hint for attribute names
|
||||
attr-name-hint=[a-z_][a-z0-9_]{2,30}$
|
||||
|
||||
# Regular expression matching correct function names
|
||||
function-rgx=[a-z_][a-z0-9_]{2,30}$
|
||||
|
||||
# Naming hint for function names
|
||||
function-name-hint=[a-z_][a-z0-9_]{2,30}$
|
||||
|
||||
# Regular expression which should only match function or class names that do
|
||||
# not require a docstring.
|
||||
no-docstring-rgx=^_
|
||||
|
||||
# Minimum line length for functions/classes that require docstrings, shorter
|
||||
# ones are exempt.
|
||||
docstring-min-length=10
|
||||
|
||||
|
||||
[ELIF]
|
||||
|
||||
# Maximum number of nested blocks for function / method body
|
||||
max-nested-blocks=5
|
||||
|
||||
|
||||
[FORMAT]
|
||||
|
||||
# Maximum number of characters on a single line.
|
||||
max-line-length=90
|
||||
|
||||
# Regexp for a line that is allowed to be longer than the limit.
|
||||
ignore-long-lines=^\s*(# )?<?https?://\S+>?$
|
||||
|
||||
# Allow the body of an if to be on the same line as the test if there is no
|
||||
# else.
|
||||
single-line-if-stmt=no
|
||||
|
||||
# List of optional constructs for which whitespace checking is disabled. `dict-
|
||||
# separator` is used to allow tabulation in dicts, etc.: {1 : 1,\n222: 2}.
|
||||
# `trailing-comma` allows a space between comma and closing bracket: (a, ).
|
||||
# `empty-line` allows space-only lines.
|
||||
no-space-check=trailing-comma,dict-separator
|
||||
|
||||
# Maximum number of lines in a module
|
||||
max-module-lines=1000
|
||||
|
||||
# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1
|
||||
# tab).
|
||||
indent-string=' '
|
||||
|
||||
# Number of spaces of indent required inside a hanging or continued line.
|
||||
indent-after-paren=4
|
||||
|
||||
# Expected format of line ending, e.g. empty (any line ending), LF or CRLF.
|
||||
expected-line-ending-format=
|
||||
|
||||
|
||||
[LOGGING]
|
||||
|
||||
# Logging modules to check that the string format arguments are in logging
|
||||
# function parameter format
|
||||
logging-modules=logging
|
||||
|
||||
|
||||
[MISCELLANEOUS]
|
||||
|
||||
# List of note tags to take in consideration, separated by a comma.
|
||||
notes=FIXME,XXX,TODO
|
||||
|
||||
|
||||
[SIMILARITIES]
|
||||
|
||||
# Minimum lines number of a similarity.
|
||||
min-similarity-lines=4
|
||||
|
||||
# Ignore comments when computing similarities.
|
||||
ignore-comments=yes
|
||||
|
||||
# Ignore docstrings when computing similarities.
|
||||
ignore-docstrings=yes
|
||||
|
||||
# Ignore imports when computing similarities.
|
||||
ignore-imports=no
|
||||
|
||||
|
||||
[SPELLING]
|
||||
|
||||
# Spelling dictionary name. Available dictionaries: none. To make it working
|
||||
# install python-enchant package.
|
||||
spelling-dict=
|
||||
|
||||
# List of comma separated words that should not be checked.
|
||||
spelling-ignore-words=
|
||||
|
||||
# A path to a file that contains private dictionary; one word per line.
|
||||
spelling-private-dict-file=
|
||||
|
||||
# Tells whether to store unknown words to indicated private dictionary in
|
||||
# --spelling-private-dict-file option instead of raising a message.
|
||||
spelling-store-unknown-words=no
|
||||
|
||||
|
||||
[TYPECHECK]
|
||||
|
||||
# Tells whether missing members accessed in mixin class should be ignored. A
|
||||
# mixin class is detected if its name ends with "mixin" (case insensitive).
|
||||
ignore-mixin-members=yes
|
||||
|
||||
# List of module names for which member attributes should not be checked
|
||||
# (useful for modules/projects where namespaces are manipulated during runtime
|
||||
# and thus existing member attributes cannot be deduced by static analysis. It
|
||||
# supports qualified module names, as well as Unix pattern matching.
|
||||
ignored-modules=numpy,pandas,alembic.op,sqlalchemy,alembic.context,flask_appbuilder.security.sqla.PermissionView.role,flask_appbuilder.Model.metadata,flask_appbuilder.Base.metadata
|
||||
|
||||
# List of class names for which member attributes should not be checked (useful
|
||||
# for classes with dynamically set attributes). This supports the use of
|
||||
# qualified names.
|
||||
ignored-classes=optparse.Values,thread._local,_thread._local
|
||||
|
||||
# List of members which are set dynamically and missed by pylint inference
|
||||
# system, and so shouldn't trigger E1101 when accessed. Python regular
|
||||
# expressions are accepted.
|
||||
generated-members=
|
||||
|
||||
# List of decorators that produce context managers, such as
|
||||
# contextlib.contextmanager. Add to this list to register other decorators that
|
||||
# produce valid context managers.
|
||||
contextmanager-decorators=contextlib.contextmanager
|
||||
|
||||
|
||||
[VARIABLES]
|
||||
|
||||
# Tells whether we should check for unused import in __init__ files.
|
||||
init-import=no
|
||||
|
||||
# A regular expression matching the name of dummy variables (i.e. expectedly
|
||||
# not used).
|
||||
dummy-variables-rgx=(_+[a-zA-Z0-9]*?$)|dummy
|
||||
|
||||
# List of additional names supposed to be defined in builtins. Remember that
|
||||
# you should avoid to define new builtins when possible.
|
||||
additional-builtins=
|
||||
|
||||
# List of strings which can identify a callback function by name. A callback
|
||||
# name must start or end with one of those strings.
|
||||
callbacks=cb_,_cb
|
||||
|
||||
# List of qualified module names which can have objects that can redefine
|
||||
# builtins.
|
||||
redefining-builtins-modules=six.moves,future.builtins
|
||||
|
||||
|
||||
[CLASSES]
|
||||
|
||||
# List of method names used to declare (i.e. assign) instance attributes.
|
||||
defining-attr-methods=__init__,__new__,setUp
|
||||
|
||||
# List of valid names for the first argument in a class method.
|
||||
valid-classmethod-first-arg=cls
|
||||
|
||||
# List of valid names for the first argument in a metaclass class method.
|
||||
valid-metaclass-classmethod-first-arg=mcs
|
||||
|
||||
# List of member names, which should be excluded from the protected access
|
||||
# warning.
|
||||
exclude-protected=_asdict,_fields,_replace,_source,_make
|
||||
|
||||
|
||||
[DESIGN]
|
||||
|
||||
# Maximum number of arguments for function / method
|
||||
max-args=5
|
||||
|
||||
# Argument names that match this expression will be ignored. Default to name
|
||||
# with leading underscore
|
||||
ignored-argument-names=_.*
|
||||
|
||||
# Maximum number of locals for function / method body
|
||||
max-locals=15
|
||||
|
||||
# Maximum number of return / yield for function / method body
|
||||
max-returns=6
|
||||
|
||||
# Maximum number of branch for function / method body
|
||||
max-branches=12
|
||||
|
||||
# Maximum number of statements in function / method body
|
||||
max-statements=50
|
||||
|
||||
# Maximum number of parents for a class (see R0901).
|
||||
max-parents=7
|
||||
|
||||
# Maximum number of attributes for a class (see R0902).
|
||||
max-attributes=7
|
||||
|
||||
# Minimum number of public methods for a class (see R0903).
|
||||
min-public-methods=2
|
||||
|
||||
# Maximum number of public methods for a class (see R0904).
|
||||
max-public-methods=20
|
||||
|
||||
# Maximum number of boolean expressions in a if statement
|
||||
max-bool-expr=5
|
||||
|
||||
|
||||
[IMPORTS]
|
||||
|
||||
# Deprecated modules which should not be used, separated by a comma
|
||||
deprecated-modules=optparse
|
||||
|
||||
# Create a graph of every (i.e. internal and external) dependencies in the
|
||||
# given file (report RP0402 must not be disabled)
|
||||
import-graph=
|
||||
|
||||
# Create a graph of external dependencies in the given file (report RP0402 must
|
||||
# not be disabled)
|
||||
ext-import-graph=
|
||||
|
||||
# Create a graph of internal dependencies in the given file (report RP0402 must
|
||||
# not be disabled)
|
||||
int-import-graph=
|
||||
|
||||
# Force import order to recognize a module as part of the standard
|
||||
# compatibility libraries.
|
||||
known-standard-library=
|
||||
|
||||
# Force import order to recognize a module as part of a third party library.
|
||||
known-third-party=enchant
|
||||
|
||||
# Analyse import fallback blocks. This can be used to support both Python 2 and
|
||||
# 3 compatible code, which means that the block might have code that exists
|
||||
# only in one or another interpreter, leading to false positives when analysed.
|
||||
analyse-fallback-blocks=no
|
||||
|
||||
|
||||
[EXCEPTIONS]
|
||||
|
||||
# Exceptions that will emit a warning when being caught. Defaults to
|
||||
# "Exception"
|
||||
overgeneral-exceptions=Exception
|
||||
41
.travis.yml
41
.travis.yml
@@ -1,31 +1,20 @@
|
||||
language: python
|
||||
services:
|
||||
- redis-server
|
||||
addons:
|
||||
code_climate:
|
||||
repo_token: 5f3a06c425eef7be4b43627d7d07a3e46c45bdc07155217825ff7c49cb6a470c
|
||||
python:
|
||||
- "2.7"
|
||||
- "3.4"
|
||||
cache:
|
||||
directories:
|
||||
- $HOME/.wheelhouse/
|
||||
env:
|
||||
global:
|
||||
- TRAVIS_CACHE=$HOME/.travis_cache/
|
||||
matrix:
|
||||
- TOX_ENV=flake8
|
||||
- TOX_ENV=javascript
|
||||
- TOX_ENV=pylint
|
||||
- TOX_ENV=py34-postgres
|
||||
- TOX_ENV=py34-sqlite
|
||||
- TOX_ENV=py27-mysql
|
||||
- TOX_ENV=py27-sqlite
|
||||
before_script:
|
||||
- mysql -u root -e "DROP DATABASE IF EXISTS superset; CREATE DATABASE superset DEFAULT CHARACTER SET utf8 COLLATE utf8_unicode_ci"
|
||||
- mysql -u root -e "CREATE USER 'mysqluser'@'localhost' IDENTIFIED BY 'mysqluserpassword';"
|
||||
- mysql -u root -e "GRANT ALL ON superset.* TO 'mysqluser'@'localhost';"
|
||||
- psql -U postgres -c "CREATE DATABASE superset;"
|
||||
- psql -U postgres -c "CREATE USER postgresuser WITH PASSWORD 'pguserpassword';"
|
||||
- export PATH=${PATH}:/tmp/hive/bin
|
||||
install:
|
||||
- pip install --upgrade pip
|
||||
- pip install tox tox-travis
|
||||
script: tox -e $TOX_ENV
|
||||
- pip wheel -w $HOME/.wheelhouse -f $HOME/.wheelhouse -r requirements.txt
|
||||
- pip install --find-links=$HOME/.wheelhouse --no-index -rrequirements.txt
|
||||
- python setup.py install
|
||||
- cd panoramix/assets
|
||||
- npm install
|
||||
- npm run prod
|
||||
- cd $TRAVIS_BUILD_DIR
|
||||
script: bash run_tests.sh
|
||||
after_success:
|
||||
- coveralls
|
||||
- cd panoramix/assets
|
||||
- npm run lint
|
||||
|
||||
2723
CHANGELOG.md
2723
CHANGELOG.md
File diff suppressed because it is too large
Load Diff
@@ -1,84 +0,0 @@
|
||||
# Code of Conduct
|
||||
|
||||
## 1. Purpose
|
||||
|
||||
A primary goal of Apache Superset is to be inclusive to the largest number of contributors, with the most varied and diverse backgrounds possible. As such, we are committed to providing a friendly, safe and welcoming environment for all, regardless of gender, sexual orientation, ability, ethnicity, socioeconomic status, and religion (or lack thereof).
|
||||
|
||||
This code of conduct outlines our expectations for all those who participate in our community, as well as the consequences for unacceptable behavior.
|
||||
|
||||
We invite all those who participate in Apache Superset to help us create safe and positive experiences for everyone.
|
||||
|
||||
## 2. Open Source Citizenship
|
||||
|
||||
A supplemental goal of this Code of Conduct is to increase open source citizenship by encouraging participants to recognize and strengthen the relationships between our actions and their effects on our community.
|
||||
|
||||
Communities mirror the societies in which they exist and positive action is essential to counteract the many forms of inequality and abuses of power that exist in society.
|
||||
|
||||
If you see someone who is making an extra effort to ensure our community is welcoming, friendly, and encourages all participants to contribute to the fullest extent, we want to know.
|
||||
|
||||
## 3. Expected Behavior
|
||||
|
||||
The following behaviors are expected and requested of all community members:
|
||||
|
||||
* Participate in an authentic and active way. In doing so, you contribute to the health and longevity of this community.
|
||||
* Exercise consideration and respect in your speech and actions.
|
||||
* Attempt collaboration before conflict.
|
||||
* Refrain from demeaning, discriminatory, or harassing behavior and speech.
|
||||
* Be mindful of your surroundings and of your fellow participants. Alert community leaders if you notice a dangerous situation, someone in distress, or violations of this Code of Conduct, even if they seem inconsequential.
|
||||
* Remember that community event venues may be shared with members of the public; please be respectful to all patrons of these locations.
|
||||
|
||||
## 4. Unacceptable Behavior
|
||||
|
||||
The following behaviors are considered harassment and are unacceptable within our community:
|
||||
|
||||
* Violence, threats of violence or violent language directed against another person.
|
||||
* Sexist, racist, homophobic, transphobic, ableist or otherwise discriminatory jokes and language.
|
||||
* Posting or displaying sexually explicit or violent material.
|
||||
* Posting or threatening to post other people’s personally identifying information ("doxing").
|
||||
* Personal insults, particularly those related to gender, sexual orientation, race, religion, or disability.
|
||||
* Inappropriate photography or recording.
|
||||
* Inappropriate physical contact. You should have someone’s consent before touching them.
|
||||
* Unwelcome sexual attention. This includes, sexualized comments or jokes; inappropriate touching, groping, and unwelcomed sexual advances.
|
||||
* Deliberate intimidation, stalking or following (online or in person).
|
||||
* Advocating for, or encouraging, any of the above behavior.
|
||||
* Sustained disruption of community events, including talks and presentations.
|
||||
|
||||
## 5. Consequences of Unacceptable Behavior
|
||||
|
||||
Unacceptable behavior from any community member, including sponsors and those with decision-making authority, will not be tolerated.
|
||||
|
||||
Anyone asked to stop unacceptable behavior is expected to comply immediately.
|
||||
|
||||
If a community member engages in unacceptable behavior, the community organizers may take any action they deem appropriate, up to and including a temporary ban or permanent expulsion from the community without warning (and without refund in the case of a paid event).
|
||||
|
||||
## 6. Reporting Guidelines
|
||||
|
||||
If you are subject to or witness unacceptable behavior, or have any other concerns, please notify a community organizer as soon as possible. dev@superset.incubator.apache.org .
|
||||
|
||||
|
||||
|
||||
Additionally, community organizers are available to help community members engage with local law enforcement or to otherwise help those experiencing unacceptable behavior feel safe. In the context of in-person events, organizers will also provide escorts as desired by the person experiencing distress.
|
||||
|
||||
## 7. Addressing Grievances
|
||||
|
||||
If you feel you have been falsely or unfairly accused of violating this Code of Conduct, you should notify Apache with a concise description of your grievance. Your grievance will be handled in accordance with our existing governing policies.
|
||||
|
||||
|
||||
|
||||
## 8. Scope
|
||||
|
||||
We expect all community participants (contributors, paid or otherwise; sponsors; and other guests) to abide by this Code of Conduct in all community venues–online and in-person–as well as in all one-on-one communications pertaining to community business.
|
||||
|
||||
This code of conduct and its related procedures also applies to unacceptable behavior occurring outside the scope of community activities when such behavior has the potential to adversely affect the safety and well-being of community members.
|
||||
|
||||
## 9. Contact info
|
||||
|
||||
dev@superset.incubator.apache.org
|
||||
|
||||
## 10. License and attribution
|
||||
|
||||
This Code of Conduct is distributed under a [Creative Commons Attribution-ShareAlike license](http://creativecommons.org/licenses/by-sa/3.0/).
|
||||
|
||||
Portions of text derived from the [Django Code of Conduct](https://www.djangoproject.com/conduct/) and the [Geek Feminism Anti-Harassment Policy](http://geekfeminism.wikia.com/wiki/Conference_anti-harassment/Policy).
|
||||
|
||||
Retrieved on November 22, 2016 from [http://citizencodeofconduct.org/](http://citizencodeofconduct.org/)
|
||||
346
CONTRIBUTING.md
346
CONTRIBUTING.md
@@ -9,7 +9,7 @@ You can contribute in many ways:
|
||||
|
||||
### Report Bugs
|
||||
|
||||
Report bugs through GitHub
|
||||
Report bugs through Github
|
||||
|
||||
If you are reporting a bug, please include:
|
||||
|
||||
@@ -18,9 +18,6 @@ If you are reporting a bug, please include:
|
||||
troubleshooting.
|
||||
- Detailed steps to reproduce the bug.
|
||||
|
||||
When you post python stack traces please quote them using
|
||||
[markdown blocks](https://help.github.com/articles/creating-and-highlighting-code-blocks/).
|
||||
|
||||
### Fix Bugs
|
||||
|
||||
Look through the GitHub issues for bugs. Anything tagged with "bug" is
|
||||
@@ -29,18 +26,18 @@ open to whoever wants to implement it.
|
||||
### Implement Features
|
||||
|
||||
Look through the GitHub issues for features. Anything tagged with
|
||||
"feature" or "starter_task" is open to whoever wants to implement it.
|
||||
"feature" is open to whoever wants to implement it.
|
||||
|
||||
### Documentation
|
||||
|
||||
Superset could always use better documentation,
|
||||
whether as part of the official Superset docs,
|
||||
Panoramix could always use better documentation,
|
||||
whether as part of the official Panoramix docs,
|
||||
in docstrings, `docs/*.rst` or even on the web as blog posts or
|
||||
articles.
|
||||
|
||||
### Submit Feedback
|
||||
|
||||
The best way to send feedback is to file an issue on GitHub.
|
||||
The best way to send feedback is to file an issue on Github.
|
||||
|
||||
If you are proposing a feature:
|
||||
|
||||
@@ -49,161 +46,48 @@ If you are proposing a feature:
|
||||
implement.
|
||||
- Remember that this is a volunteer-driven project, and that
|
||||
contributions are welcome :)
|
||||
|
||||
### Questions
|
||||
|
||||
There is a dedicated [tag](https://stackoverflow.com/questions/tagged/apache-superset) on [stackoverflow](https://stackoverflow.com/). Please use it when asking questions.
|
||||
## Latest Documentation
|
||||
|
||||
## Pull Request Guidelines
|
||||
|
||||
Before you submit a pull request from your forked repo, check that it
|
||||
meets these guidelines:
|
||||
|
||||
1. The pull request should include tests, either as doctests,
|
||||
unit tests, or both.
|
||||
2. If the pull request adds functionality, the docs should be updated
|
||||
as part of the same PR. Doc strings are often sufficient; make
|
||||
sure to follow the sphinx compatible standards.
|
||||
3. The pull request should work for Python 2.7, and ideally python 3.4+.
|
||||
``from __future__ import`` will be required in every `.py` file soon.
|
||||
4. Code will be reviewed by re-running the unittests, flake8 and syntax
|
||||
should be as rigorous as the core Python project.
|
||||
5. Please rebase and resolve all conflicts before submitting.
|
||||
6. If you are asked to update your pull request with some changes there's
|
||||
no need to create a new one. Push your changes to the same branch.
|
||||
|
||||
## Documentation
|
||||
|
||||
The latest documentation and tutorial are available [here](https://superset.incubator.apache.org/).
|
||||
|
||||
Contributing to the official documentation is relatively easy, once you've setup
|
||||
your environment and done an edit end-to-end. The docs can be found in the
|
||||
`docs/` subdirectory of the repository, and are written in the
|
||||
[reStructuredText format](https://en.wikipedia.org/wiki/ReStructuredText) (.rst).
|
||||
If you've written Markdown before, you'll find the reStructuredText format familiar.
|
||||
|
||||
Superset uses [Sphinx](http://www.sphinx-doc.org/en/1.5.1/) to convert the rst files
|
||||
in `docs/` to the final HTML output users see.
|
||||
|
||||
Before you start changing the docs, you'll want to
|
||||
[fork the Superset project on Github](https://help.github.com/articles/fork-a-repo/).
|
||||
Once that new repository has been created, clone it on your local machine:
|
||||
|
||||
git clone git@github.com:your_username/incubator-superset.git
|
||||
|
||||
At this point, you may also want to create a
|
||||
[Python virtual environment](http://docs.python-guide.org/en/latest/dev/virtualenvs/)
|
||||
to manage the Python packages you're about to install:
|
||||
|
||||
virtualenv superset-dev
|
||||
source superset-dev/bin/activate
|
||||
|
||||
Finally, to make changes to the rst files and build the docs using Sphinx,
|
||||
you'll need to install a handful of dependencies from the repo you cloned:
|
||||
|
||||
cd incubator-superset
|
||||
pip install -r dev-reqs-for-docs.txt
|
||||
|
||||
To get the feel for how to edit and build the docs, let's edit a file, build
|
||||
the docs and see our changes in action. First, you'll want to
|
||||
[create a new branch](https://git-scm.com/book/en/v2/Git-Branching-Basic-Branching-and-Merging)
|
||||
to work on your changes:
|
||||
|
||||
git checkout -b changes-to-docs
|
||||
|
||||
Now, go ahead and edit one of the files under `docs/`, say `docs/tutorial.rst`
|
||||
- change it however you want. Check out the
|
||||
[ReStructuredText Primer](http://docutils.sourceforge.net/docs/user/rst/quickstart.html)
|
||||
for a reference on the formatting of the rst files.
|
||||
|
||||
Once you've made your changes, run this command from the root of the Superset
|
||||
repo to convert the docs into HTML:
|
||||
|
||||
python setup.py build_sphinx
|
||||
|
||||
You'll see a lot of output as Sphinx handles the conversion. After it's done, the
|
||||
HTML Sphinx generated should be in `docs/_build/html`. Go ahead and navigate there
|
||||
and start a simple web server so we can check out the docs in a browser:
|
||||
|
||||
cd docs/_build/html
|
||||
python -m SimpleHTTPServer
|
||||
|
||||
This will start a small Python web server listening on port 8000. Point your
|
||||
browser to [http://localhost:8000/](http://localhost:8000/), find the file
|
||||
you edited earlier, and check out your changes!
|
||||
|
||||
If you've made a change you'd like to contribute to the actual docs, just commit
|
||||
your code, push your new branch to Github:
|
||||
|
||||
git add docs/tutorial.rst
|
||||
git commit -m 'Awesome new change to tutorial'
|
||||
git push origin changes-to-docs
|
||||
|
||||
Then, [open a pull request](https://help.github.com/articles/about-pull-requests/).
|
||||
|
||||
If you're adding new images to the documentation, you'll notice that the images
|
||||
referenced in the rst, e.g.
|
||||
|
||||
.. image:: _static/img/tutorial/tutorial_01_sources_database.png
|
||||
|
||||
aren't actually included in that directory. _Instead_, you'll want to add and commit
|
||||
images (and any other static assets) to the _superset/assets/images_ directory.
|
||||
When the docs are being pushed to [Apache Superset (incubating)](https://superset.incubator.apache.org/), images
|
||||
will be moved from there to the _\_static/img_ directory, just like they're referenced
|
||||
in the docs.
|
||||
|
||||
For example, the image referenced above actually lives in
|
||||
|
||||
superset/assets/images/tutorial
|
||||
|
||||
Since the image is moved during the documentation build process, the docs reference the
|
||||
image in
|
||||
|
||||
_static/img/tutorial
|
||||
|
||||
instead.
|
||||
[API Documentation](http://pythonhosted.com/panoramix)
|
||||
|
||||
## Setting up a Python development environment
|
||||
|
||||
Check the [OS dependencies](https://superset.incubator.apache.org/installation.html#os-dependencies) before following these steps.
|
||||
|
||||
# fork the repo on GitHub and then clone it
|
||||
# fork the repo on github and then clone it
|
||||
# alternatively you may want to clone the main repo but that won't work
|
||||
# so well if you are planning on sending PRs
|
||||
# git clone git@github.com:apache/incubator-superset.git
|
||||
# git clone git@github.com:mistercrunch/panoramix.git
|
||||
|
||||
# [optional] setup a virtual env and activate it
|
||||
virtualenv env
|
||||
source env/bin/activate
|
||||
|
||||
# install for development
|
||||
pip install -e .
|
||||
python setup.py develop
|
||||
|
||||
# Create an admin user
|
||||
fabmanager create-admin --app superset
|
||||
fabmanager create-admin --app panoramix
|
||||
|
||||
# Initialize the database
|
||||
superset db upgrade
|
||||
panoramix db upgrade
|
||||
|
||||
# Create default roles and permissions
|
||||
superset init
|
||||
panoramix init
|
||||
|
||||
# Load some data to play with
|
||||
superset load_examples
|
||||
panoramix load_examples
|
||||
|
||||
# start a dev web server
|
||||
superset runserver -d
|
||||
panoramix runserver -d
|
||||
|
||||
|
||||
## Setting up the node / npm javascript environment
|
||||
|
||||
`superset/assets` contains all npm-managed, front end assets.
|
||||
`panoramix/assets` contains all npm-managed, front end assets.
|
||||
Flask-Appbuilder itself comes bundled with jQuery and bootstrap.
|
||||
While these may be phased out over time, these packages are currently not
|
||||
managed with npm.
|
||||
|
||||
### Node/npm versions
|
||||
Make sure you are using recent versions of node and npm. No problems have been found with node>=5.10 and 4.0. > npm>=3.9.
|
||||
|
||||
### Using npm to generate bundled files
|
||||
|
||||
@@ -216,34 +100,27 @@ echo prefix=~/.npm-packages >> ~/.npmrc
|
||||
curl -L https://www.npmjs.com/install.sh | sh
|
||||
```
|
||||
|
||||
The final step is to add `~/.npm-packages/bin` to your `PATH` so commands you install globally are usable.
|
||||
Add something like this to your `.bashrc` file, then `source ~/.bashrc` to reflect the change.
|
||||
The final step is to add
|
||||
`~/.node/bin` to your `PATH` so commands you install globally are usable.
|
||||
Add something like this to your `.bashrc` file.
|
||||
```
|
||||
export PATH="$HOME/.npm-packages/bin:$PATH"
|
||||
export PATH="$HOME/.node/bin:$PATH"
|
||||
```
|
||||
|
||||
#### npm packages
|
||||
To install third party libraries defined in `package.json`, run the
|
||||
following within the `superset/assets/` directory which will install them in a
|
||||
following within this directory which will install them in a
|
||||
new `node_modules/` folder within `assets/`.
|
||||
|
||||
```bash
|
||||
# from the root of the repository, move to where our JS package.json lives
|
||||
cd superset/assets/
|
||||
# install yarn, a replacement for `npm install` that is faster and more deterministic
|
||||
npm install -g yarn
|
||||
# run yarn to fetch all the dependencies
|
||||
yarn
|
||||
```
|
||||
npm install
|
||||
```
|
||||
|
||||
To parse and generate bundled files for superset, run either of the
|
||||
To parse and generate bundled files for panoramix, run either of the
|
||||
following commands. The `dev` flag will keep the npm script running and
|
||||
re-run it upon any changes within the assets directory.
|
||||
|
||||
```
|
||||
# Copies a conf file from the frontend to the backend
|
||||
npm run sync-backend
|
||||
|
||||
# Compiles the production / optimized js & css
|
||||
npm run prod
|
||||
|
||||
@@ -255,67 +132,24 @@ For every development session you will have to start a flask dev server
|
||||
as well as an npm watcher
|
||||
|
||||
```
|
||||
superset runserver -d -p 8081
|
||||
panoramix runserver -d -p 8081
|
||||
npm run dev
|
||||
```
|
||||
|
||||
## Testing
|
||||
|
||||
Before running python unit tests, please setup local testing environment:
|
||||
```
|
||||
pip install -r dev-reqs.txt
|
||||
```
|
||||
Tests can then be run with:
|
||||
|
||||
All python tests can be run with:
|
||||
|
||||
./run_tests.sh
|
||||
|
||||
Alternatively, you can run a specific test with:
|
||||
|
||||
./run_specific_test.sh tests.core_tests:CoreTests.test_function_name
|
||||
|
||||
Note that before running specific tests, you have to both setup the local testing environment and run all tests.
|
||||
|
||||
We use [Mocha](https://mochajs.org/), [Chai](http://chaijs.com/) and [Enzyme](http://airbnb.io/enzyme/) to test Javascript. Tests can be run with:
|
||||
|
||||
cd /superset/superset/assets/javascripts
|
||||
npm i
|
||||
npm run test
|
||||
|
||||
## Linting
|
||||
./run_unit_tests.sh
|
||||
|
||||
Lint the project with:
|
||||
|
||||
# for python
|
||||
flake8
|
||||
# for python changes
|
||||
flake8 changes tests
|
||||
|
||||
# for javascript
|
||||
npm run lint
|
||||
|
||||
## Linting with codeclimate
|
||||
Codeclimate is a service we use to measure code quality and test coverage. To get codeclimate's report on your branch, ideally before sending your PR, you can setup codeclimate against your Superset fork. After you push to your fork, you should be able to get the report at http://codeclimate.com . Alternatively, if you prefer to work locally, you can install the codeclimate cli tool.
|
||||
|
||||
*Install the codeclimate cli tool*
|
||||
```
|
||||
curl -L https://github.com/docker/machine/releases/download/v0.7.0/docker-machine-`uname -s`-`uname -m` > /usr/local/bin/docker-machine && chmod +x /usr/local/bin/docker-machine
|
||||
brew install docker
|
||||
docker-machine create --driver virtual box default
|
||||
docker-machine env default
|
||||
eval "$(docker-machine env default)"
|
||||
docker pull codeclimate/codeclimate
|
||||
brew tap codeclimate/formulae
|
||||
brew install codeclimate
|
||||
```
|
||||
|
||||
*Run the lint command:*
|
||||
```
|
||||
docker-machine start
|
||||
eval "$(docker-machine env default)"
|
||||
codeclimate analyze
|
||||
```
|
||||
More info can be found here: https://docs.codeclimate.com/docs/open-source-free
|
||||
|
||||
|
||||
## API documentation
|
||||
|
||||
Generate the documentation with:
|
||||
@@ -323,115 +157,27 @@ Generate the documentation with:
|
||||
cd docs && ./build.sh
|
||||
|
||||
## CSS Themes
|
||||
As part of the npm build process, CSS for Superset is compiled from `Less`, a dynamic stylesheet language.
|
||||
As part of the npm build process, CSS for Panoramix is compiled from ```Less```, a dynamic stylesheet language.
|
||||
|
||||
It's possible to customize or add your own theme to Superset, either by overriding CSS rules or preferably
|
||||
by modifying the Less variables or files in `assets/stylesheets/less/`.
|
||||
It's possible to customize or add your own theme to Panoramix, either by overriding CSS rules or preferably
|
||||
by modifying the Less variables or files in ```assets/stylesheets/less/```.
|
||||
|
||||
The `variables.less` and `bootswatch.less` files that ship with Superset are derived from
|
||||
The ```variables.less``` and ```bootswatch.less``` files that ship with Panoramix are derived from
|
||||
[Bootswatch](https://bootswatch.com) and thus extend Bootstrap. Modify variables in these files directly, or
|
||||
swap them out entirely with the equivalent files from other Bootswatch [themes](https://github.com/thomaspark/bootswatch.git)
|
||||
|
||||
## Translations
|
||||
## Pull Request Guidelines
|
||||
|
||||
We use [Babel](http://babel.pocoo.org/en/latest/) to translate Superset. The
|
||||
key is to instrument the strings that need translation using
|
||||
`from flask_babel import lazy_gettext as _`. Once this is imported in
|
||||
a module, all you have to do is to `_("Wrap your strings")` using the
|
||||
underscore `_` "function".
|
||||
Before you submit a pull request from your forked repo, check that it
|
||||
meets these guidelines:
|
||||
|
||||
We use `import {t, tn, TCT} from locales;` in js, JSX file, locales is in `./superset/assets/javascripts/` directory.
|
||||
|
||||
To enable changing language in your environment, you can simply add the
|
||||
`LANGUAGES` parameter to your `superset_config.py`. Having more than one
|
||||
options here will add a language selection dropdown on the right side of the
|
||||
navigation bar.
|
||||
|
||||
LANGUAGES = {
|
||||
'en': {'flag': 'us', 'name': 'English'},
|
||||
'fr': {'flag': 'fr', 'name': 'French'},
|
||||
'zh': {'flag': 'cn', 'name': 'Chinese'},
|
||||
}
|
||||
|
||||
As per the [Flask AppBuilder documentation] about translation, to create a
|
||||
new language dictionary, run the following command (where `es` is replaced with
|
||||
the language code for your target language):
|
||||
|
||||
pybabel init -i superset/translations/messages.pot -d superset/translations -l es
|
||||
|
||||
Then it's a matter of running the statement below to gather all strings that
|
||||
need translation
|
||||
|
||||
fabmanager babel-extract --target superset/translations/ --output superset/translations/messages.pot --config superset/translations/babel.cfg -k _ -k __ -k t -k tn -k tct
|
||||
|
||||
You can then translate the strings gathered in files located under
|
||||
`superset/translation`, where there's one per language. For the translations
|
||||
to take effect, they need to be compiled using this command:
|
||||
|
||||
fabmanager babel-compile --target superset/translations/
|
||||
|
||||
In the case of JS translation, we need to convert the PO file into a JSON file, and we need the global download of the npm package po2json.
|
||||
The conversion is done using these commands:
|
||||
|
||||
npm install po2json -g
|
||||
|
||||
Execute this command to convert the en PO file into a json file:
|
||||
|
||||
po2json -d superset -f jed1.x superset/translations/en/LC_MESSAGES/messages.po superset/translations/en/LC_MESSAGES/messages.json
|
||||
|
||||
If you get errors running `po2json`, you might be running the ubuntu package with the same
|
||||
name rather than the nodejs package (they have a different format for the arguments). You
|
||||
need to be running the nodejs version, and so if there is a conflict you may need to point
|
||||
directly at `/usr/local/bin/po2json` rather than just `po2json`.
|
||||
|
||||
## Adding new datasources
|
||||
|
||||
1. Create Models and Views for the datasource, add them under superset folder, like a new my_models.py
|
||||
with models for cluster, datasources, columns and metrics and my_views.py with clustermodelview
|
||||
and datasourcemodelview.
|
||||
|
||||
2. Create db migration files for the new models
|
||||
|
||||
3. Specify this variable to add the datasource model and from which module it is from in config.py:
|
||||
|
||||
For example:
|
||||
|
||||
`ADDITIONAL_MODULE_DS_MAP = {'superset.my_models': ['MyDatasource', 'MyOtherDatasource']}`
|
||||
|
||||
This means it'll register MyDatasource and MyOtherDatasource in superset.my_models module in the source registry.
|
||||
|
||||
## Creating a new visualization type
|
||||
|
||||
Here's an example as a Github PR with comments that describe what the
|
||||
different sections of the code do:
|
||||
https://github.com/apache/incubator-superset/pull/3013
|
||||
|
||||
## Refresh documentation website
|
||||
|
||||
Every once in a while we want to compile the documentation and publish it.
|
||||
Here's how to do it.
|
||||
|
||||
.. code::
|
||||
|
||||
# install doc dependencies
|
||||
pip install -r dev-reqs-for-docs.txt
|
||||
|
||||
# build the docs
|
||||
python setup.py build_sphinx
|
||||
|
||||
# copy html files to temp folder
|
||||
cp -r docs/_build/html/ /tmp/tmp_superset_docs/
|
||||
|
||||
# clone the docs repo
|
||||
cd ~/
|
||||
git clone https://git-wip-us.apache.org/repos/asf/incubator-superset-site.git
|
||||
|
||||
# copy
|
||||
cp -r /tmp/tmp_superset_docs/ ~/incubator-superset-site.git/
|
||||
|
||||
# commit and push to `asf-site` branch
|
||||
cd ~/incubator-superset-site.git/
|
||||
git checkout asf-site
|
||||
git add .
|
||||
git commit -a -m "New doc version"
|
||||
git push origin master
|
||||
1. The pull request should include tests, either as doctests,
|
||||
unit tests, or both.
|
||||
2. If the pull request adds functionality, the docs should be updated
|
||||
as part of the same PR. Doc strings are often sufficient; make
|
||||
sure to follow the sphinx compatible standards.
|
||||
3. The pull request should work for Python 2.6, 2.7, and ideally python 3.3.
|
||||
`from __future__ import ` will be required in every `.py` file soon.
|
||||
4. Code will be reviewed by re-running the unittests, flake8 and syntax
|
||||
should be as rigorous as the core Python project.
|
||||
5. Please rebase and resolve all conflicts before submitting.
|
||||
|
||||
@@ -1,19 +0,0 @@
|
||||
Make sure these boxes are checked before submitting your issue - thank you!
|
||||
|
||||
- [ ] I have checked the superset logs for python stacktraces and included it here as text if any
|
||||
- [ ] I have reproduced the issue with at least the latest released version of superset
|
||||
- [ ] I have checked the issue tracker for the same issue and I haven't found one similar
|
||||
|
||||
|
||||
### Superset version
|
||||
|
||||
|
||||
### Expected results
|
||||
|
||||
|
||||
### Actual results
|
||||
|
||||
|
||||
### Steps to reproduce
|
||||
|
||||
|
||||
15
MANIFEST.in
15
MANIFEST.in
@@ -1,9 +1,8 @@
|
||||
recursive-include superset/data *
|
||||
recursive-include superset/migrations *
|
||||
recursive-include superset/static *
|
||||
recursive-exclude superset/static/docs *
|
||||
recursive-exclude superset/static/spec *
|
||||
recursive-exclude superset/static/assets/node_modules *
|
||||
recursive-include superset/templates *
|
||||
recursive-include superset/translations *
|
||||
recursive-include panoramix/templates *
|
||||
recursive-include panoramix/static *
|
||||
recursive-exclude panoramix/static/assets/node_modules *
|
||||
recursive-include panoramix/static/assets/node_modules/font-awesome *
|
||||
recursive-exclude panoramix/static/docs *
|
||||
recursive-exclude tests *
|
||||
recursive-include panoramix/data *
|
||||
recursive-include panoramix/migrations *
|
||||
|
||||
321
README.md
321
README.md
@@ -1,200 +1,183 @@
|
||||
Superset
|
||||
Panoramix
|
||||
=========
|
||||
|
||||
[](https://travis-ci.org/apache/incubator-superset)
|
||||
[](https://badge.fury.io/py/superset)
|
||||
[](https://coveralls.io/github/apache/incubator-superset?branch=master)
|
||||
[](https://pypi.python.org/pypi/superset)
|
||||
[](https://requires.io/github/apache/incubator-superset/requirements/?branch=master)
|
||||
[](https://gitter.im/airbnb/superset?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge)
|
||||
[](https://superset.incubator.apache.org)
|
||||
[](https://david-dm.org/apache/incubator-superset?path=superset/assets)
|
||||
[](https://gitter.im/mistercrunch/panoramix?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge)
|
||||

|
||||
[](https://coveralls.io/github/mistercrunch/panoramix?branch=master)
|
||||
[](https://landscape.io/github/mistercrunch/panoramix/master)
|
||||
|
||||
<img
|
||||
src="https://cloud.githubusercontent.com/assets/130878/20946612/49a8a25c-bbc0-11e6-8314-10bef902af51.png"
|
||||
alt="Superset"
|
||||
width="500"
|
||||
/>
|
||||
|
||||
**Apache Superset** (incubating) is a modern, enterprise-ready
|
||||
business intelligence web application
|
||||
|
||||
[this project used to be named **Caravel**, and **Panoramix** in the past]
|
||||
Panoramix is a data exploration platform designed to be visual, intuitive
|
||||
and interactive.
|
||||
|
||||
|
||||
Screenshots & Gifs
|
||||
------------------
|
||||
Video - Introduction to Panoramix
|
||||
---------------------------------
|
||||
[](http://www.youtube.com/watch?v=3Txm_nj_R7M)
|
||||
|
||||
**View Dashboards**
|
||||
Screenshots
|
||||
------------
|
||||

|
||||

|
||||
|
||||

|
||||
Panoramix
|
||||
---------
|
||||
Panoramix's main goal is to make it easy to slice, dice and visualize data.
|
||||
It empowers its user to perform **analytics at the speed of thought**.
|
||||
|
||||
<br/>
|
||||
|
||||
**View/Edit a Slice**
|
||||
|
||||

|
||||
|
||||
<br/>
|
||||
|
||||
**Query and Visualize with SQL Lab**
|
||||
|
||||

|
||||
|
||||
<br/>
|
||||
|
||||

|
||||
|
||||

|
||||
|
||||

|
||||
|
||||
Apache Superset
|
||||
---------------
|
||||
Apache Superset is a data exploration and visualization web application.
|
||||
|
||||
Superset provides:
|
||||
* An intuitive interface to explore and visualize datasets, and
|
||||
create interactive dashboards.
|
||||
* A wide array of beautiful visualizations to showcase your data.
|
||||
* Easy, code-free, user flows to drill down and slice and dice the data
|
||||
underlying exposed dashboards. The dashboards and charts acts as a starting
|
||||
point for deeper analysis.
|
||||
* A state of the art SQL editor/IDE exposing a rich metadata browser, and
|
||||
an easy workflow to create visualizations out of any result set.
|
||||
Panoramix provides:
|
||||
* A quick way to intuitively visualize datasets
|
||||
* Create and share interactive dashboards
|
||||
* A rich set of visualizations to analyze your data, as well as a flexible
|
||||
way to extend the capabilities
|
||||
* An extensible, high granularity security model allowing intricate rules
|
||||
on who can access which product features and datasets.
|
||||
Integration with major
|
||||
authentication backends (database, OpenID, LDAP, OAuth, REMOTE_USER, ...)
|
||||
* A lightweight semantic layer, allowing to control how data sources are
|
||||
exposed to the user by defining dimensions and metrics
|
||||
* Out of the box support for most SQL-speaking databases
|
||||
* Deep integration with Druid allows for Superset to stay blazing fast while
|
||||
on who can access which features, and integration with major
|
||||
authentication providers (database, OpenID, LDAP, OAuth & REMOTE_USER
|
||||
through Flask AppBuilder)
|
||||
* A simple semantic layer, allowing to control how data sources are
|
||||
displayed in the UI,
|
||||
by defining which fields should show up in which dropdown and which
|
||||
aggregation and function (metrics) are made available to the user
|
||||
* Deep integration with Druid allows for Panoramix to stay blazing fast while
|
||||
slicing and dicing large, realtime datasets
|
||||
* Fast loading dashboards with configurable caching
|
||||
|
||||
|
||||
Buzz Phrases
|
||||
------------
|
||||
|
||||
* Analytics at the speed of thought!
|
||||
* Instantaneous learning curve
|
||||
* Realtime analytics when querying [Druid.io](http://druid.io)
|
||||
* Extensible to infinity
|
||||
|
||||
Database Support
|
||||
----------------
|
||||
|
||||
Superset speaks many SQL dialects through SQLAlchemy, a Python
|
||||
Panoramix was originally designed on top of Druid.io, but quickly broadened
|
||||
its scope to support other databases through the use of SqlAlchemy, a Python
|
||||
ORM that is compatible with
|
||||
[most common databases](http://docs.sqlalchemy.org/en/rel_1_0/core/engines.html).
|
||||
|
||||
Superset can be used to visualize data out of most databases:
|
||||
* MySQL
|
||||
* Postgres
|
||||
* Vertica
|
||||
* Oracle
|
||||
* Microsoft SQL Server
|
||||
* SQLite
|
||||
* Greenplum
|
||||
* Firebird
|
||||
* MariaDB
|
||||
* Sybase
|
||||
* IBM DB2
|
||||
* Exasol
|
||||
* MonetDB
|
||||
* Snowflake
|
||||
* Redshift
|
||||
* **more!** look for the availability of a SQLAlchemy dialect for your database
|
||||
to find out whether it will work with Superset
|
||||
[most common databases](http://docs.sqlalchemy.org/en/rel_1_0/core/engines.html).
|
||||
|
||||
|
||||
Druid!
|
||||
------
|
||||
What is Druid?
|
||||
-------------
|
||||
From their website at http://druid.io
|
||||
|
||||
On top of having the ability to query your relational databases,
|
||||
Superset has ships with deep integration with Druid (a real time distributed
|
||||
column-store). When querying Druid,
|
||||
Superset can query humongous amounts of data on top of real time dataset.
|
||||
Note that Superset does not require Druid in any way to function, it's simply
|
||||
another database backend that it can query.
|
||||
|
||||
Here's a description of Druid from the http://druid.io website:
|
||||
|
||||
*Druid is an open-source analytics data store designed for
|
||||
business intelligence (OLAP) queries on event data. Druid provides low
|
||||
latency (real-time) data ingestion, flexible data exploration,
|
||||
and fast data aggregation. Existing Druid deployments have scaled to
|
||||
trillions of events and petabytes of data. Druid is best used to
|
||||
*Druid is an open-source analytics data store designed for
|
||||
business intelligence (OLAP) queries on event data. Druid provides low
|
||||
latency (real-time) data ingestion, flexible data exploration,
|
||||
and fast data aggregation. Existing Druid deployments have scaled to
|
||||
trillions of events and petabytes of data. Druid is best used to
|
||||
power analytic dashboards and applications.*
|
||||
|
||||
|
||||
Installation & Configuration
|
||||
----------------------------
|
||||
Installation
|
||||
------------
|
||||
|
||||
[See in the documentation](https://superset.incubator.apache.org/installation.html)
|
||||
Panoramix is currently only tested using Python 2.7.*. Python 3 support is
|
||||
on the roadmap, Python 2.6 won't be supported.
|
||||
|
||||
Follow these few simple steps to install Panoramix.
|
||||
|
||||
Resources
|
||||
```
|
||||
# Install panoramix
|
||||
pip install panoramix
|
||||
|
||||
# Create an admin user
|
||||
fabmanager create-admin --app panoramix
|
||||
|
||||
# Initialize the database
|
||||
panoramix db upgrade
|
||||
|
||||
# Create default roles and permissions
|
||||
panoramix init
|
||||
|
||||
# Load some data to play with
|
||||
panoramix load_examples
|
||||
|
||||
# Start the development web server
|
||||
panoramix runserver -d
|
||||
```
|
||||
|
||||
After installation, you should be able to point your browser to the right
|
||||
hostname:port [http://localhost:8088](http://localhost:8088), login using
|
||||
the credential you entered while creating the admin account, and navigate to
|
||||
`Menu -> Admin -> Refresh Metadata`. This action should bring in all of
|
||||
your datasources for Panoramix to be aware of, and they should show up in
|
||||
`Menu -> Datasources`, from where you can start playing with your data!
|
||||
|
||||
Configuration
|
||||
-------------
|
||||
* [Mailing list](https://lists.apache.org/list.html?dev@superset.apache.org/)
|
||||
* [Gitter (live chat) Channel](https://gitter.im/airbnb/superset)
|
||||
* [Docker image](https://hub.docker.com/r/amancevice/superset/) (community contributed)
|
||||
* [Slides from Strata (March 2016)](https://drive.google.com/open?id=0B5PVE0gzO81oOVJkdF9aNkJMSmM)
|
||||
* [Stackoverflow tag](https://stackoverflow.com/questions/tagged/apache-superset)
|
||||
* [DEPRECATED Google Group](https://groups.google.com/forum/#!forum/airbnb_superset)
|
||||
|
||||
To configure your application, you need to create a file (module)
|
||||
`panoramix_config.py` and make sure it is in your PYTHONPATH. Here are some
|
||||
of the parameters you can copy / paste in that configuration module:
|
||||
|
||||
```
|
||||
#---------------------------------------------------------
|
||||
# Panoramix specifix config
|
||||
#---------------------------------------------------------
|
||||
ROW_LIMIT = 5000
|
||||
WEBSERVER_THREADS = 8
|
||||
|
||||
PANORAMIX_WEBSERVER_PORT = 8088
|
||||
#---------------------------------------------------------
|
||||
|
||||
#---------------------------------------------------------
|
||||
# Flask App Builder configuration
|
||||
#---------------------------------------------------------
|
||||
# Your App secret key
|
||||
SECRET_KEY = '\2\1thisismyscretkey\1\2\e\y\y\h'
|
||||
|
||||
# The SQLAlchemy connection string.
|
||||
SQLALCHEMY_DATABASE_URI = 'sqlite:////tmp/panoramix.db'
|
||||
|
||||
# Flask-WTF flag for CSRF
|
||||
CSRF_ENABLED = True
|
||||
|
||||
# Whether to run the web server in debug mode or not
|
||||
DEBUG = True
|
||||
```
|
||||
|
||||
This file also allows you to define configuration parameters used by
|
||||
Flask App Builder, the web framework used by Panoramix. Please consult
|
||||
the [Flask App Builder Documentation](http://flask-appbuilder.readthedocs.org/en/latest/config.html) for more information on how to configure Panoramix.
|
||||
|
||||
|
||||
* From the UI, enter the information about your clusters in the
|
||||
``Admin->Clusters`` menu by hitting the + sign.
|
||||
|
||||
* Once the Druid cluster connection information is entered, hit the
|
||||
``Admin->Refresh Metadata`` menu item to populate
|
||||
|
||||
* Navigate to your datasources
|
||||
|
||||
More screenshots
|
||||
----------------
|
||||
|
||||

|
||||

|
||||

|
||||

|
||||
|
||||
Related Links
|
||||
-------------
|
||||
* [Panoramix Google Group](https://groups.google.com/forum/#!forum/airbnb_panoramix)
|
||||
* [Gitter (live chat) Channel](https://gitter.im/mistercrunch/panoramix)
|
||||
|
||||
|
||||
Tip of the Hat
|
||||
--------------
|
||||
|
||||
Panoramix would not be possible without these great frameworks / libs
|
||||
|
||||
* Flask App Builder - Allowing us to focus on building the app quickly while
|
||||
getting the foundation for free
|
||||
* The Flask ecosystem - Simply amazing. So much Plug, easy play.
|
||||
* NVD3 - One of the best charting library out there
|
||||
* Much more, check out the requirements.txt file!
|
||||
|
||||
|
||||
Contributing
|
||||
------------
|
||||
|
||||
Interested in contributing? Casual hacking? Check out
|
||||
[Contributing.MD](https://github.com/airbnb/superset/blob/master/CONTRIBUTING.md)
|
||||
|
||||
|
||||
Who uses Apache Superset (incubating)?
|
||||
--------------------------------------
|
||||
|
||||
Here's a list of organizations who have taken the time to send a PR to let
|
||||
the world know they are using Superset. Join our growing community!
|
||||
|
||||
- [AiHello](https://www.aihello.com)
|
||||
- [Airbnb](https://github.com/airbnb)
|
||||
- [Amino](https://amino.com)
|
||||
- [Brilliant.org](https://brilliant.org/)
|
||||
- [Capital Service S.A.](http://capitalservice.pl)
|
||||
- [Clark.de](http://clark.de/)
|
||||
- [Digit Game Studios](https://www.digitgaming.com/)
|
||||
- [Douban](https://www.douban.com/)
|
||||
- [Endress+Hauser](http://www.endress.com/)
|
||||
- [FBK - ICT center](http://ict.fbk.eu)
|
||||
- [Faasos](http://faasos.com/)
|
||||
- [GfK Data Lab](http://datalab.gfk.com)
|
||||
- [Konfío](http://konfio.mx)
|
||||
- [Lyft](https://www.lyft.com/)
|
||||
- [Maieutical Labs](https://cloudschooling.it)
|
||||
- [Ona](https://ona.io)
|
||||
- [Pronto Tools](http://www.prontotools.io)
|
||||
- [Qunar](https://www.qunar.com/)
|
||||
- [Shopee](https://shopee.sg)
|
||||
- [Shopkick](https://www.shopkick.com)
|
||||
- [Tails.com](https://tails.com)
|
||||
- [Tobii](http://www.tobii.com/)
|
||||
- [Tooploox](https://www.tooploox.com/)
|
||||
- [Twitter](https://twitter.com/)
|
||||
- [Udemy](https://www.udemy.com/)
|
||||
- [VIPKID](https://www.vipkid.com.cn/)
|
||||
- [Yahoo!](https://yahoo.com/)
|
||||
- [Zalando](https://www.zalando.com)
|
||||
|
||||
|
||||
More screenshots
|
||||
----------------
|
||||
|
||||

|
||||
|
||||

|
||||
|
||||

|
||||
|
||||

|
||||
|
||||

|
||||
|
||||

|
||||
|
||||

|
||||
|
||||

|
||||
Interested in contributing? Casual hacking? Check out [Contributing.MD](https://github.com/mistercrunch/panoramix/blob/master/CONTRIBUTING.md)
|
||||
|
||||
19
TODO.md
19
TODO.md
@@ -1,18 +1,25 @@
|
||||
# TODO
|
||||
List of TODO items for Superset
|
||||
List of TODO items for Panoramix
|
||||
|
||||
## Important
|
||||
* **Caching:** integrate with flask-cache
|
||||
* **Getting proper JS testing:** unit tests on the Python side are pretty
|
||||
solid, but now we need a test suite for the JS part of the site,
|
||||
testing all the ajax-type calls
|
||||
* **Viz Plugins:** Allow people to define and share visualization plugins.
|
||||
ideally one would only need to drop in a set of files in a folder and
|
||||
Superset would discover and expose the plugins
|
||||
Panoramix would discover and expose the plugins
|
||||
|
||||
## Features
|
||||
* **Stars:** set dashboards, slices and datasets as favorites
|
||||
* **Homepage:** a page that has links to your Slices and Dashes, favorited
|
||||
content, feed of recent actions (people viewing your objects)
|
||||
* **Comments:** allow for people to comment on slices and dashes
|
||||
* **Dashboard URL filters:** `{dash_url}#fltin__fieldname__value1,value2`
|
||||
* **Default slice:** choose a default slice for the dataset instead of
|
||||
default endpoint
|
||||
* **refresh freq**: specifying the refresh frequency of a dashboard and
|
||||
specific slices within it, some randomization would be nice
|
||||
* **Widget sets / chart grids:** a way to have all charts support making
|
||||
a series of charts and putting them in a grid. The same way that you
|
||||
can groupby for series, you could chart by. The form field set would be
|
||||
@@ -25,14 +32,15 @@ List of TODO items for Superset
|
||||
some visualizations as annotations. An example of a layer might be
|
||||
"holidays" or "site outages", ...
|
||||
* **Slack integration** - TBD
|
||||
* **Comments:** allow for people to comment on slices and dashes
|
||||
* **Sexy Viz Selector:** the visualization selector should be a nice large
|
||||
modal with nice thumbnails for each one of the viz
|
||||
|
||||
|
||||
## Easy-ish fix
|
||||
* Build matrix to include mysql using tox
|
||||
* CREATE VIEW button from SQL editor
|
||||
* Test button for when editing SQL expression
|
||||
* Slider form element
|
||||
* datasource in explore mode could be a dropdown
|
||||
* [druid] Allow for post aggregations (ratios!)
|
||||
* in/notin filters autocomplete (druid)
|
||||
|
||||
@@ -45,4 +53,5 @@ List of TODO items for Superset
|
||||
* ...
|
||||
|
||||
## Community
|
||||
* Tutorial vids
|
||||
* Create proper user documentation (started using Sphinx and bootstrap...)
|
||||
* Usage vid
|
||||
|
||||
@@ -29,7 +29,7 @@ script_location = migrations
|
||||
# are written from script.py.mako
|
||||
# output_encoding = utf-8
|
||||
|
||||
sqlalchemy.url = scheme://localhost/superset
|
||||
sqlalchemy.url = scheme://localhost/panoramix
|
||||
|
||||
# Logging configuration
|
||||
[loggers]
|
||||
|
||||
@@ -1,3 +0,0 @@
|
||||
sphinx
|
||||
sphinx-rtd-theme
|
||||
sphinxcontrib.youtube
|
||||
15
dev-reqs.txt
15
dev-reqs.txt
@@ -1,15 +0,0 @@
|
||||
codeclimate-test-reporter
|
||||
coveralls
|
||||
flake8
|
||||
flask_cors
|
||||
ipdb
|
||||
mock
|
||||
mysqlclient
|
||||
nose
|
||||
psycopg2
|
||||
pylint
|
||||
pyyaml
|
||||
redis
|
||||
statsd
|
||||
# Also install everything we need to build Sphinx docs
|
||||
-r dev-reqs-for-docs.txt
|
||||
@@ -87,9 +87,9 @@ qthelp:
|
||||
@echo
|
||||
@echo "Build finished; now you can run "qcollectiongenerator" with the" \
|
||||
".qhcp project file in $(BUILDDIR)/qthelp, like this:"
|
||||
@echo "# qcollectiongenerator $(BUILDDIR)/qthelp/superset.qhcp"
|
||||
@echo "# qcollectiongenerator $(BUILDDIR)/qthelp/panoramix.qhcp"
|
||||
@echo "To view the help file:"
|
||||
@echo "# assistant -collectionFile $(BUILDDIR)/qthelp/superset.qhc"
|
||||
@echo "# assistant -collectionFile $(BUILDDIR)/qthelp/panoramix.qhc"
|
||||
|
||||
applehelp:
|
||||
$(SPHINXBUILD) -b applehelp $(ALLSPHINXOPTS) $(BUILDDIR)/applehelp
|
||||
@@ -104,8 +104,8 @@ devhelp:
|
||||
@echo
|
||||
@echo "Build finished."
|
||||
@echo "To view the help file:"
|
||||
@echo "# mkdir -p $$HOME/.local/share/devhelp/superset"
|
||||
@echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/superset"
|
||||
@echo "# mkdir -p $$HOME/.local/share/devhelp/panoramix"
|
||||
@echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/panoramix"
|
||||
@echo "# devhelp"
|
||||
|
||||
epub:
|
||||
|
||||
1
docs/_build/html/README.md
vendored
1
docs/_build/html/README.md
vendored
@@ -1 +0,0 @@
|
||||
Folder containing the sphinx-generated documentation
|
||||
17
docs/_static/docs.css
vendored
17
docs/_static/docs.css
vendored
@@ -6,23 +6,6 @@ div.navbar {
|
||||
margin-bottom: 0px;
|
||||
}
|
||||
|
||||
p {
|
||||
margin-top: 5px;
|
||||
margin-bottom: 15px;
|
||||
}
|
||||
|
||||
#tutorial img {
|
||||
border: 1px solid gray;
|
||||
box-shadow: 5px 5px 5px #888888;
|
||||
margin-bottom: 10px;
|
||||
}
|
||||
|
||||
#gallery img {
|
||||
border: 1px solid gray;
|
||||
box-shadow: 5px 5px 5px #888888;
|
||||
margin: 10px;
|
||||
}
|
||||
|
||||
.carousel img {
|
||||
max-height: 500px;
|
||||
}
|
||||
|
||||
4
docs/_templates/layout.html
vendored
4
docs/_templates/layout.html
vendored
@@ -21,7 +21,7 @@
|
||||
<img src="_static/img/dash.png">
|
||||
<div class="carousel-caption">
|
||||
<div>
|
||||
<h1>Superset</h1>
|
||||
<h1>Panoramix</h1>
|
||||
<p>
|
||||
an open source data visualization platform
|
||||
</p>
|
||||
@@ -80,7 +80,7 @@
|
||||
<hr/>
|
||||
<div class="container">
|
||||
<div class="jumbotron">
|
||||
<h1>Superset</h1>
|
||||
<h1>Panoramix</h1>
|
||||
<p>
|
||||
is an open source data visualization platform that provides easy
|
||||
exploration of your data and allows you to create and share
|
||||
|
||||
@@ -1,7 +1,4 @@
|
||||
#!/usr/bin/env bash
|
||||
rm -rf _build
|
||||
rm -r _build
|
||||
make html
|
||||
#cp -r ../superset/assets/images/ _build/html/_static/img/
|
||||
cp -r ../superset/assets/images/ _static/img/
|
||||
rm -rf /tmp/superset-docs
|
||||
cp -r _build/html /tmp/superset-docs
|
||||
cp -r _build/html/ ../../panoramix-docs/
|
||||
|
||||
32
docs/conf.py
32
docs/conf.py
@@ -1,6 +1,6 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# superset documentation build configuration file, created by
|
||||
# panoramix documentation build configuration file, created by
|
||||
# sphinx-quickstart on Thu Dec 17 15:42:06 2015.
|
||||
#
|
||||
# This file is execfile()d with the current directory set to its
|
||||
@@ -15,7 +15,7 @@
|
||||
import sys
|
||||
import os
|
||||
import shlex
|
||||
import sphinx_rtd_theme
|
||||
import sphinx_bootstrap_theme
|
||||
|
||||
# If extensions (or modules to document with autodoc) are in another directory,
|
||||
# add these directories to sys.path here. If the directory is relative to the
|
||||
@@ -33,7 +33,6 @@ import sphinx_rtd_theme
|
||||
extensions = [
|
||||
'sphinx.ext.autodoc',
|
||||
'sphinx.ext.viewcode',
|
||||
'sphinxcontrib.youtube',
|
||||
]
|
||||
|
||||
# Add any paths that contain templates here, relative to this directory.
|
||||
@@ -51,8 +50,8 @@ source_suffix = '.rst'
|
||||
master_doc = 'index'
|
||||
|
||||
# General information about the project.
|
||||
project = "Apache Superset"
|
||||
copyright = None
|
||||
project = u'panoramix'
|
||||
copyright = u'2015, Maxime Beauchemin, Airbnb'
|
||||
author = u'Maxime Beauchemin'
|
||||
|
||||
# The version info for the project you're documenting, acts as replacement for
|
||||
@@ -113,13 +112,18 @@ todo_include_todos = False
|
||||
|
||||
# The theme to use for HTML and HTML Help pages. See the documentation for
|
||||
# a list of builtin themes.
|
||||
html_theme = "sphinx_rtd_theme"
|
||||
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
|
||||
html_theme = 'bootstrap'
|
||||
html_theme_path = sphinx_bootstrap_theme.get_html_theme_path()
|
||||
|
||||
# Theme options are theme-specific and customize the look and feel of a theme
|
||||
# further. For a list of options available for each theme, see the
|
||||
# documentation.
|
||||
html_theme_options = {}
|
||||
html_theme_options = {
|
||||
#'bootswatch_theme': 'simplex',
|
||||
'navbar_title': 'Panoramix Documentation',
|
||||
'navbar_fixed_top': "false",
|
||||
#'navbar_class': "navbar navbar-default",
|
||||
}
|
||||
|
||||
# Add any paths that contain custom themes here, relative to this directory.
|
||||
#html_theme_path = []
|
||||
@@ -181,7 +185,7 @@ html_show_sourcelink = False
|
||||
#html_show_sphinx = True
|
||||
|
||||
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
|
||||
html_show_copyright = False
|
||||
#html_show_copyright = True
|
||||
|
||||
# If true, an OpenSearch description file will be output, and all pages will
|
||||
# contain a <link> tag referring to it. The value of this option must be the
|
||||
@@ -206,7 +210,7 @@ html_show_copyright = False
|
||||
#html_search_scorer = 'scorer.js'
|
||||
|
||||
# Output file base name for HTML help builder.
|
||||
htmlhelp_basename = 'supersetdoc'
|
||||
htmlhelp_basename = 'panoramixdoc'
|
||||
|
||||
# -- Options for LaTeX output ---------------------------------------------
|
||||
|
||||
@@ -228,7 +232,7 @@ latex_elements = {
|
||||
# (source start file, target name, title,
|
||||
# author, documentclass [howto, manual, or own class]).
|
||||
latex_documents = [
|
||||
(master_doc, 'superset.tex', u'Superset Documentation',
|
||||
(master_doc, 'panoramix.tex', u'Panoramix Documentation',
|
||||
u'Maxime Beauchemin', 'manual'),
|
||||
]
|
||||
|
||||
@@ -258,7 +262,7 @@ latex_documents = [
|
||||
# One entry per manual page. List of tuples
|
||||
# (source start file, name, description, authors, manual section).
|
||||
man_pages = [
|
||||
(master_doc, 'Superset', u'superset Documentation',
|
||||
(master_doc, 'Panoramix', u'panoramix Documentation',
|
||||
[author], 1)
|
||||
]
|
||||
|
||||
@@ -272,8 +276,8 @@ man_pages = [
|
||||
# (source start file, target name, title, author,
|
||||
# dir menu entry, description, category)
|
||||
texinfo_documents = [
|
||||
(master_doc, 'Superset', u'Superset Documentation',
|
||||
author, 'Superset', 'One line description of project.',
|
||||
(master_doc, 'Panoramix', u'Panoramix Documentation',
|
||||
author, 'Panoramix', 'One line description of project.',
|
||||
'Miscellaneous'),
|
||||
]
|
||||
|
||||
|
||||
@@ -1,48 +0,0 @@
|
||||
Druid
|
||||
=====
|
||||
|
||||
Superset works well with Druid, though currently not all
|
||||
advanced features out of Druid are covered. This page clarifies what is
|
||||
covered and what isn't and explains how to use some of the features.
|
||||
|
||||
.. note ::
|
||||
Currently Airbnb runs against Druid ``0.8.x`` and previous /
|
||||
following versions are not tested against.
|
||||
|
||||
Supported
|
||||
'''''''''
|
||||
|
||||
Aggregations
|
||||
------------
|
||||
|
||||
Common aggregations, or Druid metrics can be defined and used in Superset.
|
||||
The first and simplest use case is to use the checkbox matrix exposed in your
|
||||
datasource's edit view (``Sources -> Druid Datasources ->
|
||||
[your datasource] -> Edit -> [tab] List Druid Column``).
|
||||
Clicking the ``GroupBy`` and ``Filterable`` checkboxes will make the column
|
||||
appear in the related dropdowns while in explore view. Checking
|
||||
``Count Distinct``, ``Min``, ``Max`` or ``Sum`` will result in creating
|
||||
new metrics that will appear in the ``List Druid Metric`` tab upon saving the
|
||||
datasource. By editing these metrics, you'll notice that their ``json``
|
||||
element corresponds to a Druid aggregation definition. You can create your own
|
||||
aggregations manually from the ``List Druid Metric`` tab following Druid
|
||||
documentation.
|
||||
|
||||
.. image:: _static/img/druid_agg.png
|
||||
:scale: 50 %
|
||||
|
||||
Post-Aggregations
|
||||
-----------------
|
||||
|
||||
Druid supports post aggregation and this works in Superset. All you have to
|
||||
do is creating a metric, much like you would create an aggregation manually,
|
||||
but specify ``postagg`` as a ``Metric Type``. You then have to provide a valid
|
||||
json post-aggregation definition (as specified in the Druid docs) in the
|
||||
Json field.
|
||||
|
||||
|
||||
Not yet supported
|
||||
'''''''''''''''''
|
||||
|
||||
- Regex filters
|
||||
- Lookups / joins
|
||||
246
docs/faq.rst
246
docs/faq.rst
@@ -1,246 +0,0 @@
|
||||
FAQ
|
||||
===
|
||||
|
||||
|
||||
Can I query/join multiple tables at one time?
|
||||
---------------------------------------------
|
||||
Not directly no. A Superset SQLAlchemy datasource can only be a single table
|
||||
or a view.
|
||||
|
||||
When working with tables, the solution would be to materialize
|
||||
a table that contains all the fields needed for your analysis, most likely
|
||||
through some scheduled batch process.
|
||||
|
||||
A view is a simple logical layer that abstracts arbitrary SQL queries as
|
||||
a virtual table. This can allow you to join and union multiple tables, and
|
||||
to apply some transformation using arbitrary SQL expressions. The limitation
|
||||
there is your database performance as Superset effectively will run a query
|
||||
on top of your query (view). A good practice may be to limit yourself to
|
||||
joining your main large table to one or many small tables only, and avoid
|
||||
using ``GROUP BY`` where possible as Superset will do its own ``GROUP BY`` and
|
||||
doing the work twice might slow down performance.
|
||||
|
||||
Whether you use a table or a view, the important factor is whether your
|
||||
database is fast enough to serve it in an interactive fashion to provide
|
||||
a good user experience in Superset.
|
||||
|
||||
|
||||
How BIG can my data source be?
|
||||
------------------------------
|
||||
|
||||
It can be gigantic! As mentioned above, the main criteria is whether your
|
||||
database can execute queries and return results in a time frame that is
|
||||
acceptable to your users. Many distributed databases out there can execute
|
||||
queries that scan through terabytes in an interactive fashion.
|
||||
|
||||
|
||||
How do I create my own visualization?
|
||||
-------------------------------------
|
||||
|
||||
We are planning on making it easier to add new visualizations to the
|
||||
framework, in the meantime, we've tagged a few pull requests as
|
||||
``example`` to give people examples of how to contribute new
|
||||
visualizations.
|
||||
|
||||
https://github.com/airbnb/superset/issues?q=label%3Aexample+is%3Aclosed
|
||||
|
||||
|
||||
Can I upload and visualize csv data?
|
||||
------------------------------------
|
||||
|
||||
Yes, using the ``Upload a CSV`` button under the ``Sources``
|
||||
menu item. This brings up a form that allows you to specify the required information. After creating the table from CSV, it can then be loaded like any other on the ``Sources -> Tables`` page.
|
||||
|
||||
|
||||
Why are my queries timing out?
|
||||
------------------------------
|
||||
|
||||
There are many reasons that may cause a long query to time out.
|
||||
|
||||
|
||||
- For long-running queries from SQL Lab, by default Superset allows them to run as long as 6 hours before they are killed by celery. If you want to increase the time allowed for running queries, you can specify the timeout in the configuration. For example:
|
||||
|
||||
``SQLLAB_ASYNC_TIME_LIMIT_SEC = 60 * 60 * 6``
|
||||
|
||||
|
||||
- Superset is running on gunicorn web server, which may time out web requests. If you want to increase the default (50), you can specify the timeout when starting the web server with the ``-t`` flag, which is expressed in seconds.
|
||||
|
||||
``superset runserver -t 300``
|
||||
|
||||
- If you are seeing timeouts (504 Gateway Time-out) when loading a dashboard or exploring a slice, you are probably behind a gateway or proxy server (such as Nginx). If it did not receive a timely response from the Superset server (which is processing long queries), these web servers will send a 504 status code to clients directly. Superset has a client-side timeout limit to address this issue. If the query didn't come back within the client-side timeout (60 seconds by default), Superset will display a warning message to avoid the gateway timeout message. If you have a longer gateway timeout limit, you can change the timeout settings in ``superset_config.py``:
|
||||
|
||||
``SUPERSET_WEBSERVER_TIMEOUT = 60``
|
||||
|
||||
|
||||
Why is the map not visible in the mapbox visualization?
|
||||
-------------------------------------------------------
|
||||
|
||||
You need to register to mapbox.com, get an API key and configure it as
|
||||
``MAPBOX_API_KEY`` in ``superset_config.py``.
|
||||
|
||||
|
||||
How to add dynamic filters to a dashboard?
|
||||
------------------------------------------
|
||||
|
||||
It's easy: use the ``Filter Box`` widget, build a slice, and add it to your
|
||||
dashboard.
|
||||
|
||||
The ``Filter Box`` widget allows you to define a query to populate dropdowns
|
||||
that can be used for filtering. To build the list of distinct values, we
|
||||
run a query, and sort the result by the metric you provide, sorting
|
||||
descending.
|
||||
|
||||
The widget also has a checkbox ``Date Filter``, which enables time filtering
|
||||
capabilities to your dashboard. After checking the box and refreshing, you'll
|
||||
see a ``from`` and a ``to`` dropdown show up.
|
||||
|
||||
By default, the filtering will be applied to all the slices that are built
|
||||
on top of a datasource that shares the column name that the filter is based
|
||||
on. It's also a requirement for that column to be checked as "filterable"
|
||||
in the column tab of the table editor.
|
||||
|
||||
But what about if you don't want certain widgets to get filtered on your
|
||||
dashboard? You can do that by editing your dashboard, and in the form,
|
||||
edit the ``JSON Metadata`` field, more specifically the
|
||||
``filter_immune_slices`` key, that receives an array of sliceIds that should
|
||||
never be affected by any dashboard level filtering.
|
||||
|
||||
|
||||
.. code:: json
|
||||
|
||||
{
|
||||
"filter_immune_slices": [324, 65, 92],
|
||||
"expanded_slices": {},
|
||||
"filter_immune_slice_fields": {
|
||||
"177": ["country_name", "__from", "__to"],
|
||||
"32": ["__from", "__to"]
|
||||
},
|
||||
"timed_refresh_immune_slices": [324]
|
||||
}
|
||||
|
||||
In the json blob above, slices 324, 65 and 92 won't be affected by any
|
||||
dashboard level filtering.
|
||||
|
||||
Now note the ``filter_immune_slice_fields`` key. This one allows you to
|
||||
be more specific and define for a specific slice_id, which filter fields
|
||||
should be disregarded.
|
||||
|
||||
Note the use of the ``__from`` and ``__to`` keywords, those are reserved
|
||||
for dealing with the time boundary filtering mentioned above.
|
||||
|
||||
But what happens with filtering when dealing with slices coming from
|
||||
different tables or databases? If the column name is shared, the filter will
|
||||
be applied, it's as simple as that.
|
||||
|
||||
|
||||
How to limit the timed refresh on a dashboard?
|
||||
----------------------------------------------
|
||||
By default, the dashboard timed refresh feature allows you to automatically re-query every slice
|
||||
on a dashboard according to a set schedule. Sometimes, however, you won't want all of the slices
|
||||
to be refreshed - especially if some data is slow moving, or run heavy queries. To exclude specific
|
||||
slices from the timed refresh process, add the ``timed_refresh_immune_slices`` key to the dashboard
|
||||
``JSON Metadata`` field:
|
||||
|
||||
.. code:: json
|
||||
|
||||
{
|
||||
"filter_immune_slices": [],
|
||||
"expanded_slices": {},
|
||||
"filter_immune_slice_fields": {},
|
||||
"timed_refresh_immune_slices": [324]
|
||||
}
|
||||
|
||||
In the example above, if a timed refresh is set for the dashboard, then every slice except 324 will
|
||||
be automatically re-queried on schedule.
|
||||
|
||||
Slice refresh will also be staggered over the specified period. You can turn off this staggering
|
||||
by setting the ``stagger_refresh`` to ``false`` and modify the stagger period by setting
|
||||
``stagger_time`` to a value in milliseconds in the ``JSON Metadata`` field:
|
||||
|
||||
.. code:: json
|
||||
|
||||
{
|
||||
"stagger_refresh": false,
|
||||
"stagger_time": 2500
|
||||
}
|
||||
|
||||
Here, the entire dashboard will refresh at once if periodic refresh is on. The stagger time of
|
||||
2.5 seconds is ignored.
|
||||
|
||||
Why does fabmanager or superset freeze/hang/not respond when started (when my home directory is NFS mounted)?
|
||||
-----------------------------------------------------------------------------------------
|
||||
By default, superset creates and uses an sqlite database at ``~/.superset/superset.db``. Sqlite is known to `not work well if used on NFS`__ due to a broken file locking implementation on NFS.
|
||||
|
||||
__ https://www.sqlite.org/lockingv3.html
|
||||
|
||||
You can override this path using the ``SUPERSET_HOME`` environment variable.
|
||||
|
||||
Another work around is to change where superset stores the sqlite database by adding ``SQLALCHEMY_DATABASE_URI = 'sqlite:////new/location/superset.db'`` in superset_config.py (create the file if needed), then adding the directory where superset_config.py lives to PYTHONPATH environment variable (e.g. ``export PYTHONPATH=/opt/logs/sandbox/airbnb/``).
|
||||
|
||||
What if the table schema changed?
|
||||
---------------------------------
|
||||
|
||||
Table schemas evolve, and Superset needs to reflect that. It's pretty common
|
||||
in the life cycle of a dashboard to want to add a new dimension or metric.
|
||||
To get Superset to discover your new columns, all you have to do is to
|
||||
go to ``Menu -> Sources -> Tables``, click the ``edit`` icon next to the
|
||||
table whose schema has changed, and hit ``Save`` from the ``Detail`` tab.
|
||||
Behind the scenes, the new columns will get merged in. Following this,
|
||||
you may want to
|
||||
re-edit the table afterwards to configure the ``Column`` tab, check the
|
||||
appropriate boxes and save again.
|
||||
|
||||
How do I go about developing a new visualization type?
|
||||
------------------------------------------------------
|
||||
Here's an example as a Github PR with comments that describe what the
|
||||
different sections of the code do:
|
||||
https://github.com/airbnb/superset/pull/3013
|
||||
|
||||
What database engine can I use as a backend for Superset?
|
||||
---------------------------------------------------------
|
||||
|
||||
To clarify, the *database backend* is an OLTP database used by Superset to store its internal
|
||||
information like your list of users, slices and dashboard definitions.
|
||||
|
||||
Superset is tested using Mysql, Postgresql and Sqlite for its backend. It's recommended you
|
||||
install Superset on one of these database servers for production.
|
||||
|
||||
Using a column-store, non-OLTP databases like Vertica, Redshift or Presto as a database backend simply won't work as these databases are not designed for this type of workload. Installation on Oracle, Microsoft SQL Server, or other OLTP databases may work but isn't tested.
|
||||
|
||||
Please note that pretty much any databases that have a SqlAlchemy integration should work perfectly fine as a datasource for Superset, just not as the OLTP backend.
|
||||
|
||||
How can I configure OAuth authentication and authorization?
|
||||
-----------------------------------------------------------
|
||||
|
||||
You can take a look at this Flask-AppBuilder `configuration example
|
||||
<https://github.com/dpgaspar/Flask-AppBuilder/blob/master/examples/oauth/config.py>`_.
|
||||
|
||||
How can I set a default filter on my dashboard?
|
||||
-----------------------------------------------
|
||||
|
||||
Easy. Simply apply the filter and save the dashboard while the filter
|
||||
is active.
|
||||
|
||||
How do I get Superset to refresh the schema of my table?
|
||||
--------------------------------------------------------
|
||||
|
||||
When adding columns to a table, you can have Superset detect and merge the
|
||||
new columns in by using the "Refresh Metadata" action in the
|
||||
``Source -> Tables`` page. Simply check the box next to the tables
|
||||
you want the schema refreshed, and click ``Actions -> Refresh Metadata``.
|
||||
|
||||
Is there a way to force the use specific colors?
|
||||
------------------------------------------------
|
||||
|
||||
It is possible on a per-dashboard basis by providing a mapping of
|
||||
labels to colors in the ``JSON Metadata`` attribute using the
|
||||
``label_colors`` key.
|
||||
|
||||
.. code:: json
|
||||
|
||||
{
|
||||
"label_colors": {
|
||||
"Girls": "#FF69B4",
|
||||
"Boys": "#ADD8E6"
|
||||
}
|
||||
}
|
||||
@@ -1,89 +0,0 @@
|
||||
Gallery
|
||||
=======
|
||||
|
||||
.. image:: _static/img/viz_thumbnails/line.png
|
||||
:scale: 25 %
|
||||
|
||||
.. image:: _static/img/viz_thumbnails/bubble.png
|
||||
:scale: 25 %
|
||||
|
||||
.. image:: _static/img/viz_thumbnails/table.png
|
||||
:scale: 25 %
|
||||
|
||||
.. image:: _static/img/viz_thumbnails/pie.png
|
||||
:scale: 25 %
|
||||
|
||||
.. image:: _static/img/viz_thumbnails/bar.png
|
||||
:scale: 25 %
|
||||
|
||||
.. image:: _static/img/viz_thumbnails/world_map.png
|
||||
:scale: 25 %
|
||||
|
||||
.. image:: _static/img/viz_thumbnails/sankey.png
|
||||
:scale: 25 %
|
||||
|
||||
.. image:: _static/img/viz_thumbnails/word_cloud.png
|
||||
:scale: 25 %
|
||||
|
||||
.. image:: _static/img/viz_thumbnails/filter_box.png
|
||||
:scale: 25 %
|
||||
|
||||
.. image:: _static/img/viz_thumbnails/pivot_table.png
|
||||
:scale: 25 %
|
||||
|
||||
.. image:: _static/img/viz_thumbnails/directed_force.png
|
||||
:scale: 25 %
|
||||
|
||||
.. image:: _static/img/viz_thumbnails/compare.png
|
||||
:scale: 25 %
|
||||
|
||||
.. image:: _static/img/viz_thumbnails/sunburst.png
|
||||
:scale: 25 %
|
||||
|
||||
.. image:: _static/img/viz_thumbnails/area.png
|
||||
:scale: 25 %
|
||||
|
||||
.. image:: _static/img/viz_thumbnails/big_number.png
|
||||
:scale: 25 %
|
||||
|
||||
.. image:: _static/img/viz_thumbnails/big_number_total.png
|
||||
:scale: 25 %
|
||||
|
||||
.. image:: _static/img/viz_thumbnails/bullet.png
|
||||
:scale: 25 %
|
||||
|
||||
.. image:: _static/img/viz_thumbnails/dist_bar.png
|
||||
:scale: 25 %
|
||||
|
||||
.. image:: _static/img/viz_thumbnails/heatmap.png
|
||||
:scale: 25 %
|
||||
|
||||
.. image:: _static/img/viz_thumbnails/markup.png
|
||||
:scale: 25 %
|
||||
|
||||
.. image:: _static/img/viz_thumbnails/para.png
|
||||
:scale: 25 %
|
||||
|
||||
.. image:: _static/img/viz_thumbnails/iframe.png
|
||||
:scale: 25 %
|
||||
|
||||
.. image:: _static/img/viz_thumbnails/box_plot.png
|
||||
:scale: 25 %
|
||||
|
||||
.. image:: _static/img/viz_thumbnails/treemap.png
|
||||
:scale: 25 %
|
||||
|
||||
.. image:: _static/img/viz_thumbnails/cal_heatmap.png
|
||||
:scale: 25 %
|
||||
|
||||
.. image:: _static/img/viz_thumbnails/horizon.png
|
||||
:scale: 25 %
|
||||
|
||||
.. image:: _static/img/viz_thumbnails/mapbox.png
|
||||
:scale: 25 %
|
||||
|
||||
.. image:: _static/img/viz_thumbnails/separator.png
|
||||
:scale: 25 %
|
||||
|
||||
.. image:: _static/img/viz_thumbnails/histogram.png
|
||||
:scale: 25 %
|
||||
@@ -1,103 +0,0 @@
|
||||
Importing and Exporting Datasources
|
||||
===================================
|
||||
|
||||
The superset cli allows you to import and export datasources from and to YAML.
|
||||
Datasources include both databases and druid clusters. The data is expected to be organized in the following hierarchy: ::
|
||||
|
||||
.
|
||||
├──databases
|
||||
| ├──database_1
|
||||
| | ├──table_1
|
||||
| | | ├──columns
|
||||
| | | | ├──column_1
|
||||
| | | | ├──column_2
|
||||
| | | | └──... (more columns)
|
||||
| | | └──metrics
|
||||
| | | ├──metric_1
|
||||
| | | ├──metric_2
|
||||
| | | └──... (more metrics)
|
||||
| | └── ... (more tables)
|
||||
| └── ... (more databases)
|
||||
└──druid_clusters
|
||||
├──cluster_1
|
||||
| ├──datasource_1
|
||||
| | ├──columns
|
||||
| | | ├──column_1
|
||||
| | | ├──column_2
|
||||
| | | └──... (more columns)
|
||||
| | └──metrics
|
||||
| | ├──metric_1
|
||||
| | ├──metric_2
|
||||
| | └──... (more metrics)
|
||||
| └── ... (more datasources)
|
||||
└── ... (more clusters)
|
||||
|
||||
|
||||
Exporting Datasources to YAML
|
||||
-----------------------------
|
||||
You can print your current datasources to stdout by running: ::
|
||||
|
||||
superset export_datasources
|
||||
|
||||
|
||||
To save your datasources to a file run: ::
|
||||
|
||||
superset export_datasources -f <filename>
|
||||
|
||||
|
||||
By default, default (null) values will be omitted. Use the ``-d`` flag to include them.
|
||||
If you want back references to be included (e.g. a column to include the table id
|
||||
it belongs to) use the ``-b`` flag.
|
||||
|
||||
Alternatively you can export datasources using the UI: ::
|
||||
|
||||
1. Open **Sources** -> **Databases** to export all tables associated to a single or multiple databases. (**Tables** for one or more tables, **Druid Clusters** for clusters, **Druid Datasources** for datasources)
|
||||
2. Select the items you would like to export
|
||||
3. Click **Actions** -> **Export to YAML**
|
||||
4. If you want to import an item that you exported through the UI, you will need to nest it inside its parent element, e.g. a `database` needs to be nested under `databases` a `table` needs to be nested inside a `database` element.
|
||||
|
||||
Exporting the complete supported YAML schema
|
||||
--------------------------------------------
|
||||
In order to obtain an exhaustive list of all fields you can import using the YAML import run: ::
|
||||
|
||||
superset export_datasource_schema
|
||||
|
||||
Again, you can use the ``-b`` flag to include back references.
|
||||
|
||||
|
||||
Importing Datasources from YAML
|
||||
-------------------------------
|
||||
In order to import datasources from a YAML file(s), run: ::
|
||||
|
||||
superset import_datasources -p <path or filename>
|
||||
|
||||
If you supply a path all files ending with ``*.yaml`` or ``*.yml`` will be parsed.
|
||||
You can apply additional flags e.g.: ::
|
||||
|
||||
superset import_datasources -p <path> -r
|
||||
|
||||
Will search the supplied path recursively.
|
||||
|
||||
The sync flag ``-s`` takes parameters in order to sync the supplied elements with
|
||||
your file. Be careful this can delete the contents of your meta database. Example:
|
||||
|
||||
superset import_datasources -p <path / filename> -s columns,metrics
|
||||
|
||||
This will sync all ``metrics`` and ``columns`` for all datasources found in the
|
||||
``<path / filename>`` in the Superset meta database. This means columns and metrics
|
||||
not specified in YAML will be deleted. If you would add ``tables`` to ``columns,metrics``
|
||||
those would be synchronised as well.
|
||||
|
||||
|
||||
If you don't supply the sync flag (``-s``) importing will only add and update (override) fields.
|
||||
E.g. you can add a ``verbose_name`` to the column ``ds`` in the table ``random_time_series`` from the example datasets
|
||||
by saving the following YAML to file and then running the ``import_datasources`` command. ::
|
||||
|
||||
databases:
|
||||
- database_name: main
|
||||
tables:
|
||||
- table_name: random_time_series
|
||||
columns:
|
||||
- column_name: ds
|
||||
verbose_name: datetime
|
||||
|
||||
@@ -1,80 +1,34 @@
|
||||
.. image:: _static/img/s.png
|
||||
|
||||
Apache Superset (incubating)
|
||||
''''''''''''''''''''''''''''
|
||||
|
||||
Apache Superset (incubating) is a modern, enterprise-ready business
|
||||
intelligence web application
|
||||
|
||||
|
||||
----------------
|
||||
|
||||
.. warning:: This project was originally named Panoramix, was renamed to
|
||||
Caravel in March 2016, and is currently named Superset as of November 2016
|
||||
|
||||
.. important::
|
||||
|
||||
**Disclaimer**: Apache Superset is an effort undergoing incubation at The
|
||||
Apache Software Foundation (ASF), sponsored by the Apache Incubator.
|
||||
Incubation is required of all newly accepted projects until a further
|
||||
review indicates that the infrastructure, communications, and
|
||||
decision making process have stabilized in a manner consistent with
|
||||
other successful ASF projects. While incubation status is not
|
||||
necessarily a reflection of the completeness or stability of
|
||||
the code, it does indicate that the project has yet to be fully
|
||||
endorsed by the ASF.
|
||||
.. image:: img/tux_panoramix.png
|
||||
|
||||
Overview
|
||||
========
|
||||
=======================================
|
||||
|
||||
Features
|
||||
--------
|
||||
---------
|
||||
|
||||
- A rich set of data visualizations
|
||||
- An easy-to-use interface for exploring and visualizing data
|
||||
- Create and share dashboards
|
||||
- Enterprise-ready authentication with integration with major authentication
|
||||
providers (database, OpenID, LDAP, OAuth & REMOTE_USER through
|
||||
Flask AppBuilder)
|
||||
- A rich set of data visualizations, integrated from some of the best
|
||||
visualization libraries
|
||||
- Create and share simple dashboards
|
||||
- An extensible, high-granularity security/permission model allowing
|
||||
intricate rules on who can access individual features and the dataset
|
||||
intricate rules on who can access individual features and the dataset
|
||||
- Enterprise-ready authentication with integration with major authentication
|
||||
providers (database, OpenID, LDAP, OAuth & REMOTE_USER through
|
||||
Flask AppBuilder)
|
||||
- A simple semantic layer, allowing users to control how data sources are
|
||||
displayed in the UI by defining which fields should show up in which
|
||||
drop-down and which aggregation and function metrics are made available
|
||||
to the user
|
||||
- Integration with most SQL-speaking RDBMS through SQLAlchemy
|
||||
- Integration with most RDBMS through SqlAlchemy
|
||||
- Deep integration with Druid.io
|
||||
|
||||
------
|
||||
|
||||
.. image:: https://camo.githubusercontent.com/82e264ef777ba06e1858766fe3b8817ee108eb7e/687474703a2f2f672e7265636f726469742e636f2f784658537661475574732e676966
|
||||
|
||||
------
|
||||
|
||||
.. image:: https://camo.githubusercontent.com/4991ff37a0005ea4e4267919a52786fda82d2d21/687474703a2f2f672e7265636f726469742e636f2f755a6767594f645235672e676966
|
||||
|
||||
------
|
||||
|
||||
.. image:: https://camo.githubusercontent.com/a389af15ac1e32a3d0fee941b4c62c850b1d583b/687474703a2f2f672e7265636f726469742e636f2f55373046574c704c76682e676966
|
||||
|
||||
------
|
||||
|
||||
|
||||
Contents
|
||||
--------
|
||||
---------
|
||||
|
||||
.. toctree::
|
||||
:maxdepth: 2
|
||||
|
||||
installation
|
||||
tutorial
|
||||
security
|
||||
sqllab
|
||||
visualization
|
||||
videos
|
||||
gallery
|
||||
druid
|
||||
faq
|
||||
user_guide
|
||||
|
||||
|
||||
|
||||
Indices and tables
|
||||
|
||||
@@ -1,608 +0,0 @@
|
||||
Installation & Configuration
|
||||
============================
|
||||
|
||||
Getting Started
|
||||
---------------
|
||||
|
||||
Superset is tested against Python ``2.7`` and Python ``3.4``.
|
||||
Airbnb currently uses 2.7.* in production. We do not plan on supporting
|
||||
Python ``2.6``.
|
||||
|
||||
Cloud-native!
|
||||
-------------
|
||||
|
||||
Superset is designed to be highly available. It is
|
||||
"cloud-native" as it has been designed to scale out in large,
|
||||
distributed environments, and works well inside containers.
|
||||
While you can easily
|
||||
test drive Superset on a modest setup or simply on your laptop,
|
||||
there's virtually no limit around scaling out the platform.
|
||||
Superset is also cloud-native in the sense that it is
|
||||
flexible and lets you choose your web server (Gunicorn, Nginx, Apache),
|
||||
your metadata database engine (MySQL, Postgres, MariaDB, ...),
|
||||
your message queue (Redis, RabbitMQ, SQS, ...),
|
||||
your results backend (S3, Redis, Memcached, ...), your caching layer
|
||||
(memcached, Redis, ...), works well with services like NewRelic, StatsD and
|
||||
DataDog, and has the ability to run analytic workloads against
|
||||
most popular database technologies.
|
||||
|
||||
Superset is battle tested in large environments with hundreds
|
||||
of concurrent users. Airbnb's production environment runs inside
|
||||
Kubernetes and serves 600+ daily active users viewing over 100K charts a
|
||||
day.
|
||||
|
||||
The Superset web server and the Superset Celery workers (optional)
|
||||
are stateless, so you can scale out by running on as many servers
|
||||
as needed.
|
||||
|
||||
OS dependencies
|
||||
---------------
|
||||
|
||||
Superset stores database connection information in its metadata database.
|
||||
For that purpose, we use the ``cryptography`` Python library to encrypt
|
||||
connection passwords. Unfortunately this library has OS level dependencies.
|
||||
|
||||
You may want to attempt the next step
|
||||
("Superset installation and initialization") and come back to this step if
|
||||
you encounter an error.
|
||||
|
||||
Here's how to install them:
|
||||
|
||||
For **Debian** and **Ubuntu**, the following command will ensure that
|
||||
the required dependencies are installed: ::
|
||||
|
||||
sudo apt-get install build-essential libssl-dev libffi-dev python-dev python-pip libsasl2-dev libldap2-dev
|
||||
|
||||
**Ubuntu 16.04** If you have python3.5 installed alongside with python2.7, as is default on **Ubuntu 16.04 LTS**, run this command also
|
||||
|
||||
sudo apt-get install build-essential libssl-dev libffi-dev python3.5-dev python-pip libsasl2-dev libldap2-dev
|
||||
|
||||
otherwise the build of ``cryptography`` fails.
|
||||
|
||||
For **Fedora** and **RHEL-derivatives**, the following command will ensure
|
||||
that the required dependencies are installed: ::
|
||||
|
||||
sudo yum upgrade python-setuptools
|
||||
sudo yum install gcc gcc-c++ libffi-devel python-devel python-pip python-wheel openssl-devel libsasl2-devel openldap-devel
|
||||
|
||||
**OSX**, system python is not recommended. brew's python also ships with pip ::
|
||||
|
||||
brew install pkg-config libffi openssl python
|
||||
env LDFLAGS="-L$(brew --prefix openssl)/lib" CFLAGS="-I$(brew --prefix openssl)/include" pip install cryptography==1.9
|
||||
|
||||
**Windows** isn't officially supported at this point, but if you want to
|
||||
attempt it, download `get-pip.py <https://bootstrap.pypa.io/get-pip.py>`_, and run ``python get-pip.py`` which may need admin access. Then run the following: ::
|
||||
|
||||
C:\> pip install cryptography
|
||||
|
||||
# You may also have to create C:\Temp
|
||||
C:\> md C:\Temp
|
||||
|
||||
Python virtualenv
|
||||
-----------------
|
||||
It is recommended to install Superset inside a virtualenv. Python 3 already ships virtualenv, for
|
||||
Python 2 you need to install it. If it's packaged for your operating systems install it from there
|
||||
otherwise you can install from pip: ::
|
||||
|
||||
pip install virtualenv
|
||||
|
||||
You can create and activate a virtualenv by: ::
|
||||
|
||||
# virtualenv is shipped in Python 3 as pyvenv
|
||||
virtualenv venv
|
||||
. ./venv/bin/activate
|
||||
|
||||
On windows the syntax for activating it is a bit different: ::
|
||||
|
||||
venv\Scripts\activate
|
||||
|
||||
Once you activated your virtualenv everything you are doing is confined inside the virtualenv.
|
||||
To exit a virtualenv just type ``deactivate``.
|
||||
|
||||
Python's setup tools and pip
|
||||
----------------------------
|
||||
Put all the chances on your side by getting the very latest ``pip``
|
||||
and ``setuptools`` libraries.::
|
||||
|
||||
pip install --upgrade setuptools pip
|
||||
|
||||
Superset installation and initialization
|
||||
----------------------------------------
|
||||
Follow these few simple steps to install Superset.::
|
||||
|
||||
# Install superset
|
||||
pip install superset
|
||||
|
||||
# Create an admin user (you will be prompted to set username, first and last name before setting a password)
|
||||
fabmanager create-admin --app superset
|
||||
|
||||
# Initialize the database
|
||||
superset db upgrade
|
||||
|
||||
# Load some data to play with
|
||||
superset load_examples
|
||||
|
||||
# Create default roles and permissions
|
||||
superset init
|
||||
|
||||
# Start the web server on port 8088, use -p to bind to another port
|
||||
superset runserver
|
||||
|
||||
# To start a development web server, use the -d switch
|
||||
# superset runserver -d
|
||||
|
||||
|
||||
After installation, you should be able to point your browser to the right
|
||||
hostname:port `http://localhost:8088 <http://localhost:8088>`_, login using
|
||||
the credential you entered while creating the admin account, and navigate to
|
||||
`Menu -> Admin -> Refresh Metadata`. This action should bring in all of
|
||||
your datasources for Superset to be aware of, and they should show up in
|
||||
`Menu -> Datasources`, from where you can start playing with your data!
|
||||
|
||||
A proper WSGI HTTP Server
|
||||
-------------------------
|
||||
|
||||
While you can setup Superset to run on Nginx or Apache, many use
|
||||
Gunicorn, preferably in **async mode**, which allows for impressive
|
||||
concurrency and is fairly easy to install and configure. Please
|
||||
refer to the
|
||||
documentation of your preferred technology to set up this Flask WSGI
|
||||
application in a way that works well in your environment.
|
||||
|
||||
While the `superset runserver` command acts as a quick wrapper
|
||||
around `gunicorn`, it doesn't expose all the options you may need,
|
||||
so you'll want to craft your own `gunicorn` command in your production
|
||||
environment. Here's an **async** setup known to work well: ::
|
||||
|
||||
gunicorn \
|
||||
-w 10 \
|
||||
-k gevent \
|
||||
--timeout 120 \
|
||||
-b 0.0.0.0:6666 \
|
||||
--limit-request-line 0 \
|
||||
--limit-request-field_size 0 \
|
||||
--statsd-host localhost:8125 \
|
||||
superset:app
|
||||
|
||||
Refer to the
|
||||
`Gunicorn documentation <http://docs.gunicorn.org/en/stable/design.html>`_
|
||||
for more information.
|
||||
|
||||
Note that *gunicorn* does not
|
||||
work on Windows so the `superset runserver` command is not expected to work
|
||||
in that context. Also note that the development web
|
||||
server (`superset runserver -d`) is not intended for production use.
|
||||
|
||||
Flask-AppBuilder Permissions
|
||||
----------------------------
|
||||
|
||||
By default every time the Flask-AppBuilder (FAB) app is initialized the
|
||||
permissions and views are added automatically to the backend and associated with
|
||||
the ‘Admin’ role. The issue however is when you are running multiple concurrent
|
||||
workers this creates a lot of contention and race conditions when defining
|
||||
permissions and views.
|
||||
|
||||
To alleviate this issue, the automatic updating of permissions can be disabled
|
||||
by setting the :envvar:`SUPERSET_UPDATE_PERMS` environment variable to `0`.
|
||||
The value `1` enables it, `0` disables it. Note if undefined the functionality
|
||||
is enabled to maintain backwards compatibility.
|
||||
|
||||
In a production environment initialization could take on the following form:
|
||||
|
||||
export SUPERSET_UPDATE_PERMS=1
|
||||
superset init
|
||||
|
||||
export SUPERSET_UPDATE_PERMS=0
|
||||
gunicorn -w 10 ... superset:app
|
||||
|
||||
Configuration behind a load balancer
|
||||
------------------------------------
|
||||
|
||||
If you are running superset behind a load balancer or reverse proxy (e.g. NGINX
|
||||
or ELB on AWS), you may need to utilise a healthcheck endpoint so that your
|
||||
load balancer knows if your superset instance is running. This is provided
|
||||
at ``/health`` which will return a 200 response containing "OK" if the
|
||||
webserver is running.
|
||||
|
||||
If the load balancer is inserting X-Forwarded-For/X-Forwarded-Proto headers, you
|
||||
should set `ENABLE_PROXY_FIX = True` in the superset config file to extract and use
|
||||
the headers.
|
||||
|
||||
In case that the reverse proxy is used for providing ssl encryption,
|
||||
an explicit definition of the `X-Forwarded-Proto` may be required.
|
||||
For the Apache webserver this can be set as follows: ::
|
||||
|
||||
RequestHeader set X-Forwarded-Proto "https"
|
||||
|
||||
Configuration
|
||||
-------------
|
||||
|
||||
To configure your application, you need to create a file (module)
|
||||
``superset_config.py`` and make sure it is in your PYTHONPATH. Here are some
|
||||
of the parameters you can copy / paste in that configuration module: ::
|
||||
|
||||
#---------------------------------------------------------
|
||||
# Superset specific config
|
||||
#---------------------------------------------------------
|
||||
ROW_LIMIT = 5000
|
||||
SUPERSET_WORKERS = 4
|
||||
|
||||
SUPERSET_WEBSERVER_PORT = 8088
|
||||
#---------------------------------------------------------
|
||||
|
||||
#---------------------------------------------------------
|
||||
# Flask App Builder configuration
|
||||
#---------------------------------------------------------
|
||||
# Your App secret key
|
||||
SECRET_KEY = '\2\1thisismyscretkey\1\2\e\y\y\h'
|
||||
|
||||
# The SQLAlchemy connection string to your database backend
|
||||
# This connection defines the path to the database that stores your
|
||||
# superset metadata (slices, connections, tables, dashboards, ...).
|
||||
# Note that the connection information to connect to the datasources
|
||||
# you want to explore are managed directly in the web UI
|
||||
SQLALCHEMY_DATABASE_URI = 'sqlite:////path/to/superset.db'
|
||||
|
||||
# Flask-WTF flag for CSRF
|
||||
WTF_CSRF_ENABLED = True
|
||||
# Add endpoints that need to be exempt from CSRF protection
|
||||
WTF_CSRF_EXEMPT_LIST = []
|
||||
|
||||
# Set this API key to enable Mapbox visualizations
|
||||
MAPBOX_API_KEY = ''
|
||||
|
||||
This file also allows you to define configuration parameters used by
|
||||
Flask App Builder, the web framework used by Superset. Please consult
|
||||
the `Flask App Builder Documentation
|
||||
<http://flask-appbuilder.readthedocs.org/en/latest/config.html>`_
|
||||
for more information on how to configure Superset.
|
||||
|
||||
Please make sure to change:
|
||||
|
||||
* *SQLALCHEMY_DATABASE_URI*, by default it is stored at *~/.superset/superset.db*
|
||||
* *SECRET_KEY*, to a long random string
|
||||
|
||||
In case you need to exempt endpoints from CSRF, e.g. you are running a custom
|
||||
auth postback endpoint, you can add them to *WTF_CSRF_EXEMPT_LIST*
|
||||
|
||||
WTF_CSRF_EXEMPT_LIST = ['']
|
||||
|
||||
Database dependencies
|
||||
---------------------
|
||||
|
||||
Superset does not ship bundled with connectivity to databases, except
|
||||
for Sqlite, which is part of the Python standard library.
|
||||
You'll need to install the required packages for the database you
|
||||
want to use as your metadata database as well as the packages needed to
|
||||
connect to the databases you want to access through Superset.
|
||||
|
||||
Here's a list of some of the recommended packages.
|
||||
|
||||
+---------------+-------------------------------------+-------------------------------------------------+
|
||||
| database | pypi package | SQLAlchemy URI prefix |
|
||||
+===============+=====================================+=================================================+
|
||||
| MySQL | ``pip install mysqlclient`` | ``mysql://`` |
|
||||
+---------------+-------------------------------------+-------------------------------------------------+
|
||||
| Postgres | ``pip install psycopg2`` | ``postgresql+psycopg2://`` |
|
||||
+---------------+-------------------------------------+-------------------------------------------------+
|
||||
| Presto | ``pip install pyhive`` | ``presto://`` |
|
||||
+---------------+-------------------------------------+-------------------------------------------------+
|
||||
| Oracle | ``pip install cx_Oracle`` | ``oracle://`` |
|
||||
+---------------+-------------------------------------+-------------------------------------------------+
|
||||
| sqlite | | ``sqlite://`` |
|
||||
+---------------+-------------------------------------+-------------------------------------------------+
|
||||
| Redshift | ``pip install sqlalchemy-redshift`` | ``postgresql+psycopg2://`` |
|
||||
+---------------+-------------------------------------+-------------------------------------------------+
|
||||
| MSSQL | ``pip install pymssql`` | ``mssql://`` |
|
||||
+---------------+-------------------------------------+-------------------------------------------------+
|
||||
| Impala | ``pip install impyla`` | ``impala://`` |
|
||||
+---------------+-------------------------------------+-------------------------------------------------+
|
||||
| SparkSQL | ``pip install pyhive`` | ``jdbc+hive://`` |
|
||||
+---------------+-------------------------------------+-------------------------------------------------+
|
||||
| Greenplum | ``pip install psycopg2`` | ``postgresql+psycopg2://`` |
|
||||
+---------------+-------------------------------------+-------------------------------------------------+
|
||||
| Athena | ``pip install "PyAthenaJDBC>1.0.9"``| ``awsathena+jdbc://`` |
|
||||
+---------------+-------------------------------------+-------------------------------------------------+
|
||||
| Vertica | ``pip install | ``vertica+vertica_python://`` |
|
||||
| | sqlalchemy-vertica-python`` | |
|
||||
+---------------+-------------------------------------+-------------------------------------------------+
|
||||
| ClickHouse | ``pip install | ``clickhouse://`` |
|
||||
| | sqlalchemy-clickhouse`` | |
|
||||
+---------------+-------------------------------------+-------------------------------------------------+
|
||||
| Kylin | ``pip install kylinpy`` | ``kylin://`` |
|
||||
+---------------+-------------------------------------+-------------------------------------------------+
|
||||
|
||||
Note that many other databases are supported, the main criteria being the
|
||||
existence of a functional SqlAlchemy dialect and Python driver. Googling
|
||||
the keyword ``sqlalchemy`` in addition to a keyword that describes the
|
||||
database you want to connect to should get you to the right place.
|
||||
|
||||
(AWS) Athena
|
||||
------------
|
||||
|
||||
The connection string for Athena looks like this ::
|
||||
|
||||
awsathena+jdbc://{aws_access_key_id}:{aws_secret_access_key}@athena.{region_name}.amazonaws.com/{schema_name}?s3_staging_dir={s3_staging_dir}&...
|
||||
|
||||
Where you need to escape/encode at least the s3_staging_dir, i.e., ::
|
||||
|
||||
s3://... -> s3%3A//...
|
||||
|
||||
|
||||
Caching
|
||||
-------
|
||||
|
||||
Superset uses `Flask-Cache <https://pythonhosted.org/Flask-Cache/>`_ for
|
||||
caching purpose. Configuring your caching backend is as easy as providing
|
||||
a ``CACHE_CONFIG``, constant in your ``superset_config.py`` that
|
||||
complies with the Flask-Cache specifications.
|
||||
|
||||
Flask-Cache supports multiple caching backends (Redis, Memcached,
|
||||
SimpleCache (in-memory), or the local filesystem). If you are going to use
|
||||
Memcached please use the `pylibmc` client library as `python-memcached` does
|
||||
not handle storing binary data correctly. If you use Redis, please install
|
||||
the `redis <https://pypi.python.org/pypi/redis>`_ Python package: ::
|
||||
|
||||
pip install redis
|
||||
|
||||
For setting your timeouts, this is done in the Superset metadata and goes
|
||||
up the "timeout searchpath", from your slice configuration, to your
|
||||
data source's configuration, to your database's and ultimately falls back
|
||||
into your global default defined in ``CACHE_CONFIG``.
|
||||
|
||||
|
||||
Deeper SQLAlchemy integration
|
||||
-----------------------------
|
||||
|
||||
It is possible to tweak the database connection information using the
|
||||
parameters exposed by SQLAlchemy. In the ``Database`` edit view, you will
|
||||
find an ``extra`` field as a ``JSON`` blob.
|
||||
|
||||
.. image:: _static/img/tutorial/add_db.png
|
||||
:scale: 30 %
|
||||
|
||||
This JSON string contains extra configuration elements. The ``engine_params``
|
||||
object gets unpacked into the
|
||||
`sqlalchemy.create_engine <http://docs.sqlalchemy.org/en/latest/core/engines.html#sqlalchemy.create_engine>`_ call,
|
||||
while the ``metadata_params`` get unpacked into the
|
||||
`sqlalchemy.MetaData <http://docs.sqlalchemy.org/en/rel_1_0/core/metadata.html#sqlalchemy.schema.MetaData>`_ call. Refer to the SQLAlchemy docs for more information.
|
||||
|
||||
|
||||
Schemas (Postgres & Redshift)
|
||||
-----------------------------
|
||||
|
||||
Postgres and Redshift, as well as other databases,
|
||||
use the concept of **schema** as a logical entity
|
||||
on top of the **database**. For Superset to connect to a specific schema,
|
||||
there's a **schema** parameter you can set in the table form.
|
||||
|
||||
|
||||
External Password store for SQLAlchemy connections
|
||||
--------------------------------------------------
|
||||
It is possible to use an external store for your database passwords. This is
|
||||
useful if you are running a custom secret distribution framework and do not wish
|
||||
to store secrets in Superset's meta database.
|
||||
|
||||
Example:
|
||||
Write a function that takes a single argument of type ``sqla.engine.url`` and returns
|
||||
the password for the given connection string. Then set ``SQLALCHEMY_CUSTOM_PASSWORD_STORE``
|
||||
in your config file to point to that function. ::
|
||||
|
||||
def example_lookup_password(url):
|
||||
secret = <<get password from external framework>>
|
||||
        return secret
|
||||
|
||||
SQLALCHEMY_CUSTOM_PASSWORD_STORE = example_lookup_password
|
||||
|
||||
|
||||
SSL Access to databases
|
||||
-----------------------
|
||||
This example worked with a MySQL database that requires SSL. The configuration
|
||||
may differ with other backends. This is what was put in the ``extra``
|
||||
parameter ::
|
||||
|
||||
{
|
||||
"metadata_params": {},
|
||||
"engine_params": {
|
||||
"connect_args":{
|
||||
"sslmode":"require",
|
||||
"sslrootcert": "/path/to/my/pem"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
Druid
|
||||
-----
|
||||
|
||||
* From the UI, enter the information about your clusters in the
|
||||
`Sources -> Druid Clusters` menu by hitting the + sign.
|
||||
|
||||
* Once the Druid cluster connection information is entered, hit the
|
||||
`Sources -> Refresh Druid Metadata` menu item to populate
|
||||
|
||||
* Navigate to your datasources
|
||||
|
||||
Note that you can run the ``superset refresh_druid`` command to refresh the
|
||||
metadata from your Druid cluster(s)
|
||||
|
||||
|
||||
CORS
|
||||
----
|
||||
|
||||
The extra CORS Dependency must be installed:
|
||||
|
||||
superset[cors]
|
||||
|
||||
|
||||
The following keys in `superset_config.py` can be specified to configure CORS:
|
||||
|
||||
|
||||
* ``ENABLE_CORS``: Must be set to True in order to enable CORS
|
||||
* ``CORS_OPTIONS``: options passed to Flask-CORS (`documentation <http://flask-cors.corydolphin.com/en/latest/api.html#extension>`)
|
||||
|
||||
|
||||
MIDDLEWARE
|
||||
----------
|
||||
|
||||
Superset allows you to add your own middleware. To add your own middleware, update the ``ADDITIONAL_MIDDLEWARE`` key in
|
||||
your `superset_config.py`. ``ADDITIONAL_MIDDLEWARE`` should be a list of your additional middleware classes.
|
||||
|
||||
For example, to use AUTH_REMOTE_USER from behind a proxy server like nginx, you have to add a simple middleware class to
|
||||
add the value of ``HTTP_X_PROXY_REMOTE_USER`` (or any other custom header from the proxy) to Gunicorn's ``REMOTE_USER``
|
||||
environment variable: ::
|
||||
|
||||
class RemoteUserMiddleware(object):
|
||||
def __init__(self, app):
|
||||
self.app = app
|
||||
def __call__(self, environ, start_response):
|
||||
user = environ.pop('HTTP_X_PROXY_REMOTE_USER', None)
|
||||
environ['REMOTE_USER'] = user
|
||||
return self.app(environ, start_response)
|
||||
|
||||
ADDITIONAL_MIDDLEWARE = [RemoteUserMiddleware, ]
|
||||
|
||||
*Adapted from http://flask.pocoo.org/snippets/69/*
|
||||
|
||||
|
||||
Upgrading
|
||||
---------
|
||||
|
||||
Upgrading should be as straightforward as running::
|
||||
|
||||
pip install superset --upgrade
|
||||
superset db upgrade
|
||||
superset init
|
||||
|
||||
SQL Lab
|
||||
-------
|
||||
SQL Lab is a powerful SQL IDE that works with all SQLAlchemy compatible
|
||||
databases. By default, queries are executed in the scope of a web
|
||||
request so they
|
||||
may eventually timeout as queries exceed the maximum duration of a web
|
||||
request in your environment, whether it'd be a reverse proxy or the Superset
|
||||
server itself.
|
||||
|
||||
On large analytic databases, it's common to run queries that
|
||||
execute for minutes or hours.
|
||||
To enable support for long running queries that
|
||||
execute beyond the typical web request's timeout (30-60 seconds), it is
|
||||
necessary to configure an asynchronous backend for Superset which consists of:
|
||||
|
||||
* one or many Superset worker (which is implemented as a Celery worker), and
|
||||
can be started with the ``superset worker`` command, run
|
||||
``superset worker --help`` to view the related options
|
||||
* a celery broker (message queue) for which we recommend using Redis
|
||||
or RabbitMQ
|
||||
* a results backend that defines where the worker will persist the query
|
||||
results
|
||||
|
||||
Configuring Celery requires defining a ``CELERY_CONFIG`` in your
|
||||
``superset_config.py``. Both the worker and web server processes should
|
||||
have the same configuration.
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
class CeleryConfig(object):
|
||||
BROKER_URL = 'redis://localhost:6379/0'
|
||||
CELERY_IMPORTS = ('superset.sql_lab', )
|
||||
CELERY_RESULT_BACKEND = 'redis://localhost:6379/0'
|
||||
CELERY_ANNOTATIONS = {'tasks.add': {'rate_limit': '10/s'}}
|
||||
|
||||
CELERY_CONFIG = CeleryConfig
|
||||
|
||||
To setup a result backend, you need to pass an instance of a derivative
|
||||
of ``werkzeug.contrib.cache.BaseCache`` to the ``RESULTS_BACKEND``
|
||||
configuration key in your ``superset_config.py``. It's possible to use
|
||||
Memcached, Redis, S3 (https://pypi.python.org/pypi/s3werkzeugcache),
|
||||
memory or the file system (in a single server-type setup or for testing),
|
||||
or to write your own caching interface. Your ``superset_config.py`` may
|
||||
look something like:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
# On S3
|
||||
from s3cache.s3cache import S3Cache
|
||||
S3_CACHE_BUCKET = 'foobar-superset'
|
||||
S3_CACHE_KEY_PREFIX = 'sql_lab_result'
|
||||
RESULTS_BACKEND = S3Cache(S3_CACHE_BUCKET, S3_CACHE_KEY_PREFIX)
|
||||
|
||||
# On Redis
|
||||
from werkzeug.contrib.cache import RedisCache
|
||||
RESULTS_BACKEND = RedisCache(
|
||||
host='localhost', port=6379, key_prefix='superset_results')
|
||||
|
||||
Note that it's important that all the worker nodes and web servers in
|
||||
the Superset cluster share a common metadata database.
|
||||
This means that SQLite will not work in this context since it has
|
||||
limited support for concurrency and
|
||||
typically lives on the local file system.
|
||||
|
||||
Also note that SQL Lab supports Jinja templating in queries, and that it's
|
||||
possible to overload
|
||||
the default Jinja context in your environment by defining the
|
||||
``JINJA_CONTEXT_ADDONS`` in your superset configuration. Objects referenced
|
||||
in this dictionary are made available for users to use in their SQL.
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
JINJA_CONTEXT_ADDONS = {
|
||||
'my_crazy_macro': lambda x: x*2,
|
||||
}
|
||||
|
||||
|
||||
Making your own build
|
||||
---------------------
|
||||
|
||||
For more advanced users, you may want to build Superset from sources. That
|
||||
would be the case if you fork the project to add features specific to
|
||||
your environment.::
|
||||
|
||||
# assuming $SUPERSET_HOME as the root of the repo
|
||||
cd $SUPERSET_HOME/superset/assets
|
||||
yarn
|
||||
yarn run build
|
||||
cd $SUPERSET_HOME
|
||||
python setup.py install
|
||||
|
||||
|
||||
Blueprints
|
||||
----------
|
||||
|
||||
`Blueprints are Flask's reusable apps <http://flask.pocoo.org/docs/0.12/blueprints/>`_.
|
||||
Superset allows you to specify an array of Blueprints
|
||||
in your ``superset_config`` module. Here's
|
||||
an example on how this can work with a simple Blueprint. By doing
|
||||
so, you can expect Superset to serve a page that says "OK"
|
||||
at the ``/simple_page`` url. This can allow you to run other things such
|
||||
as custom data visualization applications alongside Superset, on the
|
||||
same server.
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
from flask import Blueprint
|
||||
simple_page = Blueprint('simple_page', __name__,
|
||||
template_folder='templates')
|
||||
@simple_page.route('/', defaults={'page': 'index'})
|
||||
@simple_page.route('/<page>')
|
||||
def show(page):
|
||||
return "Ok"
|
||||
|
||||
BLUEPRINTS = [simple_page]
|
||||
|
||||
StatsD logging
|
||||
--------------
|
||||
|
||||
Superset is instrumented to log events to StatsD if desired. Most endpoints hit
|
||||
are logged as well as key events like query start and end in SQL Lab.
|
||||
|
||||
To setup StatsD logging, it's a matter of configuring the logger in your
|
||||
``superset_config.py``.
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
from superset.stats_logger import StatsdStatsLogger
|
||||
STATS_LOGGER = StatsdStatsLogger(host='localhost', port=8125, prefix='superset')
|
||||
|
||||
Note that it's also possible to implement your own logger by deriving
|
||||
``superset.stats_logger.BaseStatsLogger``.
|
||||
@@ -1,162 +0,0 @@
|
||||
Security
|
||||
========
|
||||
Security in Superset is handled by Flask AppBuilder (FAB). FAB is a
|
||||
"Simple and rapid application development framework, built on top of Flask.".
|
||||
FAB provides authentication, user management, permissions and roles.
|
||||
Please read its `Security documentation
|
||||
<http://flask-appbuilder.readthedocs.io/en/latest/security.html>`_.
|
||||
|
||||
Provided Roles
|
||||
--------------
|
||||
Superset ships with a set of roles that are handled by Superset itself.
|
||||
You can assume that these roles will stay up-to-date as Superset evolves.
|
||||
Even though it's possible for ``Admin`` users to do so, it is not recommended
|
||||
that you alter these roles in any way by removing
|
||||
or adding permissions to them as these roles will be re-synchronized to
|
||||
their original values as you run your next ``superset init`` command.
|
||||
|
||||
Since it's not recommended to alter the roles described here, it's right
|
||||
to assume that your security strategy should be to compose user access based
|
||||
on these base roles and roles that you create. For instance you could
|
||||
create a role ``Financial Analyst`` that would be made of set of permissions
|
||||
to a set of data sources (tables) and/or databases. Users would then be
|
||||
granted ``Gamma``, ``Financial Analyst``, and perhaps ``sql_lab``.
|
||||
|
||||
Admin
|
||||
"""""
|
||||
Admins have all possible rights, including granting or revoking rights from
|
||||
other users and altering other people's slices and dashboards.
|
||||
|
||||
Alpha
|
||||
"""""
|
||||
Alpha have access to all data sources, but they cannot grant or revoke access
|
||||
from other users. They are also limited to altering the objects that they
|
||||
own. Alpha users can add and alter data sources.
|
||||
|
||||
Gamma
|
||||
"""""
|
||||
Gamma have limited access. They can only consume data coming from data sources
|
||||
they have been given access to through another complementary role.
|
||||
They only have access to view the slices and
|
||||
dashboards made from data sources that they have access to. Currently Gamma
|
||||
users are not able to alter or add data sources. We assume that they are
|
||||
mostly content consumers, though they can create slices and dashboards.
|
||||
|
||||
Also note that when Gamma users look at the dashboards and slices list view,
|
||||
they will only see the objects that they have access to.
|
||||
|
||||
sql_lab
|
||||
"""""""
|
||||
The ``sql_lab`` role grants access to SQL Lab. Note that while ``Admin``
|
||||
users have access to all databases by default, both ``Alpha`` and ``Gamma``
|
||||
users need to be given access on a per database basis.
|
||||
|
||||
Public
|
||||
""""""
|
||||
It's possible to allow logged out users to access some Superset features.
|
||||
|
||||
By setting ``PUBLIC_ROLE_LIKE_GAMMA = True`` in your ``superset_config.py``,
|
||||
you grant public role the same set of permissions as for the GAMMA role.
|
||||
This is useful if one wants to enable anonymous users to view
|
||||
dashboards. Explicit grant on specific datasets is still required, meaning
|
||||
that you need to edit the ``Public`` role and add the Public data sources
|
||||
to the role manually.
|
||||
|
||||
|
||||
Managing Gamma per data source access
|
||||
-------------------------------------
|
||||
Here's how to provide users access to only specific datasets. First make
|
||||
sure the users with limited access have [only] the Gamma role assigned to
|
||||
them. Second, create a new role (``Menu -> Security -> List Roles``) and
|
||||
click the ``+`` sign.
|
||||
|
||||
.. image:: _static/img/create_role.png
|
||||
:scale: 50 %
|
||||
|
||||
This new window allows you to give this new role a name, attribute it to users
|
||||
and select the tables in the ``Permissions`` dropdown. To select the data
|
||||
sources you want to associate with this role, simply click in the dropdown
|
||||
and use the typeahead to search for your table names.
|
||||
|
||||
You can then confirm with your Gamma users that they see the objects
|
||||
(dashboards and slices) associated with the tables related to their roles.
|
||||
|
||||
|
||||
Customizing
|
||||
-----------
|
||||
|
||||
The permissions exposed by FAB are very granular and allow for a great level
|
||||
of customization. FAB creates many permissions automagically for each model
|
||||
that is created (can_add, can_delete, can_show, can_edit, ...) as well as for
|
||||
each view. On top of that, Superset can expose more granular permissions like
|
||||
``all_datasource_access``.
|
||||
|
||||
We do not recommend altering the 3 base roles as there
|
||||
are a set of assumptions that Superset builds upon. It is possible though for
|
||||
you to create your own roles, and union them to existing ones.
|
||||
|
||||
Permissions
|
||||
"""""""""""
|
||||
|
||||
Roles are composed of a set of permissions, and Superset has many categories
|
||||
of permissions. Here are the different categories of permissions:
|
||||
|
||||
- **Model & action**: models are entities like ``Dashboard``,
|
||||
``Slice``, or ``User``. Each model has a fixed set of permissions, like
|
||||
``can_edit``, ``can_show``, ``can_delete``, ``can_list``, ``can_add``, and
|
||||
so on. By adding ``can_delete on Dashboard`` to a role, and granting that
|
||||
role to a user, this user will be able to delete dashboards.
|
||||
- **Views**: views are individual web pages, like the ``explore`` view or the
|
||||
``SQL Lab`` view. When granted to a user, he/she will see that view in
|
||||
its menu items, and be able to load that page.
|
||||
- **Data source**: For each data source, a permission is created. If the user
|
||||
does not have the ``all_datasource_access`` permission granted, the user
|
||||
will only be able to see Slices or explore the data sources that are granted
|
||||
to them
|
||||
- **Database**: Granting access to a database allows for the user to access
|
||||
all data sources within that database, and will enable the user to query
|
||||
that database in SQL Lab, provided that the SQL Lab specific permission
|
||||
have been granted to the user
|
||||
|
||||
|
||||
Restricting access to a subset of data sources
|
||||
""""""""""""""""""""""""""""""""""""""""""""""
|
||||
|
||||
The best way to go is probably to give user ``Gamma`` plus one or many other
|
||||
roles that would add access to specific data sources. We recommend that you
|
||||
create individual roles for each access profile. Say people in your finance
|
||||
department might have access to a set of databases and data sources, and
|
||||
these permissions can be consolidated in a single role. Users with this
|
||||
profile then need to be attributed ``Gamma`` as a foundation to the models
|
||||
and views they can access, and that ``Finance`` role that is a collection
|
||||
of permissions to data objects.
|
||||
|
||||
One user can have many roles, so a finance executive could be granted
|
||||
``Gamma``, ``Finance``, and perhaps another ``Executive`` role that gather
|
||||
a set of data sources that power dashboards only made available to executives.
|
||||
When looking at its dashboard list, this user will only see the
|
||||
list of dashboards it has access to, based on the roles and
|
||||
permissions that were attributed.
|
||||
|
||||
|
||||
Restricting the access to some metrics
|
||||
""""""""""""""""""""""""""""""""""""""
|
||||
|
||||
Sometimes some metrics are relatively sensitive (e.g. revenue).
|
||||
We may want to restrict those metrics to only a few roles.
|
||||
For example, assume there is a metric ``[cluster1].[datasource1].[revenue]``
|
||||
and only Admin users are allowed to see it. Here’s how to restrict the access.
|
||||
|
||||
1. Edit the datasource (``Menu -> Source -> Druid datasources -> edit the
|
||||
record "datasource1"``) and go to the tab ``List Druid Metric``. Check
|
||||
the checkbox ``Is Restricted`` in the row of the metric ``revenue``.
|
||||
|
||||
2. Edit the role (``Menu -> Security -> List Roles -> edit the record
|
||||
“Admin”``), in the permissions field, type-and-search the permission
|
||||
``metric access on [cluster1].[datasource1].[revenue] (id: 1)``, then
|
||||
click the Save button on the bottom of the page.
|
||||
|
||||
Any users without the permission will see the error message
|
||||
*Access to the metrics denied: revenue (Status: 500)* in the slices.
|
||||
It also happens when the user wants to access a post-aggregation metric that
|
||||
is dependent on revenue.
|
||||
@@ -1,72 +0,0 @@
|
||||
SQL Lab
|
||||
=======
|
||||
|
||||
SQL Lab is a modern, feature-rich SQL IDE written in
|
||||
`React <https://facebook.github.io/react/>`_.
|
||||
|
||||
|
||||
Feature Overview
|
||||
----------------
|
||||
- Connects to just about any database backend
|
||||
- A multi-tab environment to work on multiple queries at a time
|
||||
- A smooth flow to visualize your query results using Superset's rich
|
||||
visualization capabilities
|
||||
- Browse database metadata: tables, columns, indexes, partitions
|
||||
- Support for long-running queries
|
||||
|
||||
- uses the `Celery distributed queue <http://www.python.org/>`_
|
||||
to dispatch query handling to workers
|
||||
- supports defining a "results backend" to persist query results
|
||||
|
||||
- A search engine to find queries executed in the past
|
||||
- Supports templating using the
|
||||
`Jinja templating language <http://jinja.pocoo.org/docs/dev/>`_
|
||||
which allows for using macros in your SQL code
|
||||
|
||||
Extra features
|
||||
--------------
|
||||
- Hit ``alt + enter`` as a keyboard shortcut to run your query
|
||||
|
||||
Templating with Jinja
|
||||
---------------------
|
||||
|
||||
.. code-block:: sql
|
||||
|
||||
SELECT *
|
||||
FROM some_table
|
||||
WHERE partition_key = '{{ presto.latest_partition('some_table') }}'
|
||||
|
||||
Templating unleashes the power and capabilities of a
|
||||
programming language within your SQL code.
|
||||
|
||||
Templates can also be used to write generic queries that are
|
||||
parameterized so they can be re-used easily.
|
||||
|
||||
|
||||
Available macros
|
||||
''''''''''''''''
|
||||
|
||||
We expose certain modules from Python's standard library in
|
||||
Superset's Jinja context:
|
||||
|
||||
- ``time``: ``time``
|
||||
- ``datetime``: ``datetime.datetime``
|
||||
- ``uuid``: ``uuid``
|
||||
- ``random``: ``random``
|
||||
- ``relativedelta``: ``dateutil.relativedelta.relativedelta``
|
||||
|
||||
`Jinja's builtin filters <http://jinja.pocoo.org/docs/dev/templates/>`_ can be also be applied where needed.
|
||||
|
||||
.. autoclass:: superset.jinja_context.PrestoTemplateProcessor
|
||||
:members:
|
||||
|
||||
.. autofunction:: superset.jinja_context.url_param
|
||||
|
||||
Extending macros
|
||||
''''''''''''''''
|
||||
|
||||
As mentioned in the `Installation & Configuration <https://superset.incubator.apache.org/installation.html#installation-configuration>`_ documentation,
|
||||
it's possible for administrators to expose more macros in their
|
||||
environment using the configuration variable ``JINJA_CONTEXT_ADDONS``.
|
||||
All objects referenced in this dictionary will become available for users
|
||||
to integrate in their queries in **SQL Lab**.
|
||||
@@ -1,308 +0,0 @@
|
||||
Tutorial for Superset Administrators
|
||||
====================================
|
||||
|
||||
This tutorial targets a Superset administrator: someone configuring Superset
|
||||
for an organization on behalf of users. We'll show you how to connect Superset
|
||||
to a new database and configure a table in that database for analysis. You'll
|
||||
also explore the data you've exposed and add a visualization to a dashboard
|
||||
so that you get a feel for the end-to-end user experience.
|
||||
|
||||
Connecting to a new database
|
||||
----------------------------
|
||||
|
||||
We assume you already have a database configured and can connect to it from the
|
||||
instance on which you’re running Superset. If you’re just testing Superset and
|
||||
want to explore sample data, you can load some
|
||||
`sample PostgreSQL datasets <https://wiki.postgresql.org/wiki/Sample_Databases>`_
|
||||
into a fresh DB, or configure the
|
||||
`example weather data <https://github.com/dylburger/noaa-ghcn-weather-data>`_
|
||||
we use here.
|
||||
|
||||
Under the **Sources** menu, select the *Databases* option:
|
||||
|
||||
.. image:: _static/img/tutorial/tutorial_01_sources_database.png
|
||||
:scale: 70%
|
||||
|
||||
On the resulting page, click on the green plus sign, near the top right:
|
||||
|
||||
.. image:: _static/img/tutorial/tutorial_02_add_database.png
|
||||
:scale: 70%
|
||||
|
||||
You can configure a number of advanced options on this page, but for
|
||||
this walkthrough, you’ll only need to do **two things**:
|
||||
|
||||
1. Name your database connection:
|
||||
|
||||
.. image:: _static/img/tutorial/tutorial_03_database_name.png
|
||||
:scale: 70%
|
||||
|
||||
2. Provide the SQLAlchemy Connection URI and test the connection:
|
||||
|
||||
.. image:: _static/img/tutorial/tutorial_04_sqlalchemy_connection_string.png
|
||||
:scale: 70%
|
||||
|
||||
This example shows the connection string for our test weather database.
|
||||
As noted in the text below the URI, you should refer to the SQLAlchemy
|
||||
documentation on
|
||||
`creating new connection URIs <http://docs.sqlalchemy.org/en/rel_1_0/core/engines.html#database-urls>`_
|
||||
for your target database.
|
||||
|
||||
Click the **Test Connection** button to confirm things work end to end.
|
||||
Once Superset can successfully connect and authenticate, you should see
|
||||
a popup like this:
|
||||
|
||||
.. image:: _static/img/tutorial/tutorial_05_connection_popup.png
|
||||
:scale: 50%
|
||||
|
||||
Moreover, you should also see the list of tables Superset can read from
|
||||
the schema you’re connected to, at the bottom of the page:
|
||||
|
||||
.. image:: _static/img/tutorial/tutorial_06_list_of_tables.png
|
||||
:scale: 70%
|
||||
|
||||
If the connection looks good, save the configuration by clicking the **Save**
|
||||
button at the bottom of the page:
|
||||
|
||||
.. image:: _static/img/tutorial/tutorial_07_save_button.png
|
||||
:scale: 70%
|
||||
|
||||
Adding a new table
|
||||
------------------
|
||||
|
||||
Now that you’ve configured a database, you’ll need to add specific tables
|
||||
to Superset that you’d like to query.
|
||||
|
||||
Under the **Sources** menu, select the *Tables* option:
|
||||
|
||||
.. image:: _static/img/tutorial/tutorial_08_sources_tables.png
|
||||
:scale: 70%
|
||||
|
||||
On the resulting page, click on the green plus sign, near the top left:
|
||||
|
||||
.. image:: _static/img/tutorial/tutorial_09_add_new_table.png
|
||||
:scale: 70%
|
||||
|
||||
You only need a few pieces of information to add a new table to Superset:
|
||||
|
||||
* The name of the table
|
||||
|
||||
.. image:: _static/img/tutorial/tutorial_10_table_name.png
|
||||
:scale: 70%
|
||||
|
||||
* The target database from the **Database** drop-down menu (i.e. the one
|
||||
you just added above)
|
||||
|
||||
.. image:: _static/img/tutorial/tutorial_11_choose_db.png
|
||||
:scale: 70%
|
||||
|
||||
* Optionally, the database schema. If the table exists in the “default” schema
|
||||
(e.g. the *public* schema in PostgreSQL or Redshift), you can leave the schema
|
||||
field blank.
|
||||
|
||||
Click on the **Save** button to save the configuration:
|
||||
|
||||
.. image:: _static/img/tutorial/tutorial_07_save_button.png
|
||||
:scale: 70%
|
||||
|
||||
When redirected back to the list of tables, you should see a message indicating
|
||||
that your table was created:
|
||||
|
||||
.. image:: _static/img/tutorial/tutorial_12_table_creation_success_msg.png
|
||||
:scale: 70%
|
||||
|
||||
This message also directs you to edit the table configuration. We’ll edit a limited
|
||||
portion of the configuration now - just to get you started - and leave the rest for
|
||||
a more advanced tutorial.
|
||||
|
||||
Click on the edit button next to the table you’ve created:
|
||||
|
||||
.. image:: _static/img/tutorial/tutorial_13_edit_table_config.png
|
||||
:scale: 70%
|
||||
|
||||
On the resulting page, click on the **List Table Column** tab. Here, you’ll define the
|
||||
way you can use specific columns of your table when exploring your data. We’ll run
|
||||
through these options to describe their purpose:
|
||||
|
||||
* If you want users to group metrics by a specific field, mark it as **Groupable**.
|
||||
* If you need to filter on a specific field, mark it as **Filterable**.
|
||||
* Is this field something you’d like to get the distinct count of? Check the **Count
|
||||
Distinct** box.
|
||||
* Is this a metric you want to sum, or get basic summary statistics for? The **Sum**,
|
||||
**Min**, and **Max** columns will help.
|
||||
* The **is temporal** field should be checked for any date or time fields. We’ll cover
|
||||
how this manifests itself in analyses in a moment.
|
||||
|
||||
Here’s how we’ve configured fields for the weather data. Even for measures like the
|
||||
weather measurements (precipitation, snowfall, etc.), it’s ideal to group and filter
|
||||
by these values:
|
||||
|
||||
.. image:: _static/img/tutorial/tutorial_14_field_config.png
|
||||
|
||||
As with the configurations above, click the **Save** button to save these settings.
|
||||
|
||||
Exploring your data
|
||||
-------------------
|
||||
|
||||
To start exploring your data, simply click on the table name you just created in
|
||||
the list of available tables:
|
||||
|
||||
.. image:: _static/img/tutorial/tutorial_15_click_table_name.png
|
||||
|
||||
By default, you’ll be presented with a Table View:
|
||||
|
||||
.. image:: _static/img/tutorial/tutorial_16_datasource_chart_type.png
|
||||
|
||||
Let’s walk through a basic query to get the count of all records in our table.
|
||||
First, we’ll need to change the **Since** filter to capture the range of our data.
|
||||
You can use simple phrases to apply these filters, like "3 years ago":
|
||||
|
||||
.. image:: _static/img/tutorial/tutorial_17_choose_time_range.png
|
||||
|
||||
The upper limit for time, the **Until** filter, defaults to "now", which may or may
|
||||
not be what you want.
|
||||
|
||||
Look for the Metrics section under the **GROUP BY** header, and start typing "Count"
|
||||
- you’ll see a list of metrics matching what you type:
|
||||
|
||||
.. image:: _static/img/tutorial/tutorial_18_choose_metric.png
|
||||
|
||||
Select the *COUNT(\*)* metric, then click the green **Query** button near the top
|
||||
of the explore:
|
||||
|
||||
.. image:: _static/img/tutorial/tutorial_19_click_query.png
|
||||
|
||||
You’ll see your results in the table:
|
||||
|
||||
.. image:: _static/img/tutorial/tutorial_20_count_star_result.png
|
||||
|
||||
Let’s group this by the *weather_description* field to get the count of records by
|
||||
the type of weather recorded by adding it to the *Group by* section:
|
||||
|
||||
.. image:: _static/img/tutorial/tutorial_21_group_by.png
|
||||
|
||||
and run the query:
|
||||
|
||||
.. image:: _static/img/tutorial/tutorial_22_group_by_result.png
|
||||
|
||||
Let’s find a more useful data point: the top 10 times and places that recorded the
|
||||
highest temperature in 2015.
|
||||
|
||||
We replace *weather_description* with *latitude*, *longitude* and *measurement_date* in the
|
||||
*Group by* section:
|
||||
|
||||
.. image:: _static/img/tutorial/tutorial_23_group_by_more_dimensions.png
|
||||
|
||||
And replace *COUNT(\*)* with *max__measurement_flag*:
|
||||
|
||||
.. image:: _static/img/tutorial/tutorial_24_max_metric.png
|
||||
|
||||
The *max__measurement_flag* metric was created when we checked the box under **Max** and
|
||||
next to the *measurement_flag* field, indicating that this field was numeric and that
|
||||
we wanted to find its maximum value when grouped by specific fields.
|
||||
|
||||
In our case, *measurement_flag* is the value of the measurement taken, which clearly
|
||||
depends on the type of measurement (the researchers recorded different values for
|
||||
precipitation and temperature). Therefore, we must filter our query only on records
|
||||
where the *weather_description* is equal to "Maximum temperature", which we do in
|
||||
the **Filters** section at the bottom of the explore:
|
||||
|
||||
.. image:: _static/img/tutorial/tutorial_25_max_temp_filter.png
|
||||
|
||||
Finally, since we only care about the top 10 measurements, we limit our results to
|
||||
10 records using the *Row limit* option under the **Options** header:
|
||||
|
||||
.. image:: _static/img/tutorial/tutorial_26_row_limit.png
|
||||
|
||||
We click **Query** and get the following results:
|
||||
|
||||
.. image:: _static/img/tutorial/tutorial_27_top_10_max_temps.png
|
||||
|
||||
In this dataset, the maximum temperature is recorded in tenths of a degree Celsius.
|
||||
The top value of 1370, measured in the middle of Nevada, is equal to 137 C, or roughly
|
||||
278 degrees F. It’s unlikely this value was correctly recorded. We’ve already been able
|
||||
to investigate some outliers with Superset, but this just scratches the surface of what
|
||||
we can do.
|
||||
|
||||
You may want to do a couple more things with this measure:
|
||||
|
||||
* The default formatting shows values like 1.37k, which may be difficult for some
|
||||
users to read. It’s likely you may want to see the full, comma-separated value.
|
||||
You can change the formatting of any measure by editing its config (*Edit Table
|
||||
Config > List Sql Metric > Edit Metric > D3Format*)
|
||||
* Moreover, you may want to see the temperature measurements in plain degrees C,
|
||||
not tenths of a degree. Or you may want to convert the temperature to degrees
|
||||
Fahrenheit. You can change the SQL that gets executed against the database, baking
|
||||
the logic into the measure itself (*Edit Table Config > List Sql Metric > Edit
|
||||
Metric > SQL Expression*)
|
||||
|
||||
For now, though, let’s create a better visualization of these data and add it to
|
||||
a dashboard.
|
||||
|
||||
We change the Chart Type to "Distribution - Bar Chart":
|
||||
|
||||
.. image:: _static/img/tutorial/tutorial_28_bar_chart.png
|
||||
|
||||
Our filter on Maximum temperature measurements was retained, but the query and
|
||||
formatting options are dependent on the chart type, so you’ll have to set the
|
||||
values again:
|
||||
|
||||
.. image:: _static/img/tutorial/tutorial_29_bar_chart_series_metrics.png
|
||||
|
||||
You should note the extensive formatting options for this chart: the ability to
|
||||
set axis labels, margins, ticks, etc. To make the data presentable to a broad
|
||||
audience, you’ll want to apply many of these to slices that end up in dashboards.
|
||||
For now, though, we run our query and get the following chart:
|
||||
|
||||
.. image:: _static/img/tutorial/tutorial_30_bar_chart_results.png
|
||||
:scale: 70%
|
||||
|
||||
Creating a slice and dashboard
|
||||
------------------------------
|
||||
|
||||
This view might be interesting to researchers, so let’s save it. In Superset,
|
||||
a saved query is called a **Slice**.
|
||||
|
||||
To create a slice, click the **Save as** button near the top-left of the
|
||||
explore:
|
||||
|
||||
.. image:: _static/img/tutorial/tutorial_19_click_query.png
|
||||
|
||||
A popup should appear, asking you to name the slice, and optionally add it to a
|
||||
dashboard. Since we haven’t yet created any dashboards, we can create one and
|
||||
immediately add our slice to it. Let’s do it:
|
||||
|
||||
.. image:: _static/img/tutorial/tutorial_31_save_slice_to_dashboard.png
|
||||
:scale: 70%
|
||||
|
||||
Click Save, which will direct you back to your original query. We see that
|
||||
our slice and dashboard were successfully created:
|
||||
|
||||
.. image:: _static/img/tutorial/tutorial_32_save_slice_confirmation.png
|
||||
:scale: 70%
|
||||
|
||||
Let’s check out our new dashboard. We click on the **Dashboards** menu:
|
||||
|
||||
.. image:: _static/img/tutorial/tutorial_33_dashboard.png
|
||||
|
||||
and find the dashboard we just created:
|
||||
|
||||
.. image:: _static/img/tutorial/tutorial_34_weather_dashboard.png
|
||||
|
||||
Things seemed to have worked - our slice is here!
|
||||
|
||||
.. image:: _static/img/tutorial/tutorial_35_slice_on_dashboard.png
|
||||
:scale: 70%
|
||||
|
||||
But it’s a bit smaller than we might like. Luckily, you can adjust the size
|
||||
of slices in a dashboard by clicking, holding and dragging the bottom-right
|
||||
corner to your desired dimensions:
|
||||
|
||||
.. image:: _static/img/tutorial/tutorial_36_adjust_dimensions.gif
|
||||
:scale: 120%
|
||||
|
||||
After adjusting the size, you’ll be asked to click on the icon near the
|
||||
top-right of the dashboard to save the new configuration.
|
||||
|
||||
Congrats! You’ve successfully linked, analyzed, and visualized data in Superset.
|
||||
There are a wealth of other table configuration and visualization options, so
|
||||
please start exploring and creating slices and dashboards of your own.
|
||||
@@ -1,15 +1,15 @@
|
||||
Videos
|
||||
======
|
||||
User Guide
|
||||
==========
|
||||
|
||||
Here is a collection of short videos showing different aspects
|
||||
of Superset.
|
||||
The user guide is a collection of short videos showing different aspect
|
||||
of Panoramix.
|
||||
|
||||
Quick Intro
|
||||
'''''''''''
|
||||
This video demonstrates how Superset works at a high level, it shows how
|
||||
This video demonstrates how Panoramix works at a high level, it shows how
|
||||
to navigate through datasets and dashboards that are already available.
|
||||
|
||||
.. youtube:: https://www.youtube.com/watch?v=3Txm_nj_R7M
|
||||
- Coming soon!
|
||||
|
||||
Dashboard Creation
|
||||
''''''''''''''''''
|
||||
@@ -41,7 +41,7 @@ to toggle them on dashboards.
|
||||
|
||||
Adding a Table
|
||||
''''''''''''''
|
||||
This video shows you how to expose a new table in Superset, and how to
|
||||
This videos shows you how to expose a new table in Panoramix, and how to
|
||||
define the semantics on how this can be accessed by others in the ``Explore``
|
||||
and ``Dashboard`` views.
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
@@ -1,7 +0,0 @@
|
||||
# requires github-changes, run
|
||||
# `npm install -g github-changes`
|
||||
# requires $GITHUB_TOKEN to be set
|
||||
|
||||
# usage: ./github-changes 0.20.0 0.20.1
|
||||
# will overwrite the local CHANGELOG.md, somehow you need to merge it in
|
||||
github-changes -o apache -r incubator-superset --token $GITHUB_TOKEN --between-tags $1...$2
|
||||
38
panoramix/__init__.py
Normal file
38
panoramix/__init__.py
Normal file
@@ -0,0 +1,38 @@
|
||||
"""Package's main module!"""
|
||||
|
||||
import logging
|
||||
import os
|
||||
from flask import Flask, redirect
|
||||
from flask.ext.appbuilder import SQLA, AppBuilder, IndexView
|
||||
from flask.ext.appbuilder.baseviews import expose
|
||||
from flask.ext.migrate import Migrate
|
||||
|
||||
|
||||
APP_DIR = os.path.dirname(__file__)
|
||||
CONFIG_MODULE = os.environ.get('PANORAMIX_CONFIG', 'panoramix.config')
|
||||
|
||||
# Logging configuration
|
||||
logging.basicConfig(format='%(asctime)s:%(levelname)s:%(name)s:%(message)s')
|
||||
logging.getLogger().setLevel(logging.DEBUG)
|
||||
|
||||
app = Flask(__name__)
|
||||
app.config.from_object(CONFIG_MODULE)
|
||||
db = SQLA(app)
|
||||
migrate = Migrate(app, db, directory=APP_DIR + "/migrations")
|
||||
|
||||
|
||||
class MyIndexView(IndexView):
|
||||
@expose('/')
|
||||
def index(self):
|
||||
return redirect('/panoramix/featured')
|
||||
|
||||
appbuilder = AppBuilder(
|
||||
app, db.session,
|
||||
base_template='panoramix/base.html',
|
||||
indexview=MyIndexView,
|
||||
security_manager_class=app.config.get("CUSTOM_SECURITY_MANAGER"))
|
||||
|
||||
sm = appbuilder.sm
|
||||
|
||||
get_session = appbuilder.get_session
|
||||
from panoramix import config, views # noqa
|
||||
68
panoramix/ascii_art.py
Normal file
68
panoramix/ascii_art.py
Normal file
@@ -0,0 +1,68 @@
|
||||
error = (
|
||||
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\n"+
|
||||
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMNNMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\n"+
|
||||
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMM8OI++=~~~~~~=+?IODMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\n"+
|
||||
"MMMMMMMMMMMMMMMMMMMMMMMMMD$~~~~~~~~~~~~~~~~~~~~~~~=$MMMMMMMMMMMMMMMMMMMMMMMMMMMM\n"+
|
||||
"MMMMMMMMMMMMMMMMMMMMMMN8?:~~~~~~~~~~~~~~~~~~~~~~~~~~=+8NMMMMMMMMMMMMMMMMMMMMMMMM\n"+
|
||||
"MMMMMMMMMMMMMMMMMMMMO=~~~~~~~~~~~~~~~~~+I??~~~~~~~~~~~~~+DMMMMMMMMMMMMMMMMMMMMMM\n"+
|
||||
"MMMMMMMMMMMMMMMMMMNI~~~~~~~~~~~~~~~~~~IIIII=~~~~~~~~~~~~~~=NMMMMMMMMMMMMMMMMMMMM\n"+
|
||||
"MMMMMMMMMMMMMMMMM+=~~~~~~~~~~~~~~~~~~~=III+~~~~~~~~~~~~~~~~~?8MMMMMMMMMMMMMMMMMM\n"+
|
||||
"MMMMMMMMMMMMMMMM?~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~+++=~~~~8MMMMMMMMMMMMMMMMM\n"+
|
||||
"MMMMMMMMMMMMMMI=~~~~~~~~~~~~~~~~~~~~~~~~~III?I~~~~~~~~,:++++++~~8MMMMMMMMMMMMMMM\n"+
|
||||
"MMMMMMMMMMMMN7~~~~~~~~~~~~~~~~==+=~~~~~~=IIIII~~~~~~:. ..:=++=~=MMMMMMMMMMMMMMM\n"+
|
||||
"MMMMMMMMMMMO=~~~~~~~~~~~~~~~~+++=~~~~~~~~??I?I~~~~~~. ...,~~~~IMMMMMMMMMMMMM\n"+
|
||||
"MMMMMMMMMMM~~~~~~~~~~~~~~~~~+++:,~~~~~~~~~~~?=~~~~~:. ..~~~~~OMMMMMMMMMMMM\n"+
|
||||
"MMMMMMMMM$=~~~~~~~~~~~~~~~=++:.. ..~~~~~~~~~~~~~~~~,. . . :~~~~~OMMMMMMMMMMM\n"+
|
||||
"MMMMMMMMM~~~~~~~~~~~~~~~~+++,. .~~~~~~~~~~~~~~~.. .. . .~~~~~=OMMMMMMMMMM\n"+
|
||||
"MMMMMMMM?~~~~~~~~~~~~~~~=+~. .~~~~~~~~~~~~~~. ,MMMMM,=~~~~~~NMMMMMMMMM\n"+
|
||||
"MMMMMMMN~~~~~~~~~~~~~~~~~,. .,~~~~~~~~~~~~~.. ZMMM,+Z:~~~~~~$MMMMMMMMM\n"+
|
||||
"MMMMMM8?~~~~~~~~~~~~~~~~~.. ..~~~~~~~~~~~~~:. DMMM,+D~~~~~~~~IMMMMMMMM\n"+
|
||||
"MMMMMMI~~~~~~~~~~~~~~~~~~.. :MMMO~~~~~~~~~~~~~~~,.. ?MMMMMI~~~~~~~~~MMMMMMMM\n"+
|
||||
"MMMMMM=~~~~~~~~~~~~~~~~~~.. MMM+=M:~~~~~~~~~~~~~:. .:IM$~~~~~~~~~~~8MMMMMMM\n"+
|
||||
"MMMMMD~~~~~~~~~~~~~~~~~~~:. MMM:,M:~~~~~~~~~~~~~~~.......:~~~~~~~~~~$MMMMMMM\n"+
|
||||
"MMMMMI~~~~~~~~~~~~~~~~~~~~, MMMMMM~~~~~~~~~~~~~~~~~~,..:~~~~~~~~~~~~+MMMMMMM\n"+
|
||||
"MMMMD+~~~~~~~~~~~~~~~~~~~~~. $MMMM$~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~=MMMMMMM\n"+
|
||||
"MMMM8~~~~~~~~~~~~~~~~~~~~~~:. . .:~~~~~~,..:. .=~~~~~~~~~~~~~~~~~~~~MMMMMMM\n"+
|
||||
"MMMMO~~~~~~~~~~~~~~~~~~~~~~~:, .:~~~~~=8.. .+ . =8ZI~~~~~~~~~~~~~~~~=MMMMMMM\n"+
|
||||
"MMMMZ=~~~~~~~~~~~~~~~~~~~~~~~~:,,,:~~~~~~IZ8:. .O....888?~~~~~~~~~~~~~~~+MMMMMMM\n"+
|
||||
"MMMMO=~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~?888=...I~I88888O?~~~~~~~~~~~~~~7MMMMMMM\n"+
|
||||
"MMMMO~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~Z888OO88888888888O?~~~~~~~~~~~~~OMMMMMMM\n"+
|
||||
"MMMMD+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~=8888888888888888888~~~~~~~~~~~~+MMMMMMMM\n"+
|
||||
"MMMMM7~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~?8888888888888888888?~~~~~~~~~~=$MMMMMMMM\n"+
|
||||
"MMMMMD~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~=$8888888888888888888O~~~~~~~~~~8MMMMMMMMM\n"+
|
||||
"MMMMMN=~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~+Z88888888888888888ZZ7=~~~~~~~~?MMMMMMMMMM\n"+
|
||||
"MMMMMMZ=~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~+Z88888888Z7I===~~~~~~~~~~~~~=OMMMMMMMMMMM\n"+
|
||||
"MMMMMMN$~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~=$88888O7?=~~~~~~~~~~~~~~~~~~OMMMMMMMMMMMM\n"+
|
||||
"MMMMMMMM?~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~I8OZ+~~~~~~~~~~~~~~~~~~~~=DMMMMMMMMMMMMMM\n"+
|
||||
"MMMMMMMM8=~~~~~~~~~~~~~~~~~~~~~~~~~~~~~+$+=~~~~~~~~~~~~~~~~~~~~+MMMMMMMMMMMMMMMM\n"+
|
||||
"MMMMMMMMMD7~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~=$DMMMMMMMMMMMMMMMMMM\n"+
|
||||
"MMMMMMMMMMM?~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~=$OMMMMMMMMMMMMMMMMMMMMM\n"+
|
||||
"MMMMMMMMMMMMD7=~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~+ZMMMMMMMMMMMMMMMMMMMMMMMMMM\n"+
|
||||
"MMMMMMMMMMMMMMZ7=~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~78MMMMMMMMMMMMMMMMMMMMMMMMMMMMM\n"+
|
||||
"MMMMMMMMMMMMMMMMM8OI=~~~~~~~~~~~~~~~~~~~=+?ZDNMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\n"+
|
||||
"MMMMMMMMMMMMMMMMMMMMNDZ7?++~=~==~+?IONMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\n"+
|
||||
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\n"+
|
||||
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\n"+
|
||||
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\n"+
|
||||
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM")
|
||||
|
||||
stacktrace="""
|
||||
-------------------------------------------------------------------------------------------------------
|
||||
=======================================================================================================
|
||||
-------------------------------------------------------------------------------------------------------
|
||||
___ ___ ___
|
||||
( ) ( ) ( )
|
||||
.--. | |_ .---. .--. | | ___ | |_ ___ .-. .---. .--. .--.
|
||||
/ _ \ ( __) / .-, \ / \ | | ( ) ( __) ( ) \ / .-, \ / \ / \\
|
||||
. .' `. ; | | (__) ; | | .-. ; | | ' / | | | ' .-. ; (__) ; | | .-. ; | .-. ;
|
||||
| ' | | | | ___ .'` | | |(___) | |,' / | | ___ | / (___) .'` | | |(___) | | | |
|
||||
_\_`.(___) | |( ) / .'| | | | | . '. | |( ) | | / .'| | | | | |/ |
|
||||
( ). '. | | | | | / | | | | ___ | | `. \ | | | | | | | / | | | | ___ | ' _.'
|
||||
| | `\ | | ' | | ; | ; | | '( ) | | \ \ | ' | | | | ; | ; | | '( ) | .'.-.
|
||||
; '._,' ' ' `-' ; ' `-' | ' `-' | | | \ . ' `-' ; | | ' `-' | ' `-' | ' `-' /
|
||||
'.___.' `.__. `.__.'_. `.__,' (___ ) (___) `.__. (___) `.__.'_. `.__,' `.__.'
|
||||
|
||||
-------------------------------------------------------------------------------------------------------
|
||||
=======================================================================================================
|
||||
-------------------------------------------------------------------------------------------------------
|
||||
"""
|
||||
3
panoramix/assets/.babelrc
Normal file
3
panoramix/assets/.babelrc
Normal file
@@ -0,0 +1,3 @@
|
||||
{
|
||||
"presets" : ["es2015", "react"]
|
||||
}
|
||||
3
panoramix/assets/.eslintignore
Normal file
3
panoramix/assets/.eslintignore
Normal file
@@ -0,0 +1,3 @@
|
||||
node_modules/*
|
||||
vendor/*
|
||||
javascripts/dist/*
|
||||
234
panoramix/assets/.eslintrc
Normal file
234
panoramix/assets/.eslintrc
Normal file
@@ -0,0 +1,234 @@
|
||||
{
|
||||
"root": true,
|
||||
|
||||
"globals": {
|
||||
"Symbol": false,
|
||||
"Map": false,
|
||||
"Set": false,
|
||||
"Reflect": false,
|
||||
},
|
||||
|
||||
"env": {
|
||||
"es6": false,
|
||||
"browser": true,
|
||||
"node": true,
|
||||
},
|
||||
|
||||
"parserOptions": {
|
||||
"ecmaVersion": 5,
|
||||
"sourceType": "module"
|
||||
},
|
||||
|
||||
"rules": {
|
||||
"array-bracket-spacing": [2, "never", {
|
||||
"singleValue": false,
|
||||
"objectsInArrays": false,
|
||||
"arraysInArrays": false
|
||||
}],
|
||||
"array-callback-return": [2],
|
||||
"block-spacing": [2, "always"],
|
||||
"brace-style": [2, "1tbs", { "allowSingleLine": true }],
|
||||
"callback-return": [2, ["callback"]],
|
||||
"camelcase": [0],
|
||||
"comma-dangle": [2, "never"],
|
||||
"comma-spacing": [2],
|
||||
"comma-style": [2, "last"],
|
||||
"curly": [2, "all"],
|
||||
"eqeqeq": 2,
|
||||
"func-names": [0],
|
||||
"id-length": [2, { "min": 1, "max": 25, "properties": "never" }],
|
||||
"key-spacing": [2, { "beforeColon": false, "afterColon": true }],
|
||||
"keyword-spacing": [2, {
|
||||
"before": true,
|
||||
"after": true,
|
||||
"overrides": {
|
||||
"return": { "after": true },
|
||||
"throw": { "after": true },
|
||||
"case": { "after": true }
|
||||
}
|
||||
}],
|
||||
"linebreak-style": [2, "unix"],
|
||||
"lines-around-comment": [2, {
|
||||
"beforeBlockComment": false,
|
||||
"afterBlockComment": false,
|
||||
"beforeLineComment": false,
|
||||
"allowBlockStart": true,
|
||||
"allowBlockEnd": true
|
||||
}],
|
||||
"max-depth": [2, 5],
|
||||
"max-len": [0, 80, 4],
|
||||
"max-nested-callbacks": [1, 2],
|
||||
"max-params": [1, 4],
|
||||
"new-parens": [2],
|
||||
"newline-after-var": [0],
|
||||
"no-bitwise": [0],
|
||||
"no-cond-assign": [2],
|
||||
"no-console": [2],
|
||||
"no-const-assign": [2],
|
||||
"no-constant-condition": [2],
|
||||
"no-control-regex": [2],
|
||||
"no-debugger": [2],
|
||||
"no-delete-var": [2],
|
||||
"no-dupe-args": [2],
|
||||
"no-dupe-class-members": [2],
|
||||
"no-dupe-keys": [2],
|
||||
"no-duplicate-case": [2],
|
||||
"no-else-return": [0],
|
||||
"no-empty": [2],
|
||||
"no-eq-null": [0],
|
||||
"no-eval": [2],
|
||||
"no-ex-assign": [2],
|
||||
"no-extend-native": [2],
|
||||
"no-extra-bind": [2],
|
||||
"no-extra-boolean-cast": [2],
|
||||
"no-extra-label": [2],
|
||||
"no-extra-parens": [0], // needed for clearer #math eg (a - b) / c
|
||||
"no-extra-semi": [2],
|
||||
"no-fallthrough": [2],
|
||||
"no-floating-decimal": [2],
|
||||
"no-func-assign": [2],
|
||||
"no-implied-eval": [2],
|
||||
"no-implicit-coercion": [2, {
|
||||
"boolean": false,
|
||||
"number": true,
|
||||
"string": true
|
||||
}],
|
||||
"no-implicit-globals": [2],
|
||||
"no-inline-comments": [0],
|
||||
"no-invalid-regexp": [2],
|
||||
"no-irregular-whitespace": [2],
|
||||
"no-iterator": [2],
|
||||
"no-label-var": [2],
|
||||
"no-labels": [2, { "allowLoop": false, "allowSwitch": false }],
|
||||
"no-lone-blocks": [2],
|
||||
"no-lonely-if": [2],
|
||||
"no-loop-func": [2],
|
||||
"no-magic-numbers": [0], // doesn't work well with vis cosmetic constant
|
||||
"no-mixed-requires": [1, false],
|
||||
"no-mixed-spaces-and-tabs": [2, false],
|
||||
"no-multi-spaces": [2, {
|
||||
"exceptions": {
|
||||
"ImportDeclaration": true,
|
||||
"Property": true,
|
||||
"VariableDeclarator": true
|
||||
}
|
||||
}],
|
||||
"no-multi-str": [2],
|
||||
"no-multiple-empty-lines": [2, { "max": 1, "maxEOF": 1 }],
|
||||
"no-native-reassign": [2],
|
||||
"no-negated-condition": [2],
|
||||
"no-negated-in-lhs": [2],
|
||||
"no-nested-ternary": [0],
|
||||
"no-new": [2],
|
||||
"no-new-func": [2],
|
||||
"no-new-object": [2],
|
||||
"no-new-require": [0],
|
||||
"no-new-symbol": [2],
|
||||
"no-new-wrappers": [2],
|
||||
"no-obj-calls": [2],
|
||||
"no-octal": [2],
|
||||
"no-octal-escape": [2],
|
||||
"no-path-concat": [0],
|
||||
"no-process-env": [0],
|
||||
"no-process-exit": [2],
|
||||
"no-proto": [2],
|
||||
"no-redeclare": [2],
|
||||
"no-regex-spaces": [2],
|
||||
"no-restricted-modules": [0],
|
||||
"no-restricted-imports": [0],
|
||||
"no-restricted-syntax": [2,
|
||||
"DebuggerStatement",
|
||||
"LabeledStatement",
|
||||
"WithStatement"
|
||||
],
|
||||
"no-return-assign": [2, "always"],
|
||||
"no-script-url": [2],
|
||||
"no-self-assign": [2],
|
||||
"no-self-compare": [0],
|
||||
"no-sequences": [2],
|
||||
"no-shadow-restricted-names": [2],
|
||||
"no-spaced-func": [2],
|
||||
"no-sparse-arrays": [2],
|
||||
"no-sync": [0],
|
||||
"no-ternary": [0],
|
||||
"no-this-before-super": [2],
|
||||
"no-throw-literal": [2],
|
||||
"no-trailing-spaces": [2, { "skipBlankLines": false }],
|
||||
"no-undef": [2, { "typeof": true }],
|
||||
"no-undef-init": [2],
|
||||
"no-undefined": [0],
|
||||
"no-underscore-dangle": [0], // __data__ sometimes
|
||||
"no-unexpected-multiline": [2],
|
||||
"no-unmodified-loop-condition": [2],
|
||||
"no-unneeded-ternary": [2],
|
||||
"no-unreachable": [2],
|
||||
"no-unused-expressions": [2],
|
||||
"no-unused-labels": [2],
|
||||
"no-unused-vars": [2, {
|
||||
"vars": "all",
|
||||
"args": "none", // (d, i) pattern d3 func makes difficult to enforce
|
||||
"varsIgnorePattern": "jQuery"
|
||||
}],
|
||||
"no-use-before-define": [0],
|
||||
"no-useless-call": [2],
|
||||
"no-useless-concat": [2],
|
||||
"no-useless-constructor": [2],
|
||||
"no-void": [0],
|
||||
"no-warning-comments": [0, { "terms": ["todo", "fixme", "xxx"], "location": "start" }],
|
||||
"no-with": [2],
|
||||
"no-whitespace-before-property": [2],
|
||||
"object-curly-spacing": [2, "always"],
|
||||
"object-shorthand": [2, "never"],
|
||||
"one-var": [0],
|
||||
"one-var-declaration-per-line": [2, "initializations"],
|
||||
"operator-assignment": [0, "always"],
|
||||
"padded-blocks": [0],
|
||||
"prefer-arrow-callback": [0],
|
||||
"prefer-const": [0],
|
||||
"prefer-reflect": [0],
|
||||
"prefer-rest-params": [0],
|
||||
"prefer-spread": [0],
|
||||
"prefer-template": [0],
|
||||
"quote-props": [2, "as-needed", { "keywords": true }],
|
||||
"radix": [2],
|
||||
"require-yield": [2],
|
||||
"semi": [2],
|
||||
"semi-spacing": [2, { "before": false, "after": true }],
|
||||
"sort-vars": [0],
|
||||
"sort-imports": [0],
|
||||
"space-before-function-paren": [2, { "anonymous": "always", "named": "never" }],
|
||||
"space-before-blocks": [2, { "functions": "always", "keywords": "always" }],
|
||||
"space-in-brackets": [0, "never", {
|
||||
"singleValue": true,
|
||||
"arraysInArrays": false,
|
||||
"arraysInObjects": false,
|
||||
"objectsInArrays": true,
|
||||
"objectsInObjects": true,
|
||||
"propertyName": false
|
||||
}],
|
||||
},
|
||||
// Temporarily not enforced
|
||||
"new-cap": [2], // @TODO more tricky for the moment
|
||||
"newline-per-chained-call": [2, { "ignoreChainWithDepth": 6 }],
|
||||
"no-param-reassign": [0], // turn on once default args supported
|
||||
"no-shadow": [2, { // @TODO more tricky for the moment with eg 'data'
|
||||
"builtinGlobals": false,
|
||||
"hoist": "functions",
|
||||
"allow": ["i", "d"]
|
||||
}],
|
||||
"space-in-parens": [2, "never"],
|
||||
"space-infix-ops": [2],
|
||||
"space-unary-ops": [2, { "words": true, "nonwords": false }],
|
||||
"spaced-comment": [2, "always", { "markers": ["!"] }],
|
||||
"spaced-line-comment": [0, "always"],
|
||||
"strict": [2, "global"],
|
||||
"template-curly-spacing": [2, "never"],
|
||||
"use-isnan": [2],
|
||||
"valid-jsdoc": [0],
|
||||
"valid-typeof": [2],
|
||||
"vars-on-top": [0],
|
||||
"wrap-iife": [2],
|
||||
"wrap-regex": [2],
|
||||
"yield-star-spacing": [2, { "before": false, "after": true }],
|
||||
"yoda": [2, "never", { "exceptRange": true, "onlyEquality": false }]
|
||||
}
|
||||
BIN
panoramix/assets/images/favicon.png
Normal file
BIN
panoramix/assets/images/favicon.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 5.7 KiB |
1
panoramix/assets/javascripts/css-theme.js
Normal file
1
panoramix/assets/javascripts/css-theme.js
Normal file
@@ -0,0 +1 @@
|
||||
require('../stylesheets/less/index.less');
|
||||
229
panoramix/assets/javascripts/dashboard.js
Normal file
229
panoramix/assets/javascripts/dashboard.js
Normal file
@@ -0,0 +1,229 @@
|
||||
var $ = window.$ = require('jquery');
|
||||
var jQuery = window.jQuery = $;
|
||||
var px = require('./modules/panoramix.js');
|
||||
var d3 = require('d3');
|
||||
require('bootstrap');
|
||||
|
||||
var ace = require('brace');
|
||||
require('brace/mode/css');
|
||||
require('brace/theme/crimson_editor');
|
||||
|
||||
require('./panoramix-select2.js');
|
||||
require('../node_modules/gridster/dist/jquery.gridster.min.css');
|
||||
require('../node_modules/gridster/dist/jquery.gridster.min.js');
|
||||
|
||||
var Dashboard = function (dashboardData) {
|
||||
var dashboard = $.extend(dashboardData, {
|
||||
filters: {},
|
||||
init: function () {
|
||||
this.initDashboardView();
|
||||
var sliceObjects = [],
|
||||
dash = this;
|
||||
dashboard.slices.forEach(function (data) {
|
||||
var slice = px.Slice(data, dash);
|
||||
$("#slice_" + data.slice_id).find('a.refresh').click(function () {
|
||||
slice.render();
|
||||
});
|
||||
sliceObjects.push(slice);
|
||||
slice.render();
|
||||
});
|
||||
this.slices = sliceObjects;
|
||||
},
|
||||
setFilter: function (slice_id, col, vals) {
|
||||
this.addFilter(slice_id, col, vals, false);
|
||||
},
|
||||
addFilter: function (slice_id, col, vals, merge) {
|
||||
if (merge === undefined) {
|
||||
merge = true;
|
||||
}
|
||||
if (!(slice_id in this.filters)) {
|
||||
this.filters[slice_id] = {};
|
||||
}
|
||||
if (!(col in this.filters[slice_id]) || !merge) {
|
||||
this.filters[slice_id][col] = vals;
|
||||
} else {
|
||||
this.filters[slice_id][col] = d3.merge([this.filters[slice_id][col], vals]);
|
||||
}
|
||||
this.refreshExcept(slice_id);
|
||||
},
|
||||
readFilters: function () {
|
||||
// Returns a list of human readable active filters
|
||||
return JSON.stringify(this.filters, null, 4);
|
||||
},
|
||||
refreshExcept: function (slice_id) {
|
||||
var immune = this.metadata.filter_immune_slices;
|
||||
if (immune) {
|
||||
this.slices.forEach(function (slice) {
|
||||
if (slice.data.slice_id !== slice_id && immune.indexOf(slice.data.slice_id) === -1) {
|
||||
slice.render();
|
||||
}
|
||||
});
|
||||
}
|
||||
},
|
||||
clearFilters: function (slice_id) {
|
||||
delete this.filters[slice_id];
|
||||
this.refreshExcept(slice_id);
|
||||
},
|
||||
removeFilter: function (slice_id, col, vals) {
|
||||
if (slice_id in this.filters) {
|
||||
if (col in this.filters[slice_id]) {
|
||||
var a = [];
|
||||
this.filters[slice_id][col].forEach(function (v) {
|
||||
if (vals.indexOf(v) < 0) {
|
||||
a.push(v);
|
||||
}
|
||||
});
|
||||
this.filters[slice_id][col] = a;
|
||||
}
|
||||
}
|
||||
this.refreshExcept(slice_id);
|
||||
},
|
||||
getSlice: function (slice_id) {
|
||||
this.slices.forEach(function (slice, i) {
|
||||
if (slice.slice_id === slice_id) {
|
||||
return slice;
|
||||
}
|
||||
});
|
||||
},
|
||||
initDashboardView: function () {
|
||||
dashboard = this;
|
||||
var gridster = $(".gridster ul").gridster({
|
||||
autogrow_cols: true,
|
||||
widget_margins: [10, 10],
|
||||
widget_base_dimensions: [100, 100],
|
||||
draggable: {
|
||||
handle: '.drag'
|
||||
},
|
||||
resize: {
|
||||
enabled: true,
|
||||
stop: function (e, ui, element) {
|
||||
var slice_data = $(element).data('slice');
|
||||
if (slice_data) {
|
||||
dashboard.getSlice(slice_data.slice_id).resize();
|
||||
}
|
||||
}
|
||||
},
|
||||
serialize_params: function (_w, wgd) {
|
||||
return {
|
||||
slice_id: $(_w).attr('slice_id'),
|
||||
col: wgd.col,
|
||||
row: wgd.row,
|
||||
size_x: wgd.size_x,
|
||||
size_y: wgd.size_y
|
||||
};
|
||||
}
|
||||
}).data('gridster');
|
||||
$("div.gridster").css('visibility', 'visible');
|
||||
$("#savedash").click(function () {
|
||||
var expanded_slices = {};
|
||||
$.each($(".slice_info"), function (i, d) {
|
||||
var widget = $(this).parents('.widget');
|
||||
var slice_description = widget.find('.slice_description');
|
||||
if (slice_description.is(":visible")) {
|
||||
expanded_slices[$(d).attr('slice_id')] = true;
|
||||
}
|
||||
});
|
||||
var data = {
|
||||
positions: gridster.serialize(),
|
||||
css: editor.getValue(),
|
||||
expanded_slices: expanded_slices
|
||||
};
|
||||
$.ajax({
|
||||
type: "POST",
|
||||
url: '/panoramix/save_dash/' + dashboard.id + '/',
|
||||
data: {
|
||||
data: JSON.stringify(data)
|
||||
},
|
||||
success: function () {
|
||||
alert("Saved!");
|
||||
},
|
||||
error: function () {
|
||||
alert("Error :(");
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
var editor = ace.edit("dash_css");
|
||||
editor.$blockScrolling = Infinity;
|
||||
|
||||
editor.setTheme("ace/theme/crimson_editor");
|
||||
editor.setOptions({
|
||||
minLines: 16,
|
||||
maxLines: Infinity,
|
||||
useWorker: false
|
||||
});
|
||||
editor.getSession().setMode("ace/mode/css");
|
||||
|
||||
$(".select2").select2({
|
||||
dropdownAutoWidth: true
|
||||
});
|
||||
$("#css_template").on("change", function () {
|
||||
var css = $(this).find('option:selected').data('css');
|
||||
editor.setValue(css);
|
||||
|
||||
$('#dash_css').val(css);
|
||||
injectCss("dashboard-template", css);
|
||||
|
||||
});
|
||||
$('#filters').click(function () {
|
||||
alert(dashboard.readFilters());
|
||||
});
|
||||
$("a.remove-chart").click(function () {
|
||||
var li = $(this).parents("li");
|
||||
gridster.remove_widget(li);
|
||||
});
|
||||
|
||||
$("li.widget").click(function (e) {
|
||||
var $this = $(this);
|
||||
var $target = $(e.target);
|
||||
|
||||
if ($target.hasClass("slice_info")) {
|
||||
$this.find(".slice_description").slideToggle(0, function () {
|
||||
$this.find('.refresh').click();
|
||||
});
|
||||
} else if ($target.hasClass("controls-toggle")) {
|
||||
$this.find(".chart-controls").toggle();
|
||||
}
|
||||
});
|
||||
|
||||
editor.on("change", function () {
|
||||
var css = editor.getValue();
|
||||
$('#dash_css').val(css);
|
||||
injectCss("dashboard-template", css);
|
||||
});
|
||||
|
||||
var css = $('.dashboard').data('css');
|
||||
injectCss("dashboard-template", css);
|
||||
|
||||
// Injects the passed css string into a style sheet with the specified className
|
||||
// If a stylesheet doesn't exist with the passed className, one will be injected into <head>
|
||||
function injectCss(className, css) {
|
||||
|
||||
var head = document.head || document.getElementsByTagName('head')[0];
|
||||
var style = document.querySelector('.' + className);
|
||||
|
||||
if (!style) {
|
||||
if (className.split(' ').length > 1) {
|
||||
throw new Error("This method only supports selections with a single class name.");
|
||||
}
|
||||
style = document.createElement('style');
|
||||
style.className = className;
|
||||
style.type = 'text/css';
|
||||
head.appendChild(style);
|
||||
}
|
||||
|
||||
if (style.styleSheet) {
|
||||
style.styleSheet.cssText = css;
|
||||
} else {
|
||||
style.innerHTML = css;
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
dashboard.init();
|
||||
return dashboard;
|
||||
};
|
||||
|
||||
$(document).ready(function () {
|
||||
Dashboard($('.dashboard').data('dashboard'));
|
||||
});
|
||||
334
panoramix/assets/javascripts/explore.js
Normal file
334
panoramix/assets/javascripts/explore.js
Normal file
@@ -0,0 +1,334 @@
|
||||
// Javascript for the explorer page
|
||||
// Init explorer view -> load vis dependencies -> read data (from dynamic html) -> render slice
|
||||
// nb: to add a new vis, you must also add a Python fn in viz.py
|
||||
//
|
||||
// js
|
||||
var $ = window.$ = require('jquery');
|
||||
var jQuery = window.jQuery = $;
|
||||
var px = require('./modules/panoramix.js');
|
||||
|
||||
require('jquery-ui');
|
||||
$.widget.bridge('uitooltip', $.ui.tooltip); // Shutting down jq-ui tooltips
|
||||
require('bootstrap');
|
||||
|
||||
require('./panoramix-select2.js');
|
||||
|
||||
require('../node_modules/bootstrap-toggle/js/bootstrap-toggle.min.js');
|
||||
|
||||
// css
|
||||
require('../vendor/pygments.css');
|
||||
require('../node_modules/bootstrap-toggle/css/bootstrap-toggle.min.css');
|
||||
|
||||
var slice;
|
||||
|
||||
function prepForm() {
|
||||
var i = 1;
|
||||
// Assigning the right id to form elements in filters
|
||||
$("#filters > div").each(function () {
|
||||
$(this).attr("id", function () {
|
||||
return "flt_" + i;
|
||||
});
|
||||
$(this).find("#flt_col_0")
|
||||
.attr("id", function () {
|
||||
return "flt_col_" + i;
|
||||
})
|
||||
.attr("name", function () {
|
||||
return "flt_col_" + i;
|
||||
});
|
||||
$(this).find("#flt_op_0")
|
||||
.attr("id", function () {
|
||||
return "flt_op_" + i;
|
||||
})
|
||||
.attr("name", function () {
|
||||
return "flt_op_" + i;
|
||||
});
|
||||
$(this).find("#flt_eq_0")
|
||||
.attr("id", function () {
|
||||
return "flt_eq_" + i;
|
||||
})
|
||||
.attr("name", function () {
|
||||
return "flt_eq_" + i;
|
||||
});
|
||||
i++;
|
||||
});
|
||||
}
|
||||
|
||||
function renderSlice() {
|
||||
prepForm();
|
||||
slice.render();
|
||||
}
|
||||
|
||||
function initExploreView() {
|
||||
|
||||
function druidify() {
|
||||
$('div.alert').remove();
|
||||
history.pushState({}, document.title, slice.querystring());
|
||||
renderSlice();
|
||||
}
|
||||
|
||||
function get_collapsed_fieldsets() {
|
||||
var collapsed_fieldsets = $("#collapsed_fieldsets").val();
|
||||
|
||||
if (collapsed_fieldsets !== undefined && collapsed_fieldsets !== "") {
|
||||
collapsed_fieldsets = collapsed_fieldsets.split('||');
|
||||
} else {
|
||||
collapsed_fieldsets = [];
|
||||
}
|
||||
return collapsed_fieldsets;
|
||||
}
|
||||
|
||||
function toggle_fieldset(legend, animation) {
|
||||
var parent = legend.parent();
|
||||
var fieldset = parent.find(".legend_label").text();
|
||||
var collapsed_fieldsets = get_collapsed_fieldsets();
|
||||
var index;
|
||||
|
||||
if (parent.hasClass("collapsed")) {
|
||||
if (animation) {
|
||||
parent.find(".fieldset_content").slideDown();
|
||||
} else {
|
||||
parent.find(".fieldset_content").show();
|
||||
}
|
||||
parent.removeClass("collapsed");
|
||||
parent.find("span.collapser").text("[-]");
|
||||
|
||||
// removing from array, js is overcomplicated
|
||||
index = collapsed_fieldsets.indexOf(fieldset);
|
||||
if (index !== -1) {
|
||||
collapsed_fieldsets.splice(index, 1);
|
||||
}
|
||||
} else { // not collapsed
|
||||
if (animation) {
|
||||
parent.find(".fieldset_content").slideUp();
|
||||
} else {
|
||||
parent.find(".fieldset_content").hide();
|
||||
}
|
||||
|
||||
parent.addClass("collapsed");
|
||||
parent.find("span.collapser").text("[+]");
|
||||
index = collapsed_fieldsets.indexOf(fieldset);
|
||||
if (index === -1 && fieldset !== "" && fieldset !== undefined) {
|
||||
collapsed_fieldsets.push(fieldset);
|
||||
}
|
||||
}
|
||||
|
||||
$("#collapsed_fieldsets").val(collapsed_fieldsets.join("||"));
|
||||
}
|
||||
|
||||
$('legend').click(function () {
|
||||
toggle_fieldset($(this), true);
|
||||
});
|
||||
|
||||
function copyURLToClipboard(url) {
|
||||
var textArea = document.createElement("textarea");
|
||||
textArea.style.position = 'fixed';
|
||||
textArea.style.left = '-1000px';
|
||||
textArea.value = url;
|
||||
|
||||
document.body.appendChild(textArea);
|
||||
textArea.select();
|
||||
|
||||
try {
|
||||
var successful = document.execCommand('copy');
|
||||
if (!successful) {
|
||||
throw new Error("Not successful");
|
||||
}
|
||||
} catch (err) {
|
||||
window.alert("Sorry, your browser does not support copying. Use Ctrl / Cmd + C!");
|
||||
}
|
||||
document.body.removeChild(textArea);
|
||||
return successful;
|
||||
}
|
||||
|
||||
$('#shortner').click(function () {
|
||||
$.ajax({
|
||||
type: "POST",
|
||||
url: '/r/shortner/',
|
||||
data: {
|
||||
data: '/' + window.location.pathname + slice.querystring()
|
||||
},
|
||||
success: function (data) {
|
||||
var close = '<a style="cursor: pointer;"><i class="fa fa-close" id="close_shortner"></i></a>';
|
||||
var copy = '<a style="cursor: pointer;"><i class="fa fa-clipboard" title="Copy to clipboard" id="copy_url"></i></a>';
|
||||
var spaces = ' ';
|
||||
var popover = data + spaces + copy + spaces + close;
|
||||
|
||||
var $shortner = $('#shortner')
|
||||
.popover({
|
||||
content: popover,
|
||||
placement: 'left',
|
||||
html: true,
|
||||
trigger: 'manual'
|
||||
})
|
||||
.popover('show');
|
||||
|
||||
$('#copy_url').tooltip().click(function () {
|
||||
var success = copyURLToClipboard(data);
|
||||
if (success) {
|
||||
$(this).attr("data-original-title", "Copied!").tooltip('fixTitle').tooltip('show');
|
||||
window.setTimeout(destroyPopover, 1200);
|
||||
}
|
||||
});
|
||||
$('#close_shortner').click(destroyPopover);
|
||||
|
||||
function destroyPopover() {
|
||||
$shortner.popover('destroy');
|
||||
}
|
||||
},
|
||||
error: function () {
|
||||
alert("Error :(");
|
||||
}
|
||||
});
|
||||
});
|
||||
$("#viz_type").change(function () {
|
||||
$("#query").submit();
|
||||
});
|
||||
|
||||
var collapsed_fieldsets = get_collapsed_fieldsets();
|
||||
for (var i = 0; i < collapsed_fieldsets.length; i++) {
|
||||
toggle_fieldset($('legend:contains("' + collapsed_fieldsets[i] + '")'), false);
|
||||
}
|
||||
|
||||
$(".select2").select2({
|
||||
dropdownAutoWidth: true
|
||||
});
|
||||
$(".select2Sortable").select2({
|
||||
dropdownAutoWidth: true
|
||||
});
|
||||
$(".select2Sortable").select2Sortable({
|
||||
bindOrder: 'sortableStop'
|
||||
});
|
||||
$("form").show();
|
||||
$('[data-toggle="tooltip"]').tooltip({
|
||||
container: 'body'
|
||||
});
|
||||
$(".ui-helper-hidden-accessible").remove(); // jQuery-ui 1.11+ creates a div for every tooltip
|
||||
|
||||
function set_filters() {
|
||||
for (var i = 1; i < 10; i++) {
|
||||
var eq = px.getParam("flt_eq_" + i);
|
||||
if (eq !== '') {
|
||||
add_filter(i);
|
||||
}
|
||||
}
|
||||
}
|
||||
set_filters();
|
||||
|
||||
function add_filter(i) {
|
||||
var cp = $("#flt0").clone();
|
||||
$(cp).appendTo("#filters");
|
||||
$(cp).show();
|
||||
if (i !== undefined) {
|
||||
$(cp).find("#flt_eq_0").val(px.getParam("flt_eq_" + i));
|
||||
$(cp).find("#flt_op_0").val(px.getParam("flt_op_" + i));
|
||||
$(cp).find("#flt_col_0").val(px.getParam("flt_col_" + i));
|
||||
}
|
||||
$(cp).find('select').select2();
|
||||
$(cp).find('.remove').click(function () {
|
||||
$(this).parent().parent().remove();
|
||||
});
|
||||
}
|
||||
|
||||
$(window).bind("popstate", function (event) {
|
||||
// Browser back button
|
||||
var returnLocation = history.location || document.location;
|
||||
// Could do something more lightweight here, but we're not optimizing
|
||||
// for the use of the back button anyways
|
||||
returnLocation.reload();
|
||||
});
|
||||
|
||||
$("#plus").click(add_filter);
|
||||
$("#btn_save").click(function () {
|
||||
var slice_name = prompt("Name your slice!");
|
||||
if (slice_name !== "" && slice_name !== null) {
|
||||
$("#slice_name").val(slice_name);
|
||||
prepForm();
|
||||
$("#action").val("save");
|
||||
$("#query").submit();
|
||||
}
|
||||
});
|
||||
$("#btn_overwrite").click(function () {
|
||||
var flag = confirm("Overwrite slice [" + $("#slice_name").val() + "] !?");
|
||||
if (flag) {
|
||||
$("#action").val("overwrite");
|
||||
prepForm();
|
||||
$("#query").submit();
|
||||
}
|
||||
});
|
||||
|
||||
$(".druidify").click(druidify);
|
||||
|
||||
function create_choices(term, data) {
|
||||
var filtered = $(data).filter(function () {
|
||||
return this.text.localeCompare(term) === 0;
|
||||
});
|
||||
if (filtered.length === 0) {
|
||||
return {
|
||||
id: term,
|
||||
text: term
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
function initSelectionToValue(element, callback) {
|
||||
callback({
|
||||
id: element.val(),
|
||||
text: element.val()
|
||||
});
|
||||
}
|
||||
|
||||
$(".select2_freeform").each(function () {
|
||||
var parent = $(this).parent();
|
||||
var name = $(this).attr('name');
|
||||
var l = [];
|
||||
var selected = '';
|
||||
for (var i = 0; i < this.options.length; i++) {
|
||||
l.push({
|
||||
id: this.options[i].value,
|
||||
text: this.options[i].text
|
||||
});
|
||||
if (this.options[i].selected) {
|
||||
selected = this.options[i].value;
|
||||
}
|
||||
}
|
||||
parent.append(
|
||||
'<input class="' + $(this).attr('class') + '" name="' + name + '" type="text" value="' + selected + '">'
|
||||
);
|
||||
$("input[name='" + name + "']").select2({
|
||||
createSearchChoice: create_choices,
|
||||
initSelection: initSelectionToValue,
|
||||
dropdownAutoWidth: true,
|
||||
multiple: false,
|
||||
data: l
|
||||
});
|
||||
$(this).remove();
|
||||
});
|
||||
}
|
||||
|
||||
$(document).ready(function () {
|
||||
initExploreView();
|
||||
|
||||
// Dynamically register this visualization
|
||||
var visType = window.viz_type.value;
|
||||
px.registerViz(visType);
|
||||
|
||||
var data = $('.slice').data('slice');
|
||||
slice = px.Slice(data);
|
||||
|
||||
//
|
||||
$('.slice').data('slice', slice);
|
||||
|
||||
// call vis render method, which issues ajax
|
||||
renderSlice();
|
||||
|
||||
// make checkbox inputs display as toggles
|
||||
$(':checkbox')
|
||||
.addClass('pull-right')
|
||||
.attr("data-onstyle", "default")
|
||||
.bootstrapToggle({
|
||||
size: 'mini'
|
||||
});
|
||||
|
||||
$('div.toggle').addClass('pull-right');
|
||||
slice.bindResizeToWindowResize();
|
||||
});
|
||||
19
panoramix/assets/javascripts/featured.js
Normal file
19
panoramix/assets/javascripts/featured.js
Normal file
@@ -0,0 +1,19 @@
|
||||
var $ = window.$ = require('jquery');
|
||||
var jQuery = window.jQuery = $;
|
||||
var px = require('./modules/panoramix.js');
|
||||
|
||||
require('bootstrap');
|
||||
require('datatables');
|
||||
require('../node_modules/datatables-bootstrap3-plugin/media/css/datatables-bootstrap3.css');
|
||||
|
||||
$(document).ready(function () {
|
||||
$('#dataset-table').DataTable({
|
||||
bPaginate: false,
|
||||
order: [
|
||||
[1, "asc"]
|
||||
]
|
||||
});
|
||||
$('#dataset-table_info').remove();
|
||||
//$('input[type=search]').addClass('form-control'); # TODO get search box to look nice
|
||||
$('#dataset-table').show();
|
||||
});
|
||||
18
panoramix/assets/javascripts/index.jsx
Normal file
18
panoramix/assets/javascripts/index.jsx
Normal file
@@ -0,0 +1,18 @@
|
||||
var $ = require('jquery');
|
||||
var jQuery = $;
|
||||
import React from 'react';
|
||||
import { render } from 'react-dom';
|
||||
import { Jumbotron } from 'react-bootstrap';
|
||||
|
||||
class App extends React.Component {
|
||||
render () {
|
||||
return (
|
||||
<Jumbotron>
|
||||
<h1>Panoramix</h1>
|
||||
<p>Extensible visualization tool for exploring data from any database.</p>
|
||||
</Jumbotron>
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
render(<App />, document.getElementById('app'));
|
||||
300
panoramix/assets/javascripts/modules/panoramix.js
Normal file
300
panoramix/assets/javascripts/modules/panoramix.js
Normal file
@@ -0,0 +1,300 @@
|
||||
var $ = require('jquery');
|
||||
var jQuery = $;
|
||||
var d3 = require('d3');
|
||||
|
||||
require('../../stylesheets/panoramix.css');
|
||||
|
||||
// vis sources
|
||||
var sourceMap = {
|
||||
area: 'nvd3_vis.js',
|
||||
bar: 'nvd3_vis.js',
|
||||
bubble: 'nvd3_vis.js',
|
||||
big_number: 'big_number.js',
|
||||
compare: 'nvd3_vis.js',
|
||||
dist_bar: 'nvd3_vis.js',
|
||||
directed_force: 'directed_force.js',
|
||||
filter_box: 'filter_box.js',
|
||||
heatmap: 'heatmap.js',
|
||||
iframe: 'iframe.js',
|
||||
line: 'nvd3_vis.js',
|
||||
markup: 'markup.js',
|
||||
para: 'parallel_coordinates.js',
|
||||
pie: 'nvd3_vis.js',
|
||||
pivot_table: 'pivot_table.js',
|
||||
sankey: 'sankey.js',
|
||||
sunburst: 'sunburst.js',
|
||||
table: 'table.js',
|
||||
word_cloud: 'word_cloud.js',
|
||||
world_map: 'world_map.js'
|
||||
};
|
||||
|
||||
var color = function () {
|
||||
// Color related utility functions go in this object
|
||||
var bnbColors = [
|
||||
//rausch hackb kazan babu lima beach barol
|
||||
'#ff5a5f', '#7b0051', '#007A87', '#00d1c1', '#8ce071', '#ffb400', '#b4a76c',
|
||||
'#ff8083', '#cc0086', '#00a1b3', '#00ffeb', '#bbedab', '#ffd266', '#cbc29a',
|
||||
'#ff3339', '#ff1ab1', '#005c66', '#00b3a5', '#55d12e', '#b37e00', '#988b4e'
|
||||
];
|
||||
var spectrums = {
|
||||
blue_white_yellow: ['#00d1c1', 'white', '#ffb400'],
|
||||
fire: ['white', 'yellow', 'red', 'black'],
|
||||
white_black: ['white', 'black'],
|
||||
black_white: ['black', 'white']
|
||||
};
|
||||
var colorBnb = function () {
|
||||
// Color factory
|
||||
var seen = {};
|
||||
return function (s) {
|
||||
// next line is for dashed series that should have the same color
|
||||
s = s.replace('---', '');
|
||||
if (seen[s] === undefined) {
|
||||
seen[s] = Object.keys(seen).length;
|
||||
}
|
||||
return this.bnbColors[seen[s] % this.bnbColors.length];
|
||||
};
|
||||
};
|
||||
var colorScalerFactory = function (colors, data, accessor) {
|
||||
// Returns a linear scaler our of an array of color
|
||||
if (!Array.isArray(colors)) {
|
||||
colors = spectrums[colors];
|
||||
}
|
||||
|
||||
var ext = [0, 1];
|
||||
if (data !== undefined) {
|
||||
ext = d3.extent(data, accessor);
|
||||
}
|
||||
|
||||
var points = [];
|
||||
var chunkSize = (ext[1] - ext[0]) / colors.length;
|
||||
$.each(colors, function (i, c) {
|
||||
points.push(i * chunkSize);
|
||||
});
|
||||
return d3.scale.linear().domain(points).range(colors);
|
||||
};
|
||||
return {
|
||||
bnbColors: bnbColors,
|
||||
category21: colorBnb(),
|
||||
colorScalerFactory: colorScalerFactory
|
||||
};
|
||||
};
|
||||
|
||||
// panoramix.js — core client-side module: per-chart ("slice") lifecycle,
// UTC-aware date formatting helpers, and the visualization registry.
var px = (function () {

  // Registry: viz_type name -> visualization factory function.
  var visualizations = {};

  /**
   * Return the value of a query-string parameter from the current URL,
   * or '' when the parameter is absent.
   */
  function getParam(name) {
    name = name.replace(/[\[]/, "\\[").replace(/[\]]/, "\\]");
    var regex = new RegExp("[\\?&]" + name + "=([^&#]*)"),
        results = regex.exec(location.search);
    return results === null ? "" : decodeURIComponent(results[1].replace(/\+/g, " "));
  }

  // Build a local Date carrying the UTC components of `dttm`, so that
  // d3's local-time formatters effectively render the UTC value.
  function UTC(dttm) {
    return new Date(
      dttm.getUTCFullYear(), dttm.getUTCMonth(), dttm.getUTCDate(),
      dttm.getUTCHours(), dttm.getUTCMinutes(), dttm.getUTCSeconds());
  }

  // Context-sensitive axis tick formatter: shows only the most
  // significant changing time unit for a given tick date.
  var tickMultiFormat = d3.time.format.multi([
    [".%L", function (d) {
      return d.getMilliseconds();
    }], // If there are milliseconds, show only them
    [":%S", function (d) {
      return d.getSeconds();
    }], // If there are seconds, show only them
    ["%a %b %d, %I:%M %p", function (d) {
      return d.getMinutes() !== 0;
    }], // If there are non-zero minutes, show Date, Hour:Minute [AM/PM]
    ["%a %b %d, %I %p", function (d) {
      return d.getHours() !== 0;
    }], // If there are hours that are multiples of 3, show date and AM/PM
    ["%a %b %d, %Y", function (d) {
      return d.getDate() !== 1;
    }], // If not the first of the month, do "month day, year."
    ["%B %Y", function (d) {
      return d.getMonth() !== 0 && d.getDate() === 1;
    }], // If the first of the month, do "month day, year."
    ["%Y", function (d) {
      return true;
    }] // fall back on month, year
  ]);

  // Format a datetime (string, ms timestamp, or Date) for display, in UTC.
  function formatDate(dttm) {
    var d = UTC(new Date(dttm));
    return tickMultiFormat(d);
  }

  // Build a formatter for the given d3 time-format string that
  // interprets its input as UTC.
  function timeFormatFactory(d3timeFormat) {
    var f = d3.time.format(d3timeFormat);
    return function (dttm) {
      var d = UTC(new Date(dttm));
      return f(d);
    };
  }

  /**
   * A Slice wraps a single chart on the page: it owns its DOM container,
   * the query string used to fetch data, the loading/timer chrome, and
   * the viz instance that actually renders.
   *
   * @param data       slice configuration emitted by the server
   *                   (token, slice_id, form_data, json_endpoint, ...)
   * @param dashboard  optional enclosing dashboard object; when present,
   *                   the slice participates in cross-filtering
   */
  var Slice = function (data, dashboard) {
    var timer;
    var token = $('#' + data.token);
    var container_id = data.token + '_con';
    var selector = '#' + container_id;
    var container = $(selector);
    var slice_id = data.slice_id;
    var dttm = 0; // elapsed ms shown in the #timer badge
    var stopwatch = function () {
      dttm += 10;
      var num = dttm / 1000;
      $('#timer').text(num.toFixed(2) + " sec");
    };
    var qrystr = '';
    var always = function (data) {
      // Private f, runs after done and error
      clearInterval(timer);
      $('#timer').removeClass('btn-warning');
    };
    // FIX: `slice` used to be a module-level variable shared by every
    // Slice instance, so the window-resize handler always resized the
    // most recently created slice. It is now local to each instance.
    var slice = {
      data: data,
      container: container,
      container_id: container_id,
      selector: selector,
      // Query string for the data request, including dashboard-level
      // extra_filters or the explore form's serialized #query form.
      querystring: function () {
        var parser = document.createElement('a');
        parser.href = data.json_endpoint;
        if (dashboard !== undefined) {
          var flts = encodeURIComponent(JSON.stringify(dashboard.filters));
          qrystr = parser.search + "&extra_filters=" + flts;
        } else if ($('#query').length === 0) {
          qrystr = parser.search;
        } else {
          qrystr = '?' + $('#query').serialize();
        }
        return qrystr;
      },
      // Path + query string for the JSON data endpoint.
      jsonEndpoint: function () {
        var parser = document.createElement('a');
        parser.href = data.json_endpoint;
        var endpoint = parser.pathname + this.querystring() + "&json=true";
        return endpoint;
      },
      // Success callback: hide the spinner, surface the query, and wire
      // the export buttons to the endpoints from the response payload.
      done: function (data) {
        clearInterval(timer);
        token.find("img.loading").hide();
        container.show();
        if (data !== undefined) {
          $("#query_container").html(data.query);
        }
        $('#timer').removeClass('btn-warning');
        $('#timer').addClass('btn-success');
        $('span.query').removeClass('disabled');
        $('#json').click(function () {
          window.location = data.json_endpoint;
        });
        $('#standalone').click(function () {
          window.location = data.standalone_endpoint;
        });
        $('#csv').click(function () {
          window.location = data.csv_endpoint;
        });
        $('.btn-group.results span').removeAttr('disabled');
        always(data);
      },
      // Failure callback: render the error message in the container.
      error: function (msg) {
        token.find("img.loading").hide();
        var err = '<div class="alert alert-danger">' + msg + '</div>';
        container.html(err);
        container.show();
        $('span.query').removeClass('disabled');
        $('#timer').addClass('btn-danger');
        always(data);
      },
      width: function () {
        return token.width();
      },
      // Available chart height: widget height minus header and (when
      // visible) the slice description.
      height: function () {
        var others = 0;
        var widget = container.parents('.widget');
        var slice_description = widget.find('.slice_description');
        if (slice_description.is(":visible")) {
          others += widget.find('.slice_description').height() + 25;
        }
        others += widget.find('.chart-header').height();
        return widget.height() - others - 10;
      },
      // Re-render this slice 500ms after the last window resize event.
      bindResizeToWindowResize: function () {
        var resizeTimer;
        $(window).on('resize', function (e) {
          clearTimeout(resizeTimer);
          resizeTimer = setTimeout(function () {
            slice.resize();
          }, 500);
        });
      },
      // Full render: reset chrome, start the stopwatch, delegate to viz.
      render: function () {
        $('.btn-group.results span').attr('disabled', 'disabled');
        token.find("img.loading").show();
        container.hide();
        container.html('');
        dttm = 0;
        timer = setInterval(stopwatch, 10);
        $('#timer').removeClass('btn-danger btn-success');
        $('#timer').addClass('btn-warning');
        this.viz.render();
      },
      resize: function () {
        token.find("img.loading").show();
        container.hide();
        container.html('');
        this.viz.render();
        this.viz.resize();
      },
      addFilter: function (col, vals) {
        if (dashboard !== undefined) {
          dashboard.addFilter(slice_id, col, vals);
        }
      },
      setFilter: function (col, vals) {
        if (dashboard !== undefined) {
          dashboard.setFilter(slice_id, col, vals);
        }
      },
      clearFilter: function () {
        if (dashboard !== undefined) {
          // FIX: removed spurious `delete` — applying `delete` to a
          // function-call result is meaningless (and a SyntaxError in
          // strict mode); the dashboard method does the actual work.
          dashboard.clearFilter(slice_id);
        }
      },
      removeFilter: function (col, vals) {
        if (dashboard !== undefined) {
          // FIX: same spurious `delete` removed here.
          dashboard.removeFilter(slice_id, col, vals);
        }
      }
    };
    var visType = data.form_data.viz_type;
    px.registerViz(visType);
    slice.viz = visualizations[data.form_data.viz_type](slice);
    return slice;
  };

  // Resolve the named visualization through `sourceMap` (viz name ->
  // module filename) and register its factory. Throws when unknown.
  function registerViz(name) {
    var visSource = sourceMap[name];

    if (visSource) {
      var visFactory = require('../../visualizations/' + visSource);
      if (typeof visFactory === 'function') {
        visualizations[name] = visFactory;
      }
    } else {
      throw new Error("require(" + name + ") failed.");
    }
  }

  // Export public functions
  return {
    registerViz: registerViz,
    Slice: Slice,
    formatDate: formatDate,
    timeFormatFactory: timeFormatFactory,
    color: color(),
    getParam: getParam
  };
})();

module.exports = px;
|
||||
55
panoramix/assets/javascripts/modules/utils.js
Normal file
55
panoramix/assets/javascripts/modules/utils.js
Normal file
@@ -0,0 +1,55 @@
|
||||
var d3 = require('d3');
|
||||
|
||||
/*
|
||||
Utility function that takes a d3 svg:text selection and a max width, and splits the
|
||||
text's text across multiple tspan lines such that any given line does not exceed max width
|
||||
|
||||
If text does not span multiple lines AND adjustedY is passed, will set the text to the passed val
|
||||
*/
|
||||
function wrapSvgText(text, width, adjustedY) {
|
||||
var lineHeight = 1; // ems
|
||||
|
||||
text.each(function () {
|
||||
var text = d3.select(this),
|
||||
words = text.text().split(/\s+/),
|
||||
word,
|
||||
line = [],
|
||||
lineNumber = 0,
|
||||
x = text.attr("x"),
|
||||
y = text.attr("y"),
|
||||
dy = parseFloat(text.attr("dy")),
|
||||
tspan = text.text(null)
|
||||
.append("tspan")
|
||||
.attr("x", x)
|
||||
.attr("y", y)
|
||||
.attr("dy", dy + "em");
|
||||
|
||||
var didWrap = false;
|
||||
|
||||
for (var i = 0; i < words.length; i++) {
|
||||
word = words[i];
|
||||
line.push(word);
|
||||
tspan.text(line.join(" "));
|
||||
|
||||
if (tspan.node().getComputedTextLength() > width) {
|
||||
line.pop(); // remove word that pushes over the limit
|
||||
tspan.text(line.join(" "));
|
||||
line = [word];
|
||||
tspan = text.append("tspan")
|
||||
.attr("x", x)
|
||||
.attr("y", y)
|
||||
.attr("dy", ++lineNumber * lineHeight + dy + "em")
|
||||
.text(word);
|
||||
|
||||
didWrap = true;
|
||||
}
|
||||
}
|
||||
if (!didWrap && typeof adjustedY !== "undefined") {
|
||||
tspan.attr("y", adjustedY);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
wrapSvgText: wrapSvgText
|
||||
};
|
||||
5
panoramix/assets/javascripts/panoramix-select2.js
Normal file
5
panoramix/assets/javascripts/panoramix-select2.js
Normal file
@@ -0,0 +1,5 @@
|
||||
// Bundle select2 (widget + styling) and the jquery-ui base theme so any
// entry point importing this module gets fully styled select boxes.
// Note: the CSS requires must precede the JS requires so webpack emits
// the styles before the plugin initializes.
require('../node_modules/select2/select2.css');
require('../node_modules/select2-bootstrap-css/select2-bootstrap.min.css');
require('../node_modules/jquery-ui/themes/base/jquery-ui.css');
require('select2');
require('../vendor/select2.sortable.js');
|
||||
97
panoramix/assets/javascripts/sql.js
Normal file
97
panoramix/assets/javascripts/sql.js
Normal file
@@ -0,0 +1,97 @@
|
||||
// sql.js — ad-hoc SQL editor page: ace editor setup, table metadata
// panel, and the run-query ajax round trip.
var $ = window.$ = require('jquery');
var jQuery = window.jQuery = $;
require('select2');
require('datatables');
require('bootstrap');

var ace = require('brace');
require('brace/mode/sql');
require('brace/theme/crimson_editor');

$(document).ready(function () {
  // Return the value of a query-string parameter, or '' when absent.
  function getParam(name) {
    name = name.replace(/[\[]/, "\\[").replace(/[\]]/, "\\]");
    var regex = new RegExp("[\\?&]" + name + "=([^&#]*)"),
        results = regex.exec(location.search);
    return results === null ? "" : decodeURIComponent(results[1].replace(/\+/g, " "));
  }

  // Wire up the whole SQL editor view for the current database.
  function initSqlEditorView() {
    var database_id = $('#database_id').val();
    var editor = ace.edit("sql");
    editor.$blockScrolling = Infinity; // silences an ace deprecation warning
    editor.getSession().setUseWrapMode(true);

    // Replace the plain #sql textarea with the ace editor.
    $('#sql').hide();
    editor.setTheme("ace/theme/crimson_editor");
    editor.setOptions({
      minLines: 16,
      maxLines: Infinity
    });
    editor.getSession().setMode("ace/mode/sql");
    editor.focus();
    $("select").select2({
      dropdownAutoWidth: true
    });

    // Load the metadata panel for the currently selected table.
    function showTableMetadata() {
      $(".metadata").load(
        '/panoramix/table/' + database_id + '/' + $("#dbtable").val() + '/');
    }
    $("#dbtable").on("change", showTableMetadata);
    showTableMetadata();
    $("#create_view").click(function () {
      alert("Not implemented");
    });
    $(".sqlcontent").show();

    // Replace the editor contents with a server-generated SELECT *
    // statement for the selected table.
    function selectStarOnClick() {
      $.ajax('/panoramix/select_star/' + database_id + '/' + $("#dbtable").val() + '/')
        .done(function (msg) {
          editor.setValue(msg);
        });
    }

    $("#select_star").click(selectStarOnClick);

    editor.setValue(getParam('sql'));
    // FIX: jQuery's .bind() is deprecated; .on() is the modern equivalent
    // and matches usage elsewhere in the codebase.
    $(window).on("popstate", function (event) {
      // Could do something more lightweight here, but we're not optimizing
      // for the use of the back button anyways
      editor.setValue(getParam('sql'));
      $("#run").click();
    });
    $("#run").click(function () {
      $('#results').hide(0);
      $('#loading').show(0);
      // Keep the URL shareable: push the current SQL into the query string.
      history.pushState({}, document.title, '?sql=' + encodeURIComponent(editor.getValue()));
      $.ajax({
        type: "POST",
        url: '/panoramix/runsql/',
        data: {
          data: JSON.stringify({
            database_id: $('#database_id').val(),
            sql: editor.getSession().getValue()
          })
        },
        success: function (data) {
          $('#loading').hide(0);
          $('#results').show(0);
          $('#results').html(data);

          $('table.sql_results').DataTable({
            paging: false,
            searching: true,
            aaSorting: [] // keep the server-provided row order
          });
        },
        // FIX: dropped the unused second (textStatus) parameter.
        error: function (err) {
          $('#loading').hide(0);
          $('#results').show(0);
          $('#results').html(err.responseText);
        }
      });
    });
  }
  initSqlEditorView();
});
|
||||
13
panoramix/assets/javascripts/standalone.js
Normal file
13
panoramix/assets/javascripts/standalone.js
Normal file
@@ -0,0 +1,13 @@
|
||||
// standalone.js — entry point for the standalone chart page: renders a
// single slice and keeps it sized to the browser window.
var $ = window.$ = require('jquery');
var jQuery = window.jQuery = $;
var px = require('./modules/panoramix.js');

require('bootstrap');

$(document).ready(function () {
  // Slice configuration is embedded by the server as a data attribute.
  var sliceData = $('.slice').data('slice');
  var slice = px.Slice(sliceData);
  slice.render();
  slice.bindResizeToWindowResize();
});
|
||||
77
panoramix/assets/package.json
Normal file
77
panoramix/assets/package.json
Normal file
@@ -0,0 +1,77 @@
|
||||
{
|
||||
"name": "panoramix",
|
||||
"version": "0.1.0",
|
||||
"description": "Any database to any visualization",
|
||||
"directories": {
|
||||
"doc": "docs",
|
||||
"test": "tests"
|
||||
},
|
||||
"scripts": {
|
||||
"test": "echo \"Error: no test specified\" && exit 1",
|
||||
"dev": "webpack -d --watch --colors",
|
||||
"prod": "webpack -p --colors",
|
||||
"lint": "npm run --silent lint:js",
|
||||
"lint:js": "eslint --ignore-path=.eslintignore --ext .js .; exit 0;"
|
||||
},
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "git+https://github.com/mistercrunch/panoramix.git"
|
||||
},
|
||||
"keywords": [
|
||||
"big",
|
||||
"data",
|
||||
"exploratory",
|
||||
"analysis",
|
||||
"react",
|
||||
"d3",
|
||||
"airbnb",
|
||||
"nerds",
|
||||
"database",
|
||||
"flask"
|
||||
],
|
||||
"author": "Airbnb",
|
||||
"bugs": {
|
||||
"url": "https://github.com/mistercrunch/panoramix/issues"
|
||||
},
|
||||
"homepage": "https://github.com/mistercrunch/panoramix#readme",
|
||||
"dependencies": {
|
||||
"babel-loader": "^6.2.1",
|
||||
"babel-polyfill": "^6.3.14",
|
||||
"babel-preset-es2015": "^6.3.13",
|
||||
"babel-preset-react": "^6.3.13",
|
||||
"bootstrap": "^3.3.6",
|
||||
"bootstrap-datepicker": "^1.6.0",
|
||||
"bootstrap-toggle": "^2.2.1",
|
||||
"brace": "^0.7.0",
|
||||
"css-loader": "^0.23.1",
|
||||
"d3": "^3.5.14",
|
||||
"d3-cloud": "^1.2.1",
|
||||
"d3-sankey": "^0.2.1",
|
||||
"d3-tip": "^0.6.7",
|
||||
"d3.layout.cloud": "^1.2.0",
|
||||
"datamaps": "^0.4.4",
|
||||
"datatables": "^1.10.9",
|
||||
"datatables-bootstrap3-plugin": "^0.4.0",
|
||||
"exports-loader": "^0.6.3",
|
||||
"font-awesome": "^4.5.0",
|
||||
"gridster": "^0.5.6",
|
||||
"imports-loader": "^0.6.5",
|
||||
"jquery": "^2.2.1",
|
||||
"jquery-ui": "^1.10.5",
|
||||
"less-loader": "^2.2.2",
|
||||
"nvd3": "1.8.2",
|
||||
"react": "^0.14.7",
|
||||
"react-bootstrap": "^0.28.3",
|
||||
"react-dom": "^0.14.7",
|
||||
"select2": "3.5",
|
||||
"select2-bootstrap-css": "^1.4.6",
|
||||
"style-loader": "^0.13.0",
|
||||
"topojson": "^1.6.22",
|
||||
"webpack": "^1.12.12"
|
||||
},
|
||||
"devDependencies": {
|
||||
"eslint": "^2.2.0",
|
||||
"file-loader": "^0.8.5",
|
||||
"url-loader": "^0.5.7"
|
||||
}
|
||||
}
|
||||
616
panoramix/assets/stylesheets/less/bootswatch.less
Normal file
616
panoramix/assets/stylesheets/less/bootswatch.less
Normal file
@@ -0,0 +1,616 @@
|
||||
// Paper 3.3.5
|
||||
// Bootswatch
|
||||
// -----------------------------------------------------
|
||||
|
||||
@web-font-path: "https://fonts.googleapis.com/css?family=Roboto:300,400,500,700";
|
||||
|
||||
.web-font(@path) {
|
||||
@import url("@{path}");
|
||||
}
|
||||
.web-font(@web-font-path);
|
||||
|
||||
// Navbar =====================================================================
|
||||
|
||||
.navbar {
|
||||
border: none;
|
||||
.box-shadow(0 1px 2px rgba(0,0,0,.3));
|
||||
|
||||
&-brand {
|
||||
font-size: 24px;
|
||||
}
|
||||
|
||||
&-inverse {
|
||||
.navbar-form {
|
||||
|
||||
input[type=text],
|
||||
input[type=password] {
|
||||
color: #fff;
|
||||
.box-shadow(inset 0 -1px 0 @navbar-inverse-link-color);
|
||||
.placeholder(@navbar-inverse-link-color);
|
||||
|
||||
&:focus {
|
||||
.box-shadow(inset 0 -2px 0 #fff);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Buttons ====================================================================
|
||||
|
||||
#btn(@class,@bg) {
|
||||
.btn-@{class} {
|
||||
background-size: 200%;
|
||||
background-position: 50%;
|
||||
|
||||
&:focus {
|
||||
background-color: @bg;
|
||||
}
|
||||
|
||||
&:hover,
|
||||
&:active:hover {
|
||||
background-color: darken(@bg, 6%);
|
||||
}
|
||||
|
||||
&:active {
|
||||
background-color: darken(@bg, 12%);
|
||||
#gradient > .radial(darken(@bg, 12%) 10%, @bg 11%);
|
||||
background-size: 1000%;
|
||||
.box-shadow(2px 2px 4px rgba(0,0,0,.4));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#btn(default,@btn-default-bg);
|
||||
#btn(primary,@btn-primary-bg);
|
||||
#btn(success,@btn-success-bg);
|
||||
#btn(info,@btn-info-bg);
|
||||
#btn(warning,@btn-warning-bg);
|
||||
#btn(danger,@btn-danger-bg);
|
||||
#btn(link,#fff);
|
||||
|
||||
.btn {
|
||||
text-transform: uppercase;
|
||||
border: none;
|
||||
.box-shadow(1px 1px 4px rgba(0,0,0,.4));
|
||||
.transition(all 0.4s);
|
||||
|
||||
&-link {
|
||||
border-radius: @btn-border-radius-base;
|
||||
.box-shadow(none);
|
||||
color: @btn-default-color;
|
||||
|
||||
&:hover,
|
||||
&:focus {
|
||||
.box-shadow(none);
|
||||
color: @btn-default-color;
|
||||
text-decoration: none;
|
||||
}
|
||||
}
|
||||
|
||||
&-default {
|
||||
|
||||
&.disabled {
|
||||
background-color: rgba(0, 0, 0, 0.1);
|
||||
color: rgba(0, 0, 0, 0.4);
|
||||
opacity: 1;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
.btn-group {
|
||||
.btn + .btn,
|
||||
.btn + .btn-group,
|
||||
.btn-group + .btn,
|
||||
.btn-group + .btn-group {
|
||||
margin-left: 0;
|
||||
}
|
||||
|
||||
&-vertical {
|
||||
> .btn + .btn,
|
||||
> .btn + .btn-group,
|
||||
> .btn-group + .btn,
|
||||
> .btn-group + .btn-group {
|
||||
margin-top: 0;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Typography =================================================================
|
||||
|
||||
body {
|
||||
-webkit-font-smoothing: antialiased;
|
||||
letter-spacing: .1px;
|
||||
}
|
||||
|
||||
p {
|
||||
margin: 0 0 1em;
|
||||
}
|
||||
|
||||
input,
|
||||
button {
|
||||
-webkit-font-smoothing: antialiased;
|
||||
letter-spacing: .1px;
|
||||
}
|
||||
|
||||
a {
|
||||
.transition(all 0.2s);
|
||||
}
|
||||
|
||||
// Tables =====================================================================
|
||||
|
||||
.table-hover {
|
||||
> tbody > tr,
|
||||
> tbody > tr > th,
|
||||
> tbody > tr > td {
|
||||
.transition(all 0.2s);
|
||||
}
|
||||
}
|
||||
|
||||
// Forms ======================================================================
|
||||
|
||||
label {
|
||||
font-weight: normal;
|
||||
}
|
||||
|
||||
textarea,
|
||||
textarea.form-control,
|
||||
input.form-control,
|
||||
input[type=text],
|
||||
input[type=password],
|
||||
input[type=email],
|
||||
input[type=number],
|
||||
[type=text].form-control,
|
||||
[type=password].form-control,
|
||||
[type=email].form-control,
|
||||
[type=tel].form-control,
|
||||
[contenteditable].form-control {
|
||||
padding: 0;
|
||||
border: none;
|
||||
border-radius: 0;
|
||||
-webkit-appearance: none;
|
||||
.box-shadow(inset 0 -1px 0 #ddd);
|
||||
font-size: 16px;
|
||||
|
||||
&:focus {
|
||||
.box-shadow(inset 0 -2px 0 @brand-primary);
|
||||
}
|
||||
|
||||
&[disabled],
|
||||
&[readonly] {
|
||||
.box-shadow(none);
|
||||
border-bottom: 1px dotted #ddd;
|
||||
}
|
||||
|
||||
&.input {
|
||||
&-sm {
|
||||
font-size: @font-size-small;
|
||||
}
|
||||
|
||||
&-lg {
|
||||
font-size: @font-size-large;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
select,
|
||||
select.form-control {
|
||||
border: 0;
|
||||
border-radius: 0;
|
||||
-webkit-appearance: none;
|
||||
-moz-appearance: none;
|
||||
appearance: none;
|
||||
padding-left: 0;
|
||||
padding-right: 0\9; // remove padding for < ie9 since default arrow can't be removed
|
||||
background-image: url(data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABoAAAAaCAMAAACelLz8AAAAJ1BMVEVmZmZmZmZmZmZmZmZmZmZmZmZmZmZmZmZmZmZmZmZmZmZmZmZmZmaP/QSjAAAADHRSTlMAAgMJC0uWpKa6wMxMdjkoAAAANUlEQVR4AeXJyQEAERAAsNl7Hf3X6xt0QL6JpZWq30pdvdadme+0PMdzvHm8YThHcT1H7K0BtOMDniZhWOgAAAAASUVORK5CYII=);
|
||||
background-size: 13px;
|
||||
background-repeat: no-repeat;
|
||||
background-position: right center;
|
||||
.box-shadow(inset 0 -1px 0 #ddd);
|
||||
font-size: 16px;
|
||||
line-height: 1.5;
|
||||
|
||||
&::-ms-expand {
|
||||
display: none;
|
||||
}
|
||||
|
||||
&.input {
|
||||
&-sm {
|
||||
font-size: @font-size-small;
|
||||
}
|
||||
|
||||
&-lg {
|
||||
font-size: @font-size-large;
|
||||
}
|
||||
}
|
||||
|
||||
&:focus {
|
||||
.box-shadow(inset 0 -2px 0 @brand-primary);
|
||||
background-image: url(data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABoAAAAaCAMAAACelLz8AAAAJ1BMVEUhISEhISEhISEhISEhISEhISEhISEhISEhISEhISEhISEhISEhISF8S9ewAAAADHRSTlMAAgMJC0uWpKa6wMxMdjkoAAAANUlEQVR4AeXJyQEAERAAsNl7Hf3X6xt0QL6JpZWq30pdvdadme+0PMdzvHm8YThHcT1H7K0BtOMDniZhWOgAAAAASUVORK5CYII=);
|
||||
}
|
||||
|
||||
&[multiple] {
|
||||
background: none;
|
||||
}
|
||||
}
|
||||
|
||||
.radio,
|
||||
.radio-inline,
|
||||
.checkbox,
|
||||
.checkbox-inline {
|
||||
label {
|
||||
padding-left: 25px;
|
||||
}
|
||||
|
||||
input[type="radio"],
|
||||
input[type="checkbox"] {
|
||||
margin-left: -25px;
|
||||
}
|
||||
}
|
||||
|
||||
input[type="radio"],
|
||||
.radio input[type="radio"],
|
||||
.radio-inline input[type="radio"] {
|
||||
position: relative;
|
||||
margin-top: 6px;
|
||||
margin-right: 4px;
|
||||
vertical-align: top;
|
||||
border: none;
|
||||
background-color: transparent;
|
||||
-webkit-appearance: none;
|
||||
appearance: none;
|
||||
cursor: pointer;
|
||||
|
||||
&:focus {
|
||||
outline: none;
|
||||
}
|
||||
|
||||
&:before,
|
||||
&:after {
|
||||
content: "";
|
||||
display: block;
|
||||
width: 18px;
|
||||
height: 18px;
|
||||
border-radius: 50%;
|
||||
.transition(240ms);
|
||||
}
|
||||
|
||||
&:before {
|
||||
position: absolute;
|
||||
left: 0;
|
||||
top: -3px;
|
||||
background-color: @brand-primary;
|
||||
.scale(0);
|
||||
}
|
||||
|
||||
&:after {
|
||||
position: relative;
|
||||
top: -3px;
|
||||
border: 2px solid @gray;
|
||||
}
|
||||
|
||||
&:checked:before {
|
||||
.scale(0.5);
|
||||
}
|
||||
|
||||
&:disabled:checked:before {
|
||||
background-color: @gray-light;
|
||||
}
|
||||
|
||||
&:checked:after {
|
||||
border-color: @brand-primary;
|
||||
}
|
||||
|
||||
&:disabled:after,
|
||||
&:disabled:checked:after {
|
||||
border-color: @gray-light;
|
||||
}
|
||||
}
|
||||
|
||||
input[type="checkbox"],
|
||||
.checkbox input[type="checkbox"],
|
||||
.checkbox-inline input[type="checkbox"] {
|
||||
position: relative;
|
||||
border: none;
|
||||
margin-bottom: -4px;
|
||||
-webkit-appearance: none;
|
||||
appearance: none;
|
||||
cursor: pointer;
|
||||
|
||||
&:focus {
|
||||
outline: none;
|
||||
}
|
||||
|
||||
&:focus:after {
|
||||
border-color: @brand-primary;
|
||||
}
|
||||
|
||||
&:after {
|
||||
content: "";
|
||||
display: block;
|
||||
width: 18px;
|
||||
height: 18px;
|
||||
margin-top: -2px;
|
||||
margin-right: 5px;
|
||||
border: 2px solid @gray;
|
||||
border-radius: 2px;
|
||||
.transition(240ms);
|
||||
}
|
||||
|
||||
&:checked:before {
|
||||
content: "";
|
||||
position: absolute;
|
||||
top: 0;
|
||||
left: 6px;
|
||||
display: table;
|
||||
width: 6px;
|
||||
height: 12px;
|
||||
border: 2px solid #fff;
|
||||
border-top-width: 0;
|
||||
border-left-width: 0;
|
||||
.rotate(45deg);
|
||||
}
|
||||
|
||||
&:checked:after {
|
||||
background-color: @brand-primary;
|
||||
border-color: @brand-primary;
|
||||
}
|
||||
|
||||
&:disabled:after {
|
||||
border-color: @gray-light;
|
||||
}
|
||||
|
||||
&:disabled:checked:after {
|
||||
background-color: @gray-light;
|
||||
border-color: transparent;
|
||||
}
|
||||
}
|
||||
|
||||
.has-warning {
|
||||
input:not([type=checkbox]),
|
||||
.form-control,
|
||||
input.form-control[readonly],
|
||||
input[type=text][readonly],
|
||||
[type=text].form-control[readonly],
|
||||
input:not([type=checkbox]):focus,
|
||||
.form-control:focus {
|
||||
border-bottom: none;
|
||||
.box-shadow(inset 0 -2px 0 @brand-warning);
|
||||
}
|
||||
}
|
||||
|
||||
.has-error {
|
||||
input:not([type=checkbox]),
|
||||
.form-control,
|
||||
input.form-control[readonly],
|
||||
input[type=text][readonly],
|
||||
[type=text].form-control[readonly],
|
||||
input:not([type=checkbox]):focus,
|
||||
.form-control:focus {
|
||||
border-bottom: none;
|
||||
.box-shadow(inset 0 -2px 0 @brand-danger);
|
||||
}
|
||||
}
|
||||
|
||||
.has-success {
|
||||
input:not([type=checkbox]),
|
||||
.form-control,
|
||||
input.form-control[readonly],
|
||||
input[type=text][readonly],
|
||||
[type=text].form-control[readonly],
|
||||
input:not([type=checkbox]):focus,
|
||||
.form-control:focus {
|
||||
border-bottom: none;
|
||||
.box-shadow(inset 0 -2px 0 @brand-success);
|
||||
}
|
||||
}
|
||||
|
||||
// Remove the Bootstrap feedback styles for input addons
|
||||
.input-group-addon {
|
||||
.has-warning &, .has-error &, .has-success & {
|
||||
color: @input-color;
|
||||
border-color: @input-group-addon-border-color;
|
||||
background-color: @input-group-addon-bg;
|
||||
}
|
||||
}
|
||||
|
||||
// Navs =======================================================================
|
||||
|
||||
.nav-tabs {
|
||||
> li > a,
|
||||
> li > a:focus {
|
||||
margin-right: 0;
|
||||
background-color: transparent;
|
||||
border: none;
|
||||
color: @navbar-default-link-color;
|
||||
.box-shadow(inset 0 -1px 0 #ddd);
|
||||
.transition(all 0.2s);
|
||||
|
||||
&:hover {
|
||||
background-color: transparent;
|
||||
.box-shadow(inset 0 -2px 0 @brand-primary);
|
||||
color: @brand-primary;
|
||||
}
|
||||
}
|
||||
|
||||
& > li.active > a,
|
||||
& > li.active > a:focus {
|
||||
border: none;
|
||||
.box-shadow(inset 0 -2px 0 @brand-primary);
|
||||
color: @brand-primary;
|
||||
|
||||
&:hover {
|
||||
border: none;
|
||||
color: @brand-primary;
|
||||
}
|
||||
}
|
||||
|
||||
& > li.disabled > a {
|
||||
.box-shadow(inset 0 -1px 0 #ddd);
|
||||
}
|
||||
|
||||
&.nav-justified {
|
||||
|
||||
& > li > a,
|
||||
& > li > a:hover,
|
||||
& > li > a:focus,
|
||||
& > .active > a,
|
||||
& > .active > a:hover,
|
||||
& > .active > a:focus {
|
||||
border: none;
|
||||
}
|
||||
}
|
||||
|
||||
.dropdown-menu {
|
||||
margin-top: 0;
|
||||
}
|
||||
}
|
||||
|
||||
.dropdown-menu {
|
||||
margin-top: 0;
|
||||
border: none;
|
||||
.box-shadow(0 1px 4px rgba(0,0,0,.3));
|
||||
}
|
||||
|
||||
// Indicators =================================================================
|
||||
|
||||
.alert {
|
||||
border: none;
|
||||
color: #fff;
|
||||
|
||||
&-success {
|
||||
background-color: @brand-success;
|
||||
}
|
||||
|
||||
&-info {
|
||||
background-color: @brand-info;
|
||||
}
|
||||
|
||||
&-warning {
|
||||
background-color: @brand-warning;
|
||||
}
|
||||
|
||||
&-danger {
|
||||
background-color: @brand-danger;
|
||||
}
|
||||
|
||||
a:not(.close),
|
||||
.alert-link {
|
||||
color: #fff;
|
||||
font-weight: bold;
|
||||
}
|
||||
|
||||
.close {
|
||||
color: #fff;
|
||||
}
|
||||
}
|
||||
|
||||
.badge {
|
||||
padding: 4px 6px 4px;
|
||||
}
|
||||
|
||||
.progress {
|
||||
position: relative;
|
||||
z-index: 1;
|
||||
height: 6px;
|
||||
border-radius: 0;
|
||||
|
||||
.box-shadow(none);
|
||||
|
||||
&-bar {
|
||||
.box-shadow(none);
|
||||
|
||||
&:last-child {
|
||||
border-radius: 0 3px 3px 0;
|
||||
}
|
||||
|
||||
&:last-child {
|
||||
&:before {
|
||||
display: block;
|
||||
content: "";
|
||||
position: absolute;
|
||||
width: 100%;
|
||||
height: 100%;
|
||||
left: 0;
|
||||
right: 0;
|
||||
z-index: -1;
|
||||
background-color: lighten(@progress-bar-bg, 35%);
|
||||
}
|
||||
}
|
||||
|
||||
&-success:last-child.progress-bar:before {
|
||||
background-color: lighten(@brand-success, 35%);
|
||||
}
|
||||
|
||||
&-info:last-child.progress-bar:before {
|
||||
background-color: lighten(@brand-info, 45%);
|
||||
}
|
||||
&-warning:last-child.progress-bar:before {
|
||||
background-color: lighten(@brand-warning, 35%);
|
||||
}
|
||||
|
||||
&-danger:last-child.progress-bar:before {
|
||||
background-color: lighten(@brand-danger, 25%);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Progress bars ==============================================================
|
||||
|
||||
// Containers =================================================================
|
||||
|
||||
.close {
|
||||
font-size: 34px;
|
||||
font-weight: 300;
|
||||
line-height: 24px;
|
||||
opacity: 0.6;
|
||||
.transition(all 0.2s);
|
||||
|
||||
&:hover {
|
||||
opacity: 1;
|
||||
}
|
||||
}
|
||||
|
||||
.list-group {
|
||||
|
||||
&-item {
|
||||
padding: 15px;
|
||||
}
|
||||
|
||||
&-item-text {
|
||||
color: @gray-light;
|
||||
}
|
||||
}
|
||||
|
||||
.well {
|
||||
border-radius: 0;
|
||||
.box-shadow(none);
|
||||
}
|
||||
|
||||
.panel {
|
||||
border: none;
|
||||
border-radius: 2px;
|
||||
.box-shadow(0 1px 4px rgba(0,0,0,.3));
|
||||
|
||||
&-heading {
|
||||
border-bottom: none;
|
||||
}
|
||||
|
||||
&-footer {
|
||||
border-top: none;
|
||||
}
|
||||
}
|
||||
|
||||
.popover {
|
||||
border: none;
|
||||
.box-shadow(0 1px 4px rgba(0,0,0,.3));
|
||||
}
|
||||
|
||||
.carousel {
|
||||
&-caption {
|
||||
h1, h2, h3, h4, h5, h6 {
|
||||
color: inherit;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,7 +1,5 @@
|
||||
// Index .less, any imports here will be included in the final css build
|
||||
|
||||
@import "~bootstrap/less/bootstrap.less";
|
||||
@import "./cosmo/variables.less";
|
||||
@import "./cosmo/bootswatch.less";
|
||||
|
||||
@stroke-primary: @brand-primary;
|
||||
@import "./variables.less";
|
||||
@import "./bootswatch.less";
|
||||
@@ -1,33 +1,46 @@
|
||||
// Forked Cosmo 3.3.7
|
||||
// Modified from Bootswatch Paper 3.3.6
|
||||
// Variables
|
||||
// --------------------------------------------------
|
||||
|
||||
|
||||
//== Colors
|
||||
//
|
||||
//## Airbnb colors
|
||||
@rausch: #ff5a5f; // coral
|
||||
@kazan: #007a87; // dark teal
|
||||
@hackberry: #7b0051; // purple
|
||||
@babu: #00d1c1; // light teal
|
||||
@lima: #8ce071; // bright green
|
||||
@beach: #ffb400; // yellow
|
||||
@ebisu: #ffaa91; // peach
|
||||
@tirol: #b4a76c; // khaki
|
||||
@foggy: #9CA299; // dark grey
|
||||
@hof: #565A5C; // light grey
|
||||
|
||||
//## Gray and brand colors for use across Bootstrap.
|
||||
|
||||
@gray-base: #000;
|
||||
@gray-darker: lighten(@gray-base, 13.5%);
|
||||
@gray-dark: lighten(@gray-base, 20%);
|
||||
@gray: lighten(@gray-base, 33.5%);
|
||||
@gray-light: lighten(@gray-base, 70%);
|
||||
@gray-lighter: lighten(@gray-base, 95%);
|
||||
@gray-darker: lighten(@gray-base, 13.5%); // #222
|
||||
@gray-dark: #212121;
|
||||
@gray: #666;
|
||||
@gray-light: #bbb;
|
||||
@gray-lighter: lighten(@gray-base, 93.5%); // #eee
|
||||
|
||||
@brand-primary: darken(@babu, 5%);
|
||||
@brand-success: darken(@lima, 15%);
|
||||
@brand-info: @beach;
|
||||
@brand-warning: @hackberry;
|
||||
@brand-danger: darken(@rausch, 5%);
|
||||
|
||||
@brand-primary: #00A699;
|
||||
@brand-success: #4AC15F;
|
||||
@brand-info: lighten(#2AB7CA, 15%);
|
||||
@brand-warning: #FED766;
|
||||
@brand-danger: #FE4A49;
|
||||
|
||||
//== Scaffolding
|
||||
//
|
||||
//## Settings for some of the most global styles.
|
||||
|
||||
//** Background color for `<body>`.
|
||||
@body-bg: #f5f5f5;
|
||||
@body-bg: #fff;
|
||||
//** Global text color on `<body>`.
|
||||
@text-color: @gray-dark;
|
||||
@text-color: @gray;
|
||||
|
||||
//** Global textual link color.
|
||||
@link-color: @brand-primary;
|
||||
@@ -41,34 +54,33 @@
|
||||
//
|
||||
//## Font, line-height, and color for body text, headings, and more.
|
||||
|
||||
@font-family-sans-serif: Helvetica, Arial;
|
||||
|
||||
@font-family-sans-serif: "Roboto", "Helvetica Neue", Helvetica, Arial, sans-serif;
|
||||
@font-family-serif: Georgia, "Times New Roman", Times, serif;
|
||||
//** Default monospace fonts for `<code>`, `<kbd>`, and `<pre>`.
|
||||
@font-family-monospace: Menlo, Monaco, Consolas, "Courier New", monospace;
|
||||
@font-family-base: @font-family-sans-serif;
|
||||
|
||||
@font-size-base: 14px;
|
||||
@font-size-base: 13px;
|
||||
@font-size-large: ceil((@font-size-base * 1.25)); // ~18px
|
||||
@font-size-small: ceil((@font-size-base * 0.85)); // ~12px
|
||||
|
||||
@font-size-h1: floor((@font-size-base * 2.6)); // ~36px
|
||||
@font-size-h2: floor((@font-size-base * 2.15)); // ~30px
|
||||
@font-size-h3: ceil((@font-size-base * 1.7)); // ~24px
|
||||
@font-size-h4: ceil((@font-size-base * 1.25)); // ~18px
|
||||
@font-size-h5: @font-size-base;
|
||||
@font-size-h6: ceil((@font-size-base * 0.85)); // ~12px
|
||||
@font-size-h1: 56px;
|
||||
@font-size-h2: 45px;
|
||||
@font-size-h3: 34px;
|
||||
@font-size-h4: 24px;
|
||||
@font-size-h5: 20px;
|
||||
@font-size-h6: 14px;
|
||||
|
||||
//** Unit-less `line-height` for use in components like buttons.
|
||||
@line-height-base: 1.428571429; // 20/14
|
||||
@line-height-base: 1.846; // 20/14
|
||||
//** Computed "line-height" (`font-size` * `line-height`) for use with `margin`, `padding`, etc.
|
||||
@line-height-computed: floor((@font-size-base * @line-height-base)); // ~20px
|
||||
|
||||
//** By default, this inherits from the `<body>`.
|
||||
@headings-font-family: @font-family-base;
|
||||
@headings-font-weight: 300;
|
||||
@headings-font-family: inherit;
|
||||
@headings-font-weight: 400;
|
||||
@headings-line-height: 1.1;
|
||||
@headings-color: inherit;
|
||||
@headings-color: #444;
|
||||
|
||||
|
||||
//== Iconography
|
||||
@@ -87,11 +99,11 @@
|
||||
//
|
||||
//## Define common padding and border radius sizes and more. Values based on 14px text and 1.428 line-height (~20px to start).
|
||||
|
||||
@padding-base-vertical: 10px;
|
||||
@padding-base-horizontal: 18px;
|
||||
@padding-base-vertical: 6px;
|
||||
@padding-base-horizontal: 16px;
|
||||
|
||||
@padding-large-vertical: 18px;
|
||||
@padding-large-horizontal: 30px;
|
||||
@padding-large-vertical: 10px;
|
||||
@padding-large-horizontal: 16px;
|
||||
|
||||
@padding-small-vertical: 5px;
|
||||
@padding-small-horizontal: 10px;
|
||||
@@ -102,16 +114,16 @@
|
||||
@line-height-large: 1.3333333; // extra decimals for Win 8.1 Chrome
|
||||
@line-height-small: 1.5;
|
||||
|
||||
@border-radius-base: 2px;
|
||||
@border-radius-large: 2px;
|
||||
@border-radius-small: 2px;
|
||||
@border-radius-base: 3px;
|
||||
@border-radius-large: 3px;
|
||||
@border-radius-small: 3px;
|
||||
|
||||
//** Global color for active items (e.g., navs or dropdowns).
|
||||
@component-active-color: #fff;
|
||||
//** Global background color for active items (e.g., navs or dropdowns).
|
||||
@component-active-bg: @brand-primary;
|
||||
|
||||
//** Width of the `border` for generating carets that indicate dropdowns.
|
||||
//** Width of the `border` for generating carets that indicator dropdowns.
|
||||
@caret-width-base: 4px;
|
||||
//** Carets increase slightly in size for larger components.
|
||||
@caret-width-large: 5px;
|
||||
@@ -131,7 +143,7 @@
|
||||
//** Background color used for `.table-striped`.
|
||||
@table-bg-accent: #f9f9f9;
|
||||
//** Background color used for `.table-hover`.
|
||||
@table-bg-hover: #f5f5f5;
|
||||
@table-bg-hover: @gray-lighter;
|
||||
@table-bg-active: @table-bg-hover;
|
||||
|
||||
//** Border color for table and cell borders.
|
||||
@@ -144,29 +156,29 @@
|
||||
|
||||
@btn-font-weight: normal;
|
||||
|
||||
@btn-default-color: #444;
|
||||
@btn-default-bg: #fff;
|
||||
@btn-default-border: transparent;
|
||||
|
||||
@btn-primary-color: #fff;
|
||||
@btn-primary-bg: @brand-primary;
|
||||
@btn-primary-border: @brand-primary;
|
||||
@btn-primary-border: transparent;
|
||||
|
||||
@btn-default-color: @gray;
|
||||
@btn-default-bg: #fff;
|
||||
@btn-default-border: @gray-light;
|
||||
|
||||
@btn-success-color: @btn-primary-color;
|
||||
@btn-success-color: #fff;
|
||||
@btn-success-bg: @brand-success;
|
||||
@btn-success-border: @btn-success-bg;
|
||||
@btn-success-border: transparent;
|
||||
|
||||
@btn-info-color: @btn-primary-color;
|
||||
@btn-info-color: #fff;
|
||||
@btn-info-bg: @brand-info;
|
||||
@btn-info-border: @btn-info-bg;
|
||||
@btn-info-border: transparent;
|
||||
|
||||
@btn-warning-color: @btn-default-color;
|
||||
@btn-warning-color: #fff;
|
||||
@btn-warning-bg: @brand-warning;
|
||||
@btn-warning-border: @btn-warning-bg;
|
||||
@btn-warning-border: transparent;
|
||||
|
||||
@btn-danger-color: @btn-primary-color;
|
||||
@btn-danger-color: #fff;
|
||||
@btn-danger-bg: @brand-danger;
|
||||
@btn-danger-border: @btn-danger-bg;
|
||||
@btn-danger-border: transparent;
|
||||
|
||||
@btn-link-disabled-color: @gray-light;
|
||||
|
||||
@@ -181,14 +193,14 @@
|
||||
//##
|
||||
|
||||
//** `<input>` background color
|
||||
@input-bg: #fff;
|
||||
@input-bg: transparent;
|
||||
//** `<input disabled>` background color
|
||||
@input-bg-disabled: @gray-lighter;
|
||||
@input-bg-disabled: transparent;
|
||||
|
||||
//** Text color for `<input>`s
|
||||
@input-color: @text-color;
|
||||
@input-color: @gray;
|
||||
//** `<input>` border color
|
||||
@input-border: #ccc;
|
||||
@input-border: transparent;
|
||||
|
||||
// TODO: Rename `@input-border-radius` to `@input-border-radius-base` in v4
|
||||
//** Default `.form-control` border radius
|
||||
@@ -215,11 +227,11 @@
|
||||
//** `.form-group` margin
|
||||
@form-group-margin-bottom: 15px;
|
||||
|
||||
@legend-color: @text-color;
|
||||
@legend-color: @gray-dark;
|
||||
@legend-border-color: #e5e5e5;
|
||||
|
||||
//** Background color for textual input addons
|
||||
@input-group-addon-bg: @gray-lighter;
|
||||
@input-group-addon-bg: transparent;
|
||||
//** Border color for textual input addons
|
||||
@input-group-addon-border-color: @input-border;
|
||||
|
||||
@@ -241,14 +253,14 @@
|
||||
@dropdown-divider-bg: #e5e5e5;
|
||||
|
||||
//** Dropdown link text color.
|
||||
@dropdown-link-color: @gray-dark;
|
||||
@dropdown-link-color: @text-color;
|
||||
//** Hover color for dropdown links.
|
||||
@dropdown-link-hover-color: #fff;
|
||||
@dropdown-link-hover-color: darken(@gray-dark, 5%);
|
||||
//** Hover background for dropdown links.
|
||||
@dropdown-link-hover-bg: @component-active-bg;
|
||||
@dropdown-link-hover-bg: @gray-lighter;
|
||||
|
||||
//** Active dropdown menu item text color.
|
||||
@dropdown-link-active-color: #fff;
|
||||
@dropdown-link-active-color: @component-active-color;
|
||||
//** Active dropdown menu item background color.
|
||||
@dropdown-link-active-bg: @component-active-bg;
|
||||
|
||||
@@ -259,7 +271,7 @@
|
||||
@dropdown-header-color: @gray-light;
|
||||
|
||||
//** Deprecated `@dropdown-caret-color` as of v3.1.0
|
||||
@dropdown-caret-color: #000;
|
||||
@dropdown-caret-color: @gray-light;
|
||||
|
||||
|
||||
//-- Z-index master list
|
||||
@@ -324,7 +336,7 @@
|
||||
//** Number of columns in the grid.
|
||||
@grid-columns: 12;
|
||||
//** Padding between columns. Gets divided in half for the left and right.
|
||||
@grid-gutter-width: 20px;
|
||||
@grid-gutter-width: 30px;
|
||||
// Navbar collapse
|
||||
//** Point at which the navbar becomes uncollapsed.
|
||||
@grid-float-breakpoint: @screen-sm-min;
|
||||
@@ -357,60 +369,60 @@
|
||||
//##
|
||||
|
||||
// Basics of a navbar
|
||||
@navbar-height: 50px;
|
||||
@navbar-height: 64px;
|
||||
@navbar-margin-bottom: @line-height-computed;
|
||||
@navbar-border-radius: @border-radius-base;
|
||||
@navbar-padding-horizontal: floor((@grid-gutter-width / 2));
|
||||
@navbar-padding-vertical: ((@navbar-height - @line-height-computed) / 2);
|
||||
@navbar-collapse-max-height: 340px;
|
||||
|
||||
@navbar-default-color: #fff;
|
||||
@navbar-default-bg: @gray-darker;
|
||||
@navbar-default-border: darken(@navbar-default-bg, 6.5%);
|
||||
@navbar-default-color: @gray-light;
|
||||
@navbar-default-bg: #fff;
|
||||
@navbar-default-border: transparent;
|
||||
|
||||
// Navbar links
|
||||
@navbar-default-link-color: #fff;
|
||||
@navbar-default-link-hover-color: #fff;
|
||||
@navbar-default-link-hover-bg: darken(@navbar-default-bg, 10%);
|
||||
@navbar-default-link-active-color: @navbar-default-link-hover-color;
|
||||
@navbar-default-link-active-bg: @navbar-default-link-hover-bg;
|
||||
@navbar-default-link-color: @gray;
|
||||
@navbar-default-link-hover-color: @gray-dark;
|
||||
@navbar-default-link-hover-bg: transparent;
|
||||
@navbar-default-link-active-color: @gray-dark;
|
||||
@navbar-default-link-active-bg: darken(@navbar-default-bg, 6.5%);
|
||||
@navbar-default-link-disabled-color: #ccc;
|
||||
@navbar-default-link-disabled-bg: transparent;
|
||||
|
||||
// Navbar brand label
|
||||
@navbar-default-brand-color: @navbar-default-link-color;
|
||||
@navbar-default-brand-hover-color: #fff;
|
||||
@navbar-default-brand-hover-bg: none;
|
||||
@navbar-default-brand-hover-color: @navbar-default-link-hover-color;
|
||||
@navbar-default-brand-hover-bg: transparent;
|
||||
|
||||
// Navbar toggle
|
||||
@navbar-default-toggle-hover-bg: @navbar-default-link-hover-bg;
|
||||
@navbar-default-toggle-icon-bar-bg: #fff;
|
||||
@navbar-default-toggle-hover-bg: transparent;
|
||||
@navbar-default-toggle-icon-bar-bg: rgba(0,0,0,0.5);
|
||||
@navbar-default-toggle-border-color: transparent;
|
||||
|
||||
|
||||
//=== Inverted navbar
|
||||
// Reset inverted navbar basics
|
||||
@navbar-inverse-color: @gray-dark;
|
||||
@navbar-inverse-bg: #fff;
|
||||
@navbar-inverse-color: @gray-light;
|
||||
@navbar-inverse-bg: @brand-primary;
|
||||
@navbar-inverse-border: transparent;
|
||||
|
||||
// Inverted navbar links
|
||||
@navbar-inverse-link-color: @gray-dark;
|
||||
@navbar-inverse-link-hover-color: @gray-dark;
|
||||
@navbar-inverse-link-hover-bg: darken(@navbar-inverse-bg, 10%);
|
||||
@navbar-inverse-link-color: lighten(@brand-primary, 30%);
|
||||
@navbar-inverse-link-hover-color: #fff;
|
||||
@navbar-inverse-link-hover-bg: transparent;
|
||||
@navbar-inverse-link-active-color: @navbar-inverse-link-hover-color;
|
||||
@navbar-inverse-link-active-bg: @navbar-inverse-link-hover-bg;
|
||||
@navbar-inverse-link-disabled-color: @gray-lighter;
|
||||
@navbar-inverse-link-active-bg: darken(@navbar-inverse-bg, 10%);
|
||||
@navbar-inverse-link-disabled-color: #444;
|
||||
@navbar-inverse-link-disabled-bg: transparent;
|
||||
|
||||
// Inverted navbar brand label
|
||||
@navbar-inverse-brand-color: @navbar-inverse-link-color;
|
||||
@navbar-inverse-brand-hover-color: @gray-darker;
|
||||
@navbar-inverse-brand-hover-bg: none;
|
||||
@navbar-inverse-brand-hover-color: #fff;
|
||||
@navbar-inverse-brand-hover-bg: transparent;
|
||||
|
||||
// Inverted navbar toggle
|
||||
@navbar-inverse-toggle-hover-bg: @navbar-inverse-link-hover-bg;
|
||||
@navbar-inverse-toggle-icon-bar-bg: #fff;
|
||||
// Inverted navbar toggle\
|
||||
@navbar-inverse-toggle-hover-bg: transparent;
|
||||
@navbar-inverse-toggle-icon-bar-bg: rgba(0,0,0,0.5);
|
||||
@navbar-inverse-toggle-border-color: transparent;
|
||||
|
||||
|
||||
@@ -426,15 +438,15 @@
|
||||
@nav-disabled-link-hover-color: @gray-light;
|
||||
|
||||
//== Tabs
|
||||
@nav-tabs-border-color: #bbb;
|
||||
@nav-tabs-border-color: transparent;
|
||||
|
||||
@nav-tabs-link-hover-border-color: @gray-lighter;
|
||||
|
||||
@nav-tabs-active-link-hover-bg: @body-bg;
|
||||
@nav-tabs-active-link-hover-bg: transparent;
|
||||
@nav-tabs-active-link-hover-color: @gray;
|
||||
@nav-tabs-active-link-hover-border-color: #bbb;
|
||||
@nav-tabs-active-link-hover-border-color: transparent;
|
||||
|
||||
@nav-tabs-justified-link-border-color: #bbb;
|
||||
@nav-tabs-justified-link-border-color: @nav-tabs-border-color;
|
||||
@nav-tabs-justified-active-link-border-color: @body-bg;
|
||||
|
||||
//== Pills
|
||||
@@ -455,9 +467,9 @@
|
||||
@pagination-hover-bg: @gray-lighter;
|
||||
@pagination-hover-border: #ddd;
|
||||
|
||||
@pagination-active-color: @gray-light;
|
||||
@pagination-active-bg: #f5f5f5;
|
||||
@pagination-active-border: #ddd;
|
||||
@pagination-active-color: #fff;
|
||||
@pagination-active-bg: @brand-primary;
|
||||
@pagination-active-border: @brand-primary;
|
||||
|
||||
@pagination-disabled-color: @gray-light;
|
||||
@pagination-disabled-bg: #fff;
|
||||
@@ -470,14 +482,14 @@
|
||||
|
||||
@pager-bg: @pagination-bg;
|
||||
@pager-border: @pagination-border;
|
||||
@pager-border-radius: @border-radius-base;
|
||||
@pager-border-radius: 15px;
|
||||
|
||||
@pager-hover-bg: @pagination-hover-bg;
|
||||
|
||||
@pager-active-bg: @pagination-active-bg;
|
||||
@pager-active-color: @pagination-active-color;
|
||||
|
||||
@pager-disabled-color: @gray-light;
|
||||
@pager-disabled-color: @pagination-disabled-color;
|
||||
|
||||
|
||||
//== Jumbotron
|
||||
@@ -486,8 +498,8 @@
|
||||
|
||||
@jumbotron-padding: 30px;
|
||||
@jumbotron-color: inherit;
|
||||
@jumbotron-bg: @gray-lighter;
|
||||
@jumbotron-heading-color: inherit;
|
||||
@jumbotron-bg: #f9f9f9;
|
||||
@jumbotron-heading-color: @headings-color;
|
||||
@jumbotron-font-size: ceil((@font-size-base * 1.5));
|
||||
@jumbotron-heading-font-size: ceil((@font-size-base * 4.5));
|
||||
|
||||
@@ -496,21 +508,21 @@
|
||||
//
|
||||
//## Define colors for form feedback states and, by default, alerts.
|
||||
|
||||
@state-success-text: darken(@brand-success, 20%);
|
||||
@state-success-bg: lighten(@brand-success, 35%);
|
||||
@state-success-text: @brand-success;
|
||||
@state-success-bg: #dff0d8;
|
||||
@state-success-border: darken(spin(@state-success-bg, -10), 5%);
|
||||
|
||||
@state-info-text: darken(@brand-info, 22%);
|
||||
@state-info-bg: lighten(@brand-info, 20%);
|
||||
@state-info-text: @brand-info;
|
||||
@state-info-bg: #e1bee7;
|
||||
@state-info-border: darken(spin(@state-info-bg, -10), 7%);
|
||||
|
||||
@state-warning-text: darken(@brand-warning, 30%);
|
||||
@state-warning-bg: lighten(@brand-warning, 20%);
|
||||
@state-warning-border: darken(spin(@state-warning-bg, -10), 3%);
|
||||
@state-warning-text: @brand-warning;
|
||||
@state-warning-bg: #ffe0b2;
|
||||
@state-warning-border: darken(spin(@state-warning-bg, -10), 5%);
|
||||
|
||||
@state-danger-text: darken(@brand-danger, 25%);
|
||||
@state-danger-bg: lighten(@brand-danger, 22%);
|
||||
@state-danger-border: darken(spin(@state-danger-bg, -10), 3%);
|
||||
@state-danger-text: @brand-danger;
|
||||
@state-danger-bg: #f9bdbb;
|
||||
@state-danger-border: darken(spin(@state-danger-bg, -10), 5%);
|
||||
|
||||
|
||||
//== Tooltips
|
||||
@@ -522,7 +534,7 @@
|
||||
//** Tooltip text color
|
||||
@tooltip-color: #fff;
|
||||
//** Tooltip background color
|
||||
@tooltip-bg: #000;
|
||||
@tooltip-bg: #727272;
|
||||
@tooltip-opacity: .9;
|
||||
|
||||
//** Tooltip arrow width
|
||||
@@ -540,9 +552,9 @@
|
||||
//** Popover maximum width
|
||||
@popover-max-width: 276px;
|
||||
//** Popover border color
|
||||
@popover-border-color: rgba(0,0,0,.2);
|
||||
@popover-border-color: transparent;
|
||||
//** Popover fallback border color
|
||||
@popover-fallback-border-color: #ccc;
|
||||
@popover-fallback-border-color: transparent;
|
||||
|
||||
//** Popover title background color
|
||||
@popover-title-bg: darken(@popover-bg, 3%);
|
||||
@@ -555,7 +567,7 @@
|
||||
//** Popover outer arrow width
|
||||
@popover-arrow-outer-width: (@popover-arrow-width + 1);
|
||||
//** Popover outer arrow color
|
||||
@popover-arrow-outer-color: fadein(@popover-border-color, 5%);
|
||||
@popover-arrow-outer-color: fadein(@popover-border-color, 7.5%);
|
||||
//** Popover outer arrow fallback color
|
||||
@popover-arrow-outer-fallback-color: darken(@popover-fallback-border-color, 20%);
|
||||
|
||||
@@ -588,7 +600,7 @@
|
||||
//##
|
||||
|
||||
//** Padding applied to the modal body
|
||||
@modal-inner-padding: 20px;
|
||||
@modal-inner-padding: 15px;
|
||||
|
||||
//** Padding applied to the modal title
|
||||
@modal-title-padding: 15px;
|
||||
@@ -607,7 +619,7 @@
|
||||
//** Modal backdrop opacity
|
||||
@modal-backdrop-opacity: .5;
|
||||
//** Modal header border color
|
||||
@modal-header-border-color: #e5e5e5;
|
||||
@modal-header-border-color: transparent;
|
||||
//** Modal footer border color
|
||||
@modal-footer-border-color: @modal-header-border-color;
|
||||
|
||||
@@ -646,7 +658,7 @@
|
||||
//##
|
||||
|
||||
//** Background color of the whole progress component
|
||||
@progress-bg: #ccc;
|
||||
@progress-bg: #f5f5f5;
|
||||
//** Progress bar text color
|
||||
@progress-bar-color: #fff;
|
||||
//** Variable for setting rounded corners on progress bar.
|
||||
@@ -682,7 +694,7 @@
|
||||
//** Background color of active list items
|
||||
@list-group-active-bg: @component-active-bg;
|
||||
//** Border color of active list elements
|
||||
@list-group-active-border: @list-group-border;
|
||||
@list-group-active-border: @list-group-active-bg;
|
||||
//** Text color for content within active list items
|
||||
@list-group-active-text-color: lighten(@list-group-active-bg, 40%);
|
||||
|
||||
@@ -713,28 +725,28 @@
|
||||
@panel-footer-bg: #f5f5f5;
|
||||
|
||||
@panel-default-text: @gray-dark;
|
||||
@panel-default-border: transparent;
|
||||
@panel-default-heading-bg: #fff;
|
||||
@panel-default-border: #ddd;
|
||||
@panel-default-heading-bg: #f5f5f5;
|
||||
|
||||
@panel-primary-text: #fff;
|
||||
@panel-primary-border: transparent;
|
||||
@panel-primary-border: @brand-primary;
|
||||
@panel-primary-heading-bg: @brand-primary;
|
||||
|
||||
@panel-success-text: @state-success-text;
|
||||
@panel-success-border: transparent;
|
||||
@panel-success-heading-bg: @state-success-bg;
|
||||
@panel-success-text: #fff;
|
||||
@panel-success-border: @state-success-border;
|
||||
@panel-success-heading-bg: @brand-success;
|
||||
|
||||
@panel-info-text: @state-info-text;
|
||||
@panel-info-border: transparent;
|
||||
@panel-info-heading-bg: @state-info-bg;
|
||||
@panel-info-text: #fff;
|
||||
@panel-info-border: @state-info-border;
|
||||
@panel-info-heading-bg: @brand-info;
|
||||
|
||||
@panel-warning-text: @state-warning-text;
|
||||
@panel-warning-border: transparent;
|
||||
@panel-warning-heading-bg: @state-warning-bg;
|
||||
@panel-warning-text: #fff;
|
||||
@panel-warning-border: @state-warning-border;
|
||||
@panel-warning-heading-bg: @brand-warning;
|
||||
|
||||
@panel-danger-text: @state-danger-text;
|
||||
@panel-danger-border: transparent;
|
||||
@panel-danger-heading-bg: @state-danger-bg;
|
||||
@panel-danger-text: #fff;
|
||||
@panel-danger-border: @state-danger-border;
|
||||
@panel-danger-heading-bg: @brand-danger;
|
||||
|
||||
|
||||
//== Thumbnails
|
||||
@@ -760,8 +772,8 @@
|
||||
//
|
||||
//##
|
||||
|
||||
@well-bg: #f5f5f5;
|
||||
@well-border: darken(@well-bg, 7%);
|
||||
@well-bg: #f9f9f9;
|
||||
@well-border: transparent;
|
||||
|
||||
|
||||
//== Badges
|
||||
@@ -771,14 +783,14 @@
|
||||
@badge-color: #fff;
|
||||
//** Linked badge text color on hover
|
||||
@badge-link-hover-color: #fff;
|
||||
@badge-bg: @brand-primary;
|
||||
@badge-bg: @gray-light;
|
||||
|
||||
//** Badge text color in active nav link
|
||||
@badge-active-color: @link-color;
|
||||
//** Badge background color in active nav link
|
||||
@badge-active-bg: #fff;
|
||||
|
||||
@badge-font-weight: bold;
|
||||
@badge-font-weight: normal;
|
||||
@badge-line-height: 1;
|
||||
@badge-border-radius: 10px;
|
||||
|
||||
@@ -820,9 +832,9 @@
|
||||
//
|
||||
//##
|
||||
|
||||
@close-font-weight: bold;
|
||||
@close-color: #fff;
|
||||
@close-text-shadow: 0 1px 0 #fff;
|
||||
@close-font-weight: normal;
|
||||
@close-color: #000;
|
||||
@close-text-shadow: none;
|
||||
|
||||
|
||||
//== Code
|
||||
244
panoramix/assets/stylesheets/panoramix.css
Normal file
244
panoramix/assets/stylesheets/panoramix.css
Normal file
@@ -0,0 +1,244 @@
|
||||
body {
|
||||
margin: 0px !important;
|
||||
}
|
||||
|
||||
.modal-dialog {
|
||||
z-index: 1100;
|
||||
}
|
||||
|
||||
input.form-control {
|
||||
background-color: white;
|
||||
}
|
||||
|
||||
.col-left-fixed {
|
||||
width:350px;
|
||||
position: absolute;
|
||||
float: left;
|
||||
}
|
||||
.col-offset {
|
||||
margin-left: 365px;
|
||||
}
|
||||
|
||||
.slice_description{
|
||||
padding: 8px;
|
||||
margin: 5px;
|
||||
border: 1px solid #DDD;
|
||||
background-color: #F8F8F8;
|
||||
border-radius: 5px;
|
||||
font-size: 12px;
|
||||
}
|
||||
|
||||
.slice_info{
|
||||
cursor: pointer;
|
||||
}
|
||||
|
||||
.padded {
|
||||
padding: 10px;
|
||||
}
|
||||
|
||||
.intable-longtext{
|
||||
max-height: 200px;
|
||||
overflow: auto;
|
||||
}
|
||||
|
||||
.container-fluid {
|
||||
text-align: left;
|
||||
}
|
||||
input[type="checkbox"] {
|
||||
display: inline-block;
|
||||
width: 16px;
|
||||
height: 16px;
|
||||
float: right;
|
||||
}
|
||||
form div {
|
||||
padding-top: 1px;
|
||||
}
|
||||
.navbar-brand a {
|
||||
color: white;
|
||||
}
|
||||
|
||||
.header span{
|
||||
margin-left: 3px;
|
||||
}
|
||||
|
||||
#timer {
|
||||
width: 80px;
|
||||
text-align: right;
|
||||
}
|
||||
|
||||
.notbtn {
|
||||
cursor: default;
|
||||
}
|
||||
hr {
|
||||
margin-top: 15px;
|
||||
margin-bottom: 15px;
|
||||
}
|
||||
|
||||
span.title-block {
|
||||
background-color: #EEE;
|
||||
border-radius: 4px;
|
||||
padding: 6px 12px;
|
||||
margin: 0px 10px;
|
||||
font-size: 20px;
|
||||
}
|
||||
|
||||
fieldset.fs-style {
|
||||
font-family: Verdana, Arial, sans-serif;
|
||||
font-size: small;
|
||||
font-weight: normal;
|
||||
border: 1px solid #CCC;
|
||||
background-color: #F4F4F4;
|
||||
border-radius: 6px;
|
||||
padding: 10px;
|
||||
margin: 0px 0px 10px 0px;
|
||||
}
|
||||
legend.legend-style {
|
||||
font-size: 14px;
|
||||
padding: 0px 6px;
|
||||
cursor: pointer;
|
||||
margin: 0px;
|
||||
color: #444;
|
||||
background-color: transparent;
|
||||
font-weight: bold;
|
||||
}
|
||||
.nvtooltip {
|
||||
//position: relative !important;
|
||||
z-index: 888;
|
||||
}
|
||||
.nvtooltip table td{
|
||||
font-size: 11px !important;
|
||||
}
|
||||
legend {
|
||||
width: auto;
|
||||
border-bottom: 0px;
|
||||
}
|
||||
.navbar {
|
||||
-webkit-box-shadow: 0px 3px 3px #AAA;
|
||||
-moz-box-shadow: 0px 3px 3px #AAA;
|
||||
box-shadow: 0px 3px 3px #AAA;
|
||||
z-index: 999;
|
||||
}
|
||||
.panel.panel-primary {
|
||||
margin: 10px;
|
||||
}
|
||||
|
||||
.index .carousel img {
|
||||
max-height: 500px;
|
||||
}
|
||||
.index .carousel {
|
||||
overflow: hidden;
|
||||
height: 500px;
|
||||
}
|
||||
.index .carousel-caption h1 {
|
||||
font-size: 80px;
|
||||
}
|
||||
.index .carousel-caption p {
|
||||
font-size: 20px;
|
||||
}
|
||||
.index div.carousel-caption{
|
||||
background: rgba(0,0,0,0.5);
|
||||
border-radius: 20px;
|
||||
top: 150px;
|
||||
bottom: auto !important;
|
||||
}
|
||||
.index .carousel-inner > .item > img {
|
||||
margin: 0 auto;
|
||||
}
|
||||
.index {
|
||||
margin: -20px;
|
||||
}
|
||||
.index .carousel-indicators li {
|
||||
background-color: #AAA;
|
||||
border: 1px solid black;
|
||||
}
|
||||
|
||||
.index .carousel-indicators .active {
|
||||
background-color: #000;
|
||||
border: 5px solid black;
|
||||
}
|
||||
|
||||
.datasource form div.form-control {
|
||||
margin-bottom: 5px !important;
|
||||
}
|
||||
.datasource form input.form-control {
|
||||
margin-bottom: 5px !important;
|
||||
}
|
||||
.datasource .tooltip-inner {
|
||||
max-width: 350px;
|
||||
}
|
||||
img.loading {
|
||||
width: 40px;
|
||||
}
|
||||
|
||||
.dashboard a i {
|
||||
cursor: pointer;
|
||||
}
|
||||
.dashboard i.drag {
|
||||
cursor: move !important;
|
||||
}
|
||||
.dashboard .gridster .preview-holder {
|
||||
z-index: 1;
|
||||
position: absolute;
|
||||
background-color: #AAA;
|
||||
border-color: #AAA;
|
||||
opacity: 0.3;
|
||||
}
|
||||
.gridster li.widget{
|
||||
list-style-type: none;
|
||||
border-radius: 0;
|
||||
margin: 5px;
|
||||
border: 1px solid #ccc;
|
||||
box-shadow: 2px 1px 5px -2px #aaa;
|
||||
overflow: hidden;
|
||||
background-color: #fff;
|
||||
}
|
||||
.dashboard .gridster .dragging,
|
||||
.dashboard .gridster .resizing {
|
||||
opacity: 0.5;
|
||||
}
|
||||
.dashboard img.loading {
|
||||
width: 20px;
|
||||
margin: 5px;
|
||||
}
|
||||
.dashboard .title {
|
||||
text-align: center;
|
||||
}
|
||||
.dashboard .slice_title {
|
||||
text-align: center;
|
||||
font-weight: bold;
|
||||
font-size: 14px;
|
||||
padding: 5px;
|
||||
}
|
||||
.dashboard div.slice_content {
|
||||
width: 100%;
|
||||
height: 100%;
|
||||
}
|
||||
|
||||
.dashboard div.nvtooltip {
|
||||
z-index: 888; /* this lets tool tips go on top of other slices */
|
||||
}
|
||||
|
||||
div.header {
|
||||
font-weight: bold;
|
||||
}
|
||||
li.widget:hover {
|
||||
z-index: 1000;
|
||||
}
|
||||
|
||||
li.widget .chart-header {
|
||||
padding: 5px;
|
||||
background-color: #f1f1f1;
|
||||
}
|
||||
|
||||
li.widget .chart-header a {
|
||||
margin-left: 5px;
|
||||
}
|
||||
|
||||
li.widget .chart-controls {
|
||||
display: none;
|
||||
background-color: #f1f1f1;
|
||||
}
|
||||
|
||||
li.widget .slice_container {
|
||||
overflow: auto;
|
||||
}
|
||||
146
panoramix/assets/vendor/select2.sortable.js
vendored
Normal file
146
panoramix/assets/vendor/select2.sortable.js
vendored
Normal file
@@ -0,0 +1,146 @@
|
||||
/**
|
||||
* jQuery Select2 Sortable
|
||||
* - enable select2 to be sortable via normal select element
|
||||
*
|
||||
* author : Vafour
|
||||
* modified : Kevin Provance (kprovance)
|
||||
* inspired by : jQuery Chosen Sortable (https://github.com/mrhenry/jquery-chosen-sortable)
|
||||
* License : GPL
|
||||
*/
|
||||
|
||||
(function ($) {
|
||||
$.fn.extend({
|
||||
select2SortableOrder: function () {
|
||||
var $this = this.filter('[multiple]');
|
||||
|
||||
$this.each(function () {
|
||||
var $select = $(this);
|
||||
|
||||
// skip elements not select2-ed
|
||||
if (typeof ($select.data('select2')) !== 'object') {
|
||||
return false;
|
||||
}
|
||||
|
||||
var $select2 = $select.siblings('.select2-container');
|
||||
var sorted;
|
||||
|
||||
// Opt group names
|
||||
var optArr = [];
|
||||
|
||||
$select.find('optgroup').each(function(idx, val) {
|
||||
optArr.push (val);
|
||||
});
|
||||
|
||||
$select.find('option').each(function(idx, val) {
|
||||
var groupName = $(this).parent('optgroup').prop('label');
|
||||
var optVal = this;
|
||||
|
||||
if (groupName === undefined) {
|
||||
if (this.value !== '' && !this.selected) {
|
||||
optArr.push (optVal);
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
sorted = $($select2.find('.select2-choices li[class!="select2-search-field"]').map(function () {
|
||||
if (!this) {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
var id = $(this).data('select2Data').id;
|
||||
|
||||
return $select.find('option[value="' + id + '"]')[0];
|
||||
}));
|
||||
|
||||
sorted.push.apply(sorted, optArr);
|
||||
|
||||
$select.children().remove();
|
||||
$select.append(sorted);
|
||||
});
|
||||
|
||||
return $this;
|
||||
},
|
||||
|
||||
select2Sortable: function () {
|
||||
var args = Array.prototype.slice.call(arguments, 0);
|
||||
var $this = this.filter('[multiple]'),
|
||||
validMethods = ['destroy'];
|
||||
|
||||
if (args.length === 0 || typeof (args[0]) === 'object') {
|
||||
var defaultOptions = {
|
||||
bindOrder: 'formSubmit', // or sortableStop
|
||||
sortableOptions: {
|
||||
placeholder: 'ui-state-highlight',
|
||||
items: 'li:not(.select2-search-field)',
|
||||
tolerance: 'pointer'
|
||||
}
|
||||
};
|
||||
|
||||
var options = $.extend(defaultOptions, args[0]);
|
||||
|
||||
// Init select2 only if not already initialized to prevent select2 configuration loss
|
||||
if (typeof ($this.data('select2')) !== 'object') {
|
||||
$this.select2();
|
||||
}
|
||||
|
||||
$this.each(function () {
|
||||
var $select = $(this)
|
||||
var $select2choices = $select.siblings('.select2-container').find('.select2-choices');
|
||||
|
||||
// Init jQuery UI Sortable
|
||||
$select2choices.sortable(options.sortableOptions);
|
||||
|
||||
switch (options.bindOrder) {
|
||||
case 'sortableStop':
|
||||
// apply options ordering in sortstop event
|
||||
$select2choices.on("sortstop.select2sortable", function (event, ui) {
|
||||
$select.select2SortableOrder();
|
||||
});
|
||||
|
||||
$select.on('change', function (e) {
|
||||
$(this).select2SortableOrder();
|
||||
});
|
||||
break;
|
||||
|
||||
default:
|
||||
// apply options ordering in form submit
|
||||
$select.closest('form').unbind('submit.select2sortable').on('submit.select2sortable', function () {
|
||||
$select.select2SortableOrder();
|
||||
});
|
||||
break;
|
||||
}
|
||||
});
|
||||
}
|
||||
else if (typeof (args[0] === 'string')) {
|
||||
if ($.inArray(args[0], validMethods) == -1) {
|
||||
throw "Unknown method: " + args[0];
|
||||
}
|
||||
|
||||
if (args[0] === 'destroy') {
|
||||
$this.select2SortableDestroy();
|
||||
}
|
||||
}
|
||||
|
||||
return $this;
|
||||
},
|
||||
|
||||
select2SortableDestroy: function () {
|
||||
var $this = this.filter('[multiple]');
|
||||
$this.each(function () {
|
||||
var $select = $(this)
|
||||
var $select2choices = $select.parent().find('.select2-choices');
|
||||
|
||||
// unbind form submit event
|
||||
$select.closest('form').unbind('submit.select2sortable');
|
||||
|
||||
// unbind sortstop event
|
||||
$select2choices.unbind("sortstop.select2sortable");
|
||||
|
||||
// destroy select2Sortable
|
||||
$select2choices.sortable('destroy');
|
||||
});
|
||||
|
||||
return $this;
|
||||
}
|
||||
});
|
||||
}(jQuery));
|
||||
@@ -1,5 +1,4 @@
|
||||
.big_number g.axis text,
|
||||
.big_number_total g.axis text {
|
||||
.big_number g.axis text {
|
||||
font-size: 10px;
|
||||
font-weight: normal;
|
||||
color: gray;
|
||||
@@ -9,23 +8,19 @@
|
||||
font-weight: none;
|
||||
}
|
||||
|
||||
.big_number text.big,
|
||||
.big_number_total text.big{
|
||||
.big_number text.big {
|
||||
stroke: black;
|
||||
text-anchor: middle;
|
||||
fill: black;
|
||||
}
|
||||
|
||||
.big_number g.tick line,
|
||||
.big_number_total g.tick line{
|
||||
.big_number g.tick line {
|
||||
stroke-width: 1px;
|
||||
stroke: grey;
|
||||
}
|
||||
|
||||
.big_number .domain,
|
||||
.big_number_total .domain{
|
||||
.big_number .domain {
|
||||
fill: none;
|
||||
stroke: black;
|
||||
stroke-width: 1;
|
||||
}
|
||||
|
||||
162
panoramix/assets/visualizations/big_number.js
Normal file
162
panoramix/assets/visualizations/big_number.js
Normal file
@@ -0,0 +1,162 @@
|
||||
// JS
|
||||
var d3 = window.d3 || require('d3');
|
||||
|
||||
// CSS
|
||||
require('./big_number.css');
|
||||
|
||||
var px = require('../javascripts/modules/panoramix.js');
|
||||
|
||||
function bigNumberVis(slice) {
|
||||
var div = d3.select(slice.selector);
|
||||
|
||||
function render() {
|
||||
d3.json(slice.jsonEndpoint(), function (error, payload) {
|
||||
//Define the percentage bounds that define color from red to green
|
||||
if (error !== null) {
|
||||
slice.error(error.responseText);
|
||||
return '';
|
||||
}
|
||||
var fd = payload.form_data;
|
||||
var json = payload.data;
|
||||
var color_range = [-1, 1];
|
||||
|
||||
var f = d3.format(fd.y_axis_format);
|
||||
var fp = d3.format('+.1%');
|
||||
var width = slice.width();
|
||||
var height = slice.height();
|
||||
var svg = div.append('svg');
|
||||
svg.attr("width", width);
|
||||
svg.attr("height", height);
|
||||
var data = json.data;
|
||||
var compare_suffix = ' ' + json.compare_suffix;
|
||||
var v_compare = null;
|
||||
var v = data[data.length - 1][1];
|
||||
if (json.compare_lag > 0) {
|
||||
var pos = data.length - (json.compare_lag + 1);
|
||||
if (pos >= 0) {
|
||||
v_compare = (v / data[pos][1]) - 1;
|
||||
}
|
||||
}
|
||||
var date_ext = d3.extent(data, function (d) {
|
||||
return d[0];
|
||||
});
|
||||
var value_ext = d3.extent(data, function (d) {
|
||||
return d[1];
|
||||
});
|
||||
|
||||
var margin = 20;
|
||||
var scale_x = d3.time.scale.utc().domain(date_ext).range([margin, width - margin]);
|
||||
var scale_y = d3.scale.linear().domain(value_ext).range([height - (margin), margin]);
|
||||
var colorRange = [d3.hsl(0, 1, 0.3), d3.hsl(120, 1, 0.3)];
|
||||
var scale_color = d3.scale
|
||||
.linear().domain(color_range)
|
||||
.interpolate(d3.interpolateHsl)
|
||||
.range(colorRange).clamp(true);
|
||||
var line = d3.svg.line()
|
||||
.x(function (d) {
|
||||
return scale_x(d[0]);
|
||||
})
|
||||
.y(function (d) {
|
||||
return scale_y(d[1]);
|
||||
})
|
||||
.interpolate("basis");
|
||||
|
||||
//Drawing trend line
|
||||
var g = svg.append('g');
|
||||
|
||||
g.append('path')
|
||||
.attr('d', function (d) {
|
||||
return line(data);
|
||||
})
|
||||
.attr('stroke-width', 5)
|
||||
.attr('opacity', 0.5)
|
||||
.attr('fill', "none")
|
||||
.attr('stroke-linecap', "round")
|
||||
.attr('stroke', "grey");
|
||||
|
||||
g = svg.append('g')
|
||||
.attr('class', 'digits')
|
||||
.attr('opacity', 1);
|
||||
|
||||
var y = height / 2;
|
||||
if (v_compare !== null) {
|
||||
y = (height / 8) * 3;
|
||||
}
|
||||
|
||||
//Printing big number
|
||||
g.append('text')
|
||||
.attr('x', width / 2)
|
||||
.attr('y', y)
|
||||
.attr('class', 'big')
|
||||
.attr('alignment-baseline', 'middle')
|
||||
.attr('id', 'bigNumber')
|
||||
.style('font-weight', 'bold')
|
||||
.style('cursor', 'pointer')
|
||||
.text(f(v))
|
||||
.style('font-size', d3.min([height, width]) / 3.5)
|
||||
.attr('fill', 'white');
|
||||
|
||||
var c = scale_color(v_compare);
|
||||
|
||||
//Printing compare %
|
||||
if (v_compare !== null) {
|
||||
g.append('text')
|
||||
.attr('x', width / 2)
|
||||
.attr('y', (height / 16) * 12)
|
||||
.text(fp(v_compare) + compare_suffix)
|
||||
.style('font-size', d3.min([height, width]) / 8)
|
||||
.style('text-anchor', 'middle')
|
||||
.attr('fill', c)
|
||||
.attr('stroke', c);
|
||||
}
|
||||
|
||||
var g_axis = svg.append('g').attr('class', 'axis').attr('opacity', 0);
|
||||
g = g_axis.append('g');
|
||||
var x_axis = d3.svg.axis()
|
||||
.scale(scale_x)
|
||||
.orient('bottom')
|
||||
.ticks(4)
|
||||
.tickFormat(px.formatDate);
|
||||
g.call(x_axis);
|
||||
g.attr('transform', 'translate(0,' + (height - margin) + ')');
|
||||
|
||||
g = g_axis.append('g').attr('transform', 'translate(' + (width - margin) + ',0)');
|
||||
var y_axis = d3.svg.axis()
|
||||
.scale(scale_y)
|
||||
.orient('left')
|
||||
.tickFormat(d3.format(fd.y_axis_format))
|
||||
.tickValues(value_ext);
|
||||
g.call(y_axis);
|
||||
g.selectAll('text')
|
||||
.style('text-anchor', 'end')
|
||||
.attr('y', '-7')
|
||||
.attr('x', '-4');
|
||||
|
||||
g.selectAll("text")
|
||||
.style('font-size', '10px');
|
||||
|
||||
div.on('mouseover', function (d) {
|
||||
var div = d3.select(this);
|
||||
div.select('path').transition().duration(500).attr('opacity', 1)
|
||||
.style('stroke-width', '2px');
|
||||
div.select('g.digits').transition().duration(500).attr('opacity', 0.1);
|
||||
div.select('g.axis').transition().duration(500).attr('opacity', 1);
|
||||
})
|
||||
.on('mouseout', function (d) {
|
||||
var div = d3.select(this);
|
||||
div.select('path').transition().duration(500).attr('opacity', 0.5)
|
||||
.style('stroke-width', '5px');
|
||||
div.select('g.digits').transition().duration(500).attr('opacity', 1);
|
||||
div.select('g.axis').transition().duration(500).attr('opacity', 0);
|
||||
});
|
||||
slice.done(payload);
|
||||
});
|
||||
}
|
||||
|
||||
return {
|
||||
render: render,
|
||||
resize: render
|
||||
};
|
||||
}
|
||||
|
||||
module.exports = bigNumberVis;
|
||||
175
panoramix/assets/visualizations/directed_force.js
Normal file
175
panoramix/assets/visualizations/directed_force.js
Normal file
@@ -0,0 +1,175 @@
|
||||
// JS
var d3 = window.d3 || require('d3');

// CSS
require('./directed_force.css');

/* Modified from http://bl.ocks.org/d3noob/5141278 */
// Force-directed graph: renders the slice's (source, target, value) link
// list as draggable nodes joined by curved, arrow-tipped edges.
function directedForceVis(slice) {
  var div = d3.select(slice.selector);
  var link_length = slice.data.form_data.link_length || 200;
  var charge = slice.data.form_data.charge || -500;

  var render = function () {
    var width = slice.width();
    var height = slice.height() - 25;
    d3.json(slice.jsonEndpoint(), function (error, json) {

      if (error !== null) {
        slice.error(error.responseText);
        return '';
      }
      var links = json.data;
      var nodes = {};
      // Compute the distinct nodes from the links, accumulating per-node
      // aggregates (total flow, min/max incoming link value) as we go.
      links.forEach(function (link) {
        link.source = nodes[link.source] || (nodes[link.source] = {
          name: link.source
        });
        link.target = nodes[link.target] || (nodes[link.target] = {
          name: link.target
        });
        link.value = Number(link.value);

        var target_name = link.target.name;
        var source_name = link.source.name;

        // NOTE(review): the first incoming link is counted twice in
        // `total` (seeded with link.value here, then incremented again
        // below). Node radii depend on `total`, so this is kept as-is —
        // confirm whether the double count is intentional.
        if (nodes[target_name].total === undefined) {
          nodes[target_name].total = link.value;
        }
        if (nodes[source_name].total === undefined) {
          nodes[source_name].total = 0;
        }
        if (nodes[target_name].max === undefined) {
          nodes[target_name].max = 0;
        }
        if (link.value > nodes[target_name].max) {
          nodes[target_name].max = link.value;
        }
        // Bug fix: `min` was seeded at 0 and updated with `>`, which made
        // it track the maximum instead of the minimum. Seed with the
        // first observed value and keep the smaller one.
        if (nodes[target_name].min === undefined) {
          nodes[target_name].min = link.value;
        }
        if (link.value < nodes[target_name].min) {
          nodes[target_name].min = link.value;
        }

        nodes[target_name].total += link.value;
      });

      // d3 force simulation driving node positions on every tick.
      var force = d3.layout.force()
        .nodes(d3.values(nodes))
        .links(links)
        .size([width, height])
        .linkDistance(link_length)
        .charge(charge)
        .on("tick", tick)
        .start();

      var svg = div.append("svg")
        .attr("width", width)
        .attr("height", height);

      // build the arrow.
      svg.append("svg:defs").selectAll("marker")
        .data(["end"]) // Different link/path types can be defined here
        .enter().append("svg:marker") // This section adds in the arrows
        .attr("id", String)
        .attr("viewBox", "0 -5 10 10")
        .attr("refX", 15)
        .attr("refY", -1.5)
        .attr("markerWidth", 6)
        .attr("markerHeight", 6)
        .attr("orient", "auto")
        .append("svg:path")
        .attr("d", "M0,-5L10,0L0,5");

      // Edge opacity reflects the link's share of its target's max inflow.
      var edgeScale = d3.scale.linear()
        .range([0.1, 0.5]);
      // add the links and the arrows
      var path = svg.append("svg:g").selectAll("path")
        .data(force.links())
        .enter().append("svg:path")
        .attr("class", "link")
        .style("opacity", function (d) {
          return edgeScale(d.value / d.target.max);
        })
        .attr("marker-end", "url(#end)");

      // define the nodes; hovering enlarges the circle and its label.
      var node = svg.selectAll(".node")
        .data(force.nodes())
        .enter().append("g")
        .attr("class", "node")
        .on("mouseenter", function (d) {
          d3.select(this)
            .select("circle")
            .transition()
            .style('stroke-width', 5);

          d3.select(this)
            .select("text")
            .transition()
            .style('font-size', 25);
        })
        .on("mouseleave", function (d) {
          d3.select(this)
            .select("circle")
            .transition()
            .style('stroke-width', 1.5);
          d3.select(this)
            .select("text")
            .transition()
            .style('font-size', 12);
        })
        .call(force.drag);

      // add the nodes; radius grows with sqrt(total), mapped into [3, 30]px.
      var ext = d3.extent(d3.values(nodes), function (d) {
        return Math.sqrt(d.total);
      });
      var circleScale = d3.scale.linear()
        .domain(ext)
        .range([3, 30]);

      node.append("circle")
        .attr("r", function (d) {
          return circleScale(Math.sqrt(d.total));
        });

      // add the text
      node.append("text")
        .attr("x", 6)
        .attr("dy", ".35em")
        .text(function (d) {
          return d.name;
        });

      // add the curvy lines
      function tick() {
        path.attr("d", function (d) {
          var dx = d.target.x - d.source.x,
            dy = d.target.y - d.source.y,
            dr = Math.sqrt(dx * dx + dy * dy);
          // Arc from source to target whose radius equals the distance.
          return "M" +
            d.source.x + "," +
            d.source.y + "A" +
            dr + "," + dr + " 0 0,1 " +
            d.target.x + "," +
            d.target.y;
        });

        node.attr("transform", function (d) {
          return "translate(" + d.x + "," + d.y + ")";
        });
      }

      slice.done(json);
    });
  };
  return {
    render: render,
    resize: render
  };
}

module.exports = directedForceVis;
|
||||
8
panoramix/assets/visualizations/filter_box.css
Normal file
8
panoramix/assets/visualizations/filter_box.css
Normal file
@@ -0,0 +1,8 @@
|
||||
/* Keep select2's highlight from obscuring the custom bar-chart background
   rendered inside each filter option (see select2Formatter in filter_box.js). */
.select2-highlighted > .filter_box {
  background-color: transparent;
  border: 1px dashed black;
}

/* Drop the extra top padding when the filter box is embedded in a dashboard. */
.dashboard .filter_box .slice_container > div {
  padding-top: 0;
}
|
||||
82
panoramix/assets/visualizations/filter_box.js
Normal file
82
panoramix/assets/visualizations/filter_box.js
Normal file
@@ -0,0 +1,82 @@
|
||||
// JS
var $ = window.$ = require('jquery');
var jQuery = window.jQuery = $;
var d3 = window.d3 || require('d3');

// CSS
require('./filter_box.css');
require('../javascripts/panoramix-select2.js');

// Filter box widget: renders one select2 multi-select per filterable
// column; selections are pushed to sibling slices via slice.setFilter().
function filterBox(slice) {
  var filtersObj = {}; // column name -> select2-wrapped element
  var d3token = d3.select(slice.selector);

  // Change handler: split select2's comma-separated value into a list and
  // register it as the active filter for the column named by the widget.
  var fltChanged = function () {
    var val = $(this).val();
    var vals = [];
    if (val !== '') {
      vals = val.split(',');
    }
    slice.setFilter($(this).attr('name'), vals);
  };

  var refresh = function () {
    // Rebuild the widget from scratch on every refresh.
    d3token.selectAll("*").remove();
    var container = d3token
      .append('div')
      .classed('padded', true);

    $.getJSON(slice.jsonEndpoint(), function (payload) {
      // Per-column maximum metric, used to scale the option background bars.
      var maxes = {};

      // payload.data maps each filterable column to its option list.
      for (var filter in payload.data) {
        var data = payload.data[filter];
        maxes[filter] = d3.max(data, function (d) {
          return d.metric;
        });
        var id = 'fltbox__' + filter;

        var div = container.append('div');

        div.append("label").text(filter);

        div.append('div')
          .attr('name', filter)
          .classed('form-control', true)
          .attr('multiple', '')
          .attr('id', id);

        filtersObj[filter] = $('#' + id).select2({
          placeholder: "Select [" + filter + ']',
          containment: 'parent',
          dropdownAutoWidth: true,
          data: data,
          multiple: true,
          formatResult: select2Formatter
        })
        .on('change', fltChanged);
      }
      slice.done();

      // Renders each dropdown option with a horizontal bar whose width is
      // the option's metric as a percentage of the column's max.
      function select2Formatter(result, container /*, query, escapeMarkup*/) {
        var perc = Math.round((result.metric / maxes[result.filter]) * 100);
        var style = 'padding: 2px 5px;';
        style += "background-image: ";
        style += "linear-gradient(to right, lightgrey, lightgrey " + perc + "%, rgba(0,0,0,0) " + perc + "%";

        $(container).attr('style', 'padding: 0px; background: white;');
        $(container).addClass('filter_box');
        return '<div style="' + style + '"><span>' + result.text + '</span></div>';
      }
    })
    .fail(function (xhr) {
      slice.error(xhr.responseText);
    });
  };
  return {
    render: refresh,
    resize: refresh
  };
}

module.exports = filterBox;
|
||||
@@ -1,12 +1,35 @@
|
||||
.heatmap .axis text {
|
||||
font: 10px sans-serif;
|
||||
}
|
||||
|
||||
.heatmap .axis path,
|
||||
.heatmap .axis line {
|
||||
fill: none;
|
||||
stroke: #000;
|
||||
shape-rendering: crispEdges;
|
||||
}
|
||||
|
||||
.heatmap svg {
|
||||
}
|
||||
|
||||
.heatmap canvas, .heatmap img {
|
||||
image-rendering: optimizeSpeed; /* Older versions of FF */
|
||||
image-rendering: -moz-crisp-edges; /* FF 6.0+ */
|
||||
image-rendering: -webkit-optimize-contrast; /* Safari */
|
||||
image-rendering: -o-crisp-edges; /* OS X & Windows Opera (12.02+) */
|
||||
image-rendering: pixelated; /* Awesome future-browsers */
|
||||
-ms-interpolation-mode: nearest-neighbor; /* IE */
|
||||
}
|
||||
|
||||
/* from d3-tip */
|
||||
.d3-tip {
|
||||
line-height: 1;
|
||||
font-weight: bold;
|
||||
padding: 12px;
|
||||
background: rgba(0, 0, 0, 0.8);
|
||||
color: #fff;
|
||||
border-radius: 2px;
|
||||
pointer-events: none;
|
||||
z-index: 1000;
|
||||
}
|
||||
|
||||
/* Creates a small triangle extender for the tooltip */
|
||||
209
panoramix/assets/visualizations/heatmap.js
Normal file
209
panoramix/assets/visualizations/heatmap.js
Normal file
@@ -0,0 +1,209 @@
|
||||
// JS
var $ = window.$ || require('jquery');
var px = window.px || require('../javascripts/modules/panoramix.js');
var d3 = require('d3');

d3.tip = require('d3-tip'); //using window.d3 doesn't capture events properly bc of multiple instances

// CSS
require('./heatmap.css');

// Inspired from http://bl.ocks.org/mbostock/3074470
// https://jsfiddle.net/cyril123/h0reyumq/
// Heatmap visualization: paints one pixel per (x, y) cell into a tiny
// canvas that CSS stretches to the slice size, with SVG axes and a
// d3-tip tooltip layered on top.
function heatmapVis(slice) {
  // Pixel margins reserved for the axes around the canvas.
  var margins = {
    t: 10,
    r: 10,
    b: 50,
    l: 60
  };

  function refresh() {
    var width = slice.width();
    var height = slice.height();
    var hmWidth = width - (margins.l + margins.r);
    var hmHeight = height - (margins.b + margins.t);
    var fp = d3.format('.3p'); // percentage formatter for the tooltip
    d3.json(slice.jsonEndpoint(), function (error, payload) {
      // matrix[x][y] -> original datum; filled in createImageObj and
      // read back by the tooltip's mouse-position lookup.
      var matrix = {};
      if (error) {
        slice.error(error.responseText);
        return '';
      }
      var fd = payload.form_data;
      var data = payload.data;

      // Builds an ordinal scale over the distinct values of column `k`.
      // With `rangeBands` it maps to pixel bands (for the axes); without,
      // to integer cell indices (for canvas pixel coordinates).
      // NOTE(review): the comparator `b - a` is numeric; non-numeric
      // categories compare as NaN and effectively keep their insertion
      // order — confirm that is acceptable for string axes.
      function ordScale(k, rangeBands, reverse) {
        if (reverse === undefined) {
          reverse = false;
        }
        var domain = {};
        $.each(data, function (i, d) {
          domain[d[k]] = true;
        });
        domain = Object.keys(domain).sort(function (a, b) {
          return b - a;
        });
        if (reverse) {
          domain.reverse();
        }
        if (rangeBands === undefined) {
          return d3.scale.ordinal().domain(domain).range(d3.range(domain.length));
        } else {
          return d3.scale.ordinal().domain(domain).rangeBands(rangeBands);
        }
      }
      var xScale = ordScale('x');
      var yScale = ordScale('y', undefined, true);
      var xRbScale = ordScale('x', [0, hmWidth]);
      var yRbScale = ordScale('y', [hmHeight, 0]);
      var X = 0,
        Y = 1;
      var heatmapDim = [xRbScale.domain().length, yRbScale.domain().length];

      // Color scale over d.perc, using the configured linear color scheme.
      var color = px.color.colorScalerFactory(fd.linear_color_scheme);

      // Cell-index -> pixel scales; inverted later to map mouse positions
      // back onto cells for the tooltip.
      var scale = [
        d3.scale.linear()
          .domain([0, heatmapDim[X]])
          .range([0, hmWidth]),
        d3.scale.linear()
          .domain([0, heatmapDim[Y]])
          .range([0, hmHeight])
      ];

      var container = d3.select(slice.selector)
        .style("left", "0px")
        .style("position", "relative")
        .style("top", "0px");

      // Canvas is heatmapDim pixels and CSS-stretched to the plot area;
      // fd.canvas_image_rendering controls how the browser upscales it.
      var canvas = container.append("canvas")
        .attr("width", heatmapDim[X])
        .attr("height", heatmapDim[Y])
        .style("width", hmWidth + "px")
        .style("height", hmHeight + "px")
        .style("image-rendering", fd.canvas_image_rendering)
        .style("left", margins.l + "px")
        .style("top", margins.t + "px")
        .style("position", "absolute");

      var svg = container.append("svg")
        .attr("width", width)
        .attr("height", height)
        .style("left", "0px")
        .style("top", "0px")
        .style("position", "absolute");

      // Transparent rect over the plot area: draws the border and
      // captures mouse events for the tooltip.
      var rect = svg.append('g')
        .attr("transform", "translate(" + margins.l + "," + margins.t + ")")
        .append('rect')
        .style('fill-opacity', 0)
        .attr('stroke', 'black')
        .attr("width", hmWidth)
        .attr("height", hmHeight);

      var tip = d3.tip()
        .attr('class', 'd3-tip')
        .offset(function () {
          // Center the tip horizontally over the plot, 20px above cursor.
          var k = d3.mouse(this);
          var x = k[0] - (hmWidth / 2);
          return [k[1] - 20, x];
        })
        .html(function (d) {
          // Invert the mouse position back to a cell index and show that
          // cell's datum, if one exists.
          var k = d3.mouse(this);
          var m = Math.floor(scale[0].invert(k[0]));
          var n = Math.floor(scale[1].invert(k[1]));
          if (m in matrix && n in matrix[m]) {
            var obj = matrix[m][n];
            var s = "";
            s += "<div><b>" + fd.all_columns_x + ": </b>" + obj.x + "<div>";
            s += "<div><b>" + fd.all_columns_y + ": </b>" + obj.y + "<div>";
            s += "<div><b>" + fd.metric + ": </b>" + obj.v + "<div>";
            s += "<div><b>%: </b>" + fp(obj.perc) + "<div>";
            return s;
          }
        });

      rect.call(tip);

      // Axes show every Nth tick, per the configured scale intervals.
      var xAxis = d3.svg.axis()
        .scale(xRbScale)
        .tickValues(xRbScale.domain().filter(
          function (d, i) {
            return !(i % (parseInt(fd.xscale_interval, 10)));
          }))
        .orient("bottom");
      var yAxis = d3.svg.axis()
        .scale(yRbScale)
        .tickValues(yRbScale.domain().filter(
          function (d, i) {
            return !(i % (parseInt(fd.yscale_interval, 10)));
          }))
        .orient("left");

      svg.append("g")
        .attr("class", "x axis")
        .attr("transform", "translate(" + margins.l + "," + (margins.t + hmHeight) + ")")
        .call(xAxis)
        .selectAll("text")
        .style("text-anchor", "end")
        .attr("transform", "rotate(-45)")
        .style("font-weight", "bold");

      svg.append("g")
        .attr("class", "y axis")
        .attr("transform", "translate(" + margins.l + ", 0)")
        .call(yAxis);

      rect.on('mousemove', tip.show);
      rect.on('mouseout', tip.hide);

      var context = canvas.node().getContext("2d");
      context.imageSmoothingEnabled = false;
      createImageObj();

      // Compute the pixel colors; scaled by CSS.
      function createImageObj() {
        var imageObj = new Image();
        var image = context.createImageData(heatmapDim[0], heatmapDim[1]);
        // pixs: flat row-major pixel index -> rgb color. Also populates
        // `matrix` for the tooltip as a side effect.
        var pixs = {};
        $.each(data, function (i, d) {
          var c = d3.rgb(color(d.perc));
          var x = xScale(d.x);
          var y = yScale(d.y);
          pixs[x + (y * xScale.domain().length)] = c;
          if (matrix[x] === undefined) {
            matrix[x] = {};
          }
          if (matrix[x][y] === undefined) {
            matrix[x][y] = d;
          }
        });

        // Write RGBA bytes; cells with no datum become fully transparent
        // (the red placeholder color is never visible at alpha 0).
        var p = -1;
        for (var i = 0; i < heatmapDim[0] * heatmapDim[1]; i++) {
          var c = pixs[i];
          var alpha = 255;
          if (c === undefined) {
            c = d3.rgb('#F00');
            alpha = 0;
          }
          image.data[++p] = c.r;
          image.data[++p] = c.g;
          image.data[++p] = c.b;
          image.data[++p] = alpha;
        }
        context.putImageData(image, 0, 0);
        imageObj.src = canvas.node().toDataURL();
      }
      slice.done();

    });
  }
  return {
    render: refresh,
    resize: refresh
  };
}

module.exports = heatmapVis;
|
||||
25
panoramix/assets/visualizations/iframe.js
Normal file
25
panoramix/assets/visualizations/iframe.js
Normal file
@@ -0,0 +1,25 @@
|
||||
var $ = window.$ || require('jquery');

// Embeds an external page (payload.form_data.url) in an iframe sized to
// fill the slice container.
function iframeWidget(slice) {
  function refresh() {
    $('#code').attr('rows', '15');

    var onSuccess = function (payload) {
      slice.container.html('<iframe style="width:100%;"></iframe>');
      var frame = slice.container.find('iframe');
      frame.css('height', slice.height());
      frame.attr('src', payload.form_data.url);
      slice.done();
    };
    var onFailure = function (xhr) {
      slice.error(xhr.responseText);
    };

    $.getJSON(slice.jsonEndpoint(), onSuccess).fail(onFailure);
  }

  return {
    render: refresh,
    resize: refresh
  };
}

module.exports = iframeWidget;
|
||||
23
panoramix/assets/visualizations/markup.js
Normal file
23
panoramix/assets/visualizations/markup.js
Normal file
@@ -0,0 +1,23 @@
|
||||
var $ = window.$ || require('jquery');

// Renders server-provided HTML markup (payload.data.html) into the slice.
function markupWidget(slice) {
  function refresh() {
    $('#code').attr('rows', '15');

    var onSuccess = function (payload) {
      slice.container.html(payload.data.html);
      slice.done();
    };
    var onFailure = function (xhr) {
      slice.error(xhr.responseText);
    };

    $.getJSON(slice.jsonEndpoint(), onSuccess).fail(onFailure);
  }

  return {
    render: refresh,
    resize: refresh
  };
}

module.exports = markupWidget;
|
||||
8
panoramix/assets/visualizations/nvd3_vis.css
Normal file
8
panoramix/assets/visualizations/nvd3_vis.css
Normal file
@@ -0,0 +1,8 @@
|
||||
/* Series placed in a "dashed" group are stroked with a dashed line. */
g.dashed path {
  stroke-dasharray: 5, 5;
}

/* Emphasize the hovered series row inside nvd3's interactive tooltip. */
.nvtooltip tr.highlight td {
  font-weight: bold;
  font-size: 15px !important;
}
|
||||
208
panoramix/assets/visualizations/nvd3_vis.js
Normal file
208
panoramix/assets/visualizations/nvd3_vis.js
Normal file
@@ -0,0 +1,208 @@
|
||||
// JS
var $ = window.$ || require('jquery');
var d3 = window.d3 || require('d3');
var px = window.px || require('../javascripts/modules/panoramix.js');
var nv = require('nvd3');

// CSS
require('../node_modules/nvd3/build/nv.d3.min.css');
require('./nvd3_vis.css');

// Generic nvd3-backed visualization: selects an nvd3 chart model based on
// form_data.viz_type, then applies the shared axis/legend/format options.
function nvd3Vis(slice) {
  var chart; // kept across renders so resize() can call chart.update()

  var render = function () {
    $.getJSON(slice.jsonEndpoint(), function (payload) {
      var fd = payload.form_data;
      var viz_type = fd.viz_type;

      var f = d3.format('.3s'); // SI-prefix number formatter
      var colorKey = 'key'; // series attribute used to pick a color

      nv.addGraph(function () {
        // Chart-type-specific construction and configuration.
        switch (viz_type) {
          case 'line':
            if (fd.show_brush) {
              // Focus+context variant with a brushable mini-chart below.
              chart = nv.models.lineWithFocusChart();
              chart.lines2.xScale(d3.time.scale.utc());
              chart.x2Axis
                .showMaxMin(fd.x_axis_showminmax)
                .staggerLabels(true);
            } else {
              chart = nv.models.lineChart();
            }
            // To alter the tooltip header
            // chart.interactiveLayer.tooltip.headerFormatter(function(){return '';});
            chart.xScale(d3.time.scale.utc());
            chart.interpolate(fd.line_interpolation);
            chart.xAxis
              .showMaxMin(fd.x_axis_showminmax)
              .staggerLabels(true);
            break;

          case 'bar':
            chart = nv.models.multiBarChart()
              .showControls(true)
              .groupSpacing(0.1);

            chart.xAxis
              .showMaxMin(false)
              .staggerLabels(true);

            chart.stacked(fd.bar_stacked);
            break;

          case 'dist_bar':
            chart = nv.models.multiBarChart()
              .showControls(true) //Allow user to switch between 'Grouped' and 'Stacked' mode.
              .reduceXTicks(false)
              .rotateLabels(45)
              .groupSpacing(0.1); //Distance between each group of bars.

            chart.xAxis
              .showMaxMin(false);

            chart.stacked(fd.bar_stacked);
            break;

          case 'pie':
            chart = nv.models.pieChart();
            // Pie slices are colored by their x value, not the series key.
            colorKey = 'x';
            chart.valueFormat(f);
            if (fd.donut) {
              chart.donut(true);
              chart.labelsOutside(true);
            }
            chart.labelsOutside(true);
            chart.cornerRadius(true);
            break;

          case 'column':
            chart = nv.models.multiBarChart()
              .reduceXTicks(false)
              .rotateLabels(45);
            break;

          case 'compare':
            chart = nv.models.cumulativeLineChart();
            chart.xScale(d3.time.scale.utc());
            chart.xAxis
              .showMaxMin(false)
              .staggerLabels(true);
            break;

          case 'bubble':
            // Custom tooltip: colored entity header plus x/y/size rows.
            var row = function (col1, col2) {
              return "<tr><td>" + col1 + "</td><td>" + col2 + "</td></tr>";
            };
            chart = nv.models.scatterChart();
            chart.showDistX(true);
            chart.showDistY(true);
            chart.tooltip.contentGenerator(function (obj) {
              var p = obj.point;
              var s = "<table>";
              s += '<tr><td style="color:' + p.color + ';"><strong>' + p[fd.entity] + '</strong> (' + p.group + ')</td></tr>';
              s += row(fd.x, f(p.x));
              s += row(fd.y, f(p.y));
              s += row(fd.size, f(p.size));
              s += "</table>";
              return s;
            });
            chart.pointRange([5, fd.max_bubble_size * fd.max_bubble_size]);
            break;

          case 'area':
            chart = nv.models.stackedAreaChart();
            chart.style(fd.stacked_style);
            chart.xScale(d3.time.scale.utc());
            chart.xAxis
              .showMaxMin(false)
              .staggerLabels(true);
            break;

          default:
            throw new Error("Unrecognized visualization for nvd3" + viz_type);
        }

        // --- Options shared by all chart types below. ---
        if ("showLegend" in chart && typeof fd.show_legend !== 'undefined') {
          chart.showLegend(fd.show_legend);
        }

        var height = slice.height();
        height -= 15; // accounting for the staggered xAxis

        if (chart.hasOwnProperty("x2Axis")) {
          // Focus+context charts get extra room for the second axis.
          height += 30;
        }
        chart.height(height);
        slice.container.css('height', height + 'px');

        if ((viz_type === "line" || viz_type === "area") && fd.rich_tooltip) {
          chart.useInteractiveGuideline(true);
        }
        if (fd.y_axis_zero) {
          chart.forceY([0, 1]);
        } else if (fd.y_log_scale) {
          chart.yScale(d3.scale.log());
        }
        if (fd.x_log_scale) {
          chart.xScale(d3.scale.log());
        }
        // x tick formatting: SI units for bubbles, dates otherwise.
        if (viz_type === 'bubble') {
          chart.xAxis.tickFormat(d3.format('.3s'));
        } else if (fd.x_axis_format === 'smart_date') {
          chart.xAxis.tickFormat(px.formatDate);
        } else if (fd.x_axis_format !== undefined) {
          chart.xAxis.tickFormat(px.timeFormatFactory(fd.x_axis_format));
        }
        if (chart.yAxis !== undefined) {
          chart.yAxis.tickFormat(d3.format('.3s'));
        }

        // Percentage formatting when the values represent shares.
        if (fd.contribution || fd.num_period_compare || viz_type === 'compare') {
          chart.yAxis.tickFormat(d3.format('.3p'));
          if (chart.y2Axis !== undefined) {
            chart.y2Axis.tickFormat(d3.format('.3p'));
          }
        } else if (fd.y_axis_format) {
          chart.yAxis.tickFormat(d3.format(fd.y_axis_format));

          if (chart.y2Axis !== undefined) {
            chart.y2Axis.tickFormat(d3.format(fd.y_axis_format));
          }
        }

        // Stable colors keyed on the series key (or x value for pies).
        chart.color(function (d, i) {
          return px.color.category21(d[colorKey]);
        });

        d3.select(slice.selector).html('');
        d3.select(slice.selector).append("svg")
          .datum(payload.data)
          .transition().duration(500)
          .attr('height', height)
          .call(chart);

        return chart;
      });

      slice.done(payload);
    })
    .fail(function (xhr) {
      slice.error(xhr.responseText);
    });
  };

  // nvd3 charts re-render themselves in place on update().
  var update = function () {
    if (chart && chart.update) {
      chart.update();
    }
  };

  return {
    render: render,
    resize: update
  };
}

module.exports = nvd3Vis;
|
||||
92
panoramix/assets/visualizations/parallel_coordinates.js
Normal file
92
panoramix/assets/visualizations/parallel_coordinates.js
Normal file
@@ -0,0 +1,92 @@
|
||||
// JS
var $ = window.$ || require('jquery');
var d3 = window.d3 || require('d3');
d3.parcoords = require('../vendor/parallel_coordinates/d3.parcoords.js');
d3.divgrid = require('../vendor/parallel_coordinates/divgrid.js');

// CSS
require('../vendor/parallel_coordinates/d3.parcoords.css');

// Parallel-coordinates visualization. Lines are colored on a
// red -> grey -> blue ramp over the secondary metric; an optional data
// grid below is linked to the plot via hover highlighting and brushing.
function parallelCoordVis(slice) {

  function refresh() {
    $('#code').attr('rows', '15');
    $.getJSON(slice.jsonEndpoint(), function (payload) {
      var data = payload.data;
      var fd = payload.form_data;
      var ext = d3.extent(data, function (d) {
        return d[fd.secondary_metric];
      });
      // Bug fix: the middle color stop was `(max - min) / 2` — half the
      // range, which is only the midpoint when min === 0. Use the true
      // midpoint of [min, max] so the grey stop sits in the middle.
      ext = [ext[0], ext[0] + (ext[1] - ext[0]) / 2, ext[1]];
      var cScale = d3.scale.linear()
        .domain(ext)
        .range(['red', 'grey', 'blue'])
        .interpolate(d3.interpolateLab);

      var color = function (d) {
        return cScale(d[fd.secondary_metric]);
      };
      var container = d3.select(slice.selector);
      // Split the height with the data grid when it is shown.
      var eff_height = fd.show_datatable ? (slice.height() / 2) : slice.height();

      container.append('div')
        .attr('id', 'parcoords_' + slice.container_id)
        .style('height', eff_height + 'px')
        .classed("parcoords", true);

      var parcoords = d3.parcoords()('#parcoords_' + slice.container_id)
        .width(slice.width())
        .color(color)
        .alpha(0.5)
        .composite("darken")
        .height(eff_height)
        .data(payload.data)
        .render()
        .createAxes()
        .shadows()
        .reorderable()
        .brushMode("1D-axes");

      if (fd.show_datatable) {
        // create data table, row hover highlighting
        var grid = d3.divgrid();
        container.append("div")
          .datum(data.slice(0, 10))
          .attr('id', "grid")
          .call(grid)
          .classed("parcoords", true)
          .selectAll(".row")
          .on({
            mouseover: function (d) {
              parcoords.highlight([d]);
            },
            mouseout: parcoords.unhighlight
          });
        // update data table on brush event
        parcoords.on("brush", function (d) {
          d3.select("#grid")
            .datum(d.slice(0, 10))
            .call(grid)
            .selectAll(".row")
            .on({
              mouseover: function (d) {
                parcoords.highlight([d]);
              },
              mouseout: parcoords.unhighlight
            });
        });
      }
      slice.done();
    })
    .fail(function (xhr) {
      slice.error(xhr.responseText);
    });
  }

  return {
    render: refresh,
    resize: refresh
  };
}

module.exports = parallelCoordVis;
|
||||
13
panoramix/assets/visualizations/pivot_table.css
Normal file
13
panoramix/assets/visualizations/pivot_table.css
Normal file
@@ -0,0 +1,13 @@
|
||||
/* Let the widget scroll when the pivot table overflows its gridster tile. */
.gridster .widget.pivot_table {
  overflow: auto !important;
}

/* Compact header cells so wide pivots fit on screen. */
.table tr>th {
  padding: 1px 5px !important;
  font-size: small !important;
}

/* Compact body cells to match the headers. */
.table tr>td {
  padding: 1px 5px !important;
  font-size: small !important;
}
|
||||
31
panoramix/assets/visualizations/pivot_table.js
Normal file
31
panoramix/assets/visualizations/pivot_table.js
Normal file
@@ -0,0 +1,31 @@
|
||||
var $ = window.$ = require('jquery');
|
||||
var jQuery = window.jQuery = $;
|
||||
|
||||
require('datatables');
|
||||
require('./pivot_table.css');
|
||||
require('../node_modules/datatables-bootstrap3-plugin/media/css/datatables-bootstrap3.css');
|
||||
|
||||
module.exports = function (slice) {
|
||||
var container = slice.container;
|
||||
var form_data = slice.data.form_data;
|
||||
|
||||
function refresh() {
|
||||
$.getJSON(slice.jsonEndpoint(), function (json) {
|
||||
container.html(json.data);
|
||||
if (form_data.groupby.length === 1) {
|
||||
var table = container.find('table').DataTable({
|
||||
paging: false,
|
||||
searching: false
|
||||
});
|
||||
table.column('-1').order('desc').draw();
|
||||
}
|
||||
slice.done(json);
|
||||
}).fail(function (xhr) {
|
||||
slice.error(xhr.responseText);
|
||||
});
|
||||
}
|
||||
return {
|
||||
render: refresh,
|
||||
resize: refresh
|
||||
};
|
||||
};
|
||||
@@ -18,16 +18,3 @@
|
||||
.sankey .link:hover {
|
||||
stroke-opacity: .5;
|
||||
}
|
||||
|
||||
.sankey-tooltip {
|
||||
position: absolute;
|
||||
width: auto;
|
||||
background: #ddd;
|
||||
padding: 10px;
|
||||
font-size: 12px;
|
||||
font-weight: 200;
|
||||
color: #333;
|
||||
border: 1px solid #fff;
|
||||
text-align: center;
|
||||
pointer-events: none;
|
||||
}
|
||||
140
panoramix/assets/visualizations/sankey.js
Normal file
140
panoramix/assets/visualizations/sankey.js
Normal file
@@ -0,0 +1,140 @@
|
||||
// CSS
|
||||
require('./sankey.css');
|
||||
// JS
|
||||
var px = window.px || require('../javascripts/modules/panoramix.js');
|
||||
var d3 = window.d3 || require('d3');
|
||||
d3.sankey = require('d3-sankey').sankey;
|
||||
|
||||
function sankeyVis(slice) {
|
||||
var div = d3.select(slice.selector);
|
||||
|
||||
var render = function () {
|
||||
var margin = {
|
||||
top: 5,
|
||||
right: 5,
|
||||
bottom: 5,
|
||||
left: 5
|
||||
};
|
||||
var width = slice.width() - margin.left - margin.right;
|
||||
var height = slice.height() - margin.top - margin.bottom;
|
||||
|
||||
var formatNumber = d3.format(",.0f"),
|
||||
format = function (d) {
|
||||
return formatNumber(d) + " TWh";
|
||||
};
|
||||
|
||||
var svg = div.append("svg")
|
||||
.attr("width", width + margin.left + margin.right)
|
||||
.attr("height", height + margin.top + margin.bottom)
|
||||
.append("g")
|
||||
.attr("transform", "translate(" + margin.left + "," + margin.top + ")");
|
||||
|
||||
var sankey = d3.sankey()
|
||||
.nodeWidth(15)
|
||||
.nodePadding(10)
|
||||
.size([width, height]);
|
||||
|
||||
var path = sankey.link();
|
||||
|
||||
d3.json(slice.jsonEndpoint(), function (error, json) {
|
||||
if (error !== null) {
|
||||
slice.error(error.responseText);
|
||||
return '';
|
||||
}
|
||||
var links = json.data;
|
||||
var nodes = {};
|
||||
// Compute the distinct nodes from the links.
|
||||
links.forEach(function (link) {
|
||||
link.source = nodes[link.source] || (nodes[link.source] = { name: link.source });
|
||||
link.target = nodes[link.target] || (nodes[link.target] = { name: link.target });
|
||||
link.value = Number(link.value);
|
||||
});
|
||||
nodes = d3.values(nodes);
|
||||
|
||||
sankey
|
||||
.nodes(nodes)
|
||||
.links(links)
|
||||
.layout(32);
|
||||
|
||||
var link = svg.append("g").selectAll(".link")
|
||||
.data(links)
|
||||
.enter().append("path")
|
||||
.attr("class", "link")
|
||||
.attr("d", path)
|
||||
.style("stroke-width", function (d) {
|
||||
return Math.max(1, d.dy);
|
||||
})
|
||||
.sort(function (a, b) {
|
||||
return b.dy - a.dy;
|
||||
});
|
||||
|
||||
link.append("title")
|
||||
.text(function (d) {
|
||||
return d.source.name + " → " + d.target.name + "\n" + format(d.value);
|
||||
});
|
||||
|
||||
var node = svg.append("g").selectAll(".node")
|
||||
.data(nodes)
|
||||
.enter().append("g")
|
||||
.attr("class", "node")
|
||||
.attr("transform", function (d) {
|
||||
return "translate(" + d.x + "," + d.y + ")";
|
||||
})
|
||||
.call(d3.behavior.drag()
|
||||
.origin(function (d) {
|
||||
return d;
|
||||
})
|
||||
.on("dragstart", function () {
|
||||
this.parentNode.appendChild(this);
|
||||
})
|
||||
.on("drag", dragmove));
|
||||
|
||||
node.append("rect")
|
||||
.attr("height", function (d) {
|
||||
return d.dy;
|
||||
})
|
||||
.attr("width", sankey.nodeWidth())
|
||||
.style("fill", function (d) {
|
||||
d.color = px.color.category21(d.name.replace(/ .*/, ""));
|
||||
return d.color;
|
||||
})
|
||||
.style("stroke", function (d) {
|
||||
return d3.rgb(d.color).darker(2);
|
||||
})
|
||||
.append("title")
|
||||
.text(function (d) {
|
||||
return d.name + "\n" + format(d.value);
|
||||
});
|
||||
|
||||
node.append("text")
|
||||
.attr("x", -6)
|
||||
.attr("y", function (d) {
|
||||
return d.dy / 2;
|
||||
})
|
||||
.attr("dy", ".35em")
|
||||
.attr("text-anchor", "end")
|
||||
.attr("transform", null)
|
||||
.text(function (d) {
|
||||
return d.name;
|
||||
})
|
||||
.filter(function (d) {
|
||||
return d.x < width / 2;
|
||||
})
|
||||
.attr("x", 6 + sankey.nodeWidth())
|
||||
.attr("text-anchor", "start");
|
||||
|
||||
function dragmove(d) {
|
||||
d3.select(this).attr("transform", "translate(" + d.x + "," + (d.y = Math.max(0, Math.min(height - d.dy, d3.event.y))) + ")");
|
||||
sankey.relayout();
|
||||
link.attr("d", path);
|
||||
}
|
||||
slice.done(json);
|
||||
});
|
||||
};
|
||||
return {
|
||||
render: render,
|
||||
resize: render
|
||||
};
|
||||
}
|
||||
|
||||
module.exports = sankeyVis;
|
||||
@@ -1,5 +1,5 @@
|
||||
.sunburst text {
|
||||
text-rendering: optimizeLegibility;
|
||||
shape-rendering: crispEdges;
|
||||
}
|
||||
.sunburst path {
|
||||
stroke: #333;
|
||||
@@ -10,15 +10,11 @@
|
||||
fill: #000;
|
||||
pointer-events: none;
|
||||
}
|
||||
.sunburst .path-abs-percent {
|
||||
font-size: 3.5em;
|
||||
font-weight: 400;
|
||||
}
|
||||
.sunburst .path-cond-percent {
|
||||
font-size: 2em;
|
||||
.sunburst .path-percent {
|
||||
font-size: 4em;
|
||||
}
|
||||
.sunburst .path-metrics {
|
||||
font-size: 1.5em;
|
||||
font-size: 1.75em;
|
||||
}
|
||||
.sunburst .path-ratio {
|
||||
font-size: 1.2em;
|
||||
@@ -35,15 +31,9 @@
|
||||
.dashboard .sunburst text {
|
||||
font-size: 1em;
|
||||
}
|
||||
.dashboard .sunburst .path-abs-percent {
|
||||
.dashboard .sunburst .path-percent {
|
||||
font-size: 2.5em;
|
||||
}
|
||||
.dashboard .sunburst .path-cond-percent {
|
||||
font-size: 1.75em;
|
||||
}
|
||||
.dashboard .sunburst .path-metrics {
|
||||
font-size: 1em;
|
||||
}
|
||||
.dashboard .sunburst .path-ratio {
|
||||
font-size: 1em;
|
||||
}
|
||||
359
panoramix/assets/visualizations/sunburst.js
Normal file
359
panoramix/assets/visualizations/sunburst.js
Normal file
@@ -0,0 +1,359 @@
|
||||
var d3 = window.d3 || require('d3');
|
||||
var px = require('../javascripts/modules/panoramix.js');
|
||||
var wrapSvgText = require('../javascripts/modules/utils.js').wrapSvgText;
|
||||
|
||||
require('./sunburst.css');
|
||||
|
||||
// Modified from http://bl.ocks.org/kerryrodden/7090426
|
||||
function sunburstVis(slice) {
|
||||
var container = d3.select(slice.selector);
|
||||
|
||||
var render = function () {
|
||||
// vars with shared scope within this function
|
||||
var margin = { top: 10, right: 5, bottom: 10, left: 5 };
|
||||
var containerWidth = slice.width();
|
||||
var containerHeight = slice.height();
|
||||
var breadcrumbHeight = containerHeight * 0.085;
|
||||
var visWidth = containerWidth - margin.left - margin.right;
|
||||
var visHeight = containerHeight - margin.top - margin.bottom - breadcrumbHeight;
|
||||
var radius = Math.min(visWidth, visHeight) / 2;
|
||||
var colorByCategory = true; // color by category if primary/secondary metrics match
|
||||
|
||||
var maxBreadcrumbs, breadcrumbDims, // set based on data
|
||||
totalSize, // total size of all segments; set after loading the data.
|
||||
colorScale,
|
||||
breadcrumbs, vis, arcs, gMiddleText; // dom handles
|
||||
|
||||
// Helper + path gen functions
|
||||
var partition = d3.layout.partition()
|
||||
.size([2 * Math.PI, radius * radius])
|
||||
.value(function (d) { return d.m1; });
|
||||
|
||||
var arc = d3.svg.arc()
|
||||
.startAngle(function (d) {
|
||||
return d.x;
|
||||
})
|
||||
.endAngle(function (d) {
|
||||
return d.x + d.dx;
|
||||
})
|
||||
.innerRadius(function (d) {
|
||||
return Math.sqrt(d.y);
|
||||
})
|
||||
.outerRadius(function (d) {
|
||||
return Math.sqrt(d.y + d.dy);
|
||||
});
|
||||
|
||||
var f = d3.format(".3s");
|
||||
var fp = d3.format(".3p");
|
||||
|
||||
container.select("svg").remove();
|
||||
|
||||
var svg = container.append("svg:svg")
|
||||
.attr("width", containerWidth)
|
||||
.attr("height", containerHeight);
|
||||
|
||||
d3.json(slice.jsonEndpoint(), function (error, rawData) {
|
||||
if (error !== null) {
|
||||
slice.error(error.responseText);
|
||||
return '';
|
||||
}
|
||||
|
||||
createBreadcrumbs(rawData);
|
||||
createVisualization(rawData);
|
||||
|
||||
slice.done(rawData);
|
||||
});
|
||||
|
||||
function createBreadcrumbs(rawData) {
|
||||
var firstRowData = rawData.data[0];
|
||||
maxBreadcrumbs = (firstRowData.length - 2) + 1; // -2 bc row contains 2x metrics, +extra for %label and buffer
|
||||
|
||||
breadcrumbDims = {
|
||||
width: visWidth / maxBreadcrumbs,
|
||||
height: breadcrumbHeight *0.8, // more margin
|
||||
spacing: 3,
|
||||
tipTailWidth: 10
|
||||
};
|
||||
|
||||
breadcrumbs = svg.append("svg:g")
|
||||
.attr("class", "breadcrumbs")
|
||||
.attr("transform", "translate(" + margin.left + "," + margin.top + ")");
|
||||
|
||||
breadcrumbs.append("svg:text")
|
||||
.attr("class", "end-label");
|
||||
}
|
||||
|
||||
// Main function to draw and set up the visualization, once we have the data.
|
||||
function createVisualization(rawData) {
|
||||
var tree = buildHierarchy(rawData.data);
|
||||
|
||||
vis = svg.append("svg:g")
|
||||
.attr("class", "sunburst-vis")
|
||||
.attr("transform", "translate(" + (margin.left + (visWidth / 2)) + "," + (margin.top + breadcrumbHeight + (visHeight / 2)) + ")")
|
||||
.on("mouseleave", mouseleave);
|
||||
|
||||
arcs = vis.append("svg:g")
|
||||
.attr("id", "arcs");
|
||||
|
||||
gMiddleText = vis.append("svg:g")
|
||||
.attr("class", "center-label");
|
||||
|
||||
// Bounding circle underneath the sunburst, to make it easier to detect
|
||||
// when the mouse leaves the parent g.
|
||||
arcs.append("svg:circle")
|
||||
.attr("r", radius)
|
||||
.style("opacity", 0);
|
||||
|
||||
// For efficiency, filter nodes to keep only those large enough to see.
|
||||
var nodes = partition.nodes(tree)
|
||||
.filter(function (d) {
|
||||
return (d.dx > 0.005); // 0.005 radians = 0.29 degrees
|
||||
});
|
||||
|
||||
var ext;
|
||||
|
||||
if (rawData.form_data.metric !== rawData.form_data.secondary_metric) {
|
||||
colorByCategory = false;
|
||||
|
||||
ext = d3.extent(nodes, function (d) {
|
||||
return d.m2 / d.m1;
|
||||
});
|
||||
|
||||
colorScale = d3.scale.linear()
|
||||
.domain([ext[0], ext[0] + ((ext[1] - ext[0]) / 2), ext[1]])
|
||||
.range(["#00D1C1", "white", "#FFB400"]);
|
||||
}
|
||||
|
||||
var path = arcs.data([tree]).selectAll("path")
|
||||
.data(nodes)
|
||||
.enter().append("svg:path")
|
||||
.attr("display", function (d) {
|
||||
return d.depth ? null : "none";
|
||||
})
|
||||
.attr("d", arc)
|
||||
.attr("fill-rule", "evenodd")
|
||||
.style("fill", function (d) {
|
||||
return colorByCategory ? px.color.category21(d.name) : colorScale(d.m2 / d.m1);
|
||||
})
|
||||
.style("opacity", 1)
|
||||
.on("mouseenter", mouseenter);
|
||||
|
||||
// Get total size of the tree = value of root node from partition.
|
||||
totalSize = path.node().__data__.value;
|
||||
}
|
||||
|
||||
// Fade all but the current sequence, and show it in the breadcrumb trail.
|
||||
function mouseenter(d) {
|
||||
|
||||
var percentage = (d.m1 / totalSize).toPrecision(3);
|
||||
var percentageString = fp(percentage);
|
||||
var metricsMatch = Math.abs(d.m1 - d.m2) < 0.000001;
|
||||
|
||||
gMiddleText.selectAll("*").remove();
|
||||
|
||||
gMiddleText.append("text")
|
||||
.attr("class", "path-percent")
|
||||
.attr("y", "-10")
|
||||
.text(percentageString);
|
||||
|
||||
gMiddleText.append("text")
|
||||
.attr("class", "path-metrics")
|
||||
.attr("y", "25")
|
||||
.text("m1: " + f(d.m1) + (metricsMatch ? "" : ", m2: " + f(d.m2)));
|
||||
|
||||
gMiddleText.append("text")
|
||||
.attr("class", "path-ratio")
|
||||
.attr("y", "50")
|
||||
.text("m2/m1: " + fp(d.m2 / d.m1));
|
||||
|
||||
var sequenceArray = getAncestors(d);
|
||||
|
||||
// Reset and fade all the segments.
|
||||
arcs.selectAll("path")
|
||||
.style("stroke-width", null)
|
||||
.style("stroke", null)
|
||||
.style("opacity", 0.3);
|
||||
|
||||
// Then highlight only those that are an ancestor of the current segment.
|
||||
arcs.selectAll("path")
|
||||
.filter(function (node) {
|
||||
return (sequenceArray.indexOf(node) >= 0);
|
||||
})
|
||||
.style("opacity", 1)
|
||||
.style("stroke-width", "2px")
|
||||
.style("stroke", "#000");
|
||||
|
||||
updateBreadcrumbs(sequenceArray, percentageString);
|
||||
}
|
||||
|
||||
// Restore everything to full opacity when moving off the visualization.
|
||||
function mouseleave(d) {
|
||||
|
||||
// Hide the breadcrumb trail
|
||||
breadcrumbs.style("visibility", "hidden");
|
||||
|
||||
gMiddleText.selectAll("*").remove();
|
||||
|
||||
// Deactivate all segments during transition.
|
||||
arcs.selectAll("path").on("mouseenter", null);
|
||||
//gMiddleText.selectAll("*").remove();
|
||||
|
||||
// Transition each segment to full opacity and then reactivate it.
|
||||
arcs.selectAll("path")
|
||||
.transition()
|
||||
.duration(200)
|
||||
.style("opacity", 1)
|
||||
.style("stroke", null)
|
||||
.style("stroke-width", null)
|
||||
.each("end", function () {
|
||||
d3.select(this).on("mouseenter", mouseenter);
|
||||
});
|
||||
}
|
||||
|
||||
// Given a node in a partition layout, return an array of all of its ancestor
|
||||
// nodes, highest first, but excluding the root.
|
||||
function getAncestors(node) {
|
||||
var path = [];
|
||||
var current = node;
|
||||
while (current.parent) {
|
||||
path.unshift(current);
|
||||
current = current.parent;
|
||||
}
|
||||
return path;
|
||||
}
|
||||
|
||||
// Generate a string that describes the points of a breadcrumb polygon.
|
||||
function breadcrumbPoints(d, i) {
|
||||
var points = [];
|
||||
points.push("0,0");
|
||||
points.push(breadcrumbDims.width + ",0");
|
||||
points.push(breadcrumbDims.width + breadcrumbDims.tipTailWidth + "," + (breadcrumbDims.height / 2));
|
||||
points.push(breadcrumbDims.width+ "," + breadcrumbDims.height);
|
||||
points.push("0," + breadcrumbDims.height);
|
||||
if (i > 0) { // Leftmost breadcrumb; don't include 6th vertex.
|
||||
points.push(breadcrumbDims.tipTailWidth + "," + (breadcrumbDims.height / 2));
|
||||
}
|
||||
return points.join(" ");
|
||||
}
|
||||
|
||||
function updateBreadcrumbs(sequenceArray, percentageString) {
|
||||
var g = breadcrumbs.selectAll("g")
|
||||
.data(sequenceArray, function (d) {
|
||||
return d.name + d.depth;
|
||||
});
|
||||
|
||||
// Add breadcrumb and label for entering nodes.
|
||||
var entering = g.enter().append("svg:g");
|
||||
|
||||
entering.append("svg:polygon")
|
||||
.attr("points", breadcrumbPoints)
|
||||
.style("fill", function (d) {
|
||||
return colorByCategory ? px.color.category21(d.name) : colorScale(d.m2 / d.m1);
|
||||
});
|
||||
|
||||
entering.append("svg:text")
|
||||
.attr("x", (breadcrumbDims.width + breadcrumbDims.tipTailWidth) / 2)
|
||||
.attr("y", breadcrumbDims.height / 4)
|
||||
.attr("dy", "0.35em")
|
||||
.attr("class", "step-label")
|
||||
.text(function (d) { return d.name; })
|
||||
.call(wrapSvgText, breadcrumbDims.width, breadcrumbDims.height / 2);
|
||||
|
||||
// Set position for entering and updating nodes.
|
||||
g.attr("transform", function (d, i) {
|
||||
return "translate(" + i * (breadcrumbDims.width + breadcrumbDims.spacing) + ", 0)";
|
||||
});
|
||||
|
||||
// Remove exiting nodes.
|
||||
g.exit().remove();
|
||||
|
||||
// Now move and update the percentage at the end.
|
||||
breadcrumbs.select(".end-label")
|
||||
.attr("x", (sequenceArray.length + 0.5) * (breadcrumbDims.width + breadcrumbDims.spacing))
|
||||
.attr("y", breadcrumbDims.height / 2)
|
||||
.attr("dy", "0.35em")
|
||||
.text(percentageString);
|
||||
|
||||
// Make the breadcrumb trail visible, if it's hidden.
|
||||
breadcrumbs.style("visibility", null);
|
||||
}
|
||||
|
||||
function buildHierarchy(rows) {
|
||||
var root = {
|
||||
name: "root",
|
||||
children: []
|
||||
};
|
||||
for (var i = 0; i < rows.length; i++) {
|
||||
var row = rows[i];
|
||||
var m1 = Number(row[row.length - 2]);
|
||||
var m2 = Number(row[row.length - 1]);
|
||||
var levels = row.slice(0, row.length - 2);
|
||||
if (isNaN(m1)) { // e.g. if this is a header row
|
||||
continue;
|
||||
}
|
||||
var currentNode = root;
|
||||
for (var j = 0; j < levels.length; j++) {
|
||||
var children = currentNode.children;
|
||||
var nodeName = levels[j];
|
||||
// If the next node has the name "0", it will
|
||||
var isLeafNode = (j >= levels.length - 1) || levels[j+1] === 0;
|
||||
var childNode;
|
||||
|
||||
if (!isLeafNode) {
|
||||
// Not yet at the end of the sequence; move down the tree.
|
||||
var foundChild = false;
|
||||
for (var k = 0; k < children.length; k++) {
|
||||
if (children[k].name === nodeName) {
|
||||
childNode = children[k];
|
||||
foundChild = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
// If we don't already have a child node for this branch, create it.
|
||||
if (!foundChild) {
|
||||
childNode = {
|
||||
name: nodeName,
|
||||
children: []
|
||||
};
|
||||
children.push(childNode);
|
||||
}
|
||||
currentNode = childNode;
|
||||
} else if (nodeName !== 0) {
|
||||
// Reached the end of the sequence; create a leaf node.
|
||||
childNode = {
|
||||
name: nodeName,
|
||||
m1: m1,
|
||||
m2: m2
|
||||
};
|
||||
children.push(childNode);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function recurse(node) {
|
||||
if (node.children) {
|
||||
var sums;
|
||||
var m1 = 0;
|
||||
var m2 = 0;
|
||||
for (var i = 0; i < node.children.length; i++) {
|
||||
sums = recurse(node.children[i]);
|
||||
m1 += sums[0];
|
||||
m2 += sums[1];
|
||||
}
|
||||
node.m1 = m1;
|
||||
node.m2 = m2;
|
||||
}
|
||||
return [node.m1, node.m2];
|
||||
}
|
||||
recurse(root);
|
||||
return root;
|
||||
}
|
||||
};
|
||||
|
||||
return {
|
||||
render: render,
|
||||
resize: render
|
||||
};
|
||||
}
|
||||
|
||||
module.exports = sunburstVis;
|
||||
18
panoramix/assets/visualizations/table.css
Normal file
18
panoramix/assets/visualizations/table.css
Normal file
@@ -0,0 +1,18 @@
|
||||
.gridster .widget.table {
|
||||
overflow: auto !important;
|
||||
}
|
||||
|
||||
.widget.table td.filtered {
|
||||
background-color: #005a63;
|
||||
color: white;
|
||||
}
|
||||
|
||||
.table tr>th {
|
||||
padding: 1px 5px !important;
|
||||
font-size: small !important;
|
||||
}
|
||||
|
||||
.table tr>td {
|
||||
padding: 1px 5px !important;
|
||||
font-size: small !important;
|
||||
}
|
||||
124
panoramix/assets/visualizations/table.js
Normal file
124
panoramix/assets/visualizations/table.js
Normal file
@@ -0,0 +1,124 @@
|
||||
var $ = window.$ = require('jquery');
|
||||
var jQuery = window.jQuery = $;
|
||||
var d3 = require('d3');
|
||||
|
||||
require('./table.css');
|
||||
require('datatables');
|
||||
require('../node_modules/datatables-bootstrap3-plugin/media/css/datatables-bootstrap3.css');
|
||||
|
||||
function tableVis(slice) {
|
||||
var data = slice.data;
|
||||
var form_data = data.form_data;
|
||||
var f = d3.format('.3s');
|
||||
var fC = d3.format('0,000');
|
||||
|
||||
function refresh() {
|
||||
$.getJSON(slice.jsonEndpoint(), onSuccess).fail(onError);
|
||||
|
||||
function onError(xhr) {
|
||||
slice.error(xhr.responseText);
|
||||
}
|
||||
|
||||
function onSuccess(json) {
|
||||
var data = json.data;
|
||||
var metrics = json.form_data.metrics;
|
||||
|
||||
function col(c) {
|
||||
var arr = [];
|
||||
for (var i = 0; i < data.records.length; i++) {
|
||||
arr.push(json.data.records[i][c]);
|
||||
}
|
||||
return arr;
|
||||
}
|
||||
var maxes = {};
|
||||
for (var i = 0; i < metrics.length; i++) {
|
||||
maxes[metrics[i]] = d3.max(col(metrics[i]));
|
||||
}
|
||||
|
||||
var table = d3.select(slice.selector).append('table')
|
||||
.classed('dataframe dataframe table table-striped table-bordered table-condensed table-hover dataTable no-footer', true);
|
||||
|
||||
table.append('thead').append('tr')
|
||||
.selectAll('th')
|
||||
.data(data.columns).enter()
|
||||
.append('th')
|
||||
.text(function (d) {
|
||||
return d;
|
||||
});
|
||||
|
||||
table.append('tbody')
|
||||
.selectAll('tr')
|
||||
.data(data.records).enter()
|
||||
.append('tr')
|
||||
.selectAll('td')
|
||||
.data(function (row, i) {
|
||||
return data.columns.map(function (c) {
|
||||
return {
|
||||
col: c,
|
||||
val: row[c],
|
||||
isMetric: metrics.indexOf(c) >= 0
|
||||
};
|
||||
});
|
||||
}).enter()
|
||||
.append('td')
|
||||
.style('background-image', function (d) {
|
||||
if (d.isMetric) {
|
||||
var perc = Math.round((d.val / maxes[d.col]) * 100);
|
||||
return "linear-gradient(to right, lightgrey, lightgrey " + perc + "%, rgba(0,0,0,0) " + perc + "%";
|
||||
}
|
||||
})
|
||||
.attr('title', function (d) {
|
||||
if (!isNaN(d.val)) {
|
||||
return fC(d.val);
|
||||
}
|
||||
})
|
||||
.attr('data-sort', function (d) {
|
||||
if (d.isMetric) {
|
||||
return d.val;
|
||||
}
|
||||
})
|
||||
.on("click", function (d) {
|
||||
if (!d.isMetric) {
|
||||
var td = d3.select(this);
|
||||
if (td.classed('filtered')) {
|
||||
slice.removeFilter(d.col, [d.val]);
|
||||
d3.select(this).classed('filtered', false);
|
||||
} else {
|
||||
d3.select(this).classed('filtered', true);
|
||||
slice.addFilter(d.col, [d.val]);
|
||||
}
|
||||
}
|
||||
})
|
||||
.style("cursor", function (d) {
|
||||
if (!d.isMetric) {
|
||||
return 'pointer';
|
||||
}
|
||||
})
|
||||
.html(function (d) {
|
||||
if (d.isMetric) {
|
||||
return f(d.val);
|
||||
} else {
|
||||
return d.val;
|
||||
}
|
||||
});
|
||||
var datatable = slice.container.find('.dataTable').DataTable({
|
||||
paging: false,
|
||||
searching: form_data.include_search
|
||||
});
|
||||
// Sorting table by main column
|
||||
if (form_data.metrics.length > 0) {
|
||||
var main_metric = form_data.metrics[0];
|
||||
datatable.column(data.columns.indexOf(main_metric)).order('desc').draw();
|
||||
}
|
||||
slice.done(json);
|
||||
slice.container.parents('.widget').find('.tooltip').remove();
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
render: refresh,
|
||||
resize: function () {}
|
||||
};
|
||||
}
|
||||
|
||||
module.exports = tableVis;
|
||||
91
panoramix/assets/visualizations/word_cloud.js
Normal file
91
panoramix/assets/visualizations/word_cloud.js
Normal file
@@ -0,0 +1,91 @@
|
||||
var px = window.px || require('../javascripts/modules/panoramix.js');
|
||||
var d3 = window.d3 || require('d3');
|
||||
var cloudLayout = require('d3-cloud');
|
||||
|
||||
function wordCloudChart(slice) {
|
||||
var chart = d3.select(slice.selector);
|
||||
|
||||
function refresh() {
|
||||
d3.json(slice.jsonEndpoint(), function (error, json) {
|
||||
if (error !== null) {
|
||||
slice.error(error.responseText);
|
||||
return '';
|
||||
}
|
||||
var data = json.data;
|
||||
var range = [
|
||||
json.form_data.size_from,
|
||||
json.form_data.size_to
|
||||
];
|
||||
var rotation = json.form_data.rotation;
|
||||
var f_rotation;
|
||||
if (rotation === "square") {
|
||||
f_rotation = function () {
|
||||
return ~~(Math.random() * 2) * 90;
|
||||
};
|
||||
} else if (rotation === "flat") {
|
||||
f_rotation = function () {
|
||||
return 0;
|
||||
};
|
||||
} else {
|
||||
f_rotation = function () {
|
||||
return (~~(Math.random() * 6) - 3) * 30;
|
||||
};
|
||||
}
|
||||
var size = [slice.width(), slice.height()];
|
||||
|
||||
var scale = d3.scale.linear()
|
||||
.range(range)
|
||||
.domain(d3.extent(data, function (d) {
|
||||
return d.size;
|
||||
}));
|
||||
|
||||
var layout = cloudLayout()
|
||||
.size(size)
|
||||
.words(data)
|
||||
.padding(5)
|
||||
.rotate(f_rotation)
|
||||
.font("serif")
|
||||
.fontSize(function (d) {
|
||||
return scale(d.size);
|
||||
})
|
||||
.on("end", draw);
|
||||
|
||||
layout.start();
|
||||
|
||||
function draw(words) {
|
||||
chart.selectAll("*").remove();
|
||||
|
||||
chart.append("svg")
|
||||
.attr("width", layout.size()[0])
|
||||
.attr("height", layout.size()[1])
|
||||
.append("g")
|
||||
.attr("transform", "translate(" + layout.size()[0] / 2 + "," + layout.size()[1] / 2 + ")")
|
||||
.selectAll("text")
|
||||
.data(words)
|
||||
.enter().append("text")
|
||||
.style("font-size", function (d) {
|
||||
return d.size + "px";
|
||||
})
|
||||
.style("font-family", "Impact")
|
||||
.style("fill", function (d) {
|
||||
return px.color.category21(d.text);
|
||||
})
|
||||
.attr("text-anchor", "middle")
|
||||
.attr("transform", function (d) {
|
||||
return "translate(" + [d.x, d.y] + ") rotate(" + d.rotate + ")";
|
||||
})
|
||||
.text(function (d) {
|
||||
return d.text;
|
||||
});
|
||||
}
|
||||
slice.done(data);
|
||||
});
|
||||
}
|
||||
|
||||
return {
|
||||
render: refresh,
|
||||
resize: refresh
|
||||
};
|
||||
}
|
||||
|
||||
module.exports = wordCloudChart;
|
||||
110
panoramix/assets/visualizations/world_map.js
Normal file
110
panoramix/assets/visualizations/world_map.js
Normal file
@@ -0,0 +1,110 @@
|
||||
// JS
|
||||
var d3 = window.d3 || require('d3');
|
||||
//var Datamap = require('../vendor/datamaps/datamaps.all.js');
|
||||
var Datamap = require('datamaps');
|
||||
|
||||
// CSS
|
||||
require('./world_map.css');
|
||||
|
||||
function worldMapChart(slice) {
|
||||
var render = function () {
|
||||
var container = slice.container;
|
||||
var div = d3.select(slice.selector);
|
||||
|
||||
container.css('height', slice.height());
|
||||
|
||||
d3.json(slice.jsonEndpoint(), function (error, json) {
|
||||
var fd = json.form_data;
|
||||
|
||||
if (error !== null) {
|
||||
slice.error(error.responseText);
|
||||
return '';
|
||||
}
|
||||
var ext = d3.extent(json.data, function (d) {
|
||||
return d.m1;
|
||||
});
|
||||
var extRadius = d3.extent(json.data, function (d) {
|
||||
return d.m2;
|
||||
});
|
||||
var radiusScale = d3.scale.linear()
|
||||
.domain([extRadius[0], extRadius[1]])
|
||||
.range([1, fd.max_bubble_size]);
|
||||
|
||||
json.data.forEach(function (d) {
|
||||
d.radius = radiusScale(d.m2);
|
||||
});
|
||||
|
||||
var colorScale = d3.scale.linear()
|
||||
.domain([ext[0], ext[1]])
|
||||
.range(["#FFF", "black"]);
|
||||
|
||||
var d = {};
|
||||
for (var i = 0; i < json.data.length; i++) {
|
||||
var country = json.data[i];
|
||||
country.fillColor = colorScale(country.m1);
|
||||
d[country.country] = country;
|
||||
}
|
||||
|
||||
var f = d3.format('.3s');
|
||||
|
||||
container.show();
|
||||
|
||||
var map = new Datamap({
|
||||
element: slice.container.get(0),
|
||||
data: json.data,
|
||||
fills: {
|
||||
defaultFill: '#ddd'
|
||||
},
|
||||
geographyConfig: {
|
||||
popupOnHover: true,
|
||||
highlightOnHover: true,
|
||||
borderWidth: 1,
|
||||
borderColor: '#fff',
|
||||
highlightBorderColor: '#fff',
|
||||
highlightFillColor: '#005a63',
|
||||
highlightBorderWidth: 1,
|
||||
popupTemplate: function (geo, data) {
|
||||
return '<div class="hoverinfo"><strong>' + data.name + '</strong><br>' + f(data.m1) + '</div>';
|
||||
}
|
||||
},
|
||||
bubblesConfig: {
|
||||
borderWidth: 1,
|
||||
borderOpacity: 1,
|
||||
borderColor: '#005a63',
|
||||
popupOnHover: true,
|
||||
radius: null,
|
||||
popupTemplate: function (geo, data) {
|
||||
return '<div class="hoverinfo"><strong>' + data.name + '</strong><br>' + f(data.m2) + '</div>';
|
||||
},
|
||||
fillOpacity: 0.5,
|
||||
animate: true,
|
||||
highlightOnHover: true,
|
||||
highlightFillColor: '#005a63',
|
||||
highlightBorderColor: 'black',
|
||||
highlightBorderWidth: 2,
|
||||
highlightBorderOpacity: 1,
|
||||
highlightFillOpacity: 0.85,
|
||||
exitDelay: 100,
|
||||
key: JSON.stringify
|
||||
}
|
||||
});
|
||||
|
||||
map.updateChoropleth(d);
|
||||
|
||||
if (fd.show_bubbles) {
|
||||
map.bubbles(json.data);
|
||||
div.selectAll("circle.datamaps-bubble").style('fill', '#005a63');
|
||||
}
|
||||
|
||||
slice.done(json);
|
||||
|
||||
});
|
||||
};
|
||||
|
||||
return {
|
||||
render: render,
|
||||
resize: render
|
||||
};
|
||||
}
|
||||
|
||||
module.exports = worldMapChart;
|
||||
51
panoramix/assets/webpack.config.js
Normal file
51
panoramix/assets/webpack.config.js
Normal file
@@ -0,0 +1,51 @@
|
||||
var path = require('path');
|
||||
var APP_DIR = path.resolve(__dirname, './'); // input
|
||||
var BUILD_DIR = path.resolve(__dirname, './javascripts/dist'); // output
|
||||
|
||||
var config = {
|
||||
// for now generate one compiled js file per entry point / html page
|
||||
entry: {
|
||||
'css-theme': APP_DIR + '/javascripts/css-theme.js',
|
||||
dashboard: APP_DIR + '/javascripts/dashboard.js',
|
||||
explore: APP_DIR + '/javascripts/explore.js',
|
||||
featured: APP_DIR + '/javascripts/featured.js',
|
||||
sql: APP_DIR + '/javascripts/sql.js',
|
||||
standalone: APP_DIR + '/javascripts/standalone.js'
|
||||
},
|
||||
output: {
|
||||
path: BUILD_DIR,
|
||||
filename: '[name].entry.js'
|
||||
},
|
||||
module: {
|
||||
loaders: [
|
||||
{
|
||||
test: /\.jsx?/,
|
||||
include: APP_DIR,
|
||||
exclude: APP_DIR + '/node_modules',
|
||||
loader: 'babel'
|
||||
},
|
||||
/* for require('*.css') */
|
||||
{
|
||||
test: /\.css$/,
|
||||
include: APP_DIR,
|
||||
loader: "style-loader!css-loader"
|
||||
},
|
||||
/* for css linking images */
|
||||
{ test: /\.png$/, loader: "url-loader?limit=100000" },
|
||||
{ test: /\.jpg$/, loader: "file-loader" },
|
||||
{ test: /\.gif$/, loader: "file-loader" },
|
||||
/* for font-awesome */
|
||||
{ test: /\.woff(2)?(\?v=[0-9]\.[0-9]\.[0-9])?$/, loader: "url-loader?limit=10000&minetype=application/font-woff" },
|
||||
{ test: /\.(ttf|eot|svg)(\?v=[0-9]\.[0-9]\.[0-9])?$/, loader: "file-loader" },
|
||||
/* for require('*.less') */
|
||||
{
|
||||
test: /\.less$/,
|
||||
include: APP_DIR,
|
||||
loader: "style!css!less"
|
||||
}
|
||||
]
|
||||
},
|
||||
plugins: []
|
||||
};
|
||||
|
||||
module.exports = config;
|
||||
91
panoramix/bin/panoramix
Executable file
91
panoramix/bin/panoramix
Executable file
@@ -0,0 +1,91 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
from datetime import datetime
|
||||
import logging
|
||||
from subprocess import Popen
|
||||
|
||||
from flask.ext.script import Manager
|
||||
from panoramix import app
|
||||
from flask.ext.migrate import MigrateCommand
|
||||
import panoramix
|
||||
from panoramix import db
|
||||
from panoramix import data, utils
|
||||
|
||||
config = app.config
|
||||
|
||||
manager = Manager(app)
|
||||
manager.add_command('db', MigrateCommand)
|
||||
|
||||
|
||||
@manager.option(
|
||||
'-d', '--debug', action='store_true',
|
||||
help="Start the web server in debug mode")
|
||||
@manager.option(
|
||||
'-p', '--port', default=config.get("PANORAMIX_WEBSERVER_PORT"),
|
||||
help="Specify the port on which to run the web server")
|
||||
@manager.option(
|
||||
'-w', '--workers', default=config.get("PANORAMIX_WORKERS", 16),
|
||||
help="Number of gunicorn web server workers to fire up")
|
||||
@manager.option(
|
||||
'-t', '--timeout', default=config.get("PANORAMIX_WEBSERVER_TIMEOUT"),
|
||||
help="Specify the timeout (seconds) for the gunicorn web server")
|
||||
def runserver(debug, port, timeout, workers):
|
||||
"""Starts a Panoramix web server"""
|
||||
debug = debug or config.get("DEBUG")
|
||||
if debug:
|
||||
app.run(
|
||||
host='0.0.0.0',
|
||||
port=int(port),
|
||||
debug=True)
|
||||
else:
|
||||
cmd = (
|
||||
"gunicorn "
|
||||
"-w {workers} "
|
||||
"--timeout {timeout} "
|
||||
"-b 0.0.0.0:{port} "
|
||||
"panoramix:app").format(**locals())
|
||||
print("Starting server with command: " + cmd)
|
||||
Popen(cmd, shell=True).wait()
|
||||
|
||||
@manager.command
|
||||
def init():
|
||||
"""Inits the Panoramix application"""
|
||||
utils.init(panoramix)
|
||||
|
||||
@manager.option(
|
||||
'-s', '--sample', action='store_true',
|
||||
help="Only load 1000 rows (faster, used for testing)")
|
||||
def load_examples(sample):
|
||||
"""Loads a set of Slices and Dashboards and a supporting dataset """
|
||||
print("Loading examples into {}".format(db))
|
||||
|
||||
data.load_css_templates()
|
||||
|
||||
print("Loading [World Bank's Health Nutrition and Population Stats]")
|
||||
data.load_world_bank_health_n_pop()
|
||||
|
||||
print("Loading [Birth names]")
|
||||
data.load_birth_names()
|
||||
|
||||
@manager.command
|
||||
def refresh_druid():
|
||||
"""Refresh all druid datasources"""
|
||||
session = db.session()
|
||||
from panoramix import models
|
||||
for cluster in session.query(models.DruidCluster).all():
|
||||
try:
|
||||
cluster.refresh_datasources()
|
||||
except Exception as e:
|
||||
print(
|
||||
"Error while processing cluster '{}'\n{}".format(
|
||||
cluster, str(e)))
|
||||
logging.exception(e)
|
||||
cluster.metadata_last_refreshed = datetime.now()
|
||||
print(
|
||||
"Refreshed metadata from cluster "
|
||||
"[" + cluster.cluster_name + "]")
|
||||
session.commit()
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
manager.run()
|
||||
118
panoramix/config.py
Normal file
118
panoramix/config.py
Normal file
@@ -0,0 +1,118 @@
|
||||
"""
|
||||
All configuration in this file can be overridden by providing a local_config
|
||||
in your PYTHONPATH.
|
||||
|
||||
There' a ``from local_config import *`` at the end of this file.
|
||||
"""
|
||||
import os
|
||||
from flask_appbuilder.security.manager import AUTH_DB
|
||||
# from flask_appbuilder.security.manager import (
|
||||
# AUTH_OID, AUTH_REMOTE_USER, AUTH_DB, AUTH_LDAP, AUTH_OAUTH)
|
||||
BASE_DIR = os.path.abspath(os.path.dirname(__file__))
|
||||
from dateutil import tz
|
||||
|
||||
|
||||
# ---------------------------------------------------------
|
||||
# Panoramix specifix config
|
||||
# ---------------------------------------------------------
|
||||
ROW_LIMIT = 50000
|
||||
WEBSERVER_THREADS = 8
|
||||
|
||||
PANORAMIX_WEBSERVER_PORT = 8088
|
||||
PANORAMIX_WEBSERVER_TIMEOUT = 60
|
||||
|
||||
CUSTOM_SECURITY_MANAGER = None
|
||||
# ---------------------------------------------------------
|
||||
|
||||
# Your App secret key
|
||||
SECRET_KEY = '\2\1thisismyscretkey\1\2\e\y\y\h' # noqa
|
||||
|
||||
# The SQLAlchemy connection string.
|
||||
SQLALCHEMY_DATABASE_URI = 'sqlite:////tmp/panoramix.db'
|
||||
# SQLALCHEMY_DATABASE_URI = 'mysql://myapp@localhost/myapp'
|
||||
# SQLALCHEMY_DATABASE_URI = 'postgresql://root:password@localhost/myapp'
|
||||
|
||||
# Flask-WTF flag for CSRF
|
||||
CSRF_ENABLED = True
|
||||
|
||||
# Whether to run the web server in debug mode or not
|
||||
DEBUG = True
|
||||
|
||||
# Whether to show the stacktrace on 500 error
|
||||
SHOW_STACKTRACE = True
|
||||
|
||||
# ------------------------------
|
||||
# GLOBALS FOR APP Builder
|
||||
# ------------------------------
|
||||
# Uncomment to setup Your App name
|
||||
APP_NAME = "Panoramix"
|
||||
|
||||
# Uncomment to setup Setup an App icon
|
||||
# APP_ICON = "/static/img/something.png"
|
||||
|
||||
# Druid query timezone
|
||||
# tz.tzutc() : Using utc timezone
|
||||
# tz.tzlocal() : Using local timezone
|
||||
# other tz can be overridden by providing a local_config
|
||||
DRUID_TZ = tz.tzutc()
|
||||
|
||||
# ----------------------------------------------------
|
||||
# AUTHENTICATION CONFIG
|
||||
# ----------------------------------------------------
|
||||
# The authentication type
|
||||
# AUTH_OID : Is for OpenID
|
||||
# AUTH_DB : Is for database (username/password()
|
||||
# AUTH_LDAP : Is for LDAP
|
||||
# AUTH_REMOTE_USER : Is for using REMOTE_USER from web server
|
||||
AUTH_TYPE = AUTH_DB
|
||||
|
||||
# Uncomment to setup Full admin role name
|
||||
# AUTH_ROLE_ADMIN = 'Admin'
|
||||
|
||||
# Uncomment to setup Public role name, no authentication needed
|
||||
# AUTH_ROLE_PUBLIC = 'Public'
|
||||
|
||||
# Will allow user self registration
|
||||
# AUTH_USER_REGISTRATION = True
|
||||
|
||||
# The default user self registration role
|
||||
# AUTH_USER_REGISTRATION_ROLE = "Public"
|
||||
|
||||
# When using LDAP Auth, setup the ldap server
|
||||
# AUTH_LDAP_SERVER = "ldap://ldapserver.new"
|
||||
|
||||
# Uncomment to setup OpenID providers example for OpenID authentication
|
||||
# OPENID_PROVIDERS = [
|
||||
# { 'name': 'Yahoo', 'url': 'https://me.yahoo.com' },
|
||||
# { 'name': 'AOL', 'url': 'http://openid.aol.com/<username>' },
|
||||
# { 'name': 'Flickr', 'url': 'http://www.flickr.com/<username>' },
|
||||
# { 'name': 'MyOpenID', 'url': 'https://www.myopenid.com' }]
|
||||
# ---------------------------------------------------
|
||||
# Babel config for translations
|
||||
# ---------------------------------------------------
|
||||
# Setup default language
|
||||
BABEL_DEFAULT_LOCALE = 'en'
|
||||
# Your application default translation path
|
||||
BABEL_DEFAULT_FOLDER = 'translations'
|
||||
# The allowed translation for you app
|
||||
LANGUAGES = {
|
||||
'en': {'flag': 'us', 'name': 'English'},
|
||||
}
|
||||
# ---------------------------------------------------
|
||||
# Image and file configuration
|
||||
# ---------------------------------------------------
|
||||
# The file upload folder, when using models with files
|
||||
UPLOAD_FOLDER = BASE_DIR + '/app/static/uploads/'
|
||||
|
||||
# The image upload folder, when using models with images
|
||||
IMG_UPLOAD_FOLDER = BASE_DIR + '/app/static/uploads/'
|
||||
|
||||
# The image upload url, when using models with images
|
||||
IMG_UPLOAD_URL = '/static/uploads/'
|
||||
# Setup image size default is (300, 200, True)
|
||||
# IMG_SIZE = (300, 200, True)
|
||||
|
||||
try:
|
||||
from panoramix_config import * # noqa
|
||||
except Exception:
|
||||
pass
|
||||
624
panoramix/data/__init__.py
Normal file
624
panoramix/data/__init__.py
Normal file
@@ -0,0 +1,624 @@
|
||||
"""Loads datasets, dashboards and slices in a new panoramix instance"""
|
||||
|
||||
import gzip
|
||||
import json
|
||||
import os
|
||||
import textwrap
|
||||
|
||||
import pandas as pd
|
||||
from sqlalchemy import String, DateTime
|
||||
|
||||
from panoramix import app, db, models, utils
|
||||
|
||||
# Shortcuts
|
||||
DB = models.Database
|
||||
Slice = models.Slice
|
||||
TBL = models.SqlaTable
|
||||
Dash = models.Dashboard
|
||||
|
||||
config = app.config
|
||||
|
||||
DATA_FOLDER = os.path.join(config.get("BASE_DIR"), 'data')
|
||||
|
||||
|
||||
def get_or_create_db(session):
|
||||
print("Creating database reference")
|
||||
dbobj = session.query(DB).filter_by(database_name='main').first()
|
||||
if not dbobj:
|
||||
dbobj = DB(database_name="main")
|
||||
print(config.get("SQLALCHEMY_DATABASE_URI"))
|
||||
dbobj.sqlalchemy_uri = config.get("SQLALCHEMY_DATABASE_URI")
|
||||
session.add(dbobj)
|
||||
session.commit()
|
||||
return dbobj
|
||||
|
||||
|
||||
def merge_slice(slc):
|
||||
o = db.session.query(Slice).filter_by(slice_name=slc.slice_name).first()
|
||||
if o:
|
||||
db.session.delete(o)
|
||||
db.session.add(slc)
|
||||
db.session.commit()
|
||||
|
||||
|
||||
def get_slice_json(defaults, **kwargs):
|
||||
d = defaults.copy()
|
||||
d.update(kwargs)
|
||||
return json.dumps(d, indent=4, sort_keys=True)
|
||||
|
||||
|
||||
def load_world_bank_health_n_pop():
|
||||
"""Loads the world bank health dataset, slices and a dashboard"""
|
||||
tbl_name = 'wb_health_population'
|
||||
with gzip.open(os.path.join(DATA_FOLDER, 'countries.json.gz')) as f:
|
||||
pdf = pd.read_json(f)
|
||||
pdf.columns = [col.replace('.', '_') for col in pdf.columns]
|
||||
pdf.year = pd.to_datetime(pdf.year)
|
||||
pdf.to_sql(
|
||||
tbl_name,
|
||||
db.engine,
|
||||
if_exists='replace',
|
||||
chunksize=500,
|
||||
dtype={
|
||||
'year': DateTime(),
|
||||
'country_code': String(3),
|
||||
'country_name': String(255),
|
||||
'region': String(255),
|
||||
},
|
||||
index=False)
|
||||
|
||||
print("Creating table [wb_health_population] reference")
|
||||
tbl = db.session.query(TBL).filter_by(table_name=tbl_name).first()
|
||||
if not tbl:
|
||||
tbl = TBL(table_name=tbl_name)
|
||||
tbl.description = utils.readfile(os.path.join(DATA_FOLDER, 'countries.md'))
|
||||
tbl.main_dttm_col = 'year'
|
||||
tbl.is_featured = True
|
||||
tbl.database = get_or_create_db(db.session)
|
||||
db.session.merge(tbl)
|
||||
db.session.commit()
|
||||
tbl.fetch_metadata()
|
||||
|
||||
defaults = {
|
||||
"compare_lag": "10",
|
||||
"compare_suffix": "o10Y",
|
||||
"datasource_id": "1",
|
||||
"datasource_name": "birth_names",
|
||||
"datasource_type": "table",
|
||||
"limit": "25",
|
||||
"granularity": "year",
|
||||
"groupby": [],
|
||||
"metric": 'sum__SP_POP_TOTL',
|
||||
"metrics": ["sum__SP_POP_TOTL"],
|
||||
"row_limit": config.get("ROW_LIMIT"),
|
||||
"since": "2014-01-01",
|
||||
"until": "2014-01-01",
|
||||
"where": "",
|
||||
"markup_type": "markdown",
|
||||
"country_fieldtype": "cca3",
|
||||
"secondary_metric": "sum__SP_POP_TOTL",
|
||||
"entity": "country_code",
|
||||
"show_bubbles": "y",
|
||||
}
|
||||
|
||||
print("Creating slices")
|
||||
slices = [
|
||||
Slice(
|
||||
slice_name="Region Filter",
|
||||
viz_type='filter_box',
|
||||
datasource_type='table',
|
||||
table=tbl,
|
||||
params=get_slice_json(
|
||||
defaults,
|
||||
viz_type='filter_box',
|
||||
groupby=['region'],
|
||||
)),
|
||||
Slice(
|
||||
slice_name="World's Population",
|
||||
viz_type='big_number',
|
||||
datasource_type='table',
|
||||
table=tbl,
|
||||
params=get_slice_json(
|
||||
defaults,
|
||||
since='2000',
|
||||
viz_type='big_number',
|
||||
compare_lag="10",
|
||||
metric='sum__SP_POP_TOTL',
|
||||
compare_suffix="over 10Y")),
|
||||
Slice(
|
||||
slice_name="Most Populated Countries",
|
||||
viz_type='table',
|
||||
datasource_type='table',
|
||||
table=tbl,
|
||||
params=get_slice_json(
|
||||
defaults,
|
||||
viz_type='table',
|
||||
metrics=["sum__SP_POP_TOTL"],
|
||||
groupby=['country_name'])),
|
||||
Slice(
|
||||
slice_name="Growth Rate",
|
||||
viz_type='line',
|
||||
datasource_type='table',
|
||||
table=tbl,
|
||||
params=get_slice_json(
|
||||
defaults,
|
||||
viz_type='line',
|
||||
since="1960-01-01",
|
||||
metrics=["sum__SP_POP_TOTL"],
|
||||
num_period_compare="10",
|
||||
groupby=['country_name'])),
|
||||
Slice(
|
||||
slice_name="% Rural",
|
||||
viz_type='world_map',
|
||||
datasource_type='table',
|
||||
table=tbl,
|
||||
params=get_slice_json(
|
||||
defaults,
|
||||
viz_type='world_map',
|
||||
metric= "sum__SP_RUR_TOTL_ZS",
|
||||
num_period_compare="10",)),
|
||||
Slice(
|
||||
slice_name="Life Expexctancy VS Rural %",
|
||||
viz_type='bubble',
|
||||
datasource_type='table',
|
||||
table=tbl,
|
||||
params=get_slice_json(
|
||||
defaults,
|
||||
viz_type='bubble',
|
||||
since= "2011-01-01",
|
||||
until= "2011-01-01",
|
||||
series="region",
|
||||
limit="0",
|
||||
entity="country_name",
|
||||
x="sum__SP_RUR_TOTL_ZS",
|
||||
y="sum__SP_DYN_LE00_IN",
|
||||
size="sum__SP_POP_TOTL",
|
||||
max_bubble_size="50",
|
||||
flt_col_1="country_code",
|
||||
flt_op_1= "not in",
|
||||
flt_eq_1="TCA,MNP,DMA,MHL,MCO,SXM,CYM,TUV,IMY,KNA,ASM,ADO,AMA,PLW",
|
||||
num_period_compare="10",)),
|
||||
Slice(
|
||||
slice_name="Rural Breakdown",
|
||||
viz_type='sunburst',
|
||||
datasource_type='table',
|
||||
table=tbl,
|
||||
params=get_slice_json(
|
||||
defaults,
|
||||
viz_type='sunburst',
|
||||
groupby=["region", "country_name"],
|
||||
secondary_metric="sum__SP_RUR_TOTL",
|
||||
since= "2011-01-01",
|
||||
until= "2011-01-01",)),
|
||||
Slice(
|
||||
slice_name="World's Pop Growth",
|
||||
viz_type='area',
|
||||
datasource_type='table',
|
||||
table=tbl,
|
||||
params=get_slice_json(
|
||||
defaults,
|
||||
since="1960-01-01",
|
||||
until="now",
|
||||
viz_type='area',
|
||||
groupby=["region"],)),
|
||||
]
|
||||
for slc in slices:
|
||||
merge_slice(slc)
|
||||
|
||||
print("Creating a World's Health Bank dashboard")
|
||||
dash_name = "World's Health Bank Dashboard"
|
||||
dash = db.session.query(Dash).filter_by(dashboard_title=dash_name).first()
|
||||
|
||||
if dash:
|
||||
db.session.delete(dash)
|
||||
js = """\
|
||||
[
|
||||
{
|
||||
"size_y": 1,
|
||||
"size_x": 3,
|
||||
"col": 1,
|
||||
"slice_id": "269",
|
||||
"row": 1
|
||||
},
|
||||
{
|
||||
"size_y": 3,
|
||||
"size_x": 3,
|
||||
"col": 1,
|
||||
"slice_id": "270",
|
||||
"row": 2
|
||||
},
|
||||
{
|
||||
"size_y": 7,
|
||||
"size_x": 3,
|
||||
"col": 10,
|
||||
"slice_id": "271",
|
||||
"row": 1
|
||||
},
|
||||
{
|
||||
"size_y": 3,
|
||||
"size_x": 6,
|
||||
"col": 1,
|
||||
"slice_id": "272",
|
||||
"row": 5
|
||||
},
|
||||
{
|
||||
"size_y": 4,
|
||||
"size_x": 6,
|
||||
"col": 4,
|
||||
"slice_id": "273",
|
||||
"row": 1
|
||||
},
|
||||
{
|
||||
"size_y": 4,
|
||||
"size_x": 6,
|
||||
"col": 7,
|
||||
"slice_id": "274",
|
||||
"row": 8
|
||||
},
|
||||
{
|
||||
"size_y": 3,
|
||||
"size_x": 3,
|
||||
"col": 7,
|
||||
"slice_id": "275",
|
||||
"row": 5
|
||||
},
|
||||
{
|
||||
"size_y": 4,
|
||||
"size_x": 6,
|
||||
"col": 1,
|
||||
"slice_id": "276",
|
||||
"row": 8
|
||||
}
|
||||
]
|
||||
"""
|
||||
l = json.loads(js)
|
||||
for i, pos in enumerate(l):
|
||||
pos['slice_id'] = str(slices[i].id)
|
||||
dash = Dash(
|
||||
dashboard_title=dash_name,
|
||||
position_json=json.dumps(l, indent=4),
|
||||
slug="world_health",
|
||||
)
|
||||
for s in slices:
|
||||
dash.slices.append(s)
|
||||
db.session.commit()
|
||||
|
||||
|
||||
def load_css_templates():
|
||||
"""Loads 2 css templates to demonstrate the feature"""
|
||||
print('Creating default CSS templates')
|
||||
CSS = models.CssTemplate
|
||||
|
||||
obj = db.session.query(CSS).filter_by(template_name='Flat').first()
|
||||
if not obj:
|
||||
obj = CSS(template_name="Flat")
|
||||
css = textwrap.dedent("""\
|
||||
.gridster li.widget {
|
||||
transition: background-color 0.5s ease;
|
||||
background-color: #FAFAFA;
|
||||
border: 1px solid #CCC;
|
||||
overflow: hidden;
|
||||
box-shadow: none;
|
||||
border-radius: 0px;
|
||||
}
|
||||
.gridster li.widget:hover {
|
||||
border: 1px solid #000;
|
||||
background-color: #EAEAEA;
|
||||
}
|
||||
.navbar {
|
||||
transition: opacity 0.5s ease;
|
||||
opacity: 0.05;
|
||||
}
|
||||
.navbar:hover {
|
||||
opacity: 1;
|
||||
}
|
||||
.chart-header .header{
|
||||
font-weight: normal;
|
||||
font-size: 12px;
|
||||
}
|
||||
/*
|
||||
var bnbColors = [
|
||||
//rausch hackb kazan babu lima beach tirol
|
||||
'#ff5a5f', '#7b0051', '#007A87', '#00d1c1', '#8ce071', '#ffb400', '#b4a76c',
|
||||
'#ff8083', '#cc0086', '#00a1b3', '#00ffeb', '#bbedab', '#ffd266', '#cbc29a',
|
||||
'#ff3339', '#ff1ab1', '#005c66', '#00b3a5', '#55d12e', '#b37e00', '#988b4e',
|
||||
];
|
||||
*/
|
||||
""")
|
||||
obj.css = css
|
||||
db.session.merge(obj)
|
||||
db.session.commit()
|
||||
|
||||
obj = (
|
||||
db.session.query(CSS).filter_by(template_name='Courier Black').first())
|
||||
if not obj:
|
||||
obj = CSS(template_name="Courier Black")
|
||||
css = textwrap.dedent("""\
|
||||
.gridster li.widget {
|
||||
transition: background-color 0.5s ease;
|
||||
background-color: #EEE;
|
||||
border: 2px solid #444;
|
||||
overflow: hidden;
|
||||
border-radius: 15px;
|
||||
box-shadow: none;
|
||||
}
|
||||
h2 {
|
||||
color: white;
|
||||
font-size: 52px;
|
||||
}
|
||||
.navbar {
|
||||
box-shadow: none;
|
||||
}
|
||||
.gridster li.widget:hover {
|
||||
border: 2px solid #000;
|
||||
background-color: #EAEAEA;
|
||||
}
|
||||
.navbar {
|
||||
transition: opacity 0.5s ease;
|
||||
opacity: 0.05;
|
||||
}
|
||||
.navbar:hover {
|
||||
opacity: 1;
|
||||
}
|
||||
.chart-header .header{
|
||||
font-weight: normal;
|
||||
font-size: 12px;
|
||||
}
|
||||
.nvd3 text {
|
||||
font-size: 12px;
|
||||
font-family: inherit;
|
||||
}
|
||||
body{
|
||||
background: #000;
|
||||
font-family: Courier, Monaco, monospace;;
|
||||
}
|
||||
/*
|
||||
var bnbColors = [
|
||||
//rausch hackb kazan babu lima beach tirol
|
||||
'#ff5a5f', '#7b0051', '#007A87', '#00d1c1', '#8ce071', '#ffb400', '#b4a76c',
|
||||
'#ff8083', '#cc0086', '#00a1b3', '#00ffeb', '#bbedab', '#ffd266', '#cbc29a',
|
||||
'#ff3339', '#ff1ab1', '#005c66', '#00b3a5', '#55d12e', '#b37e00', '#988b4e',
|
||||
];
|
||||
*/
|
||||
""")
|
||||
obj.css = css
|
||||
db.session.merge(obj)
|
||||
db.session.commit()
|
||||
|
||||
|
||||
def load_birth_names():
|
||||
with gzip.open(os.path.join(DATA_FOLDER, 'birth_names.json.gz')) as f:
|
||||
pdf = pd.read_json(f)
|
||||
pdf.ds = pd.to_datetime(pdf.ds, unit='ms')
|
||||
pdf.to_sql(
|
||||
'birth_names',
|
||||
db.engine,
|
||||
if_exists='replace',
|
||||
chunksize=500,
|
||||
dtype={
|
||||
'ds': DateTime,
|
||||
'gender': String(16),
|
||||
'state': String(10),
|
||||
'name': String(255),
|
||||
},
|
||||
index=False)
|
||||
l = []
|
||||
print("Done loading table!")
|
||||
print("-" * 80)
|
||||
|
||||
print("Creating table reference")
|
||||
obj = db.session.query(TBL).filter_by(table_name='birth_names').first()
|
||||
if not obj:
|
||||
obj = TBL(table_name = 'birth_names')
|
||||
obj.main_dttm_col = 'ds'
|
||||
obj.database = get_or_create_db(db.session)
|
||||
obj.is_featured = True
|
||||
db.session.merge(obj)
|
||||
db.session.commit()
|
||||
obj.fetch_metadata()
|
||||
tbl = obj
|
||||
|
||||
defaults = {
|
||||
"compare_lag": "10",
|
||||
"compare_suffix": "o10Y",
|
||||
"datasource_id": "1",
|
||||
"datasource_name": "birth_names",
|
||||
"datasource_type": "table",
|
||||
"flt_op_1": "in",
|
||||
"limit": "25",
|
||||
"granularity": "ds",
|
||||
"groupby": [],
|
||||
"metric": 'sum__num',
|
||||
"metrics": ["sum__num"],
|
||||
"row_limit": config.get("ROW_LIMIT"),
|
||||
"since": "100 years ago",
|
||||
"until": "now",
|
||||
"viz_type": "table",
|
||||
"where": "",
|
||||
"markup_type": "markdown",
|
||||
}
|
||||
|
||||
print("Creating some slices")
|
||||
slices = [
|
||||
Slice(
|
||||
slice_name="Girls",
|
||||
viz_type='table',
|
||||
datasource_type='table',
|
||||
table=tbl,
|
||||
params=get_slice_json(
|
||||
defaults,
|
||||
groupby=['name'],
|
||||
flt_col_1='gender',
|
||||
flt_eq_1="girl", row_limit=50)),
|
||||
Slice(
|
||||
slice_name="Boys",
|
||||
viz_type='table',
|
||||
datasource_type='table',
|
||||
table=tbl,
|
||||
params=get_slice_json(
|
||||
defaults,
|
||||
groupby=['name'],
|
||||
flt_col_1='gender',
|
||||
flt_eq_1="boy",
|
||||
row_limit=50)),
|
||||
Slice(
|
||||
slice_name="Participants",
|
||||
viz_type='big_number',
|
||||
datasource_type='table',
|
||||
table=tbl,
|
||||
params=get_slice_json(
|
||||
defaults,
|
||||
viz_type="big_number", granularity="ds",
|
||||
compare_lag="5", compare_suffix="over 5Y")),
|
||||
Slice(
|
||||
slice_name="Genders",
|
||||
viz_type='pie',
|
||||
datasource_type='table',
|
||||
table=tbl,
|
||||
params=get_slice_json(
|
||||
defaults,
|
||||
viz_type="pie", groupby=['gender'])),
|
||||
Slice(
|
||||
slice_name="Genders by State",
|
||||
viz_type='dist_bar',
|
||||
datasource_type='table',
|
||||
table=tbl,
|
||||
params=get_slice_json(
|
||||
defaults,
|
||||
flt_eq_1="other", viz_type="dist_bar",
|
||||
metrics=['sum__sum_girls', 'sum__sum_boys'],
|
||||
groupby=['state'], flt_op_1='not in', flt_col_1='state')),
|
||||
Slice(
|
||||
slice_name="Trends",
|
||||
viz_type='line',
|
||||
datasource_type='table',
|
||||
table=tbl,
|
||||
params=get_slice_json(
|
||||
defaults,
|
||||
viz_type="line", groupby=['name'],
|
||||
granularity='ds', rich_tooltip='y', show_legend='y')),
|
||||
Slice(
|
||||
slice_name="Title",
|
||||
viz_type='markup',
|
||||
datasource_type='table',
|
||||
table=tbl,
|
||||
params=get_slice_json(
|
||||
defaults,
|
||||
viz_type="markup", markup_type="html",
|
||||
code="""\
|
||||
<div style="text-align:center">
|
||||
<h1>Birth Names Dashboard</h1>
|
||||
<p>
|
||||
The source dataset came from
|
||||
<a href="https://github.com/hadley/babynames">[here]</a>
|
||||
</p>
|
||||
<img src="http://monblog.system-linux.net/image/tux/baby-tux_overlord59-tux.png">
|
||||
</div>
|
||||
"""
|
||||
)),
|
||||
Slice(
|
||||
slice_name="Name Cloud",
|
||||
viz_type='word_cloud',
|
||||
datasource_type='table',
|
||||
table=tbl,
|
||||
params=get_slice_json(
|
||||
defaults,
|
||||
viz_type="word_cloud", size_from="10",
|
||||
series='name', size_to="70", rotation="square",
|
||||
limit='100')),
|
||||
Slice(
|
||||
slice_name="Pivot Table",
|
||||
viz_type='pivot_table',
|
||||
datasource_type='table',
|
||||
table=tbl,
|
||||
params=get_slice_json(
|
||||
defaults,
|
||||
viz_type="pivot_table", metrics=['sum__num'],
|
||||
groupby=['name'], columns=['state'])),
|
||||
]
|
||||
for slc in slices:
|
||||
merge_slice(slc)
|
||||
|
||||
print("Creating a dashboard")
|
||||
dash = db.session.query(Dash).filter_by(dashboard_title="Births").first()
|
||||
|
||||
if dash:
|
||||
db.session.delete(dash)
|
||||
js = """
|
||||
[
|
||||
{
|
||||
"size_y": 4,
|
||||
"size_x": 2,
|
||||
"col": 8,
|
||||
"slice_id": "85",
|
||||
"row": 7
|
||||
},
|
||||
{
|
||||
"size_y": 4,
|
||||
"size_x": 2,
|
||||
"col": 10,
|
||||
"slice_id": "86",
|
||||
"row": 7
|
||||
},
|
||||
{
|
||||
"size_y": 2,
|
||||
"size_x": 2,
|
||||
"col": 1,
|
||||
"slice_id": "87",
|
||||
"row": 1
|
||||
},
|
||||
{
|
||||
"size_y": 2,
|
||||
"size_x": 2,
|
||||
"col": 3,
|
||||
"slice_id": "88",
|
||||
"row": 1
|
||||
},
|
||||
{
|
||||
"size_y": 3,
|
||||
"size_x": 7,
|
||||
"col": 5,
|
||||
"slice_id": "89",
|
||||
"row": 4
|
||||
},
|
||||
{
|
||||
"size_y": 4,
|
||||
"size_x": 7,
|
||||
"col": 1,
|
||||
"slice_id": "90",
|
||||
"row": 7
|
||||
},
|
||||
{
|
||||
"size_y": 3,
|
||||
"size_x": 3,
|
||||
"col": 9,
|
||||
"slice_id": "91",
|
||||
"row": 1
|
||||
},
|
||||
{
|
||||
"size_y": 3,
|
||||
"size_x": 4,
|
||||
"col": 5,
|
||||
"slice_id": "92",
|
||||
"row": 1
|
||||
},
|
||||
{
|
||||
"size_y": 4,
|
||||
"size_x": 4,
|
||||
"col": 1,
|
||||
"slice_id": "93",
|
||||
"row": 3
|
||||
}
|
||||
]
|
||||
"""
|
||||
l = json.loads(js)
|
||||
for i, pos in enumerate(l):
|
||||
pos['slice_id'] = str(slices[i].id)
|
||||
dash = Dash(
|
||||
dashboard_title="Births",
|
||||
position_json=json.dumps(l, indent=4),
|
||||
slug="births",
|
||||
)
|
||||
for s in slices:
|
||||
dash.slices.append(s)
|
||||
db.session.commit()
|
||||
BIN
panoramix/data/birth_names.csv.gz
Normal file
BIN
panoramix/data/birth_names.csv.gz
Normal file
Binary file not shown.
@@ -1,4 +1,4 @@
|
||||
This data was downloaded from the
|
||||
This data was download from the
|
||||
[World's Health Organization's website](http://data.worldbank.org/data-catalog/health-nutrition-and-population-statistics)
|
||||
|
||||
Here's the script that was used to massage the data:
|
||||
@@ -1,8 +1,6 @@
|
||||
"""This module contains data related to countries and is used for geo mapping"""
|
||||
from __future__ import absolute_import
|
||||
from __future__ import division
|
||||
from __future__ import print_function
|
||||
from __future__ import unicode_literals
|
||||
"""
|
||||
This module contains data related to countries and is used for geo mapping
|
||||
"""
|
||||
|
||||
countries = [
|
||||
{
|
||||
@@ -2484,7 +2482,6 @@ for lookup in lookups:
|
||||
for country in countries:
|
||||
all_lookups[lookup][country[lookup].lower()] = country
|
||||
|
||||
|
||||
def get(field, symbol):
|
||||
"""
|
||||
Get country data based on a standard code and a symbol
|
||||
588
panoramix/forms.py
Normal file
588
panoramix/forms.py
Normal file
@@ -0,0 +1,588 @@
|
||||
from wtforms import (
|
||||
Form, SelectMultipleField, SelectField, TextField, TextAreaField,
|
||||
BooleanField, IntegerField, HiddenField)
|
||||
from wtforms import validators, widgets
|
||||
from copy import copy
|
||||
from panoramix import app
|
||||
from collections import OrderedDict
|
||||
config = app.config
|
||||
|
||||
|
||||
class BetterBooleanField(BooleanField):
|
||||
|
||||
"""
|
||||
Fixes behavior of html forms omitting non checked <input>
|
||||
(which doesn't distinguish False from NULL/missing )
|
||||
If value is unchecked, this hidden <input> fills in False value
|
||||
"""
|
||||
|
||||
def __call__(self, **kwargs):
|
||||
html = super(BetterBooleanField, self).__call__(**kwargs)
|
||||
html += u'<input type="hidden" name="{}" value="false">'.format(self.name)
|
||||
return widgets.HTMLString(html)
|
||||
|
||||
|
||||
class SelectMultipleSortableField(SelectMultipleField):
|
||||
|
||||
"""Works along with select2sortable to preserves the sort order"""
|
||||
|
||||
def iter_choices(self):
|
||||
d = OrderedDict()
|
||||
for value, label in self.choices:
|
||||
selected = self.data is not None and self.coerce(value) in self.data
|
||||
d[value] = (value, label, selected)
|
||||
if self.data:
|
||||
for value in self.data:
|
||||
if value:
|
||||
yield d.pop(value)
|
||||
while d:
|
||||
yield d.pop(d.keys()[0])
|
||||
|
||||
|
||||
class FreeFormSelect(widgets.Select):
|
||||
|
||||
"""A WTF widget that allows for free form entry"""
|
||||
|
||||
def __call__(self, field, **kwargs):
|
||||
kwargs.setdefault('id', field.id)
|
||||
if self.multiple:
|
||||
kwargs['multiple'] = True
|
||||
html = ['<select %s>' % widgets.html_params(name=field.name, **kwargs)]
|
||||
found = False
|
||||
for val, label, selected in field.iter_choices():
|
||||
html.append(self.render_option(val, label, selected))
|
||||
if field.data and val == field.data:
|
||||
found = True
|
||||
if not found:
|
||||
html.insert(1, self.render_option(field.data, field.data, True))
|
||||
html.append('</select>')
|
||||
return widgets.HTMLString(''.join(html))
|
||||
|
||||
|
||||
class FreeFormSelectField(SelectField):
|
||||
|
||||
""" A WTF SelectField that allows for free form input """
|
||||
|
||||
widget = FreeFormSelect()
|
||||
def pre_validate(self, form):
|
||||
return
|
||||
|
||||
|
||||
class OmgWtForm(Form):
|
||||
|
||||
"""Panoramixification of the WTForm Form object"""
|
||||
|
||||
fieldsets = {}
|
||||
css_classes = dict()
|
||||
|
||||
def get_field(self, fieldname):
|
||||
return getattr(self, fieldname)
|
||||
|
||||
def field_css_classes(self, fieldname):
|
||||
if fieldname in self.css_classes:
|
||||
return " ".join(self.css_classes[fieldname])
|
||||
return ""
|
||||
|
||||
|
||||
class FormFactory(object):
|
||||
"""Used to create the forms in the explore view dynamically"""
|
||||
series_limits = [0, 5, 10, 25, 50, 100, 500]
|
||||
fieltype_class = {
|
||||
SelectField: 'select2',
|
||||
SelectMultipleField: 'select2',
|
||||
FreeFormSelectField: 'select2_freeform',
|
||||
SelectMultipleSortableField: 'select2Sortable',
|
||||
}
|
||||
|
||||
def __init__(self, viz):
|
||||
self.viz = viz
|
||||
from panoramix.viz import viz_types
|
||||
viz = self.viz
|
||||
datasource = viz.datasource
|
||||
default_metric = datasource.metrics_combo[0][0]
|
||||
default_groupby = datasource.groupby_column_names[0]
|
||||
group_by_choices = [(s, s) for s in datasource.groupby_column_names]
|
||||
# Pool of all the fields that can be used in Panoramix
|
||||
self.field_dict = {
|
||||
'viz_type': SelectField(
|
||||
'Viz',
|
||||
default='table',
|
||||
choices=[(k, v.verbose_name) for k, v in viz_types.items()],
|
||||
description="The type of visualization to display"),
|
||||
'metrics': SelectMultipleSortableField(
|
||||
'Metrics', choices=datasource.metrics_combo,
|
||||
default=[default_metric],
|
||||
description="One or many metrics to display"),
|
||||
'metric': SelectField(
|
||||
'Metric', choices=datasource.metrics_combo,
|
||||
default=default_metric,
|
||||
description="Chose the metric"),
|
||||
'stacked_style': SelectField(
|
||||
'Chart Style', choices=self.choicify(
|
||||
['stack', 'stream', 'expand']),
|
||||
default='stack',
|
||||
description=""),
|
||||
'linear_color_scheme': SelectField(
|
||||
'Color Scheme', choices=self.choicify([
|
||||
'fire', 'blue_white_yellow', 'white_black',
|
||||
'black_white']),
|
||||
default='fire',
|
||||
description=""),
|
||||
'normalize_across': SelectField(
|
||||
'Normalize Across', choices=self.choicify([
|
||||
'heatmap', 'x', 'y']),
|
||||
default='heatmap',
|
||||
description=(
|
||||
"Color will be rendered based on a ratio "
|
||||
"of the cell against the sum of across this "
|
||||
"criteria")),
|
||||
'canvas_image_rendering': SelectField(
|
||||
'Rendering', choices=(
|
||||
('pixelated', 'pixelated (Sharp)'),
|
||||
('auto', 'auto (Smooth)'),
|
||||
),
|
||||
default='pixelated',
|
||||
description=(
|
||||
"image-rendering CSS attribute of the canvas object that "
|
||||
"defines how the browser scales up the image")),
|
||||
'xscale_interval': SelectField(
|
||||
'XScale Interval', choices=self.choicify(range(1, 50)),
|
||||
default='1',
|
||||
description=(
|
||||
"Number of step to take between ticks when "
|
||||
"printing the x scale")),
|
||||
'yscale_interval': SelectField(
|
||||
'YScale Interval', choices=self.choicify(range(1, 50)),
|
||||
default='1',
|
||||
description=(
|
||||
"Number of step to take between ticks when "
|
||||
"printing the y scale")),
|
||||
'bar_stacked': BetterBooleanField(
|
||||
'Stacked Bars',
|
||||
default=False,
|
||||
description=""),
|
||||
'secondary_metric': SelectField(
|
||||
'Color Metric', choices=datasource.metrics_combo,
|
||||
default=default_metric,
|
||||
description="A metric to use for color"),
|
||||
'country_fieldtype': SelectField(
|
||||
'Country Field Type',
|
||||
default='cca2',
|
||||
choices=(
|
||||
('name', 'Full name'),
|
||||
('cioc', 'code International Olympic Committee (cioc)'),
|
||||
('cca2', 'code ISO 3166-1 alpha-2 (cca2)'),
|
||||
('cca3', 'code ISO 3166-1 alpha-3 (cca3)'),
|
||||
),
|
||||
description=(
|
||||
"The country code standard that Panoramix should expect "
|
||||
"to find in the [country] column")),
|
||||
'groupby': SelectMultipleSortableField(
|
||||
'Group by',
|
||||
choices=self.choicify(datasource.groupby_column_names),
|
||||
description="One or many fields to group by"),
|
||||
'columns': SelectMultipleSortableField(
|
||||
'Columns',
|
||||
choices=self.choicify(datasource.groupby_column_names),
|
||||
description="One or many fields to pivot as columns"),
|
||||
'all_columns': SelectMultipleSortableField(
|
||||
'Columns',
|
||||
choices=self.choicify(datasource.column_names),
|
||||
description="Columns to display"),
|
||||
'all_columns_x': SelectField(
|
||||
'X',
|
||||
choices=self.choicify(datasource.column_names),
|
||||
description="Columns to display"),
|
||||
'all_columns_y': SelectField(
|
||||
'Y',
|
||||
choices=self.choicify(datasource.column_names),
|
||||
description="Columns to display"),
|
||||
'granularity': FreeFormSelectField(
|
||||
'Time Granularity', default="one day",
|
||||
choices=self.choicify([
|
||||
'all',
|
||||
'5 seconds',
|
||||
'30 seconds',
|
||||
'1 minute',
|
||||
'5 minutes',
|
||||
'1 hour',
|
||||
'6 hour',
|
||||
'1 day',
|
||||
'7 days',
|
||||
]),
|
||||
description=(
|
||||
"The time granularity for the visualization. Note that you "
|
||||
"can type and use simple natural language as in '10 seconds', "
|
||||
"'1 day' or '56 weeks'")),
|
||||
'link_length': FreeFormSelectField(
|
||||
'Link Length', default="200",
|
||||
choices=self.choicify([
|
||||
'10',
|
||||
'25',
|
||||
'50',
|
||||
'75',
|
||||
'100',
|
||||
'150',
|
||||
'200',
|
||||
'250',
|
||||
]),
|
||||
description="Link length in the force layout"),
|
||||
'charge': FreeFormSelectField(
|
||||
'Charge', default="-500",
|
||||
choices=self.choicify([
|
||||
'-50',
|
||||
'-75',
|
||||
'-100',
|
||||
'-150',
|
||||
'-200',
|
||||
'-250',
|
||||
'-500',
|
||||
'-1000',
|
||||
'-2500',
|
||||
'-5000',
|
||||
]),
|
||||
description="Charge in the force layout"),
|
||||
'granularity_sqla': SelectField(
|
||||
'Time Column',
|
||||
default=datasource.main_dttm_col or datasource.any_dttm_col,
|
||||
choices=self.choicify(datasource.dttm_cols),
|
||||
description=(
|
||||
"The time column for the visualization. Note that you "
|
||||
"can define arbitrary expression that return a DATETIME "
|
||||
"column in the table editor. Also note that the "
|
||||
"filter bellow is applied against this column or "
|
||||
"expression")),
|
||||
'resample_rule': FreeFormSelectField(
|
||||
'Resample Rule', default='',
|
||||
choices=self.choicify(('1T', '1H', '1D', '7D', '1M', '1AS')),
|
||||
description=("Pandas resample rule")),
|
||||
'resample_how': FreeFormSelectField(
|
||||
'Resample How', default='',
|
||||
choices=self.choicify(('', 'mean', 'sum', 'median')),
|
||||
description=("Pandas resample how")),
|
||||
'resample_fillmethod': FreeFormSelectField(
|
||||
'Resample Fill Method', default='',
|
||||
choices=self.choicify(('', 'ffill', 'bfill')),
|
||||
description=("Pandas resample fill method")),
|
||||
'since': FreeFormSelectField(
|
||||
'Since', default="7 days ago",
|
||||
choices=self.choicify([
|
||||
'1 hour ago',
|
||||
'12 hours ago',
|
||||
'1 day ago',
|
||||
'7 days ago',
|
||||
'28 days ago',
|
||||
'90 days ago',
|
||||
'1 year ago'
|
||||
]),
|
||||
description=(
|
||||
"Timestamp from filter. This supports free form typing and "
|
||||
"natural language as in '1 day ago', '28 days' or '3 years'")),
|
||||
'until': FreeFormSelectField('Until', default="now",
|
||||
choices=self.choicify([
|
||||
'now',
|
||||
'1 day ago',
|
||||
'7 days ago',
|
||||
'28 days ago',
|
||||
'90 days ago',
|
||||
'1 year ago'])
|
||||
),
|
||||
'max_bubble_size': FreeFormSelectField(
|
||||
'Max Bubble Size', default="25",
|
||||
choices=self.choicify([
|
||||
'5',
|
||||
'10',
|
||||
'15',
|
||||
'25',
|
||||
'50',
|
||||
'75',
|
||||
'100',
|
||||
])
|
||||
),
|
||||
'row_limit':
|
||||
FreeFormSelectField(
|
||||
'Row limit',
|
||||
default=config.get("ROW_LIMIT"),
|
||||
choices=self.choicify(
|
||||
[10, 50, 100, 250, 500, 1000, 5000, 10000, 50000])),
|
||||
'limit':
|
||||
FreeFormSelectField(
|
||||
'Series limit',
|
||||
choices=self.choicify(self.series_limits),
|
||||
default=50,
|
||||
description=(
|
||||
"Limits the number of time series that get displayed")),
|
||||
'rolling_type': SelectField(
|
||||
'Rolling',
|
||||
default='None',
|
||||
choices=[(s, s) for s in ['None', 'mean', 'sum', 'std', 'cumsum']],
|
||||
description=(
|
||||
"Defines a rolling window function to apply, works along "
|
||||
"with the [Periods] text box")),
|
||||
'rolling_periods': IntegerField(
|
||||
'Periods',
|
||||
validators=[validators.optional()],
|
||||
description=(
|
||||
"Defines the size of the rolling window function, "
|
||||
"relative to the time granularity selected")),
|
||||
'series': SelectField(
|
||||
'Series', choices=group_by_choices,
|
||||
default=default_groupby,
|
||||
description=(
|
||||
"Defines the grouping of entities. "
|
||||
"Each serie is shown as a specific color on the chart and "
|
||||
"has a legend toggle")),
|
||||
'entity': SelectField('Entity', choices=group_by_choices,
|
||||
default=default_groupby,
|
||||
description="This define the element to be plotted on the chart"),
|
||||
'x': SelectField(
|
||||
'X Axis', choices=datasource.metrics_combo,
|
||||
default=default_metric,
|
||||
description="Metric assigned to the [X] axis"),
|
||||
'y': SelectField('Y Axis', choices=datasource.metrics_combo,
|
||||
default=default_metric,
|
||||
description="Metric assigned to the [Y] axis"),
|
||||
'size': SelectField(
|
||||
'Bubble Size',
|
||||
default=default_metric,
|
||||
choices=datasource.metrics_combo),
|
||||
'url': TextField(
|
||||
'URL', default='www.airbnb.com',),
|
||||
'where': TextField(
|
||||
'Custom WHERE clause', default='',
|
||||
description=(
|
||||
"The text in this box gets included in your query's WHERE "
|
||||
"clause, as an AND to other criteria. You can include "
|
||||
"complex expression, parenthesis and anything else "
|
||||
"supported by the backend it is directed towards.")),
|
||||
'having': TextField('Custom HAVING clause', default='',
|
||||
description=(
|
||||
"The text in this box gets included in your query's HAVING"
|
||||
" clause, as an AND to other criteria. You can include "
|
||||
"complex expression, parenthesis and anything else "
|
||||
"supported by the backend it is directed towards.")),
|
||||
'compare_lag': TextField('Comparison Period Lag',
|
||||
description=(
|
||||
"Based on granularity, number of time periods to "
|
||||
"compare against")),
|
||||
'compare_suffix': TextField('Comparison suffix',
|
||||
description="Suffix to apply after the percentage display"),
|
||||
'x_axis_format': FreeFormSelectField('X axis format',
|
||||
default='smart_date',
|
||||
choices=[
|
||||
('smart_date', 'Adaptative formating'),
|
||||
("%m/%d/%Y", '"%m/%d/%Y" | 01/14/2019'),
|
||||
("%Y-%m-%d", '"%Y-%m-%d" | 2019-01-14'),
|
||||
("%Y-%m-%d %H:%M:%S",
|
||||
'"%Y-%m-%d %H:%M:%S" | 2019-01-14 01:32:10'),
|
||||
("%H:%M:%S", '"%H:%M:%S" | 01:32:10'),
|
||||
],
|
||||
description="D3 format syntax for y axis "
|
||||
"https://github.com/mbostock/\n"
|
||||
"d3/wiki/Formatting"),
|
||||
'y_axis_format': FreeFormSelectField('Y axis format',
|
||||
default='.3s',
|
||||
choices=[
|
||||
('.3s', '".3s" | 12.3k'),
|
||||
('.3%', '".3%" | 1234543.210%'),
|
||||
('.4r', '".4r" | 12350'),
|
||||
('.3f', '".3f" | 12345.432'),
|
||||
('+,', '"+," | +12,345.4321'),
|
||||
('$,.2f', '"$,.2f" | $12,345.43'),
|
||||
],
|
||||
description="D3 format syntax for y axis "
|
||||
"https://github.com/mbostock/\n"
|
||||
"d3/wiki/Formatting"),
|
||||
'markup_type': SelectField(
|
||||
"Markup Type",
|
||||
choices=self.choicify(['markdown', 'html']),
|
||||
default="markdown",
|
||||
description="Pick your favorite markup language"),
|
||||
'rotation': SelectField(
|
||||
"Rotation",
|
||||
choices=[(s, s) for s in ['random', 'flat', 'square']],
|
||||
default="random",
|
||||
description="Rotation to apply to words in the cloud"),
|
||||
'line_interpolation': SelectField(
|
||||
"Line Style",
|
||||
choices=self.choicify([
|
||||
'linear', 'basis', 'cardinal', 'monotone',
|
||||
'step-before', 'step-after']),
|
||||
default='linear',
|
||||
description="Line interpolation as defined by d3.js"),
|
||||
'code': TextAreaField(
|
||||
"Code", description="Put your code here", default=''),
|
||||
'pandas_aggfunc': SelectField(
|
||||
"Aggregation function",
|
||||
choices=self.choicify([
|
||||
'sum', 'mean', 'min', 'max', 'median', 'stdev', 'var']),
|
||||
default='sum',
|
||||
description=(
|
||||
"Aggregate function to apply when pivoting and "
|
||||
"computing the total rows and columns")),
|
||||
'size_from': TextField(
|
||||
"Font Size From",
|
||||
default="20",
|
||||
description="Font size for the smallest value in the list"),
|
||||
'size_to': TextField(
|
||||
"Font Size To",
|
||||
default="150",
|
||||
description="Font size for the biggest value in the list"),
|
||||
'show_brush': BetterBooleanField(
|
||||
"Range Filter", default=False,
|
||||
description=(
|
||||
"Whether to display the time range interactive selector")),
|
||||
'show_datatable': BetterBooleanField(
|
||||
"Data Table", default=False,
|
||||
description="Whether to display the interactive data table"),
|
||||
'include_search': BetterBooleanField(
|
||||
"Search Box", default=False,
|
||||
description=(
|
||||
"Whether to include a client side search box")),
|
||||
'show_bubbles': BetterBooleanField(
|
||||
"Show Bubbles", default=False,
|
||||
description=(
|
||||
"Whether to display bubbles on top of countries")),
|
||||
'show_legend': BetterBooleanField(
|
||||
"Legend", default=True,
|
||||
description="Whether to display the legend (toggles)"),
|
||||
'x_axis_showminmax': BetterBooleanField(
|
||||
"X bounds", default=True,
|
||||
description=(
|
||||
"Whether to display the min and max values of the X axis")),
|
||||
'rich_tooltip': BetterBooleanField(
|
||||
"Rich Tooltip", default=True,
|
||||
description=(
|
||||
"The rich tooltip shows a list of all series for that"
|
||||
" point in time")),
|
||||
'y_axis_zero': BetterBooleanField(
|
||||
"Y Axis Zero", default=False,
|
||||
description=(
|
||||
"Force the Y axis to start at 0 instead of the minimum "
|
||||
"value")),
|
||||
'y_log_scale': BetterBooleanField(
|
||||
"Y Log", default=False,
|
||||
description="Use a log scale for the Y axis"),
|
||||
'x_log_scale': BetterBooleanField(
|
||||
"X Log", default=False,
|
||||
description="Use a log scale for the X axis"),
|
||||
'donut': BetterBooleanField(
|
||||
"Donut", default=False,
|
||||
description="Do you want a donut or a pie?"),
|
||||
'contribution': BetterBooleanField(
|
||||
"Contribution", default=False,
|
||||
description="Compute the contribution to the total"),
|
||||
'num_period_compare': IntegerField(
|
||||
"Period Ratio", default=None,
|
||||
validators=[validators.optional()],
|
||||
description=(
|
||||
"[integer] Number of period to compare against, "
|
||||
"this is relative to the granularity selected")),
|
||||
'time_compare': TextField(
|
||||
"Time Shift",
|
||||
default="",
|
||||
description=(
|
||||
"Overlay a timeseries from a "
|
||||
"relative time period. Expects relative time delta "
|
||||
"in natural language (example: 24 hours, 7 days, "
|
||||
"56 weeks, 365 days")),
|
||||
}
|
||||
|
||||
@staticmethod
|
||||
def choicify(l):
|
||||
return [("{}".format(obj), "{}".format(obj)) for obj in l]
|
||||
|
||||
def get_form(self):
    """Returns a form object based on the viz/datasource/context

    Builds a WTForms class (``QueryForm``) dynamically: CSS classes are
    computed per field, ten filter rows (flt_col_/flt_op_/flt_eq_ 0..9)
    are attached, the viz's declared flat fields are copied over, and
    datasource-type-specific sections (SQL clauses, time granularity)
    are appended to the fieldsets.
    """
    viz = self.viz
    # Map each field name to the list of CSS classes it should render with.
    field_css_classes = {}
    for name, obj in self.field_dict.items():
        field_css_classes[name] = ['form-control']
        # NOTE: attribute name 'fieltype_class' (sic) is defined elsewhere
        # on this class; it maps a WTForms field class to an extra CSS class.
        s = self.fieltype_class.get(obj.field_class)
        if s:
            field_css_classes[name] += [s]

    # These boolean toggles are rendered as small inputs.
    for field in ('show_brush', 'show_legend', 'rich_tooltip'):
        field_css_classes[field] += ['input-sm']

    class QueryForm(OmgWtForm):
        """Dynamically built form carrying the explore-view state."""
        fieldsets = copy(viz.fieldsets)
        css_classes = field_css_classes
        standalone = HiddenField()
        # NOTE(review): 'async' is a reserved keyword in Python 3.7+; this
        # legacy (Python 2) attribute name would need renaming to run on
        # modern Python.
        async = HiddenField()
        extra_filters = HiddenField()
        json = HiddenField()
        slice_id = HiddenField()
        slice_name = HiddenField()
        previous_viz_type = HiddenField(default=viz.viz_type)
        collapsed_fieldsets = HiddenField()
        viz_type = self.field_dict.get('viz_type')

    # Fall back to a single empty choice when the datasource exposes no
    # filterable columns, so filter selects still render.
    filter_cols = viz.datasource.filterable_column_names or ['']
    # Attach a fixed pool of 10 filter rows (column / operator / value).
    for i in range(10):
        # NOTE(review): the label is 'Filter 1' for every row; presumably
        # it should incorporate ``i`` — confirm whether the label is ever
        # shown per-row in the UI.
        setattr(QueryForm, 'flt_col_' + str(i), SelectField(
            'Filter 1',
            default=filter_cols[0],
            choices=self.choicify(filter_cols)))
        setattr(QueryForm, 'flt_op_' + str(i), SelectField(
            'Filter 1',
            default='in',
            choices=self.choicify(['in', 'not in'])))
        setattr(
            QueryForm, 'flt_eq_' + str(i),
            TextField("Super", default=''))

    # Copy every field the viz declares onto the form class.
    for field in viz.flat_form_fields():
        setattr(QueryForm, field, self.field_dict[field])

    def add_to_form(attrs):
        # Helper: attach the named prebuilt fields from field_dict.
        for attr in attrs:
            setattr(QueryForm, attr, self.field_dict[attr])

    # datasource type specific form elements
    if viz.datasource.__class__.__name__ == 'SqlaTable':
        # SQL-backed datasources get free-form WHERE/HAVING inputs.
        QueryForm.fieldsets += ({
            'label': 'SQL',
            'fields': ['where', 'having'],
            'description': (
                "This section exposes ways to include snippets of "
                "SQL in your query"),
        },)
        add_to_form(('where', 'having'))
        grains = viz.datasource.database.grains()

        # Without any datetime column there is no time section to build.
        if not viz.datasource.any_dttm_col:
            return QueryForm
        if grains:
            # The database engine supports time-grain transformations.
            time_fields = ('granularity_sqla', 'time_grain_sqla')
            self.field_dict['time_grain_sqla'] = SelectField(
                'Time Grain',
                choices=self.choicify((grain.name for grain in grains)),
                default="Time Column",
                description=(
                    "The time granularity for the visualization. This "
                    "applies a date transformation to alter "
                    "your time column and defines a new time granularity."
                    "The options here are defined on a per database "
                    "engine basis in the Panoramix source code"))
            add_to_form(time_fields)
            field_css_classes['time_grain_sqla'] = ['form-control', 'select2']
            field_css_classes['granularity_sqla'] = ['form-control', 'select2']
        else:
            time_fields = 'granularity_sqla'
            add_to_form((time_fields, ))
    else:
        # Non-SQLA datasources (e.g. Druid) use a single granularity field.
        time_fields = 'granularity'
        add_to_form(('granularity',))
        field_css_classes['granularity'] = ['form-control', 'select2']
    add_to_form(('since', 'until'))

    # Prepend the Time section so it always appears first in the form.
    QueryForm.fieldsets = ({
        'label': 'Time',
        'fields': (
            time_fields,
            ('since', 'until'),
        ),
        'description': "Time related form attributes",
    },) + tuple(QueryForm.fieldsets)
    return QueryForm
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user