Mirror of https://github.com/lana-k/sqliteviz.git, synced 2025-12-06 18:18:53 +08:00

Compare commits: 0.20.0...9c0103fd05 (7 commits)
Commits (SHA1):

- 9c0103fd05
- e4b117ffb9
- 3c456ef135
- c713c713b7
- babf0074c0
- e71e6700c1
- 84e66b8167
.github/workflows/main.yml (vendored, 7 changed lines)
@@ -24,10 +24,11 @@ jobs:
  npm install
  npm run build

- - name: Create archive
+ - name: Create archives
  run: |
  cd dist
- zip -9 -r dist.zip . -x "js/*.map" -x "/*.map"
+ zip -9 -r ../dist.zip . -x "js/*.map" -x "/*.map"
+ zip -9 -r ../dist_map.zip .

  - name: Create Release Notes
  run: |
@@ -39,6 +40,6 @@ jobs:
  - name: Create release
  uses: ncipollo/release-action@v1
  with:
- artifacts: "dist/dist.zip"
+ artifacts: "dist.zip,dist_map.zip"
  token: ${{ secrets.GITHUB_TOKEN }}
  bodyFile: "CHANGELOG.md"
.github/workflows/test.yml (vendored, 2 changed lines)
@@ -11,7 +11,7 @@ on:
  jobs:
  test:
  name: Run tests
- runs-on: ubuntu-latest
+ runs-on: ubuntu-20.04
  steps:
  - uses: actions/checkout@v2
  - name: Use Node.js
Dockerfile.test (new file, 24 lines)
@@ -0,0 +1,24 @@
# An easy way to run tests locally without Nodejs installed:
#
# docker build -t sqliteviz/test -f Dockerfile.test .
#

FROM node:12

RUN set -ex; \
    apt update; \
    apt install -y chromium firefox-esr; \
    npm install -g npm@7

WORKDIR /tmp/build

COPY package.json package-lock.json ./
COPY lib lib
RUN npm install

COPY . .

RUN set -ex; \
    sed -i 's/browsers: \[.*\],/browsers: ['"'FirefoxHeadlessTouch'"'],/' karma.conf.js

RUN npm run lint -- --no-fix && npm run test
@@ -21,7 +21,7 @@ https://user-images.githubusercontent.com/24638357/128249848-f8fab0f5-9add-46e0-
  The latest release of sqliteviz is deployed on [sqliteviz.com/app][6].

  ## Wiki
- For user documentation, check out sqliteviz [Wiki][7].
+ For user documentation, check out sqliteviz [documentation][7].

  ## Motivation
  It's a kind of middleground between [Plotly Falcon][1] and [Redash][2].
@@ -35,7 +35,7 @@ It is built on top of [react-chart-editor][3], [PivotTable.js][12], [sql.js][4]
  [4]: https://github.com/sql-js/sql.js
  [5]: https://github.com/vuejs/vue
  [6]: https://sqliteviz.com/app/
- [7]: https://github.com/lana-k/sqliteviz/wiki
+ [7]: https://sqliteviz.com/docs
  [8]: https://github.com/surmon-china/vue-codemirror#readme
  [9]: https://www.papaparse.com/
  [10]: https://github.com/lana-k/sqliteviz/wiki/Predefined-queries
@@ -1,4 +1,4 @@
- FROM emscripten/emsdk:2.0.24
+ FROM emscripten/emsdk:3.0.1

  WORKDIR /tmp/build

@@ -43,6 +43,8 @@ SQLite [miscellaneous extensions][3] included:
  SQLite 3rd party extensions included:

  1. [pivot_vtab][5] -- a pivot virtual table
+ 2. `pearson` correlation coefficient function extension from [sqlean][21]
+    (which is part of [squib][20])

  To ease the step to have working clone locally, the build is committed into
  the repository.
@@ -99,3 +101,5 @@ described in [this message from SQLite Forum][12]:
  [17]: https://sqlite.org/contrib/
  [18]: https://sqlite.org/contrib//download/extension-functions.c?get=25
  [19]: https://github.com/lana-k/sqliteviz/blob/master/tests/lib/database/sqliteExtensions.spec.js
+ [20]: https://github.com/mrwilson/squib/blob/master/pearson.c
+ [21]: https://github.com/nalgeon/sqlean/blob/incubator/src/pearson.c
@@ -1,14 +1,25 @@
  # SQLite WebAssembly build micro-benchmark

- This directory contains a micro-benchmark for evaluating SQLite
- WebAssembly builds performance on typical SQL queries, run from
- `make.sh` script. It can also serve as a smoke test.
+ This directory contains a micro-benchmark for evaluating SQLite WebAssembly
+ builds performance on read and write SQL queries, run from `make.sh` script. If
+ the script has permission to `nice` processes and [Procpath][1] is installed,
+ e.g. it is run with `sudo -E env PATH=$PATH ./make.sh`, it'll `renice` all
+ processes running inside the benchmark containers. It can also serve as a smoke
+ test (e.g. for memory leaks).

- The benchmark operates on a set of SQLite WebAssembly builds expected
- in `lib/build-$NAME` directories each containing `sql-wasm.js` and
- `sql-wasm.wasm`. Then it creates a Docker image for each, and runs
- the benchmark in Firefox and Chromium using Karma in the container.
+ The benchmark operates on a set of SQLite WebAssembly builds expected in
+ `lib/build-$NAME` directories each containing `sql-wasm.js` and
+ `sql-wasm.wasm`. Then it creates a Docker image for each, and runs the
+ benchmark in Firefox and Chromium using Karma in the container.

- After successful run, the benchmark result of each build is contained
- in `build-$NAME-result.json`. The JSON result files can be analysed
- using `result-analysis.ipynb` Jupyter notebook.
+ After successful run, the benchmark produces the following per each build:
+
+ - `build-$NAME-result.json`
+ - `build-$NAME.sqlite` (if Procpath is installed)
+ - `build-$NAME.svg` (if Procpath is installed)
+
+ These files can be analysed using `result-analysis.ipynb` Jupyter notebook.
+ The SVG is a chart with CPU and RSS usage of each test container (i.e. Chromium
+ run, then Firefox run per container).
+
+ [1]: https://pypi.org/project/Procpath/
@@ -1,7 +1,8 @@
  #!/bin/bash -e

  cleanup () {
-   rm -rf lib/dist $flag_file
+   rm -rf lib/dist "$renice_flag_file"
+   docker rm -f sqljs-benchmark-run 2> /dev/null || true
  }
  trap cleanup EXIT

@@ -11,34 +12,36 @@ if [ ! -f sample.csv ]; then
      | gunzip -c > sample.csv
  fi

+ PLAYBOOK=procpath/karma_docker.procpath
+
  # for renice to work run like "sudo -E env PATH=$PATH ./make.sh"
- test_ni=$(nice -n -1 nice)
- if [ $test_ni == -1 ]; then
-   flag_file=$(mktemp)
+ test_ni=$(nice -n -5 nice)
+ if [ $test_ni == -5 ]; then
+   renice_flag_file=$(mktemp)
  fi
- (
-   while [ -f $flag_file ]; do
-     root_pid=$(
-       docker ps -f status=running -f name='^sqljs-benchmark-' -q \
-         | xargs -r -I{} -- docker inspect -f '{{.State.Pid}}' {}
-     )
-     if [ ! -z $root_pid ]; then
-       procpath query -d $'\n' "$..children[?(@.stat.pid == $root_pid)]..pid" \
-         | xargs -I{} -- renice -n -1 -p {} > /dev/null
-     fi
-     sleep 1
-   done &
- )
+ {
+   while [ -f $renice_flag_file ]; do
+     procpath --logging-level ERROR play -f $PLAYBOOK renice:watch
+   done
+ } &

  shopt -s nullglob
  for d in lib/build-* ; do
    rm -rf lib/dist
    cp -r $d lib/dist
+   sample_name=$(basename $d)

-   name=$(basename $d)
-   docker build -t sqliteviz/sqljs-benchmark:$name .
-   docker rm sqljs-benchmark-$name 2> /dev/null || true
-   docker run -it --cpus 2 --name sqljs-benchmark-$name sqliteviz/sqljs-benchmark:$name
-   docker cp sqljs-benchmark-$name:/tmp/build/suite-result.json ${name}-result.json
-   docker rm sqljs-benchmark-$name
+   docker build -t sqliteviz/sqljs-benchmark .
+   docker rm sqljs-benchmark-run 2> /dev/null || true
+   docker run -d -it --cpus 2 --name sqljs-benchmark-run sqliteviz/sqljs-benchmark
+   {
+     rm -f ${sample_name}.sqlite
+     procpath play -f $PLAYBOOK -o database_file=${sample_name}.sqlite track:record
+     procpath play -f $PLAYBOOK -o database_file=${sample_name}.sqlite \
+       -o plot_file=${sample_name}.svg track:plot
+   } &
+
+   docker attach sqljs-benchmark-run
+   docker cp sqljs-benchmark-run:/tmp/build/suite-result.json ${sample_name}-result.json
+   docker rm sqljs-benchmark-run
  done
lib/sql-js/benchmark/procpath/karma_docker.procpath (new file, 28 lines)
@@ -0,0 +1,28 @@
# This command may run when "sqljs-benchmark-run" does not yet exist or run
[renice:watch]
interval: 2
repeat: 30
environment:
  ROOT_PID=docker inspect -f "{{.State.Pid}}" sqljs-benchmark-run 2> /dev/null || true
query:
  PIDS=$..children[?(@.stat.pid in [$ROOT_PID])]..pid
command:
  echo $PIDS | tr , '\n' | xargs --no-run-if-empty -I{} -- renice -n -5 -p {}

# Expected input arguments: database_file
[track:record]
interval: 1
stop_without_result: 1
environment:
  ROOT_PID=docker inspect -f "{{.State.Pid}}" sqljs-benchmark-run
query:
  $..children[?(@.stat.pid == $ROOT_PID)]
pid_list: $ROOT_PID

# Expected input arguments: database_file, plot_file
[track:plot]
moving_average_window: 5
title: Chromium vs Firefox (№1 RSS, №2 CPU)
custom_query_file:
  procpath/top2_rss.sql
  procpath/top2_cpu.sql
lib/sql-js/benchmark/procpath/top2_cpu.sql (new file, 29 lines)
@@ -0,0 +1,29 @@
WITH diff_all AS (
  SELECT
    record_id,
    ts,
    stat_pid,
    stat_utime + stat_stime - LAG(stat_utime + stat_stime) OVER (
      PARTITION BY stat_pid
      ORDER BY record_id
    ) tick_diff,
    ts - LAG(ts) OVER (
      PARTITION BY stat_pid
      ORDER BY record_id
    ) ts_diff
  FROM record
), diff AS (
  SELECT * FROM diff_all WHERE tick_diff IS NOT NULL
), one_time_pid_condition AS (
  SELECT stat_pid
  FROM record
  GROUP BY 1
  ORDER BY SUM(stat_utime + stat_stime) DESC
  LIMIT 2
)
SELECT
  ts,
  stat_pid pid,
  100.0 * tick_diff / (SELECT value FROM meta WHERE key = 'clock_ticks') / ts_diff value
FROM diff
JOIN one_time_pid_condition USING(stat_pid)
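For orientation, the `value` column in the final SELECT above is the usual per-interval CPU percentage, with `clock_ticks` read from the recorded `meta` table:

    value = 100 \cdot \frac{\Delta(\mathrm{stat\_utime} + \mathrm{stat\_stime}) \,/\, \mathrm{clock\_ticks}}{\Delta \mathrm{ts}}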
lib/sql-js/benchmark/procpath/top2_rss.sql (new file, 13 lines)
@@ -0,0 +1,13 @@
WITH one_time_pid_condition AS (
  SELECT stat_pid
  FROM record
  GROUP BY 1
  ORDER BY SUM(stat_rss) DESC
  LIMIT 2
)
SELECT
  ts,
  stat_pid pid,
  stat_rss / 1024.0 / 1024 * (SELECT value FROM meta WHERE key = 'page_size') value
FROM record
JOIN one_time_pid_condition USING(stat_pid)
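Likewise, the `value` column here converts `stat_rss` from pages to MiB using `page_size` from the `meta` table:

    value = \frac{\mathrm{stat\_rss} \cdot \mathrm{page\_size}}{1024 \cdot 1024}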
File diff suppressed because one or more lines are too long
@@ -2,9 +2,11 @@ import logging
import subprocess
from pathlib import Path


# See the setting descriptions on these pages:
# - https://emscripten.org/docs/optimizing/Optimizing-Code.html
# - https://github.com/emscripten-core/emscripten/blob/main/src/settings.js
cflags = (
    '-O2',
    # SQLite configuration
    '-DSQLITE_DEFAULT_CACHE_SIZE=-65536', # 64 MiB
    '-DSQLITE_DEFAULT_MEMSTATUS=0',
    '-DSQLITE_DEFAULT_SYNCHRONOUS=0',
@@ -13,26 +15,26 @@ cflags = (
    '-DSQLITE_ENABLE_FTS3',
    '-DSQLITE_ENABLE_FTS3_PARENTHESIS',
    '-DSQLITE_ENABLE_FTS5',
    '-DSQLITE_ENABLE_JSON1',
    '-DSQLITE_ENABLE_NORMALIZE',
    '-DSQLITE_EXTRA_INIT=extra_init',
    '-DSQLITE_OMIT_DEPRECATED',
    '-DSQLITE_OMIT_LOAD_EXTENSION',
    '-DSQLITE_OMIT_SHARED_CACHE',
    '-DSQLITE_THREADSAFE=0',
    # Compile-time optimisation
    '-Os', # reduces the code size about in half comparing to -O2
    '-flto',
)
emflags = (
    # Base
    '--memory-init-file', '0',
    '-s', 'RESERVED_FUNCTION_POINTERS=64',
    '-s', 'ALLOW_TABLE_GROWTH=1',
    '-s', 'SINGLE_FILE=0',
    # WASM
    '-s', 'WASM=1',
    '-s', 'ALLOW_MEMORY_GROWTH=1',
    # Optimisation
    '-s', 'INLINING_LIMIT=50',
    '-O3',
    '-s', 'ENVIRONMENT=web,worker',
    # Link-time optimisation
    '-Os',
    '-flto',
    # sql.js
    '-s', 'EXPORTED_FUNCTIONS=@src/sqljs/exported_functions.json',
@@ -50,22 +52,22 @@ def build(src: Path, dst: Path):
    'emcc',
    *cflags,
    '-c', src / 'sqlite3.c',
    '-o', out / 'sqlite3.bc',
    '-o', out / 'sqlite3.o',
])
logging.info('Building LLVM bitcode for extension-functions.c')
subprocess.check_call([
    'emcc',
    *cflags,
    '-c', src / 'extension-functions.c',
    '-o', out / 'extension-functions.bc',
    '-o', out / 'extension-functions.o',
])

logging.info('Building WASM from bitcode')
subprocess.check_call([
    'emcc',
    *emflags,
    out / 'sqlite3.bc',
    out / 'extension-functions.bc',
    out / 'sqlite3.o',
    out / 'extension-functions.o',
    '-o', out / 'sql-wasm.js',
])
@@ -8,7 +8,7 @@ from pathlib import Path
  from urllib import request


- amalgamation_url = 'https://sqlite.org/2022/sqlite-amalgamation-3390000.zip'
+ amalgamation_url = 'https://sqlite.org/2023/sqlite-amalgamation-3410000.zip'

  # Extension-functions
  # ===================
@@ -28,10 +28,11 @@ extension_urls = (
  ('https://sqlite.org/src/raw/09f967dc?at=decimal.c', 'sqlite3_decimal_init'),
  # Third-party extension
  # =====================
- ('https://github.com/jakethaw/pivot_vtab/raw/08ab0797/pivot_vtab.c', 'sqlite3_pivotvtab_init'),
+ ('https://github.com/jakethaw/pivot_vtab/raw/9323ef93/pivot_vtab.c', 'sqlite3_pivotvtab_init'),
+ ('https://github.com/nalgeon/sqlean/raw/95e8d21a/src/pearson.c', 'sqlite3_pearson_init'),
  )

- sqljs_url = 'https://github.com/sql-js/sql.js/archive/refs/tags/v1.5.0.zip'
+ sqljs_url = 'https://github.com/sql-js/sql.js/archive/refs/tags/v1.7.0.zip'


  def _generate_extra_init_c_function(init_function_names):
lib/sql-js/dist/sql-wasm.js (vendored, 2 changed lines; file diff suppressed because one or more lines are too long)
lib/sql-js/dist/sql-wasm.wasm (vendored, binary file not shown)
@@ -1,6 +1,6 @@
  {
    "name": "sqliteviz",
-   "version": "0.20.0",
+   "version": "0.21.1",
    "license": "Apache-2.0",
    "private": true,
    "scripts": {
@@ -112,7 +112,7 @@ import SqlTable from '@/components/SqlTable'
  import Logs from '@/components/Logs'
  import time from '@/lib/utils/time'
  import fIo from '@/lib/utils/fileIo'
- import { send } from '@/lib/utils/events'
+ import events from '@/lib/utils/events'

  export default {
    name: 'CsvImport',
@@ -336,7 +336,7 @@ export default {
      this.$store.commit('setCurrentTabId', tabId)
      this.importCsvCompleted = false
      this.$emit('finish')
-     send('inquiry.create', undefined, { auto: true })
+     events.send('inquiry.create', null, { auto: true })
    }
  }
}
@@ -58,7 +58,7 @@ import fIo from '@/lib/utils/fileIo'
  import ChangeDbIcon from '@/components/svg/changeDb'
  import database from '@/lib/database'
  import CsvImport from '@/components/CsvImport'
- import { send } from '@/lib/utils/events'
+ import events from '@/lib/utils/events'

  export default {
    name: 'DbUploader',
@@ -128,7 +128,7 @@ export default {
    if (fIo.isDatabase(file)) {
      this.loadDb(file)
    } else {
-     send('database.import', file.size, {
+     events.send('database.import', file.size, {
        from: 'csv',
        new_db: true
      })
@@ -7,7 +7,7 @@ import Worker from './_worker.js'
  // https://github.com/nolanlawson/promise-worker
  import PromiseWorker from 'promise-worker'

- import { send } from '@/lib/utils/events'
+ import events from '@/lib/utils/events'

  function getNewDatabase () {
    const worker = new Worker()
@@ -79,7 +79,7 @@ class Database {
    this.dbName = file ? fu.getFileName(file) : 'database'
    this.refreshSchema()

-   send('database.import', file ? file.size : 0, {
+   events.send('database.import', file ? file.size : 0, {
      from: file ? 'sqlite' : 'none',
      new_db: true
    })
@@ -121,7 +121,7 @@ class Database {
      throw new Error(data.error)
    }
    fu.exportToFile(data, fileName)
-   send('database.export', data.byteLength, { to: 'sqlite' })
+   events.send('database.export', data.byteLength, { to: 'sqlite' })
  }

  async validateTableName (name) {
@@ -1,6 +1,6 @@
  import { nanoid } from 'nanoid'
  import fu from '@/lib/utils/fileIo'
- import { send } from '@/lib/utils/events'
+ import events from '@/lib/utils/events'
  import migration from './_migrations'

  const migrate = migration._migrate
@@ -106,7 +106,7 @@ export default {
    .then(str => {
      const inquires = this.deserialiseInquiries(str)

-     send('inquiry.import', inquires.length)
+     events.send('inquiry.import', inquires.length)

      return inquires
    })
@@ -115,7 +115,7 @@ export default {
    const jsonStr = this.serialiseInquiries(inquiryList)
    fu.exportToFile(jsonStr, fileName)

-   send('inquiry.export', inquiryList.length)
+   events.send('inquiry.export', inquiryList.length)
  },

  async readPredefinedInquiries () {
@@ -1,10 +1,12 @@
- export function send (name, value, labels) {
+ export default {
+   send (name, value, labels) {
      const event = new CustomEvent('sqliteviz-app-event', {
        detail: {
          name,
          value,
-         labels
+         labels: labels || {}
        }
      })
      window.dispatchEvent(event)
    }
+ }
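Since every call site below changes in the same way, here is a minimal usage sketch of the refactored module. The event names and labels are taken from the diffs in this changeset; the numeric value is a made-up stand-in for a measured duration:

    // before: import { send } from '@/lib/utils/events'
    import events from '@/lib/utils/events'

    // labels is optional and defaults to {} inside send()
    events.send('inquiry.save')
    events.send('inquiry.create', null, { auto: true })
    events.send('query.run', 1.25, { status: 'success' })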
@@ -1,4 +1,4 @@
- import { send } from '@/lib/utils/events'
+ import events from '@/lib/utils/events'
  let refresh = false

  function invokeServiceWorkerUpdateFlow (registration) {
@@ -44,6 +44,6 @@ if ('serviceWorker' in navigator) {
    })

    window.addEventListener('appinstalled', () => {
-     send('pwa.install')
+     events.send('pwa.install')
    })
  }
@@ -60,7 +60,7 @@ import TextField from '@/components/TextField'
  import CloseIcon from '@/components/svg/close'
  import storedInquiries from '@/lib/storedInquiries'
  import AppDiagnosticInfo from './AppDiagnosticInfo'
- import { send } from '@/lib/utils/events'
+ import events from '@/lib/utils/events'

  export default {
    name: 'MainMenu',
@@ -115,7 +115,7 @@ export default {
      }
    })

-   send('inquiry.create', undefined, { auto: false })
+   events.send('inquiry.create', null, { auto: false })
  },
  cancelSave () {
    this.$modal.hide('save')
@@ -169,7 +169,7 @@ export default {

    // Signal about saving
    this.$root.$emit('inquirySaved')
-   send('inquiry.save')
+   events.send('inquiry.save')
  },
  _keyListener (e) {
    if (this.$route.path === '/workspace') {
@@ -33,7 +33,7 @@

  <script>
  import fIo from '@/lib/utils/fileIo'
- import { send } from '@/lib/utils/events'
+ import events from '@/lib/utils/events'
  import TableDescription from './TableDescription'
  import TextField from '@/components/TextField'
  import TreeChevron from '@/components/svg/treeChevron'
@@ -88,7 +88,7 @@ export default {
    await csvImport.previewCsv()
    csvImport.open()

-   send('database.import', this.file.size, {
+   events.send('database.import', this.file.size, {
      from: 'csv',
      new_db: false
    })
@@ -31,7 +31,7 @@ import PlotlyEditor from 'react-chart-editor'
  import chartHelper from '@/lib/chartHelper'
  import dereference from 'react-chart-editor/lib/lib/dereference'
  import fIo from '@/lib/utils/fileIo'
- import { send } from '@/lib/utils/events'
+ import events from '@/lib/utils/events'

  export default {
    name: 'Chart',
@@ -66,11 +66,10 @@ export default {
    notifyOnLogging: 1
  })
  this.$watch(
-   () => JSON.stringify(
-     this.state.data.map(trace => `${trace.type}-${trace.mode}`)
-   ),
+   () => this.state.data.map(trace => `${trace.type}-${trace.mode}`)
+     .join(','),
    (value) => {
-     send('viz_plotly.render', undefined, {
+     events.send('viz_plotly.render', null, {
        type: value,
        pivot: !!this.forPivot
      })
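As a small aside, a sketch of what the rewritten watch source evaluates to; the sample trace objects are illustrative, not taken from the diff:

    // The watched expression now yields a plain comma-joined string instead of a JSON string.
    const data = [
      { type: 'scatter', mode: 'markers' },
      { type: 'bar', mode: undefined }
    ]
    const value = data.map(trace => `${trace.type}-${trace.mode}`).join(',')
    // value === 'scatter-markers,bar-undefined'
    // The watcher handler then reports this string as the `type` label of the
    // 'viz_plotly.render' event.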
@@ -23,7 +23,7 @@ import pivotHelper from './pivotHelper'
  import Chart from '@/views/Main/Workspace/Tabs/Tab/DataView/Chart'
  import chartHelper from '@/lib/chartHelper'
  import Vue from 'vue'
- import { send } from '@/lib/utils/events'
+ import events from '@/lib/utils/events'
  const ChartClass = Vue.extend(Chart)

  export default {
@@ -96,7 +96,7 @@ export default {
    'update:importToSvgEnabled',
    this.viewStandartChart || this.viewCustomChart
  )
- send('viz_pivot.render', undefined, {
+ events.send('viz_pivot.render', null, {
    type: this.pivotOptions.rendererName
  })
}
@@ -95,7 +95,7 @@ import ClipboardIcon from '@/components/svg/clipboard'
  import cIo from '@/lib/utils/clipboardIo'
  import loadingDialog from '@/components/LoadingDialog'
  import time from '@/lib/utils/time'
- import { send } from '@/lib/utils/events'
+ import events from '@/lib/utils/events'

  export default {
    name: 'DataView',
@@ -207,11 +207,11 @@ export default {
    eventLabels.pivot = this.plotlyInPivot
  }

- send(
+ events.send(
    this.mode === 'chart' || this.plotlyInPivot
      ? 'viz_plotly.export'
      : 'viz_pivot.export',
-   undefined,
+   null,
    eventLabels
  )
}
@@ -72,7 +72,7 @@ import fIo from '@/lib/utils/fileIo'
  import cIo from '@/lib/utils/clipboardIo'
  import time from '@/lib/utils/time'
  import loadingDialog from '@/components/LoadingDialog'
- import { send } from '@/lib/utils/events'
+ import events from '@/lib/utils/events'

  export default {
    name: 'RunResult',
@@ -119,7 +119,7 @@ export default {

  exportToCsv () {
    if (this.result && this.result.values) {
-     send('resultset.export',
+     events.send('resultset.export',
        this.result.values[this.result.columns[0]].length,
        { to: 'csv' }
      )
@@ -130,7 +130,7 @@ export default {

  async prepareCopy () {
    if (this.result && this.result.values) {
-     send('resultset.export',
+     events.send('resultset.export',
        this.result.values[this.result.columns[0]].length,
        { to: 'clipboard' }
      )
@@ -3,14 +3,20 @@ import 'codemirror/addon/hint/show-hint.js'
  import 'codemirror/addon/hint/sql-hint.js'
  import store from '@/store'

+ function _getHintText (hint) {
+   return typeof hint === 'string' ? hint : hint.text
+ }
  export function getHints (cm, options) {
-   const token = cm.getTokenAt(cm.getCursor()).string.toUpperCase()
    const result = CM.hint.sql(cm, options)

    // Don't show the hint if there is only one option
-   // and the token is already completed with this option
-   if (result.list.length === 1 && result.list[0].text.toUpperCase() === token) {
+   // and the replacingText is already equals to this option
+   const replacedText = cm.getRange(result.from, result.to).toUpperCase()
+   if (result.list.length === 1 &&
+       _getHintText(result.list[0]).toUpperCase() === replacedText) {
      result.list = []
    }

    return result
  }
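A small illustrative sketch of the effect of comparing against the replaced range rather than the cursor token; the sample hints come from the updated tests below, and the helper mirrors `_getHintText` above:

    const getHintText = hint => (typeof hint === 'string' ? hint : hint.text)

    // A string hint such as 'house.name' spans more than one token, so comparing it
    // with the text the hint would replace (cm.getRange(result.from, result.to))
    // works for both hint shapes:
    const replacedText = 'HOUSE.NAME'
    console.log(getHintText('house.name').toUpperCase() === replacedText)       // true  -> hide the hint
    console.log(getHintText({ text: 'SELECT' }).toUpperCase() === replacedText) // false -> keep it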
@@ -56,7 +56,7 @@ import DataView from './DataView'
  import RunResult from './RunResult'
  import time from '@/lib/utils/time'
  import Teleport from 'vue2-teleport'
- import { send } from '@/lib/utils/events'
+ import events from '@/lib/utils/events'

  export default {
    name: 'Tab',
@@ -110,7 +110,7 @@ export default {
    this.layout[from] = this.layout[to]
    this.layout[to] = fromPosition

-   send('inquiry.panel', undefined, { panel: to })
+   events.send('inquiry.panel', null, { panel: to })
  },
  onDataViewUpdate () {
    this.$store.commit('updateTab', { index: this.tabIndex, isSaved: false })
@@ -126,19 +126,19 @@ export default {
    this.time = time.getPeriod(start, new Date())

    if (this.result && this.result.values) {
-     send('resultset.create',
+     events.send('resultset.create',
        this.result.values[this.result.columns[0]].length
      )
    }

-   send('query.run', parseFloat(this.time), { status: 'success' })
+   events.send('query.run', parseFloat(this.time), { status: 'success' })
  } catch (err) {
    this.error = {
      type: 'error',
      message: err
    }

-   send('query.run', 0, { status: 'error' })
+   events.send('query.run', 0, { status: 'error' })
  }
  state.db.refreshSchema()
  this.isGettingResults = false
@@ -19,7 +19,7 @@
  import Splitpanes from '@/components/Splitpanes'
  import Schema from './Schema'
  import Tabs from './Tabs'
- import { send } from '@/lib/utils/events'
+ import events from '@/lib/utils/events'

  export default {
    name: 'Workspace',
@@ -51,7 +51,7 @@ export default {
    const tabId = await this.$store.dispatch('addTab', { query: stmt })
    this.$store.commit('setCurrentTabId', tabId)

-   send('inquiry.create', undefined, { auto: true })
+   events.send('inquiry.create', null, { auto: true })
    }
  }
}
@@ -293,7 +293,7 @@ describe('SQLite extensions', function () {

  it('supports decimal', async function () {
    const actual = await db.execute(`
-     select
+     SELECT
        decimal_add(decimal('0.1'), decimal('0.2')) "add",
        decimal_sub(0.2, 0.1) sub,
        decimal_mul(power(2, 69), 2) mul,
@@ -430,4 +430,29 @@ describe('SQLite extensions', function () {
      ]
    })
  })
+
+ it('supports pearson', async function () {
+   const actual = await db.execute(`
+     CREATE TABLE dataset(x REAL, y REAL, z REAL);
+     INSERT INTO dataset VALUES
+       (5,3,3.2), (5,6,4.3), (5,9,5.4),
+       (10,3,4), (10,6,3.8), (10,9,3.6),
+       (15,3,4.8), (15,6,4), (15,9,3.5);
+
+     SELECT
+       pearson(x, x) xx,
+       pearson(x, y) xy,
+       abs(-0.12666 - pearson(x, z)) < 0.00001 xz,
+       pearson(y, x) yx,
+       pearson(y, y) yy,
+       abs(0.10555 - pearson(y, z)) < 0.00001 yz,
+       abs(-0.12666 - pearson(z, x)) < 0.00001 zx,
+       abs(0.10555 - pearson(z, y)) < 0.00001 zy,
+       pearson(z, z) zz
+     FROM dataset;
+   `)
+   expect(actual.values).to.eql({
+     xx: [1], xy: [0], xz: [1], yx: [0], yy: [1], yz: [1], zx: [1], zy: [1], zz: [1]
+   })
+ })
  })
@@ -138,15 +138,37 @@ describe('hint.js', () => {
'getHints returns [ ] if there is only one option and token is completed with this option',
() => {
// mock CM.hint.sql and editor
sinon.stub(CM.hint, 'sql').returns({ list: [{ text: 'SELECT' }] })
sinon.stub(CM.hint, 'sql').returns({
list: [{ text: 'SELECT' }],
from: null, // from/to doesn't metter because getRange is mocked
to: null
})

const editor = {
getTokenAt () {
return {
string: 'select',
type: 'keyword'
getRange () {
return 'select'
}
}

const hints = getHints(editor, {})
expect(hints.list).to.eql([])
}
)

it(
'getHints returns [ ] if there is only one string option and token ' +
'is completed with this option',
() => {
// mock CM.hint.sql and editor
sinon.stub(CM.hint, 'sql').returns({
list: ['house.name'],
from: null, // from/to doesn't metter because getRange is mocked
to: null
})
const editor = {
getRange () {
return 'house.name'
}
},
getCursor: sinon.stub()
}

const hints = getHints(editor, {})
@@ -160,15 +182,11 @@ describe('hint.js', () => {
{ text: 'SELECT' },
{ text: 'ST' }
]
sinon.stub(CM.hint, 'sql').returns({ list })
sinon.stub(CM.hint, 'sql').returns({ list, from: null, to: null })
const editor = {
getTokenAt () {
return {
string: 'se',
type: 'keyword'
getRange () {
return 'se'
}
},
getCursor: sinon.stub()
}

const hints = getHints(editor, {})
@@ -182,15 +200,11 @@ describe('hint.js', () => {
() => {
// mock CM.hint.sql and editor
const list = [{ text: 'SELECT' }]
sinon.stub(CM.hint, 'sql').returns({ list })
sinon.stub(CM.hint, 'sql').returns({ list, from: null, to: null })
const editor = {
getTokenAt () {
return {
string: 'sele',
type: 'keyword'
getRange () {
return 'sele'
}
},
getCursor: sinon.stub()
}

const hints = getHints(editor, {})