Merge branch 'master' of github.com:lana-k/sqliteviz

.github/workflows/test.yml (vendored, 2 changed lines)

@@ -11,7 +11,7 @@ on:
 jobs:
   test:
     name: Run tests
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-20.04
     steps:
       - uses: actions/checkout@v2
       - name: Use Node.js

Dockerfile.test (new file, 24 lines)

@@ -0,0 +1,24 @@
+# An easy way to run tests locally without Nodejs installed:
+#
+#   docker build -t sqliteviz/test -f Dockerfile.test .
+#
+
+FROM node:12
+
+RUN set -ex; \
+    apt update; \
+    apt install -y chromium firefox-esr; \
+    npm install -g npm@7
+
+WORKDIR /tmp/build
+
+COPY package.json package-lock.json ./
+COPY lib lib
+RUN npm install
+
+COPY . .
+
+RUN set -ex; \
+    sed -i 's/browsers: \[.*\],/browsers: ['"'FirefoxHeadlessTouch'"'],/' karma.conf.js
+
+RUN npm run lint -- --no-fix && npm run test

@@ -1,4 +1,4 @@
-FROM emscripten/emsdk:2.0.24
+FROM emscripten/emsdk:3.0.1
 
 WORKDIR /tmp/build
 

@@ -43,6 +43,8 @@ SQLite [miscellaneous extensions][3] included:
 SQLite 3rd party extensions included:
 
 1. [pivot_vtab][5] -- a pivot virtual table
+2. `pearson` correlation coefficient function extension from [sqlean][21]
+   (which is part of [squib][20])
 
 To ease the step to have working clone locally, the build is committed into
 the repository.
@@ -99,3 +101,5 @@ described in [this message from SQLite Forum][12]:
 [17]: https://sqlite.org/contrib/
 [18]: https://sqlite.org/contrib//download/extension-functions.c?get=25
 [19]: https://github.com/lana-k/sqliteviz/blob/master/tests/lib/database/sqliteExtensions.spec.js
+[20]: https://github.com/mrwilson/squib/blob/master/pearson.c
+[21]: https://github.com/nalgeon/sqlean/blob/incubator/src/pearson.c

@@ -1,14 +1,25 @@
 # SQLite WebAssembly build micro-benchmark
 
-This directory contains a micro-benchmark for evaluating SQLite
-WebAssembly builds performance on typical SQL queries, run from
-`make.sh` script. It can also serve as a smoke test.
+This directory contains a micro-benchmark for evaluating SQLite WebAssembly
+builds performance on read and write SQL queries, run from `make.sh` script. If
+the script has permission to `nice` processes and [Procpath][1] is installed,
+e.g. it is run with `sudo -E env PATH=$PATH ./make.sh`, it'll `renice` all
+processes running inside the benchmark containers. It can also serve as a smoke
+test (e.g. for memory leaks).
 
-The benchmark operates on a set of SQLite WebAssembly builds expected
-in `lib/build-$NAME` directories each containing `sql-wasm.js` and
-`sql-wasm.wasm`. Then it creates a Docker image for each, and runs
-the benchmark in Firefox and Chromium using Karma in the container.
+The benchmark operates on a set of SQLite WebAssembly builds expected in
+`lib/build-$NAME` directories each containing `sql-wasm.js` and
+`sql-wasm.wasm`. Then it creates a Docker image for each, and runs the
+benchmark in Firefox and Chromium using Karma in the container.
 
-After successful run, the benchmark result of each build is contained
-in `build-$NAME-result.json`. The JSON result files can be analysed
-using `result-analysis.ipynb` Jupyter notebook.
+After successful run, the benchmark produces the following per each build:
+
+- `build-$NAME-result.json`
+- `build-$NAME.sqlite` (if Procpath is installed)
+- `build-$NAME.svg` (if Procpath is installed)
+
+These files can be analysed using `result-analysis.ipynb` Jupyter notebook.
+The SVG is a chart with CPU and RSS usage of each test container (i.e. Chromium
+run, then Firefox run per container).
+
+[1]: https://pypi.org/project/Procpath/
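
As a rough illustration of how the per-build result files listed above could be
consumed outside the notebook, here is a minimal sketch (assuming nothing about
the JSON structure beyond what the README states, i.e. one
`build-$NAME-result.json` per build; the "build-baseline" name in the comment is
hypothetical):

    # Sketch: gather the per-build benchmark results for ad-hoc inspection.
    import json
    from pathlib import Path

    results = {}
    for path in sorted(Path('.').glob('build-*-result.json')):
        name = path.name.removesuffix('-result.json')  # e.g. "build-baseline"
        with path.open() as f:
            results[name] = json.load(f)  # structure depends on the Karma suite output

    for name, data in results.items():
        print(name, type(data).__name__)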

@@ -1,7 +1,8 @@
 #!/bin/bash -e
 
 cleanup () {
-  rm -rf lib/dist $flag_file
+  rm -rf lib/dist "$renice_flag_file"
+  docker rm -f sqljs-benchmark-run 2> /dev/null || true
 }
 trap cleanup EXIT
 
@@ -11,34 +12,36 @@ if [ ! -f sample.csv ]; then
     | gunzip -c > sample.csv
 fi
 
+PLAYBOOK=procpath/karma_docker.procpath
+
 # for renice to work run like "sudo -E env PATH=$PATH ./make.sh"
-test_ni=$(nice -n -1 nice)
-if [ $test_ni == -1 ]; then
-  flag_file=$(mktemp)
+test_ni=$(nice -n -5 nice)
+if [ $test_ni == -5 ]; then
+  renice_flag_file=$(mktemp)
 fi
-(
-  while [ -f $flag_file ]; do
-    root_pid=$(
-      docker ps -f status=running -f name='^sqljs-benchmark-' -q \
-      | xargs -r -I{} -- docker inspect -f '{{.State.Pid}}' {}
-    )
-    if [ ! -z $root_pid ]; then
-      procpath query -d $'\n' "$..children[?(@.stat.pid == $root_pid)]..pid" \
-      | xargs -I{} -- renice -n -1 -p {} > /dev/null
-    fi
-    sleep 1
-  done &
-)
+{
+  while [ -f $renice_flag_file ]; do
+    procpath --logging-level ERROR play -f $PLAYBOOK renice:watch
+  done
+} &
 
 shopt -s nullglob
 for d in lib/build-* ; do
   rm -rf lib/dist
   cp -r $d lib/dist
+  sample_name=$(basename $d)
 
-  name=$(basename $d)
-  docker build -t sqliteviz/sqljs-benchmark:$name .
-  docker rm sqljs-benchmark-$name 2> /dev/null || true
-  docker run -it --cpus 2 --name sqljs-benchmark-$name sqliteviz/sqljs-benchmark:$name
-  docker cp sqljs-benchmark-$name:/tmp/build/suite-result.json ${name}-result.json
-  docker rm sqljs-benchmark-$name
+  docker build -t sqliteviz/sqljs-benchmark .
+  docker rm sqljs-benchmark-run 2> /dev/null || true
+  docker run -d -it --cpus 2 --name sqljs-benchmark-run sqliteviz/sqljs-benchmark
+  {
+    rm -f ${sample_name}.sqlite
+    procpath play -f $PLAYBOOK -o database_file=${sample_name}.sqlite track:record
+    procpath play -f $PLAYBOOK -o database_file=${sample_name}.sqlite \
+      -o plot_file=${sample_name}.svg track:plot
+  } &
+
+  docker attach sqljs-benchmark-run
+  docker cp sqljs-benchmark-run:/tmp/build/suite-result.json ${sample_name}-result.json
+  docker rm sqljs-benchmark-run
 done

lib/sql-js/benchmark/procpath/karma_docker.procpath (new file, 28 lines)

@@ -0,0 +1,28 @@
+# This command may run when "sqljs-benchmark-run" does not yet exist or run
+[renice:watch]
+interval: 2
+repeat: 30
+environment:
+  ROOT_PID=docker inspect -f "{{.State.Pid}}" sqljs-benchmark-run 2> /dev/null || true
+query:
+  PIDS=$..children[?(@.stat.pid in [$ROOT_PID])]..pid
+command:
+  echo $PIDS | tr , '\n' | xargs --no-run-if-empty -I{} -- renice -n -5 -p {}
+
+# Expected input arguments: database_file
+[track:record]
+interval: 1
+stop_without_result: 1
+environment:
+  ROOT_PID=docker inspect -f "{{.State.Pid}}" sqljs-benchmark-run
+query:
+  $..children[?(@.stat.pid == $ROOT_PID)]
+pid_list: $ROOT_PID
+
+# Expected input arguments: database_file, plot_file
+[track:plot]
+moving_average_window: 5
+title: Chromium vs Firefox (№1 RSS, №2 CPU)
+custom_query_file:
+  procpath/top2_rss.sql
+  procpath/top2_cpu.sql

lib/sql-js/benchmark/procpath/top2_cpu.sql (new file, 29 lines)

@@ -0,0 +1,29 @@
+WITH diff_all AS (
+  SELECT
+    record_id,
+    ts,
+    stat_pid,
+    stat_utime + stat_stime - LAG(stat_utime + stat_stime) OVER (
+      PARTITION BY stat_pid
+      ORDER BY record_id
+    ) tick_diff,
+    ts - LAG(ts) OVER (
+      PARTITION BY stat_pid
+      ORDER BY record_id
+    ) ts_diff
+  FROM record
+), diff AS (
+  SELECT * FROM diff_all WHERE tick_diff IS NOT NULL
+), one_time_pid_condition AS (
+  SELECT stat_pid
+  FROM record
+  GROUP BY 1
+  ORDER BY SUM(stat_utime + stat_stime) DESC
+  LIMIT 2
+)
+SELECT
+  ts,
+  stat_pid pid,
+  100.0 * tick_diff / (SELECT value FROM meta WHERE key = 'clock_ticks') / ts_diff value
+FROM diff
+JOIN one_time_pid_condition USING(stat_pid)
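
For intuition, the final expression in top2_cpu.sql turns scheduler ticks into a
CPU percentage: the per-interval utime+stime delta is divided by the kernel's
clock tick rate (stored as `clock_ticks` in the `meta` table) and by the elapsed
wall-clock time. A worked example with illustrative numbers:

    # Worked example of the top2_cpu.sql expression:
    #   100.0 * tick_diff / clock_ticks / ts_diff
    clock_ticks = 100  # typical Linux USER_HZ; the real value comes from the meta table
    tick_diff = 150    # utime+stime ticks consumed between two samples
    ts_diff = 1.0      # seconds elapsed between the two samples

    cpu_percent = 100.0 * tick_diff / clock_ticks / ts_diff
    print(cpu_percent)  # 150.0 -> the process used about 1.5 cores over that second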

lib/sql-js/benchmark/procpath/top2_rss.sql (new file, 13 lines)

@@ -0,0 +1,13 @@
+WITH one_time_pid_condition AS (
+  SELECT stat_pid
+  FROM record
+  GROUP BY 1
+  ORDER BY SUM(stat_rss) DESC
+  LIMIT 2
+)
+SELECT
+  ts,
+  stat_pid pid,
+  stat_rss / 1024.0 / 1024 * (SELECT value FROM meta WHERE key = 'page_size') value
+FROM record
+JOIN one_time_pid_condition USING(stat_pid)
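
Likewise, top2_rss.sql converts the sampled `stat_rss` page count into mebibytes
using the recorded `page_size` from the `meta` table. A worked example with
illustrative numbers:

    # Worked example of the top2_rss.sql expression:
    #   stat_rss / 1024.0 / 1024 * page_size
    page_size = 4096   # typical 4 KiB pages; the real value comes from the meta table
    stat_rss = 262144  # resident page count sampled for the process

    rss_mib = stat_rss / 1024.0 / 1024 * page_size
    print(rss_mib)  # 1024.0 -> roughly 1 GiB resident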

File diff suppressed because one or more lines are too long

@@ -2,9 +2,11 @@ import logging
 import subprocess
 from pathlib import Path
 
+# See the setting descriptions on these pages:
+# - https://emscripten.org/docs/optimizing/Optimizing-Code.html
+# - https://github.com/emscripten-core/emscripten/blob/main/src/settings.js
 cflags = (
-    '-O2',
+    # SQLite configuration
     '-DSQLITE_DEFAULT_CACHE_SIZE=-65536',  # 64 MiB
     '-DSQLITE_DEFAULT_MEMSTATUS=0',
     '-DSQLITE_DEFAULT_SYNCHRONOUS=0',
@@ -13,26 +15,26 @@ cflags = (
     '-DSQLITE_ENABLE_FTS3',
     '-DSQLITE_ENABLE_FTS3_PARENTHESIS',
     '-DSQLITE_ENABLE_FTS5',
-    '-DSQLITE_ENABLE_JSON1',
     '-DSQLITE_ENABLE_NORMALIZE',
     '-DSQLITE_EXTRA_INIT=extra_init',
     '-DSQLITE_OMIT_DEPRECATED',
     '-DSQLITE_OMIT_LOAD_EXTENSION',
     '-DSQLITE_OMIT_SHARED_CACHE',
     '-DSQLITE_THREADSAFE=0',
+    # Compile-time optimisation
+    '-Os',  # reduces the code size about in half comparing to -O2
+    '-flto',
 )
 emflags = (
     # Base
     '--memory-init-file', '0',
-    '-s', 'RESERVED_FUNCTION_POINTERS=64',
     '-s', 'ALLOW_TABLE_GROWTH=1',
-    '-s', 'SINGLE_FILE=0',
     # WASM
     '-s', 'WASM=1',
     '-s', 'ALLOW_MEMORY_GROWTH=1',
-    # Optimisation
-    '-s', 'INLINING_LIMIT=50',
-    '-O3',
+    '-s', 'ENVIRONMENT=web,worker',
+    # Link-time optimisation
+    '-Os',
     '-flto',
     # sql.js
     '-s', 'EXPORTED_FUNCTIONS=@src/sqljs/exported_functions.json',
@@ -50,22 +52,22 @@ def build(src: Path, dst: Path):
         'emcc',
         *cflags,
         '-c', src / 'sqlite3.c',
-        '-o', out / 'sqlite3.bc',
+        '-o', out / 'sqlite3.o',
     ])
     logging.info('Building LLVM bitcode for extension-functions.c')
     subprocess.check_call([
         'emcc',
         *cflags,
         '-c', src / 'extension-functions.c',
-        '-o', out / 'extension-functions.bc',
+        '-o', out / 'extension-functions.o',
     ])
 
     logging.info('Building WASM from bitcode')
     subprocess.check_call([
         'emcc',
         *emflags,
-        out / 'sqlite3.bc',
-        out / 'extension-functions.bc',
+        out / 'sqlite3.o',
+        out / 'extension-functions.o',
         '-o', out / 'sql-wasm.js',
     ])
 

@@ -8,7 +8,7 @@ from pathlib import Path
 from urllib import request
 
 
-amalgamation_url = 'https://sqlite.org/2022/sqlite-amalgamation-3390000.zip'
+amalgamation_url = 'https://sqlite.org/2023/sqlite-amalgamation-3410000.zip'
 
 # Extension-functions
 # ===================
@@ -28,10 +28,11 @@ extension_urls = (
     ('https://sqlite.org/src/raw/09f967dc?at=decimal.c', 'sqlite3_decimal_init'),
     # Third-party extension
     # =====================
-    ('https://github.com/jakethaw/pivot_vtab/raw/08ab0797/pivot_vtab.c', 'sqlite3_pivotvtab_init'),
+    ('https://github.com/jakethaw/pivot_vtab/raw/9323ef93/pivot_vtab.c', 'sqlite3_pivotvtab_init'),
+    ('https://github.com/nalgeon/sqlean/raw/95e8d21a/src/pearson.c', 'sqlite3_pearson_init'),
 )
 
-sqljs_url = 'https://github.com/sql-js/sql.js/archive/refs/tags/v1.5.0.zip'
+sqljs_url = 'https://github.com/sql-js/sql.js/archive/refs/tags/v1.7.0.zip'
 
 
 def _generate_extra_init_c_function(init_function_names):
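
The second element of each `extension_urls` tuple is the extension's init
function name, which -- given `-DSQLITE_EXTRA_INIT=extra_init` in `cflags` -- is
presumably wired up by the C code that `_generate_extra_init_c_function` emits.
That generator's body is not part of this diff; the following is only an
illustrative sketch of the general idea (registering each statically linked init
function via `sqlite3_auto_extension`), not the project's actual implementation:

    # Illustrative sketch only -- not the project's actual _generate_extra_init_c_function.
    # With -DSQLITE_EXTRA_INIT=extra_init, SQLite calls int extra_init(const char*) near
    # the end of sqlite3_initialize(), a convenient place to auto-register statically
    # linked extensions for every new connection.
    def generate_extra_init_c(init_function_names):
        decls = '\n'.join(
            f'int {name}(sqlite3 *db, char **err, const sqlite3_api_routines *api);'
            for name in init_function_names
        )
        calls = '\n'.join(
            f'  sqlite3_auto_extension((void (*)(void)) {name});'
            for name in init_function_names
        )
        return f'{decls}\n\nint extra_init(const char *unused) {{\n{calls}\n  return SQLITE_OK;\n}}\n'

    print(generate_extra_init_c(['sqlite3_pivotvtab_init', 'sqlite3_pearson_init']))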

lib/sql-js/dist/sql-wasm.js (vendored, 2 changed lines): file diff suppressed because one or more lines are too long.
lib/sql-js/dist/sql-wasm.wasm (vendored, binary): binary file not shown.

@@ -293,7 +293,7 @@ describe('SQLite extensions', function () {
 
   it('supports decimal', async function () {
     const actual = await db.execute(`
-      select
+      SELECT
         decimal_add(decimal('0.1'), decimal('0.2')) "add",
         decimal_sub(0.2, 0.1) sub,
         decimal_mul(power(2, 69), 2) mul,
@@ -430,4 +430,29 @@ describe('SQLite extensions', function () {
       ]
     })
   })
+
+  it('supports pearson', async function () {
+    const actual = await db.execute(`
+      CREATE TABLE dataset(x REAL, y REAL, z REAL);
+      INSERT INTO dataset VALUES
+        (5,3,3.2), (5,6,4.3), (5,9,5.4),
+        (10,3,4), (10,6,3.8), (10,9,3.6),
+        (15,3,4.8), (15,6,4), (15,9,3.5);
+
+      SELECT
+        pearson(x, x) xx,
+        pearson(x, y) xy,
+        abs(-0.12666 - pearson(x, z)) < 0.00001 xz,
+        pearson(y, x) yx,
+        pearson(y, y) yy,
+        abs(0.10555 - pearson(y, z)) < 0.00001 yz,
+        abs(-0.12666 - pearson(z, x)) < 0.00001 zx,
+        abs(0.10555 - pearson(z, y)) < 0.00001 zy,
+        pearson(z, z) zz
+      FROM dataset;
+    `)
+    expect(actual.values).to.eql({
+      xx: [1], xy: [0], xz: [1], yx: [0], yy: [1], yz: [1], zx: [1], zy: [1], zz: [1]
+    })
+  })
 })
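
The expected values in the new test can be cross-checked independently: in the
3x3 factorial dataset above x and y are uncorrelated by construction, while the
x-z and y-z correlations round to the constants asserted in the SQL. A quick
check with Python's statistics module (3.10+):

    # Independent check of the Pearson coefficients asserted in the test above.
    from statistics import correlation  # Python 3.10+

    rows = [(5, 3, 3.2), (5, 6, 4.3), (5, 9, 5.4),
            (10, 3, 4), (10, 6, 3.8), (10, 9, 3.6),
            (15, 3, 4.8), (15, 6, 4), (15, 9, 3.5)]
    x, y, z = zip(*rows)

    print(round(correlation(x, y), 5))  # 0.0      -> the test expects xy == 0
    print(round(correlation(x, z), 5))  # -0.12666 -> matches the xz/zx assertions
    print(round(correlation(y, z), 5))  # 0.10555  -> matches the yz/zy assertions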