1
0
mirror of https://github.com/lana-k/sqliteviz.git synced 2025-12-07 02:28:54 +08:00

53 Commits

Author SHA1 Message Date
lana-k
86cd55ca9c Update How-to-get-result-set-suitable-for-graph-visualisation.md 2025-11-01 20:46:51 +01:00
lana-k
0da63acd60 Update Graph.md 2025-11-01 20:39:42 +01:00
lana-k
bf1bfb849c Update Graph.md 2025-11-01 20:38:50 +01:00
lana-k
83410560b3 Update How-to-get-result-set-suitable-for-graph-visualisation.md 2025-11-01 20:32:57 +01:00
lana-k
831364181d Add files via upload 2025-11-01 20:30:30 +01:00
lana-k
5015a9bed7 Update How-to-get-result-set-suitable-for-graph-visualisation.md 2025-11-01 20:29:50 +01:00
lana-k
07409a0e20 Update How-to-get-result-set-suitable-for-graph-visualisation.md 2025-11-01 20:10:28 +01:00
lana-k
ae39f9e66e Update Graph.md 2025-11-01 20:08:33 +01:00
lana-k
794c8dd491 Update How-to-get-result-set-suitable-for-graph-visualisation.md 2025-11-01 20:08:13 +01:00
lana-k
d6d563f039 Update Graph.md 2025-11-01 20:02:49 +01:00
lana-k
1bc46f3664 Update and rename How-to-get-result-set-suiltable-for-graph-visualisation.md to How-to-get-result-set-suitable-for-graph-visualisation.md 2025-11-01 20:02:22 +01:00
lana-k
ab623f566b Create How-to-get-result-set-suiltable-for-graph-visualisation.md 2025-11-01 20:01:12 +01:00
lana-k
5a375bc39f Update Graph.md 2025-11-01 20:00:19 +01:00
lana-k
ad3ff74b09 Update Graph.md 2025-10-31 22:53:14 +01:00
lana-k
c8c57e145e Add files via upload 2025-10-31 22:49:10 +01:00
lana-k
888b57e855 Update Graph.md 2025-10-31 22:35:53 +01:00
lana-k
3d5c21f6a2 Update Graph.md 2025-10-31 11:32:33 +01:00
lana-k
7e11ab810d Update Graph.md 2025-10-30 23:11:09 +01:00
lana-k
7656d7d263 Add files via upload 2025-10-30 20:34:51 +01:00
lana-k
f6bafce6aa Update Graph.md 2025-10-30 20:26:19 +01:00
lana-k
1b1fc24926 Update Graph.md 2025-10-30 20:25:43 +01:00
lana-k
77dcfd3691 Update Graph.md 2025-10-30 20:25:19 +01:00
lana-k
fff6ae4ec8 Create Graph.md 2025-10-30 17:37:04 +01:00
lana-k
4c5148c7e0 Update sidebar.json 2025-10-30 17:34:55 +01:00
lana-k
f1c1f7bdf3 Update index.md 2025-10-30 17:34:19 +01:00
lana-k
9a9d51fc72 Update Home.md 2025-10-30 17:33:00 +01:00
lana-k
7a6703648d Update Basic-usage.md 2025-10-30 17:30:36 +01:00
lana-k
053a7a4614 Add new screenshots 2025-10-30 17:25:20 +01:00
lana-k
22120fcff5 Update Home.md 2024-09-17 11:52:20 +02:00
lana-k
c35eaf2c58 Update index.md 2024-09-17 11:41:46 +02:00
lana-k
3e2809655c Update Multiple-CSV-file-import.md 2024-09-17 11:40:08 +02:00
lana-k
783613f55f Update Basic-usage.md 2024-09-17 11:29:37 +02:00
lana-k
001f400142 update home page, fix links #100 2023-06-10 19:07:48 +02:00
lana-k
932a8b20a1 Docs: Loading remote database and inquiries #109 2023-06-10 18:42:06 +02:00
lana-k
140eda15c3 Merge branch 'docs' of github.com:lana-k/sqliteviz into docs 2023-06-09 00:15:58 +02:00
lana-k
1bdc528dff Broken links in documentation #100 2023-06-09 00:15:24 +02:00
lana-k
edcf104b1a Update Installation.md 2022-07-10 22:35:53 +02:00
lana-k
0a18dca5bd Update How-to-migrate-to-sqliteviz-dot-com.md 2022-07-10 18:43:58 +02:00
lana-k
d249e0ac02 Update How-to-migrate-to-sqliteviz-dot-com.md 2022-07-10 18:42:41 +02:00
lana-k
5c53d24ef7 Update How-to-migrate-to-sqliteviz-dot-com.md 2022-07-10 16:54:23 +02:00
lana-k
7660689c27 fix typo 2022-07-01 17:37:28 +02:00
lana-k
092a77a544 rename 2022-07-01 17:34:38 +02:00
lana-k
a268941f01 about migration 2022-07-01 17:27:09 +02:00
lana-k
17514249e7 fix image 2022-07-01 17:04:03 +02:00
lana-k
f574ead7ea Update index.md 2022-07-01 16:49:02 +02:00
lana-k
9de01dbe2e sidebar settings 2022-01-07 18:46:18 +01:00
lana-k
2397ea6fb3 add index 2022-01-07 12:47:12 +01:00
lana-k
27bb23ac3d Add headers; fix images and links 2022-01-06 22:34:07 +01:00
lana-k
0698522fa5 fix link 2022-01-06 22:16:05 +01:00
lana-k
73849ee820 remove versions 2022-01-06 22:04:25 +01:00
lana-k
7dde645fbc fix images in Basic usage 2022-01-06 21:38:29 +01:00
lana-k
b273d7677d add docs 2022-01-06 21:32:52 +01:00
lana-k
f9561b8678 clear branch 2022-01-06 21:29:46 +01:00
268 changed files with 965 additions and 47063 deletions

View File

@@ -1,3 +0,0 @@
> 1%
last 2 versions
not dead

View File

@@ -1,5 +0,0 @@
[*.{js,jsx,ts,tsx,vue}]
indent_style = space
indent_size = 2
trim_trailing_whitespace = true
insert_final_newline = true

View File

@@ -1,29 +0,0 @@
module.exports = {
root: true,
env: {
node: true,
es2022: true
},
extends: ['eslint:recommended', 'plugin:vue/vue3-recommended', 'prettier'],
rules: {
'no-console': process.env.NODE_ENV === 'production' ? 'warn' : 'off',
'no-debugger': process.env.NODE_ENV === 'production' ? 'warn' : 'off',
'no-case-declarations': 'off',
'max-len': [2, 100, 4, { ignoreUrls: true }],
'vue/multi-word-component-names': 'off',
'vue/no-mutating-props': 'warn',
'vue/no-reserved-component-names': 'warn',
'vue/no-v-model-argument': 'off',
'vue/require-default-prop': 'off',
'vue/custom-event-name-casing': ['error', 'camelCase'],
'vue/attribute-hyphenation': ['error', 'never']
},
overrides: [
{
files: ['**/__tests__/*.{j,t}s?(x)', '**/tests/**/*.spec.{j,t}s?(x)'],
env: {
mocha: true
}
}
]
}

View File

@@ -1,14 +0,0 @@
module.exports = {
dataSource: 'milestones',
ignoreIssuesWith: ['wontfix', 'duplicate'],
milestoneMatch: 'v{{tag_name}}',
template: {
issue: '- {{name}} [{{text}}]({{url}})',
changelogTitle: '',
release: '{{body}}'
},
groupBy: {
Enhancements: ['enhancement', 'internal'],
'Bug fixes': ['bug']
}
}

View File

@@ -1,45 +0,0 @@
name: Create release
on:
workflow_dispatch:
push:
tags:
- '*'
jobs:
deploy:
name: Create release
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Use Node.js
uses: actions/setup-node@v1
with:
node-version: 18.x
- name: Update npm
run: npm install -g npm@10
- name: npm install and build
run: |
npm install
npm run build
- name: Create archives
run: |
cd dist
zip -9 -r ../dist.zip . -x "*.map"
zip -9 -r ../dist_map.zip .
- name: Create Release Notes
run: |
npm install github-release-notes@0.16.0 -g
gren changelog --generate --config="/.github/workflows/config.grenrc.cjs"
env:
GREN_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Create release
uses: ncipollo/release-action@v1
with:
artifacts: 'dist.zip,dist_map.zip'
token: ${{ secrets.GITHUB_TOKEN }}
bodyFile: 'CHANGELOG.md'

View File

@@ -1,38 +0,0 @@
name: Run tests
on:
workflow_dispatch:
push:
branches:
- 'master'
pull_request:
branches:
- 'master'
jobs:
test:
name: Run tests
runs-on: ubuntu-24.04
steps:
- uses: actions/checkout@v2
- name: Use Node.js
uses: actions/setup-node@v1
with:
node-version: 18.x
- name: Install browsers
run: |
export DEBIAN_FRONTEND=noninteractive
sudo add-apt-repository -y ppa:mozillateam/ppa
sudo apt-get update
sudo apt-get install -y chromium-browser firefox-esr
- name: Update npm
run: npm install -g npm@10
- name: Install the project
run: npm install
- name: Run lint
run: npm run lint -- --no-fix
- name: Run karma tests
run: npm run test

23
.gitignore vendored
View File

@@ -1,23 +0,0 @@
.DS_Store
node_modules
/dist
/coverage
# local env files
.env.local
.env.*.local
# Log files
npm-debug.log*
yarn-debug.log*
yarn-error.log*
pnpm-debug.log*
# Editor directories and files
.idea
.vscode
*.suo
*.ntvs*
*.njsproj
*.sln
*.sw?

View File

@@ -1,7 +0,0 @@
{
"trailingComma": "none",
"tabWidth": 2,
"semi": false,
"singleQuote": true,
"arrowParens": "avoid"
}

108
Basic-usage.md Normal file
View File

@@ -0,0 +1,108 @@
# Basic usage
## Choose SQLite database, CSV file, JSON or NDJSON file
You can choose a database, a CSV file, a JSON or NDJSON file right on the welcome page (fig. 1). The
supported file extensions: `.csv`, `.json`, `.ndjson`, `.db`, `.sqlite` and `.sqlite3`.
![Fig. 1: Welcome page](./img/Screenshot_welcome.png)
If you choose a CSV, JSON or NDJSON file it will be parsed. Then sqliteviz creates a new
database with data from the file. You can change table name
in the dialog which is shown automatically if you choose a CSV, JSON or NDJSON file.
For CSV you can also change parsing settings (fig. 2).
![Fig. 2: CSV import dialog](./img/Screenshot_csv.png)
If you don't have a database or a CSV or JSON/NDJSON file, press `Create empty database` button and
then fill an empty database using DDL and DML statements. A tab with an example
of how to do it will be opened automatically (fig. 3).
![Fig. 3: Workspace (empty database)](./img/Screenshot_editor.png)
Database tables, columns and their types are shown in the left panel of the
`Workspace` (fig. 4).
![Fig. 4: Workspace (not empty database)](./img/Screenshot_editor_with_db.png)
## Create inquiry
### Open new tab
Press `Create` button in the top toolbar or use `Ctrl+b` (`Cmd+b` for MacOS)
keyboard shortcut to open a new tab for an inquiry. An inquiry consists of three
parts: SQL query, result set (result of the query execution) and visualisation
of the result set. A tab consists of two panels. Each of them can show one of
the three parts of inquiry. By default, the top panel shows SQL query editor and
the bottom panel shows the result set. You can switch any panel to any other
view with the corresponding buttons in the right side toolbar.
* ![](./img/sql.svg) Switch the panel to an SQL query editor. In that
editor, you can specify and run not only a `SELECT` statement for getting data
but also DDL/DML statements for modifying the database.
* ![](./img/table.svg) Switch the panel to the result set. Here you
can see the result of the query execution.
* ![](./img/visualisation.svg) Switch the panel to visualisation. This
panel allows building a pivot table, a graph or charts from the result set.
> **Note:** The query editor can show you hints: SQL keywords, table and column
> names. In a common case, column names are shown in the hint if you specify the
> table name, e.g. `SELECT table_name.`. But if there is only one table in your
> database, column names will always be available in the hint. Press `Ctrl+Space`
> to show a hint explicitly.
### Run query
Press ![](./img/run.svg) button in the right side toolbar of an SQL
editor panel or use `Ctrl+r` or `Ctrl+Enter` (`Cmd+r` or `Cmd+Enter` for MacOS)
keyboard shortcut to execute a query in the current open tab.
> **Note:** Running is not available if a query for the current tab is not
> specified.
The query result will be displayed in the result panel (fig. 4).
### Create visualisation
After running a query switch any panel to the visualisation. Switch to the
desired type of visualisation with buttons in the right side toolbar of the
visualisation panel.
* ![](./img/chart.svg) Switch to a chart type visualisation.
* ![](./img/pivot.svg) Switch to a pivot type visualisation.
* ![](./img/graph.svg) Switch to a graph type visualisation.
A pivot table can be represented as a regular or multi-header table or
visualised with a chart.
Read more details of working with pivot in [Pivot tables][2].
There are some requirements for the result set to build a graph.
Read more in [Graph][3].
All customised charts in sqliteviz are created with a `react-chart-editor`
component (fig. 5). The same component with some additional features is used in
Plotly Chart Studio. Explore its [documentation][1] to learn how to build charts
with `react-chart-editor`.
![Fig. 5: Visualisation with a chart](./img/Screenshot_chart.png)
You can save any visualisation as an image by clicking ![](./img/camera.svg).
## Save inquiry
Press `Save` button in the top toolbar or use `Ctrl+s` (`Cmd+s` for MacOS)
keyboard shortcut to save an inquiry in the current opened tab to local storage
of your browser.
After that, the inquiry will be in the list on `Inquiries` page.
> **Note:** Only the text of the SQL query and the visualisation settings will
> be saved. The result of the query execution won't be saved.
[1]: https://plotly.com/chart-studio-help/tutorials/#basic
[2]: ../Pivot-table
[3]: ../Graph

10
Diagnostic-information.md Normal file
View File

@@ -0,0 +1,10 @@
# Diagnostic information
SQLite running in sqliteviz is compiled with particular [options][1] that can
enable or disable some SQLite features. You can get a list of them in `App
info` dialog (fig. 1) by clicking on ![](./img/info.svg) icon in the top
toolbar.
![Fig. 1: App info dialog](./img/Screenshot_app_info.png)
[1]: https://sqlite.org/compile.html

View File

@@ -1,24 +0,0 @@
# An easy way to run tests locally without Nodejs installed:
#
# docker build -t sqliteviz/test -f Dockerfile.test .
#
FROM node:12.22-bullseye
RUN set -ex; \
apt update; \
apt install -y chromium firefox-esr; \
npm install -g npm@7
WORKDIR /tmp/build
COPY package.json package-lock.json ./
COPY lib lib
RUN npm install
COPY . .
RUN set -ex; \
sed -i 's/browsers: \[.*\],/browsers: ['"'FirefoxHeadlessTouch'"'],/' karma.conf.js
RUN npm run lint -- --no-fix && npm run test

View File

@@ -0,0 +1,6 @@
# Export current database
Sqliteviz allows running not only `SELECT` statements but DML/DDL statements
too. You can save database modifications into a `.sqlite` file by clicking on
icon ![](./img/file-export.svg) next to the database name on `Workspace`
page.

185
Graph.md Normal file
View File

@@ -0,0 +1,185 @@
# Graph
Sqliteviz allows building a graph. To build a graph, run a query to get data.
Then open the visualisation panel by clicking ![](./img/visualisation.svg)
in any of the two side toolbars and choose a graph mode by clicking ![](./img/graph.svg).
## Requirements for result set
To build a graph, a result set must follow the following requirements:
- the first column must contain JSONs (the rest of the columns in the result set will be ignored)
- each JSON has a common key indicating if the record represents a node (value 0) or an edge (value 1)
- each JSON representing a node has a common key with a node id
- each JSON representing an edge has a common key with the edge source and a common key with the edge target
That is the minimum required for a graph, but each JSON can have more fields used in graph styling
(read [Graph styling](#graph-styling)).
See also an example in [How to get result set suitable for graph visualisation][1].
## Graph structure
Start building a graph by setting a mapping from your records to nodes and edge properties.
Go to `Structure` > `Graph` and set the following fields:
- Object type - a field containing 0 for node records and 1 for edge records.
- Node Id - a field containing the node identifier.
- Edge source - a field keeping a node identifier where the edge starts.
- Edge target - a field keeping a node identifier where the edge ends.
This is already enough to build a graph with default styling and circular layout.
![Fig. 1: Graph structure settings](./img/Screenshot_graph_structure.png)
## Graph styling
### General
Set a background color of the graph in `Style` > `General` panel.
### Nodes
There are the following settings in `Style` > `Nodes` panel:
- Label - a field containing a node label. Note that if the graph has too many nodes or the node size
is too small, some labels can be visible only at a certain zoom level.
- Label Color - a color of node labels
- Size - set a node size. There are 3 modes of node sizing: constant, variable and calculated.
`Constant` means that all nodes have the same size.
`Variable` allows you to choose a field where the node size will be taken.
`Calculated` allows to choose a method that will be used to calculate the node size.
Currently, 3 methods are available: total node degree, degree by in-edges and degree by out-edges.
For variable and calculated sizing it's also possible to set scale, the minimum size and the sizing mode - area or diameter.
In the diameter mode the difference between node sizes will be more noticeable.
- Color - set a node color. There are 3 modes of node color: constant, variable and calculated.
`Constant` means that all nodes have the same color.
`Variable` allows you to choose a field by which the color will be determined.
With this option you can also choose if the color value should be taken directly or mapped to a color palette.
`Direct` mode means that in JSON representing a node, the value available by the selected field will be used as a color.
The color value in the JSON can be set in different ways:
**As Hex, 8-digit (RGBA) Hex**
```
"#000"
"000"
"#369C"
"369C"
"#f0f0f6"
"f0f0f6"
"#f0f0f688"
"f0f0f688"
```
**RGB, RGBA**
```
"rgb (255, 0, 0)"
"rgb 255 0 0"
"rgba (255, 0, 0, .5)"
{ "r": 255, "g": 0, "b": 0 }
```
**HSL, HSLA**
```
"hsl(0, 100%, 50%)"
"hsla(0, 100%, 50%, .5)"
"hsl(0, 100%, 50%)"
"hsl 0 1.0 0.5"
{ "h": 0, "s": 1, "l": .5 }
```
**HSV, HSVA**
```
"hsv(0, 100%, 100%)"
"hsva(0, 100%, 100%, .5)"
"hsv (0 100% 100%)"
"hsv 0 1 1"
{ "h": 0, "s": 100, "v": 100 }
```
**Named colors**
Case-insensitive names are accepted, using the list of [colors in the CSS spec][3].
```
"RED"
"blanchedalmond"
"darkblue"
```
When `Map to` option is selected, the value by the selected field can be anything because it won't be used directly as a color.
In this case each distinct value will be mapped to a certain color, so nodes with the same value will have the same color.
Click on a color palette to open a palette selector.
`Calculated` color mode allows to choose a method that will be used to determine a color.
Currently, 3 methods are available: total node degree, degree by in-edges and degree by out-edges. You can also choose a color palette
that will be used in a mapping of calculated values into an actual color.
- Color As - defines how color mapping should work - continuously or categorically.
Continuous mode is more suitable when the mapped values have a meaningful order. It looks more informative with sequential palettes.
In that case the lowest value corresponds to the first color in the palette and the highest value - to the last color.
The color of each intermediate value reflects the position of that value in the range.
Categorical mode just uses the next color in the palette for each new distinct value.
- Colorscale Direction - use a selected palette as is or reverse it.
### Edges
There are the following settings in `Style` > `Edges` panel:
- Direction - show edges with or without arrows
- Label - a field containing an edge label. Note that if the graph has too many edges,
some labels can be visible only at a certain zoom level.
- Label Color - a color of edge labels
- Size - set an edge thickness. There are 2 modes of edge sizing: constant and variable.
`Constant` means that all edges have the same thickness.
`Variable` allows you to choose a field where the edge size will be taken.
For variable sizing it's also possible to set scale and the minimum size.
- Color - set an edge color. There are 2 modes of edge color: constant and variable. They work similar to node color modes.
- Color As - defines how color mapping should work - continuously or categorically, similar to the same node color setting.
- Colorscale Direction - use a selected palette as is or reverse it.
### Layout
#### Circular
In this layout all nodes are just placed along a circle.
#### Random
This layout places nodes randomly for each seed value. The seed value allows you to restore the random layout you liked
when you open the inquiry and run the query next time.
![Fig. 2: Random layout](./img/Screenshot_graph_random.png)
#### Circle pack
Arranges nodes as a bubble chart according to specified attributes. You can choose multiple hierarchy attributes used to group nodes.
![Fig. 3: Circle pack layout](./img/Screenshot_graph_circle_pack.png)
#### ForceAtlas2
A continuous graph layout algorithm. Read more details about the algorithm and its settings in the [article][4].
The algorithm works in iterations. When you choose ForceAtlas2 layout or run the query, it will automatically run 50 iterations of the algorithm.
You can change the amount of steps run automatically in `Initial Iterations`.
You can also run and stop the algorithm manually by clicking `Start`/`Stop` button.
![Fig. 4: ForceAtlas2 layout](./img/Screenshot_graph_force_atlas2.png)
[How to get result set suitable for graph visualisation][1] explores an example of building a query
that produces a result set suitable for graph visualisation.
[1]: ./How-to-get-result-set-suitable-for-graph-visualisation
[2]: https://github.com/bgrins/TinyColor?tab=readme-ov-file#accepted-string-input
[3]: https://www.w3.org/TR/css-color-4/#named-colors
[4]: https://journals.plos.org/plosone/article?id=10.1371/journal.pone.0098679

36
Home.md Normal file
View File

@@ -0,0 +1,36 @@
Welcome to the sqliteviz wiki!
# For users
1. [Installation][0]
2. [Basic usage][1]
3. [Multiple file import][9]
4. [Manage inquiries][2]
5. [Export current database][4]
6. [Graph][13]
7. [Pivot table][3]
8. [Predefined inquiries][5]
9. [Sharing][11]
10. [Diagnostic information][6]
## Examples and tutorials
1. [How to rename tables and columns after CSV import][8]
2. [How to build a pivot table in SQL(ite)][10]
3. [How to rename tables and columns after CSV import][12]
# For developers
1. [Integrate predefined inquiries][7]
[0]: Installation
[1]: Basic-usage
[2]: Manage-inquiries
[3]: Pivot-table
[4]: Export-current-database
[5]: Predefined-inquiries
[6]: Diagnostic-information
[7]: Integrate-predefined-inquiries
[8]: How-to-rename-tables-and-columns-after-CSV-import
[9]: Multiple-CSV-file-import
[10]: How-to-build-a-pivot-table-in-SQLite
[11]: Sharing
[12]: How-to-rename-tables-and-columns-after-csv-import
[13]: Graph

View File

@@ -0,0 +1,137 @@
# How to build a pivot table in SQLite
This how-to explores how to build pivot tables in SQLite, which doesn't have
special constructs like `PIVOT` or `CROSSTAB` in its SQL dialect.
## Static-column pivot table
If the columns of a pivot table are known beforehand, it's possible to write a
standard, say SQL-92, query that would produce a pivot table in its result set.
This example uses World Bank [country indicators][1]. This query calculates
average fertility and life expectancy for a few country groups.
```sql
SELECT
"Country Name",
AVG(IIF(
FLOOR(year / 10) = 196 AND "Indicator Name" LIKE 'Fertility rate%',
value,
NULL
)) AS "FR 196x",
AVG(IIF(
FLOOR(year / 10) = 196 AND "Indicator Name" LIKE 'Life expectancy%',
value,
NULL
)) AS "LE 196x",
AVG(IIF(
FLOOR(year / 10) = 197 AND "Indicator Name" LIKE 'Fertility rate%',
value,
NULL
)) AS "FR 197x",
AVG(IIF(
FLOOR(year / 10) = 197 AND "Indicator Name" LIKE 'Life expectancy%',
value,
NULL
)) AS "LE 197x",
AVG(IIF(
FLOOR(year / 10) = 198 AND "Indicator Name" LIKE 'Fertility rate%',
value,
NULL
)) AS "FR 198x",
AVG(IIF(
FLOOR(year / 10) = 198 AND "Indicator Name" LIKE 'Life expectancy%',
value,
NULL
)) AS "LE 198x",
AVG(IIF(
FLOOR(year / 10) = 199 AND "Indicator Name" LIKE 'Fertility rate%',
value,
NULL
)) AS "FR 199x",
AVG(IIF(
FLOOR(year / 10) = 199 AND "Indicator Name" LIKE 'Life expectancy%',
value,
NULL
)) AS "LE 199x",
AVG(IIF(
FLOOR(year / 10) = 200 AND "Indicator Name" LIKE 'Fertility rate%',
value,
NULL
)) AS "FR 200x",
AVG(IIF(
FLOOR(year / 10) = 200 AND "Indicator Name" LIKE 'Life expectancy%',
value,
NULL
)) AS "LE 200x"
FROM country_indicators
WHERE "Country Name" IN(
'Arab World',
'Central Europe and the Baltics',
'East Asia & Pacific',
'European Union',
'Latin America & Caribbean',
'High income',
'Middle income',
'Low income'
)
GROUP BY 1
ORDER BY
CASE "Country Name"
WHEN 'High income' THEN 1
WHEN 'Middle income' THEN 2
WHEN 'Low income' THEN 3
WHEN 'European Union' THEN 4
WHEN 'Central Europe and the Baltics' THEN 5
WHEN 'East Asia & Pacific' THEN 6
WHEN 'Latin America & Caribbean' THEN 7
WHEN 'Arab World' THEN 8
ELSE 99
END
```
## Dynamic-column pivot table
SQLite in sqliteviz is built with [pivot_vtab][2] extension. The same result set
can be produced with this, arguably simpler and more maintainable, query.
```sql
CREATE VIRTUAL TABLE temp.pivot USING pivot_vtab(
(
WITH t(country_name) AS (VALUES
('High income'),
('Middle income'),
('Low income'),
('European Union'),
('Central Europe and the Baltics'),
('East Asia & Pacific'),
('Latin America & Caribbean'),
('Arab World')
)
SELECT country_name FROM t
),
(
SELECT
FLOOR(year / 10) || '|' || "Indicator Name" column_key,
CASE
WHEN "Indicator Name" LIKE 'Fertility rate%' THEN 'FR'
WHEN "Indicator Name" LIKE 'Life expectancy%' THEN 'LE'
END || ' ' || FLOOR(year / 10) || 'x' column_name
FROM country_indicators
WHERE
"Indicator Name" LIKE 'Fertility rate%'
OR "Indicator Name" LIKE 'Life expectancy%'
GROUP BY 1
),
(
SELECT AVG(value)
FROM country_indicators
WHERE
"Country Name" = :country_name
AND FLOOR(year / 10) || '|' || "Indicator Name" = :column_key
)
);
SELECT * FROM pivot
```
[1]: https://github.com/plotly/datasets/blob/master/country_indicators.csv
[2]: https://github.com/jakethaw/pivot_vtab

View File

@@ -0,0 +1,112 @@
# How to get result set suitable for graph visualisation
There are some [requirements for result sets][1] if you want to build a graph.
Here is an example of building a query that returns a result set appropriate for graph visualisation.
Let's say, you have 2 tables:
house:
| name | points |
| ---------- | ------ |
| Gryffindor | 100 |
| Hufflepuff | 90 |
| Ravenclaw | 95 |
| Slytherin | 80 |
student:
| id | name | house |
| -- | -------------- | ---------- |
| 1 | Harry Potter | Gryffindor |
| 2 | Ron Weasley | Gryffindor |
| 3 | Draco Malfoy | Slytherin |
| 4 | Luna Lovegood | Ravenclaw |
| 5 | Cedric Diggory | Hufflepuff |
Each student belongs to a certain house.
Let's say you want to build a graph with houses and students as nodes, where each house is linked with its students.
We are going to use [json_object][2] function to form JSONs. The result set should contain both nodes and edges
and we have to provide a field indicating if the record represents a node (0) or an edge (1).
Let's provide it as 'object_type':
```sql
SELECT json_object('object_type', 0)
FROM house
UNION ALL
SELECT json_object('object_type', 0)
FROM student
UNION ALL
SELECT json_object('object_type', 1)
FROM student
```
Note that we included `student` table twice. That is because the table contains not only students but also their relationship to houses.
So the records from the first union of `student` will be used as nodes and from the second one - as edges.
Then we need to provide an ID for each node. Let's put it in `node_id` field.
The `node_id` value for students is taken from `id` column and for houses - from `name`:
```sql
SELECT json_object('object_type', 0, 'node_id', name)
FROM house
UNION ALL
SELECT json_object('object_type', 0, 'node_id', id)
FROM student
UNION ALL
SELECT json_object('object_type', 1)
FROM student
```
Each edge record must provide a node id where the edge starts and where it ends. Let's put it in `source` and `target`:
```sql
SELECT json_object('object_type', 0, 'node_id', name)
FROM house
UNION ALL
SELECT json_object('object_type', 0, 'node_id', id)
FROM student
UNION ALL
SELECT json_object('object_type', 1, 'source', house, 'target', id)
FROM student
```
Basically, that is enough to build a graph. But it won't be meaningful without labels.
Also, it would be nice to distinguish house nodes from student nodes by color.
Let's put additional fields `label` and `type` that can be used in graph styling.
```sql
SELECT json_object('object_type', 0, 'node_id', name, 'label', name, 'type', 'house')
FROM house
UNION ALL
SELECT json_object('object_type', 0, 'node_id', id, 'label', name, 'type', 'student')
FROM student
UNION ALL
SELECT json_object('object_type', 1, 'source', house, 'target', id)
FROM student
```
Run the query, the result set will look like this:
| json_object('object_type', 0, 'node_id', name, 'label', name, 'type', 'house') |
| ------------------------------------------------------------------------------ |
| {"object_type":0,"node_id":"Gryffindor","label":"Gryffindor","type":"house"} |
| {"object_type":0,"node_id":"Hufflepuff","label":"Hufflepuff","type":"house"} |
| {"object_type":0,"node_id":"Ravenclaw","label":"Ravenclaw","type":"house"} |
| {"object_type":0,"node_id":"Slytherin","label":"Slytherin","type":"house"} |
| {"object_type":0,"node_id":1,"label":"Harry Potter","type":"student"} |
| {"object_type":0,"node_id":2,"label":"Ron Weasley","type":"student"} |
| {"object_type":0,"node_id":3,"label":"Draco Malfoy","type":"student"} |
| {"object_type":0,"node_id":4,"label":"Luna Lovegood","type":"student"} |
| {"object_type":0,"node_id":5,"label":"Cedric Diggory","type":"student"} |
| {"object_type":1,"source":"Gryffindor","target":1} |
| {"object_type":1,"source":"Gryffindor","target":2} |
| {"object_type":1,"source":"Slytherin","target":3} |
| {"object_type":1,"source":"Ravenclaw","target":4} |
| {"object_type":1,"source":"Hufflepuff","target":5} |
Now in the graph editor, we can set mapping of the result set records into node and edge properties,
set graph styles and get the following visualisation:
![Fig. 1: Graph visualisation example](./img/Screenshot_potter_example.png)
[1]: ./graph#requirements-for-result-set
[2]: https://sqlite.org/json1.html#jobj

View File

@@ -0,0 +1,26 @@
# How to migrate to sqliteviz.com
All upcoming sqliteviz releases will be deployed on sqliteviz.com instead of
[Github Pages](https://lana-k.github.io/sqliteviz/#/). The saved inquiries that
you still care about can be imported to sqliteviz.com as usual.
## Step 1: export inquiries
- Click `Inquiries` in the main top menu
- Select all inquiries by clicking the checkbox in the table header
- Click `Export` and save your inquiries in a JSON file
## Step 2: uninstall sqliteviz
If you installed sqliteviz as a PWA, uninstall it before installing the new version.
- Go to `chrome://apps/` in your browser
- Right click on sqliteviz icon
- Choose `Remove from Chrome`
## Step 3: import inquiries
- Go to [https://sqliteviz.com/app/#/workspace](https://sqliteviz.com/app/#/workspace)
- Click `Inquiries` in the main top menu
- Click `Import`
- Choose the file where you exported your inquiries

View File

@@ -0,0 +1,54 @@
# How to rename tables and columns after CSV import
## Rename columns
If sqliteviz parses CSV without `Use first row as column headers` option then
it will name the columns like `col1`, `col2` etc. You can easily rename the
columns after import with `ALTER TABLE` statements like this:
```sql
ALTER TABLE your_table_name
RENAME COLUMN current_column_name TO new_column_name;
```
### Column rename example
There is a table `dots` with columns `col1`, `col2`, `col3`. Here are the steps
to rename the columns to `x`, `y` and `z` respectively:
- Click `Create` in the top toolbar
- In the opened query editor write a script
```sql
ALTER TABLE dots
RENAME COLUMN col1 TO x;
ALTER TABLE dots
RENAME COLUMN col2 TO y;
ALTER TABLE dots
RENAME COLUMN col3 TO z;
```
- Click ![](./img/run.svg) to run the script
## Rename table
```sql
ALTER TABLE current_table_name
RENAME TO new_table_name;
```
### Table rename example
There is a table `dots`. Here are the steps to rename it to `point`:
- Click `Create` in the top toolbar
- In the opened query editor write a script
```sql
ALTER TABLE dots RENAME TO point
```
- Click ![](./img/run.svg) to run the script

16
Installation.md Normal file
View File

@@ -0,0 +1,16 @@
# Installation
The latest release of sqliteviz is running on [sqliteviz.com/app][1].
Basically, you don't need to install sqliteviz. But if you want you can install
it and use it as a regular desktop application with the following steps:
1. Open sqliteviz with [Chrome browser][2]
2. At the top right of the address bar, click `Install`
3. Confirm installation
See more about PWA on [Google Chrome Help][3].
[1]: https://sqliteviz.com/app/
[2]: https://www.google.com/chrome
[3]: https://support.google.com/chrome/answer/9658361?hl=en&ref_topic=7439636

View File

@@ -0,0 +1,12 @@
# Integrate predefined inquiries
If you run sqliteviz on your own server you can specify predefined inquiries.
These inquiries will appear in `Inquiries` list for all users working with
sqliteviz on your server.
To create a list of predefined inquiries choose inquiries in `Inquiries` list
and export them to `inquiries.json`. Then (re)place this file on the server in
the same directory as `index.html`.
A user can't edit, rename or delete a predefined inquiry. The rest of the
operations are available.

201
LICENSE
View File

@@ -1,201 +0,0 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

39
Manage-inquiries.md Normal file
View File

@@ -0,0 +1,39 @@
# Manage inquiries
## Organise
You can find all inquiries that you saved in `Inquiries` (fig. 1).
![Fig. 1: Inquiries](./img/Screenshot_my_inquiries.png)
To manipulate one inquiry hover the cursor over the row with the inquiry and
choose the action:
* ![](./img/rename.svg) rename an inquiry
* ![](./img/copy.svg) duplicate an inquiry
* ![](./img/file-export.svg) export an inquiry to JSON file
* ![](./img/delete.svg) delete an inquiry
To edit a query or visualisation settings of an inquiry click on the respective
row. You will be redirected to `Workspace` where the chosen inquiry will be
opened in a tab.
> **Note:** After opening an inquiry there will be no visualisation for it even
> if you specified it and saved. That is so because there is no data to build
> the visualisation. Run the query and all saved chart settings will be applied.
You can also delete or export a group of inquiries to a JSON file. Select
inquiries with checkboxes and press `Delete`/`Export` button above the grid
(fig. 2).
![Fig. 2: Inquiries: a group of inquiries is selected](./img/Screenshot_group.png)
> **Note:** Some operations are not available for predefined inquiries. Read
> [Predefined inquiries][1] for details.
## Import
Click `Import` button on `Inquiries` page to import inquiries from a JSON file
generated by export.
[1]: ../Predefined-inquiries

View File

@@ -0,0 +1,12 @@
# Multiple CSV, JSON or NDJSON file import
Sometimes it's useful to import several files as tables in one database. For
example, to be able to `JOIN` them in SQL.
In sqliteviz you can not only create a database from a CSV, JSON or NDJSON file, but also add
another table from CSV, JSON or NDJSON to the existing database.
- Click ![](./img/add-csv.svg) icon in the schema panel on `Workspace`
page
- Choose a file
- Import it with `import` dialog.

54
Pivot-table.md Normal file
View File

@@ -0,0 +1,54 @@
# Pivot table
## Pivot table UI
Sqliteviz allows building pivot tables and visualizing them. To build a pivot
run a query to get data. Then open visualisation panel by clicking ![](./img/visualisation.svg)
in any of the two side toolbars and choose a pivot mode by clicking ![](./img/pivot.svg).
A pivot visualisation has the following settings:
- Columns — choose one or more column names from the result set. The values in
the chosen columns will be column names of the pivot table.
- Rows — choose one or more column names from the result set. The values in the
chosen columns will be row names of the pivot table.
- Order of columns and rows.
- Aggregator and its arguments — a function which will be used for pivot cell
calculation. An aggregator can have from zero to two arguments. An aggregator
argument is one of the columns of the result set.
- View — pivot table visualisation. It can be a table, a heatmap, a chart,
etc. See some examples of different views of the same pivot table below.
![Fig. 1: Table](./img/Screenshot_pivot_table.png)
![Fig. 2: Heatmap](./img/Screenshot_pivot_heatmap.png)
![Fig. 3: Horizontal Stacked Bar Chart](./img/Screenshot_pivot_barchart.png)
There are several built-in chart views for a pivot. But you can build your own
with `Custom chart` view (fig. 4).
![Fig. 4: Custom Chart](./img/Screenshot_pivot_custom_chart.png)
> **Note:** You can switch to other pivot views and back to `Custom chart` —
> all your custom chart settings will be remembered. But if you switch the
> visualisation mode from pivot to any other mode, unsaved changes will be lost.
You can save any visualisation as an image by clicking ![](./img/camera.svg).
## Pivot table SQL
Pivot table (in the form of a result set) can be built on the SQL-level and,
technically speaking, can be visualised as any other result set. Practically
though there are a couple of challenges with that:
1. Visualising a dataset of long/tall shape is much more convenient in Plotly
(chart editor) rather than one of wide/fat shape.
2. SQLite doesn't have special constructs like `PIVOT` or `CROSSTAB` in
its SQL dialect.
[How to build a pivot table in SQL(ite)][1] explores two options with static
(or beforehand-known) and dynamic columns.
[1]: ../How-to-build-a-pivot-table-in-SQ-Lite

16
Predefined-inquiries.md Normal file
View File

@@ -0,0 +1,16 @@
# Predefined inquiries
Predefined inquiries come out of the box on the sqliteviz instance. In sqliteviz
deployed on [Github Pages][1] there are no predefined inquiries, but you may
see them if you use sqliteviz integrated into another app.
These inquiries are shown in the `Inquiries` list with a special label on mouse
over (fig. 1).
![Fig. 1: Predefined inquiry](./img/Screenshot_predefined.png)
As a user, you can't rename or delete a predefined inquiry. You can't save
changes in a predefined inquiry, but you can save them as a new inquiry. The
rest of the operations, copy and export, are available.
[1]: https://lana-k.github.io/sqliteviz

View File

@@ -1,49 +0,0 @@
<p align="center">
<img src="src/assets/images/Logo.svg"/>
</p>
# sqliteviz
Sqliteviz is a single-page offline-first PWA for fully client-side visualisation
of SQLite databases, CSV, JSON or NDJSON files.
With sqliteviz you can:
- run SQL queries against a SQLite database and create [Plotly][11] charts and pivot tables based on the result sets
- import a CSV/JSON/NDJSON file into a SQLite database and visualize imported data
- export result set to CSV file
- manage inquiries and run them against different databases
- import/export inquiries from/to a JSON file
- export a modified SQLite database
- use it offline from your OS application menu like any other desktop app
https://user-images.githubusercontent.com/24638357/128249848-f8fab0f5-9add-46e0-a9c1-dd5085a8623e.mp4
## Quickstart
The latest release of sqliteviz is deployed on [sqliteviz.com/app][6].
## Wiki
For user documentation, check out sqliteviz [documentation][7].
## Motivation
It's a kind of middleground between [Plotly Falcon][1] and [Redash][2].
## Components
It is built on top of [react-chart-editor][3], [PivotTable.js][12], [sql.js][4] and [Vue-Codemirror][8] in [Vue.js][5]. CSV parsing is performed with [Papa Parse][9].
[1]: https://github.com/plotly/falcon
[2]: https://github.com/getredash/redash
[3]: https://github.com/plotly/react-chart-editor
[4]: https://github.com/sql-js/sql.js
[5]: https://github.com/vuejs/vue
[6]: https://sqliteviz.com/app/
[7]: https://sqliteviz.com/docs
[8]: https://github.com/surmon-china/vue-codemirror#readme
[9]: https://www.papaparse.com/
[10]: https://github.com/lana-k/sqliteviz/wiki/Predefined-queries
[11]: https://github.com/plotly/plotly.js
[12]: https://github.com/nicolaskruchten/pivottable

25
Sharing.md Normal file
View File

@@ -0,0 +1,25 @@
# Sharing
You can generate a share link. That allows you to share not only inquiries but
also a database and inquiry display settings. Sqliteviz will automatically load
the database and inquiries, open them and run the query for the first inquiry.
The share link can have the following query parameters:
| Parameter | Values | Description |
|--------------|----------------------------------|-------------|
| `data_url`   |                                  | A URL to an SQLite database file. If not provided sqliteviz will run inquiries against an empty database.|
| `data_format`|`sqlite` | Currently share links support only "sqlite" data format.
| `inquiry_url`|                                  | A URL to an inquiry JSON file (you can make that file with inquiry export; see [Manage inquiries][3]).|
| `inquiry_id` |                                  | If `inquiry_id` is provided (it can occur multiple times) sqliteviz will load only inquiries with the provided IDs. If not provided it will get them all.|
| `maximize` | `table`, `sqlEditor`, `dataView` | Specify which panel should be maximised for each inquiry. If not provided the inquiries will be opened in the default state: SQL editor on top and result set at the bottom.|
> **Note:** the server where you host your database or inquiry files must allow
cross-origin access. For example you can place your files on [GitHub Gist][1].
You can check your URLs with [CORS tester][2].
Use the following form to generate a share link:
[1]: https://gist.github.com/
[2]: https://cors-test.codehappy.dev/?origin=https%3A%2F%2Fsqliteviz.com%2F&method=get
[3]: ../Manage-inquiries

View File

@@ -1,3 +0,0 @@
module.exports = {
presets: ['@vue/cli-plugin-babel/preset']
}

BIN
img/Screenshot_app_info.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 22 KiB

BIN
img/Screenshot_chart.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 94 KiB

BIN
img/Screenshot_csv.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 41 KiB

BIN
img/Screenshot_editor.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 66 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 76 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 306 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 377 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 491 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 327 KiB

BIN
img/Screenshot_group.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 28 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 26 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 57 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 125 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 113 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 84 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 79 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 33 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 23 KiB

BIN
img/Screenshot_result.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 74 KiB

BIN
img/Screenshot_welcome.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 43 KiB

10
img/add-csv.svg Normal file
View File

@@ -0,0 +1,10 @@
<svg width="18" height="18" viewBox="0 0 18 18" fill="none" xmlns="http://www.w3.org/2000/svg">
<g clip-path="url(#clip0)">
<path fill-rule="evenodd" clip-rule="evenodd" d="M13.6573 1.5H2.59985C1.77485 1.5 1.09985 2.175 1.09985 3V13.6649C1.09985 14.4899 1.77485 15.1649 2.59985 15.1649H9.84V13.6649H8.87866V9.08244H13.6573V9.83777H15.1573V3C15.1573 2.17 14.4873 1.5 13.6573 1.5ZM13.6573 7.58244V3H8.87866V7.58244H13.6573ZM7.37866 3H2.59985V7.58244H7.37866V3ZM2.59985 9.08244V13.6649H7.37866V9.08244H2.59985ZM13.1702 10.8434H15.6702V13.1717H18.0001V15.6717H15.6702V18H13.1702V15.6717H10.8401V13.1717H13.1702V10.8434Z" fill="#A2B1C6"/>
</g>
<defs>
<clipPath id="clip0">
<rect width="18" height="18" fill="white"/>
</clipPath>
</defs>
</svg>

After

Width:  |  Height:  |  Size: 739 B

5
img/camera.svg Normal file
View File

@@ -0,0 +1,5 @@
<svg width="18" height="18" viewBox="0 0 18 18" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M9 5.51953C6.57686 5.51953 4.60547 7.49092 4.60547 9.91406C4.60547 12.3372 6.57686 14.3086 9 14.3086C11.4231 14.3086 13.3945 12.3372 13.3945 9.91406C13.3945 7.49092 11.4231 5.51953 9 5.51953ZM9 12.9023C7.35226 12.9023 6.01172 11.5618 6.01172 9.91406C6.01172 8.26632 7.35226 6.92578 9 6.92578C10.6477 6.92578 11.9883 8.26632 11.9883 9.91406C11.9883 11.5618 10.6477 12.9023 9 12.9023Z" fill="#A2B1C6"/>
<path d="M15.8906 3.41016H13.304C13.2221 3.41016 13.1483 3.36547 13.1104 3.29319L12.3948 1.78945C12.3928 1.78534 12.3908 1.78126 12.3887 1.77718C12.1117 1.22312 11.5548 0.878906 10.9353 0.878906H7.11478C6.49529 0.878906 5.93835 1.22312 5.66135 1.77722C5.65928 1.7813 5.65731 1.78538 5.65534 1.78949L4.9397 3.2933C4.90173 3.36547 4.82797 3.41016 4.74609 3.41016H2.10938C0.946266 3.41016 0 4.35642 0 5.51953V15.0117C0 16.1748 0.946266 17.1211 2.10938 17.1211H15.8906C17.0537 17.1211 18 16.1748 18 15.0117V5.51953C18 4.35642 17.0537 3.41016 15.8906 3.41016ZM16.5938 15.0117C16.5938 15.3994 16.2783 15.7148 15.8906 15.7148H2.10938C1.72167 15.7148 1.40625 15.3994 1.40625 15.0117V5.51953C1.40625 5.13183 1.72167 4.81641 2.10938 4.81641H4.74609C5.36555 4.81641 5.92249 4.47223 6.19952 3.91816C6.2016 3.91409 6.20357 3.90997 6.20557 3.90586L6.92121 2.40205C6.95914 2.32984 7.0329 2.28516 7.11478 2.28516H10.9353C11.0172 2.28516 11.091 2.32984 11.1289 2.40202L11.8445 3.90582C11.8465 3.90994 11.8485 3.91405 11.8506 3.91813C12.1276 4.47219 12.6846 4.81637 13.304 4.81637H15.8906C16.2783 4.81637 16.5938 5.13179 16.5938 5.5195V15.0117Z" fill="#A2B1C6"/>
<path d="M15.1875 6.22266H13.7812V7.62891H15.1875V6.22266Z" fill="#A2B1C6"/>
</svg>

After

Width:  |  Height:  |  Size: 1.7 KiB

8
img/chart.svg Normal file
View File

@@ -0,0 +1,8 @@
<svg width="19" height="18" viewBox="0 0 19 18" fill="none" xmlns="http://www.w3.org/2000/svg">
<path fill-rule="evenodd" clip-rule="evenodd" d="M8.41943 16V10H10.4194V16H8.41943Z" fill="#A2B1C6"/>
<path fill-rule="evenodd" clip-rule="evenodd" d="M2.41943 16L2.41943 10H4.41943V16H2.41943Z" fill="#A2B1C6"/>
<path fill-rule="evenodd" clip-rule="evenodd" d="M11.4194 16V7H13.4194V16H11.4194Z" fill="#A2B1C6"/>
<path fill-rule="evenodd" clip-rule="evenodd" d="M14.4194 16V8H16.4194V16H14.4194Z" fill="#A2B1C6"/>
<path fill-rule="evenodd" clip-rule="evenodd" d="M7.41943 12V16H5.41943V12H7.41943Z" fill="#A2B1C6"/>
<path fill-rule="evenodd" clip-rule="evenodd" d="M12.4332 1.80676L16.6265 6.00001L15.9194 6.70712L12.4055 3.19326L5.93169 9.1691L1.71436 5.55424L2.36515 4.79499L5.90707 7.83092L12.4332 1.80676Z" fill="#A2B1C6"/>
</svg>

After

Width:  |  Height:  |  Size: 831 B

View File

Before

Width:  |  Height:  |  Size: 643 B

After

Width:  |  Height:  |  Size: 643 B

View File

Before

Width:  |  Height:  |  Size: 447 B

After

Width:  |  Height:  |  Size: 447 B

View File

Before

Width:  |  Height:  |  Size: 346 B

After

Width:  |  Height:  |  Size: 346 B

8
img/graph.svg Normal file
View File

@@ -0,0 +1,8 @@
<svg width="18" height="18" viewBox="0 0 18 18" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M5 4C5 5.10457 4.10457 6 3 6C1.89543 6 1 5.10457 1 4C1 2.89543 1.89543 2 3 2C4.10457 2 5 2.89543 5 4Z" fill="#A2B1C6"/>
<path d="M17 7.5C17 8.88071 15.8807 10 14.5 10C13.1193 10 12 8.88071 12 7.5C12 6.11929 13.1193 5 14.5 5C15.8807 5 17 6.11929 17 7.5Z" fill="#A2B1C6"/>
<path d="M8 13.5C8 14.8807 6.88071 16 5.5 16C4.11929 16 3 14.8807 3 13.5C3 12.1193 4.11929 11 5.5 11C6.88071 11 8 12.1193 8 13.5Z" fill="#A2B1C6"/>
<path d="M2.93128 5.31436L3.90527 5.08778L5.48693 11.8867L4.51294 12.1133L2.93128 5.31436Z" fill="#A2B1C6"/>
<path d="M12.9447 7.79159L13.5548 8.58392L7.30516 13.3962L6.69507 12.6038L12.9447 7.79159Z" fill="#A2B1C6"/>
<path d="M14.1316 6.51712L3.13166 3.51723L2.86844 4.48202L13.8684 7.48191L14.1316 6.51712Z" fill="#A2B1C6"/>
</svg>

After

Width:  |  Height:  |  Size: 858 B

View File

Before

Width:  |  Height:  |  Size: 754 B

After

Width:  |  Height:  |  Size: 754 B

6
img/pivot.svg Normal file
View File

@@ -0,0 +1,6 @@
<svg width="19" height="18" viewBox="0 0 19 18" fill="none" xmlns="http://www.w3.org/2000/svg">
<path fill-rule="evenodd" clip-rule="evenodd" d="M15.7661 5.13901L18.3407 9.43008H16.5161V12.8467C16.5161 13.7957 16.2783 14.6451 15.6714 15.2521C15.0645 15.859 14.215 16.0967 13.2661 16.0967H9.84942V17.9214L5.55835 15.3467L9.84942 12.7721V14.5967H13.2661C13.9838 14.5967 14.3844 14.4178 14.6108 14.1914C14.8372 13.965 15.0161 13.5645 15.0161 12.8467V9.43008H13.1914L15.7661 5.13901Z" fill="#A2B1C6"/>
<path d="M6.41943 0H18.4194V4H6.41943V0Z" fill="#A2B1C6"/>
<path d="M0.419434 6H4.41943V18H0.419434V6Z" fill="#A2B1C6"/>
<path d="M0.419434 0H4.41943V4H0.419434V0Z" fill="#A2B1C6"/>
</svg>

After

Width:  |  Height:  |  Size: 687 B

View File

Before

Width:  |  Height:  |  Size: 444 B

After

Width:  |  Height:  |  Size: 444 B

3
img/run.svg Normal file
View File

@@ -0,0 +1,3 @@
<svg width="12" height="13" viewBox="0 0 12 13" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M11.1624 6.94358L0.770043 12.9436L0.770043 0.943573L11.1624 6.94358Z" fill="#A2B1C6"/>
</svg>

After

Width:  |  Height:  |  Size: 199 B

11
img/sql.svg Normal file
View File

@@ -0,0 +1,11 @@
<svg width="18" height="19" viewBox="0 0 18 19" fill="none" xmlns="http://www.w3.org/2000/svg">
<g clip-path="url(#clip0)">
<path d="M4.5 1.51343H10.5L15 6.01343V8.45284H13.5V6.76343H9.75V3.01343H4.5V8.45284H3V3.01343C3 2.18843 3.675 1.51343 4.5 1.51343Z" fill="#A2B1C6"/>
<path d="M4.28369 14.8127C4.28369 14.5872 4.20312 14.4114 4.04199 14.2854C3.88379 14.1594 3.604 14.0291 3.20264 13.8943C2.80127 13.7595 2.47314 13.6292 2.21826 13.5032C1.38916 13.0959 0.974609 12.5364 0.974609 11.8245C0.974609 11.47 1.07715 11.158 1.28223 10.8884C1.49023 10.616 1.7832 10.405 2.16113 10.2556C2.53906 10.1033 2.96387 10.0271 3.43555 10.0271C3.89551 10.0271 4.30713 10.1091 4.67041 10.2732C5.03662 10.4373 5.3208 10.6716 5.52295 10.9763C5.7251 11.2781 5.82617 11.6238 5.82617 12.0134H4.28809C4.28809 11.7527 4.20752 11.5505 4.04639 11.407C3.88818 11.2634 3.67285 11.1917 3.40039 11.1917C3.125 11.1917 2.90674 11.2532 2.74561 11.3762C2.5874 11.4963 2.5083 11.6501 2.5083 11.8376C2.5083 12.0017 2.59619 12.1511 2.77197 12.2859C2.94775 12.4177 3.25684 12.5554 3.69922 12.699C4.1416 12.8396 4.50488 12.9919 4.78906 13.156C5.48047 13.5544 5.82617 14.1038 5.82617 14.804C5.82617 15.3635 5.61523 15.803 5.19336 16.1223C4.77148 16.4417 4.19287 16.6013 3.45752 16.6013C2.93896 16.6013 2.46875 16.509 2.04688 16.3245C1.62793 16.137 1.31152 15.8821 1.09766 15.5598C0.886719 15.2346 0.78125 14.8611 0.78125 14.4392H2.32812C2.32812 14.782 2.41602 15.0354 2.5918 15.1995C2.77051 15.3606 3.05908 15.4412 3.45752 15.4412C3.7124 15.4412 3.91309 15.387 4.05957 15.2786C4.20898 15.1672 4.28369 15.012 4.28369 14.8127ZM12.0444 13.446C12.0444 14.0378 11.9463 14.5549 11.75 14.9973C11.5537 15.4368 11.2827 15.7898 10.937 16.0564L11.9697 16.8738L11.0161 17.6824L9.64062 16.575C9.51172 16.5925 9.38281 16.6013 9.25391 16.6013C8.70898 16.6013 8.22559 16.4753 7.80371 16.2234C7.38184 15.9714 7.05225 15.6111 6.81494 15.1423C6.58057 14.6736 6.45898 14.1345 6.4502 13.5251V13.1868C6.4502 12.5569 6.56445 12.0032 6.79297 11.5256C7.02441 11.0481 7.35254 10.679 7.77734 10.4182C8.20508 10.1575 8.69434 10.0271 9.24512 10.0271C9.78711 10.0271 10.2705 10.156 10.6953 10.4138C11.1201 10.6716 11.4497 11.0393 11.6841 11.5168C11.9214 11.9915 12.0415 12.5364 12.0444 13.1516V13.446ZM10.4756 13.178C10.4756 12.5422 10.3687 
12.0603 10.1548 11.7322C9.94385 11.4011 9.64062 11.2356 9.24512 11.2356C8.83789 11.2356 8.53174 11.3982 8.32666 11.7234C8.12158 12.0486 8.01758 12.5247 8.01465 13.1516V13.446C8.01465 14.0759 8.11865 14.5593 8.32666 14.8962C8.53467 15.2302 8.84375 15.3972 9.25391 15.3972C9.64648 15.3972 9.94678 15.2317 10.1548 14.9006C10.3657 14.5696 10.4727 14.0935 10.4756 13.4724V13.178ZM14.3735 15.3269H17.0586V16.5134H12.8311V10.115H14.3735V15.3269Z" fill="#A2B1C6"/>
</g>
<defs>
<clipPath id="clip0">
<rect width="18" height="18" fill="white" transform="translate(0 0.0134277)"/>
</clipPath>
</defs>
</svg>

After

Width:  |  Height:  |  Size: 2.8 KiB

7
img/table.svg Normal file
View File

@@ -0,0 +1,7 @@
<svg width="19" height="19" viewBox="0 0 19 19" fill="none" xmlns="http://www.w3.org/2000/svg">
<path fill-rule="evenodd" clip-rule="evenodd" d="M15.0286 2.66757H3.97119C3.14619 2.66757 2.47119 3.34257 2.47119 4.16757V14.8324C2.47119 15.6574 3.14619 16.3324 3.97119 16.3324H15.0286C15.8613 16.3324 16.5286 15.6576 16.5286 14.8324V4.16757C16.5286 3.33757 15.8586 2.66757 15.0286 2.66757ZM15.03 4.16757H3.96997V14.8276H15.03V4.16757Z" fill="#A2B1C6"/>
<line x1="7.52295" y1="3.81403" x2="7.52295" y2="15.1996" stroke="#A2B1C6" stroke-width="1.5"/>
<line x1="11.5137" y1="3.80042" x2="11.5137" y2="15.1859" stroke="#A2B1C6" stroke-width="1.5"/>
<line x1="2.98926" y1="7.22595" x2="16.0105" y2="7.22595" stroke="#A2B1C6" stroke-width="1.5"/>
<line x1="2.98926" y1="11.3483" x2="16.0105" y2="11.3483" stroke="#A2B1C6" stroke-width="1.5"/>
</svg>

After

Width:  |  Height:  |  Size: 841 B

12
img/visualisation.svg Normal file
View File

@@ -0,0 +1,12 @@
<svg width="19" height="19" viewBox="0 0 19 19" fill="none" xmlns="http://www.w3.org/2000/svg">
<g clip-path="url(#clip0)">
<path d="M2.03345 13.6725L5.8465 10.2694L9.25264 13.2145L14.0613 8.16519L17.2235 11.38" stroke="#A2B1C6" stroke-width="1.5" stroke-linecap="round" stroke-linejoin="round"/>
<circle cx="6.00049" cy="6.50339" r="1.5" fill="#A2B1C6"/>
<path fill-rule="evenodd" clip-rule="evenodd" d="M16.536 1.71786L2.46546 1.71298C1.64046 1.71298 0.965462 2.38798 0.965462 3.21298L0.964111 15.7821C0.964111 16.6071 1.63911 17.2821 2.46411 17.2821L16.5347 17.287C17.3674 17.287 18.0347 16.6121 18.0347 15.787L18.036 3.21786C18.036 2.38786 17.366 1.71786 16.536 1.71786ZM16.5374 3.21786L2.46424 3.21298L2.46289 15.7772L16.536 15.7821L16.5374 3.21786Z" fill="#A2B1C6"/>
</g>
<defs>
<clipPath id="clip0">
<rect width="18" height="18" fill="white" transform="translate(0.5 0.5)"/>
</clipPath>
</defs>
</svg>

After

Width:  |  Height:  |  Size: 909 B

View File

@@ -1,98 +0,0 @@
<!doctype html>
<html lang="en">
<head>
<meta charset="utf-8" />
<meta http-equiv="X-UA-Compatible" content="IE=edge" />
<meta name="viewport" content="width=device-width,initial-scale=1.0" />
<link rel="icon" href="favicon.png" />
<link rel="manifest" href="manifest.webmanifest" />
<title>sqliteviz</title>
<style>
#sqliteviz-loading-wrapper {
position: fixed;
width: 100%;
height: 100%;
left: 0;
top: 0;
background-color: white;
}
#sqliteviz-loading-text {
display: block;
position: absolute;
top: 50%;
left: 50%;
transform: translate(-50%, -50%);
color: #506784;
font-family: sans-serif;
font-size: 20px;
}
#sqliteviz-loading-wrapper svg {
display: block;
position: absolute;
left: 50%;
top: 50%;
transform: translate(-50%, -50%);
}
#sqliteviz-loading-wrapper circle {
position: absolute;
left: 0;
right: 0;
top: 0;
bottom: 0;
fill: none;
stroke-width: 5px;
stroke-linecap: round;
stroke: #119dff;
}
#sqliteviz-loading-wrapper circle.bg {
stroke: #c8d4e3;
}
#sqliteviz-loading-wrapper circle.front {
stroke-dasharray: 402px;
animation: sqliteviz-loading 2s linear 0s infinite;
}
@keyframes sqliteviz-loading {
0% {
stroke-dasharray: 100px 402px;
stroke-dashoffset: 0;
}
50% {
stroke-dasharray: 251px;
stroke-dashoffset: -251px;
}
100% {
stroke-dasharray: 100px 402px;
stroke-dashoffset: -502px;
}
}
</style>
<!-- head extention slot start -->
<!-- head extention slot end -->
</head>
<body>
<noscript>
<strong>
We're sorry but this app doesn't work properly without JavaScript
enabled. Please enable it to continue.
</strong>
</noscript>
<div id="app">
<div id="sqliteviz-loading-wrapper">
<div id="sqliteviz-loading-text">LOADING</div>
<svg height="170" width="170" viewBox="0 0 170 170">
<circle class="bg" cx="85" cy="85" r="80" />
<circle class="front" cx="85" cy="85" r="80" />
</svg>
</div>
</div>
<!-- extention slot start -->
<!-- extention slot end -->
<script type="module" src="/src/main.js"></script>
</body>
</html>

14
index.md Executable file
View File

@@ -0,0 +1,14 @@
# Introduction
Sqliteviz is a single-page offline-first PWA for fully client-side visualisation of SQLite databases, CSV, JSON or NDJSON files.
With sqliteviz you can:
- run SQL queries against a SQLite database and create Plotly charts, graphs and pivot tables based on the result sets
- import a CSV, JSON or NDJSON file into a SQLite database and visualize imported data
- export result set to CSV file
- manage inquiries and run them against different databases
- import/export inquiries from/to a JSON file
- export a modified SQLite database
- use it offline from your OS application menu like any other desktop app

View File

@@ -1,10 +0,0 @@
{
"include": ["src/**/*", "tests/**/*"],
"exclude": ["node_modules", "dist"],
"compilerOptions": {
"baseUrl": "./",
"paths": {
"@*": ["./src/*"]
}
}
}

View File

@@ -1,122 +0,0 @@
module.exports = function (config) {
config.set({
vite: {
config: {
resolve: {
alias: {
vue: 'vue/dist/vue.esm-bundler.js'
}
},
server: {
preTransformRequests: false
}
},
coverage: {
enable: true,
include: 'src/*',
exclude: ['node_modules', 'src/components/svg/*'],
extension: ['.js', '.vue'],
requireEnv: false
}
},
// base path that will be used to resolve all patterns (eg. files, exclude)
basePath: '',
// frameworks to use
// available frameworks: https://npmjs.org/browse/keyword/karma-adapter
frameworks: ['mocha', 'sinon-chai', 'vite'],
// list of files / patterns to load in the browser
files: [
{
pattern: 'test.setup.js',
type: 'module',
watched: false,
served: false
},
{
pattern: 'tests/**/*.spec.js',
type: 'module',
watched: false,
served: false
},
{
pattern: 'src/assets/styles/*.css',
type: 'css',
watched: false,
served: false
}
],
plugins: [
'karma-vite',
'karma-mocha',
'karma-sinon-chai',
'karma-firefox-launcher',
'karma-chrome-launcher',
'karma-spec-reporter',
'karma-coverage'
],
// list of files / patterns to exclude
exclude: [],
// test results reporter to use
// possible values: 'dots', 'progress'
// available reporters: https://npmjs.org/browse/keyword/karma-reporter
reporters: ['spec', 'coverage'],
coverageReporter: {
dir: 'coverage',
reporters: [{ type: 'lcov', subdir: '.' }, { type: 'text-summary' }]
},
// web server port
port: 9876,
// enable / disable colors in the output (reporters and logs)
colors: true,
// level of logging
// possible values: config.LOG_DISABLE || config.LOG_ERROR || config.LOG_WARN ||
// config.LOG_INFO || config.LOG_DEBUG
logLevel: config.LOG_INFO,
// enable / disable watching file and executing tests whenever any file changes
autoWatch: false,
customLaunchers: {
FirefoxHeadlessTouch: {
base: 'FirefoxHeadless',
prefs: {
'dom.w3c_touch_events.enabled': 1,
'dom.events.asyncClipboard.clipboardItem': true
}
}
},
// start these browsers
// available browser launchers: https://npmjs.org/browse/keyword/karma-launcher
browsers: ['ChromiumHeadless', 'FirefoxHeadlessTouch'],
// Continuous Integration mode
// if true, Karma captures browsers, runs the tests and exits
singleRun: true,
// Concurrency level
// how many browser should be started simultaneous
concurrency: 2,
client: {
captureConsole: true,
mocha: {
timeout: 7000
}
},
browserConsoleLogOptions: {
terminal: true,
level: ''
}
})
// Fix the timezone
process.env.TZ = 'Europe/Amsterdam'
}

View File

@@ -1,2 +0,0 @@
benchmark
dist

View File

@@ -1,9 +0,0 @@
FROM emscripten/emsdk:3.0.1
WORKDIR /tmp/build
COPY configure.py .
RUN python3.8 configure.py
COPY build.py .
RUN python3.8 build.py

View File

@@ -1,108 +0,0 @@
# SQLite WebAssembly build
This directory contains Docker-based build script, `make.sh`, that builds
a custom version of [sql.js][1]. It allows sqliteviz to have more recent
version of SQLite build with a number of useful extensions.
`Makefile` from [sql.js][1] is rewritten as more comprehensible `configure.py`
and `build.py` Python scripts that run in `emscripten/emsdk` Docker container.
## Extension
SQLite [amalgamation][2] extensions included:
1. [FTS5][4] -- virtual table module that provides full-text search
functionality
2. [FTS3/FTS4][15] -- older virtual table modules for full-text search
3. [JSON1][16] -- scalar, aggregate and table-valued functions for managing JSON data
SQLite [contribution extensions][17]:
1. [extension-functions][18] -- mathematical and string extension functions for SQL queries.
Math: `acos`, `asin`, `atan`, `atn2`, `atan2`, `acosh`, `asinh`, `atanh`, `difference`,
`degrees`, `radians`, `cos`, `sin`, `tan`, `cot`, `cosh`, `sinh`, `tanh`, `coth`,
`exp`, `log`, `log10`, `power`, `sign`, `sqrt`, `square`, `ceil`, `floor`, `pi`.
String: `replicate`, `charindex`, `leftstr`, `rightstr`, `ltrim`, `rtrim`, `trim`,
`replace`, `reverse`, `proper`, `padl`, `padr`, `padc`, `strfilter`.
Aggregate: `stdev`, `variance`, `mode`, `median`, `lower_quartile`, `upper_quartile`.
SQLite [miscellaneous extensions][3] included:
1. `generate_series` table-valued [series function][6] ([series.c][7])
2. `transitive_closure` virtual table for
[Querying Tree Structures in SQLite][11] ([closure.c][8])
3. `uuid`, `uuid_str` and `uuid_blob` RFC-4122 UUID functions ([uuid.c][9])
4. `regexp` (hence `REGEXP` operator) and `regexpi` functions ([regexp.c][10])
5. `percentile` function ([percentile.c][13])
6. `decimal`, `decimal_cmp`, `decimal_add`, `decimal_sub` and `decimal_mul` functions
([decimal.c][14])
SQLite 3rd party extensions included:
1. [pivot_vtab][5] -- a pivot virtual table
2. `pearson` correlation coefficient function extension from [sqlean][21]
(which is part of [squib][20])
3. [sqlitelua][22] -- a virtual table `luafunctions` which allows to define custom scalar,
aggregate and table-valued functions in Lua
To ease the step to have working clone locally, the build is committed into
the repository.
Examples of queries involving these extensions can be found in the test suite in
[sqliteExtensions.spec.js][19].
## Build method
Basically it's extended amalgamation and `SQLITE_EXTRA_INIT` concisely
described in [this message from SQLite Forum][12]:
> Simply append it to the end of the amalgamation file. The real problem is
> how you get the init function called. The easiest way (to me at any rate) is
> to append a function (after the extensions you want to add are all appended)
> that adds the init function for each extension to the auto extension list
> for new connections, and set the pre-processor symbol SQLITE_EXTRA_INIT to
> the name of this function. [...]
>
> An example `SQLITE_EXTRA_INIT` function looks like this:
>
> ```
> int core_init(const char* dummy)
> {
> int nErr = 0;
>
> nErr += sqlite3_auto_extension((void*)sqlite3_autobusy_init);
> nErr += sqlite3_auto_extension((void*)sqlite3_ipaddress_init);
>
> return nErr ? SQLITE_ERROR : SQLITE_OK;
> }
> ```
>
> so you would then define `SQLITE_EXTRA_INIT=core_init` when compiling the
> amalgamation code and the extensions would thereafter be automatically
> initialized on each connection.
[1]: https://github.com/sql-js/sql.js
[2]: https://sqlite.org/amalgamation.html
[3]: https://sqlite.org/src/dir?ci=trunk&name=ext/misc
[4]: https://sqlite.org/fts5.html
[5]: https://github.com/jakethaw/pivot_vtab
[6]: https://sqlite.org/series.html
[7]: https://sqlite.org/src/file/ext/misc/series.c
[8]: https://sqlite.org/src/file/ext/misc/closure.c
[9]: https://sqlite.org/src/file/ext/misc/uuid.c
[10]: https://sqlite.org/src/file/ext/misc/regexp.c
[11]: https://charlesleifer.com/blog/querying-tree-structures-in-sqlite-using-python-and-the-transitive-closure-extension/
[12]: https://sqlite.org/forum/forumpost/6ad7d4f4bebe5e06?raw
[13]: https://sqlite.org/src/file/ext/misc/percentile.c
[14]: https://sqlite.org/src/file/ext/misc/decimal.c
[15]: https://sqlite.org/fts3.html
[16]: https://sqlite.org/json1.html
[17]: https://sqlite.org/contrib/
[18]: https://sqlite.org/contrib//download/extension-functions.c?get=25
[19]: https://github.com/lana-k/sqliteviz/blob/master/tests/lib/database/sqliteExtensions.spec.js
[20]: https://github.com/mrwilson/squib/blob/master/pearson.c
[21]: https://github.com/nalgeon/sqlean/blob/incubator/src/pearson.c
[22]: https://github.com/kev82/sqlitelua

View File

@@ -1,4 +0,0 @@
/lib/build-*
/lib/dist
/build-*-result.json
/sample.csv

View File

@@ -1,17 +0,0 @@
FROM node:20.14-bookworm
RUN set -ex; \
apt-get update; \
apt-get install -y firefox-esr; \
apt-get install -y chromium
WORKDIR /tmp/build
COPY package.json ./
COPY lib/dist lib/dist
COPY lib/package.json lib/package.json
RUN npm install
COPY . .
CMD npm run benchmark

View File

@@ -1,25 +0,0 @@
# SQLite WebAssembly build micro-benchmark
This directory contains a micro-benchmark for evaluating SQLite WebAssembly
builds performance on read and write SQL queries, run from `make.sh` script. If
the script has permission to `nice` processes and [Procpath][1] is installed,
e.g. it is run with `sudo -E env PATH=$PATH ./make.sh`, it'll `renice` all
processes running inside the benchmark containers. It can also serve as a smoke
test (e.g. for memory leaks).
The benchmark operates on a set of SQLite WebAssembly builds expected in
`lib/build-$NAME` directories each containing `sql-wasm.js` and
`sql-wasm.wasm`. Then it creates a Docker image for each, and runs the
benchmark in Firefox and Chromium using Karma in the container.
After successful run, the benchmark produces the following per each build:
- `build-$NAME-result.json`
- `build-$NAME.sqlite` (if Procpath is installed)
- `build-$NAME.svg` (if Procpath is installed)
These files can be analysed using `result-analysis.ipynb` Jupyter notebook.
The SVG is a chart with CPU and RSS usage of each test container (i.e. Chromium
run, then Firefox run per container).
[1]: https://pypi.org/project/Procpath/

View File

@@ -1,57 +0,0 @@
module.exports = function (config) {
const timeout = 15 * 60 * 1000
config.set({
frameworks: ['mocha'],
files: [
'suite.js',
{
pattern: 'node_modules/sql.js/dist/sql-wasm.wasm',
served: true,
included: false
},
{ pattern: 'sample.csv', served: true, included: false }
],
reporters: ['progress', 'json-to-file'],
singleRun: true,
customLaunchers: {
ChromiumHeadlessNoSandbox: {
base: 'ChromiumHeadless',
flags: ['--no-sandbox']
}
},
browsers: ['ChromiumHeadlessNoSandbox', 'FirefoxHeadless'],
concurrency: 1,
browserDisconnectTimeout: timeout,
browserNoActivityTimeout: timeout,
captureTimeout: timeout,
browserSocketTimeout: timeout,
pingTimeout: timeout,
client: {
captureConsole: true,
mocha: { timeout: timeout }
},
logLevel: config.LOG_INFO,
browserConsoleLogOptions: { terminal: true, level: config.LOG_INFO },
preprocessors: { 'suite.js': ['webpack'] },
webpack: {
mode: 'development',
module: {
noParse: [__dirname + '/node_modules/benchmark/benchmark.js']
},
node: { fs: 'empty' }
},
proxies: {
'/sql-wasm.wasm': '/base/node_modules/sql.js/dist/sql-wasm.wasm'
},
jsonToFileReporter: { outputPath: '.', fileName: 'suite-result.json' }
})
}

View File

@@ -1,5 +0,0 @@
{
"name": "sql.js",
"main": "./dist/sql-wasm.js",
"private": true
}

View File

@@ -1,47 +0,0 @@
#!/bin/bash -e
cleanup () {
rm -rf lib/dist "$renice_flag_file"
docker rm -f sqljs-benchmark-run 2> /dev/null || true
}
trap cleanup EXIT
if [ ! -f sample.csv ]; then
wget --header="accept-encoding: gzip" -q -O- \
https://github.com/plotly/datasets/raw/547090bd/wellspublic.csv \
| gunzip -c > sample.csv
fi
PLAYBOOK=procpath/karma_docker.procpath
# for renice to work run like "sudo -E env PATH=$PATH ./make.sh"
test_ni=$(nice -n -5 nice)
if [ $test_ni == -5 ]; then
renice_flag_file=$(mktemp)
fi
{
while [ -f $renice_flag_file ]; do
procpath --logging-level ERROR play -f $PLAYBOOK renice:watch
done
} &
shopt -s nullglob
for d in lib/build-* ; do
rm -rf lib/dist
cp -r $d lib/dist
sample_name=$(basename $d)
docker build -t sqliteviz/sqljs-benchmark .
docker rm sqljs-benchmark-run 2> /dev/null || true
docker run -d -it --cpus 2 --name sqljs-benchmark-run sqliteviz/sqljs-benchmark
{
rm -f ${sample_name}.sqlite
procpath play -f $PLAYBOOK -o database_file=${sample_name}.sqlite track:record
procpath play -f $PLAYBOOK -o database_file=${sample_name}.sqlite \
-o plot_file=${sample_name}.svg track:plot
} &
docker attach sqljs-benchmark-run
docker cp sqljs-benchmark-run:/tmp/build/suite-result.json ${sample_name}-result.json
docker rm sqljs-benchmark-run
done

View File

@@ -1,23 +0,0 @@
{
"name": "sqlite-webassembly-microbenchmark",
"private": true,
"dependencies": {
"@babel/core": "^7.14.8",
"babel-loader": "^8.2.2",
"benchmark": "^2.1.4",
"lodash": "^4.17.4",
"papaparse": "^5.3.1",
"mocha": "^9.0.3",
"karma": "^6.3.4",
"karma-chrome-launcher": "^3.1.0",
"karma-firefox-launcher": "^2.1.1",
"karma-json-to-file-reporter": "^1.0.1",
"karma-mocha": "^2.0.1",
"karma-webpack": "^4.0.2",
"webpack": "^4.46.0",
"sql.js": "file:./lib"
},
"scripts": {
"benchmark": "karma start karma.conf.js"
}
}

View File

@@ -1,28 +0,0 @@
# This command may run when "sqljs-benchmark-run" does not yet exist or run
[renice:watch]
interval: 2
repeat: 30
environment:
ROOT_PID=docker inspect -f "{{.State.Pid}}" sqljs-benchmark-run 2> /dev/null || true
query:
PIDS=$..children[?(@.stat.pid in [$ROOT_PID])]..pid
command:
echo $PIDS | tr , '\n' | xargs --no-run-if-empty -I{} -- renice -n -5 -p {}
# Expected input arguments: database_file
[track:record]
interval: 1
stop_without_result: 1
environment:
ROOT_PID=docker inspect -f "{{.State.Pid}}" sqljs-benchmark-run
query:
$..children[?(@.stat.pid == $ROOT_PID)]
pid_list: $ROOT_PID
# Expected input arguments: database_file, plot_file
[track:plot]
moving_average_window: 5
title: Chromium vs Firefox (№1 RSS, №2 CPU)
custom_query_file:
procpath/top2_rss.sql
procpath/top2_cpu.sql

View File

@@ -1,29 +0,0 @@
WITH diff_all AS (
SELECT
record_id,
ts,
stat_pid,
stat_utime + stat_stime - LAG(stat_utime + stat_stime) OVER (
PARTITION BY stat_pid
ORDER BY record_id
) tick_diff,
ts - LAG(ts) OVER (
PARTITION BY stat_pid
ORDER BY record_id
) ts_diff
FROM record
), diff AS (
SELECT * FROM diff_all WHERE tick_diff IS NOT NULL
), one_time_pid_condition AS (
SELECT stat_pid
FROM record
GROUP BY 1
ORDER BY SUM(stat_utime + stat_stime) DESC
LIMIT 2
)
SELECT
ts,
stat_pid pid,
100.0 * tick_diff / (SELECT value FROM meta WHERE key = 'clock_ticks') / ts_diff value
FROM diff
JOIN one_time_pid_condition USING(stat_pid)

View File

@@ -1,13 +0,0 @@
WITH one_time_pid_condition AS (
SELECT stat_pid
FROM record
GROUP BY 1
ORDER BY SUM(stat_rss) DESC
LIMIT 2
)
SELECT
ts,
stat_pid pid,
stat_rss / 1024.0 / 1024 * (SELECT value FROM meta WHERE key = 'page_size') value
FROM record
JOIN one_time_pid_condition USING(stat_pid)

View File

@@ -1,218 +0,0 @@
{
"cells": [
{
"cell_type": "code",
"source": [
"import json\n",
"from pathlib import Path\n",
"\n",
"import pandas\n",
"from IPython.display import display, IFrame, Markdown\n",
"from plotly import express"
],
"outputs": [],
"execution_count": null,
"metadata": {
"collapsed": false,
"outputHidden": false,
"inputHidden": true
}
},
{
"cell_type": "code",
"source": [
"render_format = 'svg'\n",
"benchmark_path = Path()\n",
"df_dict = {}\n",
"stat_dict = {}\n",
"for p in benchmark_path.glob('build-*-result.json'):\n",
" build_name = p.stem.split('-', 2)[1]\n",
" for browser_data in json.loads(p.read_text()):\n",
" browser_name = f'{browser_data[\"browser\"][\"name\"]} {browser_data[\"browser\"][\"major\"]}'\n",
" browser_name = browser_name.lower().replace('chrome headless', 'chromium')\n",
" for result in (r for i, r in browser_data['result'].items() if i.isdigit()):\n",
" key = build_name, browser_name, result['name']\n",
" df_dict[key] = result['stats']['sample']\n",
" stat_dict[key] = result['stats']\n",
"\n",
"min_sample_size = min(len(v) for v in df_dict.values())\n",
"df_dict = {k: v[:min_sample_size] for k, v in df_dict.items()}\n",
" \n",
"wide_df = pandas.DataFrame(df_dict).reset_index()\n",
"df = pandas.melt(\n",
" wide_df, \n",
" id_vars='index', \n",
" var_name=['build', 'browser', 'test'], \n",
" value_name='duration',\n",
")\n",
"df = df.rename(columns={'index': 'run'})\n",
"df.sort_values(['build', 'run'], inplace=True)"
],
"outputs": [],
"execution_count": null,
"metadata": {
"inputHidden": true,
"outputExpanded": false
}
},
{
"cell_type": "markdown",
"source": [
"# sql.js build comparison\n",
"\n",
"<style>\n",
"@page {\n",
" size: 215mm 297mm;\n",
" margin: 0;\n",
"}\n",
"</style>"
],
"metadata": {}
},
{
"cell_type": "code",
"source": [
"!du -b lib | head -n 2"
],
"outputs": [],
"execution_count": null,
"metadata": {
"collapsed": false,
"outputHidden": false,
"inputHidden": true
}
},
{
"cell_type": "code",
"source": [
"stat_df = pandas.DataFrame(stat_dict)\n",
"stat_df = stat_df.loc[['mean', 'rme']].transpose()\n",
"stat_df.index = stat_df.index.set_names(['build', 'browser', 'test'])\n",
"stat_df = stat_df.reset_index().sort_values(['test', 'browser'], ascending=False)\n",
"for index, row in stat_df.iterrows():\n",
" print('\\t'.join([\n",
" row['build'],\n",
" row['browser'],\n",
" row['test'],\n",
" f'{row[\"mean\"]:.2f} s ± {row[\"rme\"]:.1f}%'\n",
" ]))"
],
"outputs": [],
"execution_count": null,
"metadata": {
"inputHidden": true,
"outputExpanded": false
}
},
{
"cell_type": "code",
"source": [
"fig = express.box(df, x='browser', y='duration', points='all', color='build', facet_row='test')\n",
"fig.update_yaxes(matches=None)\n",
"fig.show(render_format)"
],
"outputs": [],
"execution_count": null,
"metadata": {
"inputHidden": true,
"outputExpanded": false
}
},
{
"cell_type": "code",
"source": [
"fig = express.line(\n",
" df, x='run', y='duration', color='build', facet_col='browser', facet_row='test'\n",
")\n",
"fig.update_yaxes(matches=None)\n",
"fig.show(render_format)"
],
"outputs": [],
"execution_count": null,
"metadata": {
"inputHidden": true,
"outputExpanded": false
}
},
{
"cell_type": "code",
"source": [
"plot_df = df.groupby(['browser', 'build', 'test'])['duration'].mean().reset_index()\n",
"plot_df['pct'] = (\n",
" plot_df\n",
" .groupby(['browser', 'test'])['duration']\n",
" .pct_change()\n",
" .fillna('')\n",
" .map(lambda v: '{:.2%}'.format(v) if v else v)\n",
")\n",
"fig = express.bar(\n",
" plot_df, \n",
" x='browser', \n",
" y='duration', \n",
" color='build', \n",
" text='pct', \n",
" barmode='overlay', \n",
" facet_row='test',\n",
")\n",
"fig.update_yaxes(matches=None)\n",
"fig.show(render_format)"
],
"outputs": [],
"execution_count": null,
"metadata": {
"inputHidden": true,
"outputExpanded": false
}
},
{
"cell_type": "code",
"source": [
"for p in sorted(benchmark_path.glob('build-*.svg')):\n",
" display(Markdown(p.stem))\n",
" display(IFrame(p, 640, 480))"
],
"outputs": [],
"execution_count": null,
"metadata": {
"collapsed": false,
"outputHidden": false,
"inputHidden": true,
"outputExpanded": false
}
}
],
"metadata": {
"kernelspec": {
"name": "stats",
"language": "python",
"display_name": "Stats (Python 3.10)"
},
"language_info": {
"name": "python",
"version": "3.10.14",
"mimetype": "text/x-python",
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"pygments_lexer": "ipython3",
"nbconvert_exporter": "python",
"file_extension": ".py"
},
"widgets": {
"application/vnd.jupyter.widget-state+json": {
"state": {},
"version_major": 2,
"version_minor": 0
}
},
"kernel_info": {
"name": "stats"
},
"nteract": {
"version": "0.14.5"
}
},
"nbformat": 4,
"nbformat_minor": 5
}

View File

@@ -1,137 +0,0 @@
import benchmark from 'benchmark'
import initSqlJs from 'sql.js'
import lodash from 'lodash'
import Papa from 'papaparse'
import useragent from 'ua-parser-js'
describe('SQLite build benchmark', function () {
let parsedCsv
let sqlModule
let selectDb
before(async function () {
parsedCsv = await parseCsv('http://localhost:9876/base/sample.csv')
sqlModule = await initSqlJs()
selectDb = new sqlModule.Database()
importToTable(selectDb, parsedCsv)
})
function benchmarkImport() {
const db = new sqlModule.Database()
try {
importToTable(db, parsedCsv)
} finally {
db.close()
}
}
function benchmarkSelect() {
const result = selectDb.exec(`
SELECT county, AVG(avg_depth) avg_depth_c
FROM (
SELECT s.county, s.town, COUNT(*) cnt, AVG(s.DrilledDepth) avg_depth
FROM csv_import s
JOIN csv_import USING(hole)
WHERE s.town IS NOT NULL
GROUP BY 1, 2
ORDER BY 4 DESC
)
GROUP BY 1
ORDER BY 2 DESC
`)
console.assert(result.values.length == 56, 'Unexpected size of result set')
}
it('run', async function () {
const suite = createSuite()
suite.add('import', { initCount: 3, minSamples: 50, fn: benchmarkImport })
suite.add('select', { initCount: 3, minSamples: 50, fn: benchmarkSelect })
await run(suite)
})
})
function importToTable(db, parsedCsv, chunkSize = 1024) {
const columnListString = parsedCsv.meta.fields.join(', ')
db.exec(`CREATE TABLE csv_import(${columnListString})`)
const params = parsedCsv.meta.fields.map(name => '?').join(', ')
const insertStmt = db.prepare(`INSERT INTO csv_import VALUES(${params})`)
chunkArray(parsedCsv.data, chunkSize).map(function (chunk) {
db.exec('BEGIN')
chunk.map(row => insertStmt.run(Object.values(row)))
db.exec('COMMIT')
})
}
class PromiseWrapper {
constructor() {
this.promise = new Promise((resolve, reject) => {
this.reject = reject
this.resolve = resolve
})
}
}
function parseCsv(url) {
return new Promise((resolve, reject) => {
Papa.parse(url, {
header: true,
download: true,
skipEmptyLines: 'greedy',
complete: results => resolve(results),
error: (error, file) => reject(error)
})
})
}
function chunkArray(arr, size) {
return arr.reduce(function (result, value, index) {
const chunkIndex = Math.floor(index / size)
if (!(chunkIndex in result)) {
result[chunkIndex] = []
}
result[chunkIndex].push(value)
return result
}, [])
}
function createSuite() {
// Combined workaround from:
// - https://github.com/bestiejs/benchmark.js/issues/106
// - https://github.com/bestiejs/benchmark.js/issues/237
// Benchmark could not pick up lodash otherwise
const bm = benchmark.runInContext({ _: lodash })
// Avoid `ReferenceError: Benchmark is not defined` error because Benchmark is assumed
// to be in window
window.Benchmark = bm
return new bm.Suite()
}
function run(suite) {
const suiteResult = new PromiseWrapper()
suite
.on('cycle', function (event) {
console.info(String(event.target))
})
.on('complete', function () {
console.log(
JSON.stringify({
browser: useragent(navigator.userAgent).browser,
result: this.filter('successful')
})
)
suiteResult.resolve()
})
.on('error', function (event) {
console.error('Benchmark failed', String(event.target))
suiteResult.reject()
})
.run({ async: true })
return suiteResult.promise
}

View File

@@ -1,99 +0,0 @@
import logging
import subprocess
from pathlib import Path
# See the setting descriptions on these pages:
# - https://emscripten.org/docs/optimizing/Optimizing-Code.html
# - https://github.com/emscripten-core/emscripten/blob/main/src/settings.js
cflags = (
# SQLite configuration
'-DSQLITE_DEFAULT_CACHE_SIZE=-65536', # 64 MiB
'-DSQLITE_DEFAULT_MEMSTATUS=0',
'-DSQLITE_DEFAULT_SYNCHRONOUS=0',
'-DSQLITE_DISABLE_LFS',
'-DSQLITE_DQS=0',
'-DSQLITE_ENABLE_FTS3',
'-DSQLITE_ENABLE_FTS3_PARENTHESIS',
'-DSQLITE_ENABLE_FTS5',
'-DSQLITE_ENABLE_NORMALIZE',
'-DSQLITE_EXTRA_INIT=extra_init',
'-DSQLITE_OMIT_DEPRECATED',
'-DSQLITE_OMIT_LOAD_EXTENSION',
'-DSQLITE_OMIT_SHARED_CACHE',
'-DSQLITE_THREADSAFE=0',
# Compile-time optimisation
'-Os', # reduces the code size about in half comparing to -O2
'-flto',
'-Isrc', '-Isrc/lua',
)
emflags = (
# Base
'--memory-init-file', '0',
'-s', 'ALLOW_TABLE_GROWTH=1',
# WASM
'-s', 'WASM=1',
'-s', 'ALLOW_MEMORY_GROWTH=1',
'-s', 'ENVIRONMENT=web,worker',
# Link-time optimisation
'-Os',
'-flto',
# sql.js
'-s', 'EXPORTED_FUNCTIONS=@src/sqljs/exported_functions.json',
'-s', 'EXPORTED_RUNTIME_METHODS=@src/sqljs/exported_runtime_methods.json',
'--pre-js', 'src/sqljs/api.js',
)
def build(src: Path, dst: Path):
out = Path('out')
out.mkdir()
logging.info('Building LLVM bitcode for sqlite3.c')
subprocess.check_call([
'emcc',
*cflags,
'-c', src / 'sqlite3.c',
'-o', out / 'sqlite3.o',
])
logging.info('Building LLVM bitcode for extension-functions.c')
subprocess.check_call([
'emcc',
*cflags,
'-c', src / 'extension-functions.c',
'-o', out / 'extension-functions.o',
])
logging.info('Building LLVM bitcode for SQLite Lua extension')
subprocess.check_call([
'emcc',
*cflags,
'-shared',
*(src / 'lua').glob('*.c'),
*(src / 'sqlitelua').glob('*.c'),
'-o', out / 'sqlitelua.o',
])
logging.info('Building WASM from bitcode')
subprocess.check_call([
'emcc',
*emflags,
out / 'sqlite3.o',
out / 'extension-functions.o',
out / 'sqlitelua.o',
'-o', out / 'sql-wasm.js',
])
logging.info('Post-processing build and copying to dist')
(out / 'sql-wasm.wasm').rename(dst / 'sql-wasm.wasm')
with (dst / 'sql-wasm.js').open('w') as f:
f.write((src / 'sqljs' / 'shell-pre.js').read_text())
f.write((out / 'sql-wasm.js').read_text())
f.write((src / 'sqljs' / 'shell-post.js').read_text())
if __name__ == '__main__':
logging.basicConfig(level='INFO', format='%(asctime)s %(levelname)s %(name)s %(message)s')
src = Path('src')
dst = Path('dist')
dst.mkdir()
build(src, dst)

View File

@@ -1,150 +0,0 @@
import logging
import re
import shutil
import subprocess
import sys
import tarfile
import zipfile
from io import BytesIO
from pathlib import Path
from urllib import request
amalgamation_url = 'https://sqlite.org/2025/sqlite-amalgamation-3500300.zip'
# Extension-functions
# ===================
# It breaks amalgamation if appended as other extension because it redefines
# several functions, so build it separately. Note that sql.js registers these
# extension functions by calling ``registerExtensionFunctions`` itself.
contrib_functions_url = 'https://sqlite.org/contrib/download/extension-functions.c?get=25'
extension_urls = (
# Miscellaneous extensions
# ========================
('https://sqlite.org/src/raw/e212edb2?at=series.c', 'sqlite3_series_init'),
('https://sqlite.org/src/raw/5559daf1?at=closure.c', 'sqlite3_closure_init'),
('https://sqlite.org/src/raw/5bb2264c?at=uuid.c', 'sqlite3_uuid_init'),
('https://sqlite.org/src/raw/388e7f23?at=regexp.c', 'sqlite3_regexp_init'),
('https://sqlite.org/src/raw/72e05a21?at=percentile.c', 'sqlite3_percentile_init'),
('https://sqlite.org/src/raw/228d47e9?at=decimal.c', 'sqlite3_decimal_init'),
# Third-party extension
# =====================
('https://github.com/jakethaw/pivot_vtab/raw/e7705f34/pivot_vtab.c', 'sqlite3_pivotvtab_init'),
('https://github.com/nalgeon/sqlean/raw/95e8d21a/src/pearson.c', 'sqlite3_pearson_init'),
# Third-party extension with own dependencies
# ===========================================
('https://github.com/kev82/sqlitelua/raw/db479510/src/main.c', 'sqlite3_luafunctions_init'),
)
lua_url = 'http://www.lua.org/ftp/lua-5.3.5.tar.gz'
sqlitelua_url = 'https://github.com/kev82/sqlitelua/archive/db479510.zip'
sqljs_url = 'https://github.com/sql-js/sql.js/archive/refs/tags/v1.7.0.zip'
def _generate_extra_init_c_function(init_function_names):
auto_ext_calls = '\n'.join([
'nErr += sqlite3_auto_extension((void*){});'.format(init_fn)
for init_fn in init_function_names
])
return '''
int extra_init(const char* dummy)
{
int nErr = 0;
%s
return nErr ? SQLITE_ERROR : SQLITE_OK;
}
''' % auto_ext_calls
def _get_amalgamation(tgt: Path):
logging.info('Downloading and extracting SQLite amalgamation %s', amalgamation_url)
archive = zipfile.ZipFile(BytesIO(request.urlopen(amalgamation_url).read()))
archive_root_dir = zipfile.Path(archive, archive.namelist()[0])
for zpath in archive_root_dir.iterdir():
with zpath.open() as fr, (tgt / zpath.name).open('wb') as fw:
shutil.copyfileobj(fr, fw)
def _get_lua(tgt: Path):
# Library definitions from lua/Makefile
lib_str = '''
CORE_O= lapi.o lcode.o lctype.o ldebug.o ldo.o ldump.o lfunc.o lgc.o llex.o \
lmem.o lobject.o lopcodes.o lparser.o lstate.o lstring.o ltable.o \
ltm.o lundump.o lvm.o lzio.o
LIB_O= lauxlib.o lbaselib.o lbitlib.o lcorolib.o ldblib.o liolib.o \
lmathlib.o loslib.o lstrlib.o ltablib.o lutf8lib.o loadlib.o linit.o
LUA_O= lua.o
'''
header_only_files = {'lprefix', 'luaconf', 'llimits', 'lualib'}
lib_names = set(re.findall(r'(\w+)\.o', lib_str)) | header_only_files
logging.info('Downloading and extracting Lua %s', lua_url)
archive = tarfile.open(fileobj=BytesIO(request.urlopen(lua_url).read()))
(tgt / 'lua').mkdir()
for tarinfo in archive:
tarpath = Path(tarinfo.name)
if tarpath.match('src/*') and tarpath.stem in lib_names:
with (tgt / 'lua' / tarpath.name).open('wb') as fw:
shutil.copyfileobj(archive.extractfile(tarinfo), fw)
logging.info('Downloading and extracting SQLite Lua extension %s', sqlitelua_url)
archive = zipfile.ZipFile(BytesIO(request.urlopen(sqlitelua_url).read()))
archive_root_dir = zipfile.Path(archive, archive.namelist()[0])
(tgt / 'sqlitelua').mkdir()
for zpath in (archive_root_dir / 'src').iterdir():
if zpath.name != 'main.c':
with zpath.open() as fr, (tgt / 'sqlitelua' / zpath.name).open('wb') as fw:
shutil.copyfileobj(fr, fw)
def _get_contrib_functions(tgt: Path):
request.urlretrieve(contrib_functions_url, tgt / 'extension-functions.c')
def _get_extensions(tgt: Path):
init_functions = []
sqlite3_c = tgt / 'sqlite3.c'
with sqlite3_c.open('ab') as f:
for url, init_fn in extension_urls:
logging.info('Downloading and appending to amalgamation %s', url)
with request.urlopen(url) as resp:
f.write(b'\n')
shutil.copyfileobj(resp, f)
init_functions.append(init_fn)
logging.info('Appending SQLITE_EXTRA_INIT to amalgamation')
f.write(_generate_extra_init_c_function(init_functions).encode())
def _get_sqljs(tgt: Path):
logging.info('Downloading and extracting sql.js %s', sqljs_url)
archive = zipfile.ZipFile(BytesIO(request.urlopen(sqljs_url).read()))
archive_root_dir = zipfile.Path(archive, archive.namelist()[0])
(tgt / 'sqljs').mkdir()
for zpath in (archive_root_dir / 'src').iterdir():
with zpath.open() as fr, (tgt / 'sqljs' / zpath.name).open('wb') as fw:
shutil.copyfileobj(fr, fw)
def configure(tgt: Path):
_get_amalgamation(tgt)
_get_contrib_functions(tgt)
_get_lua(tgt)
_get_extensions(tgt)
_get_sqljs(tgt)
subprocess.check_call(['emcc', '--version'])
if __name__ == '__main__':
if sys.version_info < (3, 8):
print('Python 3.8 or higher is expected', file=sys.stderr)
sys.exit(1)
logging.basicConfig(level='INFO', format='%(asctime)s %(levelname)s %(name)s %(message)s')
src = Path('src')
src.mkdir()
configure(src)

File diff suppressed because one or more lines are too long

Binary file not shown.

View File

@@ -1,9 +0,0 @@
#!/bin/bash -e
docker build -t sqliteviz/sqljs .
rm -r dist || true
CONTAINER=$(docker create sqliteviz/sqljs)
docker cp $CONTAINER:/tmp/build/dist .
docker rm $CONTAINER

View File

@@ -1,5 +0,0 @@
{
"name": "sql.js",
"main": "./dist/sql-wasm.js",
"private": true
}

22062
package-lock.json generated

File diff suppressed because it is too large Load Diff

View File

@@ -1,79 +0,0 @@
{
"name": "sqliteviz",
"version": "0.27.1",
"license": "Apache-2.0",
"private": true,
"type": "module",
"scripts": {
"dev": "vite",
"build": "vite build",
"serve": "vite preview",
"test": "karma start karma.conf.cjs",
"lint": "eslint --ext .js,.vue --ignore-path .gitignore --fix src",
"format": "prettier . --write"
},
"dependencies": {
"buffer": "^6.0.3",
"codemirror": "^5.65.18",
"codemirror-editor-vue3": "^2.8.0",
"core-js": "^3.6.5",
"dataurl-to-blob": "^0.0.1",
"html2canvas": "^1.1.4",
"jquery": "^3.6.0",
"nanoid": "^3.1.12",
"papaparse": "^5.4.1",
"pivottable": "^2.23.0",
"plotly.js": "^2.35.2",
"promise-worker": "^2.0.1",
"react": "^16.14.0",
"react-chart-editor": "^0.46.1",
"react-dom": "^16.14.0",
"sql.js": "file:./lib/sql-js",
"tiny-emitter": "^2.1.0",
"veaury": "^2.5.1",
"vue": "^3.5.11",
"vue-final-modal": "^4.5.5",
"vue-multiselect": "^3.0.0-beta.3",
"vue-router": "^4.4.5",
"vuejs-paginate-next": "^1.0.2",
"vuex": "^4.1.0"
},
"devDependencies": {
"@babel/core": "^7.25.7",
"@vitejs/plugin-vue": "^5.2.1",
"@vue/eslint-config-standard": "^8.0.1",
"@vue/test-utils": "^2.4.6",
"chai": "^4.1.2",
"chai-as-promised": "^8.0.1",
"eslint": "^8.57.1",
"eslint-config-prettier": "^10.1.1",
"eslint-plugin-import": "^2.20.2",
"eslint-plugin-node": "^11.1.0",
"eslint-plugin-promise": "^4.2.1",
"eslint-plugin-standard": "^4.0.0",
"eslint-plugin-vue": "^9.28.0",
"flush-promises": "^1.0.2",
"karma": "^6.4.4",
"karma-coverage": "^2.2.1",
"karma-coverage-istanbul-reporter": "^3.0.3",
"karma-firefox-launcher": "^2.1.3",
"karma-mocha": "^1.3.0",
"karma-spec-reporter": "^0.0.36",
"karma-vite": "^1.0.5",
"mocha": "^5.2.0",
"prettier": "3.5.3",
"process": "^0.11.10",
"url-loader": "^4.1.1",
"vite": "^5.4.14",
"vite-plugin-istanbul": "^5.0.0",
"vite-plugin-node-polyfills": "^0.23.0",
"vite-plugin-pwa": "^0.21.1",
"vite-plugin-static-copy": "^2.2.0",
"vue-cli-plugin-ui-karma": "^0.2.5"
},
"overrides": {
"karma-vite": {
"vite-plugin-istanbul": "$vite-plugin-istanbul"
}
}
}

Binary file not shown.

Before

Width:  |  Height:  |  Size: 11 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 1.1 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 68 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 774 B

View File

@@ -1 +0,0 @@
[]

View File

@@ -1,30 +0,0 @@
{
"background_color": "white",
"description": "Sqliteviz is a single-page application for fully client-side visualisation of SQLite databases, CSV, JSON or NDJSON.",
"display": "fullscreen",
"icons": [
{
"src": "favicon.png",
"sizes": "32x32",
"type": "image/png"
},
{
"src": "Logo48x48.png",
"sizes": "48x48",
"type": "image/png"
},
{
"src": "Logo192x192.png",
"sizes": "192x192",
"type": "image/png"
},
{
"src": "Logo512x512.png",
"sizes": "512x512",
"type": "image/png"
}
],
"name": "sqliteviz",
"short_name": "sqliteviz",
"start_url": "index.html"
}

33
sidebar.json Normal file
View File

@@ -0,0 +1,33 @@
[{
"sections": [{
"title": "For users",
"items": [
"/docs/",
"/docs/installation/",
"/docs/basic-usage/",
"/docs/multiple-csv-file-import/",
"/docs/manage-inquiries/",
"/docs/export-current-database/",
"/docs/graph/",
"/docs/pivot-table/",
"/docs/predefined-inquiries/",
"/docs/sharing/",
"/docs/diagnostic-information/"
]
},
{
"title": "Examples and tutorials",
"items": [
"/docs/how-to-migrate-to-sqliteviz-dot-com/",
"/docs/how-to-build-a-pivot-table-in-sq-lite/",
"/docs/how-to-rename-tables-and-columns-after-csv-import/"
]
},
{
"title": "For developers",
"items": [
"/docs/integrate-predefined-inquiries/"
]
}
]
}]

View File

@@ -1,95 +0,0 @@
<template>
  <!-- Root router outlet: every page renders inside #app. -->
  <div id="app">
    <router-view />
  </div>
</template>

<script>
import storedInquiries from '@/lib/storedInquiries'

export default {
  computed: {
    // Inquiry list kept in the Vuex store (single source of truth).
    inquiries() {
      return this.$store.state.inquiries
    }
  },
  watch: {
    // Persist every (deep) change of the inquiry list back to storage.
    inquiries: {
      deep: true,
      handler() {
        storedInquiries.updateStorage(this.inquiries)
      }
    }
  },
  created() {
    // Seed the store from persisted inquiries on startup...
    this.$store.commit('setInquiries', storedInquiries.getStoredInquiries())
    // ...and re-sync when another browser tab updates the same key
    // (the 'storage' event fires only in other tabs/windows).
    addEventListener('storage', event => {
      if (event.key === storedInquiries.myInquiriesKey) {
        this.$store.commit('setInquiries', storedInquiries.getStoredInquiries())
      }
    })
  }
}
</script>

<style>
/* Open Sans faces: regular (400), semibold (600) and bold (700),
   each in upright and italic variants. */
@font-face {
  font-family: 'Open Sans';
  src: url('@/assets/fonts/OpenSans-Regular.woff2');
  font-weight: 400;
  font-style: normal;
}
@font-face {
  font-family: 'Open Sans';
  src: url('@/assets/fonts/OpenSans-SemiBold.woff2');
  font-weight: 600;
  font-style: normal;
}
@font-face {
  font-family: 'Open Sans';
  src: url('@/assets/fonts/OpenSans-Bold.woff2');
  font-weight: 700;
  font-style: normal;
}
@font-face {
  font-family: 'Open Sans';
  src: url('@/assets/fonts/OpenSans-Italic.woff2');
  font-weight: 400;
  font-style: italic;
}
@font-face {
  font-family: 'Open Sans';
  src: url('@/assets/fonts/OpenSans-SemiBoldItalic.woff2');
  font-weight: 600;
  font-style: italic;
}
@font-face {
  font-family: 'Open Sans';
  src: url('@/assets/fonts/OpenSans-BoldItalic.woff2');
  font-weight: 700;
  font-style: italic;
}

/* Apply the app font stack everywhere, including plotly editor nodes. */
#app,
.dialog,
input,
label,
button,
.plotly_editor * {
  font-family: 'Open Sans', Helvetica, Arial, sans-serif;
  -webkit-font-smoothing: antialiased;
  -moz-osx-font-smoothing: grayscale;
}

body {
  margin: 0;
}

/* Keep CodeMirror autocomplete popups above modal overlays. */
.CodeMirror-hints {
  z-index: 999 !important;
}
</style>

Binary file not shown.

Some files were not shown because too many files have changed in this diff Show More