mirror of https://github.com/lana-k/sqliteviz.git synced 2025-12-07 10:38:54 +08:00

5 Commits

Author SHA1 Message Date
lana-k 4213e9df5c fix lint errors 2021-08-10 23:36:16 +02:00
lana-k 9f32323a80 fix column order in result set #74 2021-08-10 20:31:12 +02:00
lana-k 2ed5160f65 remove maps from root 2021-08-05 18:47:24 +02:00
lana-k fe8ab3f3e8 update version 2021-08-05 18:44:10 +02:00
saaj 848112979b SQLite WebAssembly micro-benchmark and new build with a memory leak fix (#70) 2021-08-05 18:35:31 +02:00
28 changed files with 655 additions and 208 deletions


@@ -27,7 +27,7 @@ jobs:
- name: Create archive
run: |
cd dist
zip -9 -r dist.zip . -x "js/*.map"
zip -9 -r dist.zip . -x "js/*.map" -x "/*.map"
- name: Create Release Notes
run: |

lib/sql-js/.dockerignore Normal file

@@ -0,0 +1,2 @@
benchmark
dist

lib/sql-js/benchmark/.gitignore vendored Normal file

@@ -0,0 +1,4 @@
/lib/build-*
/lib/dist
/build-*-result.json
/sample.csv


@@ -0,0 +1,19 @@
FROM node:12-buster
RUN set -ex; \
echo 'deb http://deb.debian.org/debian unstable main' \
> /etc/apt/sources.list.d/unstable.list; \
apt-get update; \
apt-get install -y -t unstable firefox; \
apt-get install -y chromium
WORKDIR /tmp/build
COPY package.json ./
COPY lib/dist lib/dist
COPY lib/package.json lib/package.json
RUN npm install
COPY . .
CMD npm run benchmark


@@ -0,0 +1,14 @@
# SQLite WebAssembly build micro-benchmark
This directory contains a micro-benchmark for evaluating the performance of
SQLite WebAssembly builds on typical SQL queries, run from the `make.sh`
script. It can also serve as a smoke test.
The benchmark operates on a set of SQLite WebAssembly builds expected
in `lib/build-$NAME` directories, each containing `sql-wasm.js` and
`sql-wasm.wasm`. It then creates a Docker image per build and runs
the benchmark in Firefox and Chromium using Karma inside the container.
After a successful run, the benchmark result of each build is written to
`build-$NAME-result.json`. The JSON result files can be analysed with
the `result-analysis.ipynb` Jupyter notebook.
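
For orientation, a minimal sketch of one run, assuming two hypothetical builds named build-baseline and build-leakfix (the names are placeholders; only the lib/build-$NAME layout and the make.sh entry point come from the description above):

# Place each build under test in its own lib/build-$NAME directory:
#   lib/build-baseline/sql-wasm.js   lib/build-baseline/sql-wasm.wasm
#   lib/build-leakfix/sql-wasm.js    lib/build-leakfix/sql-wasm.wasm
./make.sh                  # builds one Docker image per lib/build-* and runs the Karma suite in it
ls build-*-result.json     # build-baseline-result.json  build-leakfix-result.json

Each result file can then be loaded into the result-analysis.ipynb notebook for comparison.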


@@ -0,0 +1,52 @@
module.exports = function (config) {
const timeout = 15 * 60 * 1000
config.set({
frameworks: ['mocha'],
files: [
'suite.js',
{ pattern: 'node_modules/sql.js/dist/sql-wasm.wasm', served: true, included: false },
{ pattern: 'sample.csv', served: true, included: false }
],
reporters: ['progress', 'json-to-file'],
singleRun: true,
customLaunchers: {
ChromiumHeadlessNoSandbox: { base: 'ChromiumHeadless', flags: ['--no-sandbox'] }
},
browsers: ['ChromiumHeadlessNoSandbox', 'FirefoxHeadless'],
concurrency: 1,
browserDisconnectTimeout: timeout,
browserNoActivityTimeout: timeout,
captureTimeout: timeout,
browserSocketTimeout: timeout,
pingTimeout: timeout,
client: {
captureConsole: true,
mocha: { timeout: timeout }
},
logLevel: config.LOG_INFO,
browserConsoleLogOptions: { terminal: true, level: config.LOG_INFO },
preprocessors: { 'suite.js': [ 'webpack' ] },
webpack: {
mode: 'development',
module: {
noParse: [ __dirname + '/node_modules/benchmark/benchmark.js' ]
},
node: { fs: 'empty' }
},
proxies: {
'/sql-wasm.wasm': '/base/node_modules/sql.js/dist/sql-wasm.wasm'
},
jsonToFileReporter: { outputPath: '.', fileName: 'suite-result.json' }
})
}


@@ -0,0 +1,5 @@
{
"name": "sql.js",
"main": "./dist/sql-wasm.js",
"private": true
}

lib/sql-js/benchmark/make.sh Executable file

@@ -0,0 +1,21 @@
#!/bin/bash -e
if [ ! -f sample.csv ]; then
wget --header="accept-encoding: gzip" -q -O- \
https://github.com/plotly/datasets/raw/547090bd/wellspublic.csv \
| gunzip -c > sample.csv
fi
for d in lib/build-* ; do
rm -r lib/dist || true
cp -r $d lib/dist
name=$(basename $d)
docker build -t sqliteviz/sqljs-benchmark:$name .
docker rm sqljs-benchmark-$name 2> /dev/null || true
docker run -it --name sqljs-benchmark-$name sqliteviz/sqljs-benchmark:$name
docker cp sqljs-benchmark-$name:/tmp/build/suite-result.json ${name}-result.json
docker rm sqljs-benchmark-$name
done
rm -r lib/dist


@@ -0,0 +1,23 @@
{
"name": "sqlite-webassembly-microbenchmark",
"private": true,
"dependencies": {
"@babel/core" : "^7.14.8",
"babel-loader": "^8.2.2",
"benchmark": "^2.1.4",
"lodash": "^4.17.4",
"papaparse": "^5.3.1",
"mocha": "^9.0.3",
"karma": "^6.3.4",
"karma-chrome-launcher": "^3.1.0",
"karma-firefox-launcher": "^2.1.1",
"karma-json-to-file-reporter" : "^1.0.1",
"karma-mocha": "^2.0.1",
"karma-webpack": "^4.0.2",
"webpack": "^4.46.0",
"sql.js": "file:./lib"
},
"scripts": {
"benchmark": "karma start karma.conf.js"
}
}

File diff suppressed because one or more lines are too long


@@ -0,0 +1,140 @@
import benchmark from 'benchmark'
import initSqlJs from 'sql.js'
import lodash from 'lodash'
import Papa from 'papaparse'
import useragent from 'ua-parser-js'
describe('SQLite build benchmark', function () {
let parsedCsv
let sqlModule
let selectDb
before(async function () {
parsedCsv = await parseCsv('http://localhost:9876/base/sample.csv')
sqlModule = await initSqlJs()
selectDb = new sqlModule.Database()
importToTable(selectDb, parsedCsv)
})
function benchmarkImport () {
const db = new sqlModule.Database()
try {
importToTable(db, parsedCsv)
} finally {
db.close()
}
}
function benchmarkSelect () {
const result = selectDb.exec(`
SELECT county, AVG(avg_depth) avg_depth_c
FROM (
SELECT s.county, s.town, COUNT(*) cnt, AVG(s.DrilledDepth) avg_depth
FROM csv_import s
JOIN csv_import USING(hole)
WHERE s.town IS NOT NULL
GROUP BY 1, 2
ORDER BY 4 DESC
)
GROUP BY 1
ORDER BY 2 DESC
`)
console.assert(result[0].values.length === 56, 'Unexpected size of result set')
}
it('run', async function () {
const suite = createSuite()
suite.add('import', { initCount: 3, minSamples: 50, fn: benchmarkImport })
suite.add('select', { initCount: 3, minSamples: 50, fn: benchmarkSelect })
await run(suite)
})
})
function importToTable (db, parsedCsv, chunkSize = 1024) {
const columnListString = parsedCsv.meta.fields.join(', ')
db.exec(`CREATE TABLE csv_import(${columnListString})`)
const params = parsedCsv.meta.fields.map(name => '?').join(', ')
const insertStmt = db.prepare(`INSERT INTO csv_import VALUES(${params})`)
chunkArray(parsedCsv.data, chunkSize).map(function (chunk) {
db.exec('BEGIN')
chunk.map(row => insertStmt.run(Object.values(row)))
db.exec('COMMIT')
})
}
class PromiseWrapper {
constructor() {
this.promise = new Promise((resolve, reject) => {
this.reject = reject
this.resolve = resolve
})
}
}
function parseCsv(url) {
return new Promise((resolve, reject) => {
Papa.parse(url, {
header: true,
download: true,
skipEmptyLines: 'greedy',
complete: results => resolve(results),
error: (error, file) => reject(error)
})
})
}
function chunkArray (arr, size) {
return arr.reduce(function (result, value, index) {
const chunkIndex = Math.floor(index / size)
if(!(chunkIndex in result)) {
result[chunkIndex] = []
}
result[chunkIndex].push(value)
return result
}, [])
}
function createSuite () {
// Combined workaround from:
// - https://github.com/bestiejs/benchmark.js/issues/106
// - https://github.com/bestiejs/benchmark.js/issues/237
// Benchmark could not pick up lodash otherwise
const bm = benchmark.runInContext({ _: lodash })
// Avoid `ReferenceError: Benchmark is not defined` error because Benchmark is assumed
// to be in window
window.Benchmark = bm
return new bm.Suite()
}
function run (suite) {
const suiteResult = new PromiseWrapper()
suite
.on('cycle', function (event) {
console.info(String(event.target))
})
.on('complete', function () {
console.log(JSON.stringify({
browser: useragent(navigator.userAgent).browser,
result: this.filter('successful')
}))
suiteResult.resolve()
})
.on('error', function (event) {
console.error('Benchmark failed', String(event.target))
suiteResult.reject()
})
.run({async: true})
return suiteResult.promise
}


@@ -5,17 +5,21 @@ from pathlib import Path
cflags = (
'-O2',
'-DSQLITE_OMIT_LOAD_EXTENSION',
'-DSQLITE_DEFAULT_CACHE_SIZE=-65536', # 64 MiB
'-DSQLITE_DEFAULT_MEMSTATUS=0',
'-DSQLITE_DEFAULT_SYNCHRONOUS=0',
'-DSQLITE_DISABLE_LFS',
'-DSQLITE_DQS=0',
'-DSQLITE_ENABLE_FTS3',
'-DSQLITE_ENABLE_FTS3_PARENTHESIS',
'-DSQLITE_ENABLE_FTS5',
'-DSQLITE_ENABLE_JSON1',
'-DSQLITE_THREADSAFE=0',
'-DSQLITE_ENABLE_NORMALIZE',
'-DSQLITE_EXTRA_INIT=extra_init',
'-DSQLITE_DEFAULT_MEMSTATUS=0',
'-DSQLITE_USE_ALLOCA',
'-DSQLITE_OMIT_DEPRECATED',
'-DSQLITE_OMIT_LOAD_EXTENSION',
'-DSQLITE_OMIT_SHARED_CACHE',
'-DSQLITE_THREADSAFE=0',
)
emflags = (
# Base
@@ -30,7 +34,6 @@ emflags = (
'-s', 'INLINING_LIMIT=50',
'-O3',
'-flto',
-'--closure', '1',
# sql.js
'-s', 'EXPORTED_FUNCTIONS=@src/sqljs/exported_functions.json',
'-s', 'EXPORTED_RUNTIME_METHODS=@src/sqljs/exported_runtime_methods.json',

File diff suppressed because one or more lines are too long

lib/sql-js/dist/sql-wasm.wasm vendored Normal file → Executable file

Binary file not shown.


@@ -1,4 +1,5 @@
{
"name": "sql.js",
"main": "./dist/sql-wasm.js"
"main": "./dist/sql-wasm.js",
"private": true
}


@@ -1,6 +1,6 @@
{
"name": "sqliteviz",
"version": "0.15.0",
"version": "0.15.2",
"license": "Apache-2.0",
"private": true,
"scripts": {


@@ -8,10 +8,15 @@ const hintsByCode = {
export default {
getResult (source) {
-const result = {}
+const result = {
+  columns: []
+}
+const values = {}
if (source.meta.fields) {
source.meta.fields.forEach(col => {
-result[col.trim()] = source.data.map(row => {
+const colName = col.trim()
+result.columns.push(colName)
+values[colName] = source.data.map(row => {
let value = row[col]
if (value instanceof Date) {
value = value.toISOString()
@@ -21,7 +26,9 @@ export default {
})
} else {
for (let i = 0; i <= source.data[0].length - 1; i++) {
-result[`col${i + 1}`] = source.data.map(row => {
+const colName = `col${i + 1}`
+result.columns.push(colName)
+values[colName] = source.data.map(row => {
let value = row[i]
if (value instanceof Date) {
value = value.toISOString()
@@ -30,6 +37,8 @@ export default {
})
}
}
result.values = values
return result
},


@@ -30,7 +30,7 @@
<tr v-for="rowIndex in currentPageData.count" :key="rowIndex">
<td v-for="(col, colIndex) in columns" :key="colIndex">
<div class="cell-data" :style="cellStyle">
-{{ dataSet[col][rowIndex - 1 + currentPageData.start] }}
+{{ dataSet.values[col][rowIndex - 1 + currentPageData.start] }}
</div>
</td>
</tr>
@@ -74,10 +74,10 @@ export default {
},
computed: {
columns () {
-return Object.keys(this.dataSet)
+return this.dataSet.columns
},
rowCount () {
-return this.dataSet[this.columns[0]].length
+return this.dataSet.values[this.columns[0]].length
},
cellStyle () {
const eq = this.tableWidth / this.columns.length


@@ -48,17 +48,22 @@ export default class Sql {
throw new Error('exec: Missing query string')
}
const sqlResults = this.db.exec(sql, params)
-return sqlResults.map(result => _getDataSourcesFromSqlResult(result))
+return sqlResults.map(result => {
+  return {
+    columns: result.columns,
+    values: _getDataSourcesFromSqlResult(result)
+  }
+})
}
import (tabName, data, progressCounterId, progressCallback, chunkSize = 1500) {
if (this.db === null) {
this.createDb()
}
-const columns = Object.keys(data)
-const rowCount = data[columns[0]].length
-this.db.exec(dbUtils.getCreateStatement(tabName, data))
-const chunks = dbUtils.generateChunks(data, chunkSize)
+const columns = data.columns
+const rowCount = data.values[columns[0]].length
+this.db.exec(dbUtils.getCreateStatement(tabName, data.values))
+const chunks = dbUtils.generateChunks(data.values, chunkSize)
const chunksAmount = Math.ceil(rowCount / chunkSize)
let count = 0
const insertStr = dbUtils.getInsertStmt(tabName, columns)


@@ -88,11 +88,11 @@ class Database {
const result = await this.execute(getSchemaSql)
// Parse DDL statements to get column names and types
const parsedSchema = []
-if (result && result.name) {
-  result.name.forEach((table, index) => {
+if (result && result.values && result.values.name) {
+  result.values.name.forEach((table, index) => {
parsedSchema.push({
name: table,
-columns: stms.getColumns(result.sql[index])
+columns: stms.getColumns(result.values.sql[index])
})
})
}


@@ -44,13 +44,13 @@ export default {
async created () {
const state = this.$store.state
-let result = await state.db.execute('select sqlite_version()')
+let result = (await state.db.execute('select sqlite_version()')).values
this.info.push({
name: 'SQLite version',
info: result['sqlite_version()']
})
-result = await state.db.execute('PRAGMA compile_options')
+result = (await state.db.execute('PRAGMA compile_options')).values
this.info.push({
name: 'SQLite compile options',
info: result.compile_options


@@ -38,7 +38,7 @@
<teleport :to="`#${layout.dataView}-${tabIndex}`">
<data-view
:data-source="result"
:data-source="(result && result.values) || null"
:init-options="initViewOptions"
:init-mode="initViewType"
ref="dataView"


@@ -58,8 +58,11 @@ describe('CsvImport.vue', () => {
sinon.stub(csv, 'parse').resolves({
delimiter: '|',
data: {
columns: ['col2', 'col1'],
values: {
col1: [1, 2],
col2: ['foo', 'bar']
}
},
rowCount: 2,
messages: [{
@@ -82,10 +85,10 @@ describe('CsvImport.vue', () => {
expect(wrapper.findComponent({ name: 'check-box' }).vm.checked).to.equal(true)
const rows = wrapper.findAll('tbody tr')
expect(rows).to.have.lengthOf(2)
-expect(rows.at(0).findAll('td').at(0).text()).to.equal('1')
-expect(rows.at(0).findAll('td').at(1).text()).to.equal('foo')
-expect(rows.at(1).findAll('td').at(0).text()).to.equal('2')
-expect(rows.at(1).findAll('td').at(1).text()).to.equal('bar')
+expect(rows.at(0).findAll('td').at(0).text()).to.equal('foo')
+expect(rows.at(0).findAll('td').at(1).text()).to.equal('1')
+expect(rows.at(1).findAll('td').at(0).text()).to.equal('bar')
+expect(rows.at(1).findAll('td').at(1).text()).to.equal('2')
expect(wrapper.findComponent({ name: 'logs' }).text())
.to.include('Information about row 0. Comma was used as a standart delimiter.')
expect(wrapper.findComponent({ name: 'logs' }).text())
@@ -99,8 +102,11 @@ describe('CsvImport.vue', () => {
parse.onCall(0).resolves({
delimiter: '|',
data: {
columns: ['col2', 'col1'],
values: {
col1: [1],
col2: ['foo']
}
},
rowCount: 1
})
@@ -113,8 +119,11 @@ describe('CsvImport.vue', () => {
parse.onCall(1).resolves({
delimiter: ',',
data: {
columns: ['col2', 'col1'],
values: {
col1: [2],
col2: ['bar']
}
},
rowCount: 1,
hasErrors: false
@@ -125,16 +134,19 @@ describe('CsvImport.vue', () => {
let rows = wrapper.findAll('tbody tr')
expect(rows).to.have.lengthOf(1)
-expect(rows.at(0).findAll('td').at(0).text()).to.equal('2')
-expect(rows.at(0).findAll('td').at(1).text()).to.equal('bar')
+expect(rows.at(0).findAll('td').at(0).text()).to.equal('bar')
+expect(rows.at(0).findAll('td').at(1).text()).to.equal('2')
expect(wrapper.findComponent({ name: 'logs' }).text())
.to.include('Preview parsing is completed in')
parse.onCall(2).resolves({
delimiter: ',',
data: {
columns: ['col2', 'col1'],
values: {
col1: [3],
col2: ['baz']
}
},
rowCount: 1,
hasErrors: true,
@@ -152,8 +164,8 @@ describe('CsvImport.vue', () => {
await csv.parse.returnValues[2]
rows = wrapper.findAll('tbody tr')
expect(rows).to.have.lengthOf(1)
-expect(rows.at(0).findAll('td').at(0).text()).to.equal('3')
-expect(rows.at(0).findAll('td').at(1).text()).to.equal('baz')
+expect(rows.at(0).findAll('td').at(0).text()).to.equal('baz')
+expect(rows.at(0).findAll('td').at(1).text()).to.equal('3')
expect(wrapper.findComponent({ name: 'logs' }).text())
.to.contain('Error in row 0. Quote is missed. Edit your CSV so that the field has a closing quote char.')
expect(wrapper.findComponent({ name: 'logs' }).text())
@@ -162,8 +174,11 @@ describe('CsvImport.vue', () => {
parse.onCall(3).resolves({
delimiter: ',',
data: {
columns: ['col2', 'col1'],
values: {
col1: [4],
col2: ['qux']
}
},
rowCount: 1,
hasErrors: false
@@ -173,16 +188,19 @@ describe('CsvImport.vue', () => {
await csv.parse.returnValues[3]
rows = wrapper.findAll('tbody tr')
expect(rows).to.have.lengthOf(1)
-expect(rows.at(0).findAll('td').at(0).text()).to.equal('4')
-expect(rows.at(0).findAll('td').at(1).text()).to.equal('qux')
+expect(rows.at(0).findAll('td').at(0).text()).to.equal('qux')
+expect(rows.at(0).findAll('td').at(1).text()).to.equal('4')
expect(wrapper.findComponent({ name: 'logs' }).text())
.to.contain('Preview parsing is completed in')
parse.onCall(4).resolves({
delimiter: ',',
data: {
columns: ['col2', 'col1'],
values: {
col1: [5],
col2: ['corge']
}
},
rowCount: 1,
hasErrors: false
@@ -192,8 +210,9 @@ describe('CsvImport.vue', () => {
await csv.parse.returnValues[4]
rows = wrapper.findAll('tbody tr')
expect(rows).to.have.lengthOf(1)
-expect(rows.at(0).findAll('td').at(0).text()).to.equal('5')
-expect(rows.at(0).findAll('td').at(1).text()).to.equal('corge')
+expect(rows.at(0).findAll('td').at(0).text()).to.equal('corge')
+expect(rows.at(0).findAll('td').at(1).text()).to.equal('5')
expect(wrapper.findComponent({ name: 'logs' }).text())
.to.include('Preview parsing is completed in')
})


@@ -19,9 +19,12 @@ describe('csv.js', () => {
}
}
expect(csv.getResult(source)).to.eql({
columns: ['id', 'name', 'date'],
values: {
id: [1, 2],
name: ['foo', 'bar'],
date: ['2021-06-30T14:10:24.717Z', '2021-07-30T14:10:15.717Z']
}
})
})
@@ -34,9 +37,12 @@ describe('csv.js', () => {
meta: {}
}
expect(csv.getResult(source)).to.eql({
columns: ['col1', 'col2', 'col3'],
values: {
col1: [1, 2],
col2: ['foo', 'bar'],
col3: ['2021-06-30T14:10:24.717Z', '2021-07-30T14:10:15.717Z']
}
})
})
@@ -73,8 +79,11 @@ describe('csv.js', () => {
const result = await csv.parse(file)
expect(result).to.eql({
data: {
columns: ['col1', 'col2'],
values: {
col1: [1, 2],
col2: ['foo', 'bar']
}
},
delimiter: ',',
rowCount: 2,


@@ -35,9 +35,12 @@ describe('_sql.js', () => {
const result = sql.exec('SELECT * from test')
expect(result).to.have.lengthOf(1)
expect(result[0]).to.eql({
columns: ['id', 'name', 'faculty'],
values: {
id: [1, 2],
name: ['Harry Potter', 'Draco Malfoy'],
faculty: ['Griffindor', 'Slytherin']
}
})
})
@@ -64,6 +67,8 @@ describe('_sql.js', () => {
it('imports', async () => {
const data = {
columns: ['id', 'name'],
values: {
id: [1, 2, 3, 4],
name: [
'Harry Potter',
@@ -72,6 +77,7 @@ describe('_sql.js', () => {
'Ron Weasley'
]
}
}
const progressCallback = sinon.stub()
const progressCounterId = 1
const sql = await Sql.build()
@@ -104,7 +110,7 @@ describe('_sql.js', () => {
anotherSql.open(data)
const result = anotherSql.exec('SELECT * from test')
expect(result).to.have.lengthOf(1)
-expect(result[0]).to.eql({
+expect(result[0].values).to.eql({
id: [1, 2],
name: ['Harry Potter', 'Draco Malfoy'],
faculty: ['Griffindor', 'Slytherin']
@@ -146,12 +152,14 @@ describe('_sql.js', () => {
`)
let result = sql.exec('SELECT * from test')
-expect(result[0]).to.eql({
+expect(result[0].values).to.eql({
id: [1, 2],
name: ['foo', 'bar']
})
const data = {
columns: ['id', 'name'],
values: {
id: [1, 2, 3, 4],
name: [
'Harry Potter',
@@ -160,12 +168,13 @@ describe('_sql.js', () => {
'Ron Weasley'
]
}
}
// import adds table
sql.import('foo', data, 1, sinon.stub(), 2)
result = sql.exec('SELECT * from foo')
expect(result[0]).to.eql(data)
result = sql.exec('SELECT * from test')
-expect(result[0]).to.eql({
+expect(result[0].values).to.eql({
id: [1, 2],
name: ['foo', 'bar']
})


@@ -85,7 +85,7 @@ describe('database.js', () => {
await db.loadDb(buffer)
const result = await db.execute('SELECT * from test limit 1; SELECT * from test;')
-expect(result).to.eql({
+expect(result.values).to.eql({
id: [1, 2],
name: ['Harry Potter', 'Draco Malfoy'],
faculty: ['Griffindor', 'Slytherin']
@@ -116,10 +116,13 @@ describe('database.js', () => {
it('adds table from csv', async () => {
const data = {
columns: ['id', 'name', 'faculty'],
values: {
id: [1, 2],
name: ['Harry Potter', 'Draco Malfoy'],
faculty: ['Griffindor', 'Slytherin']
}
}
const progressHandler = sinon.spy()
const progressCounterId = db.createProgressCounter(progressHandler)
sinon.spy(db, 'refreshSchema')
@@ -144,10 +147,13 @@ describe('database.js', () => {
it('addTableFromCsv throws errors', async () => {
const data = {
columns: [],
values: {
id: [1, 2],
name: ['Harry Potter', 'Draco Malfoy'],
faculty: null
}
}
const progressHandler = sinon.stub()
const progressCounterId = db.createProgressCounter(progressHandler)
await expect(db.addTableFromCsv('foo', data, progressCounterId)).to.be.rejected
@@ -214,8 +220,11 @@ describe('database.js', () => {
// check that new db works and has the same table and data
result = await anotherDb.execute('SELECT * from foo')
expect(result).to.eql({
columns: ['id', 'name'],
values: {
id: [1],
name: ['Harry Potter']
}
})
})


@@ -37,7 +37,7 @@ describe('SQLite extensions', function () {
abs(pi() / 2 - atan2(1, 0)) < 0.000001
`)
-expect(actual).to.eql({
+expect(actual.values).to.eql({
'abs(3.1415926 - pi()) < 0.000001': [1],
'abs(1 - cos(2 * pi())) < 0.000001': [1],
'abs(0 - sin(pi())) < 0.000001': [1],
@@ -71,7 +71,7 @@ describe('SQLite extensions', function () {
ceil(-1.95) + ceil(1.95),
floor(-1.95) + floor(1.95)
`)
-expect(actual).to.eql({
+expect(actual.values).to.eql({
'exp(0)': [1],
'log(exp(1))': [1],
'log10(10000)': [4],
@@ -99,7 +99,7 @@ describe('SQLite extensions', function () {
padc('foo', 5),
strfilter('abcba', 'bc')
`)
-expect(actual).to.eql({
+expect(actual.values).to.eql({
"replicate('ab', 4)": ['abababab'],
"charindex('ab', 'foobarabbarfoo')": [7],
"charindex('ab', 'foobarabbarfoo', 8)": [0],
@@ -137,7 +137,7 @@ describe('SQLite extensions', function () {
VALUES (1)
)
`)
-expect(actual).to.eql({
+expect(actual.values).to.eql({
'abs( 3.77406806 - stdev(x)) < 0.000001': [1],
'abs(14.24358974 - variance(x)) < 0.000001': [1],
'mode(x)': [1],
@@ -152,7 +152,7 @@ describe('SQLite extensions', function () {
SELECT value
FROM generate_series(5, 20, 5)
`)
-expect(actual).to.eql({
+expect(actual.values).to.eql({
value: [5, 10, 15, 20]
})
})
@@ -194,7 +194,7 @@ describe('SQLite extensions', function () {
WHERE nc.root = 2 AND nc.depth = 2
);
`)
-expect(actual).to.eql({
+expect(actual.values).to.eql({
name: [
'_sql.spec.js',
'_statements.spec.js',
@@ -212,7 +212,7 @@ describe('SQLite extensions', function () {
length(uuid()) as length,
uuid_str(uuid_blob('26a8349c8a7f4cbeb519bf792c3d7ac6')) as uid
`)
-expect(actual).to.eql({
+expect(actual.values).to.eql({
length: [36],
uid: ['26a8349c-8a7f-4cbe-b519-bf792c3d7ac6']
})
@@ -225,7 +225,7 @@ describe('SQLite extensions', function () {
regexpi('=\\s?\\d+', 'const foo = 123; const bar = "bar"') as two,
'const foo = 123; const bar = "bar"' REGEXP '=\\s?\\d+' as three
`)
-expect(actual).to.eql({
+expect(actual.values).to.eql({
one: [1],
two: [1],
three: [1]
@@ -260,7 +260,7 @@ describe('SQLite extensions', function () {
ALTER TABLE surface DROP COLUMN rownum;
SELECT * FROM surface;
`)
-expect(actual).to.eql({
+expect(actual.values).to.eql({
x: [5, 10, 15],
y: [3, 6, 9],
'5.0': [3.2, 4.3, 5.4],
@@ -292,7 +292,7 @@ describe('SQLite extensions', function () {
WHERE body MATCH '"full-text" NOT document'
ORDER BY rank;
`)
-expect(actual).to.eql({
+expect(actual.values).to.eql({
sender: ['bar@localhost']
})
})


@@ -129,8 +129,11 @@ describe('Schema.vue', () => {
sinon.stub(csv, 'parse').resolves({
delimiter: '|',
data: {
columns: ['col1', 'col2'],
values: {
col1: [1],
col2: ['foo']
}
},
hasErrors: false,
messages: []
@@ -168,7 +171,7 @@ describe('Schema.vue', () => {
])
const res = await wrapper.vm.$store.state.db.execute('select * from test')
-expect(res).to.eql({
+expect(res.values).to.eql({
col1: [1],
col2: ['foo']
})