mirror of
https://github.com/lana-k/sqliteviz.git
synced 2025-12-06 18:18:53 +08:00
change code structure
This commit is contained in:
171
tests/lib/database/_sql.spec.js
Normal file
171
tests/lib/database/_sql.spec.js
Normal file
@@ -0,0 +1,171 @@
|
||||
import chai from 'chai'
import sinon from 'sinon'
import chaiAsPromised from 'chai-as-promised'
import initSqlJs from 'sql.js'
import Sql from '@/lib/database/_sql'

chai.use(chaiAsPromised)
const expect = chai.expect
chai.should()

// sql.js WASM initialisation is asynchronous; start it once per file and
// await the shared promise inside each test that needs it.
const getSQL = initSqlJs()

// Builds a throw-away sql.js database containing a two-row `test` table
// and returns its binary export, ready to be passed to Sql.open().
// Extracted because the exact same fixture was duplicated in two tests.
async function createTestDbData () {
  const SQL = await getSQL
  const tempDb = new SQL.Database()
  tempDb.run(`
    CREATE TABLE test (
      id integer,
      name varchar(100),
      faculty varchar(100)
    );
    INSERT INTO test (id, name, faculty)
    VALUES
    ( 1, 'Harry Potter', 'Griffindor'),
    ( 2, 'Draco Malfoy', 'Slytherin');
  `)
  return tempDb.export()
}

describe('_sql.js', () => {
  afterEach(() => {
    sinon.restore()
  })

  it('returns a query result', async () => {
    const data = await createTestDbData()
    const sql = await Sql.build()
    sql.open(data)
    const result = sql.exec('SELECT * from test')
    expect(result).to.have.lengthOf(1)
    expect(result[0].columns).to.eql(['id', 'name', 'faculty'])
    expect(result[0].values).to.have.lengthOf(2)
    expect(result[0].values[0]).to.eql([1, 'Harry Potter', 'Griffindor'])
    expect(result[0].values[1]).to.eql([2, 'Draco Malfoy', 'Slytherin'])
  })

  it('throws an error if query is empty', async () => {
    const data = await createTestDbData()
    const sql = await Sql.build()
    sql.open(data)
    expect(() => { sql.exec() }).to.throw('exec: Missing query string')
  })

  it('imports', async () => {
    const data = {
      columns: ['id', 'name'],
      values: [
        [1, 'Harry Potter'],
        [2, 'Draco Malfoy'],
        [3, 'Hermione Granger'],
        [4, 'Ron Weasley']
      ]
    }
    const progressCallback = sinon.stub()
    const progressCounterId = 1

    const sql = await Sql.build()
    // Chunk size 2 over 4 rows -> progress reported at 0, 50 and 100 %.
    sql.import(data.columns, data.values, progressCounterId, progressCallback, 2)
    const result = sql.exec('SELECT * from csv_import')
    expect(result).to.have.lengthOf(1)
    expect(result[0].columns).to.eql(['id', 'name'])
    expect(result[0].values).to.have.lengthOf(4)
    expect(result[0].values[0]).to.eql([1, 'Harry Potter'])
    expect(result[0].values[1]).to.eql([2, 'Draco Malfoy'])
    expect(result[0].values[2]).to.eql([3, 'Hermione Granger'])
    expect(result[0].values[3]).to.eql([4, 'Ron Weasley'])

    expect(progressCallback.calledThrice).to.equal(true)
    expect(progressCallback.getCall(0).args[0]).to.eql({ progress: 0, id: 1 })
    expect(progressCallback.getCall(1).args[0]).to.eql({ progress: 50, id: 1 })
    expect(progressCallback.getCall(2).args[0]).to.eql({ progress: 100, id: 1 })
  })

  it('exports', async () => {
    const sql = await Sql.build()
    sql.exec(`
      CREATE TABLE test (
        id integer,
        name varchar(100),
        faculty varchar(100)
      );
      INSERT INTO test (id, name, faculty)
      VALUES
      ( 1, 'Harry Potter', 'Griffindor'),
      ( 2, 'Draco Malfoy', 'Slytherin');
    `)
    const data = sql.export()

    // The export of one instance must be loadable by another.
    const anotherSql = await Sql.build()
    anotherSql.open(data)
    const result = anotherSql.exec('SELECT * from test')
    expect(result).to.have.lengthOf(1)
    expect(result[0].columns).to.eql(['id', 'name', 'faculty'])
    expect(result[0].values).to.have.lengthOf(2)
    expect(result[0].values[0]).to.eql([1, 'Harry Potter', 'Griffindor'])
    expect(result[0].values[1]).to.eql([2, 'Draco Malfoy', 'Slytherin'])
  })

  it('closes', async () => {
    const sql = await Sql.build()

    // nothing breaks if close empty db
    sql.close()

    sql.exec(`
      CREATE TABLE test (
        id integer,
        name varchar(100)
      );
      INSERT INTO test (id, name)
      VALUES
      ( 1, 'Harry Potter'),
      ( 2, 'Draco Malfoy');
    `)
    expect(sql.db.db).to.not.equal(null)
    sql.close()
    expect(sql.db.db).to.equal(null)
  })

  it('overwrites', async () => {
    const sql = await Sql.build()
    sql.exec(`
      CREATE TABLE test (
        id integer,
        name varchar(100)
      );
      INSERT INTO test (id, name)
      VALUES
      ( 1, 'foo'),
      ( 2, 'bar');
    `)

    let result = sql.exec('SELECT * from test')
    expect(result[0].values).to.have.lengthOf(2)

    const data = {
      columns: ['id', 'name'],
      values: [
        [1, 'Harry Potter'],
        [2, 'Draco Malfoy'],
        [3, 'Hermione Granger'],
        [4, 'Ron Weasley']
      ]
    }
    // rewrite the database by import
    sql.import(data.columns, data.values, 1, sinon.stub(), 2)
    result = sql.exec('SELECT * from csv_import')
    expect(result[0].values).to.have.lengthOf(4)

    // test table doesn't exist anymore: the db was overwritten
    expect(() => { sql.exec('SELECT * from test') }).to.throw('no such table: test')
  })
})
|
||||
34
tests/lib/database/_statements.spec.js
Normal file
34
tests/lib/database/_statements.spec.js
Normal file
@@ -0,0 +1,34 @@
|
||||
import { expect } from 'chai'
import dbUtils from '@/lib/database/_statements'

describe('_statements.js', () => {
  it('generateChunks', () => {
    // generateChunks yields the source array in slices of `size`;
    // spreading the generator collects every yielded chunk.
    const pieces = [...dbUtils.generateChunks(['1', '2', '3', '4', '5'], 2)]
    expect(pieces[0]).to.eql(['1', '2'])
    expect(pieces[1]).to.eql(['3', '4'])
    expect(pieces[2]).to.eql(['5'])
  })

  it('getInsertStmt', () => {
    // Column names are double-quoted and one `?` placeholder is emitted per column.
    expect(dbUtils.getInsertStmt(['id', 'name']))
      .to.equal('INSERT INTO csv_import ("id", "name") VALUES (?, ?);')
  })

  it('getCreateStatement', () => {
    const cols = ['id', 'name', 'isAdmin', 'startDate']
    // Column affinities are inferred from the first rows:
    // number -> REAL, string -> TEXT, boolean -> INTEGER, Date -> TEXT.
    const rows = [
      [1, 'foo', true, new Date()],
      [2, 'bar', false, new Date()]
    ]
    expect(dbUtils.getCreateStatement(cols, rows)).to.equal(
      'CREATE table csv_import("id" REAL, "name" TEXT, "isAdmin" INTEGER, "startDate" TEXT);'
    )
  })
})
|
||||
245
tests/lib/database/database.spec.js
Normal file
245
tests/lib/database/database.spec.js
Normal file
@@ -0,0 +1,245 @@
|
||||
import chai from 'chai'
import sinon from 'sinon'
import chaiAsPromised from 'chai-as-promised'
import initSqlJs from 'sql.js'
import database from '@/lib/database'
import fu from '@/lib/utils/fileIo'

chai.use(chaiAsPromised)
const expect = chai.expect
chai.should()

// Shared sql.js initialisation promise; awaited inside the tests that need it.
const getSQL = initSqlJs()
let db

// Wraps a binary sql.js export in a named Blob, mimicking a file picked by
// the user. Extracted because every loadDb test repeated this boilerplate.
function asDbFile (data, name = 'foo.sqlite') {
  const file = new Blob([data])
  file.name = name
  return file
}

describe('database.js', () => {
  beforeEach(() => {
    db = database.getNewDatabase()
  })

  afterEach(() => {
    db.shutDown()
    sinon.restore()
  })

  it('creates schema', async () => {
    const SQL = await getSQL
    const tempDb = new SQL.Database()
    tempDb.run(`CREATE TABLE test (
      col1,
      col2 integer,
      col3 decimal(5,2),
      col4 varchar(30)
    )`)

    const { schema, dbName } = await db.loadDb(asDbFile(tempDb.export()))
    // dbName is the file name without extension.
    expect(dbName).to.equal('foo')
    expect(schema).to.have.lengthOf(1)
    expect(schema[0].name).to.equal('test')
    // A column declared without a type is reported as 'N/A'.
    expect(schema[0].columns[0].name).to.equal('col1')
    expect(schema[0].columns[0].type).to.equal('N/A')
    expect(schema[0].columns[1].name).to.equal('col2')
    expect(schema[0].columns[1].type).to.equal('integer')
    expect(schema[0].columns[2].name).to.equal('col3')
    expect(schema[0].columns[2].type).to.equal('decimal(5, 2)')
    expect(schema[0].columns[3].name).to.equal('col4')
    expect(schema[0].columns[3].type).to.equal('varchar(30)')
  })

  it('creates schema with virtual table', async () => {
    const SQL = await getSQL
    const tempDb = new SQL.Database()
    tempDb.run(`
      CREATE VIRTUAL TABLE test_virtual USING fts4(
        col1, col2,
        notindexed=col1, notindexed=col2,
        tokenize=unicode61 "tokenchars=.+#")
    `)

    const { schema } = await db.loadDb(asDbFile(tempDb.export()))
    expect(schema[0].name).to.equal('test_virtual')
    // fts4 columns carry no declared type.
    expect(schema[0].columns[0].name).to.equal('col1')
    expect(schema[0].columns[0].type).to.equal('N/A')
    expect(schema[0].columns[1].name).to.equal('col2')
    expect(schema[0].columns[1].type).to.equal('N/A')
  })

  it('loadDb throws errors', async () => {
    const SQL = await getSQL
    const tempDb = new SQL.Database()
    tempDb.run('CREATE TABLE test (col1, col2)')

    // Make the worker report a failure through its promise wrapper.
    sinon.stub(db.pw, 'postMessage').resolves({ error: new Error('foo') })

    await expect(db.loadDb(asDbFile(tempDb.export()))).to.be.rejectedWith('foo')
  })

  it('returns the last query result', async () => {
    const SQL = await getSQL
    const tempDb = new SQL.Database()
    tempDb.run(`
      CREATE TABLE test (
        id integer,
        name varchar(100),
        faculty varchar(100)
      );
      INSERT INTO test (id, name, faculty)
      VALUES
      ( 1, 'Harry Potter', 'Griffindor'),
      ( 2, 'Draco Malfoy', 'Slytherin');
    `)

    await db.loadDb(asDbFile(tempDb.export()))
    // Two statements: only the second one's result set is returned.
    const result = await db.execute('SELECT * from test limit 1; SELECT * from test;')
    expect(result.columns).to.have.lengthOf(3)
    expect(result.columns).to.eql(['id', 'name', 'faculty'])
    expect(result.values).to.have.lengthOf(2)
    expect(result.values[0]).to.eql([1, 'Harry Potter', 'Griffindor'])
    expect(result.values[1]).to.eql([2, 'Draco Malfoy', 'Slytherin'])
  })

  it('returns an error', async () => {
    const SQL = await getSQL
    const tempDb = new SQL.Database()
    tempDb.run(`
      CREATE TABLE test (
        id integer,
        name varchar(100),
        faculty varchar(100)
      );
      INSERT INTO test (id, name, faculty)
      VALUES
      ( 1, 'Harry Potter', 'Griffindor'),
      ( 2, 'Draco Malfoy', 'Slytherin');
    `)

    await db.loadDb(asDbFile(tempDb.export()))
    await expect(db.execute('SELECT * from foo')).to.be.rejectedWith(/^no such table: foo$/)
  })

  it('creates db', async () => {
    const data = {
      columns: ['id', 'name', 'faculty'],
      values: [
        [1, 'Harry Potter', 'Griffindor'],
        [2, 'Draco Malfoy', 'Slytherin']
      ]
    }
    const progressHandler = sinon.spy()
    const progressCounterId = db.createProgressCounter(progressHandler)
    const { dbName, schema } = await db.createDb('foo', data, progressCounterId)
    expect(dbName).to.equal('foo')
    expect(schema).to.have.lengthOf(1)
    expect(schema[0].name).to.equal('csv_import')
    expect(schema[0].columns).to.have.lengthOf(3)
    expect(schema[0].columns[0]).to.eql({ name: 'id', type: 'real' })
    expect(schema[0].columns[1]).to.eql({ name: 'name', type: 'text' })
    expect(schema[0].columns[2]).to.eql({ name: 'faculty', type: 'text' })

    const result = await db.execute('SELECT * from csv_import')
    expect(result.columns).to.eql(data.columns)
    expect(result.values).to.eql(data.values)

    // Progress is reported exactly at start (0) and completion (100).
    expect(progressHandler.calledTwice).to.equal(true)
    expect(progressHandler.firstCall.calledWith(0)).to.equal(true)
    expect(progressHandler.secondCall.calledWith(100)).to.equal(true)
  })

  it('createDb throws errors', async () => {
    // Rows have more fields than declared columns -> sqlite rejects the insert.
    const data = {
      columns: ['id', 'name'],
      values: [
        [1, 'Harry Potter', 'Griffindor'],
        [2, 'Draco Malfoy', 'Slytherin']
      ]
    }
    const progressHandler = sinon.stub()
    const progressCounterId = db.createProgressCounter(progressHandler)
    await expect(db.createDb('foo', data, progressCounterId))
      .to.be.rejectedWith('column index out of range')
  })

  it('progressCounters', () => {
    const firstHandler = sinon.stub()
    const firstId = db.createProgressCounter(firstHandler)
    db.worker.dispatchEvent(new MessageEvent('message', {
      data: {
        progress: 50,
        id: firstId
      }
    }))
    expect(firstHandler.calledOnceWith(50)).to.equal(true)

    // A second counter gets its own id and its own handler.
    const secondHandler = sinon.stub()
    const secondId = db.createProgressCounter(secondHandler)
    db.worker.dispatchEvent(new MessageEvent('message', {
      data: {
        progress: 70,
        id: secondId
      }
    }))
    expect(firstId).to.not.equals(secondId)
    expect(secondHandler.calledOnceWith(70)).to.equal(true)

    // Messages for the first counter still reach the first handler.
    db.worker.dispatchEvent(new MessageEvent('message', {
      data: {
        progress: 80,
        id: firstId
      }
    }))
    expect(firstHandler.calledTwice).to.equal(true)
    expect(firstHandler.secondCall.calledWith(80)).to.equal(true)

    db.deleteProgressCounter(firstId)
    expect(db.importProgresses[firstId]).to.equal(undefined)
  })

  it('exports db', async () => {
    sinon.stub(fu, 'exportToFile').resolves()

    // create db with table foo
    await db.execute(`
      CREATE TABLE foo(id, name);
      INSERT INTO foo VALUES (1, 'Harry Potter')
    `)

    // export db to a file
    await db.export('fooDb.sqlite')
    expect(fu.exportToFile.called).to.equal(true)

    // get data from export
    const data = fu.exportToFile.getCall(0).args[0]

    // loadDb from exported data
    const anotherDb = database.getNewDatabase()
    await anotherDb.loadDb(asDbFile(data, 'fooDb.sqlite'))

    // check that new db works and has the same table and data
    const result = await anotherDb.execute('SELECT * from foo')
    expect(result.columns).to.eql(['id', 'name'])
    expect(result.values).to.have.lengthOf(1)
    expect(result.values[0]).to.eql([1, 'Harry Potter'])
  })
})
|
||||
267
tests/lib/storedQueries.spec.js
Normal file
267
tests/lib/storedQueries.spec.js
Normal file
@@ -0,0 +1,267 @@
|
||||
import { expect } from 'chai'
import sinon from 'sinon'
import storedQueries from '@/lib/storedQueries'
import fu from '@/lib/utils/fileIo'

// Single serialised query used by several import/deserialise tests.
const SINGLE_QUERY_JSON = `
  {
    "id": 1,
    "name": "foo",
    "query": "select * from foo",
    "chart": [],
    "createdAt": "2020-11-03T14:17:49.524Z"
  }
`

describe('storedQueries.js', () => {
  beforeEach(() => {
    // Each test starts from an empty localStorage bucket.
    localStorage.removeItem('myQueries')
  })

  afterEach(() => {
    sinon.restore()
  })

  it('getStoredQueries returns empty array when storage is empty', () => {
    const queries = storedQueries.getStoredQueries()
    expect(queries).to.eql([])
  })

  it('updateStorage and getStoredQueries', () => {
    const data = [
      { id: 1 },
      { id: 2 }
    ]
    storedQueries.updateStorage(data)
    const queries = storedQueries.getStoredQueries()
    expect(queries).to.eql(data)
  })

  it('duplicateQuery', () => {
    const now = new Date()
    const nowPlusMinute = new Date(now.getTime() + 60 * 1000)
    const base = {
      id: 1,
      name: 'foo',
      query: 'SELECT * from foo',
      chart: [],
      createdAt: new Date(2021, 0, 1),
      isPredefined: true
    }

    const copy = storedQueries.duplicateQuery(base)
    // The copy gets a fresh id, a " Copy" suffix, a fresh timestamp
    // and loses the predefined flag.
    expect(copy).to.have.property('id').which.not.equal(base.id)
    expect(copy).to.have.property('name').which.equal(base.name + ' Copy')
    expect(copy).to.have.property('query').which.equal(base.query)
    expect(copy).to.have.property('chart').which.eql(base.chart)
    expect(copy).to.have.property('createdAt').which.within(now, nowPlusMinute)
    expect(copy).to.not.have.property('isPredefined')
  })

  it('isTabNeedName returns false when the query has a name and is not predefined', () => {
    const tab = {
      initName: 'foo'
    }
    expect(storedQueries.isTabNeedName(tab)).to.equal(false)
  })

  it('isTabNeedName returns true when the query has no name and is not predefined', () => {
    const tab = {
      initName: null,
      tempName: 'Untitled'
    }
    expect(storedQueries.isTabNeedName(tab)).to.equal(true)
  })

  it('isTabNeedName returns true when the query is predefined', () => {
    const tab = {
      initName: 'foo',
      isPredefined: true
    }

    expect(storedQueries.isTabNeedName(tab)).to.equal(true)
  })

  it('serialiseQueries', () => {
    const queryList = [
      {
        id: 1,
        name: 'foo',
        query: 'SELECT from foo',
        chart: [],
        createdAt: '2020-11-03T14:17:49.524Z',
        isPredefined: true
      },
      {
        id: 2,
        name: 'bar',
        query: 'SELECT from bar',
        chart: [],
        createdAt: '2020-12-03T14:17:49.524Z'
      }
    ]

    const str = storedQueries.serialiseQueries(queryList)
    const parsedJson = JSON.parse(str)

    expect(parsedJson).to.have.lengthOf(2)
    expect(parsedJson[1]).to.eql(queryList[1])
    expect(parsedJson[0].id).to.equal(queryList[0].id)
    expect(parsedJson[0].name).to.equal(queryList[0].name)
    expect(parsedJson[0].query).to.equal(queryList[0].query)
    expect(parsedJson[0].chart).to.eql(queryList[0].chart)
    expect(parsedJson[0].createdAt).to.eql(queryList[0].createdAt)
    // isPredefined is stripped on serialisation.
    expect(parsedJson[0].chart).to.not.have.property('isPredefined')
  })

  it('deserialiseQueries return array for one query', () => {
    // A single serialised object is wrapped into an array.
    const query = storedQueries.deserialiseQueries(SINGLE_QUERY_JSON)
    expect(query).to.eql([JSON.parse(SINGLE_QUERY_JSON)])
  })

  it('deserialiseQueries generates new id to avoid duplication', () => {
    // Id 1 is already taken in storage, so the first imported query
    // must be assigned a different id; id 2 is free and kept as-is.
    storedQueries.updateStorage([{ id: 1 }])
    const str = `[
      {
        "id": 1,
        "name": "foo",
        "query": "select * from foo",
        "chart": [],
        "createdAt": "2020-11-03T14:17:49.524Z"
      },
      {
        "id": 2,
        "name": "bar",
        "query": "select * from bar",
        "chart": [],
        "createdAt": "2020-11-04T14:17:49.524Z"
      }
    ]`

    const queries = storedQueries.deserialiseQueries(str)
    const parsedStr = JSON.parse(str)
    expect(queries[1]).to.eql(parsedStr[1])
    expect(queries[0]).to.have.property('id')
    expect(queries[0].id).to.not.equal(parsedStr[0].id)
    expect(queries[0].name).to.equal(parsedStr[0].name)
    expect(queries[0].query).to.equal(parsedStr[0].query)
    expect(queries[0].chart).to.eql(parsedStr[0].chart)
    expect(queries[0].createdAt).to.equal(parsedStr[0].createdAt)
  })

  it('importQueries', async () => {
    sinon.stub(fu, 'importFile').resolves(SINGLE_QUERY_JSON)
    const queries = await storedQueries.importQueries()

    expect(queries).to.eql([JSON.parse(SINGLE_QUERY_JSON)])
  })

  it('readPredefinedQueries', async () => {
    sinon.stub(fu, 'readFile').resolves(new Response(SINGLE_QUERY_JSON))
    const queries = await storedQueries.readPredefinedQueries()
    expect(fu.readFile.calledOnceWith('./queries.json')).to.equal(true)
    expect(queries).to.eql(JSON.parse(SINGLE_QUERY_JSON))
  })

  it('save adds new query in the storage', () => {
    const now = new Date()
    const nowPlusMinute = new Date(now.getTime() + 60 * 1000)
    const tab = {
      id: 1,
      query: 'select * from foo',
      chart: [],
      initName: null,
      $refs: {
        chart: {
          getChartStateForSave () {
            return ['chart']
          }
        }
      }
    }
    const value = storedQueries.save(tab, 'foo')
    expect(value.id).to.equal(tab.id)
    expect(value.name).to.equal('foo')
    expect(value.query).to.equal(tab.query)
    expect(value.chart).to.eql(['chart'])
    expect(value).to.have.property('createdAt').which.within(now, nowPlusMinute)
    const queries = storedQueries.getStoredQueries()
    expect(JSON.stringify(queries)).to.equal(JSON.stringify([value]))
  })

  it('save updates existing query in the storage', () => {
    const tab = {
      id: 1,
      query: 'select * from foo',
      chart: [],
      initName: null,
      $refs: {
        chart: {
          getChartStateForSave () {
            return ['chart']
          }
        }
      }
    }

    const first = storedQueries.save(tab, 'foo')

    // Saving again with an initName updates in place instead of appending.
    tab.initName = 'foo'
    tab.query = 'select * from foo'
    storedQueries.save(tab)
    const queries = storedQueries.getStoredQueries()
    const second = queries[0]
    expect(queries).has.lengthOf(1)
    expect(second.id).to.equal(first.id)
    expect(second.name).to.equal(first.name)
    expect(second.query).to.equal(tab.query)
    expect(second.chart).to.eql(['chart'])
    expect(new Date(second.createdAt).getTime()).to.equal(first.createdAt.getTime())
  })

  it("save adds a new query with new id if it's based on predefined query", () => {
    const now = new Date()
    const nowPlusMinute = new Date(now.getTime() + 60 * 1000)
    const tab = {
      id: 1,
      query: 'select * from foo',
      chart: [],
      initName: 'foo predefined',
      $refs: {
        chart: {
          getChartStateForSave () {
            return ['chart']
          }
        }
      },
      isPredefined: true
    }
    storedQueries.save(tab, 'foo')

    const queries = storedQueries.getStoredQueries()
    expect(queries).has.lengthOf(1)
    expect(queries[0]).to.have.property('id').which.not.equal(tab.id)
    expect(queries[0].name).to.equal('foo')
    expect(queries[0].query).to.equal(tab.query)
    expect(queries[0].chart).to.eql(['chart'])
    expect(new Date(queries[0].createdAt)).to.be.within(now, nowPlusMinute)
  })
})
|
||||
108
tests/lib/utils/fileIo.spec.js
Normal file
108
tests/lib/utils/fileIo.spec.js
Normal file
@@ -0,0 +1,108 @@
|
||||
import { expect } from 'chai'
import fu from '@/lib/utils/fileIo'
import sinon from 'sinon'

// Installs the spies both exportToFile tests need: a spied <a> element
// returned by document.createElement, plus URL and Blob instrumentation.
function spyOnDownloadMachinery () {
  const anchor = document.createElement('a')
  sinon.spy(anchor, 'click')
  sinon.stub(document, 'createElement').returns(anchor)
  sinon.spy(URL, 'createObjectURL')
  sinon.spy(URL, 'revokeObjectURL')
  sinon.spy(window, 'Blob')
  return anchor
}

// Asserts the full download flow: blob built, object URL created,
// anchor configured and clicked, and the URL revoked afterwards.
function checkDownloadFlow (anchor, contentType, fileName) {
  expect(document.createElement.calledOnceWith('a')).to.equal(true)

  expect(window.Blob.calledOnceWith(['foo'], { type: contentType })).to.equal(true)
  const blob = window.Blob.returnValues[0]
  expect(URL.createObjectURL.calledOnceWith(blob)).to.equal(true)

  const url = URL.createObjectURL.returnValues[0]
  expect(anchor.href).to.equal(url)

  expect(anchor.download).to.equal(fileName)

  expect(anchor.click.calledOnce).to.equal(true)
  expect(URL.revokeObjectURL.calledOnceWith(url)).to.equal(true)
}

describe('fileIo.js', () => {
  afterEach(() => {
    sinon.restore()
  })

  it('exportToFile (octet/stream by default)', () => {
    const anchor = spyOnDownloadMachinery()

    fu.exportToFile('foo', 'foo.txt')

    checkDownloadFlow(anchor, 'octet/stream', 'foo.txt')
  })

  it('exportToFile', () => {
    const anchor = spyOnDownloadMachinery()

    fu.exportToFile('foo', 'foo.html', 'text/html')

    checkDownloadFlow(anchor, 'text/html', 'foo.html')
  })

  it('importFile', async () => {
    const fileInput = document.createElement('input')
    sinon.stub(fileInput, 'click')

    // Pretend the user picked a file containing 'foo'.
    const blob = new Blob(['foo'])
    Object.defineProperty(fileInput, 'files', {
      value: [blob],
      writable: false
    })

    sinon.stub(document, 'createElement').returns(fileInput)

    // importFile resolves only after the input fires 'change'.
    setTimeout(() => { fileInput.dispatchEvent(new Event('change')) })

    const data = await fu.importFile()
    expect(data).to.equal('foo')
    expect(document.createElement.calledOnceWith('input')).to.equal(true)
    expect(fileInput.type).to.equal('file')
    expect(fileInput.accept).to.equal('.json')
    expect(fileInput.click.calledOnce).to.equal(true)
  })

  it('readFile', () => {
    sinon.spy(window, 'fetch')

    fu.readFile('./foo.bar')
    expect(window.fetch.calledOnceWith('./foo.bar')).to.equal(true)
  })

  it('readAsArrayBuffer resolves', async () => {
    const blob = new Blob(['foo'])
    const buffer = await fu.readAsArrayBuffer(blob)

    const uint8Array = new Uint8Array(buffer)
    const text = new TextDecoder().decode(uint8Array)
    expect(text).to.equal('foo')
  })

  it('readAsArrayBuffer rejects', async () => {
    // A FileReader whose read immediately errors out.
    const reader = new FileReader()
    reader.readAsArrayBuffer = () => {
      reader.dispatchEvent(new Event('error'))
    }
    sinon.stub(window, 'FileReader').returns(reader)

    const blob = new Blob(['foo'])
    await expect(fu.readAsArrayBuffer(blob)).to.be.rejectedWith('Problem parsing input file.')
  })
})
|
||||
30
tests/lib/utils/time.spec.js
Normal file
30
tests/lib/utils/time.spec.js
Normal file
@@ -0,0 +1,30 @@
|
||||
import { expect } from 'chai'
import time from '@/lib/utils/time'

describe('time.js', () => {
  it('getPeriod', () => {
    // Each case: [start, end, expected formatted period].
    // Zero-valued units are omitted from the formatted string.
    const cases = [
      [
        new Date(2021, 0, 1, 13, 0, 0, 0), // 1.01.2021 13:00:00 000
        new Date(2021, 0, 3, 22, 15, 20, 500), // 3.01.2021 22:15:20 500
        '2 d 9 h 15 m 20 s 500 ms'
      ],
      [
        new Date(2021, 0, 1, 13, 0, 0, 0), // 1.01.2021 13:00:00 000
        new Date(2021, 0, 1, 22, 0, 20, 0), // 1.01.2021 22:00:20 000
        '9 h 20 s'
      ],
      [
        new Date(2021, 0, 1, 13, 0, 0, 0), // 1.01.2021 13:00:00 000
        new Date(2021, 0, 1, 13, 0, 0, 45), // 1.01.2021 13:00:00 45
        '45 ms'
      ]
    ]

    for (const [start, end, expected] of cases) {
      expect(time.getPeriod(start, end)).to.equal(expected)
    }
  })
})
|
||||
Reference in New Issue
Block a user