mirror of https://github.com/lana-k/sqliteviz.git (synced 2025-12-07 18:48:55 +08:00)
Pivot implementation and redesign (#69)
- Pivot support implementation
- Rename queries into inquiries
- Rename editor into workspace
- Change result set format
- New JSON format for inquiries
- Redesign panels
@@ -4,6 +4,17 @@ import dbUtils from './_statements'
 let SQL = null
 const sqlModuleReady = initSqlJs().then(sqlModule => { SQL = sqlModule })
 
+function _getDataSourcesFromSqlResult (sqlResult) {
+  if (!sqlResult) {
+    return {}
+  }
+  const dataSorces = {}
+  sqlResult.columns.forEach((column, index) => {
+    dataSorces[column] = sqlResult.values.map(row => row[index])
+  })
+  return dataSorces
+}
+
 export default class Sql {
   constructor () {
     this.db = null
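For reference, a rough sketch of the reshaping _getDataSourcesFromSqlResult performs (the sample table is invented):

// a row-oriented sql.js result...
const sqlResult = {
  columns: ['id', 'name'],
  values: [[1, 'apple'], [2, 'pear']]
}
// ...becomes one array of values per column:
// _getDataSourcesFromSqlResult(sqlResult) => { id: [1, 2], name: ['apple', 'pear'] }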
@@ -36,16 +47,19 @@ export default class Sql {
     if (!sql) {
       throw new Error('exec: Missing query string')
     }
-    return this.db.exec(sql, params)
+    const sqlResults = this.db.exec(sql, params)
+    return sqlResults.map(result => _getDataSourcesFromSqlResult(result))
   }
 
-  import (tabName, columns, values, progressCounterId, progressCallback, chunkSize = 1500) {
+  import (tabName, data, progressCounterId, progressCallback, chunkSize = 1500) {
     if (this.db === null) {
       this.createDb()
     }
-    this.db.exec(dbUtils.getCreateStatement(tabName, columns, values))
-    const chunks = dbUtils.generateChunks(values, chunkSize)
-    const chunksAmount = Math.ceil(values.length / chunkSize)
+    const columns = Object.keys(data)
+    const rowCount = data[columns[0]].length
+    this.db.exec(dbUtils.getCreateStatement(tabName, data))
+    const chunks = dbUtils.generateChunks(data, chunkSize)
+    const chunksAmount = Math.ceil(rowCount / chunkSize)
     let count = 0
     const insertStr = dbUtils.getInsertStmt(tabName, columns)
     const insertStmt = this.db.prepare(insertStr)
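The import method now takes the same column-keyed shape instead of separate columns and values arrays. A hedged usage sketch (table name and values invented; sql is an instance of the class above):

sql.import('fruit', {
  id: [1, 2],
  name: ['apple', 'pear']
}, progressCounterId, progressCallback)
// roughly equivalent to the old call sql.import('fruit', ['id', 'name'], [[1, 'apple'], [2, 'pear']], ...)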
@@ -1,13 +1,17 @@
 import sqliteParser from 'sqlite-parser'
 
 export default {
-  * generateChunks (arr, size) {
-    const count = Math.ceil(arr.length / size)
+  * generateChunks (data, size) {
+    const matrix = Object.keys(data).map(col => data[col])
+    const [row] = matrix
+    const transposedMatrix = row.map((value, column) => matrix.map(row => row[column]))
+
+    const count = Math.ceil(transposedMatrix.length / size)
 
     for (let i = 0; i <= count - 1; i++) {
       const start = size * i
       const end = start + size
-      yield arr.slice(start, end)
+      yield transposedMatrix.slice(start, end)
     }
   },
 
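generateChunks therefore transposes the column-keyed data back into row tuples before slicing. A small worked example (sample data invented; dbUtils is the import name used for this module in the first file above):

const data = { id: [1, 2, 3], name: ['apple', 'pear', 'plum'] }
const chunks = [...dbUtils.generateChunks(data, 2)]
// chunks => [ [[1, 'apple'], [2, 'pear']], [[3, 'plum']] ]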
@@ -17,11 +21,11 @@ export default {
     return `INSERT INTO "${tabName}" (${colList}) VALUES (${params});`
   },
 
-  getCreateStatement (tabName, columns, values) {
+  getCreateStatement (tabName, data) {
     let result = `CREATE table "${tabName}"(`
-    columns.forEach((col, index) => {
-      // Get the first row of values to determine types
-      const value = values[0][index]
+    for (const col in data) {
+      // Get the first row of values to determine types
+      const value = data[col][0]
       let type = ''
       switch (typeof value) {
         case 'number': {
@@ -39,7 +43,8 @@ export default {
         default: type = 'TEXT'
       }
       result += `"${col}" ${type}, `
-    })
+    }
+
     result = result.replace(/,\s$/, ');')
     return result
   },
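With the column-keyed input, getCreateStatement infers each column's type from its first value. A sketch of the expected output (sample table invented; strings are assumed to fall through to the default TEXT branch):

dbUtils.getCreateStatement('fruit', { name: ['apple'], note: ['fresh'] })
// => 'CREATE table "fruit"("name" TEXT, "note" TEXT);'
// numeric columns would get whatever type the case 'number' branch (not shown in this hunk) assigns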
@@ -15,8 +15,7 @@ function processMsg (sql) {
     case 'import':
       return sql.import(
         data.tabName,
-        data.columns,
-        data.values,
+        data.data,
         data.progressCounterId,
         postMessage
       )
@@ -55,8 +55,7 @@ class Database {
   async addTableFromCsv (tabName, data, progressCounterId) {
     const result = await this.pw.postMessage({
       action: 'import',
-      columns: data.columns,
-      values: data.values,
+      data,
       progressCounterId,
       tabName
     })
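From the application side, addTableFromCsv simply forwards the column-keyed data object to the worker. A usage sketch (instance name and values invented):

await database.addTableFromCsv('fruit', {
  id: [1, 2],
  name: ['apple', 'pear']
}, progressCounterId)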
@@ -89,11 +88,11 @@ class Database {
     const result = await this.execute(getSchemaSql)
     // Parse DDL statements to get column names and types
     const parsedSchema = []
-    if (result && result.values) {
-      result.values.forEach(item => {
+    if (result && result.name) {
+      result.name.forEach((table, index) => {
         parsedSchema.push({
-          name: item[0],
-          columns: stms.getColumns(item[1])
+          name: table,
+          columns: stms.getColumns(result.sql[index])
         })
       })
     }
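Because execute now returns column-keyed data sources, the schema result is read as parallel name and sql arrays rather than row tuples. Roughly (table names invented; this assumes getSchemaSql selects the name and sql columns from sqlite_master):

// result => { name: ['fruit', 'sales'], sql: ['CREATE TABLE "fruit"(...)', 'CREATE TABLE "sales"(...)'] }
// parsedSchema => [{ name: 'fruit', columns: [...] }, { name: 'sales', columns: [...] }]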
src/lib/storedInquiries/_migrations.js (new file, 12 lines added)
@@ -0,0 +1,12 @@
+export default {
+  _migrate (installedVersion, inquiries) {
+    if (installedVersion === 1) {
+      inquiries.forEach(inquire => {
+        inquire.viewType = 'chart'
+        inquire.viewOptions = inquire.chart
+        delete inquire.chart
+      })
+      return inquiries
+    }
+  }
+}
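In effect, _migrate upgrades a version-1 inquiry in place (field values invented):

// before: { id: 'a1', name: 'Top fruit', query: 'SELECT ...', chart: { type: 'bar' } }
// after:  { id: 'a1', name: 'Top fruit', query: 'SELECT ...', viewType: 'chart', viewOptions: { type: 'bar' } }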
src/lib/storedInquiries/index.js (new file, 120 lines added)
@@ -0,0 +1,120 @@
+import { nanoid } from 'nanoid'
+import fu from '@/lib/utils/fileIo'
+import migration from './_migrations'
+
+const migrate = migration._migrate
+
+export default {
+  version: 2,
+  getStoredInquiries () {
+    let myInquiries = JSON.parse(localStorage.getItem('myInquiries'))
+    if (!myInquiries) {
+      const oldInquiries = localStorage.getItem('myQueries')
+      if (oldInquiries) {
+        myInquiries = migrate(1, JSON.parse(oldInquiries))
+        this.updateStorage(myInquiries)
+        return myInquiries
+      }
+      return []
+    }
+
+    return (myInquiries && myInquiries.inquiries) || []
+  },
+
+  duplicateInquiry (baseInquiry) {
+    const newInquiry = JSON.parse(JSON.stringify(baseInquiry))
+    newInquiry.name = newInquiry.name + ' Copy'
+    newInquiry.id = nanoid()
+    newInquiry.createdAt = new Date()
+    delete newInquiry.isPredefined
+
+    return newInquiry
+  },
+
+  isTabNeedName (inquiryTab) {
+    const isFromScratch = !inquiryTab.initName
+    return inquiryTab.isPredefined || isFromScratch
+  },
+
+  save (inquiryTab, newName) {
+    const value = {
+      id: inquiryTab.isPredefined ? nanoid() : inquiryTab.id,
+      query: inquiryTab.query,
+      viewType: inquiryTab.$refs.dataView.mode,
+      viewOptions: inquiryTab.$refs.dataView.getOptionsForSave(),
+      name: newName || inquiryTab.initName
+    }
+
+    // Get inquiries from local storage
+    const myInquiries = this.getStoredInquiries()
+
+    // Set createdAt
+    if (newName) {
+      value.createdAt = new Date()
+    } else {
+      var inquiryIndex = myInquiries.findIndex(oldInquiry => oldInquiry.id === inquiryTab.id)
+      value.createdAt = myInquiries[inquiryIndex].createdAt
+    }
+
+    // Insert in inquiries list
+    if (newName) {
+      myInquiries.push(value)
+    } else {
+      myInquiries[inquiryIndex] = value
+    }
+
+    // Save to local storage
+    this.updateStorage(myInquiries)
+    return value
+  },
+
+  updateStorage (inquiries) {
+    localStorage.setItem('myInquiries', JSON.stringify({ version: this.version, inquiries }))
+  },
+
+  serialiseInquiries (inquiryList) {
+    const preparedData = JSON.parse(JSON.stringify(inquiryList))
+    preparedData.forEach(inquiry => delete inquiry.isPredefined)
+    return JSON.stringify({ version: this.version, inquiries: preparedData }, null, 4)
+  },
+
+  deserialiseInquiries (str) {
+    const inquiries = JSON.parse(str)
+    let inquiryList = []
+    if (!inquiries.version) {
+      // Turn data into array if they are not
+      inquiryList = !Array.isArray(inquiries) ? [inquiries] : inquiries
+      inquiryList = migrate(1, inquiryList)
+    } else {
+      inquiryList = inquiries.inquiries || []
+    }
+
+    // Generate new ids if they are the same as existing inquiries
+    inquiryList.forEach(inquiry => {
+      const allInquiriesIds = this.getStoredInquiries().map(inquiry => inquiry.id)
+      if (allInquiriesIds.includes(inquiry.id)) {
+        inquiry.id = nanoid()
+      }
+    })
+
+    return inquiryList
+  },
+
+  importInquiries () {
+    return fu.importFile()
+      .then(str => {
+        return this.deserialiseInquiries(str)
+      })
+  },
+
+  async readPredefinedInquiries () {
+    const res = await fu.readFile('./inquiries.json')
+    const data = await res.json()
+
+    if (!data.version) {
+      return data.length > 0 ? migrate(1, data) : []
+    } else {
+      return data.inquiries
+    }
+  }
+}
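The new storage format wraps the inquiry list in a versioned envelope. What updateStorage writes to localStorage would look roughly like this (the example inquiry is invented):

localStorage.setItem('myInquiries', JSON.stringify({
  version: 2,
  inquiries: [{
    id: 'V1StGXR8_Z5jdHi6B-myT', // a nanoid, invented for the example
    name: 'Top fruit',
    query: 'SELECT * FROM fruit',
    viewType: 'pivot', // or 'chart'
    viewOptions: {},
    createdAt: '2021-06-01T10:00:00.000Z'
  }]
}))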
@@ -1,96 +0,0 @@
-import { nanoid } from 'nanoid'
-import fu from '@/lib/utils/fileIo'
-
-export default {
-  getStoredQueries () {
-    return JSON.parse(localStorage.getItem('myQueries')) || []
-  },
-
-  duplicateQuery (baseQuery) {
-    const newQuery = JSON.parse(JSON.stringify(baseQuery))
-    newQuery.name = newQuery.name + ' Copy'
-    newQuery.id = nanoid()
-    newQuery.createdAt = new Date()
-    delete newQuery.isPredefined
-
-    return newQuery
-  },
-
-  isTabNeedName (queryTab) {
-    const isFromScratch = !queryTab.initName
-    return queryTab.isPredefined || isFromScratch
-  },
-
-  save (queryTab, newName) {
-    const value = {
-      id: queryTab.isPredefined ? nanoid() : queryTab.id,
-      query: queryTab.query,
-      chart: queryTab.$refs.chart.getChartStateForSave(),
-      name: newName || queryTab.initName
-    }
-
-    // Get queries from local storage
-    const myQueries = this.getStoredQueries()
-
-    // Set createdAt
-    if (newName) {
-      value.createdAt = new Date()
-    } else {
-      var queryIndex = myQueries.findIndex(oldQuery => oldQuery.id === queryTab.id)
-      value.createdAt = myQueries[queryIndex].createdAt
-    }
-
-    // Insert in queries list
-    if (newName) {
-      myQueries.push(value)
-    } else {
-      myQueries[queryIndex] = value
-    }
-
-    // Save to local storage
-    this.updateStorage(myQueries)
-    return value
-  },
-
-  updateStorage (value) {
-    localStorage.setItem('myQueries', JSON.stringify(value))
-  },
-
-  serialiseQueries (queryList) {
-    const preparedData = JSON.parse(JSON.stringify(queryList))
-    preparedData.forEach(query => delete query.isPredefined)
-    return JSON.stringify(preparedData, null, 4)
-  },
-
-  deserialiseQueries (str) {
-    let queryList = JSON.parse(str)
-    // Turn data into array if they are not
-    if (!Array.isArray(queryList)) {
-      queryList = [queryList]
-    }
-
-    // Generate new ids if they are the same as existing queries
-    queryList.forEach(query => {
-      const allQueriesIds = this.getStoredQueries().map(query => query.id)
-      if (allQueriesIds.includes(query.id)) {
-        query.id = nanoid()
-      }
-    })
-
-    return queryList
-  },
-
-  importQueries () {
-    return fu.importFile()
-      .then(data => {
-        return this.deserialiseQueries(data)
-      })
-  },
-
-  readPredefinedQueries () {
-    return fu.readFile('./queries.json')
-      .then(resp => {
-        return resp.json()
-      })
-  }
-}
@@ -10,11 +10,9 @@ export default {
     return file.name.replace(/\.[^.]+$/, '')
   },
 
-  exportToFile (str, fileName, type = 'octet/stream') {
+  downloadFromUrl (url, fileName) {
    // Create downloader
     const downloader = document.createElement('a')
-    const blob = new Blob([str], { type })
-    const url = URL.createObjectURL(blob)
     downloader.href = url
     downloader.download = fileName
 
@@ -25,6 +23,12 @@ export default {
     URL.revokeObjectURL(url)
   },
 
+  async exportToFile (str, fileName, type = 'octet/stream') {
+    const blob = new Blob([str], { type })
+    const url = URL.createObjectURL(blob)
+    this.downloadFromUrl(url, fileName)
+  },
+
   /**
    * Note: if user press Cancel in file choosing dialog
    * it will be an unsettled promise. But it's grabbed by
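exportToFile is thus reduced to building a Blob URL and delegating to downloadFromUrl. A usage sketch (file names and content invented; fu is the fileIo import alias used elsewhere in this commit):

fu.exportToFile(JSON.stringify({ version: 2, inquiries: [] }), 'inquiries.json', 'application/json')
// or reuse an object URL that already exists:
const url = URL.createObjectURL(new Blob(['a,b\n1,2'], { type: 'text/csv' }))
fu.downloadFromUrl(url, 'result.csv')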