Skip to content
Snippets Groups Projects
Commit dd541e5f authored by James Long's avatar James Long
Browse files

initial (open-source)

parents
Branches
No related tags found
No related merge requests found
Showing
with 1282 additions and 0 deletions
.DS_Store
config.json
node_modules
log
supervise
bin/large-sync-data.txt
user-files
server-files
\ No newline at end of file
// account-db.js: lazy singleton accessor for the instance's account
// database (auth, sessions, files metadata).
let { join } = require('path');
let { openDatabase } = require('./db');

// Cached connection; opened on first use and shared afterwards.
let accountDb = null;

// Open (once) and return the sqlite database that stores account state.
function getAccountDb() {
  if (accountDb === null) {
    accountDb = openDatabase(join(__dirname, 'server-files/account.sqlite'));
  }
  return accountDb;
}

module.exports = { getAccountDb };
let express = require('express');
let bcrypt = require('bcrypt');
let uuid = require('uuid');
let errorMiddleware = require('./util/error-middleware');
let { validateUser } = require('./util/validate-user');
let { getAccountDb } = require('./account-db');

let app = express();
// NOTE(review): the error middleware is registered here, before the
// routes, and again after them at the bottom of the file — the
// pre-route registration looks redundant; confirm it is intentional.
app.use(errorMiddleware);

// No startup work is needed for the account endpoints; kept so every
// sub-app exposes the same { handlers, init } interface.
function init() {}

// Hash a plaintext password with bcrypt (cost factor 12) before it is
// stored in the `auth` table.
function hashPassword(password) {
  return bcrypt.hashSync(password, 12);
}
// Non-authenticated endpoints:
//
// /bootstrap (special endpoint for setting up the instance; can't be called again)
// /login

// Reports whether this instance has been bootstrapped yet, i.e.
// whether a password row already exists in `auth`.
app.get('/needs-bootstrap', (req, res) => {
  let db = getAccountDb();
  let authRows = db.all('SELECT * FROM auth');
  res.send({
    status: 'ok',
    data: { bootstrapped: authRows.length > 0 }
  });
});
// One-time setup: store the instance password and create the single
// permanent session token. Rejected once a password already exists.
app.post('/bootstrap', (req, res) => {
  let db = getAccountDb();

  if (db.all('SELECT * FROM auth').length !== 0) {
    res.status(400).send({
      status: 'error',
      reason: 'already-bootstrapped'
    });
    return;
  }

  let { password } = req.body;
  if (password == null || password === '') {
    res.status(400).send({ status: 'error', reason: 'invalid-password' });
    return;
  }

  // Hash the password. There's really not a strong need for this
  // since this is a self-hosted instance owned by the user.
  // However, just in case we do it.
  db.mutate('INSERT INTO auth (password) VALUES (?)', [hashPassword(password)]);

  let token = uuid.v4();
  db.mutate('INSERT INTO sessions (token) VALUES (?)', [token]);

  res.send({ status: 'ok', data: { token } });
});
// Authenticate with the instance password. Always responds 200; on
// success data.token holds the session token, on failure it is null so
// clients can distinguish a bad password from a transport error.
app.post('/login', (req, res) => {
  let { password } = req.body;
  let accountDb = getAccountDb();
  let row = accountDb.first('SELECT * FROM auth');

  // Guard against a missing or non-string password: bcrypt.compareSync
  // throws when `data` is null/undefined, which would turn a malformed
  // login request into a server error instead of a failed login.
  let confirmed =
    row != null &&
    typeof password === 'string' &&
    bcrypt.compareSync(password, row.password);

  let token = null;
  if (confirmed) {
    // Right now, tokens are permanent and there's just one in the
    // system. In the future this should probably evolve to be a
    // "session" that times out after a long time or something, and
    // maybe each device has a different token
    let session = accountDb.first('SELECT * FROM sessions');
    token = session.token;
  }
  res.send({ status: 'ok', data: { token } });
});
// Change the instance password. Requires an authenticated user.
app.post('/change-password', (req, res) => {
  let user = validateUser(req, res);
  if (!user) return;

  let { password } = req.body;
  if (password == null || password === '') {
    // Report the client error with a 400 status, matching how
    // /bootstrap reports the same invalid-password failure (the
    // original sent the error body with a 200).
    res.status(400).send({ status: 'error', reason: 'invalid-password' });
    return;
  }

  let accountDb = getAccountDb();
  let hashed = hashPassword(password);

  // Note that this doesn't have a WHERE. This table only ever has 1
  // row (maybe that will change in the future? if so this will not work)
  accountDb.mutate('UPDATE auth SET password = ?', [hashed]);
  res.send({ status: 'ok', data: {} });
});
// Check that the auth token attached to the request is still valid.
// validateUser writes the error response itself on failure.
app.get('/validate', (req, res) => {
  let user = validateUser(req, res);
  if (!user) {
    return;
  }
  res.send({ status: 'ok', data: { validated: true } });
});

app.use(errorMiddleware);

module.exports.handlers = app;
module.exports.init = init;
const express = require('express');
const uuid = require('uuid');
const fetch = require('node-fetch');
const plaid = require('plaid');
const { middleware: connectDb } = require('./db');
const { handleError } = require('./util/handle-error');
const { validateSubscribedUser } = require('./util/validate-user');
const config = require('./config');

const app = express();

// Shared Plaid API client; created by init() before the server starts
// handling requests.
let plaidClient;
// Build the shared Plaid client from config. Must run once at startup,
// before any route that talks to Plaid is hit.
function init() {
  plaidClient = new plaid.Client({
    clientID: config.plaid.client_id,
    secret: config.plaid.secret,
    env: config.plaid.env,
    // Pin the Plaid API version this code was written against.
    options: { version: '2019-05-29' }
  });
}
// Look up the web token named in the request body. On failure (unknown
// token, or one older than 10 minutes) this sends the error response
// itself and returns null; on success it returns the token's db row.
async function validateToken(req, res) {
  let { token } = req.body;
  let rows = await req.runQuery(
    'SELECT * FROM webTokens WHERE token_id = $1',
    [token],
    true
  );
  if (!rows.length) {
    res.send(JSON.stringify({ status: 'error', reason: 'not-found' }));
    return null;
  }

  let [row] = rows;

  // Tokens are only valid for 10 minutes
  const TOKEN_TTL_MS = 1000 * 60 * 10;
  let age = Date.now() - JSON.parse(row.time_created);
  if (age >= TOKEN_TTL_MS) {
    res.send(JSON.stringify({ status: 'error', reason: 'expired' }));
    return null;
  }
  return row;
}
// Create a short-lived web token for the authenticated user. Any
// previous token for the user is deleted first, so only one can be
// outstanding at a time.
app.post(
  '/create-web-token',
  connectDb,
  handleError(async (req, res) => {
    let user = await validateSubscribedUser(req, res);
    if (!user) {
      return;
    }

    await req.runQuery('DELETE FROM webTokens WHERE user_id = $1', [user.id]);

    let token = uuid.v4();
    await req.runQuery(
      'INSERT INTO webTokens (user_id, token_id, time_created) VALUES ($1, $2, $3)',
      [user.id, token, Date.now()]
    );

    res.send(
      JSON.stringify({
        status: 'ok',
        data: token
      })
    );
  })
);
// Check a web token; validateToken already responds on failure, so we
// only need to answer the success case here.
app.post(
  '/validate-web-token',
  connectDb,
  handleError(async (req, res) => {
    if (await validateToken(req, res)) {
      res.send(JSON.stringify({ status: 'ok' }));
    }
  })
);
// Store arbitrary JSON contents on an existing (still-valid) web token.
app.post(
  '/put-web-token-contents',
  connectDb,
  handleError(async (req, res) => {
    let token = await validateToken(req, res);
    if (!token) {
      return;
    }

    await req.runQuery(
      'UPDATE webTokens SET contents = $1 WHERE token_id = $2',
      [JSON.stringify(req.body.data), token.token_id]
    );

    res.send(
      JSON.stringify({
        status: 'ok',
        data: null
      })
    );
  })
);
// Read the contents previously stored on a web token. Requires both a
// subscribed user and a valid token, and the token must belong to the
// user.
app.post(
  '/get-web-token-contents',
  connectDb,
  handleError(async (req, res) => {
    let user = await validateSubscribedUser(req, res);
    if (!user) {
      return;
    }
    let token = await validateToken(req, res);
    if (!token) {
      return;
    }

    let rows = await req.runQuery(
      'SELECT * FROM webTokens WHERE user_id = $1 AND token_id = $2',
      [user.id, token.token_id],
      true
    );
    if (rows.length === 0) {
      res.send(
        JSON.stringify({
          status: 'error',
          reason: 'not-found'
        })
      );
      // Bug fix: without this return we would fall through, crash on
      // rows[0].contents, and attempt a second res.send.
      return;
    }

    res.send(
      JSON.stringify({
        status: 'ok',
        data: JSON.parse(rows[0].contents)
      })
    );
  })
);
// Create a Plaid Link token for the user that owns the given web token.
app.post(
  '/make_link_token',
  connectDb,
  handleError(async (req, res) => {
    let webToken = await validateToken(req, res);
    if (!webToken) {
      return;
    }

    let result = await plaidClient.createLinkToken({
      user: {
        client_user_id: webToken.user_id
      },
      client_name: 'Actual',
      products: ['transactions'],
      country_codes: ['US'],
      language: 'en'
    });

    res.send(JSON.stringify({ status: 'ok', data: result.link_token }));
  })
);
// Exchange a Plaid public token for a permanent access token and store
// it for the user's item.
app.post(
  '/handoff_public_token',
  connectDb,
  handleError(async (req, res) => {
    let user = await validateSubscribedUser(req, res);
    if (!user) {
      return;
    }

    let { item_id, public_token } = req.body;

    // Call Plaid's token-exchange endpoint directly.
    let exchangeRes = await fetch(config.plaid.env + '/item/public_token/exchange', {
      method: 'POST',
      body: JSON.stringify({
        client_id: config.plaid.client_id,
        secret: config.plaid.secret,
        public_token: public_token
      }),
      headers: {
        'Content-Type': 'application/json',
        'User-Agent': 'Actual Budget'
      }
    });
    let resData = await exchangeRes.json();

    await req.runQuery(
      'INSERT INTO access_tokens (item_id, user_id, access_token) VALUES ($1, $2, $3)',
      [item_id, user.id, resData.access_token]
    );

    res.send(JSON.stringify({ status: 'ok' }));
  })
);
// Remove a Plaid item: ask Plaid to remove it, then soft-delete the
// stored access token so it is no longer used for syncing.
app.post(
  '/remove-access-token',
  connectDb,
  handleError(async (req, res) => {
    let user = await validateSubscribedUser(req, res);
    if (!user) {
      return;
    }
    let { item_id } = req.body;
    const rows = await req.runQuery(
      'SELECT * FROM access_tokens WHERE user_id = $1 AND item_id = $2',
      [user.id, item_id],
      true
    );
    if (rows.length === 0) {
      // handleError turns this into the error response.
      throw new Error('access token not found');
    }
    const { access_token } = rows[0];
    const url = config.plaid.env + '/item/remove';
    const resData = await fetch(url, {
      method: 'POST',
      body: JSON.stringify({
        client_id: config.plaid.client_id,
        secret: config.plaid.secret,
        access_token: access_token
      }),
      headers: {
        'Content-Type': 'application/json',
        'User-Agent': 'Actual Budget'
      }
    }).then(res => res.json());
    if (resData.removed !== true) {
      // Only a short prefix of the token is logged, to avoid leaking
      // the secret into logs.
      console.log('[Error] Item not removed: ' + access_token.slice(0, 3));
    }
    // Soft-delete locally regardless of Plaid's answer.
    await req.runQuery(
      'UPDATE access_tokens SET deleted = TRUE WHERE access_token = $1',
      [access_token]
    );
    res.send(
      JSON.stringify({
        status: 'ok',
        data: resData
      })
    );
  })
);
// Fetch the account list for an item straight from Plaid.
app.post(
  '/accounts',
  connectDb,
  handleError(async (req, res) => {
    let user = await validateSubscribedUser(req, res);
    if (!user) {
      return;
    }

    const { item_id } = req.body;
    const tokenRows = await req.runQuery(
      'SELECT * FROM access_tokens WHERE user_id = $1 AND item_id = $2',
      [user.id, item_id],
      true
    );
    if (!tokenRows.length) {
      throw new Error('access token not found');
    }
    const { access_token } = tokenRows[0];

    const plaidRes = await fetch(config.plaid.env + '/accounts/get', {
      method: 'POST',
      body: JSON.stringify({
        client_id: config.plaid.client_id,
        secret: config.plaid.secret,
        access_token: access_token
      }),
      headers: {
        'Content-Type': 'application/json',
        'User-Agent': 'Actual Budget'
      }
    });
    const resData = await plaidRes.json();

    res.send(
      JSON.stringify({
        status: 'ok',
        data: resData
      })
    );
  })
);
// Fetch transactions for one account of an item from Plaid, passing
// the date range and count/offset paging straight through.
app.post(
  '/transactions',
  connectDb,
  handleError(async (req, res) => {
    let user = await validateSubscribedUser(req, res);
    if (!user) {
      return;
    }
    let { item_id, start_date, end_date, account_id, count, offset } = req.body;
    let resData;
    // Unlike /accounts, this excludes soft-deleted tokens and answers
    // with a 400 instead of throwing.
    const rows = await req.runQuery(
      'SELECT * FROM access_tokens WHERE user_id = $1 AND item_id = $2 AND deleted = FALSE',
      [user.id, item_id],
      true
    );
    if (rows.length === 0) {
      res.status(400);
      res.send('access-token-not-found');
      return;
    }
    const { access_token } = rows[0];
    const url = config.plaid.env + '/transactions/get';
    resData = await fetch(url, {
      method: 'POST',
      body: JSON.stringify({
        client_id: config.plaid.client_id,
        secret: config.plaid.secret,
        access_token: access_token,
        start_date: start_date,
        end_date: end_date,
        options: {
          account_ids: [account_id],
          count: count,
          offset: offset
        }
      }),
      headers: {
        'Content-Type': 'application/json',
        'User-Agent': 'Actual Budget'
      }
    }).then(res => res.json());
    res.send(
      JSON.stringify({
        status: 'ok',
        data: resData
      })
    );
  })
);
// Create a Plaid Link token bound to an existing access token —
// presumably for Link's "update mode", which is why no `products`
// list is passed (unlike /make_link_token) — confirm against Plaid docs.
app.post(
  '/make-public-token',
  connectDb,
  handleError(async (req, res) => {
    let user = await validateSubscribedUser(req, res);
    if (!user) {
      return;
    }
    let { item_id } = req.body;
    const rows = await req.runQuery(
      'SELECT * FROM access_tokens WHERE user_id = $1 AND item_id = $2',
      [user.id, item_id],
      true
    );
    if (rows.length === 0) {
      throw new Error('access token not found');
    }
    const { access_token } = rows[0];
    let result = await plaidClient.createLinkToken({
      user: {
        client_user_id: user.id
      },
      client_name: 'Actual',
      country_codes: ['US'],
      language: 'en',
      access_token: access_token
    });
    res.send(
      JSON.stringify({
        status: 'ok',
        data: result
      })
    );
  })
);

module.exports.handlers = app;
module.exports.init = init;
let { Buffer } = require('buffer');
let fs = require('fs/promises');
let { join } = require('path');
let express = require('express');
let uuid = require('uuid');
let AdmZip = require('adm-zip');
let { validateUser } = require('./util/validate-user');
let errorMiddleware = require('./util/error-middleware');
let config = require('./config');
let { getAccountDb } = require('./account-db');
let simpleSync = require('./sync-simple');
let fullSync = require('./sync-full');
let actual = require('@actual-app/api');

// Protobuf namespace used to decode sync requests and encode responses.
let SyncPb = actual.internal.SyncProtoBuf;

const app = express();
app.use(errorMiddleware);
// Boot the embedded Actual engine, pointing it at this server's
// user-files directory. Must complete before /sync requests arrive.
async function init() {
  await actual.init({
    config: {
      dataDir: join(__dirname, 'user-files')
    }
  });
}
// This is a version representing the internal format of sync
// messages. When this changes, all sync files need to be reset. We
// will check this version when syncing and notify the user if they
// need to reset.
const SYNC_FORMAT_VERSION = 2;

// Main sync endpoint: decodes a binary SyncRequest, validates the
// file/group/key state against the `files` table (each failure gets a
// distinct 400 body the client keys off), runs the sync, and returns a
// binary SyncResponse with the merkle trie plus any new messages.
app.post('/sync', async (req, res) => {
  let user = validateUser(req, res);
  if (!user) {
    return;
  }

  let requestPb;
  try {
    requestPb = SyncPb.SyncRequest.deserializeBinary(req.body);
  } catch (e) {
    res.status(500);
    res.send({ status: 'error', reason: 'internal-error' });
    return;
  }

  let accountDb = getAccountDb();
  let file_id = requestPb.getFileid() || null;
  let group_id = requestPb.getGroupid() || null;
  let key_id = requestPb.getKeyid() || null;
  let since = requestPb.getSince() || null;
  let messages = requestPb.getMessagesList();

  if (!since) {
    // NOTE(review): this throw happens inside an async handler; unless
    // Express is configured to catch async rejections it becomes an
    // unhandled rejection rather than an error response — confirm.
    throw new Error('`since` is required');
  }

  let currentFiles = accountDb.all(
    'SELECT group_id, encrypt_keyid, encrypt_meta, sync_version FROM files WHERE id = ?',
    [file_id]
  );
  if (currentFiles.length === 0) {
    res.status(400);
    res.send('file-not-found');
    return;
  }

  let currentFile = currentFiles[0];
  if (
    currentFile.sync_version == null ||
    currentFile.sync_version < SYNC_FORMAT_VERSION
  ) {
    res.status(400);
    res.send('file-old-version');
    return;
  }

  // When resetting sync state, something went wrong. There is no
  // group id and it's awaiting a file to be uploaded.
  if (currentFile.group_id == null) {
    res.status(400);
    res.send('file-needs-upload');
    return;
  }

  // Check to make sure the uploaded file is valid and has been
  // encrypted with the same key it is registered with (this might
  // be wrong if there was an error during the key creation
  // process)
  let uploadedKeyId = currentFile.encrypt_meta
    ? JSON.parse(currentFile.encrypt_meta).keyId
    : null;
  if (uploadedKeyId !== currentFile.encrypt_keyid) {
    res.status(400);
    res.send('file-key-mismatch');
    return;
  }

  // The changes being synced are part of an old group, which
  // means the file has been reset. User needs to re-download.
  if (group_id !== currentFile.group_id) {
    res.status(400);
    res.send('file-has-reset');
    return;
  }

  // The data is encrypted with a different key which is
  // unacceptable. We can't accept these changes. Reject them and
  // tell the user that they need to generate the correct key
  // (which necessitates a sync reset so they need to re-download).
  if (key_id !== currentFile.encrypt_keyid) {
    res.status(400);
    res.send('file-has-new-key');
    // NOTE(review): `return false` is inconsistent with the plain
    // `return` used above; the value is ignored by Express.
    return false;
  }

  // TODO: We also provide a "simple" sync method which currently isn't
  // used. This method just stores the messages locally and doesn't
  // load the whole app at all. If we want to support end-to-end
  // encryption, this method is required because we can't read the
  // messages. Using it looks like this:
  //
  // let simpleSync = require('./sync-simple');
  // let {trie, newMessages } = simpleSync.sync(messages, since, file_id);

  let { trie, newMessages } = await fullSync.sync(messages, since, file_id);

  // encode it back...
  let responsePb = new SyncPb.SyncResponse();
  responsePb.setMerkle(JSON.stringify(trie));
  for (let i = 0; i < newMessages.length; i++) {
    let msg = newMessages[i];
    let envelopePb = new SyncPb.MessageEnvelope();
    envelopePb.setTimestamp(msg.timestamp);
    envelopePb.setIsencrypted(msg.is_encrypted === 1);
    envelopePb.setContent(msg.content);
    responsePb.addMessages(envelopePb);
  }

  res.set('Content-Type', 'application/actual-sync');
  res.send(Buffer.from(responsePb.serializeBinary()));
});
// Return the encryption key info (key id, salt, and the encrypted test
// content) for a file so the client can derive and verify the key.
app.post('/user-get-key', (req, res) => {
  let user = validateUser(req, res);
  if (!user) {
    return;
  }

  let { fileId } = req.body;
  let rows = getAccountDb().all(
    'SELECT encrypt_salt, encrypt_keyid, encrypt_test FROM files WHERE id = ?',
    [fileId]
  );
  if (!rows.length) {
    res.status(400).send('file-not-found');
    return;
  }

  let { encrypt_salt, encrypt_keyid, encrypt_test } = rows[0];
  res.send(
    JSON.stringify({
      status: 'ok',
      data: { id: encrypt_keyid, salt: encrypt_salt, test: encrypt_test }
    })
  );
});
// Record a newly created encryption key (salt, key id, and encrypted
// test content) for a file.
app.post('/user-create-key', (req, res) => {
  let user = validateUser(req, res);
  if (!user) {
    return;
  }

  let { fileId, keyId, keySalt, testContent } = req.body;
  getAccountDb().mutate(
    'UPDATE files SET encrypt_salt = ?, encrypt_keyid = ?, encrypt_test = ? WHERE id = ?',
    [keySalt, keyId, testContent, fileId]
  );

  res.send(JSON.stringify({ status: 'ok' }));
});
// Detach a file from its sync group. A null group_id marks the file as
// "awaiting upload"; the client must re-upload it afterwards, which
// creates a fresh group (see /upload-user-file).
app.post('/reset-user-file', (req, res) => {
  let user = validateUser(req, res);
  if (!user) {
    return;
  }
  let accountDb = getAccountDb();
  let { fileId } = req.body;
  let files = accountDb.all('SELECT group_id FROM files WHERE id = ?', [
    fileId
  ]);
  if (files.length === 0) {
    res.status(400).send('User or file not found');
    return;
  }
  let { group_id } = files[0];
  accountDb.mutate('UPDATE files SET group_id = NULL WHERE id = ?', [fileId]);
  if (group_id) {
    // TODO: Instead of doing this, just delete the db file named
    // after the group
    // db.mutate('DELETE FROM messages_binary WHERE group_id = ?', [group_id]);
    // db.mutate('DELETE FROM messages_merkles WHERE group_id = ?', [group_id]);
  }
  res.send(JSON.stringify({ status: 'ok' }));
});
// Upload a budget file (zip in the raw request body; metadata in
// x-actual-* headers). Validates group/key consistency against any
// existing record, extracts the zip to disk, and creates or updates the
// `files` row — minting a new group id when the file is new or its sync
// state was reset.
app.post('/upload-user-file', async (req, res) => {
  let user = validateUser(req, res);
  if (!user) {
    return;
  }
  let accountDb = getAccountDb();
  let name = decodeURIComponent(req.headers['x-actual-name']);
  let fileId = req.headers['x-actual-file-id'];
  let groupId = req.headers['x-actual-group-id'] || null;
  let encryptMeta = req.headers['x-actual-encrypt-meta'] || null;
  let syncFormatVersion = req.headers['x-actual-format'] || null;
  // The key id lives inside the (JSON) encrypt metadata header.
  let keyId = encryptMeta ? JSON.parse(encryptMeta).keyId : null;
  if (!fileId) {
    throw new Error('fileId is required');
  }
  let currentFiles = accountDb.all(
    'SELECT group_id, encrypt_keyid, encrypt_meta FROM files WHERE id = ?',
    [fileId]
  );
  if (currentFiles.length) {
    let currentFile = currentFiles[0];
    // The uploading file is part of an old group, so reject
    // it. All of its internal sync state is invalid because its
    // old. The sync state has been reset, so user needs to
    // either reset again or download from the current group.
    if (groupId !== currentFile.group_id) {
      res.status(400);
      res.send('file-has-reset');
      return;
    }
    // The key that the file is encrypted with is different than
    // the current registered key. All data must always be
    // encrypted with the registered key for consistency. Key
    // changes always necessitate a sync reset, which means this
    // upload is trying to overwrite another reset. That might
    // be fine, but since we definitely cannot accept a file
    // encrypted with the wrong key, we bail and suggest the
    // user download the latest file.
    if (keyId !== currentFile.encrypt_keyid) {
      res.status(400);
      res.send('file-has-new-key');
      return;
    }
  }
  // TODO: If we want to support end-to-end encryption, we'd write the
  // raw file down because it's an encrypted blob. This isn't
  // supported yet in the self-hosted version because it's unclear if
  // it's still needed, given that you own your server
  //
  // await fs.writeFile(join(config.files, `${fileId}.blob`), req.body);
  let zip = new AdmZip(req.body);
  try {
    // Overwrite any existing extraction for this file id.
    zip.extractAllTo(join(config.files, fileId), true);
  } catch (err) {
    console.log('Error writing file', err);
    res.send(JSON.stringify({ status: 'error' }));
    return;
  }
  let rows = accountDb.all('SELECT id FROM files WHERE id = ?', [fileId]);
  if (rows.length === 0) {
    // it's new
    groupId = uuid.v4();
    accountDb.mutate(
      'INSERT INTO files (id, group_id, sync_version, name, encrypt_meta) VALUES (?, ?, ?, ?, ?)',
      [fileId, groupId, syncFormatVersion, name, encryptMeta]
    );
    res.send(JSON.stringify({ status: 'ok', groupId }));
  } else {
    if (!groupId) {
      // sync state was reset, create new group
      groupId = uuid.v4();
      accountDb.mutate('UPDATE files SET group_id = ? WHERE id = ?', [
        groupId,
        fileId
      ]);
    }
    // Regardless, update some properties
    accountDb.mutate(
      'UPDATE files SET sync_version = ?, encrypt_meta = ?, name = ? WHERE id = ?',
      [syncFormatVersion, encryptMeta, name, fileId]
    );
    res.send(JSON.stringify({ status: 'ok', groupId }));
  }
});
// Download a file: zips up the file's extracted directory on disk and
// sends it back as an attachment.
app.get('/download-user-file', async (req, res) => {
  let user = validateUser(req, res);
  if (!user) {
    return;
  }
  let accountDb = getAccountDb();
  let fileId = req.headers['x-actual-file-id'];

  // Do some authentication
  let rows = accountDb.all(
    'SELECT id FROM files WHERE id = ? AND deleted = FALSE',
    [fileId]
  );
  if (rows.length === 0) {
    res.status(400).send('User or file not found');
    return;
  }

  let zip = new AdmZip();
  try {
    zip.addLocalFolder(join(config.files, fileId), '/');
  } catch (e) {
    // Most likely the directory is missing or unreadable on disk.
    res.status(500).send('Error reading files');
    return;
  }
  let buffer = zip.toBuffer();
  res.setHeader('Content-Disposition', `attachment;filename=${fileId}`);
  res.send(buffer);
});
// Rename a file. The file must exist and not be soft-deleted.
app.post('/update-user-filename', (req, res) => {
  let user = validateUser(req, res);
  if (!user) {
    return;
  }
  let accountDb = getAccountDb();
  let { fileId, name } = req.body;

  // Do some authentication
  let rows = accountDb.all(
    'SELECT id FROM files WHERE id = ? AND deleted = FALSE',
    [fileId]
  );
  if (rows.length === 0) {
    // A missing file is a client error, not a server failure; use 400
    // for consistency with /download-user-file and /reset-user-file
    // (the original responded with 500 here).
    res.status(400).send('User or file not found');
    return;
  }

  accountDb.mutate('UPDATE files SET name = ? WHERE id = ?', [name, fileId]);
  res.send(JSON.stringify({ status: 'ok' }));
});
// List every file (soft-deleted ones included; the `deleted` flag is
// returned so the client can filter).
app.get('/list-user-files', (req, res) => {
  let user = validateUser(req, res);
  if (!user) {
    return;
  }

  let accountDb = getAccountDb();
  let rows = accountDb.all('SELECT * FROM files');
  let data = rows.map(row => ({
    deleted: row.deleted,
    fileId: row.id,
    groupId: row.group_id,
    name: row.name,
    encryptKeyId: row.encrypt_keyid
  }));

  res.send(JSON.stringify({ status: 'ok', data }));
});
// Fetch the metadata for a single non-deleted file.
app.get('/get-user-file-info', (req, res) => {
  let user = validateUser(req, res);
  if (!user) {
    return;
  }

  let fileId = req.headers['x-actual-file-id'];
  let rows = getAccountDb().all(
    'SELECT * FROM files WHERE id = ? AND deleted = FALSE',
    [fileId]
  );
  if (!rows.length) {
    res.send(JSON.stringify({ status: 'error' }));
    return;
  }

  let file = rows[0];
  res.send(
    JSON.stringify({
      status: 'ok',
      data: {
        deleted: file.deleted,
        fileId: file.id,
        groupId: file.group_id,
        name: file.name,
        encryptMeta: file.encrypt_meta ? JSON.parse(file.encrypt_meta) : null
      }
    })
  );
});
// Soft-delete a file; the row stays in the db with deleted = TRUE.
app.post('/delete-user-file', (req, res) => {
  let user = validateUser(req, res);
  if (!user) {
    return;
  }

  let { fileId } = req.body;
  getAccountDb().mutate('UPDATE files SET deleted = TRUE WHERE id = ?', [
    fileId
  ]);
  res.send(JSON.stringify({ status: 'ok' }));
});

module.exports.handlers = app;
module.exports.init = init;
app.js 0 → 100644
// Entry point: wires the account and sync sub-apps into one Express
// server.
require('source-map-support').install();

const express = require('express');
const bodyParser = require('body-parser');
const cors = require('cors');
const config = require('./config');
const accountApp = require('./app-account');
const syncApp = require('./app-sync');

const app = express();

// Log (but don't crash on) unhandled promise rejections, e.g. async
// route handlers that throw outside of an error middleware.
process.on('unhandledRejection', reason => {
  console.log('Rejection:', reason);
});

app.use(cors());
// JSON for normal API traffic; raw bodies for binary sync payloads and
// encrypted file uploads.
app.use(bodyParser.json({ limit: '20mb' }));
app.use(bodyParser.raw({ type: 'application/actual-sync', limit: '20mb' }));
app.use(bodyParser.raw({ type: 'application/encrypted-file', limit: '50mb' }));

app.use('/sync', syncApp.handlers);
app.use('/account', accountApp.handlers);

// Root endpoint reports which mode the server is running in.
app.get('/', (req, res) => {
  res.send(config.mode);
});
// Boot both sub-apps, then start listening. The "Listening" message is
// printed from the listen callback so it only appears once the server
// has actually bound the port (the original logged before binding).
async function run() {
  await accountApp.init();
  await syncApp.init();
  app.listen(config.port, () => {
    console.log('Listening on ' + config.port + '...');
  });
}

run().catch(err => {
  console.log('Error starting app:', err);
  process.exit(1);
});
db.js 0 → 100644
let Database = require('better-sqlite3');
// Thin synchronous wrapper around a better-sqlite3 connection exposing
// a small query API: all/first/exec/mutate/transaction/close.
class WrappedDatabase {
  constructor(db) {
    this.db = db;
  }

  // Run a SELECT and return every row.
  all(sql, params = []) {
    return this.db.prepare(sql).all(...params);
  }

  // Run a SELECT and return the first row, or null when there are none.
  first(sql, params = []) {
    let rows = this.all(sql, params);
    return rows.length ? rows[0] : null;
  }

  // Execute raw SQL with no parameters and no result.
  exec(sql) {
    this.db.exec(sql);
  }

  // Run an INSERT/UPDATE/DELETE; returns the change count and the last
  // inserted row id.
  mutate(sql, params = []) {
    let { changes, lastInsertRowid } = this.db.prepare(sql).run(...params);
    return { changes, insertId: lastInsertRowid };
  }

  // Run fn inside a database transaction and return its result.
  transaction(fn) {
    return this.db.transaction(fn)();
  }

  close() {
    this.db.close();
  }
}
// Open (or create) the sqlite file at `filename` and wrap it in the
// query API above.
function openDatabase(filename) {
  return new WrappedDatabase(new Database(filename));
}

module.exports = { openDatabase };
-- Migration: drop the legacy version table; schema versioning is
-- tracked elsewhere now.
BEGIN TRANSACTION;
DROP TABLE db_version;
COMMIT;
-- Migration: add payee tables — payees themselves, rules for matching
-- imported descriptions, and a mapping table so merged payees can
-- redirect to a canonical target id.
BEGIN TRANSACTION;
CREATE TABLE payees
(id TEXT PRIMARY KEY,
name TEXT,
category TEXT,
tombstone INTEGER DEFAULT 0,
transfer_acct TEXT);
CREATE TABLE payee_rules
(id TEXT PRIMARY KEY,
payee_id TEXT,
type TEXT,
value TEXT,
tombstone INTEGER DEFAULT 0);
-- Rule values are matched case-insensitively, hence the LOWER() index.
CREATE INDEX payee_rules_lowercase_index ON payee_rules(LOWER(value));
CREATE TABLE payee_mapping
(id TEXT PRIMARY KEY,
targetId TEXT);
COMMIT;
-- Migration: rebuild category_groups without the UNIQUE constraint on
-- name (copy rows out to a temp table, recreate, copy back).
BEGIN TRANSACTION;
CREATE TEMPORARY TABLE category_groups_tmp
(id TEXT PRIMARY KEY,
name TEXT UNIQUE,
is_income INTEGER DEFAULT 0,
sort_order REAL,
tombstone INTEGER DEFAULT 0);
INSERT INTO category_groups_tmp SELECT * FROM category_groups;
DROP TABLE category_groups;
CREATE TABLE category_groups
(id TEXT PRIMARY KEY,
name TEXT,
is_income INTEGER DEFAULT 0,
sort_order REAL,
tombstone INTEGER DEFAULT 0);
INSERT INTO category_groups SELECT * FROM category_groups_tmp;
DROP TABLE category_groups_tmp;
COMMIT;
-- Migration: indexes for the most common transaction lookups.
BEGIN TRANSACTION;
CREATE INDEX trans_category_date ON transactions(category, date);
CREATE INDEX trans_category ON transactions(category);
CREATE INDEX trans_date ON transactions(date);
COMMIT;
-- Migration: prune spreadsheet_cells down to budget/carryover/buffered
-- cells and rename them from underscore- to dash-separated form.
BEGIN TRANSACTION;
DELETE FROM spreadsheet_cells WHERE
name NOT LIKE '%!budget\_%' ESCAPE '\' AND
name NOT LIKE '%!carryover\_%' ESCAPE '\' AND
name NOT LIKE '%!buffered';
UPDATE OR REPLACE spreadsheet_cells SET name = REPLACE(name, '_', '-');
-- The two fixed-offset rewrites below appear to splice dashes back
-- into the UUID embedded in budget (total length 52) and carryover
-- (total length 55) cell names — confirm against the cell-name format.
UPDATE OR REPLACE spreadsheet_cells SET
name =
SUBSTR(name, 1, 28) ||
'-' ||
SUBSTR(name, 29, 4) ||
'-' ||
SUBSTR(name, 33, 4) ||
'-' ||
SUBSTR(name, 37, 4) ||
'-' ||
SUBSTR(name, 41, 12)
WHERE name LIKE '%!budget-%' AND LENGTH(name) = 52;
UPDATE OR REPLACE spreadsheet_cells SET
name =
SUBSTR(name, 1, 31) ||
'-' ||
SUBSTR(name, 32, 4) ||
'-' ||
SUBSTR(name, 36, 4) ||
'-' ||
SUBSTR(name, 40, 4) ||
'-' ||
SUBSTR(name, 44, 12)
WHERE name LIKE '%!carryover-%' AND LENGTH(name) = 55;
-- Strip the first character of carryover expressions.
UPDATE spreadsheet_cells SET expr = SUBSTR(expr, 2) WHERE name LIKE '%!carryover-%';
COMMIT;
-- Migration: track cleared/pending state on transactions.
BEGIN TRANSACTION;
ALTER TABLE transactions ADD COLUMN cleared INTEGER DEFAULT 1;
ALTER TABLE transactions ADD COLUMN pending INTEGER DEFAULT 0;
COMMIT;
-- Migration: table for user-defined rules (conditions/actions are TEXT
-- columns — presumably serialized JSON; confirm in app code).
BEGIN TRANSACTION;
CREATE TABLE rules
(id TEXT PRIMARY KEY,
stage TEXT,
conditions TEXT,
actions TEXT,
tombstone INTEGER DEFAULT 0);
COMMIT;
-- Migration: materialize parent_id for split (child) transactions; it
-- was previously encoded in the id as "<parent>/<suffix>".
BEGIN TRANSACTION;
ALTER TABLE transactions ADD COLUMN parent_id TEXT;
UPDATE transactions SET
parent_id = CASE
WHEN isChild THEN SUBSTR(id, 1, INSTR(id, '/') - 1)
ELSE NULL
END;
CREATE INDEX trans_parent_id ON transactions(parent_id);
COMMIT;
-- Migration: layered transaction views. layer2 maps raw columns to
-- public names (resolving category/payee through their mapping tables),
-- layer1 drops tombstoned rows (including children of tombstoned
-- parents), and v_transactions adds the display ordering.
BEGIN TRANSACTION;
DROP VIEW IF EXISTS v_transactions_layer2;
CREATE VIEW v_transactions_layer2 AS
SELECT
t.id AS id,
t.isParent AS is_parent,
t.isChild AS is_child,
t.acct AS account,
CASE WHEN t.isChild = 0 THEN NULL ELSE t.parent_id END AS parent_id,
CASE WHEN t.isParent = 1 THEN NULL ELSE cm.transferId END AS category,
pm.targetId AS payee,
t.imported_description AS imported_payee,
IFNULL(t.amount, 0) AS amount,
t.notes AS notes,
t.date AS date,
t.financial_id AS imported_id,
t.error AS error,
t.starting_balance_flag AS starting_balance_flag,
t.transferred_id AS transfer_id,
t.sort_order AS sort_order,
t.cleared AS cleared,
t.tombstone AS tombstone
FROM transactions t
LEFT JOIN category_mapping cm ON cm.id = t.category
LEFT JOIN payee_mapping pm ON pm.id = t.description
WHERE
t.date IS NOT NULL AND
t.acct IS NOT NULL;
-- Composite index matching the ordering used by v_transactions below.
CREATE INDEX trans_sorted ON transactions(date desc, starting_balance_flag, sort_order desc, id);
DROP VIEW IF EXISTS v_transactions_layer1;
CREATE VIEW v_transactions_layer1 AS
SELECT t.* FROM v_transactions_layer2 t
LEFT JOIN transactions t2 ON (t.is_child = 1 AND t2.id = t.parent_id)
WHERE IFNULL(t.tombstone, 0) = 0 AND IFNULL(t2.tombstone, 0) = 0;
DROP VIEW IF EXISTS v_transactions;
CREATE VIEW v_transactions AS
SELECT t.* FROM v_transactions_layer1 t
ORDER BY t.date desc, t.starting_balance_flag, t.sort_order desc, t.id;
-- v_categories renames cat_group to the reserved word "group" for the
-- public interface.
DROP VIEW IF EXISTS v_categories;
CREATE VIEW v_categories AS
SELECT
id,
name,
is_income,
cat_group AS "group",
sort_order,
tombstone
FROM categories;
COMMIT;
-- Migration: composite index to speed up CRDT message lookups.
BEGIN TRANSACTION;
CREATE INDEX messages_crdt_search ON messages_crdt(dataset, row, column, timestamp);
ANALYZE;
COMMIT;
-- Migration: recreate layer2 with an extra predicate so child rows
-- missing a parent_id are excluded.
BEGIN TRANSACTION;
-- This adds the isChild/parent_id constraint in `where`
DROP VIEW IF EXISTS v_transactions_layer2;
CREATE VIEW v_transactions_layer2 AS
SELECT
t.id AS id,
t.isParent AS is_parent,
t.isChild AS is_child,
t.acct AS account,
CASE WHEN t.isChild = 0 THEN NULL ELSE t.parent_id END AS parent_id,
CASE WHEN t.isParent = 1 THEN NULL ELSE cm.transferId END AS category,
pm.targetId AS payee,
t.imported_description AS imported_payee,
IFNULL(t.amount, 0) AS amount,
t.notes AS notes,
t.date AS date,
t.financial_id AS imported_id,
t.error AS error,
t.starting_balance_flag AS starting_balance_flag,
t.transferred_id AS transfer_id,
t.sort_order AS sort_order,
t.cleared AS cleared,
t.tombstone AS tombstone
FROM transactions t
LEFT JOIN category_mapping cm ON cm.id = t.category
LEFT JOIN payee_mapping pm ON pm.id = t.description
WHERE
t.date IS NOT NULL AND
t.acct IS NOT NULL AND
(t.isChild = 0 OR t.parent_id IS NOT NULL);
COMMIT;
-- Migration: add a generic key/value metadata table and drop the
-- transaction/category views — presumably they are created by the app
-- at runtime from here on; confirm in app code.
BEGIN TRANSACTION;
CREATE TABLE __meta__ (key TEXT PRIMARY KEY, value TEXT);
DROP VIEW IF EXISTS v_transactions_layer2;
DROP VIEW IF EXISTS v_transactions_layer1;
DROP VIEW IF EXISTS v_transactions;
DROP VIEW IF EXISTS v_categories;
COMMIT;
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Please register or to comment