pf_app/routes/operations.js
Paul Trowbridge cfee3e96b9 Return inserted rows from change operations for incremental grid updates
Instead of re-fetching all forecast data after scale/recode/clone/reference,
the routes now return the inserted rows directly. The frontend uses ag-Grid's
applyTransaction to add only the new rows, eliminating the full reload round-trip.

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
2026-04-01 12:04:28 -04:00

267 lines
11 KiB
JavaScript

const express = require('express');
const { applyTokens, buildWhere, buildExcludeClause, buildSetClause, esc } = require('../lib/sql_generator');
const { fcTable } = require('../lib/utils');
module.exports = function(pool) {
const router = express.Router();
// Execute a statement through the shared pool, echoing it to the server
// log first so every operation is traceable from the console.
async function runSQL(sql) {
  console.log('--- SQL ---\n', sql, '\n--- END SQL ---');
  const result = await pool.query(sql);
  return result;
}
// Assemble everything an operation needs to execute against one version:
// the version row joined with its source, the source's column metadata
// (with the dimension/value/units roles broken out), the physical
// forecast table name, and the stored SQL template for the operation.
// Throws an Error carrying .status = 404 (unknown version) or
// .status = 400 (no generated SQL for the operation yet).
async function getContext(versionId, operation) {
  const verResult = await pool.query(`
SELECT v.*, s.schema, s.tname, s.id AS source_id
FROM pf.version v
JOIN pf.source s ON s.id = v.source_id
WHERE v.id = $1
`, [versionId]);
  const version = verResult.rows[0];
  if (!version) {
    const err = new Error('Version not found');
    err.status = 404;
    throw err;
  }
  const { rows: colMeta } = await pool.query(
    `SELECT * FROM pf.col_meta WHERE source_id = $1 ORDER BY opos`,
    [version.source_id]
  );
  const { rows: sqlRows } = await pool.query(
    `SELECT sql FROM pf.sql WHERE source_id = $1 AND operation = $2`,
    [version.source_id, operation]
  );
  if (sqlRows.length === 0) {
    const err = new Error(`No generated SQL for operation "${operation}" — run generate-sql first`);
    err.status = 400;
    throw err;
  }
  // Column roles come from pf.col_meta; value/units may legitimately be absent.
  const roleCol = (role) => colMeta.find((c) => c.role === role)?.cname;
  return {
    version,
    table: fcTable(version.tname, version.id),
    colMeta,
    dimCols: colMeta.filter((c) => c.role === 'dimension').map((c) => c.cname),
    valueCol: roleCol('value'),
    unitsCol: roleCol('units'),
    sql: sqlRows[0].sql
  };
}
// Write-guard for mutating routes: when the version is closed, reply
// 403 and return false so the caller can bail out; otherwise return
// true and leave the response untouched.
function guardOpen(version, res) {
  const closed = version.status === 'closed';
  if (closed) {
    res.status(403).json({ error: 'Version is closed' });
  }
  return !closed;
}
// fetch all rows for a version (all iters including reference)
router.get('/versions/:id/data', async (req, res) => {
  // Parse with an explicit radix and reject non-numeric ids with a 400
  // up front; previously NaN reached the driver and surfaced as a 500.
  const versionId = Number.parseInt(req.params.id, 10);
  if (Number.isNaN(versionId)) {
    return res.status(400).json({ error: 'id must be an integer' });
  }
  try {
    const ctx = await getContext(versionId, 'get_data');
    const sql = applyTokens(ctx.sql, { fc_table: ctx.table });
    const result = await runSQL(sql);
    res.json(result.rows);
  } catch (err) {
    console.error(err);
    res.status(err.status || 500).json({ error: err.message });
  }
});
// load baseline rows from source table for a date range
// deletes existing iter='baseline' rows before inserting (handled inside stored SQL)
router.post('/versions/:id/baseline', async (req, res) => {
  const { date_from, date_to, pf_user, note, replay } = req.body;
  if (!date_from || !date_to) {
    return res.status(400).json({ error: 'date_from and date_to are required' });
  }
  if (replay) {
    return res.status(501).json({ error: 'replay is not yet implemented' });
  }
  // Explicit radix + NaN rejection: a bad id should be a client 400,
  // not a driver error surfacing as a 500.
  const versionId = Number.parseInt(req.params.id, 10);
  if (Number.isNaN(versionId)) {
    return res.status(400).json({ error: 'id must be an integer' });
  }
  try {
    const ctx = await getContext(versionId, 'baseline');
    if (!guardOpen(ctx.version, res)) return;
    const sql = applyTokens(ctx.sql, {
      fc_table: ctx.table,
      version_id: ctx.version.id,
      pf_user: esc(pf_user || ''),
      note: esc(note || ''),
      params: esc(JSON.stringify({ date_from, date_to })),
      date_from: esc(date_from),
      date_to: esc(date_to)
    });
    const result = await runSQL(sql);
    // NOTE: baseline returns only a count — it is not part of the
    // incremental grid-update flow the other change operations use.
    res.json({ rows_affected: result.rows.length });
  } catch (err) {
    console.error(err);
    res.status(err.status || 500).json({ error: err.message });
  }
});
// load reference rows from source table (additive — does not clear prior reference rows)
router.post('/versions/:id/reference', async (req, res) => {
  const { date_from, date_to, pf_user, note } = req.body;
  if (!date_from || !date_to) {
    return res.status(400).json({ error: 'date_from and date_to are required' });
  }
  // Explicit radix + NaN rejection: bad ids get a 400 instead of a 500.
  const versionId = Number.parseInt(req.params.id, 10);
  if (Number.isNaN(versionId)) {
    return res.status(400).json({ error: 'id must be an integer' });
  }
  try {
    const ctx = await getContext(versionId, 'reference');
    if (!guardOpen(ctx.version, res)) return;
    const sql = applyTokens(ctx.sql, {
      fc_table: ctx.table,
      version_id: ctx.version.id,
      pf_user: esc(pf_user || ''),
      note: esc(note || ''),
      params: esc(JSON.stringify({ date_from, date_to })),
      date_from: esc(date_from),
      date_to: esc(date_to)
    });
    const result = await runSQL(sql);
    // Return the inserted rows so the frontend can apply them incrementally.
    res.json({ rows: result.rows, rows_affected: result.rows.length });
  } catch (err) {
    console.error(err);
    res.status(err.status || 500).json({ error: err.message });
  }
});
// scale a slice — adjust value and/or units by absolute amount or percentage
router.post('/versions/:id/scale', async (req, res) => {
  const { pf_user, note, slice, value_incr, units_incr, pct } = req.body;
  if (!slice || Object.keys(slice).length === 0) {
    return res.status(400).json({ error: 'slice is required' });
  }
  // Explicit radix + NaN rejection: bad ids get a 400 instead of a 500.
  const versionId = Number.parseInt(req.params.id, 10);
  if (Number.isNaN(versionId)) {
    return res.status(400).json({ error: 'id must be an integer' });
  }
  try {
    const ctx = await getContext(versionId, 'scale');
    if (!guardOpen(ctx.version, res)) return;
    const whereClause = buildWhere(slice, ctx.dimCols);
    const excludeClause = buildExcludeClause(ctx.version.exclude_iters);
    let absValueIncr = value_incr || 0;
    let absUnitsIncr = units_incr || 0;
    // pct mode: run a quick totals query, convert percentages to absolutes
    if (pct && (value_incr || units_incr)) {
      // A source may have no value or units column; previously an
      // undefined column name produced sum("undefined") and a SQL error.
      // Sum NULL instead, which yields a 0 increment for that component.
      const valueSum = ctx.valueCol ? `sum("${ctx.valueCol}")` : 'NULL';
      const unitsSum = ctx.unitsCol ? `sum("${ctx.unitsCol}")` : 'NULL';
      const totals = await pool.query(`
SELECT
${valueSum} AS total_value,
${unitsSum} AS total_units
FROM ${ctx.table}
WHERE ${whereClause}
${excludeClause}
`);
      const { total_value, total_units } = totals.rows[0];
      if (value_incr) absValueIncr = (Number.parseFloat(total_value) || 0) * value_incr / 100;
      if (units_incr) absUnitsIncr = (Number.parseFloat(total_units) || 0) * units_incr / 100;
    }
    if (absValueIncr === 0 && absUnitsIncr === 0) {
      return res.status(400).json({ error: 'value_incr and/or units_incr must be non-zero' });
    }
    const sql = applyTokens(ctx.sql, {
      fc_table: ctx.table,
      version_id: ctx.version.id,
      pf_user: esc(pf_user || ''),
      note: esc(note || ''),
      // params records the caller's original request; the converted
      // absolute increments go in value_incr/units_incr below.
      params: esc(JSON.stringify({ slice, value_incr, units_incr, pct })),
      slice: esc(JSON.stringify(slice)),
      where_clause: whereClause,
      exclude_clause: excludeClause,
      value_incr: absValueIncr,
      units_incr: absUnitsIncr
    });
    const result = await runSQL(sql);
    // Return the inserted rows so the grid can applyTransaction instead of reloading.
    res.json({ rows: result.rows, rows_affected: result.rows.length });
  } catch (err) {
    console.error(err);
    res.status(err.status || 500).json({ error: err.message });
  }
});
// recode dimension values on a slice
// inserts negative rows to zero out the original, positive rows with new dimension values
router.post('/versions/:id/recode', async (req, res) => {
  const { pf_user, note, slice, set } = req.body;
  if (!slice || Object.keys(slice).length === 0) {
    return res.status(400).json({ error: 'slice is required' });
  }
  if (!set || Object.keys(set).length === 0) {
    return res.status(400).json({ error: 'set is required' });
  }
  // Explicit radix + NaN rejection: bad ids get a 400 instead of a 500.
  const versionId = Number.parseInt(req.params.id, 10);
  if (Number.isNaN(versionId)) {
    return res.status(400).json({ error: 'id must be an integer' });
  }
  try {
    const ctx = await getContext(versionId, 'recode');
    if (!guardOpen(ctx.version, res)) return;
    const whereClause = buildWhere(slice, ctx.dimCols);
    const excludeClause = buildExcludeClause(ctx.version.exclude_iters);
    const setClause = buildSetClause(ctx.dimCols, set);
    const sql = applyTokens(ctx.sql, {
      fc_table: ctx.table,
      version_id: ctx.version.id,
      pf_user: esc(pf_user || ''),
      note: esc(note || ''),
      params: esc(JSON.stringify({ slice, set })),
      slice: esc(JSON.stringify(slice)),
      where_clause: whereClause,
      exclude_clause: excludeClause,
      set_clause: setClause
    });
    const result = await runSQL(sql);
    // Return the inserted rows so the grid can applyTransaction instead of reloading.
    res.json({ rows: result.rows, rows_affected: result.rows.length });
  } catch (err) {
    console.error(err);
    res.status(err.status || 500).json({ error: err.message });
  }
});
// clone a slice as new business under new dimension values
// does not offset the original slice
router.post('/versions/:id/clone', async (req, res) => {
  const { pf_user, note, slice, set, scale } = req.body;
  if (!slice || Object.keys(slice).length === 0) {
    return res.status(400).json({ error: 'slice is required' });
  }
  if (!set || Object.keys(set).length === 0) {
    return res.status(400).json({ error: 'set is required' });
  }
  // Explicit radix + NaN rejection: bad ids get a 400 instead of a 500.
  const versionId = Number.parseInt(req.params.id, 10);
  if (Number.isNaN(versionId)) {
    return res.status(400).json({ error: 'id must be an integer' });
  }
  // Validate scale here: previously a non-numeric scale parsed to NaN,
  // which was interpolated into the stored SQL and failed server-side.
  const scaleFactor = (scale != null) ? Number.parseFloat(scale) : 1.0;
  if (!Number.isFinite(scaleFactor)) {
    return res.status(400).json({ error: 'scale must be a finite number' });
  }
  try {
    const ctx = await getContext(versionId, 'clone');
    if (!guardOpen(ctx.version, res)) return;
    const whereClause = buildWhere(slice, ctx.dimCols);
    const excludeClause = buildExcludeClause(ctx.version.exclude_iters);
    const setClause = buildSetClause(ctx.dimCols, set);
    const sql = applyTokens(ctx.sql, {
      fc_table: ctx.table,
      version_id: ctx.version.id,
      pf_user: esc(pf_user || ''),
      note: esc(note || ''),
      params: esc(JSON.stringify({ slice, set, scale: scaleFactor })),
      slice: esc(JSON.stringify(slice)),
      where_clause: whereClause,
      exclude_clause: excludeClause,
      set_clause: setClause,
      scale_factor: scaleFactor
    });
    const result = await runSQL(sql);
    // Return the inserted rows so the grid can applyTransaction instead of reloading.
    res.json({ rows: result.rows, rows_affected: result.rows.length });
  } catch (err) {
    console.error(err);
    res.status(err.status || 500).json({ error: err.message });
  }
});
return router;
};