pf_app/routes/operations.js
Paul Trowbridge 1df37a5ff1 Refactor sources UI, rename pf_ system cols, replace filter builder with raw SQL
- Sources page: left column with stacked DB tables + registered sources panels,
  right column as full-height column mapping workbench
- Add compact table search, column search, table preview button, delete source button
- Rename fc_table system columns to pf_ prefix (pf_id, pf_iter, pf_logid,
  pf_created_at) to avoid collisions with source table columns like 'id'
- Remove 'filter' col_meta role — any non-ignore column is now usable in baseline filters
- Replace structured filter row builder with free-form SQL WHERE clause textarea
  and clickable column chips for insertion; fully flexible AND/OR logic
- Baseline segment cards now display raw WHERE clause text + offset

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
2026-04-03 00:47:57 -04:00

292 lines
12 KiB
JavaScript

const express = require('express');
const { applyTokens, buildWhere, buildExcludeClause, buildSetClause, esc } = require('../lib/sql_generator');
const { fcTable } = require('../lib/utils');
module.exports = function(pool) {
const router = express.Router();
// Run a statement against the shared pool, echoing it to the console
// first so every executed operation shows up in the server log.
async function runSQL(sql) {
  console.log('--- SQL ---\n', sql, '\n--- END SQL ---');
  const result = await pool.query(sql);
  return result;
}
// Assemble the execution context an operation needs: the version row
// joined with its source, the source's column metadata, the physical
// fc_table name, and the stored SQL template for the given operation.
// Throws with err.status set (404 / 400) when a piece is missing.
async function getContext(versionId, operation) {
  const { rows: versionRows } = await pool.query(`
SELECT v.*, s.schema, s.tname, s.id AS source_id
FROM pf.version v
JOIN pf.source s ON s.id = v.source_id
WHERE v.id = $1
`, [versionId]);
  if (!versionRows.length) {
    const err = new Error('Version not found');
    err.status = 404;
    throw err;
  }
  const [version] = versionRows;

  const { rows: colMeta } = await pool.query(
    `SELECT * FROM pf.col_meta WHERE source_id = $1 ORDER BY opos`,
    [version.source_id]
  );

  const { rows: sqlRows } = await pool.query(
    `SELECT sql FROM pf.sql WHERE source_id = $1 AND operation = $2`,
    [version.source_id, operation]
  );
  if (!sqlRows.length) {
    const err = new Error(`No generated SQL for operation "${operation}" — run generate-sql first`);
    err.status = 400;
    throw err;
  }

  // Look up the column name carrying a given col_meta role, if any.
  const roleCol = (role) => colMeta.find((c) => c.role === role)?.cname;

  return {
    version,
    table: fcTable(version.tname, version.id),
    colMeta,
    dimCols: colMeta.filter((c) => c.role === 'dimension').map((c) => c.cname),
    valueCol: roleCol('value'),
    unitsCol: roleCol('units'),
    sql: sqlRows[0].sql
  };
}
// Gatekeeper for write operations: returns true when the version may be
// modified; otherwise responds 403 and returns false so the caller can
// simply `return`.
function guardOpen(version, res) {
  if (version.status !== 'closed') return true;
  res.status(403).json({ error: 'Version is closed' });
  return false;
}
// fetch all rows for a version (all iters including reference)
router.get('/versions/:id/data', async (req, res) => {
  try {
    // Validate the id up front: parseInt without a radix on a bad :id
    // yields NaN, which previously reached the DB and surfaced as a 500.
    const versionId = Number.parseInt(req.params.id, 10);
    if (!Number.isInteger(versionId)) {
      return res.status(400).json({ error: 'Invalid version id' });
    }
    const ctx = await getContext(versionId, 'get_data');
    const sql = applyTokens(ctx.sql, { fc_table: ctx.table });
    const result = await runSQL(sql);
    res.json(result.rows);
  } catch (err) {
    console.error(err);
    res.status(err.status || 500).json({ error: err.message });
  }
});
// load baseline rows from source table — additive, no delete
// NOTE(review): filter_clause is interpolated into the stored SQL as raw
// text — intentional per the free-form WHERE clause feature, but this
// endpoint must only be exposed to trusted users.
router.post('/versions/:id/baseline', async (req, res) => {
  const { where_clause, date_offset, pf_user, note } = req.body;
  const dateOffset = date_offset || '0 days';
  // Empty/blank WHERE means "all rows".
  const filterClause = (where_clause || '').trim() || 'TRUE';
  // Validate the id up front: parseInt without a radix on a bad :id
  // yields NaN, which previously reached the DB and surfaced as a 500.
  const versionId = Number.parseInt(req.params.id, 10);
  if (!Number.isInteger(versionId)) {
    return res.status(400).json({ error: 'Invalid version id' });
  }
  try {
    const ctx = await getContext(versionId, 'baseline');
    if (!guardOpen(ctx.version, res)) return;
    const sql = applyTokens(ctx.sql, {
      fc_table: ctx.table,
      version_id: ctx.version.id,
      pf_user: esc(pf_user || ''),
      note: esc(note || ''),
      params: esc(JSON.stringify({ where_clause: filterClause, date_offset: dateOffset })),
      filter_clause: filterClause,
      date_offset: esc(dateOffset)
    });
    const result = await runSQL(sql);
    res.json(result.rows[0]);
  } catch (err) {
    console.error(err);
    res.status(err.status || 500).json({ error: err.message });
  }
});
// delete all baseline rows and log entries for a version
router.delete('/versions/:id/baseline', async (req, res) => {
  // Validate the id up front: parseInt without a radix on a bad :id
  // yields NaN, which previously reached the DB and surfaced as a 500.
  const versionId = Number.parseInt(req.params.id, 10);
  if (!Number.isInteger(versionId)) {
    return res.status(400).json({ error: 'Invalid version id' });
  }
  try {
    const ctx = await getContext(versionId, 'baseline');
    if (!guardOpen(ctx.version, res)) return;
    // Row deletion and log cleanup must succeed or fail together, so run
    // both on a dedicated client inside one transaction.
    const client = await pool.connect();
    try {
      await client.query('BEGIN');
      const delRows = await client.query(
        `DELETE FROM ${ctx.table} WHERE pf_iter = 'baseline' RETURNING pf_id`
      );
      const delLog = await client.query(
        `DELETE FROM pf.log WHERE version_id = $1 AND operation = 'baseline'`,
        [versionId]
      );
      await client.query('COMMIT');
      res.json({ rows_deleted: delRows.rowCount, log_entries_deleted: delLog.rowCount });
    } catch (err) {
      await client.query('ROLLBACK');
      throw err;
    } finally {
      client.release();
    }
  } catch (err) {
    console.error(err);
    res.status(err.status || 500).json({ error: err.message });
  }
});
// load reference rows from source table (additive — does not clear prior reference rows)
router.post('/versions/:id/reference', async (req, res) => {
  const { date_from, date_to, pf_user, note } = req.body;
  if (!date_from || !date_to) {
    return res.status(400).json({ error: 'date_from and date_to are required' });
  }
  // Validate the id up front: parseInt without a radix on a bad :id
  // yields NaN, which previously reached the DB and surfaced as a 500.
  const versionId = Number.parseInt(req.params.id, 10);
  if (!Number.isInteger(versionId)) {
    return res.status(400).json({ error: 'Invalid version id' });
  }
  try {
    const ctx = await getContext(versionId, 'reference');
    if (!guardOpen(ctx.version, res)) return;
    const sql = applyTokens(ctx.sql, {
      fc_table: ctx.table,
      version_id: ctx.version.id,
      pf_user: esc(pf_user || ''),
      note: esc(note || ''),
      params: esc(JSON.stringify({ date_from, date_to })),
      date_from: esc(date_from),
      date_to: esc(date_to)
    });
    const result = await runSQL(sql);
    res.json({ rows: result.rows, rows_affected: result.rows.length });
  } catch (err) {
    console.error(err);
    res.status(err.status || 500).json({ error: err.message });
  }
});
// scale a slice — adjust value and/or units by absolute amount or percentage
router.post('/versions/:id/scale', async (req, res) => {
  const { pf_user, note, slice, value_incr, units_incr, pct } = req.body;
  if (!slice || Object.keys(slice).length === 0) {
    return res.status(400).json({ error: 'slice is required' });
  }
  // Validate the id up front: parseInt without a radix on a bad :id
  // yields NaN, which previously reached the DB and surfaced as a 500.
  const versionId = Number.parseInt(req.params.id, 10);
  if (!Number.isInteger(versionId)) {
    return res.status(400).json({ error: 'Invalid version id' });
  }
  try {
    const ctx = await getContext(versionId, 'scale');
    if (!guardOpen(ctx.version, res)) return;
    const whereClause = buildWhere(slice, ctx.dimCols);
    const excludeClause = buildExcludeClause(ctx.version.exclude_iters);
    let absValueIncr = value_incr || 0;
    let absUnitsIncr = units_incr || 0;
    // pct mode: run a quick totals query, convert percentages to absolutes
    if (pct && (value_incr || units_incr)) {
      // Only sum columns that actually exist in col_meta; interpolating a
      // missing role used to produce sum("undefined") and break the query.
      const sums = [];
      if (ctx.valueCol) sums.push(`sum("${ctx.valueCol}") AS total_value`);
      if (ctx.unitsCol) sums.push(`sum("${ctx.unitsCol}") AS total_units`);
      const totals = sums.length > 0
        ? (await pool.query(`
SELECT ${sums.join(', ')}
FROM ${ctx.table}
WHERE ${whereClause}
${excludeClause}
`)).rows[0]
        : {};
      // parseFloat of a missing/undefined total yields NaN → falls back to 0.
      if (value_incr) absValueIncr = (parseFloat(totals.total_value) || 0) * value_incr / 100;
      if (units_incr) absUnitsIncr = (parseFloat(totals.total_units) || 0) * units_incr / 100;
    }
    if (absValueIncr === 0 && absUnitsIncr === 0) {
      return res.status(400).json({ error: 'value_incr and/or units_incr must be non-zero' });
    }
    const sql = applyTokens(ctx.sql, {
      fc_table: ctx.table,
      version_id: ctx.version.id,
      pf_user: esc(pf_user || ''),
      note: esc(note || ''),
      params: esc(JSON.stringify({ slice, value_incr, units_incr, pct })),
      slice: esc(JSON.stringify(slice)),
      where_clause: whereClause,
      exclude_clause: excludeClause,
      value_incr: absValueIncr,
      units_incr: absUnitsIncr
    });
    const result = await runSQL(sql);
    res.json({ rows: result.rows, rows_affected: result.rows.length });
  } catch (err) {
    console.error(err);
    res.status(err.status || 500).json({ error: err.message });
  }
});
// recode dimension values on a slice
// inserts negative rows to zero out the original, positive rows with new dimension values
router.post('/versions/:id/recode', async (req, res) => {
  const { pf_user, note, slice, set } = req.body;
  if (!slice || Object.keys(slice).length === 0) return res.status(400).json({ error: 'slice is required' });
  if (!set || Object.keys(set).length === 0) return res.status(400).json({ error: 'set is required' });
  // Validate the id up front: parseInt without a radix on a bad :id
  // yields NaN, which previously reached the DB and surfaced as a 500.
  const versionId = Number.parseInt(req.params.id, 10);
  if (!Number.isInteger(versionId)) {
    return res.status(400).json({ error: 'Invalid version id' });
  }
  try {
    const ctx = await getContext(versionId, 'recode');
    if (!guardOpen(ctx.version, res)) return;
    const whereClause = buildWhere(slice, ctx.dimCols);
    const excludeClause = buildExcludeClause(ctx.version.exclude_iters);
    const setClause = buildSetClause(ctx.dimCols, set);
    const sql = applyTokens(ctx.sql, {
      fc_table: ctx.table,
      version_id: ctx.version.id,
      pf_user: esc(pf_user || ''),
      note: esc(note || ''),
      params: esc(JSON.stringify({ slice, set })),
      slice: esc(JSON.stringify(slice)),
      where_clause: whereClause,
      exclude_clause: excludeClause,
      set_clause: setClause
    });
    const result = await runSQL(sql);
    res.json({ rows: result.rows, rows_affected: result.rows.length });
  } catch (err) {
    console.error(err);
    res.status(err.status || 500).json({ error: err.message });
  }
});
// clone a slice as new business under new dimension values
// does not offset the original slice
router.post('/versions/:id/clone', async (req, res) => {
  const { pf_user, note, slice, set, scale } = req.body;
  if (!slice || Object.keys(slice).length === 0) return res.status(400).json({ error: 'slice is required' });
  if (!set || Object.keys(set).length === 0) return res.status(400).json({ error: 'set is required' });
  // Validate the id up front: parseInt without a radix on a bad :id
  // yields NaN, which previously reached the DB and surfaced as a 500.
  const versionId = Number.parseInt(req.params.id, 10);
  if (!Number.isInteger(versionId)) {
    return res.status(400).json({ error: 'Invalid version id' });
  }
  // Default scale is 1.0 (straight copy). Reject non-numeric input early —
  // a NaN scale_factor would otherwise be interpolated into the generated SQL.
  const scaleFactor = (scale != null) ? Number.parseFloat(scale) : 1.0;
  if (!Number.isFinite(scaleFactor)) {
    return res.status(400).json({ error: 'scale must be numeric' });
  }
  try {
    const ctx = await getContext(versionId, 'clone');
    if (!guardOpen(ctx.version, res)) return;
    const whereClause = buildWhere(slice, ctx.dimCols);
    const excludeClause = buildExcludeClause(ctx.version.exclude_iters);
    const setClause = buildSetClause(ctx.dimCols, set);
    const sql = applyTokens(ctx.sql, {
      fc_table: ctx.table,
      version_id: ctx.version.id,
      pf_user: esc(pf_user || ''),
      note: esc(note || ''),
      params: esc(JSON.stringify({ slice, set, scale: scaleFactor })),
      slice: esc(JSON.stringify(slice)),
      where_clause: whereClause,
      exclude_clause: excludeClause,
      set_clause: setClause,
      scale_factor: scaleFactor
    });
    const result = await runSQL(sql);
    res.json({ rows: result.rows, rows_affected: result.rows.length });
  } catch (err) {
    console.error(err);
    res.status(err.status || 500).json({ error: err.message });
  }
});
return router;
};