Edit baseline/reference segments before forecast rollout
Adds PUT /versions/:id/baseline/:logid that, in one transaction, drops the segment's rows and log entry and replays the baseline or reference SQL with new params. The endpoint refuses (409) if any scale, recode, or clone has been applied — those operations were calibrated against the old totals and would silently misreconcile. Baseline view gets an Edit button on each segment (hidden once forecast operations exist), populating the form with the original filters, offset, and note. Submit issues PUT in edit mode, POST otherwise. POST baseline and POST reference now also persist the structured filters in pf.log.params so edit can reload them. Co-Authored-By: Claude Opus 4.7 <noreply@anthropic.com>
This commit is contained in:
parent
6a98c3f8fc
commit
3a6062d723
@ -123,18 +123,23 @@ module.exports = function(pool) {
|
||||
|
||||
// load baseline rows from source table — additive, no delete
|
||||
router.post('/versions/:id/baseline', async (req, res) => {
|
||||
const { where_clause, date_offset, pf_user, note } = req.body;
|
||||
const { where_clause, date_offset, pf_user, note, filters } = req.body;
|
||||
const dateOffset = date_offset || '0 days';
|
||||
const filterClause = (where_clause || '').trim() || 'TRUE';
|
||||
try {
|
||||
const ctx = await getContext(parseInt(req.params.id), 'baseline');
|
||||
if (!guardOpen(ctx.version, res)) return;
|
||||
const paramsJson = JSON.stringify({
|
||||
where_clause: filterClause,
|
||||
date_offset: dateOffset,
|
||||
...(filters ? { filters } : {})
|
||||
});
|
||||
const sql = applyTokens(ctx.sql, {
|
||||
fc_table: ctx.table,
|
||||
version_id: ctx.version.id,
|
||||
pf_user: esc(pf_user || ''),
|
||||
note: esc(note || ''),
|
||||
params: esc(JSON.stringify({ where_clause: filterClause, date_offset: dateOffset })),
|
||||
params: esc(paramsJson),
|
||||
filter_clause: filterClause,
|
||||
date_offset: esc(dateOffset)
|
||||
});
|
||||
@ -147,6 +152,92 @@ module.exports = function(pool) {
|
||||
}
|
||||
});
|
||||
|
||||
// Edit a baseline or reference segment in place.
//
// Only allowed before any scale/recode/clone has been applied on this
// version, since those operations were calibrated against the old segment's
// totals and would silently misreconcile after a replay.
//
// All validation reads (log lookup, forecast-ops guard) run INSIDE the same
// transaction as the delete/replay, with the target log row locked via
// FOR UPDATE. This closes the check-then-act race where a concurrent
// scale/recode/clone — or a concurrent edit/undo of the same segment —
// could land between the 409 guard and the destructive rewrite.
router.put('/versions/:id/baseline/:logid', async (req, res) => {
  const versionId = Number.parseInt(req.params.id, 10);
  const logid = Number.parseInt(req.params.logid, 10);
  // Reject malformed ids up front — NaN would otherwise reach Postgres and
  // surface as an opaque 500 instead of a client error.
  if (!Number.isInteger(versionId) || !Number.isInteger(logid)) {
    return res.status(400).json({ error: 'Invalid version or log id' });
  }
  const { where_clause, date_offset, pf_user, note, filters } = req.body;
  const dateOffset = date_offset || '0 days';
  const filterClause = (where_clause || '').trim() || 'TRUE';

  const client = await pool.connect();
  try {
    await client.query('BEGIN');
    // Abort the open transaction before answering with a client error.
    const bail = async (status, error) => {
      await client.query('ROLLBACK');
      res.status(status).json({ error });
    };

    // Lock the target log row so concurrent edits/undos of this segment
    // serialize behind us instead of interleaving with the replay below.
    const logResult = await client.query(
      `SELECT * FROM pf.log WHERE id = $1 AND version_id = $2 FOR UPDATE`,
      [logid, versionId]
    );
    if (logResult.rows.length === 0) {
      return bail(404, 'Log entry not found');
    }
    const oldLog = logResult.rows[0];
    if (!['baseline', 'reference'].includes(oldLog.operation)) {
      return bail(400, 'Only baseline or reference segments can be edited');
    }

    // Refuse if any forecast operation exists on this version — those were
    // calibrated against the old segment's totals (see header comment).
    const opsResult = await client.query(
      `SELECT COUNT(*)::int AS n FROM pf.log
        WHERE version_id = $1 AND operation IN ('scale', 'recode', 'clone')`,
      [versionId]
    );
    if (opsResult.rows[0].n > 0) {
      return bail(409, {
        // keep the exact user-facing message the UI was written against
      } && 'Cannot edit segments after forecast operations have been applied. Undo the operations first.');
    }

    const ctx = await getContext(versionId, oldLog.operation);
    if (!guardOpen(ctx.version, res)) {
      // guardOpen has already written the response; just unwind the tx.
      await client.query('ROLLBACK');
      return;
    }

    // Persist the structured filters alongside the raw clause so a later
    // edit can repopulate the form. Reference templates take no date offset.
    const isBaseline = oldLog.operation === 'baseline';
    const paramsJson = JSON.stringify({
      where_clause: filterClause,
      ...(isBaseline ? { date_offset: dateOffset } : {}),
      ...(filters ? { filters } : {})
    });
    const tokens = {
      fc_table: ctx.table,
      version_id: ctx.version.id,
      pf_user: esc(pf_user || ''),
      note: esc(note || ''),
      params: esc(paramsJson),
      filter_clause: filterClause,
      // only the baseline template consumes a {date_offset} token
      ...(isBaseline ? { date_offset: esc(dateOffset) } : {})
    };
    const sql = applyTokens(ctx.sql, tokens);

    // Replay: drop the segment's rows and its log entry, then re-run the
    // baseline/reference SQL with the new params — all one atomic unit.
    const delRows = await client.query(
      `DELETE FROM ${ctx.table} WHERE pf_logid = $1 RETURNING pf_id`,
      [logid]
    );
    await client.query(`DELETE FROM pf.log WHERE id = $1`, [logid]);
    const insResult = await client.query(sql);
    await client.query('COMMIT');

    res.json({
      rows_deleted: delRows.rowCount,
      pf_ids: delRows.rows.map(r => r.pf_id),
      rows_affected: insResult.rows[0]?.rows_affected ?? 0
    });
  } catch (err) {
    try { await client.query('ROLLBACK'); } catch {}
    console.error(err);
    res.status(err.status || 500).json({ error: err.message });
  } finally {
    client.release();
  }
});
|
||||
|
||||
// delete all baseline rows and log entries for a version
|
||||
router.delete('/versions/:id/baseline', async (req, res) => {
|
||||
const versionId = parseInt(req.params.id);
|
||||
@ -183,17 +274,21 @@ module.exports = function(pool) {
|
||||
|
||||
// load reference rows from source table (additive — does not clear prior reference rows)
|
||||
router.post('/versions/:id/reference', async (req, res) => {
|
||||
const { where_clause, pf_user, note } = req.body;
|
||||
const { where_clause, pf_user, note, filters } = req.body;
|
||||
const filterClause = (where_clause || '').trim() || 'TRUE';
|
||||
try {
|
||||
const ctx = await getContext(parseInt(req.params.id), 'reference');
|
||||
if (!guardOpen(ctx.version, res)) return;
|
||||
const paramsJson = JSON.stringify({
|
||||
where_clause: filterClause,
|
||||
...(filters ? { filters } : {})
|
||||
});
|
||||
const sql = applyTokens(ctx.sql, {
|
||||
fc_table: ctx.table,
|
||||
version_id: ctx.version.id,
|
||||
pf_user: esc(pf_user || ''),
|
||||
note: esc(note || ''),
|
||||
params: esc(JSON.stringify({ where_clause: filterClause })),
|
||||
params: esc(paramsJson),
|
||||
filter_clause: filterClause
|
||||
});
|
||||
|
||||
|
||||
@ -73,6 +73,8 @@ export default function Baseline({ sources = [], sourceId, versions = [], versio
|
||||
const [offsetMo, setOffsetMo] = useState(0)
|
||||
const [segNote, setSegNote] = useState('')
|
||||
const [submitting, setSubmitting] = useState(false)
|
||||
const [editingLogId, setEditingLogId] = useState(null)
|
||||
const [hasForecastOps, setHasForecastOps] = useState(false)
|
||||
|
||||
const [expandedId, setExpandedId] = useState(null)
|
||||
const [msg, setMsg] = useState(null)
|
||||
@ -94,6 +96,7 @@ export default function Baseline({ sources = [], sourceId, versions = [], versio
|
||||
// Refresh the segment log for the current version: keep only the
// baseline/reference entries for the table, and track whether any forecast
// operation exists (which locks segment editing).
function loadLog() {
  fetch(`/api/versions/${versionId}/log`)
    .then(r => r.json())
    .then(entries => {
      const isSegment = e => e.operation === 'baseline' || e.operation === 'reference'
      const isForecastOp = e => ['scale', 'recode', 'clone'].includes(e.operation)
      setLog(entries.filter(isSegment))
      setHasForecastOps(entries.some(isForecastOp))
    })
}
|
||||
|
||||
@ -159,21 +162,26 @@ export default function Baseline({ sources = [], sourceId, versions = [], versio
|
||||
const offsetStr = isRef ? '0 days' : ([offsetYr > 0 ? `${offsetYr} year` : '', offsetMo > 0 ? `${offsetMo} month` : ''].filter(Boolean).join(' ') || '0 days')
|
||||
const endpoint = isRef ? 'reference' : 'baseline'
|
||||
const body = isRef
|
||||
? { where_clause: clause, pf_user: 'admin', note: description || segNote }
|
||||
: { where_clause: clause, date_offset: offsetStr, pf_user: 'admin', note: description || segNote }
|
||||
? { where_clause: clause, pf_user: 'admin', note: description || segNote, filters }
|
||||
: { where_clause: clause, date_offset: offsetStr, pf_user: 'admin', note: description || segNote, filters }
|
||||
setSubmitting(true)
|
||||
try {
|
||||
const res = await fetch(`/api/versions/${versionId}/${endpoint}`, {
|
||||
method: 'POST',
|
||||
const url = editingLogId
|
||||
? `/api/versions/${versionId}/baseline/${editingLogId}`
|
||||
: `/api/versions/${versionId}/${endpoint}`
|
||||
const method = editingLogId ? 'PUT' : 'POST'
|
||||
const res = await fetch(url, {
|
||||
method,
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify(body)
|
||||
})
|
||||
const data = await res.json()
|
||||
if (!res.ok) { flash(data.error, 'error'); return }
|
||||
flash(`Loaded ${data.rows_affected ?? data.row_count ?? ''} rows`)
|
||||
flash(editingLogId
|
||||
? `Updated — ${data.rows_deleted} rows replaced with ${data.rows_affected}`
|
||||
: `Loaded ${data.rows_affected ?? data.row_count ?? ''} rows`)
|
||||
loadLog()
|
||||
setDescription(''); setSegNote(''); setOffsetYr(0); setOffsetMo(0)
|
||||
setFilters(filterCols.length > 0 ? [emptyFilter(filterCols)] : [])
|
||||
cancelEdit()
|
||||
} catch (err) {
|
||||
flash(err.message, 'error')
|
||||
} finally {
|
||||
@ -181,6 +189,47 @@ export default function Baseline({ sources = [], sourceId, versions = [], versio
|
||||
}
|
||||
}
|
||||
|
||||
// Enter edit mode for a log entry: populate the Add Segment form with the
// entry's stored params (filters, offset, note) and scroll the form into view.
// Refused outright while forecast operations exist on the version.
function startEdit(entry) {
  if (hasForecastOps) {
    flash('Undo forecast operations first to edit segments', 'error')
    return
  }
  const saved = entry.params || {}

  setSegType(entry.operation)
  setSegNote(entry.note || '')
  setDescription('')

  if (entry.operation === 'baseline') {
    const { yr, mo } = parseOffset(saved.date_offset)
    setOffsetYr(yr)
    setOffsetMo(mo)
  } else {
    setOffsetYr(0)
    setOffsetMo(0)
  }

  const savedFilters = saved.filters
  if (Array.isArray(savedFilters) && savedFilters.length > 0) {
    setFilters(savedFilters)
  } else if (filterCols.length > 0) {
    // Pre-existing segment without structured filters — fall back to a blank row.
    // The original WHERE clause is shown read-only on the segment detail row.
    setFilters([emptyFilter(filterCols)])
    flash('Filters were not stored on this segment — rebuild them, or undo and re-add', 'error')
  }

  setEditingLogId(entry.id)
  setExpandedId(null)
  // Defer the scroll one tick so the form has re-rendered in edit mode.
  setTimeout(() => {
    document.getElementById('add-segment')?.scrollIntoView({ behavior: 'smooth', block: 'start' })
  }, 0)
}
|
||||
|
||||
// Leave edit mode and reset every form field back to its pristine "add" state.
function cancelEdit() {
  setDescription('')
  setSegNote('')
  setOffsetYr(0)
  setOffsetMo(0)
  const blankFilters = filterCols.length > 0 ? [emptyFilter(filterCols)] : []
  setFilters(blankFilters)
  setEditingLogId(null)
}
|
||||
|
||||
async function undoSegment(logid) {
|
||||
await fetch(`/api/log/${logid}`, { method: 'DELETE' })
|
||||
loadLog()
|
||||
@ -331,6 +380,9 @@ export default function Baseline({ sources = [], sourceId, versions = [], versio
|
||||
<td className="px-3 py-2 text-gray-500">{entry.pf_user}</td>
|
||||
<td className="px-3 py-2 text-gray-400">{new Date(entry.stamp).toLocaleDateString()}</td>
|
||||
<td className="px-3 py-2 text-right">
|
||||
{!hasForecastOps && (
|
||||
<button onClick={e => { e.stopPropagation(); startEdit(entry) }} className="text-gray-400 hover:text-blue-600 text-xs mr-3">Edit</button>
|
||||
)}
|
||||
<button onClick={e => { e.stopPropagation(); undoSegment(entry.id) }} className="text-gray-400 hover:text-red-500 text-xs">Undo</button>
|
||||
</td>
|
||||
</tr>
|
||||
@ -362,10 +414,19 @@ export default function Baseline({ sources = [], sourceId, versions = [], versio
|
||||
</table>
|
||||
</div>
|
||||
|
||||
{/* Add Segment */}
|
||||
<div className="bg-white border border-gray-200 rounded">
|
||||
<div className="px-3 py-2 border-b border-gray-100 text-xs font-medium text-gray-500 uppercase tracking-wide">
|
||||
Add Segment
|
||||
{/* Add / Edit Segment */}
|
||||
<div id="add-segment" className="bg-white border border-gray-200 rounded">
|
||||
<div className="px-3 py-2 border-b border-gray-100 text-xs font-medium text-gray-500 uppercase tracking-wide flex items-center justify-between">
|
||||
<span>{(() => {
|
||||
if (!editingLogId) return 'Add Segment'
|
||||
const entry = log.find(e => e.id === editingLogId)
|
||||
if (!entry) return 'Edit Segment'
|
||||
const label = entry.operation === 'reference' ? 'reference' : 'baseline'
|
||||
return entry.note ? `Edit ${label} — ${entry.note}` : `Edit ${label} segment`
|
||||
})()}</span>
|
||||
{editingLogId && (
|
||||
<button onClick={cancelEdit} className="text-gray-400 hover:text-gray-600 normal-case font-normal">Cancel edit</button>
|
||||
)}
|
||||
</div>
|
||||
<div className="p-4 flex flex-col gap-4">
|
||||
|
||||
@ -465,7 +526,11 @@ export default function Baseline({ sources = [], sourceId, versions = [], versio
|
||||
<input value={segNote} onChange={e => setSegNote(e.target.value)} placeholder="optional" className="border border-gray-200 rounded px-2 py-1.5 text-sm" />
|
||||
</div>
|
||||
<button onClick={loadSegment} disabled={submitting || filters.length === 0} className="bg-blue-600 text-white text-xs px-5 py-2 rounded hover:bg-blue-700 disabled:opacity-50 shrink-0">
|
||||
{submitting ? 'Loading…' : `Load ${segType === 'reference' ? 'Reference' : 'Segment'}`}
|
||||
{submitting
|
||||
? (editingLogId ? 'Saving…' : 'Loading…')
|
||||
: (editingLogId
|
||||
? `Save ${segType === 'reference' ? 'Reference' : 'Segment'}`
|
||||
: `Load ${segType === 'reference' ? 'Reference' : 'Segment'}`)}
|
||||
</button>
|
||||
</div>
|
||||
|
||||
|
||||
Loading…
Reference in New Issue
Block a user