Compare commits

..

4 Commits

Author SHA1 Message Date
6d8b052eb6 Add date offset to baseline — project actuals into forecast period
The baseline operation now accepts a date_offset interval (e.g. "1 year",
"6 months") and applies it to every date when inserting rows, shifting
historical actuals into the target forecast period.

SQL: ({date_col} + '{{date_offset}}'::interval)::date at insert time.
Route: defaults to '0 days' if omitted so existing calls are unaffected.
UI: year/month spinners with a live before→after month chip preview so
the projected landing period is visible before submitting.

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
2026-04-01 12:48:28 -04:00
ddd16bc7a0 Show params column in log grid
Exposes the stored params (e.g. date_from/date_to for baseline/reference)
so the date range used in each operation is visible in the log.

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
2026-04-01 12:28:03 -04:00
10441a4761 Show baseline/reference form in a modal with live month preview
Replaces the small inline form with a centred modal dialog. When both
dates are selected, a live chip list shows every month covered (up to
36 months) so it is immediately clear what periods will be loaded.

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
2026-04-01 12:24:20 -04:00
cfee3e96b9 Return inserted rows from change operations for incremental grid updates
Instead of re-fetching all forecast data after scale/recode/clone/reference,
the routes now return the inserted rows directly. The frontend uses ag-Grid's
applyTransaction to add only the new rows, eliminating the full reload round-trip.

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
2026-04-01 12:04:28 -04:00
6 changed files with 271 additions and 586 deletions

View File

@ -49,6 +49,9 @@ function generateSQL(source, colMeta) {
}
function buildBaseline() {
const baselineSelect = dataCols.map(c =>
c === dateCol ? `(${q(c)} + '{{date_offset}}'::interval)::date` : q(c)
).join(', ');
return `
WITH
ilog AS (
@ -61,7 +64,7 @@ ilog AS (
)
,ins AS (
INSERT INTO {{fc_table}} (${insertCols})
SELECT ${selectData}, 'baseline', (SELECT id FROM ilog), '{{pf_user}}', now()
SELECT ${baselineSelect}, 'baseline', (SELECT id FROM ilog), '{{pf_user}}', now()
FROM ${srcTable}
WHERE ${q(dateCol)} BETWEEN '{{date_from}}' AND '{{date_to}}'
RETURNING *
@ -84,7 +87,7 @@ ilog AS (
WHERE ${q(dateCol)} BETWEEN '{{date_from}}' AND '{{date_to}}'
RETURNING *
)
SELECT count(*) AS rows_affected FROM ins`.trim();
SELECT * FROM ins`.trim();
}
function buildScale() {
@ -124,7 +127,7 @@ ilog AS (
FROM base
RETURNING *
)
SELECT count(*) AS rows_affected FROM ins`.trim();
SELECT * FROM ins`.trim();
}
function buildRecode() {
@ -146,16 +149,16 @@ ilog AS (
SELECT ${dimsJoined}, ${q(dateCol)}, ${effectiveValue ? `-${q(effectiveValue)}` : '0'}, ${effectiveUnits ? `-${q(effectiveUnits)}` : '0'},
'recode', (SELECT id FROM ilog), '{{pf_user}}', now()
FROM src
RETURNING id
RETURNING *
)
,ins AS (
INSERT INTO {{fc_table}} (${insertCols})
SELECT {{set_clause}}, ${q(dateCol)}, ${effectiveValue ? q(effectiveValue) : '0'}, ${effectiveUnits ? q(effectiveUnits) : '0'},
'recode', (SELECT id FROM ilog), '{{pf_user}}', now()
FROM src
RETURNING id
RETURNING *
)
SELECT (SELECT count(*) FROM neg) + (SELECT count(*) FROM ins) AS rows_affected`.trim();
SELECT * FROM neg UNION ALL SELECT * FROM ins`.trim();
}
function buildClone() {
@ -179,7 +182,7 @@ ilog AS (
{{exclude_clause}}
RETURNING *
)
SELECT count(*) AS rows_affected FROM ins`.trim();
SELECT * FROM ins`.trim();
}
function buildUndo() {

View File

@ -1,538 +0,0 @@
# Pivot Forecast — Application Spec
## Overview
A web application for building named forecast scenarios against any PostgreSQL table. An admin configures a source table, generates a baseline, and opens it for users to make adjustments. Users interact with a pivot table to select slices of data and apply forecast operations. All changes are incremental (append-only), fully audited, and reversible.
---
## Tech Stack
- **Backend:** Node.js / Express
- **Database:** PostgreSQL — isolated `pf` schema, installs into any existing DB
- **Frontend:** Vanilla JS + AG Grid (pivot mode)
- **Pattern:** Follows fc_webapp (shell) + pivot_forecast (operations)
---
## Database Schema: `pf`
Everything lives in the `pf` schema. Install via sequential SQL scripts.
### `pf.source`
Registered source tables available for forecasting.
```sql
CREATE TABLE pf.source (
id serial PRIMARY KEY,
schema text NOT NULL,
tname text NOT NULL,
label text, -- friendly display name
status text DEFAULT 'active', -- active | archived
created_at timestamptz DEFAULT now(),
created_by text,
UNIQUE (schema, tname)
);
```
### `pf.col_meta`
Column configuration for each registered source table. Determines how the app treats each column.
```sql
CREATE TABLE pf.col_meta (
id serial PRIMARY KEY,
source_id integer REFERENCES pf.source(id),
cname text NOT NULL, -- column name in source table
label text, -- friendly display name
role text NOT NULL, -- 'dimension' | 'value' | 'units' | 'date' | 'ignore'
is_key boolean DEFAULT false, -- true = part of natural key (used in WHERE slice)
opos integer, -- ordinal position (for ordering)
UNIQUE (source_id, cname)
);
```
**Roles:**
- `dimension` — categorical field (customer, part, channel, rep, geography, etc.) — appears as pivot rows/cols, used in WHERE filters
- `value` — the money/revenue field to scale
- `units` — the quantity field to scale
- `date` — the date field used for baseline date range selection
- `ignore` — exclude from forecast table
### `pf.version`
Named forecast scenarios. One forecast table (`pf.fc_{tname}_{version_id}`) is created per version.
```sql
CREATE TABLE pf.version (
id serial PRIMARY KEY,
source_id integer REFERENCES pf.source(id),
name text NOT NULL,
description text,
status text DEFAULT 'open', -- open | closed
exclude_iters jsonb DEFAULT '["reference"]', -- iter values excluded from all operations
created_at timestamptz DEFAULT now(),
created_by text,
closed_at timestamptz,
closed_by text,
UNIQUE (source_id, name)
);
```
**`exclude_iters`:** jsonb array of `iter` values that are excluded from operation WHERE clauses. Defaults to `["reference"]`. Reference rows are still returned by `get_data` (visible in pivot) but are never touched by scale/recode/clone. Additional iters can be added to lock them from further adjustment.
**Forecast table naming:** `pf.fc_{tname}_{version_id}` — e.g., `pf.fc_sales_3`. One table per version, physically isolated. Contains both operational rows and reference rows.
Creating a version → `CREATE TABLE pf.fc_{tname}_{version_id} (...)`
Deleting a version → `DROP TABLE pf.fc_{tname}_{version_id}` + delete from `pf.version` + delete from `pf.log`
### `pf.log`
Audit log. Every write operation gets one entry here.
```sql
CREATE TABLE pf.log (
id bigserial PRIMARY KEY,
version_id integer REFERENCES pf.version(id),
pf_user text NOT NULL,
stamp timestamptz DEFAULT now(),
operation text NOT NULL, -- 'baseline' | 'reference' | 'scale' | 'recode' | 'clone'
slice jsonb, -- the WHERE conditions that defined the selection
params jsonb, -- operation parameters (increments, new values, scale factor, etc.)
note text -- user-provided comment
);
```
### `pf.fc_{tname}_{version_id}` (dynamic, one per version)
Created when a version is created. Mirrors source table dimension/value/units/date columns plus forecast metadata. Contains both operational rows (`iter = 'baseline' | 'scale' | 'recode' | 'clone'`) and reference rows (`iter = 'reference'`).
```sql
-- Example: source table "sales", version id 3 → pf.fc_sales_3
CREATE TABLE pf.fc_sales_3 (
id bigserial PRIMARY KEY,
-- mirrored from source (role = dimension | value | units | date only):
customer text,
channel text,
part text,
geography text,
order_date date,
units numeric,
value numeric,
-- forecast metadata:
iter text, -- 'baseline' | 'reference' | 'scale' | 'recode' | 'clone'
logid bigint REFERENCES pf.log(id),
pf_user text,
created_at timestamptz DEFAULT now()
);
```
Note: no `version_id` column on the forecast table — it's implied by the table itself.
### `pf.sql`
Generated SQL stored per source and operation. Built once when col_meta is finalized, fetched at request time.
```sql
CREATE TABLE pf.sql (
id serial PRIMARY KEY,
source_id integer REFERENCES pf.source(id),
operation text NOT NULL, -- 'baseline' | 'reference' | 'scale' | 'recode' | 'clone' | 'get_data' | 'undo'
sql text NOT NULL,
generated_at timestamptz DEFAULT now(),
UNIQUE (source_id, operation)
);
```
**Column names are baked in at generation time.** Runtime substitution tokens:
| Token | Resolved from |
|-------|--------------|
| `{{fc_table}}` | `pf.fc_{tname}_{version_id}` — derived at request time |
| `{{where_clause}}` | built from `slice` JSON by `build_where()` in JS |
| `{{exclude_clause}}` | built from `version.exclude_iters` — e.g. `AND iter NOT IN ('reference')` |
| `{{logid}}` | newly inserted `pf.log` id |
| `{{pf_user}}` | from request body |
| `{{date_from}}` / `{{date_to}}` | baseline/reference date range |
| `{{value_incr}}` / `{{units_incr}}` | scale operation increments |
| `{{pct}}` | scale mode: absolute or percentage |
| `{{set_clause}}` | recode/clone dimension overrides |
| `{{scale_factor}}` | clone multiplier |
**Request-time flow:**
1. Fetch SQL from `pf.sql` for `source_id` + `operation`
2. Fetch `version.exclude_iters`, build `{{exclude_clause}}`
3. Build `{{where_clause}}` from `slice` JSON via `build_where()`
4. Substitute all tokens
5. Execute — single round trip
**WHERE clause safety:** `build_where()` validates every key in the slice against col_meta (only `role = 'dimension'` columns are permitted). Values are sanitized (escaped single quotes). No parameterization — consistent with existing projects, debuggable in Postgres logs.
---
## Setup / Install Scripts
```
setup_sql/
01_schema.sql -- CREATE SCHEMA pf; create all metadata tables (source, col_meta, version, log, sql)
```
Source registration, col_meta configuration, SQL generation, version creation, and forecast table DDL all happen via API.
---
## API Routes
### DB Browser
| Method | Route | Description |
|--------|-------|-------------|
| GET | `/api/tables` | List all tables in the DB with row counts |
| GET | `/api/tables/:schema/:tname/preview` | Preview columns + sample rows |
### Source Management
| Method | Route | Description |
|--------|-------|-------------|
| GET | `/api/sources` | List registered sources |
| POST | `/api/sources` | Register a source table |
| GET | `/api/sources/:id/cols` | Get col_meta for a source |
| PUT | `/api/sources/:id/cols` | Save col_meta configuration |
| POST | `/api/sources/:id/generate-sql` | Generate/regenerate all operation SQL into `pf.sql` |
| GET | `/api/sources/:id/sql` | View generated SQL for a source (inspection/debug) |
| DELETE | `/api/sources/:id` | Deregister a source (does not affect existing forecast tables) |
### Forecast Versions
| Method | Route | Description |
|--------|-------|-------------|
| GET | `/api/sources/:id/versions` | List versions for a source |
| POST | `/api/sources/:id/versions` | Create a new version (CREATE TABLE for forecast table) |
| PUT | `/api/versions/:id` | Update version (name, description, exclude_iters) |
| POST | `/api/versions/:id/close` | Close a version (blocks further edits) |
| POST | `/api/versions/:id/reopen` | Reopen a closed version |
| DELETE | `/api/versions/:id` | Delete a version (DROP TABLE + delete log entries) |
### Baseline & Reference Data
| Method | Route | Description |
|--------|-------|-------------|
| POST | `/api/versions/:id/baseline` | Load baseline from source table for a date range |
| POST | `/api/versions/:id/reference` | Load reference rows from source table for a date range |
**Baseline request body:**
```json
{
"date_from": "2024-01-01",
"date_to": "2024-12-31",
"pf_user": "admin",
"note": "restated actuals",
"replay": false
}
```
`replay` controls behavior when incremental rows exist:
- `replay: false` (default) — delete existing `iter = 'baseline'` rows only, re-insert new baseline, leave all incremental rows (`scale`, `recode`, `clone`) untouched
- `replay: true` — delete all rows, re-insert new baseline, then re-execute each log entry in chronological order against the new baseline, reconstructing all adjustments
The UI presents this as a choice when the admin re-baselines and incremental rows exist:
> "This version has N adjustments. Rebuild baseline only, or replay all adjustments against the new baseline?"
**v1 note:** `replay: true` returns `501 Not Implemented` until the replay engine is built. The flag is designed into the API now so the request shape doesn't change later.
**Reference request body:** same shape without `replay`. Reference loads are additive — multiple reference periods can be loaded independently under separate log entries. Each is undoable via its logid.
### Forecast Data
| Method | Route | Description |
|--------|-------|-------------|
| GET | `/api/versions/:id/data` | Return all rows for this version (all iters including reference) |
Returns flat array. AG Grid pivot runs client-side on this data.
### Forecast Operations
All operations share a common request envelope:
```json
{
"pf_user": "paul.trowbridge",
"note": "optional comment",
"slice": {
"channel": "WHS",
"geography": "WEST"
}
}
```
`slice` keys must be `role = 'dimension'` columns per col_meta. Stored in `pf.log` as the implicit link to affected rows.
#### Scale
`POST /api/versions/:id/scale`
```json
{
"pf_user": "paul.trowbridge",
"note": "10% volume lift Q3 West",
"slice": { "channel": "WHS", "geography": "WEST" },
"value_incr": null,
"units_incr": 5000,
"pct": false
}
```
- `value_incr` / `units_incr` — absolute amounts to add (positive or negative). Either can be null.
- `pct: true` — treat as percentage of current slice total instead of absolute
- Excludes `exclude_iters` rows from the source selection
- Distributes increment proportionally across rows in the slice
- Inserts rows tagged `iter = 'scale'`
#### Recode
`POST /api/versions/:id/recode`
```json
{
"pf_user": "paul.trowbridge",
"note": "Part discontinued, replaced by new SKU",
"slice": { "part": "OLD-SKU-001" },
"set": { "part": "NEW-SKU-002" }
}
```
- `set` — one or more dimension fields to replace (can swap multiple at once)
- Inserts negative rows to zero out the original slice
- Inserts positive rows with replaced dimension values
- Both sets of rows share the same `logid` — undone together
- Inserts rows tagged `iter = 'recode'`
#### Clone
`POST /api/versions/:id/clone`
```json
{
"pf_user": "paul.trowbridge",
"note": "New customer win, similar profile to existing",
"slice": { "customer": "EXISTING CO", "channel": "DIR" },
"set": { "customer": "NEW CO" },
"scale": 0.75
}
```
- `set` — dimension values to override on cloned rows
- `scale` — optional multiplier on value/units (default 1.0)
- Does not offset original slice
- Inserts rows tagged `iter = 'clone'`
### Audit & Undo
| Method | Route | Description |
|--------|-------|-------------|
| GET | `/api/versions/:id/log` | List all log entries for a version, newest first |
| DELETE | `/api/log/:logid` | Undo: delete all forecast rows with this logid, then delete log entry |
---
## Frontend (Web UI)
### Navigation (sidebar)
1. **Sources** — browse DB tables, register sources, configure col_meta, generate SQL
2. **Versions** — list forecast versions per source, create/close/reopen/delete
3. **Forecast** — main working view (pivot + operation panel)
4. **Log** — change history with undo
### Sources View
- Left: DB table browser (like fc_webapp) — all tables with row counts, preview on click
- Right: Registered sources list — click to open col_meta editor
- Col_meta editor: AG Grid editable table — set role per column, toggle is_key, set label
- "Generate SQL" button — triggers generate-sql route, shows confirmation
- Must generate SQL before versions can be created against this source
### Versions View
- List of versions for selected source — name, status (open/closed), created date, row count
- Create version form — name, description, exclude_iters (defaults to `["reference"]`)
- Per-version actions: open forecast, load baseline, load reference, close, reopen, delete
### Forecast View
**Layout:**
```
┌──────────────────────────────────────────────────────────┐
│ [Source: sales] [Version: FY2024 v1 — open] [Refresh] │
├────────────────────────┬─────────────────────────────────┤
│ │ │
│ Pivot Grid │ Operation Panel │
│ (AG Grid pivot mode) │ (active when slice selected) │
│ │ │
│ │ Slice: │
│ │ channel = WHS │
│ │ geography = WEST │
│ │ │
│ │ [ Scale ] [ Recode ] [ Clone ] │
│ │ │
│ │ ... operation form ... │
│ │ │
│ │ [ Submit ] │
│ │ │
└────────────────────────┴─────────────────────────────────┘
```
**Interaction flow:**
1. Select cells in pivot — selected dimension values populate Operation Panel as slice
2. Pick operation tab, fill in parameters
3. Submit → POST to API → response shows rows affected
4. Grid refreshes (re-fetch `get_data`)
**Reference rows** shown in pivot (for context) but visually distinguished (e.g., muted color). Operations never affect them.
### Log View
AG Grid list of log entries — user, timestamp, operation, slice, note, rows affected.
"Undo" button per row → `DELETE /api/log/:logid` → grid and pivot refresh.
---
## Forecast SQL Patterns
Column names baked in at generation time. Tokens substituted at request time.
### Baseline / Reference Load
```sql
WITH ilog AS (
INSERT INTO pf.log (version_id, pf_user, operation, slice, params, note)
VALUES ({{version_id}}, '{{pf_user}}', '{{operation}}', NULL, '{{params}}'::jsonb, '{{note}}')
RETURNING id
)
INSERT INTO {{fc_table}} (
{dimension_cols}, {value_col}, {units_col}, {date_col},
iter, logid, pf_user, created_at
)
SELECT
{dimension_cols}, {value_col}, {units_col}, {date_col},
'{{operation}}', (SELECT id FROM ilog), '{{pf_user}}', now()
FROM
{schema}.{tname}
WHERE
{date_col} BETWEEN '{{date_from}}' AND '{{date_to}}'
```
Baseline route also deletes existing `iter = 'baseline'` rows before inserting.
### Scale
```sql
WITH ilog AS (
INSERT INTO pf.log (version_id, pf_user, operation, slice, params, note)
VALUES ({{version_id}}, '{{pf_user}}', 'scale', '{{slice}}'::jsonb, '{{params}}'::jsonb, '{{note}}')
RETURNING id
)
,base AS (
SELECT
{dimension_cols}, {date_col},
{value_col}, {units_col},
sum({value_col}) OVER () AS total_value,
sum({units_col}) OVER () AS total_units
FROM {{fc_table}}
WHERE {{where_clause}}
{{exclude_clause}}
)
INSERT INTO {{fc_table}} (
{dimension_cols}, {date_col}, {value_col}, {units_col},
iter, logid, pf_user, created_at
)
SELECT
{dimension_cols}, {date_col},
round(({value_col} / NULLIF(total_value, 0)) * {{value_incr}}, 2),
round(({units_col} / NULLIF(total_units, 0)) * {{units_incr}}, 5),
'scale', (SELECT id FROM ilog), '{{pf_user}}', now()
FROM base
```
`{{value_incr}}` / `{{units_incr}}` are pre-computed in JS when `pct: true` (multiply slice total by pct).
### Recode
```sql
WITH ilog AS (
INSERT INTO pf.log (version_id, pf_user, operation, slice, params, note)
VALUES ({{version_id}}, '{{pf_user}}', 'recode', '{{slice}}'::jsonb, '{{params}}'::jsonb, '{{note}}')
RETURNING id
)
,src AS (
SELECT {dimension_cols}, {date_col}, {value_col}, {units_col}
FROM {{fc_table}}
WHERE {{where_clause}}
{{exclude_clause}}
)
,negatives AS (
INSERT INTO {{fc_table}} ({dimension_cols}, {date_col}, {value_col}, {units_col}, iter, logid, pf_user, created_at)
SELECT {dimension_cols}, {date_col}, -{value_col}, -{units_col}, 'recode', (SELECT id FROM ilog), '{{pf_user}}', now()
FROM src
)
INSERT INTO {{fc_table}} ({dimension_cols}, {date_col}, {value_col}, {units_col}, iter, logid, pf_user, created_at)
SELECT {{set_clause}}, {date_col}, {value_col}, {units_col}, 'recode', (SELECT id FROM ilog), '{{pf_user}}', now()
FROM src
```
`{{set_clause}}` replaces the listed dimension columns with new values, passes others through unchanged.
### Clone
```sql
WITH ilog AS (
INSERT INTO pf.log (version_id, pf_user, operation, slice, params, note)
VALUES ({{version_id}}, '{{pf_user}}', 'clone', '{{slice}}'::jsonb, '{{params}}'::jsonb, '{{note}}')
RETURNING id
)
INSERT INTO {{fc_table}} ({dimension_cols}, {date_col}, {value_col}, {units_col}, iter, logid, pf_user, created_at)
SELECT
{{set_clause}}, {date_col},
round({value_col} * {{scale_factor}}, 2),
round({units_col} * {{scale_factor}}, 5),
'clone', (SELECT id FROM ilog), '{{pf_user}}', now()
FROM {{fc_table}}
WHERE {{where_clause}}
{{exclude_clause}}
```
### Undo
```sql
DELETE FROM {{fc_table}} WHERE logid = {{logid}};
DELETE FROM pf.log WHERE id = {{logid}};
```
---
## Admin Setup Flow (end-to-end)
1. Open **Sources** view → browse DB tables → register source table
2. Open col_meta editor → assign roles to columns, mark is_key dimensions, set labels
3. Click **Generate SQL** → app writes operation SQL to `pf.sql`
4. Open **Versions** view → create a named version (sets `exclude_iters`, creates forecast table)
5. Load **Baseline** → pick date range → inserts `iter = 'baseline'` rows
6. Optionally load **Reference** → pick prior year date range → inserts `iter = 'reference'` rows
7. Open **Forecast** view → share with users
## User Forecast Flow (end-to-end)
1. Open **Forecast** view → select version
2. Pivot loads — explore data, identify slice to adjust
3. Select cells → Operation Panel populates with slice
4. Choose operation → fill in parameters → Submit
5. Grid refreshes — adjustment visible immediately
6. Repeat as needed
7. Admin closes version when forecasting is complete
---
## Open Questions / Future Scope
- **Baseline replay** — re-execute change log against a restated baseline (`replay: true`); v1 returns 501
- **Timing shifts** — redistribute value/units across date buckets (deferred)
- **Approval workflow** — user submits, admin approves before changes are visible to others (deferred)
- **Territory filtering** — restrict what a user can see/edit by dimension value (deferred)
- **Export** — download forecast as CSV or push results to a reporting table
- **Version comparison** — side-by-side view of two versions (facilitated by isolated tables via UNION)
- **Multi-DB sources** — currently assumes same DB; cross-DB would need connection config per source

View File

@ -375,10 +375,81 @@ function showLoadForm(op) {
state.loadDataOp = op;
document.getElementById('load-data-title').textContent =
op === 'baseline' ? 'Load Baseline' : 'Load Reference';
document.getElementById('load-data-form').classList.remove('hidden');
document.getElementById('load-date-from').value = '';
document.getElementById('load-date-to').value = '';
document.getElementById('load-offset-years').value = '0';
document.getElementById('load-offset-months').value = '0';
document.getElementById('load-note').value = '';
document.getElementById('load-date-preview').classList.add('hidden');
const showOffset = op === 'baseline';
document.getElementById('load-offset-fields').classList.toggle('hidden', !showOffset);
document.getElementById('load-data-modal').classList.remove('hidden');
document.getElementById('load-date-from').focus();
}
/** Dismiss the baseline/reference load modal by hiding its overlay. */
function hideLoadModal() {
  const overlay = document.getElementById('load-data-modal');
  overlay.classList.add('hidden');
}
/**
 * Build the list of month-start Dates covered by an inclusive date range.
 *
 * @param {string} fromVal - Range start as an ISO date string ("YYYY-MM-DD").
 * @param {string} toVal - Range end as an ISO date string ("YYYY-MM-DD").
 * @returns {Date[]|null} One first-of-month Date per month from fromVal's
 *   month through toVal's month, or null when either date fails to parse
 *   or the range is inverted.
 */
function buildMonthList(fromVal, toVal) {
  // Append a local-midnight time so the string parses in local time,
  // matching what the <input type="date"> pickers represent to the user.
  const from = new Date(fromVal + 'T00:00:00');
  const to = new Date(toVal + 'T00:00:00');
  // Number.isNaN on getTime() instead of the coercing global isNaN(Date).
  if (Number.isNaN(from.getTime()) || Number.isNaN(to.getTime()) || from > to) return null;
  const months = [];
  const cur = new Date(from.getFullYear(), from.getMonth(), 1);
  const end = new Date(to.getFullYear(), to.getMonth(), 1);
  while (cur <= end) {
    months.push(new Date(cur));
    cur.setMonth(cur.getMonth() + 1); // day-of-month is 1, so no rollover surprises
  }
  return months;
}
/**
 * Render a list of months as HTML chips.
 *
 * Emits one chip per month up to 36 months; beyond that, collapses to a
 * single summary chip so very long ranges don't flood the preview.
 *
 * @param {Date[]} months - First-of-month Dates in ascending order.
 * @param {Intl.DateTimeFormat} fmt - Formatter used for each chip label.
 * @returns {string} HTML markup for the chip list.
 */
function renderChips(months, fmt) {
  if (months.length <= 36) {
    return months.map(m => `<span class="date-chip">${fmt.format(m)}</span>`).join('');
  }
  // Fix: the first/last labels were concatenated with no separator
  // ("Jan 2024Apr 2027"); insert a dash between them.
  return `<span class="date-chip-summary">${months.length} months — ${fmt.format(months[0])} – ${fmt.format(months[months.length - 1])}</span>`;
}
/**
 * Refresh the live month-chip preview in the load-baseline/reference modal.
 *
 * Reads the date-from/date-to inputs; hides the preview entirely when either
 * is empty or the range is invalid. For baseline loads, shows a two-column
 * "source → projected" view with the year/month offset applied; for
 * reference loads, shows a single chip list with a month-count label.
 */
function updateDatePreview() {
  const fromVal = document.getElementById('load-date-from').value;
  const toVal = document.getElementById('load-date-to').value;
  const preview = document.getElementById('load-date-preview');
  const simple = document.getElementById('load-preview-simple');
  const offset = document.getElementById('load-preview-offset');
  // No preview until both dates are chosen.
  if (!fromVal || !toVal) { preview.classList.add('hidden'); return; }
  // buildMonthList returns null for unparseable dates or from > to.
  const months = buildMonthList(fromVal, toVal);
  if (!months) { preview.classList.add('hidden'); return; }
  const fmt = new Intl.DateTimeFormat('en-US', { month: 'short', year: 'numeric' });
  if (state.loadDataOp === 'baseline') {
    // Spinner values; blank/invalid input falls back to 0 (no offset).
    const years = parseInt(document.getElementById('load-offset-years').value) || 0;
    const mths = parseInt(document.getElementById('load-offset-months').value) || 0;
    // Shift each source month by the offset to show where rows will land.
    const projected = months.map(d => {
      const p = new Date(d);
      p.setFullYear(p.getFullYear() + years);
      p.setMonth(p.getMonth() + mths);
      return p;
    });
    document.getElementById('load-chips-source').innerHTML = renderChips(months, fmt);
    document.getElementById('load-chips-projected').innerHTML = renderChips(projected, fmt);
    simple.classList.add('hidden');
    offset.classList.remove('hidden');
  } else {
    // Reference load: single chip list plus a "N months covered" label.
    simple.querySelector('.load-preview-label').textContent =
      `${months.length} month${months.length !== 1 ? 's' : ''} covered`;
    document.getElementById('load-date-chips').innerHTML = renderChips(months, fmt);
    offset.classList.add('hidden');
    simple.classList.remove('hidden');
  }
  preview.classList.remove('hidden');
}
async function submitLoadData() {
const date_from = document.getElementById('load-date-from').value;
const date_to = document.getElementById('load-date-to').value;
@ -391,10 +462,19 @@ async function submitLoadData() {
note: document.getElementById('load-note').value.trim() || undefined
};
if (state.loadDataOp === 'baseline') {
const years = parseInt(document.getElementById('load-offset-years').value) || 0;
const months = parseInt(document.getElementById('load-offset-months').value) || 0;
const parts = [];
if (years) parts.push(`${years} year${years !== 1 ? 's' : ''}`);
if (months) parts.push(`${months} month${months !== 1 ? 's' : ''}`);
body.date_offset = parts.length ? parts.join(' ') : '0 days';
}
try {
showStatus(`Loading ${state.loadDataOp}...`, 'info');
const result = await api('POST', `/versions/${state.selectedVersionId}/${state.loadDataOp}`, body);
document.getElementById('load-data-form').classList.add('hidden');
hideLoadModal();
showStatus(`${state.loadDataOp} loaded — ${result.rows_affected} rows`, 'success');
} catch (err) {
showStatus(err.message, 'error');
@ -451,6 +531,17 @@ function openForecast() {
/* ============================================================
FORECAST VIEW data loading
============================================================ */
/**
 * Convert value/units columns from Postgres string decimals to JS numbers.
 *
 * Column roles come from state.colMeta; only 'value' and 'units' columns
 * are converted, and null/undefined cells are left untouched.
 *
 * @param {Object[]} rows - Raw rows from the API.
 * @returns {Object[]} New row objects (inputs are not mutated).
 */
function parseNumericRows(rows) {
  const numericCols = new Set();
  for (const meta of state.colMeta) {
    if (meta.role === 'value' || meta.role === 'units') {
      numericCols.add(meta.cname);
    }
  }
  return rows.map(row => {
    const copy = { ...row };
    for (const col of numericCols) {
      if (copy[col] != null) copy[col] = parseFloat(copy[col]);
    }
    return copy;
  });
}
async function loadForecastData() {
if (!state.version) return;
document.getElementById('forecast-label').textContent =
@ -462,14 +553,7 @@ async function loadForecastData() {
}
showStatus('Loading forecast data...', 'info');
const rawData = await api('GET', `/versions/${state.version.id}/data`);
const numericCols = state.colMeta
.filter(c => c.role === 'value' || c.role === 'units')
.map(c => c.cname);
const data = rawData.map(row => {
const r = { ...row };
numericCols.forEach(col => { if (r[col] != null) r[col] = parseFloat(r[col]); });
return r;
});
const data = parseNumericRows(rawData);
initPivotGrid(data);
showStatus(`Loaded ${data.length.toLocaleString()} rows`, 'success');
} catch (err) {
@ -695,7 +779,7 @@ async function submitScale() {
document.getElementById('scale-value-incr').value = '';
document.getElementById('scale-units-incr').value = '';
document.getElementById('scale-note').value = '';
await loadForecastData();
state.grids.pivot.applyTransaction({ add: parseNumericRows(result.rows) });
} catch (err) {
showStatus(err.message, 'error');
}
@ -721,7 +805,7 @@ async function submitRecode() {
});
showStatus(`Recode applied — ${result.rows_affected} rows inserted`, 'success');
document.querySelectorAll('#recode-fields input[data-col], #recode-fields select[data-col]').forEach(i => { i.value = ''; });
await loadForecastData();
state.grids.pivot.applyTransaction({ add: parseNumericRows(result.rows) });
} catch (err) {
showStatus(err.message, 'error');
}
@ -750,7 +834,7 @@ async function submitClone() {
showStatus(`Clone applied — ${result.rows_affected} rows inserted`, 'success');
document.querySelectorAll('#clone-fields input[data-col], #clone-fields select[data-col]').forEach(i => { i.value = ''; });
document.getElementById('clone-scale').value = '1';
await loadForecastData();
state.grids.pivot.applyTransaction({ add: parseNumericRows(result.rows) });
} catch (err) {
showStatus(err.message, 'error');
}
@ -780,6 +864,8 @@ function renderLogGrid(logs) {
{ field: 'operation', headerName: 'Operation', width: 90 },
{ field: 'slice', headerName: 'Slice', flex: 1,
valueFormatter: p => p.value ? JSON.stringify(p.value) : '' },
{ field: 'params', headerName: 'Params', flex: 1,
valueFormatter: p => p.value ? JSON.stringify(p.value) : '' },
{ field: 'note', headerName: 'Note', flex: 1 },
{
headerName: '',
@ -875,9 +961,12 @@ document.addEventListener('DOMContentLoaded', () => {
document.getElementById('vbtn-toggle').addEventListener('click', toggleVersionStatus);
document.getElementById('vbtn-delete').addEventListener('click', deleteVersion);
document.getElementById('btn-load-submit').addEventListener('click', submitLoadData);
document.getElementById('btn-load-cancel').addEventListener('click', () => {
document.getElementById('load-data-form').classList.add('hidden');
});
document.getElementById('btn-load-cancel').addEventListener('click', hideLoadModal);
document.getElementById('btn-load-close').addEventListener('click', hideLoadModal);
document.getElementById('load-date-from').addEventListener('change', updateDatePreview);
document.getElementById('load-date-to').addEventListener('change', updateDatePreview);
document.getElementById('load-offset-years').addEventListener('input', updateDatePreview);
document.getElementById('load-offset-months').addEventListener('input', updateDatePreview);
// forecast view buttons
document.getElementById('btn-forecast-refresh').addEventListener('click', loadForecastData);

View File

@ -94,18 +94,6 @@
<button class="btn" id="vbtn-toggle">Close Version</button>
<button class="btn btn-danger" id="vbtn-delete">Delete</button>
</div>
<div id="load-data-form" class="inline-form hidden">
<h3 id="load-data-title">Load Baseline</h3>
<div class="form-row">
<label>Date From<input type="date" id="load-date-from" /></label>
<label>Date To<input type="date" id="load-date-to" /></label>
<label>Note<input type="text" id="load-note" placeholder="optional" /></label>
</div>
<div class="form-actions">
<button id="btn-load-submit" class="btn btn-primary">Load</button>
<button id="btn-load-cancel" class="btn">Cancel</button>
</div>
</div>
</div>
<!-- ===== FORECAST VIEW ===== -->
@ -170,6 +158,54 @@
</main>
</div>
<!-- Load baseline / reference modal.
     Shared by the baseline and reference operations: the JS swaps
     #load-data-title and (presumably) toggles #load-offset-fields per
     mode — confirm against showLoadModal in the script. -->
<div id="load-data-modal" class="modal-overlay hidden">
  <div class="modal load-data-modal">
    <div class="modal-header">
      <span id="load-data-title">Load Baseline</span>
      <!-- FIX: close glyph was mojibake "×" (UTF-8 "×" mis-decoded as
           Latin-1); use the entity so the file survives re-encoding -->
      <button id="btn-load-close" class="btn-icon">&times;</button>
    </div>
    <div id="load-data-body">
      <div class="load-form-fields">
        <label>Date From<input type="date" id="load-date-from" /></label>
        <label>Date To<input type="date" id="load-date-to" /></label>
        <!-- Year/month offset spinners: shift historical actuals into the
             forecast period (baseline only) -->
        <div id="load-offset-fields">
          <div class="load-offset-row">
            <label>Offset Years<input type="number" id="load-offset-years" min="0" value="0" /></label>
            <label>Offset Months<input type="number" id="load-offset-months" min="0" value="0" /></label>
          </div>
        </div>
        <label>Note<input type="text" id="load-note" placeholder="optional" /></label>
      </div>
      <!-- Live month preview; JS unhides this once both dates are set -->
      <div id="load-date-preview" class="load-date-preview hidden">
        <!-- reference: single chip list -->
        <div id="load-preview-simple">
          <div class="load-preview-label"></div>
          <div id="load-date-chips" class="date-chips"></div>
        </div>
        <!-- baseline: before → after -->
        <div id="load-preview-offset" class="hidden">
          <div class="load-preview-columns">
            <div class="load-preview-col">
              <div class="load-preview-label">Source</div>
              <div id="load-chips-source" class="date-chips"></div>
            </div>
            <!-- FIX: the arrow element was empty although the CSS styles a
                 20px glyph for it — the "→" was likely dropped by the same
                 encoding corruption as the close button. Entity-encode it. -->
            <div class="load-preview-arrow">&#8594;</div>
            <div class="load-preview-col">
              <div class="load-preview-label">Projected</div>
              <div id="load-chips-projected" class="date-chips"></div>
            </div>
          </div>
        </div>
      </div>
    </div>
    <div class="modal-footer">
      <button id="btn-load-submit" class="btn btn-primary">Load</button>
      <button id="btn-load-cancel" class="btn">Cancel</button>
    </div>
  </div>
</div>
<!-- Table preview modal -->
<div id="modal-overlay" class="modal-overlay hidden">
<div class="modal">

View File

@ -312,6 +312,99 @@ body {
#modal-body { padding: 16px 18px; overflow-y: auto; flex: 1; font-size: 12px; }
.modal-footer { padding: 10px 18px; border-top: 1px solid #eee; display: flex; justify-content: flex-end; gap: 8px; }
/* ============================================================
   LOAD BASELINE / REFERENCE MODAL
   ============================================================ */
/* Fixed-width dialog; the body scrolls if the chip preview grows tall. */
.load-data-modal { width: 480px; max-height: 80vh; }
#load-data-body {
  padding: 20px 24px;
  display: flex;
  flex-direction: column;
  gap: 20px;
  overflow-y: auto;
}
/* Stacked form inputs: dates, offset spinners, note. */
.load-form-fields {
  display: flex;
  flex-direction: column;
  gap: 12px;
}
/* Each label wraps its input, caption above control. */
.load-form-fields label {
  font-size: 11px;
  color: #555;
  display: flex;
  flex-direction: column;
  gap: 4px;
}
.load-form-fields input[type=date],
.load-form-fields input[type=text] {
  border: 1px solid #dce1e7;
  padding: 7px 10px;
  border-radius: 3px;
  font-size: 13px;
  color: #333;
  width: 100%;
}
.load-form-fields input[type=date]:focus,
.load-form-fields input[type=text]:focus {
  outline: none;
  border-color: #2980b9;
  box-shadow: 0 0 0 2px rgba(41,128,185,.15);
}
/* Year/month offset spinners sit side by side, equal widths. */
.load-offset-row { display: flex; gap: 12px; }
.load-offset-row label { flex: 1; }
.load-date-preview { display: flex; flex-direction: column; gap: 8px; }
/* The display:flex above outranks a generic `.hidden { display:none }`
   utility rule, so hiding is re-asserted here at higher specificity. */
.load-date-preview.hidden { display: none; }
/* Baseline-with-offset layout: Source and Projected chip columns
   separated by an arrow. */
.load-preview-columns {
  display: flex;
  gap: 10px;
  align-items: flex-start;
}
.load-preview-col { flex: 1; display: flex; flex-direction: column; gap: 6px; }
/* Arrow between the columns; padding-top aligns the glyph with the chips.
   NOTE(review): the matching markup element appears empty — confirm the
   arrow glyph is actually present/injected, otherwise nothing renders. */
.load-preview-arrow {
  font-size: 20px;
  color: #aaa;
  padding-top: 18px;
  flex-shrink: 0;
}
/* Small uppercase column captions ("Source" / "Projected"). */
.load-preview-label {
  font-size: 11px;
  font-weight: 600;
  color: #7f8c8d;
  text-transform: uppercase;
  letter-spacing: 0.04em;
}
/* Wrapping pill list of month chips. */
.date-chips {
  display: flex;
  flex-wrap: wrap;
  gap: 5px;
}
.date-chip {
  background: #eaf4fb;
  color: #1a6fa8;
  border: 1px solid #c5dff0;
  padding: 3px 9px;
  border-radius: 12px;
  font-size: 11px;
  white-space: nowrap;
}
/* Italic overflow note — presumably the "and N more" text shown when the
   range exceeds the chip cap; confirm against the preview JS. */
.date-chip-summary {
  font-size: 12px;
  color: #555;
  font-style: italic;
}
.preview-section h4 { font-size: 12px; margin-bottom: 6px; color: #555; }
.preview-section + .preview-section { margin-top: 16px; }
.preview-table { border-collapse: collapse; width: 100%; font-size: 11px; }

View File

@ -77,13 +77,14 @@ module.exports = function(pool) {
// load baseline rows from source table for a date range
// deletes existing iter='baseline' rows before inserting (handled inside stored SQL)
router.post('/versions/:id/baseline', async (req, res) => {
const { date_from, date_to, pf_user, note, replay } = req.body;
const { date_from, date_to, date_offset, pf_user, note, replay } = req.body;
if (!date_from || !date_to) {
return res.status(400).json({ error: 'date_from and date_to are required' });
}
if (replay) {
return res.status(501).json({ error: 'replay is not yet implemented' });
}
const dateOffset = date_offset || '0 days';
try {
const ctx = await getContext(parseInt(req.params.id), 'baseline');
if (!guardOpen(ctx.version, res)) return;
@ -93,13 +94,14 @@ module.exports = function(pool) {
version_id: ctx.version.id,
pf_user: esc(pf_user || ''),
note: esc(note || ''),
params: esc(JSON.stringify({ date_from, date_to })),
params: esc(JSON.stringify({ date_from, date_to, date_offset: dateOffset })),
date_from: esc(date_from),
date_to: esc(date_to)
date_to: esc(date_to),
date_offset: esc(dateOffset)
});
const result = await runSQL(sql);
res.json(result.rows[0]);
res.json({ rows_affected: result.rows.length });
} catch (err) {
console.error(err);
res.status(err.status || 500).json({ error: err.message });
@ -127,7 +129,7 @@ module.exports = function(pool) {
});
const result = await runSQL(sql);
res.json(result.rows[0]);
res.json({ rows: result.rows, rows_affected: result.rows.length });
} catch (err) {
console.error(err);
res.status(err.status || 500).json({ error: err.message });
@ -183,7 +185,7 @@ module.exports = function(pool) {
});
const result = await runSQL(sql);
res.json(result.rows[0]);
res.json({ rows: result.rows, rows_affected: result.rows.length });
} catch (err) {
console.error(err);
res.status(err.status || 500).json({ error: err.message });
@ -218,7 +220,7 @@ module.exports = function(pool) {
});
const result = await runSQL(sql);
res.json(result.rows[0]);
res.json({ rows: result.rows, rows_affected: result.rows.length });
} catch (err) {
console.error(err);
res.status(err.status || 500).json({ error: err.message });
@ -255,7 +257,7 @@ module.exports = function(pool) {
});
const result = await runSQL(sql);
res.json(result.rows[0]);
res.json({ rows: result.rows, rows_affected: result.rows.length });
} catch (err) {
console.error(err);
res.status(err.status || 500).json({ error: err.message });