Compare commits

..

5 Commits

5 changed files with 211 additions and 97 deletions

View File

@@ -55,7 +55,7 @@ server.get('/baseline', bodyParser.json(), function(req, res) {
var path = './routes/baseline/baseline.sql';
var args = [];
+var app_req = JSON.stringify(req.body);
var app_baseline_from_date = req.body.app_baseline_from_date;
var app_baseline_to_date = req.body.app_baseline_to_date;
var app_first_forecast_date = req.body.app_first_forecast_date;
@@ -71,6 +71,7 @@ server.get('/baseline', bodyParser.json(), function(req, res) {
console.log(req.body);
//parse the where clause into the main sql statement
//sql = sql.replace(new RegExp("where_clause", 'g'), w)
+sql = sql.replace(new RegExp("app_req", 'g'), app_req);
sql = sql.replace(new RegExp("app_baseline_from_date", 'g'), app_baseline_from_date);
sql = sql.replace(new RegExp("app_baseline_to_date", 'g'), app_baseline_to_date);
sql = sql.replace(new RegExp("app_first_forecast_date", 'g'), app_first_forecast_date);

View File

@@ -26,11 +26,15 @@ setup
| | | | | | | | | iter | |
| | | | | | | | | logid | |
-* func: table name of associated data
-* fkey: primary key of associated data
+* func:
+    * a foreign key and table is setup (except date types)
+    * baseline function does a group-by to compact data for any func/appcol, and value types are aggregated
+* fkey:
+    * primary key of associated data and foreign key basis
+    * for date types no foreign key is setup, but dynamic table joins use the fkey field to join on
* fcol: associated field from the master data table if it is different (oseas would refer to ssyr in fc.perd)
* pretty: display column name (user friendly)
-* appcol: parameters that will have to be supplied by the application
+* appcol: flag columns that will be needed by the application
* order_date
* ship_date
* customer
@@ -44,6 +48,11 @@ setup
* logid (added if missing)
issues
+- [ ] baseline request json is not depositing correctly into the sql
+- [ ] what if you create a func table but some of the values are null? (orders without quotes)
+- [x] what if you want certain things like invoice numbers included in the baseline, is a func table necessary? no, just leave fkey null and no table will be setup
+- [ ] baseline needs to populate all columns in sequential order like the scale route
+- [ ] what if there is more than one table in fc.target_meta
- [x] validate the ability to join to associated date tables, for example order season that needs to be incremented
- [ ] there is not currently any initial grouping to limit excess data from all the document# scenarios
- [ ] how to handle a target value adjustment, which currency is it in?
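
To make the column roles described in these notes concrete, here is a hypothetical fc.target_meta row for an order-date column (the column names come from the code in this compare; every value is invented purely for illustration):

INSERT INTO fc.target_meta ("schema", tname, cname, dtype, opos, appcol, func, fkey, fcol, pretty)
VALUES ('tpsv', 'dcard', 'Trans. Date', 'date', 3, 'order_date', 'tdate', 'Trans. Date', null, 'Transaction Date');

Because dtype is 'date' and fkey is not null, no foreign-key table would be created for it, but the generated plug/increment queries would join it to fc.perd through its func alias.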

View File

@@ -1,20 +1,30 @@
DELETE FROM tpsv.dcard WHERE app_version = 'app_forecast_name';
WITH
-baseline AS (
+req AS (SELECT $$app_req$$::jsonb j)
+,ilog AS (
+    INSERT INTO
+        fc.log (doc)
+    SELECT
+        req.j
+    FROM
+        req
+    RETURNING *
+)
+,baseline AS (
SELECT
-    o.id
-    ,o.logid
+    null::integer AS id
+    ,null::integer AS logid
    ,o."Trans. Date"
    ,o."Post Date"
    ,o."Description"
-    ,o."Amount"
+    ,sum(o."Amount") AS "Amount"
    ,o."Category"
    ,o."Party"
    ,o."Reason"
-    ,o.app_units
-    ,'app_forecast_name' "version"
-    ,'actuals' iter
-    ,null::bigint app_logid
+    ,sum(o.app_units) AS app_units
+    ,'app_forecast_name' AS app_version
+    ,'baseline' AS app_iter
+    ,(SELECT id FROM ilog) AS app_logid
FROM
    tpsv.dcard o
WHERE
@@ -27,52 +37,72 @@ WHERE
    OR ("Post Date" BETWEEN 'app_baseline_from_date'::date AND 'app_baseline_to_date'::date)
)
--be sure to pre-exclude unwanted items, like canceled orders, non-gross sales, and short-ships
-UNION ALL
-SELECT
-    o.id
-    ,o.logid
-    ,o."Trans. Date" + interval '1 year' AS "Trans. Date"
-    ,o."Post Date" + interval '1 year' AS "Post Date"
+GROUP BY
+    o."Trans. Date"
+    ,o."Post Date"
    ,o."Description"
-    ,o."Amount"
    ,o."Category"
    ,o."Party"
    ,o."Reason"
-    ,o.app_units
-    ,'app_forecast_name' "version"
-    ,'plug' iter
-    ,null::bigint app_logid
+UNION ALL
+SELECT
+    null::integer AS id
+    ,null::integer AS logid
+    ,o."Trans. Date" + interval '1 year' AS "Trans. Date"
+    ,o."Post Date" + interval '1 year' AS "Post Date"
+    ,o."Description"
+    ,sum(o."Amount") AS "Amount"
+    ,o."Category"
+    ,o."Party"
+    ,o."Reason"
+    ,sum(o.app_units) AS app_units
+    ,'app_forecast_name' AS app_version
+    ,'baseline' AS app_iter
+    ,(SELECT id FROM ilog) AS app_logid
FROM
    tpsv.dcard o
-LEFT OUTER JOIN fc.perd pdate ON
-    (o."Post Date" + interval '1 year' )::date <@ pdate.drange
-LEFT OUTER JOIN fc.perd tdate ON
+LEFT OUTER JOIN fc.perd tdate ON
    (o."Trans. Date" + interval '1 year' )::date <@ tdate.drange
+LEFT OUTER JOIN fc.perd pdate ON
+    (o."Post Date" + interval '1 year' )::date <@ pdate.drange
WHERE
    "Trans. Date" BETWEEN 'app_plug_fromdate'::date AND 'app_plug_todate'::date
--be sure to pre-exclude unwanted items, like canceled orders, non-gross sales, and short-ships
-)
+GROUP BY
+    o."Trans. Date"
+    ,o."Post Date"
+    ,o."Description"
+    ,o."Category"
+    ,o."Party"
+    ,o."Reason")
,incr AS (
SELECT
-    o.id
-    ,o.logid
+    null::integer AS id
+    ,null::integer AS logid
    ,o."Trans. Date" + interval '1 year' AS "Trans. Date"
    ,o."Post Date" + interval '1 year' AS "Post Date"
    ,o."Description"
-    ,o."Amount"
+    ,sum(o."Amount") AS "Amount"
    ,o."Category"
    ,o."Party"
    ,o."Reason"
-    ,o.app_units
-    ,'app_forecast_name' "version"
-    ,'baseline' iter
-    ,null::bigint app_logid
+    ,sum(o.app_units) AS app_units
+    ,'app_forecast_name' AS app_version
+    ,'baseline' AS app_iter
+    ,(SELECT id FROM ilog) AS app_logid
FROM
    baseline o
-LEFT OUTER JOIN fc.perd pdate ON
-    (o."Post Date" + interval '1 year' )::date <@ pdate.drange
LEFT OUTER JOIN fc.perd tdate ON
    (o."Trans. Date" + interval '1 year' )::date <@ tdate.drange
+LEFT OUTER JOIN fc.perd pdate ON
+    (o."Post Date" + interval '1 year' )::date <@ pdate.drange
+GROUP BY
+    o."Trans. Date"
+    ,o."Post Date"
+    ,o."Description"
+    ,o."Category"
+    ,o."Party"
+    ,o."Reason"
)
,ins AS (
INSERT INTO
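
The new req and ilog CTEs rely on PostgreSQL's writable CTEs: the request document is written to fc.log exactly once, and RETURNING * exposes the inserted row so every baseline/plug/incr row can stamp its id into app_logid. Stripped down to just that mechanism (the literal payload and the final SELECT are only illustrative):

WITH
req AS (SELECT $${"note":"example request"}$$::jsonb j)
,ilog AS (
    INSERT INTO fc.log (doc)
    SELECT req.j FROM req
    RETURNING *
)
SELECT (SELECT id FROM ilog) AS app_logid;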

View File

@@ -1,20 +1,26 @@
DO
$func$
DECLARE
    _clist text;
-    _clist_inc text;
-    _ytdbody text;
-    _order_date text;
-    _ship_date text;
-    _order_status text;
-    _actpy text;
-    _sql text;
-    _baseline text;
-    _date_funcs jsonb;
-    _perd_joins text;
-    _interval interval;
-    _target_table text;
-    _version_col text;
+    _clist_sum text;
+    _clist_group text;
+    _clist_inc text;
+    _version_col text;
+    _iter_col text;
+    _logid_col text;
+    _order_date text;
+    _ship_date text;
+    _order_status text;
+    _units_col text;
+    _value_col text;
+    _ytdbody text;
+    _actpy text;
+    _sql text;
+    _baseline text;
+    _date_funcs jsonb;
+    _perd_joins text;
+    _interval interval;
+    _target_table text;
/*----------------parameters listing--------------
app_baseline_from_date
@@ -29,14 +35,22 @@ app_plug_todate
BEGIN
-----------------populate application variables--------------------------------------------
-SELECT (SELECT cname FROM fc.target_meta WHERE appcol = 'order_date') INTO _order_date;
-SELECT (SELECT cname FROM fc.target_meta WHERE appcol = 'ship_date') INTO _ship_date;
-SELECT (SELECT cname FROM fc.target_meta WHERE appcol = 'order_status') INTO _order_status;
-SELECT format('%I',max(schema))||'.'||format('%I',max(tname)) INTO _target_table FROM fc.target_meta;
---the target interval
+SELECT cname INTO _order_date FROM fc.target_meta WHERE appcol = 'order_date';
+SELECT cname INTO _ship_date FROM fc.target_meta WHERE appcol = 'ship_date';
+SELECT cname INTO _order_status FROM fc.target_meta WHERE appcol = 'order_status';
+SELECT cname INTO _units_col FROM fc.target_meta WHERE appcol = 'units';
+SELECT cname INTO _value_col FROM fc.target_meta WHERE appcol = 'value';
+SELECT cname INTO _version_col FROM fc.target_meta WHERE appcol = 'version';
+SELECT cname INTO _iter_col FROM fc.target_meta WHERE appcol = 'iter';
+SELECT cname INTO _logid_col FROM fc.target_meta WHERE appcol = 'logid';
+SELECT format('%I',max(schema))||'.'||format('%I',max(tname)) INTO _target_table FROM fc.target_meta;
SELECT interval '1 year' INTO _interval;
-SELECT cname INTO _version_col FROM fc.target_meta WHERE appcol = 'version';
-SELECT jsonb_agg(func) INTO _date_funcs FROM fc.target_meta WHERE dtype = 'date' AND appcol is NOT null;
+----------------------setup listing of date joins required-----------------------------------------------------
+SELECT jsonb_agg(DISTINCT func) INTO _date_funcs FROM fc.target_meta WHERE dtype = 'date' AND fkey is NOT null;
+raise notice E'date list -----> \n %',_date_funcs;
+-----------------------create the date based table joins--------------------------------------------------------
--create table join for each date based func in target_meta joining to fc.perd static table
--the join, though, should be based on the target date, which needs an interval added to get to the target
SELECT
@@ -44,7 +58,7 @@ SELECT
'LEFT OUTER JOIN fc.perd '||func||' ON'||
$$
$$||'(o.'||format('%I',cname)||' + interval '||format('%L',_interval) ||' )::date <@ '||func||'.drange'
-,E'\n')
+,E'\n ')
INTO
    _perd_joins
FROM
@@ -53,19 +67,60 @@ WHERE
    dtype = 'date'
    AND func IS NOT NULL;
---raise notice '%',_perd_joins;
--------------------------------build a column list-----------------------------------------
+raise notice E'period table joins -----> \n %',_perd_joins;
+-------------------------------build a column list for sums-----------------------------------------
SELECT
-    string_agg('o.'||format('%I',cname),E'\n ,' ORDER BY opos ASC)
+    --string_agg('o.'||format('%I',cname),E'\n ,' ORDER BY opos ASC)
+    string_agg(
+        --create the column reference
+        CASE appcol
+        ----aggregation columns-----------------------------------------------------------
+        WHEN 'units' THEN 'sum(o.'||format('%I',cname)||') AS '||format('%I',cname)
+        WHEN 'value' THEN 'sum(o.'||format('%I',cname)||') AS '||format('%I',cname)
+        WHEN 'cost' THEN 'sum(o.'||format('%I',cname)||') AS '||format('%I',cname)
+        ----create singular values for the single forecast step---------------------------
+        WHEN 'version' THEN '''app_forecast_name'' AS '||format('%I',_version_col)
+        WHEN 'iter' THEN '''baseline'' AS '||format('%I',_iter_col)
+        WHEN 'logid' THEN '(SELECT id FROM ilog) AS '||format('%I',_logid_col)
+        ELSE
+            ---if there is no function, make the column null or the default value---
+            CASE WHEN t.func IS NULL
+                THEN 'null::'||t.dtype||' AS '||format('%I',cname)
+                ELSE 'o.'||format('%I',cname)
+            END
+        END
+        --delimiter
+        ,E'\n ,'
+        --sort column ordinal
+        ORDER BY opos ASC
+    )
INTO
-    _clist
+    _clist_sum
FROM
-    fc.target_meta
-WHERE
-    COALESCE(appcol,'') NOT IN ('version','iter','logid');
---raise notice '%',_clist;
+    fc.target_meta t;
+raise notice E'column list for sums -----> \n %',_clist_sum;
+-------------------------------build a column list for the group by-----------------------------------------
+SELECT
+    --string_agg('o.'||format('%I',cname),E'\n ,' ORDER BY opos ASC)
+    string_agg(
+        --create the column reference
+        'o.'||format('%I',cname)
+        --delimiter
+        ,E'\n ,'
+        --sort column ordinal
+        ORDER BY opos ASC
+    )
+INTO
+    _clist_group
+FROM
+    fc.target_meta t
+WHERE
+    COALESCE(appcol,'') NOT IN ('units','value','cost','version','iter','logid')
+    AND func IS NOT NULL;
+raise notice E'column list for group by -----> \n %',_clist_group;
---------------------------build column to increment dates---------------------------------
SELECT
@@ -75,7 +130,7 @@ SELECT
    WHEN _date_funcs ? func THEN
        CASE
        --...but it's not the date itself...
-        WHEN appcol IS NULL THEN
+        WHEN fkey IS NULL THEN
            --...pull the associated date field from perd table
            'perd.'||m.fcol
        --...and it's the primary key date...
@@ -85,29 +140,41 @@ SELECT
            'o.'||format('%I',cname)||' + interval '||format('%L',_interval) ||' AS '||format('%I',cname)
        END
    ELSE
-        'o.'||format('%I',cname)
+        CASE appcol
+        ----aggregation columns-----------------------------------------------------------
+        WHEN 'units' THEN 'sum(o.'||format('%I',cname)||') AS '||format('%I',cname)
+        WHEN 'value' THEN 'sum(o.'||format('%I',cname)||') AS '||format('%I',cname)
+        WHEN 'cost' THEN 'sum(o.'||format('%I',cname)||') AS '||format('%I',cname)
+        ----create singular values for the single forecast step---------------------------
+        WHEN 'version' THEN '''app_forecast_name'' AS '||format('%I',_version_col)
+        WHEN 'iter' THEN '''baseline'' AS '||format('%I',_iter_col)
+        WHEN 'logid' THEN '(SELECT id FROM ilog) AS '||format('%I',_logid_col)
+        ELSE
+            ---if there is no function, make the column null or the default value---
+            CASE WHEN m.func IS NULL
+                THEN 'null::'||m.dtype||' AS '||format('%I',cname)
+                ELSE 'o.'||format('%I',cname)
+            END
+        END
    END
    ,E'\n ,' ORDER BY opos ASC
)
INTO
    _clist_inc
FROM
-    fc.target_meta m
-WHERE
-    COALESCE(appcol,'') NOT IN ('version','iter','logid');
---RAISE NOTICE 'DATES INCREMENTED: %',_clist_inc;
+    fc.target_meta m;
+--WHERE
+--    COALESCE(appcol,'') NOT IN ('version','iter','logid');
+RAISE NOTICE E'DATES INCREMENTED: ------> \n %',_clist_inc;
--------------------------------------clone the actual baseline-----------------------------------------------
SELECT
$$SELECT
-$$::text||
-_clist||
+$$||
+_clist_sum||
$$
-,'app_forecast_name' "version"
-,'actuals' iter
-,null::bigint app_logid
FROM
$$||_target_table||$$ o
WHERE
@@ -120,12 +187,13 @@ WHERE
    OR ($$||format('%I',_ship_date)||$$ BETWEEN 'app_baseline_from_date'::date AND 'app_baseline_to_date'::date)
)
--be sure to pre-exclude unwanted items, like canceled orders, non-gross sales, and short-ships
-$$::text
+GROUP BY
+$$||
+_clist_group
INTO
    _ytdbody;
---RAISE NOTICE '_ytdbody %', _ytdbody;
+RAISE NOTICE E'_ytdbody \n%', _ytdbody;
------------------------------------pull a plug from actuals to create a full year baseline------------------
@@ -133,19 +201,17 @@ SELECT
$$SELECT
$$||_clist_inc||
$$
-,'app_forecast_name' "version"
-,'plug' iter
-,null::bigint app_logid
FROM
-$$||_target_table||' o'||E'\n'||_perd_joins||$$
+$$||_target_table||' o'||E'\n '||_perd_joins||$$
WHERE
$$||format('%I',_order_date)||$$ BETWEEN 'app_plug_fromdate'::date AND 'app_plug_todate'::date
--be sure to pre-exclude unwanted items, like canceled orders, non-gross sales, and short-ships
-$$
+GROUP BY
+$$||_clist_group
INTO
    _actpy;
---RAISE NOTICE '_actpf %',_actpy;
+RAISE NOTICE E'plug body --------> \n%',_actpy;
------------------------------copy a full year and increment by 1 year for the baseline-------------------------
SELECT
@@ -153,13 +219,11 @@ SELECT
-- fc.live
$$,incr AS (
SELECT
-$$||_clist_inc||
-$$
-,'app_forecast_name' "version"
-,'baseline' iter
-,null::bigint app_logid
+$$||_clist_inc||$$
FROM
baseline o$$||E'\n'||_perd_joins||$$
+GROUP BY
+$$||_clist_group||$$
)
,ins AS (
INSERT INTO
@@ -179,22 +243,32 @@ INTO
    _baseline;
---RAISE NOTICE '_baseline %',_baseline;
+RAISE NOTICE E'_baseline -------> \n%',_baseline;
------------------------------stack the sql into the final format------------------------------------------------
SELECT
$$DELETE FROM $$||_target_table||$$ WHERE $$||_version_col||$$ = 'app_forecast_name';
WITH
-baseline AS (
-$$||_ytdbody||
-$$UNION ALL
+req AS (SELECT $$||'$$app_req$$::jsonb j)'||$$
+,ilog AS (
+    INSERT INTO
+        fc.log (doc)
+    SELECT
+        req.j
+    FROM
+        req
+    RETURNING *
+)
+,baseline AS (
+$$||_ytdbody||$$
+UNION ALL
$$||_actpy
||$$)
$$||_baseline
INTO
    _sql;
---RAISE NOTICE '_sql %',_sql;
+RAISE NOTICE E'_sql ---------> \n%',_sql;
INSERT INTO fc.sql SELECT 'baseline', _sql ON CONFLICT ON CONSTRAINT sql_pkey DO UPDATE SET t = EXCLUDED.t;
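
All three column lists above (_clist_sum, _clist_group, _clist_inc) are built with the same technique: string_agg() over fc.target_meta, format('%I', cname) to quote each identifier, and a CASE on appcol that decides whether a column is summed, replaced by a constant, or passed through. A condensed illustration of that pattern outside the DO block (it assumes only the cname, appcol, and opos columns and shows just the units/value aggregation branch):

SELECT
    string_agg(
        CASE appcol
            WHEN 'units' THEN 'sum(o.'||format('%I',cname)||') AS '||format('%I',cname)
            WHEN 'value' THEN 'sum(o.'||format('%I',cname)||') AS '||format('%I',cname)
            ELSE 'o.'||format('%I',cname)
        END
        ,E'\n    ,' ORDER BY opos ASC
    ) AS select_list
FROM fc.target_meta;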

View File

@@ -35,7 +35,7 @@ BEGIN
--HAVING
-- string_agg(cname,', ') FILTER (WHERE fkey = func) <> ''
loop
-    INSERT INTO fc.sql SELECT f.func, f.ddl;
+    INSERT INTO fc.sql SELECT f.func, f.ddl ON CONFLICT ON CONSTRAINT sql_pkey DO UPDATE SET t = EXCLUDED.t;
    EXECUTE format('%s',f.ddl);
    EXECUTE format('%s',f.pop);
    EXECUTE format('%s',f.fk);
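
For context, this hunk sits inside a PL/pgSQL loop that stores each generated statement and then runs it with EXECUTE; the change makes the fc.sql insert an upsert so the loop can be re-run without tripping sql_pkey. A minimal sketch of that loop shape, assuming the project's fc.sql table and its sql_pkey constraint exist (the func name and DDL below are placeholders):

DO $sketch$
DECLARE
    f record;
BEGIN
    FOR f IN SELECT 'demo'::text AS func, 'CREATE TABLE IF NOT EXISTS demo_func(i int)'::text AS ddl LOOP
        --store the generated statement so it can be inspected later, then execute it
        INSERT INTO fc.sql SELECT f.func, f.ddl
            ON CONFLICT ON CONSTRAINT sql_pkey DO UPDATE SET t = EXCLUDED.t;
        EXECUTE format('%s', f.ddl);
    END LOOP;
END
$sketch$;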