Merge pull request #20 from fleetside72/dev_setup

Dev setup
fleetside72 2018-05-25 16:06:29 -04:00 committed by GitHub
commit 8902f529a9
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
52 changed files with 3137 additions and 2465 deletions

View File

@ -1,4 +0,0 @@
"C:\PostgreSQL\pg10\bin\pg_dump" -h localhost -p 5433 -U ptrowbridge -d ubm2 -s -n "tps" -O -F p -f "C:\users\fleet\Documents\tps_etl\deploy\ubm_schema.sql"
"/home/ubuntu/workspace/bigsql/pg10/bin/psql" -h localhost -p 5433 -U ptrowbridge -d ubm -s -n "tps" -O -F p -f "/home/ubuntu/workspace/tps_etl/deploy/ubm_schema.sql"

View File

@ -1,7 +0,0 @@
"C:\PostgreSQL\pg10\bin\psql" -h localhost -p 5433 -d postgres -U postgres -c "DROP DATABASE ubm2"
"C:\PostgreSQL\pg10\bin\psql" -h localhost -p 5433 -d postgres -U postgres -c "CREATE DATABASE ubm2"
"C:\PostgreSQL\pg10\bin\psql" -h localhost -p 5433 -d ubm2 -U postgres -f "C:\users\fleet\documents\tps_etl\deploy\ubm_schema.sql"
"/home/ubuntu/workspace/bigsql/pg10/bin/psql" -h localhost -p 5432 -d postgres -U postgres -c "DROP DATABASE ubm"
"/home/ubuntu/workspace/bigsql/pg10/bin/psql" -h localhost -p 5432 -d postgres -U postgres -c "CREATE DATABASE ubm"
"/home/ubuntu/workspace/bigsql/pg10/bin/psql" -h localhost -p 5432 -d ubm -U postgres -f "/home/ubuntu/workspace/tps_etl/ubm_schema.sql"

File diff suppressed because one or more lines are too long

View File

@ -1,16 +0,0 @@
/*---------------------------------------------------------------------------
turns a single json object into a table suitable for insert to tps.map_rv
this could facilitate a call to a function for inserting many rows from ui
----------------------------------------------------------------------------*/
WITH j AS (
select
$$
[{"source":"DCARD","map":"First 20","ret_val":{"f20": "DISCOUNT DRUG MART 3"},"mapped":{"party":"Discount Drug Mart","reason":"groceries"}},{"source":"DCARD","map":"First 20","ret_val":{"f20": "TARGET STOW OH"},"mapped":{"party":"Target","reason":"groceries"}},{"source":"DCARD","map":"First 20","ret_val":{"f20": "WALMART GROCERY 800-"},"mapped":{"party":"Walmart","reason":"groceries"}},{"source":"DCARD","map":"First 20","ret_val":{"f20": "CIRCLE K 05416 STOW "},"mapped":{"party":"Circle K","reason":"gasoline"}},{"source":"DCARD","map":"First 20","ret_val":{"f20": "TARGET.COM * 800-591"},"mapped":{"party":"Target","reason":"home supplies"}},{"source":"DCARD","map":"First 20","ret_val":{"f20": "ACME NO. 17 STOW OH"},"mapped":{"party":"Acme","reason":"groceries"}},{"source":"DCARD","map":"First 20","ret_val":{"f20": "AT&T *PAYMENT 800-28"},"mapped":{"party":"AT&T","reason":"internet"}},{"source":"DCARD","map":"First 20","ret_val":{"f20": "AUTOZONE #0722 STOW "},"mapped":{"party":"Autozone","reason":"auto maint"}},{"source":"DCARD","map":"First 20","ret_val":{"f20": "BESTBUYCOM8055267948"},"mapped":{"party":"BestBuy","reason":"home supplies"}},{"source":"DCARD","map":"First 20","ret_val":{"f20": "BUFFALO WILD WINGS K"},"mapped":{"party":"Buffalo Wild Wings","reason":"restaurante"}},{"source":"DCARD","map":"First 20","ret_val":{"f20": "CASHBACK BONUS REDEM"},"mapped":{"party":"Discover Card","reason":"financing"}},{"source":"DCARD","map":"First 20","ret_val":{"f20": "CLE CLINIC PT PMTS 2"},"mapped":{"party":"Cleveland Clinic","reason":"medical"}}]
$$::jsonb x
)
SELECT
jtr.*
FROM
j
LEFT JOIN LATERAL jsonb_array_elements(j.x) ae(v) ON TRUE
LEFT JOIN LATERAL jsonb_to_record(ae.v) AS jtr(source text, map text, ret_val jsonb, mapped jsonb) ON TRUE
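/* sketch only (not part of this commit): attaching the WITH above to an INSERT
   turns the expansion into a bulk load of tps.map_rv; assumes the columns
   (srce, target, retval, map) and constraint map_rv_pk used elsewhere in this repo
WITH j AS (
    SELECT $$[{"source":"DCARD","map":"First 20","ret_val":{"f20":"TARGET STOW OH"},"mapped":{"party":"Target","reason":"groceries"}}]$$::jsonb x
)
INSERT INTO tps.map_rv (srce, target, retval, map)
SELECT jtr.source, jtr.map, jtr.ret_val, jtr.mapped
FROM j
LEFT JOIN LATERAL jsonb_array_elements(j.x) ae(v) ON TRUE
LEFT JOIN LATERAL jsonb_to_record(ae.v) AS jtr(source text, map text, ret_val jsonb, mapped jsonb) ON TRUE
ON CONFLICT ON CONSTRAINT map_rv_pk DO UPDATE SET map = excluded.map;
*/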

View File

@ -1,196 +0,0 @@
\timing
/*--------------------------------------------------------
0. load target import to temp table
1. create pending list
2. get unique pending keys
3. see which keys not already in tps.trans
4. insert pending records associated with keys that are not already in trans
5. insert summary to log table
*/---------------------------------------------------------
DO $$
DECLARE _t text;
DECLARE _c text;
DECLARE _path text;
DECLARE _srce text;
DECLARE _log_info text;
DECLARE _log_id text;
BEGIN
_path := 'C:\users\ptrowbridge\documents\tps_etl\sample_discovercard\data.csv';
_srce := 'DCARD';
----------------------------------------------------build the column list of the temp table----------------------------------------------------------------
SELECT
string_agg(quote_ident(prs.key)||' '||prs.type,','),
string_agg(quote_ident(prs.key),',')
INTO
_t,
_c
FROM
tps.srce
--unwrap the schema definition array
LEFT JOIN LATERAL jsonb_populate_recordset(null::tps.srce_defn_schema, defn->'schema') prs ON TRUE
WHERE
srce = _srce
GROUP BY
srce;
----------------------------------------------------add create table verbiage in front of column list--------------------------------------------------------
_t := format('CREATE TEMP TABLE csv_i (%s, id SERIAL)', _t);
--RAISE NOTICE '%', _t;
--RAISE NOTICE '%', _c;
DROP TABLE IF EXISTS csv_i;
EXECUTE _t;
----------------------------------------------------do the insert-------------------------------------------------------------------------------------------
--the column list needs to be dynamic forcing this whole line to be dynamic
_t := format('COPY csv_i (%s) FROM %L WITH (HEADER TRUE,DELIMITER '','', FORMAT CSV, ENCODING ''SQL_ASCII'',QUOTE ''"'');',_c,_path);
--RAISE NOTICE '%', _t;
EXECUTE _t;
WITH
-------------extract the limiter fields to one row per source----------------------------------
ext AS (
SELECT
srce
,defn->'unique_constraint'->>'fields'
,ARRAY(SELECT ae.e::text[] FROM jsonb_array_elements_text(defn->'unique_constraint'->'fields') ae(e)) text_array
FROM
tps.srce
WHERE
srce = _srce
--add where clause for targeted source
)
-------------for each imported row in the COPY table, generate the json rec, and a column for the json key specified in the srce.defn-----------
,pending_list AS (
SELECT
tps.jsonb_extract(
row_to_json(i)::jsonb
,ext.text_array
) json_key,
row_to_json(i)::JSONB rec,
srce,
--ae.rn,
id
FROM
csv_i i
INNER JOIN ext ON
ext.srce = _srce
ORDER BY
id ASC
)
-----------create a unique list of keys from staged rows------------------------------------------------------------------------------------------
, pending_keys AS (
SELECT DISTINCT
json_key
FROM
pending_list
)
-----------list of keys already loaded to tps-----------------------------------------------------------------------------------------------------
, matched_keys AS (
SELECT DISTINCT
k.json_key
FROM
pending_keys k
INNER JOIN tps.trans t ON
t.rec @> k.json_key
)
-----------return unique keys that are not already in tps.trans-----------------------------------------------------------------------------------
, unmatched_keys AS (
SELECT
json_key
FROM
pending_keys
EXCEPT
SELECT
json_key
FROM
matched_keys
)
-----------insert pending rows that have key with no trans match-----------------------------------------------------------------------------------
--need to look into mapping the transactions prior to loading
, inserted AS (
INSERT INTO
tps.trans (srce, rec)
SELECT
pl.srce
,pl.rec
FROM
pending_list pl
INNER JOIN unmatched_keys u ON
u.json_key = pl.json_key
ORDER BY
pl.id ASC
----this conflict is only if an exact duplicate rec json happens, which will be rejected
----therefore, records may not be inserted due to any matches with certain json fields, or if the entire json is a duplicate, reason is not specified
RETURNING *
)
--------summarize records not inserted-------------------+------------------------------------------------------------------------------------------------
, logged AS (
INSERT INTO
tps.trans_log (info)
SELECT
JSONB_BUILD_OBJECT('time_stamp',CURRENT_TIMESTAMP)
||JSONB_BUILD_OBJECT('srce',_srce)
||JSONB_BUILD_OBJECT('path',_path)
||JSONB_BUILD_OBJECT('not_inserted',
(
SELECT
jsonb_agg(json_key)
FROM
matched_keys
)
)
||JSONB_BUILD_OBJECT('inserted',
(
SELECT
jsonb_agg(json_key)
FROM
unmatched_keys
)
)
RETURNING *
)
SELECT
id
,info
INTO
_log_id
,_log_info
FROM
logged;
RAISE NOTICE 'import logged under id# %, info: %', _log_id, _log_info;
END
$$;

View File

@ -1,256 +0,0 @@
\timing
DROP FUNCTION IF EXISTS tps.srce_import(_path text, _srce text);
CREATE OR REPLACE FUNCTION tps.srce_import(_path text, _srce text) RETURNS jsonb
/*--------------------------------------------------------
0. load target import to temp table
1. create pending list
2. get unique pending keys
3. see which keys not already in tps.trans
4. insert pending records associated with keys that are not already in trans
5. insert summary to log table
*/---------------------------------------------------------
--to-do
--return information to a client via json or composite type
AS $f$
DECLARE _t text;
DECLARE _c text;
DECLARE _log_info jsonb;
DECLARE _log_id text;
DECLARE _cnt numeric;
DECLARE _message jsonb;
_MESSAGE_TEXT text;
_PG_EXCEPTION_DETAIL text;
_PG_EXCEPTION_HINT text;
BEGIN
--_path := 'C:\users\fleet\downloads\discover-recentactivity-20171031.csv';
--_srce := 'DCARD';
----------------------------------------------------test if source exists----------------------------------------------------------------------------------
SELECT
COUNT(*)
INTO
_cnt
FROM
tps.srce
WHERE
srce = _srce;
IF _cnt = 0 THEN
_message:=
format(
$$
{
"status":"fail",
"message":"source %L does not exists"
}
$$,
_srce
)::jsonb;
RETURN _message;
END IF;
----------------------------------------------------build the column list of the temp table----------------------------------------------------------------
SELECT
string_agg(quote_ident(prs.key)||' '||prs.type,','),
string_agg(quote_ident(prs.key),',')
INTO
_t,
_c
FROM
tps.srce
--unwrap the schema definition array
LEFT JOIN LATERAL jsonb_populate_recordset(null::tps.srce_defn_schema, defn->'schema') prs ON TRUE
WHERE
srce = _srce
GROUP BY
srce;
----------------------------------------------------add create table verbiage in front of column list--------------------------------------------------------
_t := format('CREATE TEMP TABLE csv_i (%s, id SERIAL)', _t);
--RAISE NOTICE '%', _t;
--RAISE NOTICE '%', _c;
DROP TABLE IF EXISTS csv_i;
EXECUTE _t;
----------------------------------------------------do the insert-------------------------------------------------------------------------------------------
--the column list needs to be dynamic forcing this whole line to be dynamic
_t := format('COPY csv_i (%s) FROM %L WITH (HEADER TRUE,DELIMITER '','', FORMAT CSV, ENCODING ''SQL_ASCII'',QUOTE ''"'');',_c,_path);
--RAISE NOTICE '%', _t;
EXECUTE _t;
WITH
-------------extract the limiter fields to one row per source----------------------------------
ext AS (
SELECT
srce
,defn->'unique_constraint'->>'fields'
,ARRAY(SELECT ae.e::text[] FROM jsonb_array_elements_text(defn->'unique_constraint'->'fields') ae(e)) text_array
FROM
tps.srce
WHERE
srce = _srce
--add where clause for targeted source
)
-------------for each imported row in the COPY table, generate the json rec, and a column for the json key specified in the srce.defn-----------
,pending_list AS (
SELECT
tps.jsonb_extract(
row_to_json(i)::jsonb
,ext.text_array
) json_key,
row_to_json(i)::JSONB rec,
srce,
--ae.rn,
id
FROM
csv_i i
INNER JOIN ext ON
ext.srce = _srce
ORDER BY
id ASC
)
-----------create a unique list of keys from staged rows------------------------------------------------------------------------------------------
, pending_keys AS (
SELECT DISTINCT
json_key
FROM
pending_list
)
-----------list of keys already loaded to tps-----------------------------------------------------------------------------------------------------
, matched_keys AS (
SELECT DISTINCT
k.json_key
FROM
pending_keys k
INNER JOIN tps.trans t ON
t.rec @> k.json_key
)
-----------return unique keys that are not already in tps.trans-----------------------------------------------------------------------------------
, unmatched_keys AS (
SELECT
json_key
FROM
pending_keys
EXCEPT
SELECT
json_key
FROM
matched_keys
)
-----------insert pending rows that have key with no trans match-----------------------------------------------------------------------------------
--need to look into mapping the transactions prior to loading
, inserted AS (
INSERT INTO
tps.trans (srce, rec)
SELECT
pl.srce
,pl.rec
FROM
pending_list pl
INNER JOIN unmatched_keys u ON
u.json_key = pl.json_key
ORDER BY
pl.id ASC
----this conflict is only if an exact duplicate rec json happens, which will be rejected
----therefore, records may not be inserted due to any matches with certain json fields, or if the entire json is a duplicate, reason is not specified
RETURNING *
)
--------summarize records not inserted-------------------+------------------------------------------------------------------------------------------------
, logged AS (
INSERT INTO
tps.trans_log (info)
SELECT
JSONB_BUILD_OBJECT('time_stamp',CURRENT_TIMESTAMP)
||JSONB_BUILD_OBJECT('srce',_srce)
||JSONB_BUILD_OBJECT('path',_path)
||JSONB_BUILD_OBJECT('not_inserted',
(
SELECT
jsonb_agg(json_key)
FROM
matched_keys
)
)
||JSONB_BUILD_OBJECT('inserted',
(
SELECT
jsonb_agg(json_key)
FROM
unmatched_keys
)
)
RETURNING *
)
SELECT
id
,info
INTO
_log_id
,_log_info
FROM
logged;
--RAISE NOTICE 'import logged under id# %, info: %', _log_id, _log_info;
_message:=
(
format(
$$
{
"status":"complete",
"message":"import of %L for source %L complete"
}
$$, _path, _srce)::jsonb
)||jsonb_build_object('details',_log_info);
RETURN _message;
EXCEPTION WHEN OTHERS THEN
GET STACKED DIAGNOSTICS
_MESSAGE_TEXT = MESSAGE_TEXT,
_PG_EXCEPTION_DETAIL = PG_EXCEPTION_DETAIL,
_PG_EXCEPTION_HINT = PG_EXCEPTION_HINT;
_message:=
($$
{
"status":"fail",
"message":"error importing data"
}
$$::jsonb)
||jsonb_build_object('message_text',_MESSAGE_TEXT)
||jsonb_build_object('pg_exception_detail',_PG_EXCEPTION_DETAIL);
return _message;
END;
$f$
LANGUAGE plpgsql
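--usage sketch (the path below is the hypothetical local file from the commented defaults above; the CSV must match the source's schema definition):
--SELECT tps.srce_import('C:\users\fleet\downloads\discover-recentactivity-20171031.csv', 'DCARD');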

View File

@ -1,51 +0,0 @@
CREATE OR REPLACE FUNCTION tps.srce_map_def_set(_srce text, _map text, _defn jsonb, _seq int) RETURNS jsonb
AS
$f$
DECLARE
_message jsonb;
_MESSAGE_TEXT text;
_PG_EXCEPTION_DETAIL text;
_PG_EXCEPTION_HINT text;
BEGIN
BEGIN
INSERT INTO
tps.map_rm
SELECT
_srce
,_map
,_defn
,_seq
ON CONFLICT ON CONSTRAINT map_rm_pk DO UPDATE SET
srce = _srce
,target = _map
,regex = _defn
,seq = _seq;
EXCEPTION WHEN OTHERS THEN
GET STACKED DIAGNOSTICS
_MESSAGE_TEXT = MESSAGE_TEXT,
_PG_EXCEPTION_DETAIL = PG_EXCEPTION_DETAIL,
_PG_EXCEPTION_HINT = PG_EXCEPTION_HINT;
_message:=
($$
{
"status":"fail",
"message":"error setting definition"
}
$$::jsonb)
||jsonb_build_object('message_text',_MESSAGE_TEXT)
||jsonb_build_object('pg_exception_detail',_PG_EXCEPTION_DETAIL);
return _message;
END;
_message:= jsonb_build_object('status','complete','message','definition has been set');
return _message;
END;
$f$
language plpgsql

View File

@ -1,49 +0,0 @@
CREATE OR REPLACE FUNCTION tps.srce_map_val_set(_srce text, _target text, _ret jsonb, _map jsonb) RETURNS jsonb
AS
$f$
DECLARE
_message jsonb;
_MESSAGE_TEXT text;
_PG_EXCEPTION_DETAIL text;
_PG_EXCEPTION_HINT text;
BEGIN
INSERT INTO
tps.map_rv
SELECT
_srce
,_target
,_ret
,_map
ON CONFLICT ON CONSTRAINT map_rv_pk DO UPDATE SET
srce = _srce
,target = _target
,retval = _ret
,map = _map;
_message:= jsonb_build_object('status','complete');
RETURN _message;
EXCEPTION WHEN OTHERS THEN
GET STACKED DIAGNOSTICS
_MESSAGE_TEXT = MESSAGE_TEXT,
_PG_EXCEPTION_DETAIL = PG_EXCEPTION_DETAIL,
_PG_EXCEPTION_HINT = PG_EXCEPTION_HINT;
_message:=
($$
{
"status":"fail",
"message":"error setting map value"
}
$$::jsonb)
||jsonb_build_object('message_text',_MESSAGE_TEXT)
||jsonb_build_object('pg_exception_detail',_PG_EXCEPTION_DETAIL);
RETURN _message;
END
$f$
language plpgsql

View File

@ -1,60 +0,0 @@
DROP FUNCTION IF EXISTS tps.srce_map_val_set_multi;
CREATE OR REPLACE FUNCTION tps.srce_map_val_set_multi(_maps jsonb) RETURNS JSONB
LANGUAGE plpgsql
AS $f$
DECLARE
_message jsonb;
_MESSAGE_TEXT text;
_PG_EXCEPTION_DETAIL text;
_PG_EXCEPTION_HINT text;
BEGIN
WITH
-----------expand the json into a table------------------------------------------------------------------------------
t AS (
SELECT
jtr.*
FROM
jsonb_array_elements(_maps) ae(v)
JOIN LATERAL jsonb_to_record(ae.v) AS jtr(source text, map text, ret_val jsonb, mapped jsonb) ON TRUE
)
-----------do merge---------------------------------------------------------------------------------------------------
INSERT INTO
tps.map_rv
SELECT
t."source"
,t."map"
,t.ret_val
,t.mapped
FROM
t
ON CONFLICT ON CONSTRAINT map_rv_pk DO UPDATE SET
map = excluded.map;
-------return message--------------------------------------------------------------------------------------------------
_message:= jsonb_build_object('status','complete');
RETURN _message;
EXCEPTION WHEN OTHERS THEN
GET STACKED DIAGNOSTICS
_MESSAGE_TEXT = MESSAGE_TEXT,
_PG_EXCEPTION_DETAIL = PG_EXCEPTION_DETAIL,
_PG_EXCEPTION_HINT = PG_EXCEPTION_HINT;
_message:=
($$
{
"status":"fail",
"message":"error setting map value"
}
$$::jsonb)
||jsonb_build_object('message_text',_MESSAGE_TEXT)
||jsonb_build_object('pg_exception_detail',_PG_EXCEPTION_DETAIL);
RETURN _message;
END;
$f$
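--usage sketch: pass an array of {source, map, ret_val, mapped} objects, e.g.
--SELECT tps.srce_map_val_set_multi(
--    $$[{"source":"DCARD","map":"First 20","ret_val":{"f20":"TARGET STOW OH"},"mapped":{"party":"Target","reason":"groceries"}}]$$::jsonb
--);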

View File

@ -1,107 +0,0 @@
CREATE OR REPLACE FUNCTION tps.srce_set(_defn jsonb) RETURNS jsonb
AS $f$
DECLARE
_cnt int;
_conflict BOOLEAN;
_message jsonb;
_sql text;
_cur_sch jsonb;
BEGIN
/*
1. determine if insert or update
2. if update, determine if conflicts exists
3. do merge
*/
-------extract current source schema for compare--------------------------
SELECT
defn->'schema'
INTO
_cur_sch
FROM
tps.srce
WHERE
srce = _defn->>'name';
-------check for transactions already existing under this source-----------
SELECT
COUNT(*)
INTO
_cnt
FROM
tps.trans
WHERE
srce = _defn->>'name';
--if there are transactions already and the schema is different, stop--------
IF _cnt > 0 THEN
IF _cur_sch <> _defn->'schema' THEN
_conflict = TRUE;
--exit the function with an error message
_message =
$$
{
"message":"transactions already exist under source profile and there is a pending schema change"
,"status":"error"
}
$$::jsonb;
return _message;
END IF;
END IF;
/*-------------------------------------------------------
do schema validation of the _defn object?
---------------------------------------------------------*/
-------------------insert definition----------------------------------------
INSERT INTO
tps.srce
SELECT
_defn->>'name', _defn
ON CONFLICT ON CONSTRAINT srce_pkey DO UPDATE
SET
defn = _defn;
------------------drop existing type-----------------------------------------
EXECUTE format('DROP TYPE IF EXISTS tps.%I',_defn->>'name');
------------------create new type--------------------------------------------
SELECT
string_agg(quote_ident(prs.key)||' '||prs.type,',')
INTO
_sql
FROM
tps.srce
--unwrap the schema definition array
LEFT JOIN LATERAL jsonb_populate_recordset(null::tps.srce_defn_schema, defn->'schema') prs ON TRUE
WHERE
srce = _defn->>'name'
GROUP BY
srce;
RAISE NOTICE 'CREATE TYPE tps.% AS (%)',_defn->>'name',_sql;
EXECUTE format('CREATE TYPE tps.%I AS (%s)',_defn->>'name',_sql);
EXECUTE format('COMMENT ON TYPE tps.%I IS %L',_defn->>'name',(_defn->>'description'));
----------------set message-----------------------------------------------------
_message =
$$
{
"message":"definition set"
,"status":"success"
}
$$::jsonb;
return _message;
END;
$f$
LANGUAGE plpgsql

View File

@ -1,6 +1,13 @@
CREATE OR REPLACE FUNCTION tps.trans_insert_map() RETURNS TRIGGER AS $f$
CREATE OR REPLACE FUNCTION tps.trans_insert_map() RETURNS TRIGGER
AS
$f$
DECLARE
_cnt INTEGER;
BEGIN
IF (TG_OP = 'INSERT') THEN
WITH
--------------------apply regex operations to transactions-----------------------------------------------------------------------------------
@ -205,18 +212,33 @@ CREATE OR REPLACE FUNCTION tps.trans_insert_map() RETURNS TRIGGER AS $f$
--SELECT agg_to_id.srce, agg_to_id.id, jsonb_pretty(agg_to_id.retain_val) , jsonb_pretty(agg_to_id.map) FROM agg_to_id ORDER BY id desc LIMIT 100
--create a complete list of all new inserts assuming some do not have maps (left join)
,join_all AS (
SELECT
n.srce
,n.id
,n.rec
,a.retain_val parse
,a.map
,n.rec||COALESCE(a.map||a.retain_val,'{}'::jsonb) allj
FROM
new_table n
LEFT OUTER JOIN agg_to_id a ON
a.id = n.id
)
--update trans with join_all recs
UPDATE
tps.trans t
SET
map = o.map,
parse = o.retain_val,
allj = t.rec||o.map||o.retain_val
parse = a.parse
,map = a.map
,allj = a.allj
FROM
agg_to_id o
join_all a
WHERE
o.id = t.id;
t.id = a.id;
END IF;
RETURN NULL;

File diff suppressed because one or more lines are too long

View File

@ -0,0 +1,184 @@
DO
$f$
DECLARE
_t text;
_c text;
_log_info jsonb;
_log_id text;
_cnt numeric;
_message jsonb;
_recs jsonb;
_srce text;
_defn jsonb;
_MESSAGE_TEXT text;
_PG_EXCEPTION_DETAIL text;
_PG_EXCEPTION_HINT text;
BEGIN
_srce := 'DMAPI';
_recs:= $${"id":1,"doc":{"rows":[{"elements":[{"status":"OK","distance":{"text":"225 mi","value":361940},"duration":{"text":"3 hours 50 mins","value":13812}}]}],"status":"OK","origin_addresses":["Washington, DC, USA"],"destination_addresses":["New York, NY, USA"]}}$$::jsonb;
----------------------------------------------------test if source exists----------------------------------------------------------------------------------
SELECT
defn
INTO
_defn
FROM
tps.srce
WHERE
srce = _srce;
IF _defn IS NULL THEN
_message:=
format(
$$
{
"status":"fail",
"message":"source %L does not exists"
}
$$,
_srce
)::jsonb;
RAISE NOTICE '%', _message;
RETURN; --abort the anonymous block; nothing to import
END IF;
-------------unwrap the json record and apply the path(s) of the constraint to build a constraint key per record-----------------------------------------------------------------------------------
WITH
pending_list AS (
SELECT
_srce srce
,j.rec
,j.id
--aggregate back to the record since multiple paths may be listed in the constraint
--it is unclear why the "->>0" is required to correctly extract the text array from the jsonb
,tps.jsonb_concat_obj(
jsonb_build_object(
--the new json key is the path itself
cons.path->>0
,j.rec#>((cons.path->>0)::text[])
)
) json_key
FROM
jsonb_array_elements(_recs) WITH ORDINALITY j(rec,id)
JOIN LATERAL jsonb_array_elements(_defn->'constraint') WITH ORDINALITY cons(path, seq) ON TRUE
GROUP BY
j.rec
,j.id
)
-----------create a unique list of keys from staged rows------------------------------------------------------------------------------------------
, pending_keys AS (
SELECT DISTINCT
json_key
FROM
pending_list
)
-----------list of keys already loaded to tps-----------------------------------------------------------------------------------------------------
, matched_keys AS (
SELECT DISTINCT
k.json_key
FROM
pending_keys k
INNER JOIN tps.trans t ON
t.ic = k.json_key
)
-----------return unique keys that are not already in tps.trans-----------------------------------------------------------------------------------
, unmatched_keys AS (
SELECT
json_key
FROM
pending_keys
EXCEPT
SELECT
json_key
FROM
matched_keys
)
--------build log record-------------------+------------------------------------------------------------------------------------------------
, logged AS (
INSERT INTO
tps.trans_log (info)
SELECT
JSONB_BUILD_OBJECT('time_stamp',CURRENT_TIMESTAMP)
||JSONB_BUILD_OBJECT('srce',_srce)
--||JSONB_BUILD_OBJECT('path',_path)
||JSONB_BUILD_OBJECT('not_inserted',
(
SELECT
jsonb_agg(json_key)
FROM
matched_keys
)
)
||JSONB_BUILD_OBJECT('inserted',
(
SELECT
jsonb_agg(json_key)
FROM
unmatched_keys
)
)
RETURNING *
)
-----------insert pending rows that have key with no trans match-----------------------------------------------------------------------------------
--need to look into mapping the transactions prior to loading
, inserted AS (
INSERT INTO
tps.trans (srce, rec, ic, logid)
SELECT
pl.srce
,pl.rec
,pl.json_key
,logged.id
FROM
pending_list pl
INNER JOIN unmatched_keys u ON
u.json_key = pl.json_key
CROSS JOIN logged
ORDER BY
pl.id ASC
----this conflict is only if an exact duplicate rec json happens, which will be rejected
----therefore, records may not be inserted due to ay matches with certain json fields, or if the entire json is a duplicate, reason is not specified
RETURNING *
)
SELECT
id
,info
INTO
_log_id
,_log_info
FROM
logged;
--RAISE NOTICE 'import logged under id# %, info: %', _log_id, _log_info;
_message:=
(
$$
{
"status":"complete"
}
$$::jsonb
)||jsonb_build_object('details',_log_info);
RAISE NOTICE '%', _message;
END;
$f$
LANGUAGE plpgsql

View File

@ -0,0 +1,72 @@
CREATE OR REPLACE FUNCTION tps.srce_map_def_set(_defn jsonb) RETURNS jsonb
AS
$f$
DECLARE
_message jsonb;
_MESSAGE_TEXT text;
_PG_EXCEPTION_DETAIL text;
_PG_EXCEPTION_HINT text;
BEGIN
BEGIN
INSERT INTO
tps.map_rm (srce, target, regex, seq, hist)
SELECT
--data source
_defn->>'srce'
--map name
,_defn->>'name'
--map definition
,_defn
--map aggregation sequence
,(_defn->>'sequence')::INTEGER
--history definition
,jsonb_build_object(
'hist_defn',_defn
,'effective',jsonb_build_array(CURRENT_TIMESTAMP,null::timestamptz)
) || '[]'::jsonb
ON CONFLICT ON CONSTRAINT map_rm_pk DO UPDATE SET
srce = _defn->>'srce'
,target = _defn->>'name'
,regex = _defn
,seq = (_defn->>'sequence')::INTEGER
,hist =
--the new definition going to position -0-
jsonb_build_object(
'hist_defn',_defn
,'effective',jsonb_build_array(CURRENT_TIMESTAMP,null::timestamptz)
)
--the previous definition, set upper bound of effective range which was previously null
|| jsonb_set(
map_rm.hist
,'{0,effective,1}'::text[]
,to_jsonb(CURRENT_TIMESTAMP)
);
EXCEPTION WHEN OTHERS THEN
GET STACKED DIAGNOSTICS
_MESSAGE_TEXT = MESSAGE_TEXT,
_PG_EXCEPTION_DETAIL = PG_EXCEPTION_DETAIL,
_PG_EXCEPTION_HINT = PG_EXCEPTION_HINT;
_message:=
($$
{
"status":"fail",
"message":"error setting definition"
}
$$::jsonb)
||jsonb_build_object('message_text',_MESSAGE_TEXT)
||jsonb_build_object('pg_exception_detail',_PG_EXCEPTION_DETAIL);
return _message;
END;
_message:= jsonb_build_object('status','complete','message','definition has been set');
return _message;
END;
$f$
language plpgsql
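/* illustration (assumed shape, not output captured in this commit): after a
   second call, hist keeps the live definition at index 0 with an open upper
   bound and closes the effective range of the prior definition at index 1:
   [
     {"hist_defn": {...new defn...}, "effective": ["2018-05-25T16:06:29-04:00", null]},
     {"hist_defn": {...old defn...}, "effective": ["2018-05-01T00:00:00-04:00", "2018-05-25T16:06:29-04:00"]}
   ]
*/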

View File

@ -0,0 +1,64 @@
DROP FUNCTION IF EXISTS tps.map_rv_set;
CREATE OR REPLACE FUNCTION tps.map_rv_set(_defn jsonb) RETURNS jsonb
AS
$f$
DECLARE
_message jsonb;
_MESSAGE_TEXT text;
_PG_EXCEPTION_DETAIL text;
_PG_EXCEPTION_HINT text;
BEGIN
INSERT INTO
tps.map_rv (srce, target, retval, map, hist)
SELECT
r.source
,r.map
,r.ret_val
,r.mapped
,jsonb_build_object(
'hist_defn',mapped
,'effective',jsonb_build_array(CURRENT_TIMESTAMP,null::timestamptz)
) || '[]'::jsonb
FROM
JSONB_ARRAY_ELEMENTS(_defn) WITH ORDINALITY ae(r,s)
JOIN LATERAL jsonb_to_record(ae.r) r(source TEXT,map TEXT, ret_val jsonb, mapped jsonb) ON TRUE
ON CONFLICT ON CONSTRAINT map_rv_pk DO UPDATE
SET
map = excluded.map
,hist =
--the new definition going to position -0-
jsonb_build_object(
'hist_defn',excluded.map
,'effective',jsonb_build_array(CURRENT_TIMESTAMP,null::timestamptz)
)
--the previous definition, set upper bound of effective range which was previously null
|| jsonb_set(
map_rv.hist
,'{0,effective,1}'::text[]
,to_jsonb(CURRENT_TIMESTAMP)
);
-------return message--------------------------------------------------------------------------------------------------
_message:= jsonb_build_object('status','complete');
RETURN _message;
EXCEPTION WHEN OTHERS THEN
GET STACKED DIAGNOSTICS
_MESSAGE_TEXT = MESSAGE_TEXT,
_PG_EXCEPTION_DETAIL = PG_EXCEPTION_DETAIL,
_PG_EXCEPTION_HINT = PG_EXCEPTION_HINT;
_message:=
($$
{
"status":"fail",
"message":"error setting map value"
}
$$::jsonb)
||jsonb_build_object('message_text',_MESSAGE_TEXT)
||jsonb_build_object('pg_exception_detail',_PG_EXCEPTION_DETAIL);
RETURN _message;
END;
$f$
LANGUAGE plpgsql;

View File

@ -1,4 +1,26 @@
DROP FUNCTION tps.report_unmapped;
CREATE OR REPLACE FUNCTION tps.jsonb_concat(
state jsonb,
concat jsonb)
RETURNS jsonb AS
$BODY$
BEGIN
--RAISE notice 'state is %', state;
--RAISE notice 'concat is %', concat;
RETURN state || concat;
END;
$BODY$
LANGUAGE plpgsql VOLATILE
COST 100;
DROP AGGREGATE IF EXISTS tps.jsonb_concat_obj(jsonb);
CREATE AGGREGATE tps.jsonb_concat_obj(jsonb) (
SFUNC=tps.jsonb_concat,
STYPE=jsonb,
INITCOND='{}'
);
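--usage sketch for the aggregate just defined:
--SELECT tps.jsonb_concat_obj(v) FROM (VALUES ('{"a":1}'::jsonb), ('{"b":2}'::jsonb)) t(v);  --> {"a": 1, "b": 2}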
DROP FUNCTION IF EXISTS tps.report_unmapped;
CREATE FUNCTION tps.report_unmapped(_srce text) RETURNS TABLE
(
source text,

View File

@ -1,7 +1,27 @@
\timing
DROP FUNCTION IF EXISTS tps.report_unmapped_recs;
CREATE FUNCTION tps.report_unmapped_recs(_srce text) RETURNS TABLE
(
source text,
map text,
ret_val jsonb,
"count" bigint,
recs jsonb
)
LANGUAGE plpgsql
AS
$f$
BEGIN
/*
first get distinct target json values
then apply regex
*/
RETURN QUERY
WITH
--------------------apply regex operations to transactions-----------------------------------------------------------------------------------
--------------------apply regex operations to transactions---------------------------------------------------------------------------------
rx AS (
SELECT
@ -21,12 +41,14 @@ SELECT
COALESCE(mt.rn,rp.rn,1) result_number,
mt.mt rx_match,
rp.rp rx_replace,
--------------------------json key name assigned to return value-----------------------------------------------------------------------
CASE e.v->>'map'
WHEN 'y' THEN
e.v->>'field'
ELSE
null
END map_key,
--------------------------json value resulting from regular expression-----------------------------------------------------------------
CASE e.v->>'map'
WHEN 'y' THEN
CASE regex->>'function'
@ -43,12 +65,14 @@ SELECT
ELSE
NULL
END map_val,
--------------------------flag for if returned regex result is stored as a new part of the final json output---------------------------
CASE e.v->>'retain'
WHEN 'y' THEN
e.v->>'field'
ELSE
NULL
END retain_key,
--------------------------push regex result into json object---------------------------------------------------------------------------
CASE e.v->>'retain'
WHEN 'y' THEN
CASE regex->>'function'
@ -66,35 +90,45 @@ SELECT
NULL
END retain_val
FROM
--------------------------start with all regex maps------------------------------------------------------------------------------------
tps.map_rm m
LEFT JOIN LATERAL jsonb_array_elements(m.regex->'where') w(v) ON TRUE
--------------------------isolate matching basis to limit map to only look at certain json---------------------------------------------
JOIN LATERAL jsonb_array_elements(m.regex->'where') w(v) ON TRUE
--------------------------break out array of regular expressions in the map------------------------------------------------------------
JOIN LATERAL jsonb_array_elements(m.regex->'defn') WITH ORDINALITY e(v, rn) ON true
--------------------------join to main transaction table but only certain key/values are included--------------------------------------
INNER JOIN tps.trans t ON
t.srce = m.srce AND
t.rec @> w.v
LEFT JOIN LATERAL jsonb_array_elements(m.regex->'defn') WITH ORDINALITY e(v, rn) ON true
--------------------------each regex references a path to the target value, extract the target from the reference and do regex---------
LEFT JOIN LATERAL regexp_matches(t.rec #>> ((e.v ->> 'key')::text[]), e.v ->> 'regex'::text,COALESCE(e.v ->> 'flag','')) WITH ORDINALITY mt(mt, rn) ON
m.regex->>'function' = 'extract'
--------------------------same as above but for a replacement type function------------------------------------------------------------
LEFT JOIN LATERAL regexp_replace(t.rec #>> ((e.v ->> 'key')::text[]), e.v ->> 'regex'::text, e.v ->> 'replace'::text,e.v ->> 'flag') WITH ORDINALITY rp(rp, rn) ON
m.regex->>'function' = 'replace'
WHERE
--t.allj IS NULL
t.srce = 'DCARD'
t.srce = _srce AND
e.v @> '{"map":"y"}'::jsonb
--rec @> '{"Transaction":"ACH Credits","Transaction":"ACH Debits"}'
--rec @> '{"Description":"CHECK 93013270 086129935"}'::jsonb
/*
ORDER BY
t.id DESC,
m.target,
e.rn,
COALESCE(mt.rn,rp.rn,1)
*/
)
--SELECT count(*) FROM rx LIMIT 100
--SELECT * FROM rx LIMIT 100
, agg_to_target_items AS (
SELECT
srce
,id
,rec
,target
,seq
,map_intention
@ -135,6 +169,7 @@ FROM
GROUP BY
srce
,id
,rec
,target
,seq
,map_intention
@ -153,6 +188,7 @@ GROUP BY
SELECT
srce
,id
,rec
,target
,seq
,map_intention
@ -163,61 +199,65 @@ FROM
GROUP BY
srce
,id
,rec
,target
,seq
,map_intention
ORDER BY
id
)
--SELECT * FROM agg_to_target
, agg_to_ret AS (
SELECT
srce
,target
,seq
,map_intention
,map_val
,retain_val
,count(*) "count"
,jsonb_agg(rec) rec
FROM
agg_to_target
GROUP BY
srce
,target
,seq
,map_intention
,map_val
,retain_val
)
, link_map AS (
SELECT
a.srce
,a.id
,a.target
,a.seq
,a.map_intention
,a.map_val
,a.retain_val retain_value
,v.map
,a."count"
,a.rec
,a.retain_val
,v.map mapped_val
FROM
agg_to_target a
agg_to_ret a
LEFT OUTER JOIN tps.map_rv v ON
v.srce = a.srce AND
v.target = a.target AND
v.retval = a.map_val
)
--SELECT * FROM link_map
, agg_to_id AS (
SELECT
srce
,id
,tps.jsonb_concat_obj(COALESCE(retain_value,'{}'::jsonb) ORDER BY seq DESC) retain_val
,tps.jsonb_concat_obj(COALESCE(map,'{}'::jsonb)) map
l.srce
,l.target
,l.map_val
,l."count"
,l.rec
FROM
link_map
GROUP BY
srce
,id
)
--SELECT agg_to_id.srce, agg_to_id.id, jsonb_pretty(agg_to_id.retain_val) , jsonb_pretty(agg_to_id.map) FROM agg_to_id ORDER BY id desc LIMIT 100
UPDATE
tps.trans t
SET
map = o.map,
parse = o.retain_val,
allj = t.rec||o.map||o.retain_val
FROM
agg_to_id o
link_map l
WHERE
o.id = t.id;
l.mapped_val IS NULL
ORDER BY
l.srce
,l.target
,l."count" desc;
END;
$f$

View File

@ -0,0 +1,31 @@
CREATE OR REPLACE FUNCTION tps.build_srce_view_sql(_srce text, _schema text) RETURNS TEXT
AS
$f$
DECLARE
--_schema text;
_path text[];
--_srce text;
_sql text;
BEGIN
--_schema:= 'default';
_path:= ARRAY['schemas',_schema]::text[];
--_srce:= 'dcard';
SELECT
'CREATE VIEW tpsv.'||_srce||'_'||_path[2]||' AS SELECT '||string_agg('(allj#>>'''||r.PATH::text||''')::'||r.type||' AS "'||r.column_name||'"',', ')||' FROM tps.trans WHERE srce = '''||_srce||''''
INTO
_sql
FROM
tps.srce
JOIN LATERAL jsonb_array_elements(defn#>_path) ae(v) ON TRUE
JOIN LATERAL jsonb_to_record (ae.v) AS r(PATH text[], "type" text, column_name text) ON TRUE
WHERE
srce = _srce
GROUP BY
srce.srce;
RAISE NOTICE '%',_sql;
RETURN _sql;
END
$f$
LANGUAGE plpgsql;
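--usage sketch (source/schema names taken from the commented defaults above);
--the returned DDL can then be executed, e.g. via EXECUTE in plpgsql:
--SELECT tps.build_srce_view_sql('dcard', 'default');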

View File

@ -0,0 +1,67 @@
DROP FUNCTION IF EXISTS tps.srce_set(jsonb);
CREATE FUNCTION tps.srce_set(_defn jsonb) RETURNS jsonb
AS
$f$
DECLARE
_message jsonb;
_MESSAGE_TEXT text;
_PG_EXCEPTION_DETAIL text;
_PG_EXCEPTION_HINT text;
BEGIN
INSERT INTO
tps.srce (srce, defn, hist)
SELECT
--extract name from definition
_defn->>'name'
--the definition itself
,_defn
--seed the history array: the definition plus an open-ended effective range
,jsonb_build_object(
'hist_defn',_defn
,'effective',jsonb_build_array(CURRENT_TIMESTAMP,null::timestamptz)
) || '[]'::jsonb
ON CONFLICT ON CONSTRAINT srce_pkey DO UPDATE
SET
defn = _defn
,hist =
--the new definition going to position -0-
jsonb_build_object(
'hist_defn',_defn
,'effective',jsonb_build_array(CURRENT_TIMESTAMP,null::timestamptz)
)
--the previous definition, set upper bound of effective range which was previously null
|| jsonb_set(
srce.hist
,'{0,effective,1}'::text[]
,to_jsonb(CURRENT_TIMESTAMP)
);
_message:=
(
$$
{
"status":"complete",
"message":"source set"
}
$$::jsonb
);
RETURN _message;
EXCEPTION WHEN OTHERS THEN
GET STACKED DIAGNOSTICS
_MESSAGE_TEXT = MESSAGE_TEXT,
_PG_EXCEPTION_DETAIL = PG_EXCEPTION_DETAIL,
_PG_EXCEPTION_HINT = PG_EXCEPTION_HINT;
_message:=
($$
{
"status":"fail",
"message":"error importing data"
}
$$::jsonb)
||jsonb_build_object('message_text',_MESSAGE_TEXT)
||jsonb_build_object('pg_exception_detail',_PG_EXCEPTION_DETAIL);
RETURN _message;
END;
$f$
LANGUAGE plpgsql

package.json Normal file (19 lines)
View File

@ -0,0 +1,19 @@
{
"name": "tps_etl",
"version": "1.0.0",
"description": "third party source data transformation",
"main": "index.js",
"scripts": {
"test": "uh"
},
"repository": {
"type": "git",
"url": "git+https://github.com/fleetside72/tps_etl.git"
},
"author": "",
"license": "ISC",
"bugs": {
"url": "https://github.com/fleetside72/tps_etl/issues"
},
"homepage": "https://github.com/fleetside72/tps_etl#readme"
}

View File

@ -0,0 +1,44 @@
create temp table x as (
select
t.rec
from
generate_series(1,1000000,1) s
inner join tps.trans t on
srce = 'DMAPI'
) with data;
create temp table x2 as (
select
(
rec #>>(
'{doc,origin_addresses,0}'::text[]
)
)::text as origin_address,
(
rec #>>(
'{doc,destination_addresses,0}'::text[]
)
)::text as destination_address,
(
rec #>>(
'{doc,status}'::text[]
)
)::text as status,
(
rec #>>(
'{doc,rows,0,elements,0,distance,value}'::text[]
)
)::numeric as distance,
(
rec #>>(
'{doc,rows,0,elements,0,duration,value}'::text[]
)
)::numeric as duration
from
x
) with data;
drop table x;
drop table x2;

View File

@ -0,0 +1,37 @@
create temp table x as (
select
(rec #>>('{batch}'::text[]))::text as batch
,(rec #>>('{week}'::text[]))::text as week
,(rec #>>('{period_end}'::text[]))::text as period_end
,(rec #>>('{pay_date}'::text[]))::text as pay_date
,(rec #>>('{adp_comp}'::text[]))::text as adp_comp
,(rec #>>('{hours_reg}'::text[]))::numeric as hours_reg
,(rec #>>('{hours_ot}'::text[]))::numeric as hours_ot
,(rec #>>('{adp_dep_home}'::text[]))::text as adp_dep_home
,(rec #>>('{adp_dep}'::text[]))::text as adp_dep
,(rec #>>('{gl_dep}'::text[]))::text as gl_dep
,(rec #>>('{checkn}'::text[]))::text as checkn
,(rec #>>('{employee}'::text[]))::text as employee
,(rec #>>('{title}'::text[]))::text as title
,(rec #>>('{prim_offset}'::text[]))::text as prim_offset
,(rec #>>('{cms_tb}'::text[]))::text as cms_tb
,(rec #>>('{cms_acct}'::text[]))::text as cms_acct
,(rec #>>('{gl_descr}'::text[]))::text as gl_descr
,(rec #>>('{amount}'::text[]))::numeric as amount
FROM
tps.trans
WHERE
srce = 'ADPRP'
) with data
-- SELECT 1603392 Query returned successfully in 13 secs 604 msec.
/*
build the table --> 13 sec
run an aggregate on the table --> 1.5 sec
-versus-
run a basic aggregate on the json data live --> 7 sec
-versus-
run a basic aggregate on the json data with jsonb_populate_record --> 8 sec
*/
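/* sketch of the "live json aggregate" variant being compared (keys taken from
   the ADPRP extraction above; not part of this commit):
SELECT rec#>>'{gl_dep}' gl_dep, SUM((rec#>>'{amount}')::numeric) amount
FROM tps.trans
WHERE srce = 'ADPRP'
GROUP BY rec#>>'{gl_dep}';
*/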

View File

@ -1,5 +1,5 @@
Generic Data Transformation Tool
----------------------------------------------
=======================================================
The goal is to:
1. house external data and prevent duplication on insert
@ -33,24 +33,96 @@ Major Interactions
### Interaction Details
* Source Definitions (Maint/Inquire)
* _Source Definitions (Maint/Inquire)_
* display a list of existing sources with display details/edit options
* create new option
* underlying function is `tps.srce_set(_name text, _defn jsonb)`
* Regex Instructions (Maint/Inquire)
* the current definition of a source includes data based on bad presumptions:
* how to load from a csv file using `COPY`
* setup a Postgres type to reflect the associated columns (if applicable)
* _Regex Instructions (Maint/Inquire)_
* display a list of existing instruction sets with display details/edit options
* create new option
* underlying function is `tps.srce_map_def_set(_srce text, _map text, _defn jsonb, _seq int)` which takes a source "code" and a json map definition
* Cross Reference List (Maint/Inquire)
* _Cross Reference List (Maint/Inquire)_
* first step is to populate a list of values returned from the instructions (choose all or unmapped) `tps.report_unmapped(_srce text)`
* the list of rows allows additional named column(s) to be added, which are used to assign values any time the result occurs
* function to set the values of the cross reference `tps.srce_map_val_set_multi(_maps jsonb)`
* Run Import
* _Run Import_
* underlying function is `tps.srce_import(_path text, _srce text)`
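A minimal end-to-end sketch of the calls above, using the signatures as listed (definitions abbreviated; the file path is a placeholder):
```
SELECT tps.srce_set('dcard', $${"name":"dcard"}$$::jsonb);                    -- 1. define the source
SELECT tps.srce_map_def_set('dcard', 'First 20', $${"defn":[]}$$::jsonb, 1);  -- 2. define regex instructions
SELECT * FROM tps.report_unmapped('dcard');                                   -- 3. list unmapped return values
SELECT tps.srce_map_val_set_multi($$[]$$::jsonb);                             -- 4. set cross-reference values
SELECT tps.srce_import('/path/to/file.csv', 'dcard');                         -- 5. run the import
```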
source definition
----------------------------------------------------------------------
* **load data**
* the browser's role is to extract the contents of a file and send it as a POST body to the backend for processing under the target function `based on srce definition`
* the backend builds a json array of all the rows to be added and sends as an argument to a database insert function
* build constraint key `based on srce definition`
* handle violations
* increment global key list (this may not be possible depending on whether a json with variable-length arrays can be traversed)
* build an import log
* run maps (as opposed to relying on trigger)
* **read data**
* the `schema` key contains either a text element or a text array in curly braces
* forcing everything to extract via `#>{}` would be cleaner but may be more expensive than `jsonb_populate_record`
* it took 5.5 seconds to parse 1,000,000 rows of an identical google distance matrix json to a 5-column temp table
* top level key to table based on `jsonb_populate_record` extracting from `tps.type` developed from `srce.defn->schema`
* custom function parsing contents based on #> operator and extracting from `srce.defn->schema`
* view that `uses the source definition` to extrapolate a table?
* a materialized table is built `based on the source definition` and any additional regex?
* add regex = alter table add column with historic updates?
* no primary key?
* every document must work out to one row
```
{
"name":"dcard",
"source":"client_file",
"loading_function":"csv"
"constraint":[
"{Trans. Date}",
"{Post Date}"
],
"schemas":{
"default":[
{
"path":"{doc,origin_addresses,0}",
"type":"text",
"column_name":"origin_address"
},
{
"path":"{doc,destination_addresses,0}",
"type":"text",
"column_name":"origin_address"
},
{
"path":"{doc,status}",
"type":"text",
"column_name":"status"
},
{
"path":"{doc,rows,0,elements,0,distance,value}",
"type":"numeric",
"column_name":"distance"
},
{
"path":"{doc,rows,0,elements,0,duration,value}",
"type":"numeric",
"column_name":"duration"
}
],
"version2":[]
}
}
```
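Given a definition like the one above, the `#>{}` read path sketched earlier could materialize as a generated view (a sketch assuming the `tpsv` schema targeted by `tps.build_srce_view_sql`):
```
CREATE VIEW tpsv.dcard_default AS
SELECT
    (allj#>>'{doc,origin_addresses,0}')::text                  AS "origin_address",
    (allj#>>'{doc,rows,0,elements,0,distance,value}')::numeric AS "distance"
FROM tps.trans
WHERE srce = 'dcard';
```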

View File

@ -1,22 +0,0 @@
SELECT
id
,rec->>'id'
,r.*
,CASE "Schedule#"
WHEN '02IN Raw Material' THEN 13097563.42
WHEN '03IN Finished Goods' THEN 35790696.52
ELSE 0
END + SUM("Sales"+"Credits & Adjustments"-"Gross Collections") OVER (PARTITION BY "Schedule#" ORDER BY "Schedule#" ASC, "PostDate" ASC, rec->>'id' ASC) running_bal
,(LEAST("CollateralBalance" - "Ineligible Amount","MaxEligible")*("AdvanceRate"/100))::NUMERIC(20,2) qualified_collateral
,(("CollateralBalance" - "Ineligible Amount")*("AdvanceRate"/100))::NUMERIC(20,2) qualified_collateral_nl
FROM
tps.trans
LEFT JOIN LATERAL jsonb_populate_record(null::tps.pncl, rec) r ON TRUE
WHERE
srce = 'PNCL'
--AND rec @> '{"Schedule#":"03IN Finished Goods"}'
ORDER BY
"Schedule#" asc
,r."PostDate" asc
,rec->>'id' asc

View File

@ -1,17 +0,0 @@
\timing
SELECT
r."Trans. Date",
r."Post Date",
r."Description",
r."Amount",
r."Category",
rec->'id' id,
SUM(r."Amount") OVER (PARTITION BY srce ORDER BY r."Post Date" asc , rec->>'id' asc, r."Description") + 1061.1 + 22.40 balance
FROM
tps.trans
LEFT JOIN LATERAL jsonb_populate_record(null::tps.dcard, rec) r ON TRUE
WHERE
srce = 'DCARD'
ORDER BY
r."Post Date" asc
,rec->>'id' asc

reports/key_list.sql Normal file (34 lines)
View File

@ -0,0 +1,34 @@
\timing
/*--------------------------------------------------
maintain statement level triggers to update a master log of keys
* table based listing
* composite type maintenance
potential updates sources/events
* tps.trans insert
* tps.trans re-map
--------------------------------------------------*/
WITH ok AS (
SELECT
srce,
ok.k,
jsonb_typeof(allj->ok.k) typeof,
COUNT(*)
FROM
tps.trans
JOIN LATERAL jsonb_object_keys(allj) ok(k) ON TRUE
GROUP BY
srce,
ok.k,
jsonb_typeof(allj->ok.k)
ORDER BY
srce
)
SELECT
srce
,k
,typeof
FROM
ok

View File

@ -1,14 +0,0 @@
\timing
SELECT
r.*,
SUM(r."Advances"+r."Adjustments"-r."Payments") OVER (PARTITION BY "Loan#" ORDER BY r."Post Date" asc ,rec->>'id' asc, r."Reference #" asc)
FROM
tps.trans
LEFT JOIN LATERAL jsonb_populate_record(null::tps.pnco, rec) r ON TRUE
WHERE
rec @> '{"Loan#":"606780281"}'
ORDER BY
r."Loan#"
,r."Post Date" ASC
,rec->>'id' ASC
,r."Reference #" ASC

View File

@ -1,19 +0,0 @@
WITH
ext AS (
SELECT
srce
,defn->'unique_constraint'->>'fields'
,ARRAY(SELECT ae.e::text[] FROM jsonb_array_elements_text(defn->'unique_constraint'->'fields') ae(e)) txa
FROM
tps.srce
)
SELECT
t.srce
,jsonb_pretty(t.rec)
,jsonb_pretty(public.jsonb_extract(rec,txa))
FROM
tps.trans t
INNER JOIN ext ON
t.srce = ext.srce

View File

@ -1,18 +0,0 @@
\timing
SELECT
t.srce
,(ae.e::text[])[1] unq_constr
,MIN(rec #>> ae.e::text[]) min_text
,COUNT(*) cnt
,MAX(rec #>> ae.e::text[]) max_text
FROM
tps.trans t
INNER JOIN tps.srce s ON
s.srce = t.srce
LEFT JOIN LATERAL JSONB_ARRAY_ELEMENTS_TEXT(defn->'unique_constraint'->'fields') WITH ORDINALITY ae(e, rn) ON TRUE
GROUP BY
t.srce
,(ae.e::text[])[1]
ORDER BY
t.srce
,(ae.e::text[])[1]

View File

@ -0,0 +1,275 @@
[
{
"Trans. Date": "1/2/2018",
"Post Date": "1/2/2018",
"Description": "GOOGLE *YOUTUBE VIDEOS G.CO/HELPPAY#CAP0H07TXV",
"Amount": 4.26,
"Category": "Services"
},
{
"Trans. Date": "1/2/2018",
"Post Date": "1/2/2018",
"Description": "MICROSOFT *ONEDRIVE 800-642-7676 WA",
"Amount": 4.26,
"Category": "Services"
},
{
"Trans. Date": "1/3/2018",
"Post Date": "1/3/2018",
"Description": "CLE CLINIC PT PMTS 216-445-6249 OHAK2C57F2F0B3",
"Amount": 200,
"Category": "Medical Services"
},
{
"Trans. Date": "1/4/2018",
"Post Date": "1/4/2018",
"Description": "AT&T *PAYMENT 800-288-2020 TX",
"Amount": 57.14,
"Category": "Services"
},
{
"Trans. Date": "1/4/2018",
"Post Date": "1/7/2018",
"Description": "WWW.KOHLS.COM #0873 MIDDLETOWN OH",
"Amount": -7.9,
"Category": "Payments and Credits"
},
{
"Trans. Date": "1/5/2018",
"Post Date": "1/7/2018",
"Description": "PIZZA HUT 007946 STOW OH",
"Amount": 9.24,
"Category": "Restaurants"
},
{
"Trans. Date": "1/5/2018",
"Post Date": "1/7/2018",
"Description": "SUBWAY 00044289255 STOW OH",
"Amount": 10.25,
"Category": "Restaurants"
},
{
"Trans. Date": "1/6/2018",
"Post Date": "1/7/2018",
"Description": "ACME NO. 17 STOW OH",
"Amount": 103.98,
"Category": "Supermarkets"
},
{
"Trans. Date": "1/6/2018",
"Post Date": "1/7/2018",
"Description": "DISCOUNT DRUG MART 32 STOW OH",
"Amount": 1.69,
"Category": "Merchandise"
},
{
"Trans. Date": "1/6/2018",
"Post Date": "1/7/2018",
"Description": "DISCOUNT DRUG MART 32 STOW OH",
"Amount": 2.19,
"Category": "Merchandise"
},
{
"Trans. Date": "1/9/2018",
"Post Date": "1/9/2018",
"Description": "CIRCLE K 05416 STOW OH00947R",
"Amount": 3.94,
"Category": "Gasoline"
},
{
"Trans. Date": "1/9/2018",
"Post Date": "1/9/2018",
"Description": "CIRCLE K 05416 STOW OH00915R",
"Amount": 52.99,
"Category": "Gasoline"
},
{
"Trans. Date": "1/13/2018",
"Post Date": "1/13/2018",
"Description": "AUTOZONE #0722 STOW OH",
"Amount": 85.36,
"Category": "Automotive"
},
{
"Trans. Date": "1/13/2018",
"Post Date": "1/13/2018",
"Description": "DISCOUNT DRUG MART 32 STOW OH",
"Amount": 26.68,
"Category": "Merchandise"
},
{
"Trans. Date": "1/13/2018",
"Post Date": "1/13/2018",
"Description": "EL CAMPESINO STOW OH",
"Amount": 6.5,
"Category": "Restaurants"
},
{
"Trans. Date": "1/13/2018",
"Post Date": "1/13/2018",
"Description": "TARGET STOW OH",
"Amount": 197.9,
"Category": "Merchandise"
},
{
"Trans. Date": "1/14/2018",
"Post Date": "1/14/2018",
"Description": "DISCOUNT DRUG MART 32 STOW OH",
"Amount": 13.48,
"Category": "Merchandise"
},
{
"Trans. Date": "1/15/2018",
"Post Date": "1/15/2018",
"Description": "TARGET.COM * 800-591-3869 MN",
"Amount": 22.41,
"Category": "Merchandise"
},
{
"Trans. Date": "1/16/2018",
"Post Date": "1/16/2018",
"Description": "BUFFALO WILD WINGS KENT KENT OH",
"Amount": 63.22,
"Category": "Restaurants"
},
{
"Trans. Date": "1/16/2018",
"Post Date": "1/16/2018",
"Description": "PARTA - KCG KENT OH",
"Amount": 4,
"Category": "Government Services"
},
{
"Trans. Date": "1/16/2018",
"Post Date": "1/16/2018",
"Description": "REMEMBERNHU 402-935-7733 IA",
"Amount": 60,
"Category": "Services"
},
{
"Trans. Date": "1/16/2018",
"Post Date": "1/16/2018",
"Description": "TARGET.COM * 800-591-3869 MN",
"Amount": 44.81,
"Category": "Merchandise"
},
{
"Trans. Date": "1/16/2018",
"Post Date": "1/16/2018",
"Description": "TREE CITY COFFEE & PASTR KENT OH",
"Amount": 17.75,
"Category": "Restaurants"
},
{
"Trans. Date": "1/17/2018",
"Post Date": "1/17/2018",
"Description": "BESTBUYCOM805526794885 888-BESTBUY MN",
"Amount": 343.72,
"Category": "Merchandise"
},
{
"Trans. Date": "1/19/2018",
"Post Date": "1/19/2018",
"Description": "DISCOUNT DRUG MART 32 STOW OH",
"Amount": 5.98,
"Category": "Merchandise"
},
{
"Trans. Date": "1/19/2018",
"Post Date": "1/19/2018",
"Description": "U-HAUL OF KENT-STOW KENT OH",
"Amount": 15.88,
"Category": "Travel/ Entertainment"
},
{
"Trans. Date": "1/19/2018",
"Post Date": "1/19/2018",
"Description": "WALMART GROCERY 800-966-6546 AR",
"Amount": 5.99,
"Category": "Supermarkets"
},
{
"Trans. Date": "1/19/2018",
"Post Date": "1/19/2018",
"Description": "WALMART GROCERY 800-966-6546 AR",
"Amount": 17.16,
"Category": "Supermarkets"
},
{
"Trans. Date": "1/19/2018",
"Post Date": "1/19/2018",
"Description": "WALMART GROCERY 800-966-6546 AR",
"Amount": 500.97,
"Category": "Supermarkets"
},
{
"Trans. Date": "1/20/2018",
"Post Date": "1/20/2018",
"Description": "GOOGLE *GOOGLE PLAY G.CO/HELPPAY#CAP0HFFS7W",
"Amount": 2.12,
"Category": "Services"
},
{
"Trans. Date": "1/20/2018",
"Post Date": "1/20/2018",
"Description": "LOWE'S OF STOW, OH. STOW OH",
"Amount": 256.48,
"Category": "Home Improvement"
},
{
"Trans. Date": "1/23/2018",
"Post Date": "1/23/2018",
"Description": "CASHBACK BONUS REDEMPTION PYMT/STMT CRDT",
"Amount": -32.2,
"Category": "Awards and Rebate Credits"
},
{
"Trans. Date": "1/23/2018",
"Post Date": "1/23/2018",
"Description": "INTERNET PAYMENT - THANK YOU",
"Amount": -2394.51,
"Category": "Payments and Credits"
},
{
"Trans. Date": "1/27/2018",
"Post Date": "1/27/2018",
"Description": "GIANT-EAGLE #4096 STOW OH",
"Amount": 67.81,
"Category": "Supermarkets"
},
{
"Trans. Date": "1/27/2018",
"Post Date": "1/27/2018",
"Description": "OFFICEMAX/OFFICE DEPOT63 STOW OH",
"Amount": 21.06,
"Category": "Merchandise"
},
{
"Trans. Date": "1/27/2018",
"Post Date": "1/27/2018",
"Description": "TARGET STOW OH",
"Amount": 71,
"Category": "Merchandise"
},
{
"Trans. Date": "1/29/2018",
"Post Date": "1/29/2018",
"Description": "NETFLIX.COM NETFLIX.COM CA19899514437",
"Amount": 14.93,
"Category": "Services"
},
{
"Trans. Date": "1/30/2018",
"Post Date": "1/30/2018",
"Description": "SQ *TWISTED MELTZ KENT OH0002305843011416898511",
"Amount": 16.87,
"Category": "Restaurants"
},
{
"Trans. Date": "1/30/2018",
"Post Date": "1/30/2018",
"Description": "TARGET STOW OH",
"Amount": 49.37,
"Category": "Merchandise"
}
]

View File

@ -0,0 +1,20 @@
{
"srce": "dcard",
"sequence": 1,
"defn": [
{
"key": "{Description}",
"map": "y",
"flag": "",
"field": "f20",
"regex": ".{1,20}",
"retain": "y"
}
],
"name": "First 20",
"where": [
{}
],
"function": "extract",
"description": "pull first 20 characters from description for mapping"
}

View File

@ -98,8 +98,7 @@ FROM
],
"name": "First 20",
"where": [
{"Category":"Restaurantes"},
{"Category":"Services"}
{}
],
"function": "extract",
"description": "pull first 20 characters from description for mapping"

View File

@ -0,0 +1,76 @@
{
"name": "dcard",
"source": "client_file",
"loading_function": "csv",
"constraint": [
"{Trans. Date}",
"{Post Date}",
"{Description}"
],
"schemas": {
"default": [
{
"path": "{Trans. Date}",
"type": "date",
"column_name": "Trans. Date"
},
{
"path": "{Post Date}",
"type": "date",
"column_name": "Post Date"
},
{
"path": "{Description}",
"type": "text",
"column_name": "Description"
},
{
"path": "{Amount}",
"type": "numeric",
"column_name": "Amount"
},
{
"path": "{Category}",
"type": "text",
"column_name": "Category"
}
],
"mapped": [
{
"path": "{Trans. Date}",
"type": "date",
"column_name": "Trans. Date"
},
{
"path": "{Post Date}",
"type": "date",
"column_name": "Post Date"
},
{
"path": "{Description}",
"type": "text",
"column_name": "Description"
},
{
"path": "{Amount}",
"type": "numeric",
"column_name": "Amount"
},
{
"path": "{Category}",
"type": "text",
"column_name": "Category"
},
{
"path": "{party}",
"type": "text",
"column_name": "Party"
},
{
"path": "{reason}",
"type": "text",
"column_name": "Reason"
}
]
}
}

View File

@ -0,0 +1,4 @@
SELECT * FROM TPS.SRCE_SET(
$$
{"name":"dcard","source":"client_file","loading_function":"csv","constraint":["{Trans. Date}","{Post Date}","{Description}"],"schemas":{"default":[{"path":"{Trans. Date}","type":"date","column_name":"Trans. Date"},{"path":"{Post Date}","type":"date","column_name":"Post Date"},{"path":"{Description}","type":"text","column_name":"Description"},{"path":"{Amount}","type":"numeric","column_name":"Amount"},{"path":"{Category}","type":"text","column_name":"Category"}],"mapped":[{"path":"{Trans. Date}","type":"date","column_name":"Trans. Date"},{"path":"{Post Date}","type":"date","column_name":"Post Date"},{"path":"{Description}","type":"text","column_name":"Description"},{"path":"{Amount}","type":"numeric","column_name":"Amount"},{"path":"{Category}","type":"text","column_name":"Category"},{"path":"{party}","type":"text","column_name":"Party"},{"path":"{reason}","type":"text","column_name":"Reason"}]}}
$$::JSONB)

View File

@ -0,0 +1,27 @@
SELECT
*
FROM
tps.srce_map_def_set(
$$
{
"srce":"dcard",
"sequence":1,
"defn": [
{
"key": "{Description}",
"map": "y",
"flag": "",
"field": "f20",
"regex": ".{1,20}",
"retain": "y"
}
],
"name": "First 20",
"where": [
{}
],
"function": "extract",
"description": "pull first 20 characters from description for mapping"
}
$$
)

View File

@ -0,0 +1,6 @@
SELECT
*
FROM
tps.map_rv_set(
$$[{"source":"dcard","map":"First 20","ret_val":{"f20": "DISCOUNT DRUG MART 3"},"mapped":{"party":"Discount Drug Mart","reason":"groceries"}},{"source":"dcard","map":"First 20","ret_val":{"f20": "TARGET STOW OH"},"mapped":{"party":"Target","reason":"groceries"}},{"source":"dcard","map":"First 20","ret_val":{"f20": "WALMART GROCERY 800-"},"mapped":{"party":"Walmart","reason":"groceries"}},{"source":"dcard","map":"First 20","ret_val":{"f20": "CIRCLE K 05416 STOW "},"mapped":{"party":"Circle K","reason":"gasoline"}},{"source":"dcard","map":"First 20","ret_val":{"f20": "TARGET.COM * 800-591"},"mapped":{"party":"Target","reason":"home supplies"}},{"source":"dcard","map":"First 20","ret_val":{"f20": "ACME NO. 17 STOW OH"},"mapped":{"party":"Acme","reason":"groceries"}},{"source":"dcard","map":"First 20","ret_val":{"f20": "AT&T *PAYMENT 800-28"},"mapped":{"party":"AT&T","reason":"internet"}},{"source":"dcard","map":"First 20","ret_val":{"f20": "AUTOZONE #0722 STOW "},"mapped":{"party":"Autozone","reason":"auto maint"}},{"source":"dcard","map":"First 20","ret_val":{"f20": "BESTBUYCOM8055267948"},"mapped":{"party":"BestBuy","reason":"home supplies"}},{"source":"dcard","map":"First 20","ret_val":{"f20": "BUFFALO WILD WINGS K"},"mapped":{"party":"Buffalo Wild Wings","reason":"restaurante"}},{"source":"dcard","map":"First 20","ret_val":{"f20": "CASHBACK BONUS REDEM"},"mapped":{"party":"Discover Card","reason":"financing"}},{"source":"dcard","map":"First 20","ret_val":{"f20": "CLE CLINIC PT PMTS 2"},"mapped":{"party":"Cleveland Clinic","reason":"medical"}}]$$::jsonb
)

File diff suppressed because one or more lines are too long

View File

@ -0,0 +1,134 @@
[
{
"source": "dcard",
"map": "First 20",
"ret_val": {
"f20": "DISCOUNT DRUG MART 3"
},
"mapped": {
"party": "Discount Drug Mart",
"reason": "groceries"
}
},
{
"source": "dcard",
"map": "First 20",
"ret_val": {
"f20": "TARGET STOW OH"
},
"mapped": {
"party": "Target",
"reason": "groceries"
}
},
{
"source": "dcard",
"map": "First 20",
"ret_val": {
"f20": "WALMART GROCERY 800-"
},
"mapped": {
"party": "Walmart",
"reason": "groceries"
}
},
{
"source": "dcard",
"map": "First 20",
"ret_val": {
"f20": "CIRCLE K 05416 STOW "
},
"mapped": {
"party": "Circle K",
"reason": "gasoline"
}
},
{
"source": "dcard",
"map": "First 20",
"ret_val": {
"f20": "TARGET.COM * 800-591"
},
"mapped": {
"party": "Target",
"reason": "home supplies"
}
},
{
"source": "dcard",
"map": "First 20",
"ret_val": {
"f20": "ACME NO. 17 STOW OH"
},
"mapped": {
"party": "Acme",
"reason": "groceries"
}
},
{
"source": "dcard",
"map": "First 20",
"ret_val": {
"f20": "AT&T *PAYMENT 800-28"
},
"mapped": {
"party": "AT&T",
"reason": "internet"
}
},
{
"source": "dcard",
"map": "First 20",
"ret_val": {
"f20": "AUTOZONE #0722 STOW "
},
"mapped": {
"party": "Autozone",
"reason": "auto maint"
}
},
{
"source": "dcard",
"map": "First 20",
"ret_val": {
"f20": "BESTBUYCOM8055267948"
},
"mapped": {
"party": "BestBuy",
"reason": "home supplies"
}
},
{
"source": "dcard",
"map": "First 20",
"ret_val": {
"f20": "BUFFALO WILD WINGS K"
},
"mapped": {
"party": "Buffalo Wild Wings",
"reason": "restaurante"
}
},
{
"source": "dcard",
"map": "First 20",
"ret_val": {
"f20": "CASHBACK BONUS REDEM"
},
"mapped": {
"party": "Discover Card",
"reason": "financing"
}
},
{
"source": "dcard",
"map": "First 20",
"ret_val": {
"f20": "CLE CLINIC PT PMTS 2"
},
"mapped": {
"party": "Cleveland Clinic",
"reason": "medical"
}
}
]

View File

@ -0,0 +1,31 @@
[
{
"id": 1,
"doc": {
"rows": [
{
"elements": [
{
"status": "OK",
"distance": {
"text": "225 mi",
"value": 361940
},
"duration": {
"text": "3 hours 50 mins",
"value": 13812
}
}
]
}
],
"status": "OK",
"origin_addresses": [
"Washington, DC, USA"
],
"destination_addresses": [
"New York, NY, USA"
]
}
}
]

View File

@ -0,0 +1,31 @@
{
"name": "DMAPI",
"type": "csv",
"schemas": {
"default": [
{
"path": "{doc,origin_addresses,0}",
"type": "text",
"column_name": "origin_address"
},
{
"path": "{doc,destination_addresses,0}",
"type": "text",
"column_name": "destination_address"
},
{
"path": "{doc,rows,0,elements,0,distance,value}",
"type": "numeric",
"column_name": "distince"
},
{
"path": "{doc,rows,0,elements,0,duration,value}",
"type": "numeric",
"column_name": "duration"
}
],
"constraint": [
"{doc}"
]
}
}
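
Each entry in the default schema pairs a jsonb path with a target type and a column name. As a minimal sketch of the extraction this definition implies (the #>> operator and the casts are standard Postgres; storing the raw document in a rec column of tps.trans is an assumption borrowed from sample_pnc/dump.cmd below):

-- hypothetical read-back of the DMAPI paths defined above
SELECT
    rec #>> '{doc,origin_addresses,0}'                           AS origin_address,
    rec #>> '{doc,destination_addresses,0}'                      AS destination_address,
    (rec #>> '{doc,rows,0,elements,0,distance,value}')::numeric  AS distance,
    (rec #>> '{doc,rows,0,elements,0,duration,value}')::numeric  AS duration
FROM
    tps.trans
WHERE
    srce = 'DMAPI';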

View File

@ -0,0 +1,37 @@
SELECT
*
FROM
tps.srce_import(
'DMAPI'
,$$
[{
"id": 1,
"doc": {
"rows": [
{
"elements": [
{
"status": "OK",
"distance": {
"text": "225 mi",
"value": 361940
},
"duration": {
"text": "3 hours 50 mins",
"value": 13812
}
}
]
}
],
"status": "OK",
"origin_addresses": [
"Washington, DC, USA"
],
"destination_addresses": [
"New York, NY, USA"
]
}
}]
$$::JSONB
)
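
If the call succeeds, the document should be retrievable by source; a quick check, as a sketch (tps.trans with srce and rec columns appears in sample_pnc/dump.cmd below):

SELECT jsonb_pretty(rec) FROM tps.trans WHERE srce = 'DMAPI';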

View File

@ -0,0 +1,39 @@
SELECT
jsonb_pretty(r.x)
FROM
tps.srce_set(
$$
{
"name": "DMAPI",
"type": "csv",
"schemas": {
"default": [
{
"path": "{doc,origin_addresses,0}",
"type": "text",
"column_name": "origin_address"
},
{
"path": "{doc,destination_addresses,0}",
"type": "text",
"column_name": "destination_address"
},
{
"path": "{doc,rows,0,elements,0,distance,value}",
"type": "numeric",
"column_name": "distince"
},
{
"path": "{doc,rows,0,elements,0,duration,value}",
"type": "numeric",
"column_name": "duration"
}
]
},
"constraint": [
"{doc,origin_addresses}",
"{doc,destination_addresses}"
]
}
$$
) r(x);
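
Note that the constraint here keys on the two address arrays rather than the whole {doc}. Presumably these paths form the natural key that blocks duplicate imports; a sketch of the equivalent duplicate test, under the same rec-in-tps.trans assumption:

-- rows sharing both address paths would collide on this constraint
SELECT
    rec #> '{doc,origin_addresses}'      AS origin,
    rec #> '{doc,destination_addresses}' AS destination,
    count(*)
FROM tps.trans
WHERE srce = 'DMAPI'
GROUP BY 1, 2
HAVING count(*) > 1;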

View File

@ -5,20 +5,34 @@ SELECT
jsonb_pretty(r.x)
FROM
tps.srce_set(
'DMAPI',
$$
{
"name": "DMAPI",
"type": "csv",
"schema": [
"schemas": {
"default": [
{
"key": "doc",
"type": "jsonb"
"path": "{doc,origin_addresses,0}",
"type": "text",
"column_name": "origin_address"
},
{
"path": "{doc,destination_addresses,0}",
"type": "text",
"column_name": "destination_address"
},
{
"path": "{doc,rows,0,elements,0,distance,value}",
"type": "numeric",
"column_name": "distince"
},
{
"path": "{doc,rows,0,elements,0,duration,value}",
"type": "numeric",
"column_name": "duration"
}
],
"unique_constraint": {
"type": "key",
"fields": [
"constraint": [
"{doc}"
]
}
@ -32,12 +46,13 @@ copy
select
$$
{
"destination_addresses" : [ "New York, NY, USA" ],
"origin_addresses" : [ "Washington, DC, USA" ],
"id": 1,
"doc": {
"rows": [
{
"elements": [
{
"status": "OK",
"distance": {
"text": "225 mi",
"value": 361940
@ -45,13 +60,19 @@ $$
"duration": {
"text": "3 hours 50 mins",
"value": 13812
},
"status" : "OK"
}
}
]
}
],
"status" : "OK"
"status": "OK",
"origin_addresses": [
"Washington, DC, USA"
],
"destination_addresses": [
"New York, NY, USA"
]
}
}
$$::JSONB DOC
)

sample_pnc/define.sql Normal file
View File

@ -0,0 +1,74 @@
SELECT
*
FROM
tps.srce_set(
$$
{
"name": "PNCC",
"type": "csv",
"descr": "PNC Cash Accounts",
"constraint": [
"{AsOfDate}"
],
"schemas": {
"default": [
{
"path": "{AsOfDate}",
"type": "date",
"column_name": "AsOfDate"
},
{
"path": "{BankId}",
"type": "text",
"column_name": "BankID"
},
{
"path": "{AccountNumber}",
"type": "text",
"column_name": "AccountNumber"
},
{
"path": "{AccountName}",
"type": "text",
"column_name": "AccountName"
},
{
"path": "{BaiControl}",
"type": "text",
"column_name": "BaiControl"
},
{
"path": "{Currency}",
"type": "text",
"column_name": "Currency"
},
{
"path": "{Transaction}",
"type": "text",
"column_name": "Transaction"
},
{
"path": "{Reference}",
"type": "text",
"column_name": "Reference"
},
{
"path": "{Amount}",
"type": "text",
"column_name": "Amount"
},
{
"path": "{Description}",
"type": "text",
"column_name": "Description"
},
{
"path": "{AdditionalRemittance}",
"type": "text",
"column_name": "CurrencyAdditionalRemittance"
}
]
}
}
$$::jsonb
)
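
Amount is typed text here on purpose: PNC sends it with embedded commas, so it cannot be cast straight to numeric (the Strip Amount Commas map later in this changeset exists for exactly that). A one-line illustration:

-- SELECT '1,234.56'::numeric;                    -- fails: invalid input syntax
SELECT replace('1,234.56', ',', '')::numeric;     -- 1234.56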

sample_pnc/dump.cmd Normal file
View File

@ -0,0 +1,2 @@
psql -U ptrowbridge -d ubm -p 5432 -h ushcc10091 -c "COPY (SELECT jsonb_agg(rec) rec from tps.trans where srce = 'PNCC') TO 'c:\users\ptrowbridge\downloads\pncc.csv' WITH (format csv, header true)"
psql -U ptrowbridge -d ubm_dev -p 5432 -h ushcc10091 -c "CREATE TEMP TABLE x(j jsonb); COPY x FROM 'c:\users\ptrowbridge\downloads\pncc.csv' with (format csv, header true); SELECT * FROM x JOIN LATERAL tps.srce_import('PNCC',x.j) ON TRUE; DROP TABLE X;"

sample_pnc/import.sql Normal file (empty)
View File
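
The file is committed empty. A minimal sketch of what it could hold, lifted from the second psql command in dump.cmd above (the csv path and temp table are that command's, not a settled API):

CREATE TEMP TABLE x(j jsonb);
COPY x FROM 'c:\users\ptrowbridge\downloads\pncc.csv' WITH (format csv, header true);
SELECT * FROM x JOIN LATERAL tps.srce_import('PNCC', x.j) ON TRUE;
DROP TABLE x;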

sample_pnc/srce.json Normal file
View File

@ -0,0 +1,67 @@
{
"name": "PNCC",
"type": "csv",
"descr": "PNC Cash Accounts",
"constraint": [
"{AsOfDate}"
],
"schemas": {
"default": [
{
"path": "{AsOfDate}",
"type": "date",
"column_name": "AsOfDate"
},
{
"path": "{BankId}",
"type": "text",
"column_name": "BankID"
},
{
"path": "{AccountNumber}",
"type": "text",
"column_name": "AccountNumber"
},
{
"path": "{AccountName}",
"type": "text",
"column_name": "AccountName"
},
{
"path": "{BaiControl}",
"type": "text",
"column_name": "BaiControl"
},
{
"path": "{Currency}",
"type": "text",
"column_name": "Currency"
},
{
"path": "{Transaction}",
"type": "text",
"column_name": "Transaction"
},
{
"path": "{Reference}",
"type": "text",
"column_name": "Reference"
},
{
"path": "{Amount}",
"type": "text",
"column_name": "Amount"
},
{
"path": "{Description}",
"type": "text",
"column_name": "Description"
},
{
"path": "{AdditionalRemittance}",
"type": "text",
"column_name": "CurrencyAdditionalRemittance"
}
]
}
}

View File

@ -0,0 +1,6 @@
{
"constraint": [
"{doc,origin_addresses}",
"{doc,destination_addresses}"
]
}

View File

@ -1,6 +1,27 @@
[
{
"strip commas":
"srce": "dcard",
"sequence": 1,
"defn": [
{
"key": "{Description}",
"map": "y",
"flag": "",
"field": "f20",
"regex": ".{1,20}",
"retain": "y"
}
],
"name": "First 20",
"where": [
{}
],
"function": "extract",
"description": "pull first 20 characters from description for mapping"
},
{
"srce": "pncc",
"sequence": 1,
"name": "Strip Amount Commas",
"description": "the Amount field come from PNC with commas embeded so it cannot be cast to numeric",
"defn": [
@ -19,8 +40,9 @@
{}
]
},
"Parse ACH Credits":
{
"srce": "pncc",
"sequence": 1,
"name": "Parse ACH Credits",
"description": "parse select components of the description for ACH Credits Receieved",
"defn": [
@ -112,8 +134,9 @@
}
]
},
"Parse ACH Debits":
{
"srce": "pncc",
"sequence": 1,
"name": "Parse ACH Debits",
"description": "parse select components of the description for ACH Credits Receieved",
"defn": [
@ -205,8 +228,9 @@
}
]
},
"Parse Wires":
{
"srce": "pncc",
"sequence": 1,
"name": "Parse Wires",
"description": "pull out whatever follows OBI in the description until atleast 3 capital letters followed by a colon are encountered",
"defn": [
@ -337,8 +361,9 @@
}
]
},
"Trans Type":
{
"srce": "pncc",
"sequence": 1,
"name": "Trans Type",
"description": "extract intial description in conjunction with account name and transaction type for mapping",
"defn": [
@ -369,8 +394,9 @@
],
"function": "extract"
},
"Currency":
{
"srce": "pncc",
"sequence": 1,
"name": "Currency",
"description": "pull out currency indicators from description of misc items and map",
"defn": [
@ -406,26 +432,9 @@
],
"function": "extract"
},
"check number":
{
"defn": [
{
"key": "{Description}",
"field": "checkn",
"regex": "[^0-9]*([0-9]*)\\s|$",
"retain": "y",
"map": "n"
}
],
"where": [
{
"Transaction": "Checks Paid"
}
],
"function": "extract"
},
"ADP Codes":
{
"srce": "adprp",
"sequence": 1,
"name": "ADP Codes",
"description": "link to adp code definitions",
"defn": [
@ -459,4 +468,4 @@
{}
]
}
}
]
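
Each extract map applies a POSIX regex to one field of the source record and stores the capture under the given field name. For the First 20 map above, a sketch of the equivalent expression (substring ... from ... is standard Postgres; the rec layout is again an assumption):

SELECT substring(rec ->> 'Description' from '.{1,20}') AS f20
FROM tps.trans
WHERE srce = 'dcard';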

View File

@ -1,121 +1,76 @@
{
"name": "WMPD",
"descr": "Williams Paid File",
"type":"csv",
"schema": [
"name": "dcard",
"source": "client_file",
"loading_function": "csv",
"constraint": [
"{Trans. Date}",
"{Post Date}",
"{Description}"
],
"schemas": {
"default": [
{
"key": "Carrier",
"type": "text"
"path": "{Trans. Date}",
"type": "date",
"column_name": "Trans. Date"
},
{
"key": "SCAC",
"type": "text"
"path": "{Post Date}",
"type": "date",
"column_name": "Post Date"
},
{
"key": "Mode",
"type": "text"
"path": "{Description}",
"type": "text",
"column_name": "Description"
},
{
"key": "Pro #",
"type": "text"
"path": "{Amount}",
"type": "numeric",
"column_name": "Amount"
},
{
"key": "B/L",
"type": "text"
},
{
"key": "Pd Amt",
"type": "numeric"
},
{
"key": "Loc#",
"type": "text"
},
{
"key": "Pcs",
"type": "numeric"
},
{
"key": "Wgt",
"type": "numeric"
},
{
"key": "Chk#",
"type": "numeric"
},
{
"key": "Pay Dt",
"type": "date"
},
{
"key": "Acct #",
"type": "text"
},
{
"key": "I/O",
"type": "text"
},
{
"key": "Sh Nm",
"type": "text"
},
{
"key": "Sh City",
"type": "text"
},
{
"key": "Sh St",
"type": "text"
},
{
"key": "Sh Zip",
"type": "text"
},
{
"key": "Cons Nm",
"type": "text"
},
{
"key": "D City ",
"type": "text"
},
{
"key": "D St",
"type": "text"
},
{
"key": "D Zip",
"type": "text"
},
{
"key": "Sh Dt",
"type": "date"
},
{
"key": "Inv Dt",
"type": "date"
},
{
"key": "Customs Entry#",
"type": "text"
},
{
"key": "Miles",
"type": "numeric"
},
{
"key": "Frt Class",
"type": "text"
},
{
"key": "Master B/L",
"type": "text"
"path": "{Category}",
"type": "text",
"column_name": "Category"
}
],
"unique_constraint": {
"fields":[
"{Pay Dt}",
"{Carrier}"
"mapped": [
{
"path": "{Trans. Date}",
"type": "date",
"column_name": "Trans. Date"
},
{
"path": "{Post Date}",
"type": "date",
"column_name": "Post Date"
},
{
"path": "{Description}",
"type": "text",
"column_name": "Description"
},
{
"path": "{Amount}",
"type": "numeric",
"column_name": "Amount"
},
{
"path": "{Category}",
"type": "text",
"column_name": "Category"
},
{
"path": "{party}",
"type": "text",
"column_name": "Party"
},
{
"path": "{reason}",
"type": "text",
"column_name": "Reason"
}
]
}
}
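
The mapped schema repeats the five default columns and appends Party and Reason at paths {party} and {reason}, matching the mapped payloads in the map_rv rows earlier in this changeset; the mapping output is evidently merged into the record before extraction. A sketch of reading the merged document, same rec assumption as above:

SELECT
    rec ->> 'Description' AS "Description",
    rec ->> 'party'       AS "Party",
    rec ->> 'reason'      AS "Reason"
FROM tps.trans
WHERE srce = 'dcard';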