Compare commits


No commits in common. "dev" and "7ccfcc1d275320ca242b961923fc083d3138adef" have entirely different histories.

89 changed files with 6255 additions and 9179 deletions

View File

@ -1,5 +0,0 @@
user=
password=
host=
port=
database=

.gitignore vendored (9 lines changed)
View File

@ -1,5 +1,6 @@
-node_modules/*
-.env
-.project
+.vscode/database.json
+.dbeaver-data-sources.xml
+Scripts/
 *.log
-*.pem
+*.swp
+.vscode/

.vscode/launch.json vendored (26 lines changed)
View File

@ -1,26 +0,0 @@
{
"version": "0.1.0",
"configurations": [
{
"name": "Node Start",
"type": "node",
"program": "${workspaceRoot}/index.js",
"request": "launch",
"cwd": "${workspaceRoot}",
"runtimeExecutable": null,
"env": {
"NODE_ENV": "developement"
}
},
{
"name": "Mocha Test",
"type": "node",
"program": "${workspaceRoot}/node_modules/mocha/bin/_mocha",
"request": "launch",
"cwd": "${workspaceRoot}",
"runtimeExecutable": null,
"env": {
"NODE_ENV": "testing"
}
}]
}

View File

@ -1,19 +0,0 @@
-----BEGIN CERTIFICATE-----
MIIDETCCAfkCFDygOtYyPxZgKLgMp/wIeQ2yGHweMA0GCSqGSIb3DQEBCwUAMEUx
CzAJBgNVBAYTAkFVMRMwEQYDVQQIDApTb21lLVN0YXRlMSEwHwYDVQQKDBhJbnRl
cm5ldCBXaWRnaXRzIFB0eSBMdGQwHhcNMjAwMzE3MDQ0MzI2WhcNNDcwODAyMDQ0
MzI2WjBFMQswCQYDVQQGEwJBVTETMBEGA1UECAwKU29tZS1TdGF0ZTEhMB8GA1UE
CgwYSW50ZXJuZXQgV2lkZ2l0cyBQdHkgTHRkMIIBIjANBgkqhkiG9w0BAQEFAAOC
AQ8AMIIBCgKCAQEAruKzxqy7Zdk1odLAtnKs60eu5/rLGMdsLjbB+V7R7v3bIdi/
TaoKD0oYOHjJSEfpKUEqva+W45Yx8A9dt2OE2jD2Rr3sCNub6m2vY4iB9xXGA5Wy
qtMr83Die225TOtMgGnTmYIU2D7VfOZ08xFu4rwU6NlvjHx/xMByRQ7N1QAMmlkr
F1/KRd6i4+OmFoY81ErsLL4P/rhJ8jbzbRmJGzz3DGOXFvE9Qk+DdMxk8WnzfiZc
8HBVBMYPYifSvuoARtxigQOwodjOTrb+asw3AG5B4Yh1NnZMDa8ujaYgid0RiN8a
Y18GcXZmtGdl1h88Zmlv+sqJbSK5ThOj8pPsxQIDAQABMA0GCSqGSIb3DQEBCwUA
A4IBAQBpLPJ1YZYPERYPYQbczMgcv+GaT7mqJNo7ATAgvMQYIhk2PjbS1FU8+A1X
DBuSb4vfJ6C2zEAWoduncNbKXw4Q9UkZS6/fer8HS1oYOWe9gli/V+hisEPfF1DB
6jyvbp1PZYd39LccovQ9d1ujEOdch+I2iQP4BfpiQohXePDXJA1eDCg2kQBI7aAF
IQ9ZB4ywe6IivLsv1hM37EWEAf6/wO8k2HrZf+LvQAf+fuk/SIhNFav7oRg/FuBJ
p+56Itc8M+Qd6fIEFyj6GOAUhUzbuA1TH9h/XP6dmMk1XIWrFjhUQ4QC0scLdKTF
+xO3dWJaJnrTa+8Ex7KK6Akgo9rc
-----END CERTIFICATE-----

View File

@ -1,4 +0,0 @@
openssl genrsa -out key.pem
openssl req -new -key key.pem -out csr.pem
openssl x509 -req -days 9999 -in csr.pem -signkey key.pem -out cert.pem
rm csr.pem

database/build_maps.xlsm Normal file (binary)

Binary file not shown.

View File

@ -0,0 +1,4 @@
curl -H "Content-Type: application/json" -X POST -d@./srce.json http://localhost/source
curl -H "Content-Type: application/json" -X POST -d@./map.json http://localhost/regex
curl -H "Content-Type: application/json" -X POST -d@./vals.json http://localhost/mapping
curl -v -F upload=@./d.csv http://localhost/import?srce=dcard
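After the POSTs above, a quick check that the upload landed; a sketch only, assuming the import succeeded, using the tps.trans table shown in the reload scripts below:

--sketch only: count what was imported per source
SELECT srce, count(*) FROM tps.trans GROUP BY srce;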

View File

@ -0,0 +1,24 @@
--source
SELECT DEFN FROM TPS.SRCE WHERE SRCE = 'DCARD'
--mapdef
SELECT jsonb_agg(row_to_json(x)::jsonb) FROM (SELECT srce, target "name", regex, seq "sequence" FROM tps.map_rm WHERE srce = 'DCARD') x
--map values
SELECT jsonb_agg(row_to_JSON(x)::jsonb) FROM (SELECT srce "source", target "map", retval ret_val, "map" mapped FROM tps.map_rv WHERE srce = 'DCARD') X
--records
copy (
select
to_char(r."Trans. Date",'mm/dd/yyyy') "Trans. Date"
,to_char(r."Post Date",'mm/dd/yyyy') "Post Date"
,r."Description"
,r."Amount"
,r."Category"
from
tps.trans
join lateral jsonb_populate_record(null::tps.dcard, rec) r on true
where
srce = 'DCARD'
) to
'C:\users\fleet\downloads\dcard.csv' with (format csv, header true);

View File

@ -0,0 +1,25 @@
[
{
"regex": {
"function": "extract",
"description": "pull first 20 characters from description for mapping",
"where": [
{}
],
"defn": [
{
"regex": ".{1,20}",
"map": "y",
"field": "f20",
"flag": "",
"key": "{Description}",
"retain": "y"
}
],
"name": "First 20"
},
"sequence": 2,
"name": "First 20",
"srce": "dcard"
}
]
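For reference, the "First 20" map above reduces to a single regexp_matches call. A minimal sketch, using an invented Description value rather than real DCARD data:

--illustrative only: apply the .{1,20} extract from the map above
SELECT regexp_matches('AMAZON MKTPLACE PMTS AMZN.COM/BILL WA', '.{1,20}');
--returns {"AMAZON MKTPLACE PMTS"}, which the map retains under field f20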

View File

@ -0,0 +1,3 @@
curl -H "Content-Type: application/json" -X POST -d@./srce.json http://localhost:81/srce_set
curl -H "Content-Type: application/json" -X POST -d@./mapdef.json http://localhost:81/mapdef_set
curl -v -F upload=@//mnt/c/Users/fleet/Downloads/hunt.csv http://localhost:81/import?srce=hunt

View File

@ -0,0 +1,27 @@
--source
COPY (SELECT DEFN FROM TPS.SRCE WHERE SRCE = 'HUNT') TO 'C:\users\fleet\documents\tps_etl\reload\hunt\srce.json' WITH (FORMAT TEXT, HEADER FALSE)
--mapdef
COPY (SELECT jsonb_agg(row_to_json(x)::jsonb) FROM (SELECT srce, target "name", regex, seq "sequence" FROM tps.map_rm WHERE srce = 'HUNT') x) TO 'C:\users\fleet\documents\tps_etl\reload\hunt\map.json' WITH (FORMAT TEXT, HEADER FALSE)
--map values
SELECT jsonb_agg(row_to_JSON(x)::jsonb) FROM (SELECT srce "source", target "map", retval ret_val, "map" mapped FROM tps.map_rv WHERE srce = 'HUNT') X
--records
copy (
select
to_char(r."Date",'mm/dd/yy') "Date"
,r."Reference Number"
,r."Payee Name"
,r."Memo"
,r."Amount"
,r."Category Name"
from
tps.trans
join lateral jsonb_populate_record(null::tps.hunt, rec) r on true
where
srce = 'HUNT'
order by
r."Date" asc
) to
'C:\users\fleet\downloads\hunt.csv' with (format csv, header true);

View File

@ -0,0 +1,25 @@
[
{
"name": "First 20",
"srce": "hunt",
"regex": {
"defn": [
{
"key": "{Memo}",
"map": "y",
"flag": "",
"field": "f20",
"regex": ".{1,20}",
"retain": "y"
}
],
"name": "First 20",
"where": [
{}
],
"function": "extract",
"description": "pull first 20 characters from description for mapping"
},
"sequence": 1
}
]

View File

@ -0,0 +1,42 @@
{
"name": "hunt",
"source": "client_file",
"loading_function": "csv",
"constraint": [
"{Date}"
],
"schemas": {
"default": [
{
"path": "{Date}",
"type": "date",
"column_name": "Date"
},
{
"path": "{Reference Number}",
"type": "numeric",
"column_name": "Reference Number"
},
{
"path": "{Payee Name}",
"type": "text",
"column_name": "Payee Name"
},
{
"path": "{Memo}",
"type": "text",
"column_name": "Memo"
},
{
"path": "{Amount}",
"type": "numeric",
"column_name": "Amount"
},
{
"path": "{Category Name}",
"type": "text",
"column_name": "Cateogry Name"
}
]
}
}

View File

@ -0,0 +1,58 @@
--transactions with date in download format for constraint
COPY
(
SELECT
r."perd_start",
r."perd_end",
r."check_date",
r."loc_code",
r."loc_descr",
r."loc_glseg",
r."loc_over",
r."dep_code",
r."dep_descr",
r."dep_nat",
r."dep_over",
r."di_code",
r."di_descr",
r."di_glseg",
r."di_over",
r."title_code",
r."title_descr",
r."title_glseg",
r."title_over",
r."ee_code",
r."ee_glseg",
r."ee_over",
r."acct_type_code",
r."hours",
r."nat_code",
r."nat_over",
r."gl_ref",
r."gl_group",
r."gl_descr",
r."gl_code",
r."gl_amount",
r."pp_code",
r."pp_descr",
r."pp_gl",
r."pp_over",
r."transaction"
FROM
tps.trans
JOIN LATERAL jsonb_populate_record(NULL::tps.PAYCOM, rec) r ON TRUE
WHERE
srce = 'PAYCOM'
)
TO 'C:\users\ptrowbridge\downloads\PAYCOM.csv' WITH (format csv, header TRUE)
--source
SELECT DEFN FROM TPS.SRCE WHERE SRCE = 'PAYCOM'
--mapdef
SELECT jsonb_agg(row_to_json(x)::jsonb) FROM (SELECT srce, target "name", regex, seq "sequence" FROM tps.map_rm WHERE srce = 'PAYCOM') x
--map values
SELECT jsonb_agg(row_to_JSON(x)::jsonb) FROM (SELECT srce "source", target "map", retval ret_val, "map" mapped FROM tps.map_rv WHERE srce = 'PAYCOM') X

View File

@ -0,0 +1,4 @@
curl -H "Content-Type: application/json" -X POST -d@./srce.json http://localhost:81/srce_set
curl -H "Content-Type: application/json" -X POST -d@./map.json http://localhost:81/mapdef_set
curl -H "Content-Type: application/json" -X POST -d@./vals.json http://localhost:81/mapval_set
curl -v -F upload=@//mnt/c/Users/ptrowbridge/Downloads/PAYCOM.csv http://localhost:81/import?srce=PAYCOM

View File

@ -0,0 +1,64 @@
[
{
"name": "Extract month and code",
"srce": "PAYCOM",
"regex": {
"defn": [
{
"key": "{check_date}",
"map": "n",
"flag": "",
"field": "check_month",
"regex": "(\\d{4})-(\\d{2})-\\d{2}",
"retain": "y"
},
{
"key": "{loc_code}",
"map": "y",
"flag": "",
"field": "loc_code_rx",
"regex": ".*",
"retain": "n"
}
],
"name": "Paycom",
"where": [
{}
],
"function": "extract",
"description": "extract month and code"
},
"sequence": 1
},
{
"name": "SUBSEQUENT PAYOUT",
"srce": "PAYCOM",
"regex": {
"defn": [
{
"key": "{gl_group}",
"map": "y",
"flag": "",
"field": "gl_group",
"regex": ".*",
"retain": "n"
},
{
"key": "{acct_type_code}",
"map": "y",
"flag": "",
"field": "acct_type_code",
"regex": ".*",
"retain": "n"
}
],
"name": "Paycom",
"where": [
{}
],
"function": "extract",
"description": "map payroll code and account group to subsequent payout"
},
"sequence": 2
}
]
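The check_month extract above can be exercised directly. A minimal sketch with a hypothetical check_date value; the two capture groups come back as one array:

--illustrative only: capture year and month from a date-shaped string
SELECT regexp_matches('2018-06-15', '(\d{4})-(\d{2})-\d{2}');
--returns {2018,06}, stored under the field name check_month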

View File

@ -0,0 +1,392 @@
{
"constraint": [
"{transaction}",
"{loc_code}",
"{perd_start}",
"{perd_end}"
],
"name": "PAYCOM",
"source": "client_file",
"loading_function": "csv",
"schemas": {
"default": [
{
"path": "{perd_start}",
"type": "date",
"column_name": "perd_start"
},
{
"path": "{perd_end}",
"type": "date",
"column_name": "perd_end"
},
{
"path": "{check_date}",
"type": "date",
"column_name": "check_date"
},
{
"path": "{loc_code}",
"type": "text",
"column_name": "loc_code"
},
{
"path": "{loc_descr}",
"type": "text",
"column_name": "loc_descr"
},
{
"path": "{loc_glseg}",
"type": "text",
"column_name": "loc_glseg"
},
{
"path": "{loc_over}",
"type": "text",
"column_name": "loc_over"
},
{
"path": "{dep_code}",
"type": "text",
"column_name": "dep_code"
},
{
"path": "{dep_descr}",
"type": "text",
"column_name": "dep_descr"
},
{
"path": "{dep_nat}",
"type": "text",
"column_name": "dep_nat"
},
{
"path": "{dep_over}",
"type": "text",
"column_name": "dep_over"
},
{
"path": "{di_code}",
"type": "text",
"column_name": "di_code"
},
{
"path": "{di_descr}",
"type": "text",
"column_name": "di_descr"
},
{
"path": "{di_glseg}",
"type": "text",
"column_name": "di_glseg"
},
{
"path": "{di_over}",
"type": "text",
"column_name": "di_over"
},
{
"path": "{title_code}",
"type": "text",
"column_name": "title_code"
},
{
"path": "{title_descr}",
"type": "text",
"column_name": "title_descr"
},
{
"path": "{title_glseg}",
"type": "text",
"column_name": "title_glseg"
},
{
"path": "{title_over}",
"type": "text",
"column_name": "title_over"
},
{
"path": "{ee_code}",
"type": "text",
"column_name": "ee_code"
},
{
"path": "{ee_glseg}",
"type": "text",
"column_name": "ee_glseg"
},
{
"path": "{ee_over}",
"type": "text",
"column_name": "ee_over"
},
{
"path": "{acct_type_code}",
"type": "text",
"column_name": "acct_type_code"
},
{
"path": "{hours}",
"type": "numeric",
"column_name": "hours"
},
{
"path": "{nat_code}",
"type": "text",
"column_name": "nat_code"
},
{
"path": "{nat_over}",
"type": "text",
"column_name": "nat_over"
},
{
"path": "{gl_ref}",
"type": "text",
"column_name": "gl_ref"
},
{
"path": "{gl_group}",
"type": "text",
"column_name": "gl_group"
},
{
"path": "{gl_descr}",
"type": "text",
"column_name": "gl_descr"
},
{
"path": "{gl_code}",
"type": "text",
"column_name": "gl_code"
},
{
"path": "{gl_amount}",
"type": "numeric",
"column_name": "gl_amount"
},
{
"path": "{pp_code}",
"type": "text",
"column_name": "pp_code"
},
{
"path": "{pp_descr}",
"type": "text",
"column_name": "pp_descr"
},
{
"path": "{pp_gl}",
"type": "text",
"column_name": "pp_gl"
},
{
"path": "{pp_over}",
"type": "text",
"column_name": "pp_over"
},
{
"path": "{transaction}",
"type": "text",
"column_name": "transaction"
}
],
"mapped":[
{
"path": "{perd_start}",
"type": "date",
"column_name": "perd_start"
},
{
"path": "{perd_end}",
"type": "date",
"column_name": "perd_end"
},
{
"path": "{check_date}",
"type": "date",
"column_name": "check_date"
},
{
"path": "{loc_code}",
"type": "text",
"column_name": "loc_code"
},
{
"path": "{loc_descr}",
"type": "text",
"column_name": "loc_descr"
},
{
"path": "{loc_glseg}",
"type": "text",
"column_name": "loc_glseg"
},
{
"path": "{loc_over}",
"type": "text",
"column_name": "loc_over"
},
{
"path": "{dep_code}",
"type": "text",
"column_name": "dep_code"
},
{
"path": "{dep_descr}",
"type": "text",
"column_name": "dep_descr"
},
{
"path": "{dep_nat}",
"type": "text",
"column_name": "dep_nat"
},
{
"path": "{dep_over}",
"type": "text",
"column_name": "dep_over"
},
{
"path": "{di_code}",
"type": "text",
"column_name": "di_code"
},
{
"path": "{di_descr}",
"type": "text",
"column_name": "di_descr"
},
{
"path": "{di_glseg}",
"type": "text",
"column_name": "di_glseg"
},
{
"path": "{di_over}",
"type": "text",
"column_name": "di_over"
},
{
"path": "{title_code}",
"type": "text",
"column_name": "title_code"
},
{
"path": "{title_descr}",
"type": "text",
"column_name": "title_descr"
},
{
"path": "{title_glseg}",
"type": "text",
"column_name": "title_glseg"
},
{
"path": "{title_over}",
"type": "text",
"column_name": "title_over"
},
{
"path": "{ee_code}",
"type": "text",
"column_name": "ee_code"
},
{
"path": "{ee_glseg}",
"type": "text",
"column_name": "ee_glseg"
},
{
"path": "{ee_over}",
"type": "text",
"column_name": "ee_over"
},
{
"path": "{acct_type_code}",
"type": "text",
"column_name": "acct_type_code"
},
{
"path": "{hours}",
"type": "numeric",
"column_name": "hours"
},
{
"path": "{nat_code}",
"type": "text",
"column_name": "nat_code"
},
{
"path": "{nat_over}",
"type": "text",
"column_name": "nat_over"
},
{
"path": "{gl_ref}",
"type": "text",
"column_name": "gl_ref"
},
{
"path": "{gl_group}",
"type": "text",
"column_name": "gl_group"
},
{
"path": "{gl_descr}",
"type": "text",
"column_name": "gl_descr"
},
{
"path": "{gl_code}",
"type": "text",
"column_name": "gl_code"
},
{
"path": "{gl_amount}",
"type": "numeric",
"column_name": "gl_amount"
},
{
"path": "{pp_code}",
"type": "text",
"column_name": "pp_code"
},
{
"path": "{pp_descr}",
"type": "text",
"column_name": "pp_descr"
},
{
"path": "{pp_gl}",
"type": "text",
"column_name": "pp_gl"
},
{
"path": "{pp_over}",
"type": "text",
"column_name": "pp_over"
},
{
"path": "{transaction}",
"type": "text",
"column_name": "transaction"
},
{
"path": "{trial_bal}",
"type": "text",
"column_name": "trial_bal"
},
{
"path": "{third_party}",
"type": "text",
"column_name": "third_party"
},
{
"path": "{check_month}",
"type": "jsonb",
"column_name": "check_month"
}
]
}
}

View File

@ -0,0 +1,139 @@
[
{
"map": "Extract month and code",
"mapped": {
"trial_bal": "65"
},
"source": "PAYCOM",
"ret_val": {
"loc_code_rx": "SLS"
}
},
{
"map": "Extract month and code",
"mapped": {
"trial_bal": "59"
},
"source": "PAYCOM",
"ret_val": {
"loc_code_rx": "SEB"
}
},
{
"map": "Extract month and code",
"mapped": {
"trial_bal": "93"
},
"source": "PAYCOM",
"ret_val": {
"loc_code_rx": "MDF"
}
},
{
"map": "Extract month and code",
"mapped": {
"trial_bal": "93"
},
"source": "PAYCOM",
"ret_val": {
"loc_code_rx": "ELY"
}
},
{
"map": "Extract month and code",
"mapped": {
"trial_bal": "93"
},
"source": "PAYCOM",
"ret_val": {
"loc_code_rx": "TWN"
}
},
{
"map": "Extract month and code",
"mapped": {
"trial_bal": "88"
},
"source": "PAYCOM",
"ret_val": {
"loc_code_rx": "SPK"
}
},
{
"map": "SUBSEQUENT PAYOUT",
"mapped": {
"third_party": "PRINCIPAL LIFE P"
},
"source": "PAYCOM",
"ret_val": {
"gl_group": "4.Liab",
"acct_type_code": "40P"
}
},
{
"map": "SUBSEQUENT PAYOUT",
"mapped": {
"third_party": "PRINCIPAL LIFE P"
},
"source": "PAYCOM",
"ret_val": {
"gl_group": "4.Liab",
"acct_type_code": "CUF"
}
},
{
"map": "SUBSEQUENT PAYOUT",
"mapped": {
"third_party": "PRINCIPAL LIFE P"
},
"source": "PAYCOM",
"ret_val": {
"gl_group": "4.Liab",
"acct_type_code": "K4L"
}
},
{
"map": "SUBSEQUENT PAYOUT",
"mapped": {
"third_party": "PRINCIPAL LIFE P"
},
"source": "PAYCOM",
"ret_val": {
"gl_group": "5.Liab",
"acct_type_code": "40P"
}
},
{
"map": "SUBSEQUENT PAYOUT",
"mapped": {
"third_party": "PRINCIPAL LIFE P"
},
"source": "PAYCOM",
"ret_val": {
"gl_group": "5.Liab",
"acct_type_code": "CUF"
}
},
{
"map": "SUBSEQUENT PAYOUT",
"mapped": {
"third_party": "Basic NEO"
},
"source": "PAYCOM",
"ret_val": {
"gl_group": "4.Liab",
"acct_type_code": "FSA"
}
},
{
"map": "SUBSEQUENT PAYOUT",
"mapped": {
"third_party": "Basic NEO"
},
"source": "PAYCOM",
"ret_val": {
"gl_group": "4.Liab",
"acct_type_code": "FSD"
}
}
]

View File

@ -0,0 +1,32 @@
--transactions with date in download format for constraint
COPY
(
SELECT
to_char(r."AsOfDate",'mm/dd/yyyy') "AsOfDate"
,r."BankId"
,r."AccountNumber"
,r."AccountName"
,r."BaiControl"
,r."Currency"
,r."Transaction"
,r."Reference"
,r."Amount"
,r."Description"
FROM
tps.trans
JOIN LATERAL jsonb_populate_record(NULL::tps.pncc, rec) r ON TRUE
WHERE
srce = 'PNCC'
)
TO 'C:\users\ptrowbridge\downloads\pncc.csv' WITH (format csv, header TRUE)
--source
SELECT DEFN FROM TPS.SRCE WHERE SRCE = 'PNCC'
--mapdef
SELECT jsonb_agg(row_to_json(x)::jsonb) FROM (SELECT srce, target "name", regex, seq "sequence" FROM tps.map_rm WHERE srce = 'PNCC') x
--map values
SELECT jsonb_agg(row_to_JSON(x)::jsonb) FROM (SELECT srce "source", target "map", retval ret_val, "map" mapped FROM tps.map_rv WHERE srce = 'PNCC') X

View File

@ -0,0 +1,4 @@
curl -H "Content-Type: application/json" -X POST -d@./srce.json http://localhost:81/srce_set
curl -H "Content-Type: application/json" -X POST -d@./map.json http://localhost:81/mapdef_set
curl -H "Content-Type: application/json" -X POST -d@./vals.json http://localhost:81/mapval_set
curl -v -F upload=@//mnt/c/Users/ptrowbridge/Downloads/pncc.csv http://localhost:81/import?srce=PNCC

View File

@ -0,0 +1,455 @@
[
{
"name": "Check Number",
"srce": "PNCC",
"regex": {
"defn": [
{
"key": "{Description}",
"map": "n",
"field": "checkn",
"regex": "[^0-9]*([0-9]*)\\s|$",
"retain": "y"
}
],
"where": [
{
"Transaction": "Checks Paid"
}
],
"function": "extract"
},
"sequence": 2
},
{
"name": "Strip Amount Commas",
"srce": "PNCC",
"regex": {
"defn": [
{
"key": "{Amount}",
"map": "n",
"flag": "g",
"field": "amount",
"regex": ",",
"retain": "y",
"replace": ""
}
],
"name": "Strip Amount Commas",
"where": [
{}
],
"function": "replace",
"description": "the Amount field come from PNC with commas embeded so it cannot be cast to numeric"
},
"sequence": 1
},
{
"name": "Trans Type",
"srce": "PNCC",
"regex": {
"defn": [
{
"key": "{AccountName}",
"map": "y",
"field": "acctn",
"regex": "(.*)",
"retain": "n"
},
{
"key": "{Transaction}",
"map": "y",
"field": "trans",
"regex": "(.*)",
"retain": "n"
},
{
"key": "{Description}",
"map": "y",
"field": "ini",
"regex": "([\\w].*?)(?=$| -|\\s[0-9].*?|\\s[\\w/]+?:)",
"retain": "y"
}
],
"name": "Trans Type",
"where": [
{}
],
"function": "extract",
"description": "extract intial description in conjunction with account name and transaction type for mapping"
},
"sequence": 1
},
{
"name": "Currency",
"srce": "PNCC",
"regex": {
"defn": [
{
"key": "{Description}",
"map": "y",
"field": "ini",
"regex": "([\\w].*?)(?=$| -|\\s[0-9].*?|\\s[\\w/]+?:)",
"retain": "y"
},
{
"key": "{Description}",
"map": "y",
"field": "curr1",
"regex": ".*(DEBIT|CREDIT).*(USD|CAD).*(?=DEBIT|CREDIT).*(?=USD|CAD).*",
"retain": "y"
},
{
"key": "{Description}",
"map": "y",
"field": "curr2",
"regex": ".*(?=DEBIT|CREDIT).*(?=USD|CAD).*(DEBIT|CREDIT).*(USD|CAD).*",
"retain": "y"
}
],
"name": "Currency",
"where": [
{
"Transaction": "Miscellaneous Credits"
},
{
"Transaction": "Miscellaneous Debits"
}
],
"function": "extract",
"description": "pull out currency indicators from description of misc items and map"
},
"sequence": 2
},
{
"name": "Parse ACH Credits",
"srce": "PNCC",
"regex": {
"defn": [
{
"key": "{Description}",
"map": "n",
"flag": "",
"field": "beneficiary",
"regex": "Comp Name:(.+?)(?=\\d{6} Com|SEC:|Cust ID:|Desc:|Comp Name:|Comp ID:|Batch Discr:|Cust Name:|Addenda:|SETT:|Date:|Time:|$)",
"retain": "y"
},
{
"key": "{Description}",
"map": "n",
"flag": "",
"field": "Cust ID",
"regex": "Cust ID:(.+?)(?=SEC:|Cust ID:|Desc:|Comp Name:|Comp ID:|Batch Discr:|Cust Name:|Addenda:|SETT:|Date:|Time:|$)",
"retain": "y"
},
{
"key": "{Description}",
"map": "n",
"flag": "",
"field": "Desc",
"regex": "Desc:(.+?)(?=SEC:|Cust ID:|Desc:|Comp Name:|Comp ID:|Batch Discr:|Cust Name:|Addenda:|SETT:|Date:|Time:|$)",
"retain": "y"
},
{
"key": "{Description}",
"map": "n",
"flag": "",
"field": "originator",
"regex": "Cust Name:(.+?)(?=SEC:|Cust ID:|Desc:|Comp Name:|Comp ID:|Batch Discr:|Cust Name:|Addenda:|SETT:|Date:|Time:|$)",
"retain": "y"
},
{
"key": "{Description}",
"map": "n",
"flag": "",
"field": "Batch Discr",
"regex": "Batch Discr:(.+?)(?=SEC:|Cust ID:|Desc:|Comp Name:|Comp ID:|Batch Discr:|Cust Name:|Addenda:|SETT:|Date:|Time:|$)",
"retain": "y"
},
{
"key": "{Description}",
"map": "n",
"flag": "",
"field": "Comp ID",
"regex": "Comp ID:(.+?)(?=SEC:|Cust ID:|Desc:|Comp Name:|Comp ID:|Batch Discr:|Cust Name:|Addenda:|SETT:|Date:|Time:|$)",
"retain": "y"
},
{
"key": "{Description}",
"map": "n",
"flag": "",
"field": "Addenda",
"regex": "Addenda:(.+?)(?=SEC:|Cust ID:|Desc:|Comp Name:|Comp ID:|Batch Discr:|Cust Name:|Addenda:|SETT:|Date:|Time:|$)",
"retain": "y"
},
{
"key": "{Description}",
"map": "n",
"flag": "",
"field": "SETT",
"regex": "SETT:(.+?)(?=SEC:|Cust ID:|Desc:|Comp Name:|Comp ID:|Batch Discr:|Cust Name:|Addenda:|SETT:|Date:|Time:|$)",
"retain": "y"
},
{
"key": "{Description}",
"map": "n",
"flag": "",
"field": "Date",
"regex": "Date:(.+?)(?=SEC:|Cust ID:|Desc:|Comp Name:|Comp ID:|Batch Discr:|Cust Name:|Addenda:|SETT:|Date:|Time:|$)",
"retain": "y"
},
{
"key": "{Description}",
"map": "n",
"flag": "",
"field": "Time",
"regex": "Time:(.+?)(?=SEC:|Cust ID:|Desc:|Comp Name:|Comp ID:|Batch Discr:|Cust Name:|Addenda:|SETT:|Date:|Time:|$)",
"retain": "y"
}
],
"name": "Parse ACH Credits",
"where": [
{
"Transaction": "ACH Credits"
}
],
"function": "extract",
"description": "parse select components of the description for ACH Credits Receieved"
},
"sequence": 2
},
{
"name": "Parse ACH Debits",
"srce": "PNCC",
"regex": {
"defn": [
{
"key": "{Description}",
"map": "n",
"flag": "",
"field": "originator",
"regex": "Comp Name:(.+?)(?=\\d{6} Com|SEC:|Cust ID:|Desc:|Comp Name:|Comp ID:|Batch Discr:|Cust Name:|Addenda:|SETT:|Date:|Time:|$)",
"retain": "y"
},
{
"key": "{Description}",
"map": "n",
"flag": "",
"field": "Cust ID",
"regex": "Cust ID:(.+?)(?=SEC:|Cust ID:|Desc:|Comp Name:|Comp ID:|Batch Discr:|Cust Name:|Addenda:|SETT:|Date:|Time:|$)",
"retain": "y"
},
{
"key": "{Description}",
"map": "n",
"flag": "",
"field": "Desc",
"regex": "Desc:(.+?)(?=SEC:|Cust ID:|Desc:|Comp Name:|Comp ID:|Batch Discr:|Cust Name:|Addenda:|SETT:|Date:|Time:|$)",
"retain": "y"
},
{
"key": "{Description}",
"map": "n",
"flag": "",
"field": "beneficiary",
"regex": "Cust Name:(.+?)(?=SEC:|Cust ID:|Desc:|Comp Name:|Comp ID:|Batch Discr:|Cust Name:|Addenda:|SETT:|Date:|Time:|$)",
"retain": "y"
},
{
"key": "{Description}",
"map": "n",
"flag": "",
"field": "Batch Discr",
"regex": "Batch Discr:(.+?)(?=SEC:|Cust ID:|Desc:|Comp Name:|Comp ID:|Batch Discr:|Cust Name:|Addenda:|SETT:|Date:|Time:|$)",
"retain": "y"
},
{
"key": "{Description}",
"map": "n",
"flag": "",
"field": "Comp ID",
"regex": "Comp ID:(.+?)(?=SEC:|Cust ID:|Desc:|Comp Name:|Comp ID:|Batch Discr:|Cust Name:|Addenda:|SETT:|Date:|Time:|$)",
"retain": "y"
},
{
"key": "{Description}",
"map": "n",
"flag": "",
"field": "Addenda",
"regex": "Addenda:(.+?)(?=SEC:|Cust ID:|Desc:|Comp Name:|Comp ID:|Batch Discr:|Cust Name:|Addenda:|SETT:|Date:|Time:|$)",
"retain": "y"
},
{
"key": "{Description}",
"map": "n",
"flag": "",
"field": "SETT",
"regex": "SETT:(.+?)(?=SEC:|Cust ID:|Desc:|Comp Name:|Comp ID:|Batch Discr:|Cust Name:|Addenda:|SETT:|Date:|Time:|$)",
"retain": "y"
},
{
"key": "{Description}",
"map": "n",
"flag": "",
"field": "Date",
"regex": "Date:(.+?)(?=SEC:|Cust ID:|Desc:|Comp Name:|Comp ID:|Batch Discr:|Cust Name:|Addenda:|SETT:|Date:|Time:|$)",
"retain": "y"
},
{
"key": "{Description}",
"map": "n",
"flag": "",
"field": "Time",
"regex": "Time:(.+?)(?=SEC:|Cust ID:|Desc:|Comp Name:|Comp ID:|Batch Discr:|Cust Name:|Addenda:|SETT:|Date:|Time:|$)",
"retain": "y"
}
],
"name": "Parse ACH Debits",
"where": [
{
"Transaction": "ACH Debits"
}
],
"function": "extract",
"description": "parse select components of the description for ACH Credits Receieved"
},
"sequence": 2
},
{
"name": "Parse Wires",
"srce": "PNCC",
"regex": {
"defn": [
{
"key": "{Description}",
"map": "n",
"flag": "g",
"field": "dparse",
"regex": "([A-Z]{3,}?:)(.*)(?=[A-Z]{3,}?:|$)",
"retain": "y"
},
{
"key": "{Description}",
"map": "n",
"flag": "",
"field": "beneficiary_components",
"regex": "BENEFICIARY:(.*?)AC/([\\d-]*) (.*)(?=[A-Z]{3,}?:|$)",
"retain": "y"
},
{
"key": "{Description}",
"map": "n",
"flag": "",
"field": "originator_components",
"regex": "ORIGINATOR:(.*?)AC/(\\d*) (.*)(?=[A-Z]{3,}?:|$)",
"retain": "y"
},
{
"key": "{Description}",
"map": "n",
"flag": "",
"field": "beneficiary",
"regex": "BENEFICIARY:(.*?)AC/[\\d-]* .*(?=[A-Z]{3,}?:|$)",
"retain": "y"
},
{
"key": "{Description}",
"map": "n",
"flag": "",
"field": "originator",
"regex": "ORIGINATOR:(.*?)AC/\\d* .*(?=[A-Z]{3,}?:|$)",
"retain": "y"
},
{
"key": "{Description}",
"map": "n",
"flag": "",
"field": "OBI",
"regex": "OBI:(.*?)(?=[A-Z]{3,}?:|$)",
"retain": "y"
},
{
"key": "{Description}",
"map": "n",
"flag": "",
"field": "RFB",
"regex": "RFB:(.*?)(?=[A-Z]{3,}?:|$)",
"retain": "y"
},
{
"key": "{Description}",
"map": "n",
"flag": "",
"field": "ABA",
"regex": "ABA:(.*?)(?=[A-Z]{3,}?:|$)",
"retain": "y"
},
{
"key": "{Description}",
"map": "n",
"flag": "",
"field": "BBI",
"regex": "BBI:(.*?)(?=[A-Z]{3,}?:|$)",
"retain": "y"
},
{
"key": "{Description}",
"map": "n",
"flag": "",
"field": "BENEBNK",
"regex": "BENEBNK:(.*?)(?=[A-Z]{3,}?:|$)",
"retain": "y"
},
{
"key": "{Description}",
"map": "n",
"flag": "",
"field": "IBK",
"regex": "IBK:(.*?)(?=[A-Z]{3,}?:|$)",
"retain": "y"
},
{
"key": "{Description}",
"map": "n",
"flag": "",
"field": "RATE",
"regex": "RATE:(.*?)(?=[A-Z]{3,}?:|$)",
"retain": "y"
},
{
"key": "{Description}",
"map": "n",
"flag": "",
"field": "RECVBNK",
"regex": "RECVBNK:(.*?)(?=[A-Z]{3,}?:|$)",
"retain": "y"
}
],
"name": "Parse Wires",
"where": [
{
"Transaction": "Money Transfer DB - Wire"
},
{
"Transaction": "Money Transfer DB - Other"
},
{
"Transaction": "Money Transfer CR-Wire"
},
{
"Transaction": "Money Transfer CR-Other"
},
{
"Transaction": "Intl Money Transfer Debits"
},
{
"Transaction": "Intl Money Transfer Credits"
}
],
"function": "extract",
"description": "pull out whatever follows OBI in the description until atleast 3 capital letters followed by a colon are encountered"
},
"sequence": 2
}
]
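Of the maps above, "Strip Amount Commas" is the simplest to demonstrate: it is a replace-type map, and with flag 'g' every comma goes. A minimal sketch with a made-up amount:

--illustrative only: what the replace map does to a comma-formatted Amount
SELECT regexp_replace('1,234,567.89', ',', '', 'g');
--returns 1234567.89, which can then be cast to numeric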

View File

@ -0,0 +1,67 @@
{
"name": "PNCC",
"source": "client_file",
"loading_function": "csv",
"constraint": [
"{AsOfDate}"
],
"schemas": {
"default": [
{
"path": "{AsOfDate}",
"type": "date",
"column_name": "AsOfDate"
},
{
"path": "{BankId}",
"type": "text",
"column_name": "BankId"
},
{
"path": "{AccountNumber}",
"type": "text",
"column_name": "AccountNumber"
},
{
"path": "{AccountName}",
"type": "text",
"column_name": "AccountName"
},
{
"path": "{BaiControl}",
"type": "text",
"column_name": "BaiControl"
},
{
"path": "{Currency}",
"type": "text",
"column_name": "Currency"
},
{
"path": "{Transaction}",
"type": "text",
"column_name": "Transaction"
},
{
"path": "{Reference}",
"type": "text",
"column_name": "Reference"
},
{
"path": "{Amount}",
"type": "text",
"column_name": "Amount"
},
{
"path":"{Description}",
"type": "text",
"column_name": "Description"
},
{
"path": "{AdditionalRemittance}",
"type": "text",
"column_name": "AdditionalRemittance"
}
]
}
}

View File

@ -0,0 +1,952 @@
[
{
"map": "Trans Type",
"mapped": {
"sign": "-1",
"party": "PNC",
"ledger": "Manual",
"reason": "Bank Fees",
"trantype": "Disbursement"
},
"source": "PNCC",
"ret_val": {
"ini": "CANADA TAX",
"acctn": "The HC Operating Company OPERA",
"trans": "Detail Debit Adjustments"
}
},
{
"map": "Trans Type",
"mapped": {
"sign": "-1",
"ledger": "AP - ACH",
"trantype": "Disbursement"
},
"source": "PNCC",
"ret_val": {
"ini": "ACH DEBIT SETTLEMENT",
"acctn": "The HC Operating Company OPERA",
"trans": "ACH Debits"
}
},
{
"map": "Trans Type",
"mapped": {
"sign": "-1",
"ledger": "Manual",
"reason": "Returned Deposit RTM",
"trantype": "Collections"
},
"source": "PNCC",
"ret_val": {
"ini": "RET DEP ITEM RTM",
"acctn": "The HC Operating Company FBO P",
"trans": "Deposited Items Returned"
}
},
{
"map": "Trans Type",
"mapped": {
"sign": "-1",
"ledger": "Manual",
"reason": "Returned Deposit STOP",
"trantype": "Collections"
},
"source": "PNCC",
"ret_val": {
"ini": "RET DEP ITEM STOP",
"acctn": "The HC Operating Company FBO P",
"trans": "Deposited Items Returned"
}
},
{
"map": "Trans Type",
"mapped": {
"sign": "1",
"ledger": "AR - Collections",
"trantype": "Collections"
},
"source": "PNCC",
"ret_val": {
"ini": "CREDIT ADJUSTMENT",
"acctn": "The HC Operating Company FBO P",
"trans": "Detail Credit Adjustments"
}
},
{
"map": "Trans Type",
"mapped": {
"sign": "1",
"ledger": "Manual",
"reason": "Returned Check",
"trantype": "Disbursement"
},
"source": "PNCC",
"ret_val": {
"ini": "REFER TO MAKER OF CK RETURN CK",
"acctn": "The HC Operating Company OPERA",
"trans": "Detail Credit Adjustments"
}
},
{
"map": "Trans Type",
"mapped": {
"sign": "-1",
"ledger": "Manual",
"reason": "Payroll Adjustment",
"trantype": "Disbursement"
},
"source": "PNCC",
"ret_val": {
"ini": "DEBIT ADJUSTMENT",
"acctn": "The HC Operating Company PAYR",
"trans": "Detail Debit Adjustments"
}
},
{
"map": "Trans Type",
"mapped": {
"sign": "1",
"ledger": "AR - Collections",
"trantype": "Collections"
},
"source": "PNCC",
"ret_val": {
"ini": "DEPOSIT",
"acctn": "The HC Operating Company FBO P",
"trans": "Detail Deposits"
}
},
{
"map": "Trans Type",
"mapped": {
"sign": "-1",
"ledger": "AP - Wire",
"trantype": "Disbursement"
},
"source": "PNCC",
"ret_val": {
"ini": "INTL WIRE OUT",
"acctn": "The HC Operating Company OPERA",
"trans": "Intl Money Transfer Debits"
}
},
{
"map": "Trans Type",
"mapped": {
"sign": "-1",
"ledger": "AP - Wire",
"trantype": "Disbursement"
},
"source": "PNCC",
"ret_val": {
"ini": "INTL WIRES OUT",
"acctn": "The HC Operating Company OPERA",
"trans": "Intl Money Transfer Debits"
}
},
{
"map": "Trans Type",
"mapped": {
"sign": "1",
"ledger": "AR - Collections",
"trantype": "Collections"
},
"source": "PNCC",
"ret_val": {
"ini": "WHLS LBX DEP",
"acctn": "The HC Operating Company FBO P",
"trans": "Lockbox Deposits"
}
},
{
"map": "Trans Type",
"mapped": {
"sign": "1",
"ledger": "AR - Collections",
"trantype": "Collections"
},
"source": "PNCC",
"ret_val": {
"ini": "WHLS LBX DEP932855",
"acctn": "The HC Operating Company FBO P",
"trans": "Lockbox Deposits"
}
},
{
"map": "Trans Type",
"mapped": {
"sign": "1",
"ledger": "Manual",
"reason": "Revolver Advance",
"trantype": "Revolver Borrow"
},
"source": "PNCC",
"ret_val": {
"ini": "ADVANCE",
"acctn": "The HC Operating Company OPERA",
"trans": "Miscellaneous Credits"
}
},
{
"map": "Trans Type",
"mapped": {
"sign": "1",
"ledger": "Manual",
"trantype": "Collections"
},
"source": "PNCC",
"ret_val": {
"ini": "DEPOSIT:",
"acctn": "The HC Operating Company FBO P",
"trans": "Miscellaneous Credits"
}
},
{
"map": "Trans Type",
"mapped": {
"sign": "1",
"ledger": "Manual",
"reason": "Misc Credit",
"trantype": "Disbursement"
},
"source": "PNCC",
"ret_val": {
"ini": "MISC CREDIT",
"acctn": "The HC Operating Company OPERA",
"trans": "Miscellaneous Credits"
}
},
{
"map": "Trans Type",
"mapped": {
"sign": "1",
"ledger": "Manual",
"reason": "Revolver Payment",
"trantype": "Revolver Borrow"
},
"source": "PNCC",
"ret_val": {
"ini": "PAYMENT",
"acctn": "The HC Operating Company FBO P",
"trans": "Miscellaneous Credits"
}
},
{
"map": "Trans Type",
"mapped": {
"sign": "-1",
"ledger": "Manual",
"reason": "Revolver Payment",
"trantype": "Revolver Payment"
},
"source": "PNCC",
"ret_val": {
"ini": "PAYMENT",
"acctn": "The HC Operating Company FBO P",
"trans": "Miscellaneous Debits"
}
},
{
"map": "Trans Type",
"mapped": {
"sign": "1",
"ledger": "AR - Collections",
"trantype": "Collections"
},
"source": "PNCC",
"ret_val": {
"ini": "INTTL WIRES IN",
"acctn": "The HC Operating Company FBO P",
"trans": "Intl Money Transfer Credits"
}
},
{
"map": "Trans Type",
"mapped": {
"sign": "1",
"ledger": "Manual",
"reason": "Revolver Advance",
"trantype": "Revolver Borrow"
},
"source": "PNCC",
"ret_val": {
"ini": "PNC BANK- NJ LOAN PROCEEDS",
"acctn": "The HC Operating Company FBO P",
"trans": "Money Transfer CR-Other"
}
},
{
"map": "Trans Type",
"mapped": {
"sign": "1",
"ledger": "Manual",
"reason": "Revolver Advance",
"trantype": "Revolver Borrow"
},
"source": "PNCC",
"ret_val": {
"ini": "PNC BANK-PGH LOAN PROCEEDS",
"acctn": "The HC Operating Company OPERA",
"trans": "Money Transfer CR-Other"
}
},
{
"map": "Trans Type",
"mapped": {
"sign": "1",
"ledger": "AR - Collections",
"trantype": "Collections"
},
"source": "PNCC",
"ret_val": {
"ini": "FED WIRE IN",
"acctn": "The HC Operating Company FBO P",
"trans": "Money Transfer CR-Wire"
}
},
{
"map": "Trans Type",
"mapped": {
"sign": "1",
"ledger": "Manual",
"reason": "Returned Wires",
"trantype": "Disbursement"
},
"source": "PNCC",
"ret_val": {
"ini": "FED WIRE IN",
"acctn": "The HC Operating Company OPERA",
"trans": "Money Transfer CR-Wire"
}
},
{
"map": "Trans Type",
"mapped": {
"sign": "-1",
"ledger": "Manual",
"reason": "Returned Item",
"trantype": "Disbursement"
},
"source": "PNCC",
"ret_val": {
"ini": "BOOK TRANSFER DEBIT",
"acctn": "The HC Operating Company OPERA",
"trans": "Money Transfer DB - Other"
}
},
{
"map": "Trans Type",
"mapped": {
"sign": "-1",
"ledger": "Manual",
"reason": "Freight Wires",
"trantype": "Disbursement"
},
"source": "PNCC",
"ret_val": {
"ini": "FED WIRE OUT",
"acctn": "The HC Operating Company FREIG",
"trans": "Money Transfer DB - Wire"
}
},
{
"map": "Trans Type",
"mapped": {
"sign": "-1",
"ledger": "AP - Wire",
"trantype": "Disbursement"
},
"source": "PNCC",
"ret_val": {
"ini": "FED WIRE OUT",
"acctn": "The HC Operating Company OPERA",
"trans": "Money Transfer DB - Wire"
}
},
{
"map": "Trans Type",
"mapped": {
"sign": "-1",
"ledger": "Manual",
"trantype": "Disbursement"
},
"source": "PNCC",
"ret_val": {
"ini": "FED WIRE OUT",
"acctn": "The HC Operating Company PAYR",
"trans": "Money Transfer DB - Wire"
}
},
{
"map": "Trans Type",
"mapped": {
"sign": "1",
"ledger": "Manual",
"reason": "ZBA Funding",
"trantype": "Funding"
},
"source": "PNCC",
"ret_val": {
"ini": "FUNDS TRANSFER FROM ACCT",
"acctn": "The HC Operating Company FREIG",
"trans": "ZBA Credits"
}
},
{
"map": "Trans Type",
"mapped": {
"sign": "1",
"ledger": "Manual",
"reason": "ZBA Funding",
"trantype": "Funding"
},
"source": "PNCC",
"ret_val": {
"ini": "FUNDS TRANSFER FROM ACCT",
"acctn": "The HC Operating Company OPERA",
"trans": "ZBA Credits"
}
},
{
"map": "Trans Type",
"mapped": {
"sign": "1",
"ledger": "Manual",
"reason": "ZBA Funding",
"trantype": "Funding"
},
"source": "PNCC",
"ret_val": {
"ini": "FUNDS TRANSFER FROM ACCT",
"acctn": "The HC Operating Company PAYR",
"trans": "ZBA Credits"
}
},
{
"map": "Trans Type",
"mapped": {
"sign": "-1",
"ledger": "Manual",
"reason": "ZBA Funding",
"trantype": "Funding"
},
"source": "PNCC",
"ret_val": {
"ini": "FUNDS TRANSFER TO ACCT",
"acctn": "The HC Operating Company OPERA",
"trans": "ZBA Debits"
}
},
{
"map": "Trans Type",
"mapped": {
"sign": "-1",
"ledger": "Manual",
"reason": "ZBA Funding",
"trantype": "Funding"
},
"source": "PNCC",
"ret_val": {
"ini": "FUNDS TRANSFER TO ACCT",
"acctn": "The HC Operating Company PAYR",
"trans": "ZBA Debits"
}
},
{
"map": "Currency",
"mapped": {
"party": "The HC Canada Operating Company, Ltd.",
"ledger": "Manual",
"reason": "IC - Can to US Settlement",
"trantype": "Interco Collection"
},
"source": "PNCC",
"ret_val": {
"ini": "DEPOSIT:",
"curr1": [
"CREDIT",
"USD"
],
"curr2": [
"DEBIT",
"CAD"
]
}
},
{
"map": "Trans Type",
"mapped": {
"sign": "-1",
"party": "PNC",
"ledger": "Manual",
"reason": "Bank Fees",
"trantype": "Fees"
},
"source": "PNCC",
"ret_val": {
"ini": "CORPORATE ACCOUNT ANALYSIS CHARGE",
"acctn": "The HC Operating Company OPERA",
"trans": "Miscellaneous Fees"
}
},
{
"map": "Trans Type",
"mapped": {
"sign": "-1",
"party": "PNC",
"ledger": "Manual",
"reason": "Bank Fees",
"trantype": "Fees"
},
"source": "PNCC",
"ret_val": {
"ini": "PNC MERCHANT FINCL ADJ",
"acctn": "The HC Operating Company FBO P",
"trans": "Miscellaneous Fees"
}
},
{
"map": "Trans Type",
"mapped": {
"sign": "-1",
"ledger": "Manual",
"reason": "Revolver Payment",
"trantype": "Revolver Payment"
},
"source": "PNCC",
"ret_val": {
"ini": "PNC BANK- NJ LOAN PMTS",
"acctn": "The HC Operating Company FBO P",
"trans": "Miscellaneous Debits"
}
},
{
"map": "Trans Type",
"mapped": {
"sign": "-1",
"ledger": "Manual",
"trantype": "Disbursement"
},
"source": "PNCC",
"ret_val": {
"ini": "WITHDRAWAL:",
"acctn": "The HC Operating Company FBO P",
"trans": "Miscellaneous Debits"
}
},
{
"map": "Trans Type",
"mapped": {
"sign": "1",
"ledger": "Manual",
"reason": "Returned Item",
"trantype": "Collections"
},
"source": "PNCC",
"ret_val": {
"ini": "BOOK TRANSFER CREDIT",
"acctn": "The HC Operating Company FBO P",
"trans": "Money Transfer CR-Other"
}
},
{
"map": "Trans Type",
"mapped": {
"sign": "1",
"ledger": "Manual",
"reason": "Returned Item",
"trantype": "Collections"
},
"source": "PNCC",
"ret_val": {
"ini": "BOOK TRANSFER CREDIT",
"acctn": "The HC Operating Company OPERA",
"trans": "Money Transfer CR-Other"
}
},
{
"map": "Trans Type",
"mapped": {
"sign": "1",
"ledger": "Manual",
"reason": "Returned Item",
"trantype": "Collections"
},
"source": "PNCC",
"ret_val": {
"ini": "BOOK TRANSFER CREDIT GHFTDD DDA CREDIT",
"acctn": "The HC Operating Company FBO P",
"trans": "Money Transfer CR-Other"
}
},
{
"map": "Trans Type",
"mapped": {
"sign": "1",
"ledger": "Manual",
"reason": "Payroll Credits",
"trantype": "Disbursement"
},
"source": "PNCC",
"ret_val": {
"ini": "19UDV",
"acctn": "The HC Operating Company PAYR",
"trans": "ACH Credits"
}
},
{
"map": "Trans Type",
"mapped": {
"sign": "1",
"ledger": "AR - Collections",
"trantype": "Collections"
},
"source": "PNCC",
"ret_val": {
"ini": "ACH CREDIT RECEIVED",
"acctn": "The HC Operating Company FBO P",
"trans": "ACH Credits"
}
},
{
"map": "Trans Type",
"mapped": {
"sign": "1",
"ledger": "Manual",
"reason": "Payroll Credits",
"trantype": "Disbursement"
},
"source": "PNCC",
"ret_val": {
"ini": "ACH CREDIT RECEIVED",
"acctn": "The HC Operating Company PAYR",
"trans": "ACH Credits"
}
},
{
"map": "Trans Type",
"mapped": {
"sign": "1",
"ledger": "Manual",
"reason": "AP ACH Returned",
"trantype": "Disbursement"
},
"source": "PNCC",
"ret_val": {
"ini": "ACH CREDIT RETURN",
"acctn": "The HC Operating Company OPERA",
"trans": "ACH Credits"
}
},
{
"map": "Trans Type",
"mapped": {
"sign": "-1",
"ledger": "Manual",
"reason": "Auto ACH Out",
"trantype": "Disbursement"
},
"source": "PNCC",
"ret_val": {
"ini": "ACH DEBIT RECEIVED",
"acctn": "The HC Operating Company FBO P",
"trans": "ACH Debits"
}
},
{
"map": "Trans Type",
"mapped": {
"sign": "-1",
"ledger": "Manual",
"reason": "Auto ACH Out",
"trantype": "Disbursement"
},
"source": "PNCC",
"ret_val": {
"ini": "ACH DEBIT RECEIVED",
"acctn": "The HC Operating Company OPERA",
"trans": "ACH Debits"
}
},
{
"map": "Trans Type",
"mapped": {
"sign": "-1",
"ledger": "Manual",
"reason": "Auto ACH Out",
"trantype": "Disbursement"
},
"source": "PNCC",
"ret_val": {
"ini": "ACH DEBIT RECEIVED",
"acctn": "The HC Operating Company PAYR",
"trans": "ACH Debits"
}
},
{
"map": "Trans Type",
"mapped": {
"sign": "-1",
"ledger": "AP - Check Run",
"trantype": "Disbursement"
},
"source": "PNCC",
"ret_val": {
"ini": "CASHED CHECK",
"acctn": "The HC Operating Company OPERA",
"trans": "Checks Paid"
}
},
{
"map": "Trans Type",
"mapped": {
"sign": "-1",
"ledger": "Manual",
"reason": "Payroll Checks",
"trantype": "Disbursement"
},
"source": "PNCC",
"ret_val": {
"ini": "CASHED CHECK",
"acctn": "The HC Operating Company PAYR",
"trans": "Checks Paid"
}
},
{
"map": "Trans Type",
"mapped": {
"sign": "-1",
"ledger": "Manual",
"reason": "Freight Checks",
"trantype": "Disbursement"
},
"source": "PNCC",
"ret_val": {
"ini": "CHECK",
"acctn": "The HC Operating Company FREIG",
"trans": "Checks Paid"
}
},
{
"map": "Trans Type",
"mapped": {
"sign": "-1",
"ledger": "AP - Check Run",
"trantype": "Disbursement"
},
"source": "PNCC",
"ret_val": {
"ini": "CHECK",
"acctn": "The HC Operating Company OPERA",
"trans": "Checks Paid"
}
},
{
"map": "Trans Type",
"mapped": {
"sign": "-1",
"ledger": "Manual",
"reason": "Payroll Checks",
"trantype": "Disbursement"
},
"source": "PNCC",
"ret_val": {
"ini": "CHECK",
"acctn": "The HC Operating Company PAYR",
"trans": "Checks Paid"
}
},
{
"map": "Trans Type",
"mapped": {
"sign": "-1",
"ledger": "Manual",
"reason": "Freight Checks",
"trantype": "Disbursement"
},
"source": "PNCC",
"ret_val": {
"ini": "SUBSTITUTE CHK",
"acctn": "The HC Operating Company FREIG",
"trans": "Checks Paid"
}
},
{
"map": "Trans Type",
"mapped": {
"sign": "-1",
"ledger": "AP - Check Run",
"trantype": "Disbursement"
},
"source": "PNCC",
"ret_val": {
"ini": "SUBSTITUTE CHK",
"acctn": "The HC Operating Company OPERA",
"trans": "Checks Paid"
}
},
{
"map": "Trans Type",
"mapped": {
"sign": "-1",
"ledger": "Manual",
"reason": "Returned Deposit NSF",
"trantype": "Collections"
},
"source": "PNCC",
"ret_val": {
"ini": "RET DEP ITEM NSF UN",
"acctn": "The HC Operating Company FBO P",
"trans": "Deposited Items Returned"
}
},
{
"map": "Currency",
"mapped": {
"party": "The HC Canada Operating Company, Ltd.",
"ledger": "Manual",
"reason": "IC - US to CAN Settlement",
"trantype": "Interco Funding"
},
"source": "PNCC",
"ret_val": {
"ini": "WITHDRAWAL:",
"curr1": [
"DEBIT",
"USD"
],
"curr2": [
"CREDIT",
"CAD"
]
}
},
{
"map": "Currency",
"mapped": {
"party": "The HC Canada Operating Company, Ltd.",
"ledger": "Manual",
"reason": "IC - round-trip settlement return",
"trantype": "Interco Collection"
},
"source": "PNCC",
"ret_val": {
"ini": "DEPOSIT:",
"curr1": [
"CREDIT",
"USD"
],
"curr2": [
"DEBIT",
"USD"
]
}
},
{
"map": "Currency",
"mapped": {
"party": "The HC Canada Operating Company, Ltd.",
"ledger": "Manual",
"reason": "IC - Can to US Settlement",
"trantype": "Interco Collection"
},
"source": "PNCC",
"ret_val": {
"ini": "DEPOSIT:",
"curr1": [
"DEBIT",
"USD"
],
"curr2": [
"CREDIT",
"CAD"
]
}
},
{
"map": "Trans Type",
"mapped": {
"sign": "1",
"ledger": "Manual",
"trantype": "Collections"
},
"source": "PNCC",
"ret_val": {
"ini": "DEPOSIT:",
"acctn": "The HC Operating Company OPERA",
"trans": "Miscellaneous Credits"
}
},
{
"map": "Trans Type",
"mapped": {
"sign": "-1",
"ledger": "Manual",
"reason": "Payroll Checks",
"trantype": "Disbursement"
},
"source": "PNCC",
"ret_val": {
"ini": "SUBSTITUTE CHK",
"acctn": "The HC Operating Company PAYR",
"trans": "Checks Paid"
}
},
{
"map": "Trans Type",
"mapped": {
"sign": "-1",
"ledger": "manual",
"trantype": "Disbursement"
},
"source": "PNCC",
"ret_val": {
"ini": "ACH DEBIT RETURN",
"acctn": "The HC Operating Company OPERA",
"trans": "ACH Debits"
}
},
{
"map": "Trans Type",
"mapped": {
"sign": "1",
"ledger": "manual",
"trantype": "Disbursement"
},
"source": "PNCC",
"ret_val": {
"ini": "ACH CREDIT SETTLEMENT",
"acctn": "The HC Operating Company OPERA",
"trans": "ACH Credits"
}
},
{
"map": "Trans Type",
"mapped": {
"sign": "-1",
"ledger": "Manual",
"trantype": "Disbursement"
},
"source": "PNCC",
"ret_val": {
"ini": "CHECK",
"acctn": "The HC Operating Company FBO P",
"trans": "Checks Paid"
}
},
{
"map": "Trans Type",
"mapped": {
"sign": "1",
"ledger": "AR - Collections",
"trantype": "Collections"
},
"source": "PNCC",
"ret_val": {
"ini": "POSTING CORRECTION RETURN CK",
"acctn": "The HC Operating Company FBO P",
"trans": "Detail Credit Adjustments"
}
}
]

View File

@ -0,0 +1,30 @@
--transactions with date in download format for constraint
COPY
(
SELECT
r."Schedule#"
,to_char(r."PostDate",'mm/dd/yyyy') "PostDate"
,r."Assn#"
,r."Coll#"
,COALESCE(r."AdvanceRate",0) "AdvanceRate"
,COALESCE(r."Sales",0) "Sales"
,COALESCE(r."Credits & Adjustments",0) "Credits & Adjustments"
,COALESCE(r."Gross Collections",0) "Gross Collections"
,COALESCE(r."CollateralBalance",0) "CollateralBalance"
,COALESCE(r."MaxEligible",0) "MaxEligible"
,COALESCE(r."Ineligible Amount",0) "Ineligible Amount"
,COALESCE(r."Reserve Amount",0) "Reserve Amount"
FROM
tps.trans
JOIN LATERAL jsonb_populate_record(NULL::tps.pncl, rec) r ON TRUE
WHERE
srce = 'PNCL'
--and case when rec->>'Credits & Adjustments' is null then 'null' else '' end <> 'null'
)
TO 'C:\users\ptrowbridge\downloads\pncl.csv' WITH (format csv, header TRUE)
--source
SELECT DEFN FROM TPS.SRCE WHERE SRCE = 'PNCL'

View File

@ -0,0 +1,2 @@
curl -H "Content-Type: application/json" -X POST -d@./srce.json http://localhost:81/srce_set
curl -v -F upload=@//mnt/c/Users/ptrowbridge/Downloads/pncl.csv http://localhost:81/import?srce=PNCL

View File

@ -0,0 +1,73 @@
{
"constraint": [
"{PostDate}",
"{Schedule#}"
],
"source": "client_file",
"loading_function": "csv",
"name": "PNCL",
"schemas": {
"default": [
{
"path": "{Schedule#}",
"type": "text",
"column_name": "Schedule#"
},
{
"type": "date",
"column_name": "PostDate",
"path": "{PostDate}"
},
{
"type": "text",
"column_name": "Assn#",
"path": "{Assn#}"
},
{
"type": "text",
"column_name": "Coll#",
"path": "{Coll#}"
},
{
"type": "numeric",
"column_name": "AdvanceRate",
"path": "{AdvanceRate}"
},
{
"type": "numeric",
"column_name": "Sales",
"path": "{Sales}"
},
{
"type": "numeric",
"column_name": "Credits & Adjustments",
"path": "{Credits & Adjustments}"
},
{
"type": "numeric",
"column_name": "Gross Collections",
"path": "{Gross Collections}"
},
{
"type": "numeric",
"column_name": "CollateralBalance",
"path": "{CollateralBalance}"
},
{
"type": "numeric",
"column_name": "MaxEligible",
"path": "{MaxEligible}"
},
{
"type": "numeric",
"column_name": "Ineligible Amount",
"path": "{Ineligible Amount}"
},
{
"type": "numeric",
"column_name": "Reserve Amount",
"path": "{Reserve Amount}"
}
]
}
}

View File

@ -0,0 +1,32 @@
--transactions with date in download format for constraint
COPY
(
SELECT
r."Loan#"
,to_char(r."Post Date",'mm/dd/yyyy') "Post Date"
,to_char(r."Effective Date",'mm/dd/yyyy') "Effective Date"
,r."Reference #"
,r."Description"
,r."Advances"
,r."Adjustments"
,r."Payments"
,r."Loan Balance"
FROM
tps.trans
JOIN LATERAL jsonb_populate_record(NULL::tps.pnco, rec) r ON TRUE
WHERE
srce = 'PNCO'
)
TO 'C:\users\ptrowbridge\downloads\pnco.csv' WITH (format csv, header TRUE)
--source
SELECT DEFN FROM TPS.SRCE WHERE SRCE = 'PNCO'
--mapdef
SELECT jsonb_agg(row_to_json(x)::jsonb) FROM (SELECT srce, target "name", regex, seq "sequence" FROM tps.map_rm WHERE srce = 'PNCO') x
--map values
SELECT jsonb_agg(row_to_JSON(x)::jsonb) FROM (SELECT srce "source", target "map", retval ret_val, "map" mapped FROM tps.map_rv WHERE srce = 'PNCO') X

View File

@ -0,0 +1,2 @@
curl -H "Content-Type: application/json" -X POST -d@./srce.json http://localhost:81/srce_set
curl -v -F upload=@//mnt/c/Users/ptrowbridge/Downloads/pnco.csv http://localhost:81/import?srce=PNCO

View File

@ -0,0 +1,60 @@
{
"name": "PNCO",
"source": "client_file",
"loading_function": "csv",
"constraint": [
"{Post Date}",
"{Effective Date}",
"{Loan#}",
"{Reference #}"
],
"schemas": {
"default": [
{
"path": "{Loan#}",
"type": "text",
"column_name":"Loan#"
},
{
"path": "{Post Date}",
"type": "date",
"column_name":"Post Date"
},
{
"path": "{Effective Date}",
"type": "date",
"column_name":"Effective Date"
},
{
"path": "{Reference #}",
"type": "text",
"column_name":"Reference #"
},
{
"path": "{Description}",
"type": "text",
"column_name":"Description"
},
{
"path": "{Advances}",
"type": "numeric",
"column_name":"Advances"
},
{
"path": "{Adjustments}",
"type": "numeric",
"column_name":"Adjustments"
},
{
"path": "{Payments}",
"type": "numeric",
"column_name":"Payments"
},
{
"path": "{Loan Balance}",
"type": "numeric",
"column_name":"Loan Balance"
}
]
}
}

View File

@ -0,0 +1,3 @@
curl -H "Content-Type: application/json" -X POST -d@./srce.json http://localhost:81/srce_set
curl -H "Content-Type: application/json" -X POST -d@./mapdef.json http://localhost:81/mapdef_set
curl -v -F upload=@//mnt/c/Users/ptrowbridge/Downloads/WMPD.csv http://localhost:81/import?srce=WMPD

View File

@ -0,0 +1,63 @@
--source
COPY (SELECT DEFN FROM TPS.SRCE WHERE SRCE = 'WMPD') TO 'C:\users\ptrowbridge\documents\tps_etl\deploy\reload\wmpd\srce.json' WITH (FORMAT TEXT, HEADER FALSE)
--mapdef
COPY (SELECT jsonb_agg(row_to_json(x)::jsonb) FROM (SELECT srce, target "name", regex, seq "sequence" FROM tps.map_rm WHERE srce = 'WMPD') x) TO 'C:\users\ptrowbridge\documents\tps_etl\deploy\reload\wmpd\map.json' WITH (FORMAT TEXT, HEADER FALSE)
--map values
SELECT jsonb_agg(row_to_JSON(x)::jsonb) FROM (SELECT srce "source", target "map", retval ret_val, "map" mapped FROM tps.map_rv WHERE srce = 'WMPD') X
--records
copy (
select
r."Carrier",
r."SCAC",
r."Mode",
r."Pro #",
r."B/L",
r."Pd Amt",
r."Loc#",
r."Pcs",
r."Wgt",
r."Chk#",
r."Pay Dt",
r."Acct #",
r."I/O",
r."Sh Nm",
r."Sh City",
r."Sh St",
r."Sh Zip",
r."Cons Nm",
r."D City ",
r."D St",
r."D Zip",
r."Sh Dt",
r."Inv Dt",
r."Customs Entry#",
r."Miles",
r."Frt Class",
r."Master B/L"
from
tps.trans
join lateral jsonb_populate_record(null::tps.WMPD, rec) r on true
where
srce = 'WMPD'
order by
r."Pay Dt" asc
) to
'C:\users\ptrowbridge\downloads\WMPD.csv' with (format csv, header true);
--rebuild source def to include PATH
SELECT
ae.r
||jsonb_build_object(
'path',
(
'{'||(ae.r->>'column_name')||'}'
)
)
FROM
tps.srce
JOIN LATERAL jsonb_array_elements(defn->'schemas'->'default') ae(r) ON TRUE
WHERE
srce = 'WMPD'

View File

@ -0,0 +1,148 @@
{
"name": "WMPD",
"source": "client_file",
"loading_function": "csv",
"constraint": [
"{Pay Dt}",
"{Carrier}"
],
"schemas": {
"default": [
{
"path": "{Carrier}",
"type": "text",
"column_name": "Carrier"
},
{
"path": "{SCAC}",
"type": "text",
"column_name": "SCAC"
},
{
"path": "{Mode}",
"type": "text",
"column_name": "Mode"
},
{
"path": "{Pro #}",
"type": "text",
"column_name": "Pro #"
},
{
"path": "{B/L}",
"type": "text",
"column_name": "B/L"
},
{
"path": "{Pd Amt}",
"type": "numeric",
"column_name": "Pd Amt"
},
{
"path": "{Loc#}",
"type": "text",
"column_name": "Loc#"
},
{
"path": "{Pcs}",
"type": "numeric",
"column_name": "Pcs"
},
{
"path": "{Wgt}",
"type": "numeric",
"column_name": "Wgt"
},
{
"path": "{Chk#}",
"type": "numeric",
"column_name": "Chk#"
},
{
"path": "{Pay Dt}",
"type": "date",
"column_name": "Pay Dt"
},
{
"path": "{Acct #}",
"type": "text",
"column_name": "Acct #"
},
{
"path": "{I/O}",
"type": "text",
"column_name": "I/O"
},
{
"path": "{Sh Nm}",
"type": "text",
"column_name": "Sh Nm"
},
{
"path": "{Sh City}",
"type": "text",
"column_name": "Sh City"
},
{
"path": "{Sh St}",
"type": "text",
"column_name": "Sh St"
},
{
"path": "{Sh Zip}",
"type": "text",
"column_name": "Sh Zip"
},
{
"path": "{Cons Nm}",
"type": "text",
"column_name": "Cons Nm"
},
{
"path": "{D City }",
"type": "text",
"column_name": "D City "
},
{
"path": "{D St}",
"type": "text",
"column_name": "D St"
},
{
"path": "{D Zip}",
"type": "text",
"column_name": "D Zip"
},
{
"path": "{Sh Dt}",
"type": "date",
"column_name": "Sh Dt"
},
{
"path": "{Inv Dt}",
"type": "date",
"column_name": "Inv Dt"
},
{
"path": "{Customs Entry#}",
"type": "text",
"column_name": "Customs Entry#"
},
{
"path": "{Miles}",
"type": "numeric",
"column_name": "Miles"
},
{
"path": "{Frt Class}",
"type": "text",
"column_name": "Frt Class"
},
{
"path": "{Master B/L}",
"type": "text",
"column_name": "Master B/L"
}
]
}
}

View File

@ -311,25 +311,26 @@ AS
 $f$
 DECLARE
     --_schema text;
-    _path text[];
     --_srce text;
     _sql text;
 BEGIN
     --_schema:= 'default';
-    _path:= ARRAY['schemas',_schema]::text[];
     --_srce:= 'dcard';
     SELECT
-        'DROP VIEW IF EXISTS tpsv.'||_srce||'_'||_path[2]||'; CREATE VIEW tpsv.'||_srce||'_'||_path[2]||' AS SELECT id, logid, '||string_agg('(allj#>>'''||r.PATH::text||''')::'||r.type||' AS "'||r.column_name||'"',', ')||' FROM tps.trans WHERE srce = '''||_srce||''';'
+        'DROP VIEW IF EXISTS tpsv.'||s.srce||'_'||(list.e->>'name')||'; CREATE VIEW tpsv.'||s.srce||'_'||(list.e->>'name')||' AS SELECT id, logid, allj, '||string_agg('(allj#>>'''||rec.PATH::text||''')::'||rec.type||' AS "'||rec.column_name||'"',', ')||' FROM tps.trans WHERE srce = '''||s.srce||''';'
     INTO
         _sql
     FROM
-        tps.srce
-        JOIN LATERAL jsonb_array_elements(defn#>_path) ae(v) ON TRUE
-        JOIN LATERAL jsonb_to_record (ae.v) AS r(PATH text[], "type" text, column_name text) ON TRUE
+        tps.srce s
+        JOIN LATERAL jsonb_array_elements(s.defn->'schemas') list (e) ON TRUE
+        JOIN LATERAL jsonb_array_elements(list.e->'columns') as cols(e) ON TRUE
+        JOIN LATERAL jsonb_to_record (cols.e) AS rec( PATH text[], "type" text, column_name text) ON TRUE
     WHERE
         srce = _srce
+        AND list.e->>'name' = _schema
     GROUP BY
-        srce.srce;
+        s.srce
+        ,list.e;
     RETURN _sql;
     RAISE NOTICE '%',_sql;
@ -1983,4 +1984,167 @@ ORDER BY
     ,l.target
     ,l."count" desc;
 END;
-$f$
+$f$;
--setup function to delete a single source
DROP FUNCTION IF EXISTS tps.srce_delete(jsonb);
CREATE FUNCTION tps.srce_delete(_defn jsonb) RETURNS jsonb
AS
$f$
DECLARE
_message jsonb;
_MESSAGE_TEXT text;
_PG_EXCEPTION_DETAIL text;
_PG_EXCEPTION_HINT text;
_rebuild BOOLEAN;
BEGIN
-------------------------------do delete---------------------------------
DELETE FROM tps.srce WHERE srce = _defn->>'name';
--could move this record to a "recycle bin" table for a certain period of time
--need to handle cascading record deletes
---------------------------set message-----------------------------------
_message:=
(
$$
{
"status":"complete",
"message":"source was permanently deleted"
}
$$::jsonb
);
RETURN _message;
EXCEPTION WHEN OTHERS THEN
GET STACKED DIAGNOSTICS
_MESSAGE_TEXT = MESSAGE_TEXT,
_PG_EXCEPTION_DETAIL = PG_EXCEPTION_DETAIL,
_PG_EXCEPTION_HINT = PG_EXCEPTION_HINT;
_message:=
($$
{
"status":"fail",
"message":"error dropping the source"
}
$$::jsonb)
||jsonb_build_object('message_text',_MESSAGE_TEXT)
||jsonb_build_object('pg_exception_detail',_PG_EXCEPTION_DETAIL);
RETURN _message;
END;
$f$
LANGUAGE plpgsql;
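A hypothetical invocation; per the body above, only the "name" key of the supplied object is consulted:

--sketch only: permanently drop the DCARD source definition
SELECT tps.srce_delete('{"name":"DCARD"}'::jsonb);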
/*
This function takes an array of definition objects, keyed by the "name" element.
It forces the entire body of sources to match what is received.
*/
DROP FUNCTION IF EXISTS tps.srce_overwrite_all(jsonb);
CREATE FUNCTION tps.srce_overwrite_all(_defn jsonb) RETURNS jsonb
AS
$f$
DECLARE
_message jsonb;
_MESSAGE_TEXT text;
_PG_EXCEPTION_DETAIL text;
_PG_EXCEPTION_HINT text;
_rebuild BOOLEAN;
_list text;
BEGIN
WITH
--retain the results of the update by srce
_set AS (
SELECT
j.rn rn
,j.e->>'name' srce
,j.e defn
FROM
jsonb_array_elements(_defn) WITH ORDINALITY j(e, rn)
)
--full join
,_full AS (
SELECT
COALESCE(_srce.srce,_set.srce) srce
,CASE COALESCE(_set.srce,'DELETE') WHEN 'DELETE' THEN 'DELETE' ELSE 'SET' END actn
,COALESCE(_set.defn,_srce.defn) defn
FROM
tps.srce _srce
FULL OUTER JOIN _set ON
_set.srce = _srce.srce
)
--call functions from list
,_do_set AS (
SELECT
f.srce
,f.actn
,setd.message
FROM
_full f
JOIN LATERAL tps.srce_set(defn) setd(message) ON f.actn = 'SET'
--dual left joins to functions that touch the same table cause the first left join's actions to be undone
--LEFT JOIN LATERAL tps.srce_delete(defn) deld(message) ON f.actn = 'DELETE'
)
,_do_del AS (
SELECT
f.srce
,f.actn
,deld.message
FROM
_full f
JOIN LATERAL tps.srce_delete(defn) deld(message) ON f.actn = 'DELETE'
)
--aggregate all the messages into one message
----
---- should look at rolling back the whole thing if one of the functions returns a fail; a stored procedure could do this.
----
SELECT
jsonb_agg(m)
INTO
_message
FROM
(
SELECT
jsonb_build_object('source',srce,'status',message->>'status','message',message->>'message') m
FROM
_do_set
UNION ALL
SELECT
jsonb_build_object('source',srce,'status',message->>'status','message',message->>'message') m
FROM
_do_del
) x;
SELECT string_agg(srce,',') INTO _list FROM tps.srce;
RAISE NOTICE 'multi source list: %', _list;
RETURN _message;
SELECT string_agg(srce,',') INTO _list FROM tps.srce;
RAISE NOTICE 'after return: %', _list;
EXCEPTION WHEN OTHERS THEN
GET STACKED DIAGNOSTICS
_MESSAGE_TEXT = MESSAGE_TEXT,
_PG_EXCEPTION_DETAIL = PG_EXCEPTION_DETAIL,
_PG_EXCEPTION_HINT = PG_EXCEPTION_HINT;
_message:=
($$
{
"status":"fail",
"message":"error updating sources"
}
$$::jsonb)
||jsonb_build_object('message_text',_MESSAGE_TEXT)
||jsonb_build_object('pg_exception_detail',_PG_EXCEPTION_DETAIL);
RETURN _message;
END;
$f$
LANGUAGE plpgsql;
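A hypothetical invocation; the abbreviated definition below is illustrative, not a complete source body. Any source already in tps.srce but absent from the supplied array is deleted:

--sketch only: force tps.srce to contain exactly the sources listed
SELECT tps.srce_overwrite_all('[{"name":"DCARD","source":"client_file","loading_function":"csv","constraint":["{Trans. Date}"],"schemas":{"default":[]}}]'::jsonb);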

View File

@ -0,0 +1,257 @@
CREATE OR REPLACE FUNCTION tps.trans_insert_map() RETURNS TRIGGER
AS
$f$
DECLARE
_cnt INTEGER;
BEGIN
IF (TG_OP = 'INSERT') THEN
WITH
--------------------apply regex operations to transactions-----------------------------------------------------------------------------------
rx AS (
SELECT
t.srce,
t.id,
t.rec,
m.target,
m.seq,
regex->'regex'->>'function' regex_function,
e.v ->> 'field' result_key_name,
e.v ->> 'key' target_json_path,
e.v ->> 'flag' regex_options_flag,
e.v->>'map' map_intention,
e.v->>'retain' retain_result,
e.v->>'regex' regex_expression,
e.rn target_item_number,
COALESCE(mt.rn,rp.rn,1) result_number,
mt.mt rx_match,
rp.rp rx_replace,
CASE e.v->>'map'
WHEN 'y' THEN
e.v->>'field'
ELSE
null
END map_key,
CASE e.v->>'map'
WHEN 'y' THEN
CASE regex->'regex'->>'function'
WHEN 'extract' THEN
CASE WHEN array_upper(mt.mt,1)=1
THEN to_json(mt.mt[1])
ELSE array_to_json(mt.mt)
END::jsonb
WHEN 'replace' THEN
to_jsonb(rp.rp)
ELSE
'{}'::jsonb
END
ELSE
NULL
END map_val,
CASE e.v->>'retain'
WHEN 'y' THEN
e.v->>'field'
ELSE
NULL
END retain_key,
CASE e.v->>'retain'
WHEN 'y' THEN
CASE regex->'regex'->>'function'
WHEN 'extract' THEN
CASE WHEN array_upper(mt.mt,1)=1
THEN to_json(trim(mt.mt[1]))
ELSE array_to_json(mt.mt)
END::jsonb
WHEN 'replace' THEN
to_jsonb(rtrim(rp.rp))
ELSE
'{}'::jsonb
END
ELSE
NULL
END retain_val
FROM
--------------------------start with all regex maps------------------------------------------------------------------------------------
tps.map_rm m
--------------------------isolate matching basis to limit map to only look at certain json---------------------------------------------
LEFT JOIN LATERAL jsonb_array_elements(m.regex->'regex'->'where') w(v) ON TRUE
--------------------------join to main transaction table but only certain key/values are included--------------------------------------
INNER JOIN new_table t ON
t.srce = m.srce AND
t.rec @> w.v
--------------------------break out array of regular expressions in the map------------------------------------------------------------
LEFT JOIN LATERAL jsonb_array_elements(m.regex->'regex'->'defn') WITH ORDINALITY e(v, rn) ON true
--------------------------each regex references a path to the target value, extract the target from the reference and do regex---------
LEFT JOIN LATERAL regexp_matches(t.rec #>> ((e.v ->> 'key')::text[]), e.v ->> 'regex'::text,COALESCE(e.v ->> 'flag','')) WITH ORDINALITY mt(mt, rn) ON
m.regex->'regex'->>'function' = 'extract'
--------------------------same as above but for a replacement type function------------------------------------------------------------
LEFT JOIN LATERAL regexp_replace(t.rec #>> ((e.v ->> 'key')::text[]), e.v ->> 'regex'::text, e.v ->> 'replace'::text,e.v ->> 'flag') WITH ORDINALITY rp(rp, rn) ON
m.regex->'regex'->>'function' = 'replace'
ORDER BY
t.id DESC,
m.target,
e.rn,
COALESCE(mt.rn,rp.rn,1)
)
--SELECT count(*) FROM rx LIMIT 100
, agg_to_target_items AS (
SELECT
srce
,id
,target
,seq
,map_intention
,regex_function
,target_item_number
,result_key_name
,target_json_path
,CASE WHEN map_key IS NULL
THEN
NULL
ELSE
jsonb_build_object(
map_key,
CASE WHEN max(result_number) = 1
THEN
jsonb_agg(map_val ORDER BY result_number) -> 0
ELSE
jsonb_agg(map_val ORDER BY result_number)
END
)
END map_val
,CASE WHEN retain_key IS NULL
THEN
NULL
ELSE
jsonb_build_object(
retain_key,
CASE WHEN max(result_number) = 1
THEN
jsonb_agg(retain_val ORDER BY result_number) -> 0
ELSE
jsonb_agg(retain_val ORDER BY result_number)
END
)
END retain_val
FROM
rx
GROUP BY
srce
,id
,target
,seq
,map_intention
,regex_function
,target_item_number
,result_key_name
,target_json_path
,map_key
,retain_key
)
--SELECT * FROM agg_to_target_items LIMIT 100
, agg_to_target AS (
SELECT
srce
,id
,target
,seq
,map_intention
,tps.jsonb_concat_obj(COALESCE(map_val,'{}'::JSONB)) map_val
,jsonb_strip_nulls(tps.jsonb_concat_obj(COALESCE(retain_val,'{}'::JSONB))) retain_val
FROM
agg_to_target_items
GROUP BY
srce
,id
,target
,seq
,map_intention
ORDER BY
id
)
--SELECT * FROM agg_to_target
, link_map AS (
SELECT
a.srce
,a.id
,a.target
,a.seq
,a.map_intention
,a.map_val
,a.retain_val retain_value
,v.map
FROM
agg_to_target a
LEFT OUTER JOIN tps.map_rv v ON
v.srce = a.srce AND
v.target = a.target AND
v.retval = a.map_val
)
--SELECT * FROM link_map
, agg_to_id AS (
SELECT
srce
,id
,tps.jsonb_concat_obj(COALESCE(retain_value,'{}'::jsonb) ORDER BY seq DESC) retain_val
,tps.jsonb_concat_obj(COALESCE(map,'{}'::jsonb)) map
FROM
link_map
GROUP BY
srce
,id
)
--SELECT agg_to_id.srce, agg_to_id.id, jsonb_pretty(agg_to_id.retain_val) , jsonb_pretty(agg_to_id.map) FROM agg_to_id ORDER BY id desc LIMIT 100
--create a complete list of all new inserts assuming some do not have maps (left join)
,join_all AS (
SELECT
n.srce
,n.id
,n.rec
,a.retain_val parse
,a.map
,n.rec||COALESCE(a.map||a.retain_val,'{}'::jsonb) allj
FROM
new_table n
LEFT OUTER JOIN agg_to_id a ON
a.id = n.id
)
--update trans with join_all recs
UPDATE
tps.trans t
SET
parse = a.parse
,map = a.map
,allj = a.allj
FROM
join_all a
WHERE
t.id = a.id;
END IF;
RETURN NULL;
END;
$f$ LANGUAGE plpgsql;
CREATE TRIGGER trans_insert
AFTER INSERT ON tps.trans
REFERENCING NEW TABLE AS new_table
FOR EACH STATEMENT EXECUTE PROCEDURE tps.trans_insert_map();
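A minimal sketch of an insert that fires the statement-level trigger (hypothetical record; this bypasses the normal import path and is shown only to illustrate when the mapping runs):

```
--assumes the DCARD source and its regex maps already exist
INSERT INTO tps.trans (srce, rec)
VALUES ('DCARD', $${"Description":"TARGET 00001234 CLEVELAND OH"}$$::jsonb);
--the trigger then back-fills parse, map, and allj on the new rows
```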

File diff suppressed because one or more lines are too long

View File

@ -0,0 +1,184 @@
DO
$f$
DECLARE
_t text;
_c text;
_log_info jsonb;
_log_id text;
_cnt numeric;
_message jsonb;
_recs jsonb;
_srce text;
_defn jsonb;
_MESSAGE_TEXT text;
_PG_EXCEPTION_DETAIL text;
_PG_EXCEPTION_HINT text;
BEGIN
_srce := 'DMAPI';
_recs:= $$[{"id":1,"doc":{"rows":[{"elements":[{"status":"OK","distance":{"text":"225 mi","value":361940},"duration":{"text":"3 hours 50 mins","value":13812}}]}],"status":"OK","origin_addresses":["Washington, DC, USA"],"destination_addresses":["New York, NY, USA"]}}]$$::jsonb;
----------------------------------------------------test if source exists----------------------------------------------------------------------------------
SELECT
defn
INTO
_defn
FROM
tps.srce
WHERE
srce = _srce;
IF _defn IS NULL THEN
_message:=
format(
$$
{
"status":"fail",
"message":"source %L does not exists"
}
$$,
_srce
)::jsonb;
RAISE NOTICE '%', _message;
END IF;
-------------unwrap the json record and apply the path(s) of the constraint to build a constraint key per record-----------------------------------------------------------------------------------
WITH
pending_list AS (
SELECT
_srce srce
,j.rec
,j.id
--aggregate back to the record since multiple paths may be listed in the constraint
--it is unclear why the "->>0" is required to correctly extract the text array from the jsonb
,tps.jsonb_concat_obj(
jsonb_build_object(
--the new json key is the path itself
cons.path->>0
,j.rec#>((cons.path->>0)::text[])
)
) json_key
FROM
jsonb_array_elements(_recs) WITH ORDINALITY j(rec,id)
JOIN LATERAL jsonb_array_elements(_defn->'constraint') WITH ORDINALITY cons(path, seq) ON TRUE
GROUP BY
j.rec
,j.id
)
-----------create a unique list of keys from staged rows------------------------------------------------------------------------------------------
, pending_keys AS (
SELECT DISTINCT
json_key
FROM
pending_list
)
-----------list of keys already loaded to tps-----------------------------------------------------------------------------------------------------
, matched_keys AS (
SELECT DISTINCT
k.json_key
FROM
pending_keys k
INNER JOIN tps.trans t ON
t.ic = k.json_key
)
-----------return unique keys that are not already in tps.trans-----------------------------------------------------------------------------------
, unmatched_keys AS (
SELECT
json_key
FROM
pending_keys
EXCEPT
SELECT
json_key
FROM
matched_keys
)
--------build log record--------------------------------------------------------------------------------------------
, logged AS (
INSERT INTO
tps.trans_log (info)
SELECT
JSONB_BUILD_OBJECT('time_stamp',CURRENT_TIMESTAMP)
||JSONB_BUILD_OBJECT('srce',_srce)
--||JSONB_BUILD_OBJECT('path',_path)
||JSONB_BUILD_OBJECT('not_inserted',
(
SELECT
jsonb_agg(json_key)
FROM
matched_keys
)
)
||JSONB_BUILD_OBJECT('inserted',
(
SELECT
jsonb_agg(json_key)
FROM
unmatched_keys
)
)
RETURNING *
)
-----------insert pending rows that have key with no trans match-----------------------------------------------------------------------------------
--need to look into mapping the transactions prior to loading
, inserted AS (
INSERT INTO
tps.trans (srce, rec, ic, logid)
SELECT
pl.srce
,pl.rec
,pl.json_key
,logged.id
FROM
pending_list pl
INNER JOIN unmatched_keys u ON
u.json_key = pl.json_key
CROSS JOIN logged
ORDER BY
pl.id ASC
----this conflict is only if an exact duplicate rec json happens, which will be rejected
----therefore, records may not be inserted due to any matches with certain json fields, or if the entire json is a duplicate; the reason is not specified
RETURNING *
)
SELECT
id
,info
INTO
_log_id
,_log_info
FROM
logged;
--RAISE NOTICE 'import logged under id# %, info: %', _log_id, _log_info;
_message:=
(
$$
{
"status":"complete"
}
$$::jsonb
)||jsonb_build_object('details',_log_info);
RAISE NOTICE '%', _message;
END;
$f$
LANGUAGE plpgsql

View File

@ -0,0 +1,110 @@
CREATE OR REPLACE FUNCTION tps.srce_map_def_set(_defn jsonb) RETURNS jsonb
AS
$f$
DECLARE
_message jsonb;
_MESSAGE_TEXT text;
_PG_EXCEPTION_DETAIL text;
_PG_EXCEPTION_HINT text;
BEGIN
WITH
------------------------------------------stage rows to insert-----------------------------------------------------
stg AS (
SELECT
--data source
ae.r->>'srce' srce
--map name
,ae.r->>'name' target
--map definition
,ae.r regex
--map aggregation sequence
,(ae.r->>'sequence')::INTEGER seq
--history definition
,jsonb_build_object(
'hist_defn',ae.r
,'effective',jsonb_build_array(CURRENT_TIMESTAMP,null::timestamptz)
) || '[]'::jsonb hist
--flag rows whose regex is new or changed from the stored version (forces a rebuild)
,COALESCE(m.regex->>'regex' <> ae.r->>'regex', TRUE) rebuild
FROM
jsonb_array_elements(_defn) ae(r)
LEFT OUTER JOIN tps.map_rm m ON
m.srce = ae.r->>'srce'
AND m.target = ae.r->>'name'
)
---------------------------------------do the upsert-------------------------------------------------------------------
,ins AS (
INSERT INTO
tps.map_rm (srce, target, regex, seq, hist)
SELECT
srce
,target
,regex
,seq
,hist
FROM
stg
ON CONFLICT ON CONSTRAINT map_rm_pk DO UPDATE SET
srce = excluded.srce
,target = excluded.target
,regex = excluded.regex
,seq = excluded.seq
,hist =
--the new definition going to position -0-
jsonb_build_object(
'hist_defn',excluded.regex
,'effective',jsonb_build_array(CURRENT_TIMESTAMP,null::timestamptz)
)
--the previous definition, set upper bound of effective range which was previously null
|| jsonb_set(
map_rm.hist
,'{0,effective,1}'::text[]
,to_jsonb(CURRENT_TIMESTAMP)
)
)
---------------------------get list of sources whose maps changed--------------------------------------------------------
, to_update AS (
    SELECT DISTINCT
        srce
    FROM
        stg
WHERE
rebuild = TRUE
)
--------------------------call the map overwrite for each source and return all the messages into message----------------
/*the whole source must be overwritten because if an element is no longer returned it should be wiped from the data*/
SELECT
jsonb_agg(x.message)
INTO
_message
FROM
to_update
JOIN LATERAL tps.srce_map_overwrite(to_update.srce) AS x(message) ON TRUE;
_message:= jsonb_build_object('status','complete','message','definition has been set','overwrite_results',COALESCE(_message,'[]'::jsonb));
return _message;
EXCEPTION WHEN OTHERS THEN
    GET STACKED DIAGNOSTICS
_MESSAGE_TEXT = MESSAGE_TEXT,
_PG_EXCEPTION_DETAIL = PG_EXCEPTION_DETAIL,
_PG_EXCEPTION_HINT = PG_EXCEPTION_HINT;
_message:=
($$
{
"status":"fail",
"message":"error setting definition"
}
$$::jsonb)
||jsonb_build_object('message_text',_MESSAGE_TEXT)
||jsonb_build_object('pg_exception_detail',_PG_EXCEPTION_DETAIL);
return _message;
END;
$f$
language plpgsql
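A usage sketch with one hypothetical instruction, following the definition shape used by the other map files in this repo (srce/name/sequence plus a nested regex object):

```
SELECT tps.srce_map_def_set($$[{
    "srce":"DCARD",
    "name":"First 20",
    "sequence":1,
    "regex":{
        "function":"extract",
        "where":[{}],
        "defn":[{"key":"{Description}","field":"f20","regex":".{1,20}","map":"y","retain":"y"}]
    }
}]$$::jsonb);
```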

View File

@ -0,0 +1,98 @@
CREATE OR REPLACE FUNCTION tps.srce_map_def_set_single(_defn jsonb, _rebuild BOOLEAN) RETURNS jsonb
AS
$f$
DECLARE
_message jsonb;
_MESSAGE_TEXT text;
_PG_EXCEPTION_DETAIL text;
_PG_EXCEPTION_HINT text;
BEGIN
---------test if anything is changing--------------------------------------------------------------------------------------------
IF _defn->'regex' = (SELECT regex->'regex' FROM tps.map_rm WHERE srce = _defn->>'srce' and target = _defn->>'name') THEN
_message:=
(
$$
{
"status":"complete",
"message":"map was not different no action taken"
}
$$::jsonb
);
RETURN _message;
END IF;
---------do the rebuild-----------------------------------------------------------------------------------------------------------
INSERT INTO
tps.map_rm (srce, target, regex, seq, hist)
SELECT
--data source
_defn->>'srce'
--map name
,_defn->>'name'
--map definition
,_defn
--map aggregation sequence
,(_defn->>'sequence')::INTEGER
--history definition
,jsonb_build_object(
'hist_defn',_defn
,'effective',jsonb_build_array(CURRENT_TIMESTAMP,null::timestamptz)
) || '[]'::jsonb
ON CONFLICT ON CONSTRAINT map_rm_pk DO UPDATE SET
srce = excluded.srce
,target = excluded.target
,regex = excluded.regex
,seq = excluded.seq
,hist =
--the new definition going to position -0-
jsonb_build_object(
'hist_defn',excluded.regex
,'effective',jsonb_build_array(CURRENT_TIMESTAMP,null::timestamptz)
)
--the previous definition, set upper bound of effective range which was previously null
|| jsonb_set(
map_rm.hist
,'{0,effective,1}'::text[]
,to_jsonb(CURRENT_TIMESTAMP)
);
--------------if rebuild was flag call the rebuild--------------------------------------------------------------------------------
IF _rebuild THEN
SELECT
x.message||'{"step":"overwrite maps in tps.trans"}'::jsonb
INTO
_message
FROM
tps.srce_map_overwrite(_defn->>'srce') as X(message);
END IF;
return _message;
EXCEPTION WHEN OTHERS THEN
GET STACKED DIAGNOSTICS
_MESSAGE_TEXT = MESSAGE_TEXT,
_PG_EXCEPTION_DETAIL = PG_EXCEPTION_DETAIL,
_PG_EXCEPTION_HINT = PG_EXCEPTION_HINT;
_message:=
($$
{
"status":"fail",
"message":"error setting definition"
}
$$::jsonb)
||jsonb_build_object('message_text',_MESSAGE_TEXT)
||jsonb_build_object('pg_exception_detail',_PG_EXCEPTION_DETAIL);
return _message;
END;
$f$
language plpgsql

View File

@ -0,0 +1,222 @@
DROP FUNCTION IF EXISTS tps.test_regex(jsonb);
CREATE FUNCTION tps.test_regex(_defn jsonb) RETURNS jsonb
LANGUAGE plpgsql
AS
$f$
DECLARE
_rslt jsonb;
BEGIN
WITH
--------------------apply regex operations to transactions---------------------------------------------------------------------------------
rx AS (
SELECT
t.srce,
t.id,
t.rec,
m.target,
m.seq,
regex->'regex'->>'function' regex_function,
e.v ->> 'field' result_key_name,
e.v ->> 'key' target_json_path,
e.v ->> 'flag' regex_options_flag,
e.v->>'map' map_intention,
e.v->>'retain' retain_result,
e.v->>'regex' regex_expression,
e.rn target_item_number,
COALESCE(mt.rn,rp.rn,1) result_number,
mt.mt rx_match,
rp.rp rx_replace,
CASE e.v->>'map'
WHEN 'y' THEN
e.v->>'field'
ELSE
null
END map_key,
CASE e.v->>'map'
WHEN 'y' THEN
CASE regex->'regex'->>'function'
WHEN 'extract' THEN
CASE WHEN array_upper(mt.mt,1)=1
THEN to_json(mt.mt[1])
ELSE array_to_json(mt.mt)
END::jsonb
WHEN 'replace' THEN
to_jsonb(rp.rp)
ELSE
'{}'::jsonb
END
ELSE
NULL
END map_val,
CASE e.v->>'retain'
WHEN 'y' THEN
e.v->>'field'
ELSE
NULL
END retain_key,
CASE e.v->>'retain'
WHEN 'y' THEN
CASE regex->'regex'->>'function'
WHEN 'extract' THEN
CASE WHEN array_upper(mt.mt,1)=1
THEN to_json(trim(mt.mt[1]))
ELSE array_to_json(mt.mt)
END::jsonb
WHEN 'replace' THEN
to_jsonb(rtrim(rp.rp))
ELSE
'{}'::jsonb
END
ELSE
NULL
END retain_val
FROM
--------------------------start with all regex maps------------------------------------------------------------------------------------
(SELECT _defn->>'srce' srce, _defn->>'name' target, _defn regex, (_defn->>'sequence')::numeric seq) m
--------------------------isolate matching basis to limit map to only look at certain json---------------------------------------------
LEFT JOIN LATERAL jsonb_array_elements(m.regex->'regex'->'where') w(v) ON TRUE
--------------------------break out array of regular expressions in the map------------------------------------------------------------
LEFT JOIN LATERAL jsonb_array_elements(m.regex->'regex'->'defn') WITH ORDINALITY e(v, rn) ON true
--------------------------join to main transaction table but only certain key/values are included--------------------------------------
INNER JOIN tps.trans t ON
t.srce = m.srce AND
t.rec @> w.v
--------------------------each regex references a path to the target value, extract the target from the reference and do regex---------
LEFT JOIN LATERAL regexp_matches(t.rec #>> ((e.v ->> 'key')::text[]), e.v ->> 'regex'::text,COALESCE(e.v ->> 'flag','')) WITH ORDINALITY mt(mt, rn) ON
m.regex->'regex'->>'function' = 'extract'
--------------------------same as above but for a replacement type function------------------------------------------------------------
LEFT JOIN LATERAL regexp_replace(t.rec #>> ((e.v ->> 'key')::text[]), e.v ->> 'regex'::text, e.v ->> 'replace'::text,e.v ->> 'flag') WITH ORDINALITY rp(rp, rn) ON
m.regex->'regex'->>'function' = 'replace'
ORDER BY
t.id DESC,
m.target,
e.rn,
COALESCE(mt.rn,rp.rn,1)
)
--SELECT * FROM rx LIMIT 100
, agg_to_target_items AS (
SELECT
srce
,id
,target
,seq
,map_intention
,regex_function
,target_item_number
,result_key_name
,target_json_path
,CASE WHEN map_key IS NULL
THEN
NULL
ELSE
jsonb_build_object(
map_key,
CASE WHEN max(result_number) = 1
THEN
jsonb_agg(map_val ORDER BY result_number) -> 0
ELSE
jsonb_agg(map_val ORDER BY result_number)
END
)
END map_val
,CASE WHEN retain_key IS NULL
THEN
NULL
ELSE
jsonb_build_object(
retain_key,
CASE WHEN max(result_number) = 1
THEN
jsonb_agg(retain_val ORDER BY result_number) -> 0
ELSE
jsonb_agg(retain_val ORDER BY result_number)
END
)
END retain_val
FROM
rx
GROUP BY
srce
,id
,target
,seq
,map_intention
,regex_function
,target_item_number
,result_key_name
,target_json_path
,map_key
,retain_key
)
--SELECT * FROM agg_to_target_items LIMIT 100
, agg_to_target AS (
SELECT
srce
,id
,target
,seq
,map_intention
,tps.jsonb_concat_obj(COALESCE(map_val,'{}'::JSONB)) map_val
,jsonb_strip_nulls(tps.jsonb_concat_obj(COALESCE(retain_val,'{}'::JSONB))) retain_val
FROM
agg_to_target_items
GROUP BY
srce
,id
,target
,seq
,map_intention
)
, agg_to_ret AS (
SELECT
srce
,target
,seq
,map_intention
,map_val
,retain_val
,count(*) "count"
FROM
agg_to_target
GROUP BY
srce
,target
,seq
,map_intention
,map_val
,retain_val
)
,agg_to_id AS (
SELECT
l.srce
,l.target
,l.map_val
,l."count"
FROM
agg_to_ret l
ORDER BY
l.srce
,l.target
,l."count" desc
)
SELECT
jsonb_agg(row_to_json(agg_to_id)::jsonb)
INTO
_rslt
FROM
agg_to_id;
RETURN _rslt;
END;
$f$;
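A usage sketch (same hypothetical definition shape as above); the function returns the distinct values the regex would produce, with counts, without writing anything to tps.trans:

```
SELECT jsonb_pretty(tps.test_regex($${
    "srce":"DCARD",
    "name":"First 20",
    "sequence":1,
    "regex":{
        "function":"extract",
        "where":[{}],
        "defn":[{"key":"{Description}","field":"f20","regex":".{1,20}","map":"y","retain":"y"}]
    }
}$$::jsonb));
```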

View File

@ -0,0 +1,211 @@
DROP FUNCTION IF EXISTS tps.test_regex_recs(jsonb);
CREATE FUNCTION tps.test_regex_recs(_defn jsonb) RETURNS jsonb
LANGUAGE plpgsql
AS
$f$
DECLARE
_rslt jsonb;
BEGIN
WITH
--------------------apply regex operations to transactions---------------------------------------------------------------------------------
rx AS (
SELECT
t.srce,
t.id,
t.rec,
m.target,
m.seq,
regex->'regex'->>'function' regex_function,
e.v ->> 'field' result_key_name,
e.v ->> 'key' target_json_path,
e.v ->> 'flag' regex_options_flag,
e.v->>'map' map_intention,
e.v->>'retain' retain_result,
e.v->>'regex' regex_expression,
e.rn target_item_number,
COALESCE(mt.rn,rp.rn,1) result_number,
mt.mt rx_match,
rp.rp rx_replace,
CASE e.v->>'map'
WHEN 'y' THEN
e.v->>'field'
ELSE
null
END map_key,
CASE e.v->>'map'
WHEN 'y' THEN
CASE regex->'regex'->>'function'
WHEN 'extract' THEN
CASE WHEN array_upper(mt.mt,1)=1
THEN to_json(mt.mt[1])
ELSE array_to_json(mt.mt)
END::jsonb
WHEN 'replace' THEN
to_jsonb(rp.rp)
ELSE
'{}'::jsonb
END
ELSE
NULL
END map_val,
CASE e.v->>'retain'
WHEN 'y' THEN
e.v->>'field'
ELSE
NULL
END retain_key,
CASE e.v->>'retain'
WHEN 'y' THEN
CASE regex->'regex'->>'function'
WHEN 'extract' THEN
CASE WHEN array_upper(mt.mt,1)=1
THEN to_json(trim(mt.mt[1]))
ELSE array_to_json(mt.mt)
END::jsonb
WHEN 'replace' THEN
to_jsonb(rtrim(rp.rp))
ELSE
'{}'::jsonb
END
ELSE
NULL
END retain_val
FROM
--------------------------start with all regex maps------------------------------------------------------------------------------------
(SELECT _defn->>'srce' srce, _defn->>'name' target, _defn regex, (_defn->>'sequence')::numeric seq) m
--------------------------isolate matching basis to limit map to only look at certain json---------------------------------------------
LEFT JOIN LATERAL jsonb_array_elements(m.regex->'regex'->'where') w(v) ON TRUE
--------------------------break out array of regular expressions in the map------------------------------------------------------------
LEFT JOIN LATERAL jsonb_array_elements(m.regex->'regex'->'defn') WITH ORDINALITY e(v, rn) ON true
--------------------------join to main transaction table but only certain key/values are included--------------------------------------
INNER JOIN tps.trans t ON
t.srce = m.srce AND
t.rec @> w.v
--------------------------each regex references a path to the target value, extract the target from the reference and do regex---------
LEFT JOIN LATERAL regexp_matches(t.rec #>> ((e.v ->> 'key')::text[]), e.v ->> 'regex'::text,COALESCE(e.v ->> 'flag','')) WITH ORDINALITY mt(mt, rn) ON
m.regex->'regex'->>'function' = 'extract'
--------------------------same as above but for a replacement type function------------------------------------------------------------
LEFT JOIN LATERAL regexp_replace(t.rec #>> ((e.v ->> 'key')::text[]), e.v ->> 'regex'::text, e.v ->> 'replace'::text,e.v ->> 'flag') WITH ORDINALITY rp(rp, rn) ON
m.regex->'regex'->>'function' = 'replace'
ORDER BY
t.id DESC,
m.target,
e.rn,
COALESCE(mt.rn,rp.rn,1)
)
--SELECT * FROM rx LIMIT 100
, agg_to_target_items AS (
SELECT
srce
,id
,rec
,target
,seq
,map_intention
,regex_function
,target_item_number
,result_key_name
,target_json_path
,CASE WHEN map_key IS NULL
THEN
NULL
ELSE
jsonb_build_object(
map_key,
CASE WHEN max(result_number) = 1
THEN
jsonb_agg(map_val ORDER BY result_number) -> 0
ELSE
jsonb_agg(map_val ORDER BY result_number)
END
)
END map_val
,CASE WHEN retain_key IS NULL
THEN
NULL
ELSE
jsonb_build_object(
retain_key,
CASE WHEN max(result_number) = 1
THEN
jsonb_agg(retain_val ORDER BY result_number) -> 0
ELSE
jsonb_agg(retain_val ORDER BY result_number)
END
)
END retain_val
FROM
rx
GROUP BY
srce
,id
,rec
,target
,seq
,map_intention
,regex_function
,target_item_number
,result_key_name
,target_json_path
,map_key
,retain_key
)
--SELECT * FROM agg_to_target_items LIMIT 100
, agg_to_target AS (
SELECT
srce
,id
,rec
,target
,seq
,map_intention
,tps.jsonb_concat_obj(COALESCE(map_val,'{}'::JSONB)) map_val
,jsonb_strip_nulls(tps.jsonb_concat_obj(COALESCE(retain_val,'{}'::JSONB))) retain_val
FROM
agg_to_target_items
GROUP BY
srce
,id
,rec
,target
,seq
,map_intention
)
, agg_to_ret AS (
SELECT
srce
,target
,seq
,map_intention
,map_val
,retain_val
,count(*) "count"
,jsonb_agg(rec) rec
FROM
agg_to_target
GROUP BY
srce
,target
,seq
,map_intention
,map_val
,retain_val
)
SELECT
jsonb_agg(row_to_json(agg_to_ret)::jsonb)
INTO
_rslt
FROM
agg_to_ret;
RETURN _rslt;
END;
$f$;

View File

@ -0,0 +1,64 @@
DROP FUNCTION IF EXISTS tps.map_rv_set;
CREATE OR REPLACE FUNCTION tps.map_rv_set(_defn jsonb) RETURNS jsonb
AS
$f$
DECLARE
_message jsonb;
_MESSAGE_TEXT text;
_PG_EXCEPTION_DETAIL text;
_PG_EXCEPTION_HINT text;
BEGIN
INSERT INTO
tps.map_rv (srce, target, retval, map, hist)
SELECT
r.source
,r.map
,r.ret_val
,r.mapped
,jsonb_build_object(
'hist_defn',mapped
,'effective',jsonb_build_array(CURRENT_TIMESTAMP,null::timestamptz)
) || '[]'::jsonb
FROM
JSONB_ARRAY_ELEMENTS(_defn) WITH ORDINALITY ae(r,s)
JOIN LATERAL jsonb_to_record(ae.r) r(source TEXT,map TEXT, ret_val jsonb, mapped jsonb) ON TRUE
ON CONFLICT ON CONSTRAINT map_rv_pk DO UPDATE
SET
map = excluded.map
,hist =
--the new definition going to position -0-
jsonb_build_object(
'hist_defn',excluded.map
,'effective',jsonb_build_array(CURRENT_TIMESTAMP,null::timestamptz)
)
--the previous definition, set upper bound of effective range which was previously null
|| jsonb_set(
map_rv.hist
,'{0,effective,1}'::text[]
,to_jsonb(CURRENT_TIMESTAMP)
);
-------return message--------------------------------------------------------------------------------------------------
_message:= jsonb_build_object('status','complete');
RETURN _message;
EXCEPTION WHEN OTHERS THEN
GET STACKED DIAGNOSTICS
_MESSAGE_TEXT = MESSAGE_TEXT,
_PG_EXCEPTION_DETAIL = PG_EXCEPTION_DETAIL,
_PG_EXCEPTION_HINT = PG_EXCEPTION_HINT;
_message:=
($$
{
"status":"fail",
"message":"error setting map value"
}
$$::jsonb)
||jsonb_build_object('message_text',_MESSAGE_TEXT)
||jsonb_build_object('pg_exception_detail',_PG_EXCEPTION_DETAIL);
RETURN _message;
END;
$f$
LANGUAGE plpgsql;
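A usage sketch; the key names (source, map, ret_val, mapped) follow the jsonb_to_record() call above, and the values are hypothetical:

```
SELECT tps.map_rv_set($$[{
    "source":"DCARD",
    "map":"First 20",
    "ret_val":{"f20":"TARGET 00001234 CLEV"},
    "mapped":{"party":"Target","reason":"groceries"}
}]$$::jsonb);
```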

View File

@ -0,0 +1,249 @@
DROP FUNCTION IF EXISTS tps.report_unmapped;
CREATE FUNCTION tps.report_unmapped(_srce text) RETURNS TABLE
(
source text,
map text,
ret_val jsonb,
"count" bigint
)
LANGUAGE plpgsql
AS
$f$
BEGIN
/*
first get distinct target json values
then apply regex
*/
RETURN QUERY
WITH
--------------------apply regex operations to transactions---------------------------------------------------------------------------------
rx AS (
SELECT
t.srce,
t.id,
t.rec,
m.target,
m.seq,
regex->'regex'->>'function' regex_function,
e.v ->> 'field' result_key_name,
e.v ->> 'key' target_json_path,
e.v ->> 'flag' regex_options_flag,
e.v->>'map' map_intention,
e.v->>'retain' retain_result,
e.v->>'regex' regex_expression,
e.rn target_item_number,
COALESCE(mt.rn,rp.rn,1) result_number,
mt.mt rx_match,
rp.rp rx_replace,
CASE e.v->>'map'
WHEN 'y' THEN
e.v->>'field'
ELSE
null
END map_key,
CASE e.v->>'map'
WHEN 'y' THEN
CASE regex->'regex'->>'function'
WHEN 'extract' THEN
CASE WHEN array_upper(mt.mt,1)=1
THEN to_json(mt.mt[1])
ELSE array_to_json(mt.mt)
END::jsonb
WHEN 'replace' THEN
to_jsonb(rp.rp)
ELSE
'{}'::jsonb
END
ELSE
NULL
END map_val,
CASE e.v->>'retain'
WHEN 'y' THEN
e.v->>'field'
ELSE
NULL
END retain_key,
CASE e.v->>'retain'
WHEN 'y' THEN
CASE regex->'regex'->>'function'
WHEN 'extract' THEN
CASE WHEN array_upper(mt.mt,1)=1
THEN to_json(trim(mt.mt[1]))
ELSE array_to_json(mt.mt)
END::jsonb
WHEN 'replace' THEN
to_jsonb(rtrim(rp.rp))
ELSE
'{}'::jsonb
END
ELSE
NULL
END retain_val
FROM
--------------------------start with all regex maps------------------------------------------------------------------------------------
tps.map_rm m
--------------------------isolate matching basis to limit map to only look at certain json---------------------------------------------
LEFT JOIN LATERAL jsonb_array_elements(m.regex->'regex'->'where') w(v) ON TRUE
--------------------------join to main transaction table but only certain key/values are included--------------------------------------
INNER JOIN tps.trans t ON
t.srce = m.srce AND
t.rec @> w.v
--------------------------break out array of regular expressions in the map------------------------------------------------------------
LEFT JOIN LATERAL jsonb_array_elements(m.regex->'regex'->'defn') WITH ORDINALITY e(v, rn) ON true
--------------------------each regex references a path to the target value, extract the target from the reference and do regex---------
LEFT JOIN LATERAL regexp_matches(t.rec #>> ((e.v ->> 'key')::text[]), e.v ->> 'regex'::text,COALESCE(e.v ->> 'flag','')) WITH ORDINALITY mt(mt, rn) ON
m.regex->'regex'->>'function' = 'extract'
--------------------------same as above but for a replacement type function------------------------------------------------------------
LEFT JOIN LATERAL regexp_replace(t.rec #>> ((e.v ->> 'key')::text[]), e.v ->> 'regex'::text, e.v ->> 'replace'::text,e.v ->> 'flag') WITH ORDINALITY rp(rp, rn) ON
m.regex->'regex'->>'function' = 'replace'
WHERE
--t.allj IS NULL
t.srce = _srce AND
e.v @> '{"map":"y"}'::jsonb
--rec @> '{"Transaction":"ACH Credits","Transaction":"ACH Debits"}'
--rec @> '{"Description":"CHECK 93013270 086129935"}'::jsonb
ORDER BY
t.id DESC,
m.target,
e.rn,
COALESCE(mt.rn,rp.rn,1)
)
--SELECT * FROM rx LIMIT 100
, agg_to_target_items AS (
SELECT
srce
,id
,target
,seq
,map_intention
,regex_function
,target_item_number
,result_key_name
,target_json_path
,CASE WHEN map_key IS NULL
THEN
NULL
ELSE
jsonb_build_object(
map_key,
CASE WHEN max(result_number) = 1
THEN
jsonb_agg(map_val ORDER BY result_number) -> 0
ELSE
jsonb_agg(map_val ORDER BY result_number)
END
)
END map_val
,CASE WHEN retain_key IS NULL
THEN
NULL
ELSE
jsonb_build_object(
retain_key,
CASE WHEN max(result_number) = 1
THEN
jsonb_agg(retain_val ORDER BY result_number) -> 0
ELSE
jsonb_agg(retain_val ORDER BY result_number)
END
)
END retain_val
FROM
rx
GROUP BY
srce
,id
,target
,seq
,map_intention
,regex_function
,target_item_number
,result_key_name
,target_json_path
,map_key
,retain_key
)
--SELECT * FROM agg_to_target_items LIMIT 100
, agg_to_target AS (
SELECT
srce
,id
,target
,seq
,map_intention
,tps.jsonb_concat_obj(COALESCE(map_val,'{}'::JSONB)) map_val
,jsonb_strip_nulls(tps.jsonb_concat_obj(COALESCE(retain_val,'{}'::JSONB))) retain_val
FROM
agg_to_target_items
GROUP BY
srce
,id
,target
,seq
,map_intention
)
, agg_to_ret AS (
SELECT
srce
,target
,seq
,map_intention
,map_val
,retain_val
,count(*) "count"
FROM
agg_to_target
GROUP BY
srce
,target
,seq
,map_intention
,map_val
,retain_val
)
, link_map AS (
SELECT
a.srce
,a.target
,a.seq
,a.map_intention
,a.map_val
,a."count"
,a.retain_val
,v.map mapped_val
FROM
agg_to_ret a
LEFT OUTER JOIN tps.map_rv v ON
v.srce = a.srce AND
v.target = a.target AND
v.retval = a.map_val
)
SELECT
l.srce
,l.target
,l.map_val
,l."count"
FROM
link_map l
WHERE
l.mapped_val IS NULL
ORDER BY
l.srce
,l.target
,l."count" desc;
END;
$f$
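A usage sketch (assumes a DCARD source exists); each row is a regex result that has no entry yet in tps.map_rv:

```
SELECT source, map, ret_val, "count"
FROM tps.report_unmapped('DCARD')
ORDER BY "count" DESC;
```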

View File

@ -0,0 +1,257 @@
DROP FUNCTION IF EXISTS tps.report_unmapped_recs;
CREATE FUNCTION tps.report_unmapped_recs(_srce text) RETURNS TABLE
(
source text,
map text,
ret_val jsonb,
"count" bigint,
recs jsonb
)
LANGUAGE plpgsql
AS
$f$
BEGIN
/*
first get distinct target json values
then apply regex
*/
RETURN QUERY
WITH
--------------------apply regex operations to transactions---------------------------------------------------------------------------------
rx AS (
SELECT
t.srce,
t.id,
t.rec,
m.target,
m.seq,
regex->'regex'->>'function' regex_function,
e.v ->> 'field' result_key_name,
e.v ->> 'key' target_json_path,
e.v ->> 'flag' regex_options_flag,
e.v->>'map' map_intention,
e.v->>'retain' retain_result,
e.v->>'regex' regex_expression,
e.rn target_item_number,
COALESCE(mt.rn,rp.rn,1) result_number,
mt.mt rx_match,
rp.rp rx_replace,
CASE e.v->>'map'
WHEN 'y' THEN
e.v->>'field'
ELSE
null
END map_key,
CASE e.v->>'map'
WHEN 'y' THEN
CASE regex->'regex'->>'function'
WHEN 'extract' THEN
CASE WHEN array_upper(mt.mt,1)=1
THEN to_json(mt.mt[1])
ELSE array_to_json(mt.mt)
END::jsonb
WHEN 'replace' THEN
to_jsonb(rp.rp)
ELSE
'{}'::jsonb
END
ELSE
NULL
END map_val,
CASE e.v->>'retain'
WHEN 'y' THEN
e.v->>'field'
ELSE
NULL
END retain_key,
CASE e.v->>'retain'
WHEN 'y' THEN
CASE regex->'regex'->>'function'
WHEN 'extract' THEN
CASE WHEN array_upper(mt.mt,1)=1
THEN to_json(trim(mt.mt[1]))
ELSE array_to_json(mt.mt)
END::jsonb
WHEN 'replace' THEN
to_jsonb(rtrim(rp.rp))
ELSE
'{}'::jsonb
END
ELSE
NULL
END retain_val
FROM
--------------------------start with all regex maps------------------------------------------------------------------------------------
tps.map_rm m
--------------------------isolate matching basis to limit map to only look at certain json---------------------------------------------
LEFT JOIN LATERAL jsonb_array_elements(m.regex->'regex'->'where') w(v) ON TRUE
--------------------------join to main transaction table but only certain key/values are included--------------------------------------
INNER JOIN tps.trans t ON
t.srce = m.srce AND
t.rec @> w.v
--------------------------break out array of regular expressions in the map------------------------------------------------------------
LEFT JOIN LATERAL jsonb_array_elements(m.regex->'regex'->'defn') WITH ORDINALITY e(v, rn) ON true
--------------------------each regex references a path to the target value, extract the target from the reference and do regex---------
LEFT JOIN LATERAL regexp_matches(t.rec #>> ((e.v ->> 'key')::text[]), e.v ->> 'regex'::text,COALESCE(e.v ->> 'flag','')) WITH ORDINALITY mt(mt, rn) ON
m.regex->'regex'->>'function' = 'extract'
--------------------------same as above but for a replacement type function------------------------------------------------------------
LEFT JOIN LATERAL regexp_replace(t.rec #>> ((e.v ->> 'key')::text[]), e.v ->> 'regex'::text, e.v ->> 'replace'::text,e.v ->> 'flag') WITH ORDINALITY rp(rp, rn) ON
m.regex->'regex'->>'function' = 'replace'
WHERE
--t.allj IS NULL
t.srce = _srce AND
e.v @> '{"map":"y"}'::jsonb
--rec @> '{"Transaction":"ACH Credits","Transaction":"ACH Debits"}'
--rec @> '{"Description":"CHECK 93013270 086129935"}'::jsonb
ORDER BY
t.id DESC,
m.target,
e.rn,
COALESCE(mt.rn,rp.rn,1)
)
--SELECT * FROM rx LIMIT 100
, agg_to_target_items AS (
SELECT
srce
,id
,rec
,target
,seq
,map_intention
,regex_function
,target_item_number
,result_key_name
,target_json_path
,CASE WHEN map_key IS NULL
THEN
NULL
ELSE
jsonb_build_object(
map_key,
CASE WHEN max(result_number) = 1
THEN
jsonb_agg(map_val ORDER BY result_number) -> 0
ELSE
jsonb_agg(map_val ORDER BY result_number)
END
)
END map_val
,CASE WHEN retain_key IS NULL
THEN
NULL
ELSE
jsonb_build_object(
retain_key,
CASE WHEN max(result_number) = 1
THEN
jsonb_agg(retain_val ORDER BY result_number) -> 0
ELSE
jsonb_agg(retain_val ORDER BY result_number)
END
)
END retain_val
FROM
rx
GROUP BY
srce
,id
,rec
,target
,seq
,map_intention
,regex_function
,target_item_number
,result_key_name
,target_json_path
,map_key
,retain_key
)
--SELECT * FROM agg_to_target_items LIMIT 100
, agg_to_target AS (
SELECT
srce
,id
,rec
,target
,seq
,map_intention
,tps.jsonb_concat_obj(COALESCE(map_val,'{}'::JSONB)) map_val
,jsonb_strip_nulls(tps.jsonb_concat_obj(COALESCE(retain_val,'{}'::JSONB))) retain_val
FROM
agg_to_target_items
GROUP BY
srce
,id
,rec
,target
,seq
,map_intention
)
, agg_to_ret AS (
SELECT
srce
,target
,seq
,map_intention
,map_val
,retain_val
,count(*) "count"
,jsonb_agg(rec) rec
FROM
agg_to_target
GROUP BY
srce
,target
,seq
,map_intention
,map_val
,retain_val
)
, link_map AS (
SELECT
a.srce
,a.target
,a.seq
,a.map_intention
,a.map_val
,a."count"
,a.rec
,a.retain_val
,v.map mapped_val
FROM
agg_to_ret a
LEFT OUTER JOIN tps.map_rv v ON
v.srce = a.srce AND
v.target = a.target AND
v.retval = a.map_val
)
SELECT
l.srce
,l.target
,l.map_val
,l."count"
,l.rec
FROM
link_map l
WHERE
l.mapped_val IS NULL
ORDER BY
l.srce
,l.target
,l."count" desc;
END;
$f$

View File

@ -0,0 +1,261 @@
CREATE OR REPLACE FUNCTION tps.srce_map_overwrite(_srce text) RETURNS jsonb
AS
$f$
DECLARE
_message jsonb;
_MESSAGE_TEXT text;
_PG_EXCEPTION_DETAIL text;
_PG_EXCEPTION_HINT text;
BEGIN
WITH
--------------------apply regex operations to transactions-----------------------------------------------------------------------------------
rx AS (
SELECT
t.srce,
t.id,
t.rec,
m.target,
m.seq,
regex->'regex'->>'function' regex_function,
e.v ->> 'field' result_key_name,
e.v ->> 'key' target_json_path,
e.v ->> 'flag' regex_options_flag,
e.v->>'map' map_intention,
e.v->>'retain' retain_result,
e.v->>'regex' regex_expression,
e.rn target_item_number,
COALESCE(mt.rn,rp.rn,1) result_number,
mt.mt rx_match,
rp.rp rx_replace,
CASE e.v->>'map'
WHEN 'y' THEN
e.v->>'field'
ELSE
null
END map_key,
CASE e.v->>'map'
WHEN 'y' THEN
CASE regex->'regex'->>'function'
WHEN 'extract' THEN
CASE WHEN array_upper(mt.mt,1)=1
THEN to_json(mt.mt[1])
ELSE array_to_json(mt.mt)
END::jsonb
WHEN 'replace' THEN
to_jsonb(rp.rp)
ELSE
'{}'::jsonb
END
ELSE
NULL
END map_val,
CASE e.v->>'retain'
WHEN 'y' THEN
e.v->>'field'
ELSE
NULL
END retain_key,
CASE e.v->>'retain'
WHEN 'y' THEN
CASE regex->'regex'->>'function'
WHEN 'extract' THEN
CASE WHEN array_upper(mt.mt,1)=1
THEN to_json(trim(mt.mt[1]))
ELSE array_to_json(mt.mt)
END::jsonb
WHEN 'replace' THEN
to_jsonb(rtrim(rp.rp))
ELSE
'{}'::jsonb
END
ELSE
NULL
END retain_val
FROM
--------------------------start with all regex maps------------------------------------------------------------------------------------
tps.map_rm m
--------------------------isolate matching basis to limit map to only look at certain json---------------------------------------------
LEFT JOIN LATERAL jsonb_array_elements(m.regex->'regex'->'where') w(v) ON TRUE
--------------------------join to main transaction table but only certain key/values are included--------------------------------------
INNER JOIN tps.trans t ON
t.srce = m.srce AND
t.rec @> w.v
--------------------------break out array of regular expressions in the map------------------------------------------------------------
LEFT JOIN LATERAL jsonb_array_elements(m.regex->'regex'->'defn') WITH ORDINALITY e(v, rn) ON true
--------------------------each regex references a path to the target value, extract the target from the reference and do regex---------
LEFT JOIN LATERAL regexp_matches(t.rec #>> ((e.v ->> 'key')::text[]), e.v ->> 'regex'::text,COALESCE(e.v ->> 'flag','')) WITH ORDINALITY mt(mt, rn) ON
m.regex->'regex'->>'function' = 'extract'
--------------------------same as above but for a replacement type function------------------------------------------------------------
LEFT JOIN LATERAL regexp_replace(t.rec #>> ((e.v ->> 'key')::text[]), e.v ->> 'regex'::text, e.v ->> 'replace'::text,e.v ->> 'flag') WITH ORDINALITY rp(rp, rn) ON
m.regex->'regex'->>'function' = 'replace'
WHERE
--t.allj IS NULL
t.srce = _srce
--rec @> '{"Transaction":"ACH Credits","Transaction":"ACH Debits"}'
--rec @> '{"Description":"CHECK 93013270 086129935"}'::jsonb
ORDER BY
t.id DESC,
m.target,
e.rn,
COALESCE(mt.rn,rp.rn,1)
)
--SELECT count(*) FROM rx LIMIT 100
, agg_to_target_items AS (
SELECT
srce
,id
,target
,seq
,map_intention
,regex_function
,target_item_number
,result_key_name
,target_json_path
,CASE WHEN map_key IS NULL
THEN
NULL
ELSE
jsonb_build_object(
map_key,
CASE WHEN max(result_number) = 1
THEN
jsonb_agg(map_val ORDER BY result_number) -> 0
ELSE
jsonb_agg(map_val ORDER BY result_number)
END
)
END map_val
,CASE WHEN retain_key IS NULL
THEN
NULL
ELSE
jsonb_build_object(
retain_key,
CASE WHEN max(result_number) = 1
THEN
jsonb_agg(retain_val ORDER BY result_number) -> 0
ELSE
jsonb_agg(retain_val ORDER BY result_number)
END
)
END retain_val
FROM
rx
GROUP BY
srce
,id
,target
,seq
,map_intention
,regex_function
,target_item_number
,result_key_name
,target_json_path
,map_key
,retain_key
)
--SELECT * FROM agg_to_target_items LIMIT 100
, agg_to_target AS (
SELECT
srce
,id
,target
,seq
,map_intention
,tps.jsonb_concat_obj(COALESCE(map_val,'{}'::JSONB)) map_val
,jsonb_strip_nulls(tps.jsonb_concat_obj(COALESCE(retain_val,'{}'::JSONB))) retain_val
FROM
agg_to_target_items
GROUP BY
srce
,id
,target
,seq
,map_intention
ORDER BY
id
)
--SELECT * FROM agg_to_target
, link_map AS (
SELECT
a.srce
,a.id
,a.target
,a.seq
,a.map_intention
,a.map_val
,a.retain_val retain_value
,v.map
FROM
agg_to_target a
LEFT OUTER JOIN tps.map_rv v ON
v.srce = a.srce AND
v.target = a.target AND
v.retval = a.map_val
)
--SELECT * FROM link_map
, agg_to_id AS (
SELECT
srce
,id
,tps.jsonb_concat_obj(COALESCE(retain_value,'{}'::jsonb) ORDER BY seq DESC) retain_val
,tps.jsonb_concat_obj(COALESCE(map,'{}'::jsonb)) map
FROM
link_map
GROUP BY
srce
,id
)
--SELECT agg_to_id.srce, agg_to_id.id, jsonb_pretty(agg_to_id.retain_val) , jsonb_pretty(agg_to_id.map) FROM agg_to_id ORDER BY id desc LIMIT 100
UPDATE
tps.trans t
SET
map = o.map,
parse = o.retain_val,
allj = t.rec||o.map||o.retain_val
FROM
agg_to_id o
WHERE
o.id = t.id;
_message:= jsonb_build_object('status','complete');
RETURN _message;
EXCEPTION WHEN OTHERS THEN
GET STACKED DIAGNOSTICS
_MESSAGE_TEXT = MESSAGE_TEXT,
_PG_EXCEPTION_DETAIL = PG_EXCEPTION_DETAIL,
_PG_EXCEPTION_HINT = PG_EXCEPTION_HINT;
_message:=
($$
{
"status":"fail",
"message":"error setting map value"
}
$$::jsonb)
||jsonb_build_object('message_text',_MESSAGE_TEXT)
||jsonb_build_object('pg_exception_detail',_PG_EXCEPTION_DETAIL);
RETURN _message;
END;
$f$
language plpgsql

View File

@ -0,0 +1,33 @@
DROP FUNCTION IF EXISTS tps.build_srce_view_sql(text, text);
CREATE OR REPLACE FUNCTION tps.build_srce_view_sql(_srce text, _schema text) RETURNS TEXT
AS
$f$
DECLARE
--_schema text;
--_srce text;
_sql text;
BEGIN
--_schema:= 'default';
--_srce:= 'dcard';
SELECT
'DROP VIEW IF EXISTS tpsv.'||s.srce||'_'||(list.e->>'name')||'; CREATE VIEW tpsv.'||s.srce||'_'||(list.e->>'name')||' AS SELECT id, logid, allj, '||string_agg('(allj#>>'''||rec.PATH::text||''')::'||rec.type||' AS "'||rec.column_name||'"',', ')||' FROM tps.trans WHERE srce = '''||s.srce||''';'
INTO
_sql
FROM
tps.srce s
JOIN LATERAL jsonb_array_elements(s.defn->'schemas') list (e) ON TRUE
JOIN LATERAL jsonb_array_elements(list.e->'columns') as cols(e) ON TRUE
JOIN LATERAL jsonb_to_record (cols.e) AS rec( PATH text[], "type" text, column_name text) ON TRUE
WHERE
srce = _srce
AND list.e->>'name' = _schema
GROUP BY
s.srce
,list.e;
RAISE NOTICE '%',_sql;
RETURN _sql;
END
$f$
LANGUAGE plpgsql;
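A usage sketch (assumes the dcard source defines a "default" schema); the generated statement can be inspected or executed directly:

```
--inspect the generated DDL
SELECT tps.build_srce_view_sql('dcard','default');
--or build the view immediately
DO $$ BEGIN EXECUTE tps.build_srce_view_sql('dcard','default'); END $$;
```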

View File

@ -0,0 +1,52 @@
--setup function to delete a single source
DROP FUNCTION IF EXISTS tps.srce_delete(jsonb);
CREATE FUNCTION tps.srce_delete(_defn jsonb) RETURNS jsonb
AS
$f$
DECLARE
_message jsonb;
_MESSAGE_TEXT text;
_PG_EXCEPTION_DETAIL text;
_PG_EXCEPTION_HINT text;
_rebuild BOOLEAN;
BEGIN
-------------------------------do delete---------------------------------
DELETE FROM tps.srce WHERE srce = _defn->>'name';
--could move this record to a "recycle bin" table for a certain period of time
--need to handle cascading record deletes
---------------------------set message-----------------------------------
_message:=
(
$$
{
"status":"complete",
"message":"source was permanently deleted"
}
$$::jsonb
);
RETURN _message;
EXCEPTION WHEN OTHERS THEN
GET STACKED DIAGNOSTICS
_MESSAGE_TEXT = MESSAGE_TEXT,
_PG_EXCEPTION_DETAIL = PG_EXCEPTION_DETAIL,
_PG_EXCEPTION_HINT = PG_EXCEPTION_HINT;
_message:=
($$
{
"status":"fail",
"message":"error dropping the source"
}
$$::jsonb)
||jsonb_build_object('message_text',_MESSAGE_TEXT)
||jsonb_build_object('pg_exception_detail',_PG_EXCEPTION_DETAIL);
RETURN _message;
END;
$f$
LANGUAGE plpgsql
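A usage sketch (hypothetical source name):

```
SELECT tps.srce_delete($${"name":"DCARD"}$$::jsonb);
```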

View File

@ -0,0 +1,108 @@
/*
This function takes an array of definition objects, where the "name" element is the primary key.
It will force the entire body of sources to match what is received.
*/
DROP FUNCTION IF EXISTS tps.srce_overwrite_all(jsonb);
CREATE FUNCTION tps.srce_overwrite_all(_defn jsonb) RETURNS jsonb
AS
$f$
DECLARE
_message jsonb;
_MESSAGE_TEXT text;
_PG_EXCEPTION_DETAIL text;
_PG_EXCEPTION_HINT text;
_rebuild BOOLEAN;
_list text;
BEGIN
WITH
--retain the results of the update by srce
_set AS (
SELECT
j.rn rn
,j.e->>'name' srce
,j.e defn
FROM
jsonb_array_elements(_defn) WITH ORDINALITY j(e, rn)
)
--full join
,_full AS (
SELECT
COALESCE(_srce.srce,_set.srce) srce
,CASE COALESCE(_set.srce,'DELETE') WHEN 'DELETE' THEN 'DELETE' ELSE 'SET' END actn
,COALESCE(_set.defn,_srce.defn) defn
FROM
tps.srce _srce
FULL OUTER JOIN _set ON
_set.srce = _srce.srce
)
--call functions from list
,_do_set AS (
SELECT
f.srce
,f.actn
,setd.message
FROM
_full f
JOIN LATERAL tps.srce_set(defn) setd(message) ON f.actn = 'SET'
--dual left joins to functions that touch the same table cause the first left join's actions to be undone
--LEFT JOIN LATERAL tps.srce_delete(defn) deld(message) ON f.actn = 'DELETE'
)
,_do_del AS (
SELECT
f.srce
,f.actn
,deld.message
FROM
_full f
JOIN LATERAL tps.srce_delete(defn) deld(message) ON f.actn = 'DELETE'
)
--aggregate all the messages into one message
----
---- should look at rolling back the whole thing if one of the functions returns a fail; a stored procedure could do this.
----
SELECT
jsonb_agg(m)
INTO
_message
FROM
(
SELECT
jsonb_build_object('source',srce,'status',message->>'status','message',message->>'message') m
FROM
_do_set
UNION ALL
SELECT
jsonb_build_object('source',srce,'status',message->>'status','message',message->>'message') m
FROM
_do_del
) x;
SELECT string_agg(srce,',') INTO _list FROM tps.srce;
RAISE NOTICE 'multi source list: %', _list;
RETURN _message;
EXCEPTION WHEN OTHERS THEN
GET STACKED DIAGNOSTICS
_MESSAGE_TEXT = MESSAGE_TEXT,
_PG_EXCEPTION_DETAIL = PG_EXCEPTION_DETAIL,
_PG_EXCEPTION_HINT = PG_EXCEPTION_HINT;
_message:=
($$
{
"status":"fail",
"message":"error updating sources"
}
$$::jsonb)
||jsonb_build_object('message_text',_MESSAGE_TEXT)
||jsonb_build_object('pg_exception_detail',_PG_EXCEPTION_DETAIL);
RETURN _message;
END;
$f$
LANGUAGE plpgsql

View File

@ -0,0 +1,144 @@
DROP FUNCTION IF EXISTS tps.srce_set(jsonb);
CREATE FUNCTION tps.srce_set(_defn jsonb) RETURNS jsonb
AS
$f$
DECLARE
_message jsonb;
_MESSAGE_TEXT text;
_PG_EXCEPTION_DETAIL text;
_PG_EXCEPTION_HINT text;
_rebuild BOOLEAN;
BEGIN
---------test if anything is changing--------------------------------------------------------------------------------------------
IF _defn = (SELECT defn FROM tps.srce WHERE srce = _defn->>'name') THEN
_message:=
(
$$
{
"status":"complete",
"message":"source was not different no action taken"
}
$$::jsonb
);
RETURN _message;
END IF;
---------if the constraint definition is changing, rebuild for existing records---------------------------------------------------
SELECT
NOT (_defn->'constraint' = (SELECT defn->'constraint' FROM tps.srce WHERE srce = _defn->>'name'))
INTO
_rebuild;
RAISE NOTICE '%',_rebuild::text;
---------do merge-----------------------------------------------------------------------------------------------------------------
INSERT INTO
tps.srce (srce, defn, hist)
SELECT
--extract name from definition
_defn->>'name'
--add definition
,_defn
--seed history with the definition and an open-ended effective range
,jsonb_build_object(
'hist_defn',_defn
,'effective',jsonb_build_array(CURRENT_TIMESTAMP,null::timestamptz)
) || '[]'::jsonb
ON CONFLICT ON CONSTRAINT srce_pkey DO UPDATE
SET
defn = _defn
,hist =
--the new definition going to position -0-
jsonb_build_object(
'hist_defn',_defn
,'effective',jsonb_build_array(CURRENT_TIMESTAMP,null::timestamptz)
)
--the previous definition, set upper bound of effective range which was previously null
|| jsonb_set(
srce.hist
,'{0,effective,1}'::text[]
,to_jsonb(CURRENT_TIMESTAMP)
);
--rebuild constraint key if necessary---------------------------------------------------------------------------------------
IF _rebuild THEN
WITH
rebuild AS (
SELECT
j.srce
,j.rec
,j.id
--aggregate back to the record since multiple paths may be listed in the constraint
,tps.jsonb_concat_obj(
jsonb_build_object(
--the new json key is the path itself
cons.path->>0
,j.rec#>((cons.path->>0)::text[])
)
) json_key
FROM
tps.trans j
INNER JOIN tps.srce s ON
s.srce = j.srce
JOIN LATERAL jsonb_array_elements(s.defn->'constraint') WITH ORDINALITY cons(path, seq) ON TRUE
WHERE
s.srce = _defn->>'name'
GROUP BY
j.rec
,j.id
)
UPDATE
tps.trans t
SET
ic = r.json_key
FROM
rebuild r
WHERE
t.id = r.id;
_message:=
(
$$
{
"status":"complete",
"message":"source set and constraint rebuilt on existing records"
}
$$::jsonb
);
ELSE
_message:=
(
$$
{
"status":"complete",
"message":"source set"
}
$$::jsonb
);
END IF;
RETURN _message;
EXCEPTION WHEN OTHERS THEN
GET STACKED DIAGNOSTICS
_MESSAGE_TEXT = MESSAGE_TEXT,
_PG_EXCEPTION_DETAIL = PG_EXCEPTION_DETAIL,
_PG_EXCEPTION_HINT = PG_EXCEPTION_HINT;
_message:=
($$
{
"status":"fail",
"message":"error importing data"
}
$$::jsonb)
||jsonb_build_object('message_text',_MESSAGE_TEXT)
||jsonb_build_object('pg_exception_detail',_PG_EXCEPTION_DETAIL);
RETURN _message;
END;
$f$
LANGUAGE plpgsql
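A minimal usage sketch (hypothetical definition; only the name and constraint keys are read by this function, the rest of the definition passes through as-is):

```
SELECT tps.srce_set($${
    "name":"DCARD",
    "source":"client_file",
    "loading_function":"csv",
    "constraint":["{Trans. Date}","{Post Date}"]
}$$::jsonb);
```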

128
database/readme.md Normal file
View File

@ -0,0 +1,128 @@
Generic Data Transformation Tool
=======================================================
The goal is to:
1. house external data and prevent duplication on insert
2. facilitate regular expression operations to extract meaningful data
3. allow outside sources to reference it (no action required) while maintaining a link to the original data
It is well suited for data from outside systems that
* requires complex transformation (parsing and mapping)
* needs the original data retained for reference
* doesn't warrant writing a custom map-reduce
use cases:
* on-going bank feeds
* jumbled product lists
* storing api results
The data is converted to json by the importing program and inserted into the database.
Regex expressions are applied to specified json components and the results can be mapped to other values.
Major Interactions
------------------------
* Source Definitions (Maint/Inquire)
* Regex Instructions (Maint/Inquire)
* Cross Reference List (Maint/Inquire)
* Run Import (Run Job)
### Interaction Details
* _Source Definitions (Maint/Inquire)_
* display a list of existing sources with display details/edit options
* create new option
* underlying function is `tps.srce_set(_name text, _defn jsonb)`
* the current definition of a source includes data based on bad presumptions:
* how to load from a csv file using `COPY`
* setup a Postgres type to reflect the associated columns (if applicable)
* _Regex Instructions (Maint/Inquire)_
* display a list of existing instruction sets with display details/edit options
* create new option
* underlying function is `tps.srce_map_def_set(_srce text, _map text, _defn jsonb, _seq int)`, which takes a source "code" and a json instruction definition
* _Cross Reference List (Maint/Inquire)_
* first step is to populate a list of values returned from the instructions (choose all or unmapped) `tps.report_unmapped(_srce text)`
* the list of rows allows additional named column(s) to be added, which are used to assign values any time the result occurs
* function to set the values of the cross reference `tps.srce_map_val_set_multi(_maps jsonb)`
* _Run Import_
* underlying function is `tps.srce_import(_path text, _srce text)` (a sketch of the whole sequence follows this list)
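A sketch of the full maintenance sequence in SQL (hypothetical DCARD values throughout; the HTTP layer posts equivalent json bodies to these functions):

```
--1. define the source
SELECT tps.srce_set($${"name":"DCARD","constraint":["{Post Date}"]}$$::jsonb);
--2. add a regex instruction set and rebuild existing records
SELECT tps.srce_map_def_set_single($${"srce":"DCARD","name":"First 20","sequence":1,
    "regex":{"function":"extract","where":[{}],
        "defn":[{"key":"{Description}","field":"f20","regex":".{1,20}","map":"y","retain":"y"}]}}$$::jsonb, TRUE);
--3. review unmapped values, then set cross references
SELECT * FROM tps.report_unmapped('DCARD');
SELECT tps.map_rv_set($$[{"source":"DCARD","map":"First 20",
    "ret_val":{"f20":"TARGET 00001234 CLEV"},"mapped":{"party":"Target"}}]$$::jsonb);
```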
source definition
----------------------------------------------------------------------
* **load data**
* the browser's role is to extract the contents of a file and send them as a post body to the backend for processing under the target function `based on srce definition`
* the backend builds a json array of all the rows to be added and sends as an argument to a database insert function
* build constraint key `based on srce definition`
* handle violations
* increment global key list (this may not be possible depending on whether a json with variable-length arrays can be traversed)
* build an import log
* run maps (as opposed to relying on trigger)
* **read data**
* the `schema` key contains either a text element or a text array in curly braces
* forcing everything to extract via `#>{}` would be cleaner but may be more expensive than `jsonb_populate_record`
* it took 5.5 seconds to parse 1,000,000 rows of an identical google distance matrix json to a 5-column temp table
* top level key to table based on `jsonb_populate_record` extracting from `tps.type` developed from `srce.defn->schema`
* custom function parsing contents based on #> operator and extracting from `srce.defn->schema`
* view that `uses the source definition` to extrapolate a table?
* a materialized table is built `based on the source definition` and any additional regex?
* add regex = alter table add column with historic updates?
* no primary key?
* every document must work out to one row
```
{
    "name":"dcard",
    "source":"client_file",
    "loading_function":"csv",
    "constraint":[
        "{Trans. Date}",
        "{Post Date}"
    ],
    "schemas":{
        "default":[
            {
                "path":"{doc,origin_addresses,0}",
                "type":"text",
                "column_name":"origin_address"
            },
            {
                "path":"{doc,destination_addresses,0}",
                "type":"text",
                "column_name":"destination_address"
            },
            {
                "path":"{doc,status}",
                "type":"text",
                "column_name":"status"
            },
            {
                "path":"{doc,rows,0,elements,0,distance,value}",
                "type":"numeric",
                "column_name":"distance"
            },
            {
                "path":"{doc,rows,0,elements,0,duration,value}",
                "type":"numeric",
                "column_name":"duration"
            }
        ],
        "version2":[]
    }
}
```
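For reference, a sketch of the kind of view SQL `tps.build_srce_view_sql` emits for a definition like the one above (the stored one-line statement is reflowed here for readability; column list abbreviated):

```
DROP VIEW IF EXISTS tpsv.dcard_default;
CREATE VIEW tpsv.dcard_default AS
SELECT
    id,
    logid,
    allj,
    (allj#>>'{doc,status}')::text AS "status",
    (allj#>>'{doc,rows,0,elements,0,distance,value}')::numeric AS "distance"
FROM
    tps.trans
WHERE
    srce = 'dcard';
```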

View File

@ -0,0 +1,245 @@
/*
first get distinct target json values
then apply regex
*/
WITH
--------------------apply regex operations to transactions---------------------------------------------------------------------------------
rx AS (
SELECT
t.srce,
t.id,
t.rec,
m.target,
m.seq,
regex->>'function' regex_function,
e.v ->> 'field' result_key_name,
e.v ->> 'key' target_json_path,
e.v ->> 'flag' regex_options_flag,
e.v->>'map' map_intention,
e.v->>'retain' retain_result,
e.v->>'regex' regex_expression,
e.rn target_item_number,
COALESCE(mt.rn,rp.rn,1) result_number,
mt.mt rx_match,
rp.rp rx_replace,
--------------------------json key name assigned to return value-----------------------------------------------------------------------
CASE e.v->>'map'
WHEN 'y' THEN
e.v->>'field'
ELSE
null
END map_key,
--------------------------json value resulting from regular expression-----------------------------------------------------------------
CASE e.v->>'map'
WHEN 'y' THEN
CASE regex->>'function'
WHEN 'extract' THEN
CASE WHEN array_upper(mt.mt,1)=1
THEN to_json(mt.mt[1])
ELSE array_to_json(mt.mt)
END::jsonb
WHEN 'replace' THEN
to_jsonb(rp.rp)
ELSE
'{}'::jsonb
END
ELSE
NULL
END map_val,
--------------------------flag for whether returned regex result is stored as a new part of the final json output---------------------------
CASE e.v->>'retain'
WHEN 'y' THEN
e.v->>'field'
ELSE
NULL
END retain_key,
--------------------------push regex result into json object---------------------------------------------------------------------------
CASE e.v->>'retain'
WHEN 'y' THEN
CASE regex->>'function'
WHEN 'extract' THEN
CASE WHEN array_upper(mt.mt,1)=1
THEN to_json(trim(mt.mt[1]))
ELSE array_to_json(mt.mt)
END::jsonb
WHEN 'replace' THEN
to_jsonb(rtrim(rp.rp))
ELSE
'{}'::jsonb
END
ELSE
NULL
END retain_val
FROM
--------------------------start with all regex maps------------------------------------------------------------------------------------
tps.map_rm m
--------------------------isolate matching basis to limit map to only look at certain json---------------------------------------------
JOIN LATERAL jsonb_array_elements(m.regex->'where') w(v) ON TRUE
--------------------------break out array of regular expressions in the map------------------------------------------------------------
JOIN LATERAL jsonb_array_elements(m.regex->'defn') WITH ORDINALITY e(v, rn) ON true
--------------------------join to main transaction table but only certain key/values are included--------------------------------------
INNER JOIN tps.trans t ON
t.srce = m.srce AND
t.rec @> w.v
--------------------------each regex references a path to the target value, extract the target from the reference and do regex---------
LEFT JOIN LATERAL regexp_matches(t.rec #>> ((e.v ->> 'key')::text[]), e.v ->> 'regex'::text,COALESCE(e.v ->> 'flag','')) WITH ORDINALITY mt(mt, rn) ON
m.regex->>'function' = 'extract'
--------------------------same as above but for a replacement type function------------------------------------------------------------
LEFT JOIN LATERAL regexp_replace(t.rec #>> ((e.v ->> 'key')::text[]), e.v ->> 'regex'::text, e.v ->> 'replace'::text,e.v ->> 'flag') WITH ORDINALITY rp(rp, rn) ON
m.regex->>'function' = 'replace'
WHERE
--t.allj IS NULL
--t.srce = 'PNCC' AND
e.v @> '{"map":"y"}'::jsonb
--rec @> '{"Transaction":"ACH Credits","Transaction":"ACH Debits"}'
--rec @> '{"Description":"CHECK 93013270 086129935"}'::jsonb
/*
ORDER BY
t.id DESC,
m.target,
e.rn,
COALESCE(mt.rn,rp.rn,1)
*/
)
--SELECT * FROM rx LIMIT 100
, agg_to_target_items AS (
SELECT
srce
,id
,target
,seq
,map_intention
,regex_function
,target_item_number
,result_key_name
,target_json_path
,CASE WHEN map_key IS NULL
THEN
NULL
ELSE
jsonb_build_object(
map_key,
CASE WHEN max(result_number) = 1
THEN
jsonb_agg(map_val ORDER BY result_number) -> 0
ELSE
jsonb_agg(map_val ORDER BY result_number)
END
)
END map_val
,CASE WHEN retain_key IS NULL
THEN
NULL
ELSE
jsonb_build_object(
retain_key,
CASE WHEN max(result_number) = 1
THEN
jsonb_agg(retain_val ORDER BY result_number) -> 0
ELSE
jsonb_agg(retain_val ORDER BY result_number)
END
)
END retain_val
FROM
rx
GROUP BY
srce
,id
,target
,seq
,map_intention
,regex_function
,target_item_number
,result_key_name
,target_json_path
,map_key
,retain_key
)
--SELECT * FROM agg_to_target_items LIMIT 100
, agg_to_target AS (
SELECT
srce
,id
,target
,seq
,map_intention
,tps.jsonb_concat_obj(COALESCE(map_val,'{}'::JSONB)) map_val
,jsonb_strip_nulls(tps.jsonb_concat_obj(COALESCE(retain_val,'{}'::JSONB))) retain_val
FROM
agg_to_target_items
GROUP BY
srce
,id
,target
,seq
,map_intention
)
, agg_to_ret AS (
SELECT
srce
,target
,seq
,map_intention
,map_val
,retain_val
,count(*) "count"
FROM
agg_to_target
GROUP BY
srce
,target
,seq
,map_intention
,map_val
,retain_val
)
, link_map AS (
SELECT
a.srce
,a.target
,a.seq
,a.map_intention
,a.map_val
,a."count"
,a.retain_val
,v.map mapped_val
FROM
agg_to_ret a
LEFT OUTER JOIN tps.map_rv v ON
v.srce = a.srce AND
v.target = a.target AND
v.retval = a.map_val
)
SELECT
l.srce
,l.target
,l.seq
,l.map_intention
,l.map_val
,l."count"
,l.retain_val
,l.mapped_val
FROM
link_map l
ORDER BY
l.srce
,l.target
,l.seq
,l."count" desc
,l.map_val
,l.mapped_val

View File

@ -0,0 +1,19 @@
SELECT
r.*
,CASE "Schedule#"
WHEN '02IN Raw Material' THEN 13097563.42
WHEN '03IN Finished Goods' THEN 35790696.52
ELSE 0
END + SUM("Sales"+"Credits & Adjustments"-"Gross Collections") OVER (PARTITION BY "Schedule#" ORDER BY "Schedule#" ASC, "PostDate" ASC) running_bal
,(LEAST("CollateralBalance" - "Ineligible Amount","MaxEligible")*("AdvanceRate"/100))::NUMERIC(20,2) qualified_collateral
,(("CollateralBalance" - "Ineligible Amount")*("AdvanceRate"/100))::NUMERIC(20,2) qualified_collateral_nl
FROM
tpsv.pncl_default r
WHERE
"Schedule#" = '01AR'
--"Schedule#" = '02IN Raw Material'
--"Schedule#" = '03IN Finished Goods'
ORDER BY
"Schedule#" asc
,r."PostDate" asc
,id

View File

@ -0,0 +1,8 @@
\timing
SELECT
r.*
,SUM(r."Amount") OVER (ORDER BY r."Post Date" asc , r."Description") + 1061.1 + 22.40 balance
FROM
tpsv.dcard_default r
ORDER BY
r."Post Date" asc

View File

@ -0,0 +1,34 @@
\timing
/*--------------------------------------------------
maintain statement-level triggers to update a master log of keys
* table-based listing
* composite type maintenance
potential update sources/events
* tps.trans insert
* tps.trans re-map
--------------------------------------------------*/
WITH ok AS (
SELECT
srce,
ok.k,
jsonb_typeof(allj->ok.k) typeof,
COUNT(*)
FROM
tps.trans
JOIN LATERAL jsonb_object_keys(allj) ok(k) ON TRUE
GROUP BY
srce,
ok.k,
jsonb_typeof(allj->ok.k)
ORDER BY
srce
)
SELECT
srce
,k
,typeof
FROM
ok
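The header comment above sketches replacing this ad-hoc listing with statement-level triggers. A minimal sketch of that idea, assuming a hypothetical tps.key_log table and PostgreSQL 10+ transition tables; none of these objects exist in this diff:

--hypothetical key log plus a statement-level trigger on insert
CREATE TABLE IF NOT EXISTS tps.key_log (
    srce text,
    k text,
    typeof text,
    cnt bigint,
    PRIMARY KEY (srce, k, typeof)
);

CREATE OR REPLACE FUNCTION tps.trans_key_log() RETURNS trigger
AS
$f$
BEGIN
    INSERT INTO tps.key_log
    SELECT
        i.srce
        ,ok.k
        ,jsonb_typeof(i.allj->ok.k)
        ,COUNT(*)
    FROM
        inserted i
        JOIN LATERAL jsonb_object_keys(i.allj) ok(k) ON TRUE
    GROUP BY
        i.srce
        ,ok.k
        ,jsonb_typeof(i.allj->ok.k)
    ON CONFLICT (srce, k, typeof) DO UPDATE SET cnt = tps.key_log.cnt + EXCLUDED.cnt;
    RETURN NULL;
END;
$f$
LANGUAGE plpgsql;

CREATE TRIGGER trans_key_log AFTER INSERT ON tps.trans
    REFERENCING NEW TABLE AS inserted
    FOR EACH STATEMENT EXECUTE PROCEDURE tps.trans_key_log();

The re-map event mentioned above would need a matching AFTER UPDATE trigger with an OLD transition table to decrement counts.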

View File

@ -0,0 +1,19 @@
SELECT
m.srce,
m.target,
regex->>'function' regex_function,
regex->>'where' where_clause,
e.v ->> 'field' result_key_name,
e.v ->> 'key' target_json_path,
e.v ->> 'flag' regex_options_flag,
e.v->>'map' map_intention,
e.v->>'retain' retain_result,
e.v->>'regex' regex_expression,
e.rn target_item_number
FROM
tps.map_rm m
LEFT JOIN LATERAL jsonb_array_elements(m.regex->'defn') WITH ORDINALITY e(v, rn) ON true
ORDER BY
m.srce,
m.target,
e.rn

View File

@ -0,0 +1,12 @@
\timing
SELECT
r.*,
SUM(r."Advances"+r."Adjustments"-r."Payments") OVER (PARTITION BY "Loan#" ORDER BY r."Post Date" asc, r."Reference #" asc)
FROM
tpsv.pnco_default r
WHERE
"Loan#" = '606780191'
ORDER BY
r."Loan#"
,r."Post Date" ASC
,r."Reference #" ASC

View File

@ -15,7 +15,8 @@
"key": "{Description}", "key": "{Description}",
"retain": "y" "retain": "y"
} }
] ],
"name": "First 20"
}, },
"sequence": 2, "sequence": 2,
"name": "First 20", "name": "First 20",

View File

@ -0,0 +1,82 @@
{
"name": "dcard",
"source": "client_file",
"loading_function": "csv",
"constraint": [
"{Trans. Date}",
"{Post Date}",
"{Description}"
],
"schemas": [
{
"name": "default",
"columns": [
{
"path": "{Trans. Date}",
"type": "date",
"column_name": "Trans. Date"
},
{
"path": "{Post Date}",
"type": "date",
"column_name": "Post Date"
},
{
"path": "{Description}",
"type": "text",
"column_name": "Description"
},
{
"path": "{Amount}",
"type": "numeric",
"column_name": "Amount"
},
{
"path": "{Category}",
"type": "text",
"column_name": "Category"
}
]
},
{
"name": "mapped",
"columns": [
{
"path": "{Trans. Date}",
"type": "date",
"column_name": "Trans. Date"
},
{
"path": "{Post Date}",
"type": "date",
"column_name": "Post Date"
},
{
"path": "{Description}",
"type": "text",
"column_name": "Description"
},
{
"path": "{Amount}",
"type": "numeric",
"column_name": "Amount"
},
{
"path": "{Category}",
"type": "text",
"column_name": "Category"
},
{
"path": "{party}",
"type": "text",
"column_name": "Party"
},
{
"path": "{reason}",
"type": "text",
"column_name": "Reason"
}
]
}
]
}
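A definition document like this is applied through tps.srce_set, the function behind the /source_single route in server.js below. A usage sketch with a cut-down (hypothetical) definition; the full dcard document above follows the same shape:

SELECT x.message
FROM tps.srce_set($$
{
    "name": "dcard",
    "source": "client_file",
    "loading_function": "csv",
    "constraint": ["{Trans. Date}", "{Post Date}", "{Description}"],
    "schemas": [
        {
            "name": "default",
            "columns": [
                {"path": "{Amount}", "type": "numeric", "column_name": "Amount"}
            ]
        }
    ]
}
$$::jsonb) x(message);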

View File

@ -0,0 +1,24 @@
{
"name": "Strip Amount Commas", //the name currently also serves as the primary key in the database
"srce": "PNCC", //name of the target source
"sequence": 1, //only for edge cases where the instructions return two keys of the same name; this determines priority. Almost always 1.
"regex": { //instruction set
"where": [ //only apply this regex to records containing these key/value pairs; if none, use an empty object {}
{
"example_key": "example_value"
}
],
"function": "replace", //even though there is an array of definitions, they all operate under the same premise (extract or replace)
"defn": [ //the array of instructions
{
"key": "{Amount}", //the path to the json key/value pair to operate on; "path" would be a better term
"map": "n", //y or n to indicate if the returned value will be used to search a lookup table
"flag": "g", //g finds all matches; null or empty applies the first match only
"field": "amount", //the key name to give the value that comes out of this instruction
"regex": ",", //the regular expression itself
"retain": "y", //flag to indicate if the returned value should be retained and included with the data
"replace": "" //this key is only evaluated when the function is replace
}
]
}
}
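JSON itself has no comment syntax, so the annotated block above is documentation only; what actually gets posted is the comment-free equivalent. A sketch of loading it through tps.srce_map_def_set, the setter wired to the /regex route in server.js below:

SELECT x.message FROM tps.srce_map_def_set($$
{
    "name": "Strip Amount Commas",
    "srce": "PNCC",
    "sequence": 1,
    "regex": {
        "where": [
            {"example_key": "example_value"}
        ],
        "function": "replace",
        "defn": [
            {
                "key": "{Amount}",
                "map": "n",
                "flag": "g",
                "field": "amount",
                "regex": ",",
                "retain": "y",
                "replace": ""
            }
        ]
    }
}
$$::jsonb) x(message);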

View File

@ -0,0 +1,45 @@
{
"name": "Trans Type",
"srce": "PNCC",
"regex": {
"function": "extract",
"defn": [
{
"key": "{AccountName}",
"map": "y",
"field": "acctn",
"regex": "(.*)",
"retain": "n"
},
{
"key": "{Transaction}",
"map": "y",
"field": "trans",
"regex": "(.*)",
"retain": "n"
},
{
"key": "{Description}",
"map": "y",
"field": "ini",
"regex": "([\\w].*?)(?=$| -|\\s[0-9].*?|\\s[\\w/]+?:)",
"retain": "y"
}
],
"where": [
{}
]
},
"sequence": 1
}
/*
target | retval | map
------------+----------------------------------------------------------------------------------------------------------------------------------+---------------------------------------------------------------------------------------------------------------------
Trans Type | {"ini": "01346", "acctn": "The HC Operating Company OPERA", "trans": "Miscellaneous Fees"} | {"sign": "-1", "party": "PNC", "ledger": "Manual", "reason": "Bank Fees", "trantype": "Disbursement"}
Trans Type | {"ini": "CANADA TAX", "acctn": "The HC Operating Company OPERA", "trans": "Detail Debit Adjustments"} | {"sign": "-1", "party": "PNC", "ledger": "Manual", "reason": "Bank Fees", "trantype": "Disbursement"}
Trans Type | {"ini": "ACH DEBIT SETTLEMENT", "acctn": "The HC Operating Company OPERA", "trans": "ACH Debits"} | {"sign": "-1", "ledger": "AP - ACH", "trantype": "Disbursement"}
Trans Type | {"ini": "RET DEP ITEM RTM", "acctn": "The HC Operating Company FBO P", "trans": "Deposited Items Returned"} | {"sign": "-1", "ledger": "Manual", "reason": "Returned Deposit RTM", "trantype": "Collections"}
Trans Type | {"ini": "RET DEP ITEM STOP", "acctn": "The HC Operating Company FBO P", "trans": "Deposited Items Returned"} | {"sign": "-1", "ledger": "Manual", "reason": "Returned Deposit STOP", "trantype": "Collections"}
Trans Type | {"ini": "CREDIT ADJUSTMENT", "acctn": "The HC Operating Company FBO P", "trans": "Detail Credit Adjustments"} | {"sign": "1", "ledger": "AR - Collections", "trantype": "Collections"}
*/
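The retval/map pairs in the comment above live in tps.map_rv and are maintained through tps.map_rv_set, the setter behind the /mapping route in server.js further down. The exact payload shape is not shown in this section, so this is a sketch only, assuming an array of objects keyed source/map/ret_val/mapped:

--a sketch only; the payload shape of tps.map_rv_set is an assumption here
SELECT x.message FROM tps.map_rv_set($$
[
    {
        "source": "PNCC",
        "map": "Trans Type",
        "ret_val": {"ini": "ACH DEBIT SETTLEMENT", "acctn": "The HC Operating Company OPERA", "trans": "ACH Debits"},
        "mapped": {"sign": "-1", "ledger": "AP - ACH", "trantype": "Disbursement"}
    }
]
$$::jsonb) x(message);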

View File

@ -0,0 +1,30 @@
WITH
mod AS (
SELECT
srce
,jsonb_pretty(defn) orig
,(defn - 'schemas')||
--rebuild the schemas key value from below
jsonb_build_object(
'schemas'
--aggregate all the new key values for a single soure
,jsonb_agg(
--combine a new key 'name' with the columns for that name
jsonb_build_object('name',k)||jsonb_build_object('columns',v)
)
) rebuild
FROM
tps.srce
LEFT JOIN LATERAL jsonb_each(defn->'schemas') WITH ORDINALITY je(k,v, rn) ON TRUE
GROUP BY
srce
,defn
)
UPDATE
tps.srce s
SET
defn = rebuild
FROM
mod
WHERE
mod.srce = s.srce

View File

@ -0,0 +1,33 @@
DROP FUNCTION IF EXISTS tps.build_srce_view_sql(text, text);
CREATE OR REPLACE FUNCTION tps.build_srce_view_sql(_srce text, _schema text) RETURNS TEXT
AS
$f$
DECLARE
--_schema text;
--_srce text;
_sql text;
BEGIN
--_schema:= 'default';
--_srce:= 'dcard';
SELECT
'DROP VIEW IF EXISTS tpsv.'||s.srce||'_'||(list.e->>'name')||'; CREATE VIEW tpsv.'||s.srce||'_'||(list.e->>'name')||' AS SELECT id, logid, allj, '||string_agg('(allj#>>'''||rec.PATH::text||''')::'||rec.type||' AS "'||rec.column_name||'"',', ')||' FROM tps.trans WHERE srce = '''||s.srce||''';'
INTO
_sql
FROM
tps.srce s
JOIN LATERAL jsonb_array_elements(s.defn->'schemas') list (e) ON TRUE
JOIN LATERAL jsonb_array_elements(list.e->'columns') as cols(e) ON TRUE
JOIN LATERAL jsonb_to_record (cols.e) AS rec( PATH text[], "type" text, column_name text) ON TRUE
WHERE
srce = _srce
AND list.e->>'name' = _schema
GROUP BY
s.srce
,list.e;
RAISE NOTICE '%',_sql;
RETURN _sql;
END
$f$
LANGUAGE plpgsql;
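The function only builds and returns the DDL string; nothing here executes it. A usage sketch that generates and runs the view for one source/schema pair:

SELECT tps.build_srce_view_sql('dcard','default');

--or generate and execute in one step
DO
$$
BEGIN
    EXECUTE tps.build_srce_view_sql('dcard','default');
END
$$;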

View File

@ -0,0 +1,52 @@
--setup function to delete a single source
DROP FUNCTION IF EXISTS tps.srce_delete(jsonb);
CREATE FUNCTION tps.srce_delete(_defn jsonb) RETURNS jsonb
AS
$f$
DECLARE
_message jsonb;
_MESSAGE_TEXT text;
_PG_EXCEPTION_DETAIL text;
_PG_EXCEPTION_HINT text;
_rebuild BOOLEAN;
BEGIN
-------------------------------do delete---------------------------------
DELETE FROM tps.srce WHERE srce = _defn->>'name';
--could move this record to a "recycle bin" table for a certain period of time
--need to handle cascading record deletes
---------------------------set message-----------------------------------
_message:=
(
$$
{
"status":"complete",
"message":"source was permanently deleted"
}
$$::jsonb
);
RETURN _message;
EXCEPTION WHEN OTHERS THEN
GET STACKED DIAGNOSTICS
_MESSAGE_TEXT = MESSAGE_TEXT,
_PG_EXCEPTION_DETAIL = PG_EXCEPTION_DETAIL,
_PG_EXCEPTION_HINT = PG_EXCEPTION_HINT;
_message:=
($$
{
"status":"fail",
"message":"error dropping the source"
}
$$::jsonb)
||jsonb_build_object('message_text',_MESSAGE_TEXT)
||jsonb_build_object('pg_exception_detail',_PG_EXCEPTION_DETAIL);
RETURN _message;
END;
$f$
LANGUAGE plpgsql;
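Only the "name" element of the argument is read, so a minimal call looks like:

SELECT x.message FROM tps.srce_delete('{"name":"dcard"}'::jsonb) x(message);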

View File

@ -0,0 +1,108 @@
/*
This function takes an array of definition objects, where the "name" key is the primary key.
It forces the entire body of sources to match what is received.
*/
DROP FUNCTION IF EXISTS tps.srce_overwrite_all(jsonb);
CREATE FUNCTION tps.srce_overwrite_all(_defn jsonb) RETURNS jsonb
AS
$f$
DECLARE
_message jsonb;
_MESSAGE_TEXT text;
_PG_EXCEPTION_DETAIL text;
_PG_EXCEPTION_HINT text;
_rebuild BOOLEAN;
_list text;
BEGIN
WITH
--retain the results of the update by srce
_set AS (
SELECT
j.rn rn
,j.e->>'name' srce
,j.e defn
FROM
jsonb_array_elements(_defn) WITH ORDINALITY j(e, rn)
)
--full join
,_full AS (
SELECT
COALESCE(_srce.srce,_set.srce) srce
,CASE COALESCE(_set.srce,'DELETE') WHEN 'DELETE' THEN 'DELETE' ELSE 'SET' END actn
,COALESCE(_set.defn,_srce.defn) defn
FROM
tps.srce _srce
FULL OUTER JOIN _set ON
_set.srce = _srce.srce
)
--call functions from list
,_do_set AS (
SELECT
f.srce
,f.actn
,setd.message
FROM
_full f
JOIN LATERAL tps.srce_set(defn) setd(message) ON f.actn = 'SET'
--dual left joins to functions that touch the same table cause the first left join's actions to be undone
--LEFT JOIN LATERAL tps.srce_delete(defn) deld(message) ON f.actn = 'DELETE'
)
,_do_del AS (
SELECT
f.srce
,f.actn
,deld.message
FROM
_full f
JOIN LATERAL tps.srce_delete(defn) deld(message) ON f.actn = 'DELETE'
)
--aggregate all the messages into one message
----
---- should look at rolling back the whole thing if one of the functions returns a fail; a stored procedure could do this.
----
SELECT
jsonb_agg(m)
INTO
_message
FROM
(
SELECT
jsonb_build_object('source',srce,'status',message->>'status','message',message->>'message') m
FROM
_do_set
UNION ALL
SELECT
jsonb_build_object('source',srce,'status',message->>'status','message',message->>'message') m
FROM
_do_del
) x;
SELECT string_agg(srce,',') INTO _list FROM tps.srce;
RAISE NOTICE 'multi source list: %', _list;
RETURN _message;
EXCEPTION WHEN OTHERS THEN
GET STACKED DIAGNOSTICS
_MESSAGE_TEXT = MESSAGE_TEXT,
_PG_EXCEPTION_DETAIL = PG_EXCEPTION_DETAIL,
_PG_EXCEPTION_HINT = PG_EXCEPTION_HINT;
_message:=
($$
{
"status":"fail",
"message":"error updating sources"
}
$$::jsonb)
||jsonb_build_object('message_text',_MESSAGE_TEXT)
||jsonb_build_object('pg_exception_detail',_PG_EXCEPTION_DETAIL);
RETURN _message;
END;
$f$
LANGUAGE plpgsql;
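Because the argument is treated as the complete list of sources, any source missing from the array is deleted. A usage sketch; each element carries a full definition document like the dcard one shown earlier (cut down here for illustration):

SELECT x.message
FROM tps.srce_overwrite_all($$
[
    {
        "name": "dcard",
        "source": "client_file",
        "loading_function": "csv",
        "constraint": ["{Trans. Date}", "{Post Date}", "{Description}"],
        "schemas": [
            {"name": "default", "columns": [{"path": "{Amount}", "type": "numeric", "column_name": "Amount"}]}
        ]
    }
]
$$::jsonb) x(message);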

View File

@ -1,37 +0,0 @@
[
{
"name": "Trans Type",
"srce": "PNCC",
"regex": {
"defn": [
{
"key": "{AccountName}",
"map": "y",
"field": "acctn",
"regex": "(.*)",
"retain": "n"
},
{
"key": "{Transaction}",
"map": "y",
"field": "trans",
"regex": "(.*)",
"retain": "n"
},
{
"key": "{Description}",
"map": "y",
"field": "ini",
"regex": "([\\w].*?)(?=$| -|\\s[0-9].*?|\\s[\\w/]+?:)",
"retain": "y"
}
],
"where": [
{}
],
"function": "extract",
"description": "extract intial description in conjunction with account name and transaction type for mapping"
},
"sequence": 1
}
]

View File

@ -1,76 +0,0 @@
{
"name": "dcard",
"source": "client_file",
"loading_function": "csv",
"constraint": [
"{Trans. Date}",
"{Post Date}",
"{Description}"
],
"schemas": {
"default": [
{
"path": "{Trans. Date}",
"type": "date",
"column_name": "Trans. Date"
},
{
"path": "{Post Date}",
"type": "date",
"column_name": "Post Date"
},
{
"path": "{Description}",
"type": "text",
"column_name": "Description"
},
{
"path": "{Amount}",
"type": "numeric",
"column_name": "Amount"
},
{
"path": "{Category}",
"type": "text",
"column_name": "Category"
}
],
"mapped": [
{
"path": "{Trans. Date}",
"type": "date",
"column_name": "Trans. Date"
},
{
"path": "{Post Date}",
"type": "date",
"column_name": "Post Date"
},
{
"path": "{Description}",
"type": "text",
"column_name": "Description"
},
{
"path": "{Amount}",
"type": "numeric",
"column_name": "Amount"
},
{
"path": "{Category}",
"type": "text",
"column_name": "Category"
},
{
"path": "{party}",
"type": "text",
"column_name": "Party"
},
{
"path": "{reason}",
"type": "text",
"column_name": "Reason"
}
]
}
}

View File

@ -1,12 +0,0 @@
var fs = require('fs');
var https = require('https');
var server = require('./server');
//server.listen(process.env.nodeport); //plain http listener, superseded by the https server below
var options = {
key: fs.readFileSync(process.env.wd + 'key.pem'),
cert: fs.readFileSync(process.env.wd + 'cert.pem'),
passphrase: '' //no passphrase is set on the key
};
https.createServer(options, server).listen(process.env.nodeport, () => {
console.log('started on ' + process.env.nodeport)
});

27
key.pem
View File

@ -1,27 +0,0 @@
-----BEGIN RSA PRIVATE KEY-----
MIIEowIBAAKCAQEAruKzxqy7Zdk1odLAtnKs60eu5/rLGMdsLjbB+V7R7v3bIdi/
TaoKD0oYOHjJSEfpKUEqva+W45Yx8A9dt2OE2jD2Rr3sCNub6m2vY4iB9xXGA5Wy
qtMr83Die225TOtMgGnTmYIU2D7VfOZ08xFu4rwU6NlvjHx/xMByRQ7N1QAMmlkr
F1/KRd6i4+OmFoY81ErsLL4P/rhJ8jbzbRmJGzz3DGOXFvE9Qk+DdMxk8WnzfiZc
8HBVBMYPYifSvuoARtxigQOwodjOTrb+asw3AG5B4Yh1NnZMDa8ujaYgid0RiN8a
Y18GcXZmtGdl1h88Zmlv+sqJbSK5ThOj8pPsxQIDAQABAoIBAF28hy1q8frSJIv7
AW4JIyPsxRPz+Z3rbdGLaga2SwG1MRoAKfF/vWqahUbnfsNuP8vr8PPAxLhxjYyl
P88G2XN1oNsj+vp4cyaA6ewIg9ii5AKR3zTugPU3wTz0/8f5qoR9tHvoHDIX46fR
gkeyRWPj+bUgrkh3hqg3mkfnem26eQ7/lBLrwhscCqDnxg7CneoFfvdqdlYO1HTO
gU+zmwZ6LPXoeV2wQpJy0yf+FZkOd4mUiSlK5C+DMOrfjWcvdVG931h91xzQUHDh
MumjbEx5tSJXMeb4zXofBbPLUZZCHl4VkgY/C7JjG7KGrl/GN6aSkjfSCnzzBBrb
3B7lcKECgYEA20g9TfFMLp7rlaap5WAUh2wDgg7klIhLYlqx+No5YgAcnkC4ec6y
97CV9jrAdLzod9i0WVczN9d4dfA30vu45Vp9k6k+XUgnKe00PFAHQheAOY+yG+Am
KUxvjvVwHolYSTq+6dVbMAfN0rfiJgvABKuxTmdFEloQWlzMpG7Re+kCgYEAzCta
0NJEhDrgf7AUPDkeFx45ZmwcTIwGff5jOlr8WfW8/u24qRVrqnMc7JyA7Pub8NlX
b+tFMVUKfWTJbCnqOiwQjstuXrUUkL78vIBcAyETRrXln11kRM+/aOLWm3F9WgCR
qRqxEehSlpVjM3w98hLfElWjbjSMZekboNjvjH0CgYEAqoWexsHiUE7dPN8CN5Fj
5XulrroOH0Nt++ozhCWPgho8JwSFcJPAsMTtmTFPAHj2/lvSw7b/6WjyECTiBHwj
6JdaGD4AdWHqsrv3Zv+E5WyJFjocc3j3tB3wkudibRy5Pxkj5Ys5AjzPXhIJFzUD
+Z3an/HG0OKg3ORVb3mOBiECgYA9EEZ0KD9H0Rgt+GrE7Excm7SNNo5sMoEKk9f3
rxjEiyPAlzbgk0CFVTJ0bgZ9Wm90ZHyrQ5GJRKcDg+5eRFcCmQVGv/89oikJdaW/
dyCN6wmJ34NS26rOjdfwtc975ECad4sZKZuGAJca5Ikr79/TE+PV5vkfT+Yh4Mtb
myZiPQKBgHGM9ZSXCQRGeZqhbSZeXBC/oBSdI2+R8HSR9O7x9yLiRd5JGDJS82q9
4aKQRCAGQ7AILEBmdwWXQuFqrroUYFWR+YYIGwBZhms1Fdcb1WvjV0Ag/GBce93x
XQgdFrcA0mZ5BUXR5MFPCOeYfi/d4GjQbMPls36tz7DnAFMHeKbF
-----END RSA PRIVATE KEY-----

3860
package-lock.json generated

File diff suppressed because it is too large

View File

@ -1,28 +0,0 @@
{
"name": "base",
"version": "1.0.0",
"description": "",
"main": "index.js",
"scripts": {
"test": "node_modules/mocha/bin/mocha",
"start": "nodemon index.js"
},
"author": "",
"license": "ISC",
"devDependencies": {
"chai": "^3.5.0",
"chai-http": "^3.0.0",
"mocha": "^2.5.3",
"nodemon": "^1.17.5"
},
"dependencies": {
"body-parser": "^1.17.1",
"cookie-parser": "^1.4.3",
"csvtojson": "^2.0.0",
"dotenv": "^2.0.0",
"express": "^4.13.4",
"express-handlebars": "^3.0.0",
"multer": "^1.3.0",
"pg": "^7.4.1"
}
}

View File

@ -1,16 +0,0 @@
data munger
=================================================
organized storage and cleansing of disparate data
current formats
--------------------------------------------------
* csv
* json
functions
-------------------------------------------------
* define constraints to prevent overlap during import
* manipulate with regex
* tag data en masse
* flatten into traditional tables for downstream analytics

298
server.js
View File

@ -1,298 +0,0 @@
require('dotenv').config();
var express = require('express');
var handlebars = require('express-handlebars');
var bodyParser = require('body-parser');
var cookieParser = require('cookie-parser');
var mult = require('multer');
var upload = mult({ encoding: "utf8" });
var csvtojson = require('csvtojson');
var pg = require('pg');
var https = require('https');
var fs = require('fs');
var readline = require('readline');
var server = express();
server.engine('handlebars', handlebars());
server.set('view engine', 'handlebars');
server.use(function(inReq, inRes, inNext) {
inRes.header("Access-Control-Allow-Origin", "*");
inRes.header("Access-Control-Allow-Methods", "POST, GET, PUT, DELETE, OPTIONS");
inRes.header("Access-Control-Allow-Headers", "Origin, X-Requested-With, Content-Type, Accept");
inNext();
});
var options = {
key: fs.readFileSync(process.env.wd + 'key.pem'),
cert: fs.readFileSync(process.env.wd + 'cert.pem'),
passphrase: '' //no passphrase is set on the key
};
https.createServer(options, server).listen(process.env.nodeport, () => {
console.log('started on ' + process.env.nodeport)
});
var Postgres = new pg.Client({
user: process.env.user,
password: process.env.password,
host: process.env.host,
port: process.env.port,
database: process.env.database,
ssl: false,
application_name: "tps_etl_api"
});
Postgres.FirstRow = function(inSQL, args, inResponse) {
Postgres.query(inSQL, args, (err, res) => {
if (err === null) {
inResponse.json(res.rows[0]);
return;
}
inResponse.json(err.stack);
});
};
Postgres.connect();
//----------------------------------------------------------source definitions-------------------------------------------------------------------------------------------------------------------------
//returns array of all sources
server.get("/source", function(inReq, inRes) {
var sql = "SELECT jsonb_agg(defn) source_list FROM tps.srce";
Postgres.FirstRow(sql, [], inRes);
});
//returns message about status and error description
server.post("/source_single", bodyParser.json(), function(inReq, inRes) // remove body parsing, just pass post body to the sql string build
{
var sql = "SELECT x.message FROM tps.srce_set($1::jsonb) as x(message)";
Postgres.FirstRow(sql, [JSON.stringify(inReq.body)], inRes);
});
//assume inbound info is a json array of definitions to set
server.post("/source", bodyParser.json(), function(inReq, inRes) // remove body parsing, just pass post body to the sql string build
{
//x = inReq.body;
var sql = "SELECT x.message FROM tps.srce_overwrite_all($1::jsonb) x(message)";
//console.log(JSON.stringify(inReq.body));
Postgres.FirstRow(sql, [JSON.stringify(inReq.body)], inRes);
});
//----------------------------------------------------------regex instructions-------------------------------------------------------------------------------------------------------------------------
//list all regex operations
server.get("/regex", function(inReq, inRes) {
var sql = "SELECT jsonb_agg(regex) regex FROM tps.map_rm WHERE srce = $1::text";
Postgres.FirstRow(sql, [inReq.query.srce], inRes);
});
//set one or more map definitions
server.post("/regex", bodyParser.json(), function(inReq, inRes) {
var sql = "SELECT x.message FROM tps.srce_map_def_set($1::jsonb) as x(message)";
Postgres.FirstRow(sql, [JSON.stringify(inReq.body)], inRes);
});
//takes an ad-hoc regex definition in curly braces
server.get("/regex_test", bodyParser.json(), function(inReq, inRes) {
var sql = "SELECT x.message FROM tps.test_regex_recs($1::jsonb) as x(message)";
Postgres.FirstRow(sql, [JSON.stringify(inReq.body)], inRes);
});
//------------------------------------------------------------mappings---------------------------------------------------------------------------------------------------------------------------------
//list unmapped items flagged to be mapped ?srce=
server.get("/unmapped_all", function(inReq, inRes) {
var sql = "SELECT jsonb_agg(row_to_json(x)::jsonb) regex FROM tps.report_unmapped_recs($1::text) x";
Postgres.FirstRow(sql, [inReq.query.srce], inRes);
});
//list unmapped items flagged to be mapped ?srce=
server.get("/unmapped", function(inReq, inRes) {
var sql = "SELECT jsonb_agg(row_to_json(x)::jsonb) regex FROM tps.report_unmapped($1::text) x";
Postgres.FirstRow(sql, [inReq.query.srce], inRes);
});
server.get("/mapping", function(inReq, inRes) {
var sql = "SELECT jsonb_agg(row_to_json(x)::jsonb) regex FROM tps.map_rv x WHERE srce = $1::text";
Postgres.FirstRow(sql, [inReq.query.srce], inRes);
});
//add entries to lookup table
server.post("/mapping", bodyParser.json(), function(inReq, inRes) {
var sql = "SELECT x.message FROM tps.map_rv_set($1::jsonb) as x(message)";
Postgres.FirstRow(sql, [JSON.stringify(inReq.body)], inRes);
});
//---------------------------------------------------------list imports--------------------------------------------------------------------------------------------------------------------------------
server.get("/import_log", function(inReq, inRes) {
var sql = "SELECT jsonb_agg(row_to_json(l)::jsonb) regex FROM tps.trans_log l";
Postgres.FirstRow(sql, [], inRes);
});
//-------------------------------------------------------------import data-----------------------------------------------------------------------------------------------------------------------------
server.use("/import", upload.single('upload'), function(inReq, inRes) {
console.log("should have gotten file as post body here");
var csv = inReq.file.buffer.toString('utf8')
//{headers: "true", delimiter: ",", output: "jsonObj", flatKeys: "true"}
csvtojson({ flatKeys: "true" }).fromString(csv).then(
(x) => {
var sql = "SELECT x.message FROM tps.srce_import($1, $2::jsonb) as x(message)"
console.log(sql);
Postgres.FirstRow(sql, [inReq.query.srce, JSON.stringify(x)], inRes);
}
);
});
//----------------------------------------------------------list import logs---------------------------------------------------------------------------------------------------------------------------
server.get("/import_log", function(inReq, inRes) {
var sql = "SELECT jsonb_agg(info) info FROM tps.trans_log WHERE info @> $1::jsonb";
Postgres.FirstRow(sql, [inReq.query], inRes);
});
//-------------------------------------------------------------suggest source def----------------------------------------------------------------------------------------------------------------------
server.use("/csv_suggest", upload.single('upload'), function(inReq, inRes) {
console.log("should have gotten file as post body here");
var csv = inReq.file.buffer.toString('utf8')
//{headers: "true", delimiter: ",", output: "jsonObj", flatKeys: "true"}
csvtojson({ flatKeys: "true" }).fromString(csv).then(
(x) => {
var sug = {
schemas: {
default: []
},
loading_function: "csv",
source: "client_file",
name: "",
constraint: []
};
for (var key in x[0]) {
var col = {};
//test if the value parses as a number
if (!isNaN(parseFloat(x[0][key])) && isFinite(x[0][key])) {
//a number with a leading 0 (e.g. a zip code) is treated as text
if (x[0][key].charAt(0) == "0") {
col["type"] = "text";
}
//a number with no leading 0 is numeric
else {
col["type"] = "numeric";
}
}
}
//if it casts to a date between 1950 and 2050, it is probably a date
else if (Date.parse(x[0][key]) > Date.parse('1950-01-01') && Date.parse(x[0][key]) < Date.parse('2050-01-01')) {
col["type"] = "date";
}
//otherwise its text
else {
col["type"] = "text";
}
col["path"] = "{" + key + "}";
col["column_name"] = key;
sug.schemas.default.push(col);
}
console.log(sug);
inRes.json(sug);
}
);
});
//--------------------------------------------------------------------------------------------------------------------------------------------------------------------
//-----------------------------------------------------------------------------ledger---------------------------------------------------------------------------------
//--------------------------------------------------------------------------------------------------------------------------------------------------------------------
//add ledger array and create offset account for every line
server.get("/gl_mhi_multi_post", bodyParser.json(), function(inReq, inRes) {
var l = 0;
console.log(inReq.body);
x = inReq.body;
x.gl = {};
x.gl.lines = [];
x.gl.jpath = [];
for (var i in x.item) {
//copy the current item to the gl array
var line = x.item[i];
x.gl.lines.push(line);
//build references to 'item' array
var ref = [];
ref.push("{item," + i + "}");
ref.push("{header}");
x.gl.jpath.push(ref);
//copy the current item to the gl array again, but swap account with supplied 'account' in header
var ofs = JSON.parse(JSON.stringify(line));
ofs.account = x.header.account;
ofs.amount = -ofs.amount;
x.gl.lines.push(ofs);
//add the same reference again for the offset account
x.gl.jpath.push(ref);
}
var sql = "INSERT INTO evt.bpr (bpr) SELECT $1";
console.log(JSON.stringify(x));
Postgres.FirstRow(sql, [JSON.stringify(x)], inRes);
});
//add ledger array and create offset account for every line
server.get("/gl_mje_build", bodyParser.json(), function(inReq, inRes) {
var l = 0;
console.log(inReq.body);
x = inReq.body;
x.gl = {};
x.gl.lines = [];
x.gl.jpath = [];
for (var i in x.items) {
//copy the current item to the gl array
var line = x.items[i];
x.gl.lines.push(line);
//build references to 'item' array
var ref = [];
ref.push("{items," + i + "}");
ref.push("{header}");
x.gl.jpath.push(ref);
}
var sql = "INSERT INTO evt.bpr (bpr) SELECT $1";
console.log(JSON.stringify(x));
Postgres.FirstRow(sql, [JSON.stringify(x)], inRes);
});
//add ledger array and create offset account for total of all lines
server.get("/gl_mhi_single_build", bodyParser.json(), function(inReq, inRes) {
var l = 0;
var tot = 0.00;
var bomb = false;
console.log(inReq.body);
x = inReq.body;
//add GL array
x.GL = [];
for (var i in x.item) {
var line = x.item[i];
if ((line.account != null) && (line.amount != null)) {
x.GL.push(line);
tot = tot + (line.amount || 0);
} else {
bomb = true;
}
//add the whole line as-is
}
if (bomb == false) {
var ofs = JSON.parse(JSON.stringify(x.header));
delete ofs.account; //the offset line keeps the header fields except the account key
ofs.amount = -tot;
x.GL.push(ofs);
}
inRes.json(x);
});
server.get("/", function(inReq, inRes) {
inRes.render("definition", { title: "definition", layout: "main" });
});
module.exports = server;

View File

@ -1,106 +0,0 @@
<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1">
<script src="//ajax.googleapis.com/ajax/libs/angularjs/1.5.6/angular.min.js"></script>
<script src="//ajax.googleapis.com/ajax/libs/angularjs/1.5.6/angular-cookies.js"></script>
<script src="//ajax.googleapis.com/ajax/libs/angularjs/1.5.6/angular-animate.js"></script>
<script src="/static/papa.js"></script>
<link rel="stylesheet" href="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.6/css/bootstrap.min.css">
<link rel="stylesheet" href="/static/styles.css">
</head>
<body>
<div ng-app="DatApp" ng-controller="DatAppController" ng-cloak class="container">
<div class="row">
<form class="well">
<div class="form-group">
<button class="btn btn-primary" ng-click="save();">Save</button>
<button class="btn btn-primary" ng-click="load();">Load</button>
</div>
</form>
</div>
<div class="row">
<form ng-submit="add();" class="well">
<div class="form-group">
<label>name</label>
<input class="form-control" type="text" ng-model="currentName">
</div>
<div class="form-group">
<button class="btn btn-primary">new one</button>
</div>
</form>
</div>
<div class="row">
<ul class="list-group">
<li class="list-group-item" ng-repeat="member in members">
<span>name:</span><strong ng-bind="member.name"></strong>
<button class="btn btn-danger" ng-click="remove(member);">remove</button>
</li>
</ul>
</div>
</div>
<script>
var DatApp;
DatApp = angular.module("DatApp", []);
DatApp.controller("DatAppController", ["$scope", "$http", function(inScope, inHTTP){
inScope.currentName = "name";
inScope.randomize = function(){
inScope.message = Math.random();
};
inScope.members = [];
inScope.add = function(){
inScope.members.push({
name: inScope.currentName,
id: Math.floor(Math.random()*10000000)
});
};
inScope.remove = function(inMember){
var i;
for(i=0; i<inScope.members.length; i++)
{
if(inScope.members[i] === inMember){
inScope.members.splice(i, 1);
return;
}
}
};
inScope.save = function(){
inHTTP({
method:"POST",
url:"/data",
data:inScope.members
})
.then(function(inSuccess){
console.log("done")
}, function(inFailure){
console.log(inFailure);
});
};
inScope.load = function(){
inHTTP({
method:"GET",
url:"/data"
})
.then(function(inSuccess){
inScope.members = inSuccess.data;
console.log("done");
}, function(inFailure){
console.log(inFailure);
});
};
}]);
</script>
</body>
</html>

File diff suppressed because one or more lines are too long

View File

View File

@ -1,13 +0,0 @@
[Unit]
Description=forecast_api
After=network.target
[Service]
ExecStart=/usr/bin/node /opt/forecast_api/index.js
Restart=always
User=fc_api
Environment=NODE_ENV=production
WorkingDirectory=/opt/forecast_api
[Install]
WantedBy=multi-user.target

40
test.pgsql Normal file
View File

@ -0,0 +1,40 @@
select jsonb_pretty(x.r) from tps.test_regex_recs(
$$
{
"name": "Trans Type",
"srce": "PNCC",
"regex": {
"function": "extract",
"defn": [
{
"key": "{AccountName}",
"map": "y",
"field": "acctn",
"regex": "(.*)",
"retain": "n"
},
{
"key": "{Transaction}",
"map": "y",
"field": "trans",
"regex": "(.*)",
"retain": "n"
},
{
"key": "{Description}",
"map": "y",
"field": "ini",
"regex": "([\\w].*?)(?=$| -|\\s[0-9].*?|\\s[\\w/]+?:)",
"retain": "y"
}
],
"where": [
{}
]
},
"sequence": 1
}
$$::jsonb
) x(r)
limit 1

View File

View File

@ -1 +0,0 @@
curl -H "Content-Type: application/json" -X POST -d@./srce.json http://localhost/source

View File

@ -1 +0,0 @@
curl -H "Content-Type: application/json" -X POST -d@./regex.json http://localhost/regex

View File

@ -1 +0,0 @@
curl -H "Content-Type: application/json" -X POST -d@./mapping.json http://localhost/mapping

File diff suppressed because it is too large

View File

@ -1 +0,0 @@
curl -v -F upload=@./d.csv http://localhost/import?srce=dcard

View File

@ -1,24 +0,0 @@
var chai = require('chai');
var chaiHTTP = require('chai-http');
var server = require('../server.js');
var express;
var should = chai.should();
chai.use(chaiHTTP);
describe("tests", function(){
before(function(done){
express = server.listen(7357);
done();
});
after(function(done){
express.close();
done();
});
it("should pass", function(done){
var test = true;
test.should.equal(true);
done();
});
});

View File

@ -1,139 +0,0 @@
Trans. Date,Post Date,Description,Amount,Category
01/02/2018,01/02/2018,"GOOGLE *YOUTUBE VIDEOS G.CO/HELPPAY#CAP0H07TXV",4.26,"Services"
01/02/2018,01/02/2018,"MICROSOFT *ONEDRIVE 800-642-7676 WA",4.26,"Services"
01/03/2018,01/03/2018,"CLE CLINIC PT PMTS 216-445-6249 OHAK2C57F2F0B3",200.00,"Medical Services"
01/04/2018,01/04/2018,"AT&T *PAYMENT 800-288-2020 TX",57.14,"Services"
01/04/2018,01/07/2018,"WWW.KOHLS.COM #0873 MIDDLETOWN OH",-7.90,"Payments and Credits"
01/05/2018,01/07/2018,"PIZZA HUT 007946 STOW OH",9.24,"Restaurants"
01/05/2018,01/07/2018,"SUBWAY 00044289255 STOW OH",10.25,"Restaurants"
01/06/2018,01/07/2018,"ACME NO. 17 STOW OH",103.98,"Supermarkets"
01/06/2018,01/07/2018,"DISCOUNT DRUG MART 32 STOW OH",1.69,"Merchandise"
01/06/2018,01/07/2018,"DISCOUNT DRUG MART 32 STOW OH",2.19,"Merchandise"
01/09/2018,01/09/2018,"CIRCLE K 05416 STOW OH00947R",3.94,"Gasoline"
01/09/2018,01/09/2018,"CIRCLE K 05416 STOW OH00915R",52.99,"Gasoline"
01/13/2018,01/13/2018,"AUTOZONE #0722 STOW OH",85.36,"Automotive"
01/13/2018,01/13/2018,"DISCOUNT DRUG MART 32 STOW OH",26.68,"Merchandise"
01/13/2018,01/13/2018,"EL CAMPESINO STOW OH",6.50,"Restaurants"
01/13/2018,01/13/2018,"TARGET STOW OH",197.90,"Merchandise"
01/14/2018,01/14/2018,"DISCOUNT DRUG MART 32 STOW OH",13.48,"Merchandise"
01/15/2018,01/15/2018,"TARGET.COM * 800-591-3869 MN",22.41,"Merchandise"
01/16/2018,01/16/2018,"BUFFALO WILD WINGS KENT KENT OH",63.22,"Restaurants"
01/16/2018,01/16/2018,"PARTA - KCG KENT OH",4.00,"Government Services"
01/16/2018,01/16/2018,"REMEMBERNHU 402-935-7733 IA",60.00,"Services"
01/16/2018,01/16/2018,"TARGET.COM * 800-591-3869 MN",44.81,"Merchandise"
01/16/2018,01/16/2018,"TREE CITY COFFEE & PASTR KENT OH",17.75,"Restaurants"
01/17/2018,01/17/2018,"BESTBUYCOM805526794885 888-BESTBUY MN",343.72,"Merchandise"
01/19/2018,01/19/2018,"DISCOUNT DRUG MART 32 STOW OH",5.98,"Merchandise"
01/19/2018,01/19/2018,"U-HAUL OF KENT-STOW KENT OH",15.88,"Travel/ Entertainment"
01/19/2018,01/19/2018,"WALMART GROCERY 800-966-6546 AR",5.99,"Supermarkets"
01/19/2018,01/19/2018,"WALMART GROCERY 800-966-6546 AR",17.16,"Supermarkets"
01/19/2018,01/19/2018,"WALMART GROCERY 800-966-6546 AR",500.97,"Supermarkets"
01/20/2018,01/20/2018,"GOOGLE *GOOGLE PLAY G.CO/HELPPAY#CAP0HFFS7W",2.12,"Services"
01/20/2018,01/20/2018,"LOWE'S OF STOW, OH. STOW OH",256.48,"Home Improvement"
01/22/2018,01/22/2018,"HOBBY LOBBY #405 STOW OHITEM TRANSFERRED FROM PREV ACCOUNT",38.49,"Merchandise"
01/23/2018,01/23/2018,"CASHBACK BONUS REDEMPTION PYMT/STMT CRDT",-32.20,"Awards and Rebate Credits"
01/23/2018,01/23/2018,"INTERNET PAYMENT - THANK YOU",-2394.51,"Payments and Credits"
01/27/2018,01/27/2018,"GIANT-EAGLE #4096 STOW OH",67.81,"Supermarkets"
01/27/2018,01/27/2018,"OFFICEMAX/OFFICE DEPOT63 STOW OH",21.06,"Merchandise"
01/27/2018,01/27/2018,"TARGET STOW OH",71.00,"Merchandise"
01/29/2018,01/29/2018,"NETFLIX.COM NETFLIX.COM CA19899514437",14.93,"Services"
01/30/2018,01/30/2018,"PARTA - KCG KENT OH",1.00,"Government Services"
01/30/2018,01/30/2018,"SPEEDWAY 09303 KEN KENT OH",46.57,"Gasoline"
01/30/2018,01/30/2018,"SQ *TWISTED MELTZ KENT OH0002305843011416898511",16.87,"Restaurants"
01/30/2018,01/30/2018,"TARGET STOW OH",49.37,"Merchandise"
01/31/2018,01/31/2018,"TARGET STOW OH",4.14,"Merchandise"
01/31/2018,01/31/2018,"TARGET STREETSBORO OH",14.28,"Merchandise"
01/31/2018,02/01/2018,"TARGET STOW OH",-21.34,"Payments and Credits"
01/31/2018,02/01/2018,"TARGET STREETSBORO OH",-9.60,"Payments and Credits"
02/01/2018,02/01/2018,"EL CAMPESINO STOW OH",42.24,"Restaurants"
02/02/2018,02/02/2018,"CASH ADVANCE FEE",5.00,"Fees"
02/02/2018,02/02/2018,"GOOGLE *ASCIIFLOW.COM G.CO/HELPPAY#CAP0HQTYN5",5.00,"Cash Advances"
02/03/2018,02/03/2018,"TARGET STREETSBORO OH",71.69,"Merchandise"
02/03/2018,02/07/2018,"SAMS CLUB - #4750 CUYAHOGA FALLOH",371.90,"Warehouse Clubs"
02/04/2018,02/04/2018,"ACME NO. 17 STOW OH",8.98,"Supermarkets"
02/04/2018,02/04/2018,"MICROSOFT *ONEDRIVE 800-642-7676 WA",4.26,"Services"
02/06/2018,02/06/2018,"MINIMUM INTEREST CHARGE FEE",0.50,"Fees"
02/06/2018,02/07/2018,"BP#954778736210 7-ELEVEN STOW OH",52.80,"Gasoline"
02/06/2018,02/07/2018,"CVS/PHARMACY #08932 TWINSBURG OH",13.87,"Merchandise"
02/07/2018,02/07/2018,"AT&T *PAYMENT 800-288-2020 TXX51Z5QX7SMT2U01",57.14,"Services"
02/07/2018,02/07/2018,"TOYS R US #9203 CUYAHOGA FALLOH",193.32,"Merchandise"
02/08/2018,02/08/2018,"GIANT-EAGLE #4096 STOW OH",66.13,"Supermarkets"
02/08/2018,02/08/2018,"TARGET STOW OH",121.32,"Merchandise"
02/09/2018,02/09/2018,"GUIDOS ORIGINAL PIZZA KENT OH",11.75,"Restaurants"
02/09/2018,02/09/2018,"MARATHON PETRO73601 TWINSBURG OH",44.30,"Gasoline"
02/10/2018,02/10/2018,"RSVP NO. 36 STOW OH",14.43,"Supermarkets"
02/10/2018,02/10/2018,"TARGET STOW OH",77.90,"Merchandise"
02/11/2018,02/11/2018,"SUBWAY 00044289255 STOW OH",21.00,"Restaurants"
02/13/2018,02/13/2018,"CHICK-FIL-A #02197 CUYAHOGA FLS OH",12.79,"Restaurants"
02/13/2018,02/13/2018,"IN *MR. BULKY'S FOODS AKRON OHAJ16V8Q6",3.39,"Supermarkets"
02/13/2018,02/13/2018,"TARGET CUYAHOGA FALLOH",5.33,"Supermarkets"
02/14/2018,02/14/2018,"DISCOUNT DRUG MART 32 STOW OH",4.29,"Merchandise"
02/14/2018,02/14/2018,"HANDELS ICE CREAM STOW STOW OH",7.95,"Supermarkets"
02/15/2018,02/15/2018,"BATH&BODY STOW OH",47.19,"Merchandise"
02/15/2018,02/15/2018,"TARGET STOW OH",76.35,"Merchandise"
02/17/2018,02/17/2018,"EL CAMPESINO STOW OH",6.50,"Restaurants"
02/17/2018,02/17/2018,"WALMART GROCERY 800-966-6546 AR",461.36,"Supermarkets"
02/18/2018,02/18/2018,"ACME NO. 17 STOW OH",32.68,"Supermarkets"
02/18/2018,02/18/2018,"CHIPOTLE ONLINE 303-595-4000 CO",20.75,"Restaurants"
02/19/2018,02/19/2018,"GIANT EAGLE #5863 STREETSBORO OH",25.00,"Supermarkets"
02/20/2018,02/20/2018,"REMEMBERNHU 402-935-7733 IA",60.00,"Services"
02/21/2018,02/21/2018,"BP#954635936241 7-ELEVEN STOW OH",30.04,"Gasoline"
02/22/2018,02/22/2018,"CHICK-FIL-A #02197 CUYAHOGA FLS OH",3.19,"Restaurants"
02/22/2018,02/22/2018,"CHICK-FIL-A #02197 CUYAHOGA FLS OH",18.22,"Restaurants"
02/22/2018,02/22/2018,"PET SUPPLIES PLUS #68 STOW OH",45.88,"Merchandise"
02/22/2018,02/22/2018,"TOYS R US #9203 CUYAHOGA FALLOH",21.31,"Merchandise"
02/23/2018,02/23/2018,"SUMMIT CO PARKING GAR AKRON OH",6.00,"Services"
02/24/2018,02/24/2018,"GET GO #3396 STOW OH",26.46,"Gasoline"
02/25/2018,02/25/2018,"DISCOUNT DRUG MART 32 STOW OH",19.70,"Merchandise"
02/25/2018,02/25/2018,"EL CAMPESINO STOW OH",6.50,"Restaurants"
02/25/2018,02/25/2018,"SQ *CORNER CUP COFFEEH STOW OH0001152921507942036274",2.30,"Supermarkets"
02/25/2018,02/25/2018,"TARGET STOW OH",18.49,"Merchandise"
02/28/2018,02/28/2018,"NETFLIX.COM NETFLIX.COM CA20475539512",14.93,"Services"
03/01/2018,03/01/2018,"GIANT-EAGLE #4032 STOW OH",2.99,"Supermarkets"
03/01/2018,03/01/2018,"TARGET STOW OH",72.46,"Merchandise"
03/02/2018,03/02/2018,"LATE FEE",27.00,"Fees"
03/02/2018,03/02/2018,"MICROSOFT *ONEDRIVE 800-642-7676 WA",4.26,"Services"
03/02/2018,03/02/2018,"PIZZA HUT 007946 STOW OH",9.89,"Restaurants"
03/03/2018,03/03/2018,"CASHBACK BONUS REDEMPTION PYMT/STMT CRDT",-23.28,"Awards and Rebate Credits"
03/03/2018,03/03/2018,"INTERNET PAYMENT - THANK YOU",-2451.43,"Payments and Credits"
03/03/2018,03/03/2018,"LOWE'S OF STOW, OH. STOW OH",6.93,"Home Improvement"
03/03/2018,03/07/2018,"WAL-MART SC - #2323 STOW OH",150.32,"Merchandise"
03/04/2018,03/04/2018,"OLD NAVY ON-LINE 800-OLDNAVY OH",13.66,"Merchandise"
03/06/2018,03/07/2018,"MFW BOOKS LLC 5732022000 MO",86.90,"Education"
03/06/2018,03/07/2018,"OLD NAVY ON-LINE 800-OLDNAVY OH",127.46,"Merchandise"
03/08/2018,03/08/2018,"ACME NO. 17 STOW OH",8.58,"Supermarkets"
03/08/2018,03/08/2018,"CHICK-FIL-A #02197 CUYAHOGA FLS OH",2.12,"Restaurants"
03/08/2018,03/08/2018,"EL CAMPESINO STOW OH",6.50,"Restaurants"
03/08/2018,03/08/2018,"SPEEDWAY 03686 496 STOW OH",45.24,"Gasoline"
03/08/2018,03/08/2018,"SWENSONS STOW KENT STOW OH",8.30,"Restaurants"
03/08/2018,03/08/2018,"TOYS R US #9203 CUYAHOGA FALLOH",5.33,"Merchandise"
03/09/2018,03/09/2018,"SPEEDWAY 03686 496 STOW OH",48.29,"Gasoline"
03/10/2018,03/10/2018,"WALMART GROCERY 800-966-6546 AR",522.22,"Supermarkets"
03/11/2018,03/11/2018,"AT&T *PAYMENT 800-288-2020 TXQ8F55RY7SMT2N04",57.14,"Services"
03/11/2018,03/11/2018,"SQ *CORNER CUP COFFEEH STOW OH0002305843011470140810",2.30,"Supermarkets"
03/12/2018,03/12/2018,"MICROSOFT *STORE 800-642-7676 WA",1.06,"Services"
03/15/2018,03/15/2018,"SQ *CORNER CUP COFFEEH STOW OH0002305843011475075512",2.30,"Supermarkets"
03/16/2018,03/16/2018,"ACME NO. 17 STOW OH",15.85,"Supermarkets"
03/16/2018,03/16/2018,"CHIPOTLE 1152 STOW OH",3.85,"Restaurants"
03/16/2018,03/16/2018,"EL CAMPESINO STOW OH",6.50,"Restaurants"
03/16/2018,03/16/2018,"PIZZA HUT 007946 STOW OH",13.98,"Restaurants"
03/17/2018,03/17/2018,"CHIPOTLE ONLINE 303-595-4000 CO",15.75,"Restaurants"
03/17/2018,03/17/2018,"DISCOUNT DRUG MART 32 STOW OH",9.89,"Merchandise"
03/17/2018,03/17/2018,"MFW BOOKS LLC 5732022000 MO",66.75,"Education"
03/18/2018,03/18/2018,"ACME NO. 17 STOW OH",27.78,"Supermarkets"
03/18/2018,03/18/2018,"GIANT-EAGLE #4032 STOW OH",28.34,"Supermarkets"
03/20/2018,03/20/2018,"REMEMBERNHU 402-935-7733 IA",60.00,"Services"
03/20/2018,03/20/2018,"SONLIGHT CURRICULUM LTD 303-730-8193 CO",762.87,"Education"
03/21/2018,03/21/2018,"BP#954635936241 7-ELEVEN STOW OH",8.87,"Gasoline"
03/21/2018,03/21/2018,"DISCOUNT DRUG MART 32 STOW OH",18.07,"Merchandise"
03/21/2018,03/21/2018,"SQ *CORNER CUP COFFEEH STOW OH0002305843011484061091",2.30,"Supermarkets"
03/21/2018,03/21/2018,"TARGET STOW OH",1.95,"Merchandise"
03/21/2018,03/21/2018,"TARGET STOW OH",224.85,"Merchandise"
03/22/2018,03/22/2018,"JUSTICE #0639 STOW OH",16.01,"Merchandise"
03/22/2018,03/22/2018,"SPEEDWAY 03686 496 STOW OH",32.54,"Gasoline"
03/22/2018,03/22/2018,"SQ *TWISTED MELTZ KENT OH0002305843011486528725",6.74,"Restaurants"
03/22/2018,03/22/2018,"TARGET STOW OH",6.60,"Merchandise"
03/25/2018,03/25/2018,"ACME NO. 17 STOW OH",95.42,"Supermarkets"
03/25/2018,03/25/2018,"ASIAN-GREEK CUISINES STOW OH",70.25,"Restaurants"
03/25/2018,03/25/2018,"MARATHON PETRO73601 TWINSBURG OH",11.09,"Gasoline"
03/25/2018,03/25/2018,"SPEEDWAY 09303 KEN KENT OH",53.28,"Gasoline"

View File

@ -1,149 +0,0 @@
<div ng-app="App" ng-controller="AppController">
<form ng-submit="Submit()">
<input type="text" ng-model="Model.name"/>
<input type="text" ng-model="Model.description"/>
<h2>Rows</h2>
<div ng-repeat="Row in Model.schema track by $index">
<input type="text" ng-model="Row.key"/>
<select ng-model="Row.type" ng-options="v for v in Types"></select>
</div>
<input type="submit"/>
</form>
<form>
<input type="file" data-import-csv class="form-control">
</form>
</div>
<script>
var App = angular.module("App", []);
App.factory("Rows", [function()
{
return function(inData)
{
console.log(inData);
};
}]);
App.directive("importCsv", ["Rows", function(Rows)
{
var directive = {};
directive.link = function(inScope, inElement, inAttributes){
function handlerEnter(inEvent){
if(inEvent){
inEvent.preventDefault();
}
inElement.addClass("Import");
inEvent.dataTransfer.effectAllowed = 'copy';
return false;
}
function handlerDrop(inEvent){
inElement.removeClass("Import");
if(inEvent){
inEvent.preventDefault();
}
parse(inEvent.dataTransfer.files[0]);
return false;
}
function handlerChange(inEvent){
inEvent.stopImmediatePropagation();
parse(inEvent.target.files[0]);
}
function handlerLeave()
{
inElement.removeClass("Import");
}
function parse(inFile)
{
Papa.parse(inFile, {
complete: function(inCSV)
{
Rows(inCSV.data);
inScope.$apply();
}
});
}
inElement.on("dragenter dragstart dragend dragleave dragover drag drop", function (inEvent) {inEvent.preventDefault();});
inElement.on('dragenter', handlerEnter);
inElement.on('dragleave', handlerLeave);
inElement.on('drop', handlerDrop);
inElement.on('change', handlerChange);
inElement.on('click', function(inEvent){
inEvent.stopImmediatePropagation();
})
};
return directive;
}]);
App.controller("AppController", ["$scope", "$http", function($scope, $http)
{
console.log("init");
$scope.Types = ["date", "text", "numeric"];
$scope.Model = {
"name": "DCARD",
"description":"Discover Card",
"type": "csv",
"schema": [
{
"key": "Trans. Date",
"type": "date"
},
{
"key": "Post Date",
"type": "date"
},
{
"key": "Description",
"type": "text"
},
{
"key": "Amount",
"type": "numeric"
},
{
"key": "Category",
"type": "text"
}
],
"unique_constraint": {
"type": "key",
"fields": [
"{Post Date}",
"{Trans. Date}",
"{Description}"
]
}
};
$scope.Submit = function()
{
console.log($scope.Model);
var req = {
method: 'POST',
url: '/json',
data: $scope.Model
};
$http(req).then(
function(inSuccess){
console.log(inSuccess);
},
function(inFailure)
{
console.log(inFailure);
}
);
};
}]);
</script>

View File

@ -1,16 +0,0 @@
<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1">
<script src="//ajax.googleapis.com/ajax/libs/angularjs/1.5.6/angular.min.js"></script>
<script src="//ajax.googleapis.com/ajax/libs/angularjs/1.5.6/angular-cookies.js"></script>
<script src="//ajax.googleapis.com/ajax/libs/angularjs/1.5.6/angular-animate.js"></script>
<script src="/static/papa.js"></script>
<link rel="stylesheet" href="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.6/css/bootstrap.min.css">
<link rel="stylesheet" href="/static/styles.css">
</head>
<body>
{{{body}}}
</body>
</html>