merge wk
commit 545ba4e2b5
.gitignore
vendored
Normal file
1
.gitignore
vendored
Normal file
@@ -0,0 +1 @@
.vscode/database.json
31 coa.md
@@ -1,31 +0,0 @@
Balance Sheet
-----------------------
* Cash
- On-hand
- Hunt Checking
- Hunt Savings
* Prepaid
- Auto Insurance
- Home Insurance
* Investments
- Principle 401k
- Fidelity 401k
- STRS
- PPG 401k
- Norwex SP
* Assets
- Auto Cost
- Auto Depr
- Home
* Credit
- Chase
- Discover
- Kohls
- Lowes
- BestBuy
- Target
- TheHomeDepot
* Mortgage
- Principle
@@ -6,9 +6,9 @@ FROM
tps.trans
LEFT JOIN LATERAL jsonb_populate_record(null::tps.pnco, rec) r ON TRUE
WHERE
rec @> '{"Loan#":"606780191"}'
rec @> '{"Loan#":"606780281"}'
ORDER BY
r."Loan#"
,r."Post Date" asc
,rec->>'id' asc
,r."Reference #" asc
,r."Post Date" ASC
,rec->>'id' ASC
,r."Reference #" ASC
109 readme.md
@@ -1,4 +1,107 @@
Concepts
Overview
----------------------------------------------

```
+--------------+
|csv data      |
+-----+--------+
      |
      |
      v
+----web ui----+     +----func----+       +---table----+
|import screen +---->|srce.sql    +------>|tps.srce    |<------+
+--------------+     +------------+       +------------+       |
                     |p1:srce     |                            |
                     |p2:file path|                            |
+-----web ui---+     +------------+       +----table---+       |
|create map    |                          |tps.map_rm  |    +--+--db proc----+
|profile       +------------------------->|            |    |update tps.trans |
+------+-------+                          +-----+------+    |column allj to   |
       |                                        ^            |contain map data |
       |                                        |            +--+-------------+
       v                           foreign key  |               ^
+----web ui-----+                               |               |
|assign maps    |                               |               |
|for return     |    +---table----+             |               |
|values         +--->|tps.map_rv  +-------------+---------------+
+---------------+    +------------+
```
The goal is to:
1. house external data and prevent duplication on insert
2. apply mappings to the data to make it meaningful
3. be able to reference it from outside sources (no action required; see the query sketch below)

There are 5 tables:
* tps.srce : definition of source
* tps.trans : actual data
* tps.trans_log : log of inserts
* tps.map_rm : map profile
* tps.map_rv : profile associated values

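As a rough illustration of point 3, a lookup against the housed data might look like the sketch below. The column names `srce`, `rec`, and `allj` are taken from the indexes added later in this commit, and the `WMPD` source and `Carrier` key come from the schema example that follows.

```
SELECT
    t.srce
    ,t.rec->>'Carrier' carrier   -- raw value exactly as it was imported
    ,t.allj                      -- original doc plus the mapped values written by the db proc
FROM
    tps.trans t
WHERE
    t.srce = 'WMPD';
```
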
# tps.srce schema

```
{
    "name": "WMPD",
    "descr": "Williams Paid File",
    "type": "csv",
    "schema": [
        {
            "key": "Carrier",
            "type": "text"
        },
        {
            "key": "Pd Amt",
            "type": "numeric"
        },
        {
            "key": "Pay Dt",
            "type": "date"
        }
    ],
    "unique_constraint": {
        "fields": [
            "{Pay Dt}",
            "{Carrier}"
        ]
    }
}
```

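Elsewhere in this commit the stored json is turned back into typed columns with `jsonb_populate_record(null::tps.pnco, rec)`. A sketch of how a definition like the one above could pair with a composite type (the type name `tps.wmpd` is made up for illustration):

```
-- hypothetical composite type mirroring the keys/types in the schema document
CREATE TYPE tps.wmpd AS (
    "Carrier"  text,
    "Pd Amt"   numeric,
    "Pay Dt"   date
);

SELECT
    r."Carrier"
    ,r."Pd Amt"
    ,r."Pay Dt"
FROM
    tps.trans t
    LEFT JOIN LATERAL jsonb_populate_record(null::tps.wmpd, t.rec) r ON TRUE
WHERE
    t.srce = 'WMPD';
```
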
# tps.map_rm schema

```
{
    "name": "Strip Amount Commas",
    "description": "the Amount field comes from PNC with commas embedded so it cannot be cast to numeric",
    "defn": [
        {
            "key": "{Amount}",    /*this is a Postgres text array stored in json*/
            "field": "amount",    /*key name assigned to the result of the regex*/
            "regex": ",",         /*regular expression*/
            "flag": "g",
            "retain": "y",
            "map": "n"
        }
    ],
    "function": "replace",
    "where": [
        {
        }
    ]
}
```

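The intended effect of the profile above can be reproduced by hand with `regexp_replace`; this is only a sketch of the result, not the mapping engine itself:

```
-- regex "," with flag "g" and function "replace": strip the embedded commas
-- so the text value can be cast to numeric
SELECT regexp_replace('1,234.56', ',', '', 'g')::numeric;   -- 1234.56
```
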
Notes
======================================

pull various static files into postgres and do basic transformation without losing the original document

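Keeping the original document intact is what the jsonb `rec` column is for; a minimal sketch of turning one staged csv row into a json document (the row values are made-up sample data):

```
-- to_jsonb(x) keeps every original column name and value as one json document,
-- the kind of document the rec column holds
SELECT to_jsonb(x) AS rec
FROM (VALUES ('ABCD', 1234.56::numeric, DATE '2018-01-15'))
    AS x("Carrier", "Pd Amt", "Pay Dt");
```
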
@@ -17,8 +120,8 @@ applied mappings are in associated jsonb documents
1. regular expressions are used to extract pieces of the json objects
2. the results of the regular expressions are bumped up against a list of basic mappings and written to an associated jsonb document

a target represents a whole scenario that needs to be matched. it can contain several regex expressions. if one fails, no match is attempted, because a partial match could result in a false positive based on the `@>` operator used at join time
`this probably isn't correctly implemented`
each regex expression within a targeted pattern can be set to map or not. the mapping items should then be joined to map_rv with `=` as opposed to `@>` to avoid duplication of rows (see the comparison sketch below)

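For reference, the difference between the two operators on plain jsonb values (operator semantics only, not the actual map_rv join):

```
-- @> is containment: extra keys on the left still match, which is what can fan out rows
SELECT '{"f1": "a", "f2": "b"}'::jsonb @> '{"f1": "a"}'::jsonb;   -- true
-- = is strict equality: the documents have to be identical
SELECT '{"f1": "a", "f2": "b"}'::jsonb =  '{"f1": "a"}'::jsonb;   -- false
```
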
## Transformation tools
* `COPY` (see the sketch below)

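A rough sketch of a `COPY` import; the staging table, its columns, and the file path are placeholders, and the real WMPD file has many more columns than shown:

```
-- placeholder staging table and path; COPY itself is stock postgres
CREATE TABLE tps.stage_wmpd ("Carrier" text, "Pd Amt" numeric, "Pay Dt" date);

COPY tps.stage_wmpd FROM '/tmp/wmpd.csv' WITH (FORMAT csv, HEADER true);
```
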
@@ -1,39 +1,128 @@
insert into tps.srce
SELECT
'CAMZ',
UPDATE tps.SRCE

SET DEFN =
$$
{
"name": "CAMZ",
"description":"Chase Amazon Credit Card",
"type": "csv",
"name": "WMPD",
"descr": "Williams Paid File",
"type":"csv",
"schema": [
{
"key": "Type",
"key": "Carrier",
"type": "text"
},
{
"key": "Trans Date",
"type": "date"
},
{
"key": "Post Date",
"type": "date"
},
{
"key": "Description",
"key": "SCAC",
"type": "text"
},
{
"key": "Amount",
"key": "Mode",
"type": "text"
},
{
"key": "Pro #",
"type": "text"
},
{
"key": "B/L",
"type": "text"
},
{
"key": "Pd Amt",
"type": "numeric"
},
{
"key": "Loc#",
"type": "text"
},
{
"key": "Pcs",
"type": "numeric"
},
{
"key": "Wgt",
"type": "numeric"
},
{
"key": "Chk#",
"type": "numeric"
},
{
"key": "Pay Dt",
"type": "date"
},
{
"key": "Acct #",
"type": "text"
},
{
"key": "I/O",
"type": "text"
},
{
"key": "Sh Nm",
"type": "text"
},
{
"key": "Sh City",
"type": "text"
},
{
"key": "Sh St",
"type": "text"
},
{
"key": "Sh Zip",
"type": "text"
},
{
"key": "Cons Nm",
"type": "text"
},
{
"key": "D City ",
"type": "text"
},
{
"key": "D St",
"type": "text"
},
{
"key": "D Zip",
"type": "text"
},
{
"key": "Sh Dt",
"type": "date"
},
{
"key": "Inv Dt",
"type": "date"
},
{
"key": "Customs Entry#",
"type": "text"
},
{
"key": "Miles",
"type": "numeric"
},
{
"key": "Frt Class",
"type": "text"
},
{
"key": "Master B/L",
"type": "text"
}
],
"unique_constraint": {
"type": "key",
"fields": [
"{Trans Date}"
,"{Post Date}"
"fields":[
"{Pay Dt}",
"{Carrier}"
]
}
}
$$::JSONB
$$::JSONB
WHERE
SRCE = 'WMPD'
@@ -1,3 +1,4 @@
\timing
SELECT
t.srce
,(ae.e::text[])[1] unq_constr
77 ubm_data.sql
File diff suppressed because one or more lines are too long

@@ -440,6 +440,13 @@ CREATE INDEX trans_allj ON trans USING gin (allj);

CREATE INDEX trans_rec ON trans USING gin (rec);


--
-- Name: trans_srce; Type: INDEX; Schema: tps; Owner: -
--

CREATE INDEX trans_srce ON trans USING btree (srce);


--
-- Name: map_rm map_rm_fk_srce; Type: FK CONSTRAINT; Schema: tps; Owner: -
--