- Restore export.tsv and import-csv endpoints to mappings routes - sample column is always last in export and discarded on import - get_unmapped_values now returns distinct source field values as sample instead of full raw records Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
315 lines
11 KiB
JavaScript
/**
|
|
* Mappings Routes
|
|
* Manage value mappings
|
|
*/
|
|
|
|
const express = require('express');
const multer = require('multer');
const { parse } = require('csv-parse/sync');

// Buffer uploads in memory: imported TSVs are small mapping tables,
// so there is no need to spill them to disk.
const upload = multer({ storage: multer.memoryStorage() });

// Columns of the TSV exchange format that are NOT mapping output keys.
// On import, every column outside this set is treated as an output field.
const SYSTEM_COLS = new Set(['source_name', 'rule_name', 'input_value', 'record_count', 'sample']);
|
module.exports = (pool) => {
|
|
const router = express.Router();
|
|
|
|
// List all mappings for a source
|
|
router.get('/source/:source_name', async (req, res, next) => {
|
|
try {
|
|
const { rule_name } = req.query;
|
|
|
|
let query = 'SELECT * FROM mappings WHERE source_name = $1';
|
|
const params = [req.params.source_name];
|
|
|
|
if (rule_name) {
|
|
query += ' AND rule_name = $2';
|
|
params.push(rule_name);
|
|
}
|
|
|
|
query += ' ORDER BY rule_name, input_value';
|
|
|
|
const result = await pool.query(query, params);
|
|
res.json(result.rows);
|
|
} catch (err) {
|
|
next(err);
|
|
}
|
|
});
|
|
|
|
// Get unmapped values
|
|
router.get('/source/:source_name/unmapped', async (req, res, next) => {
|
|
try {
|
|
const { rule_name } = req.query;
|
|
|
|
const result = await pool.query(
|
|
'SELECT * FROM get_unmapped_values($1, $2)',
|
|
[req.params.source_name, rule_name || null]
|
|
);
|
|
|
|
res.json(result.rows);
|
|
} catch (err) {
|
|
next(err);
|
|
}
|
|
});
|
|
|
|
// Export unmapped values + existing mappings as TSV
|
|
// Columns: source_name, rule_name, input_value, record_count, <output keys...>, sample
|
|
// sample is always last and is discarded on import
|
|
router.get('/source/:source_name/export.tsv', async (req, res, next) => {
|
|
try {
|
|
const { rule_name } = req.query;
|
|
const source_name = req.params.source_name;
|
|
|
|
const [unmappedResult, mappedResult] = await Promise.all([
|
|
pool.query('SELECT * FROM get_unmapped_values($1, $2)', [source_name, rule_name || null]),
|
|
pool.query(
|
|
'SELECT * FROM mappings WHERE source_name = $1' + (rule_name ? ' AND rule_name = $2' : '') + ' ORDER BY rule_name, input_value',
|
|
rule_name ? [source_name, rule_name] : [source_name]
|
|
)
|
|
]);
|
|
|
|
// Collect output keys from existing mappings
|
|
const outputKeys = [];
|
|
for (const row of mappedResult.rows) {
|
|
for (const key of Object.keys(row.output || {})) {
|
|
if (!outputKeys.includes(key)) outputKeys.push(key);
|
|
}
|
|
}
|
|
|
|
const escape = (val) => String(val ?? '').replace(/\t/g, ' ');
|
|
|
|
// sample is always last
|
|
const allCols = ['source_name', 'rule_name', 'input_value', 'record_count', ...outputKeys, 'sample'];
|
|
|
|
const dataRows = [];
|
|
|
|
for (const row of unmappedResult.rows) {
|
|
const r = {
|
|
source_name,
|
|
rule_name: row.rule_name,
|
|
input_value: Array.isArray(row.extracted_value) ? JSON.stringify(row.extracted_value) : String(row.extracted_value ?? ''),
|
|
record_count: row.record_count,
|
|
sample: Array.isArray(row.sample) ? row.sample.join(' | ') : String(row.sample ?? '')
|
|
};
|
|
for (const key of outputKeys) r[key] = '';
|
|
dataRows.push(r);
|
|
}
|
|
|
|
for (const row of mappedResult.rows) {
|
|
const r = {
|
|
source_name: row.source_name,
|
|
rule_name: row.rule_name,
|
|
input_value: Array.isArray(row.input_value) ? JSON.stringify(row.input_value) : String(row.input_value ?? ''),
|
|
record_count: '',
|
|
sample: ''
|
|
};
|
|
for (const key of outputKeys) r[key] = row.output?.[key] ?? '';
|
|
dataRows.push(r);
|
|
}
|
|
|
|
const tsv = [
|
|
allCols.map(escape).join('\t'),
|
|
...dataRows.map(r => allCols.map(c => escape(r[c])).join('\t'))
|
|
].join('\n');
|
|
|
|
res.setHeader('Content-Type', 'text/tab-separated-values');
|
|
res.setHeader('Content-Disposition', `attachment; filename="mappings_${source_name}.tsv"`);
|
|
res.send(tsv);
|
|
} catch (err) {
|
|
next(err);
|
|
}
|
|
});
|
|
|
|
// Import mappings from uploaded TSV
|
|
// Any column that isn't a system field (source_name, rule_name, input_value, record_count, sample)
|
|
// is treated as an output key. sample is discarded wherever it appears.
|
|
router.post('/source/:source_name/import-csv', upload.single('file'), async (req, res, next) => {
|
|
const client = await pool.connect();
|
|
try {
|
|
if (!req.file) {
|
|
return res.status(400).json({ error: 'No file uploaded. Send TSV as multipart field named "file".' });
|
|
}
|
|
|
|
const records = parse(req.file.buffer, { columns: true, skip_empty_lines: true, trim: true, delimiter: '\t' });
|
|
|
|
if (records.length === 0) {
|
|
return res.status(400).json({ error: 'File is empty.' });
|
|
}
|
|
|
|
const outputKeys = Object.keys(records[0]).filter(k => !SYSTEM_COLS.has(k));
|
|
|
|
const mappings = [];
|
|
for (const row of records) {
|
|
const { source_name, rule_name, input_value } = row;
|
|
|
|
const output = {};
|
|
for (const key of outputKeys) {
|
|
if (row[key] && row[key].trim() !== '') output[key] = row[key].trim();
|
|
}
|
|
if (Object.keys(output).length === 0) continue;
|
|
|
|
let parsedInput;
|
|
try { parsedInput = JSON.parse(input_value); } catch { parsedInput = input_value; }
|
|
|
|
mappings.push({ source_name, rule_name, input_value: parsedInput, output });
|
|
}
|
|
|
|
if (mappings.length === 0) {
|
|
return res.status(400).json({ error: 'No rows with output values filled in.' });
|
|
}
|
|
|
|
await client.query('BEGIN');
|
|
const results = [];
|
|
for (const { source_name, rule_name, input_value, output } of mappings) {
|
|
const result = await client.query(
|
|
`INSERT INTO mappings (source_name, rule_name, input_value, output)
|
|
VALUES ($1, $2, $3, $4)
|
|
ON CONFLICT (source_name, rule_name, input_value)
|
|
DO UPDATE SET output = EXCLUDED.output
|
|
RETURNING *`,
|
|
[source_name, rule_name, JSON.stringify(input_value), JSON.stringify(output)]
|
|
);
|
|
results.push(result.rows[0]);
|
|
}
|
|
await client.query('COMMIT');
|
|
|
|
res.status(201).json({ count: results.length, mappings: results });
|
|
} catch (err) {
|
|
await client.query('ROLLBACK');
|
|
next(err);
|
|
} finally {
|
|
client.release();
|
|
}
|
|
});
|
|
|
|
// Get single mapping
|
|
router.get('/:id', async (req, res, next) => {
|
|
try {
|
|
const result = await pool.query(
|
|
'SELECT * FROM mappings WHERE id = $1',
|
|
[req.params.id]
|
|
);
|
|
|
|
if (result.rows.length === 0) {
|
|
return res.status(404).json({ error: 'Mapping not found' });
|
|
}
|
|
|
|
res.json(result.rows[0]);
|
|
} catch (err) {
|
|
next(err);
|
|
}
|
|
});
|
|
|
|
// Create mapping
|
|
router.post('/', async (req, res, next) => {
|
|
try {
|
|
const { source_name, rule_name, input_value, output } = req.body;
|
|
|
|
if (!source_name || !rule_name || !input_value || !output) {
|
|
return res.status(400).json({
|
|
error: 'Missing required fields: source_name, rule_name, input_value, output'
|
|
});
|
|
}
|
|
|
|
const result = await pool.query(
|
|
`INSERT INTO mappings (source_name, rule_name, input_value, output)
|
|
VALUES ($1, $2, $3, $4)
|
|
RETURNING *`,
|
|
[source_name, rule_name, JSON.stringify(input_value), JSON.stringify(output)]
|
|
);
|
|
|
|
res.status(201).json(result.rows[0]);
|
|
} catch (err) {
|
|
if (err.code === '23505') { // Unique violation
|
|
return res.status(409).json({ error: 'Mapping already exists' });
|
|
}
|
|
if (err.code === '23503') { // Foreign key violation
|
|
return res.status(404).json({ error: 'Source or rule not found' });
|
|
}
|
|
next(err);
|
|
}
|
|
});
|
|
|
|
// Bulk create mappings
|
|
router.post('/bulk', async (req, res, next) => {
|
|
const client = await pool.connect();
|
|
try {
|
|
const { mappings } = req.body;
|
|
|
|
if (!Array.isArray(mappings)) {
|
|
return res.status(400).json({ error: 'Expected array of mappings' });
|
|
}
|
|
|
|
await client.query('BEGIN');
|
|
|
|
const results = [];
|
|
for (const mapping of mappings) {
|
|
const { source_name, rule_name, input_value, output } = mapping;
|
|
|
|
const result = await client.query(
|
|
`INSERT INTO mappings (source_name, rule_name, input_value, output)
|
|
VALUES ($1, $2, $3, $4)
|
|
ON CONFLICT (source_name, rule_name, input_value)
|
|
DO UPDATE SET output = EXCLUDED.output
|
|
RETURNING *`,
|
|
[source_name, rule_name, JSON.stringify(input_value), JSON.stringify(output)]
|
|
);
|
|
|
|
results.push(result.rows[0]);
|
|
}
|
|
|
|
await client.query('COMMIT');
|
|
res.status(201).json({ count: results.length, mappings: results });
|
|
} catch (err) {
|
|
await client.query('ROLLBACK');
|
|
next(err);
|
|
} finally {
|
|
client.release();
|
|
}
|
|
});
|
|
|
|
// Update mapping
|
|
router.put('/:id', async (req, res, next) => {
|
|
try {
|
|
const { input_value, output } = req.body;
|
|
|
|
const result = await pool.query(
|
|
`UPDATE mappings
|
|
SET input_value = COALESCE($2, input_value),
|
|
output = COALESCE($3, output)
|
|
WHERE id = $1
|
|
RETURNING *`,
|
|
[req.params.id, input_value, output ? JSON.stringify(output) : null]
|
|
);
|
|
|
|
if (result.rows.length === 0) {
|
|
return res.status(404).json({ error: 'Mapping not found' });
|
|
}
|
|
|
|
res.json(result.rows[0]);
|
|
} catch (err) {
|
|
next(err);
|
|
}
|
|
});
|
|
|
|
// Delete mapping
|
|
router.delete('/:id', async (req, res, next) => {
|
|
try {
|
|
const result = await pool.query(
|
|
'DELETE FROM mappings WHERE id = $1 RETURNING id',
|
|
[req.params.id]
|
|
);
|
|
|
|
if (result.rows.length === 0) {
|
|
return res.status(404).json({ error: 'Mapping not found' });
|
|
}
|
|
|
|
res.json({ success: true, deleted: result.rows[0].id });
|
|
} catch (err) {
|
|
next(err);
|
|
}
|
|
});
|
|
|
|
return router;
|
|
};
|