network #1
nn.test.js (135 changed lines)
@@ -1,113 +1,52 @@
 import { assert, assertEquals } from "https://deno.land/std@0.102.0/testing/asserts.ts";
-import { Split, Forward, Backward } from "./nn.ts";
-import { default as M } from "./m.ts";
-import { default as Methods } from "./m.ts";
+import { Split, Build, Label, Learn, Check } from "./nn.ts";
 
-let training = [];
-let stages = [];
+let data = [
+    [ 0.10, 0.05, 0, 1],
+    [ 0.00, -0.06, 0, 1],
+    [ 0.99, 0.85, 1, 0],
+    [ 1.20, 1.05, 1, 0]
+];
+let columns = [2, 3];
+let input, output;
 let layers = [];
 
-let typeA = [
-    [ 0.1, 0.05],
-    [ 0.0, -0.06]
-];
-let typeB = [
-    [ 0.99, 0.85],
-    [ 1.2, 1.05]
-];
-
-
-Deno.test("check forward/backward", ()=>
-{
-    let matrix1 = [
-        [-0.43662948305036675, -0.368590640707799, -0.23227179558890843],
-        [-0.004292653969505622, 0.38670055222186317, -0.2478421495365568],
-        [0.738181366836224, 0.3389203747353555, 0.4920200816404332]
-    ];
-
-    let matrix2 = [
-        [0.7098703863463034, 0.35485944251238033, 0.7642849892333241, 0.03046174288491077],
-        [-0.30655426258144347, 0.45509633551425077, -0.5013795222004322, -0.3421292736637427]
-    ];
-
-    let input = [
-        [ 0.1, 0.05],
-        [ 0.0, -0.06],
-        [ 0.99, 0.85],
-        [ 1.2, 1.05]
-    ];
-    let output = [
-        [1, 0],
-        [1, 0],
-        [0, 1],
-        [0, 1]
-    ];
-
-    let layers = [matrix1, matrix2];
-    let stages = [];
-    for(let i=0; i<1000; i++)
-    {
-        stages = Forward(Methods.Mutate.Pad(input), layers);
-        Backward(stages, layers, output, 0.1);
-    }
-
-    stages = Forward(input, layers);
-    console.log(stages[stages.length-1]);
-});
-
 Deno.test("NN.Split", ()=>
 {
-    let data = [
-        [3, 2, 1, 0, 1],
-        [6, 5, 4, 1, 0]
-    ]
-    let split = Split(data, [3, 4]);
-    console.log(split);
+    [input, output] = Split(data, columns);
+    assert(input);
+    assert(output);
+    assertEquals(input.length, output.length, "data split into equal input and output");
+    assertEquals(input[0].length, 3, "padded input");
+    assertEquals(output[0].length, 2, "unpadded output");
 });
 
-/*
-Deno.test("NN.Label", ()=>
+Deno.test("NN.Build", ()=>
 {
-    Label(training, typeA, [1, 0]);
-    Label(training, typeB, [0, 1]);
-    assertEquals(training.length, 2, "input and output sets created");
-    assertEquals(training[0].length, training[1].length, "both sets have same length");
-    assertEquals(training[0][0].length, 3, "padded input component");
-    assertEquals(training[1][0].length, 2, "unchanged label vector");
+    layers = Build(2, 5, 2);
+    assertEquals(layers.length, 2, "correct number of matrices");
+    assertEquals(layers[0][0].length, input[0].length, "input: padded input");
+    assertEquals(layers[0].length, 5, "input: unpadded output");
+    assertEquals(layers[1][0].length, 6, "hidden: padded input");
+    assertEquals(layers[1].length, output[0].length, "hidden: unpadded output");
 });
 
-Deno.test("NN.Forward", ()=>
-{
-    let layer1 = M.Create.Box([-1, -1, -1], [1, 1, 1], 2);
-    let layer2 = M.Create.Box([-1, -1, -1], [1, 1, 1], 1);
-    layers.push(layer1);
-    layers.push(layer2);
-
-    console.log(training[0]);
-    stages = Forward(training[0], layers);
-    console.log(stages);
-});
-
-Deno.test("NN.Backward", ()=>
-{
-    let copy1 = M.Create.Clone(layers[0]);
-    let copy2 = M.Create.Clone(layers[1]);
-
-    for(let i=0; i<100; i++)
-    {
-        Backward(stages, layers, training[1], 0.1);
-    }
-
-    assert(layers[0][0][0] != copy1[0][0], "first matrix has changed");
-    assert(layers[1][0][0] != copy2[0][0], "second matrix has changed");
-});
-
-
 Deno.test("NN.Label", ()=>
 {
-    let stages = Forward(training[0], layers);
-    console.log(stages[stages.length-1]);
+    let labels = Label(input, layers);
+    assertEquals(labels.length, output.length);
+    assertEquals(labels[0].length, output[0].length);
 });
 
-*/
+Deno.test("NN.Learn", ()=>
+{
+    let error = Learn(input, layers, output, 1000, 0.1);
+    assertEquals(error.length, output.length);
+    let total = 0;
+    let count = error.length*error[0].length;
+    error.forEach(row=> row.forEach(component=> total+=Math.abs(component)));
+    assert(total/count < 0.3);
+});
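For orientation, the rewritten tests exercise the new nn.ts API in roughly this order. The sketch below is assembled from the lines above and is not an extra file in the change; the comments are interpretive:

    import { Split, Build, Label, Learn } from "./nn.ts";

    // Same 4-row training set as the tests use; columns 2 and 3 hold the one-hot labels.
    let data = [
        [ 0.10, 0.05, 0, 1],
        [ 0.00, -0.06, 0, 1],
        [ 0.99, 0.85, 1, 0],
        [ 1.20, 1.05, 1, 0]
    ];

    // Split keeps the non-label columns, appends a bias 1 to each input row, and returns [input, output].
    let [input, output] = Split(data, [2, 3]);

    // Build(2, 5, 2) yields two weight matrices: 2 inputs (+bias) -> 5 hidden -> 2 outputs.
    let layers = Build(2, 5, 2);

    // Learn runs 1000 forward/backward passes at learning rate 0.1 and returns the final error matrix.
    let error = Learn(input, layers, output, 1000, 0.1);

    // Label is the inference path: one output vector per input row.
    let labels = Label(input, layers);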
nn.ts (14 changed lines)
@@ -38,10 +38,12 @@ const Split = (inTrainingSet:Cloud.M, inHeaderLabel:Cloud.V, inHeaderKeep:Cloud.
 {
     inTrainingSet[0].forEach( (item:number, index:number)=> inHeaderLabel.includes(index) ? false : inHeaderKeep.push(index) );
 }
-inTrainingSet.forEach((row:Cloud.V) =>
+inTrainingSet.forEach((row:Cloud.V):void =>
 {
-    data.push( [...inHeaderKeep.map((i:number)=>row[i]), 1] );
-    label.push( inHeaderLabel.map((i:number)=>row[i]) );
+    let vectorData = [ ...inHeaderKeep.map((i:number)=>row[i]), 1];
+    let vectorLabel = inHeaderLabel.map((i:number)=>row[i])
+    data.push( vectorData );
+    label.push( vectorLabel );
 });
 return [ data, label ];
 };
@@ -54,7 +56,6 @@ const Build = (...inLayers:Array<number>):N =>
 {
     output.push(rand( inLayers[i]+1, inLayers[i+1]));
 }
-output.push( rand( inLayers[i-1], inLayers[i]) );
 return output;
 };
 const Label = (inData:Cloud.M, inLayers:N):Cloud.M =>
@@ -72,7 +73,6 @@ const Learn = (inData:Cloud.M, inLayers:N, inLabels:Cloud.M, inIterations:number
 }
 return M.Batch.Subtract(stages[stages.length-1], inLabels);
 };
-const Error = M.Batch.Subtract;
+const Check = (inData:Cloud.M, inLayers:N, inLabels:Cloud.M):Cloud.M => Learn(inData, inLayers, inLabels, 1, 0);
 
-export { Split, Build, Label, Learn, Error, Forward, Backward };
-export type { Cloud };
+export { Split, Build, Label, Learn, Check, Forward, Backward };
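The nn.ts side of the change is small: Split now builds each row's data and label vectors in named locals before pushing them (no behavioural change), Build drops the extra output.push after the loop so that Build(2, 5, 2) yields exactly two matrices (as the new NN.Build test asserts), and the Error alias is replaced by Check, which is simply Learn run for a single iteration at learning rate 0, i.e. a forward pass that reports the current error without updating the weights. A hypothetical use of Check, not part of this change, might look like:

    // Check is imported from "./nn.ts"; input, layers and output as in the sketch above.
    let residual = Check(input, layers, output);   // error matrix, same shape as output
    let values = residual.flat();
    let meanError = values.reduce((sum, v) => sum + Math.abs(v), 0) / values.length;
    console.log(meanError);                        // mirrors the averaging done in the NN.Learn test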