network #1

Merged
SethTrowbridge merged 13 commits from network into master 2021-07-29 16:16:32 -04:00
3 changed files with 23 additions and 10 deletions
Showing only changes of commit 574c670482


@@ -136,6 +136,15 @@ Deno.test("Batch.Scale", ()=>
     assertEquals(t[0].length, 2, "correct dimensions");
     assertEquals(t[1][0], 1.5, "correct placement");
 });
+Deno.test("Batch.Subtract", ()=>
+{
+    const c = [[1, 2], [3, 4]];
+    const s = [[0.5, 0.5], [0.5, 0.5]];
+    const t = M.Batch.Subtract(c, s);
+    assertEquals(t.length, 2, "correct count");
+    assertEquals(t[0].length, 2, "correct dimensions");
+    assertEquals(t[1][0], 2.5, "correct placement");
+});
 Deno.test("Batch.Sigmoid", ()=>
 {
     const m = [[-1000, 1000]];
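The added Batch.Subtract test only passes if subtraction is applied element-wise over two equally sized matrices: row [3, 4] minus [0.5, 0.5] gives [2.5, 3.5], hence the t[1][0] check. A minimal TypeScript sketch of that assumed behavior (not the repo's actual implementation):

    // Element-wise matrix subtraction over plain number[][] values of identical shape.
    type Matrix = number[][];
    const Subtract = (inA:Matrix, inB:Matrix):Matrix =>
        inA.map((inRow:number[], i:number)=> inRow.map((inValue:number, j:number)=> inValue - inB[i][j]));

    Subtract([[1, 2], [3, 4]], [[0.5, 0.5], [0.5, 0.5]]); // [[0.5, 1.5], [2.5, 3.5]], so t[1][0] === 2.5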


@@ -31,8 +31,8 @@ Deno.test("NN.Label", ()=>
 Deno.test("NN.Backward", ()=>
 {
-    let layer1 = M.Create.Box([0, 0, 0], [1, 1, 1], 4);
-    let layer2 = M.Create.Box([0, 0, 0, 0, 0], [1, 1, 1, 1, 1], 1);
+    let layer1 = M.Create.Box([0, 0, 0], [1, 1, 1], 2);
+    let layer2 = M.Create.Box([0, 0, 0], [1, 1, 1], 1);
     let copy1 = M.Create.Clone(layer1);
     let copy2 = M.Create.Clone(layer2);
@@ -41,16 +41,18 @@ Deno.test("NN.Backward", ()=>
     for(let i=0; i<100; i++)
     {
-        Backward(stages, layers, training[1], 0.01);
+        Backward(stages, layers, training[1], 0.1);
     }
-    assert(layer1[0][0] != copy1[0][0], "first matrix has changed");
-    assert(layer1[1][0] != copy2[1][0], "second matrix has changed");
+    console.log(layer1, copy1);
+    assert(layers[0][0][0] != copy1[0][0], "first matrix has changed");
+    assert(layers[1][0][0] != copy2[0][0], "second matrix has changed");
 });
 Deno.test("NN.Forward", ()=>
 {
     console.log(Forward(stages, layers));
     console.log(training[1]);
 });
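The assertions now read the weights through layers rather than through layer1 and layer2 directly, which lines up with the nn.ts change below: Backward writes a freshly built matrix into inLayers[i] instead of mutating the existing one, so the variables holding the original matrices never change. A small standalone illustration of that distinction (assumed values, not repo code):

    // Reassigning an array slot does not change a variable that still holds the old element.
    let layer1:number[][] = [[0.1, 0.2]];
    const layers:number[][][] = [layer1];
    layers[0] = [[0.3, 0.4]];       // analogous to inLayers[i] = M.Batch.Subtract(inLayers[i], deltas)
    console.log(layer1[0][0]);      // 0.1 -- the original matrix is untouched
    console.log(layers[0][0][0]);   // 0.3 -- only the array slot reflects the update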

nn.ts

@@ -28,24 +28,26 @@ const Forward = (inStages:N, inLayers:N):Cloud.M =>
     inStages[i+1] = process(i);
     return inStages[i+1];
 };
-const Backward = (inStages:N, inLayers:N, inGoals:Cloud.M, inRate:number):void =>
+const Backward = (inStages:N, inLayers:N, inGoals:Cloud.M, inRate:number):N =>
 {
     let i:number;
     let errorBack:Cloud.M = M.Batch.Subtract(Forward(inStages, inLayers), inGoals);
     for(i=inLayers.length-1; i>=0; i--)
     {
-        let layerMatrix:Cloud.M = inLayers[i];
         let layerInput:Cloud.M = inStages[i];
         let layerOutput:Cloud.M = inStages[i+1];
         let errorScaled:Cloud.M = M.Batch.Multiply(errorBack, M.Batch.Derivative(layerOutput));
-        errorBack = M.Batch.Affine(errorScaled, M.Create.Transpose(layerMatrix));
+        errorBack = M.Batch.Affine(errorScaled, M.Create.Transpose(inLayers[i]));
         errorScaled.forEach((inScaledError:Cloud.V, inIndex:number)=> {
             const deltas = M.Batch.Scale(M.Create.Outer(layerInput[inIndex], inScaledError), inRate);
-            layerMatrix = M.Batch.Subtract(layerMatrix, deltas);
+            inLayers[i] = M.Batch.Subtract(inLayers[i], deltas);
         });
     }
+    return inLayers;
 };
 export { Label, Forward, Backward };
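The loop is the usual backpropagation update: the output error is scaled by the sigmoid derivative, pushed back through the transposed weights, and each weight matrix is adjusted by rate * outer(input, scaledError). With the return type changed from void to N, callers can also capture the updated layer list directly; a hypothetical fragment, assuming stages, layers, and training are set up as in the NN tests:

    // Backward mutates the layer array it is given and now also returns it,
    // so the result can be reassigned or chained in one expression.
    for(let epoch=0; epoch<100; epoch++)
    {
        layers = Backward(stages, layers, training[1], 0.1);
    }
    console.log(Forward(stages, layers));   // inspect the network's output after training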