Compare commits

..

No commits in common. "574c6704823f31b200e73dc51ac9ec6e18744674" and "329c5f5f819c5d8a4ebab827b61de42a3aa6bc26" have entirely different histories.

3 changed files with 22 additions and 29 deletions

View File

@@ -136,15 +136,6 @@ Deno.test("Batch.Scale", ()=>
assertEquals(t[0].length, 2, "correct dimensions"); assertEquals(t[0].length, 2, "correct dimensions");
assertEquals(t[1][0], 1.5, "correct placement"); assertEquals(t[1][0], 1.5, "correct placement");
}); });
// Batch.Subtract must subtract two same-shaped batches element-wise,
// preserving both the row count and the per-row dimensionality.
Deno.test("Batch.Subtract", ()=>
{
    const minuend = [[1, 2], [3, 4]];
    const subtrahend = [[0.5, 0.5], [0.5, 0.5]];
    const difference = M.Batch.Subtract(minuend, subtrahend);
    // shape is preserved: two rows of two components each
    assertEquals(difference.length, 2, "correct count");
    assertEquals(difference[0].length, 2, "correct dimensions");
    // spot-check one element: 3 - 0.5 = 2.5
    assertEquals(difference[1][0], 2.5, "correct placement");
});
Deno.test("Batch.Sigmoid", ()=> Deno.test("Batch.Sigmoid", ()=>
{ {
const m = [[-1000, 1000]]; const m = [[-1000, 1000]];

View File

@@ -2,9 +2,16 @@ import { assert, assertEquals } from "https://deno.land/std@0.102.0/testing/asse
import { Label, Forward, Backward } from "./nn.ts"; import { Label, Forward, Backward } from "./nn.ts";
import { default as M } from "./m.ts"; import { default as M } from "./m.ts";
let training = []; const input = [
let stages = []; [ 0.1, 0.05],
let layers = []; [ 0.0, -0.06]
[ 0.99, 0.85],
[ 1.2, 1.05]
];
const training = [];
const stages = [];
const layers = [];
Deno.test("NN.Label", ()=> Deno.test("NN.Label", ()=>
{ {
@@ -31,28 +38,24 @@ Deno.test("NN.Label", ()=>
Deno.test("NN.Backward", ()=> Deno.test("NN.Backward", ()=>
{ {
let layer1 = M.Create.Box([0, 0, 0], [1, 1, 1], 2); layers.push(M.Create.Box([0, 0, 0], [1, 1, 1], 4));
let layer2 = M.Create.Box([0, 0, 0], [1, 1, 1], 1); layers.push(M.Create.Box([0, 0, 0, 0, 0], [1, 1, 1, 1, 1], 1));
let copy1 = M.Create.Clone(layer1);
let copy2 = M.Create.Clone(layer2);
layers.push(layer1); let copy1 = M.Create.Clone(layers[0]);
layers.push(layer2); let copy2 = M.Create.Clone(layers[1]);
for(let i=0; i<100; i++) for(let i=0; i<1000; i++)
{ {
Backward(stages, layers, training[1], 0.1); Backward(stages, layers, training[1], 0.1);
} }
assert(layers[0][0][0] != copy1[0][0], "first matrix has changed"); assert(layers[0][0][0] != copy1[0][0][0], "first matrix has changed");
assert(layers[1][0][0] != copy2[0][0], "second matrix has changed"); assert(layers[1][0][0] != copy2[0][0][0], "second matrix has changed");
}); });
Deno.test("NN.Forward", ()=> Deno.test("NN.Forward", ()=>
{ {
console.log(Forward(stages, layers)); console.log(Forward(stages, layers));
console.log(training[1]); console.log(training[1]);
}); });

11
nn.ts
View File

@@ -28,26 +28,25 @@ const Forward = (inStages:N, inLayers:N):Cloud.M =>
inStages[i+1] = process(i); inStages[i+1] = process(i);
return inStages[i+1]; return inStages[i+1];
}; };
const Backward = (inStages:N, inLayers:N, inGoals:Cloud.M, inRate:number):N => const Backward = (inStages:N, inLayers:N, inGoals:Cloud.M, inRate:number):void =>
{ {
let i:number; let i:number;
let errorBack:Cloud.M = M.Batch.Subtract(Forward(inStages, inLayers), inGoals); let errorBack:Cloud.M = M.Batch.Subtract(Forward(inStages, inLayers), inGoals);
for(i=inLayers.length-1; i>=0; i--) for(i=inLayers.length-1; i>=0; i--)
{ {
let layerMatrix:Cloud.M = inLayers[i];
let layerInput:Cloud.M = inStages[i]; let layerInput:Cloud.M = inStages[i];
let layerOutput:Cloud.M = inStages[i+1]; let layerOutput:Cloud.M = inStages[i+1];
let errorScaled:Cloud.M = M.Batch.Multiply(errorBack, M.Batch.Derivative(layerOutput)); let errorScaled:Cloud.M = M.Batch.Multiply(errorBack, M.Batch.Derivative(layerOutput));
errorBack = M.Batch.Affine(errorScaled, M.Create.Transpose(inLayers[i])); errorBack = M.Batch.Affine(errorScaled, M.Create.Transpose(layerMatrix));
errorScaled.forEach((inScaledError:Cloud.V, inIndex:number)=> { errorScaled.forEach((inScaledError:Cloud.V, inIndex:number)=> {
const deltas = M.Batch.Scale(M.Create.Outer(layerInput[inIndex], inScaledError), inRate); const deltas = M.Batch.Scale(M.Create.Outer(layerInput[inIndex], inScaledError), inRate);
inLayers[i] = M.Batch.Subtract(inLayers[i], deltas); layerMatrix = M.Batch.Subtract(layerMatrix, deltas);
}); });
} }
return inLayers;
}; };
export { Label, Forward, Backward }; export { Label, Forward, Backward };