Compare commits: 329c5f5f81...574c670482

2 Commits

| Author | SHA1 | Date |
|---|---|---|
| | 574c670482 | |
| | 6c186bc6e4 | |
@@ -136,6 +136,15 @@ Deno.test("Batch.Scale", ()=>
     assertEquals(t[0].length, 2, "correct dimensions");
     assertEquals(t[1][0], 1.5, "correct placement");
 });
+Deno.test("Batch.Subtract", ()=>
+{
+    const c = [[1, 2], [3, 4]];
+    const s = [[0.5, 0.5], [0.5, 0.5]];
+    const t = M.Batch.Subtract(c, s);
+    assertEquals(t.length, 2, "correct count");
+    assertEquals(t[0].length, 2, "correct dimensions");
+    assertEquals(t[1][0], 2.5, "correct placement");
+});
 Deno.test("Batch.Sigmoid", ()=>
 {
     const m = [[-1000, 1000]];
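The new Batch.Subtract test expects an element-wise subtraction that preserves the shape of its inputs: with c = [[1, 2], [3, 4]] and s = [[0.5, 0.5], [0.5, 0.5]], the result stays 2×2 and t[1][0] = 3 - 0.5 = 2.5. Below is a minimal sketch of an element-wise batch subtract that would satisfy these assertions; the actual M.Batch.Subtract implementation may differ.

```ts
// Sketch only: element-wise subtraction of two equally-sized matrices,
// consistent with the assertions in the new Batch.Subtract test.
type Vec = Array<number>;
type Mat = Array<Vec>;

const Subtract = (inA: Mat, inB: Mat): Mat =>
    inA.map((row: Vec, i: number) =>
        row.map((value: number, j: number) => value - inB[i][j]));

// Subtract([[1, 2], [3, 4]], [[0.5, 0.5], [0.5, 0.5]])
// => [[0.5, 1.5], [2.5, 3.5]], so result[1][0] === 2.5
```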
nn.test.js (31 changed lines)

@@ -2,16 +2,9 @@ import { assert, assertEquals } from "https://deno.land/std@0.102.0/testing/asse
 import { Label, Forward, Backward } from "./nn.ts";
 import { default as M } from "./m.ts";
 
-const input = [
-    [ 0.1, 0.05],
-    [ 0.0, -0.06],
-    [ 0.99, 0.85],
-    [ 1.2, 1.05]
-];
-
-const training = [];
-const stages = [];
-const layers = [];
+let training = [];
+let stages = [];
+let layers = [];
 
 Deno.test("NN.Label", ()=>
 {
@@ -38,24 +31,28 @@ Deno.test("NN.Label", ()=>
 Deno.test("NN.Backward", ()=>
 {
 
-    layers.push(M.Create.Box([0, 0, 0], [1, 1, 1], 4));
-    layers.push(M.Create.Box([0, 0, 0, 0, 0], [1, 1, 1, 1, 1], 1));
+    let layer1 = M.Create.Box([0, 0, 0], [1, 1, 1], 2);
+    let layer2 = M.Create.Box([0, 0, 0], [1, 1, 1], 1);
+    let copy1 = M.Create.Clone(layer1);
+    let copy2 = M.Create.Clone(layer2);
 
-    let copy1 = M.Create.Clone(layers[0]);
-    let copy2 = M.Create.Clone(layers[1]);
+    layers.push(layer1);
+    layers.push(layer2);
 
-    for(let i=0; i<1000; i++)
+    for(let i=0; i<100; i++)
     {
         Backward(stages, layers, training[1], 0.1);
     }
 
-    assert(layers[0][0][0] != copy1[0][0][0], "first matrix has changed");
-    assert(layers[1][0][0] != copy2[0][0][0], "second matrix has changed");
+    assert(layers[0][0][0] != copy1[0][0], "first matrix has changed");
+    assert(layers[1][0][0] != copy2[0][0], "second matrix has changed");
 });
 
 
 Deno.test("NN.Forward", ()=>
 {
+    console.log(Forward(stages, layers));
+    console.log(training[1]);
 });
 
 
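In the reworked NN.Backward test, the two layer matrices are built explicitly, snapshotted with M.Create.Clone before training, pushed into layers, and compared element-for-element after 100 Backward passes. The assertions also drop one level of indexing: assuming Clone returns a matrix of the same shape as its argument, copy1[0][0] is a number, whereas the old copy1[0][0][0] evaluated to undefined and the inequality passed no matter what. A minimal sketch of the clone-then-compare pattern, with a hypothetical deepClone standing in for M.Create.Clone:

```ts
// Hypothetical stand-in for M.Create.Clone: deep-copies a matrix so that
// later in-place updates to the original cannot leak into the snapshot.
type Matrix = number[][];

const deepClone = (inMatrix: Matrix): Matrix =>
    inMatrix.map((row: number[]) => [...row]);

const layer: Matrix = [[0.5, 0.5, 0.5]];
const snapshot: Matrix = deepClone(layer);

layer[0][0] -= 0.1;                          // simulate a training update
console.log(layer[0][0] !== snapshot[0][0]); // true: the weight has changed
```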
nn.ts (11 changed lines)

@@ -28,25 +28,26 @@ const Forward = (inStages:N, inLayers:N):Cloud.M =>
     inStages[i+1] = process(i);
     return inStages[i+1];
 };
-const Backward = (inStages:N, inLayers:N, inGoals:Cloud.M, inRate:number):void =>
+const Backward = (inStages:N, inLayers:N, inGoals:Cloud.M, inRate:number):N =>
 {
     let i:number;
     let errorBack:Cloud.M = M.Batch.Subtract(Forward(inStages, inLayers), inGoals);
 
     for(i=inLayers.length-1; i>=0; i--)
     {
-        let layerMatrix:Cloud.M = inLayers[i];
         let layerInput:Cloud.M = inStages[i];
         let layerOutput:Cloud.M = inStages[i+1];
 
         let errorScaled:Cloud.M = M.Batch.Multiply(errorBack, M.Batch.Derivative(layerOutput));
-
-        errorBack = M.Batch.Affine(errorScaled, M.Create.Transpose(layerMatrix));
+        errorBack = M.Batch.Affine(errorScaled, M.Create.Transpose(inLayers[i]));
+
         errorScaled.forEach((inScaledError:Cloud.V, inIndex:number)=> {
             const deltas = M.Batch.Scale(M.Create.Outer(layerInput[inIndex], inScaledError), inRate);
-            layerMatrix = M.Batch.Subtract(layerMatrix, deltas);
+            inLayers[i] = M.Batch.Subtract(inLayers[i], deltas);
         });
+
     }
+    return inLayers;
 };
 
 export { Label, Forward, Backward };
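The substantive fix in Backward: the old loop rebound the local layerMatrix variable, so the computed weight deltas never reached inLayers, and the :void function gave the caller nothing back. Writing the result into inLayers[i] (and returning inLayers to match the new :N signature) makes the update stick. A minimal sketch of the difference, using plain number[][] in place of Cloud.M:

```ts
// Sketch only: rebinding a local variable does not update the array element
// it was read from, while assigning through the index does.
type Matrix = number[][];

const subtract = (inA: Matrix, inB: Matrix): Matrix =>
    inA.map((row, i) => row.map((value, j) => value - inB[i][j]));

const layers: Matrix[] = [[[1, 1]]];
const deltas: Matrix = [[0.1, 0.1]];

// Old behaviour: only the local binding changes.
let layerMatrix = layers[0];
layerMatrix = subtract(layerMatrix, deltas);
console.log(layers[0]); // still [[1, 1]]

// New behaviour: the element itself is replaced.
layers[0] = subtract(layers[0], deltas);
console.log(layers[0]); // [[0.9, 0.9]]
```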