network #1
index.html
@@ -708,13 +708,10 @@ NN.Network.Stochastic = function(inNetwork, inTrainingSet, inIterations)
 let nn1 = NN.Network.Create(2, 3, 2);
 nn1.Layers[0].Forward.Matrix = matrix1;
 nn1.Layers[1].Forward.Matrix = matrix2;
 nn1.LearningRate = 0.1;
-//let logLayers = inNN => inNN.Layers.forEach(L=>console.log(L.Forward.Matrix));
+let logLayers = inNN => inNN.Layers.forEach(L=>console.log(L.Forward.Matrix));

 logLayers(nn1);

-NN.Network.Batch(nn1, {Input:input, Output:output}, 100);
-logLayers(nn1);
+NN.Network.Batch(nn1, {Input:input, Output:output}, 1000);
 console.log(NN.Network.Observe(nn1, input));

 </script>
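(A note on the call above: NN.Network.Batch is assumed to drive the same iterate-Forward-then-Backward loop that nn.ts exposes as Learn later in this diff; the sketch below is illustrative, not the wrapper's actual implementation, and it assumes the input rows already carry their bias column.)

    import { Forward, Backward } from "./nn.ts";

    // Repeat a forward pass and a backward weight update, as in Learn(data, layers, labels, 1000, 0.1).
    const train = (layers:number[][][], input:number[][], output:number[][], iterations:number, rate:number) =>
    {
        for(let i=0; i<iterations; i++)
        {
            const stages = Forward(input, layers);   // activations for every layer
            Backward(stages, layers, output, rate);  // in-place gradient step on each weight matrix
        }
    };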
nn.test.js
@@ -1,5 +1,5 @@
 import { assert, assertEquals } from "https://deno.land/std@0.102.0/testing/asserts.ts";
-import { Label, Forward, Backward } from "./nn.ts";
+import { Split, Forward, Backward } from "./nn.ts";
-import { default as M } from "./m.ts";
+import { default as Methods } from "./m.ts";

@@ -17,7 +17,7 @@ let typeB = [
 ];

-Deno.test("check.forward", ()=>
+Deno.test("check forward/backward", ()=>
 {
     let matrix1 = [
         [-0.43662948305036675, -0.368590640707799, -0.23227179558890843],
@@ -44,14 +44,25 @@ Deno.test("check.forward", ()=>
     ];

     let layers = [matrix1, matrix2];
-    console.log("BEFORE", layers);
-    for(let i=0; i<100; i++)
+    let stages = [];
+    for(let i=0; i<1000; i++)
     {
-        let stages = Forward(Methods.Mutate.Pad(input), layers);
+        stages = Forward(Methods.Mutate.Pad(input), layers);
         Backward(stages, layers, output, 0.1);
     }
-    console.log("AFTER", layers);
+    stages = Forward(input, layers);
+    console.log(stages[stages.length-1]);
 });

+Deno.test("NN.Split", ()=>
+{
+    let data = [
+        [3, 2, 1, 0, 1],
+        [6, 5, 4, 1, 0]
+    ]
+    let split = Split(data, [3, 4]);
+    console.log(split);
+});
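If Split behaves as the implementation added to nn.ts below, columns 3 and 4 of each row become the label and the remaining columns keep their order and gain a trailing bias 1, so the console.log could be tightened into an assertion with the already-imported assertEquals; a sketch of the expected value:

    assertEquals(split, [
        [[3, 2, 1, 1], [6, 5, 4, 1]],   // data: kept columns 0..2 plus the bias column
        [[0, 1], [1, 0]]                // labels: columns 3 and 4
    ]);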
nn.ts
@@ -1,31 +1,13 @@
 import { default as M, Cloud } from "./m.ts";
 export type N = Array<Array<Array<number>>>

-const Label = (inSet:any, inData:Cloud.M, inLabel:Cloud.V):N =>
-{
-    if(!inSet){inSet = [[], []];}
-    if(inSet.length == 0){inSet.push([]);}
-    if(inSet.length == 1){inSet.push([]);}
-
-    inData.forEach((row:Cloud.V) =>
-    {
-        row.push(1);
-        inSet[0].push(row);
-        inSet[1].push(inLabel);
-    });
-    return inSet;
-};
-
 const Forward = (inData:Cloud.M, inLayers:N):N =>
 {
     let i:number;
-    let stages = [inData];
+    let stages:N = [inData];
     let process = (index:number):Cloud.M => M.Batch.Sigmoid(M.Batch.Affine(stages[index], inLayers[index]));

-    for(i=0; i<inLayers.length-1; i++)
-    {
-        stages[i+1] = M.Mutate.Pad(process(i));
-    }
+    for(i=0; i<inLayers.length-1; i++){ stages[i+1] = M.Mutate.Pad(process(i)); }
     stages[i+1] = process(i);
     return stages;
 };
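For orientation, a dependency-free sketch of what one Forward stage computes, assuming from their usage here that M.Batch.Affine multiplies each row of a stage by the layer matrix, M.Batch.Sigmoid applies the logistic function element-wise, and M.Mutate.Pad appends a bias 1 to every row (the final stage skips the pad); names and shapes are illustrative:

    type Matrix = number[][];

    const affine  = (rows:Matrix, layer:Matrix):Matrix =>
        rows.map(row => layer[0].map((_, c) => row.reduce((sum, v, r) => sum + v*layer[r][c], 0)));
    const sigmoid = (rows:Matrix):Matrix => rows.map(row => row.map(v => 1/(1 + Math.exp(-v))));
    const pad     = (rows:Matrix):Matrix => rows.map(row => [...row, 1]);

    // One hidden stage: stages[i+1] = pad(sigmoid(stages[i] · layers[i]))
    const stage = (input:Matrix, layer:Matrix):Matrix => pad(sigmoid(affine(input, layer)));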
@@ -36,20 +18,61 @@ const Backward = (inStages:N, inLayers:N, inGoals:Cloud.M, inRate:number):N =>

     for(i=inLayers.length-1; i>=0; i--)
     {
-        let layerInput:Cloud.M = inStages[i];
-        let layerOutput:Cloud.M = inStages[i+1];
-        let errorScaled:Cloud.M = M.Batch.Multiply(errorBack, M.Batch.Derivative(layerOutput));
+        let errorScaled:Cloud.M = M.Batch.Multiply(errorBack, M.Batch.Derivative(inStages[i+1]));
         errorBack = M.Batch.Affine(errorScaled, M.Create.Transpose(inLayers[i]));

-        errorScaled.forEach((inScaledError:Cloud.V, inIndex:number)=> {
-            const deltas = M.Batch.Scale(M.Create.Outer(layerInput[inIndex], inScaledError), inRate);
-            inLayers[i] = M.Batch.Subtract(inLayers[i], deltas);
+        errorScaled.forEach((inScaledError:Cloud.V, inIndex:number)=>
+        {
+            inLayers[i] = M.Batch.Subtract(
+                inLayers[i],
+                M.Batch.Scale(M.Create.Outer(inStages[i][inIndex], inScaledError), inRate)
+            );
         });
     }
     return inLayers;
 };
+const Split = (inTrainingSet:Cloud.M, inHeaderLabel:Cloud.V, inHeaderKeep:Cloud.V = []):N =>
+{
+    let data:Cloud.M = [];
+    let label:Cloud.M = [];
+    if(!inHeaderKeep.length)
+    {
+        inTrainingSet[0].forEach( (item:number, index:number)=> inHeaderLabel.includes(index) ? false : inHeaderKeep.push(index) );
+    }
+    inTrainingSet.forEach((row:Cloud.V) =>
+    {
+        data.push( [...inHeaderKeep.map((i:number)=>row[i]), 1] );
+        label.push( inHeaderLabel.map((i:number)=>row[i]) );
+    });
+    return [ data, label ];
+};
+const Build = (...inLayers:Array<number>):N =>
+{
+    let i:number;
+    let output:N = [];
+    let rand = (inDimensions:number, inCount:number):Cloud.M => M.Create.Box( new Array(inDimensions).fill(-1), new Array(inDimensions).fill(1), inCount);
+    for(i=0; i<inLayers.length-1; i++)
+    {
+        output.push(rand( inLayers[i]+1, inLayers[i+1]));
+    }
+    output.push( rand( inLayers[i-1], inLayers[i]) );
+    return output;
+};
+const Label = (inData:Cloud.M, inLayers:N):Cloud.M =>
+{
+    let stages:N = Forward(inData, inLayers);
+    return stages[stages.length-1];
+};
+const Learn = (inData:Cloud.M, inLayers:N, inLabels:Cloud.M, inIterations:number, inRate:number):Cloud.M =>
+{
+    let stages:N = [];
+    for(let i=0; i<inIterations; i++)
+    {
+        stages = Forward(inData, inLayers);
+        Backward(stages, inLayers, inLabels, inRate);
+    }
+    return M.Batch.Subtract(stages[stages.length-1], inLabels);
+};
+const Error = M.Batch.Subtract;

-export { Label, Forward, Backward };
+export { Split, Build, Label, Learn, Error, Forward, Backward };
+export type { Cloud };
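Taken together, the new exports compose into a short train-and-predict routine. A minimal sketch, assuming a numeric table whose last two columns are a one-hot label and assuming Build(3, 3, 2) yields weight matrices shaped for three bias-padded inputs, three hidden units and two outputs; the table values and layer sizes are illustrative:

    import { Split, Build, Learn, Label } from "./nn.ts";

    // Three feature columns followed by a two-column one-hot label.
    const table = [
        [3, 2, 1, 0, 1],
        [6, 5, 4, 1, 0]
    ];

    const [data, labels] = Split(table, [3, 4]);              // feature rows gain a trailing bias 1
    const layers = Build(3, 3, 2);                            // random starting weights in [-1, 1]
    const residual = Learn(data, layers, labels, 1000, 0.1);  // iterated Forward/Backward; returns output minus labels
    console.log(residual);
    console.log(Label(data, layers));                         // forward pass over the trained layers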
|
Loading…
Reference in New Issue
Block a user