setup
commit 642e4203a0

m.js (new file, 49 lines)
@@ -0,0 +1,49 @@
/* m.js -- point-cloud / matrix helpers. A "cloud" is an array of row vectors. */
const M =
{
    Iterate:
    {
        // Build inCount rows of inDimensions entries, filling each entry
        // with inFunction(i, row, partialRow)
        New(inDimensions, inCount, inFunction)
        {
            let row, i, outputCloud, outputVector;
            outputCloud = [];
            for(row=0; row<inCount; row++)
            {
                outputVector = [];
                for(i=0; i<inDimensions; i++)
                {
                    outputVector.push(inFunction(i, row, outputVector));
                }
                outputCloud.push(outputVector);
            }
            return outputCloud;
        },
        // Iterate with the same shape as an existing cloud
        Old(inCloud, inFunction)
        {
            return M.Iterate.New(inCloud[0].length, inCloud.length, inFunction);
        }
    },
    Create:
    {
        // inCount random points inside the box spanned by corners inV1 and inV2
        Box: (inV1, inV2, inCount)=> M.Iterate.New(inV1.length, inCount, (i, row)=> inV1[i]+(inV2[i]-inV1[i])*Math.random()),
        Transpose: (inCloud)=> M.Iterate.New(inCloud.length, inCloud[0].length, (i, row)=> inCloud[i][row]),
        Outer: (inV1, inV2)=> M.Iterate.New(inV1.length, inV2.length, (i, row)=> inV1[i]*inV2[row]),
        Clone: (inCloud)=> M.Iterate.Old(inCloud, (i, row)=> inCloud[row][i])
    },
    Mutate: // in-place edits
    {
        Pad: inCloud=> inCloud.forEach(row=> row.push(1)),
        Unpad: inCloud=> inCloud.forEach(row=> row.pop())
    },
    Single:
    {
        // seed the sum at 0 so every component, including index 0, is multiplied by inV
        Affine: (inV, inMatrix)=> inMatrix.map(row=> row.reduce((sum, current, index)=> sum + current*inV[index], 0))
    },
    Batch:
    {
        Affine: (inCloud, inMatrix)=> inCloud.map(row=> M.Single.Affine(row, inMatrix)),
        // logistic 1/(1+e^-x), evaluated per entry (Iterate.Old passes indices, not values)
        Sigmoid: (inCloud)=> M.Iterate.Old(inCloud, (i, row)=> 1/(1+Math.pow(Math.E, -inCloud[row][i]))),
        Derivative: (inCloud)=> M.Iterate.Old(inCloud, (i, row)=> inCloud[row][i]*(1-inCloud[row][i]))
    }
};

export default M;
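
A quick illustration of the Iterate core (illustrative only, not part of the commit):

    // New builds inCount rows of inDimensions entries via inFunction(i, row, partialRow)
    const identity = M.Iterate.New(3, 3, (i, row)=> i===row ? 1 : 0);
    // -> [[1,0,0],[0,1,0],[0,0,1]]

    // Old reuses the shape of an existing cloud
    const doubled = M.Iterate.Old(identity, (i, row)=> identity[row][i]*2);
    // -> [[2,0,0],[0,2,0],[0,0,2]]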

m.test.js (new file, 107 lines)
@@ -0,0 +1,107 @@
import { assert, assertEquals } from "https://deno.land/std@0.102.0/testing/asserts.ts";
import { default as M } from "./m.js";

Deno.test("Iterate.New", ()=>
{
    let dimensions = 3;
    let count = 4;
    let cloud = M.Iterate.New(dimensions, count, (i, j)=>i+j);
    assertEquals(cloud.length, count, "correct count");
    assertEquals(cloud[0].length, dimensions, "correct dimensions");
    assertEquals(cloud[0][0], 0);
    assertEquals(cloud[3][2], 5, "correct output");
});

Deno.test("Create.Box", ()=>
{
    let min = [-1, -2, -3];
    let max = [1, 2, 3];
    let count = 10;

    let box = M.Create.Box(min, max, count);
    assertEquals(box.length, count, "correct count");
    for(let i=0; i<box.length; i++)
    {
        assertEquals(box[i].length, min.length, "correct dimensions");
        for(let j=0; j<box[i].length; j++)
        {
            assert(box[i][j] >= min[j], "correct range (min)");
            assert(box[i][j] <= max[j], "correct range (max)");
        }
    }
});

Deno.test("Create.Transpose", ()=>
{
    let v1 = [1, 2, 3];
    let v2 = [4, 5, 6];
    let tpose = M.Create.Transpose([v1, v2]);
    assertEquals(tpose.length, 3, "correct count");
    assertEquals(tpose[0].length, 2, "correct dimensions");
    assertEquals(tpose[0][0], v1[0]);
    assertEquals(tpose[0][1], v2[0], "correct placement");
});

Deno.test("Create.Outer", ()=>
{
    let v1 = [1, 2, 3];
    let v2 = [4, 5];
    let outer = M.Create.Outer(v1, v2);
    assertEquals(outer.length, v2.length, "correct count");
    assertEquals(outer[0].length, v1.length, "correct dimensions");
    assertEquals(outer[1][0], v1[0]*v2[1], "correct placement");
});

Deno.test("Create.Clone", ()=>
{
    let v1 = [1, 2, 3];
    let v2 = [4, 5, 6];
    let clone = M.Create.Clone([v1, v2]);
    assertEquals(clone.length, 2, "correct count");
    assertEquals(clone[0].length, v1.length, "correct dimensions");
    assertEquals(clone[1][0], v2[0], "correct placement");
});

Deno.test("Mutate.Pad", ()=>
{
    let matrix = [
        [1, 2, 3],
        [4, 5, 6]
    ];
    M.Mutate.Pad(matrix);
    assertEquals(matrix.length, 2, "correct count");
    assertEquals(matrix[0].length, 4, "correct dimensions");
    assertEquals(matrix[0][3], 1, "correct placement");
});

Deno.test("Mutate.Unpad", ()=>
{
    let matrix = [
        [1, 2, 3, 1],
        [4, 5, 6, 1]
    ];
    M.Mutate.Unpad(matrix);
    assertEquals(matrix.length, 2, "correct count");
    assertEquals(matrix[0].length, 3, "correct dimensions");
    assertEquals(matrix[1][0], 4, "correct placement");
});

Deno.test("Single.Affine", ()=>
{
    let v = [1, 2];
    let m = [[0.1, 0.2], [0.3, 0.4]];
    let t = M.Single.Affine(v, m);
    assertEquals(t.length, 2, "correct dimensions");
    assertEquals(t[0], 0.5);
    assertEquals(t[1], 1.1, "correct placement");
});

Deno.test("Batch.Affine", ()=>
{
    let c = [[1, 2], [3, 4]];
    let m = [[0.1, 0.2], [0.3, 0.4]];
    let t = M.Batch.Affine(c, m);
    assertEquals(t.length, 2, "correct count");
    assertEquals(t[0].length, 2, "correct dimensions");
    assertEquals(t[0][1], 1.1, "correct placement");
});
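
The suite runs under Deno (which provides Deno.test and fetches the std asserts import):

    deno test m.test.js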

methods.md (new file, 17 lines)
@@ -0,0 +1,17 @@
box(boundingBox, count) // done
transpose(inMatrix) // done
outer(inV1, inV2) // done
clone(inCloud) // done

pad(inCloud) // done
unpad(inCloud) // done

// batch filter
transform(inCloud, inMatrix) // done
sigmoid(inCloud) // 1/(1+e^-x)
derivative(inCloud) // x*(1-x)
scale(inCloud, inV)

// batch of pairs
subtract(inCloud1, inCloud2)
multiply(inCloud1, inCloud2)
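
The pending pairwise methods could follow the existing Iterate.Old pattern -- a sketch only, not part of the commit, matching the elementwise way nn.js calls them:

    Subtract: (inCloud1, inCloud2)=> M.Iterate.Old(inCloud1, (i, row)=> inCloud1[row][i]-inCloud2[row][i]),
    Multiply: (inCloud1, inCloud2)=> M.Iterate.Old(inCloud1, (i, row)=> inCloud1[row][i]*inCloud2[row][i])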

nn.js (new file, 211 lines)
@@ -0,0 +1,211 @@
/* Expects a flat M API as sketched in methods.md (Box, Transpose, Outer, Clone,
   Pad, Unpad, Transform, Sigmoid, Derivative, Scale, Subtract, Multiply);
   several of those helpers are still listed there as pending. */
var NN = {};

NN.TrainingSet = {};
NN.TrainingSet.Instances = [];
NN.TrainingSet.Create = function()
{
    var obj = {};

    obj.Input = [];
    obj.Output = [];
    obj.Order = [];

    NN.TrainingSet.Instances.push(obj);
    return obj;
};
NN.TrainingSet.AddPoint = function(inTrainingSet, inType, inData)
{
    inTrainingSet.Input.push(inData);
    inTrainingSet.Output.push(inType);
    inTrainingSet.Order.push(inTrainingSet.Order.length);
};
NN.TrainingSet.AddCloud = function(inTrainingSet, inLabel, inCloud)
{
    var i;
    for(i=0; i<inCloud.length; i++)
    {
        NN.TrainingSet.AddPoint(inTrainingSet, inLabel, inCloud[i]);
    }
};
NN.TrainingSet.Randomize = function(inTrainingSet)
{
    var newOrder = [];
    var selection;
    while(inTrainingSet.Order.length != 0)
    {
        selection = Math.floor(inTrainingSet.Order.length * Math.random());
        newOrder.push(inTrainingSet.Order[selection]); // keep the drawn value, not its index
        inTrainingSet.Order.splice(selection, 1);
    }
    inTrainingSet.Order = newOrder;
};
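
For example (illustrative values), Order accumulates insertion indices and Randomize reshuffles them:

    var set = NN.TrainingSet.Create();
    NN.TrainingSet.AddPoint(set, [1], [0.1, 0.2]); // set.Order: [0]
    NN.TrainingSet.AddPoint(set, [0], [0.9, 0.8]); // set.Order: [0, 1]
    NN.TrainingSet.Randomize(set);                 // set.Order: e.g. [1, 0]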

NN.Layer = {};
NN.Layer.Create = function(sizeIn, sizeOut)
{
    var i;
    var min = [];
    var max = [];
    var obj = {};

    sizeIn++; // one extra input dimension to carry the bias pad

    obj.Forward = {};
    for(i=0; i<sizeIn; i++)
    {
        min.push(-1);
        max.push(1);
    }
    obj.Forward.Matrix = M.Box([min, max], sizeOut); // random weights in [-1, 1]
    obj.Forward.StageInput = [];
    obj.Forward.StageAffine = [];
    obj.Forward.StageSigmoid = [];
    obj.Forward.StageDerivative = [];

    obj.Backward = {};
    obj.Backward.Matrix = M.Transpose(obj.Forward.Matrix);
    obj.Backward.StageInput = [];
    obj.Backward.StageDerivative = [];
    obj.Backward.StageAffine = [];

    return obj;
};
NN.Layer.Forward = function(inLayer, inInput)
{
    inLayer.Forward.StageInput = M.Pad(inInput); // pad the input
    inLayer.Forward.StageAffine = M.Transform(inLayer.Forward.Matrix, inLayer.Forward.StageInput);
    inLayer.Forward.StageSigmoid = M.Sigmoid(inLayer.Forward.StageAffine);

    return inLayer.Forward.StageSigmoid;
};
NN.Layer.Error = function(inLayer, inTarget)
{
    return M.Subtract(inLayer.Forward.StageSigmoid, inTarget);
};
NN.Layer.Backward = function(inLayer, inInput)
{
    /* We need the derivative of the forward pass, but only during the backward pass.
       That's why -- even though it "belongs" to the forward pass -- it is calculated here. */
    inLayer.Forward.StageDerivative = M.Derivative(inLayer.Forward.StageSigmoid);

    /* This transpose matrix is for sending the error back to a previous layer.
       Again, even though it is derived directly from the forward matrix, it is only
       needed during the backward pass, so it is calculated here. */
    inLayer.Backward.Matrix = M.Transpose(inLayer.Forward.Matrix);

    /* When the error vector arrives at a layer, it always needs to be multiplied
       (read: suppressed) by the derivative of what the layer output earlier during
       the forward pass. So despite its name, Backward.StageDerivative holds the
       result of this *multiplication*, not some new derivative calculation. */
    inLayer.Backward.StageInput = inInput;
    inLayer.Backward.StageDerivative = M.Multiply(inLayer.Backward.StageInput, inLayer.Forward.StageDerivative);
    inLayer.Backward.StageAffine = M.Transform(inLayer.Backward.Matrix, inLayer.Backward.StageDerivative);

    return M.Unpad(inLayer.Backward.StageAffine); // unpad the output
};
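
In conventional notation this is the standard chain-rule step: given layer output y and incoming error E, the local delta is E*y*(1-y) elementwise, and the error handed back is that delta pushed through the transposed weights; Adjust below then applies W -= rate * outer(input, delta), row by row over the batch.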
NN.Layer.Adjust = function(inLayer, inLearningRate)
{
    var deltas;
    var i;

    for(i=0; i<inLayer.Forward.StageInput.length; i++)
    {
        deltas = M.Outer(inLayer.Forward.StageInput[i], inLayer.Backward.StageDerivative[i]);
        deltas = M.Scale(deltas, inLearningRate);

        inLayer.Forward.Matrix = M.Subtract(inLayer.Forward.Matrix, deltas);
    }
};
NN.Layer.Stochastic = function(inLayer, inTrainingSet, inIterations)
{
    /* This method is ONLY for testing individual layers; it does not translate
       to network-level training. */
    var i, j;
    var current;
    var error;
    for(i=0; i<inIterations; i++)
    {
        NN.TrainingSet.Randomize(inTrainingSet);
        for(j=0; j<inTrainingSet.Order.length; j++)
        {
            current = inTrainingSet.Order[j];
            NN.Layer.Forward(inLayer, [inTrainingSet.Input[current]]);
            error = M.Subtract(inLayer.Forward.StageSigmoid, [inTrainingSet.Output[current]]);
            NN.Layer.Backward(inLayer, error);
            NN.Layer.Adjust(inLayer, 0.1);
        }
    }
};

NN.Network = {};
NN.Network.Instances = [];
NN.Network.Create = function()
{
    var obj = {};
    var i;

    obj.Layers = [];
    obj.LearningRate = 0.8;
    obj.Error = [];

    for(i=0; i<arguments.length-1; i++)
    {
        obj.Layers.push(NN.Layer.Create(arguments[i], arguments[i+1]));
    }

    NN.Network.Instances.push(obj);
    return obj;
};
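
Each consecutive pair of arguments becomes one layer; for example:

    var net = NN.Network.Create(2, 3, 1); // two layers, 2->3 and 3->1 (bias pads added internally)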
NN.Network.Observe = function(inNetwork, inBatch)
{
    var input = M.Clone(inBatch);
    var i;
    for(i=0; i<inNetwork.Layers.length; i++)
    {
        input = NN.Layer.Forward(inNetwork.Layers[i], input);
    }
    return inNetwork.Layers[inNetwork.Layers.length-1].Forward.StageSigmoid;
};
NN.Network.Error = function(inNetwork, inTraining)
{
    return M.Subtract(inNetwork.Layers[inNetwork.Layers.length-1].Forward.StageSigmoid, inTraining);
};
NN.Network.Learn = function(inNetwork, inError)
{
    var input = inError;
    var i;
    for(i=inNetwork.Layers.length-1; i>=0; i--)
    {
        input = NN.Layer.Backward(inNetwork.Layers[i], input);
        NN.Layer.Adjust(inNetwork.Layers[i], inNetwork.LearningRate);
    }
};

NN.Network.Batch = function(inNetwork, inTrainingSet, inIterations)
{
    var i;
    for(i=0; i<inIterations; i++)
    {
        NN.Network.Observe(inNetwork, inTrainingSet.Input);
        inNetwork.Error = NN.Network.Error(inNetwork, inTrainingSet.Output);
        NN.Network.Learn(inNetwork, inNetwork.Error);
    }
};
NN.Network.Stochastic = function(inNetwork, inTrainingSet, inIterations)
{
    var i, j;
    var current;

    for(i=0; i<inIterations; i++)
    {
        NN.TrainingSet.Randomize(inTrainingSet);
        for(j=0; j<inTrainingSet.Order.length; j++)
        {
            current = inTrainingSet.Order[j];
            NN.Network.Observe(inNetwork, [inTrainingSet.Input[current]]);
            inNetwork.Error = NN.Network.Error(inNetwork, [inTrainingSet.Output[current]]);
            NN.Network.Learn(inNetwork, inNetwork.Error);
        }
    }
};
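
End to end -- a sketch only, since it relies on the flat M helpers that methods.md still lists as partly pending:

    var set = NN.TrainingSet.Create();
    NN.TrainingSet.AddCloud(set, [1], [[0.1, 0.2], [0.3, 0.1]]); // class 1
    NN.TrainingSet.AddCloud(set, [0], [[0.9, 0.8], [0.7, 0.9]]); // class 0

    var net = NN.Network.Create(2, 3, 1);
    NN.Network.Batch(net, set, 1000); // or NN.Network.Stochastic(net, set, 1000)
    var out = NN.Network.Observe(net, [[0.2, 0.2]]); // approaches [[1]] as training converges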