Compare commits

6 Commits
| SHA1 |
|---|
| 3d9b692d4d |
| 9fcfb9115d |
| 44771715d3 |
| 3861d41c83 |
| b1c95d4819 |
| 94c7abdda5 |
							
								
								
									
index.html (717 lines)
| @@ -1,717 +0,0 @@ | ||||
| <script> | ||||
|     /* Vector Library */ | ||||
| /* | ||||
| 	Works with n-dimensional vectors: represented as arrays of numbers | ||||
| */ | ||||
| var V = {}; | ||||
| V.Subtract = function(inV1, inV2) | ||||
| { | ||||
| 	var out = []; | ||||
| 	for(var i=0; i<inV1.length; i++) | ||||
| 	{ | ||||
| 		out[i] = inV1[i] - inV2[i]; | ||||
| 	} | ||||
| 	return out; | ||||
| }; | ||||
| V.Add = function(inV1, inV2) | ||||
| { | ||||
| 	var out = []; | ||||
| 	for(var i=0; i<inV1.length; i++) | ||||
| 	{ | ||||
| 		out[i] = inV1[i] + inV2[i]; | ||||
| 	} | ||||
| 	return out; | ||||
| }; | ||||
| V.Distance = function(inV1, inV2) | ||||
| { | ||||
| 	return V.Length(V.Subtract(inV1, inV2)); | ||||
| }; | ||||
| V.Dot = function(inV1, inV2) | ||||
| { | ||||
| 	var out = 0; | ||||
| 	for(var i=0; i<inV1.length; i++) | ||||
| 	{ | ||||
| 		out += inV1[i] * inV2[i]; | ||||
| 	} | ||||
| 	return out; | ||||
| }; | ||||
| V.Multiply = function(inV1, inV2) | ||||
| { | ||||
| 	var out = []; | ||||
| 	for(var i=0; i<inV1.length; i++) | ||||
| 	{ | ||||
| 		out[i] = inV1[i] * inV2[i]; | ||||
| 	} | ||||
| 	return out; | ||||
| }; | ||||
| V.Length = function(inV1) | ||||
| { | ||||
| 	return Math.sqrt(V.Dot(inV1, inV1)); | ||||
| }; | ||||
| V.Scale = function(inV1, inScalar) | ||||
| { | ||||
| 	var out = []; | ||||
| 	for(var i=0; i<inV1.length; i++) | ||||
| 	{ | ||||
| 		out[i] = inV1[i] * inScalar; | ||||
| 	} | ||||
| 	return out; | ||||
| }; | ||||
| V.Normalize = function(inV1) | ||||
| { | ||||
|     return V.Scale(inV1, 1/V.Length(inV1)); | ||||
| }; | ||||
| V.Clone = function(inV1) | ||||
| { | ||||
| 	var out = []; | ||||
| 	var i; | ||||
| 	for(i=0; i<inV1.length; i++) | ||||
| 	{ | ||||
| 		out[i] = inV1[i]; | ||||
| 	} | ||||
| 	return out; | ||||
| }; | ||||
|  | ||||
|  | ||||
| var M = {}; | ||||
| 
 | ||||
| /************************** | ||||
| M A T R I X | ||||
| */ | ||||
| // transform inC with inM | ||||
| // returns the transformed inC | ||||
| M.Transform = function(inM, inC) | ||||
| { | ||||
| 	var outM = []; | ||||
| 	var outV = []; | ||||
| 	var i, j; | ||||
| 	 | ||||
| 	for(i=0; i<inC.length; i++) | ||||
| 	{ | ||||
| 		outV = []; | ||||
| 		for(j=0; j<inM.length; j++) | ||||
| 		{ | ||||
| 			outV[j] = V.Dot(inM[j], inC[i]); | ||||
| 		} | ||||
| 		outM.push(outV); | ||||
| 	} | ||||
| 	return outM; | ||||
| }; | ||||
| 
 | ||||
| 
 | ||||
| // flip rows for columns in inM | ||||
| // returns the modified Matrix | ||||
| M.Transpose = function(inM) | ||||
| { | ||||
| 	var dimensions = inM[0].length; | ||||
| 	var i, j; | ||||
| 	var outM = []; | ||||
| 	var outV = []; | ||||
| 	for(i=0; i<dimensions; i++) | ||||
| 	{ | ||||
| 		outV = []; | ||||
| 		for(j=0; j<inM.length; j++) | ||||
| 		{ | ||||
| 			//the Ith component of the Jth member | ||||
| 			outV[j] = inM[j][i]; | ||||
| 		} | ||||
| 		outM.push(outV); | ||||
| 	} | ||||
| 	return outM; | ||||
| }; | ||||
| 
 | ||||
| // returns a matrix that is the result of the outer product of inV1 and inV2 | ||||
| // where the Nth member of outM is a copy of V1, scaled by the Nth component of V2 | ||||
| M.Outer = function(inV1, inV2) | ||||
| { | ||||
| 	var outM = []; | ||||
| 	 | ||||
| 	var i; | ||||
| 	for(i=0; i<inV2.length; i++) | ||||
| 	{ | ||||
| 		outM.push(V.Scale(inV1, inV2[i])); | ||||
| 	} | ||||
| 	 | ||||
| 	return outM; | ||||
| }; | ||||
|  | ||||
| /************************** | ||||
| B A T C H | ||||
| */ | ||||
| //squash the members of inM with a sigmoid | ||||
| M.Sigmoid = function(inM) | ||||
| { | ||||
| 	var i, j; | ||||
| 	var outM = []; | ||||
| 	var outV = []; | ||||
| 	for(i=0; i<inM.length; i++) | ||||
| 	{ | ||||
| 		outV = []; | ||||
| 		for(j=0; j<inM[i].length; j++) | ||||
| 		{ | ||||
| 			outV[j] = 1/(1 + Math.pow(Math.E, -inM[i][j])); | ||||
| 		} | ||||
| 		outM.push(outV); | ||||
| 	} | ||||
| 	return outM; | ||||
| }; | ||||
| // return the derivatives of the members of inM (that have already been run through the sigmoid) | ||||
| M.Derivative = function(inM) | ||||
| { | ||||
| 	var i, j; | ||||
| 	var component; | ||||
| 	var outM = []; | ||||
| 	var outV = []; | ||||
| 	for(i=0; i<inM.length; i++) | ||||
| 	{ | ||||
| 		outV = []; | ||||
| 		for(j=0; j<inM[i].length; j++) | ||||
| 		{ | ||||
| 			component = inM[i][j]; | ||||
| 			outV[j] = component*(1 - component); | ||||
| 		} | ||||
| 		outM.push(outV); | ||||
| 	} | ||||
| 	return outM; | ||||
| }; | ||||
| // batch multiply these pairs of vectors | ||||
| M.Multiply = function(inCloud1, inCloud2) | ||||
| { | ||||
| 	var i; | ||||
| 	var outM = []; | ||||
| 	for(i=0; i<inCloud1.length; i++) | ||||
| 	{ | ||||
| 		outM.push(V.Multiply(inCloud1[i], inCloud2[i])); | ||||
| 	} | ||||
| 	return outM; | ||||
| }; | ||||
| // batch add | ||||
| M.Add = function(inCloud1, inCloud2) | ||||
| { | ||||
|     var outM = []; | ||||
|      | ||||
|     var i; | ||||
|     for(i=0; i<inCloud1.length; i++) | ||||
|     { | ||||
|         outM.push(V.Add(inCloud1[i], inCloud2[i])); | ||||
|     } | ||||
|     return outM; | ||||
| }; | ||||
| M.Subtract = function(inCloud1, inCloud2) | ||||
| { | ||||
|     var outM = []; | ||||
|      | ||||
|     var i; | ||||
|     for(i=0; i<inCloud1.length; i++) | ||||
|     { | ||||
|         outM.push(V.Subtract(inCloud1[i], inCloud2[i])); | ||||
|     } | ||||
|     return outM; | ||||
| }; | ||||
| M.Scale = function(inCloud1, inScalar) | ||||
| { | ||||
|     var outM = []; | ||||
|      | ||||
|     var i; | ||||
|     for(i=0; i<inCloud1.length; i++) | ||||
|     { | ||||
|         outM.push(V.Scale(inCloud1[i], inScalar)); | ||||
|     } | ||||
|     return outM; | ||||
| }; | ||||
| M.Clone = function(inM) | ||||
| { | ||||
|     var i; | ||||
|     var outM; | ||||
|     var outV; | ||||
|      | ||||
|     outM =[]; | ||||
|     for(i=0; i<inM.length; i++) | ||||
|     { | ||||
|         outM.push(V.Clone(inM[i])); | ||||
|     } | ||||
|     return outM; | ||||
| }; | ||||
| 
 | ||||
| 
 | ||||
| /************************** | ||||
| B O U N D S | ||||
| */ | ||||
| // return the bounding box of inM as a two-member Matrix | ||||
| M.Bounds = function(inM) | ||||
| { | ||||
| 	var dimensions = inM[0].length; | ||||
| 	var i, j; | ||||
| 	var min = []; | ||||
| 	var max = []; | ||||
| 	for(i=0; i<dimensions; i++) | ||||
| 	{ | ||||
| 		min[i] = Infinity; | ||||
| 		max[i] = -Infinity; | ||||
| 	} | ||||
| 	for(i=0; i<inM.length; i++) | ||||
| 	{ | ||||
| 		for(j=0; j<dimensions; j++) | ||||
| 		{ | ||||
| 			if(inM[i][j] < min[j]) | ||||
| 			{ | ||||
| 				min[j] = inM[i][j]; | ||||
| 			} | ||||
| 			if(inM[i][j] > max[j]) | ||||
| 			{ | ||||
| 				max[j] = inM[i][j]; | ||||
| 			}			 | ||||
| 		} | ||||
| 	} | ||||
| 	return [min, max]; | ||||
| }; | ||||
| 
 | ||||
| // find the local coordinates for all the members of inM, within the bounding box inB | ||||
| // returns a new Matrix of relative vectors | ||||
| M.GlobalToLocal = function(inM, inB) | ||||
| { | ||||
| 	var dimensions = inB[0].length; | ||||
| 	var i, j; | ||||
| 	var outM = []; | ||||
| 	var outV = []; | ||||
| 	var size; | ||||
| 	var min; | ||||
| 	var denominator; | ||||
| 	for(i=0; i<inM.length; i++) | ||||
| 	{ | ||||
| 		outV = []; | ||||
| 		for(j=0; j<dimensions; j++) | ||||
| 		{ | ||||
| 			denominator = inB[1][j] - inB[0][j]; | ||||
| 			if(denominator == 0) | ||||
| 			{ | ||||
| 				outV[j] = inB[1][j];// if min and max are the same, just output max | ||||
| 			} | ||||
| 			else | ||||
| 			{ | ||||
| 				outV[j] = (inM[i][j] - inB[0][j])/denominator;	 | ||||
| 			} | ||||
| 		} | ||||
| 		outM.push(outV); | ||||
| 	} | ||||
| 	return outM; | ||||
| }; | ||||
| // find the global coordinates for all the members of inM, within the bounding box inB | ||||
| // returns a new Matrix of global vectors | ||||
| M.LocalToGlobal = function(inM, inB) | ||||
| { | ||||
| 	var dimensions = inB[0].length; | ||||
| 	var i, j; | ||||
| 	var outM = []; | ||||
| 	var outV = []; | ||||
| 	var size; | ||||
| 	var min; | ||||
| 	for(i=0; i<inM.length; i++) | ||||
| 	{ | ||||
| 		outV = []; | ||||
| 		for(j=0; j<dimensions; j++) | ||||
| 		{ | ||||
| 			outV[j] = inB[0][j] + inM[i][j] * (inB[1][j] - inB[0][j]); | ||||
| 		} | ||||
| 		outM.push(outV); | ||||
| 	} | ||||
| 	return outM; | ||||
| }; | ||||
| 
 | ||||
| 
 | ||||
| /************************** | ||||
| C L O U D | ||||
| */ | ||||
| // return some number of points from inM as a new Matrix | ||||
| M.Reduce = function(inM, inCount) | ||||
| { | ||||
| 	var i; | ||||
| 	var index; | ||||
| 	var largeGroupSize; | ||||
| 	var smallGroupSize; | ||||
| 	var outM = []; | ||||
| 	 | ||||
| 	largeGroupSize = Math.floor(inM.length/inCount); | ||||
| 	smallGroupSize = inM.length%inCount; | ||||
| 	for(i=0; i<inCount-1; i++) | ||||
| 	{ | ||||
| 		index = i*largeGroupSize + Math.floor(Math.random()*largeGroupSize); | ||||
| 		outM.push( V.Clone(inM[index]) ); | ||||
| 	} | ||||
| 	// the last pick comes from the remainder group if there is one, otherwise from the final full group | ||||
| 	if(smallGroupSize != 0) | ||||
| 	{ | ||||
| 		index = i*largeGroupSize + Math.floor(Math.random()*smallGroupSize); | ||||
| 	} | ||||
| 	else | ||||
| 	{ | ||||
| 		index = i*largeGroupSize + Math.floor(Math.random()*largeGroupSize); | ||||
| 	} | ||||
| 	outM.push( V.Clone(inM[index]) ); | ||||
| 	return outM; | ||||
| }; | ||||
| 
 | ||||
| // return a Matrix of length inCount, where all the members fall within the circle parameters, including a bias | ||||
| M.Circle = function(inCenter, inRadius, inBias, inCount) | ||||
| { | ||||
| 	var i, j; | ||||
| 	var vector; | ||||
| 	var length; | ||||
| 	var outM = []; | ||||
| 	 | ||||
| 	for(i=0; i<inCount; i++) | ||||
| 	{ | ||||
| 		//generate a random vector | ||||
| 		vector = []; | ||||
| 		for(j=0; j<inCenter.length; j++) | ||||
| 		{ | ||||
| 			vector[j] = (Math.random() - 0.5); | ||||
| 		} | ||||
| 		 | ||||
| 		//normalize the vector | ||||
| 		vector = V.Scale(vector, 1/V.Length(vector)); | ||||
| 		 | ||||
| 		//set a random length (with a bias) | ||||
| 		length = Math.pow(Math.random(), Math.log(inBias)/Math.log(0.5))*inRadius; | ||||
| 		vector = V.Scale(vector, length); | ||||
| 		 | ||||
| 		//move the vector to the center | ||||
| 		vector = V.Add(vector, inCenter); | ||||
| 		 | ||||
| 		outM.push(vector); | ||||
| 	} | ||||
| 	return outM; | ||||
| }; | ||||
| 
 | ||||
| // return a Matrix of length inCount, where all the members fall within inBounds | ||||
| M.Box = function(inBounds, inCount) | ||||
| { | ||||
| 	var vector; | ||||
| 	var dimensions = inBounds[0].length; | ||||
| 	var i, j; | ||||
| 	var min, max; | ||||
| 	var outM = []; | ||||
| 	 | ||||
| 	for(i=0; i<inCount; i++) | ||||
| 	{ | ||||
| 		vector = []; | ||||
| 		for(j=0; j<dimensions; j++) | ||||
| 		{ | ||||
| 			min = inBounds[0][j]; | ||||
| 			max = inBounds[1][j]; | ||||
| 			 | ||||
| 			vector[j] = min + Math.random()*(max - min); | ||||
| 		} | ||||
| 		outM.push(vector); | ||||
| 	} | ||||
| 	return outM; | ||||
| }; | ||||
| 
 | ||||
| //combine all the matrices in inList into one long Matrix | ||||
| M.Combine = function(inList) | ||||
| { | ||||
| 	var i, j; | ||||
| 	var outM = []; | ||||
| 	for(i=0; i<inList.length; i++) | ||||
| 	{ | ||||
| 		for(j=0; j<inList[i].length; j++) | ||||
| 		{ | ||||
| 			outM.push(V.Clone(inList[i][j])); | ||||
| 		} | ||||
| 	} | ||||
| 	return outM; | ||||
| }; | ||||
| 
 | ||||
| /* | ||||
| PLEASE NOTE: These padding routines are unique to this library in that they | ||||
| actually modify the input object(s) rather than returning modified copies! | ||||
| */ | ||||
| // add a new component (set to '1') to each member of inM | ||||
| M.Pad = function(inM) | ||||
| { | ||||
| 	var i; | ||||
| 	for(i=0; i<inM.length; i++) | ||||
| 	{ | ||||
| 		inM[i].push(1); | ||||
| 	} | ||||
| 	return inM; | ||||
| }; | ||||
| // remove the last component of each member of inM | ||||
| M.Unpad = function(inM) | ||||
| { | ||||
| 	var i; | ||||
| 	for(i=0; i<inM.length; i++) | ||||
| 	{ | ||||
| 		inM[i].pop(); | ||||
| 	} | ||||
| 	return inM; | ||||
| }; | ||||
| // set the last component of each member of inM to 1 | ||||
| M.Repad = function(inM) | ||||
| { | ||||
| 	var i; | ||||
| 	var last = inM[0].length-1; | ||||
| 	for(i=0; i<inM.length; i++) | ||||
| 	{ | ||||
| 		inM[i][last] = 1; | ||||
| 	} | ||||
| 	return inM; | ||||
| }; | ||||
| </script> | ||||
| 
 | ||||
| <script> | ||||
| var NN = {}; | ||||
| 
 | ||||
| NN.TrainingSet = {}; | ||||
| NN.TrainingSet.Instances = []; | ||||
| NN.TrainingSet.Create = function() | ||||
| { | ||||
|     var obj = {}; | ||||
| 
 | ||||
|     obj.Input = []; | ||||
|     obj.Output = []; | ||||
|     obj.Order = []; | ||||
|      | ||||
|     NN.TrainingSet.Instances.push(obj); | ||||
|     return obj; | ||||
| }; | ||||
| NN.TrainingSet.AddPoint = function(inTrainingSet, inType, inData) | ||||
| { | ||||
|     inTrainingSet.Input.push(inData); | ||||
|     inTrainingSet.Output.push(inType); | ||||
|     inTrainingSet.Order.push(inTrainingSet.Order.length); | ||||
| }; | ||||
| NN.TrainingSet.AddCloud = function(inTrainingSet, inLabel, inCloud) | ||||
| { | ||||
|     var i; | ||||
|     for(i=0; i<inCloud.length; i++) | ||||
|     { | ||||
|         NN.TrainingSet.AddPoint(inTrainingSet, inLabel, inCloud[i]); | ||||
|     } | ||||
| }; | ||||
| NN.TrainingSet.Randomize = function(inTrainingSet) | ||||
| { | ||||
|       var newOrder = []; | ||||
|       var selection; | ||||
|       while(inTrainingSet.Order.length != 0) | ||||
|       { | ||||
|           selection = Math.floor(inTrainingSet.Order.length * Math.random()); | ||||
|           inTrainingSet.Order.splice(selection, 1); | ||||
|           newOrder.push(selection); | ||||
|       } | ||||
|       inTrainingSet.Order = newOrder; | ||||
| }; | ||||
| 
 | ||||
| 
 | ||||
| NN.Layer = {}; | ||||
| NN.Layer.Create = function(sizeIn, sizeOut) | ||||
| { | ||||
|     var i; | ||||
|     var min = []; | ||||
|     var max = []; | ||||
|     var obj = {}; | ||||
|      | ||||
|     sizeIn++; | ||||
|      | ||||
|     obj.Forward = {}; | ||||
|     for(i=0; i<sizeIn; i++) | ||||
|     { | ||||
|         min.push(-1); | ||||
|         max.push(1); | ||||
|     } | ||||
|     obj.Forward.Matrix = M.Box([min, max], sizeOut); | ||||
|     obj.Forward.StageInput = []; | ||||
|     obj.Forward.StageAffine = []; | ||||
|     obj.Forward.StageSigmoid = []; | ||||
|     obj.Forward.StageDerivative = []; | ||||
|      | ||||
|     obj.Backward = {}; | ||||
|     obj.Backward.Matrix = M.Transpose(obj.Forward.Matrix); | ||||
|     obj.Backward.StageInput = []; | ||||
|     obj.Backward.StageDerivative = []; | ||||
|     obj.Backward.StageAffine = []; | ||||
|      | ||||
|     return obj; | ||||
| }; | ||||
| NN.Layer.Forward = function(inLayer, inInput) | ||||
| { | ||||
|     inLayer.Forward.StageInput = M.Pad(inInput); // Pad the input | ||||
|     inLayer.Forward.StageAffine = M.Transform(inLayer.Forward.Matrix, inLayer.Forward.StageInput); | ||||
|     inLayer.Forward.StageSigmoid = M.Sigmoid(inLayer.Forward.StageAffine); | ||||
|      | ||||
|     return inLayer.Forward.StageSigmoid; | ||||
| }; | ||||
| NN.Layer.Error = function(inLayer, inTarget) | ||||
| { | ||||
|     return M.Subtract(inLayer.Forward.StageSigmoid, inTarget); | ||||
| }; | ||||
| NN.Layer.Backward = function(inLayer, inInput) | ||||
| { | ||||
|     /* We need the derivative of the forward pass, but only during the backward pass. | ||||
|     That's why, even though it "belongs" to the forward pass, it is being calculated here. */ | ||||
|     inLayer.Forward.StageDerivative = M.Derivative(inLayer.Forward.StageSigmoid); | ||||
|      | ||||
|     /* This transpose matrix is for sending the error back to a previous layer. | ||||
|     And again, even though it is derived directly from the forward matrix, it is only needed during the backward pass so we calculate it here.*/ | ||||
|     inLayer.Backward.Matrix = M.Transpose(inLayer.Forward.Matrix); | ||||
|      | ||||
|     /* When the error vector arrives at a layer, it always needs to be multiplied (read 'suppressed') by the derivative of | ||||
|     what the layer output earlier during the forward pass. | ||||
|     So despite its name, Backward.StageDerivative contains the result of this *multiplication* and not some new derivative calculation.*/ | ||||
|     inLayer.Backward.StageInput = inInput; | ||||
|     inLayer.Backward.StageDerivative = M.Multiply(inLayer.Backward.StageInput, inLayer.Forward.StageDerivative); | ||||
|     inLayer.Backward.StageAffine = M.Transform(inLayer.Backward.Matrix, inLayer.Backward.StageDerivative); | ||||
|      | ||||
|     return M.Unpad(inLayer.Backward.StageAffine);// Unpad the output | ||||
| }; | ||||
| NN.Layer.Adjust = function(inLayer, inLearningRate) | ||||
| { | ||||
|     var deltas; | ||||
|     var vector; | ||||
|     var scalar; | ||||
|     var i, j; | ||||
|      | ||||
|     for(i=0; i<inLayer.Forward.StageInput.length; i++) | ||||
|     { | ||||
|         deltas = M.Outer(inLayer.Forward.StageInput[i], inLayer.Backward.StageDerivative[i]); | ||||
|         deltas = M.Scale(deltas, inLearningRate); | ||||
|          | ||||
|         inLayer.Forward.Matrix = M.Subtract(inLayer.Forward.Matrix, deltas); | ||||
|     } | ||||
| }; | ||||
| NN.Layer.Stochastic = function(inLayer, inTrainingSet, inIterations) | ||||
| { | ||||
|     /* this method is ONLY for testing individual layers, and does not translate to network-level training */ | ||||
|     var i, j; | ||||
|     var current; | ||||
|     var error; | ||||
|     for(i=0; i<inIterations; i++) | ||||
|     { | ||||
|         NN.TrainingSet.Randomize(inTrainingSet); | ||||
|         for(j=0; j<inTrainingSet.Order.length; j++) | ||||
|         { | ||||
|             current = inTrainingSet.Order[j]; | ||||
|             NN.Layer.Forward(inLayer, [inTrainingSet.Input[current]]); | ||||
|             error = M.Subtract(inLayer.Forward.StageSigmoid, [inTrainingSet.Output[current]]); | ||||
|             NN.Layer.Backward(inLayer, error); | ||||
|             NN.Layer.Adjust(inLayer, 0.1); | ||||
|         } | ||||
|     } | ||||
| }; | ||||
| 
 | ||||
| NN.Network = {}; | ||||
| NN.Network.Instances = []; | ||||
| NN.Network.Create = function() | ||||
| { | ||||
|     var obj = {}; | ||||
|     var i;     | ||||
|      | ||||
|     obj.Layers = []; | ||||
|     obj.LearningRate = 0.1; | ||||
|     obj.Error = []; | ||||
|      | ||||
|     for(i=0; i<arguments.length-1; i++) | ||||
|     { | ||||
|         obj.Layers.push(NN.Layer.Create(arguments[i], arguments[i+1])); | ||||
|     } | ||||
|      | ||||
|     NN.Network.Instances.push(obj); | ||||
|     return obj; | ||||
| }; | ||||
| NN.Network.Observe = function(inNetwork, inBatch) | ||||
| { | ||||
|       var input = M.Clone(inBatch); | ||||
|       var i; | ||||
|       for(i=0; i<inNetwork.Layers.length; i++) | ||||
|       { | ||||
|           input = NN.Layer.Forward(inNetwork.Layers[i], input); | ||||
|       } | ||||
|       return inNetwork.Layers[inNetwork.Layers.length-1].Forward.StageSigmoid; | ||||
| }; | ||||
| NN.Network.Error = function(inNetwork, inTraining) | ||||
| { | ||||
|       return M.Subtract(inNetwork.Layers[inNetwork.Layers.length-1].Forward.StageSigmoid, inTraining); | ||||
| }; | ||||
| NN.Network.Learn = function(inNetwork, inError) | ||||
| { | ||||
|       var input = inError; | ||||
|       var i; | ||||
|       for(i=inNetwork.Layers.length-1; i>=0; i--) | ||||
|       { | ||||
|           input = NN.Layer.Backward(inNetwork.Layers[i], input); | ||||
|           NN.Layer.Adjust(inNetwork.Layers[i], inNetwork.LearningRate); | ||||
|       } | ||||
| }; | ||||
| 
 | ||||
| 
 | ||||
| NN.Network.Batch = function(inNetwork, inTrainingSet, inIterations) | ||||
| { | ||||
|     var i; | ||||
|     for(i=0; i<inIterations; i++) | ||||
|     { | ||||
|         NN.Network.Observe(inNetwork, inTrainingSet.Input); | ||||
|         inNetwork.Error = NN.Network.Error(inNetwork, inTrainingSet.Output); | ||||
|         NN.Network.Learn(inNetwork, inNetwork.Error); | ||||
|     } | ||||
| }; | ||||
| NN.Network.Stochastic = function(inNetwork, inTrainingSet, inIterations) | ||||
| { | ||||
|     var i, j; | ||||
|     var current; | ||||
|      | ||||
|     for(i=0; i<inIterations; i++) | ||||
|     { | ||||
|         NN.TrainingSet.Randomize(inTrainingSet); | ||||
|         for(j=0; j<inTrainingSet.Order.length; j++) | ||||
|         { | ||||
|             current = inTrainingSet.Order[j]; | ||||
|             NN.Network.Observe(inNetwork, [inTrainingSet.Input[current]]); | ||||
|             inNetwork.Error = NN.Network.Error(inNetwork, [inTrainingSet.Output[current]]); | ||||
|             NN.Network.Learn(inNetwork, inNetwork.Error); | ||||
|         } | ||||
|     } | ||||
| }; | ||||
| </script> | ||||
| 
 | ||||
| <script> | ||||
|     let matrix1 = [ | ||||
|     [-0.43662948305036675, -0.368590640707799, -0.23227179558890843], | ||||
|     [-0.004292653969505622, 0.38670055222186317, -0.2478421495365568], | ||||
|     [0.738181366836224, 0.3389203747353555, 0.4920200816404332] | ||||
|     ]; | ||||
| 
 | ||||
|     let matrix2 = [ | ||||
|     [0.7098703863463034, 0.35485944251238033, 0.7642849892333241, 0.03046174288491077], | ||||
| 	[-0.30655426258144347, 0.45509633551425077, -0.5013795222004322, -0.3421292736637427] | ||||
|     ]; | ||||
| 
 | ||||
|     let input = [ | ||||
|     [ 0.1,  0.05], | ||||
|     [ 0.0, -0.06], | ||||
|     [ 0.99, 0.85], | ||||
|     [ 1.2,  1.05] | ||||
|     ]; | ||||
|     let output = [ | ||||
|     [1, 0], | ||||
|     [1, 0], | ||||
|     [0, 1], | ||||
|     [0, 1] | ||||
|     ]; | ||||
| 
 | ||||
| 	let nn1 = NN.Network.Create(2, 3, 2); | ||||
| 	nn1.Layers[0].Forward.Matrix = matrix1; | ||||
| 	nn1.Layers[1].Forward.Matrix = matrix2; | ||||
| 	nn1.LearningRate = 0.1; | ||||
| 	//let logLayers = inNN => inNN.Layers.forEach(L=>console.log(L.Forward.Matrix)); | ||||
| 
 | ||||
| 	NN.Network.Batch(nn1, {Input:input, Output:output}, 1000); | ||||
| 	console.log(NN.Network.Observe(nn1, input)); | ||||
| 
 | ||||
| </script> | ||||
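A quick way to sanity-check the hard-coded matrices in that last script block: `NN.Layer.Create(sizeIn, sizeOut)` bumps `sizeIn` by one for the bias component before calling `M.Box`, so each layer matrix has `sizeOut` rows of `sizeIn + 1` columns. A minimal sketch, assuming the V/M/NN definitions from the deleted script are loaded:

```js
// Sketch: layer shapes produced by NN.Network.Create(2, 3, 2).
let nn = NN.Network.Create(2, 3, 2);

// Layer 0 maps 2 inputs (+1 bias pad) to 3 outputs: 3 rows x 3 columns,
// matching the hard-coded matrix1 above.
console.log(nn.Layers[0].Forward.Matrix.length);    // 3
console.log(nn.Layers[0].Forward.Matrix[0].length); // 3

// Layer 1 maps 3 hidden values (+1 bias pad) to 2 outputs: 2 rows x 4 columns,
// matching matrix2.
console.log(nn.Layers[1].Forward.Matrix.length);    // 2
console.log(nn.Layers[1].Forward.Matrix[0].length); // 4
```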
							
								
								
									
index.js (10 lines, Normal file)
| @@ -0,0 +1,10 @@ | ||||
| import { Build, Learn, Label } from "./nn.ts"; | ||||
| import { default as Clean } from "./iris.js"; | ||||
| 
 | ||||
| let [ inputs, labels ] = Clean(); | ||||
| 
 | ||||
| let layers = Build(4, 10, 3); | ||||
| let errors = Learn(inputs, layers, labels, 500, 0.1); | ||||
| let output = Label(inputs, layers, true); | ||||
| 
 | ||||
| console.log(output); | ||||
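The same padding convention carries over to the new API: judging by the `NN.Build` tests further down (`Build(2, 5, 2)` yields a 5x3 and a 2x6 matrix), `Build(4, 10, 3)` on the four iris features should produce a 10x5 hidden matrix and a 3x11 output matrix. A sketch, assuming the module layout from index.js above:

```js
// Sketch: expected layer shapes for the iris run in index.js.
// Run under Deno; Build comes from the repo's nn.ts.
import { Build } from "./nn.ts";

let layers = Build(4, 10, 3);
console.log(layers[0].length, layers[0][0].length); // 10 5  (hidden: 4 inputs + bias)
console.log(layers[1].length, layers[1][0].length); // 3 11  (output: 10 hidden + bias)
```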
							
								
								
									
iris.js (238 lines, Normal file)
| @@ -0,0 +1,238 @@ | ||||
| export default () => | ||||
| { | ||||
|     let inputs = []; | ||||
|     let labels = []; | ||||
| 
 | ||||
|     let min = [999, 999, 999, 999]; | ||||
|     let max = [-99, -99, -99, -99]; | ||||
| 
 | ||||
|     DataBig.split("\n").forEach((inRowValue, inRowIndex)=> | ||||
|     { | ||||
|         let currentInput = []; | ||||
|         let currentLabel = []; | ||||
| 
 | ||||
|         if(inRowIndex == 0){ return; } | ||||
| 
 | ||||
|         inRowValue.split(",").forEach((inCellValue, inCellIndex)=> | ||||
|         { | ||||
|             if(inCellIndex == 4) | ||||
|             { | ||||
|                 switch(inCellValue) | ||||
|                 { | ||||
|                     case `"Setosa"`: | ||||
|                         currentLabel = [1, 0, 0]; | ||||
|                         break; | ||||
|                     case `"Versicolor"` : | ||||
|                         currentLabel = [0, 1, 0]; | ||||
|                         break; | ||||
|                     case `"Virginica"` : | ||||
|                         currentLabel = [0, 0, 1]; | ||||
|                         break; | ||||
|                 } | ||||
|             } | ||||
|             else | ||||
|             { | ||||
|                 let value = parseFloat(inCellValue); | ||||
|                 if(min[inCellIndex] > value){ min[inCellIndex] = value; } | ||||
|                 if(max[inCellIndex] < value){ max[inCellIndex] = value; } | ||||
|                 currentInput.push(value); | ||||
|             } | ||||
|         }); | ||||
| 
 | ||||
|         inputs.push(currentInput); | ||||
|         labels.push(currentLabel); | ||||
|     }); | ||||
| 
 | ||||
|     console.log(min, max); | ||||
|     inputs.forEach((inRowValue, inRowIndex)=> | ||||
|     { | ||||
|         inRowValue.forEach((inCellValue, inCellIndex)=> | ||||
|         { | ||||
|             inputs[inRowIndex][inCellIndex] = (inCellValue - min[inCellIndex])/(max[inCellIndex] - min[inCellIndex]); | ||||
|         }); | ||||
|     }) | ||||
| 
 | ||||
|     return [ inputs, labels ]; | ||||
| }; | ||||
| const Data = `"sepal.length","sepal.width","petal.length","petal.width","variety" | ||||
| 5.1,3.5,1.4,.2,"Setosa" | ||||
| 4.9,3,1.4,.2,"Setosa" | ||||
| 4.7,3.2,1.3,.2,"Setosa" | ||||
| 4.6,3.1,1.5,.2,"Setosa" | ||||
| 5,3.6,1.4,.2,"Setosa" | ||||
| 5.4,3.9,1.7,.4,"Setosa" | ||||
| 4.6,3.4,1.4,.3,"Setosa" | ||||
| 7,3.2,4.7,1.4,"Versicolor" | ||||
| 6.4,3.2,4.5,1.5,"Versicolor" | ||||
| 6.9,3.1,4.9,1.5,"Versicolor" | ||||
| 5.5,2.3,4,1.3,"Versicolor" | ||||
| 6.5,2.8,4.6,1.5,"Versicolor" | ||||
| 5.7,2.8,4.5,1.3,"Versicolor" | ||||
| 6.3,3.3,4.7,1.6,"Versicolor" | ||||
| 4.9,2.4,3.3,1,"Versicolor" | ||||
| 6.6,2.9,4.6,1.3,"Versicolor" | ||||
| 5.2,2.7,3.9,1.4,"Versicolor" | ||||
| 5,2,3.5,1,"Versicolor" | ||||
| 5.7,2.5,5,2,"Virginica" | ||||
| 5.8,2.8,5.1,2.4,"Virginica" | ||||
| 6.4,3.2,5.3,2.3,"Virginica" | ||||
| 6.5,3,5.5,1.8,"Virginica" | ||||
| 7.7,3.8,6.7,2.2,"Virginica" | ||||
| 7.7,2.6,6.9,2.3,"Virginica" | ||||
| 6,2.2,5,1.5,"Virginica" | ||||
| 6.9,3.2,5.7,2.3,"Virginica" | ||||
| 5.6,2.8,4.9,2,"Virginica" | ||||
| 7.7,2.8,6.7,2,"Virginica" | ||||
| 6.3,2.7,4.9,1.8,"Virginica" | ||||
| 6.7,3.3,5.7,2.1,"Virginica"`; | ||||
| const DataBig = `"sepal.length","sepal.width","petal.length","petal.width","variety" | ||||
| 5.1,3.5,1.4,.2,"Setosa" | ||||
| 4.9,3,1.4,.2,"Setosa" | ||||
| 4.7,3.2,1.3,.2,"Setosa" | ||||
| 4.6,3.1,1.5,.2,"Setosa" | ||||
| 5,3.6,1.4,.2,"Setosa" | ||||
| 5.4,3.9,1.7,.4,"Setosa" | ||||
| 4.6,3.4,1.4,.3,"Setosa" | ||||
| 5,3.4,1.5,.2,"Setosa" | ||||
| 4.4,2.9,1.4,.2,"Setosa" | ||||
| 4.9,3.1,1.5,.1,"Setosa" | ||||
| 5.4,3.7,1.5,.2,"Setosa" | ||||
| 4.8,3.4,1.6,.2,"Setosa" | ||||
| 4.8,3,1.4,.1,"Setosa" | ||||
| 4.3,3,1.1,.1,"Setosa" | ||||
| 5.8,4,1.2,.2,"Setosa" | ||||
| 5.7,4.4,1.5,.4,"Setosa" | ||||
| 5.4,3.9,1.3,.4,"Setosa" | ||||
| 5.1,3.5,1.4,.3,"Setosa" | ||||
| 5.7,3.8,1.7,.3,"Setosa" | ||||
| 5.1,3.8,1.5,.3,"Setosa" | ||||
| 5.4,3.4,1.7,.2,"Setosa" | ||||
| 5.1,3.7,1.5,.4,"Setosa" | ||||
| 4.6,3.6,1,.2,"Setosa" | ||||
| 5.1,3.3,1.7,.5,"Setosa" | ||||
| 4.8,3.4,1.9,.2,"Setosa" | ||||
| 5,3,1.6,.2,"Setosa" | ||||
| 5,3.4,1.6,.4,"Setosa" | ||||
| 5.2,3.5,1.5,.2,"Setosa" | ||||
| 5.2,3.4,1.4,.2,"Setosa" | ||||
| 4.7,3.2,1.6,.2,"Setosa" | ||||
| 4.8,3.1,1.6,.2,"Setosa" | ||||
| 5.4,3.4,1.5,.4,"Setosa" | ||||
| 5.2,4.1,1.5,.1,"Setosa" | ||||
| 5.5,4.2,1.4,.2,"Setosa" | ||||
| 4.9,3.1,1.5,.2,"Setosa" | ||||
| 5,3.2,1.2,.2,"Setosa" | ||||
| 5.5,3.5,1.3,.2,"Setosa" | ||||
| 4.9,3.6,1.4,.1,"Setosa" | ||||
| 4.4,3,1.3,.2,"Setosa" | ||||
| 5.1,3.4,1.5,.2,"Setosa" | ||||
| 5,3.5,1.3,.3,"Setosa" | ||||
| 4.5,2.3,1.3,.3,"Setosa" | ||||
| 4.4,3.2,1.3,.2,"Setosa" | ||||
| 5,3.5,1.6,.6,"Setosa" | ||||
| 5.1,3.8,1.9,.4,"Setosa" | ||||
| 4.8,3,1.4,.3,"Setosa" | ||||
| 5.1,3.8,1.6,.2,"Setosa" | ||||
| 4.6,3.2,1.4,.2,"Setosa" | ||||
| 5.3,3.7,1.5,.2,"Setosa" | ||||
| 5,3.3,1.4,.2,"Setosa" | ||||
| 7,3.2,4.7,1.4,"Versicolor" | ||||
| 6.4,3.2,4.5,1.5,"Versicolor" | ||||
| 6.9,3.1,4.9,1.5,"Versicolor" | ||||
| 5.5,2.3,4,1.3,"Versicolor" | ||||
| 6.5,2.8,4.6,1.5,"Versicolor" | ||||
| 5.7,2.8,4.5,1.3,"Versicolor" | ||||
| 6.3,3.3,4.7,1.6,"Versicolor" | ||||
| 4.9,2.4,3.3,1,"Versicolor" | ||||
| 6.6,2.9,4.6,1.3,"Versicolor" | ||||
| 5.2,2.7,3.9,1.4,"Versicolor" | ||||
| 5,2,3.5,1,"Versicolor" | ||||
| 5.9,3,4.2,1.5,"Versicolor" | ||||
| 6,2.2,4,1,"Versicolor" | ||||
| 6.1,2.9,4.7,1.4,"Versicolor" | ||||
| 5.6,2.9,3.6,1.3,"Versicolor" | ||||
| 6.7,3.1,4.4,1.4,"Versicolor" | ||||
| 5.6,3,4.5,1.5,"Versicolor" | ||||
| 5.8,2.7,4.1,1,"Versicolor" | ||||
| 6.2,2.2,4.5,1.5,"Versicolor" | ||||
| 5.6,2.5,3.9,1.1,"Versicolor" | ||||
| 5.9,3.2,4.8,1.8,"Versicolor" | ||||
| 6.1,2.8,4,1.3,"Versicolor" | ||||
| 6.3,2.5,4.9,1.5,"Versicolor" | ||||
| 6.1,2.8,4.7,1.2,"Versicolor" | ||||
| 6.4,2.9,4.3,1.3,"Versicolor" | ||||
| 6.6,3,4.4,1.4,"Versicolor" | ||||
| 6.8,2.8,4.8,1.4,"Versicolor" | ||||
| 6.7,3,5,1.7,"Versicolor" | ||||
| 6,2.9,4.5,1.5,"Versicolor" | ||||
| 5.7,2.6,3.5,1,"Versicolor" | ||||
| 5.5,2.4,3.8,1.1,"Versicolor" | ||||
| 5.5,2.4,3.7,1,"Versicolor" | ||||
| 5.8,2.7,3.9,1.2,"Versicolor" | ||||
| 6,2.7,5.1,1.6,"Versicolor" | ||||
| 5.4,3,4.5,1.5,"Versicolor" | ||||
| 6,3.4,4.5,1.6,"Versicolor" | ||||
| 6.7,3.1,4.7,1.5,"Versicolor" | ||||
| 6.3,2.3,4.4,1.3,"Versicolor" | ||||
| 5.6,3,4.1,1.3,"Versicolor" | ||||
| 5.5,2.5,4,1.3,"Versicolor" | ||||
| 5.5,2.6,4.4,1.2,"Versicolor" | ||||
| 6.1,3,4.6,1.4,"Versicolor" | ||||
| 5.8,2.6,4,1.2,"Versicolor" | ||||
| 5,2.3,3.3,1,"Versicolor" | ||||
| 5.6,2.7,4.2,1.3,"Versicolor" | ||||
| 5.7,3,4.2,1.2,"Versicolor" | ||||
| 5.7,2.9,4.2,1.3,"Versicolor" | ||||
| 6.2,2.9,4.3,1.3,"Versicolor" | ||||
| 5.1,2.5,3,1.1,"Versicolor" | ||||
| 5.7,2.8,4.1,1.3,"Versicolor" | ||||
| 6.3,3.3,6,2.5,"Virginica" | ||||
| 5.8,2.7,5.1,1.9,"Virginica" | ||||
| 7.1,3,5.9,2.1,"Virginica" | ||||
| 6.3,2.9,5.6,1.8,"Virginica" | ||||
| 6.5,3,5.8,2.2,"Virginica" | ||||
| 7.6,3,6.6,2.1,"Virginica" | ||||
| 4.9,2.5,4.5,1.7,"Virginica" | ||||
| 7.3,2.9,6.3,1.8,"Virginica" | ||||
| 6.7,2.5,5.8,1.8,"Virginica" | ||||
| 7.2,3.6,6.1,2.5,"Virginica" | ||||
| 6.5,3.2,5.1,2,"Virginica" | ||||
| 6.4,2.7,5.3,1.9,"Virginica" | ||||
| 6.8,3,5.5,2.1,"Virginica" | ||||
| 5.7,2.5,5,2,"Virginica" | ||||
| 5.8,2.8,5.1,2.4,"Virginica" | ||||
| 6.4,3.2,5.3,2.3,"Virginica" | ||||
| 6.5,3,5.5,1.8,"Virginica" | ||||
| 7.7,3.8,6.7,2.2,"Virginica" | ||||
| 7.7,2.6,6.9,2.3,"Virginica" | ||||
| 6,2.2,5,1.5,"Virginica" | ||||
| 6.9,3.2,5.7,2.3,"Virginica" | ||||
| 5.6,2.8,4.9,2,"Virginica" | ||||
| 7.7,2.8,6.7,2,"Virginica" | ||||
| 6.3,2.7,4.9,1.8,"Virginica" | ||||
| 6.7,3.3,5.7,2.1,"Virginica" | ||||
| 7.2,3.2,6,1.8,"Virginica" | ||||
| 6.2,2.8,4.8,1.8,"Virginica" | ||||
| 6.1,3,4.9,1.8,"Virginica" | ||||
| 6.4,2.8,5.6,2.1,"Virginica" | ||||
| 7.2,3,5.8,1.6,"Virginica" | ||||
| 7.4,2.8,6.1,1.9,"Virginica" | ||||
| 7.9,3.8,6.4,2,"Virginica" | ||||
| 6.4,2.8,5.6,2.2,"Virginica" | ||||
| 6.3,2.8,5.1,1.5,"Virginica" | ||||
| 6.1,2.6,5.6,1.4,"Virginica" | ||||
| 7.7,3,6.1,2.3,"Virginica" | ||||
| 6.3,3.4,5.6,2.4,"Virginica" | ||||
| 6.4,3.1,5.5,1.8,"Virginica" | ||||
| 6,3,4.8,1.8,"Virginica" | ||||
| 6.9,3.1,5.4,2.1,"Virginica" | ||||
| 6.7,3.1,5.6,2.4,"Virginica" | ||||
| 6.9,3.1,5.1,2.3,"Virginica" | ||||
| 5.8,2.7,5.1,1.9,"Virginica" | ||||
| 6.8,3.2,5.9,2.3,"Virginica" | ||||
| 6.7,3.3,5.7,2.5,"Virginica" | ||||
| 6.7,3,5.2,2.3,"Virginica" | ||||
| 6.3,2.5,5,1.9,"Virginica" | ||||
| 6.5,3,5.2,2,"Virginica" | ||||
| 6.2,3.4,5.4,2.3,"Virginica" | ||||
| 5.9,3,5.1,1.8,"Virginica"`; | ||||
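The exporter rescales each input column to [0, 1] with a min-max transform, `(value - min)/(max - min)`. With sepal lengths in DataBig spanning 4.3 to 7.9, for example, a reading of 5.1 maps to (5.1 - 4.3)/(7.9 - 4.3) ≈ 0.222. The same step in isolation:

```js
// Sketch: the per-column min-max normalization used by the iris exporter.
const normalize = (value, min, max) => (value - min) / (max - min);
console.log(normalize(5.1, 4.3, 7.9)); // ~0.222
```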
							
								
								
									
m.test.js (31 lines)
| @@ -11,10 +11,10 @@ Deno.test("Iterate.Loop", ()=> | ||||
|     assertEquals(cloud[0][0], 0); | ||||
|     assertEquals(cloud[3][2], 5, "correct output"); | ||||
| }); | ||||
| Deno.test("Iterate.Edit", ()=> | ||||
| Deno.test("Iterate.Copy", ()=> | ||||
| { | ||||
|     const c = [[1, 2], [3, 4]] | ||||
|     const t = M.Iterate.Edit(c, (i)=>i); | ||||
|     const t = M.Iterate.Copy(c, (i)=>i); | ||||
|     assertEquals(t.length, c.length, "correct count"); | ||||
|     assertEquals(t[0][0], c[0][0], "correct dimensions"); | ||||
|     assertEquals(t[1][1], c[1][1], "correct placement"); | ||||
| @@ -146,20 +146,39 @@ Deno.test("Batch.Subtract", ()=> | ||||
|     assertEquals(t[0].length, 2,   "correct dimensions"); | ||||
|     assertEquals(t[1][0],     2.5, "correct placement"); | ||||
| }); | ||||
| Deno.test("Batch.Sigmoid", ()=> | ||||
| Deno.test("Batch.Sig", ()=> | ||||
| { | ||||
|     const m = [[-1000, 1000]]; | ||||
|     const t = M.Batch.Sigmoid(m); | ||||
|     const t = M.Batch.Sig(m); | ||||
|     assertEquals(t.length, 1, "correct count"); | ||||
|     assertEquals(t[0].length, 2, "correct dimensions"); | ||||
|     assert(t[0][0]>=0 && t[0][0]<0.5); | ||||
|     assert(t[0][1]<=1 && t[0][1]>0.5, "correct placement"); | ||||
| 
 | ||||
| }); | ||||
| Deno.test("Batch.Derivative", ()=> | ||||
| Deno.test("Batch.SigDeriv", ()=> | ||||
| { | ||||
|     const m = [[-1000, 0, 1000]]; | ||||
|     const t = M.Batch.Derivative(M.Batch.Sigmoid(m)); | ||||
|     const t = M.Batch.SigDeriv(M.Batch.Sig(m)); | ||||
|     assertEquals(t.length, 1, "correct count"); | ||||
|     assertEquals(t[0].length, 3, "correct dimensions"); | ||||
|     assert(t[0][0]<t[0][1] && t[0][1]>t[0][2]); | ||||
| }); | ||||
| 
 | ||||
| Deno.test("Batch.Rec", ()=> | ||||
| { | ||||
|     const m = [[-1, 1, 10]]; | ||||
|     const t = M.Batch.Rec(m); | ||||
|     assert(t[0][0] == 0); | ||||
|     assert(t[0][1] == 1); | ||||
|     assert(t[0][2] == 10); | ||||
| }); | ||||
| Deno.test("Batch.RecDeriv", ()=> | ||||
| { | ||||
|     const m = [[-1, 1, 10]]; | ||||
|     const t = M.Batch.RecDeriv(m); | ||||
| 
 | ||||
|     assert(t[0][0] == 0); | ||||
|     assert(t[0][1] == 1); | ||||
|     assert(t[0][2] == 1); | ||||
| }); | ||||
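Beyond the `Sigmoid`/`Derivative` to `Sig`/`SigDeriv` rename, the new tests introduce a rectifier pair: `Rec` clamps negative components to zero (a ReLU) and `RecDeriv` is its step-function derivative, exactly as the assertions above expect. The two element-wise rules in isolation:

```js
// Sketch: the element-wise rules behind M.Batch.Rec and M.Batch.RecDeriv.
const rec      = (x) => x <= 0 ? 0 : x; // rectifier (ReLU)
const recDeriv = (x) => x <= 0 ? 0 : 1; // its derivative
console.log([-1, 1, 10].map(rec));      // [ 0, 1, 10 ]
console.log([-1, 1, 10].map(recDeriv)); // [ 0, 1, 1 ]
```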
							
								
								
									
m.ts (26 lines)
| @@ -16,35 +16,27 @@ const Methods = { | ||||
|             for(i=0; i<inCount; i++) | ||||
|             { | ||||
|                 outputVector = []; | ||||
|                 for(j=0; j<inDimensions; j++) | ||||
|                 { | ||||
|                     outputVector.push(inFunction(j, i, outputVector)); | ||||
|                 } | ||||
|                 for(j=0; j<inDimensions; j++){ outputVector.push(inFunction(j, i, outputVector)); } | ||||
|                 outputCloud.push(outputVector); | ||||
|             } | ||||
|             return outputCloud; | ||||
|         }, | ||||
|         Edit: (inCloud:Cloud.M, inFunction:Cloud.HandleEdit):Cloud.M=> inCloud.map((row:Cloud.V):Cloud.V=>row.map(inFunction)) | ||||
|         Copy: (inCloud:Cloud.M, inFunction:Cloud.HandleEdit):Cloud.M=> inCloud.map((row:Cloud.V):Cloud.V=> row.map(inFunction)), | ||||
|         Edit: (inCloud:Cloud.M, inFunction:Cloud.HandleEdit):void   => inCloud.forEach((row:Cloud.V):void=>row.forEach(inFunction)) | ||||
|     }, | ||||
|     Create: | ||||
|     { | ||||
|               Box: (inV1:Cloud.V, inV2:Cloud.V, inCount:number):Cloud.M=> Methods.Iterate.Loop(inV1.length, inCount, i=> inV1[i]+(inV2[i]-inV1[i])*Math.random()), | ||||
|         Transpose:                            (inCloud:Cloud.M):Cloud.M=> Methods.Iterate.Loop(inCloud.length, inCloud[0].length, (i, row)=> inCloud[i][row]), | ||||
|             Outer:                 (inV1:Cloud.V, inV2:Cloud.V):Cloud.M=> Methods.Iterate.Loop(inV1.length, inV2.length, (i, row)=> inV1[i]*inV2[row]), | ||||
|             Clone:                            (inCloud:Cloud.M):Cloud.M=> Methods.Iterate.Edit(inCloud, i=> i) | ||||
|             Clone:                            (inCloud:Cloud.M):Cloud.M=> Methods.Iterate.Copy(inCloud, i=> i), | ||||
|            Padded:                            (inCloud:Cloud.M):Cloud.M=> inCloud.map((row:Cloud.V)=> [...row, 1]) | ||||
|     }, | ||||
|     Mutate: | ||||
|     { | ||||
|           Pad: (inCloud:Cloud.M):Cloud.M=> {inCloud.forEach((row:Cloud.V)=> row.push(1)); return inCloud; }, | ||||
|         Unpad: (inCloud:Cloud.M):Cloud.M=> {inCloud.forEach((row:Cloud.V)=> row.pop());   return inCloud; } | ||||
|     }, | ||||
|     Test: | ||||
|     { | ||||
|         Dot:(v1:Cloud.V, v2:Cloud.V):number=>  | ||||
|         { | ||||
|             return v1.reduce((sum, current, index)=> sum + current*v2[index]); | ||||
|         } | ||||
|     }, | ||||
|     Single: | ||||
|     { | ||||
|         Subtract:    (inV1:Cloud.V, inV2:Cloud.V):Cloud.V=> inV1.map((component, i)=> component-inV2[i]), | ||||
| @@ -56,9 +48,11 @@ const Methods = { | ||||
|           Subtract: (inCloud1:Cloud.M, inCloud2:Cloud.M):Cloud.M=> inCloud1.map((row:Cloud.V, rowIndex:number)=> Methods.Single.Subtract(row, inCloud2[rowIndex])), | ||||
|           Multiply: (inCloud1:Cloud.M, inCloud2:Cloud.M):Cloud.M=> inCloud1.map((row:Cloud.V, rowIndex:number)=> Methods.Single.Multiply(row, inCloud2[rowIndex])), | ||||
|             Affine: (inCloud1:Cloud.M, inCloud2:Cloud.M):Cloud.M=> inCloud1.map((row:Cloud.V)=> Methods.Single.Affine(row, inCloud2)), | ||||
|            Sigmoid:                    (inCloud:Cloud.M):Cloud.M=> Methods.Iterate.Edit(inCloud, i=>1/(1+Math.pow(Math.E, -i))), | ||||
|         Derivative:                    (inCloud:Cloud.M):Cloud.M=> Methods.Iterate.Edit(inCloud, i=>i*(1-i)), | ||||
|              Scale:   (inCloud:Cloud.M, inScalar:number):Cloud.M=> Methods.Iterate.Edit(inCloud, i=>i*inScalar) | ||||
|                Sig:                    (inCloud:Cloud.M):Cloud.M=> Methods.Iterate.Copy(inCloud, i=>1/(1+Math.pow(Math.E, -i))), | ||||
|           SigDeriv:                    (inCloud:Cloud.M):Cloud.M=> Methods.Iterate.Copy(inCloud, i=>i*(1-i)), | ||||
|                Rec:                    (inCloud:Cloud.M):Cloud.M=> Methods.Iterate.Copy(inCloud, i=> i<=0 ? 0 : i), | ||||
|           RecDeriv:                    (inCloud:Cloud.M):Cloud.M=> Methods.Iterate.Copy(inCloud, i=> i<=0 ? 0 : 1), | ||||
|              Scale:   (inCloud:Cloud.M, inScalar:number):Cloud.M=> Methods.Iterate.Copy(inCloud, i=>i*inScalar) | ||||
|     } | ||||
| }; | ||||
| 
 | ||||
|  | ||||
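The pivotal rename in m.ts: the old map-based `Iterate.Edit` becomes `Copy`, which builds a new matrix from the callback's return values, while the new `forEach`-based `Edit` only walks the cells. Batch math now routes through `Copy`. A standalone sketch of the difference (untyped, hypothetical local names):

```js
// Sketch: Copy builds a new matrix from the callback's return values;
// the new Edit only visits cells and discards what the callback returns.
const Copy = (cloud, fn) => cloud.map(row => row.map(fn));
const Edit = (cloud, fn) => cloud.forEach(row => row.forEach(fn));

const m = [[1, 2], [3, 4]];
console.log(Copy(m, x => x * 2)); // [ [ 2, 4 ], [ 6, 8 ] ]
Edit(m, x => x * 2);              // no return value; m itself is untouched
console.log(m);                   // [ [ 1, 2 ], [ 3, 4 ] ]
```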
							
								
								
									
methods.md (14 lines)
| @@ -1,14 +0,0 @@ | ||||
| box(boundingBox, count) // done | ||||
| transpose(inMatrix) // done | ||||
| outer(inv1, inv2) // done | ||||
| clone(inCloud) // done | ||||
| 
 | ||||
| pad(inCloud) // done | ||||
| unpad(inCloud) // done | ||||
| 
 | ||||
| transform(inCloud, inMatrix) // done | ||||
| sigmoid(inCloud) // 1/(1+e^x) // done | ||||
| derivative(inCloud) // x*(1-x) // done | ||||
| scale(inCloud1, inV) // done | ||||
| subtract(inCloud1, inCloud2) // done | ||||
| multiply(inCloud1, inCloud2) // done | ||||
| @@ -18,7 +18,7 @@ Deno.test("NN.Split", ()=> | ||||
|     assert(output); | ||||
|     assertEquals(input.length, output.length, "data split into equal input and output"); | ||||
| 
 | ||||
|     assertEquals(input[0].length, 3, "padded input"); | ||||
|     assertEquals(input[0].length, 2, "unpadded input"); | ||||
|     assertEquals(output[0].length, 2, "unpadded output"); | ||||
| }); | ||||
| 
 | ||||
| @@ -27,7 +27,7 @@ Deno.test("NN.Build", ()=> | ||||
|     layers = Build(2, 5, 2); | ||||
| 
 | ||||
|     assertEquals(layers.length, 2, "correct number of matrices"); | ||||
|     assertEquals(layers[0][0].length, input[0].length, "input: padded input"); | ||||
|     assertEquals(layers[0][0].length, input[0].length+1, "input: padded input"); | ||||
|     assertEquals(layers[0].length, 5, "input: unpadded output"); | ||||
| 
 | ||||
|     assertEquals(layers[1][0].length, 6, "hidden: padded input"); | ||||
| @@ -43,7 +43,8 @@ Deno.test("NN.Label", ()=> | ||||
| 
 | ||||
| Deno.test("NN.Learn", ()=> | ||||
| { | ||||
|     let error = Learn(input, layers, output, 1000, 0.1); | ||||
|     let error = Learn(input, layers, output, 50, 0.2); | ||||
|     console.log(error); | ||||
|     assertEquals(error.length, output.length); | ||||
|     let total = 0; | ||||
|     let count = error.length*error[0].length; | ||||
|  | ||||
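The `NN.Learn` test body is cut off above, but its bookkeeping (`total` accumulated over `count = rows * columns` cells) implies a mean error across every output component. A hypothetical completion of that reduction:

```js
// Sketch (hypothetical): mean absolute error over every cell of the
// error matrix, matching the total/count variables in the truncated test.
const meanAbsError = (error) => {
  let total = 0;
  const count = error.length * error[0].length;
  error.forEach(row => row.forEach(cell => { total += Math.abs(cell); }));
  return total / count;
};
console.log(meanAbsError([[0.1, -0.2], [0.0, 0.3]])); // 0.15
```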
							
								
								
									
nn.ts (31 lines)
| @@ -5,7 +5,8 @@ const Forward = (inData:Cloud.M, inLayers:N):N => | ||||
| { | ||||
|     let i:number; | ||||
|     let stages:N = [inData]; | ||||
|     let process = (index:number):Cloud.M => M.Batch.Sigmoid(M.Batch.Affine(stages[index], inLayers[index])); | ||||
|     let nonLinear = (inIndex:number):any=> inIndex >= inLayers.length-1 ? M.Batch.Sig : M.Batch.Rec; | ||||
|     let process = (index:number):Cloud.M => nonLinear(index)(M.Batch.Affine(stages[index], inLayers[index])); | ||||
| 
 | ||||
|     for(i=0; i<inLayers.length-1; i++){ stages[i+1] = M.Mutate.Pad(process(i)); } | ||||
|     stages[i+1] = process(i); | ||||
| @@ -15,10 +16,11 @@ const Backward = (inStages:N, inLayers:N, inGoals:Cloud.M, inRate:number):N => | ||||
| { | ||||
|     let i:number; | ||||
|     let errorBack:Cloud.M = M.Batch.Subtract(inStages[inStages.length-1], inGoals); | ||||
|     let nonLinear = (inIndex:number):any=> inIndex >= inLayers.length-1 ? M.Batch.SigDeriv : M.Batch.RecDeriv; | ||||
| 
 | ||||
|     for(i=inLayers.length-1; i>=0; i--) | ||||
|     { | ||||
|         let errorScaled:Cloud.M = M.Batch.Multiply(errorBack, M.Batch.Derivative(inStages[i+1])); | ||||
|         let errorScaled:Cloud.M = M.Batch.Multiply(errorBack, nonLinear(i)(inStages[i+1])); | ||||
|         errorBack = M.Batch.Affine(errorScaled, M.Create.Transpose(inLayers[i])); | ||||
|         errorScaled.forEach((inScaledError:Cloud.V, inIndex:number)=> | ||||
|         { | ||||
| @@ -40,10 +42,8 @@ const Split = (inTrainingSet:Cloud.M, inHeaderLabel:Cloud.V, inHeaderKeep:Cloud. | ||||
|     } | ||||
|     inTrainingSet.forEach((row:Cloud.V):void => | ||||
|     { | ||||
|         let vectorData = [ ...inHeaderKeep.map((i:number)=>row[i]), 1]; | ||||
|         let vectorLabel = inHeaderLabel.map((i:number)=>row[i])  | ||||
|          data.push( vectorData ); | ||||
|         label.push( vectorLabel ); | ||||
|          data.push(  inHeaderKeep.map((i:number)=>row[i]) ); | ||||
|         label.push( inHeaderLabel.map((i:number)=>row[i]) ); | ||||
|     }); | ||||
|     return [ data, label ]; | ||||
| }; | ||||
| @@ -58,17 +58,28 @@ const Build = (...inLayers:Array<number>):N => | ||||
|     } | ||||
|     return output; | ||||
| }; | ||||
| const Label = (inData:Cloud.M, inLayers:N):Cloud.M => | ||||
| const Label = (inData:Cloud.M, inLayers:N, inRound:boolean):Cloud.M => | ||||
| { | ||||
|     let stages:N = Forward(inData, inLayers); | ||||
|     return stages[stages.length-1]; | ||||
|     let stages:N = Forward(M.Create.Padded(inData), inLayers); | ||||
|     let output = stages[stages.length-1]; | ||||
|     if(inRound) | ||||
|     { | ||||
|         output.forEach(row=> | ||||
|         { | ||||
|             row.forEach((cell, i)=> | ||||
|             { | ||||
|                 row[i] = (Math.round(cell * 100) / 100); | ||||
|             }); | ||||
|         }); | ||||
|     } | ||||
|     return output; | ||||
| }; | ||||
| const Learn = (inData:Cloud.M, inLayers:N, inLabels:Cloud.M, inIterations:number, inRate:number):Cloud.M => | ||||
| { | ||||
|     let stages:N = []; | ||||
|     for(let i=0; i<inIterations; i++) | ||||
|     { | ||||
|         stages = Forward(inData, inLayers); | ||||
|         stages = Forward(M.Create.Padded(inData), inLayers); | ||||
|         Backward(stages, inLayers, inLabels, inRate); | ||||
|     } | ||||
|     return M.Batch.Subtract(stages[stages.length-1], inLabels); | ||||
|  | ||||
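Net effect of the nn.ts changes: hidden layers now rectify while the output layer keeps the sigmoid, the backward pass picks the matching derivative per layer, and input padding moves out of `Split` into `M.Create.Padded` at the `Label`/`Learn` call sites. A condensed sketch of the per-layer activation choice (untyped, hypothetical helper names):

```js
// Sketch: activation schedule as wired in Forward/Backward above.
// The final layer squashes with a sigmoid; every earlier layer rectifies.
const sig      = (x) => 1 / (1 + Math.exp(-x));
const sigDeriv = (y) => y * (1 - y);    // y is a sigmoid *output*
const rec      = (x) => x <= 0 ? 0 : x;
const recDeriv = (x) => x <= 0 ? 0 : 1;

const pick      = (i, layerCount) => i >= layerCount - 1 ? sig : rec;
const pickDeriv = (i, layerCount) => i >= layerCount - 1 ? sigDeriv : recDeriv;

console.log(pick(1, 2) === sig); // true: output layer
console.log(pick(0, 2) === rec); // true: hidden layer
```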
							
								
								
									
nn_old.js (211 lines)
| @@ -1,211 +0,0 @@ | ||||
| var NN = {}; | ||||
| 
 | ||||
| NN.TrainingSet = {}; | ||||
| NN.TrainingSet.Instances = []; | ||||
| NN.TrainingSet.Create = function() | ||||
| { | ||||
|     var obj = {}; | ||||
| 
 | ||||
|     obj.Input = []; | ||||
|     obj.Output = []; | ||||
|     obj.Order = []; | ||||
|      | ||||
|     NN.TrainingSet.Instances.push(obj); | ||||
|     return obj; | ||||
| }; | ||||
| NN.TrainingSet.AddPoint = function(inTrainingSet, inType, inData) | ||||
| { | ||||
|     inTrainingSet.Input.push(inData); | ||||
|     inTrainingSet.Output.push(inType); | ||||
|     inTrainingSet.Order.push(inTrainingSet.Order.length); | ||||
| }; | ||||
| NN.TrainingSet.AddCloud = function(inTrainingSet, inLabel, inCloud) | ||||
| { | ||||
|     var i; | ||||
|     for(i=0; i<inCloud.length; i++) | ||||
|     { | ||||
|         NN.TrainingSet.AddPoint(inTrainingSet, inLabel, inCloud[i]); | ||||
|     } | ||||
| }; | ||||
| NN.TrainingSet.Randomize = function(inTrainingSet) | ||||
| { | ||||
|       var newOrder = []; | ||||
|       var selection; | ||||
|       while(inTrainingSet.Order.length != 0) | ||||
|       { | ||||
|           selection = Math.floor(inTrainingSet.Order.length * Math.random()); | ||||
|           inTrainingSet.Order.splice(selection, 1); | ||||
|           newOrder.push(selection); | ||||
|       } | ||||
|       inTrainingSet.Order = newOrder; | ||||
| }; | ||||
| 
 | ||||
| 
 | ||||
| NN.Layer = {}; | ||||
| NN.Layer.Create = function(sizeIn, sizeOut) | ||||
| { | ||||
|     var i; | ||||
|     var min = []; | ||||
|     var max = []; | ||||
|     var obj = {}; | ||||
|      | ||||
|     sizeIn++; | ||||
|      | ||||
|     obj.Forward = {}; | ||||
|     for(i=0; i<sizeIn; i++) | ||||
|     { | ||||
|         min.push(-1); | ||||
|         max.push(1); | ||||
|     } | ||||
|     obj.Forward.Matrix = M.Box([min, max], sizeOut); | ||||
|     obj.Forward.StageInput = []; | ||||
|     obj.Forward.StageAffine = []; | ||||
|     obj.Forward.StageSigmoid = []; | ||||
|     obj.Forward.StageDerivative = []; | ||||
|      | ||||
|     obj.Backward = {}; | ||||
|     obj.Backward.Matrix = M.Transpose(obj.Forward.Matrix); | ||||
|     obj.Backward.StageInput = []; | ||||
|     obj.Backward.StageDerivative = []; | ||||
|     obj.Backward.StageAffine = []; | ||||
|      | ||||
|     return obj; | ||||
| }; | ||||
| NN.Layer.Forward = function(inLayer, inInput) | ||||
| { | ||||
|     inLayer.Forward.StageInput = M.Pad(inInput); // Pad the input | ||||
|     inLayer.Forward.StageAffine = M.Transform(inLayer.Forward.Matrix, inLayer.Forward.StageInput); | ||||
|     inLayer.Forward.StageSigmoid = M.Sigmoid(inLayer.Forward.StageAffine); | ||||
|      | ||||
|     return inLayer.Forward.StageSigmoid; | ||||
| }; | ||||
| NN.Layer.Error = function(inLayer, inTarget) | ||||
| { | ||||
|     return M.Subtract(inLayer.Forward.StageSigmoid, inTarget); | ||||
| }; | ||||
| NN.Layer.Backward = function(inLayer, inInput) | ||||
| { | ||||
|     /* We need the derivative of the forward pass, but only during the backward pass. | ||||
|     That's why, even though it "belongs" to the forward pass, it is being calculated here. */ | ||||
|     inLayer.Forward.StageDerivative = M.Derivative(inLayer.Forward.StageSigmoid); | ||||
|      | ||||
|     /* This transpose matrix is for sending the error back to a previous layer. | ||||
|     And again, even though it is derived directly from the forward matrix, it is only needed during the backward pass so we calculate it here.*/ | ||||
|     inLayer.Backward.Matrix = M.Transpose(inLayer.Forward.Matrix); | ||||
|      | ||||
|     /* When the error vector arrives at a layer, it always needs to be multiplied (read 'suppressed') by the derivative of | ||||
|     what the layer output earlier during the forward pass. | ||||
|     So despite its name, Backward.StageDerivative contains the result of this *multiplication* and not some new derivative calculation.*/ | ||||
|     inLayer.Backward.StageInput = inInput; | ||||
|     inLayer.Backward.StageDerivative = M.Multiply(inLayer.Backward.StageInput, inLayer.Forward.StageDerivative); | ||||
|     inLayer.Backward.StageAffine = M.Transform(inLayer.Backward.Matrix, inLayer.Backward.StageDerivative); | ||||
|      | ||||
|     return M.Unpad(inLayer.Backward.StageAffine);// Unpad the output | ||||
| }; | ||||
| NN.Layer.Adjust = function(inLayer, inLearningRate) | ||||
| { | ||||
|     var deltas; | ||||
|     var vector; | ||||
|     var scalar; | ||||
|     var i, j; | ||||
|      | ||||
|     for(i=0; i<inLayer.Forward.StageInput.length; i++) | ||||
|     { | ||||
|         deltas = M.Outer(inLayer.Forward.StageInput[i], inLayer.Backward.StageDerivative[i]); | ||||
|         deltas = M.Scale(deltas, inLearningRate); | ||||
|          | ||||
|         inLayer.Forward.Matrix = M.Subtract(inLayer.Forward.Matrix, deltas); | ||||
|     } | ||||
| }; | ||||
| NN.Layer.Stochastic = function(inLayer, inTrainingSet, inIterations) | ||||
| { | ||||
|     /* this method is ONLY for testing individual layers, and does not translate to network-level training */ | ||||
|     var i, j; | ||||
|     var current; | ||||
|     var error; | ||||
|     for(i=0; i<inIterations; i++) | ||||
|     { | ||||
|         NN.TrainingSet.Randomize(inTrainingSet); | ||||
|         for(j=0; j<inTrainingSet.Order.length; j++) | ||||
|         { | ||||
|             current = inTrainingSet.Order[j]; | ||||
|             NN.Layer.Forward(inLayer, [inTrainingSet.Input[current]]); | ||||
|             error = M.Subtract(inLayer.Forward.StageSigmoid, [inTrainingSet.Output[current]]); | ||||
|             NN.Layer.Backward(inLayer, error); | ||||
|             NN.Layer.Adjust(inLayer, 0.1); | ||||
|         } | ||||
|     } | ||||
| }; | ||||
| 
 | ||||
| NN.Network = {}; | ||||
| NN.Network.Instances = []; | ||||
| NN.Network.Create = function() | ||||
| { | ||||
|     var obj = {}; | ||||
|     var i;     | ||||
|      | ||||
|     obj.Layers = []; | ||||
|     obj.LearningRate = 0.8; | ||||
|     obj.Error = []; | ||||
|      | ||||
|     for(i=0; i<arguments.length-1; i++) | ||||
|     { | ||||
|         obj.Layers.push(NN.Layer.Create(arguments[i], arguments[i+1])); | ||||
|     } | ||||
|      | ||||
|     NN.Network.Instances.push(obj); | ||||
|     return obj; | ||||
| }; | ||||
| NN.Network.Observe = function(inNetwork, inBatch) | ||||
| { | ||||
|       var input = M.Clone(inBatch); | ||||
|       var i; | ||||
|       for(i=0; i<inNetwork.Layers.length; i++) | ||||
|       { | ||||
|           input = NN.Layer.Forward(inNetwork.Layers[i], input); | ||||
|       } | ||||
|       return inNetwork.Layers[inNetwork.Layers.length-1].Forward.StageSigmoid; | ||||
| }; | ||||
| NN.Network.Error = function(inNetwork, inTraining) | ||||
| { | ||||
|       return M.Subtract(inNetwork.Layers[inNetwork.Layers.length-1].Forward.StageSigmoid, inTraining); | ||||
| }; | ||||
| NN.Network.Learn = function(inNetwork, inError) | ||||
| { | ||||
|       var input = inError; | ||||
|       var i; | ||||
|       for(i=inNetwork.Layers.length-1; i>=0; i--) | ||||
|       { | ||||
|           input = NN.Layer.Backward(inNetwork.Layers[i], input); | ||||
|           NN.Layer.Adjust(inNetwork.Layers[i], inNetwork.LearningRate); | ||||
|       } | ||||
| }; | ||||
| 
 | ||||
| 
 | ||||
| NN.Network.Batch = function(inNetwork, inTrainingSet, inIterations) | ||||
| { | ||||
|     var i; | ||||
|     for(i=0; i<inIterations; i++) | ||||
|     { | ||||
|         NN.Network.Observe(inNetwork, inTrainingSet.Input); | ||||
|         inNetwork.Error = NN.Network.Error(inNetwork, inTrainingSet.Output); | ||||
|         NN.Network.Learn(inNetwork, inNetwork.Error); | ||||
|     } | ||||
| }; | ||||
| NN.Network.Stochastic = function(inNetwork, inTrainingSet, inIterations) | ||||
| { | ||||
|     var i, j; | ||||
|     var current; | ||||
|      | ||||
|     for(i=0; i<inIterations; i++) | ||||
|     { | ||||
|         NN.TrainingSet.Randomize(inTrainingSet); | ||||
|         for(j=0; j<inTrainingSet.Order.length; j++) | ||||
|         { | ||||
|             current = inTrainingSet.Order[j]; | ||||
|             NN.Network.Observe(inNetwork, [inTrainingSet.Input[current]]); | ||||
|             inNetwork.Error = NN.Network.Error(inNetwork, [inTrainingSet.Output[current]]); | ||||
|             NN.Network.Learn(inNetwork, inNetwork.Error); | ||||
|         } | ||||
|     } | ||||
| }; | ||||