layer batch training ok

This commit is contained in:
    commit 1e644a70dc
    parent 23e71542f2

 index.html | 36 +++++++++++++++++++-----------------
 nn.test.js | 65 ++++++++++++++++++++++++++++++++++++++--------------------------
 nn.ts      |  3 ---
 3 files changed, 58 insertions(+), 46 deletions(-)

--- a/index.html
+++ b/index.html
@@ -614,7 +614,7 @@ NN.Network.Create = function()
     var i;
 
     obj.Layers = [];
-    obj.LearningRate = 0.8;
+    obj.LearningRate = 0.1;
     obj.Error = [];
 
     for(i=0; i<arguments.length-1; i++)
@@ -688,31 +688,33 @@ NN.Network.Stochastic = function(inNetwork, inTrainingSet, inIterations)
     ];
 
     let matrix2 = [
-    [0.5793881115472015, 0.9732593374796092, 0.15207639877016987, -0.5356575655337803]
+    [0.7098703863463034, 0.35485944251238033, 0.7642849892333241, 0.03046174288491077],
+    [-0.30655426258144347, 0.45509633551425077, -0.5013795222004322, -0.3421292736637427]
     ];
 
-    let typeA = [
+    let input = [
     [ 0.1,  0.05],
-        [ 0.0, -0.06]
-    ];
-    let typeB = [
+    [ 0.0, -0.06],
     [ 0.99, 0.85],
     [ 1.2,  1.05]
     ];
-    let goals = [
-        [1, 1, 0],
-        [0, 0, 1]
+    let output = [
+    [1, 0],
+    [1, 0],
+    [0, 1],
+    [0, 1]
     ];
 
-    var layer1 = NN.Layer.Create(1, 1);
-    layer1.Forward.Matrix = matrix1;
+    let nn1 = NN.Network.Create(2, 3, 2);
+    nn1.Layers[0].Forward.Matrix = matrix1;
+    nn1.Layers[1].Forward.Matrix = matrix2;
 
-    let stage1 = NN.Layer.Forward(layer1, typeA);
-    let stage1Error = NN.Layer.Error(layer1, goals);
-    let stage1Back = NN.Layer.Backward(layer1, stage1Error);
+    let logLayers = inNN => inNN.Layers.forEach(L=>console.log(L.Forward.Matrix));
 
-    console.log("matrix before", layer1.Forward.Matrix);
-    NN.Layer.Adjust(layer1, 0.1);
-    console.log("matrix after", layer1.Forward.Matrix);
+    logLayers(nn1);
 
+    NN.Network.Batch(nn1, {Input:input, Output:output}, 100);
+
+    logLayers(nn1);
+
 </script>
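Note on the call above: `NN.Network.Batch` is used here but not defined anywhere in this diff. As rough orientation only, here is a self-contained sketch of the kind of full-batch loop it presumably performs, written with plain arrays rather than the repo's NN objects (every name below is illustrative, not taken from the codebase):

```javascript
// Illustrative full-batch training loop: one sigmoid layer, squared error.
// Not repo code -- it only mirrors the shape of the calls above.
const sigmoid = x => 1 / (1 + Math.exp(-x));

// out[r][c] = sigmoid(inputs row r dotted with weights column c)
const forward = (inputs, weights) =>
    inputs.map(row => weights[0].map((_, c) =>
        sigmoid(row.reduce((sum, v, k) => sum + v * weights[k][c], 0))));

// one pass over the whole training set, learning rate eta
const step = (inputs, weights, goals, eta) => {
    const out = forward(inputs, weights);
    out.forEach((row, r) => row.forEach((o, c) => {
        const delta = (o - goals[r][c]) * o * (1 - o); // dE/dnet, squared error
        inputs[r].forEach((x, k) => { weights[k][c] -= eta * delta * x; });
    }));
};

// usage, analogous to NN.Network.Batch(nn1, {Input:input, Output:output}, 100)
let W = [[0.1], [-0.2], [0.3]];                         // 3 inputs -> 1 output
const X = [[0, 0, 1], [0, 1, 1], [1, 0, 1], [1, 1, 1]]; // last column = bias
const Y = [[0], [0], [1], [1]];
for (let i = 0; i < 100; i++) step(X, W, Y, 0.1);
console.log(forward(X, W)); // rows drift toward Y
```

Unlike the per-sample updates in the `NN.Network.Stochastic` path named in the hunk header, every weight change here is computed from a single forward pass over the entire set.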
							
								
								
									
--- a/nn.test.js
+++ b/nn.test.js
@@ -26,48 +26,62 @@ Deno.test("check.forward", ()=>
     ];
 
     let matrix2 = [
-    [0.5793881115472015, 0.9732593374796092, 0.15207639877016987, -0.5356575655337803]
+    [0.7098703863463034, 0.35485944251238033, 0.7642849892333241, 0.03046174288491077],
+    [-0.30655426258144347, 0.45509633551425077, -0.5013795222004322, -0.3421292736637427]
     ];
 
-    let typeA = [
+    let input = [
     [ 0.1,  0.05],
-        [ 0.0, -0.06]
-    ];
-    let typeB = [
+    [ 0.0, -0.06],
     [ 0.99, 0.85],
     [ 1.2,  1.05]
     ];
-    let goals = [
-        [1, 1, 0],
-        [0, 0, 1]
+    let output = [
+    [1, 0],
+    [1, 0],
+    [0, 1],
+    [0, 1]
     ];
 
-    let layers = [matrix1];
-    let stages = Forward(Methods.Mutate.Pad(typeA), layers);
-    Backward(stages, layers, goals, 0.1);
+    let layers = [matrix1, matrix2];
+    console.log("BEFORE", layers);
+    for(let i=0; i<100; i++)
+    {
+        let stages = Forward(Methods.Mutate.Pad(input), layers);
+        Backward(stages, layers, output, 0.1);
+    }
+
+    console.log("AFTER", layers);
 });
 
+
 /*
 Deno.test("NN.Label", ()=>
 {
-    Label(training, typeA, [1]);
-    Label(training, typeB, [0]);
-    stages.push(training[0]);
-    console.log(training);
+    Label(training, typeA, [1, 0]);
+    Label(training, typeB, [0, 1]);
     assertEquals(training.length, 2, "input and output sets created");
     assertEquals(training[0].length, training[1].length, "both sets have same length");
     assertEquals(training[0][0].length, 3, "padded input component");
-    assertEquals(training[1][0].length, 1, "unchanged label vector");
+    assertEquals(training[1][0].length, 2, "unchanged label vector");
 });
 
+Deno.test("NN.Forward", ()=>
+{
+    let layer1 = M.Create.Box([-1, -1, -1], [1, 1, 1], 2);
+    let layer2 = M.Create.Box([-1, -1, -1], [1, 1, 1], 1);
+    layers.push(layer1);
+    layers.push(layer2);
+
+    console.log(training[0]);
+    stages = Forward(training[0], layers);
+    console.log(stages);
+});
+
 Deno.test("NN.Backward", ()=>
 {
-    let layer1 = M.Create.Box([-1, -1, -1], [1, 1, 1], 2);
-    let layer2 = M.Create.Box([-1, -1, -1], [1, 1, 1], 1);
-    let copy1 = M.Create.Clone(layer1);
-    let copy2 = M.Create.Clone(layer2);
-    layers.push(layer1);
-    layers.push(layer2);
+    let copy1 = M.Create.Clone(layers[0]);
+    let copy2 = M.Create.Clone(layers[1]);
 
     for(let i=0; i<100; i++)
     {
@@ -79,11 +93,10 @@ Deno.test("NN.Backward", ()=>
 });
 
 
-Deno.test("NN.Forward", ()=>
+Deno.test("NN.Label", ()=>
 {
-    console.log(Forward(stages, layers));
-    console.log(training[1]);
+    let stages = Forward(training[0], layers);
+    console.log(stages[stages.length-1]);
 });
 
-
 */
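Shape check for the rewritten check.forward fixture: the 4×2 `input` pads to 4×3 (the commented NN.Label test confirms `Pad` grows a 2-vector to 3 components), and the new two-row `matrix2` gives the final stage two components per sample, matching the two-column `output` and the `NN.Network.Create(2, 3, 2)` call over in index.html. A hypothetical companion test in the same style, assuming the fixtures are visible at this scope:

```javascript
// Hypothetical shape test -- not part of this commit. Assumes matrix1/matrix2
// are in scope, Pad appends one bias component per row, and Forward returns
// the network output as its last stage.
Deno.test("check.shapes", ()=>
{
    const input  = [[0.1, 0.05], [0.0, -0.06], [0.99, 0.85], [1.2, 1.05]];
    const output = [[1, 0], [1, 0], [0, 1], [0, 1]];
    const layers = [matrix1, matrix2];

    const padded = Methods.Mutate.Pad(input);        // 4x2 -> 4x3
    assertEquals(padded[0].length, 3, "bias component appended");

    const stages = Forward(padded, layers);
    const last = stages[stages.length-1];            // network output
    assertEquals(last.length, output.length, "one row per sample");
    assertEquals(last[0].length, output[0].length, "two components per label");
});
```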
							
								
								
									
--- a/nn.ts
+++ b/nn.ts
@@ -41,15 +41,12 @@ const Backward = (inStages:N, inLayers:N, inGoals:Cloud.M, inRate:number):N =>
         let errorScaled:Cloud.M = M.Batch.Multiply(errorBack, M.Batch.Derivative(layerOutput));
 
         errorBack = M.Batch.Affine(errorScaled, M.Create.Transpose(inLayers[i]));
-        console.log("matrix before:", inLayers[i]);
 
         errorScaled.forEach((inScaledError:Cloud.V, inIndex:number)=> {
             const deltas = M.Batch.Scale(M.Create.Outer(layerInput[inIndex], inScaledError), inRate);
             inLayers[i] = M.Batch.Subtract(inLayers[i], deltas);
         });
 
-        console.log("matrix after:", inLayers[i]);
-
     }
     return inLayers;
 };
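The update this hunk leaves in place is the usual delta rule: each sample subtracts rate × Outer(layer input, scaled error) from the layer's matrix, i.e. W ← W − η(x ⊗ δ). A standalone illustration with plain arrays (assuming `M.Create.Outer(v, w)[i][j] = v[i]*w[j]`, which is what the `Subtract` above requires; the numbers are made up):

```javascript
// Standalone delta-rule step for one sample -- illustrative only.
const outer    = (v, w) => v.map(vi => w.map(wj => vi * wj)); // v[i]*w[j]
const scale    = (m, s) => m.map(row => row.map(x => x * s));
const subtract = (a, b) => a.map((row, i) => row.map((x, j) => x - b[i][j]));

let layer = [[0.5, -0.2], [0.1, 0.4], [-0.3, 0.2]]; // 3 inputs -> 2 outputs
const layerInput  = [1.0, 0.5, -1.0];               // one sample (with bias)
const errorScaled = [0.08, -0.02];                  // error * derivative

const deltas = scale(outer(layerInput, errorScaled), 0.1); // rate = 0.1
layer = subtract(layer, deltas);                    // 3x2 minus 3x2
console.log(layer);
```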