network #1

nn.test.js (63 changed lines)

@@ -1,14 +1,61 @@
 import { assert, assertEquals } from "https://deno.land/std@0.102.0/testing/asserts.ts";
-import * as NN from "./nn.ts";
+import { Label, Forward, Backward } from "./nn.ts";
+import { default as M } from "./m.ts";
 
-Deno.test("NN.Observe", ()=>
+const input = [
+    [ 0.1,  0.05],
+    [ 0.0, -0.06],
+    [ 0.99, 0.85],
+    [ 1.2,  1.05]
+];
+
+const training = [];
+const stages = [];
+const layers = [];
+
+Deno.test("NN.Label", ()=>
 {
-    console.log(NN.Observe([[[1, 2, 3]]], [[[0.4, 0.5, 0.6]]]));
+    Label(training,
+    [
+        [ 0.1,  0.05],
+        [ 0.0, -0.06]
+    ],
+    [1]);
+    Label(training,
+    [
+        [ 0.99, 0.85],
+        [ 1.2,  1.05]
+    ],
+    [0]);
+    stages.push(training[0]);
+    console.log(training);
+    assertEquals(training.length, 2, "input and output sets created");
+    assertEquals(training[0].length, training[1].length, "both sets have same length");
+    assertEquals(training[0][0].length, 3, "padded input component");
+    assertEquals(training[1][0].length, 1, "unchanged label vector");
 });
-Deno.test("NN.Train", ()=>
+
+Deno.test("NN.Backward", ()=>
 {
-    const stages = [[[1, 2, 3]]];
-    const layers = [[[0.4, 0.5, 0.6]]];
-
-    console.log(NN.Train(stages, layers, [[0, 0, 1]], 0.1));
+    layers.push(M.Create.Box([0, 0, 0], [1, 1, 1], 4));
+    layers.push(M.Create.Box([0, 0, 0, 0, 0], [1, 1, 1, 1, 1], 1));
+
+    let copy1 = M.Create.Clone(layers[0]);
+    let copy2 = M.Create.Clone(layers[1]);
+
+    for(let i=0; i<1000; i++)
+    {
+        Backward(stages, layers, training[1], 0.1);
+    }
+
+    assert(layers[0][0][0] != copy1[0][0][0], "first matrix has changed");
+    assert(layers[1][0][0] != copy2[0][0][0], "second matrix has changed");
 });
+
+Deno.test("NN.Forward", ()=>
+{
+    console.log(Forward(stages, layers));
+    console.log(training[1]);
+});
+
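Taken together, the three tests walk one full cycle: Label builds a padded input/label set, Backward runs repeated gradient steps against two randomly initialised layer matrices, and Forward is then expected to produce outputs close to the labels. A minimal end-to-end sketch of that flow follows; the shapes passed to M.Create.Box and the behaviour of the m.ts helpers are read off the tests above and are assumptions, not an API documented in this diff.

// Sketch only: mirrors the test flow above; m.ts helper semantics are assumed.
import { Label, Forward, Backward } from "./nn.ts";
import { default as M } from "./m.ts";

const training = [];                                   // becomes [inputs, labels] after Label()
Label(training, [[0.1, 0.05], [0.0, -0.06]], [1]);     // class-1 rows; Label appends a bias 1 to each
Label(training, [[0.99, 0.85], [1.2, 1.05]], [0]);     // class-0 rows

const stages = [training[0]];                          // stage 0 is the padded input batch
const layers = [
    M.Create.Box([0, 0, 0], [1, 1, 1], 4),             // assumed: 4 random rows in [0,1]^3
    M.Create.Box([0, 0, 0, 0, 0], [1, 1, 1, 1, 1], 1)  // assumed: 1 random row in [0,1]^5
];

for(let i = 0; i < 1000; i++)
{
    Backward(stages, layers, training[1], 0.1);        // one descent step at rate 0.1
}
console.log(Forward(stages, layers), training[1]);     // predictions vs. labels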

nn.ts (27 changed lines)

@@ -1,6 +1,22 @@
 import { default as M, Cloud } from "./m.ts";
+export type N = Array<Array<Array<number>>>
 
-const Observe = (inStages:Array<Cloud.M>, inLayers:Array<Cloud.M>):Cloud.M =>
+const Label = (inSet:any, inData:Cloud.M, inLabel:Cloud.V):N =>
+{
+    if(!inSet){inSet = [[], []];}
+    if(inSet.length == 0){inSet.push([]);}
+    if(inSet.length == 1){inSet.push([]);}
+
+    inData.forEach((row:Cloud.V) =>
+    {
+        row.push(1);
+        inSet[0].push(row);
+        inSet[1].push(inLabel);
+    });
+    return inSet;
+};
+
+const Forward = (inStages:N, inLayers:N):Cloud.M =>
 {
     let i:number;
     let process = (index:number):Cloud.M => M.Batch.Sigmoid(M.Batch.Affine(inStages[index], inLayers[index]));
@@ -12,16 +28,17 @@ const Observe = (inStages:Array<Cloud.M>, inLayers:Array<Cloud.M>):Cloud.M =>
     inStages[i+1] = process(i);
     return inStages[i+1];
 };
-const Train = (inStages:Array<Cloud.M>, inLayers:Array<Cloud.M>, inGoals:Cloud.M, inRate:number):void =>
+const Backward = (inStages:N, inLayers:N, inGoals:Cloud.M, inRate:number):void =>
 {
     let i:number;
-    let errorBack:Cloud.M = M.Batch.Subtract(Observe(inStages, inLayers), inGoals);
+    let errorBack:Cloud.M = M.Batch.Subtract(Forward(inStages, inLayers), inGoals);
 
-    for(i=inLayers.length-1; i>=0; i++)
+    for(i=inLayers.length-1; i>=0; i--)
     {
         let layerMatrix:Cloud.M = inLayers[i];
         let  layerInput:Cloud.M = inStages[i];
         let layerOutput:Cloud.M = inStages[i+1];
+
         let errorScaled:Cloud.M = M.Batch.Multiply(errorBack, M.Batch.Derivative(layerOutput));
 
         errorBack = M.Batch.Affine(errorScaled, M.Create.Transpose(layerMatrix));
@@ -32,5 +49,5 @@ const Train = (inStages:Array<Cloud.M>, inLayers:Array<Cloud.M>, inGoals:Cloud.M, inRate:number):void =>
     }
 };
 
-export { Observe, Train };
+export { Label, Forward, Backward };
 export type { Cloud };
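For readers following the renamed Backward: the loop is the standard sigmoid backpropagation recurrence. The error at the output is the prediction minus the goals; at each layer it is scaled element-wise by the sigmoid derivative of that layer's output and then pushed back through the transpose of the layer matrix. The sketch below spells that recurrence out with plain arrays in the conventional input-times-weights layout; it is not the m.ts API, and the weight-update step (which sits in the lines between the hunks above) is an assumption about what that hidden code does.

// Standalone illustration of one per-layer step of the recurrence in Backward.
// Conventions here (layerMatrix is rows-in x cols-out, update = inputT · errorScaled)
// are assumptions; m.ts may lay its matrices out differently.
type Mat = number[][];

const Transpose = (m:Mat):Mat => m[0].map((_, j) => m.map(row => row[j]));

const MatMul = (a:Mat, b:Mat):Mat =>
    a.map(row => b[0].map((_, j) => row.reduce((sum, v, k) => sum + v*b[k][j], 0)));

// For a sigmoid output s, ds/dx = s*(1-s), applied element-wise.
const Derivative = (m:Mat):Mat => m.map(row => row.map(s => s*(1-s)));

const Hadamard = (a:Mat, b:Mat):Mat => a.map((row, i) => row.map((v, j) => v*b[i][j]));

const BackwardStep = (errorBack:Mat, layerInput:Mat, layerOutput:Mat, layerMatrix:Mat, rate:number):Mat =>
{
    const errorScaled = Hadamard(errorBack, Derivative(layerOutput));  // error ⊙ sigmoid'(output)
    const nextError   = MatMul(errorScaled, Transpose(layerMatrix));   // error for the previous layer

    // Assumed update: W -= rate · inputT · errorScaled (the actual update line is not shown in this diff)
    const gradient = MatMul(Transpose(layerInput), errorScaled);
    gradient.forEach((row, i) => row.forEach((g, j) => { layerMatrix[i][j] -= rate*g; }));

    return nextError;
};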