Docjar: A Java Source and Document Engine — com.*    java.*    javax.*    org.*    all    new    plug-in

Quick Search    Search Deep

Source code: AI/NeuralNetworks/FeedForwardNetwork.java


1   /* FeedForwardNetwork.java */
2   
3   package AI.NeuralNetworks;
4   
5   import java.util.*;
6   import java.io.*;
7   
8   /** 
9     This Class is a nerual network with a multiple layer feed forward architecture
10  */
11  public class FeedForwardNetwork {
12  //  private Vector layers;
13    protected LinkedList layers;
14    protected static final String COMMENT_TOKEN = "#";
15    protected static final String NUMBER_OF_LAYERS_TOKEN = "Number_of_layers:";
16    protected static final String INPUT_LAYER_TOKEN = "Input_layer:";
17    protected static final String WEIGHTED_LAYER_TOKEN = "Weighted_layer:";
18    protected static final String WEIGHTS_TOKEN = "Weights:";
19    protected static final String VERSION_TOKEN = "Version:";
20    protected static final String VERSION = "1.0";  
21    protected static final String HEXADECIMAL_VERSION = VERSION +" Hexadecimal";  
22    /**
23      Creates a new FeedForwardNetwork
24      This constructor should be only used by sublcases
25    */
26    protected FeedForwardNetwork(){
27    }
28    
29    /**
30      Creates a new FeedForwardNetwork
31      @param structure an Arrray of int that sepecifies the structure of the Network
32      the length of the Aarray is the amount of layers of the network and each value
33      is the size of the layer in that position
34    */
35    public FeedForwardNetwork(int[] structure){
36  //    layers = new Vector(structure.length, 1);
37      layers = new LinkedList();
38      layers.add(new FeedForwardInputLayer(structure[0]));
39      for(int l=1; l<structure.length; l++){
40        layers.add(new FeedForwardWeightedLayer(structure[l],structure[l-1]));
41      }
42    }
43  
44    /**
45      Creates a new FeedForwardNetwork form the specified file
46      @param fileName is the name of the file where the neural network is saved
47    */
48    public FeedForwardNetwork(String fileName)throws IOException{
49      this(new BufferedReader( new FileReader( fileName) ) );
50  /*    BufferedReader input;
51      int numberOfLayers, layerSize, previousLayerSize;
52      StringTokenizer line;
53      float weights[][];
54      
55      layers = new LinkedList();
56       
57      input = new BufferedReader( new FileReader( fileName) );
58      
59      //check saved file version 
60      line = getNextStringTokenizer(input, VERSION_TOKEN);
61      System.out.println("\t\tVersion:\t" + line.nextToken());
62      
63      line = getNextStringTokenizer(input, NUMBER_OF_LAYERS_TOKEN);
64      numberOfLayers = Integer.valueOf(line.nextToken()).intValue();
65      
66      line = getNextStringTokenizer(input, INPUT_LAYER_TOKEN);
67      layerSize = Integer.valueOf(line.nextToken()).intValue();
68      layers.add(new FeedForwardInputLayer(layerSize));
69      for(int l=1; l<numberOfLayers; l++){
70        previousLayerSize = layerSize;
71        line = getNextStringTokenizer(input, WEIGHTED_LAYER_TOKEN);
72        layerSize = Integer.valueOf(line.nextToken()).intValue();
73        weights = createWeights(input, layerSize, previousLayerSize);
74        layers.add(new FeedForwardWeightedLayer(weights));
75      }
76  */
77    }
78    
79    /**
80      Creates a new FeedForwardNetwork form the specified BufferedReader
81      @param input is the BufferedReader from where the net will be created
82    */
83    public FeedForwardNetwork(BufferedReader input)throws IOException, NumberFormatException{
84      int numberOfLayers, layerSize, previousLayerSize;
85      StringTokenizer line;
86      float weights[][];
87          
88      //check saved file version 
89      line = getNextStringTokenizer(input, VERSION_TOKEN);
90      String version = line.nextToken();
91  //    System.out.println("\t\tVersion:\t" + line.nextToken());
92  
93      layers = new LinkedList();
94    
95      line = getNextStringTokenizer(input, NUMBER_OF_LAYERS_TOKEN);
96      numberOfLayers = Integer.valueOf(line.nextToken()).intValue();
97      
98      line = getNextStringTokenizer(input, INPUT_LAYER_TOKEN);
99      layerSize = Integer.valueOf(line.nextToken()).intValue();
100     layers.add(new FeedForwardInputLayer(layerSize));
101     for(int l=1; l<numberOfLayers; l++){
102       previousLayerSize = layerSize;
103       line = getNextStringTokenizer(input, WEIGHTED_LAYER_TOKEN);
104       layerSize = Integer.valueOf(line.nextToken()).intValue();
105       weights = createWeights(input, layerSize, previousLayerSize);
106       layers.add(new FeedForwardWeightedLayer(weights));
107     }
108   }
109   
110   /**
111     Creates a new FeedForwardNetwork form the specified BufferedReader
112     @param input is the BufferedReader from where the net will be created
113   */
114   public FeedForwardNetwork(int[] netIntArray, int chanchada)throws IOException, NumberFormatException{
115     int numberOfLayers, layerSize, previousLayerSize, pos;
116     StringTokenizer line;
117     float weights[][];
118         
119     layers = new LinkedList();
120     pos = 0;
121   
122     numberOfLayers = netIntArray[pos++];
123     
124     layerSize = netIntArray[pos++];
125     layers.add(new FeedForwardInputLayer(layerSize));
126     for(int l=1; l<numberOfLayers; l++){
127       previousLayerSize = layerSize;
128       layerSize = netIntArray[pos++];
129       weights = new float[layerSize][previousLayerSize+1];
130       try{
131         for(int c = 0; c<layerSize; c++){
132           for(int r = 0; r<previousLayerSize+1; r++){
133             weights[c][r] = Float.intBitsToFloat(netIntArray[pos++]);
134           }
135         }
136 //          return weights;
137       } catch (NoSuchElementException nsee) {
138         //throw new CoruptedFileException("This file is not a Feed Forewar Nerual Network file");
139         throw nsee;
140       }
141 //      weights = createWeights(netIntArray, pos, layerSize, previousLayerSize);
142       layers.add(new FeedForwardWeightedLayer(weights));
143     }
144   }
145 
146   
147   /**
148     Runs the network with the inputVector and returns the outputVector
149     @param inputVector Vector of  @see Number
150     @returns a Vector of @see Float
151   */
152 /*  public Vector runVector(Vector inputVector){
153     return array2Vector(runVector(vector2Array(inputVector));
154   }
155 */
156   
157   /**
158     Runs the network with the inputVector and returns the outputVector
159     @param inputVector an array of float with 
160     @returns a Vector of @see Float
161   */
162   public float[] runVector(float[] inputVector) throws UnexpectedInputArraySizeException{
163     ListIterator currentLayer;
164     ListIterator previousLayer;
165     
166 /*debug*///AI.Test.TestBP.printArray(inputVector,"inputVector = ");
167     ((FeedForwardInputLayer) layers.getFirst()).setInput(inputVector);
168 /*debug*///AI.Test.TestBP.printArray(((FeedForwardInputLayer) layers.getFirst()).getActivation(),"\tsetInput = ");
169     currentLayer = layers.listIterator(1);
170     previousLayer = layers.listIterator(0);
171     while (currentLayer.hasNext()){
172       ((FeedForwardWeightedLayer) currentLayer.next()).runLayer((FeedForwardLayer) previousLayer.next());
173     }
174     return ((FeedForwardLayer) layers.getLast()).getActivation();
175   }
176 
177   
178   
179   /** Saves the neural network
180     @param fileName the name of the file with an absolute path
181   */
182   public void saveToFile(String fileName) throws IOException {
183 //    ListIterator layer;
184     BufferedWriter output;
185 //    FeedForwardInputLayer firstLayer;
186 //    FeedForwardWeightedLayer actualLayer;
187 //    float[][] weights;
188     
189     output = new BufferedWriter(new FileWriter(fileName));
190                 save(output);
191 /*    output.write(COMMENT_TOKEN + "\t" + fileName );
192     output.newLine();
193     output.write(COMMENT_TOKEN + "\tthis is a beautifull nerual net! " );
194     output.newLine();
195     
196     output.write(VERSION_TOKEN + "\t" + VERSION );
197     output.newLine();
198         
199     output.write(NUMBER_OF_LAYERS_TOKEN + "\t" + layers.size() );
200     output.newLine();
201 
202     layer = layers.listIterator(0);
203     
204     //the input layer has no weights
205     firstLayer = (FeedForwardInputLayer) layer.next();
206     output.write(INPUT_LAYER_TOKEN + "\t" + (firstLayer.size()-1) );
207     output.newLine();
208     
209     //all the other layers has weights
210     while(layer.hasNext()){
211       actualLayer = (FeedForwardWeightedLayer) layer.next();
212       output.write(WEIGHTED_LAYER_TOKEN + "\t" + (actualLayer.size()-1) );
213       output.newLine();
214       weights = actualLayer.getWeights();
215       for (int c = 0; c<weights.length ; c++){
216         output.write(WEIGHTS_TOKEN + "\t" );
217         for (int r = 0; r<weights[c].length ; r++){
218           output.write("\t" + weights[c][r]);
219         }
220       output.newLine();
221       }
222     }
223     output.write(COMMENT_TOKEN + "\thasta aca llega la red" );
224     output.newLine();
225 */    
226     output.close();
227 
228   }
229 
230   /** Saves the neural network
231     @param fileName the name of the file with an absolute path
232   */
233   public void save(BufferedWriter output) throws IOException {
234     ListIterator layer;
235 //    BufferedWriter output;
236     FeedForwardInputLayer firstLayer;
237     FeedForwardWeightedLayer actualLayer;
238     float[][] weights;
239     
240 //    output = new BufferedWriter(new FileWriter(fileName));
241 //    output.write(COMMENT_TOKEN + "\t" + fileName );
242 //    output.newLine();
243     output.write(COMMENT_TOKEN + "\tthis is a beautifull nerual net! " );
244     output.newLine();
245     
246     output.write(VERSION_TOKEN + "\t" + VERSION );
247     output.newLine();
248         
249     output.write(NUMBER_OF_LAYERS_TOKEN + "\t" + layers.size() );
250     output.newLine();
251 
252     layer = layers.listIterator(0);
253     
254     //the input layer has no weights
255     firstLayer = (FeedForwardInputLayer) layer.next();
256     output.write(INPUT_LAYER_TOKEN + "\t" + (firstLayer.size()-1) );
257     output.newLine();
258     
259     //all the other layers has weights
260     while(layer.hasNext()){
261       actualLayer = (FeedForwardWeightedLayer) layer.next();
262       output.write(WEIGHTED_LAYER_TOKEN + "\t" + (actualLayer.size()-1) );
263       output.newLine();
264       weights = actualLayer.getWeights();
265       for (int c = 0; c<weights.length ; c++){
266         output.write(WEIGHTS_TOKEN + "\t" );
267         for (int r = 0; r<weights[c].length ; r++){
268           output.write("\t" + weights[c][r]);
269         }
270       output.newLine();
271       }
272     }
273     output.write(COMMENT_TOKEN + "\thasta aca llega la red" );
274     output.newLine();
275     
276 //    output.close();
277 
278   }
279   
280   /**
281     retrurns a String representation  of the net
282   */
283   public String toString(){
284     String s = "";
285     ListIterator layer;
286     FeedForwardInputLayer firstLayer;
287     FeedForwardWeightedLayer actualLayer;
288     float[][] weights;
289     
290     s += COMMENT_TOKEN + "\tthis is a beautifull nerual net! ";
291     s += "\n";
292     
293     s += VERSION_TOKEN + "\t" + VERSION;
294     s += "\n";
295         
296     s += NUMBER_OF_LAYERS_TOKEN + "\t" + layers.size();
297     s += "\n";
298     
299     layer = layers.listIterator(0);
300     
301     //the input layer has no weights
302     firstLayer = (FeedForwardInputLayer) layer.next();
303     s += INPUT_LAYER_TOKEN + "\t" + (firstLayer.size()-1) ;
304     s += "\n";
305     
306     //all the other layers has weights
307     while(layer.hasNext()){
308       actualLayer = (FeedForwardWeightedLayer) layer.next();
309       s += WEIGHTED_LAYER_TOKEN + "\t" + (actualLayer.size()-1) ;
310       s += "\n";
311       weights = actualLayer.getWeights();
312       for (int c = 0; c<weights.length ; c++){
313         s += WEIGHTS_TOKEN + "\t" ;
314         for (int r = 0; r<weights[c].length ; r++){
315           s += "\t" + weights[c][r];
316         }
317       s += "\n";
318       }
319     }    
320     
321 
322 /*    ListIterator currentLayer;
323     
324     s += currentLayer = layers.listIterator(0);
325     while (currentLayer.hasNext()){
326       s += ((FeedForwardLayer) currentLayer.next()).toString() + "\n";
327     }
328 */    return s;
329   }
330   
331   /**
332     returns the number of inputs of the net without counting the bias
333   */
334   public int inputSize(){
335     return ((FeedForwardLayer)layers.getFirst()).size()-1;
336   }
337 
338   /**
339     returns the number of outputs of the net without counting the bias
340   */
341   public int outputSize(){
342     return ((FeedForwardLayer)layers.getLast()).size()-1;
343   }
344   
345   private static StringTokenizer getNextStringTokenizer(BufferedReader input, String token) throws IOException, NoSuchElementException{
346     StringTokenizer line;
347     
348     try {
349       line = new StringTokenizer(input.readLine());
350        while(!line.hasMoreTokens() || !line.nextToken().equals(token)){
351         line = new StringTokenizer(input.readLine());
352       }
353        return line;
354      } catch (NoSuchElementException nsee) {
355       //throw new CoruptedFileException("This file is not a Feed Forewar Nerual Network file");
356       throw nsee;
357     }
358   }
359 
360   private static float[][] createWeights(BufferedReader input, int layerSize, int previousLayerSize) throws IOException, NoSuchElementException{
361     float[][] weights = new float[layerSize][previousLayerSize+1];
362     StringTokenizer line;
363     
364     try{
365       for(int c = 0; c<layerSize; c++){
366          line = getNextStringTokenizer(input, WEIGHTS_TOKEN);
367         for(int r = 0; r<previousLayerSize+1; r++){
368            weights[c][r] = Float.valueOf(line.nextToken()).floatValue();
369          }
370       }
371       return weights;
372     } catch (NoSuchElementException nsee) {
373       //throw new CoruptedFileException("This file is not a Feed Forewar Nerual Network file");
374       throw nsee;
375     }
376   }
377 
378 /*  private static float[][] createWeights(int[] input, int pos, int layerSize, int previousLayerSize) throws IOException, NoSuchElementException{
379     float[][] weights = new float[layerSize][previousLayerSize+1];
380 System.out.println("\t\tcw" + pos +" "+ input.length);//debug    
381     try{
382       for(int c = 0; c<layerSize; c++){
383         for(int r = 0; r<previousLayerSize+1; r++){
384            weights[c][r] = Float.intBitsToFloat(input[pos++]);
385          }
386       }
387       return weights;
388     } catch (NoSuchElementException nsee) {
389       //throw new CoruptedFileException("This file is not a Feed Forewar Nerual Network file");
390       throw nsee;
391     }
392   }
393 */
394   
395   /**
396     returns a int[] which represents a netework the weightValues are converted
397     to int with the Float.floatToIntBits() method.
398   */
399   public int[] netToIntArrey(){
400     ListIterator layer;
401     FeedForwardInputLayer firstLayer;
402     FeedForwardWeightedLayer actualLayer;
403     float[][] weights;
404     int[] intArray = new int[200];
405     int pos = 0;
406     
407     intArray = addIntToArray(intArray,layers.size(), pos++);
408     
409     layer = layers.listIterator(0);
410     
411     //the input layer has no weights
412     firstLayer = (FeedForwardInputLayer) layer.next();
413     intArray = addIntToArray(intArray,(firstLayer.size()-1), pos++);
414     
415     //all the other layers has weights
416     while(layer.hasNext()){
417       actualLayer = (FeedForwardWeightedLayer) layer.next();
418       intArray = addIntToArray(intArray,(actualLayer.size()-1), pos++) ;
419       weights = actualLayer.getWeights();
420       for (int c = 0; c<weights.length ; c++){
421         for (int r = 0; r<weights[c].length ; r++){
422           intArray = addIntToArray(intArray, Float.floatToIntBits(weights[c][r]), pos++);
423         }
424       }
425     }
426     int[] newArray = new int[pos];
427     System.arraycopy(intArray, 0, newArray, 0, pos);
428     return newArray;
429   }
430   
431   private static int[] addIntToArray(int[] array, int element, int pos){
432     int[] newArray;
433     int STEP = 10;
434     
435     if (pos>=array.length){
436       newArray = new int[array.length + STEP];
437       
438       System.arraycopy(array, 0, newArray, 0, array.length);
439       array = newArray;
440     }
441     array[pos] = element;
442     return array;
443   }
444   
445 }
446 
447