Source code: AI/NeuralNetworks/FeedForwardWeightedLayer.java


/* FeedForwardWeightedLayer.java */

package AI.NeuralNetworks;

import java.util.*;

/**
  This class represents the hidden and output layers of a multilayer feed-forward neural network.
*/
public class FeedForwardWeightedLayer extends FeedForwardLayer {
  protected float[][] weights;
  // width of the uniform interval from which the initial weights are drawn
  private static final float SEED_WIDTH = 0.2f;

  /**
    Creates a new instance of FeedForwardWeightedLayer with randomly seeded weights.
    @param size the size of this layer, not counting the bias
    @param previousSize the size of the previous layer, not counting the bias
  */
  public FeedForwardWeightedLayer(int size, int previousSize){
    activation = new float[size+1];
    activation[0] = 1; //sets the bias
    Random rand = new Random();

    // one row per cell, one column per previous-layer cell plus the bias;
    // each weight is seeded uniformly in [-SEED_WIDTH/2, SEED_WIDTH/2)
    weights = new float[size][previousSize+1];
    for (int row=0; row<size; row++){
      for (int col=0; col<previousSize+1; col++){
        weights[row][col] = rand.nextFloat() * SEED_WIDTH - SEED_WIDTH/2;
      }
    }
  }

  /**
    Creates a new instance of FeedForwardWeightedLayer as a deep copy of an existing layer.
    @param layer the FeedForwardWeightedLayer from which the new layer is created
  */
  public FeedForwardWeightedLayer(FeedForwardWeightedLayer layer){
    activation = new float[layer.activation.length];
    System.arraycopy(layer.activation, 0, activation, 0, layer.activation.length);

    weights = new float[layer.weights.length][layer.weights[0].length];
    for (int row=0; row<layer.weights.length; row++){
      for (int col=0; col<layer.weights[row].length; col++){
        weights[row][col] = layer.weights[row][col];
      }
    }
  }

  /**
    Creates a new instance of FeedForwardWeightedLayer from an existing weight matrix.
    The matrix is used directly, not copied.
    @param weights the weights of this layer, one row per cell
  */
  public FeedForwardWeightedLayer(float[][] weights){
    activation = new float[weights.length+1];
    activation[0] = 1; //sets the bias

    this.weights = weights;
  }

  /**
    Calculates the new activation values of this layer based on the previous layer.
    @param previousLayer the previous layer
  */
  public void runLayer(FeedForwardLayer previousLayer){
    float[] previousActivation;
    float sum;

/*debug*///AI.Test.TestBP.printMatrix(weights,"\t\t\tweights = ");
    previousActivation = previousLayer.getActivation();
    for(int cell=1; cell<activation.length; cell++){
      sum = 0;
      for(int prev=0; prev<weights[cell-1].length; prev++){
        sum += previousActivation[prev] * weights[cell-1][prev];
      }
      activation[cell] = sigmoid(sum);
    }
/*debug*///AI.Test.TestBP.printArray(activation,"\t\t\tactivation =");
  }
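
  /*
    For reference, runLayer computes, for each cell i = 1..size:

      activation[i] = sigmoid( sum over j of weights[i-1][j] * previousActivation[j] )

    where j runs over the previous layer's cells including index 0, the bias
    unit that this class fixes at 1, so weights[i-1][0] acts as the bias weight.
  */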

  /**
    Returns the value of the sigmoid function 1/(1+exp(-x)).
  */
  protected static float sigmoid(float x){
    return (float) (1/(1 + Math.exp(-x)));
  }

  /**
    Returns the weight matrix of this layer.
  */
  protected float[][] getWeights(){
    return weights;
  }

  /**
    Returns a String representation of the layer.
  */
  public String toString(){
    String s = "Weighted Layer\t";

    s += super.toString();
    s += "\n";
    for (int row=0; row<weights.length; row++){
      for (int col=0; col<weights[row].length; col++){
        s += weights[row][col] + "\t";
      }
      s += "\n";
    }
    return s;
  }

}
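
Below is a minimal usage sketch, not part of the original file, showing a forward pass through a small 2-2-1 network built from explicit weight matrices. It assumes that FeedForwardLayer, which is not shown on this page, can be constructed from an input-activation array whose index 0 is the bias unit; that constructor is a hypothetical stand-in for the real input-layer API.

import AI.NeuralNetworks.*;

public class ForwardPassSketch {
  public static void main(String[] args) {
    // Hidden layer: 2 cells, each with a bias weight plus 2 input weights (3 columns per row).
    float[][] hiddenWeights = {
      {  0.1f, 0.4f, -0.3f },
      { -0.2f, 0.5f,  0.6f }
    };
    // Output layer: 1 cell with a bias weight plus 2 hidden weights.
    float[][] outputWeights = {
      { 0.05f, 0.7f, -0.4f }
    };

    // Hypothetical input-layer constructor: index 0 is the bias unit, then the two inputs.
    FeedForwardLayer input = new FeedForwardLayer(new float[]{ 1f, 0.5f, -0.5f });

    FeedForwardWeightedLayer hidden = new FeedForwardWeightedLayer(hiddenWeights);
    FeedForwardWeightedLayer output = new FeedForwardWeightedLayer(outputWeights);

    hidden.runLayer(input);   // hidden activations from the inputs
    output.runLayer(hidden);  // output activation from the hidden layer

    System.out.println(output); // prints the layer via its toString()
  }
}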