HW4 Solutions Page

Return to homework page.

For Hw4 I took the sample set I provided you, removed the last three images of each type from the training samples, and added them to the test set. I then tested each of the 16 submitted programs. The two best programs each got 5 out of 9 correct. Here is what one of them looked like:
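
For reference, the submission below reads its training images from the current directory as h1.bmp, h2.bmp, ... (happy), s1.bmp, ... (sad), and a1.bmp, ... (amazed), reads its test images as test1.bmp, test2.bmp, ..., and prints one of "happy", "sad", or "amazed" per test image. A throwaway sketch of how such a run could be scored (the class name and the expected labels below are placeholders, not the actual test answers):

// Hypothetical helper, not part of any submission: tally how many labels
// printed by Hw4 (passed here as command-line arguments, in test-image
// order) match a placeholder answer key.
public class ScoreHw4
{
  public static void main(String[] args)
  {
    String[] expected = {"happy", "happy", "happy",
                         "sad", "sad", "sad",
                         "amazed", "amazed", "amazed"};
    int correct = 0;
    for (int i = 0; i < expected.length && i < args.length; i++)
      if (expected[i].equals(args[i]))
        correct++;
    System.out.println(correct + " out of " + expected.length + " correct");
  }
}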

import java.util.*;
import java.io.*;

public class Hw4
{
  /**
     Reads a single bitmap picture from a file into memory.
     @param strInputBmpFileName name of the "BMP" file to read
            (monochrome, 100 by 100 pixels only)
     @return the Vector of Bitsets. Each Bitset is a row
             of the monochrome picture.
  */
  public static Vector readImage(String strInputBmpFileName)
    throws IOException, FileNotFoundException
  {
    DataInputStream dis =
      new DataInputStream(new FileInputStream(strInputBmpFileName));

    /* FILE HEADER */
    if(dis.readByte() != 66 || dis.readByte() != 77)
      throw new IOException("Not a .BMP file");

    int iFileSzBt = intelToJavaInt(dis.readInt());
    //System.out.println("File Size in Bytes is : " + iFileSzBt);
    if(iFileSzBt != 1662)
      throw new IOException("Invalid file size");

    int iReserved1 = intelToJavaShort(dis.readUnsignedShort());
    //System.out.println("Reserved #1 : " + iReserved1);

    int iReserved2 = intelToJavaShort(dis.readUnsignedShort());
    //System.out.println("Reserved #2 : " + iReserved2);

    int iBitmapOffsetBt = intelToJavaInt(dis.readInt());
    //System.out.println("Size of Bitmap Offset is : " + iBitmapOffsetBt);
    if(iBitmapOffsetBt  != 62)
      throw new IOException("Invalid bitmap offset");

    /* INFO HEADER */
    int iBitmapInfoHdrSzBt = intelToJavaInt(dis.readInt());
    //System.out.println("Bitmap Info Header Size is : " +
iBitmapInfoHdrSzBt);
    if(iBitmapInfoHdrSzBt != 40)
      throw new IOException("Invalid bitmap info header size");

    int iBitmapWidthPxl = intelToJavaInt(dis.readInt());
    //System.out.println("Bitmap Width In Pixels is : " +
iBitmapWidthPxl);
    if(iBitmapWidthPxl != 100)
      throw new IOException("Invalid bitmap width in pixels");

    int iBitmapHeightPxl = intelToJavaInt(dis.readInt());
    //System.out.println("Bitmap Height In Pixels is : " +
iBitmapHeightPxl);
    if(iBitmapHeightPxl != 100)
      throw new IOException("Invalid bitmap height in pixels");

    int iNumOfBitplanes = intelToJavaShort(dis.readUnsignedShort());
    //System.out.println("Number of Bitplanes is : " + iNumOfBitplanes);
    if(iNumOfBitplanes != 1)
      throw new IOException("Invalid number of bitplanes");

    int iNumOfBitsPerPixel = intelToJavaShort(dis.readUnsignedShort());
    //System.out.println("Number of Bits per Pixel is : " +
iNumOfBitsPerPixel);
    if(iNumOfBitsPerPixel  != 1)
      throw new IOException("Invalid number of bits per pixel");

    int iCompressionType = intelToJavaInt(dis.readInt());
    //System.out.println("Compression Type is : " + iCompressionType);
    if(iCompressionType  != 0)
      throw new IOException("Invalid compression type");

    int iActualImageSzBt = intelToJavaInt(dis.readInt());
    //System.out.println("Actual Image Size in Bytes is : " +
iActualImageSzBt);
    if(iActualImageSzBt  != 1600)
      throw new IOException("Invalid actual image size");

    int iHorizDimenPxlsPerMeter = intelToJavaInt(dis.readInt());
    //System.out.println("Horizontal Dimension in Pixels per Meter is : "
+ iHorizDimenPxlsPerMeter);

    int iVertDimenPxlsPerMeter = intelToJavaInt(dis.readInt());
    //System.out.println("Vertical Dimension in Pixels per Meter is : " +
iVertDimenPxlsPerMeter);

    int iNumOfColorsActuallyUsed = intelToJavaInt(dis.readInt());
    if (iNumOfColorsActuallyUsed == 0)
      iNumOfColorsActuallyUsed = 1 << iNumOfBitsPerPixel;
    //System.out.println("Number of Used Colors is : " +
iNumOfColorsActuallyUsed);
    if(iNumOfColorsActuallyUsed  != 2)
      throw new IOException("Invalid number of actually used colors");

    int iNumOfColorsImportant = intelToJavaInt(dis.readInt());
    //System.out.println("Number of Important Colors is : " +
iNumOfColorsImportant);

    /* COLOR PALETTE */
    int aiColorTable[] = new int[iNumOfColorsActuallyUsed];
    //System.out.println("Palette:");
    for (int i=0; i < iNumOfColorsActuallyUsed; i++)
    {
      //aiColorTable[i] = (intelToJavaInt(dis.readInt()) & 0xffffff) + 0xff000000;
      aiColorTable[i] = intelToJavaInt(dis.readInt());
      //System.out.println(aiColorTable[i]);
    }
    if(aiColorTable[0] != 0 || aiColorTable[1] != 16777215)
      throw new IOException("Invalid color palette");

    /* IMAGE DATA (PIXELS) */
    Vector vImageRowsOfBitsets = new Vector(iBitmapHeightPxl);
    for (int i=0; i < iBitmapHeightPxl; i++)
    {
      BitSet bsImageCurrRow = new BitSet(iBitmapWidthPxl);
      int iNumOfBytesToRead = ((iBitmapWidthPxl / (4 * 8) + 1) * (4 * 8)) / 8;
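      // Each row of a 1-bit BMP is padded to a multiple of 4 bytes; for a
      // width of 100 pixels this works out to 16 bytes per row.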
      int iCurrColumnIndex = 0;
      for (int j=0; j < iNumOfBytesToRead; j++)
      {
        byte btCurrByte = dis.readByte();
        int iCurrMask = 0x80;
        for (int k=0; k < 8; k++)
        {
          if(iCurrColumnIndex < iBitmapWidthPxl)
            bsImageCurrRow.set(iCurrColumnIndex, (btCurrByte & iCurrMask)
!= 0);
          ++iCurrColumnIndex;
          iCurrMask /= 2;
        }
      }
      vImageRowsOfBitsets.add(bsImageCurrRow);
    }
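    // BMP stores pixel rows bottom-up, so reverse to get top-to-bottom order.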
    Collections.reverse(vImageRowsOfBitsets);
    return vImageRowsOfBitsets;
  }

/*
  public static void printImage(Vector vImageRowsOfBitsets)
  {
    for (int i=0; i < vImageRowsOfBitsets.size(); i++)
    {
      for (int j=0; j < 100; j++)
      {
        if(((BitSet)(vImageRowsOfBitsets.get(i))).get(j))
          System.out.print(" ");
        else
          System.out.print("$");
      }
      System.out.println();
    }
  }
*/

  // intelToJavaShort converts a 16-bit number stored in intel byte order
  // into the local host format
  protected static int intelToJavaShort(int i)
  {
    return ((i >> 8) & 0xff) + ((i << 8) & 0xff00);
  }

  // intelToJavaInt converts a 32-bit number stored in intel byte order
  // into the local host format
  protected static int intelToJavaInt(int i)
  {
    return ((i & 0xff) << 24) + ((i & 0xff00) << 8) +
           ((i & 0xff0000) >> 8) + ((i >> 24) & 0xff);
  }

  public static void main(String[] args) throws IOException,
FileNotFoundException, Exception
  {
    TrainingSet tsTrainingSet = makeTrainingSetOfFaces();
    //printTrainingSet(tsTrainingSet);

    int iNumOfNeuralNetworkInputs = 0;
    if(tsTrainingSet.size() > 0)
      iNumOfNeuralNetworkInputs =
((DataSample)(tsTrainingSet.m_vecDataSamples.get(0))).m_vecDblsInputValues.size();
    //System.out.println("The number of neural network inputs: " +
iNumOfNeuralNetworkInputs);

    Vector vecIntNumOfNeuronsOnEachLayer = new Vector();
    vecIntNumOfNeuronsOnEachLayer.add(new Integer(7));
    vecIntNumOfNeuronsOnEachLayer.add(new Integer(3));
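    // Network topology: one hidden layer of 7 neurons and an output layer of
    // 3 neurons, one output per face category (happy, sad, amazed).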
    NeuralNetwork nnNeuralNetwork = makeNeuralNetwork(iNumOfNeuralNetworkInputs,
      vecIntNumOfNeuronsOnEachLayer);
    //printNeuralNetwork (nnNeuralNetwork);

    trainNeuralNetwork(nnNeuralNetwork, tsTrainingSet);
    //printNeuralNetwork (nnNeuralNetwork);
    recognizeFaces(nnNeuralNetwork);
  }

/////////////////////////////////////////////////////////////////

  public static class DataSample
  {
    public DataSample()
    {
      m_vecDblsInputValues = new Vector();
      m_vecDblsOutputValues = new Vector();
    }
    public Vector m_vecDblsInputValues;
    public Vector m_vecDblsOutputValues;
  }

  /**
     Prints a data sample as a set of inputs and a set of outputs
     @param dsDataSample the data sample for the neural network
     @return none
  */
  public static void printDataSample(DataSample dsDataSample)
  {
    System.out.print("Data Sample: {");

    for (int i=0; i < dsDataSample.m_vecDblsInputValues.size(); i++)
    {

System.out.print(((Double)(dsDataSample.m_vecDblsInputValues.get(i))).doubleValue());
      if(i != dsDataSample.m_vecDblsInputValues.size() - 1)
        System.out.print("; ");
    }
    System.out.print("} -> {");
    for (int i=0; i < dsDataSample.m_vecDblsOutputValues.size(); i++)
    {

System.out.print(((Double)(dsDataSample.m_vecDblsOutputValues.get(i))).doubleValue());
      if(i != dsDataSample.m_vecDblsOutputValues.size() - 1)
        System.out.print("; ");
    }

    System.out.println("}");
  }

  public static class TrainingSet
  {
    public TrainingSet()
    {
      m_vecDataSamples = new Vector();
    }
    public int size()
    {
      return m_vecDataSamples.size();
    }
    public Vector m_vecDataSamples;
  }

  /**
     Prints the training set as a set of data samples
     @param tsTrainingSet the training set for the neural network
     @return none
  */
  public static void printTrainingSet(TrainingSet tsTrainingSet)
  {
    System.out.println();
    System.out.println("Training Set: {");
    for (int i=0; i < tsTrainingSet.m_vecDataSamples.size(); i++)
    {
      System.out.print("  ");

printDataSample((DataSample)(tsTrainingSet.m_vecDataSamples.get(i)));
    }
    System.out.println("};");
  }


  /**
     Reads all bitmaps from the "BMP" files in the current directory and
     creates a training set of data samples.
     @return TrainingSet a set of data samples for the neural network
  */
  // generate a vector of bitmaps' input sets and output classifications
  public static TrainingSet makeTrainingSetOfFaces() throws Exception
  {
    TrainingSet tsTrainingSet = new TrainingSet();

    int iCurrImageIndex = 1;
    try
    {
      while(true)
        tsTrainingSet.m_vecDataSamples.add(extractFeaturesFromBitmapToDataSample(
          readImage("h" + iCurrImageIndex++ + ".bmp"), 0));
    }
    catch(IOException e) {}

    iCurrImageIndex = 1;
    try
    {
      while(true)
        tsTrainingSet.m_vecDataSamples.add(extractFeaturesFromBitmapToDataSample(
          readImage("s" + iCurrImageIndex++ + ".bmp"), 1));
    }
    catch(IOException e) {}

    iCurrImageIndex = 1;
    try
    {
      while(true)
        tsTrainingSet.m_vecDataSamples.add(extractFeaturesFromBitmapToDataSample(
          readImage("a" + iCurrImageIndex++ + ".bmp"), 2));
    }
    catch(IOException e) {}

    return tsTrainingSet;
  }

  /**
     Converts a 100 by 100 matrix of bits (monochrome pixels)
     into a data sample, which defines the inputs and the outputs
     for the neural network.
     @param vImageRowsOfBitsets a Vector of Bitsets with pixels from
            the image. Used for the inputs.
     @param iImageType the type of the image ("happy", "sad", or
            "amazed" face). Used for the outputs.
     @return the generated data sample.
  */
  public static DataSample extractFeaturesFromBitmapToDataSample(Vector
vImageRowsOfBitsets, int iImageType)
  {
    DataSample dsDataSample = new DataSample();
    int iImageDimensionRows = vImageRowsOfBitsets.size(); // 100
    int iImageDimensionColumns = 100;
/*
    int iNumOfFeatureScales = 2;
    int[][] aaiSchemaOfFeatures =
      {{20, 20, 10, 10},
       {8, 8, 4, 4}};
*/
    int iNumOfFeatureScales = 1;
    int[][] aaiSchemaOfFeatures =
      {{4, 4, 4, 4}};
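    // Each input feature is the number of black pixels in a 4 x 4 block; the
    // blocks are stepped 4 pixels at a time in each direction, so a 100 x 100
    // image yields 25 * 25 = 625 inputs for the neural network.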

    for (int r=0; r < iNumOfFeatureScales; ++r)
    {
      //System.out.println();
      //System.out.println("Extacting features on scale " +
aaiSchemaOfFeatures[r][0] + " X " + aaiSchemaOfFeatures[r][1] );
      int iFeatureSquareLeftUpperRow = 0;
      int iFeatureSquareLeftUpperColumn = 0;
      while(iFeatureSquareLeftUpperRow + aaiSchemaOfFeatures[r][0] <=
iImageDimensionRows)
      {
        while(iFeatureSquareLeftUpperColumn + aaiSchemaOfFeatures[r][1]
<= iImageDimensionColumns)
        {
          int iCurrNumOfBlackPixelsInRectangle = 0;
          for (int i=iFeatureSquareLeftUpperRow; (i <
(iFeatureSquareLeftUpperRow + aaiSchemaOfFeatures[r][0]) && i <
iImageDimensionRows); ++i)
          {
            for (int j=iFeatureSquareLeftUpperColumn; (j <
(iFeatureSquareLeftUpperColumn + aaiSchemaOfFeatures[r][1]) && j
< iImageDimensionColumns); ++j)
            {
              if( !((BitSet)(vImageRowsOfBitsets.get(i))).get(j) )
                ++iCurrNumOfBlackPixelsInRectangle;
            }
          }
          dsDataSample.m_vecDblsInputValues.add(new
Double((double)iCurrNumOfBlackPixelsInRectangle));

          //System.out.println("Feature: [" + iFeatureSquareLeftUpperRow +
"; "+ iFeatureSquareLeftUpperColumn + "] - [" +
          //  (iFeatureSquareLeftUpperRow + aaiSchemaOfFeatures[r][0] - 1)
+ "; " +
          //  (iFeatureSquareLeftUpperColumn + aaiSchemaOfFeatures[r][1] -
1) + "]: " +
          //  dCurrFeatureSquareInputValue);

          iFeatureSquareLeftUpperColumn += aaiSchemaOfFeatures[r][3];
        }
        iFeatureSquareLeftUpperColumn = 0;
        iFeatureSquareLeftUpperRow += aaiSchemaOfFeatures[r][2];
      }
    }

    if(iImageType == 0) // happy face
    {
      dsDataSample.m_vecDblsOutputValues.add(new Double(1.0));
      dsDataSample.m_vecDblsOutputValues.add(new Double(0.0));
      dsDataSample.m_vecDblsOutputValues.add(new Double(0.0));
    } else if(iImageType == 1) // sad face
    {
      dsDataSample.m_vecDblsOutputValues.add(new Double(0.0));
      dsDataSample.m_vecDblsOutputValues.add(new Double(1.0));
      dsDataSample.m_vecDblsOutputValues.add(new Double(0.0));
    } else if(iImageType == 2) // amazed face
    {
      dsDataSample.m_vecDblsOutputValues.add(new Double(0.0));
      dsDataSample.m_vecDblsOutputValues.add(new Double(0.0));
      dsDataSample.m_vecDblsOutputValues.add(new Double(1.0));
    } else
    {
      // throw new Exception("Unexpected Image type: " + iImageType);
    }

    //System.out.println();
    //System.out.println("Number of inputs in the data sample: " +
dsDataSample.m_vecDblsInputValues.size());
    //System.out.println("Number of outputs in the data sample: " +
dsDataSample.m_vecDblsOutputValues.size());
    //System.out.println();

    return dsDataSample;
  }

  /**
     Recognizes the images (the types of faces in them) with the trained
     neural network and prints out the type names.
     @param nnNeuralNetwork trained neural network
     @return none
  */
  public static void recognizeFaces(NeuralNetwork nnNeuralNetwork) throws
Exception
  {
    int iCurrImageIndex = 1;
    try
    {
      while(true)
      {
        DataSample dsDataSample =
extractFeaturesFromBitmapToDataSample(readImage("test" + iCurrImageIndex++
+ ".bmp"), -1);
        //printDataSample(dsDataSample);
        applyFeedForwardNeuralNetwork(nnNeuralNetwork, dsDataSample,
true);
        //printDataSample(dsDataSample);
        //printNeuralNetwork (nnNeuralNetwork);
        if (dsDataSample.m_vecDblsOutputValues.size() == 3)
        {
          double dHappyCategoryStrength =
((Double)(dsDataSample.m_vecDblsOutputValues.get(0))).doubleValue();
          double dSadCategoryStrength =
((Double)(dsDataSample.m_vecDblsOutputValues.get(1))).doubleValue();
          double dAmazedCategoryStrength =
((Double)(dsDataSample.m_vecDblsOutputValues.get(2))).doubleValue();

          //System.out.print("{ happy; sad; amazed } = { " +
dHappyCategoryStrength + "; " +
          //                 dSadCategoryStrength + "; " +
dAmazedCategoryStrength + " } => ");

          if(dHappyCategoryStrength >= dSadCategoryStrength &&
dHappyCategoryStrength >= dAmazedCategoryStrength)
            System.out.println("happy");
          else if(dSadCategoryStrength >= dHappyCategoryStrength
&& dSadCategoryStrength >= dAmazedCategoryStrength)
            System.out.println("sad");
          else if(dAmazedCategoryStrength >= dSadCategoryStrength
&& dAmazedCategoryStrength >= dHappyCategoryStrength)
            System.out.println("amazed");
          else
            System.out.println("undefined");
        } else
          throw new Exception("The number of categories in the data sample
is wrong");
      }
    }
    catch(IOException e) {}
  }

/////////////////////////////////////////////////////////////////

  private static class NeuralNetwork
  {
    public NeuralNetwork(int intNumOfInputs)
    {
      m_vecLayers = new Vector();
      m_intNumOfInputs = intNumOfInputs;
    }
    public int depth()
    {
      return m_vecLayers.size();
    }
    public Vector m_vecLayers;
    public int m_intNumOfInputs;
  }

  private static class NeuralNetworkLayer
  {
    public NeuralNetworkLayer()
    {
      m_vecNeurons = new Vector();
    }
    public int size()
    {
      return m_vecNeurons.size();
    }
    public Vector m_vecNeurons;
  }

  // Use sigmoid function only:
  // g(x) = 1 / ( 1 + e ^ (-x) ) = e^x / (1 + e^x);
  // g'(x) = 1 / ( 4 * (cosh(x/2))^2 );
  // g'(x) = (e^x)(e^x + 1)^(-2)
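  // Note: equivalently, g'(x) = g(x) * (1 - g(x)), which is the form the
  // training code below effectively computes as e^x / (e^x + 1)^2.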
  private static class Neuron
  {
    public Neuron()
    {
      m_vecInputInfoComponents = new Vector();
      m_vecOutputInfoComponents = new Vector();
      m_dblCurrMemorizedNodeValue = 0.0;
      m_dblCurrSumOfWeightedInputs = 0.0;
      m_dblCurrDelta = 0.0;
      m_dblCurrSumOfWeightedDeltas = 0.0;
    }
    public Vector m_vecInputInfoComponents;
    public Vector m_vecOutputInfoComponents;
    public double m_dblCurrMemorizedNodeValue;
    public double m_dblCurrSumOfWeightedInputs;
    public double m_dblCurrDelta;
    public double m_dblCurrSumOfWeightedDeltas;
  }

  private static class NeuronInputInfoComponent
  {
    public double m_dblInputWeight;
    public int m_intSourceLayerIndex;
    public int m_intSourceNeuronIndex; // within the above Layer
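    // Convention used throughout: m_intSourceLayerIndex == -1 means the value
    // comes from the data sample's inputs rather than from a previous layer,
    // and m_intSourceNeuronIndex == -1 marks the bias input, which always
    // contributes a constant 1.0.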
  }

  private static class NeuronOutputInfoComponent
  {
    public int m_intDestinationLayerIndex;
    public int m_intDestinationNeuronIndex; // within the above Layer
  }

/////////////////////////////////////////////////////////////////

  /**
     Constructs a multilayer, fully connected, feed-forward neural network
     (untrained).
     @param iNumOfNetworkInputs the number of inputs to the neural network
            (excluding the bias input)
     @param vecIntNumOfNeuronsOnEachLayer the vector of layer sizes: the size
            of the vector determines the depth of the network (excluding the
            input "layer"), and each value gives the number of neurons in the
            corresponding layer of the network.
     @return a brand-new, untrained neural network
  */
  public static NeuralNetwork makeNeuralNetwork(int iNumOfNetworkInputs,
Vector vecIntNumOfNeuronsOnEachLayer)
  {
    // Create the Neural Network
    NeuralNetwork nnNeuralNetwork = new
NeuralNetwork(iNumOfNetworkInputs);

    // Create the Layers and their Neurons
    int iNumOfLayers = vecIntNumOfNeuronsOnEachLayer.size();
    for (int i=0; i < iNumOfLayers; i++)
    {
      nnNeuralNetwork.m_vecLayers.add(new NeuralNetworkLayer());
      NeuralNetworkLayer nnlCurrLayer =
(NeuralNetworkLayer)(nnNeuralNetwork.m_vecLayers.elementAt(i));
      int iNumOfNeuronsOnCurrLayer =
((Integer)(vecIntNumOfNeuronsOnEachLayer.get(i))).intValue();
      for (int j=0; j < iNumOfNeuronsOnCurrLayer; j++)
        nnlCurrLayer.m_vecNeurons.add(new Neuron());
    }

    // Initialize Neurons in all Layers
    for (int i=0; i < iNumOfLayers; i++)
    {
      NeuralNetworkLayer nnlCurrLayer =
(NeuralNetworkLayer)(nnNeuralNetwork.m_vecLayers.elementAt(i));
      int iNumOfNeuronsOnCurrLayer = nnlCurrLayer.size();
      for (int j=0; j < iNumOfNeuronsOnCurrLayer; j++)
      {
        Neuron nCurrNeuron =
(Neuron)(nnlCurrLayer.m_vecNeurons.elementAt(j));

        // Connect the current neuron with all network inputs (for the
        // first layer) OR connect the current neuron with all neurons from
        // the previous layer (for any succeeding layer)
        nCurrNeuron.m_vecInputInfoComponents = new Vector();
        int iNumOfInputConnectionsToCurrNeuron = 1;
        if(i == 0)
          iNumOfInputConnectionsToCurrNeuron = iNumOfNetworkInputs + 1;
        else
          iNumOfInputConnectionsToCurrNeuron =
((NeuralNetworkLayer)(nnNeuralNetwork.m_vecLayers.elementAt(i-1))).size()
+ 1;
        for (int k=0; k < iNumOfInputConnectionsToCurrNeuron; ++k)
        {
          // Create Neurons Input Component
          NeuronInputInfoComponent niicInputInfoComp = new
NeuronInputInfoComponent();

          // Initialize Neurons Input Component
          if (k == 0)
            niicInputInfoComp.m_dblInputWeight = /*1.0;*/ (1.0 /
(double)(iNumOfInputConnectionsToCurrNeuron));
          else
            niicInputInfoComp.m_dblInputWeight = /*1.0;*/ (1.0 /
(double)(iNumOfInputConnectionsToCurrNeuron));
          niicInputInfoComp.m_intSourceLayerIndex = i - 1;
          niicInputInfoComp.m_intSourceNeuronIndex = k - 1;

nCurrNeuron.m_vecInputInfoComponents.addElement(niicInputInfoComp);
        }

        // Connect the current neuron with all neurons from the next layer
        // (for any preceding layer) OR connect the current neuron with one
        // network output (for the last layer)
        nCurrNeuron.m_vecOutputInfoComponents = new Vector();
        int iNumOfOutputConnectionsFromCurrNeuron = 0;
        if(i == iNumOfLayers - 1)
          iNumOfOutputConnectionsFromCurrNeuron = 1;
        else
          iNumOfOutputConnectionsFromCurrNeuron =
((NeuralNetworkLayer)(nnNeuralNetwork.m_vecLayers.elementAt(i+1))).size();
        for (int k=0; k < iNumOfOutputConnectionsFromCurrNeuron; k++)
        {
          // Create Neurons Output Component
          NeuronOutputInfoComponent noicInputInfoComp = new
NeuronOutputInfoComponent();
          // Initialize Neurons Output Component
          noicInputInfoComp.m_intDestinationLayerIndex = i + 1;
          noicInputInfoComp.m_intDestinationNeuronIndex = k;


nCurrNeuron.m_vecOutputInfoComponents.addElement(noicInputInfoComp);
        }

        // Initialize neuron's memorized value (there is no initial memory)
        nCurrNeuron.m_dblCurrMemorizedNodeValue = 0.0;
        nCurrNeuron.m_dblCurrSumOfWeightedInputs = 0.0;
        nCurrNeuron.m_dblCurrDelta = 0.0;
        nCurrNeuron.m_dblCurrSumOfWeightedDeltas = 0.0;
      }
    }

    return nnNeuralNetwork;
  }

  /**
     Prints the neural network contents
     @param nnNeuralNetwork the neural network to be printed
     @return none
  */
  public static void printNeuralNetwork (NeuralNetwork nnNeuralNetwork)
  {
    int iNumOfInputs = nnNeuralNetwork.m_intNumOfInputs;
    int iNumOfLayers = nnNeuralNetwork.depth();
    int iNumOfOutputs = 0;
    if(iNumOfLayers > 0)
      iNumOfOutputs =
((NeuralNetworkLayer)(nnNeuralNetwork.m_vecLayers.lastElement())).size();
    System.out.println();
    System.out.println("Feed Forward Completely Connected Neural
Network:");
    System.out.println("Inputs: " + iNumOfInputs);
    System.out.print("Layers: " + iNumOfLayers + ". Layers' sizes (# of
neurons): ");
    for (int i=0; i < iNumOfLayers; ++i)

System.out.print(((NeuralNetworkLayer)(nnNeuralNetwork.m_vecLayers.get(i))).size()
+ "; ");
    System.out.println();
    System.out.println("Outputs: " + iNumOfOutputs);
    for (int i=0; i < iNumOfLayers; ++i)
    {
      System.out.println("Layer " + i + ":");
      NeuralNetworkLayer nnlCurrLayer =
(NeuralNetworkLayer)(nnNeuralNetwork.m_vecLayers.get(i));
      for (int j=0; j < nnlCurrLayer.size(); ++j)
      {
        Neuron nnCurrNeuron = (Neuron)(nnlCurrLayer.m_vecNeurons.get(j));
        System.out.println("  Neuron " + j + ": Value = " +
nnCurrNeuron.m_dblCurrMemorizedNodeValue +
                           "; Sum of Weighted inputs = " +
nnCurrNeuron.m_dblCurrSumOfWeightedInputs +
                           "; Delta = " + nnCurrNeuron.m_dblCurrDelta +
                           "; Sum of Weighted deltas = " +
nnCurrNeuron.m_dblCurrSumOfWeightedDeltas);

        for (int k=0; k < nnCurrNeuron.m_vecInputInfoComponents.size();
++k)
        {
          NeuronInputInfoComponent niicCurrNIIC =
(NeuronInputInfoComponent)(nnCurrNeuron.m_vecInputInfoComponents.get(k));
          System.out.println("    In " + k + ": [" +
niicCurrNIIC.m_intSourceLayerIndex + "][" +
                           niicCurrNIIC.m_intSourceNeuronIndex + "] * " +
niicCurrNIIC.m_dblInputWeight + " -> g()");
        }

        for (int m=0; m <
nnCurrNeuron.m_vecOutputInfoComponents.size(); ++m)
        {
          NeuronOutputInfoComponent noicCurrNOIC =
(NeuronOutputInfoComponent)(nnCurrNeuron.m_vecOutputInfoComponents.get(m));
          System.out.println("    Out " + m + ": g() -> [" +
noicCurrNOIC.m_intDestinationLayerIndex + "][" +
                           noicCurrNOIC.m_intDestinationNeuronIndex +
"]");
        }
      }
    }
    System.out.println();
  }

  /**
     Trains the neural network
     @param nnNeuralNetwork the neural network to be trained
     @param tsTrainingSet the training set to be used for training
     @return none
  */
  public static void trainNeuralNetwork(NeuralNetwork nnNeuralNetwork,
TrainingSet tsTrainingSet) throws Exception
  {
    int iMaxNumOfEpochs = 100000;
    int iCurrEpochNum = 0;

    long lTrainingStartTimeMs = System.currentTimeMillis();
    long lMaxTrainingPeriodMs = (5 * 60 - 3) * 1000;

    double dblAlphaLearningRate = 0.1;
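    // Backpropagation loop: each epoch runs every training sample through the
    // network, then nudges each weight by alpha * input * delta. Training
    // stops after iMaxNumOfEpochs epochs, after just under five minutes of
    // wall-clock time, or once the largest relative weight change in an epoch
    // falls below a small threshold.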
    do
    {
      // = abs((OldWeight - NewWeight) / OldWeight); if NaN, then set above
      // the threshold
      double dblMaxAbsRelWeightChange = 0.0;
      double dblThresholdForMaxAbsRelWeightChange = 0.001;

      for (int e=0; e < tsTrainingSet.size(); ++e)
      {
        DataSample dsCurrDataSample =
(DataSample)(tsTrainingSet.m_vecDataSamples.elementAt(e));
        // The neural network state is updated, but the data sample is not.
        applyFeedForwardNeuralNetwork(nnNeuralNetwork, dsCurrDataSample,
          false);
        for (int i = (nnNeuralNetwork.depth() - 1); i >= 0; --i)
        {
          NeuralNetworkLayer nnlCurrLayer =
(NeuralNetworkLayer)(nnNeuralNetwork.m_vecLayers.get(i));
          for (int j=0; j < nnlCurrLayer.size(); ++j)
          {
            Neuron nnCurrNeuron =
(Neuron)(nnlCurrLayer.m_vecNeurons.get(j));
            // if the last layer, then simulate the weighted sum of deltas
            // from the next layer
            if(i == (nnNeuralNetwork.depth() - 1))
            {
              nnCurrNeuron.m_dblCurrSumOfWeightedDeltas =

((Double)(dsCurrDataSample.m_vecDblsOutputValues.get(j))).doubleValue() -
nnCurrNeuron.m_dblCurrMemorizedNodeValue;
            } // else we assume that the weighted deltas are already
              // initialized one step earlier

            // Calculate delta for this neuron
            double dblPoweredE =
Math.pow(Math.E,nnCurrNeuron.m_dblCurrSumOfWeightedInputs);
            double dbl_g_prime = 0.0;
            if(dblPoweredE == Double.POSITIVE_INFINITY)
              dbl_g_prime = 0.0;
            else
              dbl_g_prime = dblPoweredE / Math.pow (dblPoweredE + 1.0,
2.0);
            if(Double.isNaN(dbl_g_prime))
              dbl_g_prime = 0.0;
            nnCurrNeuron.m_dblCurrDelta =
nnCurrNeuron.m_dblCurrSumOfWeightedDeltas * dbl_g_prime;

            nnCurrNeuron.m_dblCurrSumOfWeightedDeltas = 0.0; // reset for future use

            for (int k=0; k <
nnCurrNeuron.m_vecInputInfoComponents.size(); ++k)
            {
              NeuronInputInfoComponent niicCurrNIIC =
(NeuronInputInfoComponent)(nnCurrNeuron.m_vecInputInfoComponents.get(k));
              double dblCurrInputComponentValue = 0.0;
              // use inputs from the data sample
              if(niicCurrNIIC.m_intSourceLayerIndex == -1)
              {
                if(niicCurrNIIC.m_intSourceNeuronIndex == -1)
                  dblCurrInputComponentValue = 1.0;
                else
                  dblCurrInputComponentValue =
((Double)(dsCurrDataSample.m_vecDblsInputValues.get(niicCurrNIIC.m_intSourceNeuronIndex))).doubleValue();
              } else // use outputs from the neuron in the previous layer
              {
                NeuralNetworkLayer nnlPrevLayer =
(NeuralNetworkLayer)(nnNeuralNetwork.m_vecLayers.get(niicCurrNIIC.m_intSourceLayerIndex));
                if(niicCurrNIIC.m_intSourceNeuronIndex == -1)
                  dblCurrInputComponentValue = 1.0;
                else
                  dblCurrInputComponentValue =
((Neuron)(nnlPrevLayer.m_vecNeurons.get(niicCurrNIIC.m_intSourceNeuronIndex))).m_dblCurrMemorizedNodeValue;
              }
              double dblOldWeightValue = niicCurrNIIC.m_dblInputWeight;
              niicCurrNIIC.m_dblInputWeight += dblAlphaLearningRate *
dblCurrInputComponentValue * nnCurrNeuron.m_dblCurrDelta;
              double dblCurrAbsRelWeightChange = Math.abs(
(dblOldWeightValue - niicCurrNIIC.m_dblInputWeight) / dblOldWeightValue);
              if(dblCurrAbsRelWeightChange > dblMaxAbsRelWeightChange)
                dblMaxAbsRelWeightChange = dblCurrAbsRelWeightChange;
              else if (Double.isNaN(dblCurrAbsRelWeightChange))
                dblMaxAbsRelWeightChange =
                  dblThresholdForMaxAbsRelWeightChange + 1.0;

              // if this is not the first layer, then update the weighted sum
              // of this layer's deltas in the previous layer
              if(/*i != 0 && */ niicCurrNIIC.m_intSourceLayerIndex != -1 &&
                 niicCurrNIIC.m_intSourceNeuronIndex != -1)
              {
                NeuralNetworkLayer nnlPrevLayer =
(NeuralNetworkLayer)(nnNeuralNetwork.m_vecLayers.get(niicCurrNIIC.m_intSourceLayerIndex));
                Neuron nnPrevNeuron =
(Neuron)(nnlPrevLayer.m_vecNeurons.get(niicCurrNIIC.m_intSourceNeuronIndex));
                nnPrevNeuron.m_dblCurrSumOfWeightedDeltas +=
niicCurrNIIC.m_dblInputWeight * nnCurrNeuron.m_dblCurrDelta;
              }
            }
          }
        }
      }
      if ( (++iCurrEpochNum) >= iMaxNumOfEpochs ||
           ( Math.abs(System.currentTimeMillis() - lTrainingStartTimeMs )
> lMaxTrainingPeriodMs ) ||
           dblMaxAbsRelWeightChange <=
dblThresholdForMaxAbsRelWeightChange)
        break;
    } while (true);
    //System.out.println("Training is over after " + iCurrEpochNum + "
epochs.");
  }

  /**
     Runs a data sample through the neural network.
     @param nnNeuralNetwork the neural network used to analyze the data sample
     @param dsDataSample the data sample to be run through the neural network
     @param bUpdateDataSample the outputs of the data sample are updated only
            if this flag is set. There is no need to update the data sample's
            outputs when it comes from the training set during training; the
            network's temporary internal outputs are always updated.
     @return none
  */
  public static void applyFeedForwardNeuralNetwork(NeuralNetwork nnNeuralNetwork,
    DataSample dsDataSample, boolean bUpdateDataSample) throws Exception
  {
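    // Forward pass: layer by layer, each neuron adds its bias weight (the
    // k == 0 input component) to the weighted sum of its inputs, applies the
    // sigmoid, and memorizes the result. The last layer's values become the
    // data sample's outputs when bUpdateDataSample is set.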
    if(bUpdateDataSample)
    {
      if(nnNeuralNetwork.depth() != 0)
      {
        int iNumOfNeuralNetworkOutputs =
((NeuralNetworkLayer)(nnNeuralNetwork.m_vecLayers.lastElement())).size();
        dsDataSample.m_vecDblsOutputValues = new
Vector(iNumOfNeuralNetworkOutputs);
        for (int i=0; i < iNumOfNeuralNetworkOutputs; ++i)
          dsDataSample.m_vecDblsOutputValues.add(new Double(0.0));
      }
      else
        dsDataSample.m_vecDblsOutputValues =
(Vector)(dsDataSample.m_vecDblsInputValues.clone());
    }
    else if(dsDataSample.m_vecDblsOutputValues.size() !=
            ((NeuralNetworkLayer)(nnNeuralNetwork.m_vecLayers.lastElement())).size())
      throw new Exception("Error: the number of outputs from the neural " +
        "network does not match the number of outputs in the data sample.");

    for (int i=0; i < nnNeuralNetwork.depth(); ++i)
    {
      NeuralNetworkLayer nnlCurrLayer =
(NeuralNetworkLayer)(nnNeuralNetwork.m_vecLayers.get(i));

      for (int j=0; j < nnlCurrLayer.size(); ++j)
      {
        Neuron nnCurrNeuron = (Neuron)(nnlCurrLayer.m_vecNeurons.get(j));
        nnCurrNeuron.m_dblCurrSumOfWeightedInputs = 0.0;
        for (int k=0; k < nnCurrNeuron.m_vecInputInfoComponents.size();
++k)
        {
          NeuronInputInfoComponent niicCurrNIIC =
(NeuronInputInfoComponent)(nnCurrNeuron.m_vecInputInfoComponents.get(k));
          if(k==0)
            nnCurrNeuron.m_dblCurrSumOfWeightedInputs +=
niicCurrNIIC.m_dblInputWeight * 1.0;
          else
          {
            if(i == 0)
            {
              if (niicCurrNIIC.m_intSourceNeuronIndex < 0 ||
                  niicCurrNIIC.m_intSourceNeuronIndex >=
dsDataSample.m_vecDblsInputValues.size() ||
                  niicCurrNIIC.m_intSourceLayerIndex != -1 ||
                  (k-1) != niicCurrNIIC.m_intSourceNeuronIndex)
                throw new Exception("Error: invalid input address in the
first layer in the Forward Feed Neural Network.");
              nnCurrNeuron.m_dblCurrSumOfWeightedInputs +=
niicCurrNIIC.m_dblInputWeight *
((Double)(dsDataSample.m_vecDblsInputValues.get(niicCurrNIIC.m_intSourceNeuronIndex))).doubleValue();
            }
            else
            {
              if((i-1) != niicCurrNIIC.m_intSourceLayerIndex)
                throw new Exception("Error");
              NeuralNetworkLayer nnlEarlierLayer =
(NeuralNetworkLayer)(nnNeuralNetwork.m_vecLayers.get(niicCurrNIIC.m_intSourceLayerIndex));
              Neuron nnEarlierNeuron =
(Neuron)(nnlEarlierLayer.m_vecNeurons.get(niicCurrNIIC.m_intSourceNeuronIndex));
              nnCurrNeuron.m_dblCurrSumOfWeightedInputs +=
niicCurrNIIC.m_dblInputWeight *
nnEarlierNeuron.m_dblCurrMemorizedNodeValue;
            }
          }
        }
        double dblPoweredE =
Math.pow(Math.E,nnCurrNeuron.m_dblCurrSumOfWeightedInputs);
        if(dblPoweredE == Double.POSITIVE_INFINITY)
          nnCurrNeuron.m_dblCurrMemorizedNodeValue = 1.0;
        else
          nnCurrNeuron.m_dblCurrMemorizedNodeValue = dblPoweredE /
(dblPoweredE + 1.0); // sigmoid function
        if(Double.isNaN(nnCurrNeuron.m_dblCurrMemorizedNodeValue))
          nnCurrNeuron.m_dblCurrMemorizedNodeValue = 1.0;
        //nnCurrNeuron.m_dblCurrMemorizedNodeValue =
        //  nnCurrNeuron.m_dblCurrSumOfWeightedInputs > 0 ? 1.0 : 0.0; // step function

        if(i == nnNeuralNetwork.depth() - 1)
        {
          if(bUpdateDataSample)
          {
            NeuronOutputInfoComponent noicOutputInfoComp =
(NeuronOutputInfoComponent)(nnCurrNeuron.m_vecOutputInfoComponents.elementAt(0));
            if (nnCurrNeuron.m_vecOutputInfoComponents.size() != 1 ||
                noicOutputInfoComp.m_intDestinationLayerIndex !=
nnNeuralNetwork.depth() ||
                noicOutputInfoComp.m_intDestinationNeuronIndex < 0 ||
                noicOutputInfoComp.m_intDestinationNeuronIndex >=
dsDataSample.m_vecDblsOutputValues.size() )
              throw new Exception("Error: the number of data outputs does
not match to the number of outputs in the Neural Network.");

dsDataSample.m_vecDblsOutputValues.set(/*noicOutputInfoComp.m_intDestinationNeuronIndex*/
j, new Double(nnCurrNeuron.m_dblCurrMemorizedNodeValue));
          }
        }
      }
    }
  }

/*
  public static void resetNeuralNetwork(NeuralNetwork nnNeuralNetwork)
  {
    for (int i=0; i < nnNeuralNetwork.depth(); ++i)
    {
      NeuralNetworkLayer nnlCurrLayer =
(NeuralNetworkLayer)(nnNeuralNetwork.m_vecLayers.get(i));
      for (int j=0; j < nnlCurrLayer.size(); ++j)
      {
        Neuron nnCurrNeuron = (Neuron)(nnlCurrLayer.m_vecNeurons.get(j));

        for (int k=0; k < nnCurrNeuron.m_vecInputInfoComponents.size();
++k)
        {
          NeuronInputInfoComponent niicCurrNIIC =
(NeuronInputInfoComponent)(nnCurrNeuron.m_vecInputInfoComponents.get(k));
          // niicCurrNIIC.m_dblInputWeight = 1.0;
        }
      }
    }
  }
*/
}
 

Return to homework page.