
layered.cc

#include <inanna/annetwork.h>
#include <inanna/initializer.h>
#include <nhp/individual.h>
#include <magic/mclass.h>

#include <vector>   // std::vector holds the hidden-unit existence flags in execute()

#include "annalee/layered.h"

impl_dynamic (LayeredEncoding, {ANNEncoding});

//                                                                          //
//  |                                     | -----                      |    //
//  |      ___         ___       ___      | |       _    ___           |    //
//  |      ___| \   | /   ) |/\ /   )  ---| |---  |/ \  |   \  __   ---|    //
//  |     (   |  \  | |---  |   |---  (   | |     |   | |     /  \ (   |    //
//  |____  \__|   \_/  \__  |    \__   ---| |____ |   |  \__/ \__/  ---| O  //
//               \_/                                                        //

// Construct from genetic parameters. The "prins" parameter enables pruning of
// input units (default: on); "prwts" enables pruning of individual
// input-to-hidden weights (default: off).
LayeredEncoding::LayeredEncoding (const GeneticID& name,
                                  const StringMap& params) : ANNEncoding (name, params)
{
    mPruneInputs  = isnull(params["prins"])? true  : params["prins"].toInt ();
    mPruneWeights = isnull(params["prwts"])? false : params["prwts"].toInt ();
}

// Copy constructor: duplicates the pruning settings along with the base encoding.
LayeredEncoding::LayeredEncoding (const LayeredEncoding& other) : ANNEncoding (other)
{
    mPruneInputs  = other.mPruneInputs;
    mPruneWeights = other.mPruneWeights;
}

// Copy the contents of another LayeredEncoding into this one.
void LayeredEncoding::copy (const Genstruct& o)
{
    ANNEncoding::copy (o);
    const LayeredEncoding& other = static_cast<const LayeredEncoding&>(o);
    mPruneInputs  = other.mPruneInputs;
    mPruneWeights = other.mPruneWeights;
}

// Add the pruning genes of this encoding to the gene container.
void LayeredEncoding::addPrivateGenes (Gentainer& g, const StringMap& params)
{
    Gentainer::addPrivateGenes (g, params);

    // Input pruning: one existence gene "R<i>" per input unit
    if (mPruneInputs)
        for (int i=0; i<mInputs; i++)
            add (new BinaryGene (format ("R%d", i), 1.0));

    // Weight pruning: one existence gene "W<i>-<j>" per input-to-hidden weight
    if (mPruneWeights) {
        for (int i=0; i<mInputs; i++)
            for (int j=0; j<mMaxHidden; j++)
                add (new BinaryGene (format ("W%d-%d", i, j), 1.0));
    }

    // Hidden-unit pruning (always enabled): one existence gene "H<i>" per hidden unit
    for (int i=0; i<mMaxHidden; i++)
        add (new BinaryGene (format ("H%d", i), 1.0));
}

// Build an ANNetwork phenotype from the genotype and attach it to the host.
bool LayeredEncoding::execute (const GeneticMsg& msg) const
{
    ANNetwork* net = new ANNetwork (format ("%d-%d-%d", mInputs, mMaxHidden, mOutputs));

    // Go through each hidden unit and check if it exists
    std::vector<bool> hidexists (mMaxHidden);
    for (int h=0; h<mMaxHidden; h++) {
        hidexists[h] = static_cast<const BinaryGene&> (
            (*this)[(CONSTR)format ("H%d", h)]).getvalue();
        // TRACE2 ("%d=%d", h, int(hidexists[h]));

        // Enable or disable it in the network
        (*net)[h+mInputs].enable(hidexists[h]);
    }

    // Connect inputs to hiddens
    bool input_exists; // Does an input unit exist?
    bool w_exists;     // Does a weight exist?
    for (int i=0; i<mInputs; i++) {

        // See if this input unit "exists" (if input pruning is enabled)
        input_exists = true;
        if (mPruneInputs)
            input_exists = static_cast<const BinaryGene&> (
                (*this)[(CONSTR)format ("R%d", i)]).getvalue();

        (*net)[i].enable (input_exists);

        // If the input unit exists...
        if (input_exists) {

            // ...go through each hidden unit
            for (int h=0; h<mMaxHidden; h++) {
                w_exists = hidexists[h];

                // With weight pruning, the weight gene decides instead
                if (mPruneWeights) {
                    w_exists = static_cast<const BinaryGene&> (
                        (*this)[(CONSTR)format ("W%d-%d", i, h)]).getvalue();
                }

                // Create the connection if it exists
                if (w_exists)
                    net->connect (i, h+mInputs);
            }
        }
    }

    // Connect hiddens to outputs

    // To each output unit...
    for (int o=0; o<mOutputs; o++) {

        // ...connect every existing hidden unit
        for (int h=0; h<mMaxHidden; h++) {
            w_exists = hidexists[h];

            // With weight pruning, the weight gene decides instead. Note that
            // this reuses the input-to-hidden genes "W<o>-<h>", indexed by the
            // output number, so it assumes mOutputs <= mInputs.
            if (mPruneWeights) {
                w_exists = static_cast<const BinaryGene&> (
                    (*this)[(CONSTR)format ("W%d-%d", o, h)]).getvalue();
            }

            // Create the connection if it exists
            if (w_exists)
                net->connect (h+mInputs, o+mInputs+mMaxHidden);
        }
    }

    // Hand the finished network plan over to the host individual
    msg.host.set ("brainplan", net);

    return true;
}

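The genotype-to-phenotype decoding performed by execute() can be illustrated without the Inanna/NHP class library. The sketch below reproduces the same mapping in standard C++ under simplifying assumptions: gene values are modelled as plain booleans rather than BinaryGene objects, the Phenotype struct and decode() function are hypothetical names introduced for this example, and the hidden-to-output stage only follows hidden-unit existence (unlike the listing above, which also consults the weight genes when weight pruning is enabled). It is an illustration of the technique, not the library's API.

// Illustrative sketch only: decodes pruning flags into a unit/connection list
// the same way LayeredEncoding::execute() does, using standard C++ only.
#include <cstdio>
#include <utility>
#include <vector>

struct Phenotype {
    std::vector<bool> unitEnabled;                 // inputs, then hidden, then outputs
    std::vector<std::pair<int,int> > connections;  // (from, to) unit indices
};

Phenotype decode (int nInputs, int nHidden, int nOutputs,
                  const std::vector<bool>& hiddenGenes,  // stands in for the "H%d" genes
                  const std::vector<bool>& inputGenes,   // "R%d" genes; empty = no input pruning
                  const std::vector<bool>& weightGenes)  // "W%d-%d" genes, row-major; empty = no weight pruning
{
    Phenotype p;
    p.unitEnabled.assign (nInputs + nHidden + nOutputs, true);

    // A hidden unit exists only if its existence gene is set.
    for (int h = 0; h < nHidden; h++)
        p.unitEnabled[nInputs + h] = hiddenGenes[h];

    // Input-to-hidden connections: the input unit must exist, and either the
    // hidden unit exists or, with weight pruning, the weight gene is set.
    for (int i = 0; i < nInputs; i++) {
        bool inputExists = inputGenes.empty () ? true : bool (inputGenes[i]);
        p.unitEnabled[i] = inputExists;
        if (!inputExists)
            continue;
        for (int h = 0; h < nHidden; h++) {
            bool wExists = weightGenes.empty () ?
                bool (hiddenGenes[h]) : bool (weightGenes[i*nHidden + h]);
            if (wExists)
                p.connections.push_back (std::make_pair (i, nInputs + h));
        }
    }

    // Hidden-to-output connections: every existing hidden unit feeds every output.
    for (int o = 0; o < nOutputs; o++)
        for (int h = 0; h < nHidden; h++)
            if (hiddenGenes[h])
                p.connections.push_back (std::make_pair (nInputs + h,
                                                         nInputs + nHidden + o));
    return p;
}

int main ()
{
    // 2 inputs, 3 hidden units (the middle one pruned away), 1 output;
    // no input or weight pruning genes.
    std::vector<bool> hidden;
    hidden.push_back (true); hidden.push_back (false); hidden.push_back (true);
    Phenotype p = decode (2, 3, 1, hidden, std::vector<bool> (), std::vector<bool> ());
    for (unsigned c = 0; c < p.connections.size (); c++)
        std::printf ("%d -> %d\n", p.connections[c].first, p.connections[c].second);
    return 0;
}

Compiled with any standard C++ compiler and run, the example lists the six surviving connections of a 2-3-1 network whose middle hidden unit has been pruned away.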
