00001
#include "annalee/layered.h"

#include <vector>

#include <inanna/annetwork.h>
#include <inanna/initializer.h>
#include <nhp/individual.h>
#include <magic/mclass.h>
00031
// Runtime-type registration macro (from magic/mclass.h) — presumably declares
// LayeredEncoding as a dynamic class with ANNEncoding as its sole base.
impl_dynamic (LayeredEncoding, {ANNEncoding});
00033
00034
00036
00037
00038
00039
00040
00041
00042
00044
00045 LayeredEncoding::LayeredEncoding (const GeneticID& name,
00046 const StringMap& params) : ANNEncoding (name, params)
00047 {
00048 mPruneInputs = isnull(params["prins"])? true : params["prins"].toInt ();
00049 mPruneWeights = isnull(params["prwts"])? false : params["prwts"].toInt ();
00050 }
00051
00052 LayeredEncoding::LayeredEncoding (const LayeredEncoding& other) : ANNEncoding (other)
00053 {
00054 mPruneInputs = other.mPruneInputs;
00055 mPruneWeights = other.mPruneWeights;
00056 }
00057
00058 void LayeredEncoding::copy (const Genstruct& o)
00059 {
00060 ANNEncoding::copy (o);
00061 const LayeredEncoding& other = static_cast<const LayeredEncoding&>(o);
00062 mPruneInputs = other.mPruneInputs;
00063 mPruneWeights = other.mPruneWeights;
00064 }
00065
00066 void LayeredEncoding::addPrivateGenes (Gentainer& g, const StringMap& params)
00067 {
00068 Gentainer::addPrivateGenes (g, params);
00069
00070 if (mPruneInputs)
00071 for (int i=0; i<mInputs; i++)
00072 add (new BinaryGene (format ("R%d", i), 1.0));
00073
00074 if (mPruneWeights) {
00075 for (int i=0; i<mInputs; i++)
00076 for (int j=0; j<mMaxHidden; j++)
00077 add (new BinaryGene (format ("W%d-%d", i, j), 1.0));
00078 }
00079
00080
00081 for (int i=0; i<mMaxHidden; i++)
00082 add (new BinaryGene (format ("H%d", i), 1.0));
00083 }
00084
00085 bool LayeredEncoding::execute (const GeneticMsg& msg) const
00086 {
00087 ANNetwork* net = new ANNetwork (format ("%d-%d-%d", mInputs, mMaxHidden, mOutputs));
00088
00089
00090 bool hidexists [mMaxHidden];
00091 for (int h=0; h<mMaxHidden; h++) {
00092 hidexists[h] = static_cast<const BinaryGene&> (
00093 (*this)[(CONSTR)format ("H%d", h)]).getvalue();
00094
00095
00096
00097 (*net)[h+mInputs].enable(hidexists[h]);
00098 }
00099
00100
00101 bool input_exists;
00102 bool w_exists;
00103 for (int i=0; i<mInputs; i++) {
00104
00105
00106 input_exists = true;
00107 if (mPruneInputs)
00108 input_exists = static_cast<const BinaryGene&> (
00109 (*this)[(CONSTR)format ("R%d", i)]).getvalue();
00110
00111 (*net)[i].enable (input_exists);
00112
00113
00114 if (input_exists) {
00115
00116
00117 for (int h=0; h<mMaxHidden; h++) {
00118 w_exists = hidexists[h];
00119
00120
00121 if (mPruneWeights) {
00122 w_exists = static_cast<const BinaryGene&> (
00123 (*this)[(CONSTR)format ("W%d-%d", i, h)]).getvalue();
00124 }
00125
00126
00127 if (w_exists)
00128 net->connect (i, h+mInputs);
00129 }
00130 }
00131 }
00132
00133
00134
00135
00136 for (int o=0; o<mOutputs; o++) {
00137
00138
00139 for (int h=0; h<mMaxHidden; h++) {
00140 w_exists = hidexists[h];
00141
00142
00143 if (mPruneWeights) {
00144 w_exists = static_cast<const BinaryGene&> (
00145 (*this)[(CONSTR)format ("W%d-%d", o, h)]).getvalue();
00146 }
00147
00148
00149 if (w_exists)
00150 net->connect (h+mInputs, o+mInputs+mMaxHidden);
00151 }
00152 }
00153
00154 msg.host.set ("brainplan", net);
00155
00156 return true;
00157 }
00158