src/BackPropNeuralNetwork.cpp

/*
 *   This file is part of the Standard Portable Library (SPL).
 *
 *   SPL is free software: you can redistribute it and/or modify
 *   it under the terms of the GNU General Public License as published by
 *   the Free Software Foundation, either version 3 of the License, or
 *   (at your option) any later version.
 *
 *   SPL is distributed in the hope that it will be useful,
 *   but WITHOUT ANY WARRANTY; without even the implied warranty of
 *   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 *   GNU General Public License for more details.
 *
 *   You should have received a copy of the GNU General Public License
 *   along with SPL.  If not, see <http://www.gnu.org/licenses/>.
 */
#include <spl/Debug.h>
#include <spl/math/Math.h>
#include <spl/math/NeuralNetwork.h>
void Network::DeleteArrays()
{
    // Nothing to free: the node, error, weight, and delta buffers are
    // Array<double> value members (see BuildNetwork), so their storage
    // is released automatically by their own destructors.  This hook is
    // retained because the destructor and operator= call it before the
    // network is rebuilt.
}

void Network::RandomizeWeights()
{
    int x;
    // initialize the weights uniformly in [-m_wtRange, m_wtRange]
    int len = m_inputLen * m_h1len;
    ASSERT_MEM( m_wt1ToIn, len * sizeof(double) );
    for (x = 0; x < len; x++)
    {
        m_wt1ToIn[x] = (Math::Random() * m_wtRange * 2.0) - m_wtRange;
    }
    len = m_h1len * m_h2len;
    ASSERT_MEM( m_wt2To1, len * sizeof(double) );
    for (x = 0; x < len; x++)
    {
        m_wt2To1[x] = (Math::Random() * m_wtRange * 2.0) - m_wtRange;
    }
    len = m_outputLen * m_h2len;
    ASSERT_MEM( m_wtOutTo2, len * sizeof(double) );
    for (x = 0; x < len; x++)
    {
        m_wtOutTo2[x] = (Math::Random() * m_wtRange * 2.0) - m_wtRange;
    }
}
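A note on the initialization above: assuming Math::Random() returns a uniform deviate u in [0, 1) (an assumption here; the exact range is defined in spl/math/Math.h, not in this file), each weight is u * 2r - r and is therefore uniform in [-r, r), where r = m_wtRange (0.2 by default, per Init below).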

void Network::BuildNetwork( int inlen, int len1, int len2, int outlen )
{
    m_inputLen = inlen;
    m_h1len = len1;
    m_h2len = len2;
    m_outputLen = outlen;

    m_input = Array<double>(inlen);
    m_layer1 = Array<double>(len1);
    m_layer2 = Array<double>(len2);
    m_output = Array<double>(outlen);
    m_errors1 = Array<double>(len1);
    m_errors2 = Array<double>(len2);
    m_errorsOut = Array<double>(outlen);
    m_wt1ToIn = Array<double>(inlen * len1);
    m_delta1ToIn = Array<double>(inlen * len1);
    m_wt2To1 = Array<double>(len1 * len2);
    m_delta2To1 = Array<double>(len1 * len2);
    m_wtOutTo2 = Array<double>(outlen * len2);
    m_deltaOutTo2 = Array<double>(outlen * len2);

    RandomizeWeights();
}
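The weight and delta arrays are flat matrices: judging from the sequential iWtPos indexing in ActivateLayer and Train below, the connection from node y of a source layer of length L to node x of the destination layer lives at index x * L + y.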

void Network::Init()
{
    m_networkId = -1;
    m_learnRate = .2;
    m_momentum = .05;
    m_wtRange = .2;
    m_useAdaptiveLR = true;
    m_useAdaptiveMom = false;
    m_previousError = 0.0;
}

Network::Network( int inlen, int len1, int len2, int outlen )
:   m_input(),
    m_output(),
    m_layer1(),
    m_layer2(),
    m_errors1(),
    m_errors2(),
    m_errorsOut(),
    m_wt1ToIn(),
    m_delta1ToIn(),
    m_wt2To1(),
    m_delta2To1(),
    m_wtOutTo2(),
    m_deltaOutTo2()
{
    Init();
    BuildNetwork( inlen, len1, len2, outlen );
}
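A minimal construction sketch, using only the declarations visible in this file (the layer sizes are illustrative):

    #include <spl/math/NeuralNetwork.h>

    // 2 inputs, hidden layers of 4 and 3 nodes, 1 output.  The
    // constructor randomizes all weights via BuildNetwork().
    Network net( 2, 4, 3, 1 );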

Network::Network( const Network& net )
:   m_input(),
    m_output(),
    m_layer1(),
    m_layer2(),
    m_errors1(),
    m_errors2(),
    m_errorsOut(),
    m_wt1ToIn(),
    m_delta1ToIn(),
    m_wt2To1(),
    m_delta2To1(),
    m_wtOutTo2(),
    m_deltaOutTo2()
{
    Init();
    BuildNetwork( 1, 1, 1, 1 );

    *this = net;
}

Network::Network( int id, Connection& conn )
:   m_input(),
    m_output(),
    m_layer1(),
    m_layer2(),
    m_errors1(),
    m_errors2(),
    m_errorsOut(),
    m_wt1ToIn(),
    m_delta1ToIn(),
    m_wt2To1(),
    m_delta2To1(),
    m_wtOutTo2(),
    m_deltaOutTo2()
{
    Init();

    CommandPtr cmd = conn.CreateCommand( "selNetwork" );
    cmd->CreateParameter("_NETWORK_ID", id);
    RecordSetPtr rs = cmd->ExecuteQuery();
    if ( ! rs->Next() )
    {
        throw NeuralNetworkException("Network not found");
    }

    m_networkId = id;
    m_learnRate = rs->GetColumn("LEARN_RATE_NUM")->GetFloat64();
    m_momentum = rs->GetColumn("MOMENTUM_NUM")->GetFloat64();
    m_wtRange = rs->GetColumn("WEIGHT_RANGE_NUM")->GetFloat64();
    m_useAdaptiveLR = rs->GetColumn("ADAPT_LEARN_RATE_IND")->GetBit();
    m_useAdaptiveMom = rs->GetColumn("ADAPT_MOM_IND")->GetBit();
    m_previousError = rs->GetColumn("PREV_ERROR_NUM")->GetFloat64();
    int input = rs->GetColumn("IN_NODE_NUM")->GetInt32();
    int h1 = rs->GetColumn("L1_NODE_NUM")->GetInt32();
    int h2 = rs->GetColumn("L2_NODE_NUM")->GetInt32();
    int output = rs->GetColumn("OUT_NODE_NUM")->GetInt32();

    BuildNetwork( input, h1, h2, output );

    cmd = conn.CreateCommand("selNodes");
    cmd->CreateParameter("_NETWORK_ID", id);

    rs = cmd->ExecuteQuery();
    if ( 0 == rs->RowCount() )
    {
        // no saved node values; just use the newly created network
        return;
    }
    while ( rs->Next() )
    {
        int idx = rs->GetColumn("INDEX_NUM")->GetInt32();
        double val = rs->GetColumn("VAL_NUM")->GetFloat64();

        switch ( rs->GetColumn("LAYER_NUM")->GetInt32() )
        {
            case NET_INPUT:
                m_input[idx] = val;
                break;
            case NET_L1:
                m_layer1[idx] = val;
                break;
            case NET_L2:
                m_layer2[idx] = val;
                break;
            case NET_OUTPUT:
                m_output[idx] = val;
                break;
            default:
                throw NeuralNetworkException("Unexpected layer num in nodes");
        }
    }

    cmd = conn.CreateCommand("selConns");
    cmd->CreateParameter("_NETWORK_ID", id);

    rs = cmd->ExecuteQuery();
    if ( 0 == rs->RowCount() )
    {
        throw NeuralNetworkException("Connections for network not found");
    }
    while ( rs->Next() )
    {
        int idx = rs->GetColumn("INDEX_NUM")->GetInt32();
        double weight = rs->GetColumn("WEIGHT_NUM")->GetFloat64();
        double delta = rs->GetColumn("DELTA_NUM")->GetFloat64();

        switch ( rs->GetColumn("LAYER_NUM")->GetByte() )
        {
            case NET_L1:
                m_wt1ToIn[idx] = weight;
                m_delta1ToIn[idx] = delta;
                break;
            case NET_L2:
                m_wt2To1[idx] = weight;
                m_delta2To1[idx] = delta;
                break;
            case NET_OUTPUT:
                m_wtOutTo2[idx] = weight;
                m_deltaOutTo2[idx] = delta;
                break;
            default:
                throw NeuralNetworkException("Unexpected layer num in connections");
        }
    }
}

Network::~Network()
{
    DeleteArrays();
}

Network& Network::operator =(const Network& net)
{
    // guard against self-assignment, which would otherwise reallocate
    // the source arrays in BuildNetwork before they could be copied
    if ( this == &net )
    {
        return *this;
    }

    DeleteArrays();

    Init();
    BuildNetwork( net.m_inputLen, net.m_h1len, net.m_h2len, net.m_outputLen );

    int x;
    for (x = 0; x < m_inputLen; x++)
    {
        m_input[x] = net.m_input[x];
    }
    for (x = 0; x < m_h1len; x++)
    {
        m_layer1[x] = net.m_layer1[x];
    }
    for (x = 0; x < m_h2len; x++)
    {
        m_layer2[x] = net.m_layer2[x];
    }
    for (x = 0; x < m_outputLen; x++)
    {
        m_output[x] = net.m_output[x];
    }

    for (x = 0; x < wt1ToInLen(); x++)
    {
        m_wt1ToIn[x] = net.m_wt1ToIn[x];
        m_delta1ToIn[x] = net.m_delta1ToIn[x];
    }
    for (x = 0; x < wt2To1Len(); x++)
    {
        m_wt2To1[x] = net.m_wt2To1[x];
        m_delta2To1[x] = net.m_delta2To1[x];
    }
    for (x = 0; x < wtOutTo2Len(); x++)
    {
        m_wtOutTo2[x] = net.m_wtOutTo2[x];
        m_deltaOutTo2[x] = net.m_deltaOutTo2[x];
    }

    return *this;
}

void Network::ActivateLayer(Array<double>& layer1, Array<double>& layer2, Array<double>& wt)
{
    double dSum;
    int iWtPos = 0;

    for (int x = 0; x < layer2.Length(); x++)
    {
        dSum = 0.0;

        // calculate the output for this node: sum weight * output over
        // all of the source-layer nodes connected to it.  Note that the
        // weights are stored flat and consumed sequentially via iWtPos.
        for (int y = 0; y < layer1.Length(); y++)
        {
            dSum += layer1[y] * wt[iWtPos++];
        }

        // set the activation for the current node.  Alternative
        // squashing functions with a (-1, 1) range:
        //   layer2[x] = (1.0 - exp(-dSum)) / (1.0 + exp(-dSum));
        //   layer2[x] = (1.0 - exp(-2.0*dSum)) / (1.0 + exp(-2.0*dSum));
        // the -1.0 in the exponent changes the logistic slope
        layer2[x] = 1.0 / (1 + exp(- 1.0 * dSum));
    }
}
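In equation form, each destination node x computes a logistic activation over its weighted inputs:

$$a_x = \sigma\Big(\sum_{y=0}^{L-1} \ell_y \, w_{xL+y}\Big), \qquad \sigma(s) = \frac{1}{1+e^{-s}},$$

where $\ell$ is the source layer and $L$ its length.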

double Network::CalcError( const Array<double>& input, const Array<double>& output )
{
    double dErrTotal = 0;
    int x;
    for (x = 0; x < m_inputLen; x++)
    {
        m_input[x] = input[x];
    }
    Activate();

    // calculate the final error
    for (x = 0; x < m_outputLen; x++)
    {
        // the signed error for this output node
        double err = output[x] - m_output[x];

        // accumulate the squared error
        dErrTotal += err * err;
    }
    return dErrTotal;
}
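CalcError therefore returns the sum of squared errors over the output layer, $E = \sum_x (t_x - o_x)^2$, where $t$ is the target vector and $o$ the network's output; there is no $\tfrac{1}{2}$ factor, so this matches the dErrTotal accumulated in Train below.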

double Network::Train(const Array<double>& input, const Array<double>& output, double dErrTarget, int maxCycles)
{
    int delay = 0;
    int iWtPos;
    double dErrTotal;
    int x;

    ASSERT(input.Length() == m_input.Length());
    ASSERT(output.Length() == m_output.Length());

    do
    {
        // set the errors to 0
        m_errors1.ClearBinary();
        m_errors2.ClearBinary();
        input.CopyToBinary(m_input);

        // forward pass
        Activate();

        iWtPos = 0;
        dErrTotal = 0.0;

        // calculate the final error
        for (x = 0; x < m_outputLen; x++)
        {
            // the signed error for this output node
            m_errorsOut[x] = output[x] - m_output[x];

            // accumulate the squared error
            dErrTotal += m_errorsOut[x] * m_errorsOut[x];

            // back prop the error, update the weights
            for (int y = 0; y < m_h2len; y++)
            {
                // back prop the error
                m_errors2[y] += m_errorsOut[x] * m_wtOutTo2[iWtPos];

                // update the delta and weight
                m_wtOutTo2[iWtPos] += (m_deltaOutTo2[iWtPos] = m_learnRate * m_errorsOut[x] * m_layer2[y] + m_momentum * m_deltaOutTo2[iWtPos]);

                // guard against exploding weights
                if (m_wtOutTo2[iWtPos] > 100.0 || m_wtOutTo2[iWtPos] < -100.0)
                {
                    throw NeuralNetworkException("Exploding weights");
                }
                if (m_wtOutTo2[iWtPos] == 0.0)
                {
                    throw NeuralNetworkException("Weight underflow");
                }
                iWtPos++;
            }
        }
        iWtPos = 0;
        // hidden layer 2: scale the error by the activation derivative.
        // With the logistic activation the output is always in (0, 1),
        // so only the first branch fires; the negative branch covers the
        // (-1, 1)-range squashing functions noted in ActivateLayer.
        for (x = 0; x < m_h2len; x++)
        {
            if (m_layer2[x] > 0)
            {
                m_errors2[x] *= m_layer2[x] * (1.0 - m_layer2[x]);
            }
            else
            {
                m_errors2[x] *= -m_layer2[x] * (1.0 + m_layer2[x]);
            }
            // back prop the error, update the weights
            for (int y = 0; y < m_h1len; y++)
            {
                m_errors1[y] += m_errors2[x] * m_wt2To1[iWtPos];
                // update the delta and weight
                m_wt2To1[iWtPos] += (m_delta2To1[iWtPos] = m_learnRate * m_errors2[x] * m_layer1[y] + m_momentum * m_delta2To1[iWtPos]);

                // guard against exploding weights
                if (m_wt2To1[iWtPos] > 100.0 || m_wt2To1[iWtPos] < -100.0)
                {
                    throw NeuralNetworkException("Exploding weights");
                }
                if (m_wt2To1[iWtPos] == 0.0)
                {
                    throw NeuralNetworkException("Weight underflow");
                }
                iWtPos++;
            }
        }
        iWtPos = 0;
        // hidden layer 1
        for (x = 0; x < m_h1len; x++)
        {
            if (m_layer1[x] > 0)
            {
                m_errors1[x] *= m_layer1[x] * (1 - m_layer1[x]);
            }
            else
            {
                m_errors1[x] *= -m_layer1[x] * (1 + m_layer1[x]);
            }
            // update the weights; no error is propagated to the input layer
            for (int y = 0; y < m_inputLen; y++)
            {
                // update the delta and weight
                m_wt1ToIn[iWtPos] += (m_delta1ToIn[iWtPos] = m_learnRate * m_errors1[x] * m_input[y] + m_momentum * m_delta1ToIn[iWtPos]);

                // guard against exploding weights
                if (m_wt1ToIn[iWtPos] > 100.0 || m_wt1ToIn[iWtPos] < -100.0)
                {
                    throw NeuralNetworkException("Exploding weights");
                }
                if (m_wt1ToIn[iWtPos] == 0.0)
                {
                    throw NeuralNetworkException("Weight underflow");
                }
                iWtPos++;
            }
        }
        if (m_useAdaptiveLR)
        {
            // nudge the learning rate up while the error is falling,
            // decay it geometrically otherwise
            delay++;
            if (dErrTotal < m_previousError && delay > 10)
            {
                m_learnRate += .00001;
                delay = 0;
            }
            else if (m_learnRate > .02)
            {
                m_learnRate -= .005 * m_learnRate;
            }
            m_previousError = dErrTotal;
        }
    }
    while (dErrTotal > dErrTarget && maxCycles-- > 0);

    return dErrTotal;
}
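A hedged usage sketch for Train, assuming Array<double> is index-assignable as the code above assumes; the pattern values are illustrative:

    // train a 2-4-3-1 network on one input/target pair until the summed
    // squared error drops below 0.01, giving up after 10000 cycles
    Network net( 2, 4, 3, 1 );

    Array<double> in( 2 );
    in[0] = 0.0;
    in[1] = 1.0;

    Array<double> target( 1 );
    target[0] = 1.0;

    double err = net.Train( in, target, 0.01, 10000 );

Note that Train iterates on a single pattern; for a multi-pattern set, alternate the patterns with small maxCycles values so no one pattern dominates the weights.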

void Network::Write( Connection& conn )
{
    int x;
    byte layerNum;
    bool update = true;
    CommandPtr cmd;

    if ( m_networkId < 0 )
    {
        update = false;
        cmd = conn.CreateCommand( "insNetwork" );
        cmd->CreateParameter("_NETWORK_ID", DbSqlType::SQL_TYPE_INT32, ParameterDirection::PARAM_DIR_OUT);
        cmd->CreateParameter("_LEARN_RATE_NUM", m_learnRate);
        cmd->CreateParameter("_MOMENTUM_NUM", m_momentum);
        cmd->CreateParameter("_WEIGHT_RANGE_NUM", m_wtRange);
        cmd->CreateParameter("_ADAPT_LEARN_RATE_IND", m_useAdaptiveLR);
        cmd->CreateParameter("_ADAPT_MOM_IND", m_useAdaptiveMom);
        cmd->CreateParameter("_PREV_ERROR_NUM", m_previousError);
        cmd->CreateParameter("_IN_NODE_NUM", m_inputLen);
        cmd->CreateParameter("_L1_NODE_NUM", m_h1len);
        cmd->CreateParameter("_L2_NODE_NUM", m_h2len);
        cmd->CreateParameter("_OUT_NODE_NUM", m_outputLen);
        cmd->ExecuteNonQuery();

        m_networkId = cmd->GetParameter("_NETWORK_ID")->GetInt32();
        ASSERT( m_networkId >= 0 );
    }

    if ( update )
    {
        cmd = conn.CreateCommand("updNode");
    }
    else
    {
        cmd = conn.CreateCommand("insNode");
    }
    cmd->CreateParameter("_NETWORK_ID", m_networkId);
    cmd->CreateParameter("_LAYER_NUM", DbSqlType::SQL_TYPE_INT8, ParameterDirection::PARAM_DIR_IN);
    cmd->CreateParameter("_INDEX_NUM", DbSqlType::SQL_TYPE_INT32, ParameterDirection::PARAM_DIR_IN);
    cmd->CreateParameter("_VAL_NUM", DbSqlType::SQL_TYPE_FLOAT64, ParameterDirection::PARAM_DIR_IN);

    layerNum = NET_INPUT;
    cmd->GetParameter("_LAYER_NUM")->Set(layerNum);
    for (x = 0; x < m_inputLen; x++)
    {
        cmd->GetParameter("_INDEX_NUM")->Set(x);
        cmd->GetParameter("_VAL_NUM")->Set(m_input[x]);
        cmd->ExecuteNonQuery();
    }
    layerNum = NET_L1;
    cmd->GetParameter("_LAYER_NUM")->Set(layerNum);
    for (x = 0; x < m_h1len; x++)
    {
        cmd->GetParameter("_INDEX_NUM")->Set(x);
        cmd->GetParameter("_VAL_NUM")->Set(m_layer1[x]);
        cmd->ExecuteNonQuery();
    }
    layerNum = NET_L2;
    cmd->GetParameter("_LAYER_NUM")->Set(layerNum);
    for (x = 0; x < m_h2len; x++)
    {
        cmd->GetParameter("_INDEX_NUM")->Set(x);
        cmd->GetParameter("_VAL_NUM")->Set(m_layer2[x]);
        cmd->ExecuteNonQuery();
    }
    layerNum = NET_OUTPUT;
    cmd->GetParameter("_LAYER_NUM")->Set(layerNum);
    for (x = 0; x < m_outputLen; x++)
    {
        cmd->GetParameter("_INDEX_NUM")->Set(x);
        cmd->GetParameter("_VAL_NUM")->Set(m_output[x]);
        cmd->ExecuteNonQuery();
    }

    if ( update )
    {
        cmd = conn.CreateCommand("updConn");
    }
    else
    {
        cmd = conn.CreateCommand("insConn");
    }
    cmd->CreateParameter("_NETWORK_ID", m_networkId);
    cmd->CreateParameter("_LAYER_NUM", DbSqlType::SQL_TYPE_INT8, ParameterDirection::PARAM_DIR_IN);
    cmd->CreateParameter("_INDEX_NUM", DbSqlType::SQL_TYPE_INT32, ParameterDirection::PARAM_DIR_IN);
    cmd->CreateParameter("_WEIGHT_NUM", DbSqlType::SQL_TYPE_FLOAT64, ParameterDirection::PARAM_DIR_IN);
    cmd->CreateParameter("_DELTA_NUM", DbSqlType::SQL_TYPE_FLOAT64, ParameterDirection::PARAM_DIR_IN);

    layerNum = NET_L1;
    cmd->GetParameter("_LAYER_NUM")->Set(layerNum);
    for (x = 0; x < wt1ToInLen(); x++)
    {
        cmd->GetParameter("_INDEX_NUM")->Set(x);
        cmd->GetParameter("_WEIGHT_NUM")->Set(m_wt1ToIn[x]);
        cmd->GetParameter("_DELTA_NUM")->Set(m_delta1ToIn[x]);
        cmd->ExecuteNonQuery();
    }
    layerNum = NET_L2;
    cmd->GetParameter("_LAYER_NUM")->Set(layerNum);
    for (x = 0; x < wt2To1Len(); x++)
    {
        cmd->GetParameter("_INDEX_NUM")->Set(x);
        cmd->GetParameter("_WEIGHT_NUM")->Set(m_wt2To1[x]);
        cmd->GetParameter("_DELTA_NUM")->Set(m_delta2To1[x]);
        cmd->ExecuteNonQuery();
    }
    layerNum = NET_OUTPUT;
    cmd->GetParameter("_LAYER_NUM")->Set(layerNum);
    for (x = 0; x < wtOutTo2Len(); x++)
    {
        cmd->GetParameter("_INDEX_NUM")->Set(x);
        cmd->GetParameter("_WEIGHT_NUM")->Set(m_wtOutTo2[x]);
        cmd->GetParameter("_DELTA_NUM")->Set(m_deltaOutTo2[x]);
        cmd->ExecuteNonQuery();
    }
}

#ifdef DEBUG
void Network::ValidateMem() const
{
    ASSERT_MEM( m_input, m_inputLen * sizeof(double) );
    ASSERT_MEM( m_output, m_outputLen * sizeof(double) );
    ASSERT_MEM( m_layer1, m_h1len * sizeof(double) );
    ASSERT_MEM( m_layer2, m_h2len * sizeof(double) );
    ASSERT_MEM( m_errors1, m_h1len * sizeof(double) );
    ASSERT_MEM( m_errors2, m_h2len * sizeof(double) );
    ASSERT_MEM( m_errorsOut, m_outputLen * sizeof(double) );
    ASSERT_MEM( m_wt1ToIn, m_inputLen * m_h1len * sizeof(double) );
    ASSERT_MEM( m_delta1ToIn, m_inputLen * m_h1len * sizeof(double) );
    ASSERT_MEM( m_wt2To1, m_h1len * m_h2len * sizeof(double) );
    ASSERT_MEM( m_delta2To1, m_h1len * m_h2len * sizeof(double) );
    ASSERT_MEM( m_wtOutTo2, m_outputLen * m_h2len * sizeof(double) );
    ASSERT_MEM( m_deltaOutTo2, m_outputLen * m_h2len * sizeof(double) );
}

void Network::CheckMem() const
{
    DEBUG_NOTE_MEM_ALLOCATION( m_input );
    DEBUG_NOTE_MEM_ALLOCATION( m_output );
    DEBUG_NOTE_MEM_ALLOCATION( m_layer1 );
    DEBUG_NOTE_MEM_ALLOCATION( m_layer2 );
    DEBUG_NOTE_MEM_ALLOCATION( m_errors1 );
    DEBUG_NOTE_MEM_ALLOCATION( m_errors2 );
    DEBUG_NOTE_MEM_ALLOCATION( m_errorsOut );
    DEBUG_NOTE_MEM_ALLOCATION( m_wt1ToIn );
    DEBUG_NOTE_MEM_ALLOCATION( m_delta1ToIn );
    DEBUG_NOTE_MEM_ALLOCATION( m_wt2To1 );
    DEBUG_NOTE_MEM_ALLOCATION( m_delta2To1 );
    DEBUG_NOTE_MEM_ALLOCATION( m_wtOutTo2 );
    DEBUG_NOTE_MEM_ALLOCATION( m_deltaOutTo2 );
}
#endif