
gnn_conjugate_gradient.c

/***************************************************************************
 *  @file gnn_conjugate_gradient.c
 *  @brief Conjugate Gradient Trainer Implementation.
 *
 *  @date   : 07-09-03 12:08, 13-09-03 18:41
 *  @author : Pedro Ortega C. <peortega@dcc.uchile.cl>
 *  Copyright  2003  Pedro Ortega C.
 ****************************************************************************/
/*
 *  This program is free software; you can redistribute it and/or modify
 *  it under the terms of the GNU General Public License as published by
 *  the Free Software Foundation; either version 2 of the License, or
 *  (at your option) any later version.
 *
 *  This program is distributed in the hope that it will be useful,
 *  but WITHOUT ANY WARRANTY; without even the implied warranty of
 *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 *  GNU Library General Public License for more details.
 *
 *  You should have received a copy of the GNU General Public License
 *  along with this program; if not, write to the Free Software
 *  Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
 */


/**
 * @defgroup gnn_conjugate_gradient_doc gnn_conjugate_gradient : Conjugate Gradient Descent Algorithm.
 * @ingroup gnn_trainer_doc
 *
 * This trainer implements the conjugate gradient minimization procedure.
 * The method takes one step after another along special, mutually
 * conjugate directions, minimizing the error function along the current
 * line.
 *
 * The conjugate directions are built sequentially, according to the
 * formula
 *
 * \f[ d_{new} = g_{new} - \beta_{new} d_{old} \f]
 *
 * where \f$d_{new}\f$ is the new search direction, conjugate to the old
 * one \f$d_{old}\f$, and \f$g_{new}\f$ and \f$g_{old}\f$ are the current
 * and the previously computed gradients with respect to the function's
 * parameters. The coefficient \f$\beta\f$ can be computed in different
 * ways. Three forms are provided:
 * - \ref gnn_conjugate_gradient_hestenes_stiefel : the original form,
 *   discovered by Hestenes and Stiefel.
 * - \ref gnn_conjugate_gradient_fletcher_reeves : the Fletcher-Reeves
 *   form.
 * - \ref gnn_conjugate_gradient_polak_ribiere : the Polak-Ribière form.
 *
 * Informally, two directions are conjugate if they are "orthogonal" with
 * respect to the second-order Taylor approximation of the error surface,
 * i.e. \f$ d_{new}^T H \, d_{old} = 0 \f$, where \f$H\f$ is the Hessian
 * of the error function.
 *
 * After choosing the direction, the algorithm seeks the minimum along this
 * line. In practice this is performed by a line-search procedure; any of
 * the available line-search procedures can be installed into the training
 * algorithm. Please refer to \ref gnn_line_search for further details.
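 *
 * A minimal usage sketch follows. The constructor and setters below are
 * taken from this module; the gnn_node/gnn_criterion/gnn_dataset setup
 * and the gnn_trainer_train() driver call are assumptions about the rest
 * of the library.
 *
 * \code
 * gnn_trainer *trainer;
 * size_t i;
 *
 * // create the trainer (node, crit and data built elsewhere)
 * trainer = gnn_conjugate_gradient_new (node, crit, data);
 *
 * // choose the Polak-Ribière form for the beta coefficient
 * gnn_conjugate_gradient_set_beta (trainer,
 *                                  gnn_conjugate_gradient_polak_ribiere);
 *
 * // tighten the line-search tolerance
 * gnn_conjugate_gradient_set_tol (trainer, 1e-4);
 *
 * // run a fixed number of training iterations (driver call assumed)
 * for (i = 0; i < 1000; i++)
 *     gnn_trainer_train (trainer);
 *
 * gnn_trainer_destroy (trainer);
 * \endcode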
 */



/******************************************/
/* Include Files                          */
/******************************************/

#include <assert.h>   /* assert() used below */
#include <math.h>
#include <stdlib.h>   /* malloc() used below */
#include "gnn_conjugate_gradient.h"



/******************************************/
/* Static Declaration                     */
/******************************************/

static int
gnn_conjugate_gradient_reset (gnn_trainer *trainer);

static int
gnn_conjugate_gradient_train (gnn_trainer *trainer);

static void
gnn_conjugate_gradient_destroy (gnn_trainer *trainer);




/******************************************/
/* Static Implementation                  */
/******************************************/

/**
 * @brief The trainer's "reset" implementation.
 * @ingroup gnn_conjugate_gradient_doc
 *
 * @param  trainer A pointer to a \ref gnn_conjugate_gradient.
 * @return Returns 0 if succeeded.
 */
static int
gnn_conjugate_gradient_reset (gnn_trainer *trainer)
{
    gnn_conjugate_gradient *cgtrainer;

    assert (trainer != NULL);

    cgtrainer = (gnn_conjugate_gradient *) trainer;

    /* reset iteration counter */
    cgtrainer->iteration = 0;

    /* clear optimization information */
    gsl_vector_set_zero (cgtrainer->buf);
    gsl_vector_set_zero (cgtrainer->gnew);
    gsl_vector_set_zero (cgtrainer->gold);

    return 0;
}

/**
 * @brief The trainer's "train" implementation.
 * @ingroup gnn_conjugate_gradient_doc
 *
 * @param  trainer A pointer to a \ref gnn_conjugate_gradient.
 * @return Returns 0 if succeeded.
 */
static int
gnn_conjugate_gradient_train (gnn_trainer *trainer)
{
    double alpha;
    double beta;
    double ax, bx, cx;
    double fa, fb, fc;
    size_t s, n;
    gnn_conjugate_gradient *cg;

    /* get view */
    cg = (gnn_conjugate_gradient *) trainer;

    /* process minibatch */
    gnn_trainer_batch_process (trainer);

    /* copy gradient */
    gsl_vector_memcpy (cg->gnew, gnn_trainer_batch_get_dw (trainer));

    /* check if the algorithm should be restarted */
    if (cg->iteration % cg->restart == 0)
    {
        /* initialize direction */
        gsl_vector_memcpy (cg->line->d, cg->gnew);
    }
    else
    {
        /* compute beta */
        beta = cg->beta (trainer);
        if (beta < 0.0)
        {
            /* beta is negative: conjugacy has broken down, so discard
               the old direction and restart from the gradient */
            gsl_vector_memcpy (cg->line->d, cg->gnew);
        }
        else
        {
            /* beta is positive, compute new direction */
            gsl_vector_scale (cg->line->d, beta);
            gsl_vector_sub (cg->line->d, cg->gnew);
        }
    }

    /* perform line search over the current minibatch's patterns */
    ax = 0.0;
    bx = cg->step;
    s = gnn_trainer_get_pattern_index (trainer);
    n = gnn_trainer_batch_get_size (trainer);

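    /* bracket a minimum along the search line: on return ax, bx, cx are
       assumed to satisfy fa > fb < fc, so a minimum lies inside [ax, cx];
       the installed line search (cg->alpha) then locates it to within tol */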
    gnn_line_search_bracket (cg->line, s, n, &ax, &bx, &cx, &fa, &fb, &fc);
    cg->alpha (cg->line, s, n, ax, bx, cx, &alpha, cg->tol);

    /* build new point */
    gsl_vector_memcpy (cg->buf, cg->line->d);
    gsl_vector_scale (cg->buf, alpha);

    /* sum to parameters */
    gsl_vector_add (cg->line->w, cg->buf);

    /* update parameters */
    gnn_node_param_set (trainer->node, cg->line->w);

    /* store old gradient */
    gsl_vector_memcpy (cg->gold, cg->gnew);

    /* move to next minibatch */
    gnn_trainer_batch_next (trainer);

    /* update iteration counter */
    cg->iteration++;

    return 0;
}

/**
 * @brief The trainer's "destroy" implementation.
 * @ingroup gnn_conjugate_gradient_doc
 *
 * @param trainer A pointer to a \ref gnn_conjugate_gradient.
 */
static void
gnn_conjugate_gradient_destroy (gnn_trainer *trainer)
{
    gnn_conjugate_gradient *cgtrainer;

    assert (trainer != NULL);

    cgtrainer = (gnn_conjugate_gradient *) trainer;

    if (cgtrainer->buf != NULL)
        gsl_vector_free (cgtrainer->buf);

    if (cgtrainer->gnew != NULL)
        gsl_vector_free (cgtrainer->gnew);

    if (cgtrainer->gold != NULL)
        gsl_vector_free (cgtrainer->gold);

    if (cgtrainer->line != NULL)
        gnn_line_destroy (cgtrainer->line);

    return;
}



/******************************************/
/* Public Interface                       */
/******************************************/

/**
 * @brief Creates a new conjugate gradient descent trainer.
 * @ingroup gnn_conjugate_gradient_doc
 *
 * This function creates a new conjugate gradients trainer
 * (\ref gnn_conjugate_gradient).
 *
 * @param  node A pointer to a \ref gnn_node.
 * @param  crit A pointer to a \ref gnn_criterion.
 * @param  data A pointer to a \ref gnn_dataset.
 * @return Returns a pointer to a new \ref gnn_conjugate_gradient trainer.
 */
gnn_trainer *
gnn_conjugate_gradient_new (gnn_node *node,
                            gnn_criterion *crit,
                            gnn_dataset *data)
{
    int status;
    size_t l;
    gnn_trainer *trainer;
    gnn_conjugate_gradient *cgtrainer;

    /* allocate memory for the trainer */
    cgtrainer =
            (gnn_conjugate_gradient *) malloc (sizeof (gnn_conjugate_gradient));
    if (cgtrainer == NULL)
    {
        GSL_ERROR_VAL ("couldn't allocate memory for gnn_conjugate_gradient",
                       GSL_ENOMEM, NULL);
    }

    /* get view as gnn_trainer */
    trainer = (gnn_trainer *) cgtrainer;

    /* initialize */
    status = gnn_trainer_init (trainer,
                               "gnn_conjugate_gradient",
                               node,
                               crit,
                               data,
                               gnn_conjugate_gradient_reset,
                               gnn_conjugate_gradient_train,
                               gnn_conjugate_gradient_destroy);
    if (status)
    {
        GSL_ERROR_VAL ("couldn't initialize gnn_conjugate_gradient",
                       GSL_EFAILED, NULL);
    }

    /* set fields */
    cgtrainer->step      = GNN_CONJUGATE_GRADIENT_STEP;
    cgtrainer->tol       = GNN_CONJUGATE_GRADIENT_TOL;
    cgtrainer->iteration = 0;
    cgtrainer->restart   = GNN_CONJUGATE_GRADIENT_RESTART;

    cgtrainer->alpha = GNN_CONJUGATE_GRADIENT_ALPHA;
    cgtrainer->beta  = GNN_CONJUGATE_GRADIENT_BETA;

    l = gnn_node_param_get_size (node);
    cgtrainer->gnew = gsl_vector_alloc (l);
    cgtrainer->gold = gsl_vector_alloc (l);
    cgtrainer->buf  = gsl_vector_alloc (l);
    cgtrainer->line = gnn_line_new (trainer->grad, NULL);

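    /* if any allocation failed, tear the trainer down; the destroy
       callback installed above NULL-checks every buffer, so a partial
       allocation is released safely (assuming gnn_trainer_destroy
       dispatches to the installed destroy callback) */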
    if (   cgtrainer->buf  == NULL
        || cgtrainer->gnew == NULL
        || cgtrainer->gold == NULL
        || cgtrainer->line == NULL )
    {
        gnn_trainer_destroy (trainer);
        GSL_ERROR_VAL ("couldn't allocate memory for gnn_conjugate_gradient",
                       GSL_ENOMEM, NULL);
    }

    return trainer;
}



/**
 * @brief The Polak-Ribière form for the \f$\beta\f$ coefficient.
 * @ingroup gnn_conjugate_gradient_doc
 *
 * This function returns the Polak-Ribière form of the \f$\beta\f$ coefficient
 * for the evaluation of the new conjugate direction:
 *
 * \f[ d_{new} = g_{new} - \beta_{new} d_{old} \f]
 *
 * where
 *
 * \f[ \beta_{new} = \frac{g_{new}^T (g_{new} - g_{old})}
 *                        {g_{old}^T g_{old}}
 * \f]
 *
 * @param trainer A pointer to a \ref gnn_conjugate_gradient.
 * @return Returns the \f$\beta\f$ coefficient.
 */
double
gnn_conjugate_gradient_polak_ribiere (gnn_trainer *trainer)
{
    double beta_u;
    double beta_l;
    gnn_conjugate_gradient *cgtrainer;

    /* get view */
    cgtrainer = (gnn_conjugate_gradient *) trainer;

    /* compute beta */
    gsl_vector_memcpy (cgtrainer->buf, cgtrainer->gnew);
    gsl_vector_sub (cgtrainer->buf, cgtrainer->gold);
    gsl_blas_ddot (cgtrainer->gnew, cgtrainer->buf, &beta_u);
    gsl_blas_ddot (cgtrainer->gold, cgtrainer->gold, &beta_l);

    return ( beta_u / beta_l );
}

/**
 * @brief The Hestenes-Stiefel form for the \f$\beta\f$ coefficient.
 * @ingroup gnn_conjugate_gradient_doc
 *
 * This function returns the Hestenes-Stiefel form of the \f$\beta\f$
 * coefficient for the evaluation of the new conjugate direction:
 *
 * \f[ d_{new} = g_{new} - \beta_{new} d_{old} \f]
 *
 * where
 *
 * \f[ \beta_{new} = \frac{g_{new}^T (g_{new} - g_{old})}
 *                        {d_{old}^T (g_{new} - g_{old})}
 * \f]
 *
 * @param trainer A pointer to a \ref gnn_conjugate_gradient.
 * @return Returns the \f$\beta\f$ coefficient.
 */
double
gnn_conjugate_gradient_hestenes_stiefel (gnn_trainer *trainer)
{
    double beta_u;
    double beta_l;
    gnn_conjugate_gradient *cgtrainer;

    /* get view */
    cgtrainer = (gnn_conjugate_gradient *) trainer;

    /* compute beta */
    gsl_vector_memcpy (cgtrainer->buf, cgtrainer->gnew);
    gsl_vector_sub (cgtrainer->buf, cgtrainer->gold);

    gsl_blas_ddot (cgtrainer->gnew, cgtrainer->buf, &beta_u);
    gsl_blas_ddot (cgtrainer->line->d, cgtrainer->buf, &beta_l);

    return ( beta_u / beta_l );
}

/**
 * @brief The Fletcher-Reeves form for the \f$\beta\f$ coefficient.
 * @ingroup gnn_conjugate_gradient_doc
 *
 * This function returns the Fletcher-Reeves form of the \f$\beta\f$
 * coefficient for the evaluation of the new conjugate direction:
 *
 * \f[ d_{new} = g_{new} - \beta_{new} d_{old} \f]
 *
 * where
 *
 * \f[ \beta_{new} = \frac{g_{new}^T g_{new}}
 *                        {g_{old}^T g_{old}}
 * \f]
 *
 * @param trainer A pointer to a \ref gnn_conjugate_gradient.
 * @return Returns the \f$\beta\f$ coefficient.
 */
double
gnn_conjugate_gradient_fletcher_reeves (gnn_trainer *trainer)
{
    double beta_u;
    double beta_l;
    gnn_conjugate_gradient *cgtrainer;

    /* get view */
    cgtrainer = (gnn_conjugate_gradient *) trainer;

    /* compute beta */
    gsl_blas_ddot (cgtrainer->gnew, cgtrainer->gnew, &beta_u);
    gsl_blas_ddot (cgtrainer->gold, cgtrainer->gold, &beta_l);

    return ( beta_u / beta_l );
}

/**
 * @brief Sets the precision tolerance for the line search procedure.
 * @ingroup gnn_conjugate_gradient_doc
 *
 * @param trainer A pointer to a \ref gnn_conjugate_gradient.
 * @param tol A strictly positive real value.
 * @return Returns 0 if succeeded.
 */
int
gnn_conjugate_gradient_set_tol (gnn_trainer *trainer, double tol)
{
    gnn_conjugate_gradient *cg;

    assert (trainer != NULL);

    /* check value */
    if (tol <= 0.0)
    {
        GSL_ERROR ("tolerance should be strictly greater than zero",
                   GSL_EINVAL);
    }

    /* set value */
    cg = (gnn_conjugate_gradient *) trainer;
    cg->tol = tol;

    return 0;
}

/**
 * @brief Gets the tolerance for the line search procedure.
 * @ingroup gnn_conjugate_gradient_doc
 *
 * @param trainer A pointer to a \ref gnn_conjugate_gradient.
 * @return Returns the tolerance's value.
 */
double
gnn_conjugate_gradient_get_tol (gnn_trainer *trainer)
{
    gnn_conjugate_gradient *cg;

    assert (trainer != NULL);

    cg = (gnn_conjugate_gradient *) trainer;

    return cg->tol;
}

/**
 * @brief Sets the initial step for the interval bracketing procedure.
 * @ingroup gnn_conjugate_gradient_doc
 *
 * @param trainer A pointer to a \ref gnn_conjugate_gradient.
 * @param step A strictly positive real value.
 * @return Returns 0 if succeeded.
 */
int
gnn_conjugate_gradient_set_step (gnn_trainer *trainer, double step)
{
    gnn_conjugate_gradient *cg;

    assert (trainer != NULL);

    /* check value */
    if (step <= 0.0)
    {
        GSL_ERROR ("step should be strictly greater than zero",
                   GSL_EINVAL);
    }

    /* set value */
    cg = (gnn_conjugate_gradient *) trainer;
    cg->step = step;

    return 0;
}

/**
 * @brief Gets the initial step for the interval bracketing procedure.
 * @ingroup gnn_conjugate_gradient_doc
 *
 * @param trainer A pointer to a \ref gnn_conjugate_gradient.
 * @return The trainer's internal step.
 */
double
gnn_conjugate_gradient_get_step (gnn_trainer *trainer)
{
    gnn_conjugate_gradient *cg;

    assert (trainer != NULL);

    cg = (gnn_conjugate_gradient *) trainer;

    return cg->step;
}

/**
 * @brief Sets the number of iterations before restarting.
 * @ingroup gnn_conjugate_gradient_doc
 *
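 * Every @a restart iterations the search direction is reinitialized to
 * the current gradient (see the "train" implementation above). A short
 * configuration sketch, using only setters defined in this module, with
 * illustrative values:
 *
 * \code
 * gnn_trainer *trainer;
 * trainer = gnn_conjugate_gradient_new (node, crit, data);
 *
 * // restart the search direction every 10 iterations
 * gnn_conjugate_gradient_set_restart (trainer, 10);
 *
 * // initial bracketing step and line-search tolerance
 * gnn_conjugate_gradient_set_step (trainer, 0.1);
 * gnn_conjugate_gradient_set_tol (trainer, 1e-4);
 * \endcode
 *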
 * @param trainer A pointer to a \ref gnn_conjugate_gradient.
 * @param restart A strictly positive integer.
 * @return Returns 0 if succeeded.
 */
int
gnn_conjugate_gradient_set_restart (gnn_trainer *trainer, size_t restart)
{
    gnn_conjugate_gradient *cg;

    assert (trainer != NULL);

    /* check value (restart is unsigned, so only zero is invalid) */
    if (restart == 0)
    {
        GSL_ERROR ("restart iteration should be strictly greater than zero",
                   GSL_EINVAL);
    }

    /* set value */
    cg = (gnn_conjugate_gradient *) trainer;
    cg->restart = restart;

    return 0;
}

/**
 * @brief Gets the number of iterations before reinitializing the direction.
 * @ingroup gnn_conjugate_gradient_doc
 *
 * This function returns the number of iterations executed by the conjugate
 * gradients trainer before reinitializing the search direction.
 *
 * @param trainer A pointer to a \ref gnn_conjugate_gradient.
 * @return Returns the number of iterations.
 */
size_t
gnn_conjugate_gradient_get_restart (gnn_trainer *trainer)
{
    gnn_conjugate_gradient *cg;

    assert (trainer != NULL);

    cg = (gnn_conjugate_gradient *) trainer;

    return cg->restart;
}

/**
 * @brief Sets the line search procedure.
 * @ingroup gnn_conjugate_gradient_doc
 *
 * This function sets a new line search procedure used by the conjugate
 * gradients trainer.
 *
 * \code
 * gnn_trainer *trainer;
 * trainer = gnn_conjugate_gradient_new (node, crit, data);
 *
 * // use the Golden-Section line search
 * gnn_conjugate_gradient_set_line_search (trainer, gnn_line_search_golden);
 * \endcode
 *
 * Please refer to \ref gnn_line_search_doc for the available line search
 * procedures.
 *
 * @param trainer A pointer to a \ref gnn_conjugate_gradient.
 * @param lsearch A pointer to a line search procedure.
 * @return Returns 0 if succeeded.
 */
int
gnn_conjugate_gradient_set_line_search (gnn_trainer *trainer,
                                        gnn_line_search_type lsearch)
{
    gnn_conjugate_gradient *cg;

    assert (trainer != NULL);
    assert (lsearch != NULL);

    /* set value */
    cg = (gnn_conjugate_gradient *) trainer;
    cg->alpha = lsearch;

    return 0;
}

/**
 * @brief Gets the installed line search procedure.
 * @ingroup gnn_conjugate_gradient_doc
 *
 * This function returns the line search procedure installed with
 * \ref gnn_conjugate_gradient_set_line_search (stored in the trainer's
 * @c alpha field).
 *
 * @param trainer A pointer to a \ref gnn_conjugate_gradient.
 * @return Returns a pointer to the installed line-search procedure.
 */
gnn_line_search_type
gnn_conjugate_gradient_get_alpha (gnn_trainer *trainer)
{
    gnn_conjugate_gradient *cg;

    assert (trainer != NULL);

    cg = (gnn_conjugate_gradient *) trainer;

    return cg->alpha;
}

/**
 * @brief Sets the form of the \f$\beta\f$ coefficient.
 * @ingroup gnn_conjugate_gradient_doc
 *
 * This function sets a new form for evaluating the \f$\beta\f$ coefficient
 * used by the conjugate gradients method to build the new search direction.
 *
 * \code
 * gnn_trainer *trainer;
 * trainer = gnn_conjugate_gradient_new (node, crit, data);
 *
 * // use the Fletcher-Reeves form
 * gnn_conjugate_gradient_set_beta (trainer,
 *                                  gnn_conjugate_gradient_fletcher_reeves);
 * \endcode
 *
 * @param trainer A pointer to a \ref gnn_conjugate_gradient.
 * @param beta A pointer to a \f$\beta\f$ evaluation procedure.
 * @return Returns 0 if succeeded.
 */
int
gnn_conjugate_gradient_set_beta (gnn_trainer *trainer,
                                 gnn_conjugate_gradient_beta beta)
{
    gnn_conjugate_gradient *cg;

    assert (trainer != NULL);
    assert (beta != NULL);

    /* set value */
    cg = (gnn_conjugate_gradient *) trainer;
    cg->beta = beta;

    return 0;
}

/**
 * @brief Gets the \f$\beta\f$ evaluation function.
 * @ingroup gnn_conjugate_gradient_doc
 *
 * This function returns a pointer to the installed \f$\beta\f$ evaluation
 * procedure.
 *
 * @param trainer A pointer to a \ref gnn_conjugate_gradient.
 * @return A pointer to a \f$\beta\f$ function.
 */
gnn_conjugate_gradient_beta
gnn_conjugate_gradient_get_beta (gnn_trainer *trainer)
{
    gnn_conjugate_gradient *cg;

    assert (trainer != NULL);

    cg = (gnn_conjugate_gradient *) trainer;

    return cg->beta;
}

