stan::services::optimize Namespace Reference

Classes

struct  history_size
 
struct  init_alpha
 
struct  iter
 
struct  save_iterations
 
struct  tol_grad
 
struct  tol_obj
 
struct  tol_param
 
struct  tol_rel_grad
 
struct  tol_rel_obj
 

Functions

template<class Model >
int bfgs (Model &model, stan::io::var_context &init, unsigned int random_seed, unsigned int chain, double init_radius, double init_alpha, double tol_obj, double tol_rel_obj, double tol_grad, double tol_rel_grad, double tol_param, int num_iterations, bool save_iterations, int refresh, callbacks::interrupt &interrupt, callbacks::logger &logger, callbacks::writer &init_writer, callbacks::writer &parameter_writer)
 
template<class Model >
int lbfgs (Model &model, stan::io::var_context &init, unsigned int random_seed, unsigned int chain, double init_radius, int history_size, double init_alpha, double tol_obj, double tol_rel_obj, double tol_grad, double tol_rel_grad, double tol_param, int num_iterations, bool save_iterations, int refresh, callbacks::interrupt &interrupt, callbacks::logger &logger, callbacks::writer &init_writer, callbacks::writer &parameter_writer)
 
template<class Model >
int newton (Model &model, stan::io::var_context &init, unsigned int random_seed, unsigned int chain, double init_radius, int num_iterations, bool save_iterations, callbacks::interrupt &interrupt, callbacks::logger &logger, callbacks::writer &init_writer, callbacks::writer &parameter_writer)
 

Function Documentation

template<class Model >
int stan::services::optimize::bfgs(Model &model,
                                   stan::io::var_context &init,
                                   unsigned int random_seed,
                                   unsigned int chain,
                                   double init_radius,
                                   double init_alpha,
                                   double tol_obj,
                                   double tol_rel_obj,
                                   double tol_grad,
                                   double tol_rel_grad,
                                   double tol_param,
                                   int num_iterations,
                                   bool save_iterations,
                                   int refresh,
                                   callbacks::interrupt &interrupt,
                                   callbacks::logger &logger,
                                   callbacks::writer &init_writer,
                                   callbacks::writer &parameter_writer)

Runs the BFGS algorithm for a model.

Template Parameters
    Model              A model implementation

Parameters
    [in]      model             Input model to test (with data already instantiated)
    [in]      init              var context for initialization
    [in]      random_seed       random seed for the random number generator
    [in]      chain             chain id to advance the pseudo random number generator
    [in]      init_radius       radius for random initialization of the unconstrained parameters
    [in]      init_alpha        line search step size for the first iteration
    [in]      tol_obj           convergence tolerance on absolute changes in objective function value
    [in]      tol_rel_obj       convergence tolerance on relative changes in objective function value
    [in]      tol_grad          convergence tolerance on the norm of the gradient
    [in]      tol_rel_grad      convergence tolerance on the relative norm of the gradient
    [in]      tol_param         convergence tolerance on changes in parameter value
    [in]      num_iterations    maximum number of iterations
    [in]      save_iterations   indicates whether all the iterations should be saved to the parameter_writer
    [in]      refresh           how often to write output to the logger
    [in,out]  interrupt         callback to be called every iteration
    [in,out]  logger            Logger for messages
    [in,out]  init_writer       Writer callback for unconstrained inits
    [in,out]  parameter_writer  output for parameter values

Returns
    error_codes::OK if successful; error_codes::SOFTWARE if the optimizer terminated with an error

Definition at line 54 of file bfgs.hpp.

References stan::services::util::create_rng(), stan::services::error_codes::OK, stan::services::error_codes::SOFTWARE, and the stan::optimization::BFGSMinimizer< FunctorType, QNUpdateType, Scalar, DimAtCompile > members _conv_opts, _ls_opts, alpha(), alpha0(), curr_g(), get_code_string(), iter_num(), note(), prev_step_size(), and step().

Referenced by TEST(), and TEST_F().

{
  boost::ecuyer1988 rng = util::create_rng(random_seed, chain);

  std::vector<int> disc_vector;
  std::vector<double> cont_vector
      = util::initialize<false>(model, init, rng, init_radius, false,
                                logger, init_writer);

  std::stringstream bfgs_ss;
  // Optimizer is a typedef (elided from this listing) that resolves to
  // stan::optimization::BFGSMinimizer< stan::optimization::ModelAdaptor< Model >,
  //                                    stan::optimization::BFGSUpdate_HInv<> >.
  Optimizer bfgs(model, cont_vector, disc_vector, &bfgs_ss);

  // Map the service arguments onto the minimizer's line-search and
  // convergence options.
  bfgs._ls_opts.alpha0 = init_alpha;
  bfgs._conv_opts.tolAbsF = tol_obj;
  bfgs._conv_opts.tolRelF = tol_rel_obj;
  bfgs._conv_opts.tolAbsGrad = tol_grad;
  bfgs._conv_opts.tolRelGrad = tol_rel_grad;
  bfgs._conv_opts.tolAbsX = tol_param;
  bfgs._conv_opts.maxIts = num_iterations;

  double lp = bfgs.logp();

  std::stringstream initial_msg;
  initial_msg << "Initial log joint probability = " << lp;
  logger.info(initial_msg);

  // Header row for the parameter writer: lp__ followed by the constrained
  // parameter names.
  std::vector<std::string> names;
  names.push_back("lp__");
  model.constrained_param_names(names, true, true);
  parameter_writer(names);

  if (save_iterations) {
    std::vector<double> values;
    std::stringstream msg;
    model.write_array(rng, cont_vector, disc_vector, values,
                      true, true, &msg);
    if (msg.str().length() > 0)
      logger.info(msg);

    values.insert(values.begin(), lp);
    parameter_writer(values);
  }
  int ret = 0;

  while (ret == 0) {
    interrupt();
    if (refresh > 0
        && (bfgs.iter_num() == 0
            || ((bfgs.iter_num() + 1) % refresh == 0)))
      logger.info(" Iter"
                  " log prob"
                  " ||dx||"
                  " ||grad||"
                  " alpha"
                  " alpha0"
                  " # evals"
                  " Notes ");

    ret = bfgs.step();
    lp = bfgs.logp();
    bfgs.params_r(cont_vector);

    if (refresh > 0
        && (ret != 0
            || !bfgs.note().empty()
            || bfgs.iter_num() == 0
            || ((bfgs.iter_num() + 1) % refresh == 0))) {
      std::stringstream msg;
      msg << " " << std::setw(7) << bfgs.iter_num() << " ";
      msg << " " << std::setw(12) << std::setprecision(6)
          << lp << " ";
      msg << " " << std::setw(12) << std::setprecision(6)
          << bfgs.prev_step_size() << " ";
      msg << " " << std::setw(12) << std::setprecision(6)
          << bfgs.curr_g().norm() << " ";
      msg << " " << std::setw(10) << std::setprecision(4)
          << bfgs.alpha() << " ";
      msg << " " << std::setw(10) << std::setprecision(4)
          << bfgs.alpha0() << " ";
      msg << " " << std::setw(7)
          << bfgs.grad_evals() << " ";
      msg << " " << bfgs.note() << " ";
      logger.info(msg);
    }

    if (bfgs_ss.str().length() > 0) {
      logger.info(bfgs_ss);
      bfgs_ss.str("");
    }

    if (save_iterations) {
      std::vector<double> values;
      std::stringstream msg;
      model.write_array(rng, cont_vector, disc_vector, values,
                        true, true, &msg);
      // This if is here to match the pre-refactor behavior
      if (msg.str().length() > 0)
        logger.info(msg);

      values.insert(values.begin(), lp);
      parameter_writer(values);
    }
  }

  if (!save_iterations) {
    std::vector<double> values;
    std::stringstream msg;
    model.write_array(rng, cont_vector, disc_vector, values,
                      true, true, &msg);
    if (msg.str().length() > 0)
      logger.info(msg);
    values.insert(values.begin(), lp);
    parameter_writer(values);
  }

  int return_code;
  if (ret >= 0) {
    logger.info("Optimization terminated normally: ");
    return_code = error_codes::OK;
  } else {
    logger.info("Optimization terminated with error: ");
    return_code = error_codes::SOFTWARE;
  }
  logger.info(" " + bfgs.get_code_string(ret));

  return return_code;
}
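
A minimal driver sketch follows, showing how this service function might be invoked from client code. The model type my_model_namespace::my_model, the helper name run_bfgs, the seed and tuning values, and the use of stan::io::empty_var_context with the stream_logger / stream_writer callbacks are illustrative assumptions rather than part of the documented interface; the tuning values mirror the usual CmdStan optimization defaults.

// Hypothetical driver for stan::services::optimize::bfgs (a sketch, not part
// of the documented API). my_model_namespace::my_model stands in for a
// stanc-generated model class that has already been constructed with its data.
#include <stan/callbacks/interrupt.hpp>
#include <stan/callbacks/stream_logger.hpp>
#include <stan/callbacks/stream_writer.hpp>
#include <stan/io/empty_var_context.hpp>
#include <stan/services/optimize/bfgs.hpp>
#include <iostream>

int run_bfgs(my_model_namespace::my_model& model) {
  stan::io::empty_var_context init;      // random inits inside init_radius
  stan::callbacks::interrupt interrupt;  // no-op interrupt callback
  stan::callbacks::stream_logger logger(std::cout, std::cout, std::cout,
                                        std::cerr, std::cerr);
  stan::callbacks::stream_writer init_writer(std::cout);
  stan::callbacks::stream_writer parameter_writer(std::cout);

  // Tuning values below follow the usual CmdStan optimization defaults.
  return stan::services::optimize::bfgs(
      model, init,
      1234u,   // random_seed
      1u,      // chain
      2.0,     // init_radius
      0.001,   // init_alpha
      1e-12,   // tol_obj
      1e4,     // tol_rel_obj
      1e-8,    // tol_grad
      1e7,     // tol_rel_grad
      1e-8,    // tol_param
      2000,    // num_iterations
      false,   // save_iterations
      100,     // refresh
      interrupt, logger, init_writer, parameter_writer);
}

The returned int is error_codes::OK on normal termination and error_codes::SOFTWARE otherwise.
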
template<class Model >
int stan::services::optimize::lbfgs(Model &model,
                                    stan::io::var_context &init,
                                    unsigned int random_seed,
                                    unsigned int chain,
                                    double init_radius,
                                    int history_size,
                                    double init_alpha,
                                    double tol_obj,
                                    double tol_rel_obj,
                                    double tol_grad,
                                    double tol_rel_grad,
                                    double tol_param,
                                    int num_iterations,
                                    bool save_iterations,
                                    int refresh,
                                    callbacks::interrupt &interrupt,
                                    callbacks::logger &logger,
                                    callbacks::writer &init_writer,
                                    callbacks::writer &parameter_writer)

Runs the L-BFGS algorithm for a model.

Template Parameters
    Model              A model implementation

Parameters
    [in]      model             Input model to test (with data already instantiated)
    [in]      init              var context for initialization
    [in]      random_seed       random seed for the random number generator
    [in]      chain             chain id to advance the pseudo random number generator
    [in]      init_radius       radius for random initialization of the unconstrained parameters
    [in]      history_size      amount of history to keep for L-BFGS
    [in]      init_alpha        line search step size for the first iteration
    [in]      tol_obj           convergence tolerance on absolute changes in objective function value
    [in]      tol_rel_obj       convergence tolerance on relative changes in objective function value
    [in]      tol_grad          convergence tolerance on the norm of the gradient
    [in]      tol_rel_grad      convergence tolerance on the relative norm of the gradient
    [in]      tol_param         convergence tolerance on changes in parameter value
    [in]      num_iterations    maximum number of iterations
    [in]      save_iterations   indicates whether all the iterations should be saved to the parameter_writer
    [in]      refresh           how often to write output to the logger
    [in,out]  interrupt         callback to be called every iteration
    [in,out]  logger            Logger for messages
    [in,out]  init_writer       Writer callback for unconstrained inits
    [in,out]  parameter_writer  output for parameter values

Returns
    error_codes::OK if successful; error_codes::SOFTWARE if the optimizer terminated with an error

Definition at line 55 of file lbfgs.hpp.

References stan::services::util::create_rng(), stan::services::error_codes::OK, stan::services::error_codes::SOFTWARE, and the stan::optimization::BFGSMinimizer< FunctorType, QNUpdateType, Scalar, DimAtCompile > members _conv_opts, _ls_opts, alpha(), alpha0(), curr_g(), get_code_string(), get_qnupdate(), iter_num(), note(), prev_step_size(), and step().

Referenced by TEST_F().

{
  boost::ecuyer1988 rng = util::create_rng(random_seed, chain);

  std::vector<int> disc_vector;
  std::vector<double> cont_vector
      = util::initialize<false>(model, init, rng, init_radius, false,
                                logger, init_writer);

  std::stringstream lbfgs_ss;
  // Optimizer is a typedef (elided from this listing) of the L-BFGS
  // line-search minimizer; its quasi-Newton update exposes set_history_size().
  Optimizer lbfgs(model, cont_vector, disc_vector, &lbfgs_ss);
  lbfgs.get_qnupdate().set_history_size(history_size);

  // Map the service arguments onto the minimizer's line-search and
  // convergence options.
  lbfgs._ls_opts.alpha0 = init_alpha;
  lbfgs._conv_opts.tolAbsF = tol_obj;
  lbfgs._conv_opts.tolRelF = tol_rel_obj;
  lbfgs._conv_opts.tolAbsGrad = tol_grad;
  lbfgs._conv_opts.tolRelGrad = tol_rel_grad;
  lbfgs._conv_opts.tolAbsX = tol_param;
  lbfgs._conv_opts.maxIts = num_iterations;

  double lp = lbfgs.logp();

  std::stringstream initial_msg;
  initial_msg << "Initial log joint probability = " << lp;
  logger.info(initial_msg);

  // Header row for the parameter writer: lp__ followed by the constrained
  // parameter names.
  std::vector<std::string> names;
  names.push_back("lp__");
  model.constrained_param_names(names, true, true);
  parameter_writer(names);

  if (save_iterations) {
    std::vector<double> values;
    std::stringstream msg;
    model.write_array(rng, cont_vector, disc_vector, values,
                      true, true, &msg);
    if (msg.str().length() > 0)
      logger.info(msg);

    values.insert(values.begin(), lp);
    parameter_writer(values);
  }
  int ret = 0;

  while (ret == 0) {
    interrupt();
    if (refresh > 0
        && (lbfgs.iter_num() == 0
            || ((lbfgs.iter_num() + 1) % refresh == 0)))
      logger.info(" Iter"
                  " log prob"
                  " ||dx||"
                  " ||grad||"
                  " alpha"
                  " alpha0"
                  " # evals"
                  " Notes ");

    ret = lbfgs.step();
    lp = lbfgs.logp();
    lbfgs.params_r(cont_vector);

    if (refresh > 0
        && (ret != 0
            || !lbfgs.note().empty()
            || lbfgs.iter_num() == 0
            || ((lbfgs.iter_num() + 1) % refresh == 0))) {
      std::stringstream msg;
      msg << " " << std::setw(7) << lbfgs.iter_num() << " ";
      msg << " " << std::setw(12) << std::setprecision(6)
          << lp << " ";
      msg << " " << std::setw(12) << std::setprecision(6)
          << lbfgs.prev_step_size() << " ";
      msg << " " << std::setw(12) << std::setprecision(6)
          << lbfgs.curr_g().norm() << " ";
      msg << " " << std::setw(10) << std::setprecision(4)
          << lbfgs.alpha() << " ";
      msg << " " << std::setw(10) << std::setprecision(4)
          << lbfgs.alpha0() << " ";
      msg << " " << std::setw(7)
          << lbfgs.grad_evals() << " ";
      msg << " " << lbfgs.note() << " ";
      logger.info(msg);
    }

    if (lbfgs_ss.str().length() > 0) {
      logger.info(lbfgs_ss);
      lbfgs_ss.str("");
    }

    if (save_iterations) {
      std::vector<double> values;
      std::stringstream msg;
      model.write_array(rng, cont_vector, disc_vector, values,
                        true, true, &msg);
      if (msg.str().length() > 0)
        logger.info(msg);

      values.insert(values.begin(), lp);
      parameter_writer(values);
    }
  }

  if (!save_iterations) {
    std::vector<double> values;
    std::stringstream msg;
    model.write_array(rng, cont_vector, disc_vector, values,
                      true, true, &msg);
    if (msg.str().length() > 0)
      logger.info(msg);

    values.insert(values.begin(), lp);
    parameter_writer(values);
  }

  int return_code;
  if (ret >= 0) {
    logger.info("Optimization terminated normally: ");
    return_code = error_codes::OK;
  } else {
    logger.info("Optimization terminated with error: ");
    return_code = error_codes::SOFTWARE;
  }
  logger.info(" " + lbfgs.get_code_string(ret));

  return return_code;
}
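
The calling pattern matches bfgs, with the additional history_size argument placed between init_radius and init_alpha. A hedged sketch follows, assuming the same includes and the hypothetical model, init, and callback objects as in the bfgs example above (run_lbfgs is an illustrative helper name; 5 is the usual CmdStan default history size):

// Sketch only, not part of the documented API; see the bfgs example above
// for how these arguments would typically be constructed.
int run_lbfgs(my_model_namespace::my_model& model,
              stan::io::var_context& init,
              stan::callbacks::interrupt& interrupt,
              stan::callbacks::logger& logger,
              stan::callbacks::writer& init_writer,
              stan::callbacks::writer& parameter_writer) {
  return stan::services::optimize::lbfgs(
      model, init,
      1234u, 1u,   // random_seed, chain
      2.0,         // init_radius
      5,           // history_size (usual CmdStan default)
      0.001,       // init_alpha
      1e-12, 1e4,  // tol_obj, tol_rel_obj
      1e-8, 1e7,   // tol_grad, tol_rel_grad
      1e-8,        // tol_param
      2000,        // num_iterations
      false,       // save_iterations
      100,         // refresh
      interrupt, logger, init_writer, parameter_writer);
}
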
template<class Model >
int stan::services::optimize::newton(Model &model,
                                     stan::io::var_context &init,
                                     unsigned int random_seed,
                                     unsigned int chain,
                                     double init_radius,
                                     int num_iterations,
                                     bool save_iterations,
                                     callbacks::interrupt &interrupt,
                                     callbacks::logger &logger,
                                     callbacks::writer &init_writer,
                                     callbacks::writer &parameter_writer)

Runs the Newton algorithm for a model.

Template Parameters
    Model              A model implementation

Parameters
    [in]      model             the Stan model instantiated with data
    [in]      init              var context for initialization
    [in]      random_seed       random seed for the random number generator
    [in]      chain             chain id to advance the pseudo random number generator
    [in]      init_radius       radius for random initialization of the unconstrained parameters
    [in]      num_iterations    maximum number of iterations
    [in]      save_iterations   indicates whether all the iterations should be saved
    [in,out]  interrupt         callback to be called every iteration
    [in,out]  logger            Logger for messages
    [in,out]  init_writer       Writer callback for unconstrained inits
    [in,out]  parameter_writer  output for parameter values

Returns
    error_codes::OK if successful

Definition at line 42 of file newton.hpp.

References stan::services::util::create_rng(), stan::math::fabs(), stan::optimization::newton_step(), and stan::services::error_codes::OK.

Referenced by TEST_F().

{
  boost::ecuyer1988 rng = util::create_rng(random_seed, chain);

  std::vector<int> disc_vector;
  std::vector<double> cont_vector
      = util::initialize<false>(model, init, rng, init_radius, false,
                                logger, init_writer);

  // Evaluate the initial log joint probability; a failed evaluation is
  // reported through the logger and treated as -infinity.
  double lp(0);
  try {
    std::stringstream message;
    lp = model.template log_prob<false, false>(cont_vector, disc_vector,
                                               &message);
    logger.info(message);
  } catch (const std::exception& e) {
    logger.info("");
    logger.info("Informational Message: The current Metropolis"
                " proposal is about to be rejected because of"
                " the following issue:");
    logger.info(e.what());
    logger.info("If this warning occurs sporadically, such as"
                " for highly constrained variable types like"
                " covariance matrices, then the sampler is fine,");
    logger.info("but if this warning occurs often then your model"
                " may be either severely ill-conditioned or"
                " misspecified.");
    lp = -std::numeric_limits<double>::infinity();
  }

  std::stringstream msg;
  msg << "Initial log joint probability = " << lp;
  logger.info(msg);

  // Header row for the parameter writer: lp__ followed by the constrained
  // parameter names.
  std::vector<std::string> names;
  names.push_back("lp__");
  model.constrained_param_names(names, true, true);
  parameter_writer(names);

  double lastlp = lp;
  for (int m = 0; m < num_iterations; m++) {
    if (save_iterations) {
      std::vector<double> values;
      std::stringstream ss;
      model.write_array(rng, cont_vector, disc_vector, values,
                        true, true, &ss);
      if (ss.str().length() > 0)
        logger.info(ss);
      values.insert(values.begin(), lp);
      parameter_writer(values);
    }
    interrupt();
    lastlp = lp;
    lp = stan::optimization::newton_step(model, cont_vector, disc_vector);

    std::stringstream msg2;
    msg2 << "Iteration "
         << std::setw(2) << (m + 1) << "."
         << " Log joint probability = " << std::setw(10) << lp
         << ". Improved by " << (lp - lastlp) << ".";
    logger.info(msg2);

    // Stop once the change in the log joint probability is negligible.
    if (std::fabs(lp - lastlp) <= 1e-8)
      break;
  }

  {
    std::vector<double> values;
    std::stringstream ss;
    model.write_array(rng, cont_vector, disc_vector, values,
                      true, true, &ss);
    if (ss.str().length() > 0)
      logger.info(ss);
    values.insert(values.begin(), lp);
    parameter_writer(values);
  }
  return error_codes::OK;
}
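
newton takes a shorter argument list: there are no line-search or tolerance arguments and no refresh, because the loop in the listing stops once the change in the log joint probability is at most 1e-8 or num_iterations is reached. A hedged sketch, again assuming the same includes and the hypothetical objects from the bfgs example (run_newton is an illustrative helper name):

// Sketch only, not part of the documented API; see the bfgs example above
// for how these arguments would typically be constructed.
int run_newton(my_model_namespace::my_model& model,
               stan::io::var_context& init,
               stan::callbacks::interrupt& interrupt,
               stan::callbacks::logger& logger,
               stan::callbacks::writer& init_writer,
               stan::callbacks::writer& parameter_writer) {
  return stan::services::optimize::newton(
      model, init,
      1234u, 1u,  // random_seed, chain
      2.0,        // init_radius
      2000,       // num_iterations
      false,      // save_iterations
      interrupt, logger, init_writer, parameter_writer);
}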