// Fragment of the VW BFGS learner setup routine. NOTE(review): the original
// file's line numbers (e.g. "1095") have been fused into the text by
// extraction, and several original lines are elided (numbering jumps such as
// 1110->1120) — do not treat this fragment as directly compilable.
1095 auto b = scoped_calloc_or_throw<bfgs>();
// Command-line switches selecting the optimizer variant; both default off.
1096 bool conjugate_gradient =
false;
1097 bool bfgs_option =
false;
// Register the "conjugate_gradient" switch (marked .keep()).
1099 bfgs_outer_options.add(
1100 make_option(
"conjugate_gradient", conjugate_gradient).keep().help(
"use conjugate gradient based optimization"));
// Register the "bfgs" switch. FIX: its help text was a copy-paste of the
// "conjugate_gradient" option's help above; it now describes this option.
1103 bfgs_inner_options.add(
make_option(
"bfgs", bfgs_option).keep().help(
"use bfgs optimization"));
// "hessian_on": bound to all.hessian_on; help text per the option itself.
1104 bfgs_inner_options.add(
make_option(
"hessian_on", all.
hessian_on).help(
"use second derivative in line search"));
// "mem": bfgs memory parameter, default 15.
1105 bfgs_inner_options.add(
make_option(
"mem", b->m).default_value(15).help(
"memory in bfgs"));
// "termination": relative termination threshold, default 0.001f.
1106 bfgs_inner_options.add(
1107 make_option(
"termination", b->rel_threshold).default_value(0.001f).help(
"Termination threshold"));
// NOTE(review): the body of this branch (original lines 1111-1119) is
// elided in this fragment — confirm against the full source.
1110 if (!conjugate_gradient)
// Initialize optimizer state for the first pass.
1120 b->wolfe1_bound = 0.01;
1121 b->first_hessian_on =
true;
1122 b->first_pass =
true;
1123 b->gradient_pass =
true;
1124 b->preconditioner_pass =
true;
1125 b->backstep_on =
false;
1127 b->no_win_counter = 0;
// Early-stop threshold is read from the parsed "early_terminate" option.
// NOTE(review): the guard around this assignment (original lines
// 1128-1131) is elided here — verify the enclosing condition.
1132 b->early_stop_thres = options.
get_typed_option<
size_t>(
"early_terminate").value();
// Startup diagnostics. NOTE(review): the if/else structure choosing
// between these trace messages (elided original lines) is not visible
// in this fragment.
1141 b->all->trace_message <<
"enabling BFGS based optimization ";
1143 b->all->trace_message <<
"enabling conjugate gradient optimization via BFGS ";
1145 b->all->trace_message <<
"with curvature calculation" << std::endl;
1147 b->all->trace_message <<
"**without** curvature calculation" << std::endl;
// Hard failure when too few passes are configured (triggering condition
// elided in this fragment).
1151 THROW(
"you must make at least 2 passes to use BFGS");
// Select the templated learn implementation; the condition choosing
// between learn<true> and learn<false> is elided in this fragment.
1158 learn_ptr = learn<true>;
1160 learn_ptr = learn<false>;
base_learner * make_base(learner< T, E > &base)
virtual void add_and_parse(const option_group_definition &group)=0
void save_load(bfgs &b, io_buf &model_file, bool read, bool text)
learner< T, E > & init_learner(free_ptr< T > &dat, L *base, void(*learn)(T &, L &, E &), void(*predict)(T &, L &, E &), size_t ws, prediction_type::prediction_type_t pred_type)
typed_option< T > & get_typed_option(const std::string &key)
void init_driver(bfgs &b)
typed_option< T > make_option(std::string name, T &location)