312 auto data = scoped_calloc_or_throw<cs_active>();
314 bool simulation =
false;
318 .add(
make_option(
"cs_active", data->num_classes).keep().help(
"Cost-sensitive active learning with <k> costs"))
319 .
add(
make_option(
"simulation", simulation).help(
"cost-sensitive active learning simulation mode"))
320 .
add(
make_option(
"baseline", data->is_baseline).help(
"cost-sensitive active learning baseline"))
323 .help(
"cost-sensitive active learning use domination. Default 1"))
324 .
add(
make_option(
"mellowness", data->c0).default_value(0.1
f).help(
"mellowness parameter c_0. Default 0.1."))
327 .help(
"parameter controlling the threshold for per-label cost uncertainty. Default 0.5."))
328 .
add(
make_option(
"max_labels", data->max_labels).default_value(-1).help(
"maximum number of label queries."))
329 .
add(
make_option(
"min_labels", data->min_labels).default_value(-1).help(
"minimum number of label queries."))
330 .
add(
make_option(
"cost_max", data->cost_max).default_value(1.
f).help(
"cost upper bound. Default 1."))
331 .
add(
make_option(
"cost_min", data->cost_min).default_value(0.
f).help(
"cost lower bound. Default 0."))
333 .
add(
make_option(
"csa_debug", data->print_debug_stuff).help(
"print debug stuff for cs_active"));
336 data->use_domination =
true;
338 data->use_domination =
false;
347 if (loss_function_type !=
"squared")
348 THROW(
"error: you can't use non-squared loss with cs_active");
351 THROW(
"error: you can't combine lda and active learning");
354 THROW(
"error: you can't use --cs_active and --active at the same time");
357 THROW(
"error: you can't use --cs_active and --active_cover at the same time");
360 THROW(
"error: you can't use --cs_active and --csoaa at the same time");
363 all.
trace_message <<
"WARNING: --cs_active should be used with --adax" << endl;
368 for (uint32_t i = 0; i < data->num_classes + 1; i++) data->examples_by_queries.push_back(0);
LEARNER::base_learner * cost_sensitive
base_learner * make_base(learner< T, E > &base)
virtual void add_and_parse(const option_group_definition &group)=0
single_learner * as_singleline(learner< T, E > *l)
void(* set_minmax)(shared_data *sd, float label)
void set_finish_example(void(*f)(vw &all, T &, E &))
learner< T, E > & init_learner(free_ptr< T > &dat, L *base, void(*learn)(T &, L &, E &), void(*predict)(T &, L &, E &), size_t ws, prediction_type::prediction_type_t pred_type)
void finish_example(vw &all, cs_active &cs_a, example &ec)
virtual bool was_supplied(const std::string &key)=0
virtual std::string getType()=0
int add(svm_params &params, svm_example *fec)
void predict_or_learn(cs_active &cs_a, single_learner &base, example &ec)
typed_option< T > make_option(std::string name, T &location)
LEARNER::base_learner * setup_base(options_i &options, vw &all)