All RL Inference API functions return error codes; the full set of generated definitions is listed after the sketch below.
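Callers check the returned code and, on failure, read the code and message from the accompanying status object. A minimal sketch, assuming the library's live_model, ranking_response, and api_status types, the choose_rank signature, and the header names seen in the public examples; treat the exact names as assumptions rather than a reference.

#include <iostream>

#include "api_status.h"
#include "err_constants.h"
#include "live_model.h"
#include "ranking_response.h"

namespace rl = reinforcement_learning;

// Rank actions and surface any error code/message carried by api_status.
int rank_with_error_check(rl::live_model& model, const char* event_id, const char* context_json)
{
  rl::ranking_response response;
  rl::api_status status;

  // Every call returns an int; anything other than error_code::success is a failure.
  if (model.choose_rank(event_id, context_json, response, &status) != rl::error_code::success)
  {
    // status carries the numeric code and the message generated by ERROR_CODE_DEFINITION.
    std::cerr << "choose_rank failed (" << status.get_error_code() << "): " << status.get_error_msg() << "\n";
    return status.get_error_code();
  }
  return rl::error_code::success;
}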
ERROR_CODE_DEFINITION(13, last_modified_invalid, "Unable to parse Last-Modified http header as date-time")
ERROR_CODE_DEFINITION(16, model_export_frequency_not_provided, "Export frequency of model not specified in configuration.")
ERROR_CODE_DEFINITION(20, json_no_actions_found, "Context json did not have actions (_multi array empty or not found)")
"A background thread encountered an error but there was no error handler registered. Register an error handler to "
"see the error code and message.")
ERROR_CODE_DEFINITION(29, thread_unresponsive_timeout, "A background thread exceeded the watchdog timer.")
ERROR_CODE_DEFINITION(30, incorrect_buffer_preamble_size, "Buffer preamble is pre-allocated and does not match the size requested.")
ERROR_CODE_DEFINITION(34, json_no_slots_found, "Context json did not have slots (_slots array empty or not found)")
ERROR_CODE_DEFINITION(49, baseline_actions_not_defined, "Baseline Actions must be defined in apprentice mode")
ERROR_CODE_DEFINITION(51, http_model_uri_not_provided, "Model Blob URI parameter was not passed in via configuration")
ERROR_CODE_DEFINITION(52, static_model_load_error, "Static model passed in C# layer is not loading properly")
ERROR_CODE_DEFINITION(53, http_oauth_authentication_error, "http request failed to authenticate")
ERROR_CODE_DEFINITION(54, http_oauth_unexpected_error, "http request failed with an unexpected error while retrieving a token")
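The message above about an unregistered error handler points at how background errors are surfaced: they are only reported if a handler is registered with the API. A minimal sketch of registering one, assuming the constructor-based callback taking (const api_status&, void*) that appears in the library's examples; the exact constructor signature and header names are assumptions.

#include <iostream>

#include "api_status.h"
#include "configuration.h"
#include "err_constants.h"
#include "live_model.h"

namespace rl = reinforcement_learning;

// Invoked from the library's background threads when an error occurs there.
void on_background_error(const rl::api_status& status, void* /*user_context*/)
{
  std::cerr << "background error (" << status.get_error_code() << "): " << status.get_error_msg() << "\n";
}

int create_and_init(const rl::utility::configuration& config)
{
  // Registering the handler at construction time makes background error codes
  // and messages visible instead of being silently dropped.
  rl::live_model model(config, &on_background_error, nullptr);

  rl::api_status status;
  if (model.init(&status) != rl::error_code::success)
  {
    std::cerr << "init failed (" << status.get_error_code() << "): " << status.get_error_msg() << "\n";
    return status.get_error_code();
  }
  return rl::error_code::success;
}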
These entries are generated by the ERROR_CODE_DEFINITION(code, name, message) macro from the Error Generator group, defined at err_constants.h:11.
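A minimal sketch of how an ERROR_CODE_DEFINITION-style X-macro can expand entries like those above into named integer constants and message strings. This is an illustrative pattern only, not the actual macro body at err_constants.h:11.

namespace error_code
{
// First expansion: each entry becomes a named integer constant.
#define ERROR_CODE_DEFINITION(code, name, message) const int name = code;
ERROR_CODE_DEFINITION(13, last_modified_invalid, "Unable to parse Last-Modified http header as date-time")
ERROR_CODE_DEFINITION(49, baseline_actions_not_defined, "Baseline Actions must be defined in apprentice mode")
#undef ERROR_CODE_DEFINITION

// Second expansion: the same entries also yield the human-readable messages.
#define ERROR_CODE_DEFINITION(code, name, message) const char* const name##_s = message;
ERROR_CODE_DEFINITION(13, last_modified_invalid, "Unable to parse Last-Modified http header as date-time")
ERROR_CODE_DEFINITION(49, baseline_actions_not_defined, "Baseline Actions must be defined in apprentice mode")
#undef ERROR_CODE_DEFINITION
}  // namespace error_code

In practice the entry list lives in a shared header that is included once per expansion, so codes, names, and messages stay in sync from a single source.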