From 0dbbc0c1797bcdbeb6c967ac04f20eaa0fbed8c1 Mon Sep 17 00:00:00 2001
From: Indrajit Maloji Bhosale
Date: Tue, 5 Mar 2024 15:24:29 -0800
Subject: [PATCH] Test local build script

---
 src/infer_request.cc | 114 ++++++++++++++++++++++---------------
 src/infer_request.h  |   3 ++
 2 files changed, 62 insertions(+), 55 deletions(-)

diff --git a/src/infer_request.cc b/src/infer_request.cc
index e2e360159..656e65e41 100644
--- a/src/infer_request.cc
+++ b/src/infer_request.cc
@@ -1001,61 +1001,7 @@ InferenceRequest::Normalize()
   }
   // Make sure that the request is providing the number of inputs
   // as is expected by the model.
-  if ((original_inputs_.size() > (size_t)model_config.input_size()) ||
-      (original_inputs_.size() < model_raw_->RequiredInputCount())) {
-    // If no input is marked as optional, then use exact match error message
-    // for consistency / backward compatibility
-    std::string missing_required_input_string = "[";
-    std::string original_input_string = "[";
-
-    for (size_t i = 0; i < (size_t)model_config.input_size(); ++i) {
-      const inference::ModelInput& input = model_config.input(i);
-      if ((!input.optional()) &&
-          (original_inputs_.find(input.name()) == original_inputs_.end())) {
-        missing_required_input_string =
-            missing_required_input_string + "'" + input.name() + "'" + ",";
-      }
-    }
-    // Removes the extra ","
-    missing_required_input_string.pop_back();
-    missing_required_input_string = missing_required_input_string + "]";
-
-    for (const auto& pair : original_inputs_) {
-      original_input_string =
-          original_input_string + "'" + pair.first + "'" + ",";
-    }
-    // Removes the extra ","
-    original_input_string.pop_back();
-    original_input_string = original_input_string + "]";
-    if (original_inputs_.size() == 0) {
-      original_input_string = "[]";
-    }
-    if ((size_t)model_config.input_size() == model_raw_->RequiredInputCount()) {
-      // This is response ONLY when there are no optional parameters in the
-      // model
-      return Status(
-          Status::Code::INVALID_ARG,
-          LogRequest() + "expected " +
-              std::to_string(model_config.input_size()) + " inputs but got " +
-              std::to_string(original_inputs_.size()) + " inputs for model '" +
-              ModelName() + "'. Got input(s) " + original_input_string +
-              ", but missing required input(s) " +
-              missing_required_input_string +
-              ". Please provide all required input(s).");
-    } else {
-      return Status(
-          Status::Code::INVALID_ARG,
-          LogRequest() + "expected number of inputs between " +
-              std::to_string(model_raw_->RequiredInputCount()) + " and " +
-              std::to_string(model_config.input_size()) + " but got " +
-              std::to_string(original_inputs_.size()) + " inputs for model '" +
-              ModelName() + "'. Got input(s) " + original_input_string +
-              ", but missing required input(s) " +
-              missing_required_input_string +
-              ". Please provide all required input(s).");
-    }
-  }
-
+  RETURN_IF_ERROR(ValidateRequestInputs(model_config));
   // Determine the batch size and shape of each input.
   if (model_config.max_batch_size() == 0) {
     // Model does not support Triton-style batching so set as
@@ -1228,6 +1174,64 @@ InferenceRequest::Normalize()
   return Status::Success;
 }
 
+Status
+InferenceRequest::ValidateRequestInputs(
+    const inference::ModelConfig& model_config)
+{
+  if ((original_inputs_.size() > (size_t)model_config.input_size()) ||
+      (original_inputs_.size() < model_raw_->RequiredInputCount())) {
+    // If no input is marked as optional, then use exact match error message
+    // for consistency / backward compatibility
+    std::string missing_required_input_string = "[";
+    std::string original_input_string = "[";
+
+    for (size_t i = 0; i < (size_t)model_config.input_size(); ++i) {
+      const inference::ModelInput& input = model_config.input(i);
+      if ((!input.optional()) &&
+          (original_inputs_.find(input.name()) == original_inputs_.end())) {
+        missing_required_input_string =
+            missing_required_input_string + "'" + input.name() + "'" + ",";
+      }
+    }
+    // Removes the extra ","
+    missing_required_input_string.pop_back();
+    missing_required_input_string = missing_required_input_string + "]";
+
+    for (const auto& pair : original_inputs_) {
+      original_input_string =
+          original_input_string + "'" + pair.first + "'" + ",";
+    }
+    // Removes the extra ","
+    original_input_string.pop_back();
+    original_input_string = original_input_string + "]";
+    if (original_inputs_.size() == 0) {
+      original_input_string = "[]";
+    }
+    if ((size_t)model_config.input_size() == model_raw_->RequiredInputCount()) {
+      // This error format is used ONLY when the model declares no optional
+      // inputs
+      return Status(
+          Status::Code::INVALID_ARG,
+          LogRequest() + "expected " +
+              std::to_string(model_config.input_size()) + " inputs but got " +
+              std::to_string(original_inputs_.size()) + " inputs for model '" +
+              ModelName() + "'. Got input(s) " + original_input_string +
+              ", but missing required input(s) " +
+              missing_required_input_string +
+              ". Please provide all required input(s).");
+    } else {
+      return Status(
+          Status::Code::INVALID_ARG,
+          LogRequest() + "expected number of inputs between " +
+              std::to_string(model_raw_->RequiredInputCount()) + " and " +
+              std::to_string(model_config.input_size()) + " but got " +
+              std::to_string(original_inputs_.size()) + " inputs for model '" +
+              ModelName() + "'. Got input(s) " + original_input_string +
+              ", but missing required input(s) " +
+              missing_required_input_string +
+              ". Please provide all required input(s).");
+    }
+  }
+
+  return Status::Success;
+}
+
 #ifdef TRITON_ENABLE_STATS
 void
 InferenceRequest::ReportStatistics(
diff --git a/src/infer_request.h b/src/infer_request.h
index 737e96b14..e3c334057 100644
--- a/src/infer_request.h
+++ b/src/infer_request.h
@@ -744,6 +744,9 @@ class InferenceRequest {
 
   Status Normalize();
 
+  // Helper for validating the request's inputs against the model config
+  Status ValidateRequestInputs(const inference::ModelConfig& model_config);
+
   // Helpers for pending request metrics
   void IncrementPendingRequestCount();
   void DecrementPendingRequestCount();
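
Reviewer note (not part of the applied diff): the sketch below is a minimal, standalone
illustration of the rule the new helper encapsulates -- the number of request inputs must
fall between the model's required-input count and its total declared input count, and the
error message lists both the provided inputs and the missing required ones. All names in
it (ModelSpec, ValidateInputs) are hypothetical stand-ins, not Triton types or APIs.

// Hypothetical, self-contained sketch -- not Triton code.
#include <cstddef>
#include <iostream>
#include <map>
#include <string>
#include <utility>
#include <vector>

struct ModelSpec {
  // (input name, optional flag), loosely mirroring ModelConfig::input() and
  // ModelInput::optional().
  std::vector<std::pair<std::string, bool>> inputs;

  size_t RequiredInputCount() const
  {
    size_t count = 0;
    for (const auto& in : inputs) {
      if (!in.second) {
        ++count;
      }
    }
    return count;
  }
};

// Returns an empty string when the request supplies an acceptable set of
// inputs, otherwise an error message in the spirit of ValidateRequestInputs.
std::string
ValidateInputs(
    const ModelSpec& spec, const std::map<std::string, int>& request_inputs)
{
  if ((request_inputs.size() <= spec.inputs.size()) &&
      (request_inputs.size() >= spec.RequiredInputCount())) {
    return "";  // input count falls within [required, total declared]
  }

  // Collect the required inputs that the request did not provide.
  std::string missing = "[";
  for (const auto& in : spec.inputs) {
    if (!in.second &&
        (request_inputs.find(in.first) == request_inputs.end())) {
      missing += "'" + in.first + "',";
    }
  }
  if (missing.back() == ',') {
    missing.pop_back();  // drop the trailing comma only if one was appended
  }
  missing += "]";

  // Collect the inputs that were provided.
  std::string got = "[";
  for (const auto& pair : request_inputs) {
    got += "'" + pair.first + "',";
  }
  if (got.back() == ',') {
    got.pop_back();
  }
  got += "]";

  return "expected between " + std::to_string(spec.RequiredInputCount()) +
         " and " + std::to_string(spec.inputs.size()) + " inputs but got " +
         std::to_string(request_inputs.size()) + "; got input(s) " + got +
         ", missing required input(s) " + missing;
}

int
main()
{
  ModelSpec spec{{{"INPUT0", false}, {"INPUT1", false}, {"OPTIONAL0", true}}};
  // A request that omits required input INPUT1 triggers the error path.
  std::cout << ValidateInputs(spec, {{"INPUT0", 0}}) << std::endl;
  return 0;
}

Compiled and run, the example prints the "expected between N and M inputs" style message
for a request that omits a required input, which is the behavior the patch moves out of
Normalize() and into the new helper.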