Skip to content

Commit

Permalink
Test local build script
Browse files Browse the repository at this point in the history
  • Loading branch information
indrajit96 committed Mar 5, 2024
1 parent a5ca1a5 commit 0dbbc0c
Show file tree
Hide file tree
Showing 2 changed files with 62 additions and 55 deletions.
114 changes: 59 additions & 55 deletions src/infer_request.cc
Original file line number Diff line number Diff line change
Expand Up @@ -1001,61 +1001,7 @@ InferenceRequest::Normalize()
}
// Make sure that the request is providing the number of inputs
// as is expected by the model.
if ((original_inputs_.size() > (size_t)model_config.input_size()) ||
(original_inputs_.size() < model_raw_->RequiredInputCount())) {
// If no input is marked as optional, then use exact match error message
// for consistency / backward compatibility
std::string missing_required_input_string = "[";
std::string original_input_string = "[";

for (size_t i = 0; i < (size_t)model_config.input_size(); ++i) {
const inference::ModelInput& input = model_config.input(i);
if ((!input.optional()) &&
(original_inputs_.find(input.name()) == original_inputs_.end())) {
missing_required_input_string =
missing_required_input_string + "'" + input.name() + "'" + ",";
}
}
// Removes the extra ","
missing_required_input_string.pop_back();
missing_required_input_string = missing_required_input_string + "]";

for (const auto& pair : original_inputs_) {
original_input_string =
original_input_string + "'" + pair.first + "'" + ",";
}
// Removes the extra ","
original_input_string.pop_back();
original_input_string = original_input_string + "]";
if (original_inputs_.size() == 0) {
original_input_string = "[]";
}
if ((size_t)model_config.input_size() == model_raw_->RequiredInputCount()) {
// This is response ONLY when there are no optional parameters in the
// model
return Status(
Status::Code::INVALID_ARG,
LogRequest() + "expected " +
std::to_string(model_config.input_size()) + " inputs but got " +
std::to_string(original_inputs_.size()) + " inputs for model '" +
ModelName() + "'. Got input(s) " + original_input_string +
", but missing required input(s) " +
missing_required_input_string +
". Please provide all required input(s).");
} else {
return Status(
Status::Code::INVALID_ARG,
LogRequest() + "expected number of inputs between " +
std::to_string(model_raw_->RequiredInputCount()) + " and " +
std::to_string(model_config.input_size()) + " but got " +
std::to_string(original_inputs_.size()) + " inputs for model '" +
ModelName() + "'. Got input(s) " + original_input_string +
", but missing required input(s) " +
missing_required_input_string +
". Please provide all required input(s).");
}
}

RETURN_IF_ERROR(ValidateRequestInputs(model_config));
// Determine the batch size and shape of each input.
if (model_config.max_batch_size() == 0) {
// Model does not support Triton-style batching so set as
Expand Down Expand Up @@ -1228,6 +1174,64 @@ InferenceRequest::Normalize()
return Status::Success;
}

// Validate that the request supplies an acceptable set of inputs for the
// model: no more inputs than the model configuration declares, and no fewer
// than the model's required (non-optional) input count.
//
// \param model_config The configuration of the model being requested.
// \return Status::Success when the input count is acceptable; otherwise an
// INVALID_ARG status whose message lists the provided input(s) and the
// required input(s) that are missing.
Status
InferenceRequest::ValidateRequestInputs(
    const inference::ModelConfig& model_config)
{
  if ((original_inputs_.size() > (size_t)model_config.input_size()) ||
      (original_inputs_.size() < model_raw_->RequiredInputCount())) {
    // Build a "[...]" list of the required inputs that the request did not
    // provide, for use in the error message.
    std::string missing_required_input_string = "[";
    for (size_t i = 0; i < (size_t)model_config.input_size(); ++i) {
      const inference::ModelInput& input = model_config.input(i);
      if ((!input.optional()) &&
          (original_inputs_.find(input.name()) == original_inputs_.end())) {
        missing_required_input_string =
            missing_required_input_string + "'" + input.name() + "'" + ",";
      }
    }
    // Replace the trailing "," with the closing bracket. Guard the pop_back
    // so an empty list renders as "[]" rather than stripping the opening
    // bracket (possible when the request provided too many inputs but none
    // of the required ones are missing).
    if (missing_required_input_string.size() > 1) {
      missing_required_input_string.pop_back();
    }
    missing_required_input_string = missing_required_input_string + "]";

    // Build a "[...]" list of the inputs the request actually provided.
    std::string original_input_string = "[";
    for (const auto& pair : original_inputs_) {
      original_input_string =
          original_input_string + "'" + pair.first + "'" + ",";
    }
    // Same guard as above: an empty request renders as "[]".
    if (original_input_string.size() > 1) {
      original_input_string.pop_back();
    }
    original_input_string = original_input_string + "]";

    if ((size_t)model_config.input_size() == model_raw_->RequiredInputCount()) {
      // No input is marked optional, so use the exact-match error message
      // for consistency / backward compatibility.
      return Status(
          Status::Code::INVALID_ARG,
          LogRequest() + "expected " +
              std::to_string(model_config.input_size()) + " inputs but got " +
              std::to_string(original_inputs_.size()) + " inputs for model '" +
              ModelName() + "'. Got input(s) " + original_input_string +
              ", but missing required input(s) " +
              missing_required_input_string +
              ". Please provide all required input(s).");
    } else {
      return Status(
          Status::Code::INVALID_ARG,
          LogRequest() + "expected number of inputs between " +
              std::to_string(model_raw_->RequiredInputCount()) + " and " +
              std::to_string(model_config.input_size()) + " but got " +
              std::to_string(original_inputs_.size()) + " inputs for model '" +
              ModelName() + "'. Got input(s) " + original_input_string +
              ", but missing required input(s) " +
              missing_required_input_string +
              ". Please provide all required input(s).");
    }
  }

  // Input count is within the acceptable range. The original body fell off
  // the end of the function here, which is undefined behavior for a
  // value-returning function in C++.
  return Status::Success;
}

#ifdef TRITON_ENABLE_STATS
void
InferenceRequest::ReportStatistics(
Expand Down
3 changes: 3 additions & 0 deletions src/infer_request.h
Original file line number Diff line number Diff line change
Expand Up @@ -744,6 +744,9 @@ class InferenceRequest {

Status Normalize();

// Helper called from Normalize() to validate that the request provides an
// acceptable set of inputs for the given model configuration. The parameter
// was missing from this declaration, mismatching the definition in
// infer_request.cc, which takes a const inference::ModelConfig&.
Status ValidateRequestInputs(const inference::ModelConfig& model_config);

// Helpers for pending request metrics
void IncrementPendingRequestCount();
void DecrementPendingRequestCount();
Expand Down

0 comments on commit 0dbbc0c

Please sign in to comment.