Merge pull request #615 from roboflow/add-inference-ids-detection-blocks
Add inference_ids to model blocks
PawelPeczek-Roboflow authored Aug 29, 2024
2 parents d430c82 + bbd7475 commit a2d06b5
Showing 6 changed files with 229 additions and 6 deletions.
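All five model blocks touched below (instance segmentation, keypoint detection, the two classification blocks, and object detection) receive the same treatment: describe_outputs declares an extra inference_id output of STRING_KIND, and _post_process_result reads the id from the first raw prediction and attaches it to every per-image result. A minimal sketch of that shared pattern follows; the helper name and the inlined constant are illustrative only, and the elided lines stand for each block's existing conversion and filtering steps.

from typing import List, Optional

INFERENCE_ID_KEY = "inference_id"  # mirrors execution_engine.constants.INFERENCE_ID_KEY

def _post_process_result_sketch(predictions: List[dict]) -> List[dict]:
    # As in the diffs below, the id is read from the first prediction in the
    # batch and falls back to None when the response carries no inference_id.
    inference_id: Optional[str] = predictions[0].get(INFERENCE_ID_KEY, None)
    # ... existing conversion / filtering of `predictions` is unchanged ...
    return [
        {"inference_id": inference_id, "predictions": prediction}
        for prediction in predictions
    ]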
@@ -20,6 +20,7 @@
convert_inference_detections_batch_to_sv_detections,
filter_out_unwanted_classes_from_sv_detections_batch,
)
from inference.core.workflows.execution_engine.constants import INFERENCE_ID_KEY
from inference.core.workflows.execution_engine.entities.base import (
Batch,
OutputDefinition,
@@ -164,6 +165,7 @@ def accepts_batch_input(cls) -> bool:
@classmethod
def describe_outputs(cls) -> List[OutputDefinition]:
return [
OutputDefinition(name=INFERENCE_ID_KEY, kind=[STRING_KIND]),
OutputDefinition(
name="predictions",
kind=[BATCH_OF_INSTANCE_SEGMENTATION_PREDICTION_KIND],
@@ -356,6 +358,7 @@ def _post_process_result(
predictions: List[dict],
class_filter: Optional[List[str]],
) -> BlockResult:
inference_id = predictions[0].get(INFERENCE_ID_KEY, None)
predictions = convert_inference_detections_batch_to_sv_detections(predictions)
predictions = attach_prediction_type_info_to_sv_detections_batch(
predictions=predictions,
@@ -369,4 +372,7 @@
images=images,
predictions=predictions,
)
return [{"predictions": prediction} for prediction in predictions]
return [
{"inference_id": inference_id, "predictions": prediction}
for prediction in predictions
]
@@ -21,6 +21,7 @@
convert_inference_detections_batch_to_sv_detections,
filter_out_unwanted_classes_from_sv_detections_batch,
)
from inference.core.workflows.execution_engine.constants import INFERENCE_ID_KEY
from inference.core.workflows.execution_engine.entities.base import (
Batch,
OutputDefinition,
@@ -34,6 +35,7 @@
LIST_OF_VALUES_KIND,
ROBOFLOW_MODEL_ID_KIND,
ROBOFLOW_PROJECT_KIND,
STRING_KIND,
FloatZeroToOne,
ImageInputField,
RoboflowModelField,
@@ -156,6 +158,7 @@ def accepts_batch_input(cls) -> bool:
@classmethod
def describe_outputs(cls) -> List[OutputDefinition]:
return [
OutputDefinition(name=INFERENCE_ID_KEY, kind=[STRING_KIND]),
OutputDefinition(
name="predictions", kind=[BATCH_OF_KEYPOINT_DETECTION_PREDICTION_KIND]
),
@@ -340,6 +343,7 @@ def _post_process_result(
predictions: List[dict],
class_filter: Optional[List[str]],
) -> BlockResult:
inference_id = predictions[0].get(INFERENCE_ID_KEY, None)
detections = convert_inference_detections_batch_to_sv_detections(predictions)
for prediction, image_detections in zip(predictions, detections):
add_inference_keypoints_to_sv_detections(
@@ -358,4 +362,7 @@
images=images,
predictions=detections,
)
return [{"predictions": image_detections} for image_detections in detections]
return [
{"inference_id": inference_id, "predictions": image_detections}
for image_detections in detections
]
@@ -14,6 +14,7 @@
from inference.core.workflows.core_steps.common.entities import StepExecutionMode
from inference.core.workflows.core_steps.common.utils import attach_prediction_type_info
from inference.core.workflows.execution_engine.constants import (
INFERENCE_ID_KEY,
PARENT_ID_KEY,
ROOT_PARENT_ID_KEY,
)
@@ -28,6 +29,7 @@
FLOAT_ZERO_TO_ONE_KIND,
ROBOFLOW_MODEL_ID_KIND,
ROBOFLOW_PROJECT_KIND,
STRING_KIND,
FloatZeroToOne,
ImageInputField,
RoboflowModelField,
@@ -106,6 +108,7 @@ def accepts_batch_input(cls) -> bool:
@classmethod
def describe_outputs(cls) -> List[OutputDefinition]:
return [
OutputDefinition(name=INFERENCE_ID_KEY, kind=[STRING_KIND]),
OutputDefinition(
name="predictions", kind=[BATCH_OF_CLASSIFICATION_PREDICTION_KIND]
),
@@ -246,6 +249,7 @@ def _post_process_result(
images: Batch[WorkflowImageData],
predictions: List[dict],
) -> BlockResult:
inference_id = predictions[0].get(INFERENCE_ID_KEY, None)
predictions = attach_prediction_type_info(
predictions=predictions,
prediction_type="classification",
@@ -255,4 +259,7 @@
prediction[ROOT_PARENT_ID_KEY] = (
image.workflow_root_ancestor_metadata.parent_id
)
return [{"predictions": prediction} for prediction in predictions]
return [
{"inference_id": inference_id, "predictions": prediction}
for prediction in predictions
]
@@ -14,6 +14,7 @@
from inference.core.workflows.core_steps.common.entities import StepExecutionMode
from inference.core.workflows.core_steps.common.utils import attach_prediction_type_info
from inference.core.workflows.execution_engine.constants import (
INFERENCE_ID_KEY,
PARENT_ID_KEY,
ROOT_PARENT_ID_KEY,
)
@@ -28,6 +29,7 @@
FLOAT_ZERO_TO_ONE_KIND,
ROBOFLOW_MODEL_ID_KIND,
ROBOFLOW_PROJECT_KIND,
STRING_KIND,
FloatZeroToOne,
ImageInputField,
RoboflowModelField,
@@ -106,9 +108,10 @@ def accepts_batch_input(cls) -> bool:
@classmethod
def describe_outputs(cls) -> List[OutputDefinition]:
return [
OutputDefinition(name=INFERENCE_ID_KEY, kind=[STRING_KIND]),
OutputDefinition(
name="predictions", kind=[BATCH_OF_CLASSIFICATION_PREDICTION_KIND]
)
),
]

@classmethod
@@ -243,6 +246,7 @@ def _post_process_result(
images: Batch[WorkflowImageData],
predictions: List[dict],
) -> List[dict]:
inference_id = predictions[0].get(INFERENCE_ID_KEY, None)
predictions = attach_prediction_type_info(
predictions=predictions,
prediction_type="classification",
@@ -252,4 +256,7 @@
prediction[ROOT_PARENT_ID_KEY] = (
image.workflow_root_ancestor_metadata.parent_id
)
return [{"predictions": image_detections} for image_detections in predictions]
return [
{"inference_id": inference_id, "predictions": prediction}
for prediction in predictions
]
@@ -18,6 +18,7 @@
convert_inference_detections_batch_to_sv_detections,
filter_out_unwanted_classes_from_sv_detections_batch,
)
from inference.core.workflows.execution_engine.constants import INFERENCE_ID_KEY
from inference.core.workflows.execution_engine.entities.base import (
Batch,
OutputDefinition,
@@ -31,6 +32,7 @@
LIST_OF_VALUES_KIND,
ROBOFLOW_MODEL_ID_KIND,
ROBOFLOW_PROJECT_KIND,
STRING_KIND,
FloatZeroToOne,
ImageInputField,
RoboflowModelField,
@@ -145,6 +147,7 @@ def accepts_batch_input(cls) -> bool:
@classmethod
def describe_outputs(cls) -> List[OutputDefinition]:
return [
OutputDefinition(name="inference_id", kind=[STRING_KIND]),
OutputDefinition(
name="predictions", kind=[BATCH_OF_OBJECT_DETECTION_PREDICTION_KIND]
),
@@ -322,6 +325,7 @@ def _post_process_result(
predictions: List[dict],
class_filter: Optional[List[str]],
) -> BlockResult:
inference_id = predictions[0].get(INFERENCE_ID_KEY, None)
predictions = convert_inference_detections_batch_to_sv_detections(predictions)
predictions = attach_prediction_type_info_to_sv_detections_batch(
predictions=predictions,
@@ -335,4 +339,7 @@
images=images,
predictions=predictions,
)
return [{"predictions": prediction} for prediction in predictions]
return [
{"inference_id": inference_id, "predictions": prediction}
for prediction in predictions
]
@@ -0,0 +1,189 @@
import numpy as np
import pytest

from inference.core.env import WORKFLOWS_MAX_CONCURRENT_STEPS
from inference.core.managers.base import ModelManager
from inference.core.workflows.core_steps.common.entities import StepExecutionMode
from inference.core.workflows.execution_engine.core import ExecutionEngine

DETECTION_PLUS_CLASSIFICATION_WORKFLOW = {
"version": "1.0",
"inputs": [{"type": "WorkflowImage", "name": "image"}],
"steps": [
{
"type": "ObjectDetectionModel",
"name": "general_detection",
"image": "$inputs.image",
"model_id": "yolov8n-640",
"class_filter": ["dog"],
},
{
"type": "Crop",
"name": "cropping",
"image": "$inputs.image",
"predictions": "$steps.general_detection.predictions",
},
{
"type": "ClassificationModel",
"name": "breds_classification",
"image": "$steps.cropping.crops",
"model_id": "dog-breed-xpaq6/1",
},
],
"outputs": [
{
"type": "JsonField",
"name": "predictions",
"selector": "$steps.breds_classification.predictions",
},
],
}

OBJECT_DETECTION_WORKFLOW = {
"version": "1.0",
"inputs": [{"type": "WorkflowImage", "name": "image"}],
"steps": [
{
"type": "ObjectDetectionModel",
"name": "general_detection",
"image": "$inputs.image",
"model_id": "yolov8n-640",
"class_filter": ["dog"],
},
],
"outputs": [
{
"type": "JsonField",
"name": "predictions",
"coordinates_system": "own",
"selector": "$steps.general_detection.predictions",
}
],
}

INSTANCE_SEGMENTATION_WORKFLOW = {
"version": "1.0",
"inputs": [{"type": "WorkflowImage", "name": "image"}],
"steps": [
{
"type": "InstanceSegmentationModel",
"name": "instance_segmentation",
"image": "$inputs.image",
"model_id": "yolov8n-640",
},
],
"outputs": [
{
"type": "JsonField",
"name": "predictions",
"selector": "$steps.instance_segmentation.*",
}
],
}


@pytest.mark.workflows
def test_detection_plus_classification_workflow_with_inference_id(
model_manager: ModelManager,
dogs_image: np.ndarray,
roboflow_api_key: str,
) -> None:
# given
workflow_init_parameters = {
"workflows_core.model_manager": model_manager,
"workflows_core.api_key": roboflow_api_key,
"workflows_core.step_execution_mode": StepExecutionMode.LOCAL,
}
execution_engine = ExecutionEngine.init(
workflow_definition=DETECTION_PLUS_CLASSIFICATION_WORKFLOW,
init_parameters=workflow_init_parameters,
max_concurrent_steps=WORKFLOWS_MAX_CONCURRENT_STEPS,
)

# when
result = execution_engine.run(
runtime_parameters={
"image": dogs_image,
}
)

# then
assert (
len(result[0]["predictions"]) == 2
), "Expected 2 dogs crops on input image, hence 2 nested classification results"

for prediction in result[0]["predictions"]:
assert "inference_id" in prediction, "Expected inference_id in each prediction"
assert prediction["inference_id"] is not None, "Expected non-null inference_id"

assert [result[0]["predictions"][0]["top"], result[0]["predictions"][1]["top"]] == [
"116.Parson_russell_terrier",
"131.Wirehaired_pointing_griffon",
], "Expected predictions to be as measured in reference run"


@pytest.mark.workflows
def test_object_detection_workflow_with_inference_id(
model_manager: ModelManager,
dogs_image: np.ndarray,
roboflow_api_key: str,
) -> None:
# given
workflow_init_parameters = {
"workflows_core.model_manager": model_manager,
"workflows_core.api_key": roboflow_api_key,
"workflows_core.step_execution_mode": StepExecutionMode.LOCAL,
}
execution_engine = ExecutionEngine.init(
workflow_definition=OBJECT_DETECTION_WORKFLOW,
init_parameters=workflow_init_parameters,
max_concurrent_steps=WORKFLOWS_MAX_CONCURRENT_STEPS,
)

# when
result = execution_engine.run(
runtime_parameters={
"image": dogs_image,
}
)

# then
assert len(result[0]["predictions"]) == 2, "Expected 2 predictions"
assert (
result[0]["predictions"][0]["inference_id"] is not None
), "Expected non-null inference_id"
assert (
result[0]["predictions"][1]["inference_id"] is not None
), "Expected non-null inference_id"


@pytest.mark.workflows
def test_instance_segmentation_workflow_with_inference_id(
model_manager: ModelManager,
dogs_image: np.ndarray,
roboflow_api_key: str,
) -> None:
# given
workflow_init_parameters = {
"workflows_core.model_manager": model_manager,
"workflows_core.api_key": roboflow_api_key,
"workflows_core.step_execution_mode": StepExecutionMode.LOCAL,
}
execution_engine = ExecutionEngine.init(
workflow_definition=INSTANCE_SEGMENTATION_WORKFLOW,
init_parameters=workflow_init_parameters,
max_concurrent_steps=WORKFLOWS_MAX_CONCURRENT_STEPS,
)

# when
result = execution_engine.run(
runtime_parameters={
"image": dogs_image,
}
)

# then
assert len(result[0]["predictions"]) == 2, "Expected 2 predictions"
assert (
result[0]["predictions"].get("inference_id") is not None
), "Expected non-null inference_id"
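The tests above read the id either from the nested prediction payloads or through a "*" wildcard selector. Because each block now also declares inference_id as an output of its own, a workflow definition could expose it as a top-level field. A hedged sketch, assuming the direct selector "$steps.detection.inference_id" resolves against the new OutputDefinition; the step name and the extra JsonField are illustrative and not part of this commit.

OBJECT_DETECTION_WITH_INFERENCE_ID_OUTPUT = {
    "version": "1.0",
    "inputs": [{"type": "WorkflowImage", "name": "image"}],
    "steps": [
        {
            "type": "ObjectDetectionModel",
            "name": "detection",
            "image": "$inputs.image",
            "model_id": "yolov8n-640",
        },
    ],
    "outputs": [
        {
            "type": "JsonField",
            "name": "predictions",
            "selector": "$steps.detection.predictions",
        },
        {
            "type": "JsonField",
            "name": "inference_id",
            "selector": "$steps.detection.inference_id",  # assumed selector
        },
    ],
}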
