From dffd46118123e44e1af248f45660beaddcbca68e Mon Sep 17 00:00:00 2001
From: Vivek Miglani
Date: Sun, 29 Dec 2024 21:41:12 -0800
Subject: [PATCH] Fix layer activation pyre fixme issues

Differential Revision: D67706972
---
 captum/attr/_core/layer/layer_activation.py | 5 +----
 1 file changed, 1 insertion(+), 4 deletions(-)

diff --git a/captum/attr/_core/layer/layer_activation.py b/captum/attr/_core/layer/layer_activation.py
index 076323a274..d9aea9b27d 100644
--- a/captum/attr/_core/layer/layer_activation.py
+++ b/captum/attr/_core/layer/layer_activation.py
@@ -20,8 +20,7 @@ class LayerActivation(LayerAttribution):
 
     def __init__(
         self,
-        # pyre-fixme[24]: Generic type `Callable` expects 2 type parameters.
-        forward_func: Callable,
+        forward_func: Callable[..., Union[int, float, Tensor]],
         layer: ModuleOrModuleList,
         device_ids: Union[None, List[int]] = None,
     ) -> None:
@@ -132,8 +131,6 @@ def attribute(
             )
         else:
             return [
-                # pyre-fixme[6]: For 2nd argument expected `Tuple[Tensor, ...]` but
-                #  got `Tensor`.
                 _format_output(len(single_layer_eval) > 1, single_layer_eval)
                 for single_layer_eval in layer_eval
            ]
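
Note for reviewers: a minimal usage sketch of the narrowed annotation (the ToyNet
model, layer sizes, and input shape below are hypothetical, not from this patch).
Since an nn.Module's __call__ returns a Tensor, any module passed as forward_func
already satisfies Callable[..., Union[int, float, Tensor]], so the fixed signature
should not break existing callers:

    import torch
    import torch.nn as nn

    from captum.attr import LayerActivation


    class ToyNet(nn.Module):
        """Hypothetical two-layer network, used only to illustrate the annotation."""

        def __init__(self) -> None:
            super().__init__()
            self.lin1 = nn.Linear(3, 4)
            self.lin2 = nn.Linear(4, 2)

        def forward(self, x: torch.Tensor) -> torch.Tensor:
            return self.lin2(torch.relu(self.lin1(x)))


    net = ToyNet()
    # net is a Callable[..., Tensor], which type-checks against the new
    # Callable[..., Union[int, float, Tensor]] annotation on forward_func.
    layer_act = LayerActivation(net, net.lin1)
    attributions = layer_act.attribute(torch.rand(2, 3))
    print(attributions.shape)  # torch.Size([2, 4]): activations of lin1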