diff --git a/releases/1.35.1/Examples/tensorflow/quantization/keras/quantsim_cle.ipynb b/releases/1.35.1/Examples/tensorflow/quantization/keras/quantsim_cle.ipynb
index 0a55913..6adca66 100644
--- a/releases/1.35.1/Examples/tensorflow/quantization/keras/quantsim_cle.ipynb
+++ b/releases/1.35.1/Examples/tensorflow/quantization/keras/quantsim_cle.ipynb
@@ -277,7 +277,7 @@
 {
  "cell_type": "code",
  "execution_count": null,
- "id": "e3a590a2",
+ "id": "33b66b4d",
  "metadata": {
   "collapsed": false
  },
@@ -349,7 +349,7 @@
 {
  "cell_type": "code",
  "execution_count": null,
- "id": "ad19213c",
+ "id": "8a10e833",
  "metadata": {
   "collapsed": false
  },
@@ -406,7 +406,7 @@
 {
  "cell_type": "code",
  "execution_count": null,
- "id": "a3626c09",
+ "id": "86c35ac5",
  "metadata": {
   "collapsed": false
  },
diff --git a/releases/1.35.1/api_docs/onnx.html b/releases/1.35.1/api_docs/onnx.html
index c0d4d11..450964c 100644
--- a/releases/1.35.1/api_docs/onnx.html
+++ b/releases/1.35.1/api_docs/onnx.html
@@ -243,9 +243,15 @@
QuantAnalyzer.check_model_sensitivity_to_quantization()
QuantAnalyzer.perform_per_layer_analysis_by_enabling_quant_wrappers()
QuantAnalyzer.perform_per_layer_analysis_by_disabling_quant_wrappers()
QuantAnalyzer.export_per_layer_encoding_min_max_range()
QuantAnalyzer.export_per_layer_stats_histogram()
QuantAnalyzer.export_per_layer_mse_loss()
QuantScheme
QuantScheme.post_training_percentile
QuantScheme
QuantScheme.post_training_percentile
ClsSetInfo
QuantParams
AdapterMetaData
peft.replace_lora_layers_with_quantizable_layers()
peft.track_lora_meta_data()
PeftQuantUtils
PeftQuantUtils.disable_lora_adapters()
PeftQuantUtils.enable_adapter_and_load_weights()
PeftQuantUtils.export_adapter_weights()
PeftQuantUtils.freeze_base_model()
PeftQuantUtils.freeze_base_model_activation_quantizers()
PeftQuantUtils.freeze_base_model_param_quantizers()
PeftQuantUtils.get_fp_lora_layer()
PeftQuantUtils.get_quantized_lora_layer()
PeftQuantUtils.quantize_lora_scale_with_fixed_range()
PeftQuantUtils.set_bitwidth_for_lora_adapters()
QuantizerBase
+QuantizeDequantize
+Quantize
+QuantizationMixin
QuantizationMixin.input_quantizers
QuantizationMixin.output_quantizers
QuantizationMixin.param_quantizers
QuantizationMixin.forward()
QuantizationMixin.__quant_init__()
QuantizationMixin.set_kernel()
QuantizationMixin.set_default_kernel()
QuantizationMixin.compute_encodings()
QuantizationMixin.from_module()
QuantizationMixin.get_default_kernel()
QuantizationMixin.get_kernel()
QuantizationMixin.implements()
QuantScheme
QuantScheme.post_training_percentile
CostMetric
CostMetric.mac
CompressionScheme.weight_svd
ModuleCompRatioPair
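The entries added in this hunk (Quantize, QuantizeDequantize, QuantizationMixin) point at the aimet_torch.v2 quantization primitives that the rest of this index already references. As a rough orientation only, the sketch below shows how those names typically fit together; the import paths and constructor arguments are assumptions inferred from the entry names above, not taken from this diff, so treat it as illustrative rather than as the exact 1.35.1 API.

# Hedged sketch: import paths and signatures below are assumptions, not confirmed by this diff.
import torch
from aimet_torch.v2.quantization.affine import QuantizeDequantize
from aimet_torch.v2.nn import QuantizationMixin

# A per-tensor 8-bit fake quantizer: quantizes and immediately dequantizes its input.
qdq = QuantizeDequantize(shape=(1,), bitwidth=8, symmetric=True)

x = torch.randn(16, 64)
with qdq.compute_encodings():   # observe calibration data to derive scale/offset
    _ = qdq(x)
y = qdq(x)                      # fake-quantized output approximating x at 8 bits

# Wrap a float module in its quantized counterpart and attach an input quantizer.
qlinear = QuantizationMixin.from_module(torch.nn.Linear(64, 32))
qlinear.input_quantizers[0] = QuantizeDequantize(shape=(1,), bitwidth=8, symmetric=False)
with qlinear.compute_encodings():
    _ = qlinear(x)
out = qlinear(x)                # forward pass with quantization simulated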
The following api can be used to create a sparse tensor given indices and features in dense form
+Custom SparsetensorWrapper class for SparseConvTensor
+Initializes internal Module state, shared by both nn.Module and ScriptModule.
+The following api can be used to create a dense tensor given a sparse tensor
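As a loose illustration of the dense/sparse round trip these two entries describe, here is a hedged sketch that uses the spconv library's SparseConvTensor directly; the AIMET wrapper class named above is not shown in this diff, so its exact call signature is not assumed here.

# Hedged sketch using spconv directly; the AIMET SparsetensorWrapper API itself is not shown in this diff.
import torch
import spconv.pytorch as spconv

# A dense NCHW activation with a single active site (shapes chosen only for illustration).
dense = torch.zeros(1, 4, 8, 8)
dense[0, :, 2, 3] = 1.0

# Dense-form inputs to the sparse constructor: indices as [N, 1 + ndim] int32 rows of
# (batch_idx, y, x), and features as [N, channels].
indices = torch.tensor([[0, 2, 3]], dtype=torch.int32)
features = dense[0, :, 2, 3].unsqueeze(0)

# Sparse tensor built from indices/features given in dense form ...
sp = spconv.SparseConvTensor(features, indices, spatial_shape=[8, 8], batch_size=1)

# ... and a dense tensor recovered from the sparse one.
assert torch.allclose(sp.dense(), dense)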
+
+TensorFlow
# Tensorflow 2.10 GPU with CUDA 11.x
-python3 -m pip install https://github.com/quic/aimet/releases/download/1.35.1/aimet_tensorflow-1.35.1.cu118-cp310-cp310-manylinux_2_34_x86_64.whl
+python3 -m pip install https://github.com/quic/aimet/releases/download/1.35.1/aimet_tensorflow-1.35.1+cu118-cp310-cp310-manylinux_2_34_x86_64.whl
# Tensorflow 2.10 CPU only
-python3 -m pip install https://github.com/quic/aimet/releases/download/1.35.1/aimet_tensorflow-1.35.1.cpu-cp310-cp310-manylinux_2_34_x86_64.whl
+python3 -m pip install https://github.com/quic/aimet/releases/download/1.35.1/aimet_tensorflow-1.35.1+cpu-cp310-cp310-manylinux_2_34_x86_64.whl
ONNX
# ONNX 1.16 GPU with CUDA 11.x
-python3 -m pip install https://github.com/quic/aimet/releases/download/1.35.1/aimet_onnx-1.35.1.cu117-cp310-cp310-manylinux_2_34_x86_64.whl -f https://download.pytorch.org/whl/torch_stable.html
+python3 -m pip install https://github.com/quic/aimet/releases/download/1.35.1/aimet_onnx-1.35.1+cu118-cp310-cp310-manylinux_2_34_x86_64.whl -f https://download.pytorch.org/whl/torch_stable.html
# ONNX 1.16 CPU
-python3 -m pip install https://github.com/quic/aimet/releases/download/1.35.1/aimet_onnx-1.35.1.cpu-cp310-cp310-manylinux_2_34_x86_64.whl -f https://download.pytorch.org/whl/torch_stable.html
+python3 -m pip install https://github.com/quic/aimet/releases/download/1.35.1/aimet_onnx-1.35.1+cpu-cp310-cp310-manylinux_2_34_x86_64.whl -f https://download.pytorch.org/whl/torch_stable.html
For older versions, please browse the releases at https://github.com/quic/aimet/releases and follow the documentation corresponding to that release to select and install the appropriate package.
Set the package details as follows:
-# Set the release tag ex. "1.34.0"
+# Set the release tag ex. "1.35.1"
export release_tag="<version release tag>"
# Construct the download root URL
export download_url="https://github.com/quic/aimet/releases/download/${release_tag}"
# Set the wheel file name with extension
-# ex. "aimet_torch-1.34.0.cu121-cp310-cp310-manylinux_2_34_x86_64.whl"
+# ex. "aimet_torch-1.35.1+cu121-cp310-cp310-manylinux_2_34_x86_64.whl"
export wheel_file_name="<wheel file name>"
# NOTE: Do the following ONLY for the PyTorch and ONNX variant packages!
diff --git a/releases/1.35.1/install/install_host.html b/releases/1.35.1/install/install_host.html
index b25510d..e3419b2 100644
--- a/releases/1.35.1/install/install_host.html
+++ b/releases/1.35.1/install/install_host.html
@@ -1056,14 +1056,14 @@ From Release Package
Set the package details as follows:
-# Set the release tag ex. "1.34.0"
+# Set the release tag ex. "1.35.1"
export release_tag="<version release tag>"
# Construct the download root URL
export download_url="https://github.com/quic/aimet/releases/download/${release_tag}"
# Set the wheel file name with extension
-# ex. "aimet_torch-1.33.0.cu121-cp310-cp310-manylinux_2_34_x86_64.whl"
+# ex. "aimet_torch-1.35.1+cu121-cp310-cp310-manylinux_2_34_x86_64.whl"
export wheel_file_name="<wheel file name>"
# NOTE: Do the following ONLY for the PyTorch and ONNX variant packages!
diff --git a/releases/1.35.1/searchindex.js b/releases/1.35.1/searchindex.js
index e2a506a..1ddac88 100644
--- a/releases/1.35.1/searchindex.js
+++ b/releases/1.35.1/searchindex.js
@@ -1 +1 @@
22, 23, 24, 25, 28, 30, 32, 33, 35, 36, 37, 39, 41, 42, 44, 45, 46, 48, 51, 52, 53, 54, 55, 56, 57, 58, 59, 61, 64, 65, 66, 68, 70, 71, 72, 77, 78, 79, 80, 82, 83, 86, 88, 90, 111], "go": [0, 4, 7, 8, 12, 13, 18, 21, 25, 35, 51, 54, 68, 74, 75, 86, 90], "load": [0, 1, 2, 3, 8, 32, 33, 36, 39, 44, 45, 51, 54, 55, 56, 58, 59, 61, 62, 64, 68, 69, 86, 100], "pretrain": [0, 1, 2, 3, 4, 5, 10, 11, 13, 14, 15, 16, 17, 18, 19, 21, 22, 23, 24, 25, 33, 51, 53, 54, 57, 65, 66, 68, 70, 71, 86, 105, 108], "torchvis": [0, 1, 2, 3, 5, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 51, 53, 54, 57, 65, 66, 68, 70, 71, 90], "similarli": [0, 4, 12, 13, 18, 21, 25, 51, 68, 83, 86, 107], "instead": [0, 1, 2, 3, 4, 5, 6, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 30, 34, 51, 60, 61, 62, 68, 83, 86, 102, 103, 114], "input_shap": [0, 1, 2, 3, 4, 5, 9, 13, 14, 15, 16, 17, 18, 19, 22, 23, 24, 31, 33, 34, 37, 42, 46, 48, 51, 53, 55, 56, 57, 61, 65, 66, 68, 70, 90], "224": [0, 1, 2, 3, 4, 7, 10, 11, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 30, 31, 37, 39, 42, 45, 46, 48, 51, 53, 55, 57, 58, 65, 66, 68, 70, 71], "shape": [0, 1, 2, 3, 7, 8, 9, 13, 18, 19, 21, 22, 23, 24, 25, 33, 34, 35, 52, 56, 57, 61, 62, 64, 65, 69, 72, 78, 79, 80, 81, 83, 84, 88, 89, 90, 104], "channel": [0, 1, 2, 3, 14, 16, 18, 19, 21, 22, 23, 24, 25, 30, 46, 49, 52, 54, 64, 66, 71, 83, 86, 88, 93, 95, 96, 97, 99, 100, 103, 104, 106, 107, 108, 109, 110, 112, 113, 114], "x": [0, 1, 2, 3, 4, 5, 8, 9, 12, 13, 18, 19, 21, 22, 23, 24, 25, 30, 33, 34, 35, 39, 52, 60, 61, 62, 73, 79, 80, 81, 88, 90, 95, 101, 104], "height": [0, 1, 2, 3, 8, 18, 19, 21, 22, 23, 24, 25, 110, 113, 114], "width": [0, 1, 2, 3, 13, 18, 19, 21, 22, 23, 24, 25, 46, 49, 66, 91, 107, 108, 110, 113, 114], "dummy_input": [0, 1, 2, 3, 18, 19, 20, 21, 22, 23, 24, 25, 29, 32, 42, 44, 45, 46, 48, 51, 52, 53, 57, 58, 59, 60, 64, 65, 66, 68, 69, 82, 83, 86, 90], "randn": [0, 1, 2, 3, 20, 39, 42, 46, 48, 51, 53, 59, 61, 62, 66, 68, 69, 77, 78, 81, 84, 88, 89], "filenam": [0, 1, 2, 3, 28, 39, 41, 48, 51, 64, 68, 86], "resnet": [0, 1, 2, 3, 4, 5, 6, 10, 11, 12, 13, 14, 30, 33, 95], "18": [0, 1, 2, 3], "pt_model": [0, 1, 2, 3], "true": [0, 1, 2, 3, 4, 5, 7, 10, 11, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 30, 33, 36, 39, 41, 42, 45, 48, 49, 51, 52, 53, 54, 55, 56, 57, 59, 61, 62, 64, 65, 66, 68, 69, 70, 71, 72, 77, 78, 79, 80, 81, 83, 84, 88, 89, 90, 101, 106], "export": [0, 1, 2, 3, 4, 5, 8, 10, 11, 13, 14, 18, 19, 20, 22, 23, 24, 32, 33, 37, 39, 44, 45, 46, 48, 49, 51, 54, 58, 59, 60, 64, 66, 68, 72, 74, 75, 86, 87, 93, 96, 98, 100, 101, 102, 105, 108, 109], "eval": [0, 1, 2, 3, 4, 7, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 28, 30, 33, 37, 42, 45, 46, 51, 53, 55, 56, 57, 59, 61, 63, 65, 66, 68, 70, 71, 90, 97, 100, 111], "trainingmod": [0, 3], "export_param": [0, 1, 2, 3], "do_constant_fold": [0, 1, 2, 3], "input_nam": [0, 1, 2, 3, 68], "input": [0, 1, 2, 3, 5, 7, 8, 9, 10, 11, 12, 13, 14, 18, 21, 25, 30, 32, 33, 34, 35, 37, 42, 44, 45, 46, 48, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 64, 65, 66, 68, 69, 72, 77, 78, 79, 80, 82, 83, 84, 86, 88, 89, 90, 94, 97, 100, 104, 106, 108, 110, 113, 114], "output_nam": [0, 1, 2, 3, 68], "output": [0, 1, 2, 3, 4, 5, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 29, 30, 33, 34, 35, 37, 42, 45, 46, 48, 49, 51, 52, 53, 55, 59, 60, 61, 62, 64, 66, 67, 68, 69, 71, 72, 77, 78, 83, 88, 89, 90, 94, 100, 103, 104, 106, 108, 109, 110, 113, 114], "dynamic_ax": [0, 1, 2, 3], "0": [0, 1, 2, 3, 5, 
6, 8, 9, 10, 11, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 28, 29, 30, 35, 39, 41, 42, 44, 45, 48, 51, 52, 53, 55, 56, 59, 61, 62, 64, 65, 68, 69, 70, 72, 73, 74, 75, 77, 78, 79, 80, 81, 83, 84, 86, 88, 89, 90, 91, 95, 97, 101, 106], "load_model": [0, 1, 2, 3, 32], "befor": [0, 1, 2, 3, 4, 5, 6, 7, 8, 10, 11, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 32, 34, 35, 36, 38, 41, 42, 43, 44, 45, 46, 48, 51, 53, 58, 65, 67, 68, 71, 72, 77, 82, 88, 90, 91, 92, 93, 100, 102, 104, 105, 108, 112], "onnxsim": [0, 1, 2, 3, 41, 42, 43, 44, 45, 46, 48], "_": [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 18, 19, 20, 22, 23, 24, 25, 28, 32, 33, 35, 36, 37, 41, 42, 43, 44, 45, 46, 48, 53, 58, 66, 77, 78, 82, 87, 88, 89, 90, 102], "except": [0, 1, 2, 3, 9, 12, 25, 35, 83], "print": [0, 1, 2, 3, 7, 8, 9, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 30, 33, 42, 45, 51, 52, 53, 56, 59, 61, 62, 68, 72, 75, 77, 78, 88, 90, 104], "fail": [0, 1, 2, 3, 52, 61, 62, 92, 101, 102], "proceed": [0, 1, 2, 3], "unsimplifi": [0, 1, 2, 3], "whether": [0, 1, 2, 3, 5, 13, 15, 16, 17, 18, 19, 21, 22, 23, 24, 25, 32, 38, 44, 58, 61, 62, 67, 91, 97, 105], "cpu": [0, 1, 2, 3, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 51, 53, 54, 57, 58, 61, 65, 68, 69, 71, 73, 74, 90, 102, 109], "cuda": [0, 1, 2, 3, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 41, 42, 44, 48, 53, 54, 55, 56, 59, 66, 68, 70, 73, 74, 75, 90], "devic": [0, 1, 2, 3, 7, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 32, 41, 42, 44, 48, 51, 53, 54, 55, 58, 59, 61, 64, 65, 68, 70, 71, 86, 87, 90, 108], "your": [0, 1, 2, 3, 5, 6, 7, 12, 13, 14, 15, 16, 17, 18, 19, 21, 22, 23, 24, 25, 42, 48, 51, 54, 55, 61, 62, 68, 72, 73, 74, 75, 77, 87, 91, 92, 95, 96, 100, 101, 102, 105, 108, 111], "environ": [0, 4, 6, 7, 10, 11, 18, 25, 33, 73, 96], "chang": [0, 1, 2, 3, 4, 7, 8, 15, 16, 17, 18, 19, 21, 22, 23, 24, 25, 33, 38, 45, 49, 51, 53, 59, 60, 61, 64, 67, 68, 71, 82, 83, 86, 90, 91, 100, 104, 105, 106, 108, 112, 114], "logic": [0, 1, 2, 3, 15, 16, 17, 18, 19, 22, 23, 24, 25, 33, 45, 59, 77, 88, 109], "forc": [0, 1, 2, 3, 15, 16, 17, 18, 19, 22, 23, 24, 25], "placement": [0, 1, 2, 3, 15, 16, 17, 18, 19, 22, 23, 24, 25, 106], "cudnn_conv_algo_search": [0, 1, 2, 3], "fix": [0, 1, 2, 3, 49, 62, 87, 98, 102, 107, 108, 109], "default": [0, 1, 2, 3, 4, 5, 6, 7, 8, 12, 13, 15, 17, 18, 19, 20, 21, 22, 23, 24, 25, 28, 30, 33, 35, 37, 39, 41, 42, 45, 46, 48, 51, 53, 55, 56, 59, 61, 66, 68, 74, 75, 77, 78, 82, 86, 88, 89, 91, 97, 100, 106, 108, 109, 111], "avoid": [0, 1, 2, 3, 4, 5, 7, 8, 10, 11, 12, 13, 14, 18, 19, 22, 23, 24, 25, 34, 39, 46, 51, 60, 66, 68, 91, 95], "everi": [0, 1, 2, 3, 4, 7, 8, 10, 11, 12, 13, 15, 16, 17, 18, 21, 23, 24, 25, 28, 32, 37, 41, 44, 46, 51, 58, 66, 68, 90, 97, 100, 105, 112], "cudaexecutionprovid": [0, 1, 2, 3], "get_available_provid": [0, 1, 2, 3], "cpuexecutionprovid": [0, 1, 2, 3], "use_cuda": [0, 1, 2, 3, 4, 15, 16, 17, 18, 19, 21, 22, 23, 24, 25, 30, 41, 42, 44, 48, 51, 54, 56, 68], "let": [0, 4, 7, 8, 13, 18, 51, 61, 68, 90], "determin": [0, 8, 18, 20, 28, 30, 37, 39, 41, 46, 48, 49, 51, 56, 66, 68, 79, 80, 83, 88, 90, 92, 102, 103, 104, 112], "32": [0, 1, 2, 4, 5, 6, 8, 9, 10, 11, 13, 14, 15, 16, 17, 18, 19, 20, 22, 23, 24, 28, 33, 34, 35, 37, 42, 49, 51, 52, 61, 62, 68, 74, 75, 78, 86, 89, 107], "routin": [0, 1, 2, 3, 4, 5, 6, 7, 8, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 21, 22, 23, 24, 25, 51, 68], "serializetostr": [0, 1, 2, 3, 44], "batchnorm": [0, 1, 2, 3, 5, 10, 11, 13, 14, 18, 19, 22, 23, 24, 31, 35, 36, 38, 42, 43, 52, 53, 55, 
57, 65, 71, 92, 103, 114], "bn": [0, 1, 2, 3, 5, 7, 10, 11, 13, 14, 18, 19, 22, 23, 24, 29, 36, 55, 65, 67, 102, 104, 109], "These": [0, 2, 6, 8, 13, 18, 20, 22, 28, 39, 41, 42, 48, 53, 67, 68, 72, 75, 88, 90, 92, 93, 94, 95, 101, 102, 103, 104, 107, 108], "adjac": [0, 1, 2, 3, 5, 10, 11, 13, 14, 18, 19, 22, 23, 24, 90, 106], "convolut": [0, 1, 2, 3, 5, 9, 10, 11, 13, 14, 15, 17, 18, 19, 22, 23, 24, 30, 35, 36, 52, 83, 90, 93, 94, 95, 100, 107, 110, 113, 114], "cannot": [0, 1, 2, 3, 5, 10, 11, 13, 14, 18, 19, 22, 23, 24, 52, 61, 62, 78, 89], "thei": [0, 1, 2, 3, 5, 8, 9, 10, 11, 12, 13, 14, 18, 19, 22, 23, 24, 25, 34, 36, 61, 65, 67, 83, 107, 111], "why": [0, 13, 18, 107], "do": [0, 1, 2, 3, 4, 5, 6, 7, 9, 12, 13, 14, 15, 16, 17, 18, 19, 21, 22, 23, 24, 25, 29, 33, 35, 39, 41, 51, 61, 63, 64, 71, 74, 75, 90, 100, 104, 107, 108], "On": [0, 13, 18, 49, 73], "runtim": [0, 1, 2, 3, 5, 10, 11, 13, 14, 18, 19, 22, 23, 24, 39, 48, 49, 51, 56, 68, 69, 77, 83, 86, 87, 90, 95, 98, 100, 102, 104, 106, 108, 109], "tflite": [0, 13, 18], "snapdragon": [0, 13, 18], "neural": [0, 6, 13, 18, 20, 87, 90, 92, 95, 98, 100, 102, 105, 107, 108, 113], "process": [0, 1, 2, 3, 4, 5, 7, 8, 10, 11, 13, 14, 18, 19, 22, 23, 24, 30, 32, 44, 51, 58, 68, 70, 83, 84, 87, 90, 92, 94, 97, 98, 100, 103, 108], "sdk": [0, 13, 18, 87, 90, 98], "etc": [0, 4, 7, 8, 13, 18, 49, 51, 68, 74, 95, 102], "practic": [0, 4, 6, 7, 8, 12, 13, 15, 16, 17, 18, 20, 25, 51, 68, 90, 100], "so": [0, 1, 4, 5, 7, 8, 9, 10, 11, 12, 13, 15, 16, 17, 18, 19, 21, 23, 24, 25, 28, 29, 37, 39, 41, 46, 48, 51, 56, 61, 62, 63, 66, 68, 72, 75, 88, 97, 101, 104], "speedup": [0, 13, 18], "sinc": [0, 1, 2, 3, 4, 5, 7, 8, 9, 10, 11, 12, 13, 14, 18, 19, 22, 23, 24, 25, 48, 51, 64, 68, 83, 93, 95, 108], "unnecessari": [0, 13, 18, 90, 114], "now": [0, 1, 2, 3, 4, 7, 8, 9, 10, 11, 13, 15, 16, 17, 18, 19, 21, 22, 23, 24, 25, 30, 35, 51, 61, 62, 68, 69, 86, 90, 109], "perspect": [0, 13, 18], "mathemat": [0, 13, 18, 60], "equival": [0, 13, 18, 19, 22, 23, 24, 25, 51, 59, 60, 67, 68, 77, 78, 79, 80, 83, 90], "produc": [0, 12, 13, 18, 25, 42, 49, 53, 61, 81, 82, 83, 84, 97, 104], "same": [0, 1, 2, 3, 4, 5, 7, 8, 9, 10, 11, 13, 15, 16, 17, 18, 19, 21, 22, 23, 24, 28, 32, 35, 44, 51, 53, 58, 61, 62, 64, 66, 68, 69, 72, 77, 81, 83, 88, 89, 93, 103, 106, 112], "increas": [0, 13, 18, 30, 56, 71, 83, 97, 103], "rang": [0, 1, 2, 3, 4, 5, 7, 8, 9, 10, 13, 14, 18, 19, 20, 21, 22, 23, 35, 37, 46, 51, 53, 54, 61, 66, 68, 71, 78, 82, 84, 90, 91, 93, 96, 97, 102, 103, 104, 105, 107, 108, 109, 112], "tensor": [0, 2, 9, 12, 13, 14, 18, 22, 23, 24, 25, 28, 32, 35, 38, 39, 45, 47, 48, 49, 51, 52, 53, 56, 57, 58, 59, 60, 61, 64, 65, 66, 67, 68, 69, 77, 78, 79, 80, 83, 84, 86, 88, 89, 90, 91, 94, 101, 102, 104, 106, 107, 108, 109], "valu": [0, 1, 2, 4, 5, 6, 7, 8, 9, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 25, 28, 30, 33, 36, 37, 41, 45, 46, 49, 51, 55, 56, 58, 59, 61, 64, 65, 66, 68, 77, 78, 79, 80, 81, 82, 83, 84, 86, 89, 90, 91, 94, 95, 97, 100, 102, 103, 104, 105, 106, 108, 110, 112, 113], "weight": [0, 1, 2, 3, 4, 5, 6, 9, 10, 11, 12, 13, 14, 18, 19, 20, 22, 23, 24, 25, 28, 30, 33, 35, 36, 37, 38, 39, 41, 45, 46, 47, 49, 51, 55, 59, 62, 64, 65, 66, 67, 68, 71, 72, 77, 82, 83, 84, 86, 88, 90, 91, 93, 95, 100, 102, 103, 104, 105, 106, 107, 108, 112], "And": [0, 13, 18, 36, 65], "neg": [0, 7, 8, 13, 18, 78, 83], "impact": [0, 13, 18, 90, 97, 107], "especi": [0, 13, 18, 73, 102, 105, 107], "want": [0, 4, 7, 8, 12, 13, 18, 25, 32, 33, 36, 39, 44, 45, 48, 51, 58, 59, 61, 65, 67, 68, 74, 
77], "behavior": [0, 13, 18, 19, 22, 23, 24, 61, 72, 88, 90, 98], "here": [0, 1, 2, 3, 4, 5, 7, 8, 9, 10, 11, 12, 13, 14, 15, 17, 18, 19, 21, 22, 23, 24, 25, 28, 29, 30, 31, 41, 51, 53, 54, 57, 61, 64, 66, 68, 69, 72, 86, 90, 102, 105, 106], "place": [0, 1, 2, 3, 5, 6, 7, 12, 13, 14, 15, 16, 17, 18, 19, 21, 22, 23, 24, 25, 33, 39, 43, 45, 51, 55, 57, 59, 65, 68, 83, 86, 106], "aimet_onnx": [0, 1, 2, 3, 41, 42, 43, 44, 45, 46, 48, 73, 75], "batch_norm_fold": [0, 1, 2, 3, 4, 5, 7, 10, 11, 13, 14, 18, 19, 21, 22, 23, 24, 29, 36, 54, 65, 71, 90], "fold_all_batch_norms_to_weight": [0, 1, 2, 3], "basic": [0, 4, 7, 8, 18, 21, 51, 68, 75, 87, 90], "mean": [0, 4, 7, 8, 9, 12, 13, 15, 16, 17, 18, 20, 21, 22, 25, 38, 51, 54, 68, 71, 88, 90, 104, 106, 108], "graph": [0, 1, 2, 3, 4, 5, 7, 8, 10, 11, 13, 14, 15, 17, 18, 19, 21, 22, 23, 24, 25, 30, 35, 49, 51, 52, 61, 68, 70, 86, 101, 102, 108, 111], "configur": [0, 1, 2, 3, 4, 5, 7, 8, 10, 11, 14, 18, 19, 21, 22, 23, 24, 28, 37, 41, 42, 46, 48, 49, 51, 53, 55, 64, 66, 68, 83, 86, 90, 99, 109], "them": [0, 1, 2, 3, 4, 5, 7, 8, 9, 10, 11, 12, 13, 14, 18, 19, 21, 22, 23, 24, 25, 35, 39, 42, 46, 51, 61, 62, 64, 66, 68, 72, 88, 90, 100], "few": [0, 1, 2, 3, 4, 5, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 21, 22, 23, 24, 25, 51, 68, 90, 95], "explain": [0, 4, 7, 8, 12, 18, 21, 25, 51, 68, 94, 100, 103, 108], "quant_schem": [0, 1, 2, 3, 4, 5, 7, 8, 10, 11, 12, 13, 14, 18, 19, 21, 22, 23, 24, 25, 28, 37, 39, 42, 46, 48, 49, 51, 53, 55, 66, 68, 69, 82, 86], "quantschem": [0, 1, 2, 3, 4, 5, 7, 8, 10, 11, 12, 13, 14, 18, 19, 21, 22, 23, 24, 25, 28, 37, 39, 41, 42, 46, 48, 51, 53, 54, 55, 66, 68, 69, 72, 82, 86, 92], "post_training_tf_enhanc": [0, 1, 2, 3, 4, 7, 8, 12, 18, 19, 21, 22, 23, 25, 28, 37, 39, 41, 42, 46, 48, 49, 51, 53, 55, 66, 68], "support": [0, 4, 7, 8, 9, 18, 21, 23, 24, 26, 28, 30, 31, 33, 34, 35, 37, 39, 41, 45, 46, 48, 51, 55, 56, 59, 60, 61, 66, 67, 68, 69, 73, 77, 83, 86, 94, 95, 98, 99, 100, 101, 102, 103, 106, 107, 108, 109, 113], "option": [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 28, 30, 33, 36, 37, 39, 41, 42, 45, 46, 48, 51, 52, 53, 54, 55, 56, 59, 61, 64, 65, 66, 68, 71, 75, 77, 78, 83, 86, 87, 89, 90, 91, 104, 106, 108], "tf_enhanc": [0, 4, 7, 8, 18, 21, 39, 55], "tf": [0, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 18, 21, 25, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 39, 45, 46, 48, 55, 66, 74, 104, 108, 109], "quant": [0, 1, 4, 5, 7, 8, 10, 11, 13, 14, 18, 19, 20, 21, 22, 23, 24, 28, 38, 39, 41, 42, 45, 48, 51, 55, 59, 67, 68, 93], "scheme": [0, 4, 7, 8, 12, 15, 16, 17, 18, 21, 25, 28, 30, 37, 39, 41, 42, 46, 48, 51, 53, 55, 56, 58, 66, 68, 83, 84, 92, 93, 100, 104], "enum": [0, 4, 7, 8, 18, 21, 30, 33, 41, 45, 55, 56, 59], "post_training_tf": [0, 4, 5, 7, 8, 10, 13, 14, 18, 21, 22, 28, 37, 39, 41, 46, 48, 49, 51, 55, 66, 68, 69, 82, 86], "default_output_bw": [0, 1, 2, 3, 4, 5, 7, 8, 10, 11, 12, 13, 14, 18, 19, 21, 22, 23, 24, 25, 28, 37, 39, 45, 48, 51, 59, 66, 68, 86, 90], "8": [0, 1, 2, 3, 4, 5, 7, 8, 10, 11, 12, 13, 14, 16, 17, 18, 19, 21, 22, 23, 24, 25, 28, 33, 36, 37, 39, 41, 42, 45, 46, 48, 49, 51, 53, 55, 56, 59, 61, 62, 64, 66, 68, 70, 72, 73, 75, 77, 78, 79, 80, 81, 83, 84, 86, 88, 89, 90, 102, 107, 114], "essenti": [0, 4, 7, 8, 18, 21, 83, 87], "ask": [0, 4, 7, 8, 12, 18, 21, 25, 77], "all": [0, 1, 2, 3, 4, 5, 6, 7, 8, 10, 11, 12, 13, 14, 18, 19, 21, 22, 23, 24, 25, 29, 31, 33, 34, 35, 36, 39, 41, 45, 46, 48, 49, 51, 54, 55, 56, 59, 61, 62, 64, 65, 66, 67, 68, 71, 72, 
73, 74, 77, 82, 83, 86, 88, 90, 94, 97, 100, 103, 104, 106, 107], "activ": [0, 1, 2, 3, 4, 5, 7, 8, 9, 10, 11, 12, 13, 14, 18, 19, 21, 22, 23, 24, 25, 33, 34, 35, 36, 37, 39, 45, 46, 48, 49, 51, 52, 55, 59, 60, 61, 64, 65, 66, 68, 69, 82, 83, 86, 88, 90, 102, 104, 105, 106, 107, 108], "integ": [0, 1, 2, 3, 4, 5, 7, 8, 10, 11, 12, 13, 14, 18, 19, 21, 22, 23, 24, 25, 28, 37, 39, 41, 46, 48, 49, 66, 68, 78, 83, 91, 102, 104], "default_param_bw": [0, 1, 2, 3, 4, 5, 7, 8, 10, 11, 12, 13, 14, 18, 19, 21, 22, 23, 24, 25, 28, 37, 39, 41, 45, 46, 48, 51, 59, 66, 68, 86, 90], "There": [0, 1, 4, 5, 7, 8, 12, 13, 14, 18, 19, 21, 22, 23, 24, 25, 58, 62, 68, 72, 73, 101, 103, 105, 112], "other": [0, 1, 2, 3, 4, 5, 6, 7, 8, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 36, 60, 61, 65, 67, 68, 69, 72, 73, 83, 86, 97, 99, 102, 104, 107, 108, 109], "check": [0, 4, 7, 8, 12, 18, 21, 25, 32, 35, 37, 38, 44, 46, 52, 58, 60, 61, 62, 66, 67, 68, 92, 102, 105, 107], "document": [0, 1, 2, 3, 4, 5, 6, 7, 8, 10, 11, 12, 14, 18, 19, 20, 21, 22, 23, 24, 25, 26, 33, 64, 70, 73, 75, 98, 109, 111], "refer": [0, 1, 2, 3, 4, 5, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 21, 22, 23, 24, 25, 45, 49, 51, 55, 58, 59, 64, 68, 72, 83, 86, 88, 91, 96, 108], "aimet_common": [0, 1, 2, 3, 4, 5, 7, 8, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114], "quantsim": [0, 1, 2, 3, 4, 5, 7, 8, 10, 11, 18, 19, 21, 22, 23, 24, 28, 32, 39, 41, 44, 45, 46, 48, 51, 53, 54, 55, 58, 59, 64, 66, 67, 68, 69, 72, 82, 83, 86, 87, 96, 102, 105, 106, 109], "default_activation_bw": [0, 1, 2, 3, 41, 46, 48], "even": [0, 4, 7, 8, 13, 18, 51, 68, 77], "though": [0, 4, 7, 8, 13, 18, 51, 68, 77, 83, 106], "ad": [0, 1, 2, 3, 4, 5, 7, 8, 10, 11, 13, 14, 15, 16, 17, 18, 19, 22, 23, 24, 48, 49, 51, 62, 64, 68, 69, 90, 99, 106, 109], "node": [0, 1, 2, 3, 4, 5, 7, 8, 10, 11, 13, 14, 18, 19, 22, 23, 24, 39, 51, 52, 59, 60, 61, 68, 90, 105, 108], "readi": [0, 1, 2, 3, 4, 7, 8, 10, 11, 13, 15, 16, 17, 18, 19, 22, 23, 24, 25, 39, 51, 68, 90, 107], "yet": [0, 4, 7, 8, 13, 18, 51, 68], "scale": [0, 1, 2, 3, 4, 5, 7, 8, 10, 11, 12, 13, 14, 18, 19, 20, 22, 23, 24, 25, 29, 31, 36, 43, 49, 51, 54, 57, 64, 65, 68, 77, 78, 79, 80, 81, 83, 86, 89, 93, 102, 103, 104, 105, 108], "offset": [0, 1, 2, 3, 4, 5, 7, 8, 10, 11, 12, 13, 14, 18, 19, 20, 22, 23, 24, 25, 37, 46, 49, 51, 66, 68, 77, 78, 83, 84, 86, 89, 102, 104, 105, 108], "pass": [0, 1, 2, 3, 4, 5, 7, 8, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 21, 22, 23, 24, 25, 28, 30, 32, 33, 35, 36, 37, 39, 41, 42, 44, 45, 46, 48, 51, 52, 54, 55, 56, 58, 59, 60, 61, 62, 63, 64, 65, 66, 68, 72, 77, 83, 86, 87, 88, 90, 98, 101, 103, 104, 105, 107, 108, 109, 111], "unlabel": [0, 1, 2, 3, 4, 5, 6, 7, 8, 10, 11, 12, 13, 14, 15, 17, 18, 19, 20, 22, 23, 24, 25, 42, 46, 48, 51, 53, 66, 68, 91, 102, 104], "through": [0, 1, 2, 3, 4, 5, 7, 8, 10, 11, 12, 13, 14, 18, 19, 21, 22, 23, 24, 25, 35, 45, 51, 55, 59, 61, 66, 68, 71, 72, 86, 88, 90, 103, 104, 108], "collect": [0, 1, 2, 3, 4, 5, 7, 8, 10, 11, 13, 14, 18, 19, 22, 23, 24, 42, 46, 51, 53, 66, 68, 82, 83, 97, 104], "statist": [0, 1, 2, 3, 4, 5, 7, 8, 10, 11, 13, 14, 15, 16, 17, 18, 19, 22, 23, 
24, 29, 30, 37, 38, 46, 51, 54, 56, 66, 67, 68, 71, 78, 79, 80, 82, 84, 88, 89, 90, 93, 102, 104, 112], "which": [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 21, 22, 23, 24, 25, 28, 30, 32, 33, 35, 36, 38, 41, 44, 45, 46, 49, 51, 52, 55, 56, 58, 59, 61, 62, 64, 65, 67, 68, 71, 74, 75, 77, 78, 81, 83, 84, 86, 87, 88, 89, 90, 93, 95, 97, 100, 102, 103, 104, 106, 108, 109, 111, 114], "calcul": [0, 1, 2, 3, 4, 5, 7, 8, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 22, 23, 24, 25, 28, 33, 39, 42, 45, 48, 51, 53, 59, 65, 68, 84, 88, 97, 103, 104, 105, 108], "sometim": [0, 1, 2, 3, 4, 5, 7, 8, 10, 11, 13, 14, 18, 19, 22, 23, 24, 51, 68, 94, 100, 103, 104], "calibr": [0, 1, 2, 3, 4, 5, 7, 8, 10, 11, 12, 13, 14, 18, 19, 22, 23, 24, 25, 37, 41, 46, 48, 51, 66, 68, 78, 82, 83, 84, 86, 87, 88, 89, 90, 102, 104, 105, 107, 108], "simpli": [0, 4, 7, 8, 13, 18, 21, 28, 37, 39, 41, 46, 48, 51, 61, 66, 68], "benefici": [0, 4, 7, 8, 12, 13, 18, 25, 51, 68, 104], "forward": [0, 3, 4, 7, 8, 9, 10, 11, 12, 18, 19, 21, 22, 23, 24, 25, 33, 35, 37, 39, 45, 46, 48, 51, 52, 54, 59, 60, 61, 62, 63, 65, 66, 68, 69, 74, 77, 78, 88, 89, 90, 101, 104, 107, 109], "well": [0, 1, 2, 3, 4, 5, 6, 7, 8, 10, 11, 12, 13, 14, 18, 19, 20, 22, 23, 24, 25, 33, 46, 51, 52, 62, 64, 66, 68, 81, 83, 86, 100, 103, 104], "distribut": [0, 1, 2, 3, 4, 5, 7, 8, 10, 11, 12, 13, 14, 18, 19, 22, 23, 24, 25, 30, 51, 56, 68, 103, 107, 108], "doesn": [0, 1, 2, 3, 5, 6, 14, 15, 16, 17, 18, 19, 22, 23, 24, 63, 77, 105], "t": [0, 4, 7, 8, 12, 13, 18, 21, 25, 33, 45, 51, 59, 61, 63, 64, 67, 68, 74, 77, 91, 104, 105], "look": [0, 4, 7, 8, 9, 12, 13, 18, 25, 35, 45, 51, 59, 68, 90, 92], "definit": [0, 4, 7, 8, 12, 13, 18, 19, 22, 23, 24, 25, 34, 38, 60, 61, 64, 67, 72, 77, 90, 102], "extrem": [0, 1, 2, 3, 4, 5, 7, 8, 10, 11, 12, 13, 14, 18, 19, 22, 23, 24, 25, 51, 68, 108], "bias": [0, 2, 14, 18, 22, 102], "origin": [0, 1, 2, 3, 4, 5, 6, 7, 8, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 21, 22, 23, 24, 25, 30, 32, 33, 35, 44, 56, 58, 61, 68, 72, 77, 88, 90, 94, 95, 97, 102, 103, 104, 108, 111], "consist": [0, 18, 58, 72, 83, 97, 108], "dark": [0, 1, 2, 3, 4, 5, 10, 11, 12, 13, 14, 18, 19, 22, 23, 24, 25, 51, 68], "light": [0, 1, 2, 3, 4, 5, 10, 11, 12, 13, 14, 18, 19, 22, 23, 24, 25, 51, 68], "mani": [0, 4, 7, 8, 12, 13, 15, 16, 17, 18, 25, 61, 91, 95, 103], "differ": [0, 4, 7, 8, 13, 15, 16, 17, 18, 23, 24, 30, 51, 53, 56, 61, 64, 66, 68, 69, 72, 74, 75, 83, 94, 97, 100, 102, 103, 105, 107, 108], "wai": [0, 1, 2, 3, 4, 5, 7, 8, 9, 10, 11, 12, 13, 14, 18, 19, 22, 23, 24, 25, 30, 56, 58, 68, 72, 73, 83, 90, 101], "just": [0, 4, 7, 8, 12, 13, 18, 23, 24, 25, 51, 68, 108, 114], "pass_calibration_data": [0, 1, 2, 3, 5, 10, 11, 12, 13, 14, 18, 19, 21, 22, 23, 24, 25, 41, 48, 51, 68], "get_input": [0, 1, 2, 3], "name": [0, 1, 2, 3, 7, 9, 12, 25, 30, 32, 33, 35, 41, 44, 45, 46, 49, 55, 58, 59, 64, 66, 68, 74, 75, 77, 82, 86, 88, 89, 103, 108, 109, 111], "batch_cntr": [0, 1, 2, 3, 5, 10, 11, 13, 14, 19, 21, 22, 23, 24, 25, 42], "input_data": [0, 1, 2, 3, 18, 19, 21, 22, 23, 24, 25, 28, 42, 48, 51, 68], "target_data": [0, 1, 2, 3, 18, 19, 21, 22, 23, 24, 25, 51, 68], "inputs_batch": [0, 1, 2, 3, 18, 19, 21, 22, 23, 24, 25, 51, 68], "numpi": [0, 1, 2, 3, 9, 28, 30, 33, 37, 39, 42, 45, 46, 48], "break": [0, 1, 2, 3, 4, 5, 10, 11, 13, 14, 19, 21, 22, 23, 24, 25, 30, 33, 42, 48, 51, 68, 90], "subsequ": [0, 2, 13, 14, 18, 51, 68, 81, 86, 101, 103, 106], "compute_encod": [0, 1, 2, 3, 4, 5, 7, 8, 10, 11, 13, 14, 18, 19, 21, 22, 23, 24, 28, 39, 45, 48, 
51, 59, 68, 69, 72, 77, 78, 79, 80, 81, 82, 84, 86, 87, 88, 89, 90], "forward_pass_callback": [0, 1, 2, 3, 5, 10, 11, 12, 13, 14, 18, 19, 21, 22, 23, 24, 25, 28, 33, 37, 39, 41, 45, 46, 48, 59, 66, 68], "forward_pass_callback_arg": [0, 1, 2, 3, 4, 5, 7, 10, 11, 13, 14, 18, 19, 21, 22, 23, 24, 39, 41, 45, 48, 51, 59, 68], "10000": [0, 1, 5, 13, 18, 19, 28, 51, 91], "initi": [0, 1, 5, 11, 12, 13, 18, 19, 23, 24, 25, 28, 44, 51, 68, 69, 77, 78, 79, 80, 88, 89, 91, 105, 108], "phase": [0, 4, 18, 33, 45, 59, 100], "compris": [0, 4, 18, 97], "sensit": [0, 2, 4, 14, 18, 22, 33, 37, 38, 45, 46, 47, 59, 66, 67, 97, 102, 104, 107, 108, 109], "greedili": [0, 4, 18], "select": [0, 4, 18, 28, 42, 51, 68, 71, 73, 74, 75, 92, 95, 104, 108, 111, 114], "bitwidth": [0, 1, 4, 5, 13, 18, 19, 28, 33, 37, 39, 41, 42, 45, 46, 48, 49, 51, 53, 55, 59, 64, 66, 68, 72, 78, 79, 80, 81, 83, 86, 88, 89, 90, 93, 102, 107, 108], "base": [0, 1, 4, 5, 6, 7, 13, 18, 19, 20, 21, 28, 29, 41, 45, 46, 48, 51, 59, 64, 66, 68, 69, 72, 74, 77, 78, 79, 80, 83, 84, 88, 89, 94, 95, 97, 102, 111], "three": [0, 15, 16, 17, 18, 31, 61, 92, 95, 112], "eval_callback_for_phase1": [0, 18, 33, 45, 59], "eval_callback_for_phase2": [0, 18, 33, 45, 59], "callbackfunc": [0, 4, 12, 18, 25, 37, 46, 66], "object": [0, 6, 7, 12, 18, 19, 20, 22, 23, 24, 25, 28, 32, 33, 37, 39, 41, 42, 44, 45, 46, 48, 53, 56, 58, 59, 64, 66, 68, 77, 81, 82, 83, 86, 88, 89, 90, 93, 102, 105, 108], "In": [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 30, 34, 35, 37, 38, 42, 46, 49, 51, 52, 53, 56, 59, 60, 61, 62, 66, 67, 68, 72, 77, 83, 88, 90, 91, 95, 97, 100, 102, 103, 105, 106, 108, 111, 112, 114], "reus": [0, 18, 34, 60, 61, 62], "previou": [0, 4, 18, 22, 30, 33, 36, 45, 56, 59, 65, 67, 90, 95, 107], "snippet": [0, 18, 49, 61, 83, 86], "func_callback_arg": [0, 18, 33, 45, 46, 59, 66], "measur": [0, 15, 16, 17, 18, 30, 33, 45, 46, 56, 59, 66], "score": [0, 1, 2, 3, 4, 6, 7, 20, 21, 30, 33, 42, 45, 46, 51, 53, 56, 59, 66, 70, 97, 100, 111], "respecit": [0, 18], "both": [0, 4, 11, 17, 18, 23, 24, 35, 49, 51, 61, 66, 68, 69, 72, 73, 78, 83, 86, 87, 88, 100, 102, 103, 105, 106, 107, 108, 110, 111, 114], "qualiti": [0, 18], "slightli": [0, 18], "goal": [0, 18, 37, 42, 46, 53, 66, 92], "rough": [0, 18], "wherea": [0, 18, 59, 90, 108], "callbak": [0, 18], "impli": [0, 18, 45, 59], "flexibl": [0, 18], "than": [0, 1, 5, 7, 8, 18, 19, 21, 29, 30, 36, 41, 49, 51, 52, 56, 59, 61, 62, 65, 68, 72, 83, 86, 90, 95, 99, 105, 106], "smaller": [0, 13, 18, 33, 41, 45, 51, 52, 59, 98, 107, 110, 113], "indirect": [0, 18], "sqnr": [0, 4, 18, 45, 59, 84, 108], "between": [0, 13, 18, 28, 30, 36, 41, 45, 46, 51, 53, 56, 58, 59, 65, 66, 68, 72, 75, 83, 88, 103, 104, 106, 108], "faster": [0, 6, 7, 13, 18, 20, 33, 84, 98], "correl": [0, 18], "metric": [0, 1, 2, 3, 4, 5, 6, 7, 8, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 22, 23, 24, 25, 30, 37, 39, 51, 56, 68, 104, 108], "aimet_torch": [0, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 51, 52, 53, 54, 55, 56, 57, 58, 59, 61, 62, 64, 65, 66, 68, 69, 70, 71, 73, 74, 75, 77, 78, 79, 80, 81, 82, 83, 84, 86, 87, 88, 89, 90, 101], "mixed_precision_algo": [0, 4, 18, 45, 59], "evalcallbackfactori": [0, 18, 45, 59], "forward_one_batch": [0, 18], "label": [0, 4, 5, 6, 7, 10, 11, 13, 18, 20, 30, 33, 37, 46, 51, 53, 66, 68, 104, 105], "eval_callback_factori": [0, 18], "forward_fn": [0, 1, 7, 18, 21, 41, 45, 51, 54, 59, 86], "small": [0, 1, 2, 3, 4, 5, 6, 7, 8, 10, 11, 12, 13, 14, 18, 19, 20, 22, 23, 24, 25, 51, 68, 
93, 98, 102], "hand": [0, 18, 102], "miou": [0, 18], "full": [0, 1, 2, 3, 4, 5, 7, 10, 11, 14, 18, 19, 21, 22, 23, 24, 30, 33, 34, 38, 60, 67, 88], "appli": [0, 7, 8, 15, 16, 17, 18, 20, 21, 28, 30, 31, 36, 38, 41, 42, 43, 45, 46, 51, 53, 55, 56, 62, 65, 67, 69, 71, 73, 77, 78, 83, 86, 88, 89, 90, 91, 92, 93, 96, 100, 102, 103, 105, 106, 107, 108, 109, 111, 112], "dummi": [0, 12, 18, 25, 28, 39, 41, 42, 46, 48, 51, 52, 53, 57, 58, 59, 65, 66, 68, 86, 104], "one": [0, 1, 2, 3, 4, 5, 8, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 22, 23, 24, 25, 36, 49, 51, 52, 59, 61, 62, 64, 65, 68, 74, 83, 86, 90, 94, 96, 100, 105, 109, 110, 111, 113], "tupl": [0, 4, 12, 18, 25, 28, 29, 30, 32, 33, 36, 37, 39, 41, 42, 44, 45, 46, 48, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 65, 66, 68, 78, 83, 84, 86, 89], "expect": [0, 7, 15, 16, 17, 18, 21, 30, 34, 36, 37, 39, 41, 45, 46, 48, 51, 52, 56, 59, 61, 62, 66, 68, 82, 84, 86, 90, 100, 102, 104], "group": [0, 18, 36, 74, 83, 106, 108], "list": [0, 4, 7, 9, 15, 16, 17, 18, 19, 21, 22, 23, 24, 25, 29, 30, 31, 32, 33, 35, 36, 41, 42, 44, 45, 48, 51, 54, 55, 56, 57, 58, 59, 61, 65, 66, 68, 71, 78, 83, 86, 88, 89, 97, 99, 101, 106], "modul": [0, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 30, 33, 34, 41, 45, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 64, 65, 66, 67, 68, 71, 73, 77, 82, 83, 86, 87, 90, 102, 109, 114], "therefor": [0, 4, 7, 8, 9, 18, 33, 45, 59, 64, 90, 95, 103], "might": [0, 1, 4, 5, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 22, 23, 24, 25, 33, 45, 49, 51, 59, 68, 91, 104, 107], "front": [0, 4, 18, 33, 45, 59], "candid": [0, 4, 18, 30, 31, 33, 42, 45, 56, 59, 97, 100], "possibl": [0, 4, 6, 9, 18, 20, 33, 34, 35, 39, 42, 45, 48, 53, 59, 60, 62, 68, 83, 90, 97, 104, 106, 107], "suppos": [0, 4, 18, 33, 45, 59], "combin": [0, 4, 18, 33, 45, 55, 59, 61, 83, 92, 95, 100, 102, 103], "type": [0, 4, 6, 7, 9, 12, 15, 16, 17, 18, 20, 23, 24, 28, 29, 30, 31, 33, 36, 37, 39, 41, 42, 45, 46, 48, 51, 52, 53, 54, 55, 56, 59, 61, 64, 65, 66, 68, 71, 72, 77, 81, 82, 83, 86, 88, 89, 100, 102, 104, 106, 108, 111], "int": [0, 4, 6, 7, 12, 15, 16, 17, 18, 20, 28, 29, 30, 33, 37, 39, 41, 42, 44, 45, 46, 48, 49, 51, 53, 54, 55, 56, 59, 64, 66, 68, 69, 72, 78, 79, 80, 83, 84, 89], "16": [0, 4, 7, 18, 22, 28, 30, 33, 39, 45, 48, 49, 51, 59, 61, 64, 68, 69, 73, 77, 78, 79, 80, 83, 84, 86, 88, 89], "quantizationdatatyp": [0, 4, 18, 33, 39, 45, 48, 59, 68, 72], "allowed_accuracy_drop": [0, 4, 6, 18, 20, 33, 42, 45, 53, 59], "maximum": [0, 4, 5, 6, 7, 12, 13, 14, 18, 25, 28, 33, 42, 45, 51, 59, 68, 78, 79, 80, 84], "allow": [0, 4, 6, 8, 18, 20, 30, 32, 33, 35, 38, 39, 42, 44, 45, 47, 49, 56, 58, 59, 60, 61, 67, 68, 81, 83, 92, 100, 107, 108, 109, 111], "drop": [0, 1, 2, 3, 4, 5, 6, 7, 8, 10, 11, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 33, 42, 45, 59, 67, 68, 88, 92, 95, 100, 103, 104, 105, 107, 108], "plot": [0, 4, 12, 18, 25, 33, 45, 59, 71, 82, 104], "till": [0, 4, 18, 33, 45, 59], "met": [0, 4, 18, 33, 45, 59], "complet": [0, 4, 15, 16, 17, 18, 30, 33, 45, 59, 90, 93, 107], "pick": [0, 4, 18, 30, 33, 35, 36, 45, 56, 59, 95, 97], "results_dir": [0, 4, 12, 18, 25, 33, 37, 42, 45, 46, 53, 59, 66, 71], "cach": [0, 4, 18, 33, 42, 45, 53, 59, 75], "intermedi": [0, 4, 18, 32, 33, 44, 45, 52, 58, 59, 68, 108], "clean_start": [0, 4, 18, 33, 45, 59], "inform": [0, 4, 18, 21, 33, 36, 45, 49, 55, 59, 62, 65, 67, 72, 81, 86, 102], "delet": [0, 4, 18, 33, 45, 59, 104], "prior": [0, 4, 18, 19, 22, 23, 24, 33, 45, 59], "start": [0, 4, 7, 8, 9, 10, 11, 12, 13, 15, 16, 17, 18, 21, 23, 
24, 25, 28, 30, 33, 35, 41, 45, 51, 56, 59, 61, 62, 68, 78, 90, 91, 95, 96, 106, 108], "analysi": [0, 4, 8, 15, 16, 17, 18, 30, 33, 37, 45, 46, 56, 59, 66, 100, 107], "applic": [0, 1, 4, 5, 6, 10, 11, 12, 13, 14, 18, 19, 22, 30, 31, 33, 36, 37, 39, 45, 49, 59, 65, 101], "respons": [0, 4, 18, 33, 45, 59, 100], "flag": [0, 4, 18, 33, 36, 42, 45, 53, 59, 61, 68, 72, 82, 89], "anyth": [0, 4, 12, 18, 25, 33, 45, 59], "compar": [0, 4, 7, 8, 9, 12, 18, 21, 25, 33, 45, 52, 59, 61, 71, 83, 86, 90, 102, 104, 105, 112], "use_all_amp_candid": [0, 18, 45, 59], "supported_kernel": [0, 18, 45, 59], "field": [0, 18, 45, 49, 59], "config": [0, 13, 18, 39, 45, 46, 59, 66, 106, 109], "under": [0, 18, 31, 33, 45, 59, 72, 82, 83, 86, 104, 106], "op_typ": [0, 7, 18, 45, 59, 106], "section": [0, 7, 18, 20, 45, 52, 59, 62, 74, 75, 83, 93, 102, 108], "ignor": [0, 1, 2, 3, 4, 5, 7, 8, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 22, 23, 24, 25, 30, 45, 51, 55, 56, 59, 61, 68, 90], "continu": [0, 18, 35, 45, 59, 62, 68, 102, 103, 105, 107], "amp_search_algo": [0, 18, 33, 45, 59], "ampsearchalgo": [0, 18, 33, 45, 59], "search": [0, 7, 8, 10, 11, 15, 16, 17, 18, 21, 23, 24, 26, 28, 33, 42, 45, 51, 59, 68, 84, 105, 106], "binari": [0, 18, 33, 45, 59], "interpol": [0, 18, 33, 97], "bruteforc": [0, 18, 33], "phase1_optim": [0, 18, 33, 45, 59], "implement": [0, 18, 32, 37, 39, 42, 44, 46, 53, 58, 66, 68, 69, 77, 101, 102, 107], "either": [0, 5, 13, 15, 16, 17, 18, 26, 30, 36, 51, 56, 60, 64, 83, 91, 114], "optmiz": [0, 18], "phase1": [0, 18, 33, 45, 59], "001": [0, 18, 39], "p": [0, 18, 74, 84], "store": [0, 4, 18, 28, 39, 41, 51, 64, 68, 81, 83, 86], "final": [0, 4, 7, 8, 12, 18, 21, 25, 30, 35, 45, 56, 59, 62, 70, 75, 83, 94, 95, 97, 105, 107], "after": [0, 1, 2, 4, 5, 7, 8, 10, 11, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 28, 29, 36, 42, 46, 51, 53, 54, 61, 62, 65, 66, 68, 71, 77, 84, 86, 88, 90, 91, 92, 93, 95, 96, 100, 102, 107, 111, 112], "multipli": [0, 18, 88, 95, 100], "mac": [0, 15, 16, 17, 18, 30, 56, 70, 95, 100, 110, 113], "therebi": [0, 18], "lesser": [0, 18], "re": [0, 9, 18, 33, 38, 45, 59, 67, 90, 96, 102], "mixed_precis": [0, 4, 18, 33, 45, 59], "choose_mixed_precis": [0, 4, 18, 33, 45, 59], "pareto_front_list": [0, 18, 45, 59], "next": [0, 2, 4, 7, 8, 13, 18, 21, 36, 51, 64, 65, 68, 74, 90, 105, 107, 108], "step": [0, 2, 3, 4, 6, 7, 8, 12, 13, 18, 21, 25, 33, 36, 42, 46, 51, 53, 54, 61, 64, 66, 68, 78, 84, 86, 87, 90, 91, 92, 93, 94, 95, 96, 97, 100, 102, 103, 105, 107], "would": [0, 4, 7, 12, 13, 18, 21, 25, 30, 31, 35, 51, 57, 68, 72, 83, 84, 106, 109], "actual": [0, 4, 7, 9, 18, 21, 32, 37, 42, 44, 46, 51, 53, 58, 63, 66, 68, 83], "take": [0, 4, 6, 7, 9, 12, 13, 14, 15, 16, 17, 18, 21, 25, 33, 36, 45, 51, 56, 59, 65, 68, 77, 83, 90, 97, 100, 102, 103, 105, 107, 114], "o": [0, 1, 4, 5, 6, 7, 10, 11, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 33, 56], "makedir": [0, 1, 4, 5, 7, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24], "exist_ok": [0, 1, 4, 5, 7, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24], "filename_prefix": [0, 1, 4, 5, 7, 13, 14, 18, 19, 20, 21, 22, 23, 24, 28, 33, 39, 41, 48, 51, 64, 68], "resnet18_mixed_precis": [0, 18], "hope": [0, 4, 7, 8, 13, 18, 21], "wa": [0, 1, 4, 5, 7, 8, 9, 13, 15, 17, 18, 19, 21, 30, 32, 44, 49, 51, 56, 58, 64, 81, 100], "understand": [0, 4, 7, 8, 9, 12, 13, 18, 21, 25, 72, 102, 112], "qat": [0, 3, 8, 13, 18, 28, 29, 41, 48, 51, 54, 63, 67, 86, 90, 91, 93, 96, 98, 102, 107, 108, 109], "learn": [0, 1, 2, 3, 5, 6, 7, 8, 9, 10, 13, 14, 15, 16, 17, 18, 19, 
20, 21, 22, 23, 28, 29, 30, 31, 39, 41, 42, 43, 51, 53, 54, 55, 56, 57, 66, 68, 72, 93, 96, 100, 102, 105, 108, 109], "addit": [0, 7, 8, 9, 12, 13, 18, 21, 25, 49, 68, 73, 77, 83, 86, 102, 105, 106, 109], "resourc": [0, 7, 8, 9, 13, 18, 21], "doc": [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 61, 64, 104], "know": [0, 7, 8, 13, 18, 21, 77], "thi": [1, 2, 3, 5, 6, 7, 8, 9, 10, 11, 14, 15, 16, 17, 19, 20, 22, 23, 24, 26, 28, 30, 31, 32, 33, 34, 35, 36, 37, 39, 41, 42, 43, 44, 45, 46, 48, 49, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 64, 65, 66, 68, 69, 71, 72, 73, 74, 75, 77, 78, 79, 80, 81, 82, 83, 84, 86, 88, 89, 90, 91, 92, 94, 95, 97, 98, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 111, 114], "notebook": [1, 2, 3, 5, 6, 7, 8, 9, 10, 11, 14, 15, 16, 17, 19, 20, 22, 23, 24], "contain": [1, 2, 3, 5, 6, 10, 11, 12, 14, 15, 16, 17, 19, 20, 22, 23, 24, 25, 33, 36, 45, 46, 49, 52, 56, 59, 61, 62, 66, 73, 77, 81, 82, 86, 88, 90, 102, 104, 105, 106, 108], "nearest": [1, 4, 5, 8, 10, 11, 13, 14, 19, 22, 37, 39, 42, 48, 53, 55, 68, 91], "achiev": [1, 5, 13, 19, 30, 56, 74, 83, 91, 95], "i": [1, 2, 3, 5, 6, 7, 8, 9, 10, 11, 14, 15, 16, 17, 19, 20, 22, 23, 24, 26, 28, 30, 31, 33, 34, 35, 36, 37, 38, 39, 41, 42, 43, 44, 45, 46, 48, 49, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 77, 78, 79, 80, 82, 83, 84, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 97, 98, 100, 101, 102, 103, 104, 105, 106, 107, 108, 110, 111, 112, 113, 114], "loss": [1, 2, 3, 4, 5, 6, 7, 8, 10, 11, 13, 14, 19, 22, 23, 24, 28, 37, 39, 41, 46, 51, 66, 68, 81, 87, 90, 91, 98, 100, 102, 104, 108], "function": [1, 2, 4, 5, 8, 12, 13, 14, 15, 16, 17, 19, 21, 22, 23, 24, 25, 28, 29, 30, 31, 33, 34, 35, 36, 37, 38, 39, 41, 42, 43, 45, 46, 47, 48, 51, 52, 53, 54, 55, 56, 57, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 72, 77, 81, 82, 83, 88, 90, 91, 97, 100, 101, 104, 108, 109, 111, 112], "closer": [1, 5, 13, 19, 91], "farther": [1, 5, 19], "high": [1, 2, 3, 5, 10, 11, 14, 15, 16, 17, 19, 22, 23, 24, 31, 33, 36, 43, 45, 57, 59, 65, 72, 87, 91, 95, 97, 98, 103, 109, 112], "level": [1, 3, 4, 5, 8, 10, 11, 15, 16, 17, 19, 23, 24, 31, 43, 57, 72, 87, 95, 97, 98, 102, 107, 111], "simuat": [1, 2, 5, 10, 11, 13, 14, 19, 22, 23, 24], "post": [1, 2, 3, 5, 6, 7, 8, 10, 11, 13, 14, 15, 16, 17, 19, 20, 21, 22, 23, 24, 38, 42, 47, 48, 49, 53, 67, 87, 90, 91, 92, 98, 100, 105, 108, 109], "finetun": [1, 2, 5, 6, 7, 13, 14, 15, 16, 17, 19, 21, 23, 24, 48, 68], "paramet": [1, 2, 3, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 19, 20, 21, 22, 23, 24, 25, 29, 32, 33, 35, 36, 37, 38, 39, 42, 43, 44, 45, 46, 47, 48, 49, 52, 53, 54, 55, 57, 58, 59, 61, 62, 64, 65, 66, 67, 68, 70, 71, 72, 77, 78, 79, 80, 81, 82, 83, 84, 88, 89, 90, 93, 94, 100, 101, 102, 103, 104, 105, 106, 112], "fine": [1, 2, 3, 5, 6, 8, 10, 11, 14, 19, 20, 22, 23, 24, 26, 30, 39, 56, 64, 68, 87, 95, 98, 102, 105, 108], "tune": [1, 2, 3, 5, 6, 8, 10, 11, 14, 19, 20, 22, 23, 24, 26, 30, 39, 56, 64, 68, 87, 95, 98, 102, 105, 108], "epoch": [1, 2, 3, 5, 6, 7, 8, 10, 11, 14, 15, 16, 17, 19, 20, 21, 22, 23, 24, 39, 54, 56, 68, 90, 98, 100, 105], "improv": [1, 2, 3, 5, 6, 7, 8, 10, 11, 14, 15, 16, 17, 19, 20, 21, 22, 23, 24, 26, 51, 68, 83, 90, 95, 100, 102, 105, 107, 112], "otherwis": [1, 2, 3, 5, 6, 10, 11, 14, 15, 16, 17, 19, 20, 22, 23, 24, 42, 48, 62, 64, 68, 74, 75, 78, 83, 89, 107], "One": [1, 2, 3, 4, 12, 15, 16, 17, 18, 19, 20, 22, 23, 24, 25, 37, 39, 90], "run": [1, 2, 3, 5, 6, 7, 10, 
11, 12, 13, 14, 15, 16, 17, 19, 21, 22, 23, 24, 25, 30, 32, 33, 35, 37, 38, 39, 41, 42, 44, 45, 48, 49, 52, 53, 54, 56, 58, 59, 61, 62, 68, 72, 74, 75, 78, 83, 88, 89, 93, 98, 100, 102, 103, 104, 108, 109, 111], "two": [1, 2, 3, 4, 5, 6, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 19, 20, 22, 23, 24, 25, 30, 33, 35, 36, 56, 59, 61, 62, 65, 72, 73, 83, 90, 97, 98, 100, 102, 103, 104, 105, 108, 110, 112, 113], "thing": [1, 2, 3, 5, 6, 13, 14, 15, 16, 17, 19, 22, 23, 24], "put": [1, 2, 3, 5, 6, 7, 12, 13, 14, 15, 16, 17, 19, 21, 22, 23, 24, 25, 59, 102], "still": [1, 2, 3, 5, 6, 7, 12, 13, 14, 15, 16, 17, 19, 21, 22, 23, 24, 25, 34, 67, 83, 102, 107], "interfac": [1, 2, 3, 5, 6, 7, 12, 13, 14, 15, 16, 17, 19, 21, 22, 23, 24, 25, 67, 73], "method": [1, 2, 3, 5, 6, 7, 8, 9, 12, 13, 14, 15, 16, 17, 19, 21, 22, 23, 24, 25, 32, 33, 39, 42, 44, 55, 58, 59, 61, 68, 72, 77, 83, 86, 88, 90, 97, 100, 102, 107, 108], "abl": [1, 2, 3, 5, 6, 7, 9, 12, 13, 14, 15, 16, 17, 19, 21, 22, 23, 24, 25, 35, 46, 61, 62, 66, 81, 90], "exist": [1, 2, 3, 4, 5, 6, 7, 8, 10, 11, 12, 13, 14, 15, 16, 17, 19, 21, 22, 23, 24, 25, 45, 51, 59, 68, 74, 77, 83, 88, 108], "degrad": [1, 2, 3, 5, 10, 11, 14, 19, 22, 23, 24], "platform": [1, 2, 3, 5, 10, 11, 14, 19, 22, 23, 24, 73, 91, 102, 108], "call": [1, 2, 3, 5, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 19, 22, 23, 24, 25, 29, 30, 31, 33, 34, 35, 36, 42, 45, 46, 49, 51, 56, 57, 59, 61, 65, 66, 68, 77, 79, 80, 81, 83, 88, 90, 102, 104, 106, 108, 109, 110, 113], "kei": [1, 2, 3, 5, 9, 10, 11, 14, 15, 16, 17, 19, 22, 23, 24, 36, 49, 65], "precis": [1, 2, 3, 5, 7, 8, 10, 11, 12, 13, 14, 19, 21, 22, 23, 24, 25, 49, 51, 78, 79, 80, 87, 89, 102], "api": [1, 2, 3, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 19, 20, 21, 22, 23, 24, 25, 34, 60, 63, 72, 74, 82, 90, 96, 98, 101, 102, 106, 109, 111], "explan": [1, 2, 3, 5, 10, 11, 14, 19, 22, 23, 24], "copi": [1, 18, 19, 22, 23, 24, 35, 39, 68, 71, 96, 108], "deepcopi": [1, 71], "must": [1, 2, 3, 5, 7, 8, 10, 11, 12, 13, 14, 15, 16, 17, 19, 22, 23, 24, 25, 35, 51, 83, 86, 88, 92, 93, 95, 98, 99, 102, 104, 106, 114], "comput": [1, 2, 3, 5, 7, 8, 10, 11, 12, 13, 14, 15, 16, 17, 19, 22, 23, 24, 25, 28, 30, 32, 33, 37, 39, 41, 42, 44, 45, 46, 48, 49, 51, 53, 56, 58, 59, 63, 64, 66, 68, 69, 73, 75, 77, 79, 80, 83, 84, 86, 90, 91, 100, 101, 103, 104, 108, 111, 114], "encod": [1, 2, 3, 5, 6, 7, 8, 10, 11, 13, 14, 19, 20, 22, 23, 24, 28, 32, 33, 37, 39, 41, 42, 44, 45, 46, 48, 51, 53, 54, 58, 59, 63, 64, 66, 67, 68, 69, 72, 77, 78, 81, 83, 86, 87, 89, 90, 91, 93, 102, 104, 105, 109], "write": [1, 2, 3, 5, 10, 11, 14, 19, 22, 23, 24, 41, 48, 51, 68, 83, 86], "loader": [1, 2, 3, 4, 5, 7, 8, 10, 11, 12, 13, 14, 15, 17, 19, 21, 22, 23, 24, 25, 33, 37, 41, 45, 46, 48, 51, 54, 55, 59, 66, 68, 90, 91], "extract": [1, 2, 3, 4, 5, 7, 8, 9, 10, 11, 12, 13, 14, 19, 22, 23, 24, 25, 51, 68, 103], "regard": [1, 2, 3, 4, 5, 7, 8, 10, 11, 12, 13, 14, 19, 22, 23, 24, 25, 33, 45, 51, 59, 68], "veri": [1, 2, 3, 4, 5, 6, 7, 8, 10, 11, 12, 13, 14, 19, 20, 22, 23, 24, 25, 51, 68, 69, 95, 100, 104, 112], "percentag": [1, 2, 3, 4, 5, 7, 8, 10, 11, 12, 13, 14, 19, 22, 23, 24, 25, 51, 68], "1m": [1, 2, 3, 4, 5, 10, 11, 12, 13, 14, 19, 22, 23, 24, 25, 51, 68], "500": [1, 2, 3, 4, 5, 10, 11, 12, 13, 14, 19, 20, 22, 23, 24, 25, 33, 42, 51, 56, 68, 90, 91, 103, 104], "suffic": [1, 2, 3, 5, 10, 11, 14, 19, 22, 23, 24], "reason": [1, 2, 3, 5, 10, 11, 14, 19, 22, 23, 24, 62], "scenario": [1, 2, 3, 4, 5, 7, 8, 10, 11, 12, 13, 14, 19, 22, 23, 24, 25, 51, 68, 93, 114], "That": [1, 2, 3, 5, 
10, 11, 14, 19, 22, 23, 24, 114], "pictur": [1, 2, 3, 4, 5, 10, 11, 12, 13, 14, 19, 22, 23, 24, 25, 51, 68, 87], "captur": [1, 2, 3, 4, 5, 10, 11, 12, 13, 14, 19, 22, 23, 24, 25, 32, 44, 51, 58, 61, 68], "night": [1, 2, 3, 4, 5, 10, 11, 12, 13, 14, 19, 22, 23, 24, 25, 51, 68], "skew": [1, 2, 3, 5, 10, 11, 14, 19, 22, 23, 24], "5": [1, 2, 3, 4, 5, 8, 10, 11, 13, 14, 15, 16, 17, 19, 22, 23, 24, 30, 41, 42, 45, 53, 54, 56, 59, 61, 64, 68, 69, 70, 72, 73, 78, 79, 80, 83, 88, 89, 95, 105, 107], "comparison": [1, 2, 3, 19, 22, 23, 24, 32, 44, 58], "order": [1, 7, 8, 9, 10, 11, 12, 15, 16, 17, 19, 21, 23, 24, 25, 34, 36, 38, 60, 62, 65, 67, 68, 74, 75, 77, 83, 88, 90, 93, 94, 102, 105, 106], "vector": [1, 5, 13, 19, 51, 86], "num_batch": [1, 2, 5, 6, 7, 12, 13, 19, 20, 21, 22, 25, 28, 37, 41, 42, 46, 51, 53, 54, 66], "2000": [1, 5, 6, 13, 19, 20, 22, 41, 42, 51, 53, 78, 81], "size": [1, 5, 6, 7, 8, 9, 13, 19, 33, 35, 39, 46, 48, 51, 52, 53, 62, 66, 68, 77, 78, 83, 86, 88, 89, 91, 101, 110, 113], "default_num_iter": [1, 5, 13, 19, 20, 28, 41, 51], "strongli": [1, 5, 13, 19, 35, 51, 61], "least": [1, 5, 19, 59], "adaround_weight": [1, 5, 6, 13, 19, 20, 28, 41, 42, 51, 53, 67, 72], "adaroundparamet": [1, 5, 6, 13, 19, 20, 28, 41, 42, 51, 53, 67], "satisfi": [1, 6, 20, 61, 83, 90, 92], "signatur": [1, 15, 16, 17, 28, 30, 37, 42, 46, 56, 66, 77, 78], "deriv": [1, 4, 18, 49, 78, 89, 108], "form": [1, 12, 25, 35, 69], "arrai": [1, 83], "__init__": [1, 9, 20, 34, 35, 42, 52, 53, 56, 60, 61, 62, 69, 77, 88, 90], "self": [1, 9, 20, 34, 35, 52, 53, 56, 60, 61, 62, 69, 77, 81, 90], "_torch_data_load": 1, "_iter": 1, "__iter__": 1, "__next__": 1, "__len__": [1, 20, 46, 53, 66], "len": [1, 4, 8, 18, 20, 30, 33, 41, 42, 51, 53, 61], "ada_model": [1, 5, 13, 19], "apply_adaround": [1, 5, 13, 19, 28, 41, 51], "default_quant_schem": [1, 5, 13, 19, 28, 41, 51], "about": [1, 2, 3, 4, 5, 6, 9, 10, 11, 14, 15, 16, 17, 19, 20, 21, 22, 23, 24, 28, 30, 31, 36, 39, 41, 42, 43, 49, 51, 53, 55, 56, 57, 65, 66, 67, 68, 81, 90, 98, 104], "biwidth": [1, 5, 13, 19], "freez": [1, 5, 13, 19, 28, 51, 64, 72, 91], "set_and_freeze_param_encod": [1, 5, 13, 19, 28, 41, 51], "dure": [1, 3, 5, 7, 10, 11, 12, 13, 14, 15, 16, 17, 19, 21, 23, 24, 25, 28, 29, 30, 33, 35, 41, 42, 45, 51, 56, 59, 68, 70, 77, 82, 86, 88, 90, 91, 98, 100, 102, 105, 106, 108, 111, 112], "intern": [1, 5, 8, 9, 13, 19, 35, 42, 48, 51, 53, 68, 69, 72, 84, 100, 103, 106], "maintain": [1, 5, 19, 42, 97, 100], "alter": [1, 5, 13, 19, 51], "negat": [1, 5, 19], "encoding_path": [1, 5, 6, 13, 19, 20, 28, 42, 51, 53], "join": [1, 5, 10, 11, 13, 18, 19, 20, 56, 70], "littl": [1, 5, 10, 11, 14, 15, 16, 17, 19, 22, 23, 24, 100], "gain": [1, 5, 10, 11, 13, 14, 15, 16, 17, 19, 22, 23, 24, 94, 95, 100], "experi": [1, 5, 10, 11, 14, 15, 16, 17, 19, 22, 23, 24, 30, 56, 83, 100], "hyper": [1, 5, 7, 8, 10, 11, 14, 15, 16, 17, 19, 21, 22, 23, 24, 68, 105], "better": [1, 5, 10, 11, 13, 14, 15, 16, 17, 19, 21, 22, 23, 24, 52, 71, 83, 91, 92, 100, 102, 103, 105], "updat": [1, 3, 5, 9, 10, 11, 13, 14, 19, 20, 22, 23, 24, 49, 51, 52, 68, 70, 72, 75, 84, 89, 96, 102, 103, 105, 108, 109, 111], "without": [1, 5, 6, 8, 10, 11, 13, 14, 19, 20, 22, 23, 24, 39, 51, 64, 68, 79, 80, 81, 82, 83, 86, 92, 102, 105, 108, 114], "resnet18_after_adaround": 1, "bia": [2, 7, 14, 31, 36, 43, 52, 57, 61, 62, 64, 65, 67, 69, 72, 77, 79, 80, 88, 90, 91, 94, 102, 103, 106, 107, 109], "correct": [2, 13, 14, 20, 25, 36, 51, 53, 65, 67, 75, 90, 91, 102, 103, 107], "bc": [2, 91], "help": [2, 12, 14, 22, 25, 52, 62, 
64, 68, 71, 91, 97, 100, 102, 103, 104, 105, 107, 112], "recov": [2, 14, 15, 16, 17, 22, 87, 98, 107, 108], "oppos": [2, 14, 22], "conv": [2, 7, 14, 22, 28, 29, 36, 41, 51, 54, 55, 61, 65, 71, 83, 99, 106, 109, 110, 113, 114], "immedi": [2, 6, 14, 20, 22], "consecut": [2, 14, 22, 36, 64, 65, 102, 103], "five": [2, 22], "sake": [2, 22, 32, 44, 58], "procedur": [2, 14, 15, 17, 22, 97, 111], "cl": [2, 14, 22, 36, 65, 77, 109], "skip": [2, 14, 22, 31, 33, 41, 51, 53, 55, 74, 94], "absorpt": [2, 14, 22], "hba": [2, 14, 22], "again": [2, 13, 14, 15, 16, 17, 22, 104, 105], "cross_layer_equ": [2, 14, 22, 31, 36, 43, 57, 65, 71, 72, 101], "equalize_model": [2, 14, 22, 31, 43, 57, 71, 72, 101], "free": [2, 7, 8, 14, 21, 22, 68, 74, 103], "iccv": [2, 14, 22, 100, 103], "2019": [2, 14, 22, 103], "add": [3, 7, 10, 11, 15, 16, 17, 23, 24, 34, 35, 39, 49, 60, 61, 62, 64, 68, 74, 77, 88, 90, 106, 108, 109, 114], "ml": [3, 8, 10, 11, 23, 24, 26, 68, 100, 102, 103], "standard": [3, 8, 10, 11, 23, 24, 33, 61, 68, 71, 79, 80, 88], "acceler": [3, 8, 10, 11, 23, 24, 56, 68, 73, 87, 98, 100], "min": [3, 10, 11, 13, 37, 46, 49, 64, 66, 71, 72, 78, 82, 84, 89, 90, 104, 108], "max": [3, 10, 11, 13, 37, 46, 49, 64, 66, 71, 72, 78, 79, 80, 82, 84, 89, 90, 100, 103, 104, 108], "onc": [3, 8, 10, 11, 15, 16, 17, 22, 23, 24, 34, 60, 62, 64, 68, 77, 93, 94, 100, 104, 105, 108], "effect": [3, 8, 10, 11, 23, 24, 28, 29, 39, 41, 51, 54, 64, 68, 83, 88, 90, 93, 102, 104, 108], "keep": [3, 10, 61, 68, 106], "constant": [3, 10, 23, 24, 42, 53, 61, 68, 97, 102], "4": [3, 8, 12, 15, 16, 17, 20, 25, 28, 29, 36, 37, 41, 42, 45, 46, 51, 53, 54, 55, 56, 61, 64, 65, 66, 68, 69, 72, 78, 81, 83, 90, 93, 97, 102, 107, 114], "layer": [4, 8, 15, 16, 17, 28, 29, 30, 33, 34, 35, 37, 38, 39, 41, 42, 46, 47, 48, 51, 52, 53, 54, 55, 56, 60, 62, 63, 64, 66, 67, 68, 69, 70, 71, 77, 82, 83, 86, 88, 90, 91, 92, 93, 94, 95, 96, 99, 101, 102, 104, 106, 107, 108, 109, 110, 111, 112, 113, 114], "an": [4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 28, 29, 30, 31, 33, 34, 35, 37, 39, 41, 42, 45, 46, 48, 49, 51, 53, 54, 55, 56, 57, 59, 60, 61, 62, 66, 68, 69, 72, 73, 77, 81, 82, 83, 87, 88, 89, 90, 91, 92, 94, 95, 97, 98, 100, 101, 102, 104, 105, 106, 107, 108, 112, 114], "resnet50": [4, 5, 6, 10, 11, 12, 13, 14, 30, 31, 33, 36, 37, 39], "disabl": [4, 15, 17, 37, 45, 46, 64, 66, 68, 72, 88, 97, 100, 104, 106, 108], "log": [4, 7, 8, 10, 11, 28, 33, 62, 104], "info": [4, 36, 52, 55, 62, 65, 90, 109], "verbos": 4, "displai": [4, 8, 96, 104, 111, 112], "erorr": 4, "tensorflow": [4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 25, 26, 73, 74, 93, 96, 98, 99, 102, 106, 108, 109], "messag": 4, "error": [4, 33, 35, 42, 53, 61, 77, 92, 102, 104, 105, 107, 108], "critic": 4, "tf_cpp_min_log_level": [4, 6, 7, 10, 11, 33], "compat": [4, 7, 8, 33, 49, 60, 73, 83, 90], "v1": [4, 18, 19, 20, 21, 22, 23, 24, 25, 33, 51, 53, 55, 58, 59, 66, 67, 68, 69, 72], "set_verbos": [4, 33], "evlauat": 4, "kera": [4, 5, 6, 10, 11, 12, 13, 14, 26, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 91, 93, 96, 98, 102, 103, 104, 106, 108, 109], "preprocess_input": [4, 5, 10, 11, 13, 14, 30, 33], "decode_predict": [4, 30, 33], "center_crop": [4, 33], "img_height": 4, "256": [4, 18, 33, 37, 46, 66, 86, 90, 104], "img_width": 4, "crop_length": 4, "start_x": 4, "start_i": 4, "cropped_imag": 4, "get_eval_func": [4, 30, 33], "num_iter": [4, 30, 33], "50000": [4, 30, 33], "debug": [4, 26, 27, 28, 32, 40, 44, 49, 50, 58, 72, 90, 107], "get_top5_acc": 4, "func_wrapp": [4, 
30, 33], "validation_d": [4, 30, 33], "preprocess": [4, 5, 8, 10, 11, 13, 30, 33], "image_dataset_from_directori": [4, 5, 10, 11, 13, 30, 33], "label_mod": [4, 5, 10, 11, 13, 30, 33], "categor": [4, 5, 10, 11, 13, 30, 33], "shuffl": [4, 5, 10, 11, 13, 30, 33, 56, 90], "top1": [4, 20, 30, 33], "top5": 4, "total": [4, 9, 30, 33, 90, 97, 108], "img": [4, 30, 33], "pred": [4, 18, 30, 33, 42], "predict": [4, 12, 30, 32, 33, 37, 53], "np": [4, 9, 28, 30, 33, 35, 37, 39, 42, 45, 46, 48], "class_nam": [4, 9, 30, 33], "cnt": [4, 30, 33], "sum": [4, 18, 20, 30, 33, 53, 90], "b": [4, 30, 33, 54, 64, 77, 78, 89], "zip": [4, 30, 33, 37], "str": [4, 9, 28, 30, 32, 33, 37, 39, 41, 42, 44, 45, 46, 48, 51, 53, 55, 58, 59, 60, 61, 64, 66, 68, 71, 82, 86], "eval_func": [4, 12, 30, 33, 70], "aimet_tensorflow": [4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 28, 29, 30, 31, 32, 33, 35, 36, 37, 39, 73, 75], "fold_all_batch_norm": [4, 5, 10, 11, 13, 14, 18, 19, 22, 23, 24, 33, 36, 65, 71, 90], "get_model": [4, 33, 34], "include_top": [4, 5, 13, 14, 33], "input_tensor": [4, 5, 13, 14, 33, 35, 48, 59, 61, 84], "pool": [4, 5, 7, 13, 14, 33], "batch": [4, 6, 7, 15, 16, 17, 21, 28, 29, 30, 32, 33, 36, 37, 41, 44, 45, 46, 48, 51, 52, 54, 58, 59, 65, 66, 68, 91, 102, 103, 104], "norm": [4, 14, 33, 36, 52, 54, 65, 91, 102, 103, 104], "fold": [4, 29, 31, 33, 36, 42, 43, 52, 53, 54, 57, 65, 67, 71, 88, 91, 92, 93, 102, 103, 104, 109], "acccuraci": 4, "fp32_acccuraci": 4, "sim": [4, 7, 8, 13, 20, 28, 29, 33, 41, 42, 45, 46, 51, 53, 54, 55, 59, 63, 64, 66, 72, 82, 83, 86, 87, 90, 105, 108], "rounding_mod": [4, 5, 8, 10, 11, 13, 14, 37, 39, 42, 48, 53, 68], "solut": [4, 18, 97, 105, 107], "less": [4, 18, 52, 83, 84, 94, 97, 106], "convert": [4, 18, 34, 35, 36, 45, 59, 61, 69, 81, 90, 92, 102, 112], "overhead": [4, 15, 17, 18], "fairli": [4, 7, 8, 12, 13, 25, 51, 68], "simpl": [4, 7, 8, 12, 13, 25, 28, 37, 46, 51, 56, 61, 66, 68, 90], "don": [4, 7, 8, 12, 13, 25, 51, 61, 64, 68, 77, 91, 104], "pointer": [4, 7, 8, 12, 13, 25, 51, 68], "done": [4, 9, 13, 21, 34, 74, 78, 89, 100, 106, 108], "defin": [4, 9, 12, 13, 15, 16, 17, 19, 22, 23, 24, 25, 28, 33, 34, 35, 37, 38, 42, 45, 46, 49, 51, 53, 56, 59, 60, 61, 62, 66, 67, 68, 77, 83, 88, 90, 102, 104, 106, 108], "separ": [4, 28, 35, 36, 39, 41, 51, 61, 62, 65, 68, 72, 86, 93, 104, 107, 109], "larg": [4, 64, 95, 105, 110, 113], "enough": [4, 15, 16, 17, 52], "meaning": 4, "give": [4, 8, 12, 13, 15, 16, 17, 25, 33, 51, 68, 69, 100], "ideal": [4, 12, 13, 25, 51, 68], "opt": [4, 86], "01": [4, 6, 20, 28, 41, 42, 51, 53, 78, 91], "discuss": [4, 15, 16, 17, 90, 95, 96, 108], "eval_callback_phase1": 4, "eval_callback_phase2": 4, "forward_pass_call_back": [4, 33, 45, 59], "greedymixedprecisionalgo": [4, 18, 33, 59], "enabl": [4, 11, 13, 15, 17, 18, 24, 26, 30, 33, 37, 45, 46, 54, 56, 59, 64, 66, 72, 73, 74, 83, 87, 93, 98, 102, 104, 106, 108, 109, 111], "enable_convert_op_reduct": [4, 18, 33, 59], "acuraci": [4, 33], "time": [4, 6, 12, 20, 22, 25, 30, 36, 49, 56, 61, 62, 73, 83, 86, 90, 92, 100, 101, 105, 106, 111], "wrapper": [4, 8, 18, 19, 22, 23, 24, 28, 33, 37, 46, 56, 66], "callback": [4, 6, 8, 10, 11, 12, 15, 16, 17, 25, 30, 33, 37, 39, 42, 45, 46, 48, 53, 56, 59, 66, 68, 104, 108], "get_data_loader_wrapp": [4, 33], "dataloader_wrapp": [4, 33], "map": [4, 5, 6, 7, 9, 13, 33, 37, 39, 49, 62, 64, 77, 78, 81, 83, 88, 97, 104, 106], "lambda": [4, 5, 6, 7, 8, 9, 13, 33, 37, 59, 83], "y": [4, 5, 12, 13, 25, 33, 39, 61, 74, 75, 90, 104], "data_loader_wrapp": [4, 33], "choose_fast_mixed_precis": [4, 
33], "resnet50_after_amp": 4, "pytorch": [5, 6, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 52, 61, 62, 69, 71, 73, 74, 77, 86, 88, 91, 92, 93, 96, 98, 103, 104, 106, 108, 109], "image_net_dataset": [5, 6, 7, 12, 13, 14], "imagenetdataset": [5, 6, 7, 12, 13, 14], "get_val_dataset": [5, 6, 7, 12, 13, 14], "10": [5, 6, 7, 8, 9, 10, 11, 13, 14, 15, 16, 17, 21, 23, 24, 28, 30, 34, 35, 36, 39, 54, 56, 61, 62, 64, 68, 69, 70, 73, 74, 75, 77, 78, 81, 83, 84, 88, 89, 90, 97, 100, 105], "new": [5, 9, 10, 11, 13, 14, 18, 25, 34, 35, 36, 45, 49, 51, 59, 60, 61, 64, 68, 72, 74, 78, 83, 86, 87, 89, 90, 102, 109], "rest": [5, 10, 11, 13, 14, 107], "progbar": [5, 10, 11, 13, 14], "sim_model": [5, 10, 11, 12, 13, 14, 19, 21, 22, 23, 24, 25, 51, 68], "tf_dataset": [5, 13, 14], "progbar_stat_upd": [5, 10, 11, 13, 14], "equal": [5, 9, 38, 42, 47, 53, 55, 63, 67, 71, 81, 83, 91, 92, 95, 96, 97, 101, 102, 104, 107, 108, 112], "data_set": [5, 13, 28], "ada_round_data": [5, 13], "image_width": [5, 13], "image_height": [5, 13], "resnet50_after_adaround": 5, "offer": [6, 20, 46, 66, 92], "suit": [6, 20, 92], "network": [6, 8, 9, 13, 20, 35, 88, 90, 92, 95, 97, 100, 102, 105, 107, 108, 111, 113], "quantiz": [6, 9, 15, 16, 17, 20, 26, 27, 28, 29, 32, 35, 37, 40, 41, 42, 44, 46, 49, 50, 51, 53, 54, 58, 60, 61, 63, 64, 66, 77, 79, 82, 84, 86, 87, 91, 92, 93, 95, 96, 98, 100, 104, 109, 111], "ptq": [6, 20, 42, 53, 98, 102, 104, 105], "success": [6, 20], "sequenc": [6, 8, 20, 52, 91, 92, 93, 101, 106], "consum": [6, 20, 49, 92, 100], "non": [6, 20, 52, 61, 108, 111], "expert": [6, 20], "effort": [6, 20, 42, 53, 92], "analyz": [6, 20, 30, 38, 47, 56, 67, 78, 87, 88, 89, 92, 94, 101, 104, 111, 112], "automat": [6, 20, 30, 36, 56, 65, 74, 75, 83, 95, 100, 102, 104, 109], "variou": [6, 12, 20, 25, 30, 45, 56, 59, 100, 102, 107, 108, 109, 112], "best": [6, 13, 20, 42, 53, 92, 95, 100, 102, 108], "heurist": [6, 20], "toler": [6, 20, 92, 95], "cumul": [6, 20], "until": [6, 20, 42, 53, 78, 89, 92], "evalu": [6, 8, 20, 28, 30, 33, 37, 39, 42, 45, 46, 48, 51, 53, 56, 59, 66, 68, 70, 90, 92, 96, 97, 100, 102, 104, 105, 108, 111], "creat": [6, 8, 12, 15, 16, 17, 20, 25, 28, 30, 33, 35, 36, 37, 39, 42, 44, 45, 46, 48, 51, 53, 54, 56, 59, 61, 63, 64, 65, 66, 68, 69, 71, 77, 82, 84, 86, 87, 88, 90, 91, 93, 96, 100, 101, 102, 105, 108], "auto_qu": [6, 20, 42, 53], "eval_dataset_s": [6, 7, 20, 42, 53], "5000": [6, 20, 42, 45, 53, 59], "been": [6, 7, 8, 13, 20, 35, 51, 62, 71, 77, 81, 82, 83, 86, 108, 114], "calibration_dataset_s": [6, 20, 42, 53], "20": [6, 7, 8, 9, 10, 11, 21, 23, 24, 28, 30, 35, 41, 49, 51, 68, 73, 91, 105], "eval_dataset": [6, 7, 37, 53], "unlabeled_dataset": [6, 7, 12, 37, 53], "prepar": [6, 29, 34, 37, 38, 42, 46, 51, 53, 60, 64, 66, 67, 68, 69, 92, 102, 109], "eval_callback": [6, 7, 12, 15, 16, 17, 20, 25, 30, 33, 37, 42, 46, 53, 56, 66, 70], "compil": [6, 7, 8, 10, 11, 12, 37, 39, 41, 48], "dictionari": [6, 7, 9, 30, 46, 56, 66, 68, 70, 97, 100, 106], "argument": [6, 7, 12, 25, 28, 32, 33, 37, 39, 41, 44, 45, 46, 48, 51, 58, 59, 61, 66, 68, 77, 79, 80, 83], "num_sampl": [6, 7, 18, 20, 37, 45, 53, 59], "whole": [6, 7, 83, 108], "sampled_dataset": [6, 7, 12], "adam": [6, 7, 8, 10, 11, 12, 37, 39, 90], "categoricalcrossentropi": [6, 7, 12, 37], "categoricalaccuraci": [6, 7, 12, 37], "acc": [6, 7, 8, 12, 37], "unlabeleddatasetwrapp": [6, 20, 53], "indic": [6, 13, 18, 20, 30, 36, 49, 53, 56, 69, 88, 95, 114], "seri": [6, 20, 68], "reach": [6, 20, 92], "furhter": [6, 20], "adaround": [6, 20, 42, 47, 53, 67, 72, 
87, 92, 96, 102, 107, 109], "predefin": [6, 20], "were": [6, 13, 20, 32, 36, 44, 49, 51, 58, 64, 65, 68, 75, 86, 95, 106, 114], "empir": [6, 20, 103], "adaround_dataset_s": [6, 20, 42, 53], "adaround_dataset": 6, "adaround_param": [6, 20, 42, 53], "set_adaround_param": [6, 20, 42, 53], "correspond": [6, 13, 20, 28, 29, 32, 36, 41, 44, 45, 46, 51, 54, 58, 59, 64, 65, 66, 68, 73, 74, 75, 83, 86, 88, 94, 104, 108, 114], "counter": [7, 21, 38, 39, 68], "potenti": [7, 21, 36, 38, 52, 101, 111, 112], "instabl": [7, 21, 38], "varianc": [7, 21, 38, 103], "recalcul": [7, 21, 29], "By": [7, 18, 19, 21, 22, 23, 24, 29, 30, 35, 56, 64, 88, 91, 100, 108], "aim": [7, 15, 16, 17, 21, 29], "make": [7, 8, 9, 13, 21, 29, 34, 36, 38, 60, 63, 67, 83, 88, 101, 102], "our": [7, 8, 21, 29, 46, 64, 66, 73, 75, 90], "stabl": [7, 21, 29, 61, 69, 91], "rather": [7, 21, 29, 61, 106], "noisi": [7, 21, 29], "6": [7, 8, 9, 10, 11, 17, 19, 22, 23, 24, 28, 35, 42, 51, 53, 61, 64, 68, 78, 83, 84, 86, 105, 107], "simul": [7, 8, 15, 16, 17, 25, 32, 38, 39, 44, 47, 48, 49, 51, 55, 58, 60, 63, 64, 67, 68, 79, 80, 83, 86, 87, 88, 90, 98, 102, 105, 109], "7": [7, 16, 17, 19, 21, 22, 23, 24, 36, 53, 54, 64, 68, 74, 75, 78, 79, 80, 90, 107, 114], "dir": [7, 12, 13, 14, 48, 69, 75], "accuraci": [7, 8, 12, 20, 21, 25, 26, 28, 30, 32, 33, 39, 41, 42, 44, 45, 51, 53, 56, 58, 59, 64, 68, 83, 87, 90, 91, 92, 95, 97, 98, 100, 102, 103, 104, 105, 107, 108, 109, 112, 114], "helper": [7, 21, 33, 53, 59], "train_dataset_s": 7, "re_estimation_dataset_s": 7, "train_dataset": 7, "re_estimation_dataset": 7, "current": [7, 8, 9, 18, 25, 30, 33, 35, 39, 52, 56, 59, 62, 63, 77, 99, 113], "built": [7, 8, 45, 59, 73, 74, 77, 88], "sequenti": [7, 8, 9, 34, 35, 72, 106, 107], "subclass": [7, 8, 34, 35, 77, 84], "incompat": [7, 8, 90], "backend": [7, 28], "clear_sess": [7, 28], "conv2d": [7, 8, 13, 35, 36, 49, 52, 61, 62, 65, 72, 77, 83, 88, 90, 94, 100, 109, 114], "conv1": [7, 15, 16, 17, 35, 52, 56, 60, 61, 62, 65, 70, 90], "fuse": [7, 106, 108], "relu": [7, 8, 9, 34, 35, 36, 49, 52, 55, 60, 61, 62, 65, 69, 72, 88, 90, 103, 106, 114], "maxpooling2d": 7, "conv2": [7, 35, 49, 56, 60, 61, 65, 83, 90], "flatten": [7, 61, 88], "dens": [7, 8, 9, 34, 35, 36, 69], "functional_model": [7, 8, 9], "fp32": [7, 8, 25, 32, 33, 37, 44, 45, 46, 58, 59, 64, 66, 91, 98, 103, 104, 105, 107, 108], "baselin": [7, 8, 20, 33, 45, 59, 90, 97, 100, 105], "loss_fn": [7, 90], "fit": [7, 8, 10, 11, 21, 30, 39, 56, 68, 97], "test": [7, 8, 9, 37, 46, 52, 54, 66, 69, 75], "training_range_learning_with_tf_init": [7, 11, 21, 24, 28, 51, 68], "json": [7, 12, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114], "default_config_per_channel": 7, "is_output_quant": [7, 106], "is_quant": [7, 106], "is_symmetr": [7, 49, 84, 106], "strict_symmetr": [7, 106], "unsigned_symmetr": [7, 106], "per_channel_quant": [7, 13, 49, 88, 106], "squeez": [7, 18], "pad": [7, 35, 52, 61, 62, 90], "supergroup": [7, 106, 109], "op_list": [7, 106], "clip": [7, 84, 106, 108], "gemm": [7, 106], "model_input": [7, 62, 106], "is_input_quant": [7, 106], "model_output": [7, 106], "open": 7, "tmp": [7, 12, 25, 37, 42, 46, 53, 66, 90], "w": [7, 33, 45, 46, 59, 66, 74, 114], "f": [7, 20, 42, 53, 61, 62, 
73, 74, 75, 90], "dump": 7, "qsim": [7, 29, 69], "config_fil": [7, 12, 13, 25, 28, 37, 39, 42, 46, 48, 53, 55, 66, 68], "posit": [7, 8, 9, 35, 78], "15": [7, 8, 10, 11, 21, 23, 24, 68, 78, 100, 105], "job": [7, 8, 10, 11, 15, 16, 17, 21, 23, 24, 68], "good": [7, 8, 10, 11, 15, 16, 17, 21, 23, 24, 35, 64, 67, 68, 90, 91, 100], "rate": [7, 8, 9, 10, 11, 15, 16, 17, 21, 23, 24, 35, 68, 100, 105], "end": [7, 8, 10, 11, 15, 16, 17, 21, 23, 24, 28, 29, 31, 33, 51, 53, 54, 57, 61, 62, 66, 68, 78, 89, 90, 100], "factor": [7, 8, 10, 11, 15, 16, 17, 21, 23, 24, 36, 65, 68, 83, 84, 95, 100, 103], "feel": [7, 8, 21, 68, 74], "quantized_callback": [7, 8, 10, 11], "tensorboard": [7, 8, 10, 11], "log_dir": [7, 8, 10, 11], "histori": [7, 8, 10, 11], "validation_data": [7, 8, 10, 11], "reestimate_bn_stat": [7, 21, 29, 54], "reestim": [7, 29], "100": [7, 21, 29, 33, 45, 53, 54, 59, 72, 84, 86, 90], "adapt": [7, 13, 21, 38, 54, 64, 67, 87, 90, 91, 96, 102, 104, 109], "yield": [7, 21, 45, 46, 51, 54, 59, 66, 95, 107, 108], "directli": [7, 21, 29, 42, 46, 63, 66, 72, 104, 108], "bn_reestim": [7, 21, 29, 54], "far": [7, 21, 91], "first": [7, 8, 9, 12, 13, 15, 16, 17, 21, 25, 30, 35, 38, 51, 61, 67, 69, 88, 90, 100, 102, 105, 107], "effici": [7, 21, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114], "fold_all_batch_norms_to_scal": [7, 21, 29, 54], "mnist_after_bn_re_estimation_qat_range_learn": 7, "exampl": [8, 9, 20, 34, 52, 60, 62, 63, 64, 77, 78, 79, 80, 81, 82, 83, 84, 88, 89, 90, 91, 95, 98, 102, 104, 106, 107, 108, 109, 114], "pipelin": [8, 10, 11, 32, 44, 53, 54, 58, 68, 102, 105, 107, 108], "1": [8, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 50, 51, 52, 53, 54, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 91, 92, 93, 94, 95, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 110, 111, 112, 113, 114], "dataset": [8, 28, 29, 30, 33, 37, 42, 45, 46, 48, 51, 53, 54, 55, 59, 66, 68, 90, 96, 102, 103, 108], "2": [8, 28, 30, 35, 36, 39, 41, 42, 45, 48, 51, 52, 53, 54, 56, 58, 59, 61, 62, 64, 65, 66, 68, 69, 72, 73, 74, 77, 78, 79, 80, 81, 83, 84, 86, 89, 91, 95, 97, 102, 107, 108], "3": [8, 28, 30, 31, 33, 36, 37, 39, 42, 43, 45, 46, 48, 51, 52, 53, 54, 55, 56, 57, 58, 59, 61, 62, 64, 65, 66, 68, 69, 70, 71, 72, 73, 78, 81, 83, 84, 90, 95, 102, 105, 107], "imdb": 8, "sentiment": 8, "vocab_s": [8, 9, 35], "20000": [8, 9, 35], "consid": [8, 15, 17, 52, 107, 110, 113], "20k": 8, "word": [8, 83], "maxlen": [8, 9, 35], "200": [8, 9, 20, 35, 72], "movi": 8, "review": 8, "x_train": [8, 29], "y_train": 8, "x_val": 8, "y_val": 8, "load_data": 8, "num_word": 8, "pad_sequ": 8, "embed_dim": [8, 9, 35], "embed": [8, 9, 35, 61, 68, 88, 100, 107], "token": [8, 9, 35, 107], "num_head": [8, 9, 35], "attent": [8, 9, 35], "head": [8, 9, 35], "ff_dim": [8, 9, 35], "hidden": [8, 9, 35], "feed": [8, 9, 35, 108], "insid": [8, 9, 30, 35, 61, 74, 77, 88, 90], "delta": [8, 9, 35, 37, 46, 66, 84, 108], "input_dim": [8, 9, 35], "output_dim": [8, 9, 35], "block": [8, 53, 72, 78, 83, 86, 89], "multiheadattent": [8, 9, 35, 109], "key_dim": [8, 9, 35], "dropout": [8, 9, 35, 88], "layernorm": [8, 9, 
35, 88], "epsilon": [8, 9, 35], "1e": [8, 9, 35, 52, 64, 90, 105], "globalaveragepooling1d": [8, 9, 35], "softmax": [8, 9, 18, 35, 88, 90], "functional_callback": 8, "histogram_freq": 8, "sparse_categorical_crossentropi": 8, "128": [8, 10, 11, 45, 59, 61, 81, 83, 90], "wrap": [8, 12, 25, 61, 72], "visual": [8, 12, 25, 26, 30, 50, 56, 67, 74, 100, 102, 103, 104, 107, 109], "multi": [8, 67, 109], "encount": [8, 77, 90], "access": [8, 18, 19, 22, 23, 24, 67, 72, 74], "within": [8, 77, 81, 88, 95, 104, 108], "granular": [8, 15, 16, 17, 30, 56, 83, 100, 107, 108, 112], "mha": [8, 109], "accur": [8, 90], "clone_lay": 8, "u": [8, 13, 51, 74], "clone": [8, 96], "diagram": [8, 93, 98, 108], "m": [8, 73, 74, 75, 87, 96], "convert_to_pb": [8, 39], "inspect": [8, 100], "1024": [8, 48, 51, 68, 91, 101], "artifact": [8, 32, 44, 48, 58, 74, 90], "3000": [8, 81], "model_after_qat": [8, 10, 11], "anoth": [8, 64, 68, 113, 114], "most": [8, 83, 90, 106], "case": [8, 9, 12, 25, 37, 42, 46, 49, 53, 60, 61, 62, 64, 66, 72, 75, 78, 83, 88, 90, 97, 103, 106], "complex": [8, 28, 37, 39, 41, 46, 48, 66, 68], "around": [8, 13, 37, 46, 48, 51, 66, 68], "elementari": 8, "logdir": 8, "summari": [8, 71, 100], "vanilla": 8, "becaus": [9, 21, 35, 61, 69, 102], "tool": [9, 37, 66, 67, 87, 90, 103, 112, 114], "sequanti": 9, "build": [9, 35, 72], "dicuss": 9, "text": [9, 35, 78, 89], "transform": [9, 18, 19, 20, 22, 23, 24, 25, 35, 51, 53, 61, 68, 83, 86, 90, 109], "found": [9, 36, 65, 69, 72, 83, 105, 108], "tokenandpositionembed": [9, 35], "transformerblock": [9, 35], "super": [9, 35, 52, 61, 62, 69, 77, 90], "att": [9, 35], "ffn": [9, 35], "layernorm1": [9, 35], "layernorm2": [9, 35], "dropout1": [9, 35], "dropout2": [9, 35], "kwarg": [9, 35, 77, 78, 81, 88, 89], "attn_output": [9, 35], "out1": [9, 35], "ffn_output": [9, 35], "token_emb": [9, 35], "pos_emb": [9, 35], "With": [9, 12, 25, 81, 102, 105], "those": [9, 46, 66, 106], "random": [9, 20, 28, 33, 35, 37, 39, 42, 45, 46, 48, 53, 69, 94, 104], "random_input": [9, 35], "embedding_lay": [9, 35], "transformer_block": [9, 35], "token_and_position_embed": 9, "symmetr": [9, 48, 49, 64, 72, 77, 78, 81, 83, 84, 88, 89, 90, 106, 108], "model_prepar": [9, 18, 19, 21, 22, 23, 24, 25, 35, 37, 51, 54, 61, 66, 68, 69, 72, 90], "prepare_model": [9, 18, 19, 21, 22, 23, 24, 25, 35, 37, 51, 54, 61, 66, 68, 69, 72, 90], "input_lay": [9, 35], "begin": [9, 35, 52, 61, 62, 78, 89, 106], "unwrap": 9, "ident": [9, 34, 60, 90], "present": [9, 21, 26, 52, 58, 62, 64, 72, 83, 86, 90, 103], "get_weight": 9, "represent": [9, 49, 79, 80, 81], "reorder": 9, "get_original_models_weights_in_functional_model_ord": 9, "original_model": [9, 35], "ndarrai": [9, 33, 36, 42, 44, 45, 46, 48, 65], "arg": [9, 68, 77, 78, 81, 83, 86, 88, 89], "quick": [9, 13, 15, 17], "lookup": 9, "remov": [9, 29, 54, 61, 72, 74, 90, 94, 98, 108, 114], "match": [9, 28, 32, 37, 44, 46, 56, 58, 66, 68, 83, 94, 100, 104, 106, 107, 108, 114], "original_model_weight": 9, "pop": 9, "out": [9, 35, 37, 38, 39, 46, 47, 61, 64, 66, 67, 68, 78, 79, 80, 89], "weight_nam": 9, "functional_model_weight_ord": 9, "enumer": [9, 28, 30, 51, 56, 58, 68, 90], "sort": 9, "weights_in_correct_ord": 9, "item": [9, 12, 25, 42, 102], "weight_info": 9, "assert": [9, 18, 61, 72, 90], "count_param": 9, "output_shap": 9, "textclassif": 9, "what": [9, 90, 111], "architectur": [9, 67], "model_weights_in_correct_ord": 9, "assert_array_equ": 9, "modelprepar": [9, 18, 19, 22, 23, 24, 25, 29, 35, 51, 61, 68, 90], "arthmet": [9, 35], "experss": [9, 35], 
"tfoplambda": [9, 35], "ressembl": 9, "conv_1": [9, 35, 83], "conv_2": [9, 35], "becuas": [9, 35, 44], "rais": [9, 35, 77], "hopefulli": 9, "reflect": [10, 11, 23, 24, 102, 108], "imagenet_dir": [10, 11], "assign": [10, 11, 49, 64, 77, 78, 88, 89], "dataset_train": [10, 11], "dataset_valid": [10, 11], "respect": [10, 11, 34, 71, 88, 104], "categorical_crossentropi": [10, 11, 39], "finish": [10, 11, 23, 24], "against": [10, 11, 13, 21, 23, 24, 37, 46, 66, 71, 94], "Of": [10, 11, 13, 15, 16, 17, 23, 24, 83], "cours": [10, 11, 13, 15, 16, 17, 23, 24], "instal": [10, 11, 14, 20, 22, 23, 24, 87, 96, 98, 109, 111], "jointli": [11, 23, 24], "trainabl": [11, 24, 102], "showcas": [12, 25], "analys": [12, 25, 104], "respond": [12, 25], "repres": [12, 25, 28, 30, 33, 35, 37, 39, 41, 45, 46, 48, 49, 56, 59, 60, 64, 66, 68, 79, 80, 81, 88, 89, 90, 97, 103, 104, 105, 108], "second": [12, 25, 35, 51, 69, 83, 88, 106], "singl": [12, 13, 25, 28, 37, 45, 46, 56, 59, 61, 63, 66, 83, 91, 97, 100, 103], "val_dataset": 12, "shown": [12, 25, 36, 60, 63, 64, 72, 103, 104, 107, 114], "itself": [12, 21, 25, 44, 100, 108, 110], "amount": [12, 25, 33], "exactli": [12, 25, 77, 88, 108], "multipl": [12, 25, 30, 52, 56, 57, 58, 61, 62, 65, 68, 70, 75, 83, 88, 98, 100, 109], "demonstr": [12, 25, 33, 90, 96], "quant_analyz": [12, 25, 37, 46, 66, 67, 72], "enable_per_layer_mse_loss": [12, 25, 37, 46, 66], "choic": [12, 13, 25, 37, 49, 100, 108], "enhanc": [12, 25, 37, 46, 48, 64, 66, 104, 108], "track": [12, 25, 64, 84, 104], "minimum": [12, 25, 28, 51, 61, 68, 71, 73, 78, 84], "histogram": [12, 25, 37, 46, 66, 71, 84, 102, 104, 108, 109], "seen": [12, 25, 103, 104], "html": [12, 25, 46, 61, 66, 73, 74, 75, 82, 104, 109, 112], "folder": [12, 25, 71, 104], "structur": [12, 17, 25, 49, 61, 77, 88, 100], "per_layer_quant_en": [12, 25, 104], "per_layer_quant_dis": [12, 25, 104], "min_max_rang": [12, 25, 104], "activations_pdf": [12, 25, 104], "name_": [12, 25, 46, 66], "index_0": [12, 25], "index_1": [12, 25], "index_n": [12, 25], "weights_pdf": [12, 25, 104], "layer1": [12, 25, 36, 65], "param_name_": [12, 25, 46, 66], "channel_index_0": [12, 25], "channel_index_1": [12, 25], "channel_index_n": [12, 25], "layer2": [12, 25, 36, 65], "layern": [12, 25], "per_layer_mse_loss": [12, 25, 104], "axi": [12, 25, 49, 104], "associ": [12, 25, 28, 36, 42, 45, 53, 59, 62, 77, 88, 102], "sub": [12, 25, 52, 75, 108], "illustr": [13, 15, 16, 17, 68, 91, 94, 97, 102, 108, 110, 111, 113, 114], "round": [13, 28, 30, 37, 38, 39, 42, 47, 48, 53, 55, 56, 67, 68, 86, 87, 91, 96, 102, 104, 108], "awai": 13, "low": [13, 15, 17, 22, 67, 87, 91, 93, 100, 102, 103], "basi": [13, 49, 83], "2d": [13, 83, 94, 100, 114], "imagin": 13, "64": [13, 34, 35, 37, 46, 48, 51, 66, 68, 73, 78, 83, 84, 89, 91], "filter": [13, 35], "kernel": [13, 67, 77, 83, 94, 110, 113], "28": [13, 56, 59, 81], "entireti": [13, 35], "matrix": 13, "contrast": [13, 35, 72], "repeat": [13, 64, 90, 94], "uniqu": 13, "attribut": [13, 33, 35, 59, 61, 64, 72, 77, 88, 104], "conv2d_lay": 13, "kernel_s": [13, 35, 52, 61, 62, 69, 90], "snpe": [13, 14], "qnn": [13, 14], "describ": [13, 39, 49, 51, 61, 68, 72, 83, 96, 100, 101, 102, 103, 106, 107, 108, 111], "style": 13, "ensur": [13, 58, 74, 88, 97, 102, 107], "mismatch": 13, "togeth": [13, 83], "pcq_quantsim_config": 13, "tell": 13, "saw": 13, "fashion": [13, 83], "did": [13, 103], "translat": [13, 51], "down": [13, 49, 51], "fo": [13, 51], "r": [13, 33, 45, 46, 51, 59, 64, 66], "NOT": [13, 51], "frozen": [13, 51], "depend": [13, 61, 72, 74, 
75, 81, 95, 96, 102, 106, 109], "observ": [13, 21, 71, 77, 84, 88, 89, 90, 100, 103, 104, 108], "slight": 13, "plai": 13, "resnet50_pcq_adaround": 13, "invok": [13, 28, 30, 36, 38, 41, 46, 48, 51, 56, 66, 67, 68, 77, 88, 100, 102, 111, 112], "lead": [14, 22, 37, 46, 66, 83, 91, 93, 103, 107], "shift": [14, 22, 67, 103], "adjust": [14, 22, 28, 51, 52, 68, 82, 83, 91, 93, 94, 95, 102, 103, 107], "aimet_cl": 14, "cle_applied_model": [14, 31], "resnet50_after_cl": 14, "arxiv": [14, 103], "ab": [14, 103], "1906": [14, 103], "04721": [14, 103], "memori": [15, 16, 17, 30, 56, 64, 84, 95, 100, 110, 113, 114], "restor": [15, 16, 17, 68, 107], "image_net_train": [15, 16, 17, 21, 22, 23, 24], "imagenettrain": [15, 16, 17, 21, 22, 23, 24], "nn": [15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 51, 52, 53, 56, 60, 61, 62, 63, 65, 66, 68, 69, 72, 77, 78, 82, 83, 87, 88, 89, 90, 101, 109], "bool": [15, 16, 17, 19, 21, 22, 23, 24, 25, 30, 33, 36, 39, 41, 42, 45, 48, 53, 55, 56, 59, 61, 64, 65, 68, 78, 83, 84, 89], "gpu": [15, 16, 17, 19, 21, 22, 23, 24, 25, 56, 58, 67, 69, 73, 74, 102, 109], "learning_r": [15, 16, 17, 21, 23, 24, 54, 68], "learning_rate_schedul": [15, 16, 17, 21, 23, 24, 54, 68], "schedul": [15, 16, 17, 21, 23, 24, 105], "trainer": [15, 16, 17, 21, 23, 24, 30, 56, 96], "max_epoch": [15, 16, 17, 21, 23, 24], "is_avail": [15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 54, 90], "target_comp_ratio": [15, 16, 17, 30, 56, 70], "ratio": [15, 16, 17, 30, 56, 70, 94, 95], "9": [15, 16, 17, 18, 19, 22, 23, 24, 25, 30, 33, 45, 51, 59, 61, 64, 68, 69, 75, 78, 81], "num_comp_ratio_candid": [15, 16, 17, 30, 56, 70], "tri": [15, 16, 17, 100, 102], "caus": [15, 16, 17, 35, 100, 101, 102, 106, 107], "33": [15, 16, 17, 75, 78, 89], "66": [15, 16, 17, 78, 89, 95], "00": [15, 16, 17, 78], "longer": [15, 16, 17, 72, 102, 105], "compromis": [15, 16, 17], "modules_to_ignor": [15, 16, 17, 30, 56, 66, 70, 99], "mode": [15, 16, 17, 30, 31, 39, 42, 43, 48, 53, 55, 56, 57, 61, 63, 65, 68, 70, 84, 89, 101, 102, 106], "much": [15, 16, 17, 114], "manual": [15, 16, 17, 30, 49, 56, 65, 72, 78, 89, 92, 100], "reconstruct": [15, 17, 28, 41, 51], "num_reconstruction_sampl": [15, 17, 56], "too": [15, 17, 83], "allow_custom_downsample_op": [15, 17, 56], "downsampl": [15, 17], "cost": [15, 17, 30, 56, 83, 97, 100, 105], "suggest": [15, 17, 39, 68, 97, 100, 103], "function_nam": [15, 16, 17], "eval_iter": [15, 16, 17, 30, 56, 70], "invoc": [15, 16, 17], "compress_schem": [15, 16, 17, 30, 56, 70], "cost_metr": [15, 16, 17, 30, 56, 70], "reduct": [15, 16, 17, 95, 102], "decim": [15, 16, 17, 30, 56, 70], "greedyselectionparamet": [15, 16, 17, 30, 56, 70], "channelpruningparamet": [15, 17, 56], "compressionschem": [15, 16, 17, 30, 56, 70], "costmetr": [15, 16, 17, 30, 56, 70], "greedy_param": [15, 16, 17, 30, 56, 70], "auto_param": [15, 16, 17, 30, 56, 70], "automodeparam": [15, 16, 17, 30, 56, 70], "greedy_select_param": [15, 16, 17, 30, 56], "channel_prun": [15, 17, 30, 56], "modelcompressor": [15, 16, 17, 30, 56, 70], "compress_model": [15, 16, 17, 30, 56, 70, 111], "relev": [15, 16, 17], "compressed_model": [15, 16, 30, 56], "comp_stat": [15, 16], "fall": [15, 16, 17, 97, 106], "sharpli": [15, 16, 17], "15e": [15, 17], "finetuned_model": [15, 16], "singular": [16, 17, 100, 110, 113], "decomposit": [16, 17, 100, 110, 113], "prune": [16, 30, 95, 96, 97, 99, 100, 109, 114], "spatialsvdparamet": [16, 17, 30, 56, 70], "spatial_svd": [16, 17, 30, 56, 70], "5e": [16, 17, 21, 23, 24, 54, 68], "ssvd_compressed_model": 17, "ssvd_comp_stat": 17, 
"ssvd_finetuned_model": 17, "further": [17, 26, 61, 72, 78, 81, 83, 89, 90, 91, 94, 98, 100], "ssvd_cp_compressed_model": 17, "cp_comp_stat": 17, "11": [17, 36, 49, 64, 73, 74, 75, 78, 81], "ssvd_cp_finetuned_model": 17, "imagefold": [18, 20], "tqdm": [18, 20], "root": [18, 20, 74, 75, 90], "compos": [18, 20, 53], "resiz": 18, "centercrop": [18, 20], "totensor": [18, 20, 53, 90], "images_mean": 18, "std": [18, 20], "images_std": 18, "manual_se": 18, "randperm": 18, "tolist": 18, "pin_memori": [18, 61], "evaluate_accuraci": 18, "top1_accuraci": 18, "dim": [18, 53, 69, 83, 90], "no_grad": [18, 19, 20, 22, 23, 24, 25, 51, 62, 68], "logit": [18, 20], "topk": [18, 20, 42], "certain": [18, 19, 22, 23, 24, 25, 51, 60, 61, 66, 68, 83, 100, 101, 106], "guidelin": [18, 19, 22, 23, 24, 25, 38, 41, 48, 51, 61, 67, 90, 102, 105], "autom": [18, 19, 22, 23, 24, 25, 38, 51, 60, 61, 67, 68, 102], "compli": [18, 19, 22, 23, 24, 25, 51, 68], "rand": [18, 19, 21, 22, 23, 24, 25, 28, 37, 45, 52, 58, 62], "modif": [18, 19, 22, 23, 24], "made": [18, 19, 22, 23, 24, 61, 106], "overrid": [18, 19, 22, 23, 24, 61, 68], "300": 18, "verifi": [19, 22, 23, 24, 35, 61], "resnet18_after_cle_bc": [19, 20, 22], "val_transform": 20, "normal": [20, 36, 52, 69, 104], "485": 20, "456": 20, "406": 20, "229": 20, "225": 20, "imagenet_dataset": 20, "_create_sampled_data_load": [20, 53], "subsetrandomsampl": [20, 53], "in_eval_mod": 20, "get_devic": 20, "_dataset": [20, 53], "k": 20, "view_a": 20, "__getitem__": [20, 53], "unlabeled_imagenet_dataset": 20, "unlabeled_imagenet_data_load": 20, "initial_accuraci": [20, 42, 53], "run_infer": [20, 42, 53], "adaround_data_load": [20, 42, 53], "optimized_accuraci": [20, 42, 53], "batchnrom": 21, "focu": 21, "unlik": [21, 79, 80], "script": 21, "didn": [21, 67], "statatist": 21, "finetuned_accuraci": [21, 23, 24], "train_load": [21, 54, 56, 90], "images_dir": 21, "finetuned_accuracy_bn_reestim": 21, "resnet18_after_qat": [21, 23, 24], "reload": 22, "techiqu": 22, "num_quant_sampl": [22, 55], "num_bias_correct_sampl": [22, 55], "quantparam": [22, 55], "bias_correct": [22, 55], "correct_bia": [22, 55], "bc_param": 22, "weight_bw": [22, 55], "act_bw": [22, 55], "round_mod": [22, 55], "held": [23, 24, 77, 88, 90], "Then": [23, 24, 28, 46, 51, 66, 68, 100], "matter": [25, 77], "softwar": [26, 98], "compress": [26, 27, 50, 87, 94, 96, 98, 109, 110, 112, 113, 114], "dramat": 26, "lost": [26, 87, 98, 108], "due": [26, 35, 62, 67, 83, 103], "At": [26, 95, 111], "onnx": [26, 49, 58, 60, 68, 73, 74, 87, 91, 92, 96, 98, 101, 102, 103, 104, 106, 108], "framework": [26, 48, 98, 102, 106, 108], "link": [26, 73, 75, 93, 96, 103, 104], "codebas": 26, "sphinx": 26, "page": [26, 69, 74, 75, 95, 108, 109], "qualcomm": [26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114], "innov": [26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114], "center": [26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 
36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114], "inc": [26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114], "quantsim_config": [26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114], "default_config": [26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114], "35": [26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114], "model": [27, 28, 29, 30, 31, 32, 33, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 88, 89, 91, 92, 93, 94, 95, 96, 97, 99, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114], "ai": [27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114], "toolkit": [27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114], "linear": [28, 29, 36, 41, 51, 54, 61, 62, 64, 65, 71, 72, 77, 83, 88, 90, 93, 94], "format": [28, 33, 34, 36, 39, 41, 46, 51, 64, 66, 68, 83, 86, 89, 92, 99], "prefix": [28, 39, 41, 51, 64, 68, 86, 88], "31": [28, 37, 41, 46, 51, 52, 55, 66, 68, 74, 75], "default_reg_param": [28, 41, 51], "default_beta_rang": [28, 41, 51], "default_warm_start": [28, 41, 51], "sourc": [28, 29, 30, 31, 32, 33, 35, 36, 37, 39, 41, 42, 43, 44, 45, 46, 48, 51, 53, 54, 55, 56, 57, 58, 59, 61, 64, 65, 66, 68, 69, 70, 71, 74, 75, 77, 78, 79, 80, 81, 82, 83, 84, 86, 88, 89, 107], "datasetv2": [28, 29], "stop": [28, 41, 51, 92], 
"beta": [28, 41, 51, 91], "anneal": [28, 41, 51], "start_beta": [28, 41, 51], "end_beta": [28, 41, 51], "warm": [28, 41, 51, 91], "period": [28, 41, 51, 91], "zero": [28, 41, 51, 64, 86, 108, 109], "post_training_percentil": [28, 51, 68], "percentil": [28, 51, 68, 84], "absolut": [28, 45, 51, 56, 59, 68, 78, 89], "nois": [28, 51, 67, 68, 84, 90, 102, 103, 104, 105, 106], "training_range_learning_with_tf_enhanced_init": [28, 51, 68], "aimetlogg": 28, "test_model": 28, "keras_model": 28, "dummy_forward_pass": [28, 69], "intend": [28, 37, 46, 49, 56, 66, 90, 96], "Or": [28, 37, 39, 41, 46, 48, 58, 61, 66, 68, 100], "someth": [28, 37, 39, 41, 46, 48, 66, 68, 100], "apply_adaround_exampl": [28, 41], "set_level_for_all_area": 28, "dataset_s": 28, "possible_batch": 28, "from_tensor_slic": [28, 29, 37], "w4a8": 28, "param_bw": [28, 42, 51, 53, 64], "output_bw": [28, 42, 51, 53, 64], "adarounded_model": [28, 51], "adarounded_sess": 28, "grid": [28, 51, 83], "awar": [29, 38, 54, 63, 77, 91, 93, 96, 98, 100, 102, 107, 108], "bn_re_estimation_dataset": 29, "bn_num_batch": 29, "handl": [29, 42, 53, 54, 83], "undo": [29, 54], "upon": [29, 54, 77, 88, 90], "batch_norm": [29, 36, 54, 65], "qcquantizewrapp": [29, 54], "pair": [29, 30, 36, 54, 55, 56, 65], "got": [29, 54, 61, 65], "overal": [30, 51, 56, 68, 95, 97, 107], "algorithm": [30, 33, 45, 49, 56, 59, 95, 97, 100, 107, 114], "tweak": [30, 36, 56, 64, 65], "compressor": [30, 56], "static": [30, 35, 56, 61, 108], "visualization_url": [30, 56, 70], "callabl": [30, 33, 41, 42, 45, 51, 53, 54, 56, 59, 61, 66, 77, 83], "url": [30, 56, 70, 74, 75, 96, 111], "appear": [30, 36, 52, 56, 61, 62, 65], "compressionstat": [30, 56], "use_monotonic_fit": [30, 56], "saved_eval_scores_dict": [30, 56, 70], "variabl": [30, 35, 56, 61, 74, 75, 78, 89, 96, 97], "express": [30, 56], "comp": [30, 56], "greater": [30, 36, 56, 65, 83, 95, 97], "monoton": [30, 56, 97], "dict": [30, 36, 39, 42, 45, 46, 48, 55, 56, 58, 59, 60, 61, 64, 65, 66, 68, 69, 88, 89], "eval_scor": [30, 46, 56, 66], "pickl": [30, 56], "input_op_nam": 30, "output_op_nam": [30, 33], "union": [30, 32, 35, 36, 39, 42, 44, 46, 51, 52, 53, 55, 56, 57, 58, 59, 65, 66, 68, 83, 86], "manualmodeparam": [30, 56], "rank": [30, 56, 64, 110, 113], "noth": [30, 56, 90], "list_of_module_comp_ratio_pair": [30, 56], "modulecompratiopair": [30, 56], "space": [30, 56, 83], "weight_svd": [30, 56], "comp_ratio": [30, 56], "ivar": 30, "aimet_common_def": 30, "aimet_tensorflow_def": 30, "inp_data": 30, "aimet_spatial_svd": 30, "evalfunct": 30, "denot": [30, 114], "80": [30, 78, 84, 89], "being": [30, 33, 34, 36, 49, 56, 60, 61, 62, 65, 66, 72, 83, 94], "driver": [30, 73, 75], "stat": [30, 46, 54, 56, 82, 84], "comprehens": 31, "detect": [31, 100], "shall": [31, 49], "cle": [31, 43, 57, 63, 67, 91, 92, 96, 102, 107, 109], "rtype": [31, 35, 36, 59, 81], "cross_layer_equalization_auto": [31, 57], "individu": [31, 36, 46, 57, 66, 83, 93, 97, 102, 104, 106, 107], "accord": [32, 44, 58, 102], "amongst": [32, 44, 58], "miss": [32, 35, 44, 49, 58, 62], "issu": [32, 35, 44, 52, 58, 62, 93, 98, 101, 107, 109, 111, 112], "layer_output_util": [32, 44, 58], "layeroutpututil": [32, 44, 58], "save_dir": 32, "keraslayeroutput": 32, "constructor": [32, 36, 44, 48, 55, 58, 60, 61, 65, 68], "generate_layer_output": [32, 44, 58], "input_batch": [32, 44, 58], "disk": [32, 44, 58], "obtain": [32, 36, 44, 46, 49, 58, 65, 69, 75, 95, 104], "aimet_export_artifact": [32, 44, 58], "h5": [32, 98, 102], "simplic": [32, 44, 58], "mandatori": [32, 44, 58], 
"load_encodings_to_sim": [32, 44, 58], "construct": [32, 44, 52, 58, 74, 75, 90, 101], "properli": [32, 44, 58, 78, 89, 90], "pre": [32, 44, 58, 73, 74, 75, 94, 98, 103, 104], "get_pre_processed_input": [32, 44, 58], "fp32_layer_output_util": [32, 44, 58], "fp32_layer_output": [32, 44, 58], "quantsim_layer_output_util": [32, 44, 58], "quantsim_layer_output": [32, 44, 58], "bitop": [33, 45, 59], "starting_op_nam": 33, "quantizergroup": [33, 45, 59], "wrt": 33, "vari": [33, 45, 59, 95, 103, 112], "earli": [33, 45, 59], "exit": [33, 45, 59, 77, 88, 90], "forward_pass_callback_2": 33, "nupi": 33, "last": [33, 99], "kept": [33, 107], "func": [33, 45, 59, 66], "encapsul": [33, 45, 59, 66], "back": [33, 45, 51, 59, 63, 68, 81, 90, 106], "OF": 33, "SUCH": 33, "damag": 33, "spdx": 33, "licens": 33, "identifi": [33, 55, 62, 74, 75, 83, 94, 96, 104, 107, 109, 114], "bsd": 33, "claus": 33, "copyright": 33, "pylint": 33, "statement": [33, 61, 67, 72, 101], "alpha": [33, 64], "directrori": 33, "preset": 33, "parent": [33, 77, 88], "retuern": 33, "seed": [33, 56], "set_se": 33, "evalutaion": 33, "org_top1": 33, "get_quantizated_model": 33, "accept": [33, 97, 103, 107], "09": 33, "cmp_re": 33, "mixed_preision_quant_model": 33, "fast_mixed_precis": 33, "sever": [34, 38, 60, 62, 67, 83, 88, 95, 104], "encourag": [34, 35, 38, 60, 61, 67], "mix": 34, "had": [34, 60, 90], "x2": [34, 60, 61], "relu2": [34, 35, 60, 62], "manditori": 35, "submodul": [35, 60], "via": [35, 67, 87, 95, 108], "inherit": [35, 77, 88], "pure": [35, 60], "over": [35, 42, 46, 53, 56, 66, 78, 84, 88, 91, 97, 100, 112], "inputlay": 35, "portion": 35, "taken": [35, 114], "get_text_classificaiton_model": 35, "model_preparer_two_subclassed_lay": 35, "get_subclass_model_with_functional_lay": 35, "sigmoid": [35, 61, 88], "binary_classifi": 35, "myfunctionalmodel": 35, "my_functional_model": 35, "classifi": 35, "model_preparer_subclassed_model_with_functional_lay": 35, "resembl": 35, "piec": [35, 61], "python": [35, 68, 73, 74, 75], "trace": [35, 60, 82], "symbol": 35, "touch": 35, "part": [35, 37, 42, 46, 53, 63, 66, 83, 100, 103, 104], "static_patch_count": 35, "guarante": 35, "furthermor": 35, "resu": 35, "resblock": 35, "twice": 35, "bad": 35, "bn1": [35, 52, 62, 65], "bn2": 35, "relu1": [35, 60, 62, 69], "plug": [36, 65], "conv2dtranspos": 36, "depthwiseconv2d": [36, 99], "crosslayersc": [36, 65], "scale_model": [36, 65], "clssetinfo": 36, "highbiasfold": [36, 65], "bias_fold": [36, 65], "cls_set_info_list": [36, 65], "bn_layer": [36, 65], "sigma": [36, 65], "element": [36, 49, 65], "model_transform_util": 36, "replace_relu6_with_relu": 36, "cross_layer_equalization_auto_stepwis": 36, "relu6": [36, 55, 65, 88, 103], "model_for_cl": 36, "folded_pair": [36, 65], "bn_dict": [36, 65], "conv_or_linear": 36, "fold_given_batch_norm": [36, 65], "layer_pair": [36, 65], "conv_linear": 36, "is_batch_norm_second": 36, "scale_cls_set": [36, 65], "cls_set": [36, 65], "cls_pair_1": [36, 65], "cls_pair_2": [36, 65], "hold": [36, 55, 65, 81, 83, 86, 88, 106, 107], "along": [36, 64, 65, 81, 83, 90, 105, 108], "depth": [36, 65, 87, 95, 107], "wise": [36, 46, 56, 65, 66, 86, 107], "clssetlayerpairinfo": [36, 65], "scale_factor": [36, 65], "relu_activation_between_lay": [36, 65], "relat": [36, 55, 56, 65, 72, 104, 108], "whose": [36, 58, 61, 65, 72, 82, 83, 103, 106], "cross_layer_equalization_manu": [36, 65], "get_example_layer_pairs_resnet50_for_fold": 36, "consecutive_layer_list": [36, 65], "get_consecutive_layer_list_from_resnet50_for_sc": 36, 
"scaling_factor_list": [36, 65], "format_info_for_high_bias_fold": 36, "conv_op_1": 36, "bn_op_1": 36, "conv_op_2": 36, "bn_op_2": 36, "conv_op_3": 36, "bn_op_3": 36, "bn_op": 36, "upstream": [36, 94, 114], "downstream": [36, 49, 114], "usag": [36, 49, 62, 64, 68, 84, 87], "conv_op": 36, "bn_op_with_meta": 36, "_fold_upstream_flag": 36, "append": [36, 56], "boolean": [36, 77], "is_relu_activation_in_cls_set": 36, "fill": [36, 68], "create_cls_set_info_list": 36, "mse": [37, 46, 66, 72, 104, 108], "quantanalyz": [37, 46, 47, 66, 67, 72, 102, 109], "pdf": [37, 46, 66, 109], "scalar": [37, 46, 66], "hotspot": [37, 46, 66, 104], "stochast": [37, 39, 55, 68], "toi": [37, 45, 59], "num_class": [37, 39, 53], "ey": 37, "image_dataset": 37, "label_dataset": 37, "serv": [37, 42, 46, 53, 66, 88, 111], "own": [37, 42, 46, 48, 51, 53, 54, 55, 66, 68], "action": [37, 46, 48, 51, 54, 55, 66, 68, 114], "prepared_model": [37, 51, 61, 66, 68, 69, 90], "forward_pass_callback_fn": [37, 46, 66], "eval_callback_fn": [37, 46, 66], "approxim": [37, 46, 66, 91, 95, 103, 104], "quant_analyzer_result": [37, 46, 66], "abil": [38, 47, 67, 109], "hardwar": [38, 47, 67, 71, 98, 102, 103, 108], "cross": [38, 42, 47, 53, 55, 63, 67, 71, 91, 92, 96, 101, 102, 104, 107, 112], "estim": [38, 67, 90, 102, 103], "in_plac": [39, 68], "default_data_typ": [39, 48, 68], "mechan": [39, 61, 68], "rule": [39, 83, 106], "custom_object": 39, "pth": [39, 56, 58, 64, 68], "quantize_model": [39, 48], "dummy_x": 39, "dummy_i": 39, "randint": 39, "to_categor": 39, "lr": [39, 90], "param_bw_override_list": [41, 51], "ignore_quant_ops_list": [41, 51], "default_config_fil": [41, 51], "user_onnx_lib": [41, 48], "modelproto": [41, 42, 43, 44, 46, 48], "affect": [41, 51, 83, 93, 106], "id": [41, 42, 44, 53, 70, 74, 111], "librari": [41, 48, 64, 69, 100], "commonli": [41, 51], "among": [41, 42, 51], "ceil": [41, 42, 51], "10k": [41, 51], "15k": [41, 51], "simplifi": [41, 42, 43, 44, 45, 46, 48, 72], "ada_rounded_model": 41, "auto_quant_v2": [42, 53], "cache_id": [42, 53], "strict_valid": [42, 53], "integr": [42, 47, 53, 63, 64, 67, 102], "includ": [42, 45, 49, 53, 59, 68, 72, 82, 83, 93, 97, 104, 105, 106, 108, 109, 111], "manner": [42, 53, 92], "onnxmodel": [42, 46], "proce": [42, 53, 92], "unid": [42, 53], "unintuit": [42, 53], "explicitli": [42, 86, 114], "get_quant_scheme_candid": 42, "highest": [42, 97], "_quantschemepair": 42, "set_quant_scheme_candid": 42, "math": [42, 84, 90], "onnx_model": [42, 43, 46, 48], "dummy_data": [42, 46, 48], "astyp": [42, 45, 46, 48], "float32": [42, 45, 46, 48, 69], "Its": [42, 114], "fed": 42, "unlabelled_data_load": 42, "num_of_sampl": 42, "evaldataload": 42, "acc_top1": 42, "acc_top5": 42, "batch_avg_top_1_5": 42, "4f": [42, 53], "happen": [43, 57, 90], "dir_path": [44, 58], "wherein": [44, 58], "dummy_input_dict": 44, "amp": [45, 59], "quantizer_group": [45, 59], "parameter_quant": [45, 59], "factori": [45, 59], "activation_quant": 45, "get_activation_quant": 45, "name_to_quantizer_dict": [45, 59], "get_active_quant": [45, 59], "qcquantizeop": 45, "get_candid": [45, 59], "get_param_quant": 45, "set_quantizers_to_candid": [45, 59], "bw": [45, 59, 60, 64, 68, 83], "to_list": [45, 59], "quantizer_info": 45, "fp32_output": 45, "quantize_with_mixed_precis": [45, 59], "flow": [45, 59, 61, 63, 67, 102, 107, 108], "algo": [45, 59], "default_bitwidth": [45, 59], "eval_callback_for_phase_1": [45, 59], "eval_callback_func": [45, 59], "eval_callback_for_phase_2": [45, 59], "clean": [45, 59], 
"quantize_with_mixed_precision_start_from_existing_cach": [45, 59], "90": [45, 59], "number_of_sampl": [45, 59], "popul": [45, 49, 59], "perform_ev": [45, 59], "in_tensor": 45, "historgram": 46, "unlabeled_dataset_iter": [46, 53, 66], "interest": [46, 66], "create_quantsim_and_encod": 46, "check_model_sensitivity_to_quant": [46, 66], "perform_per_layer_analysis_by_enabling_quant": 46, "occurr": [46, 66, 94], "record": [46, 52, 66, 84], "layer_nam": [46, 52, 66], "perform_per_layer_analysis_by_disabling_quant": 46, "export_per_layer_encoding_min_max_rang": [46, 66], "esults_dir": [46, 66], "pcq": [46, 66, 93, 104], "param_nam": [46, 66], "export_per_layer_stats_histogram": [46, 66], "ctivations_pdf": [46, 66], "eights_pdf": [46, 66], "n": [46, 66, 83, 90, 109], "am": [46, 66], "channel_index": [46, 66], "export_per_layer_mse_loss": [46, 66], "discard": [46, 66], "unlabeled_data_load": [46, 53, 66], "_get_unlabled_data_load": [46, 66], "autoqu": [47, 67, 96, 102, 105, 109], "unifi": [47, 67], "use_symmetric_encod": [48, 72], "attempt": [48, 102, 103], "line": [48, 51, 54, 55, 68, 71], "max_batch_count": [48, 51, 68], "current_batch_count": [48, 51, 68], "forward_pass_funct": 48, "syntax": 49, "usabl": 49, "xx": 49, "yy": 49, "zz": 49, "major": [49, 100], "revis": 49, "minor": [49, 109], "patch": 49, "substanti": 49, "fulli": [49, 67, 90, 99, 100, 113], "bug": [49, 109], "backward": [49, 65, 90], "assum": [49, 53, 75, 83, 86, 97], "string": [49, 106], "activation_encod": 49, "tensor_nam": 49, "param_encod": [49, 72], "constraint": [49, 83], "depict": 49, "6086959838867188": 49, "109158515930176": 49, "114": 49, "018501389771699905": 49, "21": [49, 84], "558866932988167": 49, "12636379897594452": 49, "12": [49, 73, 74, 75, 78, 83, 84], "010530316270887852": 49, "06318144500255585": 49, "06268782913684845": 49, "127": [49, 81], "0004936049808748066": 49, "fc1": [49, 61], "05589814856648445": 49, "05546144023537636": 49, "0004367042565718293": 49, "184721499681473": 49, "10788747668266296": 49, "0089906234367221": 49, "conv2d_1": 49, "1020304188132286": 49, "10380396991968155": 49, "008650330936207491": 49, "readvariableop": 49, "1462666392326355": 49, "1451239287853241": 49, "126": 49, "0011427081098743512": 49, "08333279937505722": 49, "08268175274133682": 49, "0006510374592799766": 49, "dtype": [49, 61, 64, 72, 79, 80, 81, 83, 86], "datatyp": [49, 69], "highlight": [49, 103, 111, 112], "quantizer_arg": 49, "activation_bitwidth": 49, "param_bitwidth": 49, "broken": 49, "occur": [49, 82, 83, 86], "who": 49, "benefit": [49, 83, 91], "knowledg": 49, "experiment": [51, 52, 53, 66, 68, 72, 83, 100], "v2": [51, 53, 66, 68, 77, 78, 79, 80, 81, 82, 83, 84, 86, 87, 88, 89, 90], "namespac": [51, 53, 66, 67, 68, 72], "visit": [51, 53, 66, 68, 75, 87], "overview": [51, 53, 66, 68, 72], "pars": [51, 68, 86], "get_train_dataload": [51, 54], "emploi": [51, 68], "quantized_resnet18": [51, 68], "arch_check": 52, "archcheck": 52, "check_model_arch": 52, "result_dir": 52, "_node_check_dict": 52, "arch_checker_report": 52, "dotted_name_op": 52, "nodeerrorreportobject": 52, "archcheckerreport": 52, "condit": [52, 61, 62], "modelwithnotenoughchannel": 52, "prelu": [52, 88], "stride": [52, 61, 62, 90], "batchnorm2d": [52, 62, 65, 88, 90], "example_check_for_number_of_conv_channel": 52, "fewer": [52, 100], "logger": [52, 62], "_check_conv_channel_32_bas": 52, "_check_conv_channel_larger_than_32": 52, "modelwithprelu": 52, "prelu1": 52, "example_check_for_non_performant_activ": 52, "num_paramet": 52, 
"_activation_check": 52, "standalon": [52, 102], "modelwithnonfoldablebn": 52, "foldabl": 52, "avg_pool1": 52, "avgpool2d": [52, 88], "example_check_for_standalone_bn": 52, "averagepool": 52, "prevent": [52, 61, 72, 94, 101], "ep": [52, 90], "05": [52, 64, 78, 90], "momentum": [52, 90], "affin": [52, 64, 67, 77, 81, 83, 86, 87, 88, 89, 90], "track_running_stat": [52, 90], "_check_batch_norm_fold": 52, "model_prepare_requir": 53, "hen": 53, "_subset_sampl": 53, "sampler": 53, "fp32_model": 53, "fakedata": 53, "eval_data_load": 53, "num_correct_predict": 53, "argmax": [53, 90], "deprec": [53, 72, 102], "dummy_input_on_cpu": 53, "dummy_input_on_gpu": 53, "preced": [54, 114], "var": 54, "load_fp32_model": 54, "imagenetpipelin": 54, "quant_sim": [54, 68], "quant_param": 55, "conv_bn_dict": 55, "perform_only_empirical_bias_corr": 55, "layers_to_ignor": 55, "unless": [55, 75, 77, 86, 114], "remain": [55, 72, 102, 103, 108], "calc": 55, "corr": 55, "irrespect": 55, "fact": [55, 83], "elig": 55, "input_bn": 55, "output_bn": 55, "in_activation_typ": 55, "no_activ": 55, "out_activation_typ": 55, "hode": 55, "No": [55, 62], "mobilenetv2": [55, 65], "512": 55, "module_prop_dict": 55, "find_all_conv_bn_with_activ": 55, "train_model": 56, "train_flag": 56, "weightsvdparamet": 56, "rank_select_schem": 56, "select_param": 56, "rankselectschem": 56, "tar": 56, "evaluate_model": 56, "honor": 56, "obvious": 56, "spatial_svd_auto_mod": 56, "mnist": 56, "mnist_trained_on_gpu": 56, "pretti": 56, "easili": 56, "spatial_svd_manual_mod": 56, "manual_param": 56, "weight_svd_auto_mod": 56, "rank_select": 56, "weight_svd_manual_mod": 56, "channel_pruning_auto_mod": 56, "mnist_torch_model": 56, "dataloadermnist": 56, "channel_pruning_manual_mod": 56, "_layer_db": 56, "ture": 56, "batch_callback": 56, "spatial_svd_auto_mode_with_layerwise_finetun": 56, "torchscript": [58, 68, 87], "naming_schem": 58, "namingschem": 58, "onnx_export_arg": [58, 68, 69], "onnxexportapiarg": [58, 68], "numer": 58, "onnx_util": 58, "pythonpath": [58, 96], "successfulli": 58, "map_loc": 58, "model_torch": 58, "phase2_revers": 59, "phase2": 59, "input_quant": [59, 72, 77, 88, 90], "output_quant": [59, 72, 77, 88, 90], "supported_kernel_op": 59, "get_input_quantizer_modul": 59, "roughli": 59, "convers": 60, "onnx_file_nam": 60, "jit": 60, "traceabl": [60, 61], "stateless": 60, "former": 60, "retrain": [60, 102], "whenev": [60, 90], "image_rgb": 60, "rgb_output": 60, "image_bw": 60, "bw_output": 60, "rgb": 60, "elementwis": [61, 88, 109], "unrol": 61, "independ": [61, 107], "modules_to_exclud": 61, "module_classes_to_exclud": 61, "concrete_arg": 61, "instanc": [61, 62, 68, 77, 111], "duplic": 61, "exclud": [61, 62, 66, 86], "partial": 61, "special": [61, 90], "control": [61, 88, 108], "won": 61, "symbolic_trac": 61, "graphmodul": [61, 90], "modelwithfunctionalrelu": 61, "9216": 61, "fc2": 61, "model_preparer_functional_exampl": 61, "allclos": 61, "modelwithreusedrelu": 61, "model_preparer_reused_exampl": 61, "modelwithelementwiseaddop": 61, "x1": 61, "model_preparer_elementwise_add_exampl": 61, "dynam": [61, 79, 80, 103, 108, 109, 112], "branch": [61, 96, 106], "weren": 61, "traceerror": 61, "workaround": [61, 101], "problem": [61, 107], "across": [61, 64, 83, 103, 104, 112], "Such": 61, "concret": 61, "truli": 61, "scope": 61, "preserv": 61, "custom_function_not_to_be_trac": 61, "call_funct": 61, "__torch_function__": 61, "sqrt": 61, "modelwithnontorchfunct": 61, "model_transform": 61, "tracer": 61, "is_leaf_modul": 61, "leaf": [61, 86, 109], 
"expos": [61, 91, 104], "module_to_exclud": 61, "examin": 61, "custommodul": 61, "softplu": [61, 88], "custommodel": 61, "arang": [61, 69, 78], "traceback": 61, "typeerror": 61, "receiv": 61, "invalid": [61, 83], "proxi": 61, "layout": 61, "requires_grad": [61, 81], "problemat": [61, 107, 112], "determinist": 61, "hard": 61, "do_not_trace_m": 61, "known": [62, 83, 98], "share": [62, 69, 88], "modelwithreusednod": 62, "inplac": 62, "2592": [62, 84], "view": [62, 69, 82, 87, 90, 93, 98, 101, 103, 104, 111], "model_valid": 62, "modelvalid": 62, "validate_example_model": 62, "validate_model": 62, "validate_for_reused_modul": 62, "0x7f127685a598": 62, "resolv": 62, "warn": [62, 102], "redefin": [62, 90], "distinct": [62, 90], "rewrit": [62, 101], "modelwithoutreusednod": 62, "rerun": 62, "0x7ff577373598": 62, "validate_for_missing_modul": 62, "0x7ff5703eff28": 62, "modelwithfunctionallinear": 62, "0x7f9dd9bd90d0": 62, "matmul_8": 62, "connectedgraph": 62, "op_type_map": 62, "recogn": [62, 108], "functional_op": 62, "modelwithoutfunctionallinear": 62, "parallel": [63, 84], "dataparallel": [63, 67], "move": [63, 96, 102], "forth": 63, "huggingfac": 64, "similar": [64, 83, 103, 105, 108], "alon": 64, "loraconfig": 64, "get_peft_model": 64, "lora_config": 64, "lora_alpha": 64, "lora_dropout": 64, "target_modul": 64, "replace_lora_layers_with_quantizable_lay": 64, "meta": [64, 98, 102], "track_lora_meta_data": 64, "meta_data": 64, "tmp_dir": 64, "convinplacelinear": 64, "peftquantutil": 64, "peft_util": 64, "name_to_module_dict": 64, "disable_lora_adapt": 64, "recomput": 64, "freeze_base_model_param_quant": 64, "tmpdir": 64, "export_model": [64, 68], "filename_prefix_encod": [64, 68], "base_encod": 64, "enable_adapter_and_load_weight": 64, "lora_weights_after_adaptation_for_adapter1": 64, "safetensor": 64, "use_safetensor": 64, "lora_modul": 64, "get_quantized_lora_lay": 64, "param_quant": [64, 72, 77, 83, 88, 90], "quantizedequant": [64, 72, 78, 80, 81, 83, 88, 89, 90], "base_model": 64, "adapter1": 64, "export_adapter_weight": 64, "adapter1_weight": 64, "configr": 64, "adaptermetadata": 64, "lora_a": 64, "lora_b": 64, "replaced_module_typ": 64, "adapater_name_to_meta_data": 64, "init": 64, "track_meta_data": 64, "pt": 64, "adapter_weights_path": 64, "bin": [64, 74, 75, 84], "freeze_base_model": 64, "freeze_base_model_activation_quant": 64, "get_fp_lora_lay": 64, "quantize_lora_scale_with_fixed_rang": 64, "scale_min": 64, "scale_max": 64, "mul": 64, "set_bitwidth_for_lora_adapt": 64, "conv1d": [65, 88, 109], "convtranspose2d": [65, 88], "batchnorm1d": [65, 88], "cross_layer_equalization_auto_step_by_step": 65, "conv_bn": 65, "replace_modules_of_type1_with_type2": 65, "layer_list": 65, "clspairinfo": 65, "depthwis": [65, 93, 109], "cross_layer_equalization_depthwise_lay": 65, "perform_per_layer_analysis_by_enabling_quant_wrapp": 66, "perform_per_layer_analysis_by_disabling_quant_wrapp": 66, "wrapped_module_nam": 66, "tap": 66, "packag": [67, 86, 96, 109], "plan": 67, "upgrad": [67, 75], "flexibil": 67, "extens": [67, 74, 75, 88, 96], "futur": [67, 82, 83, 86], "releas": [67, 96, 101], "core": 67, "checker": 67, "concern": 67, "peft": 67, "lora": 67, "introduc": [67, 83, 88, 102, 106, 108], "power": [67, 107], "blockwis": [67, 86], "lpbq": [67, 86], "dispatch": 67, "carefulli": 67, "compatibil": 67, "public": 67, "seq_ms": [67, 72], "apply_seq_ms": [67, 72], "orthogon": 67, "migrat": 67, "quantizationmixin": [67, 87, 88], "save_checkpoint": 68, "file_path": 68, "checkpoint": [68, 98, 102], 
"load_checkpoint": 68, "quant_sim_model": 68, "propagate_encod": 68, "export_to_torchscript": 68, "use_embedded_encod": 68, "opset_vers": [68, 69], "enable_onnx_check": 68, "entri": [68, 106], "data_typ": [68, 72], "fakequ": 68, "trainingextens": 68, "src": 68, "forward_pass_arg": 68, "quatiz": 68, "unction": 68, "idea": [69, 90], "travel": 69, "sparseconvolution3d": 69, "spars": 69, "sparsetensorwrapp": 69, "sparseconvtensor": 69, "scriptmodul": 69, "scatterdens": 69, "pro": [69, 83], "spconv3d": 69, "spconvmodel": 69, "spconv_tensor": 69, "sparseconv3d": 69, "spconv1": 69, "in_channel": [69, 83, 90], "out_channel": [69, 83, 90], "spconv2": 69, "conv3d": [69, 88], "normal_conv3d": 69, "spconv_scatter_dens": 69, "coord": 69, "voxel": 69, "sp_tensor": 69, "sp_outputs1": 69, "sp_outputs2": 69, "sp_outputs2_dens": 69, "sp_output": 69, "sp_outputs_relu": 69, "dense_tensor_sp_input": 69, "ncdhw": 69, "permut": 69, "ndhwc": 69, "stack": 69, "meshgrid": 69, "ij": 69, "reshap": [69, 88], "operator_export_typ": 69, "operatorexporttyp": 69, "onnx_aten_fallback": 69, "converter_arg": 69, "input_dtyp": 69, "int32": 69, "expand_sparse_op_structur": 69, "preserve_io": 69, "exported_sp_conv_model": 69, "visualize_serialized_data": 70, "visualizecompress": [70, 111], "bokeh": [70, 71], "server": 70, "publish": [70, 71], "tabl": [70, 82, 96, 111], "display_eval_scor": [70, 111], "saved_eval_scores_dict_path": 70, "display_comp_ratio_plot": [70, 111], "comp_ratio_list_path": 70, "pkl": 70, "start_bokeh_server_sess": 70, "model_compression_with_visu": 70, "65": [70, 95], "resnet18_eval_scor": 70, "comp_ratios_file_path": 70, "greedy_selection_comp_ratios_list": 70, "eval_scores_path": 70, "compression_visu": 70, "termin": [70, 96], "visualize_model": 71, "visualize_relative_weight_ranges_to_identify_problematic_lay": 71, "selected_lay": 71, "figur": [71, 91, 94, 97, 107, 110, 111, 113, 114], "visualize_weight_rang": 71, "scatter": 71, "deviat": 71, "visualize_changes_after_optim": 71, "old_model": 71, "new_model": 71, "visualize_changes_in_model_after_and_before_cl": 71, "visualiz": 71, "model_copi": 71, "visualize_weight_ranges_model": 71, "usual": [71, 90, 100, 105, 108], "visualize_relative_weight_ranges_model": 71, "easier": [72, 83], "simpler": 72, "extend": 72, "fundament": 72, "advis": 72, "subject": 72, "interact": [72, 82], "hood": 72, "properti": 72, "compon": 72, "stai": 72, "quantizewrapp": 72, "quantizationsimmodelv1": 72, "all_quant_wrapp": 72, "quant_wrapp": 72, "staticgridquantwrapp": 72, "_module_to_wrap": 72, "in_featur": [72, 77, 88, 90], "out_featur": [72, 77, 88, 90], "quantizedlinear": [72, 77, 83, 88, 90], "quantizedconv2d": [72, 83, 88, 90], "quantizationsimmodelv2": 72, "sim2": 72, "all_q_modul": 72, "qmodul": 72, "q_modul": 72, "moduledict": [72, 77, 88, 90], "modulelist": [72, 77, 88, 90], "quantizedrelu": [72, 88, 90], "staticgridquant": 72, "learnedgridquant": 72, "tensor_quant": 72, "staticgridperchannelquant": 72, "fp_quantiz": 72, "affine_quant": 72, "affinequant": [72, 79, 80], "floatquant": [72, 79, 80], "q": [72, 77, 78, 79, 80, 81, 88, 89, 108], "affine_q": 72, "affine_qdq": 72, "fp_qdq": 72, "floatquantizedequant": [72, 80], "float16": [72, 79, 80, 83], "setup": [72, 73], "sim1": 72, "wrap_linear": 72, "qlinear": [72, 77, 88], "symmetri": 72, "is_unsigned_symmetr": 72, "use_strict_symmetr": 72, "sign": [72, 78, 108], "libpymo": 72, "tfencod": 72, "copy_": 72, "OR": 72, "_remove_input_quant": 72, "_remove_output_quant": 72, "_remove_param_quant": 72, "temporarili": 72, 
"_is_encoding_frozen": 72, "freeze_encod": 72, "concept": 72, "mimick": 72, "allow_overwrit": [72, 86, 89], "requires_grad_": 72, "overwritten": 72, "pypi": 73, "intel": 73, "x86": 73, "processor": 73, "linux": [73, 75], "ubuntu": [73, 75], "22": [73, 75, 84, 90], "04": [73, 75], "lt": [73, 75], "pip": [73, 74, 75, 87, 96, 111], "apt": [73, 74, 75, 87], "liblapack": [73, 74, 75, 87], "libpython3": 73, "dev": [73, 74, 75], "python3": [73, 74, 75, 87, 96], "variant": [73, 75, 91, 92, 93, 102, 103, 104, 108], "latest": [73, 74], "whl": [73, 74, 75], "host": [73, 74, 75, 109, 111], "github": [73, 74, 75, 96, 109], "com": [73, 74, 75, 96, 109], "quic": [73, 74, 75, 96, 109], "prerequisit": [73, 111], "requisit": [73, 75], "cu121": [73, 74, 75], "cp310": [73, 74, 75], "manylinux_2_34_x86_64": [73, 74, 75], "torch_stabl": [73, 74, 75], "13": [73, 74, 78, 84], "cu117": 73, "cu118": 73, "older": 73, "brows": [73, 74, 75], "bash": [73, 74], "command": [73, 74, 75, 96, 111], "shell": 73, "nvidia": [73, 74, 75], "card": 73, "capabl": [73, 88, 111], "docker": 73, "455": 73, "alwai": [73, 97], "newer": 73, "cudnn": 73, "machin": [73, 74, 100], "develop": [73, 74, 75, 82, 83, 86], "click": 73, "instruct": [74, 75, 87, 96], "variant_str": 74, "ONE": 74, "pt113": 74, "aimet_vari": 74, "workspac": [74, 96], "absolute_path_to_workspac": [74, 96], "docker_image_nam": 74, "codelinaro": 74, "docker_container_nam": 74, "any_nam": 74, "any_tag": 74, "jenkin": 74, "dockerfil": 74, "grep": 74, "kill": 74, "rm": 74, "passwd": 74, "ro": 74, "home": 74, "mnt": 74, "entrypoint": 74, "hostnam": 74, "filesystem": 74, "port": [74, 111], "port_id": 74, "project": [74, 75], "wish": [74, 75], "tip": [74, 75], "post1": [74, 75], "prepend": [74, 75, 88, 96], "sudo": [74, 75, 96], "wheel": [74, 75], "tag": [74, 75, 96, 109], "ex": [74, 75, 83], "34": [74, 75], "release_tag": [74, 75, 96], "download_url": [74, 75], "wheel_file_nam": [74, 75], "find_pkg_url_str": [74, 75], "usr": [74, 75], "lib": [74, 75], "dist": [74, 75], "envsetup": [74, 75], "sh": [74, 75], "pend": 75, "pip3": 75, "h": [75, 96, 113, 114], "local": [75, 96, 111], "accordingli": 75, "ye": [75, 100], "wget": 75, "gnupg2": 75, "archiv": 75, "exact": [75, 93, 102], "date": 75, "aforement": 75, "repo": 75, "ubuntu2204": 75, "x86_64": 75, "keyring_1": 75, "1_all": 75, "deb": 75, "dpkg": 75, "cat": 75, "reqs_deb_common": 75, "txt": 75, "xarg": 75, "reqs_deb_torch_common": 75, "reqs_deb_onnx_common": 75, "reqs_deb_tf_gpu": 75, "reqs_deb_torch_gpu": 75, "reqs_deb_onnx_gpu": 75, "uninstal": 75, "onnxruntime_v": 75, "c": [75, 95], "__version__": 75, "ln": 75, "gnu": 75, "libjpeg": 75, "chose": 75, "mixin": [77, 88], "quantizerbas": [77, 88, 89], "behav": [77, 88, 107], "abstract": [77, 88, 89], "__quant_init__": [77, 88], "initializd": [77, 88], "overridden": [77, 88, 106], "length": [77, 83, 88], "set_kernel": 77, "quantizedtensor": [77, 78, 81, 89], "keyword": 77, "output_encod": 77, "context": [77, 88, 90], "underli": [77, 107], "int_multipli": 77, "enc": 77, "notimplementederror": 77, "q_output": 77, "quantized_repr": [77, 81], "dq_output": 77, "qmult": 77, "quantizedmultipli": [77, 88], "classmethod": 77, "set_default_kernel": 77, "get_kernel": 77, "enter": [77, 88, 92], "is_initi": [77, 78, 79, 80, 88, 89], "from_modul": 77, "quantized_linear": 77, "get_default_kernel": 77, "retriev": 77, "module_cl": 77, "decor": 77, "subpackag": 77, "throw": 77, "declar": 77, "maskedadd": 77, "mask": 77, "quantizedmaskedadd": 77, "input_qtzr": 77, "value_qtzr": 77, "output_qtzr": 
77, "clamp": [78, 79, 80, 89, 108], "lceil": [78, 79, 80, 89], "frac": [78, 79, 80, 89], "rfloor": [78, 79, 80, 89], "qmin": [78, 89, 108], "qmax": [78, 89, 108], "learnabl": [78, 89], "theta_": [78, 89], "pmatrix": [78, 89], "b_0": [78, 89], "b_1": [78, 83, 89], "cdot": [78, 89], "b_": [78, 89], "d": [78, 89], "equat": [78, 83, 89, 108], "out_": [78, 89], "j_0": [78, 89], "j_": [78, 89], "input_": [78, 89], "scale_": [78, 89], "i_0": [78, 89], "i_": [78, 89], "offset_": [78, 89], "quad": [78, 89, 108], "forall_": [78, 89], "leq": [78, 89], "i_d": [78, 89], "lfloor": [78, 79, 80, 89], "j_d": [78, 89], "b_d": [78, 89], "asymmetr": [78, 84, 89, 106, 108], "encoding_analyz": [78, 79, 80, 84, 89], "encodinganalyz": [78, 79, 80, 84, 89], "block_siz": [78, 83, 89], "129": [78, 89, 101], "255": [78, 81, 89], "122": [78, 89], "192": [78, 89], "106": [78, 89], "94": [78, 89], "145": [78, 89], "181": [78, 89], "144": [78, 89], "194": [78, 89], "74": [78, 89], "86": [78, 89], "150": [78, 89], "103": [78, 89], "37": [78, 89], "111": [78, 89], "237": [78, 89], "218": [78, 89], "49": [78, 89], "155": [78, 89], "179": [78, 89], "89": [78, 89], "110": [78, 89], "17": [78, 84, 89], "36": [78, 89], "83": [78, 89], "grad_fn": [78, 81, 89], "aliasbackward0": [78, 81, 89], "ones_lik": [78, 89], "187": [78, 89], "186": [78, 89], "131": [78, 89], "203": [78, 89], "143": [78, 89], "152": [78, 89], "226": [78, 89], "55": [78, 89], "172": [78, 89], "207": [78, 89], "146": [78, 89], "216": [78, 89], "238": [78, 89], "141": [78, 89], "178": [78, 89], "188": [78, 89], "63": [78, 89], "59": [78, 89], "19": [78, 84, 89], "162": [78, 89], "30": [78, 89], "109": [78, 89], "dequant": [78, 81, 89], "overlin": [78, 89], "qdq": [78, 79, 80, 89], "dequantizedtensor": [78, 81, 89], "2771": [78, 89], "3038": [78, 89], "0819": [78, 89], "9700": [78, 89], "9487": [78, 89], "1307": [78, 89], "7894": [78, 89], "1709": [78, 89], "2212": [78, 89], "7741": [78, 89], "0295": [78, 89], "2265": [78, 89], "0564": [78, 89], "6177": [78, 89], "0386": [78, 89], "0176": [78, 89], "6054": [78, 89], "8836": [78, 89], "1232": [78, 89], "8229": [78, 89], "5540": [78, 89], "3992": [78, 89], "2363": [78, 89], "2546": [78, 89], "0036": [78, 89], "2355": [78, 89], "1741": [78, 89], "6079": [78, 89], "6247": [78, 89], "0115": [78, 89], "2458": [78, 89], "9157": [78, 89], "4694": [78, 89], "0639": [78, 89], "2568": [78, 89], "0680": [78, 89], "6695": [78, 89], "7932": [78, 89], "1889": [78, 89], "0158": [78, 89], "5695": [78, 89], "5220": [78, 89], "1977": [78, 89], "4475": [78, 89], "0424": [78, 89], "1128": [78, 89], "8796": [78, 89], "1060": [78, 89], "5897": [78, 89], "6196": [78, 89], "9961": [78, 89], "0549": [78, 89], "6431": [78, 89], "0039": [78, 89], "8706": [78, 89], "4706": [78, 89], "2353": [78, 89], "8078": [78, 89], "3451": [78, 89], "1176": [78, 89], "4549": [78, 89], "0471": [78, 89], "5255": [78, 89], "4157": [78, 89], "0784": [78, 89], "5333": [78, 89], "1647": [78, 89], "2118": [78, 89], "2196": [78, 89], "9176": [78, 89], "9490": [78, 89], "7765": [78, 89], "4784": [78, 89], "6039": [78, 89], "3137": [78, 89], "3216": [78, 89], "8000": [78, 89], "4392": [78, 89], "4863": [78, 89], "overload": 78, "rceil": 78, "num_step": [78, 84], "num": 78, "_step": 78, "0000e": 78, "5000e": 78, "02": 78, "1921e": 78, "08": 78, "0500e": 78, "1000e": 78, "1500e": 78, "2000e": 78, "2500e": 78, "14": [78, 90], "quantize_dequant": 78, "0000": [78, 81], "0667": 78, "1333": 78, "2667": 78, "3333": 78, "4000": [78, 81], "4667": 78, "6000": [78, 81], 
"6667": 78, "7333": 78, "8667": 78, "9333": 78, "exponent_bit": [79, 80, 83], "mantissa_bit": [79, 80, 83], "cast": [79, 80], "expon": [79, 80, 83], "mantissa": [79, 80, 83], "x_c": [79, 80], "log_2": [79, 80], "ieee": [79, 80, 100, 103], "_max": [79, 80], "mutual": [79, 80, 83], "exclus": [79, 80, 83], "finer": [79, 80, 83], "8998": [79, 80], "0947": [79, 80], "0891": [79, 80], "1727": [79, 80], "is_bfloat16": [79, 80], "8984": [79, 80], "0859": [79, 80], "1729": [79, 80], "minmaxencodinganalyz": [79, 80, 84], "is_float16": [79, 80], "8994": [79, 80], "0889": [79, 80], "alia": [79, 80], "encodingbas": [81, 89], "57": 81, "312": 81, "153": 81, "205": 81, "set_rang": 81, "x_q": 81, "26": 81, "23": [81, 84], "x_dq": 81, "carri": 81, "gradient": 81, "thu": 81, "autograd": 81, "backpropag": 81, "38": [81, 100], "40": 81, "39": [81, 90], "51": 81, "521": 81, "41": 81, "quant_dequ": 81, "x_qdq": 81, "52": 81, "68": 81, "97": 81, "uint8": 81, "heavi": [82, 83, 86, 111, 112], "notic": [82, 83, 86], "visualization_tool": 82, "visualize_stat": 82, "save_path": 82, "quant_stats_visu": 82, "threshold": [82, 88, 92], "exce": 82, "exceed": 82, "counterpart": [83, 88], "come": [83, 100, 105], "split": 83, "con": 83, "storag": 83, "drawback": 83, "outlier": [83, 104, 108], "dimens": [83, 86, 88, 107, 110, 113], "influenc": 83, "resid": [83, 109], "chunk": 83, "isol": 83, "favor": 83, "relationship": 83, "long": [83, 86], "b_2": 83, "b_n": 83, "s_1": 83, "s_2": 83, "s_n": 83, "evenli": 83, "divid": [83, 88, 95, 105], "divis": 83, "permit": 83, "3d": 83, "arbitrari": 83, "restrict": [83, 101], "themselv": [83, 105], "linear_1": 83, "lie": 83, "leverag": 83, "expans": [83, 100], "groupedblockquantizedequant": 83, "decompressed_bw": 83, "expand": 83, "block_group": 83, "config_util": 83, "set_blockwise_quantization_for_weight": 83, "input_channel": 83, "linear1": 83, "isinst": 83, "signific": [83, 107], "switch": 83, "docstr": 83, "4d": 83, "mention": 83, "assist": 83, "set_activation_quantizers_to_float": 83, "set_grouped_blockwise_quantization_for_weight": 83, "decompress": 83, "addition": 83, "larger": [83, 110, 113], "encoding_vers": [83, 86], "exported_model": [83, 86], "gather": 84, "reset_stat": 84, "reset": 84, "update_stat": 84, "pow": 84, "0991": 84, "3696": 84, "_minmaxrang": 84, "1721": 84, "sqnrencodinganalyz": 84, "num_bin": 84, "2048": [84, 86], "asymmetric_delta_candid": 84, "symmetric_delta_candid": 84, "101": 84, "offset_candid": 84, "max_parallel": 84, "gamma": 84, "lowest": [84, 94], "3612": 84, "8497": 84, "_histogram": 84, "bin_edg": 84, "8907": 84, "3625": 84, "8343": 84, "3061": 84, "7779": 84, "2497": 84, "2784": 84, "8066": 84, "3348": 84, "8630": 84, "3912": 84, "7080": 84, "2438": 84, "percentileencodinganalyz": 84, "largest": 84, "smallest": 84, "1188": 84, "3368": 84, "27": 84, "5710": 84, "0989": 84, "6269": 84, "1548": 84, "6827": 84, "2106": 84, "2614": 84, "7335": 84, "2056": 84, "6776": 84, "1497": 84, "gptvq_weight": 86, "apply_gptvq": 86, "gptvq_param": 86, "param_encoding_path": 86, "module_names_to_exclud": 86, "block_level_module_nam": 86, "file_name_prefix": 86, "config_file_path": 86, "gptvqparamet": 86, "dataclass": 86, "row_axi": 86, "col_axi": 86, "rows_per_block": 86, "cols_per_block": 86, "vector_dim": 86, "vector_bw": 86, "vector_strid": 86, "index_bw": 86, "num_of_kmeans_iter": 86, "assignment_chunk_s": 86, "carrier": 86, "125m": 86, "optforcausallm": 86, "from_pretrain": 86, "facebook": 86, "gptvq_applied_model": 86, "lm_head": 86, "gptvq_opt": 86, 
"load_encod": 86, "deploi": [87, 108], "edg": [87, 98], "incur": [87, 98, 104], "workflow": [87, 90, 95, 98], "sample_input": [87, 90], "sample_output": 87, "out_dir": 87, "quantized_model": 87, "quickstart": 87, "product": [87, 95, 98], "technologi": [87, 98], "subsidiari": [87, 98], "extra": 88, "nativ": 88, "quantizedsoftmax": [88, 90], "qmul": 88, "sens": 88, "qadd": 88, "quantizedadd": 88, "calibration_data_load": 88, "adaptiveavgpool1d": 88, "quantizedadaptiveavgpool1d": 88, "adaptiveavgpool2d": 88, "quantizedadaptiveavgpool2d": 88, "adaptiveavgpool3d": 88, "quantizedadaptiveavgpool3d": 88, "adaptivemaxpool1d": 88, "quantizedadaptivemaxpool1d": 88, "adaptivemaxpool2d": 88, "quantizedadaptivemaxpool2d": 88, "adaptivemaxpool3d": 88, "quantizedadaptivemaxpool3d": 88, "alphadropout": 88, "quantizedalphadropout": 88, "avgpool1d": 88, "quantizedavgpool1d": 88, "quantizedavgpool2d": 88, "avgpool3d": 88, "quantizedavgpool3d": 88, "quantizedbatchnorm1d": 88, "quantizedbatchnorm2d": 88, "batchnorm3d": 88, "quantizedbatchnorm3d": 88, "celu": 88, "quantizedcelu": 88, "channelshuffl": 88, "quantizedchannelshuffl": 88, "constantpad1d": 88, "quantizedconstantpad1d": 88, "constantpad2d": 88, "quantizedconstantpad2d": 88, "constantpad3d": 88, "quantizedconstantpad3d": 88, "quantizedconv1d": 88, "quantizedconv3d": 88, "convtranspose1d": [88, 109], "quantizedconvtranspose1d": 88, "quantizedconvtranspose2d": 88, "convtranspose3d": 88, "quantizedconvtranspose3d": 88, "quantizeddropout": 88, "dropout2d": 88, "quantizeddropout2d": 88, "dropout3d": 88, "quantizeddropout3d": 88, "elu": 88, "quantizedelu": 88, "featurealphadropout": 88, "quantizedfeaturealphadropout": 88, "quantizedflatten": 88, "quantizedfold": 88, "fractionalmaxpool2d": 88, "quantizedfractionalmaxpool2d": 88, "fractionalmaxpool3d": 88, "quantizedfractionalmaxpool3d": 88, "gelu": 88, "quantizedgelu": 88, "glu": 88, "quantizedglu": 88, "groupnorm": 88, "quantizedgroupnorm": 88, "hardshrink": 88, "quantizedhardshrink": 88, "hardsigmoid": 88, "quantizedhardsigmoid": 88, "hardswish": 88, "quantizedhardswish": 88, "hardtanh": 88, "quantizedhardtanh": 88, "instancenorm1d": 88, "quantizedinstancenorm1d": 88, "instancenorm2d": 88, "quantizedinstancenorm2d": 88, "instancenorm3d": 88, "quantizedinstancenorm3d": 88, "lppool1d": 88, "quantizedlppool1d": 88, "lppool2d": 88, "quantizedlppool2d": 88, "quantizedlayernorm": 88, "leakyrelu": 88, "quantizedleakyrelu": 88, "localresponsenorm": 88, "quantizedlocalresponsenorm": 88, "logsigmoid": 88, "quantizedlogsigmoid": 88, "logsoftmax": 88, "quantizedlogsoftmax": 88, "maxpool1d": 88, "quantizedmaxpool1d": 88, "maxpool2d": 88, "quantizedmaxpool2d": 88, "maxpool3d": 88, "quantizedmaxpool3d": 88, "maxunpool1d": 88, "quantizedmaxunpool1d": 88, "maxunpool2d": 88, "quantizedmaxunpool2d": 88, "maxunpool3d": 88, "quantizedmaxunpool3d": 88, "mish": 88, "quantizedmish": 88, "quantizedprelu": 88, "pixelshuffl": 88, "quantizedpixelshuffl": 88, "pixelunshuffl": 88, "quantizedpixelunshuffl": 88, "rrelu": 88, "quantizedrrelu": 88, "quantizedrelu6": 88, "reflectionpad1d": 88, "quantizedreflectionpad1d": 88, "reflectionpad2d": 88, "quantizedreflectionpad2d": 88, "replicationpad1d": 88, "quantizedreplicationpad1d": 88, "replicationpad2d": 88, "quantizedreplicationpad2d": 88, "replicationpad3d": 88, "quantizedreplicationpad3d": 88, "selu": 88, "quantizedselu": 88, "silu": 88, "quantizedsilu": 88, "quantizedsigmoid": 88, "softmax2d": 88, "quantizedsoftmax2d": 88, "softmin": 88, "quantizedsoftmin": 88, "quantizedsoftplu": 88, 
"softshrink": 88, "quantizedsoftshrink": 88, "softsign": 88, "quantizedsoftsign": 88, "tanh": 88, "quantizedtanh": 88, "tanhshrink": 88, "quantizedtanhshrink": 88, "quantizedthreshold": 88, "unflatten": 88, "quantizedunflatten": 88, "unfold": 88, "quantizedunfold": 88, "upsampl": [88, 101], "quantizedupsampl": 88, "upsamplingbilinear2d": 88, "quantizedupsamplingbilinear2d": 88, "upsamplingnearest2d": 88, "quantizedupsamplingnearest2d": 88, "zeropad2d": 88, "quantizedzeropad2d": 88, "bceloss": 88, "quantizedbceloss": 88, "bcewithlogitsloss": 88, "quantizedbcewithlogitsloss": 88, "bilinear": [88, 101], "quantizedbilinear": 88, "ctcloss": 88, "quantizedctcloss": 88, "cosinesimilar": 88, "quantizedcosinesimilar": 88, "crossentropyloss": [88, 90], "quantizedcrossentropyloss": 88, "hingeembeddingloss": 88, "quantizedhingeembeddingloss": 88, "huberloss": 88, "quantizedhuberloss": 88, "kldivloss": 88, "quantizedkldivloss": 88, "l1loss": 88, "quantizedl1loss": 88, "mseloss": 88, "quantizedmseloss": 88, "multilabelmarginloss": 88, "quantizedmultilabelmarginloss": 88, "multilabelsoftmarginloss": 88, "quantizedmultilabelsoftmarginloss": 88, "multimarginloss": 88, "quantizedmultimarginloss": 88, "nllloss": 88, "quantizednllloss": 88, "nllloss2d": 88, "quantizednllloss2d": 88, "pairwisedist": 88, "quantizedpairwisedist": 88, "poissonnllloss": 88, "quantizedpoissonnllloss": 88, "smoothl1loss": 88, "quantizedsmoothl1loss": 88, "softmarginloss": 88, "quantizedsoftmarginloss": 88, "cosineembeddingloss": 88, "quantizedcosineembeddingloss": 88, "gaussiannllloss": 88, "quantizedgaussiannllloss": 88, "marginrankingloss": 88, "quantizedmarginrankingloss": 88, "tripletmarginloss": 88, "quantizedtripletmarginloss": 88, "tripletmarginwithdistanceloss": 88, "quantizedtripletmarginwithdistanceloss": 88, "quantizedembed": 88, "embeddingbag": 88, "quantizedembeddingbag": 88, "gru": [88, 109], "quantizedgru": 88, "rnn": [88, 109], "quantizedrnn": 88, "grucel": 88, "quantizedgrucel": 88, "rnncell": 88, "quantizedrnncel": 88, "lstm": [88, 109], "quantizedlstm": 88, "lstmcell": 88, "quantizedlstmcel": 88, "cumsum": 88, "quantizedcumsum": 88, "sin": 88, "quantizedsin": 88, "co": 88, "quantizedco": 88, "rsqrt": 88, "quantizedrsqrt": 88, "quantizedreshap": 88, "matmul": [88, 109], "quantizedmatmul": 88, "subtract": 88, "quantizedsubtract": 88, "quantizeddivid": 88, "bmm": 88, "quantizedbmm": 88, "baddbmm": 88, "quantizedbaddbmm": 88, "addmm": 88, "quantizedaddmm": 88, "concat": [88, 109], "quantizedconcat": 88, "allow_overwit": 89, "get_encod": 89, "get_legacy_encod": 89, "register_quantization_paramet": 89, "set_legacy_encod": 89, "tutori": 90, "meant": 90, "clearli": 90, "cifar10_train_data": 90, "fashionmnist": 90, "cifar10": 90, "cifar10_test_data": 90, "test_load": 90, "bn_1": 90, "bn_2": 90, "establish": 90, "send": 90, "batch_idx": 90, "zero_grad": 90, "fp_accuraci": 90, "91": 90, "70999908447266": 90, "coupl": 90, "care": [90, 100], "conform": 90, "incorrectli": 90, "thankfulli": 90, "fp_accuracy_prepar": 90, "2024": 90, "07": 90, "747": 90, "806": 90, "module_relu": 90, "module_relu_1": 90, "module_softmax": 90, "12544": 90, "getattr_1": 90, "getitem": 90, "graph_modul": 90, "print_read": 90, "passthrough": 90, "previous": 90, "theoret": 90, "idx": 90, "quantized_accuraci": 90, "1500015258789": 90, "advanc": 90, "post_qat_accuraci": 90, "92": 90, "05333709716797": 90, "happi": 90, "export_path": 90, "model_nam": 90, "fashion_mnist_model": 90, "sent": 90, "terminologi": 91, "abbrevi": 91, "bnf": 91, "hbf": 91, 
"often": [91, 92, 100, 105], "16x": 91, "caution": 91, "soon": 92, "prone": 92, "preprat": 92, "preceed": 93, "decreas": 93, "main": [93, 106, 109, 112], "oscil": 93, "cp": [94, 95, 97, 114], "magnitud": 94, "connect": [94, 99, 100, 113], "residu": 94, "regress": 94, "outlin": 95, "svd": [95, 96, 97, 99, 100, 109], "spatial": [95, 96, 97, 99, 100, 109], "accumul": [95, 100], "uncompress": 95, "latenc": 95, "bandwidth": 95, "websit": 95, "half": 95, "unknown": 95, "ssvd": 95, "f0": 95, "75": 95, "2b": 95, "revisit": 95, "close": [95, 108], "jupyt": 96, "familiar": 96, "teach": 96, "viewabl": 96, "metapackag": 96, "grant": 96, "admin": 96, "privileg": 96, "ip": 96, "browser": 96, "past": 96, "mkdir": 96, "cd": 96, "repositori": 96, "git": 96, "www": 96, "navig": 96, "launch": 96, "ipynb": 96, "assess": 97, "column": 97, "unmodifi": 97, "omit": [97, 106], "account": [97, 102, 105, 107], "drastic": 97, "redund": 98, "dilat": 99, "guidebook": 100, "greedi": [100, 111], "decompos": [100, 110, 113], "fc": [100, 113], "sharp": 100, "decai": 100, "slow": 100, "searcher": 100, "wors": 100, "strike": 100, "balanc": 100, "seem": [100, 102], "xiangyu": 100, "zhang": 100, "jianhua": 100, "zou": 100, "kaim": 100, "he": 100, "jian": 100, "sun": 100, "deep": 100, "transact": 100, "pattern": 100, "intellig": 100, "vol": 100, "pp": 100, "1943": 100, "1955": 100, "oct": 100, "2016": 100, "yihui": 100, "confer": [100, 103], "vision": [100, 103], "venic": 100, "2017": 100, "1398": 100, "1406": 100, "jaderberg": 100, "andrea": 100, "vedaldi": 100, "andrew": 100, "zisserman": 100, "british": 100, "jan": 100, "2014": 100, "andrei": 100, "kuzmin": 100, "marku": [100, 103], "nagel": [100, 103], "saurabh": 100, "pitr": 100, "sandeep": 100, "pendyam": 100, "tijmen": [100, 103], "blankevoort": [100, 103], "taxonomi": 100, "primit": 101, "sure": 101, "slice": 101, "align_corn": 101, "deconvolut": 101, "dconvolut": 101, "deeplabv3": 101, "address": [101, 107, 111], "briefli": 102, "suffici": [102, 104, 105, 108], "fast": 102, "easi": 102, "robust": 102, "_aimet": 102, "prep": 102, "complainc": 102, "align": 102, "retri": 102, "ref": 102, "satisfactori": [102, 107], "onto": 102, "pb": 102, "trial": [102, 107], "diagnost": 102, "becom": 103, "paper": 103, "surround": 103, "big": 103, "discrep": 103, "wide": 103, "significantli": 103, "quantizaion": 103, "analyt": [103, 111], "bottleneck": [103, 107], "hybrid": 103, "approach": [103, 108], "mart": 103, "van": 103, "baalen": 103, "seoul": 103, "octob": 103, "area": 104, "situat": 104, "pinpoint": 104, "culprit": 104, "squar": [104, 108], "monitor": 104, "contribut": [104, 107], "read": 104, "presenc": 105, "hyperparamet": 105, "converg": 105, "six": 106, "turn": 106, "empti": 106, "outsid": 106, "strict": [106, 108], "unsign": [106, 108], "throughout": [106, 112], "convent": 106, "member": 106, "insight": [107, 111, 112], "underperform": 107, "chart": 107, "confid": 107, "toward": 107, "uneven": 107, "global": 107, "inner": 107, "bert": 107, "reveal": 107, "resort": 107, "revert": 107, "mitig": 108, "ingest": 108, "000": 108, "de": 108, "hook": 108, "intercept": 108, "four": 108, "textrm": 108, "dfrac": 108, "elimin": 108, "induc": 108, "signal": 108, "satur": 108, "erro": 108, "alongsid": 108, "settabl": 108, "slim": 109, "backslash": 109, "io": 109, "user_guid": 109, "api_doc": 109, "quantizablemultiheadattent": 109, "kyuykim": 109, "mangal": 109, "geunle": 109, "correctli": 109, "klhsieh": 109, "akhobar": 109, "ashvkuma": 109, "fp16": 109, "stand": 109, "adaptiveround": 
109, "recurr": 109, "\ud835\udc5a": [110, 113], "\ud835\udc5b": [110, 113], "\u210e": [110, 113], "\ud835\udc64": [110, 113], "\ud835\udc58": [110, 113], "degre": [110, 113], "augment": 111, "progress": [111, 112], "computation": [111, 112], "arrang": 111, "websocket": 111, "listen": 111, "5006": 111, "lot": 112, "term": 113, "lose": 114, "volum": 114, "hxwx8": 114, "hxwx5": 114, "propag": 114, "green": 114, "side": 114, "pink": 114, "color": 114, "orang": 114}, "objects": {"aimet_common.bias_correction": [[55, 0, 1, "", "ConvBnInfoType"]], "aimet_common.defs": [[55, 0, 1, "", "ActivationType"], [59, 0, 1, "", "CallbackFunc"], [30, 0, 1, "", "CompressionScheme"], [30, 0, 1, "", "CostMetric"], [56, 0, 1, "", "GreedySelectionParameters"], [68, 0, 1, "", "QuantScheme"]], "aimet_common.defs.ActivationType": [[55, 1, 1, "", "no_activation"], [55, 1, 1, "", "relu"], [55, 1, 1, "", "relu6"]], "aimet_common.defs.CompressionScheme": [[30, 1, 1, "", "channel_pruning"], [30, 1, 1, "", "spatial_svd"], [30, 1, 1, "", "weight_svd"]], "aimet_common.defs.CostMetric": [[30, 1, 1, "", "mac"], [30, 1, 1, "", "memory"]], "aimet_common.defs.QuantScheme": [[68, 1, 1, "", "post_training_percentile"], [68, 1, 1, "", "post_training_tf"], [68, 1, 1, "", "post_training_tf_enhanced"], [68, 1, 1, "", "training_range_learning_with_tf_enhanced_init"], [68, 1, 1, "", "training_range_learning_with_tf_init"]], "aimet_common.utils": [[66, 0, 1, "", "CallbackFunc"]], "aimet_onnx.adaround.adaround_weight.Adaround": [[41, 2, 1, "", "apply_adaround"]], "aimet_onnx.adaround.adaround_weight": [[41, 0, 1, "", "AdaroundParameters"]], "aimet_onnx.amp.mixed_precision_algo": [[45, 0, 1, "", "EvalCallbackFactory"]], "aimet_onnx.amp.mixed_precision_algo.EvalCallbackFactory": [[45, 3, 1, "", "sqnr"]], "aimet_onnx.amp.quantizer_groups": [[45, 0, 1, "", "QuantizerGroup"]], "aimet_onnx.amp.quantizer_groups.QuantizerGroup": [[45, 3, 1, "", "get_activation_quantizers"], [45, 3, 1, "", "get_active_quantizers"], [45, 3, 1, "", "get_candidate"], [45, 3, 1, "", "get_param_quantizers"], [45, 3, 1, "", "set_quantizers_to_candidate"], [45, 3, 1, "", "to_list"]], "aimet_onnx.auto_quant_v2": [[42, 0, 1, "", "AutoQuant"]], "aimet_onnx.auto_quant_v2.AutoQuant": [[42, 3, 1, "", "get_quant_scheme_candidates"], [42, 3, 1, "", "optimize"], [42, 3, 1, "", "run_inference"], [42, 3, 1, "", "set_adaround_params"], [42, 3, 1, "", "set_quant_scheme_candidates"]], "aimet_onnx.cross_layer_equalization": [[43, 2, 1, "", "equalize_model"]], "aimet_onnx.layer_output_utils": [[44, 0, 1, "", "LayerOutputUtil"]], "aimet_onnx.layer_output_utils.LayerOutputUtil": [[44, 3, 1, "", "generate_layer_outputs"]], "aimet_onnx.mixed_precision": [[45, 2, 1, "", "choose_mixed_precision"]], "aimet_onnx.quant_analyzer": [[46, 0, 1, "", "QuantAnalyzer"]], "aimet_onnx.quant_analyzer.QuantAnalyzer": [[46, 3, 1, "", "analyze"], [46, 3, 1, "", "check_model_sensitivity_to_quantization"], [46, 3, 1, "", "create_quantsim_and_encodings"], [46, 3, 1, "", "enable_per_layer_mse_loss"], [46, 3, 1, "", "export_per_layer_encoding_min_max_range"], [46, 3, 1, "", "export_per_layer_mse_loss"], [46, 3, 1, "", "export_per_layer_stats_histogram"], [46, 3, 1, "", "perform_per_layer_analysis_by_disabling_quantizers"], [46, 3, 1, "", "perform_per_layer_analysis_by_enabling_quantizers"]], "aimet_onnx.quantsim": [[48, 0, 1, "", "QuantizationSimModel"]], "aimet_onnx.quantsim.QuantizationSimModel": [[48, 3, 1, "", "compute_encodings"], [48, 3, 1, "", "export"]], "aimet_tensorflow.keras.adaround_weight.Adaround": 
[[28, 2, 1, "", "apply_adaround"]], "aimet_tensorflow.keras.adaround_weight": [[28, 0, 1, "", "AdaroundParameters"]], "aimet_tensorflow.keras.batch_norm_fold": [[36, 2, 1, "", "fold_all_batch_norms"], [29, 2, 1, "", "fold_all_batch_norms_to_scale"], [36, 2, 1, "", "fold_given_batch_norms"]], "aimet_tensorflow.keras.bn_reestimation": [[29, 2, 1, "", "reestimate_bn_stats"]], "aimet_tensorflow.keras.compress": [[30, 0, 1, "", "ModelCompressor"]], "aimet_tensorflow.keras.compress.ModelCompressor": [[30, 3, 1, "", "compress_model"]], "aimet_tensorflow.keras.cross_layer_equalization": [[36, 0, 1, "", "ClsSetInfo"], [31, 2, 1, "", "equalize_model"]], "aimet_tensorflow.keras.cross_layer_equalization.ClsSetInfo": [[36, 0, 1, "", "ClsSetLayerPairInfo"]], "aimet_tensorflow.keras.cross_layer_equalization.CrossLayerScaling": [[36, 2, 1, "", "scale_cls_sets"], [36, 2, 1, "", "scale_model"]], "aimet_tensorflow.keras.cross_layer_equalization.HighBiasFold": [[36, 2, 1, "id0", "bias_fold"]], "aimet_tensorflow.keras.defs": [[30, 0, 1, "", "ModuleCompRatioPair"], [30, 0, 1, "", "SpatialSvdParameters"]], "aimet_tensorflow.keras.defs.SpatialSvdParameters": [[30, 0, 1, "", "AutoModeParams"], [30, 0, 1, "", "ManualModeParams"], [30, 0, 1, "", "Mode"]], "aimet_tensorflow.keras.defs.SpatialSvdParameters.Mode": [[30, 1, 1, "", "auto"], [30, 1, 1, "", "manual"]], "aimet_tensorflow.keras.layer_output_utils": [[32, 0, 1, "", "LayerOutputUtil"]], "aimet_tensorflow.keras.layer_output_utils.LayerOutputUtil": [[32, 3, 1, "", "generate_layer_outputs"]], "aimet_tensorflow.keras.mixed_precision": [[33, 2, 1, "", "choose_fast_mixed_precision"], [33, 2, 1, "", "choose_mixed_precision"]], "aimet_tensorflow.keras.model_preparer": [[35, 2, 1, "", "prepare_model"]], "aimet_tensorflow.keras.quant_analyzer": [[37, 0, 1, "", "QuantAnalyzer"]], "aimet_tensorflow.keras.quant_analyzer.QuantAnalyzer": [[37, 3, 1, "", "analyze"]], "aimet_tensorflow.keras.quantsim": [[39, 0, 1, "", "QuantizationSimModel"]], "aimet_tensorflow.keras.quantsim.QuantizationSimModel": [[39, 3, 1, "", "compute_encodings"], [39, 3, 1, "", "export"]], "aimet_torch.amp.mixed_precision_algo": [[59, 0, 1, "", "EvalCallbackFactory"]], "aimet_torch.amp.mixed_precision_algo.EvalCallbackFactory": [[59, 3, 1, "", "sqnr"]], "aimet_torch.amp.quantizer_groups": [[59, 0, 1, "", "QuantizerGroup"]], "aimet_torch.amp.quantizer_groups.QuantizerGroup": [[59, 3, 1, "", "get_active_quantizers"], [59, 3, 1, "", "get_candidate"], [59, 3, 1, "", "get_input_quantizer_modules"], [59, 3, 1, "", "set_quantizers_to_candidate"], [59, 3, 1, "", "to_list"]], "aimet_torch.arch_checker.arch_checker.ArchChecker": [[52, 2, 1, "", "check_model_arch"]], "aimet_torch.batch_norm_fold": [[65, 2, 1, "", "fold_all_batch_norms"], [54, 2, 1, "", "fold_all_batch_norms_to_scale"], [65, 2, 1, "", "fold_given_batch_norms"]], "aimet_torch.bias_correction": [[55, 2, 1, "", "correct_bias"]], "aimet_torch.bn_reestimation": [[54, 2, 1, "", "reestimate_bn_stats"]], "aimet_torch.compress": [[56, 0, 1, "", "ModelCompressor"]], "aimet_torch.compress.ModelCompressor": [[56, 3, 1, "", "compress_model"]], "aimet_torch.cross_layer_equalization": [[65, 0, 1, "", "ClsSetInfo"], [57, 2, 1, "", "equalize_model"]], "aimet_torch.cross_layer_equalization.ClsSetInfo": [[65, 0, 1, "", "ClsSetLayerPairInfo"]], "aimet_torch.cross_layer_equalization.CrossLayerScaling": [[65, 2, 1, "", "scale_cls_sets"], [65, 2, 1, "", "scale_model"]], "aimet_torch.cross_layer_equalization.HighBiasFold": [[65, 2, 1, "id0", "bias_fold"]], 
"aimet_torch.defs": [[56, 0, 1, "", "ChannelPruningParameters"], [56, 0, 1, "", "ModuleCompRatioPair"], [56, 0, 1, "", "SpatialSvdParameters"], [56, 0, 1, "", "WeightSvdParameters"]], "aimet_torch.defs.ChannelPruningParameters": [[56, 0, 1, "", "AutoModeParams"], [56, 0, 1, "", "ManualModeParams"], [56, 0, 1, "", "Mode"]], "aimet_torch.defs.ChannelPruningParameters.Mode": [[56, 1, 1, "", "auto"], [56, 1, 1, "", "manual"]], "aimet_torch.defs.SpatialSvdParameters": [[56, 0, 1, "", "AutoModeParams"], [56, 0, 1, "", "ManualModeParams"], [56, 0, 1, "", "Mode"]], "aimet_torch.defs.SpatialSvdParameters.Mode": [[56, 1, 1, "", "auto"], [56, 1, 1, "", "manual"]], "aimet_torch.defs.WeightSvdParameters": [[56, 0, 1, "", "AutoModeParams"], [56, 0, 1, "", "ManualModeParams"], [56, 0, 1, "", "Mode"]], "aimet_torch.defs.WeightSvdParameters.Mode": [[56, 1, 1, "", "auto"], [56, 1, 1, "", "manual"]], "aimet_torch.gptvq.defs": [[86, 0, 1, "", "GPTVQParameters"]], "aimet_torch.gptvq.gptvq_weight.GPTVQ": [[86, 2, 1, "", "apply_gptvq"]], "aimet_torch.layer_output_utils": [[58, 0, 1, "", "LayerOutputUtil"], [58, 0, 1, "", "NamingScheme"]], "aimet_torch.layer_output_utils.LayerOutputUtil": [[58, 3, 1, "", "generate_layer_outputs"]], "aimet_torch.layer_output_utils.NamingScheme": [[58, 1, 1, "", "ONNX"], [58, 1, 1, "", "PYTORCH"], [58, 1, 1, "", "TORCHSCRIPT"]], "aimet_torch.mixed_precision": [[59, 2, 1, "", "choose_mixed_precision"]], "aimet_torch.model_preparer": [[61, 2, 1, "", "prepare_model"]], "aimet_torch.nn.modules.custom": [[69, 0, 1, "", "ScatterDense"], [69, 0, 1, "", "SparseTensorWrapper"]], "aimet_torch.peft": [[64, 0, 1, "", "AdapterMetaData"], [64, 0, 1, "", "PeftQuantUtils"], [64, 3, 1, "", "replace_lora_layers_with_quantizable_layers"], [64, 3, 1, "", "track_lora_meta_data"]], "aimet_torch.peft.PeftQuantUtils": [[64, 3, 1, "", "disable_lora_adapters"], [64, 3, 1, "", "enable_adapter_and_load_weights"], [64, 3, 1, "", "export_adapter_weights"], [64, 3, 1, "", "freeze_base_model"], [64, 3, 1, "", "freeze_base_model_activation_quantizers"], [64, 3, 1, "", "freeze_base_model_param_quantizers"], [64, 3, 1, "", "get_fp_lora_layer"], [64, 3, 1, "", "get_quantized_lora_layer"], [64, 3, 1, "", "quantize_lora_scale_with_fixed_range"], [64, 3, 1, "", "set_bitwidth_for_lora_adapters"]], "aimet_torch.v1.adaround.adaround_weight.Adaround": [[51, 2, 1, "", "apply_adaround"]], "aimet_torch.v1.adaround.adaround_weight": [[51, 0, 1, "", "AdaroundParameters"]], "aimet_torch.v1.auto_quant": [[53, 0, 1, "", "AutoQuant"]], "aimet_torch.v1.quant_analyzer": [[66, 0, 1, "", "QuantAnalyzer"]], "aimet_torch.v1.quant_analyzer.QuantAnalyzer": [[66, 3, 1, "", "analyze"], [66, 3, 1, "", "check_model_sensitivity_to_quantization"], [66, 3, 1, "", "enable_per_layer_mse_loss"], [66, 3, 1, "", "export_per_layer_encoding_min_max_range"], [66, 3, 1, "", "export_per_layer_mse_loss"], [66, 3, 1, "", "export_per_layer_stats_histogram"], [66, 3, 1, "", "perform_per_layer_analysis_by_disabling_quant_wrappers"], [66, 3, 1, "", "perform_per_layer_analysis_by_enabling_quant_wrappers"]], "aimet_torch.v1.quantsim": [[55, 0, 1, "", "QuantParams"], [68, 0, 1, "", "QuantizationSimModel"], [68, 3, 1, "", "load_checkpoint"], [68, 3, 1, "", "save_checkpoint"]], "aimet_torch.v1.quantsim.QuantizationSimModel": [[68, 3, 1, "", "compute_encodings"], [68, 3, 1, "", "export"]], "aimet_torch.v2.nn": [[88, 0, 1, "", "QuantizationMixin"]], "aimet_torch.v2.nn.QuantizationMixin": [[88, 3, 1, "", "__quant_init__"], [88, 3, 1, "", "compute_encodings"], [88, 3, 1, 
"", "forward"], [77, 3, 1, "", "from_module"], [77, 3, 1, "", "get_default_kernel"], [77, 3, 1, "", "get_kernel"], [77, 3, 1, "", "implements"], [88, 1, 1, "", "input_quantizers"], [88, 1, 1, "", "output_quantizers"], [88, 1, 1, "", "param_quantizers"], [77, 3, 1, "", "set_default_kernel"], [77, 3, 1, "", "set_kernel"]], "aimet_torch.v2.quantization": [[78, 4, 0, "-", "affine"], [80, 4, 0, "-", "float"]], "aimet_torch.v2.quantization.affine": [[78, 0, 1, "", "Quantize"], [78, 0, 1, "", "QuantizeDequantize"], [78, 2, 1, "", "dequantize"], [78, 2, 1, "", "quantize"], [78, 2, 1, "", "quantize_dequantize"]], "aimet_torch.v2.quantization.affine.quantizer": [[89, 0, 1, "", "Quantize"], [89, 0, 1, "", "QuantizeDequantize"], [89, 0, 1, "", "QuantizerBase"]], "aimet_torch.v2.quantization.affine.quantizer.Quantize": [[89, 3, 1, "", "forward"]], "aimet_torch.v2.quantization.affine.quantizer.QuantizeDequantize": [[89, 3, 1, "", "forward"]], "aimet_torch.v2.quantization.affine.quantizer.QuantizerBase": [[89, 3, 1, "", "allow_overwrite"], [89, 3, 1, "", "compute_encodings"], [89, 3, 1, "", "get_encoding"], [89, 3, 1, "", "get_legacy_encodings"], [89, 3, 1, "", "is_initialized"], [89, 3, 1, "", "register_quantization_parameter"], [89, 3, 1, "", "set_legacy_encodings"]], "aimet_torch.v2.quantization.encoding_analyzer": [[84, 0, 1, "", "EncodingAnalyzer"], [84, 0, 1, "", "MinMaxEncodingAnalyzer"], [84, 0, 1, "", "PercentileEncodingAnalyzer"], [84, 0, 1, "", "SqnrEncodingAnalyzer"]], "aimet_torch.v2.quantization.encoding_analyzer.EncodingAnalyzer": [[84, 3, 1, "", "compute_encodings"], [84, 3, 1, "", "reset_stats"], [84, 3, 1, "", "update_stats"]], "aimet_torch.v2.quantization.float": [[80, 0, 1, "", "FloatQuantizeDequantize"], [80, 0, 1, "", "QuantizeDequantize"]], "aimet_torch.v2.quantization.tensor": [[81, 0, 1, "", "DequantizedTensor"], [81, 0, 1, "", "QuantizedTensor"]], "aimet_torch.v2.quantization.tensor.DequantizedTensor": [[81, 3, 1, "", "dequantize"], [81, 3, 1, "", "quantize"], [81, 3, 1, "", "quantized_repr"]], "aimet_torch.v2.quantization.tensor.QuantizedTensor": [[81, 3, 1, "", "dequantize"], [81, 3, 1, "", "quantize"], [81, 3, 1, "", "quantized_repr"]], "aimet_torch.v2.quantsim.config_utils": [[83, 2, 1, "", "set_activation_quantizers_to_float"], [83, 2, 1, "", "set_blockwise_quantization_for_weights"], [83, 2, 1, "", "set_grouped_blockwise_quantization_for_weights"]], "aimet_torch.v2.visualization_tools": [[82, 2, 1, "", "visualize_stats"]], "aimet_torch.visualize_model": [[71, 2, 1, "", "visualize_changes_after_optimization"], [71, 2, 1, "", "visualize_relative_weight_ranges_to_identify_problematic_layers"], [71, 2, 1, "", "visualize_weight_ranges"]], "aimet_torch.visualize_serialized_data": [[70, 0, 1, "", "VisualizeCompression"]], "aimet_torch.visualize_serialized_data.VisualizeCompression": [[70, 3, 1, "", "display_comp_ratio_plot"], [70, 3, 1, "", "display_eval_scores"]]}, "objtypes": {"0": "py:class", "1": "py:attribute", "2": "py:function", "3": "py:method", "4": "py:module"}, "objnames": {"0": ["py", "class", "Python class"], "1": ["py", "attribute", "Python attribute"], "2": ["py", "function", "Python function"], "3": ["py", "method", "Python method"], "4": ["py", "module", "Python module"]}, "titleterms": {"automat": [0, 4, 18], "mix": [0, 4, 18, 33, 45, 59], "precis": [0, 4, 18, 33, 45, 59], "amp": [0, 4, 18, 33], "overal": [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 90], "flow": [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 
15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 64, 90, 103], "what": [0, 4, 12, 13, 18, 21, 25, 67], "thi": [0, 4, 12, 13, 18, 21, 25], "notebook": [0, 4, 12, 13, 18, 21, 25, 28, 29, 31, 51, 53, 54, 57, 66, 68, 96], "i": [0, 4, 12, 13, 18, 21, 25], "dataset": [0, 1, 2, 3, 4, 5, 6, 7, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25], "1": [0, 1, 2, 3, 4, 5, 6, 7, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 49, 55, 75, 90, 96, 109], "exampl": [0, 1, 2, 3, 4, 5, 6, 7, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 21, 22, 23, 24, 25, 28, 29, 30, 31, 32, 33, 35, 36, 37, 39, 41, 42, 43, 44, 45, 46, 48, 49, 51, 53, 54, 55, 56, 57, 58, 59, 61, 65, 66, 68, 69, 70, 71, 72, 86, 87, 96], "evalu": [0, 1, 2, 3, 4, 5, 7, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 21, 22, 23, 24, 25], "pipelin": [0, 1, 2, 3, 5, 6, 7, 12, 13, 14, 15, 16, 17, 18, 19, 21, 22, 23, 24, 25], "2": [0, 1, 2, 3, 4, 5, 6, 7, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 33, 49, 55, 75, 90, 96, 109], "convert": [0, 1, 2, 3, 9], "an": [0, 1, 2, 3], "fp32": [0, 1, 2, 3, 4, 5, 6, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24], "pytorch": [0, 1, 2, 3, 49, 50, 51, 53, 54, 55, 56, 57, 58, 59, 60, 63, 65, 66, 67, 68, 75, 87, 90, 101, 102, 112], "model": [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 34, 35, 60, 61, 62, 87, 90, 98, 100, 101, 102], "onnx": [0, 1, 2, 3, 40, 41, 42, 43, 44, 45, 46, 47, 48, 69, 75], "simplifi": [0, 1, 2, 3], "baselin": [0, 1, 2, 3, 4, 5, 6, 10, 11, 13, 14, 15, 16, 17, 18, 19, 22, 23, 24], "accuraci": [0, 1, 2, 3, 4, 5, 6, 10, 11, 13, 14, 15, 16, 17, 18, 19, 22, 23, 24], "3": [0, 1, 2, 3, 4, 5, 6, 7, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 49, 96], "creat": [0, 1, 2, 3, 4, 5, 7, 9, 10, 11, 13, 14, 18, 19, 21, 22, 23, 24], "quantiz": [0, 1, 2, 3, 4, 5, 7, 8, 10, 11, 12, 13, 14, 18, 19, 21, 22, 23, 24, 25, 33, 38, 39, 45, 47, 48, 55, 59, 67, 68, 71, 72, 78, 80, 81, 83, 85, 88, 89, 90, 102, 103, 105, 106, 107, 108, 112], "simul": [0, 1, 2, 3, 4, 5, 10, 11, 13, 14, 18, 19, 21, 22, 23, 24, 106, 108], "fold": [0, 1, 2, 3, 5, 7, 10, 11, 13, 14, 18, 19, 21, 22, 23, 24, 90], "batch": [0, 1, 2, 3, 5, 10, 11, 13, 14, 18, 19, 22, 23, 24, 93], "normal": [0, 1, 3, 5, 10, 11, 13, 14, 18, 19, 23, 24], "layer": [0, 1, 2, 3, 5, 7, 9, 10, 11, 12, 13, 14, 18, 19, 21, 22, 23, 24, 25, 31, 32, 36, 43, 44, 57, 58, 65, 97, 100, 103], "sim": [0, 1, 2, 3, 5, 10, 11, 14, 18, 19, 21, 22, 23, 24, 39, 48, 68], "comput": [0, 4, 18, 88], "encod": [0, 4, 12, 18, 25, 49, 84, 88, 108], "4": [0, 1, 2, 4, 5, 6, 7, 9, 10, 11, 13, 14, 18, 19, 21, 22, 23, 24, 49], "run": [0, 4, 18, 20, 46, 66, 96], "algorithm": [0, 4, 18], "defin": [0, 6, 7, 18, 20], "callback": [0, 7, 18], "function": [0, 6, 7, 9, 18, 20, 78], "paramet": [0, 4, 18, 28, 30, 41, 51, 56, 86, 91, 108], "call": [0, 4, 18], "api": [0, 4, 18, 26, 27, 28, 29, 30, 31, 32, 33, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 61, 64, 65, 66, 67, 68, 69, 70, 71, 83, 86, 87, 88, 89, 91, 92, 93, 103, 104, 108], "adapt": [1, 5, 19, 41, 51], "round": [1, 5, 19, 41, 51, 100], "adaround": [1, 5, 13, 19, 28, 41, 51, 91], "instanti": [1, 2, 3, 4, 5, 6, 7, 10, 11, 14, 15, 16, 17, 19, 22, 23, 24], "train": [1, 2, 3, 5, 6, 7, 8, 10, 11, 12, 13, 14, 15, 16, 17, 19, 21, 22, 23, 24, 25, 68, 85, 90, 102, 103, 105], "valid": [1, 2, 3, 5, 6, 14, 15, 16, 17, 19, 22, 23, 24, 62], "determin": [1, 2, 3, 5, 6, 10, 11, 13, 14, 19, 22, 
23, 24, 108], "appli": [1, 2, 5, 6, 12, 13, 14, 19, 22, 25], "next": [1, 5, 10, 11, 14, 15, 16, 17, 19, 20, 22, 23, 24], "step": [1, 5, 10, 11, 14, 15, 16, 17, 19, 20, 22, 23, 24, 75], "For": [1, 2, 3, 5, 6, 10, 11, 14, 15, 16, 17, 19, 20, 22, 23, 24], "more": [1, 2, 3, 5, 6, 10, 11, 14, 15, 16, 17, 19, 20, 22, 23, 24, 98], "inform": [1, 2, 3, 5, 6, 10, 11, 14, 15, 16, 17, 19, 20, 22, 23, 24, 98], "cross": [2, 14, 22, 31, 36, 43, 57, 65, 103], "equal": [2, 14, 22, 31, 36, 43, 57, 65, 103], "norm": [2, 22, 93], "cle": [2, 14, 22, 36], "method": [4, 36], "load": [4, 5, 6, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25], "find": 4, "fake": 4, "op": 4, "insert": 4, "regular": [4, 33], "fast": [4, 33], "0": [4, 33, 49, 109], "summari": [4, 7, 9, 13, 21], "get": [5, 10, 11, 13, 14, 15, 16, 17, 18, 19, 22, 23, 24, 87], "score": [5, 10, 11, 13, 14, 15, 16, 17, 18, 19, 22, 23, 24], "autoqu": [6, 20, 42, 53, 92], "pretrain": [6, 12, 20], "constant": [6, 7, 20], "helper": [6, 20, 36], "5": [6, 7, 21, 49], "awar": [7, 8, 10, 11, 21, 23, 24, 68, 90, 105], "batchnorm": [7, 21, 29, 54, 90], "re": [7, 21, 29, 54, 93], "estim": [7, 21, 29, 54, 93], "prepar": [7, 9, 35, 61, 90], "kera": [7, 8, 9], "quantizationsim": 7, "perform": [7, 10, 11, 21, 23, 24, 36], "qat": [7, 10, 11, 21, 23, 24, 68, 105], "export": [7, 21, 69, 83, 90], "transform": 8, "subclass": 9, "show": 9, "similar": 9, "differ": 9, "between": 9, "origin": 9, "discuss": 9, "limit": [9, 29, 35, 61], "rang": [11, 12, 24, 25], "learn": [11, 24], "quant": [12, 25, 37, 46, 66], "analyz": [12, 25, 37, 46, 66, 84], "quantanalyz": [12, 25, 104], "per": [12, 13, 25, 97, 100], "analysi": [12, 25, 102, 104], "enabl": [12, 25], "disabl": [12, 25], "wrapper": [12, 25], "min": [12, 25], "max": [12, 25], "pdf": [12, 25], "statist": [12, 21, 25], "mse": [12, 25], "loss": [12, 25], "quantsim": [13, 14, 90, 108], "channel": [13, 15, 17, 56, 94], "pcq": 13, "compress": [15, 16, 17, 30, 56, 70, 95, 97, 100, 111], "us": [15, 16, 17, 36, 74, 91, 100, 102], "prune": [15, 17, 56, 94], "fine": [15, 16, 17, 90, 100], "tune": [15, 16, 17, 90, 100], "spatial": [16, 17, 30, 56, 110], "svd": [16, 17, 30, 56, 110, 113], "reestim": [21, 54], "bia": [22, 55], "correct": [22, 55], "techniqu": [22, 100, 103], "bc": 22, "welcom": 26, "ai": [26, 87, 98], "effici": [26, 87, 98], "toolkit": [26, 87, 98], "doc": 26, "indic": 26, "tabl": 26, "aimet": [27, 28, 29, 30, 31, 32, 33, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 50, 51, 53, 54, 55, 56, 57, 58, 59, 65, 66, 67, 68, 69, 70, 71, 73, 74, 75, 87, 91, 92, 93, 94, 95, 96, 97, 99, 100, 102, 103, 104, 105, 107, 108, 109, 110, 111, 112, 113, 114], "tensorflow": [27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 49, 75, 112], "user": [28, 31, 39, 41, 42, 43, 51, 53, 55, 57, 64, 66, 68, 98, 103], "guid": [28, 31, 39, 41, 42, 43, 51, 53, 55, 57, 66, 68, 90, 98], "link": [28, 29, 31, 39, 41, 42, 43, 51, 53, 54, 55, 57, 66, 68], "top": [28, 29, 30, 32, 33, 35, 37, 39, 41, 42, 44, 45, 46, 48, 51, 53, 54, 56, 58, 59, 61, 64, 66, 68, 70, 71, 83, 86, 88, 89], "level": [28, 29, 30, 32, 33, 35, 36, 37, 39, 41, 42, 44, 45, 46, 48, 51, 53, 54, 56, 58, 59, 61, 64, 65, 66, 68, 70, 71, 83, 86, 88, 89], "enum": [28, 51, 58, 68], "definit": [28, 30, 33, 45, 51, 56, 58, 59, 65, 68], "code": [28, 29, 30, 31, 32, 33, 35, 36, 37, 39, 41, 42, 43, 44, 45, 46, 48, 51, 53, 54, 55, 56, 57, 58, 59, 61, 65, 66, 68, 69, 70, 71, 72, 86, 96], "introduct": [29, 30, 31, 36, 43, 54, 56, 57, 65], "greedi": [30, 56, 97], "select": [30, 56, 94, 
97, 100], "configur": [30, 56, 88, 106, 108], "primit": [31, 36, 57, 65], "output": [32, 44, 58], "gener": [32, 44, 58], "group": [33, 45, 59], "callbackfunc": [33, 45, 59], "guidelin": [34, 60, 68, 91, 101], "higher": [36, 65], "lower": [36, 65], "custom": [36, 69], "datatyp": 36, "manual": 36, "mode": [36, 105], "specif": [46, 49, 66], "util": [46, 62, 66], "format": 49, "version": 49, "up": 49, "file": [49, 106], "6": 49, "architectur": 52, "checker": 52, "html": 52, "report": 52, "content": 52, "bn": [54, 93], "convbninfotyp": 55, "activationtyp": 55, "param": 55, "empir": 55, "analyt": 55, "weight": [56, 94, 113], "torch": [61, 69, 75], "fx": 61, "symbol": 61, "trace": 61, "multi": 63, "gpu": [63, 75], "support": 63, "peft": 64, "lora": 64, "clssetinfo": 65, "aimet_torch": [67, 72], "refer": [67, 100, 103], "v2": [67, 72], "": 67, "new": 67, "backward": 67, "compat": 67, "sparseconvolut": 69, "spconv": 69, "modul": [69, 72, 88], "visual": [70, 71, 82, 111, 112], "migrat": 72, "chang": 72, "process": 72, "import": 72, "quantizationsimmodel": 72, "move": 72, "from": [72, 74, 75], "quantwrapp": 72, "staticgrid": 72, "learnedgrid": 72, "affin": [72, 78], "float": [72, 80, 90], "instal": [73, 74, 75], "quick": 73, "releas": [73, 74, 75, 98, 109], "packag": [73, 74, 75], "system": 73, "requir": [73, 104], "advanc": 73, "instruct": 73, "docker": 74, "set": 74, "variant": [74, 84], "prebuilt": 74, "imag": 74, "build": 74, "local": 74, "start": [74, 87, 111], "contain": 74, "pypi": [74, 75], "environ": [74, 75], "setup": [74, 75], "prerequisit": [75, 90], "13": [75, 109], "common": 75, "debian": 75, "replac": 75, "pillow": 75, "simd": 75, "onnxruntim": 75, "post": [75, 85, 102, 103], "quantizationmixin": 77, "class": [78, 80, 81, 88], "floatquantizedequant": 79, "quantizedequant": 79, "tensor": 81, "tool": [82, 102], "blockwis": 83, "low": 83, "power": 83, "lpbq": 83, "gptvq": 86, "document": 87, "featur": [87, 95, 98, 102], "descript": [87, 104], "quickstart": 90, "point": 90, "case": [91, 100, 102], "hyper": 91, "overview": [92, 93, 97, 98, 100, 103, 104, 105, 106, 108, 111, 112, 114], "workflow": [92, 93, 102, 105, 108], "procedur": 94, "winnow": [94, 114], "reconstruct": 94, "guidebook": 95, "brows": 96, "server": [96, 111], "download": 96, "relat": 96, "ratio": [97, 100, 111], "how": [97, 114], "work": [97, 114], "explor": 97, "toc": 98, "tree": 98, "known": 99, "issu": 99, "option": 100, "rank": 100, "note": [100, 109], "faq": [100, 103], "debug": 102, "detail": 104, "recommend": 105, "structur": 106, "modifi": 106, "section": 106, "diagnost": 107, "nois": 108, "scheme": 108, "oper": 108, "22": 109, "21": 109, "20": 109, "19": 109, "py37": 109, "18": 109, "17": 109, "16": 109, "14": 109, "design": 111, "bokeh": 111, "session": 111}, "envversion": {"sphinx.domains.c": 2, "sphinx.domains.changeset": 1, "sphinx.domains.citation": 1, "sphinx.domains.cpp": 8, "sphinx.domains.index": 1, "sphinx.domains.javascript": 2, "sphinx.domains.math": 2, "sphinx.domains.python": 3, "sphinx.domains.rst": 2, "sphinx.domains.std": 2, "nbsphinx": 4, "sphinx.ext.intersphinx": 1, "sphinx.ext.viewcode": 1, "sphinx": 57}, "alltitles": {"Automatic Mixed-Precision (AMP)": [[0, "Automatic-Mixed-Precision-(AMP)"], [4, "Automatic-Mixed-Precision-(AMP)"], [18, "Automatic-Mixed-Precision-(AMP)"]], "Overall flow": [[0, "Overall-flow"], [1, "Overall-flow"], [2, "Overall-flow"], [3, "Overall-flow"], [4, "Overall-flow"], [5, "Overall-flow"], [6, "Overall-flow"], [7, "Overall-flow"], [8, "Overall-flow"], [9, 
"Overall-flow"], [10, "Overall-flow"], [11, "Overall-flow"], [12, "Overall-flow"], [13, "Overall-flow"], [14, "Overall-flow"], [15, "Overall-flow"], [16, "Overall-flow"], [17, "Overall-flow"], [18, "Overall-flow"], [19, "Overall-flow"], [20, "Overall-flow"], [21, "Overall-flow"], [22, "Overall-flow"], [23, "Overall-flow"], [24, "Overall-flow"], [25, "Overall-flow"], [90, "overall-flow"]], "What this notebook is not": [[0, "What-this-notebook-is-not"], [4, "What-this-notebook-is-not"], [12, "What-this-notebook-is-not"], [13, "What-this-notebook-is-not"], [18, "What-this-notebook-is-not"], [21, "What-this-notebook-is-not"], [25, "What-this-notebook-is-not"]], "Dataset": [[0, "Dataset"], [1, "Dataset"], [2, "Dataset"], [3, "Dataset"], [4, "Dataset"], [5, "Dataset"], [6, "Dataset"], [7, "Dataset"], [10, "Dataset"], [11, "Dataset"], [12, "Dataset"], [13, "Dataset"], [14, "Dataset"], [15, "Dataset"], [16, "Dataset"], [17, "Dataset"], [18, "Dataset"], [19, "Dataset"], [20, "Dataset"], [21, "Dataset"], [22, "Dataset"], [23, "Dataset"], [24, "Dataset"], [25, "Dataset"]], "1. Example evaluation pipeline": [[0, "1.-Example-evaluation-pipeline"], [18, "1.-Example-evaluation-pipeline"]], "2. Convert an FP32 PyTorch model to ONNX, simplify & then evaluate baseline FP32 accuracy": [[0, "2.-Convert-an-FP32-PyTorch-model-to-ONNX,-simplify-&-then-evaluate-baseline-FP32-accuracy"], [1, "2.-Convert-an-FP32-PyTorch-model-to-ONNX,-simplify-&-then-evaluate-baseline-FP32-accuracy"], [2, "2.-Convert-an-FP32-PyTorch-model-to-ONNX,-simplify-&-then-evaluate-baseline-FP32-accuracy"], [3, "2.-Convert-an-FP32-PyTorch-model-to-ONNX,-simplify-&-then-evaluate-baseline-FP32-accuracy"]], "3. Create a quantization simulation model": [[0, "3.-Create-a-quantization-simulation-model"], [18, "3.-Create-a-quantization-simulation-model"]], "Fold Batch Normalization layers": [[0, "Fold-Batch-Normalization-layers"], [1, "Fold-Batch-Normalization-layers"], [3, "Fold-Batch-Normalization-layers"], [5, "Fold-Batch-Normalization-layers"], [10, "Fold-Batch-Normalization-layers"], [11, "Fold-Batch-Normalization-layers"], [13, "Fold-Batch-Normalization-layers"], [14, "Fold-Batch-Normalization-layers"], [18, "Fold-Batch-Normalization-layers"], [19, "Fold-Batch-Normalization-layers"], [23, "Fold-Batch-Normalization-layers"], [24, "Fold-Batch-Normalization-layers"]], "Create Quantization Sim Model": [[0, "Create-Quantization-Sim-Model"], [18, "Create-Quantization-Sim-Model"], [21, "Create-Quantization-Sim-Model"]], "Compute Encodings": [[0, "Compute-Encodings"], [4, "Compute-Encodings"], [18, "Compute-Encodings"]], "4. Run AMP algorithm on the quantized model": [[0, "4.-Run-AMP-algorithm-on-the-quantized-model"], [4, "4.-Run-AMP-algorithm-on-the-quantized-model"], [18, "4.-Run-AMP-algorithm-on-the-quantized-model"]], "Define callback functions for AMP": [[0, "Define-callback-functions-for-AMP"], [18, "Define-callback-functions-for-AMP"]], "Parameters for AMP algorithm": [[0, "Parameters-for-AMP-algorithm"], [4, "Parameters-for-AMP-algorithm"], [18, "Parameters-for-AMP-algorithm"]], "Call AMP API": [[0, "Call-AMP-API"], [18, "Call-AMP-API"]], "Adaptive Rounding (AdaRound)": [[1, "Adaptive-Rounding-(AdaRound)"], [5, "Adaptive-Rounding-(AdaRound)"], [19, "Adaptive-Rounding-(AdaRound)"]], "1. 
Instantiate the example training and validation pipeline": [[1, "1.-Instantiate-the-example-training-and-validation-pipeline"], [2, "1.-Instantiate-the-example-training-and-validation-pipeline"], [3, "1.-Instantiate-the-example-training-and-validation-pipeline"], [5, "1.-Instantiate-the-example-training-and-validation-pipeline"], [6, "1.-Instantiate-the-example-training-and-validation-pipeline"], [14, "1.-Instantiate-the-example-training-and-validation-pipeline"], [15, "1.-Instantiate-the-example-training-and-validation-pipeline"], [16, "1.-Instantiate-the-example-training-and-validation-pipeline"], [17, "1.-Instantiate-the-example-training-and-validation-pipeline"], [19, "1.-Instantiate-the-example-training-and-validation-pipeline"], [22, "1.-Instantiate-the-example-training-and-validation-pipeline"], [23, "1.-Instantiate-the-example-training-and-validation-pipeline"], [24, "1.-Instantiate-the-example-training-and-validation-pipeline"]], "3. Create a quantization simulation model and determine quantized accuracy": [[1, "3.-Create-a-quantization-simulation-model-and-determine-quantized-accuracy"], [2, "3.-Create-a-quantization-simulation-model-and-determine-quantized-accuracy"], [3, "3.-Create-a-quantization-simulation-model-and-determine-quantized-accuracy"], [5, "3.-Create-a-quantization-simulation-model-and-determine-quantized-accuracy"], [10, "3.-Create-a-quantization-simulation-model-and-determine-quantized-accuracy"], [11, "3.-Create-a-quantization-simulation-model-and-determine-quantized-accuracy"], [13, "3.-Create-a-quantization-simulation-model-and-determine-quantized-accuracy"], [14, "3.-Create-a-quantization-simulation-model-and-determine-quantized-accuracy"], [19, "3.-Create-a-quantization-simulation-model-and-determine-quantized-accuracy"], [22, "3.-Create-a-quantization-simulation-model-and-determine-quantized-accuracy"], [23, "3.-Create-a-quantization-simulation-model-and-determine-quantized-accuracy"], [24, "3.-Create-a-quantization-simulation-model-and-determine-quantized-accuracy"]], "Create the Quantization Sim Model": [[1, "Create-the-Quantization-Sim-Model"], [2, "Create-the-Quantization-Sim-Model"], [3, "Create-the-Quantization-Sim-Model"], [5, "Create-the-Quantization-Sim-Model"], [10, "Create-the-Quantization-Sim-Model"], [11, "Create-the-Quantization-Sim-Model"], [14, "Create-the-Quantization-Sim-Model"], [19, "Create-the-Quantization-Sim-Model"], [22, "Create-the-Quantization-Sim-Model"], [23, "Create-the-Quantization-Sim-Model"], [24, "Create-the-Quantization-Sim-Model"]], "4. 
Apply Adaround": [[1, "4.-Apply-Adaround"], [5, "4.-Apply-Adaround"], [13, "4.-Apply-Adaround"], [19, "4.-Apply-Adaround"]], "Next steps": [[1, "Next-steps"], [5, "Next-steps"], [10, "Next-steps"], [11, "Next-steps"], [14, "Next-steps"], [15, "Next-steps"], [16, "Next-steps"], [17, "Next-steps"], [19, "Next-steps"], [20, "Next-steps"], [22, "Next-steps"], [23, "Next-steps"], [24, "Next-steps"]], "For more information": [[1, "For-more-information"], [2, "For-more-information"], [3, "For-more-information"], [5, "For-more-information"], [6, "For-more-information"], [10, "For-more-information"], [11, "For-more-information"], [14, "For-more-information"], [15, "For-more-information"], [16, "For-more-information"], [17, "For-more-information"], [19, "For-more-information"], [20, "For-more-information"], [22, "For-more-information"], [23, "For-more-information"], [24, "For-more-information"]], "Cross-Layer Equalization": [[2, "Cross-Layer-Equalization"]], "Fold Batch Norm layers": [[2, "Fold-Batch-Norm-layers"], [22, "Fold-Batch-Norm-layers"]], "4. Apply CLE": [[2, "4.-Apply-CLE"], [14, "4.-Apply-CLE"]], "Quantization simulation": [[3, "Quantization-simulation"]], "1. Instantiate the example evaluation method": [[4, "1.-Instantiate-the-example-evaluation-method"]], "2. Load the FP32 model and evaluate the model to find the baseline FP32 accuracy": [[4, "2.-Load-the-FP32-model-and-evaluate-the-model-to-find-the-baseline-FP32-accuracy"]], "3.Create a quantization simulation model (with fake quantization ops inserted)": [[4, "3.Create-a-quantization-simulation-model-(with-fake-quantization-ops-inserted)"]], "Regular AMP": [[4, "Regular-AMP"]], "API Call for Regular AMP": [[4, "API-Call-for-Regular-AMP"]], "Fast AMP (AMP 2.0)": [[4, "Fast-AMP-(AMP-2.0)"]], "Summary": [[4, "Summary"], [7, "Summary"], [9, "Summary"], [13, "Summary"], [21, "Summary"]], "2. Load the model and evaluate to get a baseline FP32 accuracy score": [[5, "2.-Load-the-model-and-evaluate-to-get-a-baseline-FP32-accuracy-score"], [10, "2.-Load-the-model-and-evaluate-to-get-a-baseline-FP32-accuracy-score"], [11, "2.-Load-the-model-and-evaluate-to-get-a-baseline-FP32-accuracy-score"], [13, "2.-Load-the-model-and-evaluate-to-get-a-baseline-FP32-accuracy-score"], [14, "2.-Load-the-model-and-evaluate-to-get-a-baseline-FP32-accuracy-score"], [15, "2.-Load-the-model-and-evaluate-to-get-a-baseline-FP32-accuracy-score"], [16, "2.-Load-the-model-and-evaluate-to-get-a-baseline-FP32-accuracy-score"], [17, "2.-Load-the-model-and-evaluate-to-get-a-baseline-FP32-accuracy-score"], [18, "2.-Load-the-model-and-evaluate-to-get-a-baseline-FP32-accuracy-score"], [19, "2.-Load-the-model-and-evaluate-to-get-a-baseline-FP32-accuracy-score"], [22, "2.-Load-the-model-and-evaluate-to-get-a-baseline-FP32-accuracy-score"], [23, "2.-Load-the-model-and-evaluate-to-get-a-baseline-FP32-accuracy-score"], [24, "2.-Load-the-model-and-evaluate-to-get-a-baseline-FP32-accuracy-score"]], "AutoQuant": [[6, "AutoQuant"], [20, "AutoQuant"]], "2. Load a pretrained FP32 model": [[6, "2.-Load-a-pretrained-FP32-model"], [12, "2.-Load-a-pretrained-FP32-model"], [20, "2.-Load-a-pretrained-FP32-model"]], "3. Determine the baseline FP32 accuracy": [[6, "3.-Determine-the-baseline-FP32-accuracy"]], "4. Define constants and helper functions": [[6, "4.-Define-constants-and-helper-functions"]], "5. 
Apply AutoQuant": [[6, "5.-Apply-AutoQuant"]], "Quantization-Aware Training with BatchNorm Re-estimation": [[7, "Quantization-Aware-Training-with-BatchNorm-Re-estimation"], [21, "Quantization-Aware-Training-with-BatchNorm-Re-estimation"]], "1. Instantiate the example evaluation and training pipeline": [[7, "1.-Instantiate-the-example-evaluation-and-training-pipeline"]], "2. Define Constants and Datasets Prepare": [[7, "2.-Define-Constants-and-Datasets-Prepare"]], "2. Create the model in Keras": [[7, "2.-Create-the-model-in-Keras"]], "3. Train and evaluate the model": [[7, "3.-Train-and-evaluate-the-model"]], "4. Create a QuantizationSim Model": [[7, "4.-Create-a-QuantizationSim-Model"]], "Prepare the evaluation callback function": [[7, "Prepare-the-evaluation-callback-function"]], "5. Perform QAT": [[7, "5.-Perform-QAT"]], "Fold BatchNorm Layers": [[7, "Fold-BatchNorm-Layers"], [21, "Fold-BatchNorm-Layers"]], "5. Export Model": [[7, "5.-Export-Model"], [21, "5.-Export-Model"]], "Quantization-Aware Training with a Keras Transformer Model": [[8, "Quantization-Aware-Training-with-a-Keras-Transformer-Model"]], "Keras Model Preparer": [[9, "Keras-Model-Preparer"]], "1. Creating a Keras model with subclass layers": [[9, "1.-Creating-a-Keras-model-with-subclass-layers"]], "2. Converting the Keras model with subclass layers to a Keras model with functional layers": [[9, "2.-Converting-the-Keras-model-with-subclass-layers-to-a-Keras-model-with-functional-layers"]], "3. Showing similarities and differences between the original and converted models": [[9, "3.-Showing-similarities-and-differences-between-the-original-and-converted-models"]], "4. Discussing the limitations of the Keras Model Preparer": [[9, "4.-Discussing-the-limitations-of-the-Keras-Model-Preparer"]], "Quantization-aware training": [[10, "Quantization-aware-training"], [23, "Quantization-aware-training"]], "1. Instantiate the example evaluation and training datasets": [[10, "1.-Instantiate-the-example-evaluation-and-training-datasets"], [11, "1.-Instantiate-the-example-evaluation-and-training-datasets"]], "4. Perform QAT": [[10, "4.-Perform-QAT"], [11, "4.-Perform-QAT"], [23, "4.-Perform-QAT"], [24, "4.-Perform-QAT"]], "Quantization-Aware training with range learning": [[11, "Quantization-Aware-training-with-range-learning"]], "Quant Analyzer": [[12, "Quant-Analyzer"], [25, "Quant-Analyzer"]], "1. Example evaluation and training pipeline": [[12, "1.-Example-evaluation-and-training-pipeline"], [13, "1.-Example-evaluation-and-training-pipeline"], [21, "1.-Example-evaluation-and-training-pipeline"], [25, "1.-Example-evaluation-and-training-pipeline"]], "3. 
Apply QuantAnalyzer to the model": [[12, "3.-Apply-QuantAnalyzer-to-the-model"], [25, "3.-Apply-QuantAnalyzer-to-the-model"]], "Per-layer analysis by enabling/disabling quantization wrappers": [[12, "Per-layer-analysis-by-enabling/disabling-quantization-wrappers"], [25, "Per-layer-analysis-by-enabling/disabling-quantization-wrappers"]], "Encoding min/max ranges": [[12, "Encoding-min/max-ranges"], [25, "Encoding-min/max-ranges"]], "PDF of statistics": [[12, "PDF-of-statistics"], [25, "PDF-of-statistics"]], "Per-layer MSE loss": [[12, "Per-layer-MSE-loss"], [25, "Per-layer-MSE-loss"]], "Quantsim and Adaround - Per Channel Quantization (PCQ)": [[13, "Quantsim-and-Adaround---Per-Channel-Quantization-(PCQ)"]], "Cross-Layer Equalization with QuantSim": [[14, "Cross-Layer-Equalization-with-QuantSim"]], "Model compression using channel pruning": [[15, "Model-compression-using-channel-pruning"]], "3. Compress the model and fine-tune": [[15, "3.-Compress-the-model-and-fine-tune"], [16, "3.-Compress-the-model-and-fine-tune"], [17, "3.-Compress-the-model-and-fine-tune"]], "Model compression using spatial SVD": [[16, "Model-compression-using-spatial-SVD"]], "Model compression using spatial SVD and channel pruning": [[17, "Model-compression-using-spatial-SVD-and-channel-pruning"]], "1. Define Constants and Helper functions": [[20, "1.-Define-Constants-and-Helper-functions"]], "3. Run AutoQuant": [[20, "3.-Run-AutoQuant"]], "2. Load FP32 model": [[21, "2.-Load-FP32-model"]], "3. Create a quantization simulation model and Perform QAT": [[21, "3.-Create-a-quantization-simulation-model-and-Perform-QAT"]], "Perform QAT": [[21, "Perform-QAT"]], "4. Perform BatchNorm Reestimation": [[21, "4.-Perform-BatchNorm-Reestimation"]], "Re-estimate BatchNorm Statistics": [[21, "Re-estimate-BatchNorm-Statistics"]], "Cross-Layer Equalization and Bias Correction": [[22, "Cross-Layer-Equalization-and-Bias-Correction"]], "Techniques": [[22, "Techniques"]], "4. Apply CLE and BC": [[22, "4.-Apply-CLE-and-BC"]], "CLE": [[22, "CLE"]], "Bias Correction": [[22, "Bias-Correction"]], "Quantization-aware training with range learning": [[24, "Quantization-aware-training-with-range-learning"]], "2. 
Load the model": [[25, "2.-Load-the-model"]], "Welcome to AI Model Efficiency Toolkit API Docs!": [[26, "welcome-to-ai-model-efficiency-toolkit-api-docs"]], "Indices and tables": [[26, "indices-and-tables"]], "AIMET TensorFlow APIs": [[27, "aimet-tensorflow-apis"]], "AIMET TensorFlow AdaRound API": [[28, "aimet-tensorflow-adaround-api"]], "User Guide Link": [[28, "user-guide-link"], [31, "user-guide-link"], [39, "user-guide-link"], [41, "user-guide-link"], [42, "user-guide-link"], [43, "user-guide-link"], [51, "user-guide-link"], [53, "user-guide-link"], [55, "user-guide-link"], [57, "user-guide-link"], [66, "user-guide-link"], [68, "user-guide-link"]], "Examples Notebook Link": [[28, "examples-notebook-link"], [29, "examples-notebook-link"], [31, "examples-notebook-link"], [51, "examples-notebook-link"], [53, "examples-notebook-link"], [54, "examples-notebook-link"], [57, "examples-notebook-link"], [66, "examples-notebook-link"], [68, "examples-notebook-link"]], "Top-level API": [[28, "top-level-api"], [32, "top-level-api"], [35, "top-level-api"], [37, "top-level-api"], [39, "top-level-api"], [41, "top-level-api"], [42, "top-level-api"], [44, "top-level-api"], [45, "top-level-api"], [46, "top-level-api"], [48, "top-level-api"], [51, "top-level-api"], [53, "top-level-api"], [58, "top-level-api"], [59, "top-level-api"], [61, "top-level-api"], [64, "top-level-api"], [66, "top-level-api"], [68, "top-level-api"], [88, "top-level-api"], [89, "top-level-api"]], "Adaround Parameters": [[28, "adaround-parameters"], [41, "adaround-parameters"], [51, "adaround-parameters"]], "Enum Definition": [[28, "enum-definition"], [51, "enum-definition"], [58, "enum-definition"], [68, "enum-definition"]], "Code Examples": [[28, "code-examples"], [30, "code-examples"], [33, "code-examples"], [35, "code-examples"], [37, "code-examples"], [39, "code-examples"], [42, "code-examples"], [45, "code-examples"], [46, "code-examples"], [48, "code-examples"], [53, "code-examples"], [56, "code-examples"], [59, "code-examples"], [61, "code-examples"], [66, "code-examples"], [70, "code-examples"], [71, "code-examples"], [72, "code-examples"]], "AIMET TensorFlow BatchNorm Re-estimation APIs": [[29, "aimet-tensorflow-batchnorm-re-estimation-apis"]], "Introduction": [[29, "introduction"], [30, "introduction"], [31, "introduction"], [36, "introduction"], [43, "introduction"], [54, "introduction"], [56, "introduction"], [57, "introduction"], [65, "introduction"]], "Top-level APIs": [[29, "top-level-apis"], [54, "top-level-apis"]], "Code Example": [[29, "code-example"], [31, "code-example"], [32, "code-example"], [43, "code-example"], [44, "code-example"], [57, "code-example"], [58, "code-example"], [69, "code-example"], [86, "code-example"]], "Limitations": [[29, "limitations"], [35, "limitations"]], "AIMET TensorFlow Compression API": [[30, "aimet-tensorflow-compression-api"]], "Top-level API for Compression": [[30, "top-level-api-for-compression"], [56, "top-level-api-for-compression"]], "Greedy Selection Parameters": [[30, "greedy-selection-parameters"], [56, "greedy-selection-parameters"]], "Spatial SVD Configuration": [[30, "spatial-svd-configuration"], [56, "spatial-svd-configuration"]], "Configuration Definitions": [[30, "configuration-definitions"], [56, "configuration-definitions"]], "AIMET TensorFlow Cross Layer Equalization APIs": [[31, "aimet-tensorflow-cross-layer-equalization-apis"]], "Cross Layer Equalization API": [[31, "cross-layer-equalization-api"], [43, "cross-layer-equalization-api"], [57, 
"cross-layer-equalization-api"]], "Primitive APIs": [[31, "primitive-apis"], [57, "primitive-apis"]], "AIMET TensorFlow Layer Output Generation API": [[32, "aimet-tensorflow-layer-output-generation-api"]], "AIMET TensorFlow Mixed Precision API": [[33, "aimet-tensorflow-mixed-precision-api"]], "Top-level API for Regular AMP": [[33, "top-level-api-for-regular-amp"]], "Top-level API for Fast AMP (AMP 2.0)": [[33, "top-level-api-for-fast-amp-amp-2-0"]], "Quantizer Groups definition": [[33, "quantizer-groups-definition"], [45, "quantizer-groups-definition"], [59, "quantizer-groups-definition"]], "CallbackFunc Definition": [[33, "callbackfunc-definition"], [45, "callbackfunc-definition"], [59, "callbackfunc-definition"]], "TensorFlow Model Guidelines": [[34, "tensorflow-model-guidelines"]], "TensorFlow Model Preparer API": [[35, "tensorflow-model-preparer-api"]], "AIMET TensorFlow Cross Layer Equalization Primitive API": [[36, "aimet-tensorflow-cross-layer-equalization-primitive-api"]], "Higher Level APIs for Cross Layer Equalization": [[36, "higher-level-apis-for-cross-layer-equalization"], [65, "higher-level-apis-for-cross-layer-equalization"]], "Code Examples for Higher Level APIs": [[36, "code-examples-for-higher-level-apis"], [65, "code-examples-for-higher-level-apis"]], "Lower Level APIs for Cross Layer Equalization": [[36, "lower-level-apis-for-cross-layer-equalization"], [65, "lower-level-apis-for-cross-layer-equalization"]], "Custom Datatype used": [[36, "custom-datatype-used"]], "Code Example for Lower level APIs": [[36, "code-example-for-lower-level-apis"]], "Example helper methods to perform CLE in manual mode": [[36, "example-helper-methods-to-perform-cle-in-manual-mode"]], "AIMET TensorFlow Quant Analyzer API": [[37, "aimet-tensorflow-quant-analyzer-api"]], "AIMET TensorFlow Quantization APIs": [[38, "aimet-tensorflow-quantization-apis"]], "AIMET TensorFlow Quantization SIM API": [[39, "aimet-tensorflow-quantization-sim-api"]], "AIMET ONNX APIs": [[40, "aimet-onnx-apis"]], "AIMET ONNX AdaRound API": [[41, "aimet-onnx-adaround-api"]], "Code Example - Adaptive Rounding (AdaRound)": [[41, "code-example-adaptive-rounding-adaround"], [51, "code-example-adaptive-rounding-adaround"]], "AIMET ONNX AutoQuant API": [[42, "aimet-onnx-autoquant-api"]], "AIMET ONNX Cross Layer Equalization APIs": [[43, "aimet-onnx-cross-layer-equalization-apis"]], "AIMET ONNX Layer Output Generation API": [[44, "aimet-onnx-layer-output-generation-api"]], "AIMET ONNX Mixed Precision API": [[45, "aimet-onnx-mixed-precision-api"]], "AIMET ONNX Quant Analyzer API": [[46, "aimet-onnx-quant-analyzer-api"]], "Run specific utility": [[46, "run-specific-utility"], [66, "run-specific-utility"]], "AIMET ONNX Quantization APIs": [[47, "aimet-onnx-quantization-apis"]], "AIMET ONNX Quantization SIM API": [[48, "aimet-onnx-quantization-sim-api"]], "Encoding Format Specification": [[49, "encoding-format-specification"]], "1. Versioning": [[49, "versioning"]], "2. Version 0.4.0 (up to)": [[49, "version-0-4-0-up-to"]], "2.1. Encoding Specification": [[49, "encoding-specification"]], "2.2. Encoding File Example for PyTorch": [[49, "encoding-file-example-for-pytorch"]], "2.3. Encoding File Example for TensorFlow": [[49, "encoding-file-example-for-tensorflow"]], "3. Version 0.5.0": [[49, "version-0-5-0"]], "3.1. Encoding Specification": [[49, "id1"]], "3.2. Encoding File Example for PyTorch": [[49, "id2"]], "3.3. Encoding File Example for TensorFlow": [[49, "id3"]], "4. Version 0.6.1": [[49, "version-0-6-1"]], "4.1. 
Encoding Specification": [[49, "id4"]], "AIMET PyTorch APIs": [[50, "aimet-pytorch-apis"]], "AIMET PyTorch AdaRound API": [[51, "aimet-pytorch-adaround-api"]], "Architecture Checker API": [[52, "architecture-checker-api"]], "HTML report content": [[52, "id1"]], "AIMET PyTorch AutoQuant API": [[53, "aimet-pytorch-autoquant-api"]], "AIMET PyTorch BatchNorm Re-estimation APIs": [[54, "aimet-pytorch-batchnorm-re-estimation-apis"]], "Code Example - BN-Reestimation": [[54, "code-example-bn-reestimation"]], "AIMET PyTorch Bias Correction API": [[55, "aimet-pytorch-bias-correction-api"]], "Bias Correction API": [[55, "bias-correction-api"]], "ConvBnInfoType": [[55, "convbninfotype"]], "ActivationType": [[55, "activationtype"]], "Quantization Params": [[55, "quantization-params"]], "Code Example #1 Empirical Bias Correction": [[55, "code-example-1-empirical-bias-correction"]], "Code Example #2 Analytical + Empirical Bias correction": [[55, "code-example-2-analytical-empirical-bias-correction"]], "AIMET PyTorch Compression API": [[56, "aimet-pytorch-compression-api"]], "Weight SVD Configuration": [[56, "weight-svd-configuration"]], "Channel Pruning Configuration": [[56, "channel-pruning-configuration"]], "AIMET PyTorch Cross Layer Equalization APIs": [[57, "aimet-pytorch-cross-layer-equalization-apis"]], "AIMET PyTorch Layer Output Generation API": [[58, "aimet-pytorch-layer-output-generation-api"]], "AIMET PyTorch Mixed Precision API": [[59, "aimet-pytorch-mixed-precision-api"]], "PyTorch Model Guidelines": [[60, "pytorch-model-guidelines"]], "Model Preparer API": [[61, "model-preparer-api"]], "Limitations of torch.fx symbolic trace API": [[61, "limitations-of-torch-fx-symbolic-trace-api"]], "Model Validator Utility": [[62, "model-validator-utility"]], "PyTorch Multi-GPU support": [[63, "pytorch-multi-gpu-support"]], "PEFT LoRA": [[64, "peft-lora"]], "User flow": [[64, "user-flow"]], "AIMET PyTorch Cross Layer Equalization Primitive API": [[65, "aimet-pytorch-cross-layer-equalization-primitive-api"]], "ClsSetInfo Definition": [[65, "clssetinfo-definition"]], "Code Examples for Lower Level APIs": [[65, "code-examples-for-lower-level-apis"]], "AIMET PyTorch Quant Analyzer API": [[66, "aimet-pytorch-quant-analyzer-api"]], "AIMET PyTorch Quantization APIs": [[67, "aimet-pytorch-quantization-apis"]], "aimet_torch": [[67, "aimet-torch"]], "API Reference": [[67, "api-reference"], [67, "id1"]], "aimet_torch.v2": [[67, "aimet-torch-v2"]], "What\u2019s New": [[67, "what-s-new"]], "Backwards Compatibility": [[67, "backwards-compatibility"]], "AIMET PyTorch Quantization SIM API": [[68, "aimet-pytorch-quantization-sim-api"]], "Guidelines": [[68, "guidelines"]], "Code Example - Quantization Aware Training (QAT)": [[68, "code-example-quantization-aware-training-qat"]], "AIMET Torch SparseConvolution custom onnx export": [[69, "aimet-torch-sparseconvolution-custom-onnx-export"]], "Custom API for the spconv modules": [[69, "custom-api-for-the-spconv-modules"]], "AIMET Visualization Compression API": [[70, "aimet-visualization-compression-api"]], "Top-level API Compression": [[70, "top-level-api-compression"]], "AIMET Visualization for Quantization API": [[71, "aimet-visualization-for-quantization-api"]], "Top-level API Quantization": [[71, "top-level-api-quantization"]], "Migrate to aimet_torch.v2": [[72, "migrate-to-aimet-torch-v2"]], "Changes in aimet_torch v2": [[72, "changes-in-aimet-torch-v2"]], "Migration Process": [[72, "migration-process"]], "Imports": [[72, "imports"]], "QuantizationSimModel": [[72, 
"quantizationsimmodel"]], "Moving from QuantWrapper to Quantized Modules": [[72, "moving-from-quantwrapper-to-quantized-modules"]], "Moving from StaticGrid and LearnedGrid Quantizer to Affine and Float Quantizer": [[72, "moving-from-staticgrid-and-learnedgrid-quantizer-to-affine-and-float-quantizer"]], "AIMET Installation": [[73, "aimet-installation"]], "Quick Install": [[73, "quick-install"]], "Release Packages": [[73, "release-packages"]], "System Requirements": [[73, "system-requirements"]], "Advanced Installation Instructions": [[73, "advanced-installation-instructions"]], "AIMET Installation in Docker": [[74, "aimet-installation-in-docker"]], "Set variant": [[74, "set-variant"]], "Use prebuilt docker image": [[74, "use-prebuilt-docker-image"]], "Build docker image locally": [[74, "build-docker-image-locally"]], "Start docker container": [[74, "start-docker-container"]], "Install AIMET packages": [[74, "install-aimet-packages"], [75, "install-aimet-packages"]], "From PyPI": [[74, "from-pypi"], [75, "from-pypi"]], "From Release Package": [[74, "from-release-package"], [75, "from-release-package"]], "Environment setup": [[74, "environment-setup"], [75, "environment-setup"]], "AIMET Installation and Setup": [[75, "aimet-installation-and-setup"]], "Install prerequisite packages": [[75, "install-prerequisite-packages"]], "Install GPU packages": [[75, "install-gpu-packages"]], "Install GPU packages for PyTorch 2.1 or PyTorch 1.13 or ONNX or TensorFlow": [[75, "install-gpu-packages-for-pytorch-2-1-or-pytorch-1-13-or-onnx-or-tensorflow"]], "Install common debian packages": [[75, "install-common-debian-packages"]], "Install tensorflow GPU debian packages": [[75, "install-tensorflow-gpu-debian-packages"]], "Install torch GPU debian packages": [[75, "install-torch-gpu-debian-packages"]], "Install ONNX GPU debian packages": [[75, "install-onnx-gpu-debian-packages"]], "Replace Pillow with Pillow-SIMD": [[75, "replace-pillow-with-pillow-simd"]], "Replace onnxruntime with onnxruntime-gpu": [[75, "replace-onnxruntime-with-onnxruntime-gpu"]], "Post installation steps": [[75, "post-installation-steps"]], "QuantizationMixin": [[77, "quantizationmixin"]], "quantization.affine": [[78, "module-aimet_torch.v2.quantization.affine"]], "Classes": [[78, "classes"], [80, "classes"], [81, "classes"]], "Functions": [[78, "functions"]], "FloatQuantizeDequantize": [[79, "floatquantizedequantize"]], "QuantizeDequantize": [[79, "quantizedequantize"]], "quantization.float": [[80, "module-aimet_torch.v2.quantization.float"]], "quantization.tensor": [[81, "quantization-tensor"]], "Visualization Tools": [[82, "visualization-tools"]], "Blockwise Quantization": [[83, "blockwise-quantization"]], "Low Power Blockwise Quantization (LPBQ)": [[83, "low-power-blockwise-quantization-lpbq"]], "Top Level API": [[83, "top-level-api"], [86, "top-level-api"]], "Export": [[83, "export"]], "Encoding Analyzers": [[84, "encoding-analyzers"]], "Variants": [[84, "variants"]], "Post-Training Quantization": [[85, "post-training-quantization"], [102, "post-training-quantization"]], "GPTVQ": [[86, "gptvq"]], "GPTVQ Parameters": [[86, "gptvq-parameters"]], "AIMET: AI Model Efficiency Toolkit Documentation": [[87, "aimet-ai-model-efficiency-toolkit-documentation"]], "Getting Started": [[87, "getting-started"]], "Examples": [[87, null]], "Feature Descriptions": [[87, null]], "AIMET PyTorch API": [[87, null]], "Quantized Modules": [[88, "quantized-modules"]], "Configuration": [[88, "configuration"]], "Computing Encodings": [[88, 
"computing-encodings"]], "Quantized Module Classes": [[88, "quantized-module-classes"]], "Quantizers": [[89, "quantizers"]], "Quickstart Guide": [[90, "quickstart-guide"]], "PyTorch prerequisites": [[90, "pytorch-prerequisites"]], "Prepare the floating point model for quantization": [[90, "prepare-the-floating-point-model-for-quantization"]], "1) Model preparation": [[90, "model-preparation"]], "2) BatchNorm fold": [[90, "batchnorm-fold"]], "Quantize the model": [[90, "quantize-the-model"]], "Fine-tune the model with quantization aware training": [[90, "fine-tune-the-model-with-quantization-aware-training"]], "Export the quantsim model": [[90, "export-the-quantsim-model"]], "AIMET AdaRound": [[91, "aimet-adaround"]], "AdaRound use cases": [[91, "adaround-use-cases"]], "AdaRound hyper parameters guidelines": [[91, "adaround-hyper-parameters-guidelines"]], "AdaRound API": [[91, "adaround-api"]], "AIMET AutoQuant": [[92, "aimet-autoquant"]], "Overview": [[92, "overview"], [93, "overview"], [97, "overview"], [98, "overview"], [100, "overview"], [103, "overview"], [104, "overview"], [105, "overview"], [106, "overview"], [108, "overview"], [111, "overview"], [112, "overview"], [114, "overview"]], "Workflow": [[92, "workflow"], [93, "workflow"]], "AutoQuant API": [[92, "autoquant-api"]], "AIMET Batch Norm Re-estimation": [[93, "aimet-batch-norm-re-estimation"]], "BN Re-estimation API": [[93, "bn-re-estimation-api"]], "AIMET channel pruning": [[94, "aimet-channel-pruning"]], "Procedure": [[94, "procedure"]], "Channel selection": [[94, "channel-selection"]], "Winnowing": [[94, "winnowing"]], "Weight reconstruction": [[94, "weight-reconstruction"]], "AIMET Compression Features Guidebook": [[95, "aimet-compression-features-guidebook"]], "AIMET examples": [[96, "aimet-examples"]], "Browse the notebooks": [[96, "browse-the-notebooks"]], "Running the notebooks": [[96, "running-the-notebooks"]], "1. Run the notebook server": [[96, "run-the-notebook-server"]], "2. Download the example notebooks and related code": [[96, "download-the-example-notebooks-and-related-code"]], "3. 
Run the notebooks": [[96, "run-the-notebooks"]], "AIMET greedy compression ratio selection": [[97, "aimet-greedy-compression-ratio-selection"]], "How it works": [[97, "how-it-works"]], "Per-layer exploration": [[97, "per-layer-exploration"]], "Compression ratio selection": [[97, "compression-ratio-selection"], [100, "compression-ratio-selection"]], "AI Model Efficiency Toolkit User Guide": [[98, "ai-model-efficiency-toolkit-user-guide"]], "Features": [[98, "features"]], "More Information": [[98, "more-information"]], "Release Information": [[98, "release-information"]], "toc tree": [[98, "toc-tree"]], "AIMET Known Issues": [[99, "aimet-known-issues"]], "AIMET model compression": [[100, "aimet-model-compression"]], "Use Case": [[100, "use-case"]], "Model compression": [[100, "model-compression"]], "Optional techniques": [[100, "optional-techniques"]], "Rank Rounding": [[100, "rank-rounding"]], "Per-layer fine-tuning": [[100, "per-layer-fine-tuning"]], "NOTE": [[100, null]], "FAQs": [[100, "faqs"], [103, "faqs"]], "References": [[100, "references"], [103, "references"]], "Model Guidelines for PyTorch": [[101, "model-guidelines-for-pytorch"]], "AIMET model quantization": [[102, "aimet-model-quantization"]], "Use cases": [[102, "use-cases"]], "AIMET quantization features": [[102, "aimet-quantization-features"]], "Debugging and Analysis Tools": [[102, "debugging-and-analysis-tools"]], "AIMET quantization workflow": [[102, "aimet-quantization-workflow"]], "PyTorch": [[102, "pytorch"], [112, "pytorch"]], "Debugging": [[102, "debugging"]], "AIMET post-training quantization techniques": [[103, "aimet-post-training-quantization-techniques"]], "User Flow": [[103, "user-flow"]], "Cross-Layer Equalization API": [[103, "cross-layer-equalization-api"]], "AIMET QuantAnalyzer": [[104, "aimet-quantanalyzer"]], "Requirements": [[104, "requirements"]], "Detailed analysis descriptions": [[104, "detailed-analysis-descriptions"]], "QuantAnalyzer API": [[104, "quantanalyzer-api"]], "AIMET quantization aware training": [[105, "aimet-quantization-aware-training"]], "QAT workflow": [[105, "qat-workflow"]], "QAT modes": [[105, "qat-modes"]], "Recommendations for quantization-aware training": [[105, "recommendations-for-quantization-aware-training"]], "Quantization simulation configuration": [[106, "quantization-simulation-configuration"]], "Configuration file structure": [[106, "configuration-file-structure"]], "Modifying configuration file sections": [[106, "modifying-configuration-file-sections"]], "AIMET quantization diagnostics": [[107, "aimet-quantization-diagnostics"]], "AIMET quantization simulation": [[108, "aimet-quantization-simulation"]], "QuantSim workflow": [[108, "quantsim-workflow"]], "Simulating quantization noise": [[108, "simulating-quantization-noise"]], "Determining quantization parameters (encodings)": [[108, "determining-quantization-parameters-encodings"]], "Quantization schemes": [[108, "quantization-schemes"]], "Configuring quantization simulation operations": [[108, "configuring-quantization-simulation-operations"]], "Quantization Simulation APIs": [[108, "quantization-simulation-apis"]], "AIMET Release Notes": [[109, "aimet-release-notes"]], "1.22.2": [[109, "id1"]], "1.22.1": [[109, "id2"]], "1.22.0": [[109, "id3"]], "1.21.0": [[109, "id4"]], "1.20.0": [[109, "id5"]], "1.19.1.py37": [[109, "py37"]], "1.19.1": [[109, "id6"]], "1.18.0.py37": [[109, "id7"]], "1.18.0": [[109, "id8"]], "1.17.0.py37": [[109, "id9"]], "1.17.0": [[109, "id10"]], "1.16.2.py37": [[109, "id11"]], "1.16.2": [[109, 
"id12"]], "1.16.1.py37": [[109, "id13"]], "1.16.1": [[109, "id14"]], "1.16.0": [[109, "id15"]], "1.14.0": [[109, "id16"]], "1.13.0": [[109, "id17"]], "AIMET spatial SVD": [[110, "aimet-spatial-svd"]], "AIMET visualization": [[111, "aimet-visualization"]], "Design": [[111, "design"]], "Compression": [[111, "compression"]], "Starting a Bokeh server session": [[111, "starting-a-bokeh-server-session"]], "Visualizing compression ratios": [[111, "visualizing-compression-ratios"]], "AIMET visualization for quantization": [[112, "aimet-visualization-for-quantization"]], "Quantization": [[112, "quantization"]], "TensorFlow": [[112, "tensorflow"]], "AIMET weight SVD": [[113, "aimet-weight-svd"]], "AIMET winnowing": [[114, "aimet-winnowing"]], "Winnowing overview": [[114, "winnowing-overview"]], "How winnowing works": [[114, "how-winnowing-works"]]}, "indexentries": {"adaroundparameters (class in aimet_tensorflow.keras.adaround_weight)": [[28, "aimet_tensorflow.keras.adaround_weight.AdaroundParameters"]], "quantscheme (class in aimet_common.defs)": [[28, "aimet_common.defs.QuantScheme"], [51, "aimet_common.defs.QuantScheme"], [68, "aimet_common.defs.QuantScheme"]], "apply_adaround() (in module aimet_tensorflow.keras.adaround_weight.adaround)": [[28, "aimet_tensorflow.keras.adaround_weight.Adaround.apply_adaround"]], "post_training_percentile (aimet_common.defs.quantscheme attribute)": [[28, "aimet_common.defs.QuantScheme.post_training_percentile"], [51, "aimet_common.defs.QuantScheme.post_training_percentile"], [68, "aimet_common.defs.QuantScheme.post_training_percentile"]], "post_training_tf (aimet_common.defs.quantscheme attribute)": [[28, "aimet_common.defs.QuantScheme.post_training_tf"], [51, "aimet_common.defs.QuantScheme.post_training_tf"], [68, "aimet_common.defs.QuantScheme.post_training_tf"]], "post_training_tf_enhanced (aimet_common.defs.quantscheme attribute)": [[28, "aimet_common.defs.QuantScheme.post_training_tf_enhanced"], [51, "aimet_common.defs.QuantScheme.post_training_tf_enhanced"], [68, "aimet_common.defs.QuantScheme.post_training_tf_enhanced"]], "training_range_learning_with_tf_enhanced_init (aimet_common.defs.quantscheme attribute)": [[28, "aimet_common.defs.QuantScheme.training_range_learning_with_tf_enhanced_init"], [51, "aimet_common.defs.QuantScheme.training_range_learning_with_tf_enhanced_init"], [68, "aimet_common.defs.QuantScheme.training_range_learning_with_tf_enhanced_init"]], "training_range_learning_with_tf_init (aimet_common.defs.quantscheme attribute)": [[28, "aimet_common.defs.QuantScheme.training_range_learning_with_tf_init"], [51, "aimet_common.defs.QuantScheme.training_range_learning_with_tf_init"], [68, "aimet_common.defs.QuantScheme.training_range_learning_with_tf_init"]], "fold_all_batch_norms_to_scale() (in module aimet_tensorflow.keras.batch_norm_fold)": [[29, "aimet_tensorflow.keras.batch_norm_fold.fold_all_batch_norms_to_scale"]], "reestimate_bn_stats() (in module aimet_tensorflow.keras.bn_reestimation)": [[29, "aimet_tensorflow.keras.bn_reestimation.reestimate_bn_stats"]], "compressionscheme (class in aimet_common.defs)": [[30, "aimet_common.defs.CompressionScheme"]], "costmetric (class in aimet_common.defs)": [[30, "aimet_common.defs.CostMetric"]], "modelcompressor (class in aimet_tensorflow.keras.compress)": [[30, "aimet_tensorflow.keras.compress.ModelCompressor"]], "modulecompratiopair (class in aimet_tensorflow.keras.defs)": [[30, "aimet_tensorflow.keras.defs.ModuleCompRatioPair"]], "spatialsvdparameters (class in aimet_tensorflow.keras.defs)": [[30, 
"aimet_tensorflow.keras.defs.SpatialSvdParameters"]], "spatialsvdparameters.automodeparams (class in aimet_tensorflow.keras.defs)": [[30, "aimet_tensorflow.keras.defs.SpatialSvdParameters.AutoModeParams"]], "spatialsvdparameters.manualmodeparams (class in aimet_tensorflow.keras.defs)": [[30, "aimet_tensorflow.keras.defs.SpatialSvdParameters.ManualModeParams"]], "spatialsvdparameters.mode (class in aimet_tensorflow.keras.defs)": [[30, "aimet_tensorflow.keras.defs.SpatialSvdParameters.Mode"]], "auto (aimet_tensorflow.keras.defs.spatialsvdparameters.mode attribute)": [[30, "aimet_tensorflow.keras.defs.SpatialSvdParameters.Mode.auto"]], "channel_pruning (aimet_common.defs.compressionscheme attribute)": [[30, "aimet_common.defs.CompressionScheme.channel_pruning"]], "compress_model() (aimet_tensorflow.keras.compress.modelcompressor static method)": [[30, "aimet_tensorflow.keras.compress.ModelCompressor.compress_model"]], "mac (aimet_common.defs.costmetric attribute)": [[30, "aimet_common.defs.CostMetric.mac"]], "manual (aimet_tensorflow.keras.defs.spatialsvdparameters.mode attribute)": [[30, "aimet_tensorflow.keras.defs.SpatialSvdParameters.Mode.manual"]], "memory (aimet_common.defs.costmetric attribute)": [[30, "aimet_common.defs.CostMetric.memory"]], "spatial_svd (aimet_common.defs.compressionscheme attribute)": [[30, "aimet_common.defs.CompressionScheme.spatial_svd"]], "weight_svd (aimet_common.defs.compressionscheme attribute)": [[30, "aimet_common.defs.CompressionScheme.weight_svd"]], "equalize_model() (in module aimet_tensorflow.keras.cross_layer_equalization)": [[31, "aimet_tensorflow.keras.cross_layer_equalization.equalize_model"]], "layeroutpututil (class in aimet_tensorflow.keras.layer_output_utils)": [[32, "aimet_tensorflow.keras.layer_output_utils.LayerOutputUtil"]], "generate_layer_outputs() (aimet_tensorflow.keras.layer_output_utils.layeroutpututil method)": [[32, "aimet_tensorflow.keras.layer_output_utils.LayerOutputUtil.generate_layer_outputs"]], "callbackfunc (class in aimet_common.defs)": [[33, "aimet_common.defs.CallbackFunc"], [45, "aimet_common.defs.CallbackFunc"], [59, "aimet_common.defs.CallbackFunc"]], "choose_fast_mixed_precision() (in module aimet_tensorflow.keras.mixed_precision)": [[33, "aimet_tensorflow.keras.mixed_precision.choose_fast_mixed_precision"]], "choose_mixed_precision() (in module aimet_tensorflow.keras.mixed_precision)": [[33, "aimet_tensorflow.keras.mixed_precision.choose_mixed_precision"]], "prepare_model() (in module aimet_tensorflow.keras.model_preparer)": [[35, "aimet_tensorflow.keras.model_preparer.prepare_model"]], "clssetinfo (class in aimet_tensorflow.keras.cross_layer_equalization)": [[36, "aimet_tensorflow.keras.cross_layer_equalization.ClsSetInfo"]], "clssetinfo.clssetlayerpairinfo (class in aimet_tensorflow.keras.cross_layer_equalization)": [[36, "aimet_tensorflow.keras.cross_layer_equalization.ClsSetInfo.ClsSetLayerPairInfo"]], "bias_fold() (in module aimet_tensorflow.keras.cross_layer_equalization.highbiasfold)": [[36, "aimet_tensorflow.keras.cross_layer_equalization.HighBiasFold.bias_fold"], [36, "id0"]], "fold_all_batch_norms() (in module aimet_tensorflow.keras.batch_norm_fold)": [[36, "aimet_tensorflow.keras.batch_norm_fold.fold_all_batch_norms"]], "fold_given_batch_norms() (in module aimet_tensorflow.keras.batch_norm_fold)": [[36, "aimet_tensorflow.keras.batch_norm_fold.fold_given_batch_norms"]], "scale_cls_sets() (in module aimet_tensorflow.keras.cross_layer_equalization.crosslayerscaling)": [[36, 
"aimet_tensorflow.keras.cross_layer_equalization.CrossLayerScaling.scale_cls_sets"]], "scale_model() (in module aimet_tensorflow.keras.cross_layer_equalization.crosslayerscaling)": [[36, "aimet_tensorflow.keras.cross_layer_equalization.CrossLayerScaling.scale_model"]], "quantanalyzer (class in aimet_tensorflow.keras.quant_analyzer)": [[37, "aimet_tensorflow.keras.quant_analyzer.QuantAnalyzer"]], "analyze() (aimet_tensorflow.keras.quant_analyzer.quantanalyzer method)": [[37, "aimet_tensorflow.keras.quant_analyzer.QuantAnalyzer.analyze"]], "quantizationsimmodel (class in aimet_tensorflow.keras.quantsim)": [[39, "aimet_tensorflow.keras.quantsim.QuantizationSimModel"]], "compute_encodings() (aimet_tensorflow.keras.quantsim.quantizationsimmodel method)": [[39, "aimet_tensorflow.keras.quantsim.QuantizationSimModel.compute_encodings"]], "export() (aimet_tensorflow.keras.quantsim.quantizationsimmodel method)": [[39, "aimet_tensorflow.keras.quantsim.QuantizationSimModel.export"]], "adaroundparameters (class in aimet_onnx.adaround.adaround_weight)": [[41, "aimet_onnx.adaround.adaround_weight.AdaroundParameters"]], "apply_adaround() (in module aimet_onnx.adaround.adaround_weight.adaround)": [[41, "aimet_onnx.adaround.adaround_weight.Adaround.apply_adaround"]], "autoquant (class in aimet_onnx.auto_quant_v2)": [[42, "aimet_onnx.auto_quant_v2.AutoQuant"]], "get_quant_scheme_candidates() (aimet_onnx.auto_quant_v2.autoquant method)": [[42, "aimet_onnx.auto_quant_v2.AutoQuant.get_quant_scheme_candidates"]], "optimize() (aimet_onnx.auto_quant_v2.autoquant method)": [[42, "aimet_onnx.auto_quant_v2.AutoQuant.optimize"]], "run_inference() (aimet_onnx.auto_quant_v2.autoquant method)": [[42, "aimet_onnx.auto_quant_v2.AutoQuant.run_inference"]], "set_adaround_params() (aimet_onnx.auto_quant_v2.autoquant method)": [[42, "aimet_onnx.auto_quant_v2.AutoQuant.set_adaround_params"]], "set_quant_scheme_candidates() (aimet_onnx.auto_quant_v2.autoquant method)": [[42, "aimet_onnx.auto_quant_v2.AutoQuant.set_quant_scheme_candidates"]], "equalize_model() (in module aimet_onnx.cross_layer_equalization)": [[43, "aimet_onnx.cross_layer_equalization.equalize_model"]], "layeroutpututil (class in aimet_onnx.layer_output_utils)": [[44, "aimet_onnx.layer_output_utils.LayerOutputUtil"]], "generate_layer_outputs() (aimet_onnx.layer_output_utils.layeroutpututil method)": [[44, "aimet_onnx.layer_output_utils.LayerOutputUtil.generate_layer_outputs"]], "evalcallbackfactory (class in aimet_onnx.amp.mixed_precision_algo)": [[45, "aimet_onnx.amp.mixed_precision_algo.EvalCallbackFactory"]], "quantizergroup (class in aimet_onnx.amp.quantizer_groups)": [[45, "aimet_onnx.amp.quantizer_groups.QuantizerGroup"]], "choose_mixed_precision() (in module aimet_onnx.mixed_precision)": [[45, "aimet_onnx.mixed_precision.choose_mixed_precision"]], "get_activation_quantizers() (aimet_onnx.amp.quantizer_groups.quantizergroup method)": [[45, "aimet_onnx.amp.quantizer_groups.QuantizerGroup.get_activation_quantizers"]], "get_active_quantizers() (aimet_onnx.amp.quantizer_groups.quantizergroup method)": [[45, "aimet_onnx.amp.quantizer_groups.QuantizerGroup.get_active_quantizers"]], "get_candidate() (aimet_onnx.amp.quantizer_groups.quantizergroup method)": [[45, "aimet_onnx.amp.quantizer_groups.QuantizerGroup.get_candidate"]], "get_param_quantizers() (aimet_onnx.amp.quantizer_groups.quantizergroup method)": [[45, "aimet_onnx.amp.quantizer_groups.QuantizerGroup.get_param_quantizers"]], "set_quantizers_to_candidate() (aimet_onnx.amp.quantizer_groups.quantizergroup 
method)": [[45, "aimet_onnx.amp.quantizer_groups.QuantizerGroup.set_quantizers_to_candidate"]], "sqnr() (aimet_onnx.amp.mixed_precision_algo.evalcallbackfactory method)": [[45, "aimet_onnx.amp.mixed_precision_algo.EvalCallbackFactory.sqnr"]], "to_list() (aimet_onnx.amp.quantizer_groups.quantizergroup method)": [[45, "aimet_onnx.amp.quantizer_groups.QuantizerGroup.to_list"]], "quantanalyzer (class in aimet_onnx.quant_analyzer)": [[46, "aimet_onnx.quant_analyzer.QuantAnalyzer"]], "analyze() (aimet_onnx.quant_analyzer.quantanalyzer method)": [[46, "aimet_onnx.quant_analyzer.QuantAnalyzer.analyze"]], "check_model_sensitivity_to_quantization() (aimet_onnx.quant_analyzer.quantanalyzer method)": [[46, "aimet_onnx.quant_analyzer.QuantAnalyzer.check_model_sensitivity_to_quantization"]], "create_quantsim_and_encodings() (aimet_onnx.quant_analyzer.quantanalyzer method)": [[46, "aimet_onnx.quant_analyzer.QuantAnalyzer.create_quantsim_and_encodings"]], "enable_per_layer_mse_loss() (aimet_onnx.quant_analyzer.quantanalyzer method)": [[46, "aimet_onnx.quant_analyzer.QuantAnalyzer.enable_per_layer_mse_loss"]], "export_per_layer_encoding_min_max_range() (aimet_onnx.quant_analyzer.quantanalyzer method)": [[46, "aimet_onnx.quant_analyzer.QuantAnalyzer.export_per_layer_encoding_min_max_range"]], "export_per_layer_mse_loss() (aimet_onnx.quant_analyzer.quantanalyzer method)": [[46, "aimet_onnx.quant_analyzer.QuantAnalyzer.export_per_layer_mse_loss"]], "export_per_layer_stats_histogram() (aimet_onnx.quant_analyzer.quantanalyzer method)": [[46, "aimet_onnx.quant_analyzer.QuantAnalyzer.export_per_layer_stats_histogram"]], "perform_per_layer_analysis_by_disabling_quantizers() (aimet_onnx.quant_analyzer.quantanalyzer method)": [[46, "aimet_onnx.quant_analyzer.QuantAnalyzer.perform_per_layer_analysis_by_disabling_quantizers"]], "perform_per_layer_analysis_by_enabling_quantizers() (aimet_onnx.quant_analyzer.quantanalyzer method)": [[46, "aimet_onnx.quant_analyzer.QuantAnalyzer.perform_per_layer_analysis_by_enabling_quantizers"]], "quantizationsimmodel (class in aimet_onnx.quantsim)": [[48, "aimet_onnx.quantsim.QuantizationSimModel"]], "compute_encodings() (aimet_onnx.quantsim.quantizationsimmodel method)": [[48, "aimet_onnx.quantsim.QuantizationSimModel.compute_encodings"]], "export() (aimet_onnx.quantsim.quantizationsimmodel method)": [[48, "aimet_onnx.quantsim.QuantizationSimModel.export"]], "adaroundparameters (class in aimet_torch.v1.adaround.adaround_weight)": [[51, "aimet_torch.v1.adaround.adaround_weight.AdaroundParameters"]], "apply_adaround() (in module aimet_torch.v1.adaround.adaround_weight.adaround)": [[51, "aimet_torch.v1.adaround.adaround_weight.Adaround.apply_adaround"]], "check_model_arch() (in module aimet_torch.arch_checker.arch_checker.archchecker)": [[52, "aimet_torch.arch_checker.arch_checker.ArchChecker.check_model_arch"]], "autoquant (class in aimet_torch.v1.auto_quant)": [[53, "aimet_torch.v1.auto_quant.AutoQuant"]], "fold_all_batch_norms_to_scale() (in module aimet_torch.batch_norm_fold)": [[54, "aimet_torch.batch_norm_fold.fold_all_batch_norms_to_scale"]], "reestimate_bn_stats() (in module aimet_torch.bn_reestimation)": [[54, "aimet_torch.bn_reestimation.reestimate_bn_stats"]], "activationtype (class in aimet_common.defs)": [[55, "aimet_common.defs.ActivationType"]], "convbninfotype (class in aimet_common.bias_correction)": [[55, "aimet_common.bias_correction.ConvBnInfoType"]], "quantparams (class in aimet_torch.v1.quantsim)": [[55, "aimet_torch.v1.quantsim.QuantParams"]], "correct_bias() (in 
module aimet_torch.bias_correction)": [[55, "aimet_torch.bias_correction.correct_bias"]], "no_activation (aimet_common.defs.activationtype attribute)": [[55, "aimet_common.defs.ActivationType.no_activation"]], "relu (aimet_common.defs.activationtype attribute)": [[55, "aimet_common.defs.ActivationType.relu"]], "relu6 (aimet_common.defs.activationtype attribute)": [[55, "aimet_common.defs.ActivationType.relu6"]], "channelpruningparameters (class in aimet_torch.defs)": [[56, "aimet_torch.defs.ChannelPruningParameters"]], "channelpruningparameters.automodeparams (class in aimet_torch.defs)": [[56, "aimet_torch.defs.ChannelPruningParameters.AutoModeParams"]], "channelpruningparameters.manualmodeparams (class in aimet_torch.defs)": [[56, "aimet_torch.defs.ChannelPruningParameters.ManualModeParams"]], "channelpruningparameters.mode (class in aimet_torch.defs)": [[56, "aimet_torch.defs.ChannelPruningParameters.Mode"]], "greedyselectionparameters (class in aimet_common.defs)": [[56, "aimet_common.defs.GreedySelectionParameters"]], "modelcompressor (class in aimet_torch.compress)": [[56, "aimet_torch.compress.ModelCompressor"]], "modulecompratiopair (class in aimet_torch.defs)": [[56, "aimet_torch.defs.ModuleCompRatioPair"]], "spatialsvdparameters (class in aimet_torch.defs)": [[56, "aimet_torch.defs.SpatialSvdParameters"]], "spatialsvdparameters.automodeparams (class in aimet_torch.defs)": [[56, "aimet_torch.defs.SpatialSvdParameters.AutoModeParams"]], "spatialsvdparameters.manualmodeparams (class in aimet_torch.defs)": [[56, "aimet_torch.defs.SpatialSvdParameters.ManualModeParams"]], "spatialsvdparameters.mode (class in aimet_torch.defs)": [[56, "aimet_torch.defs.SpatialSvdParameters.Mode"]], "weightsvdparameters (class in aimet_torch.defs)": [[56, "aimet_torch.defs.WeightSvdParameters"]], "weightsvdparameters.automodeparams (class in aimet_torch.defs)": [[56, "aimet_torch.defs.WeightSvdParameters.AutoModeParams"]], "weightsvdparameters.manualmodeparams (class in aimet_torch.defs)": [[56, "aimet_torch.defs.WeightSvdParameters.ManualModeParams"]], "weightsvdparameters.mode (class in aimet_torch.defs)": [[56, "aimet_torch.defs.WeightSvdParameters.Mode"]], "auto (aimet_torch.defs.channelpruningparameters.mode attribute)": [[56, "aimet_torch.defs.ChannelPruningParameters.Mode.auto"]], "auto (aimet_torch.defs.spatialsvdparameters.mode attribute)": [[56, "aimet_torch.defs.SpatialSvdParameters.Mode.auto"]], "auto (aimet_torch.defs.weightsvdparameters.mode attribute)": [[56, "aimet_torch.defs.WeightSvdParameters.Mode.auto"]], "compress_model() (aimet_torch.compress.modelcompressor static method)": [[56, "aimet_torch.compress.ModelCompressor.compress_model"]], "manual (aimet_torch.defs.channelpruningparameters.mode attribute)": [[56, "aimet_torch.defs.ChannelPruningParameters.Mode.manual"]], "manual (aimet_torch.defs.spatialsvdparameters.mode attribute)": [[56, "aimet_torch.defs.SpatialSvdParameters.Mode.manual"]], "manual (aimet_torch.defs.weightsvdparameters.mode attribute)": [[56, "aimet_torch.defs.WeightSvdParameters.Mode.manual"]], "equalize_model() (in module aimet_torch.cross_layer_equalization)": [[57, "aimet_torch.cross_layer_equalization.equalize_model"]], "layeroutpututil (class in aimet_torch.layer_output_utils)": [[58, "aimet_torch.layer_output_utils.LayerOutputUtil"]], "namingscheme (class in aimet_torch.layer_output_utils)": [[58, "aimet_torch.layer_output_utils.NamingScheme"]], "onnx (aimet_torch.layer_output_utils.namingscheme attribute)": [[58, 
"aimet_torch.layer_output_utils.NamingScheme.ONNX"]], "pytorch (aimet_torch.layer_output_utils.namingscheme attribute)": [[58, "aimet_torch.layer_output_utils.NamingScheme.PYTORCH"]], "torchscript (aimet_torch.layer_output_utils.namingscheme attribute)": [[58, "aimet_torch.layer_output_utils.NamingScheme.TORCHSCRIPT"]], "generate_layer_outputs() (aimet_torch.layer_output_utils.layeroutpututil method)": [[58, "aimet_torch.layer_output_utils.LayerOutputUtil.generate_layer_outputs"]], "evalcallbackfactory (class in aimet_torch.amp.mixed_precision_algo)": [[59, "aimet_torch.amp.mixed_precision_algo.EvalCallbackFactory"]], "quantizergroup (class in aimet_torch.amp.quantizer_groups)": [[59, "aimet_torch.amp.quantizer_groups.QuantizerGroup"]], "choose_mixed_precision() (in module aimet_torch.mixed_precision)": [[59, "aimet_torch.mixed_precision.choose_mixed_precision"]], "get_active_quantizers() (aimet_torch.amp.quantizer_groups.quantizergroup method)": [[59, "aimet_torch.amp.quantizer_groups.QuantizerGroup.get_active_quantizers"]], "get_candidate() (aimet_torch.amp.quantizer_groups.quantizergroup method)": [[59, "aimet_torch.amp.quantizer_groups.QuantizerGroup.get_candidate"]], "get_input_quantizer_modules() (aimet_torch.amp.quantizer_groups.quantizergroup method)": [[59, "aimet_torch.amp.quantizer_groups.QuantizerGroup.get_input_quantizer_modules"]], "set_quantizers_to_candidate() (aimet_torch.amp.quantizer_groups.quantizergroup method)": [[59, "aimet_torch.amp.quantizer_groups.QuantizerGroup.set_quantizers_to_candidate"]], "sqnr() (aimet_torch.amp.mixed_precision_algo.evalcallbackfactory method)": [[59, "aimet_torch.amp.mixed_precision_algo.EvalCallbackFactory.sqnr"]], "to_list() (aimet_torch.amp.quantizer_groups.quantizergroup method)": [[59, "aimet_torch.amp.quantizer_groups.QuantizerGroup.to_list"]], "prepare_model() (in module aimet_torch.model_preparer)": [[61, "aimet_torch.model_preparer.prepare_model"]], "adaptermetadata (class in aimet_torch.peft)": [[64, "aimet_torch.peft.AdapterMetaData"]], "peftquantutils (class in aimet_torch.peft)": [[64, "aimet_torch.peft.PeftQuantUtils"]], "disable_lora_adapters() (aimet_torch.peft.peftquantutils method)": [[64, "aimet_torch.peft.PeftQuantUtils.disable_lora_adapters"]], "enable_adapter_and_load_weights() (aimet_torch.peft.peftquantutils method)": [[64, "aimet_torch.peft.PeftQuantUtils.enable_adapter_and_load_weights"]], "export_adapter_weights() (aimet_torch.peft.peftquantutils method)": [[64, "aimet_torch.peft.PeftQuantUtils.export_adapter_weights"]], "freeze_base_model() (aimet_torch.peft.peftquantutils method)": [[64, "aimet_torch.peft.PeftQuantUtils.freeze_base_model"]], "freeze_base_model_activation_quantizers() (aimet_torch.peft.peftquantutils method)": [[64, "aimet_torch.peft.PeftQuantUtils.freeze_base_model_activation_quantizers"]], "freeze_base_model_param_quantizers() (aimet_torch.peft.peftquantutils method)": [[64, "aimet_torch.peft.PeftQuantUtils.freeze_base_model_param_quantizers"]], "get_fp_lora_layer() (aimet_torch.peft.peftquantutils method)": [[64, "aimet_torch.peft.PeftQuantUtils.get_fp_lora_layer"]], "get_quantized_lora_layer() (aimet_torch.peft.peftquantutils method)": [[64, "aimet_torch.peft.PeftQuantUtils.get_quantized_lora_layer"]], "quantize_lora_scale_with_fixed_range() (aimet_torch.peft.peftquantutils method)": [[64, "aimet_torch.peft.PeftQuantUtils.quantize_lora_scale_with_fixed_range"]], "replace_lora_layers_with_quantizable_layers() (aimet_torch.peft method)": [[64, 
"aimet_torch.peft.replace_lora_layers_with_quantizable_layers"]], "set_bitwidth_for_lora_adapters() (aimet_torch.peft.peftquantutils method)": [[64, "aimet_torch.peft.PeftQuantUtils.set_bitwidth_for_lora_adapters"]], "track_lora_meta_data() (aimet_torch.peft method)": [[64, "aimet_torch.peft.track_lora_meta_data"]], "clssetinfo (class in aimet_torch.cross_layer_equalization)": [[65, "aimet_torch.cross_layer_equalization.ClsSetInfo"]], "clssetinfo.clssetlayerpairinfo (class in aimet_torch.cross_layer_equalization)": [[65, "aimet_torch.cross_layer_equalization.ClsSetInfo.ClsSetLayerPairInfo"]], "bias_fold() (in module aimet_torch.cross_layer_equalization.highbiasfold)": [[65, "aimet_torch.cross_layer_equalization.HighBiasFold.bias_fold"], [65, "id0"]], "fold_all_batch_norms() (in module aimet_torch.batch_norm_fold)": [[65, "aimet_torch.batch_norm_fold.fold_all_batch_norms"]], "fold_given_batch_norms() (in module aimet_torch.batch_norm_fold)": [[65, "aimet_torch.batch_norm_fold.fold_given_batch_norms"]], "scale_cls_sets() (in module aimet_torch.cross_layer_equalization.crosslayerscaling)": [[65, "aimet_torch.cross_layer_equalization.CrossLayerScaling.scale_cls_sets"]], "scale_model() (in module aimet_torch.cross_layer_equalization.crosslayerscaling)": [[65, "aimet_torch.cross_layer_equalization.CrossLayerScaling.scale_model"]], "callbackfunc (class in aimet_common.utils)": [[66, "aimet_common.utils.CallbackFunc"]], "quantanalyzer (class in aimet_torch.v1.quant_analyzer)": [[66, "aimet_torch.v1.quant_analyzer.QuantAnalyzer"]], "analyze() (aimet_torch.v1.quant_analyzer.quantanalyzer method)": [[66, "aimet_torch.v1.quant_analyzer.QuantAnalyzer.analyze"]], "check_model_sensitivity_to_quantization() (aimet_torch.v1.quant_analyzer.quantanalyzer method)": [[66, "aimet_torch.v1.quant_analyzer.QuantAnalyzer.check_model_sensitivity_to_quantization"]], "enable_per_layer_mse_loss() (aimet_torch.v1.quant_analyzer.quantanalyzer method)": [[66, "aimet_torch.v1.quant_analyzer.QuantAnalyzer.enable_per_layer_mse_loss"]], "export_per_layer_encoding_min_max_range() (aimet_torch.v1.quant_analyzer.quantanalyzer method)": [[66, "aimet_torch.v1.quant_analyzer.QuantAnalyzer.export_per_layer_encoding_min_max_range"]], "export_per_layer_mse_loss() (aimet_torch.v1.quant_analyzer.quantanalyzer method)": [[66, "aimet_torch.v1.quant_analyzer.QuantAnalyzer.export_per_layer_mse_loss"]], "export_per_layer_stats_histogram() (aimet_torch.v1.quant_analyzer.quantanalyzer method)": [[66, "aimet_torch.v1.quant_analyzer.QuantAnalyzer.export_per_layer_stats_histogram"]], "perform_per_layer_analysis_by_disabling_quant_wrappers() (aimet_torch.v1.quant_analyzer.quantanalyzer method)": [[66, "aimet_torch.v1.quant_analyzer.QuantAnalyzer.perform_per_layer_analysis_by_disabling_quant_wrappers"]], "perform_per_layer_analysis_by_enabling_quant_wrappers() (aimet_torch.v1.quant_analyzer.quantanalyzer method)": [[66, "aimet_torch.v1.quant_analyzer.QuantAnalyzer.perform_per_layer_analysis_by_enabling_quant_wrappers"]], "quantizationsimmodel (class in aimet_torch.v1.quantsim)": [[68, "aimet_torch.v1.quantsim.QuantizationSimModel"]], "compute_encodings() (aimet_torch.v1.quantsim.quantizationsimmodel method)": [[68, "aimet_torch.v1.quantsim.QuantizationSimModel.compute_encodings"]], "export() (aimet_torch.v1.quantsim.quantizationsimmodel method)": [[68, "aimet_torch.v1.quantsim.QuantizationSimModel.export"]], "load_checkpoint() (aimet_torch.v1.quantsim method)": [[68, "aimet_torch.v1.quantsim.load_checkpoint"]], "save_checkpoint() 
(aimet_torch.v1.quantsim method)": [[68, "aimet_torch.v1.quantsim.save_checkpoint"]], "scatterdense (class in aimet_torch.nn.modules.custom)": [[69, "aimet_torch.nn.modules.custom.ScatterDense"]], "sparsetensorwrapper (class in aimet_torch.nn.modules.custom)": [[69, "aimet_torch.nn.modules.custom.SparseTensorWrapper"]], "visualizecompression (class in aimet_torch.visualize_serialized_data)": [[70, "aimet_torch.visualize_serialized_data.VisualizeCompression"]], "display_comp_ratio_plot() (aimet_torch.visualize_serialized_data.visualizecompression method)": [[70, "aimet_torch.visualize_serialized_data.VisualizeCompression.display_comp_ratio_plot"]], "display_eval_scores() (aimet_torch.visualize_serialized_data.visualizecompression method)": [[70, "aimet_torch.visualize_serialized_data.VisualizeCompression.display_eval_scores"]], "visualize_changes_after_optimization() (in module aimet_torch.visualize_model)": [[71, "aimet_torch.visualize_model.visualize_changes_after_optimization"]], "visualize_relative_weight_ranges_to_identify_problematic_layers() (in module aimet_torch.visualize_model)": [[71, "aimet_torch.visualize_model.visualize_relative_weight_ranges_to_identify_problematic_layers"]], "visualize_weight_ranges() (in module aimet_torch.visualize_model)": [[71, "aimet_torch.visualize_model.visualize_weight_ranges"]], "quantizationmixin (class in aimet_torch.v2.nn)": [[77, "aimet_torch.v2.nn.QuantizationMixin"], [88, "aimet_torch.v2.nn.QuantizationMixin"]], "__quant_init__() (aimet_torch.v2.nn.quantizationmixin method)": [[77, "aimet_torch.v2.nn.QuantizationMixin.__quant_init__"], [88, "aimet_torch.v2.nn.QuantizationMixin.__quant_init__"]], "compute_encodings() (aimet_torch.v2.nn.quantizationmixin method)": [[77, "aimet_torch.v2.nn.QuantizationMixin.compute_encodings"], [88, "aimet_torch.v2.nn.QuantizationMixin.compute_encodings"]], "forward() (aimet_torch.v2.nn.quantizationmixin method)": [[77, "aimet_torch.v2.nn.QuantizationMixin.forward"], [88, "aimet_torch.v2.nn.QuantizationMixin.forward"]], "from_module() (aimet_torch.v2.nn.quantizationmixin class method)": [[77, "aimet_torch.v2.nn.QuantizationMixin.from_module"]], "get_default_kernel() (aimet_torch.v2.nn.quantizationmixin class method)": [[77, "aimet_torch.v2.nn.QuantizationMixin.get_default_kernel"]], "get_kernel() (aimet_torch.v2.nn.quantizationmixin method)": [[77, "aimet_torch.v2.nn.QuantizationMixin.get_kernel"]], "implements() (aimet_torch.v2.nn.quantizationmixin class method)": [[77, "aimet_torch.v2.nn.QuantizationMixin.implements"]], "input_quantizers (aimet_torch.v2.nn.quantizationmixin attribute)": [[77, "aimet_torch.v2.nn.QuantizationMixin.input_quantizers"], [88, "aimet_torch.v2.nn.QuantizationMixin.input_quantizers"]], "output_quantizers (aimet_torch.v2.nn.quantizationmixin attribute)": [[77, "aimet_torch.v2.nn.QuantizationMixin.output_quantizers"], [88, "aimet_torch.v2.nn.QuantizationMixin.output_quantizers"]], "param_quantizers (aimet_torch.v2.nn.quantizationmixin attribute)": [[77, "aimet_torch.v2.nn.QuantizationMixin.param_quantizers"], [88, "aimet_torch.v2.nn.QuantizationMixin.param_quantizers"]], "set_default_kernel() (aimet_torch.v2.nn.quantizationmixin class method)": [[77, "aimet_torch.v2.nn.QuantizationMixin.set_default_kernel"]], "set_kernel() (aimet_torch.v2.nn.quantizationmixin method)": [[77, "aimet_torch.v2.nn.QuantizationMixin.set_kernel"]], "quantize (class in aimet_torch.v2.quantization.affine)": [[78, "aimet_torch.v2.quantization.affine.Quantize"]], "quantizedequantize (class in 
aimet_torch.v2.quantization.affine)": [[78, "aimet_torch.v2.quantization.affine.QuantizeDequantize"]], "aimet_torch.v2.quantization.affine": [[78, "module-aimet_torch.v2.quantization.affine"]], "dequantize() (in module aimet_torch.v2.quantization.affine)": [[78, "aimet_torch.v2.quantization.affine.dequantize"]], "module": [[78, "module-aimet_torch.v2.quantization.affine"], [80, "module-aimet_torch.v2.quantization.float"]], "quantize() (in module aimet_torch.v2.quantization.affine)": [[78, "aimet_torch.v2.quantization.affine.quantize"]], "quantize_dequantize() (in module aimet_torch.v2.quantization.affine)": [[78, "aimet_torch.v2.quantization.affine.quantize_dequantize"]], "floatquantizedequantize (class in aimet_torch.v2.quantization.float)": [[79, "aimet_torch.v2.quantization.float.FloatQuantizeDequantize"], [80, "aimet_torch.v2.quantization.float.FloatQuantizeDequantize"]], "quantizedequantize (class in aimet_torch.v2.quantization.float)": [[79, "aimet_torch.v2.quantization.float.QuantizeDequantize"], [80, "aimet_torch.v2.quantization.float.QuantizeDequantize"]], "aimet_torch.v2.quantization.float": [[80, "module-aimet_torch.v2.quantization.float"]], "dequantizedtensor (class in aimet_torch.v2.quantization.tensor)": [[81, "aimet_torch.v2.quantization.tensor.DequantizedTensor"]], "quantizedtensor (class in aimet_torch.v2.quantization.tensor)": [[81, "aimet_torch.v2.quantization.tensor.QuantizedTensor"]], "dequantize() (aimet_torch.v2.quantization.tensor.dequantizedtensor method)": [[81, "aimet_torch.v2.quantization.tensor.DequantizedTensor.dequantize"]], "dequantize() (aimet_torch.v2.quantization.tensor.quantizedtensor method)": [[81, "aimet_torch.v2.quantization.tensor.QuantizedTensor.dequantize"]], "quantize() (aimet_torch.v2.quantization.tensor.dequantizedtensor method)": [[81, "aimet_torch.v2.quantization.tensor.DequantizedTensor.quantize"]], "quantize() (aimet_torch.v2.quantization.tensor.quantizedtensor method)": [[81, "aimet_torch.v2.quantization.tensor.QuantizedTensor.quantize"]], "quantized_repr() (aimet_torch.v2.quantization.tensor.dequantizedtensor method)": [[81, "aimet_torch.v2.quantization.tensor.DequantizedTensor.quantized_repr"]], "quantized_repr() (aimet_torch.v2.quantization.tensor.quantizedtensor method)": [[81, "aimet_torch.v2.quantization.tensor.QuantizedTensor.quantized_repr"]], "visualize_stats() (in module aimet_torch.v2.visualization_tools)": [[82, "aimet_torch.v2.visualization_tools.visualize_stats"]], "set_activation_quantizers_to_float() (in module aimet_torch.v2.quantsim.config_utils)": [[83, "aimet_torch.v2.quantsim.config_utils.set_activation_quantizers_to_float"]], "set_blockwise_quantization_for_weights() (in module aimet_torch.v2.quantsim.config_utils)": [[83, "aimet_torch.v2.quantsim.config_utils.set_blockwise_quantization_for_weights"]], "set_grouped_blockwise_quantization_for_weights() (in module aimet_torch.v2.quantsim.config_utils)": [[83, "aimet_torch.v2.quantsim.config_utils.set_grouped_blockwise_quantization_for_weights"]], "encodinganalyzer (class in aimet_torch.v2.quantization.encoding_analyzer)": [[84, "aimet_torch.v2.quantization.encoding_analyzer.EncodingAnalyzer"]], "minmaxencodinganalyzer (class in aimet_torch.v2.quantization.encoding_analyzer)": [[84, "aimet_torch.v2.quantization.encoding_analyzer.MinMaxEncodingAnalyzer"]], "percentileencodinganalyzer (class in aimet_torch.v2.quantization.encoding_analyzer)": [[84, "aimet_torch.v2.quantization.encoding_analyzer.PercentileEncodingAnalyzer"]], "sqnrencodinganalyzer (class in 
aimet_torch.v2.quantization.encoding_analyzer)": [[84, "aimet_torch.v2.quantization.encoding_analyzer.SqnrEncodingAnalyzer"]], "compute_encodings() (aimet_torch.v2.quantization.encoding_analyzer.encodinganalyzer method)": [[84, "aimet_torch.v2.quantization.encoding_analyzer.EncodingAnalyzer.compute_encodings"]], "reset_stats() (aimet_torch.v2.quantization.encoding_analyzer.encodinganalyzer method)": [[84, "aimet_torch.v2.quantization.encoding_analyzer.EncodingAnalyzer.reset_stats"]], "update_stats() (aimet_torch.v2.quantization.encoding_analyzer.encodinganalyzer method)": [[84, "aimet_torch.v2.quantization.encoding_analyzer.EncodingAnalyzer.update_stats"]], "gptvqparameters (class in aimet_torch.gptvq.defs)": [[86, "aimet_torch.gptvq.defs.GPTVQParameters"]], "apply_gptvq() (in module aimet_torch.gptvq.gptvq_weight.gptvq)": [[86, "aimet_torch.gptvq.gptvq_weight.GPTVQ.apply_gptvq"]], "quantize (class in aimet_torch.v2.quantization.affine.quantizer)": [[89, "aimet_torch.v2.quantization.affine.quantizer.Quantize"]], "quantizedequantize (class in aimet_torch.v2.quantization.affine.quantizer)": [[89, "aimet_torch.v2.quantization.affine.quantizer.QuantizeDequantize"]], "quantizerbase (class in aimet_torch.v2.quantization.affine.quantizer)": [[89, "aimet_torch.v2.quantization.affine.quantizer.QuantizerBase"]], "allow_overwrite() (aimet_torch.v2.quantization.affine.quantizer.quantizerbase method)": [[89, "aimet_torch.v2.quantization.affine.quantizer.QuantizerBase.allow_overwrite"]], "compute_encodings() (aimet_torch.v2.quantization.affine.quantizer.quantizerbase method)": [[89, "aimet_torch.v2.quantization.affine.quantizer.QuantizerBase.compute_encodings"]], "forward() (aimet_torch.v2.quantization.affine.quantizer.quantize method)": [[89, "aimet_torch.v2.quantization.affine.quantizer.Quantize.forward"]], "forward() (aimet_torch.v2.quantization.affine.quantizer.quantizedequantize method)": [[89, "aimet_torch.v2.quantization.affine.quantizer.QuantizeDequantize.forward"]], "get_encoding() (aimet_torch.v2.quantization.affine.quantizer.quantizerbase method)": [[89, "aimet_torch.v2.quantization.affine.quantizer.QuantizerBase.get_encoding"]], "get_legacy_encodings() (aimet_torch.v2.quantization.affine.quantizer.quantizerbase method)": [[89, "aimet_torch.v2.quantization.affine.quantizer.QuantizerBase.get_legacy_encodings"]], "is_initialized() (aimet_torch.v2.quantization.affine.quantizer.quantizerbase method)": [[89, "aimet_torch.v2.quantization.affine.quantizer.QuantizerBase.is_initialized"]], "register_quantization_parameter() (aimet_torch.v2.quantization.affine.quantizer.quantizerbase method)": [[89, "aimet_torch.v2.quantization.affine.quantizer.QuantizerBase.register_quantization_parameter"]], "set_legacy_encodings() (aimet_torch.v2.quantization.affine.quantizer.quantizerbase method)": [[89, "aimet_torch.v2.quantization.affine.quantizer.QuantizerBase.set_legacy_encodings"]]}})
\ No newline at end of file
+Search.setIndex({"docnames": ["Examples/onnx/quantization/AMP", "Examples/onnx/quantization/adaround", "Examples/onnx/quantization/cle", "Examples/onnx/quantization/quantsim", "Examples/tensorflow/quantization/keras/KerasAMP", "Examples/tensorflow/quantization/keras/adaround", "Examples/tensorflow/quantization/keras/autoquant", "Examples/tensorflow/quantization/keras/bn_reestimation", "Examples/tensorflow/quantization/keras/keras_transformer_qat", "Examples/tensorflow/quantization/keras/model_preparer", "Examples/tensorflow/quantization/keras/qat", "Examples/tensorflow/quantization/keras/qat_range_learning", "Examples/tensorflow/quantization/keras/quant_analyzer", "Examples/tensorflow/quantization/keras/quantsim_adaround_pcq", "Examples/tensorflow/quantization/keras/quantsim_cle", "Examples/torch/compression/channel_pruning", "Examples/torch/compression/spatial_svd", "Examples/torch/compression/spatial_svd_channel_pruning", "Examples/torch/quantization/AMP", "Examples/torch/quantization/adaround", "Examples/torch/quantization/autoquant", "Examples/torch/quantization/bn_reestimation", "Examples/torch/quantization/cle_bc", "Examples/torch/quantization/qat", "Examples/torch/quantization/qat_range_learning", "Examples/torch/quantization/quant_analyzer", "api_docs/index", "api_docs/keras", "api_docs/keras_adaround", "api_docs/keras_batchnorm_re_estimation", "api_docs/keras_compression", "api_docs/keras_cross_layer_equalization", "api_docs/keras_layer_output_generation", "api_docs/keras_mixed_precision", "api_docs/keras_model_guidelines", "api_docs/keras_model_preparer", "api_docs/keras_primitive_apis_cle", "api_docs/keras_quant_analyzer", "api_docs/keras_quantization", "api_docs/keras_quantsim", "api_docs/onnx", "api_docs/onnx_adaround", "api_docs/onnx_auto_quant", "api_docs/onnx_cross_layer_equalization", "api_docs/onnx_layer_output_generation", "api_docs/onnx_mixed_precision", "api_docs/onnx_quant_analyzer", "api_docs/onnx_quantization", "api_docs/onnx_quantsim", "api_docs/quantization_encoding_specification", "api_docs/torch", "api_docs/torch_adaround", "api_docs/torch_architecture_checker", "api_docs/torch_auto_quant", "api_docs/torch_batchnorm_re_estimation", "api_docs/torch_bias_correction", "api_docs/torch_compress", "api_docs/torch_cross_layer_equalization", "api_docs/torch_layer_output_generation", "api_docs/torch_mixed_precision", "api_docs/torch_model_guidelines", "api_docs/torch_model_preparer", "api_docs/torch_model_validator", "api_docs/torch_multi_gpu", "api_docs/torch_peft_lora", "api_docs/torch_primitive_apis_cle", "api_docs/torch_quant_analyzer", "api_docs/torch_quantization", "api_docs/torch_quantsim", "api_docs/torch_spconv_custom_onnx_export", "api_docs/torch_visualization_compression", "api_docs/torch_visualization_quantization", "api_docs/v2_migrgation_guide", "install/index", "install/install_docker", "install/install_host", "toplevelhidden", "torch_docs/api/nn.quantization_mixin", "torch_docs/api/quantization/affine/index", "torch_docs/api/quantization/float/FloatQuantizeDequantize", "torch_docs/api/quantization/float/index", "torch_docs/api/quantization/tensor", "torch_docs/api/visualization_tools", "torch_docs/blockwise_quantization", "torch_docs/encoding_analyzer", "torch_docs/examples/ptq", "torch_docs/gptvq", "torch_docs/index", "torch_docs/quantized_modules", "torch_docs/quantizer", "torch_docs/tutorials/quickstart_guide", "user_guide/adaround", "user_guide/auto_quant", "user_guide/bn_reestimation", "user_guide/channel_pruning", 
"user_guide/compression_feature_guidebook", "user_guide/examples", "user_guide/greedy_compression_ratio_selection", "user_guide/index", "user_guide/known_issues", "user_guide/model_compression", "user_guide/model_guidelines", "user_guide/model_quantization", "user_guide/post_training_quant_techniques", "user_guide/quant_analyzer", "user_guide/quantization_aware_training", "user_guide/quantization_configuration", "user_guide/quantization_feature_guidebook", "user_guide/quantization_sim", "user_guide/release_notes", "user_guide/spatial_svd", "user_guide/visualization_compression", "user_guide/visualization_quant", "user_guide/weight_svd", "user_guide/winnowing"], "filenames": ["Examples/onnx/quantization/AMP.ipynb", "Examples/onnx/quantization/adaround.ipynb", "Examples/onnx/quantization/cle.ipynb", "Examples/onnx/quantization/quantsim.ipynb", "Examples/tensorflow/quantization/keras/KerasAMP.ipynb", "Examples/tensorflow/quantization/keras/adaround.ipynb", "Examples/tensorflow/quantization/keras/autoquant.ipynb", "Examples/tensorflow/quantization/keras/bn_reestimation.ipynb", "Examples/tensorflow/quantization/keras/keras_transformer_qat.ipynb", "Examples/tensorflow/quantization/keras/model_preparer.ipynb", "Examples/tensorflow/quantization/keras/qat.ipynb", "Examples/tensorflow/quantization/keras/qat_range_learning.ipynb", "Examples/tensorflow/quantization/keras/quant_analyzer.ipynb", "Examples/tensorflow/quantization/keras/quantsim_adaround_pcq.ipynb", "Examples/tensorflow/quantization/keras/quantsim_cle.ipynb", "Examples/torch/compression/channel_pruning.ipynb", "Examples/torch/compression/spatial_svd.ipynb", "Examples/torch/compression/spatial_svd_channel_pruning.ipynb", "Examples/torch/quantization/AMP.ipynb", "Examples/torch/quantization/adaround.ipynb", "Examples/torch/quantization/autoquant.ipynb", "Examples/torch/quantization/bn_reestimation.ipynb", "Examples/torch/quantization/cle_bc.ipynb", "Examples/torch/quantization/qat.ipynb", "Examples/torch/quantization/qat_range_learning.ipynb", "Examples/torch/quantization/quant_analyzer.ipynb", "api_docs/index.rst", "api_docs/keras.rst", "api_docs/keras_adaround.rst", "api_docs/keras_batchnorm_re_estimation.rst", "api_docs/keras_compression.rst", "api_docs/keras_cross_layer_equalization.rst", "api_docs/keras_layer_output_generation.rst", "api_docs/keras_mixed_precision.rst", "api_docs/keras_model_guidelines.rst", "api_docs/keras_model_preparer.rst", "api_docs/keras_primitive_apis_cle.rst", "api_docs/keras_quant_analyzer.rst", "api_docs/keras_quantization.rst", "api_docs/keras_quantsim.rst", "api_docs/onnx.rst", "api_docs/onnx_adaround.rst", "api_docs/onnx_auto_quant.rst", "api_docs/onnx_cross_layer_equalization.rst", "api_docs/onnx_layer_output_generation.rst", "api_docs/onnx_mixed_precision.rst", "api_docs/onnx_quant_analyzer.rst", "api_docs/onnx_quantization.rst", "api_docs/onnx_quantsim.rst", "api_docs/quantization_encoding_specification.rst", "api_docs/torch.rst", "api_docs/torch_adaround.rst", "api_docs/torch_architecture_checker.rst", "api_docs/torch_auto_quant.rst", "api_docs/torch_batchnorm_re_estimation.rst", "api_docs/torch_bias_correction.rst", "api_docs/torch_compress.rst", "api_docs/torch_cross_layer_equalization.rst", "api_docs/torch_layer_output_generation.rst", "api_docs/torch_mixed_precision.rst", "api_docs/torch_model_guidelines.rst", "api_docs/torch_model_preparer.rst", "api_docs/torch_model_validator.rst", "api_docs/torch_multi_gpu.rst", "api_docs/torch_peft_lora.rst", "api_docs/torch_primitive_apis_cle.rst", 
"api_docs/torch_quant_analyzer.rst", "api_docs/torch_quantization.rst", "api_docs/torch_quantsim.rst", "api_docs/torch_spconv_custom_onnx_export.rst", "api_docs/torch_visualization_compression.rst", "api_docs/torch_visualization_quantization.rst", "api_docs/v2_migrgation_guide.rst", "install/index.rst", "install/install_docker.rst", "install/install_host.rst", "toplevelhidden.rst", "torch_docs/api/nn.quantization_mixin.rst", "torch_docs/api/quantization/affine/index.rst", "torch_docs/api/quantization/float/FloatQuantizeDequantize.rst", "torch_docs/api/quantization/float/index.rst", "torch_docs/api/quantization/tensor.rst", "torch_docs/api/visualization_tools.rst", "torch_docs/blockwise_quantization.rst", "torch_docs/encoding_analyzer.rst", "torch_docs/examples/ptq.rst", "torch_docs/gptvq.rst", "torch_docs/index.rst", "torch_docs/quantized_modules.rst", "torch_docs/quantizer.rst", "torch_docs/tutorials/quickstart_guide.rst", "user_guide/adaround.rst", "user_guide/auto_quant.rst", "user_guide/bn_reestimation.rst", "user_guide/channel_pruning.rst", "user_guide/compression_feature_guidebook.rst", "user_guide/examples.rst", "user_guide/greedy_compression_ratio_selection.rst", "user_guide/index.rst", "user_guide/known_issues.rst", "user_guide/model_compression.rst", "user_guide/model_guidelines.rst", "user_guide/model_quantization.rst", "user_guide/post_training_quant_techniques.rst", "user_guide/quant_analyzer.rst", "user_guide/quantization_aware_training.rst", "user_guide/quantization_configuration.rst", "user_guide/quantization_feature_guidebook.rst", "user_guide/quantization_sim.rst", "user_guide/release_notes.rst", "user_guide/spatial_svd.rst", "user_guide/visualization_compression.rst", "user_guide/visualization_quant.rst", "user_guide/weight_svd.rst", "user_guide/winnowing.rst"], "titles": ["Automatic Mixed-Precision (AMP)", "Adaptive Rounding (AdaRound)", "Cross-Layer Equalization", "Quantization simulation", "Automatic Mixed-Precision (AMP)", "Adaptive Rounding (AdaRound)", "AutoQuant", "Quantization-Aware Training with BatchNorm Re-estimation", "Quantization-Aware Training with a Keras Transformer Model", "Keras Model Preparer", "Quantization-aware training", "Quantization-Aware training with range learning", "Quant Analyzer", "Quantsim and Adaround - Per Channel Quantization (PCQ)", "Cross-Layer Equalization with QuantSim", "Model compression using channel pruning", "Model compression using spatial SVD", "Model compression using spatial SVD and channel pruning", "Automatic Mixed-Precision (AMP)", "Adaptive Rounding (AdaRound)", "AutoQuant", "Quantization-Aware Training with BatchNorm Re-estimation", "Cross-Layer Equalization and Bias Correction", "Quantization-aware training", "Quantization-aware training with range learning", "Quant Analyzer", "Welcome to AI Model Efficiency Toolkit API Docs!", "AIMET TensorFlow APIs", "AIMET TensorFlow AdaRound API", "AIMET TensorFlow BatchNorm Re-estimation APIs", "AIMET TensorFlow Compression API", "AIMET TensorFlow Cross Layer Equalization APIs", "AIMET TensorFlow Layer Output Generation API", "AIMET TensorFlow Mixed Precision API", "TensorFlow Model Guidelines", "TensorFlow Model Preparer API", "AIMET TensorFlow Cross Layer Equalization Primitive API", "AIMET TensorFlow Quant Analyzer API", "AIMET TensorFlow Quantization APIs", "AIMET TensorFlow Quantization SIM API", "AIMET ONNX APIs", "AIMET ONNX AdaRound API", "AIMET ONNX AutoQuant API", "AIMET ONNX Cross Layer Equalization APIs", "AIMET ONNX Layer Output Generation API", "AIMET ONNX Mixed 
Precision API", "AIMET ONNX Quant Analyzer API", "AIMET ONNX Quantization APIs", "AIMET ONNX Quantization SIM API", "Encoding Format Specification", "AIMET PyTorch APIs", "AIMET PyTorch AdaRound API", "Architecture Checker API", "AIMET PyTorch AutoQuant API", "AIMET PyTorch BatchNorm Re-estimation APIs", "AIMET PyTorch Bias Correction API", "AIMET PyTorch Compression API", "AIMET PyTorch Cross Layer Equalization APIs", "AIMET PyTorch Layer Output Generation API", "AIMET PyTorch Mixed Precision API", "PyTorch Model Guidelines", "Model Preparer API", "Model Validator Utility", "PyTorch Multi-GPU support", "PEFT LoRA", "AIMET PyTorch Cross Layer Equalization Primitive API", "AIMET PyTorch Quant Analyzer API", "AIMET PyTorch Quantization APIs", "AIMET PyTorch Quantization SIM API", "AIMET Torch SparseConvolution custom onnx export", "AIMET Visualization Compression API", "AIMET Visualization for Quantization API", "Migrate to aimet_torch.v2", "AIMET Installation", "AIMET Installation in Docker", "AIMET Installation and Setup", "<no title>", "QuantizationMixin", "quantization.affine", "FloatQuantizeDequantize", "quantization.float", "quantization.tensor", "Visualization Tools", "Blockwise Quantization", "Encoding Analyzers", "Post-Training Quantization", "GPTVQ", "AIMET: AI Model Efficiency Toolkit Documentation", "Quantized Modules", "Quantizers", "Quickstart Guide", "AIMET AdaRound", "AIMET AutoQuant", "AIMET Batch Norm Re-estimation", "AIMET channel pruning", "AIMET Compression Features Guidebook", "AIMET examples", "AIMET greedy compression ratio selection", "AI Model Efficiency Toolkit User Guide", "AIMET Known Issues", "AIMET model compression", "Model Guidelines for PyTorch", "AIMET model quantization", "AIMET post-training quantization techniques", "AIMET QuantAnalyzer", "AIMET quantization aware training", "Quantization simulation configuration", "AIMET quantization diagnostics", "AIMET quantization simulation", "AIMET Release Notes", "AIMET spatial SVD", "AIMET visualization", "AIMET visualization for quantization", "AIMET weight SVD", "AIMET winnowing"], "terms": {"show": [0, 1, 2, 3, 4, 5, 6, 7, 8, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 28, 29, 31, 41, 45, 51, 52, 53, 54, 57, 59, 62, 64, 66, 68, 71, 83, 86, 87, 90, 93, 97, 98, 103, 107], "work": [0, 1, 3, 4, 5, 6, 7, 8, 10, 11, 12, 15, 16, 17, 18, 19, 20, 21, 23, 24, 25, 26, 33, 39, 41, 48, 51, 62, 63, 68, 83, 96, 100, 103], "code": [0, 1, 2, 3, 4, 5, 7, 8, 10, 11, 12, 13, 14, 18, 19, 20, 21, 22, 23, 24, 25, 83, 90, 91], "how": [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 28, 29, 31, 33, 41, 45, 48, 49, 51, 52, 53, 54, 55, 57, 59, 62, 64, 66, 67, 68, 69, 72, 83, 86, 88, 90, 96, 100, 102, 103, 104, 106, 107, 108, 110, 113], "us": [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 18, 19, 20, 21, 22, 23, 24, 25, 26, 28, 29, 30, 31, 32, 33, 34, 35, 37, 38, 39, 41, 42, 43, 44, 45, 46, 48, 49, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 72, 73, 75, 77, 78, 81, 82, 83, 84, 86, 87, 88, 89, 90, 93, 95, 96, 98, 101, 103, 104, 105, 106, 107, 108, 109, 111, 112], "aimet": [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 34, 35, 49, 52, 60, 61, 62, 63, 64, 72, 77, 86, 88, 90, 98, 101, 106], "perform": [0, 1, 2, 3, 4, 5, 6, 8, 12, 13, 14, 15, 16, 17, 18, 19, 20, 22, 25, 28, 29, 31, 33, 37, 41, 42, 43, 45, 46, 49, 51, 52, 54, 55, 56, 57, 59, 63, 64, 65, 66, 67, 68, 71, 77, 78, 83, 86, 88, 89, 
90, 92, 93, 94, 95, 97, 100, 102, 103, 104, 105, 107], "auto": [0, 4, 15, 16, 17, 18, 26, 30, 31, 36, 42, 43, 45, 48, 49, 56, 57, 59, 65, 70], "techniqu": [0, 1, 2, 3, 4, 5, 6, 7, 8, 10, 11, 13, 14, 15, 16, 17, 18, 19, 20, 21, 23, 24, 28, 30, 31, 38, 41, 42, 43, 47, 51, 53, 55, 56, 57, 66, 67, 87, 90, 92, 94, 95, 96, 98, 102, 104, 105, 107, 108, 109, 110, 113], "where": [0, 1, 2, 3, 4, 5, 6, 7, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 28, 30, 33, 39, 41, 45, 49, 51, 56, 59, 60, 61, 64, 68, 78, 79, 80, 86, 89, 90, 93, 97, 104, 110, 111, 113, 114], "given": [0, 1, 2, 3, 4, 5, 6, 7, 10, 11, 13, 14, 15, 16, 17, 18, 19, 21, 22, 23, 24, 25, 30, 31, 33, 36, 39, 42, 43, 45, 48, 49, 51, 53, 54, 56, 57, 59, 65, 68, 69, 77, 82, 83, 84, 88, 92, 97, 100, 103], "target": [0, 1, 2, 3, 4, 5, 6, 7, 8, 10, 11, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 30, 32, 35, 39, 42, 44, 48, 49, 51, 54, 56, 58, 59, 64, 68, 87, 93, 95, 97, 98, 100, 102, 107, 108, 109], "find": [0, 1, 5, 6, 7, 8, 10, 11, 13, 14, 15, 16, 17, 18, 19, 20, 22, 23, 24, 33, 36, 45, 51, 55, 59, 62, 65, 68, 71, 97, 102, 104, 105, 108], "bit": [0, 1, 2, 3, 4, 5, 6, 7, 8, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 21, 22, 23, 24, 25, 33, 41, 45, 46, 49, 51, 59, 66, 73, 79, 80, 83, 90, 91, 93, 102, 107, 108, 109], "per": [0, 1, 2, 3, 4, 5, 6, 7, 10, 11, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 30, 37, 45, 46, 49, 54, 56, 58, 59, 64, 66, 68, 83, 86, 88, 93, 103, 104, 106, 107, 108, 109, 111], "meet": [0, 4, 18, 42, 53, 59, 73, 92, 95, 97], "while": [0, 1, 2, 3, 4, 5, 8, 10, 11, 13, 14, 15, 16, 17, 18, 19, 22, 23, 24, 30, 51, 56, 64, 83, 88, 91, 97, 101, 105, 107], "try": [0, 1, 2, 3, 4, 13, 15, 16, 17, 18, 30, 52, 56, 70, 92, 94, 95, 100, 102, 107], "optim": [0, 1, 2, 3, 4, 5, 6, 7, 8, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 28, 30, 33, 37, 38, 39, 41, 42, 45, 47, 48, 51, 52, 53, 56, 59, 67, 68, 70, 71, 83, 86, 90, 91, 92, 98, 100, 102, 105, 108, 109, 111], "infer": [0, 1, 2, 3, 4, 5, 6, 7, 8, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 28, 30, 33, 38, 39, 41, 42, 46, 47, 48, 49, 51, 67, 68, 69, 83, 86, 87, 90, 93, 95, 98, 103, 105, 108, 109], "speed": [0, 1, 2, 3, 4, 5, 6, 7, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 30, 51, 56, 64, 91, 100, 103, 105, 109], "As": [0, 2, 4, 7, 8, 10, 11, 13, 14, 15, 16, 17, 18, 21, 22, 23, 24, 60, 68, 77, 83, 86, 92, 95, 97, 103, 104, 108], "sai": [0, 1, 2, 3, 4, 5, 6, 7, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 61, 95], "particular": [0, 4, 18, 83, 102, 106], "desir": [0, 4, 15, 16, 17, 18, 30, 39, 46, 56, 66, 68, 74, 83, 90, 95, 100, 102, 107], "when": [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 30, 33, 34, 35, 37, 38, 39, 42, 43, 45, 46, 49, 56, 57, 59, 60, 61, 66, 67, 68, 77, 83, 86, 87, 88, 90, 91, 100, 102, 103, 104, 105, 106, 107, 108, 111, 112, 114], "int8": [0, 1, 2, 3, 4, 13, 18, 19, 22, 23, 24, 51, 81, 105, 108, 112], "The": [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 28, 29, 30, 32, 33, 35, 36, 37, 39, 41, 43, 44, 45, 46, 48, 49, 51, 52, 54, 55, 56, 57, 58, 59, 60, 61, 62, 64, 65, 66, 68, 69, 72, 73, 74, 75, 77, 78, 79, 80, 81, 82, 83, 84, 86, 87, 88, 90, 91, 92, 93, 94, 95, 96, 97, 98, 100, 101, 102, 103, 104, 105, 106, 107, 108, 110, 111, 112, 113, 114], "featur": [0, 1, 2, 3, 4, 5, 6, 7, 8, 10, 11, 12, 13, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 34, 35, 36, 38, 43, 49, 51, 
57, 60, 61, 62, 63, 65, 67, 68, 69, 72, 82, 83, 86, 91, 92, 93, 96, 100, 103, 104, 107, 108, 109, 111, 112], "minim": [0, 3, 4, 10, 11, 18, 23, 24, 28, 51, 68, 87, 98, 102, 108], "set": [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 28, 30, 33, 35, 36, 41, 42, 45, 46, 48, 49, 51, 53, 56, 59, 61, 62, 64, 65, 66, 68, 72, 75, 77, 82, 83, 86, 88, 89, 91, 95, 96, 97, 100, 101, 103, 104, 105, 106, 107], "need": [0, 1, 2, 3, 4, 5, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 21, 22, 23, 24, 25, 30, 32, 33, 34, 35, 36, 39, 41, 46, 49, 51, 55, 56, 58, 63, 64, 65, 66, 67, 68, 69, 72, 73, 74, 75, 83, 86, 90, 102, 103, 105, 106, 108, 109, 112], "int16": [0, 4, 18], "get": [0, 1, 2, 3, 4, 7, 8, 9, 12, 30, 32, 33, 36, 44, 45, 52, 56, 58, 59, 61, 64, 68, 71, 73, 74, 75, 86, 100, 112], "It": [0, 1, 2, 3, 4, 5, 7, 8, 10, 11, 12, 13, 14, 18, 19, 22, 23, 24, 25, 30, 31, 33, 35, 36, 37, 41, 42, 43, 44, 45, 46, 48, 49, 51, 56, 61, 65, 66, 68, 90, 93, 94, 97, 105, 114], "should": [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 21, 22, 23, 24, 25, 30, 33, 34, 35, 37, 38, 39, 41, 42, 46, 48, 49, 51, 53, 56, 58, 60, 61, 64, 66, 67, 68, 72, 77, 83, 86, 88, 90, 95], "note": [0, 1, 2, 3, 4, 5, 6, 7, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 33, 35, 36, 37, 39, 41, 42, 43, 44, 45, 46, 48, 53, 56, 57, 59, 63, 64, 65, 66, 68, 69, 72, 74, 75, 83, 86, 90, 98, 111], "choos": [0, 4, 15, 16, 17, 18, 48, 68, 71, 83, 94, 95, 100], "higher": [0, 4, 15, 16, 17, 18, 41, 51, 56, 59, 83, 84, 93, 97, 105, 107], "some": [0, 1, 2, 3, 4, 5, 6, 7, 8, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 22, 23, 24, 25, 30, 35, 36, 38, 39, 51, 56, 61, 65, 67, 68, 71, 74, 75, 88, 90, 91, 95, 97, 101, 102, 103, 105, 107, 108, 114], "necessarili": [0, 4, 18], "involv": [0, 4, 18, 33, 45, 59, 72, 84, 90, 102, 107], "trade": [0, 4, 18, 28, 33, 41, 51], "off": [0, 4, 18, 28, 39, 41, 51, 68, 103], "lower": [0, 4, 13, 18, 33, 41, 51, 72, 83, 97, 102, 107], "sec": [0, 4, 13, 18], "vice": [0, 4, 18, 64, 97, 107, 108], "versa": [0, 4, 18, 64, 97, 107, 108], "altern": [0, 4, 15, 16, 17, 18, 75, 83], "can": [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 28, 30, 31, 32, 33, 34, 35, 36, 38, 39, 41, 44, 45, 46, 48, 49, 51, 52, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 74, 78, 81, 83, 86, 87, 88, 89, 90, 92, 93, 95, 97, 98, 100, 102, 103, 104, 105, 106, 107, 108, 111, 112, 114], "gener": [0, 4, 8, 18, 20, 26, 33, 39, 45, 48, 49, 52, 59, 64, 69, 77, 78, 83, 86, 89, 90, 96, 100, 102, 104, 105, 106, 107, 108], "pareto": [0, 4, 18, 33, 45, 59], "curv": [0, 4, 18, 33, 45, 59, 97], "v": [0, 4, 12, 18, 25, 28, 33, 41, 45, 51, 59, 74, 83], "op": [0, 1, 2, 3, 5, 7, 8, 10, 11, 13, 14, 15, 17, 18, 19, 20, 21, 22, 23, 24, 28, 30, 33, 36, 39, 41, 42, 45, 48, 51, 52, 59, 62, 68, 90, 106, 108, 109], "guid": [0, 4, 6, 18, 19, 20, 22, 23, 24, 25, 30, 56, 67, 72, 87, 95, 103, 107, 109], "user": [0, 1, 2, 3, 4, 5, 6, 7, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 29, 30, 33, 34, 35, 36, 37, 38, 45, 46, 48, 49, 54, 56, 59, 60, 61, 63, 65, 67, 70, 71, 72, 74, 77, 83, 86, 87, 91, 95, 100, 104, 107, 109], "decid": [0, 1, 2, 3, 4, 5, 13, 15, 16, 17, 18, 19, 22, 23, 24, 25, 91, 111], "right": [0, 4, 6, 8, 18, 20, 77, 78, 79, 80, 88, 89, 114], "oper": [0, 3, 4, 9, 10, 11, 18, 23, 24, 30, 35, 60, 61, 62, 72, 77, 88, 90, 101, 102, 103, 106, 107], "point": [0, 1, 2, 4, 5, 6, 7, 8, 10, 11, 13, 14, 15, 
16, 17, 18, 19, 21, 22, 23, 24, 33, 35, 36, 37, 38, 45, 46, 47, 49, 55, 59, 64, 65, 66, 67, 68, 72, 77, 81, 83, 87, 98, 102, 104, 107, 108, 112], "tradeoff": [0, 4, 18], "specif": [0, 1, 4, 5, 7, 9, 13, 18, 19, 21, 30, 33, 35, 39, 56, 68, 77, 83, 88, 90, 92, 93, 98, 101, 102, 103, 106, 109], "abov": [0, 4, 12, 13, 15, 16, 17, 18, 25, 28, 37, 46, 51, 56, 61, 63, 66, 68, 74, 75, 83, 86, 87, 100, 103, 108, 111, 114], "cover": [0, 1, 2, 3, 4, 5, 7, 8, 9, 10, 11, 12, 13, 14, 18, 19, 21, 22, 23, 24, 25, 51, 68, 83, 108], "follow": [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 30, 31, 32, 33, 34, 35, 36, 38, 39, 43, 44, 46, 47, 48, 49, 51, 52, 53, 54, 55, 56, 57, 58, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 72, 73, 74, 75, 77, 83, 86, 88, 91, 93, 94, 95, 96, 97, 98, 100, 101, 102, 104, 105, 106, 107, 108, 110, 113, 114], "instanti": [0, 12, 13, 18, 21, 25, 60, 63, 64, 83, 86, 90, 111], "": [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 21, 22, 23, 24, 25, 28, 30, 31, 33, 34, 35, 36, 37, 39, 41, 42, 45, 46, 48, 49, 51, 54, 55, 56, 59, 61, 65, 66, 68, 69, 71, 75, 77, 83, 87, 88, 89, 90, 94, 95, 97, 99, 100, 102, 103, 104, 105, 107, 108, 112, 114], "fake": [0, 1, 2, 3, 5, 7, 8, 10, 11, 13, 14, 18, 19, 20, 21, 22, 23, 24, 51, 59, 68, 78, 79, 80, 89, 90], "insert": [0, 1, 2, 3, 5, 7, 8, 10, 11, 13, 14, 15, 17, 18, 19, 21, 22, 23, 24, 51, 61, 68, 102, 108], "design": [0, 4, 9, 12, 13, 18, 25, 26, 62, 67, 103], "state": [0, 1, 2, 3, 4, 5, 6, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 22, 23, 24, 25, 33, 69, 90], "art": [0, 1, 2, 3, 4, 5, 6, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 22, 23, 24, 25, 90], "result": [0, 1, 2, 3, 4, 5, 6, 7, 8, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 30, 33, 35, 37, 42, 45, 46, 51, 53, 56, 59, 66, 68, 77, 81, 83, 84, 86, 91, 92, 94, 95, 98, 100, 103, 104, 105, 106, 108], "For": [0, 4, 7, 8, 9, 12, 13, 18, 21, 25, 28, 29, 30, 31, 32, 34, 35, 44, 46, 48, 51, 53, 54, 56, 57, 58, 60, 61, 62, 63, 64, 66, 67, 68, 71, 72, 73, 74, 75, 77, 83, 86, 87, 88, 90, 91, 94, 95, 96, 97, 98, 99, 100, 102, 104, 106, 108], "rel": [0, 1, 2, 3, 4, 5, 6, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 22, 23, 24, 25, 30, 33, 56, 71, 95, 107, 112], "friendli": [0, 1, 2, 3, 4, 5, 6, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 22, 23, 24, 25, 67, 92, 102, 103], "like": [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 29, 31, 35, 36, 45, 51, 57, 59, 65, 68, 72, 83, 86, 87, 90, 92, 98, 100, 102, 104, 105, 106], "resnet18": [0, 1, 2, 3, 5, 6, 10, 11, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 51, 53, 54, 57, 65, 66, 68, 70, 71], "also": [0, 1, 2, 3, 4, 5, 6, 8, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 22, 23, 24, 25, 28, 37, 39, 41, 46, 49, 51, 53, 60, 61, 66, 68, 74, 75, 83, 94, 100, 102, 104, 106, 108, 109, 112, 114], "number": [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 28, 29, 30, 33, 35, 37, 39, 41, 45, 46, 48, 51, 54, 55, 56, 59, 61, 66, 68, 74, 77, 78, 79, 80, 83, 84, 88, 91, 97, 98, 100, 105, 108, 109, 111, 114], "sampl": [0, 1, 2, 3, 4, 5, 6, 7, 8, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 28, 30, 33, 36, 37, 39, 41, 45, 46, 48, 49, 51, 53, 55, 59, 66, 68, 82, 87, 88, 90, 94, 103, 104, 105, 108], "ar": [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 28, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 41, 44, 45, 46, 48, 51, 52, 54, 
55, 56, 58, 59, 60, 61, 62, 64, 65, 66, 67, 68, 72, 73, 74, 75, 77, 78, 79, 80, 83, 84, 86, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 100, 101, 102, 103, 104, 105, 106, 107, 108, 111, 112, 114], "deliber": [0, 4, 12, 13, 18, 25], "chosen": [0, 1, 2, 3, 4, 5, 6, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 22, 23, 24, 25, 96], "have": [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 35, 43, 46, 49, 51, 52, 57, 58, 61, 62, 66, 68, 71, 72, 75, 77, 82, 83, 90, 100, 103, 104, 107], "execut": [0, 1, 2, 3, 4, 5, 6, 7, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 33, 42, 45, 51, 53, 59, 61, 70, 75, 90, 96, 111], "more": [0, 4, 7, 8, 9, 12, 13, 18, 21, 25, 28, 30, 31, 36, 37, 39, 41, 42, 43, 46, 48, 49, 51, 52, 53, 55, 56, 57, 59, 62, 65, 66, 67, 68, 72, 73, 83, 86, 87, 88, 90, 94, 95, 97, 100, 102, 103, 104, 106, 107, 108, 111], "quickli": [0, 4, 12, 13, 18, 25], "reli": [0, 1, 2, 3, 4, 7, 8, 12, 13, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 72], "imagenet": [0, 1, 2, 3, 4, 5, 6, 7, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 30, 33, 48, 51, 54, 55, 68, 96], "task": [0, 1, 2, 3, 4, 5, 6, 7, 12, 13, 14, 15, 16, 17, 18, 19, 21, 22, 23, 24, 25, 49, 111, 112], "imag": [0, 1, 2, 3, 4, 5, 6, 7, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 33, 37, 46, 53, 55, 66, 73, 91, 96, 104], "classif": [0, 1, 2, 3, 4, 5, 6, 7, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 35, 100], "If": [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 28, 30, 31, 33, 34, 35, 36, 39, 41, 42, 45, 46, 48, 49, 51, 53, 55, 56, 57, 58, 59, 60, 61, 62, 64, 65, 66, 67, 68, 69, 70, 71, 72, 74, 75, 77, 78, 79, 80, 83, 86, 88, 89, 90, 91, 92, 95, 97, 101, 102, 103, 104, 106, 107, 111, 112, 114], "you": [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 30, 51, 56, 61, 63, 67, 68, 74, 75, 77, 86, 91, 92, 95, 96, 98, 100, 101, 102, 104, 105, 106, 107, 108, 111, 112], "alreadi": [0, 1, 2, 3, 4, 5, 6, 7, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 57, 65, 68, 74, 77, 107], "version": [0, 1, 2, 3, 4, 5, 6, 7, 8, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 44, 51, 61, 67, 68, 72, 73, 74, 75, 77, 82, 83, 86, 88, 90, 96, 98, 105], "readili": [0, 4, 7, 12, 13, 18, 21, 25], "avail": [0, 1, 2, 3, 4, 7, 12, 13, 15, 16, 17, 18, 19, 21, 22, 23, 24, 25, 31, 51, 53, 61, 66, 68, 73, 90, 101, 104, 106, 107], "pleas": [0, 3, 4, 5, 7, 8, 9, 10, 11, 12, 13, 14, 18, 21, 25, 26, 28, 29, 30, 31, 37, 39, 41, 42, 43, 46, 51, 53, 54, 55, 56, 57, 62, 64, 66, 67, 68, 69, 72, 73, 75, 87, 90, 103], "els": [0, 1, 2, 3, 4, 5, 7, 10, 11, 12, 13, 14, 18, 19, 21, 22, 23, 24, 25, 30, 33, 36, 42, 45, 54, 59, 61, 62, 90, 103], "download": [0, 1, 2, 3, 4, 5, 6, 7, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 73, 74, 75, 90], "from": [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 28, 29, 30, 31, 32, 33, 35, 36, 37, 39, 41, 42, 43, 44, 45, 46, 48, 49, 51, 52, 53, 54, 55, 56, 57, 58, 59, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 73, 77, 78, 79, 80, 81, 83, 84, 86, 88, 89, 90, 91, 94, 95, 96, 97, 98, 100, 101, 102, 103, 104, 105, 106, 107, 108, 114], "appropri": [0, 4, 7, 8, 12, 13, 18, 21, 25, 30, 33, 51, 56, 59, 68, 73, 75, 77, 83, 86, 88, 97, 107], "locat": [0, 4, 7, 12, 13, 18, 21, 25], "e": [0, 4, 7, 12, 13, 18, 21, 25, 28, 33, 37, 38, 39, 41, 45, 46, 48, 49, 
51, 55, 59, 66, 68, 90, 93], "g": [0, 4, 7, 12, 13, 18, 21, 25, 28, 33, 37, 39, 41, 46, 48, 49, 51, 66, 68, 74, 90, 93], "http": [0, 1, 2, 3, 4, 5, 6, 7, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 61, 73, 74, 75, 96, 103, 109, 111], "net": [0, 1, 2, 3, 4, 5, 6, 7, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 96], "org": [0, 1, 2, 3, 4, 5, 6, 7, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 61, 73, 74, 75, 96, 103], "challeng": [0, 1, 2, 3, 4, 5, 6, 7, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25], "lsvrc": [0, 1, 2, 3, 4, 5, 6, 7, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25], "2012": [0, 1, 2, 3, 4, 5, 6, 7, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25], "index": [0, 1, 2, 3, 4, 5, 6, 7, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 46, 53, 66, 69, 86, 88, 109], "php": [0, 4, 7, 12, 13, 18, 21, 25, 96], "note1": [0, 18, 21, 25], "typic": [0, 1, 5, 6, 7, 8, 10, 11, 13, 15, 17, 18, 19, 20, 21, 22, 23, 24, 25, 35, 51, 68, 77, 83, 88, 90, 95, 102, 104, 105, 108], "ha": [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 28, 35, 36, 37, 41, 43, 46, 51, 52, 56, 57, 59, 61, 62, 64, 65, 66, 68, 71, 72, 77, 81, 83, 86, 90, 95, 97, 102, 103, 105, 108, 111, 114], "characterist": [0, 18, 21, 25], "dataload": [0, 1, 2, 3, 4, 5, 6, 7, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 33, 41, 42, 45, 46, 48, 51, 53, 54, 59, 66, 86, 90, 96, 104], "provid": [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 30, 33, 35, 36, 37, 38, 41, 45, 46, 47, 48, 49, 51, 52, 53, 59, 62, 64, 66, 67, 68, 70, 72, 74, 75, 77, 79, 80, 83, 86, 87, 90, 91, 95, 96, 97, 100, 102, 103, 104, 106, 107, 108, 111, 112], "subfold": [0, 1, 2, 3, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25], "train": [0, 4, 9, 18, 20, 26, 28, 29, 30, 35, 37, 38, 42, 46, 47, 48, 51, 53, 54, 56, 63, 66, 67, 73, 87, 91, 92, 93, 96, 98, 100, 104, 107, 108, 109], "val": [0, 1, 2, 3, 10, 11, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25], "valid": [0, 4, 7, 8, 10, 11, 12, 13, 18, 20, 21, 25, 30, 33, 37, 45, 46, 48, 51, 55, 56, 59, 66, 67, 68, 83, 92, 102, 109], "see": [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 28, 29, 30, 31, 35, 39, 41, 42, 43, 51, 53, 54, 55, 56, 57, 66, 67, 68, 73, 78, 88, 89, 90, 91, 92, 93, 94, 96, 98, 100, 102, 104, 106, 107, 108, 111, 112], "descript": [0, 1, 2, 3, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 88, 91, 101], "detail": [0, 1, 2, 3, 4, 5, 6, 7, 8, 10, 11, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 51, 61, 67, 68, 73, 74, 75, 88, 97, 107, 108, 111, 112], "A": [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 29, 30, 33, 36, 37, 39, 41, 42, 45, 46, 48, 51, 52, 53, 54, 55, 56, 57, 59, 63, 64, 65, 66, 68, 70, 71, 83, 91, 95, 97, 100, 102, 104, 105, 108], "subdirectori": [0, 1, 2, 3, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25], "class": [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 28, 30, 32, 33, 34, 35, 36, 37, 39, 41, 42, 44, 45, 46, 48, 51, 52, 53, 55, 56, 58, 59, 60, 61, 62, 64, 65, 66, 68, 69, 70, 72, 77, 79, 83, 84, 86, 89, 90], "file": [0, 1, 2, 3, 6, 8, 12, 13, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 28, 30, 33, 37, 39, 41, 42, 45, 46, 48, 51, 52, 53, 55, 56, 59, 64, 66, 68, 70, 73, 74, 75, 83, 86, 90, 102, 104, 105, 108, 109, 112], 
"each": [0, 1, 2, 3, 4, 5, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 21, 22, 23, 24, 25, 28, 30, 33, 35, 36, 41, 45, 46, 48, 49, 51, 52, 55, 56, 59, 62, 65, 66, 68, 70, 71, 74, 75, 77, 82, 83, 88, 89, 90, 93, 94, 95, 97, 100, 102, 103, 104, 105, 106, 107, 108, 112], "note2": [0, 18, 21, 25], "To": [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 28, 30, 31, 33, 35, 39, 41, 42, 43, 45, 51, 53, 55, 56, 57, 59, 64, 66, 68, 72, 73, 77, 86, 88, 90, 93, 96, 97, 101, 102, 104, 106, 107, 108, 111, 112], "up": [0, 1, 2, 3, 4, 5, 6, 7, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 28, 30, 35, 37, 39, 41, 46, 48, 51, 56, 64, 66, 68, 75, 83, 91, 96, 100, 106, 114], "mai": [0, 2, 4, 7, 8, 12, 13, 14, 18, 21, 22, 25, 35, 42, 45, 49, 51, 53, 59, 61, 64, 68, 73, 74, 75, 77, 81, 82, 83, 86, 88, 91, 100, 102, 103, 104], "reduc": [0, 1, 2, 3, 5, 6, 7, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 51, 64, 83, 86, 88, 100, 102, 103, 105, 107, 109, 114], "subset": [0, 1, 2, 3, 4, 5, 6, 7, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 36, 37, 46, 66, 83, 91, 93, 104, 114], "entir": [0, 1, 2, 3, 5, 6, 7, 8, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 37, 46, 56, 64, 66, 83, 90, 100], "ilsvrc2012": [0, 1, 2, 3, 5, 6, 7, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25], "1000": [0, 1, 2, 3, 4, 5, 6, 7, 8, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 33, 37, 46, 48, 51, 53, 55, 56, 66, 68, 90, 91, 103, 104], "50": [0, 1, 2, 3, 5, 6, 7, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 30, 45, 56, 59, 95], "But": [0, 7, 8, 12, 13, 18, 21, 25, 51, 61, 68, 100], "purpos": [0, 1, 2, 3, 4, 5, 6, 7, 8, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 51, 68, 83], "could": [0, 1, 2, 3, 5, 7, 10, 11, 12, 13, 14, 18, 19, 21, 22, 23, 24, 25, 28, 37, 39, 41, 46, 48, 49, 51, 60, 66, 68, 72, 90, 114], "perhap": [0, 7, 12, 13, 18, 21, 25], "exercis": [0, 7, 12, 13, 18, 21, 25], "left": [0, 1, 2, 3, 5, 7, 8, 10, 11, 12, 13, 14, 18, 19, 21, 22, 23, 24, 25, 78, 79, 80, 89, 97, 114], "upto": [0, 7, 12, 13, 18, 21, 25, 33], "reader": [0, 7, 12, 13, 18, 21, 25], "necessari": [0, 1, 2, 3, 4, 5, 6, 7, 8, 10, 11, 12, 13, 14, 18, 19, 20, 21, 22, 23, 24, 25, 37, 46, 51, 53, 56, 66, 68, 81, 83, 90, 96, 111], "edit": [0, 1, 2, 3, 5, 6, 7, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 48, 49, 51, 68], "cell": [0, 1, 2, 3, 5, 6, 7, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25], "below": [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 31, 32, 33, 34, 35, 36, 43, 44, 49, 51, 57, 58, 60, 64, 67, 72, 74, 75, 77, 78, 83, 86, 88, 89, 90, 93, 94, 96, 97, 100, 102, 103, 104, 106, 107, 108, 111], "specifi": [0, 1, 2, 3, 4, 5, 6, 7, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 30, 33, 39, 41, 45, 46, 49, 51, 56, 59, 66, 68, 71, 74, 75, 78, 79, 80, 83, 89, 90, 92, 94, 95, 97, 100, 106, 108, 111], "directori": [0, 1, 2, 3, 4, 5, 6, 7, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 30, 32, 33, 37, 42, 44, 45, 46, 48, 53, 56, 58, 59, 64, 66, 71, 96], "save": [0, 1, 2, 3, 4, 5, 6, 7, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 28, 30, 32, 33, 36, 37, 39, 41, 42, 44, 45, 46, 48, 51, 53, 56, 58, 59, 64, 65, 66, 68, 71, 82, 86, 90, 108, 112], "dataset_dir": [0, 1, 2, 3, 4, 5, 6, 7, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 
23, 24, 25, 30, 33], "path": [0, 1, 2, 3, 4, 5, 6, 7, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 28, 30, 32, 33, 37, 39, 41, 42, 44, 45, 46, 48, 51, 53, 55, 56, 58, 59, 61, 64, 66, 68, 69, 70, 75, 82, 86, 96], "replac": [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 36, 48, 51, 54, 55, 61, 64, 65, 67, 68, 72, 74, 83, 88, 90, 103, 108], "real": [0, 1, 2, 3, 4, 5, 6, 7, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 53, 81, 111], "loop": [0, 1, 2, 3, 5, 6, 7, 12, 13, 14, 15, 16, 17, 18, 19, 21, 22, 23, 24, 25, 61, 90, 107], "doe": [0, 1, 2, 3, 5, 6, 7, 9, 10, 11, 12, 13, 14, 15, 16, 17, 19, 20, 21, 22, 23, 24, 25, 28, 35, 37, 43, 46, 56, 57, 59, 60, 61, 66, 72, 77, 90, 94, 97, 99, 102, 107, 108], "ani": [0, 1, 2, 3, 4, 5, 6, 7, 8, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 30, 33, 34, 35, 36, 37, 38, 39, 45, 46, 51, 54, 56, 59, 60, 61, 62, 65, 66, 67, 68, 69, 72, 74, 77, 83, 86, 90, 106, 109], "limit": [0, 1, 2, 3, 5, 6, 7, 8, 12, 13, 14, 15, 16, 17, 19, 21, 22, 23, 24, 25, 72, 99], "written": [0, 1, 2, 3, 4, 5, 6, 7, 8, 12, 13, 14, 15, 16, 17, 18, 19, 21, 22, 23, 24, 25, 101, 102], "Not": [0, 7, 12, 13, 21, 25, 46, 63, 64, 66, 91, 97], "realli": [0, 7, 12, 13, 21, 25], "we": [0, 1, 4, 5, 6, 7, 8, 9, 12, 13, 15, 17, 18, 19, 20, 21, 25, 29, 30, 32, 33, 35, 41, 44, 45, 46, 51, 52, 54, 55, 58, 59, 61, 62, 64, 66, 68, 71, 72, 74, 75, 83, 86, 88, 90, 91, 95, 100, 102, 103, 107], "later": [0, 7, 8, 12, 13, 21, 25, 68, 73, 86, 90], "modifi": [0, 1, 2, 3, 5, 6, 7, 8, 12, 13, 14, 15, 16, 17, 18, 19, 21, 22, 23, 24, 25, 36, 39, 44, 61, 64, 65, 68, 74, 75, 102, 108, 109, 114], "quantizationsim": [0, 1, 2, 3, 5, 6, 8, 10, 11, 12, 13, 14, 15, 16, 17, 19, 21, 22, 23, 24, 25, 51, 63], "session": [0, 1, 2, 3, 28, 30, 42, 44, 45, 46, 48, 70], "act": [0, 45, 46, 59, 66], "regular": [0, 8, 28, 39, 41, 51, 68, 77, 88, 91, 102, 108], "onnxruntim": [0, 1, 2, 3, 42, 44, 46], "howev": [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 22, 23, 24, 25, 34, 35, 51, 56, 68, 72, 102, 103], "recommend": [0, 1, 2, 3, 4, 5, 6, 9, 13, 19, 20, 30, 35, 37, 41, 42, 43, 45, 46, 48, 51, 52, 56, 66, 73, 83, 91, 93, 95, 102, 107], "onli": [0, 1, 2, 3, 4, 5, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 21, 22, 23, 24, 25, 30, 32, 33, 34, 39, 41, 44, 45, 46, 48, 51, 52, 55, 58, 59, 60, 61, 63, 65, 66, 68, 69, 73, 74, 75, 78, 81, 82, 83, 88, 90, 93, 99, 100, 102, 104, 105, 106, 109, 111, 114], "quantizationsimmodel": [0, 1, 2, 3, 4, 5, 7, 8, 10, 11, 12, 13, 14, 18, 19, 20, 21, 22, 23, 24, 25, 28, 29, 32, 33, 36, 39, 41, 42, 44, 45, 46, 48, 51, 54, 58, 59, 64, 66, 67, 68, 69, 77, 82, 83, 86, 87, 90, 91, 93], "regist": [0, 77, 89], "requir": [0, 1, 4, 9, 12, 15, 16, 17, 18, 19, 22, 23, 24, 25, 28, 29, 30, 31, 33, 35, 36, 37, 39, 41, 43, 45, 46, 48, 49, 51, 54, 55, 56, 57, 58, 59, 60, 61, 64, 65, 66, 68, 70, 71, 74, 75, 77, 83, 86, 90, 93, 95, 100, 102, 103, 106, 107, 111], "custom": [0, 6, 8, 20, 35, 39, 41, 48, 49, 61, 67, 77, 88, 107, 108], "import": [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 39, 41, 42, 43, 44, 45, 46, 48, 51, 52, 53, 54, 55, 56, 57, 58, 59, 61, 62, 64, 65, 66, 67, 68, 69, 70, 71, 75, 77, 78, 79, 80, 81, 83, 84, 86, 87, 88, 89, 90, 93, 94], "torch": [0, 1, 2, 3, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 51, 52, 53, 54, 56, 58, 59, 60, 62, 63, 64, 65, 66, 68, 70, 71, 72, 73, 74, 77, 78, 
79, 80, 81, 83, 84, 86, 87, 88, 89, 90, 96, 101, 109], "ort": [0, 1, 2, 3, 42], "common": [0, 1, 2, 3, 5, 6, 7, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 64, 68, 72, 74, 83, 107, 112], "image_net_config": [0, 1, 2, 3, 5, 6, 7, 12, 13, 14, 15, 16, 17, 18, 19, 21, 22, 23, 24, 25], "util": [0, 1, 2, 3, 5, 6, 7, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 28, 35, 36, 37, 39, 44, 52, 53, 64, 65, 68, 70, 72, 83, 90, 93, 102], "image_net_evalu": [0, 1, 2, 3, 5, 6, 7, 13, 14, 15, 16, 17, 19, 21, 22, 23, 24, 25], "imagenetevalu": [0, 1, 2, 3, 5, 6, 7, 13, 14, 15, 16, 17, 19, 21, 22, 23, 24, 25], "image_net_data_load": [0, 1, 2, 3, 15, 17, 19, 21, 22, 23, 24, 25], "imagenetdataload": [0, 1, 2, 3, 12, 15, 17, 19, 21, 22, 23, 24, 25], "imagenetdatapipelin": [0, 1, 2, 3, 5, 6, 7, 12, 13, 14, 15, 16, 17, 19, 21, 22, 23, 24, 25, 51, 54, 55, 68], "staticmethod": [0, 1, 2, 3, 5, 6, 7, 12, 13, 14, 15, 16, 17, 19, 21, 22, 23, 24, 25, 61], "def": [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 28, 30, 31, 33, 34, 35, 36, 37, 39, 41, 42, 43, 45, 46, 48, 51, 52, 53, 54, 55, 56, 57, 59, 60, 61, 62, 65, 66, 68, 69, 70, 71, 72, 77, 86, 90], "get_val_dataload": [0, 1, 2, 3, 15, 17, 18, 19, 21, 22, 23, 24, 25, 51, 55, 68], "data": [0, 1, 2, 3, 4, 5, 6, 7, 8, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 28, 29, 30, 33, 36, 37, 39, 41, 42, 45, 46, 48, 51, 53, 54, 55, 56, 59, 61, 63, 64, 66, 68, 70, 72, 81, 82, 83, 84, 86, 87, 90, 91, 93, 99, 103, 104, 105, 107, 108], "return": [0, 1, 2, 3, 4, 5, 6, 7, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 41, 42, 44, 45, 46, 48, 51, 52, 53, 54, 55, 56, 58, 59, 60, 61, 62, 64, 65, 66, 68, 69, 70, 71, 77, 81, 82, 83, 84, 86, 87, 89, 90, 92, 97, 98, 104, 108], "data_load": [0, 1, 2, 3, 5, 6, 7, 13, 14, 15, 17, 18, 19, 20, 21, 22, 23, 24, 25, 41, 42, 45, 48, 51, 53, 55, 56, 59, 68, 82, 86, 87, 90], "image_s": [0, 1, 2, 3, 4, 5, 6, 7, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 21, 22, 23, 24, 25, 30, 33, 53], "batch_siz": [0, 1, 2, 3, 4, 5, 6, 7, 8, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 28, 29, 30, 33, 37, 41, 42, 46, 48, 51, 53, 56, 66, 68, 90], "is_train": [0, 1, 2, 3, 4, 15, 17, 19, 21, 22, 23, 24, 25, 33], "fals": [0, 1, 2, 3, 4, 5, 7, 8, 10, 11, 13, 15, 16, 17, 18, 19, 21, 22, 23, 24, 25, 30, 33, 35, 39, 42, 44, 45, 48, 49, 52, 53, 55, 56, 59, 61, 62, 64, 68, 69, 72, 77, 78, 81, 83, 84, 86, 88, 89, 90, 101, 106], "num_work": [0, 1, 2, 3, 15, 16, 17, 18, 19, 21, 22, 23, 24, 25], "sess": [0, 1, 2, 3, 30], "inferencesess": [0, 1, 2, 3, 42, 44, 46], "float": [0, 1, 2, 3, 4, 5, 6, 7, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 28, 30, 33, 37, 39, 41, 42, 45, 46, 48, 49, 51, 53, 55, 56, 59, 66, 67, 68, 77, 79, 81, 83, 84, 87, 102, 104, 107, 108, 112], "its": [0, 1, 2, 3, 5, 6, 7, 9, 13, 14, 15, 16, 17, 19, 21, 22, 23, 24, 25, 30, 33, 35, 45, 46, 59, 77, 81, 86, 87, 88, 90, 94, 96, 98, 104, 108, 114], "top": [0, 1, 2, 3, 4, 5, 6, 7, 8, 13, 14, 15, 16, 17, 19, 21, 22, 23, 24, 25, 77, 94, 111], "param": [0, 1, 2, 3, 5, 6, 7, 13, 14, 15, 16, 17, 19, 21, 22, 23, 24, 25, 28, 30, 31, 33, 36, 37, 39, 41, 42, 45, 46, 48, 51, 53, 56, 59, 64, 66, 68, 69, 70, 77, 83, 88, 89, 106], "iter": [0, 1, 2, 3, 4, 5, 6, 7, 13, 14, 15, 16, 17, 19, 21, 22, 23, 24, 25, 28, 30, 33, 37, 41, 42, 46, 51, 53, 56, 66, 90, 91, 103], "none": [0, 1, 2, 3, 4, 5, 6, 7, 8, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 
22, 23, 24, 25, 28, 30, 32, 33, 35, 36, 37, 39, 41, 42, 44, 45, 46, 48, 51, 52, 53, 54, 55, 56, 57, 58, 59, 61, 64, 65, 66, 68, 70, 71, 72, 77, 78, 79, 80, 82, 83, 86, 88, 90, 111], "go": [0, 4, 7, 8, 12, 13, 18, 21, 25, 35, 51, 54, 68, 74, 75, 86, 90], "load": [0, 1, 2, 3, 8, 32, 33, 36, 39, 44, 45, 51, 54, 55, 56, 58, 59, 61, 62, 64, 68, 69, 86, 100], "pretrain": [0, 1, 2, 3, 4, 5, 10, 11, 13, 14, 15, 16, 17, 18, 19, 21, 22, 23, 24, 25, 33, 51, 53, 54, 57, 65, 66, 68, 70, 71, 86, 105, 108], "torchvis": [0, 1, 2, 3, 5, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 51, 53, 54, 57, 65, 66, 68, 70, 71, 90], "similarli": [0, 4, 12, 13, 18, 21, 25, 51, 68, 83, 86, 107], "instead": [0, 1, 2, 3, 4, 5, 6, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 30, 34, 51, 60, 61, 62, 68, 83, 86, 102, 103, 114], "input_shap": [0, 1, 2, 3, 4, 5, 9, 13, 14, 15, 16, 17, 18, 19, 22, 23, 24, 31, 33, 34, 37, 42, 46, 48, 51, 53, 55, 56, 57, 61, 65, 66, 68, 70, 90], "224": [0, 1, 2, 3, 4, 7, 10, 11, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 30, 31, 37, 39, 42, 45, 46, 48, 51, 53, 55, 57, 58, 65, 66, 68, 70, 71], "shape": [0, 1, 2, 3, 7, 8, 9, 13, 18, 19, 21, 22, 23, 24, 25, 33, 34, 35, 52, 56, 57, 61, 62, 64, 65, 69, 72, 78, 79, 80, 81, 83, 84, 88, 89, 90, 104], "channel": [0, 1, 2, 3, 14, 16, 18, 19, 21, 22, 23, 24, 25, 30, 46, 49, 52, 54, 64, 66, 71, 83, 86, 88, 93, 95, 96, 97, 99, 100, 103, 104, 106, 107, 108, 109, 110, 112, 113, 114], "x": [0, 1, 2, 3, 4, 5, 8, 9, 12, 13, 18, 19, 21, 22, 23, 24, 25, 30, 33, 34, 35, 39, 52, 60, 61, 62, 73, 79, 80, 81, 88, 90, 95, 101, 104], "height": [0, 1, 2, 3, 8, 18, 19, 21, 22, 23, 24, 25, 110, 113, 114], "width": [0, 1, 2, 3, 13, 18, 19, 21, 22, 23, 24, 25, 46, 49, 66, 91, 107, 108, 110, 113, 114], "dummy_input": [0, 1, 2, 3, 18, 19, 20, 21, 22, 23, 24, 25, 29, 32, 42, 44, 45, 46, 48, 51, 52, 53, 57, 58, 59, 60, 64, 65, 66, 68, 69, 82, 83, 86, 90], "randn": [0, 1, 2, 3, 20, 39, 42, 46, 48, 51, 53, 59, 61, 62, 66, 68, 69, 77, 78, 81, 84, 88, 89], "filenam": [0, 1, 2, 3, 28, 39, 41, 48, 51, 64, 68, 86], "resnet": [0, 1, 2, 3, 4, 5, 6, 10, 11, 12, 13, 14, 30, 33, 95], "18": [0, 1, 2, 3], "pt_model": [0, 1, 2, 3], "true": [0, 1, 2, 3, 4, 5, 7, 10, 11, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 30, 33, 36, 39, 41, 42, 45, 48, 49, 51, 52, 53, 54, 55, 56, 57, 59, 61, 62, 64, 65, 66, 68, 69, 70, 71, 72, 77, 78, 79, 80, 81, 83, 84, 88, 89, 90, 101, 106], "export": [0, 1, 2, 3, 4, 5, 8, 10, 11, 13, 14, 18, 19, 20, 22, 23, 24, 32, 33, 37, 39, 44, 45, 46, 48, 49, 51, 54, 58, 59, 60, 64, 66, 68, 72, 74, 75, 86, 87, 93, 96, 98, 100, 101, 102, 105, 108, 109], "eval": [0, 1, 2, 3, 4, 7, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 28, 30, 33, 37, 42, 45, 46, 51, 53, 55, 56, 57, 59, 61, 63, 65, 66, 68, 70, 71, 90, 97, 100, 111], "trainingmod": [0, 3], "export_param": [0, 1, 2, 3], "do_constant_fold": [0, 1, 2, 3], "input_nam": [0, 1, 2, 3, 68], "input": [0, 1, 2, 3, 5, 7, 8, 9, 10, 11, 12, 13, 14, 18, 21, 25, 30, 32, 33, 34, 35, 37, 42, 44, 45, 46, 48, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 64, 65, 66, 68, 69, 72, 77, 78, 79, 80, 82, 83, 84, 86, 88, 89, 90, 94, 97, 100, 104, 106, 108, 110, 113, 114], "output_nam": [0, 1, 2, 3, 68], "output": [0, 1, 2, 3, 4, 5, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 29, 30, 33, 34, 35, 37, 42, 45, 46, 48, 49, 51, 52, 53, 55, 59, 60, 61, 62, 64, 66, 67, 68, 69, 71, 72, 77, 78, 83, 88, 89, 90, 94, 100, 103, 104, 106, 108, 109, 110, 113, 114], "dynamic_ax": [0, 1, 2, 3], "0": [0, 1, 2, 3, 5, 
6, 8, 9, 10, 11, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 28, 29, 30, 35, 39, 41, 42, 44, 45, 48, 51, 52, 53, 55, 56, 59, 61, 62, 64, 65, 68, 69, 70, 72, 73, 74, 75, 77, 78, 79, 80, 81, 83, 84, 86, 88, 89, 90, 91, 95, 97, 101, 106], "load_model": [0, 1, 2, 3, 32], "befor": [0, 1, 2, 3, 4, 5, 6, 7, 8, 10, 11, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 32, 34, 35, 36, 38, 41, 42, 43, 44, 45, 46, 48, 51, 53, 58, 65, 67, 68, 71, 72, 77, 82, 88, 90, 91, 92, 93, 100, 102, 104, 105, 108, 112], "onnxsim": [0, 1, 2, 3, 41, 42, 43, 44, 45, 46, 48], "_": [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 18, 19, 20, 22, 23, 24, 25, 28, 32, 33, 35, 36, 37, 41, 42, 43, 44, 45, 46, 48, 53, 58, 66, 77, 78, 82, 87, 88, 89, 90, 102], "except": [0, 1, 2, 3, 9, 12, 25, 35, 83], "print": [0, 1, 2, 3, 7, 8, 9, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 30, 33, 42, 45, 51, 52, 53, 56, 59, 61, 62, 68, 72, 75, 77, 78, 88, 90, 104], "fail": [0, 1, 2, 3, 52, 61, 62, 92, 101, 102], "proceed": [0, 1, 2, 3], "unsimplifi": [0, 1, 2, 3], "whether": [0, 1, 2, 3, 5, 13, 15, 16, 17, 18, 19, 21, 22, 23, 24, 25, 32, 38, 44, 58, 61, 62, 67, 91, 97, 105], "cpu": [0, 1, 2, 3, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 51, 53, 54, 57, 58, 61, 65, 68, 69, 71, 73, 74, 90, 102, 109], "cuda": [0, 1, 2, 3, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 41, 42, 44, 48, 53, 54, 55, 56, 59, 66, 68, 70, 73, 74, 75, 90], "devic": [0, 1, 2, 3, 7, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 32, 41, 42, 44, 48, 51, 53, 54, 55, 58, 59, 61, 64, 65, 68, 70, 71, 86, 87, 90, 108], "your": [0, 1, 2, 3, 5, 6, 7, 12, 13, 14, 15, 16, 17, 18, 19, 21, 22, 23, 24, 25, 42, 48, 51, 54, 55, 61, 62, 68, 72, 73, 74, 75, 77, 87, 91, 92, 95, 96, 100, 101, 102, 105, 108, 111], "environ": [0, 4, 6, 7, 10, 11, 18, 25, 33, 73, 96], "chang": [0, 1, 2, 3, 4, 7, 8, 15, 16, 17, 18, 19, 21, 22, 23, 24, 25, 33, 38, 45, 49, 51, 53, 59, 60, 61, 64, 67, 68, 71, 82, 83, 86, 90, 91, 100, 104, 105, 106, 108, 112, 114], "logic": [0, 1, 2, 3, 15, 16, 17, 18, 19, 22, 23, 24, 25, 33, 45, 59, 77, 88, 109], "forc": [0, 1, 2, 3, 15, 16, 17, 18, 19, 22, 23, 24, 25], "placement": [0, 1, 2, 3, 15, 16, 17, 18, 19, 22, 23, 24, 25, 106], "cudnn_conv_algo_search": [0, 1, 2, 3], "fix": [0, 1, 2, 3, 49, 62, 87, 98, 102, 107, 108, 109], "default": [0, 1, 2, 3, 4, 5, 6, 7, 8, 12, 13, 15, 17, 18, 19, 20, 21, 22, 23, 24, 25, 28, 30, 33, 35, 37, 39, 41, 42, 45, 46, 48, 51, 53, 55, 56, 59, 61, 66, 68, 74, 75, 77, 78, 82, 86, 88, 89, 91, 97, 100, 106, 108, 109, 111], "avoid": [0, 1, 2, 3, 4, 5, 7, 8, 10, 11, 12, 13, 14, 18, 19, 22, 23, 24, 25, 34, 39, 46, 51, 60, 66, 68, 91, 95], "everi": [0, 1, 2, 3, 4, 7, 8, 10, 11, 12, 13, 15, 16, 17, 18, 21, 23, 24, 25, 28, 32, 37, 41, 44, 46, 51, 58, 66, 68, 90, 97, 100, 105, 112], "cudaexecutionprovid": [0, 1, 2, 3], "get_available_provid": [0, 1, 2, 3], "cpuexecutionprovid": [0, 1, 2, 3], "use_cuda": [0, 1, 2, 3, 4, 15, 16, 17, 18, 19, 21, 22, 23, 24, 25, 30, 41, 42, 44, 48, 51, 54, 56, 68], "let": [0, 4, 7, 8, 13, 18, 51, 61, 68, 90], "determin": [0, 8, 18, 20, 28, 30, 37, 39, 41, 46, 48, 49, 51, 56, 66, 68, 79, 80, 83, 88, 90, 92, 102, 103, 104, 112], "32": [0, 1, 2, 4, 5, 6, 8, 9, 10, 11, 13, 14, 15, 16, 17, 18, 19, 20, 22, 23, 24, 28, 33, 34, 35, 37, 42, 49, 51, 52, 61, 62, 68, 74, 75, 78, 86, 89, 107], "routin": [0, 1, 2, 3, 4, 5, 6, 7, 8, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 21, 22, 23, 24, 25, 51, 68], "serializetostr": [0, 1, 2, 3, 44], "batchnorm": [0, 1, 2, 3, 5, 10, 11, 13, 14, 18, 19, 22, 23, 24, 31, 35, 36, 38, 42, 43, 52, 53, 55, 
57, 65, 71, 92, 103, 114], "bn": [0, 1, 2, 3, 5, 7, 10, 11, 13, 14, 18, 19, 22, 23, 24, 29, 36, 55, 65, 67, 102, 104, 109], "These": [0, 2, 6, 8, 13, 18, 20, 22, 28, 39, 41, 42, 48, 53, 67, 68, 72, 75, 88, 90, 92, 93, 94, 95, 101, 102, 103, 104, 107, 108], "adjac": [0, 1, 2, 3, 5, 10, 11, 13, 14, 18, 19, 22, 23, 24, 90, 106], "convolut": [0, 1, 2, 3, 5, 9, 10, 11, 13, 14, 15, 17, 18, 19, 22, 23, 24, 30, 35, 36, 52, 83, 90, 93, 94, 95, 100, 107, 110, 113, 114], "cannot": [0, 1, 2, 3, 5, 10, 11, 13, 14, 18, 19, 22, 23, 24, 52, 61, 62, 78, 89], "thei": [0, 1, 2, 3, 5, 8, 9, 10, 11, 12, 13, 14, 18, 19, 22, 23, 24, 25, 34, 36, 61, 65, 67, 83, 107, 111], "why": [0, 13, 18, 107], "do": [0, 1, 2, 3, 4, 5, 6, 7, 9, 12, 13, 14, 15, 16, 17, 18, 19, 21, 22, 23, 24, 25, 29, 33, 35, 39, 41, 51, 61, 63, 64, 71, 74, 75, 90, 100, 104, 107, 108], "On": [0, 13, 18, 49, 73], "runtim": [0, 1, 2, 3, 5, 10, 11, 13, 14, 18, 19, 22, 23, 24, 39, 48, 49, 51, 56, 68, 69, 77, 83, 86, 87, 90, 95, 98, 100, 102, 104, 106, 108, 109], "tflite": [0, 13, 18], "snapdragon": [0, 13, 18], "neural": [0, 6, 13, 18, 20, 87, 90, 92, 95, 98, 100, 102, 105, 107, 108, 113], "process": [0, 1, 2, 3, 4, 5, 7, 8, 10, 11, 13, 14, 18, 19, 22, 23, 24, 30, 32, 44, 51, 58, 68, 70, 83, 84, 87, 90, 92, 94, 97, 98, 100, 103, 108], "sdk": [0, 13, 18, 87, 90, 98], "etc": [0, 4, 7, 8, 13, 18, 49, 51, 68, 74, 95, 102], "practic": [0, 4, 6, 7, 8, 12, 13, 15, 16, 17, 18, 20, 25, 51, 68, 90, 100], "so": [0, 1, 4, 5, 7, 8, 9, 10, 11, 12, 13, 15, 16, 17, 18, 19, 21, 23, 24, 25, 28, 29, 37, 39, 41, 46, 48, 51, 56, 61, 62, 63, 66, 68, 72, 75, 88, 97, 101, 104], "speedup": [0, 13, 18], "sinc": [0, 1, 2, 3, 4, 5, 7, 8, 9, 10, 11, 12, 13, 14, 18, 19, 22, 23, 24, 25, 48, 51, 64, 68, 83, 93, 95, 108], "unnecessari": [0, 13, 18, 90, 114], "now": [0, 1, 2, 3, 4, 7, 8, 9, 10, 11, 13, 15, 16, 17, 18, 19, 21, 22, 23, 24, 25, 30, 35, 51, 61, 62, 68, 69, 86, 90, 109], "perspect": [0, 13, 18], "mathemat": [0, 13, 18, 60], "equival": [0, 13, 18, 19, 22, 23, 24, 25, 51, 59, 60, 67, 68, 77, 78, 79, 80, 83, 90], "produc": [0, 12, 13, 18, 25, 42, 49, 53, 61, 81, 82, 83, 84, 97, 104], "same": [0, 1, 2, 3, 4, 5, 7, 8, 9, 10, 11, 13, 15, 16, 17, 18, 19, 21, 22, 23, 24, 28, 32, 35, 44, 51, 53, 58, 61, 62, 64, 66, 68, 69, 72, 77, 81, 83, 88, 89, 93, 103, 106, 112], "increas": [0, 13, 18, 30, 56, 71, 83, 97, 103], "rang": [0, 1, 2, 3, 4, 5, 7, 8, 9, 10, 13, 14, 18, 19, 20, 21, 22, 23, 35, 37, 46, 51, 53, 54, 61, 66, 68, 71, 78, 82, 84, 90, 91, 93, 96, 97, 102, 103, 104, 105, 107, 108, 109, 112], "tensor": [0, 2, 9, 12, 13, 14, 18, 22, 23, 24, 25, 28, 32, 35, 38, 39, 45, 47, 48, 49, 51, 52, 53, 56, 57, 58, 59, 60, 61, 64, 65, 66, 67, 68, 69, 77, 78, 79, 80, 83, 84, 86, 88, 89, 90, 91, 94, 101, 102, 104, 106, 107, 108, 109], "valu": [0, 1, 2, 4, 5, 6, 7, 8, 9, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 25, 28, 30, 33, 36, 37, 41, 45, 46, 49, 51, 55, 56, 58, 59, 61, 64, 65, 66, 68, 77, 78, 79, 80, 81, 82, 83, 84, 86, 89, 90, 91, 94, 95, 97, 100, 102, 103, 104, 105, 106, 108, 110, 112, 113], "weight": [0, 1, 2, 3, 4, 5, 6, 9, 10, 11, 12, 13, 14, 18, 19, 20, 22, 23, 24, 25, 28, 30, 33, 35, 36, 37, 38, 39, 41, 45, 46, 47, 49, 51, 55, 59, 62, 64, 65, 66, 67, 68, 71, 72, 77, 82, 83, 84, 86, 88, 90, 91, 93, 95, 100, 102, 103, 104, 105, 106, 107, 108, 112], "And": [0, 13, 18, 36, 65], "neg": [0, 7, 8, 13, 18, 78, 83], "impact": [0, 13, 18, 90, 97, 107], "especi": [0, 13, 18, 73, 102, 105, 107], "want": [0, 4, 7, 8, 12, 13, 18, 25, 32, 33, 36, 39, 44, 45, 48, 51, 58, 59, 61, 65, 67, 68, 74, 
77], "behavior": [0, 13, 18, 19, 22, 23, 24, 61, 72, 88, 90, 98], "here": [0, 1, 2, 3, 4, 5, 7, 8, 9, 10, 11, 12, 13, 14, 15, 17, 18, 19, 21, 22, 23, 24, 25, 28, 29, 30, 31, 41, 51, 53, 54, 57, 61, 64, 66, 68, 69, 72, 86, 90, 102, 105, 106], "place": [0, 1, 2, 3, 5, 6, 7, 12, 13, 14, 15, 16, 17, 18, 19, 21, 22, 23, 24, 25, 33, 39, 43, 45, 51, 55, 57, 59, 65, 68, 83, 86, 106], "aimet_onnx": [0, 1, 2, 3, 41, 42, 43, 44, 45, 46, 48, 73, 75], "batch_norm_fold": [0, 1, 2, 3, 4, 5, 7, 10, 11, 13, 14, 18, 19, 21, 22, 23, 24, 29, 36, 54, 65, 71, 90], "fold_all_batch_norms_to_weight": [0, 1, 2, 3], "basic": [0, 4, 7, 8, 18, 21, 51, 68, 75, 87, 90], "mean": [0, 4, 7, 8, 9, 12, 13, 15, 16, 17, 18, 20, 21, 22, 25, 38, 51, 54, 68, 71, 88, 90, 104, 106, 108], "graph": [0, 1, 2, 3, 4, 5, 7, 8, 10, 11, 13, 14, 15, 17, 18, 19, 21, 22, 23, 24, 25, 30, 35, 49, 51, 52, 61, 68, 70, 86, 101, 102, 108, 111], "configur": [0, 1, 2, 3, 4, 5, 7, 8, 10, 11, 14, 18, 19, 21, 22, 23, 24, 28, 37, 41, 42, 46, 48, 49, 51, 53, 55, 64, 66, 68, 83, 86, 90, 99, 109], "them": [0, 1, 2, 3, 4, 5, 7, 8, 9, 10, 11, 12, 13, 14, 18, 19, 21, 22, 23, 24, 25, 35, 39, 42, 46, 51, 61, 62, 64, 66, 68, 72, 88, 90, 100], "few": [0, 1, 2, 3, 4, 5, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 21, 22, 23, 24, 25, 51, 68, 90, 95], "explain": [0, 4, 7, 8, 12, 18, 21, 25, 51, 68, 94, 100, 103, 108], "quant_schem": [0, 1, 2, 3, 4, 5, 7, 8, 10, 11, 12, 13, 14, 18, 19, 21, 22, 23, 24, 25, 28, 37, 39, 42, 46, 48, 49, 51, 53, 55, 66, 68, 69, 82, 86], "quantschem": [0, 1, 2, 3, 4, 5, 7, 8, 10, 11, 12, 13, 14, 18, 19, 21, 22, 23, 24, 25, 28, 37, 39, 41, 42, 46, 48, 51, 53, 54, 55, 66, 68, 69, 72, 82, 86, 92], "post_training_tf_enhanc": [0, 1, 2, 3, 4, 7, 8, 12, 18, 19, 21, 22, 23, 25, 28, 37, 39, 41, 42, 46, 48, 49, 51, 53, 55, 66, 68], "support": [0, 4, 7, 8, 9, 18, 21, 23, 24, 26, 28, 30, 31, 33, 34, 35, 37, 39, 41, 45, 46, 48, 51, 55, 56, 59, 60, 61, 66, 67, 68, 69, 73, 77, 83, 86, 94, 95, 98, 99, 100, 101, 102, 103, 106, 107, 108, 109, 113], "option": [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 28, 30, 33, 36, 37, 39, 41, 42, 45, 46, 48, 51, 52, 53, 54, 55, 56, 59, 61, 64, 65, 66, 68, 71, 75, 77, 78, 83, 86, 87, 89, 90, 91, 104, 106, 108], "tf_enhanc": [0, 4, 7, 8, 18, 21, 39, 55], "tf": [0, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 18, 21, 25, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 39, 45, 46, 48, 55, 66, 74, 104, 108, 109], "quant": [0, 1, 4, 5, 7, 8, 10, 11, 13, 14, 18, 19, 20, 21, 22, 23, 24, 28, 38, 39, 41, 42, 45, 48, 51, 55, 59, 67, 68, 93], "scheme": [0, 4, 7, 8, 12, 15, 16, 17, 18, 21, 25, 28, 30, 37, 39, 41, 42, 46, 48, 51, 53, 55, 56, 58, 66, 68, 83, 84, 92, 93, 100, 104], "enum": [0, 4, 7, 8, 18, 21, 30, 33, 41, 45, 55, 56, 59], "post_training_tf": [0, 4, 5, 7, 8, 10, 13, 14, 18, 21, 22, 28, 37, 39, 41, 46, 48, 49, 51, 55, 66, 68, 69, 82, 86], "default_output_bw": [0, 1, 2, 3, 4, 5, 7, 8, 10, 11, 12, 13, 14, 18, 19, 21, 22, 23, 24, 25, 28, 37, 39, 45, 48, 51, 59, 66, 68, 86, 90], "8": [0, 1, 2, 3, 4, 5, 7, 8, 10, 11, 12, 13, 14, 16, 17, 18, 19, 21, 22, 23, 24, 25, 28, 33, 36, 37, 39, 41, 42, 45, 46, 48, 49, 51, 53, 55, 56, 59, 61, 62, 64, 66, 68, 70, 72, 73, 75, 77, 78, 79, 80, 81, 83, 84, 86, 88, 89, 90, 102, 107, 114], "essenti": [0, 4, 7, 8, 18, 21, 83, 87], "ask": [0, 4, 7, 8, 12, 18, 21, 25, 77], "all": [0, 1, 2, 3, 4, 5, 6, 7, 8, 10, 11, 12, 13, 14, 18, 19, 21, 22, 23, 24, 25, 29, 31, 33, 34, 35, 36, 39, 41, 45, 46, 48, 49, 51, 54, 55, 56, 59, 61, 62, 64, 65, 66, 67, 68, 71, 72, 
73, 74, 77, 82, 83, 86, 88, 90, 94, 97, 100, 103, 104, 106, 107], "activ": [0, 1, 2, 3, 4, 5, 7, 8, 9, 10, 11, 12, 13, 14, 18, 19, 21, 22, 23, 24, 25, 33, 34, 35, 36, 37, 39, 45, 46, 48, 49, 51, 52, 55, 59, 60, 61, 64, 65, 66, 68, 69, 82, 83, 86, 88, 90, 102, 104, 105, 106, 107, 108], "integ": [0, 1, 2, 3, 4, 5, 7, 8, 10, 11, 12, 13, 14, 18, 19, 21, 22, 23, 24, 25, 28, 37, 39, 41, 46, 48, 49, 66, 68, 78, 83, 91, 102, 104], "default_param_bw": [0, 1, 2, 3, 4, 5, 7, 8, 10, 11, 12, 13, 14, 18, 19, 21, 22, 23, 24, 25, 28, 37, 39, 41, 45, 46, 48, 51, 59, 66, 68, 86, 90], "There": [0, 1, 4, 5, 7, 8, 12, 13, 14, 18, 19, 21, 22, 23, 24, 25, 58, 62, 68, 72, 73, 101, 103, 105, 112], "other": [0, 1, 2, 3, 4, 5, 6, 7, 8, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 36, 60, 61, 65, 67, 68, 69, 72, 73, 83, 86, 97, 99, 102, 104, 107, 108, 109], "check": [0, 4, 7, 8, 12, 18, 21, 25, 32, 35, 37, 38, 44, 46, 52, 58, 60, 61, 62, 66, 67, 68, 92, 102, 105, 107], "document": [0, 1, 2, 3, 4, 5, 6, 7, 8, 10, 11, 12, 14, 18, 19, 20, 21, 22, 23, 24, 25, 26, 33, 64, 70, 73, 75, 98, 109, 111], "refer": [0, 1, 2, 3, 4, 5, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 21, 22, 23, 24, 25, 45, 49, 51, 55, 58, 59, 64, 68, 72, 83, 86, 88, 91, 96, 108], "aimet_common": [0, 1, 2, 3, 4, 5, 7, 8, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114], "quantsim": [0, 1, 2, 3, 4, 5, 7, 8, 10, 11, 18, 19, 21, 22, 23, 24, 28, 32, 39, 41, 44, 45, 46, 48, 51, 53, 54, 55, 58, 59, 64, 66, 67, 68, 69, 72, 82, 83, 86, 87, 96, 102, 105, 106, 109], "default_activation_bw": [0, 1, 2, 3, 41, 46, 48], "even": [0, 4, 7, 8, 13, 18, 51, 68, 77], "though": [0, 4, 7, 8, 13, 18, 51, 68, 77, 83, 106], "ad": [0, 1, 2, 3, 4, 5, 7, 8, 10, 11, 13, 14, 15, 16, 17, 18, 19, 22, 23, 24, 48, 49, 51, 62, 64, 68, 69, 90, 99, 106, 109], "node": [0, 1, 2, 3, 4, 5, 7, 8, 10, 11, 13, 14, 18, 19, 22, 23, 24, 39, 51, 52, 59, 60, 61, 68, 90, 105, 108], "readi": [0, 1, 2, 3, 4, 7, 8, 10, 11, 13, 15, 16, 17, 18, 19, 22, 23, 24, 25, 39, 51, 68, 90, 107], "yet": [0, 4, 7, 8, 13, 18, 51, 68], "scale": [0, 1, 2, 3, 4, 5, 7, 8, 10, 11, 12, 13, 14, 18, 19, 20, 22, 23, 24, 25, 29, 31, 36, 43, 49, 51, 54, 57, 64, 65, 68, 77, 78, 79, 80, 81, 83, 86, 89, 93, 102, 103, 104, 105, 108], "offset": [0, 1, 2, 3, 4, 5, 7, 8, 10, 11, 12, 13, 14, 18, 19, 20, 22, 23, 24, 25, 37, 46, 49, 51, 66, 68, 77, 78, 83, 84, 86, 89, 102, 104, 105, 108], "pass": [0, 1, 2, 3, 4, 5, 7, 8, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 21, 22, 23, 24, 25, 28, 30, 32, 33, 35, 36, 37, 39, 41, 42, 44, 45, 46, 48, 51, 52, 54, 55, 56, 58, 59, 60, 61, 62, 63, 64, 65, 66, 68, 72, 77, 83, 86, 87, 88, 90, 98, 101, 103, 104, 105, 107, 108, 109, 111], "unlabel": [0, 1, 2, 3, 4, 5, 6, 7, 8, 10, 11, 12, 13, 14, 15, 17, 18, 19, 20, 22, 23, 24, 25, 42, 46, 48, 51, 53, 66, 68, 91, 102, 104], "through": [0, 1, 2, 3, 4, 5, 7, 8, 10, 11, 12, 13, 14, 18, 19, 21, 22, 23, 24, 25, 35, 45, 51, 55, 59, 61, 66, 68, 71, 72, 86, 88, 90, 103, 104, 108], "collect": [0, 1, 2, 3, 4, 5, 7, 8, 10, 11, 13, 14, 18, 19, 22, 23, 24, 42, 46, 51, 53, 66, 68, 82, 83, 97, 104], "statist": [0, 1, 2, 3, 4, 5, 7, 8, 10, 11, 13, 14, 15, 16, 17, 18, 19, 22, 23, 
24, 29, 30, 37, 38, 46, 51, 54, 56, 66, 67, 68, 71, 78, 79, 80, 82, 84, 88, 89, 90, 93, 102, 104, 112], "which": [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 21, 22, 23, 24, 25, 28, 30, 32, 33, 35, 36, 38, 41, 44, 45, 46, 49, 51, 52, 55, 56, 58, 59, 61, 62, 64, 65, 67, 68, 71, 74, 75, 77, 78, 81, 83, 84, 86, 87, 88, 89, 90, 93, 95, 97, 100, 102, 103, 104, 106, 108, 109, 111, 114], "calcul": [0, 1, 2, 3, 4, 5, 7, 8, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 22, 23, 24, 25, 28, 33, 39, 42, 45, 48, 51, 53, 59, 65, 68, 84, 88, 97, 103, 104, 105, 108], "sometim": [0, 1, 2, 3, 4, 5, 7, 8, 10, 11, 13, 14, 18, 19, 22, 23, 24, 51, 68, 94, 100, 103, 104], "calibr": [0, 1, 2, 3, 4, 5, 7, 8, 10, 11, 12, 13, 14, 18, 19, 22, 23, 24, 25, 37, 41, 46, 48, 51, 66, 68, 78, 82, 83, 84, 86, 87, 88, 89, 90, 102, 104, 105, 107, 108], "simpli": [0, 4, 7, 8, 13, 18, 21, 28, 37, 39, 41, 46, 48, 51, 61, 66, 68], "benefici": [0, 4, 7, 8, 12, 13, 18, 25, 51, 68, 104], "forward": [0, 3, 4, 7, 8, 9, 10, 11, 12, 18, 19, 21, 22, 23, 24, 25, 33, 35, 37, 39, 45, 46, 48, 51, 52, 54, 59, 60, 61, 62, 63, 65, 66, 68, 69, 74, 77, 78, 88, 89, 90, 101, 104, 107, 109], "well": [0, 1, 2, 3, 4, 5, 6, 7, 8, 10, 11, 12, 13, 14, 18, 19, 20, 22, 23, 24, 25, 33, 46, 51, 52, 62, 64, 66, 68, 81, 83, 86, 100, 103, 104], "distribut": [0, 1, 2, 3, 4, 5, 7, 8, 10, 11, 12, 13, 14, 18, 19, 22, 23, 24, 25, 30, 51, 56, 68, 103, 107, 108], "doesn": [0, 1, 2, 3, 5, 6, 14, 15, 16, 17, 18, 19, 22, 23, 24, 63, 77, 105], "t": [0, 4, 7, 8, 12, 13, 18, 21, 25, 33, 45, 51, 59, 61, 63, 64, 67, 68, 74, 77, 91, 104, 105], "look": [0, 4, 7, 8, 9, 12, 13, 18, 25, 35, 45, 51, 59, 68, 90, 92], "definit": [0, 4, 7, 8, 12, 13, 18, 19, 22, 23, 24, 25, 34, 38, 60, 61, 64, 67, 72, 77, 90, 102], "extrem": [0, 1, 2, 3, 4, 5, 7, 8, 10, 11, 12, 13, 14, 18, 19, 22, 23, 24, 25, 51, 68, 108], "bias": [0, 2, 14, 18, 22, 102], "origin": [0, 1, 2, 3, 4, 5, 6, 7, 8, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 21, 22, 23, 24, 25, 30, 32, 33, 35, 44, 56, 58, 61, 68, 72, 77, 88, 90, 94, 95, 97, 102, 103, 104, 108, 111], "consist": [0, 18, 58, 72, 83, 97, 108], "dark": [0, 1, 2, 3, 4, 5, 10, 11, 12, 13, 14, 18, 19, 22, 23, 24, 25, 51, 68], "light": [0, 1, 2, 3, 4, 5, 10, 11, 12, 13, 14, 18, 19, 22, 23, 24, 25, 51, 68], "mani": [0, 4, 7, 8, 12, 13, 15, 16, 17, 18, 25, 61, 91, 95, 103], "differ": [0, 4, 7, 8, 13, 15, 16, 17, 18, 23, 24, 30, 51, 53, 56, 61, 64, 66, 68, 69, 72, 74, 75, 83, 94, 97, 100, 102, 103, 105, 107, 108], "wai": [0, 1, 2, 3, 4, 5, 7, 8, 9, 10, 11, 12, 13, 14, 18, 19, 22, 23, 24, 25, 30, 56, 58, 68, 72, 73, 83, 90, 101], "just": [0, 4, 7, 8, 12, 13, 18, 23, 24, 25, 51, 68, 108, 114], "pass_calibration_data": [0, 1, 2, 3, 5, 10, 11, 12, 13, 14, 18, 19, 21, 22, 23, 24, 25, 41, 48, 51, 68], "get_input": [0, 1, 2, 3], "name": [0, 1, 2, 3, 7, 9, 12, 25, 30, 32, 33, 35, 41, 44, 45, 46, 49, 55, 58, 59, 64, 66, 68, 74, 75, 77, 82, 86, 88, 89, 103, 108, 109, 111], "batch_cntr": [0, 1, 2, 3, 5, 10, 11, 13, 14, 19, 21, 22, 23, 24, 25, 42], "input_data": [0, 1, 2, 3, 18, 19, 21, 22, 23, 24, 25, 28, 42, 48, 51, 68], "target_data": [0, 1, 2, 3, 18, 19, 21, 22, 23, 24, 25, 51, 68], "inputs_batch": [0, 1, 2, 3, 18, 19, 21, 22, 23, 24, 25, 51, 68], "numpi": [0, 1, 2, 3, 9, 28, 30, 33, 37, 39, 42, 45, 46, 48], "break": [0, 1, 2, 3, 4, 5, 10, 11, 13, 14, 19, 21, 22, 23, 24, 25, 30, 33, 42, 48, 51, 68, 90], "subsequ": [0, 2, 13, 14, 18, 51, 68, 81, 86, 101, 103, 106], "compute_encod": [0, 1, 2, 3, 4, 5, 7, 8, 10, 11, 13, 14, 18, 19, 21, 22, 23, 24, 28, 39, 45, 48, 
51, 59, 68, 69, 72, 77, 78, 79, 80, 81, 82, 84, 86, 87, 88, 89, 90], "forward_pass_callback": [0, 1, 2, 3, 5, 10, 11, 12, 13, 14, 18, 19, 21, 22, 23, 24, 25, 28, 33, 37, 39, 41, 45, 46, 48, 59, 66, 68], "forward_pass_callback_arg": [0, 1, 2, 3, 4, 5, 7, 10, 11, 13, 14, 18, 19, 21, 22, 23, 24, 39, 41, 45, 48, 51, 59, 68], "10000": [0, 1, 5, 13, 18, 19, 28, 51, 91], "initi": [0, 1, 5, 11, 12, 13, 18, 19, 23, 24, 25, 28, 44, 51, 68, 69, 77, 78, 79, 80, 88, 89, 91, 105, 108], "phase": [0, 4, 18, 33, 45, 59, 100], "compris": [0, 4, 18, 97], "sensit": [0, 2, 4, 14, 18, 22, 33, 37, 38, 45, 46, 47, 59, 66, 67, 97, 102, 104, 107, 108, 109], "greedili": [0, 4, 18], "select": [0, 4, 18, 28, 42, 51, 68, 71, 73, 74, 75, 92, 95, 104, 108, 111, 114], "bitwidth": [0, 1, 4, 5, 13, 18, 19, 28, 33, 37, 39, 41, 42, 45, 46, 48, 49, 51, 53, 55, 59, 64, 66, 68, 72, 78, 79, 80, 81, 83, 86, 88, 89, 90, 93, 102, 107, 108], "base": [0, 1, 4, 5, 6, 7, 13, 18, 19, 20, 21, 28, 29, 41, 45, 46, 48, 51, 59, 64, 66, 68, 69, 72, 74, 77, 78, 79, 80, 83, 84, 88, 89, 94, 95, 97, 102, 111], "three": [0, 15, 16, 17, 18, 31, 61, 92, 95, 112], "eval_callback_for_phase1": [0, 18, 33, 45, 59], "eval_callback_for_phase2": [0, 18, 33, 45, 59], "callbackfunc": [0, 4, 12, 18, 25, 37, 46, 66], "object": [0, 6, 7, 12, 18, 19, 20, 22, 23, 24, 25, 28, 32, 33, 37, 39, 41, 42, 44, 45, 46, 48, 53, 56, 58, 59, 64, 66, 68, 77, 81, 82, 83, 86, 88, 89, 90, 93, 102, 105, 108], "In": [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 30, 34, 35, 37, 38, 42, 46, 49, 51, 52, 53, 56, 59, 60, 61, 62, 66, 67, 68, 72, 77, 83, 88, 90, 91, 95, 97, 100, 102, 103, 105, 106, 108, 111, 112, 114], "reus": [0, 18, 34, 60, 61, 62], "previou": [0, 4, 18, 22, 30, 33, 36, 45, 56, 59, 65, 67, 90, 95, 107], "snippet": [0, 18, 49, 61, 83, 86], "func_callback_arg": [0, 18, 33, 45, 46, 59, 66], "measur": [0, 15, 16, 17, 18, 30, 33, 45, 46, 56, 59, 66], "score": [0, 1, 2, 3, 4, 6, 7, 20, 21, 30, 33, 42, 45, 46, 51, 53, 56, 59, 66, 70, 97, 100, 111], "respecit": [0, 18], "both": [0, 4, 11, 17, 18, 23, 24, 35, 49, 51, 61, 66, 68, 69, 72, 73, 78, 83, 86, 87, 88, 100, 102, 103, 105, 106, 107, 108, 110, 111, 114], "qualiti": [0, 18], "slightli": [0, 18], "goal": [0, 18, 37, 42, 46, 53, 66, 92], "rough": [0, 18], "wherea": [0, 18, 59, 90, 108], "callbak": [0, 18], "impli": [0, 18, 45, 59], "flexibl": [0, 18], "than": [0, 1, 5, 7, 8, 18, 19, 21, 29, 30, 36, 41, 49, 51, 52, 56, 59, 61, 62, 65, 68, 72, 83, 86, 90, 95, 99, 105, 106], "smaller": [0, 13, 18, 33, 41, 45, 51, 52, 59, 98, 107, 110, 113], "indirect": [0, 18], "sqnr": [0, 4, 18, 45, 59, 84, 108], "between": [0, 13, 18, 28, 30, 36, 41, 45, 46, 51, 53, 56, 58, 59, 65, 66, 68, 72, 75, 83, 88, 103, 104, 106, 108], "faster": [0, 6, 7, 13, 18, 20, 33, 84, 98], "correl": [0, 18], "metric": [0, 1, 2, 3, 4, 5, 6, 7, 8, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 22, 23, 24, 25, 30, 37, 39, 51, 56, 68, 104, 108], "aimet_torch": [0, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 51, 52, 53, 54, 55, 56, 57, 58, 59, 61, 62, 64, 65, 66, 68, 69, 70, 71, 73, 74, 75, 77, 78, 79, 80, 81, 82, 83, 84, 86, 87, 88, 89, 90, 101], "mixed_precision_algo": [0, 4, 18, 45, 59], "evalcallbackfactori": [0, 18, 45, 59], "forward_one_batch": [0, 18], "label": [0, 4, 5, 6, 7, 10, 11, 13, 18, 20, 30, 33, 37, 46, 51, 53, 66, 68, 104, 105], "eval_callback_factori": [0, 18], "forward_fn": [0, 1, 7, 18, 21, 41, 45, 51, 54, 59, 86], "small": [0, 1, 2, 3, 4, 5, 6, 7, 8, 10, 11, 12, 13, 14, 18, 19, 20, 22, 23, 24, 25, 51, 68, 
93, 98, 102], "hand": [0, 18, 102], "miou": [0, 18], "full": [0, 1, 2, 3, 4, 5, 7, 10, 11, 14, 18, 19, 21, 22, 23, 24, 30, 33, 34, 38, 60, 67, 88], "appli": [0, 7, 8, 15, 16, 17, 18, 20, 21, 28, 30, 31, 36, 38, 41, 42, 43, 45, 46, 51, 53, 55, 56, 62, 65, 67, 69, 71, 73, 77, 78, 83, 86, 88, 89, 90, 91, 92, 93, 96, 100, 102, 103, 105, 106, 107, 108, 109, 111, 112], "dummi": [0, 12, 18, 25, 28, 39, 41, 42, 46, 48, 51, 52, 53, 57, 58, 59, 65, 66, 68, 86, 104], "one": [0, 1, 2, 3, 4, 5, 8, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 22, 23, 24, 25, 36, 49, 51, 52, 59, 61, 62, 64, 65, 68, 74, 83, 86, 90, 94, 96, 100, 105, 109, 110, 111, 113], "tupl": [0, 4, 12, 18, 25, 28, 29, 30, 32, 33, 36, 37, 39, 41, 42, 44, 45, 46, 48, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 65, 66, 68, 78, 83, 84, 86, 89], "expect": [0, 7, 15, 16, 17, 18, 21, 30, 34, 36, 37, 39, 41, 45, 46, 48, 51, 52, 56, 59, 61, 62, 66, 68, 82, 84, 86, 90, 100, 102, 104], "group": [0, 18, 36, 74, 83, 106, 108], "list": [0, 4, 7, 9, 15, 16, 17, 18, 19, 21, 22, 23, 24, 25, 29, 30, 31, 32, 33, 35, 36, 41, 42, 44, 45, 48, 51, 54, 55, 56, 57, 58, 59, 61, 65, 66, 68, 71, 78, 83, 86, 88, 89, 97, 99, 101, 106], "modul": [0, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 30, 33, 34, 41, 45, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 64, 65, 66, 67, 68, 71, 73, 77, 82, 83, 86, 87, 90, 102, 109, 114], "therefor": [0, 4, 7, 8, 9, 18, 33, 45, 59, 64, 90, 95, 103], "might": [0, 1, 4, 5, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 22, 23, 24, 25, 33, 45, 49, 51, 59, 68, 91, 104, 107], "front": [0, 4, 18, 33, 45, 59], "candid": [0, 4, 18, 30, 31, 33, 42, 45, 56, 59, 97, 100], "possibl": [0, 4, 6, 9, 18, 20, 33, 34, 35, 39, 42, 45, 48, 53, 59, 60, 62, 68, 83, 90, 97, 104, 106, 107], "suppos": [0, 4, 18, 33, 45, 59], "combin": [0, 4, 18, 33, 45, 55, 59, 61, 83, 92, 95, 100, 102, 103], "type": [0, 4, 6, 7, 9, 12, 15, 16, 17, 18, 20, 23, 24, 28, 29, 30, 31, 33, 36, 37, 39, 41, 42, 45, 46, 48, 51, 52, 53, 54, 55, 56, 59, 61, 64, 65, 66, 68, 71, 72, 77, 81, 82, 83, 86, 88, 89, 100, 102, 104, 106, 108, 111], "int": [0, 4, 6, 7, 12, 15, 16, 17, 18, 20, 28, 29, 30, 33, 37, 39, 41, 42, 44, 45, 46, 48, 49, 51, 53, 54, 55, 56, 59, 64, 66, 68, 69, 72, 78, 79, 80, 83, 84, 89], "16": [0, 4, 7, 18, 22, 28, 30, 33, 39, 45, 48, 49, 51, 59, 61, 64, 68, 69, 73, 77, 78, 79, 80, 83, 84, 86, 88, 89], "quantizationdatatyp": [0, 4, 18, 33, 39, 45, 48, 59, 68, 72], "allowed_accuracy_drop": [0, 4, 6, 18, 20, 33, 42, 45, 53, 59], "maximum": [0, 4, 5, 6, 7, 12, 13, 14, 18, 25, 28, 33, 42, 45, 51, 59, 68, 78, 79, 80, 84], "allow": [0, 4, 6, 8, 18, 20, 30, 32, 33, 35, 38, 39, 42, 44, 45, 47, 49, 56, 58, 59, 60, 61, 67, 68, 81, 83, 92, 100, 107, 108, 109, 111], "drop": [0, 1, 2, 3, 4, 5, 6, 7, 8, 10, 11, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 33, 42, 45, 59, 67, 68, 88, 92, 95, 100, 103, 104, 105, 107, 108], "plot": [0, 4, 12, 18, 25, 33, 45, 59, 71, 82, 104], "till": [0, 4, 18, 33, 45, 59], "met": [0, 4, 18, 33, 45, 59], "complet": [0, 4, 15, 16, 17, 18, 30, 33, 45, 59, 90, 93, 107], "pick": [0, 4, 18, 30, 33, 35, 36, 45, 56, 59, 95, 97], "results_dir": [0, 4, 12, 18, 25, 33, 37, 42, 45, 46, 53, 59, 66, 71], "cach": [0, 4, 18, 33, 42, 45, 53, 59, 75], "intermedi": [0, 4, 18, 32, 33, 44, 45, 52, 58, 59, 68, 108], "clean_start": [0, 4, 18, 33, 45, 59], "inform": [0, 4, 18, 21, 33, 36, 45, 49, 55, 59, 62, 65, 67, 72, 81, 86, 102], "delet": [0, 4, 18, 33, 45, 59, 104], "prior": [0, 4, 18, 19, 22, 23, 24, 33, 45, 59], "start": [0, 4, 7, 8, 9, 10, 11, 12, 13, 15, 16, 17, 18, 21, 23, 
24, 25, 28, 30, 33, 35, 41, 45, 51, 56, 59, 61, 62, 68, 78, 90, 91, 95, 96, 106, 108], "analysi": [0, 4, 8, 15, 16, 17, 18, 30, 33, 37, 45, 46, 56, 59, 66, 100, 107], "applic": [0, 1, 4, 5, 6, 10, 11, 12, 13, 14, 18, 19, 22, 30, 31, 33, 36, 37, 39, 45, 49, 59, 65, 101], "respons": [0, 4, 18, 33, 45, 59, 100], "flag": [0, 4, 18, 33, 36, 42, 45, 53, 59, 61, 68, 72, 82, 89], "anyth": [0, 4, 12, 18, 25, 33, 45, 59], "compar": [0, 4, 7, 8, 9, 12, 18, 21, 25, 33, 45, 52, 59, 61, 71, 83, 86, 90, 102, 104, 105, 112], "use_all_amp_candid": [0, 18, 45, 59], "supported_kernel": [0, 18, 45, 59], "field": [0, 18, 45, 49, 59], "config": [0, 13, 18, 39, 45, 46, 59, 66, 106, 109], "under": [0, 18, 31, 33, 45, 59, 72, 82, 83, 86, 104, 106], "op_typ": [0, 7, 18, 45, 59, 106], "section": [0, 7, 18, 20, 45, 52, 59, 62, 74, 75, 83, 93, 102, 108], "ignor": [0, 1, 2, 3, 4, 5, 7, 8, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 22, 23, 24, 25, 30, 45, 51, 55, 56, 59, 61, 68, 90], "continu": [0, 18, 35, 45, 59, 62, 68, 102, 103, 105, 107], "amp_search_algo": [0, 18, 33, 45, 59], "ampsearchalgo": [0, 18, 33, 45, 59], "search": [0, 7, 8, 10, 11, 15, 16, 17, 18, 21, 23, 24, 26, 28, 33, 42, 45, 51, 59, 68, 84, 105, 106], "binari": [0, 18, 33, 45, 59], "interpol": [0, 18, 33, 97], "bruteforc": [0, 18, 33], "phase1_optim": [0, 18, 33, 45, 59], "implement": [0, 18, 32, 37, 39, 42, 44, 46, 53, 58, 66, 68, 69, 77, 101, 102, 107], "either": [0, 5, 13, 15, 16, 17, 18, 26, 30, 36, 51, 56, 60, 64, 83, 91, 114], "optmiz": [0, 18], "phase1": [0, 18, 33, 45, 59], "001": [0, 18, 39], "p": [0, 18, 74, 84], "store": [0, 4, 18, 28, 39, 41, 51, 64, 68, 81, 83, 86], "final": [0, 4, 7, 8, 12, 18, 21, 25, 30, 35, 45, 56, 59, 62, 70, 75, 83, 94, 95, 97, 105, 107], "after": [0, 1, 2, 4, 5, 7, 8, 10, 11, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 28, 29, 36, 42, 46, 51, 53, 54, 61, 62, 65, 66, 68, 71, 77, 84, 86, 88, 90, 91, 92, 93, 95, 96, 100, 102, 107, 111, 112], "multipli": [0, 18, 88, 95, 100], "mac": [0, 15, 16, 17, 18, 30, 56, 70, 95, 100, 110, 113], "therebi": [0, 18], "lesser": [0, 18], "re": [0, 9, 18, 33, 38, 45, 59, 67, 90, 96, 102], "mixed_precis": [0, 4, 18, 33, 45, 59], "choose_mixed_precis": [0, 4, 18, 33, 45, 59], "pareto_front_list": [0, 18, 45, 59], "next": [0, 2, 4, 7, 8, 13, 18, 21, 36, 51, 64, 65, 68, 74, 90, 105, 107, 108], "step": [0, 2, 3, 4, 6, 7, 8, 12, 13, 18, 21, 25, 33, 36, 42, 46, 51, 53, 54, 61, 64, 66, 68, 78, 84, 86, 87, 90, 91, 92, 93, 94, 95, 96, 97, 100, 102, 103, 105, 107], "would": [0, 4, 7, 12, 13, 18, 21, 25, 30, 31, 35, 51, 57, 68, 72, 83, 84, 106, 109], "actual": [0, 4, 7, 9, 18, 21, 32, 37, 42, 44, 46, 51, 53, 58, 63, 66, 68, 83], "take": [0, 4, 6, 7, 9, 12, 13, 14, 15, 16, 17, 18, 21, 25, 33, 36, 45, 51, 56, 59, 65, 68, 77, 83, 90, 97, 100, 102, 103, 105, 107, 114], "o": [0, 1, 4, 5, 6, 7, 10, 11, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 33, 56], "makedir": [0, 1, 4, 5, 7, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24], "exist_ok": [0, 1, 4, 5, 7, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24], "filename_prefix": [0, 1, 4, 5, 7, 13, 14, 18, 19, 20, 21, 22, 23, 24, 28, 33, 39, 41, 48, 51, 64, 68], "resnet18_mixed_precis": [0, 18], "hope": [0, 4, 7, 8, 13, 18, 21], "wa": [0, 1, 4, 5, 7, 8, 9, 13, 15, 17, 18, 19, 21, 30, 32, 44, 49, 51, 56, 58, 64, 81, 100], "understand": [0, 4, 7, 8, 9, 12, 13, 18, 21, 25, 72, 102, 112], "qat": [0, 3, 8, 13, 18, 28, 29, 41, 48, 51, 54, 63, 67, 86, 90, 91, 93, 96, 98, 102, 107, 108, 109], "learn": [0, 1, 2, 3, 5, 6, 7, 8, 9, 10, 13, 14, 15, 16, 17, 18, 19, 
"_is_encoding_frozen": 72, "freeze_encod": 72, "concept": 72, "mimick": 72, "allow_overwrit": [72, 86, 89], "requires_grad_": 72, "overwritten": 72, "pypi": 73, "intel": 73, "x86": 73, "processor": 73, "linux": [73, 75], "ubuntu": [73, 75], "22": [73, 75, 84, 90], "04": [73, 75], "lt": [73, 75], "pip": [73, 74, 75, 87, 96, 111], "apt": [73, 74, 75, 87], "liblapack": [73, 74, 75, 87], "libpython3": 73, "dev": [73, 74, 75], "python3": [73, 74, 75, 87, 96], "variant": [73, 75, 91, 92, 93, 102, 103, 104, 108], "latest": [73, 74], "whl": [73, 74, 75], "host": [73, 74, 75, 109, 111], "github": [73, 74, 75, 96, 109], "com": [73, 74, 75, 96, 109], "quic": [73, 74, 75, 96, 109], "prerequisit": [73, 111], "requisit": [73, 75], "cu121": [73, 74, 75], "cp310": [73, 74, 75], "manylinux_2_34_x86_64": [73, 74, 75], "torch_stabl": [73, 74, 75], "13": [73, 74, 78, 84], "cu117": 73, "cu118": 73, "older": 73, "brows": [73, 74, 75], "bash": [73, 74], "command": [73, 74, 75, 96, 111], "shell": 73, "nvidia": [73, 74, 75], "card": 73, "capabl": [73, 88, 111], "docker": 73, "455": 73, "alwai": [73, 97], "newer": 73, "cudnn": 73, "machin": [73, 74, 100], "develop": [73, 74, 75, 82, 83, 86], "click": 73, "instruct": [74, 75, 87, 96], "variant_str": 74, "ONE": 74, "pt113": 74, "aimet_vari": 74, "workspac": [74, 96], "absolute_path_to_workspac": [74, 96], "docker_image_nam": 74, "codelinaro": 74, "docker_container_nam": 74, "any_nam": 74, "any_tag": 74, "jenkin": 74, "dockerfil": 74, "grep": 74, "kill": 74, "rm": 74, "passwd": 74, "ro": 74, "home": 74, "mnt": 74, "entrypoint": 74, "hostnam": 74, "filesystem": 74, "port": [74, 111], "port_id": 74, "project": [74, 75], "wish": [74, 75], "tip": [74, 75], "post1": [74, 75], "prepend": [74, 75, 88, 96], "sudo": [74, 75, 96], "wheel": [74, 75], "tag": [74, 75, 96, 109], "ex": [74, 75, 83], "release_tag": [74, 75, 96], "download_url": [74, 75], "wheel_file_nam": [74, 75], "find_pkg_url_str": [74, 75], "usr": [74, 75], "lib": [74, 75], "dist": [74, 75], "envsetup": [74, 75], "sh": [74, 75], "pend": 75, "pip3": 75, "h": [75, 96, 113, 114], "local": [75, 96, 111], "accordingli": 75, "ye": [75, 100], "wget": 75, "gnupg2": 75, "archiv": 75, "exact": [75, 93, 102], "date": 75, "aforement": 75, "repo": 75, "ubuntu2204": 75, "x86_64": 75, "keyring_1": 75, "1_all": 75, "deb": 75, "dpkg": 75, "cat": 75, "reqs_deb_common": 75, "txt": 75, "xarg": 75, "reqs_deb_torch_common": 75, "reqs_deb_onnx_common": 75, "reqs_deb_tf_gpu": 75, "reqs_deb_torch_gpu": 75, "reqs_deb_onnx_gpu": 75, "uninstal": 75, "onnxruntime_v": 75, "c": [75, 95], "__version__": 75, "ln": 75, "gnu": 75, "libjpeg": 75, "chose": 75, "mixin": [77, 88], "quantizerbas": [77, 88, 89], "behav": [77, 88, 107], "abstract": [77, 88, 89], "__quant_init__": [77, 88], "initializd": [77, 88], "overridden": [77, 88, 106], "length": [77, 83, 88], "set_kernel": 77, "quantizedtensor": [77, 78, 81, 89], "keyword": 77, "output_encod": 77, "context": [77, 88, 90], "underli": [77, 107], "int_multipli": 77, "enc": 77, "notimplementederror": 77, "q_output": 77, "quantized_repr": [77, 81], "dq_output": 77, "qmult": 77, "quantizedmultipli": [77, 88], "classmethod": 77, "set_default_kernel": 77, "get_kernel": 77, "enter": [77, 88, 92], "is_initi": [77, 78, 79, 80, 88, 89], "from_modul": 77, "quantized_linear": 77, "get_default_kernel": 77, "retriev": 77, "module_cl": 77, "decor": 77, "subpackag": 77, "throw": 77, "declar": 77, "maskedadd": 77, "mask": 77, "quantizedmaskedadd": 77, "input_qtzr": 77, "value_qtzr": 77, "output_qtzr": 77, "clamp": 
[78, 79, 80, 89, 108], "lceil": [78, 79, 80, 89], "frac": [78, 79, 80, 89], "rfloor": [78, 79, 80, 89], "qmin": [78, 89, 108], "qmax": [78, 89, 108], "learnabl": [78, 89], "theta_": [78, 89], "pmatrix": [78, 89], "b_0": [78, 89], "b_1": [78, 83, 89], "cdot": [78, 89], "b_": [78, 89], "d": [78, 89], "equat": [78, 83, 89, 108], "out_": [78, 89], "j_0": [78, 89], "j_": [78, 89], "input_": [78, 89], "scale_": [78, 89], "i_0": [78, 89], "i_": [78, 89], "offset_": [78, 89], "quad": [78, 89, 108], "forall_": [78, 89], "leq": [78, 89], "i_d": [78, 89], "lfloor": [78, 79, 80, 89], "j_d": [78, 89], "b_d": [78, 89], "asymmetr": [78, 84, 89, 106, 108], "encoding_analyz": [78, 79, 80, 84, 89], "encodinganalyz": [78, 79, 80, 84, 89], "block_siz": [78, 83, 89], "129": [78, 89, 101], "255": [78, 81, 89], "122": [78, 89], "192": [78, 89], "106": [78, 89], "94": [78, 89], "145": [78, 89], "181": [78, 89], "144": [78, 89], "194": [78, 89], "74": [78, 89], "86": [78, 89], "150": [78, 89], "103": [78, 89], "37": [78, 89], "111": [78, 89], "237": [78, 89], "218": [78, 89], "49": [78, 89], "155": [78, 89], "179": [78, 89], "89": [78, 89], "110": [78, 89], "17": [78, 84, 89], "36": [78, 89], "83": [78, 89], "grad_fn": [78, 81, 89], "aliasbackward0": [78, 81, 89], "ones_lik": [78, 89], "187": [78, 89], "186": [78, 89], "131": [78, 89], "203": [78, 89], "143": [78, 89], "152": [78, 89], "226": [78, 89], "55": [78, 89], "172": [78, 89], "207": [78, 89], "146": [78, 89], "216": [78, 89], "238": [78, 89], "141": [78, 89], "178": [78, 89], "188": [78, 89], "63": [78, 89], "59": [78, 89], "19": [78, 84, 89], "162": [78, 89], "30": [78, 89], "109": [78, 89], "dequant": [78, 81, 89], "overlin": [78, 89], "qdq": [78, 79, 80, 89], "dequantizedtensor": [78, 81, 89], "2771": [78, 89], "3038": [78, 89], "0819": [78, 89], "9700": [78, 89], "9487": [78, 89], "1307": [78, 89], "7894": [78, 89], "1709": [78, 89], "2212": [78, 89], "7741": [78, 89], "0295": [78, 89], "2265": [78, 89], "0564": [78, 89], "6177": [78, 89], "0386": [78, 89], "0176": [78, 89], "6054": [78, 89], "8836": [78, 89], "1232": [78, 89], "8229": [78, 89], "5540": [78, 89], "3992": [78, 89], "2363": [78, 89], "2546": [78, 89], "0036": [78, 89], "2355": [78, 89], "1741": [78, 89], "6079": [78, 89], "6247": [78, 89], "0115": [78, 89], "2458": [78, 89], "9157": [78, 89], "4694": [78, 89], "0639": [78, 89], "2568": [78, 89], "0680": [78, 89], "6695": [78, 89], "7932": [78, 89], "1889": [78, 89], "0158": [78, 89], "5695": [78, 89], "5220": [78, 89], "1977": [78, 89], "4475": [78, 89], "0424": [78, 89], "1128": [78, 89], "8796": [78, 89], "1060": [78, 89], "5897": [78, 89], "6196": [78, 89], "9961": [78, 89], "0549": [78, 89], "6431": [78, 89], "0039": [78, 89], "8706": [78, 89], "4706": [78, 89], "2353": [78, 89], "8078": [78, 89], "3451": [78, 89], "1176": [78, 89], "4549": [78, 89], "0471": [78, 89], "5255": [78, 89], "4157": [78, 89], "0784": [78, 89], "5333": [78, 89], "1647": [78, 89], "2118": [78, 89], "2196": [78, 89], "9176": [78, 89], "9490": [78, 89], "7765": [78, 89], "4784": [78, 89], "6039": [78, 89], "3137": [78, 89], "3216": [78, 89], "8000": [78, 89], "4392": [78, 89], "4863": [78, 89], "overload": 78, "rceil": 78, "num_step": [78, 84], "num": 78, "_step": 78, "0000e": 78, "5000e": 78, "02": 78, "1921e": 78, "08": 78, "0500e": 78, "1000e": 78, "1500e": 78, "2000e": 78, "2500e": 78, "14": [78, 90], "quantize_dequant": 78, "0000": [78, 81], "0667": 78, "1333": 78, "2667": 78, "3333": 78, "4000": [78, 81], "4667": 78, "6000": [78, 81], "6667": 78, 
"7333": 78, "8667": 78, "9333": 78, "exponent_bit": [79, 80, 83], "mantissa_bit": [79, 80, 83], "cast": [79, 80], "expon": [79, 80, 83], "mantissa": [79, 80, 83], "x_c": [79, 80], "log_2": [79, 80], "ieee": [79, 80, 100, 103], "_max": [79, 80], "mutual": [79, 80, 83], "exclus": [79, 80, 83], "finer": [79, 80, 83], "8998": [79, 80], "0947": [79, 80], "0891": [79, 80], "1727": [79, 80], "is_bfloat16": [79, 80], "8984": [79, 80], "0859": [79, 80], "1729": [79, 80], "minmaxencodinganalyz": [79, 80, 84], "is_float16": [79, 80], "8994": [79, 80], "0889": [79, 80], "alia": [79, 80], "encodingbas": [81, 89], "57": 81, "312": 81, "153": 81, "205": 81, "set_rang": 81, "x_q": 81, "26": 81, "23": [81, 84], "x_dq": 81, "carri": 81, "gradient": 81, "thu": 81, "autograd": 81, "backpropag": 81, "38": [81, 100], "40": 81, "39": [81, 90], "51": 81, "521": 81, "41": 81, "quant_dequ": 81, "x_qdq": 81, "52": 81, "68": 81, "97": 81, "uint8": 81, "heavi": [82, 83, 86, 111, 112], "notic": [82, 83, 86], "visualization_tool": 82, "visualize_stat": 82, "save_path": 82, "quant_stats_visu": 82, "threshold": [82, 88, 92], "exce": 82, "exceed": 82, "counterpart": [83, 88], "come": [83, 100, 105], "split": 83, "con": 83, "storag": 83, "drawback": 83, "outlier": [83, 104, 108], "dimens": [83, 86, 88, 107, 110, 113], "influenc": 83, "resid": [83, 109], "chunk": 83, "isol": 83, "favor": 83, "relationship": 83, "long": [83, 86], "b_2": 83, "b_n": 83, "s_1": 83, "s_2": 83, "s_n": 83, "evenli": 83, "divid": [83, 88, 95, 105], "divis": 83, "permit": 83, "3d": 83, "arbitrari": 83, "restrict": [83, 101], "themselv": [83, 105], "linear_1": 83, "lie": 83, "leverag": 83, "expans": [83, 100], "groupedblockquantizedequant": 83, "decompressed_bw": 83, "expand": 83, "block_group": 83, "config_util": 83, "set_blockwise_quantization_for_weight": 83, "input_channel": 83, "linear1": 83, "isinst": 83, "signific": [83, 107], "switch": 83, "docstr": 83, "4d": 83, "mention": 83, "assist": 83, "set_activation_quantizers_to_float": 83, "set_grouped_blockwise_quantization_for_weight": 83, "decompress": 83, "addition": 83, "larger": [83, 110, 113], "encoding_vers": [83, 86], "exported_model": [83, 86], "gather": 84, "reset_stat": 84, "reset": 84, "update_stat": 84, "pow": 84, "0991": 84, "3696": 84, "_minmaxrang": 84, "1721": 84, "sqnrencodinganalyz": 84, "num_bin": 84, "2048": [84, 86], "asymmetric_delta_candid": 84, "symmetric_delta_candid": 84, "101": 84, "offset_candid": 84, "max_parallel": 84, "gamma": 84, "lowest": [84, 94], "3612": 84, "8497": 84, "_histogram": 84, "bin_edg": 84, "8907": 84, "3625": 84, "8343": 84, "3061": 84, "7779": 84, "2497": 84, "2784": 84, "8066": 84, "3348": 84, "8630": 84, "3912": 84, "7080": 84, "2438": 84, "percentileencodinganalyz": 84, "largest": 84, "smallest": 84, "1188": 84, "3368": 84, "27": 84, "5710": 84, "0989": 84, "6269": 84, "1548": 84, "6827": 84, "2106": 84, "2614": 84, "7335": 84, "2056": 84, "6776": 84, "1497": 84, "gptvq_weight": 86, "apply_gptvq": 86, "gptvq_param": 86, "param_encoding_path": 86, "module_names_to_exclud": 86, "block_level_module_nam": 86, "file_name_prefix": 86, "config_file_path": 86, "gptvqparamet": 86, "dataclass": 86, "row_axi": 86, "col_axi": 86, "rows_per_block": 86, "cols_per_block": 86, "vector_dim": 86, "vector_bw": 86, "vector_strid": 86, "index_bw": 86, "num_of_kmeans_iter": 86, "assignment_chunk_s": 86, "carrier": 86, "125m": 86, "optforcausallm": 86, "from_pretrain": 86, "facebook": 86, "gptvq_applied_model": 86, "lm_head": 86, "gptvq_opt": 86, "load_encod": 86, 
"deploi": [87, 108], "edg": [87, 98], "incur": [87, 98, 104], "workflow": [87, 90, 95, 98], "sample_input": [87, 90], "sample_output": 87, "out_dir": 87, "quantized_model": 87, "quickstart": 87, "product": [87, 95, 98], "technologi": [87, 98], "subsidiari": [87, 98], "extra": 88, "nativ": 88, "quantizedsoftmax": [88, 90], "qmul": 88, "sens": 88, "qadd": 88, "quantizedadd": 88, "calibration_data_load": 88, "adaptiveavgpool1d": 88, "quantizedadaptiveavgpool1d": 88, "adaptiveavgpool2d": 88, "quantizedadaptiveavgpool2d": 88, "adaptiveavgpool3d": 88, "quantizedadaptiveavgpool3d": 88, "adaptivemaxpool1d": 88, "quantizedadaptivemaxpool1d": 88, "adaptivemaxpool2d": 88, "quantizedadaptivemaxpool2d": 88, "adaptivemaxpool3d": 88, "quantizedadaptivemaxpool3d": 88, "alphadropout": 88, "quantizedalphadropout": 88, "avgpool1d": 88, "quantizedavgpool1d": 88, "quantizedavgpool2d": 88, "avgpool3d": 88, "quantizedavgpool3d": 88, "quantizedbatchnorm1d": 88, "quantizedbatchnorm2d": 88, "batchnorm3d": 88, "quantizedbatchnorm3d": 88, "celu": 88, "quantizedcelu": 88, "channelshuffl": 88, "quantizedchannelshuffl": 88, "constantpad1d": 88, "quantizedconstantpad1d": 88, "constantpad2d": 88, "quantizedconstantpad2d": 88, "constantpad3d": 88, "quantizedconstantpad3d": 88, "quantizedconv1d": 88, "quantizedconv3d": 88, "convtranspose1d": [88, 109], "quantizedconvtranspose1d": 88, "quantizedconvtranspose2d": 88, "convtranspose3d": 88, "quantizedconvtranspose3d": 88, "quantizeddropout": 88, "dropout2d": 88, "quantizeddropout2d": 88, "dropout3d": 88, "quantizeddropout3d": 88, "elu": 88, "quantizedelu": 88, "featurealphadropout": 88, "quantizedfeaturealphadropout": 88, "quantizedflatten": 88, "quantizedfold": 88, "fractionalmaxpool2d": 88, "quantizedfractionalmaxpool2d": 88, "fractionalmaxpool3d": 88, "quantizedfractionalmaxpool3d": 88, "gelu": 88, "quantizedgelu": 88, "glu": 88, "quantizedglu": 88, "groupnorm": 88, "quantizedgroupnorm": 88, "hardshrink": 88, "quantizedhardshrink": 88, "hardsigmoid": 88, "quantizedhardsigmoid": 88, "hardswish": 88, "quantizedhardswish": 88, "hardtanh": 88, "quantizedhardtanh": 88, "instancenorm1d": 88, "quantizedinstancenorm1d": 88, "instancenorm2d": 88, "quantizedinstancenorm2d": 88, "instancenorm3d": 88, "quantizedinstancenorm3d": 88, "lppool1d": 88, "quantizedlppool1d": 88, "lppool2d": 88, "quantizedlppool2d": 88, "quantizedlayernorm": 88, "leakyrelu": 88, "quantizedleakyrelu": 88, "localresponsenorm": 88, "quantizedlocalresponsenorm": 88, "logsigmoid": 88, "quantizedlogsigmoid": 88, "logsoftmax": 88, "quantizedlogsoftmax": 88, "maxpool1d": 88, "quantizedmaxpool1d": 88, "maxpool2d": 88, "quantizedmaxpool2d": 88, "maxpool3d": 88, "quantizedmaxpool3d": 88, "maxunpool1d": 88, "quantizedmaxunpool1d": 88, "maxunpool2d": 88, "quantizedmaxunpool2d": 88, "maxunpool3d": 88, "quantizedmaxunpool3d": 88, "mish": 88, "quantizedmish": 88, "quantizedprelu": 88, "pixelshuffl": 88, "quantizedpixelshuffl": 88, "pixelunshuffl": 88, "quantizedpixelunshuffl": 88, "rrelu": 88, "quantizedrrelu": 88, "quantizedrelu6": 88, "reflectionpad1d": 88, "quantizedreflectionpad1d": 88, "reflectionpad2d": 88, "quantizedreflectionpad2d": 88, "replicationpad1d": 88, "quantizedreplicationpad1d": 88, "replicationpad2d": 88, "quantizedreplicationpad2d": 88, "replicationpad3d": 88, "quantizedreplicationpad3d": 88, "selu": 88, "quantizedselu": 88, "silu": 88, "quantizedsilu": 88, "quantizedsigmoid": 88, "softmax2d": 88, "quantizedsoftmax2d": 88, "softmin": 88, "quantizedsoftmin": 88, "quantizedsoftplu": 88, "softshrink": 88, 
"quantizedsoftshrink": 88, "softsign": 88, "quantizedsoftsign": 88, "tanh": 88, "quantizedtanh": 88, "tanhshrink": 88, "quantizedtanhshrink": 88, "quantizedthreshold": 88, "unflatten": 88, "quantizedunflatten": 88, "unfold": 88, "quantizedunfold": 88, "upsampl": [88, 101], "quantizedupsampl": 88, "upsamplingbilinear2d": 88, "quantizedupsamplingbilinear2d": 88, "upsamplingnearest2d": 88, "quantizedupsamplingnearest2d": 88, "zeropad2d": 88, "quantizedzeropad2d": 88, "bceloss": 88, "quantizedbceloss": 88, "bcewithlogitsloss": 88, "quantizedbcewithlogitsloss": 88, "bilinear": [88, 101], "quantizedbilinear": 88, "ctcloss": 88, "quantizedctcloss": 88, "cosinesimilar": 88, "quantizedcosinesimilar": 88, "crossentropyloss": [88, 90], "quantizedcrossentropyloss": 88, "hingeembeddingloss": 88, "quantizedhingeembeddingloss": 88, "huberloss": 88, "quantizedhuberloss": 88, "kldivloss": 88, "quantizedkldivloss": 88, "l1loss": 88, "quantizedl1loss": 88, "mseloss": 88, "quantizedmseloss": 88, "multilabelmarginloss": 88, "quantizedmultilabelmarginloss": 88, "multilabelsoftmarginloss": 88, "quantizedmultilabelsoftmarginloss": 88, "multimarginloss": 88, "quantizedmultimarginloss": 88, "nllloss": 88, "quantizednllloss": 88, "nllloss2d": 88, "quantizednllloss2d": 88, "pairwisedist": 88, "quantizedpairwisedist": 88, "poissonnllloss": 88, "quantizedpoissonnllloss": 88, "smoothl1loss": 88, "quantizedsmoothl1loss": 88, "softmarginloss": 88, "quantizedsoftmarginloss": 88, "cosineembeddingloss": 88, "quantizedcosineembeddingloss": 88, "gaussiannllloss": 88, "quantizedgaussiannllloss": 88, "marginrankingloss": 88, "quantizedmarginrankingloss": 88, "tripletmarginloss": 88, "quantizedtripletmarginloss": 88, "tripletmarginwithdistanceloss": 88, "quantizedtripletmarginwithdistanceloss": 88, "quantizedembed": 88, "embeddingbag": 88, "quantizedembeddingbag": 88, "gru": [88, 109], "quantizedgru": 88, "rnn": [88, 109], "quantizedrnn": 88, "grucel": 88, "quantizedgrucel": 88, "rnncell": 88, "quantizedrnncel": 88, "lstm": [88, 109], "quantizedlstm": 88, "lstmcell": 88, "quantizedlstmcel": 88, "cumsum": 88, "quantizedcumsum": 88, "sin": 88, "quantizedsin": 88, "co": 88, "quantizedco": 88, "rsqrt": 88, "quantizedrsqrt": 88, "quantizedreshap": 88, "matmul": [88, 109], "quantizedmatmul": 88, "subtract": 88, "quantizedsubtract": 88, "quantizeddivid": 88, "bmm": 88, "quantizedbmm": 88, "baddbmm": 88, "quantizedbaddbmm": 88, "addmm": 88, "quantizedaddmm": 88, "concat": [88, 109], "quantizedconcat": 88, "allow_overwit": 89, "get_encod": 89, "get_legacy_encod": 89, "register_quantization_paramet": 89, "set_legacy_encod": 89, "tutori": 90, "meant": 90, "clearli": 90, "cifar10_train_data": 90, "fashionmnist": 90, "cifar10": 90, "cifar10_test_data": 90, "test_load": 90, "bn_1": 90, "bn_2": 90, "establish": 90, "send": 90, "batch_idx": 90, "zero_grad": 90, "fp_accuraci": 90, "91": 90, "70999908447266": 90, "coupl": 90, "care": [90, 100], "conform": 90, "incorrectli": 90, "thankfulli": 90, "fp_accuracy_prepar": 90, "2024": 90, "07": 90, "747": 90, "806": 90, "module_relu": 90, "module_relu_1": 90, "module_softmax": 90, "12544": 90, "getattr_1": 90, "getitem": 90, "graph_modul": 90, "print_read": 90, "passthrough": 90, "previous": 90, "theoret": 90, "idx": 90, "quantized_accuraci": 90, "1500015258789": 90, "advanc": 90, "post_qat_accuraci": 90, "92": 90, "05333709716797": 90, "happi": 90, "export_path": 90, "model_nam": 90, "fashion_mnist_model": 90, "sent": 90, "terminologi": 91, "abbrevi": 91, "bnf": 91, "hbf": 91, "often": [91, 92, 100, 
105], "16x": 91, "caution": 91, "soon": 92, "prone": 92, "preprat": 92, "preceed": 93, "decreas": 93, "main": [93, 106, 109, 112], "oscil": 93, "cp": [94, 95, 97, 114], "magnitud": 94, "connect": [94, 99, 100, 113], "residu": 94, "regress": 94, "outlin": 95, "svd": [95, 96, 97, 99, 100, 109], "spatial": [95, 96, 97, 99, 100, 109], "accumul": [95, 100], "uncompress": 95, "latenc": 95, "bandwidth": 95, "websit": 95, "half": 95, "unknown": 95, "ssvd": 95, "f0": 95, "75": 95, "2b": 95, "revisit": 95, "close": [95, 108], "jupyt": 96, "familiar": 96, "teach": 96, "viewabl": 96, "metapackag": 96, "grant": 96, "admin": 96, "privileg": 96, "ip": 96, "browser": 96, "past": 96, "mkdir": 96, "cd": 96, "repositori": 96, "git": 96, "www": 96, "navig": 96, "launch": 96, "ipynb": 96, "assess": 97, "column": 97, "unmodifi": 97, "omit": [97, 106], "account": [97, 102, 105, 107], "drastic": 97, "redund": 98, "dilat": 99, "guidebook": 100, "greedi": [100, 111], "decompos": [100, 110, 113], "fc": [100, 113], "sharp": 100, "decai": 100, "slow": 100, "searcher": 100, "wors": 100, "strike": 100, "balanc": 100, "seem": [100, 102], "xiangyu": 100, "zhang": 100, "jianhua": 100, "zou": 100, "kaim": 100, "he": 100, "jian": 100, "sun": 100, "deep": 100, "transact": 100, "pattern": 100, "intellig": 100, "vol": 100, "pp": 100, "1943": 100, "1955": 100, "oct": 100, "2016": 100, "yihui": 100, "confer": [100, 103], "vision": [100, 103], "venic": 100, "2017": 100, "1398": 100, "1406": 100, "jaderberg": 100, "andrea": 100, "vedaldi": 100, "andrew": 100, "zisserman": 100, "british": 100, "jan": 100, "2014": 100, "andrei": 100, "kuzmin": 100, "marku": [100, 103], "nagel": [100, 103], "saurabh": 100, "pitr": 100, "sandeep": 100, "pendyam": 100, "tijmen": [100, 103], "blankevoort": [100, 103], "taxonomi": 100, "primit": 101, "sure": 101, "slice": 101, "align_corn": 101, "deconvolut": 101, "dconvolut": 101, "deeplabv3": 101, "address": [101, 107, 111], "briefli": 102, "suffici": [102, 104, 105, 108], "fast": 102, "easi": 102, "robust": 102, "_aimet": 102, "prep": 102, "complainc": 102, "align": 102, "retri": 102, "ref": 102, "satisfactori": [102, 107], "onto": 102, "pb": 102, "trial": [102, 107], "diagnost": 102, "becom": 103, "paper": 103, "surround": 103, "big": 103, "discrep": 103, "wide": 103, "significantli": 103, "quantizaion": 103, "analyt": [103, 111], "bottleneck": [103, 107], "hybrid": 103, "approach": [103, 108], "mart": 103, "van": 103, "baalen": 103, "seoul": 103, "octob": 103, "area": 104, "situat": 104, "pinpoint": 104, "culprit": 104, "squar": [104, 108], "monitor": 104, "contribut": [104, 107], "read": 104, "presenc": 105, "hyperparamet": 105, "converg": 105, "six": 106, "turn": 106, "empti": 106, "outsid": 106, "strict": [106, 108], "unsign": [106, 108], "throughout": [106, 112], "convent": 106, "member": 106, "insight": [107, 111, 112], "underperform": 107, "chart": 107, "confid": 107, "toward": 107, "uneven": 107, "global": 107, "inner": 107, "bert": 107, "reveal": 107, "resort": 107, "revert": 107, "mitig": 108, "ingest": 108, "000": 108, "de": 108, "hook": 108, "intercept": 108, "four": 108, "textrm": 108, "dfrac": 108, "elimin": 108, "induc": 108, "signal": 108, "satur": 108, "erro": 108, "alongsid": 108, "settabl": 108, "slim": 109, "backslash": 109, "io": 109, "user_guid": 109, "api_doc": 109, "quantizablemultiheadattent": 109, "kyuykim": 109, "mangal": 109, "geunle": 109, "correctli": 109, "klhsieh": 109, "akhobar": 109, "ashvkuma": 109, "fp16": 109, "stand": 109, "adaptiveround": 109, "recurr": 109, 
"\ud835\udc5a": [110, 113], "\ud835\udc5b": [110, 113], "\u210e": [110, 113], "\ud835\udc64": [110, 113], "\ud835\udc58": [110, 113], "degre": [110, 113], "augment": 111, "progress": [111, 112], "computation": [111, 112], "arrang": 111, "websocket": 111, "listen": 111, "5006": 111, "lot": 112, "term": 113, "lose": 114, "volum": 114, "hxwx8": 114, "hxwx5": 114, "propag": 114, "green": 114, "side": 114, "pink": 114, "color": 114, "orang": 114}, "objects": {"aimet_common.bias_correction": [[55, 0, 1, "", "ConvBnInfoType"]], "aimet_common.defs": [[55, 0, 1, "", "ActivationType"], [59, 0, 1, "", "CallbackFunc"], [30, 0, 1, "", "CompressionScheme"], [30, 0, 1, "", "CostMetric"], [56, 0, 1, "", "GreedySelectionParameters"], [68, 0, 1, "", "QuantScheme"]], "aimet_common.defs.ActivationType": [[55, 1, 1, "", "no_activation"], [55, 1, 1, "", "relu"], [55, 1, 1, "", "relu6"]], "aimet_common.defs.CompressionScheme": [[30, 1, 1, "", "channel_pruning"], [30, 1, 1, "", "spatial_svd"], [30, 1, 1, "", "weight_svd"]], "aimet_common.defs.CostMetric": [[30, 1, 1, "", "mac"], [30, 1, 1, "", "memory"]], "aimet_common.defs.QuantScheme": [[68, 1, 1, "", "post_training_percentile"], [68, 1, 1, "", "post_training_tf"], [68, 1, 1, "", "post_training_tf_enhanced"], [68, 1, 1, "", "training_range_learning_with_tf_enhanced_init"], [68, 1, 1, "", "training_range_learning_with_tf_init"]], "aimet_common.utils": [[66, 0, 1, "", "CallbackFunc"]], "aimet_onnx.adaround.adaround_weight.Adaround": [[41, 2, 1, "", "apply_adaround"]], "aimet_onnx.adaround.adaround_weight": [[41, 0, 1, "", "AdaroundParameters"]], "aimet_onnx.amp.mixed_precision_algo": [[45, 0, 1, "", "EvalCallbackFactory"]], "aimet_onnx.amp.mixed_precision_algo.EvalCallbackFactory": [[45, 3, 1, "", "sqnr"]], "aimet_onnx.amp.quantizer_groups": [[45, 0, 1, "", "QuantizerGroup"]], "aimet_onnx.amp.quantizer_groups.QuantizerGroup": [[45, 3, 1, "", "get_activation_quantizers"], [45, 3, 1, "", "get_active_quantizers"], [45, 3, 1, "", "get_candidate"], [45, 3, 1, "", "get_param_quantizers"], [45, 3, 1, "", "set_quantizers_to_candidate"], [45, 3, 1, "", "to_list"]], "aimet_onnx.auto_quant_v2": [[42, 0, 1, "", "AutoQuant"]], "aimet_onnx.auto_quant_v2.AutoQuant": [[42, 3, 1, "", "get_quant_scheme_candidates"], [42, 3, 1, "", "optimize"], [42, 3, 1, "", "run_inference"], [42, 3, 1, "", "set_adaround_params"], [42, 3, 1, "", "set_quant_scheme_candidates"]], "aimet_onnx.cross_layer_equalization": [[43, 2, 1, "", "equalize_model"]], "aimet_onnx.layer_output_utils": [[44, 0, 1, "", "LayerOutputUtil"]], "aimet_onnx.layer_output_utils.LayerOutputUtil": [[44, 3, 1, "", "generate_layer_outputs"]], "aimet_onnx.mixed_precision": [[45, 2, 1, "", "choose_mixed_precision"]], "aimet_onnx.quant_analyzer": [[46, 0, 1, "", "QuantAnalyzer"]], "aimet_onnx.quant_analyzer.QuantAnalyzer": [[46, 3, 1, "", "analyze"], [46, 3, 1, "", "check_model_sensitivity_to_quantization"], [46, 3, 1, "", "create_quantsim_and_encodings"], [46, 3, 1, "", "enable_per_layer_mse_loss"], [46, 3, 1, "", "export_per_layer_encoding_min_max_range"], [46, 3, 1, "", "export_per_layer_mse_loss"], [46, 3, 1, "", "export_per_layer_stats_histogram"], [46, 3, 1, "", "perform_per_layer_analysis_by_disabling_quantizers"], [46, 3, 1, "", "perform_per_layer_analysis_by_enabling_quantizers"]], "aimet_onnx.quantsim": [[48, 0, 1, "", "QuantizationSimModel"]], "aimet_onnx.quantsim.QuantizationSimModel": [[48, 3, 1, "", "compute_encodings"], [48, 3, 1, "", "export"]], "aimet_tensorflow.keras.adaround_weight.Adaround": [[28, 2, 1, "", 
"apply_adaround"]], "aimet_tensorflow.keras.adaround_weight": [[28, 0, 1, "", "AdaroundParameters"]], "aimet_tensorflow.keras.batch_norm_fold": [[36, 2, 1, "", "fold_all_batch_norms"], [29, 2, 1, "", "fold_all_batch_norms_to_scale"], [36, 2, 1, "", "fold_given_batch_norms"]], "aimet_tensorflow.keras.bn_reestimation": [[29, 2, 1, "", "reestimate_bn_stats"]], "aimet_tensorflow.keras.compress": [[30, 0, 1, "", "ModelCompressor"]], "aimet_tensorflow.keras.compress.ModelCompressor": [[30, 3, 1, "", "compress_model"]], "aimet_tensorflow.keras.cross_layer_equalization": [[36, 0, 1, "", "ClsSetInfo"], [31, 2, 1, "", "equalize_model"]], "aimet_tensorflow.keras.cross_layer_equalization.ClsSetInfo": [[36, 0, 1, "", "ClsSetLayerPairInfo"]], "aimet_tensorflow.keras.cross_layer_equalization.CrossLayerScaling": [[36, 2, 1, "", "scale_cls_sets"], [36, 2, 1, "", "scale_model"]], "aimet_tensorflow.keras.cross_layer_equalization.HighBiasFold": [[36, 2, 1, "id0", "bias_fold"]], "aimet_tensorflow.keras.defs": [[30, 0, 1, "", "ModuleCompRatioPair"], [30, 0, 1, "", "SpatialSvdParameters"]], "aimet_tensorflow.keras.defs.SpatialSvdParameters": [[30, 0, 1, "", "AutoModeParams"], [30, 0, 1, "", "ManualModeParams"], [30, 0, 1, "", "Mode"]], "aimet_tensorflow.keras.defs.SpatialSvdParameters.Mode": [[30, 1, 1, "", "auto"], [30, 1, 1, "", "manual"]], "aimet_tensorflow.keras.layer_output_utils": [[32, 0, 1, "", "LayerOutputUtil"]], "aimet_tensorflow.keras.layer_output_utils.LayerOutputUtil": [[32, 3, 1, "", "generate_layer_outputs"]], "aimet_tensorflow.keras.mixed_precision": [[33, 2, 1, "", "choose_fast_mixed_precision"], [33, 2, 1, "", "choose_mixed_precision"]], "aimet_tensorflow.keras.model_preparer": [[35, 2, 1, "", "prepare_model"]], "aimet_tensorflow.keras.quant_analyzer": [[37, 0, 1, "", "QuantAnalyzer"]], "aimet_tensorflow.keras.quant_analyzer.QuantAnalyzer": [[37, 3, 1, "", "analyze"]], "aimet_tensorflow.keras.quantsim": [[39, 0, 1, "", "QuantizationSimModel"]], "aimet_tensorflow.keras.quantsim.QuantizationSimModel": [[39, 3, 1, "", "compute_encodings"], [39, 3, 1, "", "export"]], "aimet_torch.amp.mixed_precision_algo": [[59, 0, 1, "", "EvalCallbackFactory"]], "aimet_torch.amp.mixed_precision_algo.EvalCallbackFactory": [[59, 3, 1, "", "sqnr"]], "aimet_torch.amp.quantizer_groups": [[59, 0, 1, "", "QuantizerGroup"]], "aimet_torch.amp.quantizer_groups.QuantizerGroup": [[59, 3, 1, "", "get_active_quantizers"], [59, 3, 1, "", "get_candidate"], [59, 3, 1, "", "get_input_quantizer_modules"], [59, 3, 1, "", "set_quantizers_to_candidate"], [59, 3, 1, "", "to_list"]], "aimet_torch.arch_checker.arch_checker.ArchChecker": [[52, 2, 1, "", "check_model_arch"]], "aimet_torch.batch_norm_fold": [[65, 2, 1, "", "fold_all_batch_norms"], [54, 2, 1, "", "fold_all_batch_norms_to_scale"], [65, 2, 1, "", "fold_given_batch_norms"]], "aimet_torch.bias_correction": [[55, 2, 1, "", "correct_bias"]], "aimet_torch.bn_reestimation": [[54, 2, 1, "", "reestimate_bn_stats"]], "aimet_torch.compress": [[56, 0, 1, "", "ModelCompressor"]], "aimet_torch.compress.ModelCompressor": [[56, 3, 1, "", "compress_model"]], "aimet_torch.cross_layer_equalization": [[65, 0, 1, "", "ClsSetInfo"], [57, 2, 1, "", "equalize_model"]], "aimet_torch.cross_layer_equalization.ClsSetInfo": [[65, 0, 1, "", "ClsSetLayerPairInfo"]], "aimet_torch.cross_layer_equalization.CrossLayerScaling": [[65, 2, 1, "", "scale_cls_sets"], [65, 2, 1, "", "scale_model"]], "aimet_torch.cross_layer_equalization.HighBiasFold": [[65, 2, 1, "id0", "bias_fold"]], "aimet_torch.defs": [[56, 0, 1, 
"", "ChannelPruningParameters"], [56, 0, 1, "", "ModuleCompRatioPair"], [56, 0, 1, "", "SpatialSvdParameters"], [56, 0, 1, "", "WeightSvdParameters"]], "aimet_torch.defs.ChannelPruningParameters": [[56, 0, 1, "", "AutoModeParams"], [56, 0, 1, "", "ManualModeParams"], [56, 0, 1, "", "Mode"]], "aimet_torch.defs.ChannelPruningParameters.Mode": [[56, 1, 1, "", "auto"], [56, 1, 1, "", "manual"]], "aimet_torch.defs.SpatialSvdParameters": [[56, 0, 1, "", "AutoModeParams"], [56, 0, 1, "", "ManualModeParams"], [56, 0, 1, "", "Mode"]], "aimet_torch.defs.SpatialSvdParameters.Mode": [[56, 1, 1, "", "auto"], [56, 1, 1, "", "manual"]], "aimet_torch.defs.WeightSvdParameters": [[56, 0, 1, "", "AutoModeParams"], [56, 0, 1, "", "ManualModeParams"], [56, 0, 1, "", "Mode"]], "aimet_torch.defs.WeightSvdParameters.Mode": [[56, 1, 1, "", "auto"], [56, 1, 1, "", "manual"]], "aimet_torch.gptvq.defs": [[86, 0, 1, "", "GPTVQParameters"]], "aimet_torch.gptvq.gptvq_weight.GPTVQ": [[86, 2, 1, "", "apply_gptvq"]], "aimet_torch.layer_output_utils": [[58, 0, 1, "", "LayerOutputUtil"], [58, 0, 1, "", "NamingScheme"]], "aimet_torch.layer_output_utils.LayerOutputUtil": [[58, 3, 1, "", "generate_layer_outputs"]], "aimet_torch.layer_output_utils.NamingScheme": [[58, 1, 1, "", "ONNX"], [58, 1, 1, "", "PYTORCH"], [58, 1, 1, "", "TORCHSCRIPT"]], "aimet_torch.mixed_precision": [[59, 2, 1, "", "choose_mixed_precision"]], "aimet_torch.model_preparer": [[61, 2, 1, "", "prepare_model"]], "aimet_torch.nn.modules.custom": [[69, 0, 1, "", "ScatterDense"], [69, 0, 1, "", "SparseTensorWrapper"]], "aimet_torch.peft": [[64, 0, 1, "", "AdapterMetaData"], [64, 0, 1, "", "PeftQuantUtils"], [64, 3, 1, "", "replace_lora_layers_with_quantizable_layers"], [64, 3, 1, "", "track_lora_meta_data"]], "aimet_torch.peft.PeftQuantUtils": [[64, 3, 1, "", "disable_lora_adapters"], [64, 3, 1, "", "enable_adapter_and_load_weights"], [64, 3, 1, "", "export_adapter_weights"], [64, 3, 1, "", "freeze_base_model"], [64, 3, 1, "", "freeze_base_model_activation_quantizers"], [64, 3, 1, "", "freeze_base_model_param_quantizers"], [64, 3, 1, "", "get_fp_lora_layer"], [64, 3, 1, "", "get_quantized_lora_layer"], [64, 3, 1, "", "quantize_lora_scale_with_fixed_range"], [64, 3, 1, "", "set_bitwidth_for_lora_adapters"]], "aimet_torch.v1.adaround.adaround_weight.Adaround": [[51, 2, 1, "", "apply_adaround"]], "aimet_torch.v1.adaround.adaround_weight": [[51, 0, 1, "", "AdaroundParameters"]], "aimet_torch.v1.auto_quant": [[53, 0, 1, "", "AutoQuant"]], "aimet_torch.v1.quant_analyzer": [[66, 0, 1, "", "QuantAnalyzer"]], "aimet_torch.v1.quant_analyzer.QuantAnalyzer": [[66, 3, 1, "", "analyze"], [66, 3, 1, "", "check_model_sensitivity_to_quantization"], [66, 3, 1, "", "enable_per_layer_mse_loss"], [66, 3, 1, "", "export_per_layer_encoding_min_max_range"], [66, 3, 1, "", "export_per_layer_mse_loss"], [66, 3, 1, "", "export_per_layer_stats_histogram"], [66, 3, 1, "", "perform_per_layer_analysis_by_disabling_quant_wrappers"], [66, 3, 1, "", "perform_per_layer_analysis_by_enabling_quant_wrappers"]], "aimet_torch.v1.quantsim": [[55, 0, 1, "", "QuantParams"], [68, 0, 1, "", "QuantizationSimModel"], [68, 3, 1, "", "load_checkpoint"], [68, 3, 1, "", "save_checkpoint"]], "aimet_torch.v1.quantsim.QuantizationSimModel": [[68, 3, 1, "", "compute_encodings"], [68, 3, 1, "", "export"]], "aimet_torch.v2.nn": [[88, 0, 1, "", "QuantizationMixin"]], "aimet_torch.v2.nn.QuantizationMixin": [[88, 3, 1, "", "__quant_init__"], [88, 3, 1, "", "compute_encodings"], [88, 3, 1, "", "forward"], [77, 3, 1, "", 
"from_module"], [77, 3, 1, "", "get_default_kernel"], [77, 3, 1, "", "get_kernel"], [77, 3, 1, "", "implements"], [88, 1, 1, "", "input_quantizers"], [88, 1, 1, "", "output_quantizers"], [88, 1, 1, "", "param_quantizers"], [77, 3, 1, "", "set_default_kernel"], [77, 3, 1, "", "set_kernel"]], "aimet_torch.v2.quantization": [[78, 4, 0, "-", "affine"], [80, 4, 0, "-", "float"]], "aimet_torch.v2.quantization.affine": [[78, 0, 1, "", "Quantize"], [78, 0, 1, "", "QuantizeDequantize"], [78, 2, 1, "", "dequantize"], [78, 2, 1, "", "quantize"], [78, 2, 1, "", "quantize_dequantize"]], "aimet_torch.v2.quantization.affine.quantizer": [[89, 0, 1, "", "Quantize"], [89, 0, 1, "", "QuantizeDequantize"], [89, 0, 1, "", "QuantizerBase"]], "aimet_torch.v2.quantization.affine.quantizer.Quantize": [[89, 3, 1, "", "forward"]], "aimet_torch.v2.quantization.affine.quantizer.QuantizeDequantize": [[89, 3, 1, "", "forward"]], "aimet_torch.v2.quantization.affine.quantizer.QuantizerBase": [[89, 3, 1, "", "allow_overwrite"], [89, 3, 1, "", "compute_encodings"], [89, 3, 1, "", "get_encoding"], [89, 3, 1, "", "get_legacy_encodings"], [89, 3, 1, "", "is_initialized"], [89, 3, 1, "", "register_quantization_parameter"], [89, 3, 1, "", "set_legacy_encodings"]], "aimet_torch.v2.quantization.encoding_analyzer": [[84, 0, 1, "", "EncodingAnalyzer"], [84, 0, 1, "", "MinMaxEncodingAnalyzer"], [84, 0, 1, "", "PercentileEncodingAnalyzer"], [84, 0, 1, "", "SqnrEncodingAnalyzer"]], "aimet_torch.v2.quantization.encoding_analyzer.EncodingAnalyzer": [[84, 3, 1, "", "compute_encodings"], [84, 3, 1, "", "reset_stats"], [84, 3, 1, "", "update_stats"]], "aimet_torch.v2.quantization.float": [[80, 0, 1, "", "FloatQuantizeDequantize"], [80, 0, 1, "", "QuantizeDequantize"]], "aimet_torch.v2.quantization.tensor": [[81, 0, 1, "", "DequantizedTensor"], [81, 0, 1, "", "QuantizedTensor"]], "aimet_torch.v2.quantization.tensor.DequantizedTensor": [[81, 3, 1, "", "dequantize"], [81, 3, 1, "", "quantize"], [81, 3, 1, "", "quantized_repr"]], "aimet_torch.v2.quantization.tensor.QuantizedTensor": [[81, 3, 1, "", "dequantize"], [81, 3, 1, "", "quantize"], [81, 3, 1, "", "quantized_repr"]], "aimet_torch.v2.quantsim.config_utils": [[83, 2, 1, "", "set_activation_quantizers_to_float"], [83, 2, 1, "", "set_blockwise_quantization_for_weights"], [83, 2, 1, "", "set_grouped_blockwise_quantization_for_weights"]], "aimet_torch.v2.visualization_tools": [[82, 2, 1, "", "visualize_stats"]], "aimet_torch.visualize_model": [[71, 2, 1, "", "visualize_changes_after_optimization"], [71, 2, 1, "", "visualize_relative_weight_ranges_to_identify_problematic_layers"], [71, 2, 1, "", "visualize_weight_ranges"]], "aimet_torch.visualize_serialized_data": [[70, 0, 1, "", "VisualizeCompression"]], "aimet_torch.visualize_serialized_data.VisualizeCompression": [[70, 3, 1, "", "display_comp_ratio_plot"], [70, 3, 1, "", "display_eval_scores"]]}, "objtypes": {"0": "py:class", "1": "py:attribute", "2": "py:function", "3": "py:method", "4": "py:module"}, "objnames": {"0": ["py", "class", "Python class"], "1": ["py", "attribute", "Python attribute"], "2": ["py", "function", "Python function"], "3": ["py", "method", "Python method"], "4": ["py", "module", "Python module"]}, "titleterms": {"automat": [0, 4, 18], "mix": [0, 4, 18, 33, 45, 59], "precis": [0, 4, 18, 33, 45, 59], "amp": [0, 4, 18, 33], "overal": [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 90], "flow": [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 
23, 24, 25, 64, 90, 103], "what": [0, 4, 12, 13, 18, 21, 25, 67], "thi": [0, 4, 12, 13, 18, 21, 25], "notebook": [0, 4, 12, 13, 18, 21, 25, 28, 29, 31, 51, 53, 54, 57, 66, 68, 96], "i": [0, 4, 12, 13, 18, 21, 25], "dataset": [0, 1, 2, 3, 4, 5, 6, 7, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25], "1": [0, 1, 2, 3, 4, 5, 6, 7, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 49, 55, 75, 90, 96, 109], "exampl": [0, 1, 2, 3, 4, 5, 6, 7, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 21, 22, 23, 24, 25, 28, 29, 30, 31, 32, 33, 35, 36, 37, 39, 41, 42, 43, 44, 45, 46, 48, 49, 51, 53, 54, 55, 56, 57, 58, 59, 61, 65, 66, 68, 69, 70, 71, 72, 86, 87, 96], "evalu": [0, 1, 2, 3, 4, 5, 7, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 21, 22, 23, 24, 25], "pipelin": [0, 1, 2, 3, 5, 6, 7, 12, 13, 14, 15, 16, 17, 18, 19, 21, 22, 23, 24, 25], "2": [0, 1, 2, 3, 4, 5, 6, 7, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 33, 49, 55, 75, 90, 96, 109], "convert": [0, 1, 2, 3, 9], "an": [0, 1, 2, 3], "fp32": [0, 1, 2, 3, 4, 5, 6, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24], "pytorch": [0, 1, 2, 3, 49, 50, 51, 53, 54, 55, 56, 57, 58, 59, 60, 63, 65, 66, 67, 68, 75, 87, 90, 101, 102, 112], "model": [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 34, 35, 60, 61, 62, 87, 90, 98, 100, 101, 102], "onnx": [0, 1, 2, 3, 40, 41, 42, 43, 44, 45, 46, 47, 48, 69, 75], "simplifi": [0, 1, 2, 3], "baselin": [0, 1, 2, 3, 4, 5, 6, 10, 11, 13, 14, 15, 16, 17, 18, 19, 22, 23, 24], "accuraci": [0, 1, 2, 3, 4, 5, 6, 10, 11, 13, 14, 15, 16, 17, 18, 19, 22, 23, 24], "3": [0, 1, 2, 3, 4, 5, 6, 7, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 49, 96], "creat": [0, 1, 2, 3, 4, 5, 7, 9, 10, 11, 13, 14, 18, 19, 21, 22, 23, 24], "quantiz": [0, 1, 2, 3, 4, 5, 7, 8, 10, 11, 12, 13, 14, 18, 19, 21, 22, 23, 24, 25, 33, 38, 39, 45, 47, 48, 55, 59, 67, 68, 71, 72, 78, 80, 81, 83, 85, 88, 89, 90, 102, 103, 105, 106, 107, 108, 112], "simul": [0, 1, 2, 3, 4, 5, 10, 11, 13, 14, 18, 19, 21, 22, 23, 24, 106, 108], "fold": [0, 1, 2, 3, 5, 7, 10, 11, 13, 14, 18, 19, 21, 22, 23, 24, 90], "batch": [0, 1, 2, 3, 5, 10, 11, 13, 14, 18, 19, 22, 23, 24, 93], "normal": [0, 1, 3, 5, 10, 11, 13, 14, 18, 19, 23, 24], "layer": [0, 1, 2, 3, 5, 7, 9, 10, 11, 12, 13, 14, 18, 19, 21, 22, 23, 24, 25, 31, 32, 36, 43, 44, 57, 58, 65, 97, 100, 103], "sim": [0, 1, 2, 3, 5, 10, 11, 14, 18, 19, 21, 22, 23, 24, 39, 48, 68], "comput": [0, 4, 18, 88], "encod": [0, 4, 12, 18, 25, 49, 84, 88, 108], "4": [0, 1, 2, 4, 5, 6, 7, 9, 10, 11, 13, 14, 18, 19, 21, 22, 23, 24, 49], "run": [0, 4, 18, 20, 46, 66, 96], "algorithm": [0, 4, 18], "defin": [0, 6, 7, 18, 20], "callback": [0, 7, 18], "function": [0, 6, 7, 9, 18, 20, 78], "paramet": [0, 4, 18, 28, 30, 41, 51, 56, 86, 91, 108], "call": [0, 4, 18], "api": [0, 4, 18, 26, 27, 28, 29, 30, 31, 32, 33, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 61, 64, 65, 66, 67, 68, 69, 70, 71, 83, 86, 87, 88, 89, 91, 92, 93, 103, 104, 108], "adapt": [1, 5, 19, 41, 51], "round": [1, 5, 19, 41, 51, 100], "adaround": [1, 5, 13, 19, 28, 41, 51, 91], "instanti": [1, 2, 3, 4, 5, 6, 7, 10, 11, 14, 15, 16, 17, 19, 22, 23, 24], "train": [1, 2, 3, 5, 6, 7, 8, 10, 11, 12, 13, 14, 15, 16, 17, 19, 21, 22, 23, 24, 25, 68, 85, 90, 102, 103, 105], "valid": [1, 2, 3, 5, 6, 14, 15, 16, 17, 19, 22, 23, 24, 62], "determin": [1, 2, 3, 5, 6, 10, 11, 13, 14, 19, 22, 23, 24, 108], "appli": [1, 2, 5, 
6, 12, 13, 14, 19, 22, 25], "next": [1, 5, 10, 11, 14, 15, 16, 17, 19, 20, 22, 23, 24], "step": [1, 5, 10, 11, 14, 15, 16, 17, 19, 20, 22, 23, 24, 75], "For": [1, 2, 3, 5, 6, 10, 11, 14, 15, 16, 17, 19, 20, 22, 23, 24], "more": [1, 2, 3, 5, 6, 10, 11, 14, 15, 16, 17, 19, 20, 22, 23, 24, 98], "inform": [1, 2, 3, 5, 6, 10, 11, 14, 15, 16, 17, 19, 20, 22, 23, 24, 98], "cross": [2, 14, 22, 31, 36, 43, 57, 65, 103], "equal": [2, 14, 22, 31, 36, 43, 57, 65, 103], "norm": [2, 22, 93], "cle": [2, 14, 22, 36], "method": [4, 36], "load": [4, 5, 6, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25], "find": 4, "fake": 4, "op": 4, "insert": 4, "regular": [4, 33], "fast": [4, 33], "0": [4, 33, 49, 109], "summari": [4, 7, 9, 13, 21], "get": [5, 10, 11, 13, 14, 15, 16, 17, 18, 19, 22, 23, 24, 87], "score": [5, 10, 11, 13, 14, 15, 16, 17, 18, 19, 22, 23, 24], "autoqu": [6, 20, 42, 53, 92], "pretrain": [6, 12, 20], "constant": [6, 7, 20], "helper": [6, 20, 36], "5": [6, 7, 21, 49], "awar": [7, 8, 10, 11, 21, 23, 24, 68, 90, 105], "batchnorm": [7, 21, 29, 54, 90], "re": [7, 21, 29, 54, 93], "estim": [7, 21, 29, 54, 93], "prepar": [7, 9, 35, 61, 90], "kera": [7, 8, 9], "quantizationsim": 7, "perform": [7, 10, 11, 21, 23, 24, 36], "qat": [7, 10, 11, 21, 23, 24, 68, 105], "export": [7, 21, 69, 83, 90], "transform": 8, "subclass": 9, "show": 9, "similar": 9, "differ": 9, "between": 9, "origin": 9, "discuss": 9, "limit": [9, 29, 35, 61], "rang": [11, 12, 24, 25], "learn": [11, 24], "quant": [12, 25, 37, 46, 66], "analyz": [12, 25, 37, 46, 66, 84], "quantanalyz": [12, 25, 104], "per": [12, 13, 25, 97, 100], "analysi": [12, 25, 102, 104], "enabl": [12, 25], "disabl": [12, 25], "wrapper": [12, 25], "min": [12, 25], "max": [12, 25], "pdf": [12, 25], "statist": [12, 21, 25], "mse": [12, 25], "loss": [12, 25], "quantsim": [13, 14, 90, 108], "channel": [13, 15, 17, 56, 94], "pcq": 13, "compress": [15, 16, 17, 30, 56, 70, 95, 97, 100, 111], "us": [15, 16, 17, 36, 74, 91, 100, 102], "prune": [15, 17, 56, 94], "fine": [15, 16, 17, 90, 100], "tune": [15, 16, 17, 90, 100], "spatial": [16, 17, 30, 56, 110], "svd": [16, 17, 30, 56, 110, 113], "reestim": [21, 54], "bia": [22, 55], "correct": [22, 55], "techniqu": [22, 100, 103], "bc": 22, "welcom": 26, "ai": [26, 87, 98], "effici": [26, 87, 98], "toolkit": [26, 87, 98], "doc": 26, "indic": 26, "tabl": 26, "aimet": [27, 28, 29, 30, 31, 32, 33, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 50, 51, 53, 54, 55, 56, 57, 58, 59, 65, 66, 67, 68, 69, 70, 71, 73, 74, 75, 87, 91, 92, 93, 94, 95, 96, 97, 99, 100, 102, 103, 104, 105, 107, 108, 109, 110, 111, 112, 113, 114], "tensorflow": [27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 49, 75, 112], "user": [28, 31, 39, 41, 42, 43, 51, 53, 55, 57, 64, 66, 68, 98, 103], "guid": [28, 31, 39, 41, 42, 43, 51, 53, 55, 57, 66, 68, 90, 98], "link": [28, 29, 31, 39, 41, 42, 43, 51, 53, 54, 55, 57, 66, 68], "top": [28, 29, 30, 32, 33, 35, 37, 39, 41, 42, 44, 45, 46, 48, 51, 53, 54, 56, 58, 59, 61, 64, 66, 68, 70, 71, 83, 86, 88, 89], "level": [28, 29, 30, 32, 33, 35, 36, 37, 39, 41, 42, 44, 45, 46, 48, 51, 53, 54, 56, 58, 59, 61, 64, 65, 66, 68, 70, 71, 83, 86, 88, 89], "enum": [28, 51, 58, 68], "definit": [28, 30, 33, 45, 51, 56, 58, 59, 65, 68], "code": [28, 29, 30, 31, 32, 33, 35, 36, 37, 39, 41, 42, 43, 44, 45, 46, 48, 51, 53, 54, 55, 56, 57, 58, 59, 61, 65, 66, 68, 69, 70, 71, 72, 86, 96], "introduct": [29, 30, 31, 36, 43, 54, 56, 57, 65], "greedi": [30, 56, 97], "select": [30, 56, 94, 97, 100], "configur": [30, 56, 88, 
106, 108], "primit": [31, 36, 57, 65], "output": [32, 44, 58], "gener": [32, 44, 58], "group": [33, 45, 59], "callbackfunc": [33, 45, 59], "guidelin": [34, 60, 68, 91, 101], "higher": [36, 65], "lower": [36, 65], "custom": [36, 69], "datatyp": 36, "manual": 36, "mode": [36, 105], "specif": [46, 49, 66], "util": [46, 62, 66], "format": 49, "version": 49, "up": 49, "file": [49, 106], "6": 49, "architectur": 52, "checker": 52, "html": 52, "report": 52, "content": 52, "bn": [54, 93], "convbninfotyp": 55, "activationtyp": 55, "param": 55, "empir": 55, "analyt": 55, "weight": [56, 94, 113], "torch": [61, 69, 75], "fx": 61, "symbol": 61, "trace": 61, "multi": 63, "gpu": [63, 75], "support": 63, "peft": 64, "lora": 64, "clssetinfo": 65, "aimet_torch": [67, 72], "refer": [67, 100, 103], "v2": [67, 72], "": 67, "new": 67, "backward": 67, "compat": 67, "sparseconvolut": 69, "spconv": 69, "modul": [69, 72, 88], "visual": [70, 71, 82, 111, 112], "migrat": 72, "chang": 72, "process": 72, "import": 72, "quantizationsimmodel": 72, "move": 72, "from": [72, 74, 75], "quantwrapp": 72, "staticgrid": 72, "learnedgrid": 72, "affin": [72, 78], "float": [72, 80, 90], "instal": [73, 74, 75], "quick": 73, "releas": [73, 74, 75, 98, 109], "packag": [73, 74, 75], "system": 73, "requir": [73, 104], "advanc": 73, "instruct": 73, "docker": 74, "set": 74, "variant": [74, 84], "prebuilt": 74, "imag": 74, "build": 74, "local": 74, "start": [74, 87, 111], "contain": 74, "pypi": [74, 75], "environ": [74, 75], "setup": [74, 75], "prerequisit": [75, 90], "13": [75, 109], "common": 75, "debian": 75, "replac": 75, "pillow": 75, "simd": 75, "onnxruntim": 75, "post": [75, 85, 102, 103], "quantizationmixin": 77, "class": [78, 80, 81, 88], "floatquantizedequant": 79, "quantizedequant": 79, "tensor": 81, "tool": [82, 102], "blockwis": 83, "low": 83, "power": 83, "lpbq": 83, "gptvq": 86, "document": 87, "featur": [87, 95, 98, 102], "descript": [87, 104], "quickstart": 90, "point": 90, "case": [91, 100, 102], "hyper": 91, "overview": [92, 93, 97, 98, 100, 103, 104, 105, 106, 108, 111, 112, 114], "workflow": [92, 93, 102, 105, 108], "procedur": 94, "winnow": [94, 114], "reconstruct": 94, "guidebook": 95, "brows": 96, "server": [96, 111], "download": 96, "relat": 96, "ratio": [97, 100, 111], "how": [97, 114], "work": [97, 114], "explor": 97, "toc": 98, "tree": 98, "known": 99, "issu": 99, "option": 100, "rank": 100, "note": [100, 109], "faq": [100, 103], "debug": 102, "detail": 104, "recommend": 105, "structur": 106, "modifi": 106, "section": 106, "diagnost": 107, "nois": 108, "scheme": 108, "oper": 108, "22": 109, "21": 109, "20": 109, "19": 109, "py37": 109, "18": 109, "17": 109, "16": 109, "14": 109, "design": 111, "bokeh": 111, "session": 111}, "envversion": {"sphinx.domains.c": 2, "sphinx.domains.changeset": 1, "sphinx.domains.citation": 1, "sphinx.domains.cpp": 8, "sphinx.domains.index": 1, "sphinx.domains.javascript": 2, "sphinx.domains.math": 2, "sphinx.domains.python": 3, "sphinx.domains.rst": 2, "sphinx.domains.std": 2, "nbsphinx": 4, "sphinx.ext.intersphinx": 1, "sphinx.ext.viewcode": 1, "sphinx": 57}, "alltitles": {"Automatic Mixed-Precision (AMP)": [[0, "Automatic-Mixed-Precision-(AMP)"], [4, "Automatic-Mixed-Precision-(AMP)"], [18, "Automatic-Mixed-Precision-(AMP)"]], "Overall flow": [[0, "Overall-flow"], [1, "Overall-flow"], [2, "Overall-flow"], [3, "Overall-flow"], [4, "Overall-flow"], [5, "Overall-flow"], [6, "Overall-flow"], [7, "Overall-flow"], [8, "Overall-flow"], [9, "Overall-flow"], [10, "Overall-flow"], [11, 
"Overall-flow"], [12, "Overall-flow"], [13, "Overall-flow"], [14, "Overall-flow"], [15, "Overall-flow"], [16, "Overall-flow"], [17, "Overall-flow"], [18, "Overall-flow"], [19, "Overall-flow"], [20, "Overall-flow"], [21, "Overall-flow"], [22, "Overall-flow"], [23, "Overall-flow"], [24, "Overall-flow"], [25, "Overall-flow"], [90, "overall-flow"]], "What this notebook is not": [[0, "What-this-notebook-is-not"], [4, "What-this-notebook-is-not"], [12, "What-this-notebook-is-not"], [13, "What-this-notebook-is-not"], [18, "What-this-notebook-is-not"], [21, "What-this-notebook-is-not"], [25, "What-this-notebook-is-not"]], "Dataset": [[0, "Dataset"], [1, "Dataset"], [2, "Dataset"], [3, "Dataset"], [4, "Dataset"], [5, "Dataset"], [6, "Dataset"], [7, "Dataset"], [10, "Dataset"], [11, "Dataset"], [12, "Dataset"], [13, "Dataset"], [14, "Dataset"], [15, "Dataset"], [16, "Dataset"], [17, "Dataset"], [18, "Dataset"], [19, "Dataset"], [20, "Dataset"], [21, "Dataset"], [22, "Dataset"], [23, "Dataset"], [24, "Dataset"], [25, "Dataset"]], "1. Example evaluation pipeline": [[0, "1.-Example-evaluation-pipeline"], [18, "1.-Example-evaluation-pipeline"]], "2. Convert an FP32 PyTorch model to ONNX, simplify & then evaluate baseline FP32 accuracy": [[0, "2.-Convert-an-FP32-PyTorch-model-to-ONNX,-simplify-&-then-evaluate-baseline-FP32-accuracy"], [1, "2.-Convert-an-FP32-PyTorch-model-to-ONNX,-simplify-&-then-evaluate-baseline-FP32-accuracy"], [2, "2.-Convert-an-FP32-PyTorch-model-to-ONNX,-simplify-&-then-evaluate-baseline-FP32-accuracy"], [3, "2.-Convert-an-FP32-PyTorch-model-to-ONNX,-simplify-&-then-evaluate-baseline-FP32-accuracy"]], "3. Create a quantization simulation model": [[0, "3.-Create-a-quantization-simulation-model"], [18, "3.-Create-a-quantization-simulation-model"]], "Fold Batch Normalization layers": [[0, "Fold-Batch-Normalization-layers"], [1, "Fold-Batch-Normalization-layers"], [3, "Fold-Batch-Normalization-layers"], [5, "Fold-Batch-Normalization-layers"], [10, "Fold-Batch-Normalization-layers"], [11, "Fold-Batch-Normalization-layers"], [13, "Fold-Batch-Normalization-layers"], [14, "Fold-Batch-Normalization-layers"], [18, "Fold-Batch-Normalization-layers"], [19, "Fold-Batch-Normalization-layers"], [23, "Fold-Batch-Normalization-layers"], [24, "Fold-Batch-Normalization-layers"]], "Create Quantization Sim Model": [[0, "Create-Quantization-Sim-Model"], [18, "Create-Quantization-Sim-Model"], [21, "Create-Quantization-Sim-Model"]], "Compute Encodings": [[0, "Compute-Encodings"], [4, "Compute-Encodings"], [18, "Compute-Encodings"]], "4. Run AMP algorithm on the quantized model": [[0, "4.-Run-AMP-algorithm-on-the-quantized-model"], [4, "4.-Run-AMP-algorithm-on-the-quantized-model"], [18, "4.-Run-AMP-algorithm-on-the-quantized-model"]], "Define callback functions for AMP": [[0, "Define-callback-functions-for-AMP"], [18, "Define-callback-functions-for-AMP"]], "Parameters for AMP algorithm": [[0, "Parameters-for-AMP-algorithm"], [4, "Parameters-for-AMP-algorithm"], [18, "Parameters-for-AMP-algorithm"]], "Call AMP API": [[0, "Call-AMP-API"], [18, "Call-AMP-API"]], "Adaptive Rounding (AdaRound)": [[1, "Adaptive-Rounding-(AdaRound)"], [5, "Adaptive-Rounding-(AdaRound)"], [19, "Adaptive-Rounding-(AdaRound)"]], "1. 
Instantiate the example training and validation pipeline": [[1, "1.-Instantiate-the-example-training-and-validation-pipeline"], [2, "1.-Instantiate-the-example-training-and-validation-pipeline"], [3, "1.-Instantiate-the-example-training-and-validation-pipeline"], [5, "1.-Instantiate-the-example-training-and-validation-pipeline"], [6, "1.-Instantiate-the-example-training-and-validation-pipeline"], [14, "1.-Instantiate-the-example-training-and-validation-pipeline"], [15, "1.-Instantiate-the-example-training-and-validation-pipeline"], [16, "1.-Instantiate-the-example-training-and-validation-pipeline"], [17, "1.-Instantiate-the-example-training-and-validation-pipeline"], [19, "1.-Instantiate-the-example-training-and-validation-pipeline"], [22, "1.-Instantiate-the-example-training-and-validation-pipeline"], [23, "1.-Instantiate-the-example-training-and-validation-pipeline"], [24, "1.-Instantiate-the-example-training-and-validation-pipeline"]], "3. Create a quantization simulation model and determine quantized accuracy": [[1, "3.-Create-a-quantization-simulation-model-and-determine-quantized-accuracy"], [2, "3.-Create-a-quantization-simulation-model-and-determine-quantized-accuracy"], [3, "3.-Create-a-quantization-simulation-model-and-determine-quantized-accuracy"], [5, "3.-Create-a-quantization-simulation-model-and-determine-quantized-accuracy"], [10, "3.-Create-a-quantization-simulation-model-and-determine-quantized-accuracy"], [11, "3.-Create-a-quantization-simulation-model-and-determine-quantized-accuracy"], [13, "3.-Create-a-quantization-simulation-model-and-determine-quantized-accuracy"], [14, "3.-Create-a-quantization-simulation-model-and-determine-quantized-accuracy"], [19, "3.-Create-a-quantization-simulation-model-and-determine-quantized-accuracy"], [22, "3.-Create-a-quantization-simulation-model-and-determine-quantized-accuracy"], [23, "3.-Create-a-quantization-simulation-model-and-determine-quantized-accuracy"], [24, "3.-Create-a-quantization-simulation-model-and-determine-quantized-accuracy"]], "Create the Quantization Sim Model": [[1, "Create-the-Quantization-Sim-Model"], [2, "Create-the-Quantization-Sim-Model"], [3, "Create-the-Quantization-Sim-Model"], [5, "Create-the-Quantization-Sim-Model"], [10, "Create-the-Quantization-Sim-Model"], [11, "Create-the-Quantization-Sim-Model"], [14, "Create-the-Quantization-Sim-Model"], [19, "Create-the-Quantization-Sim-Model"], [22, "Create-the-Quantization-Sim-Model"], [23, "Create-the-Quantization-Sim-Model"], [24, "Create-the-Quantization-Sim-Model"]], "4. 
Apply Adaround": [[1, "4.-Apply-Adaround"], [5, "4.-Apply-Adaround"], [13, "4.-Apply-Adaround"], [19, "4.-Apply-Adaround"]], "Next steps": [[1, "Next-steps"], [5, "Next-steps"], [10, "Next-steps"], [11, "Next-steps"], [14, "Next-steps"], [15, "Next-steps"], [16, "Next-steps"], [17, "Next-steps"], [19, "Next-steps"], [20, "Next-steps"], [22, "Next-steps"], [23, "Next-steps"], [24, "Next-steps"]], "For more information": [[1, "For-more-information"], [2, "For-more-information"], [3, "For-more-information"], [5, "For-more-information"], [6, "For-more-information"], [10, "For-more-information"], [11, "For-more-information"], [14, "For-more-information"], [15, "For-more-information"], [16, "For-more-information"], [17, "For-more-information"], [19, "For-more-information"], [20, "For-more-information"], [22, "For-more-information"], [23, "For-more-information"], [24, "For-more-information"]], "Cross-Layer Equalization": [[2, "Cross-Layer-Equalization"]], "Fold Batch Norm layers": [[2, "Fold-Batch-Norm-layers"], [22, "Fold-Batch-Norm-layers"]], "4. Apply CLE": [[2, "4.-Apply-CLE"], [14, "4.-Apply-CLE"]], "Quantization simulation": [[3, "Quantization-simulation"]], "1. Instantiate the example evaluation method": [[4, "1.-Instantiate-the-example-evaluation-method"]], "2. Load the FP32 model and evaluate the model to find the baseline FP32 accuracy": [[4, "2.-Load-the-FP32-model-and-evaluate-the-model-to-find-the-baseline-FP32-accuracy"]], "3.Create a quantization simulation model (with fake quantization ops inserted)": [[4, "3.Create-a-quantization-simulation-model-(with-fake-quantization-ops-inserted)"]], "Regular AMP": [[4, "Regular-AMP"]], "API Call for Regular AMP": [[4, "API-Call-for-Regular-AMP"]], "Fast AMP (AMP 2.0)": [[4, "Fast-AMP-(AMP-2.0)"]], "Summary": [[4, "Summary"], [7, "Summary"], [9, "Summary"], [13, "Summary"], [21, "Summary"]], "2. Load the model and evaluate to get a baseline FP32 accuracy score": [[5, "2.-Load-the-model-and-evaluate-to-get-a-baseline-FP32-accuracy-score"], [10, "2.-Load-the-model-and-evaluate-to-get-a-baseline-FP32-accuracy-score"], [11, "2.-Load-the-model-and-evaluate-to-get-a-baseline-FP32-accuracy-score"], [13, "2.-Load-the-model-and-evaluate-to-get-a-baseline-FP32-accuracy-score"], [14, "2.-Load-the-model-and-evaluate-to-get-a-baseline-FP32-accuracy-score"], [15, "2.-Load-the-model-and-evaluate-to-get-a-baseline-FP32-accuracy-score"], [16, "2.-Load-the-model-and-evaluate-to-get-a-baseline-FP32-accuracy-score"], [17, "2.-Load-the-model-and-evaluate-to-get-a-baseline-FP32-accuracy-score"], [18, "2.-Load-the-model-and-evaluate-to-get-a-baseline-FP32-accuracy-score"], [19, "2.-Load-the-model-and-evaluate-to-get-a-baseline-FP32-accuracy-score"], [22, "2.-Load-the-model-and-evaluate-to-get-a-baseline-FP32-accuracy-score"], [23, "2.-Load-the-model-and-evaluate-to-get-a-baseline-FP32-accuracy-score"], [24, "2.-Load-the-model-and-evaluate-to-get-a-baseline-FP32-accuracy-score"]], "AutoQuant": [[6, "AutoQuant"], [20, "AutoQuant"]], "2. Load a pretrained FP32 model": [[6, "2.-Load-a-pretrained-FP32-model"], [12, "2.-Load-a-pretrained-FP32-model"], [20, "2.-Load-a-pretrained-FP32-model"]], "3. Determine the baseline FP32 accuracy": [[6, "3.-Determine-the-baseline-FP32-accuracy"]], "4. Define constants and helper functions": [[6, "4.-Define-constants-and-helper-functions"]], "5. 
Apply AutoQuant": [[6, "5.-Apply-AutoQuant"]], "Quantization-Aware Training with BatchNorm Re-estimation": [[7, "Quantization-Aware-Training-with-BatchNorm-Re-estimation"], [21, "Quantization-Aware-Training-with-BatchNorm-Re-estimation"]], "1. Instantiate the example evaluation and training pipeline": [[7, "1.-Instantiate-the-example-evaluation-and-training-pipeline"]], "2. Define Constants and Datasets Prepare": [[7, "2.-Define-Constants-and-Datasets-Prepare"]], "2. Create the model in Keras": [[7, "2.-Create-the-model-in-Keras"]], "3. Train and evaluate the model": [[7, "3.-Train-and-evaluate-the-model"]], "4. Create a QuantizationSim Model": [[7, "4.-Create-a-QuantizationSim-Model"]], "Prepare the evaluation callback function": [[7, "Prepare-the-evaluation-callback-function"]], "5. Perform QAT": [[7, "5.-Perform-QAT"]], "Fold BatchNorm Layers": [[7, "Fold-BatchNorm-Layers"], [21, "Fold-BatchNorm-Layers"]], "5. Export Model": [[7, "5.-Export-Model"], [21, "5.-Export-Model"]], "Quantization-Aware Training with a Keras Transformer Model": [[8, "Quantization-Aware-Training-with-a-Keras-Transformer-Model"]], "Keras Model Preparer": [[9, "Keras-Model-Preparer"]], "1. Creating a Keras model with subclass layers": [[9, "1.-Creating-a-Keras-model-with-subclass-layers"]], "2. Converting the Keras model with subclass layers to a Keras model with functional layers": [[9, "2.-Converting-the-Keras-model-with-subclass-layers-to-a-Keras-model-with-functional-layers"]], "3. Showing similarities and differences between the original and converted models": [[9, "3.-Showing-similarities-and-differences-between-the-original-and-converted-models"]], "4. Discussing the limitations of the Keras Model Preparer": [[9, "4.-Discussing-the-limitations-of-the-Keras-Model-Preparer"]], "Quantization-aware training": [[10, "Quantization-aware-training"], [23, "Quantization-aware-training"]], "1. Instantiate the example evaluation and training datasets": [[10, "1.-Instantiate-the-example-evaluation-and-training-datasets"], [11, "1.-Instantiate-the-example-evaluation-and-training-datasets"]], "4. Perform QAT": [[10, "4.-Perform-QAT"], [11, "4.-Perform-QAT"], [23, "4.-Perform-QAT"], [24, "4.-Perform-QAT"]], "Quantization-Aware training with range learning": [[11, "Quantization-Aware-training-with-range-learning"]], "Quant Analyzer": [[12, "Quant-Analyzer"], [25, "Quant-Analyzer"]], "1. Example evaluation and training pipeline": [[12, "1.-Example-evaluation-and-training-pipeline"], [13, "1.-Example-evaluation-and-training-pipeline"], [21, "1.-Example-evaluation-and-training-pipeline"], [25, "1.-Example-evaluation-and-training-pipeline"]], "3. 
Apply QuantAnalyzer to the model": [[12, "3.-Apply-QuantAnalyzer-to-the-model"], [25, "3.-Apply-QuantAnalyzer-to-the-model"]], "Per-layer analysis by enabling/disabling quantization wrappers": [[12, "Per-layer-analysis-by-enabling/disabling-quantization-wrappers"], [25, "Per-layer-analysis-by-enabling/disabling-quantization-wrappers"]], "Encoding min/max ranges": [[12, "Encoding-min/max-ranges"], [25, "Encoding-min/max-ranges"]], "PDF of statistics": [[12, "PDF-of-statistics"], [25, "PDF-of-statistics"]], "Per-layer MSE loss": [[12, "Per-layer-MSE-loss"], [25, "Per-layer-MSE-loss"]], "Quantsim and Adaround - Per Channel Quantization (PCQ)": [[13, "Quantsim-and-Adaround---Per-Channel-Quantization-(PCQ)"]], "Cross-Layer Equalization with QuantSim": [[14, "Cross-Layer-Equalization-with-QuantSim"]], "Model compression using channel pruning": [[15, "Model-compression-using-channel-pruning"]], "3. Compress the model and fine-tune": [[15, "3.-Compress-the-model-and-fine-tune"], [16, "3.-Compress-the-model-and-fine-tune"], [17, "3.-Compress-the-model-and-fine-tune"]], "Model compression using spatial SVD": [[16, "Model-compression-using-spatial-SVD"]], "Model compression using spatial SVD and channel pruning": [[17, "Model-compression-using-spatial-SVD-and-channel-pruning"]], "1. Define Constants and Helper functions": [[20, "1.-Define-Constants-and-Helper-functions"]], "3. Run AutoQuant": [[20, "3.-Run-AutoQuant"]], "2. Load FP32 model": [[21, "2.-Load-FP32-model"]], "3. Create a quantization simulation model and Perform QAT": [[21, "3.-Create-a-quantization-simulation-model-and-Perform-QAT"]], "Perform QAT": [[21, "Perform-QAT"]], "4. Perform BatchNorm Reestimation": [[21, "4.-Perform-BatchNorm-Reestimation"]], "Re-estimate BatchNorm Statistics": [[21, "Re-estimate-BatchNorm-Statistics"]], "Cross-Layer Equalization and Bias Correction": [[22, "Cross-Layer-Equalization-and-Bias-Correction"]], "Techniques": [[22, "Techniques"]], "4. Apply CLE and BC": [[22, "4.-Apply-CLE-and-BC"]], "CLE": [[22, "CLE"]], "Bias Correction": [[22, "Bias-Correction"]], "Quantization-aware training with range learning": [[24, "Quantization-aware-training-with-range-learning"]], "2. 
Load the model": [[25, "2.-Load-the-model"]], "Welcome to AI Model Efficiency Toolkit API Docs!": [[26, "welcome-to-ai-model-efficiency-toolkit-api-docs"]], "Indices and tables": [[26, "indices-and-tables"]], "AIMET TensorFlow APIs": [[27, "aimet-tensorflow-apis"]], "AIMET TensorFlow AdaRound API": [[28, "aimet-tensorflow-adaround-api"]], "User Guide Link": [[28, "user-guide-link"], [31, "user-guide-link"], [39, "user-guide-link"], [41, "user-guide-link"], [42, "user-guide-link"], [43, "user-guide-link"], [51, "user-guide-link"], [53, "user-guide-link"], [55, "user-guide-link"], [57, "user-guide-link"], [66, "user-guide-link"], [68, "user-guide-link"]], "Examples Notebook Link": [[28, "examples-notebook-link"], [29, "examples-notebook-link"], [31, "examples-notebook-link"], [51, "examples-notebook-link"], [53, "examples-notebook-link"], [54, "examples-notebook-link"], [57, "examples-notebook-link"], [66, "examples-notebook-link"], [68, "examples-notebook-link"]], "Top-level API": [[28, "top-level-api"], [32, "top-level-api"], [35, "top-level-api"], [37, "top-level-api"], [39, "top-level-api"], [41, "top-level-api"], [42, "top-level-api"], [44, "top-level-api"], [45, "top-level-api"], [46, "top-level-api"], [48, "top-level-api"], [51, "top-level-api"], [53, "top-level-api"], [58, "top-level-api"], [59, "top-level-api"], [61, "top-level-api"], [64, "top-level-api"], [66, "top-level-api"], [68, "top-level-api"], [88, "top-level-api"], [89, "top-level-api"]], "Adaround Parameters": [[28, "adaround-parameters"], [41, "adaround-parameters"], [51, "adaround-parameters"]], "Enum Definition": [[28, "enum-definition"], [51, "enum-definition"], [58, "enum-definition"], [68, "enum-definition"]], "Code Examples": [[28, "code-examples"], [30, "code-examples"], [33, "code-examples"], [35, "code-examples"], [37, "code-examples"], [39, "code-examples"], [42, "code-examples"], [45, "code-examples"], [46, "code-examples"], [48, "code-examples"], [53, "code-examples"], [56, "code-examples"], [59, "code-examples"], [61, "code-examples"], [66, "code-examples"], [70, "code-examples"], [71, "code-examples"], [72, "code-examples"]], "AIMET TensorFlow BatchNorm Re-estimation APIs": [[29, "aimet-tensorflow-batchnorm-re-estimation-apis"]], "Introduction": [[29, "introduction"], [30, "introduction"], [31, "introduction"], [36, "introduction"], [43, "introduction"], [54, "introduction"], [56, "introduction"], [57, "introduction"], [65, "introduction"]], "Top-level APIs": [[29, "top-level-apis"], [54, "top-level-apis"]], "Code Example": [[29, "code-example"], [31, "code-example"], [32, "code-example"], [43, "code-example"], [44, "code-example"], [57, "code-example"], [58, "code-example"], [69, "code-example"], [86, "code-example"]], "Limitations": [[29, "limitations"], [35, "limitations"]], "AIMET TensorFlow Compression API": [[30, "aimet-tensorflow-compression-api"]], "Top-level API for Compression": [[30, "top-level-api-for-compression"], [56, "top-level-api-for-compression"]], "Greedy Selection Parameters": [[30, "greedy-selection-parameters"], [56, "greedy-selection-parameters"]], "Spatial SVD Configuration": [[30, "spatial-svd-configuration"], [56, "spatial-svd-configuration"]], "Configuration Definitions": [[30, "configuration-definitions"], [56, "configuration-definitions"]], "AIMET TensorFlow Cross Layer Equalization APIs": [[31, "aimet-tensorflow-cross-layer-equalization-apis"]], "Cross Layer Equalization API": [[31, "cross-layer-equalization-api"], [43, "cross-layer-equalization-api"], [57, 
"cross-layer-equalization-api"]], "Primitive APIs": [[31, "primitive-apis"], [57, "primitive-apis"]], "AIMET TensorFlow Layer Output Generation API": [[32, "aimet-tensorflow-layer-output-generation-api"]], "AIMET TensorFlow Mixed Precision API": [[33, "aimet-tensorflow-mixed-precision-api"]], "Top-level API for Regular AMP": [[33, "top-level-api-for-regular-amp"]], "Top-level API for Fast AMP (AMP 2.0)": [[33, "top-level-api-for-fast-amp-amp-2-0"]], "Quantizer Groups definition": [[33, "quantizer-groups-definition"], [45, "quantizer-groups-definition"], [59, "quantizer-groups-definition"]], "CallbackFunc Definition": [[33, "callbackfunc-definition"], [45, "callbackfunc-definition"], [59, "callbackfunc-definition"]], "TensorFlow Model Guidelines": [[34, "tensorflow-model-guidelines"]], "TensorFlow Model Preparer API": [[35, "tensorflow-model-preparer-api"]], "AIMET TensorFlow Cross Layer Equalization Primitive API": [[36, "aimet-tensorflow-cross-layer-equalization-primitive-api"]], "Higher Level APIs for Cross Layer Equalization": [[36, "higher-level-apis-for-cross-layer-equalization"], [65, "higher-level-apis-for-cross-layer-equalization"]], "Code Examples for Higher Level APIs": [[36, "code-examples-for-higher-level-apis"], [65, "code-examples-for-higher-level-apis"]], "Lower Level APIs for Cross Layer Equalization": [[36, "lower-level-apis-for-cross-layer-equalization"], [65, "lower-level-apis-for-cross-layer-equalization"]], "Custom Datatype used": [[36, "custom-datatype-used"]], "Code Example for Lower level APIs": [[36, "code-example-for-lower-level-apis"]], "Example helper methods to perform CLE in manual mode": [[36, "example-helper-methods-to-perform-cle-in-manual-mode"]], "AIMET TensorFlow Quant Analyzer API": [[37, "aimet-tensorflow-quant-analyzer-api"]], "AIMET TensorFlow Quantization APIs": [[38, "aimet-tensorflow-quantization-apis"]], "AIMET TensorFlow Quantization SIM API": [[39, "aimet-tensorflow-quantization-sim-api"]], "AIMET ONNX APIs": [[40, "aimet-onnx-apis"]], "AIMET ONNX AdaRound API": [[41, "aimet-onnx-adaround-api"]], "Code Example - Adaptive Rounding (AdaRound)": [[41, "code-example-adaptive-rounding-adaround"], [51, "code-example-adaptive-rounding-adaround"]], "AIMET ONNX AutoQuant API": [[42, "aimet-onnx-autoquant-api"]], "AIMET ONNX Cross Layer Equalization APIs": [[43, "aimet-onnx-cross-layer-equalization-apis"]], "AIMET ONNX Layer Output Generation API": [[44, "aimet-onnx-layer-output-generation-api"]], "AIMET ONNX Mixed Precision API": [[45, "aimet-onnx-mixed-precision-api"]], "AIMET ONNX Quant Analyzer API": [[46, "aimet-onnx-quant-analyzer-api"]], "Run specific utility": [[46, "run-specific-utility"], [66, "run-specific-utility"]], "AIMET ONNX Quantization APIs": [[47, "aimet-onnx-quantization-apis"]], "AIMET ONNX Quantization SIM API": [[48, "aimet-onnx-quantization-sim-api"]], "Encoding Format Specification": [[49, "encoding-format-specification"]], "1. Versioning": [[49, "versioning"]], "2. Version 0.4.0 (up to)": [[49, "version-0-4-0-up-to"]], "2.1. Encoding Specification": [[49, "encoding-specification"]], "2.2. Encoding File Example for PyTorch": [[49, "encoding-file-example-for-pytorch"]], "2.3. Encoding File Example for TensorFlow": [[49, "encoding-file-example-for-tensorflow"]], "3. Version 0.5.0": [[49, "version-0-5-0"]], "3.1. Encoding Specification": [[49, "id1"]], "3.2. Encoding File Example for PyTorch": [[49, "id2"]], "3.3. Encoding File Example for TensorFlow": [[49, "id3"]], "4. Version 0.6.1": [[49, "version-0-6-1"]], "4.1. 
Encoding Specification": [[49, "id4"]], "AIMET PyTorch APIs": [[50, "aimet-pytorch-apis"]], "AIMET PyTorch AdaRound API": [[51, "aimet-pytorch-adaround-api"]], "Architecture Checker API": [[52, "architecture-checker-api"]], "HTML report content": [[52, "id1"]], "AIMET PyTorch AutoQuant API": [[53, "aimet-pytorch-autoquant-api"]], "AIMET PyTorch BatchNorm Re-estimation APIs": [[54, "aimet-pytorch-batchnorm-re-estimation-apis"]], "Code Example - BN-Reestimation": [[54, "code-example-bn-reestimation"]], "AIMET PyTorch Bias Correction API": [[55, "aimet-pytorch-bias-correction-api"]], "Bias Correction API": [[55, "bias-correction-api"]], "ConvBnInfoType": [[55, "convbninfotype"]], "ActivationType": [[55, "activationtype"]], "Quantization Params": [[55, "quantization-params"]], "Code Example #1 Empirical Bias Correction": [[55, "code-example-1-empirical-bias-correction"]], "Code Example #2 Analytical + Empirical Bias correction": [[55, "code-example-2-analytical-empirical-bias-correction"]], "AIMET PyTorch Compression API": [[56, "aimet-pytorch-compression-api"]], "Weight SVD Configuration": [[56, "weight-svd-configuration"]], "Channel Pruning Configuration": [[56, "channel-pruning-configuration"]], "AIMET PyTorch Cross Layer Equalization APIs": [[57, "aimet-pytorch-cross-layer-equalization-apis"]], "AIMET PyTorch Layer Output Generation API": [[58, "aimet-pytorch-layer-output-generation-api"]], "AIMET PyTorch Mixed Precision API": [[59, "aimet-pytorch-mixed-precision-api"]], "PyTorch Model Guidelines": [[60, "pytorch-model-guidelines"]], "Model Preparer API": [[61, "model-preparer-api"]], "Limitations of torch.fx symbolic trace API": [[61, "limitations-of-torch-fx-symbolic-trace-api"]], "Model Validator Utility": [[62, "model-validator-utility"]], "PyTorch Multi-GPU support": [[63, "pytorch-multi-gpu-support"]], "PEFT LoRA": [[64, "peft-lora"]], "User flow": [[64, "user-flow"]], "AIMET PyTorch Cross Layer Equalization Primitive API": [[65, "aimet-pytorch-cross-layer-equalization-primitive-api"]], "ClsSetInfo Definition": [[65, "clssetinfo-definition"]], "Code Examples for Lower Level APIs": [[65, "code-examples-for-lower-level-apis"]], "AIMET PyTorch Quant Analyzer API": [[66, "aimet-pytorch-quant-analyzer-api"]], "AIMET PyTorch Quantization APIs": [[67, "aimet-pytorch-quantization-apis"]], "aimet_torch": [[67, "aimet-torch"]], "API Reference": [[67, "api-reference"], [67, "id1"]], "aimet_torch.v2": [[67, "aimet-torch-v2"]], "What\u2019s New": [[67, "what-s-new"]], "Backwards Compatibility": [[67, "backwards-compatibility"]], "AIMET PyTorch Quantization SIM API": [[68, "aimet-pytorch-quantization-sim-api"]], "Guidelines": [[68, "guidelines"]], "Code Example - Quantization Aware Training (QAT)": [[68, "code-example-quantization-aware-training-qat"]], "AIMET Torch SparseConvolution custom onnx export": [[69, "aimet-torch-sparseconvolution-custom-onnx-export"]], "Custom API for the spconv modules": [[69, "custom-api-for-the-spconv-modules"]], "AIMET Visualization Compression API": [[70, "aimet-visualization-compression-api"]], "Top-level API Compression": [[70, "top-level-api-compression"]], "AIMET Visualization for Quantization API": [[71, "aimet-visualization-for-quantization-api"]], "Top-level API Quantization": [[71, "top-level-api-quantization"]], "Migrate to aimet_torch.v2": [[72, "migrate-to-aimet-torch-v2"]], "Changes in aimet_torch v2": [[72, "changes-in-aimet-torch-v2"]], "Migration Process": [[72, "migration-process"]], "Imports": [[72, "imports"]], "QuantizationSimModel": [[72, 
"quantizationsimmodel"]], "Moving from QuantWrapper to Quantized Modules": [[72, "moving-from-quantwrapper-to-quantized-modules"]], "Moving from StaticGrid and LearnedGrid Quantizer to Affine and Float Quantizer": [[72, "moving-from-staticgrid-and-learnedgrid-quantizer-to-affine-and-float-quantizer"]], "AIMET Installation": [[73, "aimet-installation"]], "Quick Install": [[73, "quick-install"]], "Release Packages": [[73, "release-packages"]], "System Requirements": [[73, "system-requirements"]], "Advanced Installation Instructions": [[73, "advanced-installation-instructions"]], "AIMET Installation in Docker": [[74, "aimet-installation-in-docker"]], "Set variant": [[74, "set-variant"]], "Use prebuilt docker image": [[74, "use-prebuilt-docker-image"]], "Build docker image locally": [[74, "build-docker-image-locally"]], "Start docker container": [[74, "start-docker-container"]], "Install AIMET packages": [[74, "install-aimet-packages"], [75, "install-aimet-packages"]], "From PyPI": [[74, "from-pypi"], [75, "from-pypi"]], "From Release Package": [[74, "from-release-package"], [75, "from-release-package"]], "Environment setup": [[74, "environment-setup"], [75, "environment-setup"]], "AIMET Installation and Setup": [[75, "aimet-installation-and-setup"]], "Install prerequisite packages": [[75, "install-prerequisite-packages"]], "Install GPU packages": [[75, "install-gpu-packages"]], "Install GPU packages for PyTorch 2.1 or PyTorch 1.13 or ONNX or TensorFlow": [[75, "install-gpu-packages-for-pytorch-2-1-or-pytorch-1-13-or-onnx-or-tensorflow"]], "Install common debian packages": [[75, "install-common-debian-packages"]], "Install tensorflow GPU debian packages": [[75, "install-tensorflow-gpu-debian-packages"]], "Install torch GPU debian packages": [[75, "install-torch-gpu-debian-packages"]], "Install ONNX GPU debian packages": [[75, "install-onnx-gpu-debian-packages"]], "Replace Pillow with Pillow-SIMD": [[75, "replace-pillow-with-pillow-simd"]], "Replace onnxruntime with onnxruntime-gpu": [[75, "replace-onnxruntime-with-onnxruntime-gpu"]], "Post installation steps": [[75, "post-installation-steps"]], "QuantizationMixin": [[77, "quantizationmixin"]], "quantization.affine": [[78, "module-aimet_torch.v2.quantization.affine"]], "Classes": [[78, "classes"], [80, "classes"], [81, "classes"]], "Functions": [[78, "functions"]], "FloatQuantizeDequantize": [[79, "floatquantizedequantize"]], "QuantizeDequantize": [[79, "quantizedequantize"]], "quantization.float": [[80, "module-aimet_torch.v2.quantization.float"]], "quantization.tensor": [[81, "quantization-tensor"]], "Visualization Tools": [[82, "visualization-tools"]], "Blockwise Quantization": [[83, "blockwise-quantization"]], "Low Power Blockwise Quantization (LPBQ)": [[83, "low-power-blockwise-quantization-lpbq"]], "Top Level API": [[83, "top-level-api"], [86, "top-level-api"]], "Export": [[83, "export"]], "Encoding Analyzers": [[84, "encoding-analyzers"]], "Variants": [[84, "variants"]], "Post-Training Quantization": [[85, "post-training-quantization"], [102, "post-training-quantization"]], "GPTVQ": [[86, "gptvq"]], "GPTVQ Parameters": [[86, "gptvq-parameters"]], "AIMET: AI Model Efficiency Toolkit Documentation": [[87, "aimet-ai-model-efficiency-toolkit-documentation"]], "Getting Started": [[87, "getting-started"]], "Examples": [[87, null]], "Feature Descriptions": [[87, null]], "AIMET PyTorch API": [[87, null]], "Quantized Modules": [[88, "quantized-modules"]], "Configuration": [[88, "configuration"]], "Computing Encodings": [[88, 
"computing-encodings"]], "Quantized Module Classes": [[88, "quantized-module-classes"]], "Quantizers": [[89, "quantizers"]], "Quickstart Guide": [[90, "quickstart-guide"]], "PyTorch prerequisites": [[90, "pytorch-prerequisites"]], "Prepare the floating point model for quantization": [[90, "prepare-the-floating-point-model-for-quantization"]], "1) Model preparation": [[90, "model-preparation"]], "2) BatchNorm fold": [[90, "batchnorm-fold"]], "Quantize the model": [[90, "quantize-the-model"]], "Fine-tune the model with quantization aware training": [[90, "fine-tune-the-model-with-quantization-aware-training"]], "Export the quantsim model": [[90, "export-the-quantsim-model"]], "AIMET AdaRound": [[91, "aimet-adaround"]], "AdaRound use cases": [[91, "adaround-use-cases"]], "AdaRound hyper parameters guidelines": [[91, "adaround-hyper-parameters-guidelines"]], "AdaRound API": [[91, "adaround-api"]], "AIMET AutoQuant": [[92, "aimet-autoquant"]], "Overview": [[92, "overview"], [93, "overview"], [97, "overview"], [98, "overview"], [100, "overview"], [103, "overview"], [104, "overview"], [105, "overview"], [106, "overview"], [108, "overview"], [111, "overview"], [112, "overview"], [114, "overview"]], "Workflow": [[92, "workflow"], [93, "workflow"]], "AutoQuant API": [[92, "autoquant-api"]], "AIMET Batch Norm Re-estimation": [[93, "aimet-batch-norm-re-estimation"]], "BN Re-estimation API": [[93, "bn-re-estimation-api"]], "AIMET channel pruning": [[94, "aimet-channel-pruning"]], "Procedure": [[94, "procedure"]], "Channel selection": [[94, "channel-selection"]], "Winnowing": [[94, "winnowing"]], "Weight reconstruction": [[94, "weight-reconstruction"]], "AIMET Compression Features Guidebook": [[95, "aimet-compression-features-guidebook"]], "AIMET examples": [[96, "aimet-examples"]], "Browse the notebooks": [[96, "browse-the-notebooks"]], "Running the notebooks": [[96, "running-the-notebooks"]], "1. Run the notebook server": [[96, "run-the-notebook-server"]], "2. Download the example notebooks and related code": [[96, "download-the-example-notebooks-and-related-code"]], "3. 
Run the notebooks": [[96, "run-the-notebooks"]], "AIMET greedy compression ratio selection": [[97, "aimet-greedy-compression-ratio-selection"]], "How it works": [[97, "how-it-works"]], "Per-layer exploration": [[97, "per-layer-exploration"]], "Compression ratio selection": [[97, "compression-ratio-selection"], [100, "compression-ratio-selection"]], "AI Model Efficiency Toolkit User Guide": [[98, "ai-model-efficiency-toolkit-user-guide"]], "Features": [[98, "features"]], "More Information": [[98, "more-information"]], "Release Information": [[98, "release-information"]], "toc tree": [[98, "toc-tree"]], "AIMET Known Issues": [[99, "aimet-known-issues"]], "AIMET model compression": [[100, "aimet-model-compression"]], "Use Case": [[100, "use-case"]], "Model compression": [[100, "model-compression"]], "Optional techniques": [[100, "optional-techniques"]], "Rank Rounding": [[100, "rank-rounding"]], "Per-layer fine-tuning": [[100, "per-layer-fine-tuning"]], "NOTE": [[100, null]], "FAQs": [[100, "faqs"], [103, "faqs"]], "References": [[100, "references"], [103, "references"]], "Model Guidelines for PyTorch": [[101, "model-guidelines-for-pytorch"]], "AIMET model quantization": [[102, "aimet-model-quantization"]], "Use cases": [[102, "use-cases"]], "AIMET quantization features": [[102, "aimet-quantization-features"]], "Debugging and Analysis Tools": [[102, "debugging-and-analysis-tools"]], "AIMET quantization workflow": [[102, "aimet-quantization-workflow"]], "PyTorch": [[102, "pytorch"], [112, "pytorch"]], "Debugging": [[102, "debugging"]], "AIMET post-training quantization techniques": [[103, "aimet-post-training-quantization-techniques"]], "User Flow": [[103, "user-flow"]], "Cross-Layer Equalization API": [[103, "cross-layer-equalization-api"]], "AIMET QuantAnalyzer": [[104, "aimet-quantanalyzer"]], "Requirements": [[104, "requirements"]], "Detailed analysis descriptions": [[104, "detailed-analysis-descriptions"]], "QuantAnalyzer API": [[104, "quantanalyzer-api"]], "AIMET quantization aware training": [[105, "aimet-quantization-aware-training"]], "QAT workflow": [[105, "qat-workflow"]], "QAT modes": [[105, "qat-modes"]], "Recommendations for quantization-aware training": [[105, "recommendations-for-quantization-aware-training"]], "Quantization simulation configuration": [[106, "quantization-simulation-configuration"]], "Configuration file structure": [[106, "configuration-file-structure"]], "Modifying configuration file sections": [[106, "modifying-configuration-file-sections"]], "AIMET quantization diagnostics": [[107, "aimet-quantization-diagnostics"]], "AIMET quantization simulation": [[108, "aimet-quantization-simulation"]], "QuantSim workflow": [[108, "quantsim-workflow"]], "Simulating quantization noise": [[108, "simulating-quantization-noise"]], "Determining quantization parameters (encodings)": [[108, "determining-quantization-parameters-encodings"]], "Quantization schemes": [[108, "quantization-schemes"]], "Configuring quantization simulation operations": [[108, "configuring-quantization-simulation-operations"]], "Quantization Simulation APIs": [[108, "quantization-simulation-apis"]], "AIMET Release Notes": [[109, "aimet-release-notes"]], "1.22.2": [[109, "id1"]], "1.22.1": [[109, "id2"]], "1.22.0": [[109, "id3"]], "1.21.0": [[109, "id4"]], "1.20.0": [[109, "id5"]], "1.19.1.py37": [[109, "py37"]], "1.19.1": [[109, "id6"]], "1.18.0.py37": [[109, "id7"]], "1.18.0": [[109, "id8"]], "1.17.0.py37": [[109, "id9"]], "1.17.0": [[109, "id10"]], "1.16.2.py37": [[109, "id11"]], "1.16.2": [[109, 
"id12"]], "1.16.1.py37": [[109, "id13"]], "1.16.1": [[109, "id14"]], "1.16.0": [[109, "id15"]], "1.14.0": [[109, "id16"]], "1.13.0": [[109, "id17"]], "AIMET spatial SVD": [[110, "aimet-spatial-svd"]], "AIMET visualization": [[111, "aimet-visualization"]], "Design": [[111, "design"]], "Compression": [[111, "compression"]], "Starting a Bokeh server session": [[111, "starting-a-bokeh-server-session"]], "Visualizing compression ratios": [[111, "visualizing-compression-ratios"]], "AIMET visualization for quantization": [[112, "aimet-visualization-for-quantization"]], "Quantization": [[112, "quantization"]], "TensorFlow": [[112, "tensorflow"]], "AIMET weight SVD": [[113, "aimet-weight-svd"]], "AIMET winnowing": [[114, "aimet-winnowing"]], "Winnowing overview": [[114, "winnowing-overview"]], "How winnowing works": [[114, "how-winnowing-works"]]}, "indexentries": {"adaroundparameters (class in aimet_tensorflow.keras.adaround_weight)": [[28, "aimet_tensorflow.keras.adaround_weight.AdaroundParameters"]], "quantscheme (class in aimet_common.defs)": [[28, "aimet_common.defs.QuantScheme"], [51, "aimet_common.defs.QuantScheme"], [68, "aimet_common.defs.QuantScheme"]], "apply_adaround() (in module aimet_tensorflow.keras.adaround_weight.adaround)": [[28, "aimet_tensorflow.keras.adaround_weight.Adaround.apply_adaround"]], "post_training_percentile (aimet_common.defs.quantscheme attribute)": [[28, "aimet_common.defs.QuantScheme.post_training_percentile"], [51, "aimet_common.defs.QuantScheme.post_training_percentile"], [68, "aimet_common.defs.QuantScheme.post_training_percentile"]], "post_training_tf (aimet_common.defs.quantscheme attribute)": [[28, "aimet_common.defs.QuantScheme.post_training_tf"], [51, "aimet_common.defs.QuantScheme.post_training_tf"], [68, "aimet_common.defs.QuantScheme.post_training_tf"]], "post_training_tf_enhanced (aimet_common.defs.quantscheme attribute)": [[28, "aimet_common.defs.QuantScheme.post_training_tf_enhanced"], [51, "aimet_common.defs.QuantScheme.post_training_tf_enhanced"], [68, "aimet_common.defs.QuantScheme.post_training_tf_enhanced"]], "training_range_learning_with_tf_enhanced_init (aimet_common.defs.quantscheme attribute)": [[28, "aimet_common.defs.QuantScheme.training_range_learning_with_tf_enhanced_init"], [51, "aimet_common.defs.QuantScheme.training_range_learning_with_tf_enhanced_init"], [68, "aimet_common.defs.QuantScheme.training_range_learning_with_tf_enhanced_init"]], "training_range_learning_with_tf_init (aimet_common.defs.quantscheme attribute)": [[28, "aimet_common.defs.QuantScheme.training_range_learning_with_tf_init"], [51, "aimet_common.defs.QuantScheme.training_range_learning_with_tf_init"], [68, "aimet_common.defs.QuantScheme.training_range_learning_with_tf_init"]], "fold_all_batch_norms_to_scale() (in module aimet_tensorflow.keras.batch_norm_fold)": [[29, "aimet_tensorflow.keras.batch_norm_fold.fold_all_batch_norms_to_scale"]], "reestimate_bn_stats() (in module aimet_tensorflow.keras.bn_reestimation)": [[29, "aimet_tensorflow.keras.bn_reestimation.reestimate_bn_stats"]], "compressionscheme (class in aimet_common.defs)": [[30, "aimet_common.defs.CompressionScheme"]], "costmetric (class in aimet_common.defs)": [[30, "aimet_common.defs.CostMetric"]], "modelcompressor (class in aimet_tensorflow.keras.compress)": [[30, "aimet_tensorflow.keras.compress.ModelCompressor"]], "modulecompratiopair (class in aimet_tensorflow.keras.defs)": [[30, "aimet_tensorflow.keras.defs.ModuleCompRatioPair"]], "spatialsvdparameters (class in aimet_tensorflow.keras.defs)": [[30, 
"aimet_tensorflow.keras.defs.SpatialSvdParameters"]], "spatialsvdparameters.automodeparams (class in aimet_tensorflow.keras.defs)": [[30, "aimet_tensorflow.keras.defs.SpatialSvdParameters.AutoModeParams"]], "spatialsvdparameters.manualmodeparams (class in aimet_tensorflow.keras.defs)": [[30, "aimet_tensorflow.keras.defs.SpatialSvdParameters.ManualModeParams"]], "spatialsvdparameters.mode (class in aimet_tensorflow.keras.defs)": [[30, "aimet_tensorflow.keras.defs.SpatialSvdParameters.Mode"]], "auto (aimet_tensorflow.keras.defs.spatialsvdparameters.mode attribute)": [[30, "aimet_tensorflow.keras.defs.SpatialSvdParameters.Mode.auto"]], "channel_pruning (aimet_common.defs.compressionscheme attribute)": [[30, "aimet_common.defs.CompressionScheme.channel_pruning"]], "compress_model() (aimet_tensorflow.keras.compress.modelcompressor static method)": [[30, "aimet_tensorflow.keras.compress.ModelCompressor.compress_model"]], "mac (aimet_common.defs.costmetric attribute)": [[30, "aimet_common.defs.CostMetric.mac"]], "manual (aimet_tensorflow.keras.defs.spatialsvdparameters.mode attribute)": [[30, "aimet_tensorflow.keras.defs.SpatialSvdParameters.Mode.manual"]], "memory (aimet_common.defs.costmetric attribute)": [[30, "aimet_common.defs.CostMetric.memory"]], "spatial_svd (aimet_common.defs.compressionscheme attribute)": [[30, "aimet_common.defs.CompressionScheme.spatial_svd"]], "weight_svd (aimet_common.defs.compressionscheme attribute)": [[30, "aimet_common.defs.CompressionScheme.weight_svd"]], "equalize_model() (in module aimet_tensorflow.keras.cross_layer_equalization)": [[31, "aimet_tensorflow.keras.cross_layer_equalization.equalize_model"]], "layeroutpututil (class in aimet_tensorflow.keras.layer_output_utils)": [[32, "aimet_tensorflow.keras.layer_output_utils.LayerOutputUtil"]], "generate_layer_outputs() (aimet_tensorflow.keras.layer_output_utils.layeroutpututil method)": [[32, "aimet_tensorflow.keras.layer_output_utils.LayerOutputUtil.generate_layer_outputs"]], "callbackfunc (class in aimet_common.defs)": [[33, "aimet_common.defs.CallbackFunc"], [45, "aimet_common.defs.CallbackFunc"], [59, "aimet_common.defs.CallbackFunc"]], "choose_fast_mixed_precision() (in module aimet_tensorflow.keras.mixed_precision)": [[33, "aimet_tensorflow.keras.mixed_precision.choose_fast_mixed_precision"]], "choose_mixed_precision() (in module aimet_tensorflow.keras.mixed_precision)": [[33, "aimet_tensorflow.keras.mixed_precision.choose_mixed_precision"]], "prepare_model() (in module aimet_tensorflow.keras.model_preparer)": [[35, "aimet_tensorflow.keras.model_preparer.prepare_model"]], "clssetinfo (class in aimet_tensorflow.keras.cross_layer_equalization)": [[36, "aimet_tensorflow.keras.cross_layer_equalization.ClsSetInfo"]], "clssetinfo.clssetlayerpairinfo (class in aimet_tensorflow.keras.cross_layer_equalization)": [[36, "aimet_tensorflow.keras.cross_layer_equalization.ClsSetInfo.ClsSetLayerPairInfo"]], "bias_fold() (in module aimet_tensorflow.keras.cross_layer_equalization.highbiasfold)": [[36, "aimet_tensorflow.keras.cross_layer_equalization.HighBiasFold.bias_fold"], [36, "id0"]], "fold_all_batch_norms() (in module aimet_tensorflow.keras.batch_norm_fold)": [[36, "aimet_tensorflow.keras.batch_norm_fold.fold_all_batch_norms"]], "fold_given_batch_norms() (in module aimet_tensorflow.keras.batch_norm_fold)": [[36, "aimet_tensorflow.keras.batch_norm_fold.fold_given_batch_norms"]], "scale_cls_sets() (in module aimet_tensorflow.keras.cross_layer_equalization.crosslayerscaling)": [[36, 
"aimet_tensorflow.keras.cross_layer_equalization.CrossLayerScaling.scale_cls_sets"]], "scale_model() (in module aimet_tensorflow.keras.cross_layer_equalization.crosslayerscaling)": [[36, "aimet_tensorflow.keras.cross_layer_equalization.CrossLayerScaling.scale_model"]], "quantanalyzer (class in aimet_tensorflow.keras.quant_analyzer)": [[37, "aimet_tensorflow.keras.quant_analyzer.QuantAnalyzer"]], "analyze() (aimet_tensorflow.keras.quant_analyzer.quantanalyzer method)": [[37, "aimet_tensorflow.keras.quant_analyzer.QuantAnalyzer.analyze"]], "quantizationsimmodel (class in aimet_tensorflow.keras.quantsim)": [[39, "aimet_tensorflow.keras.quantsim.QuantizationSimModel"]], "compute_encodings() (aimet_tensorflow.keras.quantsim.quantizationsimmodel method)": [[39, "aimet_tensorflow.keras.quantsim.QuantizationSimModel.compute_encodings"]], "export() (aimet_tensorflow.keras.quantsim.quantizationsimmodel method)": [[39, "aimet_tensorflow.keras.quantsim.QuantizationSimModel.export"]], "adaroundparameters (class in aimet_onnx.adaround.adaround_weight)": [[41, "aimet_onnx.adaround.adaround_weight.AdaroundParameters"]], "apply_adaround() (in module aimet_onnx.adaround.adaround_weight.adaround)": [[41, "aimet_onnx.adaround.adaround_weight.Adaround.apply_adaround"]], "autoquant (class in aimet_onnx.auto_quant_v2)": [[42, "aimet_onnx.auto_quant_v2.AutoQuant"]], "get_quant_scheme_candidates() (aimet_onnx.auto_quant_v2.autoquant method)": [[42, "aimet_onnx.auto_quant_v2.AutoQuant.get_quant_scheme_candidates"]], "optimize() (aimet_onnx.auto_quant_v2.autoquant method)": [[42, "aimet_onnx.auto_quant_v2.AutoQuant.optimize"]], "run_inference() (aimet_onnx.auto_quant_v2.autoquant method)": [[42, "aimet_onnx.auto_quant_v2.AutoQuant.run_inference"]], "set_adaround_params() (aimet_onnx.auto_quant_v2.autoquant method)": [[42, "aimet_onnx.auto_quant_v2.AutoQuant.set_adaround_params"]], "set_quant_scheme_candidates() (aimet_onnx.auto_quant_v2.autoquant method)": [[42, "aimet_onnx.auto_quant_v2.AutoQuant.set_quant_scheme_candidates"]], "equalize_model() (in module aimet_onnx.cross_layer_equalization)": [[43, "aimet_onnx.cross_layer_equalization.equalize_model"]], "layeroutpututil (class in aimet_onnx.layer_output_utils)": [[44, "aimet_onnx.layer_output_utils.LayerOutputUtil"]], "generate_layer_outputs() (aimet_onnx.layer_output_utils.layeroutpututil method)": [[44, "aimet_onnx.layer_output_utils.LayerOutputUtil.generate_layer_outputs"]], "evalcallbackfactory (class in aimet_onnx.amp.mixed_precision_algo)": [[45, "aimet_onnx.amp.mixed_precision_algo.EvalCallbackFactory"]], "quantizergroup (class in aimet_onnx.amp.quantizer_groups)": [[45, "aimet_onnx.amp.quantizer_groups.QuantizerGroup"]], "choose_mixed_precision() (in module aimet_onnx.mixed_precision)": [[45, "aimet_onnx.mixed_precision.choose_mixed_precision"]], "get_activation_quantizers() (aimet_onnx.amp.quantizer_groups.quantizergroup method)": [[45, "aimet_onnx.amp.quantizer_groups.QuantizerGroup.get_activation_quantizers"]], "get_active_quantizers() (aimet_onnx.amp.quantizer_groups.quantizergroup method)": [[45, "aimet_onnx.amp.quantizer_groups.QuantizerGroup.get_active_quantizers"]], "get_candidate() (aimet_onnx.amp.quantizer_groups.quantizergroup method)": [[45, "aimet_onnx.amp.quantizer_groups.QuantizerGroup.get_candidate"]], "get_param_quantizers() (aimet_onnx.amp.quantizer_groups.quantizergroup method)": [[45, "aimet_onnx.amp.quantizer_groups.QuantizerGroup.get_param_quantizers"]], "set_quantizers_to_candidate() (aimet_onnx.amp.quantizer_groups.quantizergroup 
method)": [[45, "aimet_onnx.amp.quantizer_groups.QuantizerGroup.set_quantizers_to_candidate"]], "sqnr() (aimet_onnx.amp.mixed_precision_algo.evalcallbackfactory method)": [[45, "aimet_onnx.amp.mixed_precision_algo.EvalCallbackFactory.sqnr"]], "to_list() (aimet_onnx.amp.quantizer_groups.quantizergroup method)": [[45, "aimet_onnx.amp.quantizer_groups.QuantizerGroup.to_list"]], "quantanalyzer (class in aimet_onnx.quant_analyzer)": [[46, "aimet_onnx.quant_analyzer.QuantAnalyzer"]], "analyze() (aimet_onnx.quant_analyzer.quantanalyzer method)": [[46, "aimet_onnx.quant_analyzer.QuantAnalyzer.analyze"]], "check_model_sensitivity_to_quantization() (aimet_onnx.quant_analyzer.quantanalyzer method)": [[46, "aimet_onnx.quant_analyzer.QuantAnalyzer.check_model_sensitivity_to_quantization"]], "create_quantsim_and_encodings() (aimet_onnx.quant_analyzer.quantanalyzer method)": [[46, "aimet_onnx.quant_analyzer.QuantAnalyzer.create_quantsim_and_encodings"]], "enable_per_layer_mse_loss() (aimet_onnx.quant_analyzer.quantanalyzer method)": [[46, "aimet_onnx.quant_analyzer.QuantAnalyzer.enable_per_layer_mse_loss"]], "export_per_layer_encoding_min_max_range() (aimet_onnx.quant_analyzer.quantanalyzer method)": [[46, "aimet_onnx.quant_analyzer.QuantAnalyzer.export_per_layer_encoding_min_max_range"]], "export_per_layer_mse_loss() (aimet_onnx.quant_analyzer.quantanalyzer method)": [[46, "aimet_onnx.quant_analyzer.QuantAnalyzer.export_per_layer_mse_loss"]], "export_per_layer_stats_histogram() (aimet_onnx.quant_analyzer.quantanalyzer method)": [[46, "aimet_onnx.quant_analyzer.QuantAnalyzer.export_per_layer_stats_histogram"]], "perform_per_layer_analysis_by_disabling_quantizers() (aimet_onnx.quant_analyzer.quantanalyzer method)": [[46, "aimet_onnx.quant_analyzer.QuantAnalyzer.perform_per_layer_analysis_by_disabling_quantizers"]], "perform_per_layer_analysis_by_enabling_quantizers() (aimet_onnx.quant_analyzer.quantanalyzer method)": [[46, "aimet_onnx.quant_analyzer.QuantAnalyzer.perform_per_layer_analysis_by_enabling_quantizers"]], "quantizationsimmodel (class in aimet_onnx.quantsim)": [[48, "aimet_onnx.quantsim.QuantizationSimModel"]], "compute_encodings() (aimet_onnx.quantsim.quantizationsimmodel method)": [[48, "aimet_onnx.quantsim.QuantizationSimModel.compute_encodings"]], "export() (aimet_onnx.quantsim.quantizationsimmodel method)": [[48, "aimet_onnx.quantsim.QuantizationSimModel.export"]], "adaroundparameters (class in aimet_torch.v1.adaround.adaround_weight)": [[51, "aimet_torch.v1.adaround.adaround_weight.AdaroundParameters"]], "apply_adaround() (in module aimet_torch.v1.adaround.adaround_weight.adaround)": [[51, "aimet_torch.v1.adaround.adaround_weight.Adaround.apply_adaround"]], "check_model_arch() (in module aimet_torch.arch_checker.arch_checker.archchecker)": [[52, "aimet_torch.arch_checker.arch_checker.ArchChecker.check_model_arch"]], "autoquant (class in aimet_torch.v1.auto_quant)": [[53, "aimet_torch.v1.auto_quant.AutoQuant"]], "fold_all_batch_norms_to_scale() (in module aimet_torch.batch_norm_fold)": [[54, "aimet_torch.batch_norm_fold.fold_all_batch_norms_to_scale"]], "reestimate_bn_stats() (in module aimet_torch.bn_reestimation)": [[54, "aimet_torch.bn_reestimation.reestimate_bn_stats"]], "activationtype (class in aimet_common.defs)": [[55, "aimet_common.defs.ActivationType"]], "convbninfotype (class in aimet_common.bias_correction)": [[55, "aimet_common.bias_correction.ConvBnInfoType"]], "quantparams (class in aimet_torch.v1.quantsim)": [[55, "aimet_torch.v1.quantsim.QuantParams"]], "correct_bias() (in 
module aimet_torch.bias_correction)": [[55, "aimet_torch.bias_correction.correct_bias"]], "no_activation (aimet_common.defs.activationtype attribute)": [[55, "aimet_common.defs.ActivationType.no_activation"]], "relu (aimet_common.defs.activationtype attribute)": [[55, "aimet_common.defs.ActivationType.relu"]], "relu6 (aimet_common.defs.activationtype attribute)": [[55, "aimet_common.defs.ActivationType.relu6"]], "channelpruningparameters (class in aimet_torch.defs)": [[56, "aimet_torch.defs.ChannelPruningParameters"]], "channelpruningparameters.automodeparams (class in aimet_torch.defs)": [[56, "aimet_torch.defs.ChannelPruningParameters.AutoModeParams"]], "channelpruningparameters.manualmodeparams (class in aimet_torch.defs)": [[56, "aimet_torch.defs.ChannelPruningParameters.ManualModeParams"]], "channelpruningparameters.mode (class in aimet_torch.defs)": [[56, "aimet_torch.defs.ChannelPruningParameters.Mode"]], "greedyselectionparameters (class in aimet_common.defs)": [[56, "aimet_common.defs.GreedySelectionParameters"]], "modelcompressor (class in aimet_torch.compress)": [[56, "aimet_torch.compress.ModelCompressor"]], "modulecompratiopair (class in aimet_torch.defs)": [[56, "aimet_torch.defs.ModuleCompRatioPair"]], "spatialsvdparameters (class in aimet_torch.defs)": [[56, "aimet_torch.defs.SpatialSvdParameters"]], "spatialsvdparameters.automodeparams (class in aimet_torch.defs)": [[56, "aimet_torch.defs.SpatialSvdParameters.AutoModeParams"]], "spatialsvdparameters.manualmodeparams (class in aimet_torch.defs)": [[56, "aimet_torch.defs.SpatialSvdParameters.ManualModeParams"]], "spatialsvdparameters.mode (class in aimet_torch.defs)": [[56, "aimet_torch.defs.SpatialSvdParameters.Mode"]], "weightsvdparameters (class in aimet_torch.defs)": [[56, "aimet_torch.defs.WeightSvdParameters"]], "weightsvdparameters.automodeparams (class in aimet_torch.defs)": [[56, "aimet_torch.defs.WeightSvdParameters.AutoModeParams"]], "weightsvdparameters.manualmodeparams (class in aimet_torch.defs)": [[56, "aimet_torch.defs.WeightSvdParameters.ManualModeParams"]], "weightsvdparameters.mode (class in aimet_torch.defs)": [[56, "aimet_torch.defs.WeightSvdParameters.Mode"]], "auto (aimet_torch.defs.channelpruningparameters.mode attribute)": [[56, "aimet_torch.defs.ChannelPruningParameters.Mode.auto"]], "auto (aimet_torch.defs.spatialsvdparameters.mode attribute)": [[56, "aimet_torch.defs.SpatialSvdParameters.Mode.auto"]], "auto (aimet_torch.defs.weightsvdparameters.mode attribute)": [[56, "aimet_torch.defs.WeightSvdParameters.Mode.auto"]], "compress_model() (aimet_torch.compress.modelcompressor static method)": [[56, "aimet_torch.compress.ModelCompressor.compress_model"]], "manual (aimet_torch.defs.channelpruningparameters.mode attribute)": [[56, "aimet_torch.defs.ChannelPruningParameters.Mode.manual"]], "manual (aimet_torch.defs.spatialsvdparameters.mode attribute)": [[56, "aimet_torch.defs.SpatialSvdParameters.Mode.manual"]], "manual (aimet_torch.defs.weightsvdparameters.mode attribute)": [[56, "aimet_torch.defs.WeightSvdParameters.Mode.manual"]], "equalize_model() (in module aimet_torch.cross_layer_equalization)": [[57, "aimet_torch.cross_layer_equalization.equalize_model"]], "layeroutpututil (class in aimet_torch.layer_output_utils)": [[58, "aimet_torch.layer_output_utils.LayerOutputUtil"]], "namingscheme (class in aimet_torch.layer_output_utils)": [[58, "aimet_torch.layer_output_utils.NamingScheme"]], "onnx (aimet_torch.layer_output_utils.namingscheme attribute)": [[58, 
"aimet_torch.layer_output_utils.NamingScheme.ONNX"]], "pytorch (aimet_torch.layer_output_utils.namingscheme attribute)": [[58, "aimet_torch.layer_output_utils.NamingScheme.PYTORCH"]], "torchscript (aimet_torch.layer_output_utils.namingscheme attribute)": [[58, "aimet_torch.layer_output_utils.NamingScheme.TORCHSCRIPT"]], "generate_layer_outputs() (aimet_torch.layer_output_utils.layeroutpututil method)": [[58, "aimet_torch.layer_output_utils.LayerOutputUtil.generate_layer_outputs"]], "evalcallbackfactory (class in aimet_torch.amp.mixed_precision_algo)": [[59, "aimet_torch.amp.mixed_precision_algo.EvalCallbackFactory"]], "quantizergroup (class in aimet_torch.amp.quantizer_groups)": [[59, "aimet_torch.amp.quantizer_groups.QuantizerGroup"]], "choose_mixed_precision() (in module aimet_torch.mixed_precision)": [[59, "aimet_torch.mixed_precision.choose_mixed_precision"]], "get_active_quantizers() (aimet_torch.amp.quantizer_groups.quantizergroup method)": [[59, "aimet_torch.amp.quantizer_groups.QuantizerGroup.get_active_quantizers"]], "get_candidate() (aimet_torch.amp.quantizer_groups.quantizergroup method)": [[59, "aimet_torch.amp.quantizer_groups.QuantizerGroup.get_candidate"]], "get_input_quantizer_modules() (aimet_torch.amp.quantizer_groups.quantizergroup method)": [[59, "aimet_torch.amp.quantizer_groups.QuantizerGroup.get_input_quantizer_modules"]], "set_quantizers_to_candidate() (aimet_torch.amp.quantizer_groups.quantizergroup method)": [[59, "aimet_torch.amp.quantizer_groups.QuantizerGroup.set_quantizers_to_candidate"]], "sqnr() (aimet_torch.amp.mixed_precision_algo.evalcallbackfactory method)": [[59, "aimet_torch.amp.mixed_precision_algo.EvalCallbackFactory.sqnr"]], "to_list() (aimet_torch.amp.quantizer_groups.quantizergroup method)": [[59, "aimet_torch.amp.quantizer_groups.QuantizerGroup.to_list"]], "prepare_model() (in module aimet_torch.model_preparer)": [[61, "aimet_torch.model_preparer.prepare_model"]], "adaptermetadata (class in aimet_torch.peft)": [[64, "aimet_torch.peft.AdapterMetaData"]], "peftquantutils (class in aimet_torch.peft)": [[64, "aimet_torch.peft.PeftQuantUtils"]], "disable_lora_adapters() (aimet_torch.peft.peftquantutils method)": [[64, "aimet_torch.peft.PeftQuantUtils.disable_lora_adapters"]], "enable_adapter_and_load_weights() (aimet_torch.peft.peftquantutils method)": [[64, "aimet_torch.peft.PeftQuantUtils.enable_adapter_and_load_weights"]], "export_adapter_weights() (aimet_torch.peft.peftquantutils method)": [[64, "aimet_torch.peft.PeftQuantUtils.export_adapter_weights"]], "freeze_base_model() (aimet_torch.peft.peftquantutils method)": [[64, "aimet_torch.peft.PeftQuantUtils.freeze_base_model"]], "freeze_base_model_activation_quantizers() (aimet_torch.peft.peftquantutils method)": [[64, "aimet_torch.peft.PeftQuantUtils.freeze_base_model_activation_quantizers"]], "freeze_base_model_param_quantizers() (aimet_torch.peft.peftquantutils method)": [[64, "aimet_torch.peft.PeftQuantUtils.freeze_base_model_param_quantizers"]], "get_fp_lora_layer() (aimet_torch.peft.peftquantutils method)": [[64, "aimet_torch.peft.PeftQuantUtils.get_fp_lora_layer"]], "get_quantized_lora_layer() (aimet_torch.peft.peftquantutils method)": [[64, "aimet_torch.peft.PeftQuantUtils.get_quantized_lora_layer"]], "quantize_lora_scale_with_fixed_range() (aimet_torch.peft.peftquantutils method)": [[64, "aimet_torch.peft.PeftQuantUtils.quantize_lora_scale_with_fixed_range"]], "replace_lora_layers_with_quantizable_layers() (aimet_torch.peft method)": [[64, 
"aimet_torch.peft.replace_lora_layers_with_quantizable_layers"]], "set_bitwidth_for_lora_adapters() (aimet_torch.peft.peftquantutils method)": [[64, "aimet_torch.peft.PeftQuantUtils.set_bitwidth_for_lora_adapters"]], "track_lora_meta_data() (aimet_torch.peft method)": [[64, "aimet_torch.peft.track_lora_meta_data"]], "clssetinfo (class in aimet_torch.cross_layer_equalization)": [[65, "aimet_torch.cross_layer_equalization.ClsSetInfo"]], "clssetinfo.clssetlayerpairinfo (class in aimet_torch.cross_layer_equalization)": [[65, "aimet_torch.cross_layer_equalization.ClsSetInfo.ClsSetLayerPairInfo"]], "bias_fold() (in module aimet_torch.cross_layer_equalization.highbiasfold)": [[65, "aimet_torch.cross_layer_equalization.HighBiasFold.bias_fold"], [65, "id0"]], "fold_all_batch_norms() (in module aimet_torch.batch_norm_fold)": [[65, "aimet_torch.batch_norm_fold.fold_all_batch_norms"]], "fold_given_batch_norms() (in module aimet_torch.batch_norm_fold)": [[65, "aimet_torch.batch_norm_fold.fold_given_batch_norms"]], "scale_cls_sets() (in module aimet_torch.cross_layer_equalization.crosslayerscaling)": [[65, "aimet_torch.cross_layer_equalization.CrossLayerScaling.scale_cls_sets"]], "scale_model() (in module aimet_torch.cross_layer_equalization.crosslayerscaling)": [[65, "aimet_torch.cross_layer_equalization.CrossLayerScaling.scale_model"]], "callbackfunc (class in aimet_common.utils)": [[66, "aimet_common.utils.CallbackFunc"]], "quantanalyzer (class in aimet_torch.v1.quant_analyzer)": [[66, "aimet_torch.v1.quant_analyzer.QuantAnalyzer"]], "analyze() (aimet_torch.v1.quant_analyzer.quantanalyzer method)": [[66, "aimet_torch.v1.quant_analyzer.QuantAnalyzer.analyze"]], "check_model_sensitivity_to_quantization() (aimet_torch.v1.quant_analyzer.quantanalyzer method)": [[66, "aimet_torch.v1.quant_analyzer.QuantAnalyzer.check_model_sensitivity_to_quantization"]], "enable_per_layer_mse_loss() (aimet_torch.v1.quant_analyzer.quantanalyzer method)": [[66, "aimet_torch.v1.quant_analyzer.QuantAnalyzer.enable_per_layer_mse_loss"]], "export_per_layer_encoding_min_max_range() (aimet_torch.v1.quant_analyzer.quantanalyzer method)": [[66, "aimet_torch.v1.quant_analyzer.QuantAnalyzer.export_per_layer_encoding_min_max_range"]], "export_per_layer_mse_loss() (aimet_torch.v1.quant_analyzer.quantanalyzer method)": [[66, "aimet_torch.v1.quant_analyzer.QuantAnalyzer.export_per_layer_mse_loss"]], "export_per_layer_stats_histogram() (aimet_torch.v1.quant_analyzer.quantanalyzer method)": [[66, "aimet_torch.v1.quant_analyzer.QuantAnalyzer.export_per_layer_stats_histogram"]], "perform_per_layer_analysis_by_disabling_quant_wrappers() (aimet_torch.v1.quant_analyzer.quantanalyzer method)": [[66, "aimet_torch.v1.quant_analyzer.QuantAnalyzer.perform_per_layer_analysis_by_disabling_quant_wrappers"]], "perform_per_layer_analysis_by_enabling_quant_wrappers() (aimet_torch.v1.quant_analyzer.quantanalyzer method)": [[66, "aimet_torch.v1.quant_analyzer.QuantAnalyzer.perform_per_layer_analysis_by_enabling_quant_wrappers"]], "quantizationsimmodel (class in aimet_torch.v1.quantsim)": [[68, "aimet_torch.v1.quantsim.QuantizationSimModel"]], "compute_encodings() (aimet_torch.v1.quantsim.quantizationsimmodel method)": [[68, "aimet_torch.v1.quantsim.QuantizationSimModel.compute_encodings"]], "export() (aimet_torch.v1.quantsim.quantizationsimmodel method)": [[68, "aimet_torch.v1.quantsim.QuantizationSimModel.export"]], "load_checkpoint() (aimet_torch.v1.quantsim method)": [[68, "aimet_torch.v1.quantsim.load_checkpoint"]], "save_checkpoint() 
(aimet_torch.v1.quantsim method)": [[68, "aimet_torch.v1.quantsim.save_checkpoint"]], "scatterdense (class in aimet_torch.nn.modules.custom)": [[69, "aimet_torch.nn.modules.custom.ScatterDense"]], "sparsetensorwrapper (class in aimet_torch.nn.modules.custom)": [[69, "aimet_torch.nn.modules.custom.SparseTensorWrapper"]], "visualizecompression (class in aimet_torch.visualize_serialized_data)": [[70, "aimet_torch.visualize_serialized_data.VisualizeCompression"]], "display_comp_ratio_plot() (aimet_torch.visualize_serialized_data.visualizecompression method)": [[70, "aimet_torch.visualize_serialized_data.VisualizeCompression.display_comp_ratio_plot"]], "display_eval_scores() (aimet_torch.visualize_serialized_data.visualizecompression method)": [[70, "aimet_torch.visualize_serialized_data.VisualizeCompression.display_eval_scores"]], "visualize_changes_after_optimization() (in module aimet_torch.visualize_model)": [[71, "aimet_torch.visualize_model.visualize_changes_after_optimization"]], "visualize_relative_weight_ranges_to_identify_problematic_layers() (in module aimet_torch.visualize_model)": [[71, "aimet_torch.visualize_model.visualize_relative_weight_ranges_to_identify_problematic_layers"]], "visualize_weight_ranges() (in module aimet_torch.visualize_model)": [[71, "aimet_torch.visualize_model.visualize_weight_ranges"]], "quantizationmixin (class in aimet_torch.v2.nn)": [[77, "aimet_torch.v2.nn.QuantizationMixin"], [88, "aimet_torch.v2.nn.QuantizationMixin"]], "__quant_init__() (aimet_torch.v2.nn.quantizationmixin method)": [[77, "aimet_torch.v2.nn.QuantizationMixin.__quant_init__"], [88, "aimet_torch.v2.nn.QuantizationMixin.__quant_init__"]], "compute_encodings() (aimet_torch.v2.nn.quantizationmixin method)": [[77, "aimet_torch.v2.nn.QuantizationMixin.compute_encodings"], [88, "aimet_torch.v2.nn.QuantizationMixin.compute_encodings"]], "forward() (aimet_torch.v2.nn.quantizationmixin method)": [[77, "aimet_torch.v2.nn.QuantizationMixin.forward"], [88, "aimet_torch.v2.nn.QuantizationMixin.forward"]], "from_module() (aimet_torch.v2.nn.quantizationmixin class method)": [[77, "aimet_torch.v2.nn.QuantizationMixin.from_module"]], "get_default_kernel() (aimet_torch.v2.nn.quantizationmixin class method)": [[77, "aimet_torch.v2.nn.QuantizationMixin.get_default_kernel"]], "get_kernel() (aimet_torch.v2.nn.quantizationmixin method)": [[77, "aimet_torch.v2.nn.QuantizationMixin.get_kernel"]], "implements() (aimet_torch.v2.nn.quantizationmixin class method)": [[77, "aimet_torch.v2.nn.QuantizationMixin.implements"]], "input_quantizers (aimet_torch.v2.nn.quantizationmixin attribute)": [[77, "aimet_torch.v2.nn.QuantizationMixin.input_quantizers"], [88, "aimet_torch.v2.nn.QuantizationMixin.input_quantizers"]], "output_quantizers (aimet_torch.v2.nn.quantizationmixin attribute)": [[77, "aimet_torch.v2.nn.QuantizationMixin.output_quantizers"], [88, "aimet_torch.v2.nn.QuantizationMixin.output_quantizers"]], "param_quantizers (aimet_torch.v2.nn.quantizationmixin attribute)": [[77, "aimet_torch.v2.nn.QuantizationMixin.param_quantizers"], [88, "aimet_torch.v2.nn.QuantizationMixin.param_quantizers"]], "set_default_kernel() (aimet_torch.v2.nn.quantizationmixin class method)": [[77, "aimet_torch.v2.nn.QuantizationMixin.set_default_kernel"]], "set_kernel() (aimet_torch.v2.nn.quantizationmixin method)": [[77, "aimet_torch.v2.nn.QuantizationMixin.set_kernel"]], "quantize (class in aimet_torch.v2.quantization.affine)": [[78, "aimet_torch.v2.quantization.affine.Quantize"]], "quantizedequantize (class in 
aimet_torch.v2.quantization.affine)": [[78, "aimet_torch.v2.quantization.affine.QuantizeDequantize"]], "aimet_torch.v2.quantization.affine": [[78, "module-aimet_torch.v2.quantization.affine"]], "dequantize() (in module aimet_torch.v2.quantization.affine)": [[78, "aimet_torch.v2.quantization.affine.dequantize"]], "module": [[78, "module-aimet_torch.v2.quantization.affine"], [80, "module-aimet_torch.v2.quantization.float"]], "quantize() (in module aimet_torch.v2.quantization.affine)": [[78, "aimet_torch.v2.quantization.affine.quantize"]], "quantize_dequantize() (in module aimet_torch.v2.quantization.affine)": [[78, "aimet_torch.v2.quantization.affine.quantize_dequantize"]], "floatquantizedequantize (class in aimet_torch.v2.quantization.float)": [[79, "aimet_torch.v2.quantization.float.FloatQuantizeDequantize"], [80, "aimet_torch.v2.quantization.float.FloatQuantizeDequantize"]], "quantizedequantize (class in aimet_torch.v2.quantization.float)": [[79, "aimet_torch.v2.quantization.float.QuantizeDequantize"], [80, "aimet_torch.v2.quantization.float.QuantizeDequantize"]], "aimet_torch.v2.quantization.float": [[80, "module-aimet_torch.v2.quantization.float"]], "dequantizedtensor (class in aimet_torch.v2.quantization.tensor)": [[81, "aimet_torch.v2.quantization.tensor.DequantizedTensor"]], "quantizedtensor (class in aimet_torch.v2.quantization.tensor)": [[81, "aimet_torch.v2.quantization.tensor.QuantizedTensor"]], "dequantize() (aimet_torch.v2.quantization.tensor.dequantizedtensor method)": [[81, "aimet_torch.v2.quantization.tensor.DequantizedTensor.dequantize"]], "dequantize() (aimet_torch.v2.quantization.tensor.quantizedtensor method)": [[81, "aimet_torch.v2.quantization.tensor.QuantizedTensor.dequantize"]], "quantize() (aimet_torch.v2.quantization.tensor.dequantizedtensor method)": [[81, "aimet_torch.v2.quantization.tensor.DequantizedTensor.quantize"]], "quantize() (aimet_torch.v2.quantization.tensor.quantizedtensor method)": [[81, "aimet_torch.v2.quantization.tensor.QuantizedTensor.quantize"]], "quantized_repr() (aimet_torch.v2.quantization.tensor.dequantizedtensor method)": [[81, "aimet_torch.v2.quantization.tensor.DequantizedTensor.quantized_repr"]], "quantized_repr() (aimet_torch.v2.quantization.tensor.quantizedtensor method)": [[81, "aimet_torch.v2.quantization.tensor.QuantizedTensor.quantized_repr"]], "visualize_stats() (in module aimet_torch.v2.visualization_tools)": [[82, "aimet_torch.v2.visualization_tools.visualize_stats"]], "set_activation_quantizers_to_float() (in module aimet_torch.v2.quantsim.config_utils)": [[83, "aimet_torch.v2.quantsim.config_utils.set_activation_quantizers_to_float"]], "set_blockwise_quantization_for_weights() (in module aimet_torch.v2.quantsim.config_utils)": [[83, "aimet_torch.v2.quantsim.config_utils.set_blockwise_quantization_for_weights"]], "set_grouped_blockwise_quantization_for_weights() (in module aimet_torch.v2.quantsim.config_utils)": [[83, "aimet_torch.v2.quantsim.config_utils.set_grouped_blockwise_quantization_for_weights"]], "encodinganalyzer (class in aimet_torch.v2.quantization.encoding_analyzer)": [[84, "aimet_torch.v2.quantization.encoding_analyzer.EncodingAnalyzer"]], "minmaxencodinganalyzer (class in aimet_torch.v2.quantization.encoding_analyzer)": [[84, "aimet_torch.v2.quantization.encoding_analyzer.MinMaxEncodingAnalyzer"]], "percentileencodinganalyzer (class in aimet_torch.v2.quantization.encoding_analyzer)": [[84, "aimet_torch.v2.quantization.encoding_analyzer.PercentileEncodingAnalyzer"]], "sqnrencodinganalyzer (class in 
aimet_torch.v2.quantization.encoding_analyzer)": [[84, "aimet_torch.v2.quantization.encoding_analyzer.SqnrEncodingAnalyzer"]], "compute_encodings() (aimet_torch.v2.quantization.encoding_analyzer.encodinganalyzer method)": [[84, "aimet_torch.v2.quantization.encoding_analyzer.EncodingAnalyzer.compute_encodings"]], "reset_stats() (aimet_torch.v2.quantization.encoding_analyzer.encodinganalyzer method)": [[84, "aimet_torch.v2.quantization.encoding_analyzer.EncodingAnalyzer.reset_stats"]], "update_stats() (aimet_torch.v2.quantization.encoding_analyzer.encodinganalyzer method)": [[84, "aimet_torch.v2.quantization.encoding_analyzer.EncodingAnalyzer.update_stats"]], "gptvqparameters (class in aimet_torch.gptvq.defs)": [[86, "aimet_torch.gptvq.defs.GPTVQParameters"]], "apply_gptvq() (in module aimet_torch.gptvq.gptvq_weight.gptvq)": [[86, "aimet_torch.gptvq.gptvq_weight.GPTVQ.apply_gptvq"]], "quantize (class in aimet_torch.v2.quantization.affine.quantizer)": [[89, "aimet_torch.v2.quantization.affine.quantizer.Quantize"]], "quantizedequantize (class in aimet_torch.v2.quantization.affine.quantizer)": [[89, "aimet_torch.v2.quantization.affine.quantizer.QuantizeDequantize"]], "quantizerbase (class in aimet_torch.v2.quantization.affine.quantizer)": [[89, "aimet_torch.v2.quantization.affine.quantizer.QuantizerBase"]], "allow_overwrite() (aimet_torch.v2.quantization.affine.quantizer.quantizerbase method)": [[89, "aimet_torch.v2.quantization.affine.quantizer.QuantizerBase.allow_overwrite"]], "compute_encodings() (aimet_torch.v2.quantization.affine.quantizer.quantizerbase method)": [[89, "aimet_torch.v2.quantization.affine.quantizer.QuantizerBase.compute_encodings"]], "forward() (aimet_torch.v2.quantization.affine.quantizer.quantize method)": [[89, "aimet_torch.v2.quantization.affine.quantizer.Quantize.forward"]], "forward() (aimet_torch.v2.quantization.affine.quantizer.quantizedequantize method)": [[89, "aimet_torch.v2.quantization.affine.quantizer.QuantizeDequantize.forward"]], "get_encoding() (aimet_torch.v2.quantization.affine.quantizer.quantizerbase method)": [[89, "aimet_torch.v2.quantization.affine.quantizer.QuantizerBase.get_encoding"]], "get_legacy_encodings() (aimet_torch.v2.quantization.affine.quantizer.quantizerbase method)": [[89, "aimet_torch.v2.quantization.affine.quantizer.QuantizerBase.get_legacy_encodings"]], "is_initialized() (aimet_torch.v2.quantization.affine.quantizer.quantizerbase method)": [[89, "aimet_torch.v2.quantization.affine.quantizer.QuantizerBase.is_initialized"]], "register_quantization_parameter() (aimet_torch.v2.quantization.affine.quantizer.quantizerbase method)": [[89, "aimet_torch.v2.quantization.affine.quantizer.QuantizerBase.register_quantization_parameter"]], "set_legacy_encodings() (aimet_torch.v2.quantization.affine.quantizer.quantizerbase method)": [[89, "aimet_torch.v2.quantization.affine.quantizer.QuantizerBase.set_legacy_encodings"]]}})
\ No newline at end of file
diff --git a/releases/1.35.1/torch_v2/install/index.html b/releases/1.35.1/torch_v2/install/index.html
index 9a8c439..4074712 100644
--- a/releases/1.35.1/torch_v2/install/index.html
+++ b/releases/1.35.1/torch_v2/install/index.html
@@ -134,25 +134,25 @@ Release Packages
# Pytorch 2.1 with CUDA 12.x
-python3 -m pip install https://github.com/quic/aimet/releases/download/1.35.1/aimet_torch-1.35.1.cu121-cp310-cp310-manylinux_2_34_x86_64.whl -f https://download.pytorch.org/whl/torch_stable.html
+python3 -m pip install https://github.com/quic/aimet/releases/download/1.35.1/aimet_torch-1.35.1+cu121-cp310-cp310-manylinux_2_34_x86_64.whl -f https://download.pytorch.org/whl/torch_stable.html
# Pytorch 2.1 CPU only
-python3 -m pip install https://github.com/quic/aimet/releases/download/1.35.1/aimet_torch-1.35.1.cpu-cp310-cp310-manylinux_2_34_x86_64.whl -f https://download.pytorch.org/whl/torch_stable.html
+python3 -m pip install https://github.com/quic/aimet/releases/download/1.35.1/aimet_torch-1.35.1+cpu-cp310-cp310-manylinux_2_34_x86_64.whl -f https://download.pytorch.org/whl/torch_stable.html
# Pytorch 1.13 with CUDA 11.x
-python3 -m pip install https://github.com/quic/aimet/releases/download/1.35.1/aimet_torch-1.35.1.cu117-cp310-cp310-manylinux_2_34_x86_64.whl -f https://download.pytorch.org/whl/torch_stable.html
+python3 -m pip install https://github.com/quic/aimet/releases/download/1.35.1/aimet_torch-1.35.1+cu117-cp310-cp310-manylinux_2_34_x86_64.whl -f https://download.pytorch.org/whl/torch_stable.html
TensorFlow
# Tensorflow 2.10 GPU with CUDA 11.x
-python3 -m pip install https://github.com/quic/aimet/releases/download/1.35.1/aimet_tensorflow-1.35.1.cu118-cp310-cp310-manylinux_2_34_x86_64.whl
+python3 -m pip install https://github.com/quic/aimet/releases/download/1.35.1/aimet_tensorflow-1.35.1+cu118-cp310-cp310-manylinux_2_34_x86_64.whl
# Tensorflow 2.10 CPU only
-python3 -m pip install https://github.com/quic/aimet/releases/download/1.35.1/aimet_tensorflow-1.35.1.cpu-cp310-cp310-manylinux_2_34_x86_64.whl
+python3 -m pip install https://github.com/quic/aimet/releases/download/1.35.1/aimet_tensorflow-1.35.1+cpu-cp310-cp310-manylinux_2_34_x86_64.whl
ONNX
# ONNX 1.16 GPU with CUDA 11.x
-python3 -m pip install https://github.com/quic/aimet/releases/download/1.35.1/aimet_onnx-1.35.1.cu117-cp310-cp310-manylinux_2_34_x86_64.whl -f https://download.pytorch.org/whl/torch_stable.html
+python3 -m pip install https://github.com/quic/aimet/releases/download/1.35.1/aimet_onnx-1.35.1+cu118-cp310-cp310-manylinux_2_34_x86_64.whl -f https://download.pytorch.org/whl/torch_stable.html
# ONNX 1.16 CPU
-python3 -m pip install https://github.com/quic/aimet/releases/download/1.35.1/aimet_onnx-1.35.1.cpu-cp310-cp310-manylinux_2_34_x86_64.whl -f https://download.pytorch.org/whl/torch_stable.html
+python3 -m pip install https://github.com/quic/aimet/releases/download/1.35.1/aimet_onnx-1.35.1+cpu-cp310-cp310-manylinux_2_34_x86_64.whl -f https://download.pytorch.org/whl/torch_stable.html
For older versions, please browse the releases at https://github.com/quic/aimet/releases and follow the documentation corresponding to that release to select and install the appropriate package.
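As a hedged illustration of that pattern (not a verbatim command: `<release_tag>` and `<wheel_filename>` are placeholders, and the exact wheel names differ between releases, so confirm them on the releases page first), an older wheel would be installed by substituting its tag and filename into the same URL scheme shown above; the extra index URL is only needed for the PyTorch and ONNX variants. A bare import is a quick sanity check that the install succeeded:
# Hypothetical pattern for an older release; replace the placeholders with values from the releases page
python3 -m pip install https://github.com/quic/aimet/releases/download/<release_tag>/<wheel_filename> -f https://download.pytorch.org/whl/torch_stable.html
# Verify the install (use aimet_tensorflow or aimet_onnx if that is the variant installed)
python3 -c "import aimet_torch"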
diff --git a/releases/1.35.1/torch_v2/install/install_docker.html b/releases/1.35.1/torch_v2/install/install_docker.html
index e7d81f7..e5bc58f 100644
--- a/releases/1.35.1/torch_v2/install/install_docker.html
+++ b/releases/1.35.1/torch_v2/install/install_docker.html
@@ -217,14 +217,14 @@ From Release Package
Set the package details as follows:
-