# Reconstructed from git patch 4ad89ea8efabdc206fb950f00bc3953305c17563
# ("Add phi-3-medium model", dsudhakarTT, Tue 18 Feb 2025), which creates
# forge/test/models/pytorch/text/phi3/test_phi3_medium.py.
#
# SPDX-FileCopyrightText: (c) 2025 Tenstorrent AI ULC
#
# SPDX-License-Identifier: Apache-2.0
"""Smoke test for the Phi-3-medium causal-LM model.

Loads the HuggingFace checkpoint, runs one eager CPU forward pass as a
sanity check, then hands the model to ``forge.compile``.
"""
import pytest
import torch
from loguru import logger
from transformers import AutoTokenizer, Phi3Config, Phi3ForCausalLM

import forge

from test.models.utils import Framework, Source, Task, build_module_name

# HuggingFace model ids exercised by this test (parametrized below).
variants = ["microsoft/Phi-3-medium-128k-instruct"]


@pytest.mark.parametrize("variant", variants)
def test_phi3_causal_lm(variant):
    """Run a single prompt through Phi-3-medium on CPU and compile it with Forge.

    :param variant: HuggingFace model id of the Phi-3 checkpoint to test.
    """
    # Disable the KV cache and dict-style outputs so the traced/compiled graph
    # is a plain tensor tuple. Passing the overrides straight to
    # ``from_pretrained`` replaces the original
    # ``to_dict() -> mutate -> Phi3Config(**d)`` round-trip with the documented
    # transformers idiom; the resulting config is the same.
    config = Phi3Config.from_pretrained(variant, return_dict=False, use_cache=False)

    tokenizer = AutoTokenizer.from_pretrained(variant)
    framework_model = Phi3ForCausalLM.from_pretrained(variant, config=config).to("cpu")
    framework_model.eval()  # inference mode: disables dropout etc.

    input_prompt = "Africa is an emerging economy because"
    inputs = tokenizer(input_prompt, return_tensors="pt").to("cpu")

    # Eager forward pass first, so a broken checkpoint fails before compilation.
    with torch.no_grad():
        op = framework_model(inputs["input_ids"], inputs["attention_mask"])
    logger.info(f"op={op}")

    # NOTE(review): both calls below pass arguments positionally; the
    # signatures of ``build_module_name`` and ``forge.compile`` are not visible
    # here — confirm the order (framework/model/variant/task/source, and
    # module/sample_inputs/module_name respectively) against their definitions.
    module_name = build_module_name(variant, Source.HUGGINGFACE, Framework.PYTORCH, Task.CAUSAL_LM)
    compiled_model = forge.compile(framework_model, [inputs["input_ids"], inputs["attention_mask"]], module_name)