
Commit bf2cd30

conftest argument fix

jbedichekTT committed Feb 24, 2025
1 parent 74dd352 commit bf2cd30
Showing 4 changed files with 3 additions and 4 deletions.
tests/models/albert/test_albert_masked_lm.py (1 change: 0 additions & 1 deletion)
@@ -53,7 +53,6 @@ def test_albert_masked_lm(record_property, model_name, mode, batch_size):
     # retrieve index of [MASK]
 
     results.logits = process_batched_logits(results.logits, batch_size)
-    # print(results.logits.shape)
     logits = results.logits
     mask_token_index = (tester.inputs.input_ids == tester.tokenizer.mask_token_id)[0].nonzero(as_tuple=True)[0]
     predicted_token_id = logits[0, mask_token_index].argmax(axis=-1)
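For context, the test above follows the standard Hugging Face masked-LM recipe: locate the [MASK] position in the input ids, then take the argmax over the vocabulary logits at that position. A minimal standalone sketch of that flow (the albert-base-v2 checkpoint and the decode step are illustrative assumptions, not taken from this repository's ModelTester):

import torch
from transformers import AutoTokenizer, AlbertForMaskedLM

# Assumed checkpoint for illustration; the test wires this up via ModelTester.
tokenizer = AutoTokenizer.from_pretrained("albert-base-v2")
model = AlbertForMaskedLM.from_pretrained("albert-base-v2")

inputs = tokenizer("The capital of France is [MASK].", return_tensors="pt")
with torch.no_grad():
    logits = model(**inputs).logits  # shape: (batch, seq_len, vocab_size)

# Find [MASK] in the first sequence, then the top-scoring vocab id there.
mask_token_index = (inputs.input_ids == tokenizer.mask_token_id)[0].nonzero(as_tuple=True)[0]
predicted_token_id = logits[0, mask_token_index].argmax(axis=-1)
print(tokenizer.decode(predicted_token_id))  # expected: "paris"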
tests/models/albert/test_albert_question_answering.py (2 changes: 1 addition & 1 deletion)
@@ -3,7 +3,7 @@
 from transformers import AutoTokenizer, AlbertForQuestionAnswering
 import torch
 import pytest
-from tests.utils import ModelTester, process_batched_logits, batch_object_inputs
+from tests.utils import ModelTester, process_batched_logits
 
 
 class ThisTester(ModelTester):
tests/models/albert/test_albert_sequence_classification.py (2 changes: 1 addition & 1 deletion)
@@ -3,7 +3,7 @@
 from transformers import AlbertTokenizer, AlbertForSequenceClassification
 import torch
 import pytest
-from tests.utils import ModelTester, process_batched_logits, batch_object_inputs
+from tests.utils import ModelTester, process_batched_logits
 
 
 class ThisTester(ModelTester):
tests/utils.py (2 changes: 1 addition & 1 deletion)
@@ -157,7 +157,7 @@ def test_model_eval(self, as_ttnn=False, option=None):
         if as_ttnn == True:
             model = self.compile_model(model, option)
         if self.batch_size is not None:
-            outputs = self.run_model_batched(model, inputs, self.batch_size)
+            outputs = self.run_model_batched(model, inputs)
         else:
             outputs = self.run_model(model, inputs)
         results = self.get_results_eval(model, inputs, outputs)
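The tests/utils.py change is the substantive fix: test_model_eval passed self.batch_size to run_model_batched even though the tester instance already carries it. A minimal sketch of the mismatch, assuming run_model_batched takes only (model, inputs) and reads the batch size from self (the actual method body is not shown in this diff):

class ModelTester:
    def __init__(self, batch_size=None):
        self.batch_size = batch_size

    def run_model_batched(self, model, inputs):
        # Assumed: batch size comes from the instance, not a parameter.
        return model(inputs)

tester = ModelTester(batch_size=4)
# Before the fix (one positional argument too many):
#   tester.run_model_batched(model, inputs, tester.batch_size)
#   TypeError: run_model_batched() takes 3 positional arguments but 4 were given
# After the fix:
#   outputs = tester.run_model_batched(model, inputs)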
