Skip to content

Commit

Permalink
Fix Falcon validation (#1664)
Browse files Browse the repository at this point in the history
The issue does not reproduce when the model is exported with optimum-cli; it occurs
only when the entire model repository is cloned from the HF Hub.
  • Loading branch information
AlexKoff88 authored Feb 7, 2025
1 parent 06a95e4 commit 70a8bae
Showing 1 changed file with 25 additions and 12 deletions.
37 changes: 25 additions & 12 deletions tools/who_what_benchmark/whowhatbench/model_loaders.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,10 @@ def __init__(self, model, model_dir, model_type):
self.model_type = model_type

if model_type == "text" or model_type == "visual-text":
self.config = AutoConfig.from_pretrained(model_dir, trust_remote_code=True)
try:
self.config = AutoConfig.from_pretrained(model_dir, trust_remote_code=True)
except Exception:
self.config = AutoConfig.from_pretrained(model_dir)
elif model_type == "text-to-image":
self.config = DiffusionPipeline.load_config(
model_dir, trust_remote_code=True)
Expand Down Expand Up @@ -101,17 +104,27 @@ def load_text_model(
model = OVModelForCausalLM.from_pretrained(
model_id, trust_remote_code=True, device=device, ov_config=ov_config
)
except ValueError:
config = AutoConfig.from_pretrained(
model_id, trust_remote_code=True)
model = OVModelForCausalLM.from_pretrained(
model_id,
config=config,
trust_remote_code=True,
use_cache=True,
device=device,
ov_config=ov_config,
)
except Exception:
try:
config = AutoConfig.from_pretrained(
model_id, trust_remote_code=True)
model = OVModelForCausalLM.from_pretrained(
model_id,
config=config,
trust_remote_code=True,
use_cache=True,
device=device,
ov_config=ov_config,
)
except Exception:
config = AutoConfig.from_pretrained(model_id)
model = OVModelForCausalLM.from_pretrained(
model_id,
config=config,
use_cache=True,
device=device,
ov_config=ov_config,
)

return model

Expand Down

0 comments on commit 70a8bae

Please sign in to comment.