Skip to content

Commit

Permalink
Fix vLLM import error in tests
Browse files · Browse the repository at this point in the history
  • Loading branch information
rlouf committed Feb 22, 2025
1 parent 524c6b1 commit c3a1155
Showing 1 changed file with 6 additions and 6 deletions.
12 changes: 6 additions & 6 deletions tests/generate/test_integration_vllm.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,19 +4,19 @@
import pytest
import torch
from pydantic import BaseModel, constr
from vllm import LLM

try:
from vllm.sampling_params import SamplingParams
except ImportError:
pass

import outlines
import outlines.generate as generate
import outlines.grammars as grammars
import outlines.models as models
import outlines.samplers as samplers

try:
from vllm import LLM
from vllm.sampling_params import SamplingParams
except ImportError:
pass

# Module-level marker: skip every test in this file when no CUDA device is
# available, since vLLM can only execute models on a GPU.
pytestmark = pytest.mark.skipif(
    not torch.cuda.is_available(), reason="vLLM models can only be run on GPU."
)
Expand Down

0 comments on commit c3a1155

Please sign in to comment.