Merge pull request #7 from RomiconEZ/feature/sycophancy-test #8
GitHub Actions / JUnit Test Report
failed
Sep 11, 2024 in 0s
2 tests run, 0 passed, 0 skipped, 2 failed.
Annotations
Check failure on line 22 in tests/test_llamator.py
github-actions / JUnit Test Report
test_llamator.test_openaiclient
openai.OpenAIError: The api_key client option must be set either by passing api_key to the client or by setting the OPENAI_API_KEY environment variable
Raw output
    def test_openaiclient():
        """Test of the OpenAI client."""
        # Reload the required modules
        import llamator.client.specific_chat_clients
        import llamator.main
        importlib.reload(llamator.client.specific_chat_clients)
        importlib.reload(llamator.main)
        from llamator.client.specific_chat_clients import ClientOpenAI
        api_key = os.getenv("OPENAI_CLIENT_API_KEY")
        base_url = os.getenv("OPENAI_CLIENT_BASEURL")
        model = os.getenv("OPENAI_CLIENT_MODEL")
>       attack_model = ClientOpenAI(
            api_key=api_key,
            base_url=base_url,
            model=model,
            temperature=0.1,
            system_prompts=["You are a strong model."],
        )
tests/test_llamator.py:22:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
src/llamator/client/specific_chat_clients.py:186: in __init__
    self.client = OpenAI(api_key=api_key, base_url=base_url)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self = <openai.OpenAI object at 0x7f4649c68290>
    def __init__(
        self,
        *,
        api_key: str | None = None,
        organization: str | None = None,
        base_url: str | httpx.URL | None = None,
        timeout: Union[float, Timeout, None, NotGiven] = NOT_GIVEN,
        max_retries: int = DEFAULT_MAX_RETRIES,
        default_headers: Mapping[str, str] | None = None,
        default_query: Mapping[str, object] | None = None,
        # Configure a custom httpx client. See the [httpx documentation](https://www.python-httpx.org/api/#client) for more details.
        http_client: httpx.Client | None = None,
        # Enable or disable schema validation for data returned by the API.
        # When enabled an error APIResponseValidationError is raised
        # if the API responds with invalid data for the expected schema.
        #
        # This parameter may be removed or changed in the future.
        # If you rely on this feature, please open a GitHub issue
        # outlining your use-case to help us decide if it should be
        # part of our public interface in the future.
        _strict_response_validation: bool = False,
    ) -> None:
        """Construct a new synchronous openai client instance.
        This automatically infers the following arguments from their corresponding environment variables if they are not provided:
        - `api_key` from `OPENAI_API_KEY`
        - `organization` from `OPENAI_ORG_ID`
        """
        if api_key is None:
            api_key = os.environ.get("OPENAI_API_KEY")
        if api_key is None:
>           raise OpenAIError(
                "The api_key client option must be set either by passing api_key to the client or by setting the OPENAI_API_KEY environment variable"
            )
E openai.OpenAIError: The api_key client option must be set either by passing api_key to the client or by setting the OPENAI_API_KEY environment variable
/opt/hostedtoolcache/Python/3.11.9/x64/lib/python3.11/site-packages/openai/_client.py:92: OpenAIError
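The failure above means the CI job never exports OPENAI_CLIENT_API_KEY, so ClientOpenAI forwards api_key=None to the OpenAI constructor, which then looks for OPENAI_API_KEY and raises. A minimal sketch of one way to guard the test, assuming pytest is the runner and keeping the env var names the test already reads (everything else here is illustrative, not the repository's actual fix):

import os

import pytest


# Skip the live OpenAI test instead of erroring when no credentials are configured in CI.
@pytest.mark.skipif(
    not os.getenv("OPENAI_CLIENT_API_KEY"),
    reason="OPENAI_CLIENT_API_KEY is not set; skipping live OpenAI client test",
)
def test_openaiclient():
    """Test of the OpenAI client; runs only when credentials are available."""
    from llamator.client.specific_chat_clients import ClientOpenAI

    attack_model = ClientOpenAI(
        api_key=os.getenv("OPENAI_CLIENT_API_KEY"),
        base_url=os.getenv("OPENAI_CLIENT_BASEURL"),
        model=os.getenv("OPENAI_CLIENT_MODEL"),
        temperature=0.1,
        system_prompts=["You are a strong model."],
    )
    assert attack_model is not None

Alternatively, the workflow could pass the key from repository secrets (e.g. an env entry like OPENAI_CLIENT_API_KEY: ${{ secrets.OPENAI_CLIENT_API_KEY }}, secret name assumed), which would let the test run as written.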
Check failure on line 81 in tests/test_llamator.py
github-actions / JUnit Test Report
test_llamator.test_langchainclient_yandegpt
TypeError: str expected, not NoneType
Raw output
    def test_langchainclient_yandegpt():
        """Test of the LangChain client using Yandex GPT."""
        # Load environment variables from the .env file
        load_dotenv()
        # Reload the required modules
        import llamator.client.specific_chat_clients
        import llamator.main
        importlib.reload(llamator.client.specific_chat_clients)
        importlib.reload(llamator.main)
        # Read the variables from the environment
        yc_api_key = os.getenv("YC_API_KEY")
        folder_ID = os.getenv("FOLDER_ID")
        # Set the API key in the environment
>       os.environ["YC_API_KEY"] = yc_api_key
tests/test_llamator.py:81:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
<frozen os>:684: in __setitem__
???
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
value = None
> ???
E TypeError: str expected, not NoneType
<frozen os>:758: TypeError
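This second failure has the same root cause in a different spot: YC_API_KEY is not defined in the CI environment, so os.getenv returns None, and os.environ only accepts strings. A minimal sketch of a guard, assuming pytest and the same env var names as the test (illustrative only, not the repository's actual fix):

import os

import pytest
from dotenv import load_dotenv


def test_langchainclient_yandegpt():
    """Test of the LangChain client using Yandex GPT; skips without credentials."""
    load_dotenv()
    yc_api_key = os.getenv("YC_API_KEY")
    folder_ID = os.getenv("FOLDER_ID")
    # os.environ values must be str; assigning None is exactly what raised
    # "TypeError: str expected, not NoneType" above.
    if yc_api_key is None or folder_ID is None:
        pytest.skip("YC_API_KEY / FOLDER_ID are not set; skipping live YandexGPT test")
    os.environ["YC_API_KEY"] = yc_api_key
    os.environ["FOLDER_ID"] = folder_ID
    # The rest of the original test body would continue here unchanged.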