Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account-related emails.

Already on GitHub? Sign in to your account

Re-enable system prompt override #323

Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
15 changes: 2 additions & 13 deletions ols/src/query_helpers/docs_summarizer.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@
from ols.app.metrics import TokenMetricUpdater
from ols.app.models.models import RagChunk, SummarizerResponse
from ols.constants import RAG_CONTENT_LIMIT, GenericLLMParameters
from ols.customize import prompts, reranker
from ols.customize import reranker
from ols.src.prompts.prompt_generator import (
GeneratePrompt,
restructure_history,
Expand All @@ -31,7 +31,6 @@ def __init__(self, *args: Any, **kwargs: Any) -> None:
"""Initialize the QuestionValidator."""
super().__init__(*args, **kwargs)
self._prepare_llm()
self._get_system_prompt()
self.verbose = config.ols_config.logging_config.app_log_level == logging.DEBUG

def _prepare_llm(self) -> None:
Expand All @@ -45,16 +44,6 @@ def _prepare_llm(self) -> None:
self.provider, self.model, self.generic_llm_params, self.streaming
)

def _get_system_prompt(self) -> None:
"""Retrieve the system prompt."""
# use system prompt from config if available otherwise use
# default system prompt fine-tuned for the service
if config.ols_config.system_prompt is not None:
self.system_prompt = config.ols_config.system_prompt
else:
self.system_prompt = prompts.QUERY_SYSTEM_INSTRUCTION
logger.debug("System prompt: %s", self.system_prompt)

def _prepare_prompt(
self,
query: str,
Expand Down Expand Up @@ -123,7 +112,7 @@ def _prepare_prompt(
)

final_prompt, llm_input_values = GeneratePrompt(
query, rag_context, history, self.system_prompt
query, rag_context, history, self._system_prompt
).generate_prompt(self.model)

# Tokens-check: We trigger the computation of the token count
Expand Down
2 changes: 1 addition & 1 deletion tests/unit/query_helpers/test_docs_summarizer.py
Original file line number Diff line number Diff line change
Expand Up @@ -58,7 +58,7 @@ def test_if_system_prompt_was_updated():
summarizer = DocsSummarizer(llm_loader=mock_llm_loader(None))
# expected prompt was loaded during configuration phase
expected_prompt = config.ols_config.system_prompt
assert summarizer.system_prompt == expected_prompt
assert summarizer._system_prompt == expected_prompt


def test_docs_summarizer_streaming_parameter():
Expand Down
Loading