Skip to content

Commit

Permalink
fix: changed my mind, the react agent sucks for this use case
Browse files Browse the repository at this point in the history
  • Loading branch information
j4ndrw committed Jan 31, 2024
1 parent b236186 commit 6d4d261
Show file tree
Hide file tree
Showing 7 changed files with 65 additions and 26 deletions.
2 changes: 1 addition & 1 deletion codebase_indexer/api/models.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,4 +10,4 @@ class Meta(BaseModel):
ollama_inference_model: str | None = None


Command = Literal["test", "search", "review", "new_conversation", "general_chat"]
Command = Literal["test", "search", "review", "forget_previous_conversation", "general_chat"]
2 changes: 1 addition & 1 deletion codebase_indexer/cli/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -51,7 +51,7 @@ def cli(args: Args):
sources = RAG.extract_sources_to_search_in(llm, question, sources)

for command in commands:
if command == "new_conversation":
if command == "forget_previous_conversation":
sources = []
continue

Expand Down
2 changes: 1 addition & 1 deletion codebase_indexer/constants.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@
"test",
"search",
"review",
"new_conversation",
"forget_previous_conversation",
"general_chat",
]

Expand Down
51 changes: 31 additions & 20 deletions codebase_indexer/rag/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,8 +21,9 @@
from codebase_indexer.api.models import Command
from codebase_indexer.constants import (DEFAULT_OLLAMA_INFERENCE_MODEL,
MAX_SOURCES_WINDOW, OLLAMA_BASE_URL)
from codebase_indexer.rag.agents import create_agent, create_tools
from codebase_indexer.rag.chains import create_search_request_removal_chain
from codebase_indexer.rag.agents import create_tools
from codebase_indexer.rag.chains import (create_query_expansion_chain,
create_search_request_removal_chain)
from codebase_indexer.rag.prompts import (
DEFAULT_CONVERSATIONAL_RETRIEVAL_CHAIN_PROMPT,
REVIEW_CONVERSATIONAL_RETRIEVAL_CHAIN_PROMPT,
Expand Down Expand Up @@ -138,18 +139,25 @@ def create():
def extract_commands(llm: ChatOllama, question: str) -> list[Command]:
command_extraction_tool, _ = create_tools(llm)

expanded_question = (
strip_generated_text(
create_query_expansion_chain(llm)
.invoke({"question": question})
.get("text", "")
)
or question
)

extracted_commands = [
*filter(
lambda x: x.lower() != "n/a", # type: ignore
map(
lambda x: x.strip(),
(
strip_generated_text(
create_agent(llm, [command_extraction_tool])
.invoke({"input": question})
.get("output", {})
command_extraction_tool
.invoke({"question": expanded_question })
.get("text", "")
.removeprefix("Answer:")
)
or ""
).split(", "),
Expand All @@ -167,24 +175,27 @@ def extract_commands(llm: ChatOllama, question: str) -> list[Command]:
def extract_sources_to_search_in(
llm: ChatOllama, question: str, sources: list[str] | None = None
) -> list[str]:
try:
_, file_path_extraction_tool = create_tools(llm)
sources = sources or []
_, file_path_extraction_tool = create_tools(llm)
sources = sources or []


file_paths = strip_generated_text(
create_agent(llm, [file_path_extraction_tool])
.invoke({"input": question})
.get("output", {})
expanded_question = (
strip_generated_text(
create_query_expansion_chain(llm)
.invoke({"question": question})
.get("text", "")
.removeprefix("Answer:")
)
if file_paths.lower() != "n/a":
sources.extend([*map(lambda x: x.strip(), file_paths.split(","))])
or question
)

return sources
except ValueError:
return sources or []
file_paths = strip_generated_text(
file_path_extraction_tool.invoke({"question": expanded_question}).get(
"text", ""
)
)
if file_paths.lower() != "n/a":
sources.extend([*map(lambda x: x.strip(), file_paths.split(","))])

return sources

@staticmethod
def filter_on_sources(
Expand Down
4 changes: 2 additions & 2 deletions codebase_indexer/rag/agents.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,10 +13,10 @@ def create_tools(llm: ChatOllama):
file_path_extractor_chain = LLMChain(llm=llm, prompt=FILE_PATH_EXTRACTOR_PROMPT)

command_tool = Tool(
name="Classify command",
name="Extract command",
func=command_chain.invoke,
return_direct=True,
description=f"Useful to figure out what command out of the following relates best to the question: {COMMANDS}.",
description=f"Useful to figure out what command out of the following relates best to the question.",
)
file_path_extractor_tool = Tool(
name="Extract file path",
Expand Down
3 changes: 2 additions & 1 deletion codebase_indexer/rag/prompts/command_extractor.txt
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
Only answer the question with the most suitable commands, out of: {commands}.
Only answer the question with the most suitable commands.
The available commands are: {commands}

If only one command suits the question, the answer should look like this: "command_1".
If multiple commands suit the question, the answer should look like this: "command_1, command_2, command_3".
Expand Down
27 changes: 27 additions & 0 deletions codebase_indexer/rag/prompts/file_path_extractor.txt
Original file line number Diff line number Diff line change
@@ -1,5 +1,32 @@
You are a file path extractor. You reply with only the file paths provided in the question, without additional details.
In your answer, delimit the paths by commas.

A file path is any string that looks like this: path/to/file

EXAMPLE START
Question: Can you review my implementation from those files: path/to/file/a.js, path/to/file/b.rs and path/to/file/c.py
Answer: path/to/file/a.js, path/to/file/b.rs, path/to/file/c.py
EXAMPLE END

EXAMPLE START
Question: Can you review my implementation from the path/to/file/a.js, path/to/file/b.rs and path/to/file/c.py files?
Answer: path/to/file/a.js, path/to/file/b.rs, path/to/file/c.py
EXAMPLE END

EXAMPLE START
Question: Can you review my implementation from this file: path/to/file/a.js
Answer: path/to/file/a.js
EXAMPLE END

EXAMPLE START
Question: Can you review my implementation from path/to/file/a.js?
Answer: path/to/file/a.js
EXAMPLE END

EXAMPLE START
Question: How do I fix this bug?
Answer: N/A
EXAMPLE END

Question: {question}
Answer:

0 comments on commit 6d4d261

Please sign in to comment.