add agent examples (#33)
hwchase17 authored Oct 12, 2023
1 parent 906e62e commit 06fec48
Showing 5 changed files with 194 additions and 4 deletions.
1 change: 1 addition & 0 deletions .github/workflows/langserve_ci.yml
@@ -11,6 +11,7 @@ on:
- '.github/workflows/_test.yml'
- '.github/workflows/langserve_ci.yml'
- 'langserve/**'
+- 'examples/**'
- 'pyproject.toml'
- 'Makefile'
workflow_dispatch: # Allows triggering the workflow manually from the GitHub UI
105 changes: 105 additions & 0 deletions examples/agent/client.ipynb
@@ -0,0 +1,105 @@
{
"cells": [
{
"cell_type": "markdown",
"metadata": {},
"source": [
"# Client\n",
"\n",
"Demo of a client interacting with a remote agent. "
]
},
{
"cell_type": "code",
"execution_count": 3,
"metadata": {
"tags": []
},
"outputs": [],
"source": [
"from langserve import RemoteRunnable\n",
"\n",
"remote_runnable = RemoteRunnable(\"http://localhost:8000/\")"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"Remote runnable has the same interface as local runnables"
]
},
{
"cell_type": "code",
"execution_count": 4,
"metadata": {
"tags": []
},
"outputs": [
{
"data": {
"text/plain": [
"{'output': 'Hello! How can I assist you today?'}"
]
},
"execution_count": 4,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"await remote_runnable.ainvoke({\"input\": \"hi!\"})"
]
},
{
"cell_type": "code",
"execution_count": 5,
"metadata": {
"tags": []
},
"outputs": [
{
"data": {
"text/plain": [
"{'output': 'Eugene thinks that cats like fish.'}"
]
},
"execution_count": 5,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"remote_runnable.invoke({\"input\": \"what does eugene think of cats?\"})"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": []
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3 (ipykernel)",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.10.1"
}
},
"nbformat": 4,
"nbformat_minor": 4
}
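Because RemoteRunnable implements the standard Runnable interface, the notebook's calls translate directly into a plain script. A minimal sketch, assuming the agent server below is running on localhost:8000 (the batch call comes from the generic Runnable interface and is not specific to this example):

from langserve import RemoteRunnable

remote_runnable = RemoteRunnable("http://localhost:8000/")

# Synchronous counterpart of the notebook's ainvoke call.
print(remote_runnable.invoke({"input": "hi!"}))

# Runnables also support batching; each dict is one independent input.
print(
    remote_runnable.batch(
        [
            {"input": "what does eugene think of cats?"},
            {"input": "what do dogs like?"},
        ]
    )
)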
82 changes: 82 additions & 0 deletions examples/agent/server.py
@@ -0,0 +1,82 @@
#!/usr/bin/env python
"""Example LangChain server exposes an agent with a retriever tool."""
from fastapi import FastAPI
from langchain.agents import AgentExecutor, tool
from langchain.agents.format_scratchpad import format_to_openai_functions
from langchain.agents.output_parsers import OpenAIFunctionsAgentOutputParser
from langchain.chat_models import ChatOpenAI
from langchain.embeddings import OpenAIEmbeddings
from langchain.prompts import ChatPromptTemplate, MessagesPlaceholder
from langchain.tools.render import format_tool_to_openai_function
from langchain.vectorstores import FAISS
from pydantic import BaseModel

from langserve import add_routes

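# Seed a tiny in-memory vector store; its retriever backs the agent's only tool.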
vectorstore = FAISS.from_texts(
["cats like fish", "dogs like sticks"], embedding=OpenAIEmbeddings()
)
retriever = vectorstore.as_retriever()


@tool
def get_eugene_thoughts(query: str) -> list:
"""Returns Eugene's thoughts on a topic."""
return retriever.get_relevant_documents(query)


tools = [get_eugene_thoughts]

prompt = ChatPromptTemplate.from_messages(
[
("system", "You are a helpful assistant."),
("user", "{input}"),
MessagesPlaceholder(variable_name="agent_scratchpad"),
]
)

llm = ChatOpenAI()

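# Expose each tool to the model as an OpenAI function schema so the model can
# decide when to call it.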
llm_with_tools = llm.bind(functions=[format_tool_to_openai_function(t) for t in tools])

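# The agent is an LCEL pipeline: map the incoming dict onto the prompt
# variables, call the function-calling model, then parse its reply into either
# an AgentAction (call a tool) or an AgentFinish (final answer).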
agent = (
{
"input": lambda x: x["input"],
"agent_scratchpad": lambda x: format_to_openai_functions(
x["intermediate_steps"]
),
}
| prompt
| llm_with_tools
| OpenAIFunctionsAgentOutputParser()
)

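# AgentExecutor runs the loop: invoke the agent, execute any chosen tool, feed
# the observation back via intermediate_steps, and repeat until the agent
# finishes.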
agent_executor = AgentExecutor(agent=agent, tools=tools)

app = FastAPI(
title="LangChain Server",
version="1.0",
description="Spin up a simple API server using LangChain's Runnable interfaces",
)


# Declare explicit input/output schemas because the current AgentExecutor
# does not expose usable schemas of its own.
class Input(BaseModel):
input: str


class Output(BaseModel):
output: str


# Adds routes to the app for using the agent executor under:
# /invoke
# /batch
# /stream
add_routes(app, agent_executor, input_type=Input, output_type=Output)

if __name__ == "__main__":
import uvicorn

uvicorn.run(app, host="localhost", port=8000)
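The routes registered above are plain HTTP endpoints, so the server can also be exercised without the langserve client. A hedged sketch using requests, assuming langserve's wire format of an "input" key in the request body and an "output" key in the response:

import requests

response = requests.post(
    "http://localhost:8000/invoke",
    # The inner dict must match the Input schema declared above.
    json={"input": {"input": "what does eugene think of cats?"}},
)
response.raise_for_status()
# The response's "output" field matches the Output schema.
print(response.json()["output"])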
6 changes: 4 additions & 2 deletions examples/conversational_retrieval_chain/client.ipynb
@@ -6,7 +6,7 @@
"source": [
"# Client\n",
"\n",
"Demo of a client interacting with a remote retriever. "
"Demo of a client interacting with a remote conversational retrieval chain. "
]
},
{
@@ -70,7 +70,9 @@
}
],
"source": [
"await remote_runnable.ainvoke({\"question\": \"what do cats like?\", \"chat_history\": [(\"hi\", \"hi\")]})"
"await remote_runnable.ainvoke(\n",
" {\"question\": \"what do cats like?\", \"chat_history\": [(\"hi\", \"hi\")]}\n",
")"
]
},
{
4 changes: 2 additions & 2 deletions examples/conversational_retrieval_chain/server.py
@@ -1,10 +1,10 @@
#!/usr/bin/env python
"""Example LangChain server exposes a conversational retrieval chain."""
from fastapi import FastAPI
-from langchain.embeddings import OpenAIEmbeddings
-from langchain.vectorstores import FAISS
from langchain.chains import ConversationalRetrievalChain
from langchain.chat_models import ChatOpenAI
+from langchain.embeddings import OpenAIEmbeddings
+from langchain.vectorstores import FAISS

from langserve import add_routes

Expand Down
