These are notes summarizing the Deeplearning.ai course content.
from langchain.agents import tool
@tool  # the @tool decorator makes it easy to convert a function into an OpenAI function definition
def search(query: str) -> str:
    """Search for weather online"""
    return "42f"
# The following attributes are generated automatically:
search.name
'search'
search.description
'search(query: str) -> str - Search for weather online'
search.args
{'query': {'title': 'Query', 'type': 'string'}}
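Since the point of the decorator is easy conversion to the OpenAI function format, the generated metadata above can be turned into a function definition right away. A quick check using the same helper that appears further down in these notes (output omitted here):
from langchain.tools.render import format_tool_to_openai_function

format_tool_to_openai_function(search)  # the name, description, and args above become the function schema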
from pydantic import BaseModel, Field
class SearchInput(BaseModel):
    query: str = Field(description="Thing to search for")
@tool(args_schema=SearchInput)
def search(query: str) -> str:
"""Search for the weather online."""
return "42f"
search.args
{'query': {'title': 'Query',
'description': 'Thing to search for',
'type': 'string'}}
search.run("sf")
'42f'
import requests
from pydantic import BaseModel, Field
import datetime
# Define the input schema
class OpenMeteoInput(BaseModel):
    latitude: float = Field(..., description="Latitude of the location to fetch weather data for")
    longitude: float = Field(..., description="Longitude of the location to fetch weather data for")
@tool(args_schema=OpenMeteoInput)
def get_current_temperature(latitude: float, longitude: float) -> dict:
"""Fetch current temperature for given coordinates."""
BASE_URL = "https://api.open-meteo.com/v1/forecast"
# Parameters for the request
params = {
'latitude': latitude,
'longitude': longitude,
'hourly': 'temperature_2m',
'forecast_days': 1,
}
# Make the request
response = requests.get(BASE_URL, params=params)
if response.status_code == 200:
results = response.json()
else:
raise Exception(f"API Request failed with status code: {response.status_code}")
current_utc_time = datetime.datetime.utcnow()
time_list = [datetime.datetime.fromisoformat(time_str.replace('Z', '+00:00')) for time_str in results['hourly']['time']]
temperature_list = results['hourly']['temperature_2m']
closest_time_index = min(range(len(time_list)), key=lambda i: abs(time_list[i] - current_utc_time))
current_temperature = temperature_list[closest_time_index]
return f'The current temperature is {current_temperature}°C'
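The only slightly non-obvious step above is the closest-timestamp lookup: take the index whose absolute time difference from "now" is smallest. A standalone toy version of the same idiom, with made-up data (not from the lecture):
import datetime

now = datetime.datetime(2024, 1, 1, 10, 20)
hourly_times = [datetime.datetime(2024, 1, 1, h) for h in range(24)]  # 00:00 ... 23:00
hourly_temps = [float(h) for h in range(24)]                          # dummy readings

closest = min(range(len(hourly_times)), key=lambda i: abs(hourly_times[i] - now))
hourly_temps[closest]  # 10.0 -> the reading nearest to 10:20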
from langchain.tools.render import format_tool_to_openai_function
format_tool_to_openai_function(get_current_temperature)
# Conversion result
{'name': 'get_current_temperature',
'description': 'get_current_temperature(latitude: float, longitude: float) -> dict - Fetch current temperature for given coordinates.',
'parameters': {'title': 'OpenMeteoInput',
'type': 'object',
'properties': {'latitude': {'title': 'Latitude',
'description': 'Latitude of the location to fetch weather data for',
'type': 'number'},
'longitude': {'title': 'Longitude',
'description': 'Longitude of the location to fetch weather data for',
'type': 'number'}},
'required': ['latitude', 'longitude']}}
get_current_temperature({"latitude": 13, "longitude": 14})
# Function execution result
'The current temperature is 24.7°C'
import wikipedia
@tool
def search_wikipedia(query: str) -> str:
""" Run Wikipedia search and get page summaries."""
page_titles = wikipedia.search(query)
summaries = []
for page_title in page_titles[: 3]:
try:
wiki_page = wikipedia.page(title=page_itle, auto_suggenst=False)
summaries.append(f"Page: {page_title} \nSummary: {wiki_page.summary}")
except(
self.wiki_client.exceptions.PageError,
self.wiki_client.exceptions.DismbiuationError,
):
pass
if not summaries:
return "No good Wikipedia Search Result was found"
return "\n\n".join(summaries)
search_wikipedia.name
'search_wikipedia'
search_wikipedia.description
'search_wikipedia(query: str) -> str - Run Wikipedia search and get page summaries.'
format_tool_to_openai_function(search_wikipedia)
{'name': 'search_wikipedia',
'description': 'search_wikipedia(query: str) -> str - Run Wikipedia search and get page summaries.',
'parameters': {'title': 'search_wikipediaSchemaSchema',
'type': 'object',
'properties': {'query': {'title': 'Query', 'type': 'string'}},
'required': ['query']}}
The functions we want to use are usually wrapped behind an API, and APIs are commonly described with an OpenAPI Specification (OAS).
An OpenAPI spec can be converted into OpenAI function calls, which makes the API much easier to use.
Example OpenAPI spec text:
text = """
{
"openapi": "3.0.0",
"info": {
"version": "1.0.0",
"title": "Swagger Petstore",
"license": {
"name": "MIT"
}
},
"servers": [
{
"url": "http://petstore.swagger.io/v1"
}
],
"paths": {
"/pets": {
"get": {
"summary": "List all pets",
"operationId": "listPets",
"tags": [
"pets"
],
"parameters": [
{
"name": "limit",
"in": "query",
"description": "How many items to return at one time (max 100)",
"required": false,
"schema": {
"type": "integer",
"maximum": 100,
"format": "int32"
}
}
],
"responses": {
"200": {
"description": "A paged array of pets",
"headers": {
"x-next": {
"description": "A link to the next page of responses",
"schema": {
"type": "string"
}
}
},
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/Pets"
}
}
}
},
"default": {
"description": "unexpected error",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/Error"
}
}
}
}
}
},
"post": {
"summary": "Create a pet",
"operationId": "createPets",
"tags": [
"pets"
],
"responses": {
"201": {
"description": "Null response"
},
"default": {
"description": "unexpected error",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/Error"
}
}
}
}
}
}
},
"/pets/{petId}": {
"get": {
"summary": "Info for a specific pet",
"operationId": "showPetById",
"tags": [
"pets"
],
"parameters": [
{
"name": "petId",
"in": "path",
"required": true,
"description": "The id of the pet to retrieve",
"schema": {
"type": "string"
}
}
],
"responses": {
"200": {
"description": "Expected response to a valid request",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/Pet"
}
}
}
},
"default": {
"description": "unexpected error",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/Error"
}
}
}
}
}
}
}
},
"components": {
"schemas": {
"Pet": {
"type": "object",
"required": [
"id",
"name"
],
"properties": {
"id": {
"type": "integer",
"format": "int64"
},
"name": {
"type": "string"
},
"tag": {
"type": "string"
}
}
},
"Pets": {
"type": "array",
"maxItems": 100,
"items": {
"$ref": "#/components/schemas/Pet"
}
},
"Error": {
"type": "object",
"required": [
"code",
"message"
],
"properties": {
"code": {
"type": "integer",
"format": "int32"
},
"message": {
"type": "string"
}
}
}
}
}
}
"""
Convert the OpenAPI spec text above into OpenAI functions:
from langchain.chains.openai_functions.openapi import openapi_spec_to_openai_fn
from langchain.utilities.openapi import OpenAPISpec
spec = OpenAPISpec.from_text(text)
pet_openai_functions, pet_callables = openapi_spec_to_openai_fn(spec)
pet_openai_functions  # contains three functions
[{'name': 'listPets',
'description': 'List all pets',
'parameters': {'type': 'object',
'properties': {'params': {'type': 'object',
'properties': {'limit': {'type': 'integer',
'maximum': 100.0,
'schema_format': 'int32',
'description': 'How many items to return at one time (max 100)'}},
'required': []}}}},
{'name': 'createPets',
'description': 'Create a pet',
'parameters': {'type': 'object', 'properties': {}}},
{'name': 'showPetById',
'description': 'Info for a specific pet',
'parameters': {'type': 'object',
'properties': {'path_params': {'type': 'object',
'properties': {'petId': {'type': 'string',
'description': 'The id of the pet to retrieve'}},
'required': ['petId']}}}}]
from langchain.chat_models import ChatOpenAI
model = ChatOpenAI(temperature=0).bind(functions=pet_openai_functions)
model.invoke("what are three pets names")
# Result
AIMessage(content='', additional_kwargs={'function_call': {'name': 'listPets', 'arguments': '{\n "params": {\n "limit": 3\n }\n}'}})
model.invoke("tell me about pet with id 42")
# Result
AIMessage(content='', additional_kwargs={'function_call': {'name': 'showPetById', 'arguments': '{\n "path_params": {\n "petId": "42"\n }\n}'}})
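The function_call above only says which operation to hit and with what arguments; nothing has actually been executed. (The second value returned earlier, pet_callables, is presumably the hook for executing these calls, but the lecture does not use it.) Below is a minimal manual sketch, my own addition, of dispatching such a call with requests; the base URL comes from the "servers" entry in the example spec and is a demo endpoint that may not return real data:
import json

msg = model.invoke("tell me about pet with id 42")
call = msg.additional_kwargs["function_call"]
args = json.loads(call["arguments"])  # the arguments arrive as a JSON string
BASE_URL = "http://petstore.swagger.io/v1"  # from the "servers" entry in the spec above
if call["name"] == "showPetById":
    resp = requests.get(f"{BASE_URL}/pets/{args['path_params']['petId']}")
elif call["name"] == "listPets":
    resp = requests.get(f"{BASE_URL}/pets", params=args.get("params", {}))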
functions = [
    format_tool_to_openai_function(f) for f in [
        search_wikipedia, get_current_temperature
    ]
]
model = ChatOpenAI(temperature=0).bind(functions=functions)
model.invoke("what is the weather in sf right now")
model.invoke("what is langchain")
from langchain.prompts import ChatPromptTemplate
prompt = ChatPromptTemplate.from_messages([
    ("system", "You are helpful but sassy assistant"),
    ("user", "{input}"),
])
chain = prompt | model
chain.invoke({"input": "what is the weather in sf right now"})
# Result
AIMessage(content='', additional_kwargs={'function_call': {'name': 'get_current_temperature', 'arguments': '{\n "latitude": 37.7749,\n "longitude": -122.4194\n}'}})
model.invoke("what is langchain")
# Result
AIMessage(content='', additional_kwargs={'function_call': {'name': 'search_wikipedia', 'arguments': '{\n "query": "langchain"\n}'}})
from langchain.agents.output_parsers import OpenAIFunctionsAgentOutputParser
chain = prompt | model | OpenAIFunctionsAgentOutputParser()
result = chain.invoke({"input": "what is the weather in sf right now"})
type(result)
# Result
langchain.schema.agent.AgentActionMessageLog
result.tool  # the name of the function to call
# Result
'get_current_temperature'
result.tool_input  # the arguments to pass to the function
# Result
{'latitude': 37.7749, 'longitude': -122.4194}
get_current_temperature(result.tool_input)  # run the function with the parsed arguments
# Result
'The current temperature is 11.3°C'
result = chain.invoke({"input": "hi!"})
type(result)
# Result
langchain.schema.agent.AgentFinish
result.return_values
# Result
{'output': 'Hello! How can I assist you today?'}
from langchain.schema.agent import AgentFinish
def route(result):
    if isinstance(result, AgentFinish):  # the model decided not to call a function -> return the final content
        return result.return_values['output']
    else:  # the model decided to call a function -> look up the tool by name and run it with the parsed arguments
        tools = {
            "search_wikipedia": search_wikipedia,
            "get_current_temperature": get_current_temperature,
        }
        return tools[result.tool].run(result.tool_input)
# Accordingly, everything can be chained together like this:
chain = prompt | model | OpenAIFunctionsAgentOutputParser() | route
result = chain.invoke({"input": "What is the weather in san francisco right now?"})
# Result
'The current temperature is 11.3°C'
result = chain.invoke({"input": "What is langchain?"})
# Result
'Page: LangChain\nSummary: LangChain is a framework designed to simplify the creation of applications using large language models (LLM ...
chain.invoke({"input": "hi!"})
# Result
'Hello! How can I assist you today?'