import json
import asyncio
from openai import AsyncOpenAI
from openai.types.chat import *
from literalai import LiteralClient
from dotenv import load_dotenv
# Load credentials (OPENAI_API_KEY, LITERAL_API_KEY, ...) from a .env file.
load_dotenv()
client = AsyncOpenAI()
lc = LiteralClient()
# Patch the OpenAI client so Literal AI automatically logs every completion call.
lc.instrument_openai()
# Upper bound on model/tool round-trips a single run() may perform.
MAX_ITER = 5
@lc.step(type="tool", name="get_current_weather")
def get_current_weather(location, unit=None):
    """Return mock current-weather data for *location* as a JSON string.

    Args:
        location: City and state, e.g. "San Francisco, CA".
        unit: Temperature unit; falls back to "Fahrenheit" when absent.

    Returns:
        A JSON-encoded dict with location, temperature, unit and forecast.
    """
    # Fix: default was misspelled "Farenheit"; the tool schema below advertises
    # "fahrenheit", and this string is returned to the model in the payload.
    unit = unit or "Fahrenheit"
    weather_info = {
        "location": location,
        "temperature": "72",  # hard-coded demo value
        "unit": unit,
        "forecast": ["sunny", "windy"],
    }
    return json.dumps(weather_info)
# OpenAI function-calling (tool) schema advertised to the model; describes the
# get_current_weather tool above using JSON Schema, per the Chat Completions API.
tools = [
    {
        "type": "function",
        "function": {
            "name": "get_current_weather",
            "description": "Get the current weather in a given location",
            "parameters": {
                "type": "object",
                "properties": {
                    "location": {
                        "type": "string",
                        "description": "The city and state, e.g. San Francisco, CA",
                    },
                    "unit": {"type": "string", "enum": ["celsius", "fahrenheit"]},
                },
                "required": ["location"],
            },
        },
    }
]
@lc.step(type="run")
async def run(message_history):
    """Drive the chat loop, executing requested tools until the model answers.

    Repeatedly calls the Chat Completions API; whenever the model requests
    tool calls, runs them and appends their results to the history, stopping
    when the model replies without tool calls or MAX_ITER rounds are reached.

    Args:
        message_history: Mutable list of chat messages; extended in place.

    Returns:
        The same list, with assistant and tool messages appended.
    """
    # Loop-invariant request settings, hoisted out of the loop.
    settings = {
        "model": "gpt-4-turbo-preview",
        "tools": tools,
        "tool_choice": "auto",
    }
    tool_called = True
    cur_iter = 0
    # Fix: the condition was `tool_called and MAX_ITER`, which is always truthy
    # on the right-hand side, so the iteration cap was never enforced.
    while tool_called and cur_iter < MAX_ITER:
        response: ChatCompletion = await client.chat.completions.create(
            messages=message_history, **settings
        )
        message: ChatCompletionMessage = response.choices[0].message
        message_history.append(message)
        if not message.tool_calls:
            tool_called = False
        for tool_call in message.tool_calls or []:
            if tool_call.type == "function":
                func = globals()[tool_call.function.name]
                # Fix: arguments arrive as a JSON string; the original passed
                # the raw string as the first positional parameter. Parse and
                # unpack as keyword arguments instead.
                arguments = json.loads(tool_call.function.arguments or "{}")
                res = func(**arguments)
                message_history.append({
                    "role": "tool",
                    "name": tool_call.function.name,
                    "content": res,
                    "tool_call_id": tool_call.id,
                })
        cur_iter += 1
    return message_history
if __name__ == "__main__":
    # Group both conversational turns under a single Literal AI thread.
    with lc.thread(name="Thread Example") as thread:
        message_history = [
            {"role": "system", "content": "You are a helpful assistant."},
            {"role": "user", "content": "what's the weather in sf"}
        ]
        # First turn: log the user message, run the agent loop, log the reply.
        # NOTE: run() appends a ChatCompletionMessage object last, hence the
        # attribute access (.content) rather than dict indexing below.
        lc.message(content=message_history[-1]["content"], name="user", type="user_message")
        message_history = asyncio.run(run(message_history))
        lc.message(content=message_history[-1].content, name="assistant", type="assistant_message")
        # Second turn reuses the accumulated history so the model has context.
        new_message = [{"role": "user", "content": "what's the weather in paris"}]
        message_history += new_message
        lc.message(content=message_history[-1]["content"], name="user", type="user_message")
        message_history = asyncio.run(run(message_history))
        lc.message(content=message_history[-1].content, name="assistant", type="assistant_message")
    # Send any buffered Literal AI events before the process exits.
    lc.flush_and_stop()