```python
import os

from literalai import LiteralClient
from langchain_openai import ChatOpenAI
from langchain.schema.runnable.config import RunnableConfig
from langchain.schema import StrOutputParser
from langchain.prompts import ChatPromptTemplate

literal_client = LiteralClient(api_key=os.getenv("LITERAL_API_KEY"))
cb = literal_client.langchain_callback()

prompt = ChatPromptTemplate.from_messages(
    [("human", "Tell me a short joke about {topic}")]
)
model = ChatOpenAI(streaming=True)
runnable = prompt | model | StrOutputParser()

res = runnable.invoke(
    {"topic": "ice cream"},
    config=RunnableConfig(callbacks=[cb], run_name="joke"),
)
```
```python
import os

from literalai import LiteralClient

literal_client = LiteralClient(api_key=os.getenv("LITERAL_API_KEY"))

with literal_client.thread(name="Langchain example") as thread:
    cb = literal_client.langchain_callback()
    # Call your Langchain agent here
```
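As a minimal sketch of what could go in place of the placeholder comment, the joke chain from the first example can be invoked inside the thread, passing the callback through `RunnableConfig` so the run is logged under that thread (the specific chain, topic, and `run_name` are reused from the first snippet for illustration, not required by the API):

```python
import os

from literalai import LiteralClient
from langchain_openai import ChatOpenAI
from langchain.schema.runnable.config import RunnableConfig
from langchain.schema import StrOutputParser
from langchain.prompts import ChatPromptTemplate

literal_client = LiteralClient(api_key=os.getenv("LITERAL_API_KEY"))

# Reuse the chain from the first example; any LangChain runnable works here.
prompt = ChatPromptTemplate.from_messages(
    [("human", "Tell me a short joke about {topic}")]
)
model = ChatOpenAI(streaming=True)
runnable = prompt | model | StrOutputParser()

with literal_client.thread(name="Langchain example") as thread:
    cb = literal_client.langchain_callback()
    # Pass the callback so this run is attached to the thread
    res = runnable.invoke(
        {"topic": "ice cream"},
        config=RunnableConfig(callbacks=[cb], run_name="joke"),
    )
```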