Install the OpenAI SDK:

```bash
pip install openai
```

Then add your OpenAI API key to the project's `.env` file:

```
OPENAI_API_KEY=your_api_key
```
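If your runtime does not already load `.env` for you, here is a minimal sketch for checking that the key is visible to Python. It assumes the `python-dotenv` package, which is an extra dependency not mentioned above:

```python
# Assumption: python-dotenv is installed (pip install python-dotenv).
# Loads .env into the process environment and verifies the key is present.
import os

from dotenv import load_dotenv

load_dotenv()
assert os.getenv("OPENAI_API_KEY"), "OPENAI_API_KEY is not set"
```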
On the page, place the `<Chat />` component and point its `postData` prop at the Python function, for example `<Chat postData="llm_chat_app" />`. The component posts the user's message as `prompt`, together with a `thread_id` that identifies the conversation, and renders whatever the function streams back. On the Python side, the function reads both values from `context.vars` and uses `yield` to return the model's output chunk by chunk, so the reply streams into the chat UI as it is generated.

Create the Python function that receives the request from the chat component and streams the model's response:
```python
import os

import morph
from morph import MorphGlobalContext
from openai import OpenAI


@morph.func
def llm_chat_app(context: MorphGlobalContext):
    client = OpenAI(api_key=os.getenv("OPENAI_API_KEY"))

    # Values sent by the <Chat /> component
    prompt = context.vars["prompt"]
    # thread_id can be used to identify the chat thread
    thread_id = context.vars["thread_id"]

    # chat: send the prompt to the model and stream the completion back
    messages = [{"role": "user", "content": prompt}]
    response = client.chat.completions.create(
        model="gpt-4o",
        messages=messages,
        stream=True,
    )
    for chunk in response:
        # The final chunk may arrive with no content, so skip None
        content = chunk.choices[0].delta.content
        if content is not None:
            yield content
```
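The example above sends only the latest `prompt` to the model. As a rough sketch of what the `thread_id` comment hints at, you could keep a message history keyed by `thread_id` so each request carries the earlier turns. The function name `llm_chat_app_with_history` and the in-memory `_histories` store below are hypothetical, not part of the original example:

```python
# A sketch, not the original example: per-thread chat history kept in memory.
# _histories and llm_chat_app_with_history are hypothetical names.
import os
from collections import defaultdict

import morph
from morph import MorphGlobalContext
from openai import OpenAI

# thread_id -> list of {"role": ..., "content": ...} messages
_histories = defaultdict(list)


@morph.func
def llm_chat_app_with_history(context: MorphGlobalContext):
    client = OpenAI(api_key=os.getenv("OPENAI_API_KEY"))
    prompt = context.vars["prompt"]
    thread_id = context.vars["thread_id"]

    # Add the new user turn to this thread's history before calling the model
    _histories[thread_id].append({"role": "user", "content": prompt})

    response = client.chat.completions.create(
        model="gpt-4o",
        messages=_histories[thread_id],
        stream=True,
    )

    assistant_text = ""
    for chunk in response:
        content = chunk.choices[0].delta.content
        if content is not None:
            assistant_text += content
            yield content

    # Remember the reply so the next turn in this thread has full context
    _histories[thread_id].append({"role": "assistant", "content": assistant_text})
```

An in-memory dict resets whenever the process restarts, so this only illustrates the shape of the data flow; a real app would persist history somewhere durable.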