# MorphChatStreamChunk

Chunks produced by `stream_chat` are consumed by the `<Chat/>` component, which renders the streamed text as it arrives.
# 📦 Package: morph_lib.stream

# 🛠️ Function: stream_chat

```python
def stream_chat(
    text: Optional[str] = None,
) -> MorphChatStreamChunk:
```
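A minimal call looks like the sketch below; it assumes nothing beyond the signature above and simply wraps one piece of text into a chunk.

```python
from morph_lib.stream import stream_chat

# Wrap a single piece of streamed text into a MorphChatStreamChunk.
chunk = stream_chat(text="Hello")
```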
The following example streams a response from the OpenAI API and yields each delta as a `MorphChatStreamChunk`:

```python
import os

from morph_lib.stream import stream_chat
from openai import OpenAI

import morph
from morph import MorphGlobalContext


@morph.func(name="test")
@morph.variables("prompt")
def main(context: MorphGlobalContext):
    # Read the user prompt passed in via the "prompt" variable.
    prompt = context.vars["prompt"]

    client = OpenAI(api_key=os.environ["OPENAI_API_KEY"])
    response = client.chat.completions.create(
        model="gpt-4o",
        messages=[{"role": "user", "content": prompt}],
        stream=True,
    )

    # Relay each streamed delta to the client as a MorphChatStreamChunk.
    for chunk in response:
        text = chunk.model_dump()["choices"][0]["delta"].get("content", "")
        yield stream_chat(text=text)
```
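Because `text` is `Optional[str]`, the final streamed delta, which typically carries no `content`, can still be passed to `stream_chat` without special-casing in the loop.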