This function creates a chat stream. By passing the MorphChatStreamChunk
returned by this function to the <Chat/> component, you can build a chat
application powered by an LLM.
# 📦 Package: morph_lib.stream
# 🛠️ Function: stream_chat
def stream_chat(
    text: Optional[str] = None
) -> MorphChatStreamChunk:
    """Build a MorphChatStreamChunk carrying *text* for the <Chat/> component.

    Args:
        text: Text to be displayed in the chat stream. May be None
            (e.g. for an empty streamed delta).

    Returns:
        MorphChatStreamChunk: the chunk to yield to the chat stream.
    """
Parameters
text (Optional[str]): Text to be displayed in the chat stream.
Example
import os
from morph_lib.stream import stream_chat
from openai import OpenAI
import morph
from morph import MorphGlobalContext
@morph.func(name="test")
@morph.variables("prompt")
def main(context: MorphGlobalContext):
    """Stream an OpenAI chat completion for the user's prompt.

    Reads the ``prompt`` variable from the Morph context, requests a
    streamed completion from gpt-4o, and yields each text delta wrapped
    in a MorphChatStreamChunk via ``stream_chat``.
    """
    client = OpenAI(api_key=os.environ["OPENAI_API_KEY"])
    prompt = context.vars["prompt"]  # fixed typo: was `propmt`
    response = client.chat.completions.create(
        model="gpt-4o",
        messages=[{"role": "user", "content": prompt}],
        stream=True,
    )
    for chunk in response:
        # The final streamed chunk carries `"content": None`, so
        # `.get("content", "")` alone would yield None — coalesce to "".
        text = chunk.model_dump()["choices"][0]["delta"].get("content") or ""
        yield stream_chat(text=text)