Note
Covers synchronous & asynchronous calls, with and without streaming.
Enough talk, straight to the code.
Requirements
pip install openai -U
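
The listing in the next section constructs openai.OpenAI() and openai.AsyncOpenAI() with no arguments, so the 1.x SDK reads the API key (and, if set, the base URL) from environment variables. Since the model is Qwen1.5-32B-Chat, the example presumably targets a self-hosted OpenAI-compatible endpoint; a minimal setup sketch, where the URL and the key are placeholders rather than values from the original post:

import openai

# Assumed self-hosted OpenAI-compatible server (e.g. vLLM); URL and key are placeholders.
client = openai.OpenAI(base_url="http://localhost:8000/v1", api_key="EMPTY")
aclient = openai.AsyncOpenAI(base_url="http://localhost:8000/v1", api_key="EMPTY")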
Code
import openai
import asyncio


def pp(obj: str):
    # Print a section banner, centered and padded with asterisks.
    print(obj.center(50, "*"))


# sync
def _sync():
    ## w/o stream
    pp("Sync w/o stream")
    response = client.chat.completions.create(
        model=model, messages=messages, temperature=0
    )
    print(response.choices[0].message.content)

    ## w/ stream
    pp("Sync w/ stream")
    response = client.chat.completions.create(
        model=model, messages=messages, temperature=0, stream=True
    )
    for chunk in response:
        # Each chunk carries an incremental piece of the reply in delta.content.
        content = chunk.choices[0].delta.content
        if content:
            print(content, end="", flush=True)
        if chunk.choices[0].finish_reason == "stop":
            print()


# async
async def _async():
    ## w/o stream
    pp("Async w/o stream")
    async_response = await aclient.chat.completions.create(
        model=model, messages=messages, temperature=0
    )
    print(async_response.choices[0].message.content)

    ## w/ stream
    pp("Async w/ stream")
    async_response_with_stream = await aclient.chat.completions.create(
        model=model, messages=messages, temperature=0, stream=True
    )
    async for chunk in async_response_with_stream:
        content = chunk.choices[0].delta.content
        if content:
            print(content, end="", flush=True)
        if chunk.choices[0].finish_reason == "stop":
            print()


async def main():
    _sync()
    await _async()


if __name__ == "__main__":
    # Clients and request parameters are module-level globals shared by _sync/_async.
    client = openai.OpenAI()
    aclient = openai.AsyncOpenAI()
    model = "Qwen1.5-32B-Chat"
    messages = [{"role": "user", "content": "Who are you"}]
    asyncio.run(main())
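
Note that main() calls _sync() directly inside the asyncio event loop; the loop simply blocks while the two synchronous requests run, which is acceptable for a demo like this. For reference, here is a small variant (not from the original post) that collects the streamed deltas into a single string instead of printing them token by token, reusing the same chat.completions.create(..., stream=True) call:

def collect_stream(client, model, messages):
    # Accumulate the incremental delta.content pieces into one reply string.
    parts = []
    for chunk in client.chat.completions.create(
        model=model, messages=messages, temperature=0, stream=True
    ):
        delta = chunk.choices[0].delta.content
        if delta:
            parts.append(delta)
    return "".join(parts)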
From: https://www.cnblogs.com/shy36/p/18141876