# main.py
"""FastAPI service that streams Doubao (Volcengine Ark) chat-completion output.

POST /stream-data/ with a JSON body ``{"prompt": "..."}``; the response is a
stream of JSON-encoded text fragments produced by the model.
Run with: uvicorn main:app --reload
"""
import asyncio
import json
import os

from fastapi import FastAPI, BackgroundTasks, HTTPException, Body
from fastapi.responses import StreamingResponse
from starlette.middleware.cors import CORSMiddleware
from volcenginesdkarkruntime import Ark

app = FastAPI()

# Development CORS policy: accept requests from any origin, with credentials,
# any method and any header. Tighten allow_origins before production use.
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)


@app.post("/stream-data/")
async def stream_data(prompt: str = Body(..., embed=True)):
    """Stream the model's reply to *prompt*, one JSON-encoded fragment at a time.

    The request body is ``{"prompt": "..."}`` (``embed=True``). Returns a
    ``StreamingResponse`` whose chunks are ``json.dumps`` of each delta's text.
    """
    print('-------------', prompt)

    async def chat_dance(prompt):
        # SECURITY: prefer the key from the environment; the hard-coded
        # placeholder is kept only as a backward-compatible fallback.
        client = Ark(api_key=os.environ.get("ARK_API_KEY", "xxxxxxx661956"))
        stream = client.chat.completions.create(
            model="ep-xxxxxxxx-c4xff",
            messages=[
                {"role": "system", "content": "你是豆包,是由字节跳动开发的 AI 人工智能助手"},
                {"role": "user", "content": prompt},
            ],
            stream=True,
        )
        # NOTE(review): this Ark client appears synchronous, so iterating it
        # inside an async generator blocks the event loop between chunks —
        # consider the SDK's async client or run_in_executor; confirm first.
        for chunk in stream:
            if not chunk.choices:
                continue
            data = chunk.choices[0].delta.content
            # The terminal chunk may carry no content; json.dumps(None) would
            # emit the literal string "null" into the stream, so skip it.
            if data is None:
                continue
            yield json.dumps(data, ensure_ascii=False)

    return StreamingResponse(chat_dance(prompt))

# Run the app (from a terminal):
# uvicorn main:app --reload
标签:prompt,stream,fastapi,app,流式,豆包,import,data From: https://www.cnblogs.com/air/p/18460456