@router.post('/openai', response_class=EventSourceResponse)
async def simple_chat(user_input=Body(..., embed=True, alias='input'),
                      chat_history=Body(..., embed=True, alias='chat_history')):
    """Stream an OpenAI chat completion to the client as Server-Sent Events.

    The LLM chain runs as a background task on the current event loop.  The
    streaming callback pushes tokens onto ``callback_handler.tokens`` (a
    queue), and a sync generator drains that queue and yields one SSE event
    per token.  sse-starlette iterates sync generators in a thread pool, so
    the blocking ``get()`` does not stall the event loop.

    Body (embedded JSON):
        input: the user's message (``human_input`` in the prompt).
        chat_history: prior conversation text interpolated into the prompt.

    Returns:
        EventSourceResponse streaming JSON-encoded tokens.
    """
    callback_handler = StreamingCallbackHandler()
    chat_prompt = PromptTemplate(
        input_variables=['human_input', 'chat_history'],
        template='''you are a robot having a conversation with a human being.
previous conversation:
{chat_history}
human: {human_input}
robot:'''
    )
    chain = LLMChain(
        llm=OpenAI(
            temperature=0.8,
            max_retries=1,
            max_tokens=2048,
            streaming=True,
            verbose=True,
        ),
        prompt=chat_prompt,
    )
    # Schedule the chain on the running loop directly.  The original used
    # asyncio.get_event_loop() + run_coroutine_threadsafe, but that pair is
    # meant for scheduling FROM ANOTHER THREAD, and get_event_loop() inside a
    # coroutine is deprecated.  create_task is the in-loop equivalent, and
    # holding the returned Task keeps a strong reference so the task cannot
    # be garbage-collected mid-run.
    task = asyncio.create_task(
        chain.arun(
            {'human_input': user_input, 'chat_history': chat_history},
            callbacks=[callback_handler],
        )
    )

    def resp():
        # Drain tokens until the sentinel appears.  NOTE(review): this
        # assumes StreamingCallbackHandler enqueues the StopIteration class
        # itself when the run completes — confirm in the handler.
        while True:
            tk = callback_handler.tokens.get()
            if tk is StopIteration:
                break
            yield ServerSentEvent(data=json.dumps(tk, ensure_ascii=False))

    return EventSourceResponse(resp())
# Tags (from the original blog post): chat, backup, tk, human, input, other, True, history
# Source: https://www.cnblogs.com/feel-myself/p/17866558.html