I am using Ollama through the Ollama Python API to generate answers from a large language model (LLM), and I want to cancel the response generation by clicking a Stop button. The problem is that cancelling the task only works once the response has already started printing. If the task is still processing and about to print, the cancellation has no effect and the response is printed anyway. More specifically, even after the button is clicked, this call
prompt_mistral("Testing")
still executes and prints its response.
My code:
import ollama
import asyncio
import threading
from typing import Optional
import tkinter as tk

# Create the main window
root = tk.Tk()
root.title("Tkinter Button Example")

worker_loop: Optional[asyncio.AbstractEventLoop] = None
task_future: Optional[asyncio.Future] = None

async def get_answer_from_phi3():
    print("Trying")
    messages = [
        {"role": "system", "content": "Hello"}
    ]
    client = ollama.AsyncClient()
    stream = await client.chat(
        model='phi3',
        messages=messages,
        stream=True,
        options={"top_k": 1})
    try:
        async for chunk in stream:
            # Store generated answer
            print(chunk['message']['content'], end='', flush=True)
    except asyncio.exceptions.CancelledError as e:
        print("Cancelled")
    except Exception as e:
        print(e)
        return "Sorry, an error occurred while processing your request."

async def prompt_mistral(query):
    messages = []
    messages.append({"role": "assistant", "content": "Write a song that celebrates the beauty, diversity, and importance of our planet, Earth. The song should evoke vivid imagery of the natural world, from lush forests and majestic mountains to serene oceans and vast deserts. It should capture the essence of Earth as a living, breathing entity that sustains all forms of life. Incorporate themes of harmony, unity, and interconnectedness, emphasizing how all elements of nature are intertwined and how humanity is an integral part of this complex web. The lyrics should reflect a sense of wonder and appreciation for the planet's resources and ecosystems, highlighting the delicate balance that sustains life. Include references to various landscapes, climates, and wildlife, painting a picture of Earth's diverse environments. The song should also touch on the responsibility we have to protect and preserve the planet for future generations, addressing issues like climate change, deforestation, pollution, and conservation efforts. Use poetic language and metaphors to convey the grandeur and fragility of Earth, and infuse the song with a hopeful and inspiring tone that encourages listeners to take action in safeguarding our shared home. The melody should be uplifting and emotionally resonant, complementing the powerful message of the lyrics"})
    generated_answer = ''
    try:
        client = ollama.AsyncClient()
        stream = await client.chat(
            model='mistral',
            messages=messages,
            stream=True,
            options={"top_k": 1}
        )
        async for chunk in stream:
            # Store generated answer
            generated_answer += chunk['message']['content']
            print(chunk['message']['content'])
    except asyncio.exceptions.CancelledError as e:
        print("Cancelled response")
        return
    except Exception as e:
        print(e)
        return "Sorry, an error occurred while processing your request."

def prompt_llama(message):
    async def prompt():
        messages = []
        messages.append({"role": "assistant", "content": message})
        try:
            client = ollama.AsyncClient()
            stream = await client.chat(
                model='llama2',
                messages=messages,
                stream=True,
                options={"top_k": 1}
            )
            generated_answer = ''
            async for chunk in stream:
                # Store generated answer
                generated_answer += chunk['message']['content']
                print(chunk['message']['content'])
            if "help" in generated_answer:
                await prompt_mistral("Testing")
            else:
                print(generated_answer)
        except asyncio.exceptions.CancelledError as e:
            print("Cancelled")
            return
        except Exception as e:
            print(e)
            return "Sorry, an error occurred while processing your request."

    def mistral_worker_function():
        global worker_loop, task_future
        worker_loop = asyncio.new_event_loop()
        task_future = worker_loop.create_task(prompt())
        worker_loop.run_until_complete(task_future)

    print("Starting thread")
    thread = threading.Thread(target=mistral_worker_function)
    thread.start()
    client = ollama.AsyncClient()

# Define the function to be called when the button is pressed
def on_button_click():
    global worker_loop, task_future
    # the loop and the future are not threadsafe
    worker_loop.call_soon_threadsafe(
        lambda: task_future.cancel()
    )

def phi3_worker_function():
    global worker_loop, task_future
    worker_loop = asyncio.new_event_loop()
    task_future = worker_loop.create_task(get_answer_from_phi3())
    worker_loop.run_until_complete(task_future)

print("Starting thread")
thread = threading.Thread(target=phi3_worker_function)
thread.start()

# Create the button
button = tk.Button(root, text="Stop", command=on_button_click)
# Place the button on the window
button.pack(pady=20)

prompt_llama("Hi")

# Start the Tkinter event loop
root.mainloop()
The problem with this code is that the asyncio tasks are run from different threads. As a result, the cancel button and the Ollama API call live in different event loops: each worker thread creates its own loop and overwrites the shared worker_loop and task_future globals, so the button can only ever cancel whichever task was stored last. The fix is to make sure everything runs in one event loop and that the cancellation is invoked from that same thread.
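To see that failure mode in isolation, here is a minimal sketch (the names job and worker are hypothetical stand-ins, not from the original code): two worker threads each build their own loop and overwrite the same globals, so the "stop button" cancels only the last-stored task while the other one finishes and prints regardless.

import asyncio
import threading
import time

worker_loop = None
task_future = None

async def job(name):
    # Stand-in for a streaming LLM call: cancellable at every await
    try:
        await asyncio.sleep(5)
        print(f"{name} finished anyway")
    except asyncio.CancelledError:
        print(f"{name} cancelled")

def worker(name):
    global worker_loop, task_future
    loop = asyncio.new_event_loop()
    worker_loop = loop                         # each worker overwrites the globals
    task_future = loop.create_task(job(name))
    loop.run_until_complete(task_future)

threading.Thread(target=worker, args=("llama",)).start()
threading.Thread(target=worker, args=("phi3",)).start()

time.sleep(1)
# The "stop button": it sees only the last-stored loop and task,
# so the other task keeps running and prints its output regardless.
worker_loop.call_soon_threadsafe(task_future.cancel)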
Here is how to modify the code:
import ollama
import asyncio
import tkinter as tk
from typing import Optional

# Create the main window
root = tk.Tk()
root.title("Tkinter Button Example")

worker_loop: Optional[asyncio.AbstractEventLoop] = None
task_future: Optional[asyncio.Future] = None

async def get_answer_from_phi3():
    print("Trying")
    messages = [
        {"role": "system", "content": "Hello"}
    ]
    client = ollama.AsyncClient()
    stream = await client.chat(
        model='phi3',
        messages=messages,
        stream=True,
        options={"top_k": 1}
    )
    try:
        async for chunk in stream:
            # Store generated answer
            print(chunk['message']['content'], end='', flush=True)
    except asyncio.CancelledError:
        print("Cancelled")
    except Exception as e:
        print(e)
        return "Sorry, an error occurred while processing your request."

async def prompt_mistral(query):
    messages = [{"role": "assistant", "content": "Write a song that celebrates the beauty, diversity, and importance of our planet, Earth..."}]
    generated_answer = ''
    try:
        client = ollama.AsyncClient()
        stream = await client.chat(
            model='mistral',
            messages=messages,
            stream=True,
            options={"top_k": 1}
        )
        async for chunk in stream:
            # Store generated answer
            generated_answer += chunk['message']['content']
            print(chunk['message']['content'])
    except asyncio.CancelledError:
        print("Cancelled response")
        return
    except Exception as e:
        print(e)
        return "Sorry, an error occurred while processing your request."

async def prompt_llama(message):
    messages = [{"role": "assistant", "content": message}]
    try:
        client = ollama.AsyncClient()
        stream = await client.chat(
            model='llama2',
            messages=messages,
            stream=True,
            options={"top_k": 1}
        )
        generated_answer = ''
        async for chunk in stream:
            # Store generated answer
            generated_answer += chunk['message']['content']
            print(chunk['message']['content'])
        if "help" in generated_answer:
            await prompt_mistral("Testing")
        else:
            print(generated_answer)
    except asyncio.CancelledError:
        print("Cancelled")
        return
    except Exception as e:
        print(e)
        return "Sorry, an error occurred while processing your request."

def on_button_click():
    global task_future
    if task_future is not None and not task_future.done():
        task_future.cancel()

# Create the button
button = tk.Button(root, text="Stop", command=on_button_click)
button.pack(pady=20)
# Create one asyncio event loop and drive it from Tkinter's mainloop,
# so the button handler and the Ollama calls share a single thread.
worker_loop = asyncio.new_event_loop()
asyncio.set_event_loop(worker_loop)

async def run_app():
    global task_future
    task_future = asyncio.create_task(prompt_llama("Hi"))
    await task_future

def pump_asyncio():
    # Run the asyncio callbacks that are ready, then yield back to Tkinter
    worker_loop.call_soon(worker_loop.stop)
    worker_loop.run_forever()
    root.after(10, pump_asyncio)

# Schedule the main coroutine on the shared loop and start pumping it
worker_loop.create_task(run_app())
root.after(10, pump_asyncio)
# Start the Tkinter event loop
root.mainloop()
In this modified code, we create an async function called run_app that starts our main async task, prompt_llama. A single asyncio event loop is created alongside Tkinter's, and pump_asyncio, rescheduled every 10 ms via root.after, repeatedly runs whatever asyncio callbacks are ready before handing control back to Tkinter. This means that everything, including the button click handler and the Ollama API calls, runs in the same thread and the same event loop. Now, when the Stop button is pressed, task_future.cancel() takes effect at the task's next await point, regardless of how far execution has progressed or whether any output has been printed yet.
We have also removed the unnecessary threads, which simplifies the code and improves readability.
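To see the pattern in isolation, here is a minimal, self-contained sketch (no Ollama required; the slow_task coroutine is a hypothetical stand-in for the streaming chat call). Clicking Stop cancels the task at its next await, even before anything has been printed:

import asyncio
import tkinter as tk

root = tk.Tk()
root.title("Cancel demo")

# One asyncio loop, driven from Tkinter's mainloop
loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)

async def slow_task():
    # Stand-in for the streaming Ollama call: cancellable at every await
    try:
        for i in range(100):
            await asyncio.sleep(0.5)
            print(f"chunk {i}", flush=True)
    except asyncio.CancelledError:
        print("Cancelled, even if nothing was printed yet")

task = loop.create_task(slow_task())
tk.Button(root, text="Stop", command=task.cancel).pack(pady=20)

def pump_asyncio():
    # Run the asyncio callbacks that are ready, then hand control back to Tk
    loop.call_soon(loop.stop)
    loop.run_forever()
    root.after(10, pump_asyncio)

root.after(10, pump_asyncio)
root.mainloop()

The 10 ms pump interval is a trade-off: shorter intervals make streaming output and cancellation more responsive, at the cost of slightly more idle CPU work in the Tkinter loop.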
Tags: python, multithreading, asynchronous, async-await, ollama From: 78767823