Traceback (most recent call last): File "D:\新建文件夹\langchain-base\ollama_python.p...
🚨 错误信息
Traceback (most recent call last):
File "D:\新建文件夹\langchain-base\ollama_python.py", line 3, in <module>
response = chat(
model="deepseek-r1:7b",
...<2 lines>...
]
)
File "C:\Users\27717\AppData\Roaming\Python\Python313\site-packages\ollama\_client.py", line 365, in chat
return self._request(
~~~~~~~~~~~~~^
ChatResponse,
^^^^^^^^^^^^^
...<14 lines>...
stream=stream,
^^^^^^^^^^^^^^
)
^
File "C:\Users\27717\AppData\Roaming\Python\Python313\site-packages\ollama\_client.py", line 189, in _request
return cls(**self._request_raw(*args, **kwargs).json())
~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^
File "C:\Users\27717\AppData\Roaming\Python\Python313\site-packages\ollama\_client.py", line 129, in _request_raw
r = self._client.request(*args, **kwargs)
File "D:\software\Python\Lib\site-packages\httpx\_client.py", line 825, in request
return self.send(request, auth=auth, follow_redirects=follow_redirects)
~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\software\Python\Lib\site-packages\httpx\_client.py", line 914, in send
response = self._send_handling_auth(
request,
...<2 lines>...
history=[],
)
File "D:\software\Python\Lib\site-packages\httpx\_client.py", line 942, in _send_handling_auth
response = self._send_handling_redirects(
request,
follow_redirects=follow_redirects,
history=history,
)
File "D:\software\Python\Lib\site-packages\httpx\_client.py", line 979, in _send_handling_redirects
response = self._send_single_request(request)
File "D:\software\Python\Lib\site-packages\httpx\_client.py", line 1014, in _send_single_request
response = transport.handle_request(request)
File "D:\software\Python\Lib\site-packages\httpx\_transports\default.py", line 250, in handle_request
resp = self._pool.handle_request(req)
File "D:\software\Python\Lib\site-packages\httpcore\_sync\connection_pool.py", line 256, in handle_request
raise exc from None
File "D:\software\Python\Lib\site-packages\httpcore\_sync\connection_pool.py", line 236, in handle_request
response = connection.handle_request(
pool_request.request
)
File "D:\software\Python\Lib\site-packages\httpcore\_sync\http_proxy.py", line 206, in handle_request
return self._connection.handle_request(proxy_request)
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^
File "D:\software\Python\Lib\site-packages\httpcore\_sync\connection.py", line 103, in handle_r return self._connection.handle_request(request)
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^
st
raise exc
File "D:\software\Python\Lib\site-packages\httpcore\_sync\http11.py", line 106, in handle_request
) = self._receive_response_headers(**kwargs)
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^
File "D:\software\Python\Lib\site-packages\httpcore\_sync\http11.py", line 177, in _receive_response_headers
event = self._receive_event(timeout=timeout)
File "D:\software\Python\Lib\site-packages\httpcore\_sync\http11.py", line 217, in _receive_event
data = self._network_stream.read(
self.READ_NUM_BYTES, timeout=timeout
)
File "D:\software\Python\Lib\site-packages\httpcore\_backends\sync.py", line 128, in read
return self._sock.recv(max_bytes)
~~~~~~~~~~~~~~~^^^^^^^^^^^
KeyboardInterrupt
🤖 AI解决方案
报错分析报告
1. 错误翻译
原文:KeyboardInterrupt
译文:键盘中断
2. 原因分析
- 根本原因:用户通过键盘中断(通常是按下 Ctrl+C)主动终止了程序执行
- 错误位置:在 Ollama 客户端进行 HTTP 请求时,网络流读取操作被中断
- 上下文:程序正在与Ollama API进行通信,等待响应时被用户强制终止
3. 解决方案
import signal
import sys
from ollama import chat
def signal_handler(sig, frame):
print('\n程序正在优雅退出...')
# 执行清理操作
sys.exit(0)
# 注册信号处理器
signal.signal(signal.SIGINT, signal_handler)
try:
response = chat(
model="deepseek-r1:7b",
messages=[
{"role": "user", "content": "你的消息内容"}
]
)
print(response)
except KeyboardInterrupt:
print('\n操作被用户取消')
# 执行必要的清理操作
4. ⚠️ 预防措施
import ollama
client = ollama.Client(timeout=60)  # 设置60秒超时,避免请求无限期阻塞
response = client.chat(
    model="deepseek-r1:7b",
    messages=[{"role": "user", "content": "你的消息内容"}]
)
提示:这是一个正常的程序中断,不是代码错误。如果需要长时间运行的程序,建议添加适当的异常处理机制。