1 Introduction
When calling the zhipuai GLM-4 model API with a proxy (VPN) turned on, the request fails with ConnectError: TLS/SSL connection has been closed (EOF) (_ssl.c:1131). The full error output is:
{
"name": "ConnectError",
"message": "TLS/SSL connection has been closed (EOF) (_ssl.c:1131)",
"stack": "---------------------------------------------------------------------------
ConnectError Traceback (most recent call last)
File d:\\miniconda\\envs\\research\\lib\\site-packages\\httpx\\_transports\\default.py:69, in map_httpcore_exceptions()
68 try:
---> 69 yield
70 except Exception as exc:
File d:\\miniconda\\envs\\research\\lib\\site-packages\\httpx\\_transports\\default.py:233, in HTTPTransport.handle_request(self, request)
232 with map_httpcore_exceptions():
--> 233 resp = self._pool.handle_request(req)
235 assert isinstance(resp.stream, typing.Iterable)
File d:\\miniconda\\envs\\research\\lib\\site-packages\\httpcore\\_sync\\connection_pool.py:216, in ConnectionPool.handle_request(self, request)
215 self._close_connections(closing)
--> 216 raise exc from None
218 # Return the response. Note that in this case we still have to manage
219 # the point at which the response is closed.
File d:\\miniconda\\envs\\research\\lib\\site-packages\\httpcore\\_sync\\connection_pool.py:196, in ConnectionPool.handle_request(self, request)
194 try:
195 # Send the request on the assigned connection.
--> 196 response = connection.handle_request(
197 pool_request.request
198 )
199 except ConnectionNotAvailable:
200 # In some cases a connection may initially be available to
201 # handle a request, but then become unavailable.
202 #
203 # In this case we clear the connection and try again.
File d:\\miniconda\\envs\\research\\lib\\site-packages\\httpcore\\_sync\\http_proxy.py:289, in TunnelHTTPConnection.handle_request(self, request)
283 connect_request = Request(
284 method=b\"CONNECT\",
285 url=connect_url,
286 headers=connect_headers,
287 extensions=request.extensions,
288 )
--> 289 connect_response = self._connection.handle_request(
290 connect_request
291 )
293 if connect_response.status < 200 or connect_response.status > 299:
File d:\\miniconda\\envs\\research\\lib\\site-packages\\httpcore\\_sync\\connection.py:99, in HTTPConnection.handle_request(self, request)
98 self._connect_failed = True
---> 99 raise exc
101 return self._connection.handle_request(request)
File d:\\miniconda\\envs\\research\\lib\\site-packages\\httpcore\\_sync\\connection.py:76, in HTTPConnection.handle_request(self, request)
75 if self._connection is None:
---> 76 stream = self._connect(request)
78 ssl_object = stream.get_extra_info(\"ssl_object\")
File d:\\miniconda\\envs\\research\\lib\\site-packages\\httpcore\\_sync\\connection.py:154, in HTTPConnection._connect(self, request)
153 with Trace(\"start_tls\", logger, request, kwargs) as trace:
--> 154 stream = stream.start_tls(**kwargs)
155 trace.return_value = stream
File d:\\miniconda\\envs\\research\\lib\\site-packages\\httpcore\\_backends\\sync.py:168, in SyncStream.start_tls(self, ssl_context, server_hostname, timeout)
167 self.close()
--> 168 raise exc
169 return SyncStream(sock)
File d:\\miniconda\\envs\\research\\lib\\contextlib.py:131, in _GeneratorContextManager.__exit__(self, type, value, traceback)
130 try:
--> 131 self.gen.throw(type, value, traceback)
132 except StopIteration as exc:
133 # Suppress StopIteration *unless* it's the same exception that
134 # was passed to throw(). This prevents a StopIteration
135 # raised inside the \"with\" statement from being suppressed.
File d:\\miniconda\\envs\\research\\lib\\site-packages\\httpcore\\_exceptions.py:14, in map_exceptions(map)
13 if isinstance(exc, from_exc):
---> 14 raise to_exc(exc) from exc
15 raise
ConnectError: TLS/SSL connection has been closed (EOF) (_ssl.c:1131)
The above exception was the direct cause of the following exception:
ConnectError Traceback (most recent call last)
Cell In[24], line 20
17 answer = response.choices[0].message
18 return dict(answer)[\"content\"]
---> 20 print(get_title_from_text(client, prompt))
Cell In[24], line 4, in get_title_from_text(client, prompt)
3 def get_title_from_text(client, prompt):
----> 4 response = client.chat.completions.create(
5 model=\"glm-4\",
6 messages=[
7 {
8 \"role\": \"user\",
9 \"content\": prompt
10 }
11 ],
12 top_p=0.7,
13 temperature=0.9,
14 stream=False,
15 max_tokens=2000,)
17 answer = response.choices[0].message
18 return dict(answer)[\"content\"]
File d:\\miniconda\\envs\\research\\lib\\site-packages\\zhipuai\\api_resource\\chat\\completions.py:48, in Completions.create(self, model, request_id, do_sample, stream, temperature, top_p, max_tokens, seed, messages, stop, sensitive_word_check, tools, tool_choice, extra_headers, disable_strict_validation, timeout)
46 _cast_type = object
47 _stream_cls = StreamResponse[object]
---> 48 return self._post(
49 \"/chat/completions\",
50 body={
51 \"model\": model,
52 \"request_id\": request_id,
53 \"temperature\": temperature,
54 \"top_p\": top_p,
55 \"do_sample\": do_sample,
56 \"max_tokens\": max_tokens,
57 \"seed\": seed,
58 \"messages\": messages,
59 \"stop\": stop,
60 \"sensitive_word_check\": sensitive_word_check,
61 \"stream\": stream,
62 \"tools\": tools,
63 \"tool_choice\": tool_choice,
64 },
65 options=make_user_request_input(
66 extra_headers=extra_headers,
67 ),
68 cast_type=_cast_type,
69 enable_stream=stream or False,
70 stream_cls=_stream_cls,
71 )
File d:\\miniconda\\envs\\research\\lib\\site-packages\\zhipuai\\core\\_http_client.py:292, in HttpClient.post(self, path, body, cast_type, options, files, enable_stream, stream_cls)
278 def post(
279 self,
280 path: str,
(...)
287 stream_cls: type[StreamResponse[Any]] | None = None,
288 ) -> ResponseT | StreamResponse:
289 opts = ClientRequestParam.construct(method=\"post\", json_data=body, files=make_httpx_files(files), url=path,
290 **options)
--> 292 return self.request(
293 cast_type=cast_type, params=opts,
294 enable_stream=enable_stream,
295 stream_cls=stream_cls
296 )
File d:\\miniconda\\envs\\research\\lib\\site-packages\\zhipuai\\core\\_http_client.py:254, in HttpClient.request(self, cast_type, params, enable_stream, stream_cls)
251 raise self._make_status_error(err.response) from None
253 except Exception as err:
--> 254 raise err
256 return self._parse_response(
257 cast_type=cast_type,
258 request_param=params,
(...)
261 stream_cls=stream_cls,
262 )
File d:\\miniconda\\envs\\research\\lib\\site-packages\\zhipuai\\core\\_http_client.py:241, in HttpClient.request(self, cast_type, params, enable_stream, stream_cls)
238 request = self._prepare_request(params)
240 try:
--> 241 response = self._client.send(
242 request,
243 stream=enable_stream,
244 )
245 response.raise_for_status()
246 except httpx.TimeoutException as err:
File d:\\miniconda\\envs\\research\\lib\\site-packages\\httpx\\_client.py:914, in Client.send(self, request, stream, auth, follow_redirects)
906 follow_redirects = (
907 self.follow_redirects
908 if isinstance(follow_redirects, UseClientDefault)
909 else follow_redirects
910 )
912 auth = self._build_request_auth(request, auth)
--> 914 response = self._send_handling_auth(
915 request,
916 auth=auth,
917 follow_redirects=follow_redirects,
918 history=[],
919 )
920 try:
921 if not stream:
File d:\\miniconda\\envs\\research\\lib\\site-packages\\httpx\\_client.py:942, in Client._send_handling_auth(self, request, auth, follow_redirects, history)
939 request = next(auth_flow)
941 while True:
--> 942 response = self._send_handling_redirects(
943 request,
944 follow_redirects=follow_redirects,
945 history=history,
946 )
947 try:
948 try:
File d:\\miniconda\\envs\\research\\lib\\site-packages\\httpx\\_client.py:979, in Client._send_handling_redirects(self, request, follow_redirects, history)
976 for hook in self._event_hooks[\"request\"]:
977 hook(request)
--> 979 response = self._send_single_request(request)
980 try:
981 for hook in self._event_hooks[\"response\"]:
File d:\\miniconda\\envs\\research\\lib\\site-packages\\httpx\\_client.py:1015, in Client._send_single_request(self, request)
1010 raise RuntimeError(
1011 \"Attempted to send an async request with a sync Client instance.\"
1012 )
1014 with request_context(request=request):
-> 1015 response = transport.handle_request(request)
1017 assert isinstance(response.stream, SyncByteStream)
1019 response.request = request
File d:\\miniconda\\envs\\research\\lib\\site-packages\\httpx\\_transports\\default.py:233, in HTTPTransport.handle_request(self, request)
220 req = httpcore.Request(
221 method=request.method,
222 url=httpcore.URL(
(...)
230 extensions=request.extensions,
231 )
232 with map_httpcore_exceptions():
--> 233 resp = self._pool.handle_request(req)
235 assert isinstance(resp.stream, typing.Iterable)
237 return Response(
238 status_code=resp.status,
239 headers=resp.headers,
240 stream=ResponseStream(resp.stream),
241 extensions=resp.extensions,
242 )
File d:\\miniconda\\envs\\research\\lib\\contextlib.py:131, in _GeneratorContextManager.__exit__(self, type, value, traceback)
129 value = type()
130 try:
--> 131 self.gen.throw(type, value, traceback)
132 except StopIteration as exc:
133 # Suppress StopIteration *unless* it's the same exception that
134 # was passed to throw(). This prevents a StopIteration
135 # raised inside the \"with\" statement from being suppressed.
136 return exc is not value
File d:\\miniconda\\envs\\research\\lib\\site-packages\\httpx\\_transports\\default.py:86, in map_httpcore_exceptions()
83 raise
85 message = str(exc)
---> 86 raise mapped_exc(message) from exc
ConnectError: TLS/SSL connection has been closed (EOF) (_ssl.c:1131)"
}
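For reference, the call that triggers this error looks roughly like the following. It is reconstructed from the notebook cell visible in the traceback above; the ZhipuAI client construction and the prompt are not shown in the original post and are assumptions here.

from zhipuai import ZhipuAI

# client setup assumed; the api_key comes from the Zhipu open platform
client = ZhipuAI(api_key="your-api-key")

def get_title_from_text(client, prompt):
    # same parameters as in the failing notebook cell
    response = client.chat.completions.create(
        model="glm-4",
        messages=[{"role": "user", "content": prompt}],
        top_p=0.7,
        temperature=0.9,
        stream=False,
        max_tokens=2000,
    )
    answer = response.choices[0].message
    return dict(answer)["content"]

prompt = "..."  # the original prompt text is not shown in the post
print(get_title_from_text(client, prompt))  # raises ConnectError behind the proxy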
2 Attempts
2.1
Some blog posts claim the cause is an urllib3 version that is too new, but conda list shows urllib3==1.25.11 in this environment, so that should not be the problem.
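A quick way to confirm the installed version from inside the interpreter (a trivial check, added here for completeness):

import urllib3
print(urllib3.__version__)  # 1.25.11 in this environment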
2.2
Switching to calling the API directly over HTTP with requests succeeds, as shown below. It is presumably an http vs. https issue (something about how the TLS connection is tunneled through the proxy), but the exact reason is still unclear... (link)
import requests
import jwt   # PyJWT
import time

def generate_token(apikey: str, exp_seconds: int):
    # The Zhipu API key has the form "<id>.<secret>"; the secret is used to sign a short-lived JWT.
    try:
        id, secret = apikey.split(".")
    except Exception as e:
        raise Exception("invalid apikey", e)

    payload = {
        "api_key": id,
        "exp": int(round(time.time() * 1000)) + exp_seconds * 1000,  # expiry, in milliseconds
        "timestamp": int(round(time.time() * 1000)),
    }

    return jwt.encode(
        payload,
        secret,
        algorithm="HS256",
        headers={"alg": "HS256", "sign_type": "SIGN"},
    )

api_key = ""  # fill in your API key here
token = generate_token(api_key, 60)

url = "https://open.bigmodel.cn/api/paas/v4/chat/completions"
headers = {
    "Content-Type": "application/json",
    "Authorization": f"Bearer {token}"
}
data = {
    "model": "glm-4",
    "messages": [
        {
            "role": "user",
            "content": "讲一个学弟打篮球的故事"  # "tell a story about a junior playing basketball"
        }
    ],
    "max_tokens": 8192,
    "temperature": 0.8,
    "stream": False
}

response = requests.post(url, headers=headers, json=data)
ans = response.json()
ans["choices"][0]["message"]["content"]
From: https://blog.csdn.net/m0_46268825/article/details/136819659