
Error when making an API request #53

Open

lxghfilecdn opened this issue Dec 23, 2024 · 0 comments

@lxghfilecdn

```
ERROR:    Exception in ASGI application
Traceback (most recent call last):
  File "/home/ai/CreativeChatGLM_py310_torch231_glm4_9b/vp/lib/python3.10/site-packages/starlette/responses.py", line 265, in __call__
    await wrap(partial(self.listen_for_disconnect, receive))
  File "/home/ai/CreativeChatGLM_py310_torch231_glm4_9b/vp/lib/python3.10/site-packages/starlette/responses.py", line 261, in wrap
    await func()
  File "/home/ai/CreativeChatGLM_py310_torch231_glm4_9b/vp/lib/python3.10/site-packages/starlette/responses.py", line 238, in listen_for_disconnect
    message = await receive()
  File "/home/ai/CreativeChatGLM_py310_torch231_glm4_9b/vp/lib/python3.10/site-packages/uvicorn/protocols/http/h11_impl.py", line 546, in receive
    await self.message_event.wait()
  File "/usr/local/python3.10/lib/python3.10/asyncio/locks.py", line 214, in wait
    await fut
asyncio.exceptions.CancelledError: Cancelled by cancel scope 71d267dc8ca0

During handling of the above exception, another exception occurred:

  + Exception Group Traceback (most recent call last):
  |   File "/home/ai/CreativeChatGLM_py310_torch231_glm4_9b/vp/lib/python3.10/site-packages/uvicorn/protocols/http/h11_impl.py", line 407, in run_asgi
  |     result = await app(  # type: ignore[func-returns-value]
  |   File "/home/ai/CreativeChatGLM_py310_torch231_glm4_9b/vp/lib/python3.10/site-packages/uvicorn/middleware/proxy_headers.py", line 78, in __call__
  |     return await self.app(scope, receive, send)
  |   File "/home/ai/CreativeChatGLM_py310_torch231_glm4_9b/vp/lib/python3.10/site-packages/fastapi/applications.py", line 1054, in __call__
  |     await super().__call__(scope, receive, send)
  |   File "/home/ai/CreativeChatGLM_py310_torch231_glm4_9b/vp/lib/python3.10/site-packages/starlette/applications.py", line 123, in __call__
  |     await self.middleware_stack(scope, receive, send)
  |   File "/home/ai/CreativeChatGLM_py310_torch231_glm4_9b/vp/lib/python3.10/site-packages/starlette/middleware/errors.py", line 186, in __call__
  |     raise exc
  |   File "/home/ai/CreativeChatGLM_py310_torch231_glm4_9b/vp/lib/python3.10/site-packages/starlette/middleware/errors.py", line 164, in __call__
  |     await self.app(scope, receive, _send)
  |   File "/home/ai/CreativeChatGLM_py310_torch231_glm4_9b/vp/lib/python3.10/site-packages/starlette/middleware/cors.py", line 85, in __call__
  |     await self.app(scope, receive, send)
  |   File "/home/ai/CreativeChatGLM_py310_torch231_glm4_9b/vp/lib/python3.10/site-packages/starlette/middleware/exceptions.py", line 65, in __call__
  |     await wrap_app_handling_exceptions(self.app, conn)(scope, receive, send)
  |   File "/home/ai/CreativeChatGLM_py310_torch231_glm4_9b/vp/lib/python3.10/site-packages/starlette/_exception_handler.py", line 64, in wrapped_app
  |     raise exc
  |   File "/home/ai/CreativeChatGLM_py310_torch231_glm4_9b/vp/lib/python3.10/site-packages/starlette/_exception_handler.py", line 53, in wrapped_app
  |     await app(scope, receive, sender)
  |   File "/home/ai/CreativeChatGLM_py310_torch231_glm4_9b/vp/lib/python3.10/site-packages/starlette/routing.py", line 756, in __call__
  |     await self.middleware_stack(scope, receive, send)
  |   File "/home/ai/CreativeChatGLM_py310_torch231_glm4_9b/vp/lib/python3.10/site-packages/starlette/routing.py", line 776, in app
  |     await route.handle(scope, receive, send)
  |   File "/home/ai/CreativeChatGLM_py310_torch231_glm4_9b/vp/lib/python3.10/site-packages/starlette/routing.py", line 297, in handle
  |     await self.app(scope, receive, send)
  |   File "/home/ai/CreativeChatGLM_py310_torch231_glm4_9b/vp/lib/python3.10/site-packages/starlette/routing.py", line 77, in app
  |     await wrap_app_handling_exceptions(app, request)(scope, receive, send)
  |   File "/home/ai/CreativeChatGLM_py310_torch231_glm4_9b/vp/lib/python3.10/site-packages/starlette/_exception_handler.py", line 64, in wrapped_app
  |     raise exc
  |   File "/home/ai/CreativeChatGLM_py310_torch231_glm4_9b/vp/lib/python3.10/site-packages/starlette/_exception_handler.py", line 53, in wrapped_app
  |     await app(scope, receive, sender)
  |   File "/home/ai/CreativeChatGLM_py310_torch231_glm4_9b/vp/lib/python3.10/site-packages/starlette/routing.py", line 75, in app
  |     await response(scope, receive, send)
  |   File "/home/ai/CreativeChatGLM_py310_torch231_glm4_9b/vp/lib/python3.10/site-packages/starlette/responses.py", line 258, in __call__
  |     async with anyio.create_task_group() as task_group:
  |   File "/home/ai/CreativeChatGLM_py310_torch231_glm4_9b/vp/lib/python3.10/site-packages/anyio/_backends/_asyncio.py", line 815, in __aexit__
  |     raise BaseExceptionGroup(
  | exceptiongroup.ExceptionGroup: unhandled errors in a TaskGroup (1 sub-exception)
  +-+---------------- 1 ----------------
    | Traceback (most recent call last):
    |   File "/home/ai/CreativeChatGLM_py310_torch231_glm4_9b/vp/lib/python3.10/site-packages/starlette/responses.py", line 261, in wrap
    |     await func()
    |   File "/home/ai/CreativeChatGLM_py310_torch231_glm4_9b/vp/lib/python3.10/site-packages/starlette/responses.py", line 250, in stream_response
    |     async for chunk in self.body_iterator:
    |   File "/home/ai/CreativeChatGLM_py310_torch231_glm4_9b/vp/lib/python3.10/site-packages/starlette/concurrency.py", line 65, in iterate_in_threadpool
    |     yield await anyio.to_thread.run_sync(_next, as_iterator)
    |   File "/home/ai/CreativeChatGLM_py310_torch231_glm4_9b/vp/lib/python3.10/site-packages/anyio/to_thread.py", line 56, in run_sync
    |     return await get_async_backend().run_sync_in_worker_thread(
    |   File "/home/ai/CreativeChatGLM_py310_torch231_glm4_9b/vp/lib/python3.10/site-packages/anyio/_backends/_asyncio.py", line 2505, in run_sync_in_worker_thread
    |     return await future
    |   File "/home/ai/CreativeChatGLM_py310_torch231_glm4_9b/vp/lib/python3.10/site-packages/anyio/_backends/_asyncio.py", line 1005, in run
    |     result = context.run(func, *args)
    |   File "/home/ai/CreativeChatGLM_py310_torch231_glm4_9b/vp/lib/python3.10/site-packages/starlette/concurrency.py", line 54, in _next
    |     return next(iterator)
    |   File "/home/ai/CreativeChatGLM_py310_torch231_glm4_9b/app_fastapi.py", line 83, in decorate
    |     for item in generator:
    |   File "/home/ai/CreativeChatGLM_py310_torch231_glm4_9b/predictors/base.py", line 35, in predict_continue
    |     yield from self.predict_continue_tuple(*args, **kwargs)
    | TypeError: BasePredictor.predict_continue_tuple() missing 1 required positional argument: 'last_state'
    +------------------------------------
```
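The final `TypeError` suggests that `predict_continue` (predictors/base.py, line 83 caller in app_fastapi.py) forwards its arguments straight through to `predict_continue_tuple`, so a request that omits `last_state` only fails when the inner method is finally invoked. Below is a minimal sketch that reproduces the same class of error; the signatures are hypothetical stand-ins, not the project's actual code.

```python
# Minimal sketch reproducing the TypeError above (hypothetical signatures,
# NOT the actual CreativeChatGLM implementation).

class BasePredictor:
    def predict_continue_tuple(self, query, last_state):
        # Stand-in for the real streaming generator.
        yield query, last_state

    def predict_continue(self, *args, **kwargs):
        # Forwards *args/**kwargs unchanged, mirroring the pattern shown
        # in predictors/base.py line 35 of the traceback; a missing
        # argument at the call site only surfaces here.
        yield from self.predict_continue_tuple(*args, **kwargs)


predictor = BasePredictor()
# Calling without `last_state` raises:
# TypeError: BasePredictor.predict_continue_tuple() missing 1 required
# positional argument: 'last_state'
for item in predictor.predict_continue("hello"):
    print(item)
```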