RemoteProtocolError: peer closed connection without sending complete message body when using GPT-4.1 with streaming in async LangGraph

Hi, I'm getting the error below when using GPT-4.1 via Azure OpenAI with `streaming=True` in an async LangGraph workflow (full traceback below).

The failure is intermittent: most requests stream back fine, but occasionally the peer closes the connection mid-stream and the response body is never fully received.

Is anyone else experiencing this, and how are you handling or resolving it?
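
For reference, the call path is roughly the following. This is a simplified sketch of `async_call_llm` as it appears in the traceback; the Azure configuration values here are placeholders, not my real settings:

```python
import os

from langchain_core.prompts import ChatPromptTemplate
from langchain_openai import AzureChatOpenAI

# Placeholder config; the real values come from our environment.
llm = AzureChatOpenAI(
    azure_deployment="gpt-4.1",
    api_version=os.environ["OPENAI_API_VERSION"],
    streaming=True,  # the error shows up mid-stream
    temperature=0,
)

async def async_call_llm(prompt: ChatPromptTemplate, payload: dict,
                         run_name: str, tools: list, model_name: str):
    # model_name selects the deployment in the real code; elided here.
    # prompt | model chain, invoked from a LangGraph node's __call__.
    chain = prompt | llm.bind_tools(tools)
    return await chain.ainvoke(payload, config={"run_name": run_name})
```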

Full traceback:

```log
RemoteProtocolError('peer closed connection without sending complete message body (incomplete chunked read)')

Traceback (most recent call last):
  File "/app/.venv/lib/python3.11/site-packages/httpx/_transports/default.py", line 101, in map_httpcore_exceptions
    yield
  File "/app/.venv/lib/python3.11/site-packages/httpx/_transports/default.py", line 271, in __aiter__
    async for part in self._httpcore_stream:
  File "/app/.venv/lib/python3.11/site-packages/httpcore/_async/connection_pool.py", line 407, in __aiter__
    raise exc from None
  File "/app/.venv/lib/python3.11/site-packages/httpcore/_async/connection_pool.py", line 403, in __aiter__
    async for part in self._stream:
  File "/app/.venv/lib/python3.11/site-packages/httpcore/_async/http11.py", line 342, in __aiter__
    raise exc
  File "/app/.venv/lib/python3.11/site-packages/httpcore/_async/http11.py", line 334, in __aiter__
    async for chunk in self._connection._receive_response_body(**kwargs):
  File "/app/.venv/lib/python3.11/site-packages/httpcore/_async/http11.py", line 203, in _receive_response_body
    event = await self._receive_event(timeout=timeout)
            ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "/app/.venv/lib/python3.11/site-packages/httpcore/_async/http11.py", line 213, in _receive_event
    with map_exceptions({h11.RemoteProtocolError: RemoteProtocolError}):
  File "/usr/local/lib/python3.11/contextlib.py", line 155, in __exit__
    self.gen.throw(typ, value, traceback)
  File "/app/.venv/lib/python3.11/site-packages/httpcore/_exceptions.py", line 14, in map_exceptions
    raise to_exc(exc) from exc
httpcore.RemoteProtocolError: peer closed connection without sending complete message body (incomplete chunked read)

The above exception was the direct cause of the following exception:

Traceback (most recent call last):
  File "/app/.venv/lib/python3.11/site-packages/langgraph/pregel/__init__.py", line 2274, in astream
    async for _ in runner.atick(
  File "/app/.venv/lib/python3.11/site-packages/langgraph/pregel/runner.py", line 444, in atick
    await arun_with_retry(
  File "/app/.venv/lib/python3.11/site-packages/langgraph/pregel/retry.py", line 123, in arun_with_retry
    async for _ in task.proc.astream(task.input, config):
  File "/app/.venv/lib/python3.11/site-packages/langgraph/utils/runnable.py", line 706, in astream
    async for chunk in aiterator:
  File "/app/.venv/lib/python3.11/site-packages/langchain_core/tracers/event_stream.py", line 181, in tap_output_aiter
    first = await py_anext(output, default=sentinel)
            ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "/app/.venv/lib/python3.11/site-packages/langchain_core/utils/aiter.py", line 78, in anext_impl
    return await __anext__(iterator)
           ^^^^^^^^^^^^^^^^^^^^^^^^^
  File "/app/.venv/lib/python3.11/site-packages/langchain_core/runnables/base.py", line 1461, in atransform
    async for ichunk in input:
  File "/app/.venv/lib/python3.11/site-packages/langchain_core/runnables/base.py", line 1461, in atransform
    async for ichunk in input:
  File "/app/.venv/lib/python3.11/site-packages/langchain_core/runnables/base.py", line 1461, in atransform
    async for ichunk in input:
  File "/app/.venv/lib/python3.11/site-packages/langchain_core/runnables/base.py", line 1024, in astream
    yield await self.ainvoke(input, config, **kwargs)
          ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "/app/.venv/lib/python3.11/site-packages/langgraph/utils/runnable.py", line 371, in ainvoke
    ret = await asyncio.create_task(coro, context=context)
          ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "/app/.venv/lib/python3.11/site-packages/ddtrace/contrib/internal/asyncio/patch.py", line 50, in traced_coro
    return await coro
           ^^^^^^^^^^
  File "/app/app/llm/node.py", line 279, in __call__
    response = await async_call_llm(prompt=prompt, payload=payload, run_name=self.__class__.__name__, tools=self.tools, model_name="gpt-4.1")
               ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "/app/app/support/datadog_metric.py", line 21, in wrapper
    return await func(*args, **kwargs)
           ^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "/app/app/llm/support/llm_util.py", line 53, in async_call_llm
    return await chain.ainvoke(payload, config=config)
           ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "/app/.venv/lib/python3.11/site-packages/langchain_core/runnables/base.py", line 3075, in ainvoke
    input = await coro_with_context(part(), context, create_task=True)
            ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "/app/.venv/lib/python3.11/site-packages/ddtrace/contrib/internal/asyncio/patch.py", line 50, in traced_coro
    return await coro
           ^^^^^^^^^^
  File "/app/.venv/lib/python3.11/site-packages/langchain_core/runnables/base.py", line 5429, in ainvoke
    return await self.bound.ainvoke(
           ^^^^^^^^^^^^^^^^^^^^^^^^^
  File "/app/.venv/lib/python3.11/site-packages/langchain_core/language_models/chat_models.py", line 391, in ainvoke
    llm_result = await self.agenerate_prompt(
                 ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "/app/.venv/lib/python3.11/site-packages/langchain_core/language_models/chat_models.py", line 957, in agenerate_prompt
    return await self.agenerate(
           ^^^^^^^^^^^^^^^^^^^^^
  File "/app/.venv/lib/python3.11/site-packages/langchain_core/language_models/chat_models.py", line 915, in agenerate
    raise exceptions[0]
  File "/app/.venv/lib/python3.11/site-packages/ddtrace/contrib/internal/asyncio/patch.py", line 50, in traced_coro
    return await coro
           ^^^^^^^^^^
  File "/app/.venv/lib/python3.11/site-packages/langchain_core/language_models/chat_models.py", line 1072, in _agenerate_with_cache
    async for chunk in self._astream(messages, stop=stop, **kwargs):
  File "/app/.venv/lib/python3.11/site-packages/langchain_openai/chat_models/base.py", line 1110, in _astream
    async for chunk in response:
  File "/app/.venv/lib/python3.11/site-packages/openai/_streaming.py", line 147, in __aiter__
    async for item in self._iterator:
  File "/app/.venv/lib/python3.11/site-packages/openai/_streaming.py", line 202, in __stream__
    async for _sse in iterator:
  File "/app/.venv/lib/python3.11/site-packages/openai/_streaming.py", line 151, in _iter_events
    async for sse in self._decoder.aiter_bytes(self.response.aiter_bytes()):
  File "/app/.venv/lib/python3.11/site-packages/openai/_streaming.py", line 302, in aiter_bytes
    async for chunk in self._aiter_chunks(iterator):
  File "/app/.venv/lib/python3.11/site-packages/openai/_streaming.py", line 313, in _aiter_chunks
    async for chunk in iterator:
  File "/app/.venv/lib/python3.11/site-packages/httpx/_models.py", line 997, in aiter_bytes
    async for raw_bytes in self.aiter_raw():
  File "/app/.venv/lib/python3.11/site-packages/httpx/_models.py", line 1055, in aiter_raw
    async for raw_stream_bytes in self.stream:
  File "/app/.venv/lib/python3.11/site-packages/httpx/_client.py", line 176, in __aiter__
    async for chunk in self._stream:
  File "/app/.venv/lib/python3.11/site-packages/httpx/_transports/default.py", line 270, in __aiter__
    with map_httpcore_exceptions():
  File "/usr/local/lib/python3.11/contextlib.py", line 155, in __exit__
    self.gen.throw(typ, value, traceback)
  File "/app/.venv/lib/python3.11/site-packages/httpx/_transports/default.py", line 118, in map_httpcore_exceptions
    raise mapped_exc(message) from exc
httpx.RemoteProtocolError: peer closed connection without sending complete message body (incomplete chunked read)
```
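
The only mitigation I can think of so far is retrying on this specific exception. A minimal sketch building on the chain above, using `Runnable.with_retry` from langchain-core (the attempt count is an arbitrary guess):

```python
import httpx

# Retry only the mid-stream disconnect; other errors still surface immediately.
resilient_chain = chain.with_retry(
    retry_if_exception_type=(httpx.RemoteProtocolError,),
    wait_exponential_jitter=True,
    stop_after_attempt=3,
)

response = await resilient_chain.ainvoke(payload, config={"run_name": run_name})
```

Since LangGraph's own `arun_with_retry` already appears in the traceback, I'd also be interested in whether a node-level retry policy on the graph is the more idiomatic place for this.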

Library versions

- python = "^3.11"
- fastapi = "^0.115.6"
- uvicorn = "^0.34.0"
- langgraph = "^0.2.62"
- langchain = "^0.3.14"
- langchain-openai = "^0.3.0"
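
One theory I'd like to rule out is a proxy or load balancer silently dropping idle keepalive connections, which would explain the intermittency. If that's the cause, passing a custom httpx client with a short `keepalive_expiry` might help. A sketch (the timeout and limit values are guesses, and `http_async_client` is langchain-openai's pass-through to the OpenAI SDK):

```python
import httpx
from langchain_openai import AzureChatOpenAI

# Close idle connections early so an intermediary can't drop them out from under us.
async_client = httpx.AsyncClient(
    timeout=httpx.Timeout(60.0, connect=10.0),
    limits=httpx.Limits(max_keepalive_connections=10, keepalive_expiry=15.0),
)

llm = AzureChatOpenAI(
    azure_deployment="gpt-4.1",  # placeholder
    streaming=True,
    http_async_client=async_client,
)
```

If anyone has confirmed whether stale keepalive connections are the culprit here, I'd appreciate hearing about it.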