You'll have to be more specific about what you're doing: model, schema, and so on.
Perhaps you aren't iterating through the output list to actually find the message content, aren't skipping reasoning or encrypted-content items, aren't checking the response status and item types, or aren't requesting enough output tokens for a reasoning model?
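For reference, the kind of result parsing I mean looks roughly like this. It's only a sketch, assuming you already have the final response body parsed into a dict (called data here); it keeps only output_text parts from message items, skips reasoning items, and checks for an incomplete status:

# Sketch: extract visible text from a Responses API result dict `data`
texts = []
for item in data.get("output", []):
    if item.get("type") != "message":
        continue  # skip "reasoning" and any other non-message items
    for part in item.get("content", []):
        if part.get("type") == "output_text":
            texts.append(part["text"])
if data.get("status") == "incomplete":
    # e.g. max_output_tokens was hit before any visible text was produced
    print("incomplete:", data.get("incomplete_details"))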
I typed up a procedural demo from the info you furnished: an "array" schema and a background request. You can walk through every step, or swap in your own payload, and see whether the problem is your code or the input being sent.
It also skips polling when the initial response already comes back in a terminal state (i.e., with background disabled).
import os, sys, json, time, logging, httpx
logger = logging.getLogger(__name__)
def get_api_key_headers():
return {"Authorization": f"Bearer {os.environ.get("OPENAI_API_KEY")}"}
def poll_response(response_id: str, timeout: float = 1200.0, interval: float = 1.0,
                  printing: bool = True, delay: float = 4.0,
                  ) -> httpx.Response | None:
    """
    Sleep `delay` seconds, then poll GET /v1/responses/{response_id}, waiting
    `interval` seconds between requests (backing off by 0.1s each iteration),
    until the status leaves 'queued'/'in_progress' or `timeout` seconds elapse.
    Returns the final httpx.Response on success; logs a warning and returns None on
    timeout or HTTP/client errors.
    status: one of [completed, failed, in_progress, cancelled, queued, incomplete]
    """
if not response_id:
raise ValueError("No response ID received by poll_response")
url = f"https://api.openai.com/v1/responses/{response_id}"
deadline = time.monotonic() + timeout
response = None
time.sleep(delay)
try:
with httpx.Client(timeout=20) as client:
while True:
time.sleep(interval)
                interval += 0.1  # back off a bit so we aren't still hitting the API every second at the 20-minute mark
response = client.get(url, headers=get_api_key_headers())
response.raise_for_status()
data = response.json()
status = data.get("status")
if printing:
print(f"poll status: {status}...")
if status not in ("queued", "in_progress"):
if status in ("failed", "cancelled"):
raise RuntimeError(f"Poll failed {response_id}:\n {status}")
logger.info("Response ID reached terminal state: %s (%s)", response_id, status)
return response
if time.monotonic() >= deadline:
raise TimeoutError(f"Poll {timeout:.3f}s timeout {response_id}:\n {status}")
except Exception as exc:
try:
msg = str(response.status_code) + ": " \
+ json.loads(response.content.decode())["error"]["message"]
except Exception:
msg = str(exc)
logger.warning("Couldn't poll %s:\n %s", response_id, msg)
def delete_response(response_id: str) -> None:
if response_id:
try:
with httpx.Client(timeout=20) as client:
response = client.delete(
f"https://api.openai.com/v1/responses/{response_id}",
headers=get_api_key_headers(),
)
response.raise_for_status()
logger.info("Response ID deleted: %s", response_id)
        except Exception as exc:
            try:
                msg = str(response.status_code) + ": " \
                      + json.loads(response.content.decode())["error"]["message"]
            except Exception:
                msg = str(exc)  # no HTTP response body to report (e.g. network error)
            logger.warning("Couldn't delete %s:\n %s", response_id, msg)
## ======== a schema string (like a paste from the playground JSON) ==========
schema_format = r"""
{
"schema": {
"type": "object",
"properties": {
"answers": {
"type": "array",
"description": "List of answer strings.",
"items": {
"type": "string",
"description": "An answer string."
}
}
},
"required": [
"answers"
],
"additionalProperties": false
},
"strict": true
}
""".strip()
## ======== a schema container (for reference; the payload below rebuilds its "format" inline) ==========
text_param = {
"format": {
"type": "json_schema",
"name": "json_response",
"description": "Ordered lists in JSON sent to API; max items = 100",
**json.loads(schema_format)
},
"verbosity": None
}
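If you want to see how the playground-style container ends up merged into the request's format object (the "schema" and "strict" keys landing alongside type/name/description), just print it:

# Inspect the merged format object that the request carries under "text"
print(json.dumps(text_param["format"], indent=2))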
## ======== API call construction ==========
model = "gpt-5-mini"
payload = {
"model": model,
"instructions": ("You are a non-conversational AI that produces JSON arrays of items sent to an API."
"\nAutomatically fulfill natural-language queries requesting any generated lists."),
"input": "List twenty dog breeds, ranked descending by US popularity.",
"background": True,
# "store": False, # background mode must "store"
"max_output_tokens": 2000,
"text": {"format": {
"type": "json_schema",
"name": "json_response",
"description": "Ordered lists in JSON sent to API; max items = 100",
**json.loads(schema_format)
}},
}
print("\n---\n\nRequest JSON:\n" + json.dumps(payload, indent=2))
## ======== API call - create ==========
try:
response = httpx.post(
"https://api.openai.com/v1/responses",
json=payload,
headers={**get_api_key_headers()},
timeout=600, # set lower if doing background only
)
response.raise_for_status()
    response_headers = {k: v for k, v in response.headers.items() if k.startswith('x-')}  # e.g. x-request-id, useful for diagnostics
response_id = response.json().get("id")
response_status = response.json().get("status")
print(f"\n---\n\n{model} Initial Response JSON:\n" + json.dumps(response.json(), indent=2))
### - Expected parsing from initial non-background response
assistant_texts = [
content["text"]
for output in response.json().get("output", [])
for content in output.get("content", [])
if content.get("type") == "output_text" and "text" in content
]
print("\n---\n\nInitial response text:\n" + str(assistant_texts))
except Exception as exc:
    try:
        print(response.status_code,
              json.loads(response.content.decode())["error"]["message"])
    except Exception:
        print(exc)  # no HTTP response body to show (e.g. connection error before a reply)
    raise
if response_status in ("queued", "in_progress"):
response = None; assistant_texts = None
## ======== API call - poll if initial is not terminal, return final ==========
response = poll_response(response_id, timeout=60.0, interval=0)
elif response_status in ("failed", "cancelled"):
raise RuntimeError(f"Inicial call failed failed {response_id}:\n {response_status}")
else:
print(f"Was this status incomplete?: {response_status}")
delete_response(response_id)
if response and response.json():
print(f"\n---\n\n{model} Final Response JSON:\n" + json.dumps(response.json(), indent=2))
assistant_texts = [
content["text"]
for output in response.json().get("output", [])
for content in output.get("content", [])
if content.get("type") == "output_text" and "text" in content
]
print("\n---\n\nCollected response text:\n" + str(assistant_texts))
poll status: queued...
poll status: queued...
poll status: completed...
Collected response text:
['{"answers":["1. Labrador Retriever","2. French Bulldog","3. Golden Retriever","4. German Shepherd Dog","5. Poodle","6. Bulldog","7. Beagle","8. Rottweiler","9. German Shorthaired Pointer","10. Dachshund","11. Pembroke Welsh Corgi","12. Siberian Husky","13. Australian Shepherd","14. Chihuahua","15. Shih Tzu","16. Boston Terrier","17. Bernese Mountain Dog","18. Pomeranian","19. Havanese","20. Shetland Sheepdog"]}']
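Note the collected text is still a JSON string conforming to the schema; you still have to parse it to get the Python list back, e.g.:

# Parse the structured-output string back into Python objects
if assistant_texts:
    parsed = json.loads(assistant_texts[0])  # -> {"answers": [...]}
    answers = parsed["answers"]
    print(f"{len(answers)} answers; first: {answers[0]}")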