
Structured Outputs (JSON Mode)

Quick Start

Basic JSON Mode

from openai import OpenAI

client = OpenAI(
    api_key="YOUR_API_KEY",
    base_url="https://api.haimaker.ai/v1"
)

response = client.chat.completions.create(
    model="openai/gpt-4o-mini",
    response_format={"type": "json_object"},
    messages=[
        {"role": "system", "content": "You are a helpful assistant designed to output JSON."},
        {"role": "user", "content": "Who won the world series in 2020?"}
    ]
)

print(response.choices[0].message.content)
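
With json_object, the model returns a JSON-formatted string in message.content, so you typically parse it before use. A minimal sketch using only the standard library (the key names are illustrative; the exact structure depends on the model's reply):

import json

data = json.loads(response.choices[0].message.content)
print(data)  # parsed dict, e.g. {"winner": "Los Angeles Dodgers"}; exact keys vary by model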

cURL

curl https://api.haimaker.ai/v1/chat/completions \
-H "Content-Type: application/json" \
-H "Authorization: Bearer YOUR_API_KEY" \
-d '{
"model": "openai/gpt-4o-mini",
"response_format": { "type": "json_object" },
"messages": [
{
"role": "system",
"content": "You are a helpful assistant designed to output JSON."
},
{
"role": "user",
"content": "Who won the world series in 2020?"
}
]
}'

Using JSON Schema (Structured Outputs)

For more precise control, pass a JSON schema in the response_format parameter to constrain the output (a raw-dictionary Python sketch follows the provider list below):

response_format: { "type": "json_schema", "json_schema": { "name": ..., "schema": ..., "strict": true } }

This works for:

  • OpenAI models
  • Azure OpenAI models
  • xAI models (Grok-2 or later)
  • Google AI Studio (Gemini) models
  • Vertex AI models (Gemini and Anthropic)
  • Amazon Bedrock models
  • Anthropic API models
  • Groq models
  • Ollama models
  • Databricks models
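
You can also pass the schema as a plain dictionary instead of a Pydantic model. A minimal sketch, assuming an illustrative "country_capital" schema (the schema name and fields are examples, not part of the API):

from openai import OpenAI

client = OpenAI(
    api_key="YOUR_API_KEY",
    base_url="https://api.haimaker.ai/v1"
)

# Illustrative schema: a single object with one required string field.
response = client.chat.completions.create(
    model="openai/gpt-4o",
    response_format={
        "type": "json_schema",
        "json_schema": {
            "name": "country_capital",
            "strict": True,
            "schema": {
                "type": "object",
                "properties": {"capital": {"type": "string"}},
                "required": ["capital"],
                "additionalProperties": False
            }
        }
    },
    messages=[{"role": "user", "content": "What is the capital of France?"}]
)

print(response.choices[0].message.content)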

Python with Pydantic

from openai import OpenAI
from pydantic import BaseModel

client = OpenAI(
    api_key="YOUR_API_KEY",
    base_url="https://api.haimaker.ai/v1"
)

class Step(BaseModel):
    explanation: str
    output: str

class MathReasoning(BaseModel):
    steps: list[Step]
    final_answer: str

completion = client.beta.chat.completions.parse(
    model="openai/gpt-4o",
    messages=[
        {"role": "system", "content": "You are a helpful math tutor. Guide the user through the solution step by step."},
        {"role": "user", "content": "how can I solve 8x + 7 = -23"}
    ],
    response_format=MathReasoning,
)

math_reasoning = completion.choices[0].message.parsed
print(math_reasoning)
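
Because parsed is an instance of MathReasoning, you can work with its fields directly rather than re-parsing JSON. A short usage sketch, assuming the model returned the requested structure:

for step in math_reasoning.steps:
    print(f"{step.explanation} -> {step.output}")
print(f"Final answer: {math_reasoning.final_answer}")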

cURL with JSON Schema

curl https://api.haimaker.ai/v1/chat/completions \
-H "Content-Type: application/json" \
-H "Authorization: Bearer YOUR_API_KEY" \
-d '{
"model": "openai/gpt-4o",
"messages": [
{
"role": "system",
"content": "You are a helpful math tutor. Guide the user through the solution step by step."
},
{
"role": "user",
"content": "how can I solve 8x + 7 = -23"
}
],
"response_format": {
"type": "json_schema",
"json_schema": {
"name": "math_reasoning",
"schema": {
"type": "object",
"properties": {
"steps": {
"type": "array",
"items": {
"type": "object",
"properties": {
"explanation": { "type": "string" },
"output": { "type": "string" }
},
"required": ["explanation", "output"],
"additionalProperties": false
}
},
"final_answer": { "type": "string" }
},
"required": ["steps", "final_answer"],
"additionalProperties": false
},
"strict": true
}
}
}'

Using Different Models

Anthropic Claude

from openai import OpenAI

client = OpenAI(
    api_key="YOUR_API_KEY",
    base_url="https://api.haimaker.ai/v1"
)

response = client.chat.completions.create(
    model="anthropic/claude-3-7-sonnet-latest",
    response_format={"type": "json_object"},
    messages=[
        {"role": "system", "content": "You are a helpful assistant designed to output JSON."},
        {"role": "user", "content": "List 3 famous scientists and their discoveries."}
    ]
)

print(response.choices[0].message.content)

Google Gemini

from openai import OpenAI

client = OpenAI(
    api_key="YOUR_API_KEY",
    base_url="https://api.haimaker.ai/v1"
)

response = client.chat.completions.create(
    model="gemini/gemini-1.5-pro",
    response_format={"type": "json_object"},
    messages=[
        {"role": "system", "content": "You are a helpful assistant designed to output JSON."},
        {"role": "user", "content": "List 3 programming languages and their use cases."}
    ]
)

print(response.choices[0].message.content)

Event Extraction Example

from openai import OpenAI
from pydantic import BaseModel

client = OpenAI(
    api_key="YOUR_API_KEY",
    base_url="https://api.haimaker.ai/v1"
)

class CalendarEvent(BaseModel):
    name: str
    date: str
    participants: list[str]

class EventsList(BaseModel):
    events: list[CalendarEvent]

completion = client.beta.chat.completions.parse(
    model="openai/gpt-4o",
    messages=[
        {"role": "system", "content": "Extract event information."},
        {"role": "user", "content": "Alice and Bob are going to a science fair on Friday. Then on Saturday, Charlie is hosting a birthday party."}
    ],
    response_format=EventsList,
)

events = completion.choices[0].message.parsed
for event in events.events:
    print(f"Event: {event.name}, Date: {event.date}, Participants: {event.participants}")