Skip to main content

Batches API

Covers Batches, Files

Quick Start​

Create a batch of completion requests, then retrieve the batch and its file content.

  • Create File for Batch Completion

  • Create Batch Request

  • Retrieve the Specific Batch and File Content

Create File for Batch Completion

from litellm
import os

os.environ["OPENAI_API_KEY"] = "sk-.."

file_name = "openai_batch_completions.jsonl"
_current_dir = os.path.dirname(os.path.abspath(__file__))
file_path = os.path.join(_current_dir, file_name)
file_obj = await litellm.acreate_file(
file=open(file_path, "rb"),
purpose="batch",
custom_llm_provider="openai",
)
print("Response from creating file=", file_obj)

Create Batch Request

from litellm
import os

create_batch_response = await litellm.acreate_batch(
completion_window="24h",
endpoint="/v1/chat/completions",
input_file_id=batch_input_file_id,
custom_llm_provider="openai",
metadata={"key1": "value1", "key2": "value2"},
)

print("response from litellm.create_batch=", create_batch_response)

Retrieve the Specific Batch and File Content


retrieved_batch = await litellm.aretrieve_batch(
batch_id=create_batch_response.id, custom_llm_provider="openai"
)
print("retrieved batch=", retrieved_batch)
# just assert that we retrieved a non None batch

assert retrieved_batch.id == create_batch_response.id

# try to get file content for our original file

file_content = await litellm.afile_content(
file_id=batch_input_file_id, custom_llm_provider="openai"
)

print("file content = ", file_content)

👉 Proxy API Reference​