github.com/openai/openai-go
module
Version: v0.1.0-alpha.41
Repository: https://github.com/openai/openai-go.git
Documentation: pkg.go.dev

# README

OpenAI Go API Library


[!WARNING] This release is currently in alpha. Minor breaking changes may occur.

The OpenAI Go library provides convenient access to the OpenAI REST API from applications written in Go. The full API of this library can be found in api.md.

Installation

import (
	"github.com/openai/openai-go" // imported as openai
)

Or to pin the version:

go get -u 'github.com/openai/openai-go@v0.1.0-alpha.41'

Requirements

This library requires Go 1.18+.

Usage

The full API of this library can be found in api.md.

See the examples directory for complete and runnable examples.

package main

import (
	"context"

	"github.com/openai/openai-go"
	"github.com/openai/openai-go/option"
)

func main() {
	client := openai.NewClient(
		option.WithAPIKey("My API Key"), // defaults to os.LookupEnv("OPENAI_API_KEY")
	)
	chatCompletion, err := client.Chat.Completions.New(context.TODO(), openai.ChatCompletionNewParams{
		Messages: openai.F([]openai.ChatCompletionMessageParamUnion{
			 openai.UserMessage("Say this is a test"),
		}),
		Model: openai.F(openai.ChatModelGPT4o),
	})
	if err != nil {
		panic(err.Error())
	}
	println(chatCompletion.Choices[0].Message.Content)
}

Conversations
param := openai.ChatCompletionNewParams{
	Messages: openai.F([]openai.ChatCompletionMessageParamUnion{
		openai.UserMessage("What kind of houseplant is easy to take care of?"),
  	}),
	Seed:     openai.Int(1),
	Model:    openai.F(openai.ChatModelGPT4o),
}

completion, err := client.Chat.Completions.New(ctx, param)

param.Messages.Value = append(param.Messages.Value, completion.Choices[0].Message)
param.Messages.Value = append(param.Messages.Value, openai.UserMessage("How big are those?"))

// continue the conversation
completion, err = client.Chat.Completions.New(ctx, param)
Streaming responses
question := "Write an epic"

stream := client.Chat.Completions.NewStreaming(ctx, openai.ChatCompletionNewParams{
	Messages: openai.F([]openai.ChatCompletionMessageParamUnion{
		openai.UserMessage(question),
	}),
	Seed:  openai.Int(0),
	Model: openai.F(openai.ChatModelGPT4o),
})

// optionally, an accumulator helper can be used
acc := openai.ChatCompletionAccumulator{}

for stream.Next() {
	chunk := stream.Current()
	acc.AddChunk(chunk)

	if content, ok := acc.JustFinishedContent(); ok {
		println("Content stream finished:", content)
	}

	// if using tool calls
	if tool, ok := acc.JustFinishedToolCall(); ok {
		println("Tool call stream finished:", tool.Index, tool.Name, tool.Arguments)
	}

	if refusal, ok := acc.JustFinishedRefusal(); ok {
		println("Refusal stream finished:", refusal)
	}

	// it's best to use chunks after handling JustFinished events
	if len(chunk.Choices) > 0 {
		println(chunk.Choices[0].Delta.Content)
	}
}

if err := stream.Err(); err != nil {
	panic(err)
}

// After the stream is finished, acc can be used like a ChatCompletion
_ = acc.Choices[0].Message.Content

See the full streaming and accumulation example

Tool calling
import (
	"encoding/json"
	// ...
)

// ...

question := "What is the weather in New York City?"

params := openai.ChatCompletionNewParams{
	Messages: openai.F([]openai.ChatCompletionMessageParamUnion{
		openai.UserMessage(question),
	}),
	Tools: openai.F([]openai.ChatCompletionToolParam{
		{
			Type: openai.F(openai.ChatCompletionToolTypeFunction),
			Function: openai.F(openai.FunctionDefinitionParam{
				Name:        openai.String("get_weather"),
				Description: openai.String("Get weather at the given location"),
				Parameters: openai.F(openai.FunctionParameters{
					"type": "object",
					"properties": map[string]interface{}{
						"location": map[string]string{
							"type": "string",
						},
					},
					"required": []string{"location"},
				}),
			}),
		},
	}),
	Model: openai.F(openai.ChatModelGPT4o),
}

// chat completion request with tool calls
completion, _ := client.Chat.Completions.New(ctx, params)

for _, toolCall := range completion.Choices[0].Message.ToolCalls {
	if toolCall.Function.Name == "get_weather" {
		// extract the location from the function call arguments
		var args map[string]interface{}
		_ = json.Unmarshal([]byte(toolCall.Function.Arguments), &args)

		// call a weather API with the arguments requested by the model
		weatherData := getWeather(args["location"].(string))
		params.Messages.Value = append(params.Messages.Value, openai.ToolMessage(toolCall.ID, weatherData))
	}
}

// ... continue the conversation with the information provided by the tool

See the full tool calling example

Structured outputs
import (
	"encoding/json"
	"github.com/invopop/jsonschema"
	// ...
)

// A struct that will be converted to a Structured Outputs response schema
type HistoricalComputer struct {
	Origin       Origin   `json:"origin" jsonschema_description:"The origin of the computer"`
	Name         string   `json:"full_name" jsonschema_description:"The name of the device model"`
	NotableFacts []string `json:"notable_facts" jsonschema_description:"A few key facts about the computer"`
}

type Origin struct {
	YearBuilt    int64  `json:"year_of_construction" jsonschema_description:"The year it was made"`
	Organization string `json:"organization" jsonschema_description:"The organization that was in charge of its development"`
}

func GenerateSchema[T any]() interface{} {
	reflector := jsonschema.Reflector{
		AllowAdditionalProperties: false,
		DoNotReference:            true,
	}
	var v T
	schema := reflector.Reflect(v)
	return schema
}

// Generate the JSON schema at initialization time
var HistoricalComputerResponseSchema = GenerateSchema[HistoricalComputer]()

func main() {

	// ...

	question := "What computer ran the first neural network?"

	schemaParam := openai.ResponseFormatJSONSchemaJSONSchemaParam{
		Name:        openai.F("biography"),
		Description: openai.F("Notable information about a person"),
		Schema:      openai.F(HistoricalComputerResponseSchema),
		Strict:      openai.Bool(true),
	}

	chat, _ := client.Chat.Completions.New(ctx, openai.ChatCompletionNewParams{
		// ...
		ResponseFormat: openai.F[openai.ChatCompletionNewParamsResponseFormatUnion](
			openai.ResponseFormatJSONSchemaParam{
				Type:       openai.F(openai.ResponseFormatJSONSchemaTypeJSONSchema),
				JSONSchema: openai.F(schemaParam),
			},
		),
		// only certain models can perform structured outputs
		Model: openai.F(openai.ChatModelGPT4o2024_08_06),
	})

	// extract into a well-typed struct
	historicalComputer := HistoricalComputer{}
	_ = json.Unmarshal([]byte(chat.Choices[0].Message.Content), &historicalComputer)

	// Use the structured response like a regular Go struct
	println(historicalComputer.Name)
	println(historicalComputer.Origin.YearBuilt)
	println(historicalComputer.Origin.Organization)
	for _, fact := range historicalComputer.NotableFacts {
		println(fact)
	}
}

See the full structured outputs example

Request fields

All request parameters are wrapped in a generic Field type, which we use to distinguish zero values from null or omitted fields.

This prevents accidentally sending a zero value if you forget a required parameter, and enables explicitly sending null, false, "", or 0 on optional parameters. Any field not specified is not sent.

To construct fields with values, use the helpers String(), Int(), Float(), or most commonly, the generic F[T](). To send a null, use Null[T](), and to send a nonconforming value, use Raw[T](any). For example:

params := FooParams{
	Name: openai.F("hello"),

	// Explicitly send `"description": null`
	Description: openai.Null[string](),

	Point: openai.F(openai.Point{
		X: openai.Int(0),
		Y: openai.Int(1),

		// In cases where the API specifies a given type,
		// but you want to send something else, use `Raw`:
		Z: openai.Raw[int64](0.01), // sends a float
	}),
}

Response objects

All fields in response structs are value types (not pointers or wrappers).

If a given field is null, not present, or invalid, the corresponding field will simply be its zero value.

All response structs also include a special JSON field, containing more detailed information about each property, which you can use like so:

if res.Name == "" {
	// true if `"name"` is either not present or explicitly null
	res.JSON.Name.IsNull()

	// true if the `"name"` key was not present in the response JSON at all
	res.JSON.Name.IsMissing()

	// When the API returns data that cannot be coerced to the expected type:
	if res.JSON.Name.IsInvalid() {
		raw := res.JSON.Name.Raw()

		legacyName := struct{
			First string `json:"first"`
			Last  string `json:"last"`
		}{}
		json.Unmarshal([]byte(raw), &legacyName)
		name = legacyName.First + " " + legacyName.Last
	}
}

These .JSON structs also include an ExtraFields map containing any properties in the JSON response that were not specified in the struct. This can be useful for API features not yet present in the SDK.

body := res.JSON.ExtraFields["my_unexpected_field"].Raw()

RequestOptions

This library uses the functional options pattern. Functions defined in the option package return a RequestOption, which is a closure that mutates a RequestConfig. These options can be supplied to the client or at individual requests. For example:

client := openai.NewClient(
	// Adds a header to every request made by the client
	option.WithHeader("X-Some-Header", "custom_header_info"),
)

client.Chat.Completions.New(context.TODO(), ...,
	// Override the header
	option.WithHeader("X-Some-Header", "some_other_custom_header_info"),
	// Add an undocumented field to the request body, using sjson syntax
	option.WithJSONSet("some.json.path", map[string]string{"my": "object"}),
)

See the full list of request options.

Pagination

This library provides some conveniences for working with paginated list endpoints.

You can use .ListAutoPaging() methods to iterate through items across all pages:

iter := client.FineTuning.Jobs.ListAutoPaging(context.TODO(), openai.FineTuningJobListParams{
	Limit: openai.F(int64(20)),
})
// Automatically fetches more pages as needed.
for iter.Next() {
	fineTuningJob := iter.Current()
	fmt.Printf("%+v\n", fineTuningJob)
}
if err := iter.Err(); err != nil {
	panic(err.Error())
}

Or you can use simple .List() methods to fetch a single page and receive a standard response object with additional helper methods like .GetNextPage(), e.g.:

page, err := client.FineTuning.Jobs.List(context.TODO(), openai.FineTuningJobListParams{
	Limit: openai.F(int64(20)),
})
for page != nil {
	for _, job := range page.Data {
		fmt.Printf("%+v\n", job)
	}
	page, err = page.GetNextPage()
}
if err != nil {
	panic(err.Error())
}

Errors

When the API returns a non-success status code, we return an error with type *openai.Error. This contains the StatusCode, *http.Request, and *http.Response values of the request, as well as the JSON of the error body (much like other response objects in the SDK).

To handle errors, we recommend that you use the errors.As pattern:

_, err := client.FineTuning.Jobs.New(context.TODO(), openai.FineTuningJobNewParams{
	Model:        openai.F(openai.FineTuningJobNewParamsModelBabbage002),
	TrainingFile: openai.F("file-abc123"),
})
if err != nil {
	var apierr *openai.Error
	if errors.As(err, &apierr) {
		println(string(apierr.DumpRequest(true)))  // Prints the serialized HTTP request
		println(string(apierr.DumpResponse(true))) // Prints the serialized HTTP response
	}
	panic(err.Error()) // POST "/fine_tuning/jobs": 400 Bad Request { ... }
}

When other errors occur, they are returned unwrapped; for example, if HTTP transport fails, you might receive *url.Error wrapping *net.OpError.
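
For instance, here is a minimal sketch of inspecting such a transport error with standard-library types only; the variable names and the specific call are illustrative:

_, err := client.Chat.Completions.New(ctx, params)
if err != nil {
	var urlErr *url.Error // from net/url
	if errors.As(err, &urlErr) {
		// The HTTP transport failed, e.g. a DNS error or a refused connection
		println("transport error calling", urlErr.URL+":", urlErr.Err.Error())
	}
}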

Timeouts

Requests do not time out by default; use context to configure a timeout for a request lifecycle.

Note that if a request is retried, the context timeout does not start over. To set a per-retry timeout, use option.WithRequestTimeout().

// This sets the timeout for the request, including all the retries.
ctx, cancel := context.WithTimeout(context.Background(), 5*time.Minute)
defer cancel()
client.Chat.Completions.New(
	ctx,
	openai.ChatCompletionNewParams{
		Messages: openai.F([]openai.ChatCompletionMessageParamUnion{
			 openai.UserMessage("Say this is a test"),
		}),
		Model: openai.F(openai.ChatModelGPT4o),
	},
	// This sets the per-retry timeout
	option.WithRequestTimeout(20*time.Second),
)

File uploads

Request parameters that correspond to file uploads in multipart requests are typed as param.Field[io.Reader]. The contents of the io.Reader will by default be sent as a multipart form part with the file name of "anonymous_file" and content-type of "application/octet-stream".

The file name and content-type can be customized by implementing Name() string or ContentType() string on the run-time type of io.Reader. Note that os.File implements Name() string, so a file returned by os.Open will be sent with the file name on disk.

We also provide a helper openai.FileParam(reader io.Reader, filename string, contentType string) which can be used to wrap any io.Reader with the appropriate file name and content type.

// A file from the file system
file, err := os.Open("input.jsonl")
openai.FileNewParams{
	File:    openai.F[io.Reader](file),
	Purpose: openai.F(openai.FilePurposeFineTune),
}

// A file from a string
openai.FileNewParams{
	File:    openai.F[io.Reader](strings.NewReader("my file contents")),
	Purpose: openai.F(openai.FilePurposeFineTune),
}

// With a custom filename and contentType
openai.FileNewParams{
	File:    openai.FileParam(strings.NewReader(`{"hello": "foo"}`), "file.go", "application/json"),
	Purpose: openai.F(openai.FilePurposeFineTune),
}
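
As a sketch of the Name() / ContentType() customization described above, a caller-defined reader type can carry its own file name and content type; namedReader below is hypothetical and only for illustration:

// namedReader wraps any io.Reader and reports a file name and content type.
type namedReader struct {
	io.Reader
}

func (namedReader) Name() string        { return "training_data.jsonl" }
func (namedReader) ContentType() string { return "application/jsonl" }

// The multipart form part is then sent with these values instead of the defaults.
openai.FileNewParams{
	File:    openai.F[io.Reader](namedReader{strings.NewReader(`{"prompt": "hi"}`)}),
	Purpose: openai.F(openai.FilePurposeFineTune),
}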

Retries

Certain errors will be automatically retried 2 times by default, with a short exponential backoff. We retry by default all connection errors, 408 Request Timeout, 409 Conflict, 429 Rate Limit, and >=500 Internal errors.

You can use the WithMaxRetries option to configure or disable this:

// Configure the default for all requests:
client := openai.NewClient(
	option.WithMaxRetries(0), // default is 2
)

// Override per-request:
client.Chat.Completions.New(
	context.TODO(),
	openai.ChatCompletionNewParams{
		Messages: openai.F([]openai.ChatCompletionMessageParamUnion{
			 openai.UserMessage("Say this is a test"),
		}),
		Model: openai.F(openai.ChatModelGPT4o),
	},
	option.WithMaxRetries(5),
)

Making custom/undocumented requests

This library is typed for convenient access to the documented API. If you need to access undocumented endpoints, params, or response properties, the library can still be used.

Undocumented endpoints

To make requests to undocumented endpoints, you can use client.Get, client.Post, and other HTTP verbs. RequestOptions on the client, such as retries, will be respected when making these requests.

var (
    // params can be an io.Reader, a []byte, an encoding/json serializable object,
    // or a "…Params" struct defined in this library.
    params map[string]interface{}

    // result can be an []byte, *http.Response, a encoding/json deserializable object,
    // or a model defined in this library.
    result *http.Response
)
err := client.Post(context.Background(), "/unspecified", params, &result)
if err != nil {
    …
}

Undocumented request params

To make requests using undocumented parameters, you may use either the option.WithQuerySet() or the option.WithJSONSet() methods.

params := FooNewParams{
    ID:   openai.F("id_xxxx"),
    Data: openai.F(FooNewParamsData{
        FirstName: openai.F("John"),
    }),
}
client.Foo.New(context.Background(), params, option.WithJSONSet("data.last_name", "Doe"))

Undocumented response properties

To access undocumented response properties, you may either access the raw JSON of the response as a string with result.JSON.RawJSON(), or get the raw JSON of a particular field on the result with result.JSON.Foo.Raw().

Any fields that are not present on the response struct will be saved and can be accessed via result.JSON.ExtraFields, which holds the extra fields as a map[string]Field.
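
For example, a brief sketch that combines these accessors on a chat completion response, reusing the ctx, client, and params variables from the earlier examples:

completion, err := client.Chat.Completions.New(ctx, params)
if err != nil {
	panic(err)
}

// Raw JSON of the entire response body
_ = completion.JSON.RawJSON()

// Raw JSON of a single documented field
_ = completion.Choices[0].Message.JSON.Content.Raw()

// A property the SDK does not know about yet
_ = completion.JSON.ExtraFields["my_unexpected_field"].Raw()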

Middleware

We provide option.WithMiddleware which applies the given middleware to requests.

func Logger(req *http.Request, next option.MiddlewareNext) (res *http.Response, err error) {
	// Before the request
	start := time.Now()
	LogReq(req)

	// Forward the request to the next handler
	res, err = next(req)

	// Handle stuff after the request
	end := time.Now()
	LogRes(res, err, end.Sub(start))

	return res, err
}

client := openai.NewClient(
	option.WithMiddleware(Logger),
)

When multiple middlewares are provided as variadic arguments, they are applied left to right. If option.WithMiddleware is given multiple times, for example first on the client and then on a method call, the client's middleware runs first and the method's middleware runs next.

You may also replace the default http.Client with option.WithHTTPClient(client). Only one http client is accepted (this overwrites any previous client) and receives requests after any middleware has been applied.
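
For instance, a sketch of supplying a custom client; the timeout shown is illustrative:

httpClient := &http.Client{
	Timeout: 60 * time.Second, // a transport-level timeout, independent of context deadlines
}

client := openai.NewClient(
	option.WithHTTPClient(httpClient),
	// Middleware registered on the client still wraps requests before they reach httpClient
	option.WithMiddleware(Logger),
)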

Microsoft Azure OpenAI

To use this library with Azure OpenAI, use the option.RequestOption functions in the azure package.

package main

import (
	"fmt"
	"os"

	"github.com/Azure/azure-sdk-for-go/sdk/azidentity"
	"github.com/openai/openai-go"
	"github.com/openai/openai-go/azure"
)

func main() {
	const azureOpenAIEndpoint = "https://<azure-openai-resource>.openai.azure.com"

	// The latest API versions, including previews, can be found here:
	// https://learn.microsoft.com/en-us/azure/ai-services/openai/reference#rest-api-versioning
	const azureOpenAIAPIVersion = "2024-06-01"

	tokenCredential, err := azidentity.NewDefaultAzureCredential(nil)

	if err != nil {
		fmt.Printf("Failed to create the DefaultAzureCredential: %s", err)
		os.Exit(1)
	}

	client := openai.NewClient(
		azure.WithEndpoint(azureOpenAIEndpoint, azureOpenAIAPIVersion),

		// Choose between authenticating using a TokenCredential or an API Key
		azure.WithTokenCredential(tokenCredential),
		// or azure.WithAPIKey(azureOpenAIAPIKey),
	)

	_ = client // use the Azure-configured client as you would the standard client
}

Semantic versioning

This package generally follows SemVer conventions, though certain backwards-incompatible changes may be released as minor versions:

  1. Changes to library internals which are technically public but not intended or documented for external use. (Please open a GitHub issue to let us know if you are relying on such internals).
  2. Changes that we do not expect to impact the vast majority of users in practice.

We take backwards-compatibility seriously and work hard to ensure you can rely on a smooth upgrade experience.

We are keen for your feedback; please open an issue with questions, bugs, or suggestions.

Contributing

See the contributing documentation.

# Packages

Package azure provides configuration options so you can connect and use Azure OpenAI using the [openai.Client].
No description provided by the author
No description provided by the author
No description provided by the author

# Functions

No description provided by the author
Bool is a param field helper which helps specify bools.
F is a param field helper used to initialize a [param.Field] generic struct.
FileParam is a param field helper for uploading a file with a custom file name and MIME content type.
Float is a param field helper which helps specify floats.
No description provided by the author
No description provided by the author
Int is a param field helper which helps specify integers.
NewAudioService generates a new service that applies the given options to each request.
NewAudioSpeechService generates a new service that applies the given options to each request.
NewAudioTranscriptionService generates a new service that applies the given options to each request.
NewAudioTranslationService generates a new service that applies the given options to each request.
NewBatchService generates a new service that applies the given options to each request.
NewBetaAssistantService generates a new service that applies the given options to each request.
NewBetaService generates a new service that applies the given options to each request.
NewBetaThreadMessageService generates a new service that applies the given options to each request.
NewBetaThreadRunService generates a new service that applies the given options to each request.
NewBetaThreadRunStepService generates a new service that applies the given options to each request.
NewBetaThreadService generates a new service that applies the given options to each request.
NewBetaVectorStoreFileBatchService generates a new service that applies the given options to each request.
NewBetaVectorStoreFileService generates a new service that applies the given options to each request.
NewBetaVectorStoreService generates a new service that applies the given options to each request.
NewChatCompletionService generates a new service that applies the given options to each request.
NewChatService generates a new service that applies the given options to each request.
NewClient generates a new client with default options read from the environment (OPENAI_API_KEY, OPENAI_ORG_ID, OPENAI_PROJECT_ID).
NewCompletionService generates a new service that applies the given options to each request.
NewEmbeddingService generates a new service that applies the given options to each request.
NewFileService generates a new service that applies the given options to each request.
NewFineTuningJobCheckpointService generates a new service that applies the given options to each request.
NewFineTuningJobService generates a new service that applies the given options to each request.
NewFineTuningService generates a new service that applies the given options to each request.
NewImageService generates a new service that applies the given options to each request.
NewModelService generates a new service that applies the given options to each request.
NewModerationService generates a new service that applies the given options to each request.
NewUploadPartService generates a new service that applies the given options to each request.
NewUploadService generates a new service that applies the given options to each request.
Null is a param field helper which explicitly sends null to the API.
Raw is a param field helper for specifying values for fields when the type you are looking to send is different from the type that is specified in the SDK.
No description provided by the author
String is a param field helper which helps specify strings.
No description provided by the author
No description provided by the author
No description provided by the author
No description provided by the author
No description provided by the author

# Constants

Individual constant descriptions are not provided by the author; a handful are noted only as aliases to internal values.

# Structs

A citation within the message that points to a specific quote from a specific File associated with the assistant or the message.
A citation within the message that points to a specific quote from a specific File associated with the assistant or the message.
Represents an `assistant` that can call the model and use tools.
No description provided by the author
Represents an event emitted when streaming a Run.
Occurs when an [error](https://platform.openai.com/docs/guides/error-codes#api-errors) occurs.
Occurs when a new [thread](https://platform.openai.com/docs/api-reference/threads/object) is created.
Occurs when a [message](https://platform.openai.com/docs/api-reference/messages/object) is completed.
Occurs when a [message](https://platform.openai.com/docs/api-reference/messages/object) is created.
Occurs when parts of a [Message](https://platform.openai.com/docs/api-reference/messages/object) are being streamed.
Occurs when a [message](https://platform.openai.com/docs/api-reference/messages/object) ends before it is completed.
Occurs when a [message](https://platform.openai.com/docs/api-reference/messages/object) moves to an `in_progress` state.
Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) is cancelled.
Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) moves to a `cancelling` status.
Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) is completed.
Occurs when a new [run](https://platform.openai.com/docs/api-reference/runs/object) is created.
Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) expires.
Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) fails.
Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) ends with status `incomplete`.
Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) moves to an `in_progress` status.
Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) moves to a `queued` status.
Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) moves to a `requires_action` status.
Occurs when a [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) is cancelled.
Occurs when a [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) is completed.
Occurs when a [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) is created.
Occurs when parts of a [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) are being streamed.
Occurs when a [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) expires.
Occurs when a [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) fails.
Occurs when a [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) moves to an `in_progress` state.
No description provided by the author
Specifies a tool the model should use.
No description provided by the author
No description provided by the author
Specifies a tool the model should use.
No description provided by the author
A set of resources that are used by the assistant's tools.
No description provided by the author
No description provided by the author
AudioService contains methods and other services that help with interacting with the openai API.
No description provided by the author
AudioSpeechService contains methods and other services that help with interacting with the openai API.
No description provided by the author
AudioTranscriptionService contains methods and other services that help with interacting with the openai API.
No description provided by the author
AudioTranslationService contains methods and other services that help with interacting with the openai API.
The default strategy.
No description provided by the author
No description provided by the author
No description provided by the author
No description provided by the author
No description provided by the author
The request counts for different statuses within the batch.
BatchService contains methods and other services that help with interacting with the openai API.
No description provided by the author
No description provided by the author
A set of resources that are used by the assistant's tools.
No description provided by the author
No description provided by the author
No description provided by the author
BetaAssistantService contains methods and other services that help with interacting with the openai API.
No description provided by the author
A set of resources that are used by the assistant's tools.
No description provided by the author
No description provided by the author
BetaService contains methods and other services that help with interacting with the openai API.
No description provided by the author
No description provided by the author
No description provided by the author
No description provided by the author
No description provided by the author
BetaThreadMessageService contains methods and other services that help with interacting with the openai API.
No description provided by the author
No description provided by the author
If no thread is provided, an empty thread will be created.
No description provided by the author
No description provided by the author
No description provided by the author
No description provided by the author
A set of resources that are made available to the assistant's tools in this thread.
No description provided by the author
No description provided by the author
No description provided by the author
No description provided by the author
A set of resources that are used by the assistant's tools.
No description provided by the author
No description provided by the author
Controls for how a thread will be truncated prior to the run.
No description provided by the author
No description provided by the author
No description provided by the author
No description provided by the author
No description provided by the author
A set of resources that are made available to the assistant's tools in this thread.
No description provided by the author
No description provided by the author
No description provided by the author
No description provided by the author
No description provided by the author
No description provided by the author
No description provided by the author
No description provided by the author
No description provided by the author
Controls for how a thread will be truncated prior to the run.
BetaThreadRunService contains methods and other services that help with interacting with the openai API.
No description provided by the author
No description provided by the author
BetaThreadRunStepService contains methods and other services that help with interacting with the openai API.
No description provided by the author
No description provided by the author
No description provided by the author
BetaThreadService contains methods and other services that help with interacting with the openai API.
No description provided by the author
A set of resources that are made available to the assistant's tools in this thread.
No description provided by the author
No description provided by the author
No description provided by the author
No description provided by the author
BetaVectorStoreFileBatchService contains methods and other services that help with interacting with the openai API.
No description provided by the author
No description provided by the author
BetaVectorStoreFileService contains methods and other services that help with interacting with the openai API.
No description provided by the author
No description provided by the author
The expiration policy for a vector store.
BetaVectorStoreService contains methods and other services that help with interacting with the openai API.
No description provided by the author
The expiration policy for a vector store.
Represents a chat completion response returned by model, based on the provided input.
Helper to accumulate chunks from a stream.
Messages sent by the model in response to user messages.
Data about a previous audio response from the model.
Learn about [text inputs](https://platform.openai.com/docs/guides/text-generation).
Deprecated and replaced by `tool_calls`.
If the audio output modality is requested, this object contains data about the audio response from the model.
Parameters for audio output.
No description provided by the author
Log probability information for the choice.
Represents a streamed chunk of a chat completion response returned by model, based on the provided input.
No description provided by the author
A chat completion delta generated by streamed model responses.
Deprecated and replaced by `tool_calls`.
No description provided by the author
No description provided by the author
Log probability information for the choice.
No description provided by the author
Learn about [image inputs](https://platform.openai.com/docs/guides/vision).
No description provided by the author
Learn about [audio inputs](https://platform.openai.com/docs/guides/audio).
Learn about [text inputs](https://platform.openai.com/docs/guides/text-generation).
No description provided by the author
Learn about [text inputs](https://platform.openai.com/docs/guides/text-generation).
Developer-provided instructions that the model should follow, regardless of messages sent by the user.
Specifying a particular function via `{"name": "my_function"}` forces the model to call that function.
No description provided by the author
A chat completion message generated by the model.
Deprecated and replaced by `tool_calls`.
Developer-provided instructions that the model should follow, regardless of messages sent by the user.
The remaining struct types cover: chat completion tool definitions, tool choices, response formats, and static predicted output content; the Assistants (Beta) surface, including threads, messages, message deltas, runs, run steps, and the Code Interpreter and file search tool call details; completions and their token usage breakdowns; embeddings; files, file citations, and file search results and ranking options; fine-tuning jobs, job checkpoints, job events, hyperparameters, fine-tuning methods (supervised and DPO), and the Weights and Biases integration settings; function definitions; images; models; moderations (categories, category scores, and the input types each score applies to); audio transcriptions; uploads and upload parts; and vector stores, vector store files, file batches, chunking strategies, and expiration policies. Each REST resource also has a service struct (ChatService, ChatCompletionService, CompletionService, EmbeddingService, FileService, FineTuningService, FineTuningJobService, FineTuningJobCheckpointService, ImageService, ModelService, ModerationService, UploadService, UploadPartService, and so on) that contains methods and other services that help with interacting with the openai API, and Client is the top-level struct that ties these services together.
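As a minimal sketch of this service pattern, reusing the client and ctx from the Usage example above: the embeddings identifiers below (EmbeddingNewParams, EmbeddingNewParamsInputUnion, shared.UnionString, EmbeddingModelTextEmbedding3Small) are assumptions following the library's naming conventions, not verified signatures.

import (
	"github.com/openai/openai-go/shared" // assumed location of the union string helper
	// ...
)

// ...

// Hedged sketch: the param, union, and model constant names are assumed
// from this library's conventions.
resp, err := client.Embeddings.New(ctx, openai.EmbeddingNewParams{
	Input: openai.F[openai.EmbeddingNewParamsInputUnion](
		shared.UnionString("The food was delicious and the service was excellent."),
	),
	Model: openai.F(openai.EmbeddingModelTextEmbedding3Small),
})
if err != nil {
	panic(err.Error())
}

// Each entry in resp.Data holds one embedding vector.
println(len(resp.Data), len(resp.Data[0].Embedding))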

# Interfaces

These interfaces are union types. They include the assistant streaming event union; tool and tool-choice unions satisfied by [CodeInterpreterToolParam], [FileSearchToolParam], and [FunctionToolParam] (plus the Beta thread, message, and run attachment variants); message content unions for file citations, image files, and text; chat completion request unions such as the response format, stop sequences, developer-provided instructions, and a deprecated predecessor of `tool_choice`; the completion prompt union, which accepts a string, an array of strings, an array of tokens, or an array of token arrays; the embedding input union, which accepts a string or an array of tokens; vector store chunking strategy unions; fine-tuning hyperparameter unions for batch size, learning rate multiplier, number of epochs, and the DPO beta value; moderation input unions for text and image inputs; and run step detail and Code Interpreter tool call output unions.
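For example, the completion prompt union can be satisfied with a plain string. A hedged sketch, assuming CompletionNewParams, CompletionNewParamsModel, CompletionNewParamsPromptUnion, and shared.UnionString follow the library's usual naming (the model is passed here as a string conversion rather than a named constant):

// Hedged sketch: the param and union type names are assumptions.
completion, err := client.Completions.New(ctx, openai.CompletionNewParams{
	Model: openai.F(openai.CompletionNewParamsModel("gpt-3.5-turbo-instruct")),
	Prompt: openai.F[openai.CompletionNewParamsPromptUnion](
		shared.UnionString("Say this is a test"),
	),
	MaxTokens: openai.Int(16),
})
if err != nil {
	panic(err.Error())
}
println(completion.Choices[0].Text)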

# Type aliases

Most of these aliases are enum-like string types, and several are aliases to internal types: object type discriminators such as `assistant`, `batch`, `chat.completion`, `chat.completion.chunk`, `text_completion`, `embedding`, `list`, `file`, `fine_tuning.job`, `fine_tuning.job.event`, `fine_tuning.job.checkpoint`, `model`, `thread`, `thread.message`, `thread.message.delta`, `thread.run`, `thread.run.step`, `thread.run.step.delta`, `upload`, `upload.part`, `vector_store`, `vector_store.file`, and `vector_store.file_batch`; message author roles (`developer`, `system`, `user`, `assistant`, `tool`, `function`); tool and tool call types (`code_interpreter`, `file_search`, `function`); content part types (`text`, `image_file`, `image_url`, `refusal`, `logs`, `image`, `file_citation`, `file_path`); run step types (`message_creation`, `tool_calls`); finish reasons and lifecycle statuses for runs, run steps, messages, batches, fine-tuning jobs, uploads, vector stores, and vector store files; incomplete reasons and error codes such as `server_error`, `rate_limit_exceeded`, and `invalid_prompt`; response format types (`text`, `json_object`, `json_schema`); tool-choice options such as `none`; sort orders on the `created_at` timestamp and status filters for list endpoints; model identifiers; function parameter schemas described as JSON Schema objects; transcription output formats (`json`, `text`, `srt`, `verbose_json`, `vtt`), audio output formats, and voices; image size, quality, style, detail, and response format options; embedding encoding formats; batch endpoints and completion windows; file purposes; fine-tuning log levels, event types, method types, and integration types; service tiers and the reasoning effort setting for o1 models; thread truncation strategies; chunking strategy markers (`auto`, `static`); and vector store expiration policy anchors.
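Because these aliases have string underlying types, response fields typed with them can be compared or switched on against the documented values directly. A small sketch, assuming the chat completion choice exposes a FinishReason field corresponding to the REST API's finish_reason:

// Hedged sketch: FinishReason is the assumed Go field for finish_reason.
switch completion.Choices[0].FinishReason {
case "stop":
	println("the model finished naturally")
case "length":
	println("generation hit the token limit")
case "tool_calls":
	println("the model asked to call a tool")
}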