feature: Supports Entra/AD auth for Azure OpenAI #1062

Status: Open · wants to merge 4 commits into base: main
5 changes: 3 additions & 2 deletions README.md
@@ -468,8 +468,9 @@ For each AI provider, you need to set the corresponding API key in your environm
export OPENAI_API_KEY="your-api-key-here"

# Azure OpenAI
export AZURE_OPENAI_API_KEY="your-azure-api-key-here"
export AZURE_OPENAI_API_VERSION="2025-03-01-preview" (Optional)
export AZURE_OPENAI_API_KEY="your-azure-api-key-here" # (optional; uses Entra authentication if not set)
export AZURE_OPENAI_API_VERSION="2025-03-01-preview" # (optional)
export AZURE_OPENAI_DEPLOYMENT="my-deployment" # (optional; set this if your deployment name does not match the model name)

# OpenRouter
export OPENROUTER_API_KEY="your-openrouter-key-here"
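The new README lines make the API key optional for Azure. As a hedged sketch of what that fallback amounts to (not the PR's code; `<your-resource>` is a placeholder endpoint), the client prefers `AZURE_OPENAI_API_KEY` and otherwise authenticates with Microsoft Entra ID via `DefaultAzureCredential`:

```typescript
import { DefaultAzureCredential, getBearerTokenProvider } from "@azure/identity";
import { AzureOpenAI } from "openai";

const endpoint = "https://<your-resource>.openai.azure.com"; // placeholder
const apiVersion = process.env["AZURE_OPENAI_API_VERSION"] ?? "2025-03-01-preview";
const deployment = process.env["AZURE_OPENAI_DEPLOYMENT"]; // optional
const apiKey = process.env["AZURE_OPENAI_API_KEY"];

// Prefer the key when present; otherwise mint Entra ID tokens via
// DefaultAzureCredential (az login, managed identity, workload identity, ...).
const client = apiKey
  ? new AzureOpenAI({ apiKey, endpoint, apiVersion, deployment })
  : new AzureOpenAI({
      azureADTokenProvider: getBearerTokenProvider(
        new DefaultAzureCredential(),
        "https://cognitiveservices.azure.com/.default",
      ),
      endpoint,
      apiVersion,
      deployment,
    });
```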
1 change: 1 addition & 0 deletions codex-cli/package.json
@@ -27,6 +27,7 @@
"dist"
],
"dependencies": {
"@azure/identity": "^4.10.0",
"@inkjs/ui": "^2.0.0",
"chalk": "^5.2.0",
"diff": "^7.0.0",
4 changes: 2 additions & 2 deletions codex-cli/src/cli.tsx
@@ -340,7 +340,7 @@ if (cli.flags.login) {
} catch {
/* ignore */
}
} else if (!apiKey) {
} else if (!apiKey && provider.toLowerCase() === "openai") {
apiKey = await fetchApiKey(client.issuer, client.client_id);
}
// Ensure the API key is available as an environment variable for legacy code
@@ -363,7 +363,7 @@ if (cli.flags.free) {
}

// Set of providers that don't require API keys
const NO_API_KEY_REQUIRED = new Set(["ollama"]);
const NO_API_KEY_REQUIRED = new Set(["ollama", "azure"]);

// Skip API key validation for providers that don't require an API key
if (!apiKey && !NO_API_KEY_REQUIRED.has(provider.toLowerCase())) {
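Taken together, these two hunks restrict the login-based key fetch to the OpenAI provider and let Azure run without a key. A minimal sketch of the resulting check, simplified from the CLI's actual flow (`requiresApiKey` is a hypothetical helper, not a function in the repo):

```typescript
// Providers that may run without an explicit API key.
const NO_API_KEY_REQUIRED = new Set(["ollama", "azure"]);

// Hypothetical helper mirroring the validation in cli.tsx.
function requiresApiKey(provider: string, apiKey: string | undefined): boolean {
  return !apiKey && !NO_API_KEY_REQUIRED.has(provider.toLowerCase());
}

requiresApiKey("azure", undefined);  // false: Entra ID auth can be used instead
requiresApiKey("openai", undefined); // true: the CLI still expects a key or login
```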
66 changes: 8 additions & 58 deletions codex-cli/src/utils/agent/agent-loop.ts
@@ -13,13 +13,6 @@ import type {
import type { Reasoning } from "openai/resources.mjs";

import { CLI_VERSION } from "../../version.js";
import {
OPENAI_TIMEOUT_MS,
OPENAI_ORGANIZATION,
OPENAI_PROJECT,
getBaseUrl,
AZURE_OPENAI_API_VERSION,
} from "../config.js";
import { log } from "../logger/log.js";
import { parseToolCallArguments } from "../parsers.js";
import { responsesCreateViaChatCompletions } from "../responses.js";
@@ -31,10 +24,10 @@ import {
} from "../session.js";
import { applyPatchToolInstructions } from "./apply-patch.js";
import { handleExecCommand } from "./handle-exec-command.js";
import { HttpsProxyAgent } from "https-proxy-agent";
import { createOpenAIClient } from "../openai-client.js";
import { spawnSync } from "node:child_process";
import { randomUUID } from "node:crypto";
import OpenAI, { APIConnectionTimeoutError, AzureOpenAI } from "openai";
import OpenAI, { APIConnectionTimeoutError } from "openai";
import os from "os";

// Wait time before retrying after rate limit errors (ms).
@@ -43,9 +36,6 @@ const RATE_LIMIT_RETRY_WAIT_MS = parseInt(
10,
);

// See https://github.com/openai/openai-node/tree/v4?tab=readme-ov-file#configuring-an-https-agent-eg-for-proxies
const PROXY_URL = process.env["HTTPS_PROXY"];

export type CommandConfirmation = {
review: ReviewDecision;
applyPatch?: ApplyPatchCommand | undefined;
@@ -115,7 +105,6 @@ const localShellTool: Tool = {

export class AgentLoop {
private model: string;
private provider: string;
private instructions?: string;
private approvalPolicy: ApprovalPolicy;
private config: AppConfig;
@@ -283,7 +272,6 @@ export class AgentLoop {
additionalWritableRoots,
}: AgentLoopParams & { config?: AppConfig }) {
this.model = model;
this.provider = provider;
this.instructions = instructions;
this.approvalPolicy = approvalPolicy;

@@ -293,6 +281,7 @@
// `instructions` that have already been passed explicitly so that
// downstream consumers (e.g. telemetry) still observe the correct values.
this.config = config ?? {
provider,
model,
instructions: instructions ?? "",
};
@@ -304,51 +293,12 @@

this.disableResponseStorage = disableResponseStorage ?? false;
this.sessionId = getSessionId() || randomUUID().replaceAll("-", "");
// Configure OpenAI client with optional timeout (ms) from environment
const timeoutMs = OPENAI_TIMEOUT_MS;
const apiKey = this.config.apiKey ?? process.env["OPENAI_API_KEY"] ?? "";
const baseURL = getBaseUrl(this.provider);

this.oai = new OpenAI({
// The OpenAI JS SDK only requires `apiKey` when making requests against
// the official API. When running unit‑tests we stub out all network
// calls so an undefined key is perfectly fine. We therefore only set
// the property if we actually have a value to avoid triggering runtime
// errors inside the SDK (it validates that `apiKey` is a non‑empty
// string when the field is present).
...(apiKey ? { apiKey } : {}),
baseURL,
defaultHeaders: {
originator: ORIGIN,
version: CLI_VERSION,
session_id: this.sessionId,
...(OPENAI_ORGANIZATION
? { "OpenAI-Organization": OPENAI_ORGANIZATION }
: {}),
...(OPENAI_PROJECT ? { "OpenAI-Project": OPENAI_PROJECT } : {}),
},
httpAgent: PROXY_URL ? new HttpsProxyAgent(PROXY_URL) : undefined,
...(timeoutMs !== undefined ? { timeout: timeoutMs } : {}),
});

if (this.provider.toLowerCase() === "azure") {
this.oai = new AzureOpenAI({
apiKey,
baseURL,
apiVersion: AZURE_OPENAI_API_VERSION,
defaultHeaders: {
originator: ORIGIN,
version: CLI_VERSION,
session_id: this.sessionId,
...(OPENAI_ORGANIZATION
? { "OpenAI-Organization": OPENAI_ORGANIZATION }
: {}),
...(OPENAI_PROJECT ? { "OpenAI-Project": OPENAI_PROJECT } : {}),
},
httpAgent: PROXY_URL ? new HttpsProxyAgent(PROXY_URL) : undefined,
...(timeoutMs !== undefined ? { timeout: timeoutMs } : {}),
});
}
this.oai = createOpenAIClient(this.config, {
originator: ORIGIN,
version: CLI_VERSION,
session_id: this.sessionId,
});

setSessionId(this.sessionId);
setCurrentModel(this.model);
7 changes: 2 additions & 5 deletions codex-cli/src/utils/config.ts
@@ -70,10 +70,12 @@ export let OPENAI_API_KEY = process.env["OPENAI_API_KEY"] || "";

export const AZURE_OPENAI_API_VERSION =
process.env["AZURE_OPENAI_API_VERSION"] || "2025-03-01-preview";
export const AZURE_OPENAI_DEPLOYMENT = process.env["AZURE_OPENAI_DEPLOYMENT"];

export const DEFAULT_REASONING_EFFORT = "high";
export const OPENAI_ORGANIZATION = process.env["OPENAI_ORGANIZATION"] || "";
export const OPENAI_PROJECT = process.env["OPENAI_PROJECT"] || "";
export const HTTPS_PROXY_URL = process.env["HTTPS_PROXY"] || "";

// Can be set `true` when Codex is running in an environment that is marked as already
// considered sufficiently locked-down so that we allow running without an explicit sandbox.
@@ -126,11 +128,6 @@ export function getApiKey(provider: string = "openai"): string | undefined {
return customApiKey;
}

// If the provider not found in the providers list and `OPENAI_API_KEY` is set, use it
if (OPENAI_API_KEY !== "") {
return OPENAI_API_KEY;
}

// We tried.
return undefined;
}
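With the blanket `OPENAI_API_KEY` fallback removed, `getApiKey` returns `undefined` for providers that have no key of their own, which is what lets the Azure client below switch to Entra authentication. A hedged sketch of the resulting behavior (import path and environment-variable mapping assumed from the repo's provider table):

```typescript
import { getApiKey } from "./utils/config.js"; // path illustrative

// Assumed mapping: "openai" reads OPENAI_API_KEY, "azure" reads
// AZURE_OPENAI_API_KEY. Unset variables now yield undefined rather than
// silently reusing the OpenAI key.
console.log(getApiKey("openai")); // value of OPENAI_API_KEY, or undefined
console.log(getApiKey("azure"));  // value of AZURE_OPENAI_API_KEY, or undefined
```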
6 changes: 0 additions & 6 deletions codex-cli/src/utils/model-utils.ts
@@ -1,7 +1,6 @@
import type { ResponseItem } from "openai/resources/responses/responses.mjs";

import { approximateTokensUsed } from "./approximate-tokens-used.js";
import { getApiKey } from "./config.js";
import { type SupportedModelId, openAiModelInfo } from "./model-info.js";
import { createOpenAIClient } from "./openai-client.js";

@@ -16,11 +15,6 @@ export const RECOMMENDED_MODELS: Array<string> = ["o4-mini", "o3"];
* lifetime of the process and the results are cached for subsequent calls.
*/
async function fetchModels(provider: string): Promise<Array<string>> {
// If the user has not configured an API key we cannot retrieve the models.
if (!getApiKey(provider)) {
throw new Error("No API key configured for provider: " + provider);
}

try {
const openai = createOpenAIClient({ provider });
const list = await openai.models.list();
63 changes: 48 additions & 15 deletions codex-cli/src/utils/openai-client.ts
@@ -4,10 +4,17 @@
getBaseUrl,
getApiKey,
AZURE_OPENAI_API_VERSION,
AZURE_OPENAI_DEPLOYMENT,
OPENAI_TIMEOUT_MS,
OPENAI_ORGANIZATION,
OPENAI_PROJECT,
HTTPS_PROXY_URL,
} from "./config.js";
import {
DefaultAzureCredential,
getBearerTokenProvider,
} from "@azure/identity";
import { HttpsProxyAgent } from "https-proxy-agent";
import OpenAI, { AzureOpenAI } from "openai";

type OpenAIClientConfig = {
@@ -23,29 +30,55 @@
*/
export function createOpenAIClient(
config: OpenAIClientConfig | AppConfig,
headers: Record<string, string> = {},
): OpenAI | AzureOpenAI {
const headers: Record<string, string> = {};
if (OPENAI_ORGANIZATION) {
headers["OpenAI-Organization"] = OPENAI_ORGANIZATION;
}
if (OPENAI_PROJECT) {
headers["OpenAI-Project"] = OPENAI_PROJECT;
}
const defaultHeaders: Record<string, string> = {
...(OPENAI_ORGANIZATION
? { "OpenAI-Organization": OPENAI_ORGANIZATION }
: {}),
...(OPENAI_PROJECT ? { "OpenAI-Project": OPENAI_PROJECT } : {}),
...headers,
};

const apiKey = getApiKey(config.provider);
const httpAgent = HTTPS_PROXY_URL
? new HttpsProxyAgent(HTTPS_PROXY_URL)
: undefined;
const baseURL = getBaseUrl(config.provider);
const timeout = OPENAI_TIMEOUT_MS;

if (config.provider?.toLowerCase() === "azure") {
if (apiKey === undefined) {
const credential = new DefaultAzureCredential();
const azureADTokenProvider = getBearerTokenProvider(
credential,
"https://cognitiveservices.azure.com/.default",
);
return new AzureOpenAI({
azureADTokenProvider,
baseURL,
timeout,
defaultHeaders,
httpAgent,
deployment: AZURE_OPENAI_DEPLOYMENT,
apiVersion: AZURE_OPENAI_API_VERSION,
});
}

return new AzureOpenAI({
apiKey: getApiKey(config.provider),
baseURL: getBaseUrl(config.provider),
apiKey,
baseURL,
timeout,
defaultHeaders,
httpAgent,
apiVersion: AZURE_OPENAI_API_VERSION,
timeout: OPENAI_TIMEOUT_MS,
defaultHeaders: headers,
});
}

return new OpenAI({
apiKey: getApiKey(config.provider),
baseURL: getBaseUrl(config.provider),
timeout: OPENAI_TIMEOUT_MS,
defaultHeaders: headers,
apiKey,
baseURL,
timeout,
defaultHeaders,
});
}
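For reference, a sketch of how a caller such as `AgentLoop` above might obtain a client through the refactored helper. The import path and header values are illustrative placeholders; the real call passes `ORIGIN`, `CLI_VERSION`, and the session id:

```typescript
import { createOpenAIClient } from "./openai-client.js"; // path illustrative

// Provider "azure" with AZURE_OPENAI_API_KEY unset: createOpenAIClient falls
// back to an AzureOpenAI instance authenticated with Entra ID tokens scoped to
// https://cognitiveservices.azure.com/.default.
const oai = createOpenAIClient(
  { provider: "azure" },
  {
    originator: "codex_cli",  // illustrative; the real value is ORIGIN
    version: "0.0.0",         // illustrative; the real value is CLI_VERSION
    session_id: "session-id", // illustrative
  },
);

// Requests carry a bearer token rather than an api-key header.
const models = await oai.models.list();
```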