Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
52 changes: 52 additions & 0 deletions packages/proxy/edge/index.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -183,4 +183,56 @@ describe("makeFetchApiSecrets", () => {
expect(getSetCalls()).toBe(1);
expect(fetchMock).toHaveBeenCalledTimes(1);
});

// Regression test: a control-plane secret whose metadata carries
// endpoint_path === "" must round-trip through makeFetchApiSecrets
// intact — the empty string must not be dropped or rewritten.
it("preserves empty endpoint_path in control-plane secrets", async () => {
  const secretPayload = [
    {
      secret: "provider-secret",
      type: "openai",
      metadata: {
        api_base: "https://api.openai.com",
        endpoint_path: "",
      },
    },
  ];
  // Each call builds a fresh Response so the body stream is never reused.
  const fetchMock = vi.fn(
    async () =>
      new Response(JSON.stringify(secretPayload), {
        status: 200,
        headers: { "Content-Type": "application/json" },
      }),
  );
  vi.stubGlobal("fetch", fetchMock);

  const { cache } = createInMemoryCache();
  // Collect background work scheduled via waitUntil so we can await it
  // before asserting (e.g. cache writes).
  const pendingBackgroundWork: Promise<unknown>[] = [];
  const ctx: EdgeContext = {
    waitUntil(promise) {
      pendingBackgroundWork.push(promise);
    },
  };
  const opts: ProxyOpts = {
    getRelativeURL: () => "/chat/completions",
    credentialsCache: cache,
    braintrustApiUrl: "https://example.com",
  };
  const fetchApiSecrets = makeFetchApiSecrets({ ctx, opts });

  const secrets = await fetchApiSecrets(true, "org-token", null);
  await Promise.all(pendingBackgroundWork);

  // The secret comes back with endpoint_path still the empty string.
  expect(secrets).toHaveLength(1);
  expect(secrets[0]).toMatchObject({
    secret: "provider-secret",
    type: "openai",
    metadata: {
      api_base: "https://api.openai.com",
      endpoint_path: "",
    },
  });
  expect(fetchMock).toHaveBeenCalledTimes(1);
});
});
2 changes: 1 addition & 1 deletion packages/proxy/schema/secrets.ts
Original file line number Diff line number Diff line change
Expand Up @@ -97,7 +97,7 @@ export const OpenAIMetadataSchema = BaseMetadataSchema.merge(
z.null(),
]),
organization_id: z.string().nullish(),
// Custom endpoint path to override the default (e.g., "" to use api_base as full URL)
// Custom endpoint path to override the default. Empty string behaves like unset.
endpoint_path: z.string().nullish(),
// Auth format for the authorization header (default: "bearer")
auth_format: z.enum(["bearer", "api_key"]).nullish(),
Expand Down
101 changes: 101 additions & 0 deletions packages/proxy/src/providers/openai.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -310,6 +310,70 @@ it("falls back to provider base URL when metadata.api_base is not a string", asy
expect(requests[0].url).toBe("https://api.openai.com/v1/chat/completions");
});

// An empty endpoint_path on an openai secret must behave like "unset":
// the proxy derives "/chat/completions" from the request rather than
// treating "" as an explicit override of the path.
it("uses derived chat endpoint when openai metadata.endpoint_path is empty", async () => {
  const { fetch, requests } = createCapturingFetch({ captureOnly: true });

  await callProxyV1<OpenAIChatCompletionCreateParams, OpenAIChatCompletion>({
    fetch,
    proxyHeaders: {
      "x-bt-endpoint-name": "openai",
    },
    body: {
      model: "gpt-4o-mini",
      stream: false,
      messages: [{ role: "user", content: "hello" }],
    },
    getApiSecrets: async () => [
      {
        name: "openai",
        type: "openai",
        secret: "provider-secret",
        metadata: {
          endpoint_path: "",
          api_base: "https://custom-openai.example.com/v1",
        },
      },
    ],
  });

  // Exactly one upstream call, aimed at the derived chat-completions path.
  expect(requests).toHaveLength(1);
  expect(requests[0].url).toBe(
    "https://custom-openai.example.com/v1/chat/completions",
  );
});

// A non-empty endpoint_path is an explicit override: the proxy must hit
// exactly that path under api_base instead of the derived default.
it("uses explicit endpoint override when openai metadata.endpoint_path is non-empty", async () => {
  const { fetch, requests } = createCapturingFetch({ captureOnly: true });

  await callProxyV1<OpenAIChatCompletionCreateParams, OpenAIChatCompletion>({
    fetch,
    proxyHeaders: {
      "x-bt-endpoint-name": "openai",
    },
    body: {
      model: "gpt-4o-mini",
      stream: false,
      messages: [{ role: "user", content: "hello" }],
    },
    getApiSecrets: async () => [
      {
        name: "openai",
        type: "openai",
        secret: "provider-secret",
        metadata: {
          endpoint_path: "/v1/custom/chat/completions",
          api_base: "https://custom-openai.example.com",
        },
      },
    ],
  });

  // One upstream call, routed to the overridden path verbatim.
  expect(requests).toHaveLength(1);
  expect(requests[0].url).toBe(
    "https://custom-openai.example.com/v1/custom/chat/completions",
  );
});

it("uses model path for azure when metadata.deployment is non-string", async () => {
const { fetch, requests } = createCapturingFetch({ captureOnly: true });

Expand Down Expand Up @@ -345,6 +409,43 @@ it("uses model path for azure when metadata.deployment is non-string", async ()
);
});

// Azure secrets with endpoint_path === "" fall back to the standard
// deployments URL layout and keep the api-version query parameter.
it("uses derived azure endpoint when metadata.endpoint_path is empty", async () => {
  const { fetch, requests } = createCapturingFetch({ captureOnly: true });

  await callProxyV1<OpenAIChatCompletionCreateParams, OpenAIChatCompletion>({
    fetch,
    proxyHeaders: {
      "x-bt-endpoint-name": "azure",
    },
    body: {
      model: "gpt-4o-mini",
      stream: false,
      messages: [{ role: "user", content: "hello" }],
    },
    getApiSecrets: async () => [
      {
        name: "azure",
        type: "azure",
        secret: "provider-secret",
        metadata: {
          api_base: "https://azure.example.com",
          api_version: "2023-07-01-preview",
          auth_type: "api_key",
          deployment: "gpt-4o-mini",
          endpoint_path: "",
          no_named_deployment: false,
        },
      },
    ],
  });

  expect(requests).toHaveLength(1);
  // Derived layout: /openai/deployments/<deployment>/chat/completions?api-version=…
  const requestUrl = requests[0].url;
  expect(requestUrl).toContain(
    "/openai/deployments/gpt-4o-mini/chat/completions",
  );
  expect(requestUrl).toContain("api-version=2023-07-01-preview");
});

type InterceptedRequest = {
method: string;
url: string;
Expand Down
3 changes: 2 additions & 1 deletion packages/proxy/src/proxy.ts
Original file line number Diff line number Diff line change
Expand Up @@ -2113,7 +2113,8 @@ async function fetchOpenAI(
const endpointPath =
secret.metadata &&
"endpoint_path" in secret.metadata &&
typeof secret.metadata.endpoint_path === "string"
typeof secret.metadata.endpoint_path === "string" &&
secret.metadata.endpoint_path.length > 0
? secret.metadata.endpoint_path
: url;
fullURL = new URL(baseURL + endpointPath);
Expand Down
Loading