Skip to content

Commit 427635c

Browse files
authored
Merge pull request #3 from code-rabi/support-cerberas
feat: support cerebras
2 parents b131297 + 9236302 commit 427635c

7 files changed

Lines changed: 47 additions & 5 deletions

File tree

README.md

Lines changed: 18 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -108,7 +108,7 @@ Create an RLLM instance with sensible defaults.
108108
```typescript
109109
const rlm = createRLLM({
110110
model: 'gpt-4o-mini', // Model name
111-
provider: 'openai', // 'openai' | 'anthropic' | 'gemini' | 'openrouter' | 'custom'
111+
provider: 'openai', // 'openai' | 'anthropic' | 'gemini' | 'openrouter' | 'cerebras' | 'custom'
112112
apiKey: process.env.KEY, // Optional, uses env vars by default
113113
baseUrl: undefined, // Optional, required for 'custom' provider
114114
verbose: true, // Enable logging
@@ -131,6 +131,23 @@ const rlm = createRLLM({
131131

132132
**Note:** When using `provider: 'custom'`, the `baseUrl` parameter is **required**. An error will be thrown if it's not provided.
133133

134+
### Cerebras Provider
135+
136+
Use Cerebras with the built-in `cerebras` provider:
137+
138+
```typescript
139+
const rlm = createRLLM({
140+
provider: 'cerebras',
141+
model: 'gpt-oss-120b',
142+
// optional if CEREBRAS_API_KEY is set
143+
apiKey: process.env.CEREBRAS_API_KEY,
144+
});
145+
```
146+
147+
Defaults:
148+
- API key env var: `CEREBRAS_API_KEY`
149+
- Base URL: `https://api.cerebras.ai/v1`
150+
134151
### `RLLM` Methods
135152

136153
| Method | Description |

examples/node-modules-viz/README.md

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -211,6 +211,9 @@ Create a `.env` file:
211211
```bash
212212
OPENAI_API_KEY=your_key_here
213213
MODEL=gpt-4o-mini # or gpt-4o, gpt-4-turbo, etc.
214+
# Optional: switch provider (openai | anthropic | gemini | openrouter | cerebras)
215+
# PROVIDER=cerebras
216+
# CEREBRAS_API_KEY=your_key_here
214217
```
215218

216219
## Tips

examples/node-modules-viz/server.mjs

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -49,6 +49,7 @@ const apiKeyName = {
4949
openai: 'OPENAI_API_KEY',
5050
anthropic: 'ANTHROPIC_API_KEY',
5151
openrouter: 'OPENROUTER_API_KEY',
52+
cerebras: 'CEREBRAS_API_KEY',
5253
}[provider] || 'API key';
5354

5455
const hasApiKey = provider === 'gemini'

src/llm-client.test.ts

Lines changed: 19 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -11,27 +11,31 @@ import { LLMClient } from "./llm-client.js";
1111
// Test the provider configuration logic directly
1212
// (extracted from LLMClient to make it testable)
1313

14-
function getDefaultApiKey(provider: "openai" | "anthropic" | "openrouter" | "custom"): string | undefined {
14+
function getDefaultApiKey(provider: "openai" | "anthropic" | "openrouter" | "cerebras" | "custom"): string | undefined {
1515
switch (provider) {
1616
case "openai":
1717
return process.env["OPENAI_API_KEY"];
1818
case "anthropic":
1919
return process.env["ANTHROPIC_API_KEY"];
2020
case "openrouter":
2121
return process.env["OPENROUTER_API_KEY"];
22+
case "cerebras":
23+
return process.env["CEREBRAS_API_KEY"];
2224
case "custom":
2325
return undefined;
2426
}
2527
}
2628

27-
function getDefaultBaseUrl(provider: "openai" | "anthropic" | "openrouter" | "custom"): string | undefined {
29+
function getDefaultBaseUrl(provider: "openai" | "anthropic" | "openrouter" | "cerebras" | "custom"): string | undefined {
2830
switch (provider) {
2931
case "openai":
3032
return undefined; // Uses default
3133
case "anthropic":
3234
return "https://api.anthropic.com/v1";
3335
case "openrouter":
3436
return "https://openrouter.ai/api/v1";
37+
case "cerebras":
38+
return "https://api.cerebras.ai/v1";
3539
case "custom":
3640
return undefined;
3741
}
@@ -50,6 +54,10 @@ describe("LLMClient provider configuration", () => {
5054
it("returns OpenRouter base URL", () => {
5155
expect(getDefaultBaseUrl("openrouter")).toBe("https://openrouter.ai/api/v1");
5256
});
57+
58+
it("returns Cerebras base URL", () => {
59+
expect(getDefaultBaseUrl("cerebras")).toBe("https://api.cerebras.ai/v1");
60+
});
5361
});
5462

5563
describe("getDefaultApiKey", () => {
@@ -80,6 +88,15 @@ describe("LLMClient provider configuration", () => {
8088
process.env["OPENROUTER_API_KEY"] = original;
8189
});
8290

91+
it("reads CEREBRAS_API_KEY for cerebras provider", () => {
92+
const original = process.env["CEREBRAS_API_KEY"];
93+
process.env["CEREBRAS_API_KEY"] = "test-cerebras-key";
94+
95+
expect(getDefaultApiKey("cerebras")).toBe("test-cerebras-key");
96+
97+
process.env["CEREBRAS_API_KEY"] = original;
98+
});
99+
83100
it("returns undefined for custom provider (must be provided explicitly)", () => {
84101
expect(getDefaultApiKey("custom")).toBeUndefined();
85102
});

src/llm-client.ts

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -63,6 +63,8 @@ export class LLMClient {
6363
return process.env["GEMINI_API_KEY"] ?? process.env["GOOGLE_API_KEY"];
6464
case "openrouter":
6565
return process.env["OPENROUTER_API_KEY"];
66+
case "cerebras":
67+
return process.env["CEREBRAS_API_KEY"];
6668
case "custom":
6769
return undefined; // Must be provided explicitly
6870
}
@@ -78,6 +80,8 @@ export class LLMClient {
7880
return "https://generativelanguage.googleapis.com/v1beta/openai/";
7981
case "openrouter":
8082
return "https://openrouter.ai/api/v1";
83+
case "cerebras":
84+
return "https://api.cerebras.ai/v1";
8185
case "custom":
8286
return undefined; // Must be provided explicitly (validated in constructor)
8387
}

src/rlm.ts

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -334,7 +334,7 @@ export class RLLM {
334334
*/
335335
export function createRLLM(options: {
336336
model?: string;
337-
provider?: "openai" | "anthropic" | "gemini" | "openrouter" | "custom";
337+
provider?: "openai" | "anthropic" | "gemini" | "openrouter" | "cerebras" | "custom";
338338
apiKey?: string;
339339
baseUrl?: string;
340340
verbose?: boolean;

src/types.ts

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -23,7 +23,7 @@ export type InferContextType<S> = S extends ZodType<infer T> ? T : string;
2323
// LLM Client Types
2424
// ============================================================================
2525

26-
export type LLMProvider = "openai" | "anthropic" | "gemini" | "openrouter" | "custom";
26+
export type LLMProvider = "openai" | "anthropic" | "gemini" | "openrouter" | "cerebras" | "custom";
2727

2828
export interface ChatMessage {
2929
role: "system" | "user" | "assistant" | "tool";

0 commit comments

Comments (0)