feat: multi-provider support — Anthropic + Ollama (P2-002) (#74)

Co-authored-by: Jason Woltje <jason@diversecanvas.com>
Co-committed-by: Jason Woltje <jason@diversecanvas.com>
This commit was merged in pull request #74.
This commit is contained in:
2026-03-13 03:10:51 +00:00
committed by jason.woltje
parent aa9ee75a2a
commit 95f95f54cf
9 changed files with 287 additions and 11 deletions

View File

@@ -2,3 +2,4 @@ export const VERSION = '0.0.0';
export * from './chat/index.js';
export * from './agent/index.js';
export * from './provider/index.js';

View File

@@ -0,0 +1,54 @@
/** Known built-in LLM provider identifiers */
export type KnownProvider =
  | 'anthropic'
  | 'openai'
  | 'google'
  | 'ollama'
  | 'xai'
  | 'groq'
  | 'openrouter'
  | 'zai'
  | 'mistral';

/**
 * Provider identifier — a known built-in provider or any custom string.
 *
 * Intersecting with `{}` prevents the union from being absorbed into plain
 * `string` (which would happen with `KnownProvider | string`), so editors
 * still offer the {@link KnownProvider} literals as completions while every
 * custom provider id remains assignable.
 */
export type ProviderId = KnownProvider | (string & {});
/**
 * Describes a single LLM model offered by a provider.
 */
export interface ModelInfo {
  /** Provider-scoped model identifier (e.g. as passed to the API). */
  id: string;
  /** Provider this model belongs to. */
  provider: ProviderId;
  /** Human-readable display name. */
  name: string;
  /** Whether the model supports extended reasoning/thinking. */
  reasoning: boolean;
  /** Maximum context window, in tokens. */
  contextWindow: number;
  /** Maximum output tokens per response. */
  maxTokens: number;
  /** Input modalities the model accepts. */
  inputTypes: Array<'text' | 'image'>;
  /** Pricing, presumably per token unit — confirm units against provider docs. */
  cost: {
    input: number;
    output: number;
    cacheRead: number;
    cacheWrite: number;
  };
}
/**
 * Describes a provider and the models it exposes.
 */
export interface ProviderInfo {
  /** Stable provider identifier. */
  id: ProviderId;
  /** Human-readable display name. */
  name: string;
  /** Whether the provider is currently usable (e.g. configured/reachable). */
  available: boolean;
  /** Models offered by this provider. */
  models: Array<ModelInfo>;
}
/**
 * Configuration for a custom (non-built-in) provider.
 */
export interface CustomProviderConfig {
  /** Unique identifier for the custom provider. */
  id: string;
  /** Human-readable display name. */
  name: string;
  /** Base URL of the provider's API endpoint. */
  baseUrl: string;
  /** Optional API key; omit for unauthenticated endpoints. */
  apiKey?: string;
  /** Models exposed by this provider; capability fields are optional. */
  models: {
    id: string;
    name: string;
    reasoning?: boolean;
    contextWindow?: number;
    maxTokens?: number;
  }[];
}