This repository has no description.
0
fork

Configure Feed

Select the types of activity you want to include in your feed.

refactor: update health checks and config for LiteLLM

- Add LITELLM_URL config option
- Change health check from anthropic-proxy to LiteLLM
- Make ANTHROPIC_PROXY_URL and SESSION_SECRET optional
- Update letta.ts to detect Claude models via LiteLLM proxy
- Update integration tests for new health check structure

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>

alice cdb8fe62 2e9a363d

+40 -33
+6
.env.example
··· 34 34 # Leave empty initially, will be populated after OAuth 35 35 ANTHROPIC_PROXY_SESSION_ID= 36 36 37 + # === LiteLLM === 38 + # LiteLLM proxy URL (provides OpenAI-compatible API for Claude) 39 + # Dev: http://localhost:4000 40 + # Prod (Docker): http://litellm:4000 41 + LITELLM_URL=http://localhost:4000 42 + 37 43 # === OpenAI === 38 44 # OpenAI API key (for embeddings only) 39 45 # Get from: https://platform.openai.com/api-keys
+8 -11
src/config.ts
··· 58 58 TELEGRAM_WEBHOOK_SECRET_TOKEN: optionalEnv('TELEGRAM_WEBHOOK_SECRET_TOKEN', ''), 59 59 60 60 // === Anthropic Proxy === 61 - ANTHROPIC_PROXY_URL: requireEnv('ANTHROPIC_PROXY_URL'), 62 - ANTHROPIC_PROXY_SESSION_SECRET: requireEnv('ANTHROPIC_PROXY_SESSION_SECRET'), 61 + ANTHROPIC_PROXY_URL: optionalEnv('ANTHROPIC_PROXY_URL', 'http://localhost:4001'), 62 + ANTHROPIC_PROXY_SESSION_SECRET: optionalEnv('ANTHROPIC_PROXY_SESSION_SECRET', ''), 63 63 ANTHROPIC_PROXY_SESSION_ID: optionalEnv('ANTHROPIC_PROXY_SESSION_ID', ''), 64 + 65 + // === LiteLLM === 66 + LITELLM_URL: optionalEnv('LITELLM_URL', 'http://localhost:4000'), 64 67 65 68 // === OpenAI (embeddings only) === 66 69 OPENAI_API_KEY: requireEnv('OPENAI_API_KEY'), ··· 94 97 throw new Error(`LETTA_BASE_URL must be a valid URL, got: ${config.LETTA_BASE_URL}`); 95 98 } 96 99 97 - try { 98 - new URL(config.ANTHROPIC_PROXY_URL); 99 - } catch { 100 - throw new Error(`ANTHROPIC_PROXY_URL must be a valid URL, got: ${config.ANTHROPIC_PROXY_URL}`); 101 - } 102 - 103 100 if (hasWebhookUrl) { 104 101 try { 105 102 new URL(config.TELEGRAM_WEBHOOK_URL); ··· 108 105 } 109 106 } 110 107 111 - // Warn if session ID is missing (needed for Anthropic proxy to work) 112 - if (!config.ANTHROPIC_PROXY_SESSION_ID) { 108 + // Warn if session ID is missing (needed for anthropic-proxy OAuth to work) 109 + if (config.ANTHROPIC_PROXY_SESSION_ID === '') { 113 110 console.warn( 114 - '⚠️ ANTHROPIC_PROXY_SESSION_ID is not set. ' + 'The Anthropic proxy will not work until OAuth flow is completed.' 111 + '⚠️ ANTHROPIC_PROXY_SESSION_ID is not set. ' + 'Complete OAuth flow at http://localhost:4001/auth/device' 115 112 ); 116 113 } 117 114
+5 -5
src/health.integration.test.ts
··· 13 13 test('healthCheck uses config values', async () => { 14 14 // Verify that healthCheck is using the config module 15 15 expect(config.LETTA_BASE_URL).toBeDefined(); 16 - expect(config.ANTHROPIC_PROXY_URL).toBeDefined(); 16 + expect(config.LITELLM_URL).toBeDefined(); 17 17 18 18 const response = await healthCheck(); 19 19 expect(response).toBeDefined(); ··· 41 41 expect(body).toHaveProperty('checks'); 42 42 expect(body.checks).toHaveProperty('db'); 43 43 expect(body.checks).toHaveProperty('letta'); 44 - expect(body.checks).toHaveProperty('proxy'); 44 + expect(body.checks).toHaveProperty('litellm'); 45 45 46 46 // Verify types 47 47 expect(typeof body.healthy).toBe('boolean'); 48 48 expect(typeof body.checks.db).toBe('boolean'); 49 49 expect(typeof body.checks.letta).toBe('boolean'); 50 - expect(typeof body.checks.proxy).toBe('boolean'); 50 + expect(typeof body.checks.litellm).toBe('boolean'); 51 51 }); 52 52 53 53 test('healthCheck status matches healthy field', async () => { ··· 64 64 expect(response.status).toBe(200); 65 65 // All checks should be true when healthy 66 66 expect(body.checks.letta).toBe(true); 67 - expect(body.checks.proxy).toBe(true); 67 + expect(body.checks.litellm).toBe(true); 68 68 } else { 69 69 expect(response.status).toBe(503); 70 70 // At least one check should be false when unhealthy 71 - const anyFailed = !body.checks.letta || !body.checks.proxy; 71 + const anyFailed = !body.checks.letta || !body.checks.litellm; 72 72 expect(anyFailed).toBe(true); 73 73 } 74 74 });
+8 -9
src/health.ts
··· 3 3 * 4 4 * Checks the health of all critical dependencies: 5 5 * - Letta API server 6 - * - Anthropic proxy 6 + * - LiteLLM proxy 7 7 * - Database (optional for M0, will be enabled in M2) 8 8 * 9 9 * Returns 200 if all services are healthy, 503 if any are down. ··· 16 16 checks: { 17 17 db: boolean; 18 18 letta: boolean; 19 - proxy: boolean; 19 + litellm: boolean; 20 20 }; 21 21 } 22 22 ··· 29 29 const checks = { 30 30 db: false, 31 31 letta: false, 32 - proxy: false, 32 + litellm: false, 33 33 }; 34 34 35 35 // DB: Optional for M0 (database module doesn't exist yet) ··· 49 49 checks.letta = false; 50 50 } 51 51 52 - // Proxy: Check health endpoint 52 + // LiteLLM: Check health endpoint 53 53 try { 54 - const proxyHealthUrl = config.ANTHROPIC_PROXY_URL.replace('/v1', '/health'); 55 - const res = await fetch(proxyHealthUrl, { 54 + const res = await fetch(`${config.LITELLM_URL}/health`, { 56 55 method: 'GET', 57 56 signal: AbortSignal.timeout(5000), // 5s timeout 58 57 }); 59 - checks.proxy = res.ok; 58 + checks.litellm = res.ok; 60 59 } catch (error) { 61 - console.error('Proxy health check failed:', error); 62 - checks.proxy = false; 60 + console.error('LiteLLM health check failed:', error); 61 + checks.litellm = false; 63 62 } 64 63 65 64 // Overall health: all checks must pass
+13 -8
src/letta.ts
··· 56 56 `Letta is accessible. Found ${llmModels.length.toString()} LLM models and ${embeddingModels.length.toString()} embedding models.` 57 57 ); 58 58 59 - // Log available Anthropic models 60 - const anthropicModels = llmModels.filter( 61 - (m) => m.provider_type === 'anthropic' || (m.provider_name?.includes('anthropic') ?? false) 59 + // Log available Claude models (via LiteLLM/openai-proxy or native Anthropic) 60 + const claudeModels = llmModels.filter( 61 + (m) => 62 + m.provider_type === 'anthropic' || 63 + (m.provider_name?.includes('litellm') ?? false) || 64 + (m.handle?.includes('claude') ?? false) || 65 + (m.handle?.includes('openai-proxy') ?? false) || 66 + m.name.includes('claude') 62 67 ); 63 68 64 - if (anthropicModels.length > 0) { 65 - console.log(`Found ${anthropicModels.length.toString()} Anthropic model(s):`); 66 - anthropicModels.forEach((m) => { 69 + if (claudeModels.length > 0) { 70 + console.log(`Found ${claudeModels.length.toString()} Claude model(s):`); 71 + claudeModels.forEach((m) => { 67 72 console.log(` - ${m.handle ?? m.name}`); 68 73 }); 69 74 } else { 70 75 console.warn( 71 - '⚠️ No Anthropic models found. ' + 76 + '⚠️ No Claude models found. ' + 72 77 'Make sure the anthropic-proxy is configured as a provider in Letta server. ' + 73 - "This may need to be done via Letta's admin interface or configuration." 78 + 'Run: bun run setup:letta' 74 79 ); 75 80 } 76 81