|
| 1 | +// ============================================ |
| 2 | +// OpenSwarm - Local Model Adapter |
| 3 | +// Created: 2026-04-10 |
// Purpose: Support local OpenAI-compatible servers (Ollama, LMStudio, llama.cpp, etc.)
| 5 | +// ============================================ |
| 6 | + |
| 7 | +import type { |
| 8 | + CliAdapter, |
| 9 | + CliRunOptions, |
| 10 | + CliRunResult, |
| 11 | + AdapterCapabilities, |
| 12 | + WorkerResult, |
| 13 | + ReviewResult, |
| 14 | +} from './types.js'; |
| 15 | +import { t } from '../locale/index.js'; |
| 16 | + |
// Candidate base URLs for local providers, probed in priority order.
const DEFAULT_ENDPOINTS = [
  'http://localhost:11434', // Ollama
  'http://localhost:1234', // LMStudio
  'http://localhost:8080', // llama.cpp server
];

// Model used when the caller does not specify one via CliRunOptions.model.
const DEFAULT_MODEL = 'gemma3:4b';
// Per-request timeout for health checks and model listing.
const HEALTH_CHECK_TIMEOUT_MS = 2000;
| 26 | + |
| 27 | +export class LocalModelAdapter implements CliAdapter { |
| 28 | + readonly name = 'local'; |
| 29 | + |
| 30 | + readonly capabilities: AdapterCapabilities = { |
| 31 | + supportsStreaming: false, |
| 32 | + supportsJsonOutput: true, |
| 33 | + supportsModelSelection: true, |
| 34 | + managedGit: false, |
| 35 | + supportedSkills: [], |
| 36 | + }; |
| 37 | + |
| 38 | + // 활성 서버 URL (isAvailable에서 감지, run에서 사용) |
| 39 | + private activeUrl: string | null = null; |
| 40 | + private configuredUrl: string | null = null; |
| 41 | + |
| 42 | + /** config.yaml에서 baseUrl을 주입받을 때 사용 */ |
| 43 | + setBaseUrl(url: string): void { |
| 44 | + this.configuredUrl = url; |
| 45 | + } |
| 46 | + |
| 47 | + async isAvailable(): Promise<boolean> { |
| 48 | + const candidates = this.configuredUrl |
| 49 | + ? [this.configuredUrl, ...DEFAULT_ENDPOINTS] |
| 50 | + : DEFAULT_ENDPOINTS; |
| 51 | + |
| 52 | + for (const url of candidates) { |
| 53 | + try { |
| 54 | + const res = await fetch(`${url}/v1/models`, { |
| 55 | + signal: AbortSignal.timeout(HEALTH_CHECK_TIMEOUT_MS), |
| 56 | + }); |
| 57 | + if (res.ok) { |
| 58 | + this.activeUrl = url; |
| 59 | + return true; |
| 60 | + } |
| 61 | + } catch { |
| 62 | + // 서버 미실행 — 다음 후보로 |
| 63 | + } |
| 64 | + } |
| 65 | + return false; |
| 66 | + } |
| 67 | + |
| 68 | + /** 현재 활성 서버 URL 반환 (디버깅용) */ |
| 69 | + getActiveUrl(): string | null { |
| 70 | + return this.activeUrl; |
| 71 | + } |
| 72 | + |
| 73 | + /** 사용 가능한 모델 목록 조회 */ |
| 74 | + async listModels(): Promise<string[]> { |
| 75 | + if (!this.activeUrl) { |
| 76 | + const available = await this.isAvailable(); |
| 77 | + if (!available) return []; |
| 78 | + } |
| 79 | + |
| 80 | + try { |
| 81 | + const res = await fetch(`${this.activeUrl}/v1/models`, { |
| 82 | + signal: AbortSignal.timeout(HEALTH_CHECK_TIMEOUT_MS), |
| 83 | + }); |
| 84 | + if (!res.ok) return []; |
| 85 | + |
| 86 | + const data = (await res.json()) as { data?: Array<{ id: string }> }; |
| 87 | + return data.data?.map(m => m.id) ?? []; |
| 88 | + } catch { |
| 89 | + return []; |
| 90 | + } |
| 91 | + } |
| 92 | + |
| 93 | + buildCommand(_options: CliRunOptions): { command: string; args: string[] } { |
| 94 | + return { command: 'echo', args: ['"Local adapter uses run() — not shell spawn"'] }; |
| 95 | + } |
| 96 | + |
| 97 | + async run(options: CliRunOptions): Promise<CliRunResult> { |
| 98 | + const startTime = Date.now(); |
| 99 | + |
| 100 | + // 서버 연결 확인 |
| 101 | + if (!this.activeUrl) { |
| 102 | + const available = await this.isAvailable(); |
| 103 | + if (!available) { |
| 104 | + return { |
| 105 | + exitCode: 1, |
| 106 | + stdout: '', |
| 107 | + stderr: 'No local model server found. Start Ollama, LMStudio, or llama.cpp server first.\n' + |
| 108 | + `Checked: ${(this.configuredUrl ? [this.configuredUrl, ...DEFAULT_ENDPOINTS] : DEFAULT_ENDPOINTS).join(', ')}`, |
| 109 | + durationMs: Date.now() - startTime, |
| 110 | + }; |
| 111 | + } |
| 112 | + } |
| 113 | + |
| 114 | + const model = options.model ?? DEFAULT_MODEL; |
| 115 | + const body = { |
| 116 | + model, |
| 117 | + messages: [ |
| 118 | + { role: 'user' as const, content: options.prompt }, |
| 119 | + ], |
| 120 | + temperature: 0.2, |
| 121 | + stream: false, |
| 122 | + }; |
| 123 | + |
| 124 | + try { |
| 125 | + const res = await fetch(`${this.activeUrl}/v1/chat/completions`, { |
| 126 | + method: 'POST', |
| 127 | + headers: { 'Content-Type': 'application/json' }, |
| 128 | + body: JSON.stringify(body), |
| 129 | + signal: options.timeoutMs |
| 130 | + ? AbortSignal.timeout(options.timeoutMs) |
| 131 | + : undefined, |
| 132 | + }); |
| 133 | + |
| 134 | + const durationMs = Date.now() - startTime; |
| 135 | + |
| 136 | + if (!res.ok) { |
| 137 | + const errText = await res.text().catch(() => ''); |
| 138 | + |
| 139 | + // 모델 없음 에러 시 사용 가능한 모델 목록 안내 |
| 140 | + if (res.status === 404 || errText.includes('not found')) { |
| 141 | + const models = await this.listModels(); |
| 142 | + const modelList = models.length > 0 |
| 143 | + ? `Available: ${models.slice(0, 10).join(', ')}` |
| 144 | + : 'No models loaded'; |
| 145 | + return { |
| 146 | + exitCode: 1, |
| 147 | + stdout: '', |
| 148 | + stderr: `Model "${model}" not found on ${this.activeUrl}. ${modelList}`, |
| 149 | + durationMs, |
| 150 | + }; |
| 151 | + } |
| 152 | + |
| 153 | + return { |
| 154 | + exitCode: 1, |
| 155 | + stdout: '', |
| 156 | + stderr: `Local API error (${res.status}): ${errText.slice(0, 500)}`, |
| 157 | + durationMs, |
| 158 | + }; |
| 159 | + } |
| 160 | + |
| 161 | + const data = (await res.json()) as OpenAICompatResponse; |
| 162 | + const content = data.choices?.[0]?.message?.content ?? ''; |
| 163 | + |
| 164 | + if (options.onLog) { |
| 165 | + options.onLog(content.slice(0, 300)); |
| 166 | + } |
| 167 | + |
| 168 | + return { |
| 169 | + exitCode: 0, |
| 170 | + stdout: content, |
| 171 | + stderr: '', |
| 172 | + durationMs: Date.now() - startTime, |
| 173 | + }; |
| 174 | + } catch (err) { |
| 175 | + // 타임아웃 또는 네트워크 에러 |
| 176 | + const message = err instanceof Error ? err.message : String(err); |
| 177 | + const isTimeout = message.includes('abort') || message.includes('timeout'); |
| 178 | + |
| 179 | + return { |
| 180 | + exitCode: 1, |
| 181 | + stdout: '', |
| 182 | + stderr: isTimeout |
| 183 | + ? `Local model timeout after ${options.timeoutMs ?? 300000}ms (model: ${model}). Local models can be slow — consider increasing timeout.` |
| 184 | + : `Local model request failed: ${message}`, |
| 185 | + durationMs: Date.now() - startTime, |
| 186 | + }; |
| 187 | + } |
| 188 | + } |
| 189 | + |
| 190 | + parseWorkerOutput(raw: CliRunResult): WorkerResult { |
| 191 | + const text = raw.stdout; |
| 192 | + return extractWorkerResultJson(text) ?? extractWorkerFromText(text); |
| 193 | + } |
| 194 | + |
| 195 | + parseReviewerOutput(raw: CliRunResult): ReviewResult { |
| 196 | + const text = raw.stdout; |
| 197 | + return extractReviewerResultJson(text) ?? extractReviewerFromText(text); |
| 198 | + } |
| 199 | +} |
| 200 | + |
// Minimal shape of an OpenAI-compatible /v1/chat/completions response
// (as served by Ollama, LMStudio, llama.cpp server).
interface OpenAICompatResponse {
  // Generated completions; run() reads choices[0].message.content.
  choices: Array<{
    message: {
      content: string;
      role: string;
    };
    finish_reason: string;
  }>;
  // Token accounting — optional since not every local server reports it.
  usage?: {
    prompt_tokens: number;
    completion_tokens: number;
    total_tokens: number;
  };
  // Model id echoed back by the server (optional).
  model?: string;
}
| 217 | + |
| 218 | +// Worker/Reviewer 출력 파싱 (GPT 어댑터와 동일 로직) |
| 219 | + |
| 220 | +function extractWorkerResultJson(text: string): WorkerResult | null { |
| 221 | + const jsonMatch = text.match(/```json\s*([\s\S]*?)\s*```/); |
| 222 | + const jsonStr = jsonMatch?.[1] ?? findJsonObject(text, '"success"'); |
| 223 | + if (!jsonStr) return null; |
| 224 | + |
| 225 | + try { |
| 226 | + const parsed = JSON.parse(jsonStr); |
| 227 | + return { |
| 228 | + success: Boolean(parsed.success), |
| 229 | + summary: parsed.summary || t('common.fallback.noSummary'), |
| 230 | + filesChanged: Array.isArray(parsed.filesChanged) ? parsed.filesChanged : [], |
| 231 | + commands: Array.isArray(parsed.commands) ? parsed.commands : [], |
| 232 | + output: text, |
| 233 | + error: parsed.error, |
| 234 | + confidencePercent: typeof parsed.confidencePercent === 'number' |
| 235 | + ? parsed.confidencePercent : undefined, |
| 236 | + haltReason: parsed.haltReason || undefined, |
| 237 | + }; |
| 238 | + } catch { |
| 239 | + return null; |
| 240 | + } |
| 241 | +} |
| 242 | + |
| 243 | +function extractWorkerFromText(text: string): WorkerResult { |
| 244 | + const hasError = /error|fail|exception|cannot/i.test(text); |
| 245 | + const hasSuccess = /success|completed|done|finished/i.test(text); |
| 246 | + |
| 247 | + return { |
| 248 | + success: !hasError || hasSuccess, |
| 249 | + summary: extractSummary(text), |
| 250 | + filesChanged: [], |
| 251 | + commands: [], |
| 252 | + output: text, |
| 253 | + error: hasError ? extractErrorMessage(text) : undefined, |
| 254 | + }; |
| 255 | +} |
| 256 | + |
| 257 | +function extractReviewerResultJson(text: string): ReviewResult | null { |
| 258 | + const jsonMatch = text.match(/```json\s*([\s\S]*?)\s*```/); |
| 259 | + const jsonStr = jsonMatch?.[1] ?? findJsonObject(text, '"decision"'); |
| 260 | + if (!jsonStr) return null; |
| 261 | + |
| 262 | + try { |
| 263 | + const parsed = JSON.parse(jsonStr); |
| 264 | + const decision = parsed.decision === 'approve' || parsed.decision === 'reject' |
| 265 | + ? parsed.decision |
| 266 | + : 'revise'; |
| 267 | + return { |
| 268 | + decision, |
| 269 | + feedback: typeof parsed.feedback === 'string' ? parsed.feedback : t('common.fallback.noSummary'), |
| 270 | + issues: Array.isArray(parsed.issues) |
| 271 | + ? parsed.issues.filter((v: unknown): v is string => typeof v === 'string') |
| 272 | + : [], |
| 273 | + suggestions: Array.isArray(parsed.suggestions) |
| 274 | + ? parsed.suggestions.filter((v: unknown): v is string => typeof v === 'string') |
| 275 | + : [], |
| 276 | + }; |
| 277 | + } catch { |
| 278 | + return null; |
| 279 | + } |
| 280 | +} |
| 281 | + |
| 282 | +function extractReviewerFromText(text: string): ReviewResult { |
| 283 | + const lower = text.toLowerCase(); |
| 284 | + const decision = lower.includes('approve') |
| 285 | + ? 'approve' |
| 286 | + : lower.includes('reject') |
| 287 | + ? 'reject' |
| 288 | + : 'revise'; |
| 289 | + return { |
| 290 | + decision, |
| 291 | + feedback: extractSummary(text), |
| 292 | + issues: [], |
| 293 | + suggestions: [], |
| 294 | + }; |
| 295 | +} |
| 296 | + |
| 297 | +function findJsonObject(text: string, marker: string): string | null { |
| 298 | + const idx = text.indexOf(marker); |
| 299 | + if (idx < 0) return null; |
| 300 | + let start = text.lastIndexOf('{', idx); |
| 301 | + if (start < 0) return null; |
| 302 | + let depth = 0; |
| 303 | + for (let i = start; i < text.length; i++) { |
| 304 | + if (text[i] === '{') depth++; |
| 305 | + if (text[i] === '}') { |
| 306 | + depth--; |
| 307 | + if (depth === 0) return text.slice(start, i + 1); |
| 308 | + } |
| 309 | + } |
| 310 | + return null; |
| 311 | +} |
| 312 | + |
| 313 | +function extractSummary(text: string): string { |
| 314 | + const lines = text.split('\n').filter(l => l.trim().length > 10); |
| 315 | + if (lines.length === 0) return t('common.fallback.noSummary'); |
| 316 | + const summary = lines[0].trim(); |
| 317 | + return summary.length > 200 ? `${summary.slice(0, 200)}...` : summary; |
| 318 | +} |
| 319 | + |
| 320 | +function extractErrorMessage(text: string): string { |
| 321 | + const errorMatch = text.match(/(?:error|exception|failed?):\s*(.+)/i); |
| 322 | + if (errorMatch) return errorMatch[1].slice(0, 200); |
| 323 | + const lines = text.split('\n').filter(l => /error|fail/i.test(l)); |
| 324 | + return lines.length > 0 ? lines[0].slice(0, 200) : 'Unknown error'; |
| 325 | +} |
0 commit comments