import { ServiceError, DocumentMetadata } from '../types';
import { ollamaService } from './ollamaService';

// Hardcoded defaults, kept in one place so query() and getConfig() agree.
const OLLAMA_BASE_URL = 'http://localhost:11434';
const OLLAMA_MODEL = 'damien113/datahound-gpu:8b';
const DEFAULT_TEMPERATURE = 0.7;

export class LLMService {
  constructor() {
    // Point the shared Ollama client at the local server.
    ollamaService.updateBaseUrl(OLLAMA_BASE_URL);
  }

  /**
   * Sends a question to the Ollama model and returns the full answer.
   * If `onChunk` is provided, it receives each streamed response fragment.
   */
  async query(
    question: string,
    onChunk?: (chunk: string) => void
  ): Promise<{ answer: string; sources: DocumentMetadata[] }> {
    try {
      const ollamaResponse = await ollamaService.chat({
        model: OLLAMA_MODEL,
        messages: [{ role: 'user', content: question }],
        temperature: DEFAULT_TEMPERATURE,
        onChunk,
      });

      const sources: DocumentMetadata[] = []; // TODO: Implement source retrieval from vector store

      return {
        answer: ollamaResponse.message.content,
        sources,
      };
    } catch (error) {
      console.error('Error querying LLM:', error);
      // Normalize unknown errors into the app's ServiceError type.
      throw new ServiceError(
        error instanceof Error ? error.message : 'Unknown error occurred'
      );
    }
  }

  getConfig() {
    return {
      provider: 'ollama',
      model: OLLAMA_MODEL,
      baseUrl: OLLAMA_BASE_URL,
      temperature: DEFAULT_TEMPERATURE,
    };
  }
}
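
// One way the TODO above might be filled in (a sketch only:
// `vectorStoreService` and its `search` signature are hypothetical
// stand-ins for whatever vector store this app wires up):
//
//   const hits = await vectorStoreService.search(question, { topK: 5 });
//   const sources: DocumentMetadata[] = hits.map((hit) => hit.metadata);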
export const llmService = new LLMService();
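
// Example usage (a minimal sketch, assuming a local Ollama server is
// reachable at the base URL above with the model already pulled, and
// that ollamaService.chat streams partial text through `onChunk`):
//
//   const { answer, sources } = await llmService.query(
//     'Summarize my indexed documents.',
//     (chunk) => process.stdout.write(chunk) // print fragments as they arrive
//   );
//   console.log(`\nDone. ${sources.length} sources attached.`);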