223 lines
6.2 KiB
TypeScript
Raw Normal View History

2025-02-04 01:30:36 -05:00
import { ServiceError } from '../types';
import { net } from 'electron';
/**
 * A single installed model as reported by the Ollama server.
 * NOTE(review): not referenced anywhere in this file's visible code —
 * possibly kept for external consumers; confirm before removing.
 */
interface OllamaModel {
  name: string;
  modified_at: string; // ISO timestamp string as returned by the server
  size: number;        // size on disk, in bytes (per Ollama /api/tags)
  digest: string;
}
/** Response shape of GET /api/tags — the list of locally installed models. */
interface OllamaListResponse {
  models: Array<{
    name: string;
    model: string;
    modified_at: string;
    size: number;
    digest: string;
  }>;
}
/** Minimal shape of a chat reply: only the assistant message content is used. */
interface OllamaChatResponse {
  message: {
    content: string;
  };
}
/** Parameters accepted by {@link OllamaService.chat}. */
interface OllamaChatParams {
  model: string;
  messages: Array<{ role: string; content: string }>;
  temperature?: number; // sampling temperature; defaults to 0.7 in chat()
  onChunk?: (chunk: string) => void; // invoked once per streamed content fragment
}
/**
 * Thin client for a local Ollama server, implemented on Electron's `net`
 * module so requests honor the main process's network stack.
 *
 * Streaming chat responses (`/api/chat`) arrive as newline-delimited JSON;
 * each complete line is parsed and its content forwarded to the caller's
 * `onChunk` callback while also being accumulated into the final result.
 */
class OllamaService {
  private baseUrl: string = 'http://127.0.0.1:11434';

  /**
   * Perform an HTTP request against the Ollama server.
   *
   * @param path    API path, resolved against {@link baseUrl}.
   * @param method  HTTP method (default GET).
   * @param body    Optional JSON-serializable request body.
   * @param onChunk Invoked with each streamed content fragment when
   *                `path === '/api/chat'`; other paths buffer the whole body.
   * @returns For `/api/chat`, a response whose `message.content` is the full
   *          accumulated reply; otherwise the parsed JSON body.
   */
  private async makeRequest<T>(
    path: string,
    method: string = 'GET',
    body?: unknown,
    onChunk?: (chunk: string) => void
  ): Promise<T> {
    return new Promise((resolve, reject) => {
      try {
        const url = new URL(path, this.baseUrl);
        const request = net.request({
          url: url.toString(),
          method,
          headers: {
            'Content-Type': 'application/json',
          }
        });

        let responseData = '';
        let streamBuffer = '';
        // Full assistant message assembled from streamed chunks, so the
        // resolved chat response carries the complete text (previously the
        // promise resolved with an empty string and the reply was lost).
        let fullContent = '';

        request.on('response', (response) => {
          if (response.statusCode !== 200) {
            const error = new Error(`HTTP error! status: ${response.statusCode}`);
            console.error('Ollama request failed:', {
              path,
              statusCode: response.statusCode,
              error
            });
            reject(error);
            return;
          }

          response.on('data', (chunk) => {
            try {
              const chunkStr = chunk.toString();
              if (path === '/api/chat') {
                // Streaming chat: NDJSON — one JSON object per line.
                streamBuffer += chunkStr;
                const lines = streamBuffer.split('\n');
                // Process every complete line; the last element may be a
                // partial line and is carried over to the next data event.
                for (let i = 0; i < lines.length - 1; i++) {
                  const line = lines[i].trim();
                  if (!line) continue;
                  try {
                    const parsed = JSON.parse(line);
                    if (parsed.message?.content) {
                      // Accumulate even when no onChunk is supplied, so the
                      // final resolved value is complete either way.
                      fullContent += parsed.message.content;
                      onChunk?.(parsed.message.content);
                    }
                  } catch (e) {
                    console.warn('Failed to parse chat chunk:', { line, error: e });
                  }
                }
                streamBuffer = lines[lines.length - 1];
              } else {
                // Non-streaming endpoints: buffer the entire body.
                responseData += chunkStr;
              }
            } catch (e) {
              console.error('Error processing chunk:', e);
            }
          });

          response.on('end', () => {
            try {
              if (path === '/api/chat') {
                // Flush any trailing data left in the line buffer.
                if (streamBuffer.trim()) {
                  try {
                    const parsed = JSON.parse(streamBuffer);
                    if (parsed.message?.content) {
                      fullContent += parsed.message.content;
                      onChunk?.(parsed.message.content);
                    }
                  } catch (e) {
                    console.warn('Failed to parse final chat chunk:', { buffer: streamBuffer, error: e });
                  }
                }
                resolve({
                  message: {
                    content: fullContent
                  }
                } as T);
              } else {
                const trimmedResponse = responseData.trim();
                if (!trimmedResponse) {
                  throw new Error('Empty response received');
                }
                resolve(JSON.parse(trimmedResponse) as T);
              }
            } catch (e) {
              // Catch variables are `unknown` under strict TS; narrow before
              // reading `.message` (the original `e.message` fails to compile
              // with `useUnknownInCatchVariables`).
              const msg = e instanceof Error ? e.message : String(e);
              reject(new Error(`Failed to process response: ${msg}`));
            }
          });
        });

        request.on('error', (error) => {
          console.error('Request error:', {
            path,
            error: error.message
          });
          reject(error);
        });

        if (body) {
          const bodyStr = JSON.stringify(body);
          console.log('Sending request:', {
            path,
            method,
            body: bodyStr
          });
          request.write(bodyStr);
        }
        request.end();
      } catch (e) {
        console.error('Failed to make request:', e);
        reject(e);
      }
    });
  }

  /**
   * List the names of all locally installed models (GET /api/tags).
   *
   * @throws ServiceError when the request fails for any reason.
   */
  async getModels(): Promise<string[]> {
    try {
      const response = await this.makeRequest<OllamaListResponse>('/api/tags');
      return response.models.map(model => model.name);
    } catch (error) {
      console.error('Error fetching Ollama models:', error);
      throw new ServiceError(
        error instanceof Error ? error.message : 'Failed to fetch Ollama models'
      );
    }
  }

  /**
   * Run a streaming chat completion (POST /api/chat).
   *
   * @param params Model, message history, optional temperature and chunk callback.
   * @returns The complete assistant reply (chunks are also streamed via `onChunk`).
   * @throws ServiceError on invalid parameters or any transport/protocol failure.
   */
  async chat(params: OllamaChatParams): Promise<OllamaChatResponse> {
    if (!params?.model || !params?.messages?.length) {
      throw new ServiceError('Invalid chat parameters: model and messages are required');
    }
    try {
      console.log('Starting chat request:', {
        model: params.model,
        messageCount: params.messages.length
      });
      const { onChunk, temperature, ...requestParams } = params;
      const response = await this.makeRequest<OllamaChatResponse>(
        '/api/chat',
        'POST',
        {
          ...requestParams,
          stream: true,
          // The Ollama API only honors sampling parameters nested under
          // `options`; a top-level `temperature` (as before) is ignored.
          options: { temperature: temperature ?? 0.7 }
        },
        onChunk
      );
      if (!response?.message) {
        throw new Error('Invalid response format from Ollama');
      }
      return response;
    } catch (error) {
      const errorMessage = error instanceof Error ? error.message : 'Failed to chat with Ollama';
      console.error('Chat error:', {
        error: errorMessage,
        params: {
          model: params.model,
          messageCount: params.messages.length
        }
      });
      throw new ServiceError(errorMessage);
    }
  }

  /** Point the client at a different Ollama server (e.g. a remote host). */
  updateBaseUrl(baseUrl: string) {
    this.baseUrl = baseUrl;
  }
}
export const ollamaService = new OllamaService();