All files / services aiService.ts

97.56% Statements 40/41
87.87% Branches 29/33
83.33% Functions 5/6
97.56% Lines 40/41

Press n or j to go to the next uncovered block, b, p or k for the previous block.

1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 1372x 2x 2x     2x                             2x       20x 19x   19x   10x 9x     9x 8x     17x     18x                             9x 9x 1x     8x 8x 8x       4x 4x               3x   1x 1x                     10x 10x 1x     9x 9x       5x   5x   5x                                         4x 1x 1x     3x 3x   2x 2x      
import 'openai/shims/node'; // Add node fetch shim
import OpenAI from 'openai';
import dotenv from 'dotenv';
 
// Load environment variables from .env into process.env at module load time,
// so provider API keys and model names are visible before any service is built
dotenv.config();
 
// Define AI configuration types
/** Optional per-request generation settings accepted by every provider. */
export interface AIConfig {
  /** Sampling temperature; implementations fall back to 0.7 when omitted. */
  temperature?: number;
  /** Output-token cap; Gemini falls back to 1024, OpenAI to the API default. */
  maxTokens?: number;
  /** Nucleus-sampling (top-p) cutoff; implementations fall back to 1 when omitted. */
  topP?: number;
}
 
// Define the base interface for all AI service implementations
export interface IAIService {
  /**
   * Generates a completion for `prompt` with the provider's configured model.
   * Resolves to the generated text ('' when the provider returns none);
   * rejects with an Error describing the provider failure.
   */
  generateText(prompt: string, config?: AIConfig): Promise<string>;
}
 
// Factory class to get the appropriate AI service implementation
export class AIService {
  // Lazily-created singleton shared by every caller of this factory.
  private static instance: IAIService;

  /**
   * Returns the process-wide service instance, creating it on first use from
   * the AI_PROVIDER environment variable ('google' selects Gemini; anything
   * else, including unset, selects OpenAI).
   */
  static getInstance(): IAIService {
    if (this.instance) {
      return this.instance;
    }

    const provider = process.env.AI_PROVIDER?.toLowerCase() || 'openai';
    this.instance = provider === 'google'
      ? new GoogleGeminiService()
      : new OpenAIService();

    console.log(`[AIService] Initialized with provider: ${provider}`);
    return this.instance;
  }

  // Main method to generate text with the configured provider
  static async generateText(prompt: string, config?: AIConfig): Promise<string> {
    return this.getInstance().generateText(prompt, config);
  }
}
 
// OpenAI implementation
class OpenAIService implements IAIService {
  private client: OpenAI;
  private modelName: string;

  /**
   * Reads OPENAI_API_KEY (required) and AI_MODEL (optional, defaults to
   * 'gpt-4') from the environment and prepares the OpenAI client.
   * @throws Error when OPENAI_API_KEY is not set.
   */
  constructor() {
    const { OPENAI_API_KEY: apiKey, AI_MODEL: model } = process.env;
    if (!apiKey) {
      throw new Error('OpenAI API key is required. Set OPENAI_API_KEY in your environment variables.');
    }

    this.client = new OpenAI({ apiKey });
    this.modelName = model || 'gpt-4';
    console.log(`[OpenAIService] Initialized with model: ${this.modelName}`);
  }

  /**
   * Sends `prompt` as a single user message to the chat-completions API and
   * returns the first choice's text ('' when the API returns no content).
   * @throws Error wrapping the underlying failure message.
   */
  async generateText(prompt: string, config?: AIConfig): Promise<string> {
    try {
      const completion = await this.client.chat.completions.create({
        model: this.modelName,
        messages: [{ role: 'user', content: prompt }],
        temperature: config?.temperature ?? 0.7,
        max_tokens: config?.maxTokens,
        top_p: config?.topP ?? 1,
      });

      const text = completion.choices[0]?.message?.content;
      return text || '';
    } catch (error) {
      console.error('[OpenAIService] Error generating text:', error);
      const reason = error instanceof Error ? error.message : String(error);
      throw new Error(`Failed to generate text with OpenAI: ${reason}`);
    }
  }
}
 
// Google Gemini implementation
class GoogleGeminiService implements IAIService {
  private modelName: string;
  private apiKey: string;

  /**
   * Reads GOOGLE_API_KEY (required) and AI_MODEL (optional, defaults to
   * 'gemini-pro') from the environment.
   * @throws Error when GOOGLE_API_KEY is not set.
   */
  constructor() {
    this.apiKey = process.env.GOOGLE_API_KEY || '';
    if (!this.apiKey) {
      throw new Error('Google API key is required. Set GOOGLE_API_KEY in your environment variables.');
    }
    
    this.modelName = process.env.AI_MODEL || 'gemini-pro';
    console.log(`[GoogleGeminiService] Initialized with model: ${this.modelName}`);
  }

  /**
   * Calls the Gemini `generateContent` REST endpoint with `prompt` and
   * returns the first candidate's first text part, or '' when the response
   * carries no candidates/parts (e.g. a safety-blocked prompt).
   * @throws Error wrapping non-OK HTTP responses and network failures.
   */
  async generateText(prompt: string, config?: AIConfig): Promise<string> {
    try {
      // For now using fetch directly since Google's SDK is changing frequently
      // NOTE(review): the key travels in the query string, where it can leak
      // into access logs; consider the `x-goog-api-key` header instead.
      const url = `https://generativelanguage.googleapis.com/v1/models/${this.modelName}:generateContent?key=${this.apiKey}`;
      
      const response = await fetch(url, {
        method: 'POST',
        headers: {
          'Content-Type': 'application/json',
        },
        body: JSON.stringify({
          contents: [
            {
              parts: [
                { text: prompt }
              ]
            }
          ],
          generationConfig: {
            temperature: config?.temperature ?? 0.7,
            topP: config?.topP ?? 1,
            maxOutputTokens: config?.maxTokens ?? 1024,
          }
        }),
      });
      
      if (!response.ok) {
        const errorText = await response.text();
        throw new Error(`Google API error: ${response.status} ${errorText}`);
      }
      
      const data = await response.json();
      // Bug fix: `candidates` (and `parts`) may be absent entirely; the previous
      // `data.candidates[0]` threw a TypeError in that case instead of
      // returning a usable value. Chain optionally at every step.
      return data.candidates?.[0]?.content?.parts?.[0]?.text ?? '';
    } catch (error) {
      console.error('[GoogleGeminiService] Error generating text:', error);
      throw new Error(`Failed to generate text with Google Gemini: ${error instanceof Error ? error.message : String(error)}`);
    }
  }
}