diff --git a/apps/promptions-chat/.env.example b/apps/promptions-chat/.env.example
index 19d97c9..2789988 100644
--- a/apps/promptions-chat/.env.example
+++ b/apps/promptions-chat/.env.example
@@ -1,2 +1,10 @@
 # Copy this file to .env and add your OpenAI API key
 VITE_OPENAI_API_KEY=your_openai_api_key_here
+# Optional: only set for Azure or other custom OpenAI-compatible endpoints.
+# Omit for standard OpenAI API usage.
+VITE_OPENAI_BASE_URL=your_openai_base_url_here
+# Optional: API version is Azure-/custom-endpoint-specific.
+# Omit for standard OpenAI API usage.
+VITE_OPENAI_API_VERSION=2024-12-01-preview
+# Optional: model name; the app falls back to gpt-4.1 when unset.
+VITE_OPENAI_MODEL=gpt-4.1-mini
diff --git a/apps/promptions-chat/src/services/ChatService.ts b/apps/promptions-chat/src/services/ChatService.ts
index a60c3f4..8fc9845 100644
--- a/apps/promptions-chat/src/services/ChatService.ts
+++ b/apps/promptions-chat/src/services/ChatService.ts
@@ -7,6 +7,7 @@ interface ChatMessage {
 
 export class ChatService {
   private client: OpenAI;
+  private model: string;
 
   constructor() {
     // In a real application, you'd want to handle the API key more securely
@@ -19,8 +20,19 @@ export class ChatService {
       );
     }
 
+    // This code runs in the browser (see dangerouslyAllowBrowser below), so
+    // `process` does not exist at runtime; read config from import.meta.env only.
+    const baseURL = import.meta.env.VITE_OPENAI_BASE_URL;
+    const apiVersion = import.meta.env.VITE_OPENAI_API_VERSION;
+    this.model = import.meta.env.VITE_OPENAI_MODEL || "gpt-4.1";
+
     this.client = new OpenAI({
       apiKey,
+      ...(baseURL ? { baseURL } : {}),
+      // Azure-style endpoints expect an api-version query parameter and an
+      // "api-key" header; both are no-ops for the standard OpenAI endpoint.
+      ...(apiVersion ? { defaultQuery: { "api-version": apiVersion } } : {}),
+      ...(baseURL ? { defaultHeaders: { "api-key": apiKey } } : {}),
       dangerouslyAllowBrowser: true, // Only for demo purposes - use a backend in production
     });
   }
@@ -35,7 +47,7 @@ export class ChatService {
     try {
       const stream = await this.client.chat.completions.create(
         {
-          model: "gpt-4.1",
+          model: this.model,
           messages: messages as OpenAI.Chat.Completions.ChatCompletionMessageParam[],
           stream: true,
           temperature: 0.7,
@@ -64,7 +76,7 @@ export class ChatService {
   async sendMessage(messages: ChatMessage[]): Promise<string> {
     try {
       const response = await this.client.chat.completions.create({
-        model: "gpt-3.5-turbo",
+        model: this.model,
         messages: messages as OpenAI.Chat.Completions.ChatCompletionMessageParam[],
         temperature: 0.7,
         max_tokens: 1000,
diff --git a/apps/promptions-chat/src/vite-env.d.ts b/apps/promptions-chat/src/vite-env.d.ts
index c8d8de6..e7a0439 100644
--- a/apps/promptions-chat/src/vite-env.d.ts
+++ b/apps/promptions-chat/src/vite-env.d.ts
@@ -2,6 +2,9 @@
 
 interface ImportMetaEnv {
   readonly VITE_OPENAI_API_KEY: string;
+  readonly VITE_OPENAI_BASE_URL?: string;
+  readonly VITE_OPENAI_API_VERSION?: string;
+  readonly VITE_OPENAI_MODEL?: string;
   // more env variables...
 }
 