Skip to content

Commit 5a024c8

Browse files
committed
fix: enhance error logging and improve API key handling in LLMService
1 parent 5c27862 commit 5a024c8

2 files changed

Lines changed: 22 additions & 2 deletions

File tree

src/controllers/langchainController.js

Lines changed: 7 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -3,6 +3,7 @@ const llmService = require("../services/LLMService");
33
const { connectionManager } = require("../config");
44
const { HEADERS, HTTP_STATUS, HEADER_VARIANTS } = require("../core/constants");
55
const { getHeaderValue } = require("../utils/http");
6+
const logger = require("../utils/logger");
67

78
/**
89
* LangChainController - Handles AI-powered SQL query generation
@@ -149,6 +150,12 @@ class LangChainController extends BaseController {
149150

150151
return this.sendSuccess(res, { query });
151152
} catch (error) {
153+
logger.error("Error in executePrompt:", {
154+
message: error.message,
155+
stack: error.stack,
156+
model: req.body.model,
157+
hasApiKey: !!req.body.apiKey,
158+
});
152159
this.handleError(res, error, "executing prompt");
153160
}
154161
}

src/services/LLMService.js

Lines changed: 15 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -2,6 +2,7 @@ const { getAIModel } = require("../models/model");
22
const logger = require("../utils/logger");
33
const { FALLBACK_AI_MODEL, SCHEMA_PROMPT_BUDGET_CHARS } = require("../core/constants");
44
const { buildTableCatalog, buildSchemaDSL } = require("../utils/schemaCompressor");
5+
const { inferProviderFromModel, PROVIDER_API_ENV_KEYS } = require("../core/env");
56

67
const argv = require("minimist")(process.argv.slice(2));
78

@@ -46,14 +47,26 @@ class LLMService {
4647
process.env.AI_MODEL ||
4748
FALLBACK_AI_MODEL
4849
).toLowerCase();
49-
const key = apiKey || argv.apikey || process.env.AI_API_KEY || null;
50+
51+
let key = apiKey || argv.apikey || null;
52+
if (!key) {
53+
const provider = inferProviderFromModel(aiModel);
54+
const providerEnvKey = PROVIDER_API_ENV_KEYS[provider];
55+
key = providerEnvKey ? process.env[providerEnvKey] : null;
56+
// Fallback to generic AI_API_KEY if provider-specific key not found
57+
if (!key) {
58+
key = process.env.AI_API_KEY || null;
59+
}
60+
}
5061

5162
// Only reinitialize if config changed
5263
if (this.config.model !== aiModel || this.config.apiKey !== key) {
5364
this.config.model = aiModel;
5465
this.config.apiKey = key;
5566
this.llm = getAIModel(aiModel, key);
56-
logger.info(`LLM initialized with model: ${aiModel}`);
67+
logger.info(
68+
`LLM initialized with model: ${aiModel}, provider: ${inferProviderFromModel(aiModel)}`,
69+
);
5770
}
5871

5972
return this.llm;

0 commit comments

Comments (0)