From 87c0959e928a7b52b69e6d302a7589ff34b8c844 Mon Sep 17 00:00:00 2001
From: your-highness
Date: Thu, 25 May 2023 13:11:24 +0200
Subject: [PATCH] Introduce Error Handling for createChatCompletion() thread

Closes #35
---
 src/openai-thread-completion.js | 24 ++++++++++++++++++------
 1 file changed, 18 insertions(+), 6 deletions(-)

diff --git a/src/openai-thread-completion.js b/src/openai-thread-completion.js
index 95c025c..fab56cc 100644
--- a/src/openai-thread-completion.js
+++ b/src/openai-thread-completion.js
@@ -1,3 +1,6 @@
+const { Log } = require('debug-level')
+const log = new Log('bot')
+
 const { Configuration, OpenAIApi } = require("openai");
 const configuration = new Configuration({
     apiKey: process.env["OPENAI_API_KEY"]
@@ -8,12 +11,21 @@ const model = process.env["OPENAI_MODEL_NAME"] ?? 'gpt-3.5-turbo'
 const max_tokens = Number(process.env["OPENAI_MAX_TOKENS"] ?? 2000)
 
 async function continueThread(messages) {
-    const response = await openai.createChatCompletion({
-        messages: messages,
-        model,
-        max_tokens
-    });
-    return response.data?.choices?.[0]?.message?.content
+    try {
+        const response = await openai.createChatCompletion({
+            messages: messages,
+            model,
+            max_tokens
+        });
+        log.trace(response.data?.choices?.[0]?.message?.content);
+        return response.data?.choices?.[0]?.message?.content
+    } catch (e) {
+        if (e.response) {
+            log.error(e.response.status);
+            log.error(e.response.data);
+        }
+        throw e;
+    }
 }
 
 module.exports = { continueThread }