@exceedsystem
Last active March 23, 2024 07:58
Sample of a VSCode macro that calls OpenAI (ChatGPT) from VSCode
// License:MIT
// https://www.exceedsystem.net/2023/04/07/vscode-macros-macro-that-calls-chat-gpt-api-from-vscode
const vscode = require('vscode');
const process = require('process');
const { OpenAI } = require('openai');
// Completion API result class
class RunCompletionResult {
  constructor(status, content, model, cost) {
    this.status = status;
    this.content = content;
    this.model = model;
    this.cost = cost;
  }
}
// Exported macro commands for translating and asking questions
module.exports.macroCommands = {
  Question: {
    no: 1,
    func: askFunc,
  },
  Translate: {
    no: 2,
    func: translateFunc,
  },
  'Select Model': {
    no: 3,
    func: selectModel,
  },
};
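// The VSCode Macros extension reads this exported `macroCommands` map: each key
// becomes a selectable command name and its `func` is invoked when that command
// is run (`no` appears to control the display order in the picker).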
const COST_PER_TOKENS = 1000; // API prices in GPT_MODELS below are per 1,000 tokens
const GPT_MODELS = {
  'GPT-3.5 Turbo': {
    modelName: 'gpt-3.5-turbo-1106',
    apiCosts: { in: 0.001, out: 0.002 },
  },
  'GPT-4 Turbo': {
    modelName: 'gpt-4-1106-preview',
    apiCosts: { in: 0.01, out: 0.03 },
  },
};
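// Worked cost example (illustrative only; prices above may not match current OpenAI pricing):
// a request with 500 prompt tokens and 200 completion tokens on 'GPT-3.5 Turbo' costs
//   (500 / 1000) * 0.001 + (200 / 1000) * 0.002 = 0.0009 USD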
const MODEL_NAMES = Object.keys(GPT_MODELS);
if (!global.gptModel_nai3ohye) {
  global.gptModel_nai3ohye = MODEL_NAMES[0];
}
// The API key for OpenAI is obtained from an environment variable.
if (!process.env.OPENAI_API_KEY) {
  throw new Error(`Environment variable 'OPENAI_API_KEY' is not defined.`);
}
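// Example of setting the key before launching VS Code (replace sk-... with your own key):
//   export OPENAI_API_KEY=sk-...       (macOS/Linux shell)
//   setx OPENAI_API_KEY "sk-..."       (Windows; applies to newly started processes)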
const openai = new OpenAI({ apiKey: process.env.OPENAI_API_KEY });
// Output window for displaying results ('up5ue9IC' is a randomly generated variable name)
if (!global.up5ue9IC) {
  global.up5ue9IC = { outputWindow: vscode.window.createOutputChannel('OpenAI') };
}
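// Caching the channel on `global` reuses a single 'OpenAI' output channel across macro
// runs instead of creating a new channel every time the macro file is evaluated; the
// random suffix avoids clashes with globals defined by other macros.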
const outputWindow = global.up5ue9IC.outputWindow;
// Functions for processing translate and question commands
async function translateFunc() {
  await processCommand('Translate', 'You are a Japanese-English translator and you only provide translation services', 'Translate the following:');
}
async function askFunc() {
  await processCommand('Question', 'You answer the question correctly', '');
}
// Main function for processing commands
async function processCommand(command, behavior, instruction) {
  const selectedText = await getPromptText();
  if (!selectedText) return;
  showInputText(command, selectedText);
  const result = await runCompletion(behavior, instruction + selectedText);
  if (result.status === 'error') {
    // On failure the error message is carried in `content`.
    await vscode.window.showErrorMessage(result.content);
  } else if (result.status === 'ok') {
    showOutputText(result.content, result.model, result.cost);
  }
}
// Function to display input text in the output window
function showInputText(command, input) {
  outputWindow.show(false);
  outputWindow.appendLine(`--- <${command}> [${getTimeStampText()}] ---`);
  outputWindow.appendLine('Input:');
  outputWindow.appendLine(input);
  outputWindow.appendLine('');
}
// Function to display output text and token information in the output window
function showOutputText(response, model, cost) {
  outputWindow.appendLine(`Output(${model}):`);
  outputWindow.appendLine(response);
  outputWindow.appendLine('');
  outputWindow.appendLine('Cost (USD, prompt/completion tokens):');
  outputWindow.appendLine(cost);
  outputWindow.appendLine('');
}
// Function to get the prompt text
async function getPromptText() {
  const editor = vscode.window.activeTextEditor;
  if (!editor) {
    await vscode.window.showWarningMessage('Editor not found.');
    return null;
  }
  const document = editor.document;
  if (!document) {
    await vscode.window.showWarningMessage('Document not found.');
    return null;
  }
  let prompt = document.getText(editor.selection);
  if (!prompt.length) {
    prompt = await vscode.window.showInputBox({
      title: `${global.gptModel_nai3ohye} Prompt`,
    });
  }
  if (!prompt || prompt.length < 3) {
    return null;
  }
  return prompt;
}
// Function to call OpenAI's chat completion endpoint
async function runCompletion(behavior, content) {
  const { modelName, apiCosts } = GPT_MODELS[global.gptModel_nai3ohye];
  let openaiResponse;
  try {
    openaiResponse = await openai.chat.completions.create({
      model: modelName,
      messages: [
        { role: 'system', content: behavior },
        { role: 'user', content: content },
      ],
      temperature: 0.3,
    });
  } catch (err) {
    // Return an error result so the caller can show the message to the user.
    return new RunCompletionResult('error', err.message, modelName, '');
  }
  // Completion text
  const result = openaiResponse.choices[0].message?.content ?? '';
  // Calculate the API cost (prices are per COST_PER_TOKENS tokens)
  const { prompt_tokens, completion_tokens } = openaiResponse.usage;
  const totalAPICosts = ((prompt_tokens / COST_PER_TOKENS) * apiCosts.in
    + (completion_tokens / COST_PER_TOKENS) * apiCosts.out).toFixed(4);
  const cost = `${totalAPICosts}$(${prompt_tokens}/${completion_tokens})`;
  return new RunCompletionResult('ok', result, modelName, cost);
}
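// Fields read from the chat completion response (openai v4 Node SDK):
//   choices[0].message.content              -> the assistant's reply text
//   usage.prompt_tokens / completion_tokens -> token counts used for the cost estimate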
// Function to get a timestamp string
function getTimeStampText() {
  return new Date().toLocaleString('ja-JP', {
    year: 'numeric',
    month: '2-digit',
    day: '2-digit',
    hour: '2-digit',
    minute: '2-digit',
    second: '2-digit',
  });
}
// Function to select the GPT model via a quick pick
async function selectModel() {
  const modelName = await vscode.window.showQuickPick(MODEL_NAMES, {
    title: 'Select GPT Engine',
  });
  if (modelName) {
    global.gptModel_nai3ohye = modelName;
  }
}
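// Usage sketch (assuming the VSCode Macros extension, see the blog link above):
// register this file as the extension's macro file in your VS Code settings, then run
// the extension's "run macro" command from the command palette and pick
// 'Question', 'Translate', or 'Select Model'.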