Skip to content

Instantly share code, notes, and snippets.

@aynik
Last active November 14, 2023 04:23
Show Gist options
  • Save aynik/9160a02686e34b114ecfa7bdcbf2f559 to your computer and use it in GitHub Desktop.
ChatGPT Web to CLI Bridge & HTTP API
(() => {
// Page-side bridge: runs inside the ChatGPT web UI, connects to a local
// WebSocket server on port 1234 (the CLI side), types received prompts into
// the page's textarea, and scrapes the rendered reply back over the socket.
let websocket;
// Selectors
// NOTE(review): these assume the ChatGPT web UI markup at the time of
// writing — verify against the current DOM if the bridge stops reacting.
const textareaSelector = "textarea";
const submitButtonSelector = "button";
const containerSelector = ".group.bg-gray-50";
const contentSelector = ".whitespace-pre-wrap";
// Utilities
// The prompt input box.
const getTextarea = () => document.querySelector(textareaSelector);
// The send button: first <button> inside the textarea's parent element.
const getSubmitButton = () =>
getTextarea().parentElement.querySelector(submitButtonSelector);
// Text element of the newest response container, or undefined if none.
const getContent = () => Array.from(
document.querySelectorAll(containerSelector)
).pop()?.querySelector(contentSelector);
// Presumably "transition-colors" is present on the idle submit button and
// removed while a reply streams (see monitorChanges) — TODO confirm.
const hasTransitionColorsClass = (submitButton) =>
submitButton.classList.contains("transition-colors");
// Observe `target` with `config`; the observer is passed to the callback
// so the callback can disconnect itself when done.
const createObserver = (target, config, callback) => {
const observer = new MutationObserver((mutationsList) => {
callback(mutationsList, observer);
});
observer.observe(target, config);
return observer;
};
// WebSocket Handlers
// Connect to the CLI side; listeners are attached before any message flows.
const initializeWebSocket = () => {
try {
websocket = new WebSocket("ws://localhost:1234");
websocket.addEventListener("open", onOpen);
websocket.addEventListener("message", onMessage);
} catch (err) {
console.error("Failed to initialize WebSocket:", err);
}
};
// On connect, report "loaded" if the page was just reloaded by loadModel()
// (isLoading flag persisted across navigation in localStorage), otherwise
// report "connected"; then clear the flag.
const onOpen = (event) => {
console.log("Connected:", event);
send(
websocket,
localStorage.getItem("isLoading") === "1" ? "loaded" : "connected"
);
localStorage.setItem("isLoading", "0");
};
// Dispatch incoming commands: "load" switches model (reloads the page),
// "text" types and submits a prompt.
const onMessage = (event) => {
const { type, data } = JSON.parse(event.data);
if (type === "load") loadModel(data);
if (type === "text") setText(data);
};
// Primary Logic
// Put `text` into the textarea and fire a bubbling "input" event so the
// page's framework notices the change; then click submit as soon as the
// button's "disabled" attribute clears, and start watching for the reply.
const setText = (text) => {
const textarea = getTextarea();
const submitButton = getSubmitButton();
textarea.value = text;
textarea.dispatchEvent(new Event("input", { bubbles: true }));
createObserver(
submitButton,
{ attributes: true },
(mutationsList, observer) => {
for (let mutation of mutationsList) {
if (
mutation.type === "attributes" &&
mutation.attributeName === "disabled" &&
!submitButton.disabled
) {
submitButton.click();
observer.disconnect();
monitorChanges();
break;
}
}
}
);
};
// Watch the submit button's class list. classState flips true once
// "transition-colors" disappears; when the class reappears afterwards the
// reply is taken as complete: scrape it and send it as an "end" message.
const monitorChanges = () => {
const submitButton = getSubmitButton();
if (!submitButton) return;
let classState = false;
createObserver(
submitButton,
{ attributes: true, attributeFilter: ["class"] },
(_, observer) => {
if (!hasTransitionColorsClass(submitButton)) {
classState = true;
} else if (classState) {
const content = getContent();
if (content) send(websocket, "end", content.innerText);
observer.disconnect();
}
}
);
};
// Navigate to a new chat with the requested model. The random `v` query
// value varies the URL on each load; isLoading=1 makes the next onOpen
// report "loaded" instead of "connected".
const loadModel = (model) => {
localStorage.setItem("isLoading", "1");
const value = (Math.random() + 1).toString(36).substring(7);
window.location = `/chat?model=${model}&v=${value}`;
};
// Send a typed JSON message, silently dropping it if the socket isn't open.
const send = (websocket, messageType, messageData) => {
if (websocket.readyState === WebSocket.OPEN) {
websocket.send(JSON.stringify({ type: messageType, data: messageData }));
}
};
// Maintenance Logic
// Reconnect loop: (re)open the socket every 500 ms while it is missing or
// closed, so the bridge survives CLI restarts and page navigations.
const keepAlive = () => {
setInterval(() => {
if (!websocket || websocket.readyState === WebSocket.CLOSED)
initializeWebSocket();
}, 500);
};
keepAlive();
})();
#!/usr/bin/env node
// CLI side of the ChatGPT web-to-CLI bridge.
// Starts a WebSocket server on port 1234 that the browser-injected script
// connects to, sends the prompt (from argv and/or stdin), and prints the
// reply to stdout before exiting.
const fs = require("fs"),
  path = require("path"),
  WebSocket = require("ws");
const GPT_40_MODEL = "gpt-4";
const GPT_35_MODEL = "text-davinci-002-render-sha";
// Behaviour is selected by the name this file is invoked as (via symlinks):
//   *-lines    -> print each message on its own line via console.log
//   *-continue -> keep the current conversation (skip the model reload)
//   *-4        -> request the GPT-4 model
const scriptName = path.basename(process.argv[1]),
  isLines = scriptName.includes("-lines"),
  isContinue = scriptName.includes("-continue"),
  modelName = scriptName.endsWith("-4") ? GPT_40_MODEL : GPT_35_MODEL;
// Prompt sources: command-line arguments and/or piped stdin.
const joinedArguments = process.argv.slice(2).join(" "),
  inputFromStdin = process.stdin.isTTY
    ? null
    : fs.readFileSync("/dev/stdin").toString();
const server = new WebSocket.Server({ port: 1234 });
server.on("connection", (socket) => {
  socket.on("message", (message) => {
    const parsedMessage = JSON.parse(message);
    switch (parsedMessage.type) {
      case "connected":
        if (!isContinue) {
          // Fresh conversation: ask the page to reload with the chosen
          // model; the page answers "loaded" once it reconnects.
          socket.send(JSON.stringify({ type: "load", data: modelName }));
          break;
        }
      // intentional fall-through: in continue mode send the prompt right away
      case "loaded":
        // When both stdin and arguments are present, the arguments act as
        // an instruction and the stdin text is wrapped in a fenced block.
        socket.send(
          JSON.stringify({
            type: "text",
            data:
              inputFromStdin && joinedArguments.length > 0
                ? joinedArguments + ":\n```\n" + inputFromStdin + "\n```"
                : inputFromStdin
                ? inputFromStdin
                : joinedArguments,
          })
        );
        break;
      default:
        if (isLines) {
          console.log(parsedMessage.data);
        } else {
          // FIX: previously wrote the parsed message object itself, which
          // throws a TypeError (stdout.write requires a string or Buffer);
          // write the string payload instead.
          process.stdout.write(parsedMessage.data);
        }
        if ("end" === parsedMessage.type) {
          process.stdout.write("\n");
          process.exit(0);
        }
    }
  });
});
const http = require("http");
module.exports = ({ ws, queue }) => {
const WS_PORT = 1234;
const GPT_40_MODEL = "gpt-4";
const GPT_35_MODEL = "text-davinci-002-render-sha";
const q = queue(async (task) => {
return new Promise((resolve, reject) => {
task().then(resolve).catch(reject);
});
}, 1);
const addToQueue = (queue, asyncFn) =>
new Promise((resolve, reject) => {
queue.push(() => asyncFn().then(resolve).catch(reject));
});
const pipeToResponse = (req, res, options = {}) =>
new Promise(async (resolve, reject) => {
try {
res.setHeader("Content-Type", "text/plain; charset=UTF-8");
res.setHeader("Transfer-Encoding", "chunked");
const reqBodyBuffers = [];
for await (const chunk of req) {
reqBodyBuffers.push(chunk);
}
const reqBody = Buffer.concat(reqBodyBuffers).toString();
const server = new ws.Server({ port: WS_PORT });
server.on("connection", (socket) => {
let storedData = "",
previousData = "";
socket.on("message", (message) => {
const parsedMessage = JSON.parse(message);
switch (parsedMessage.type) {
case "connected":
if (!options.isContinue) {
socket.send(
JSON.stringify({
type: "load",
data: options.modelName || GPT_35_MODEL,
})
);
break;
}
case "loaded":
socket.send(
JSON.stringify({
type: "text",
data: reqBody,
})
);
break;
case "text":
storedData = parsedMessage.data.replace(/\u200B/g, "");
res.write(storedData.slice(previousData.length));
previousData = storedData;
break;
default:
res.end(parsedMessage.data.replace(/\u200B/g, "")
.slice(previousData.length));
socket.terminate();
server.close();
resolve();
}
});
});
} catch (error) {
reject(error);
}
});
const handleChatRequest = async (req, res) => {
if (req.url === "/chat") {
await addToQueue(q, () => pipeToResponse(req, res));
} else if (req.url === "/chat-continue") {
await addToQueue(q, () => pipeToResponse(req, res, { isContinue: true }));
} else if (req.url === "/chat-4") {
await addToQueue(q, () =>
pipeToResponse(req, res, {
modelName: GPT_40_MODEL,
})
);
} else if (req.url === "/chat-continue-4") {
await addToQueue(q, () =>
pipeToResponse(req, res, {
modelName: GPT_40_MODEL,
isContinue: true,
})
);
} else {
res.writeHead(404);
res.end();
}
};
return {
handleChatRequest
}
};
#!/usr/bin/env node
// HTTP front-end for the CLI bridge: POST /chat (new conversation) or
// POST /chat-continue (continue) pipes the request body into a spawned
// chat client process and streams the child's stdout back as the response.
const http = require("http");
const { spawn } = require("child_process");
const { queue } = require("async");
const CHAT_PROGRAM = "chat-client.js";
const CHAT_CONTINUE_PROGRAM = "chat-continue-client.js"; // SYMBOLIC LINK
// Single-worker queue: only one chat client runs at a time, since each one
// binds the bridge's WebSocket port exclusively.
const q = queue(async (task) => task(), 1);
// Enqueue asyncFn and settle with its outcome once the worker runs it.
function addToQueue(asyncFn) {
  return new Promise((resolve, reject) => {
    q.push(() => asyncFn().then(resolve).catch(reject));
  });
}
// Wire request -> child stdin and child stdout -> response; settle when the
// child exits, reject if it fails to start.
const pipeToResponse = (req, res, childProcess) =>
  new Promise((resolve, reject) => {
    req.pipe(childProcess.stdin);
    childProcess.stdout.pipe(res);
    childProcess.on("exit", resolve);
    childProcess.on("error", reject);
  });
const server = http.createServer(async (req, res) => {
  if (req.method !== "POST") {
    res.writeHead(405);
    res.end();
    return;
  }
  try {
    if (req.url === "/chat") {
      await addToQueue(() => pipeToResponse(req, res, spawn(CHAT_PROGRAM)));
    } else if (req.url === "/chat-continue") {
      await addToQueue(() =>
        pipeToResponse(req, res, spawn(CHAT_CONTINUE_PROGRAM))
      );
    } else {
      res.writeHead(404);
      res.end();
    }
  } catch (err) {
    // FIX: a spawn failure (e.g. ENOENT for a missing client script)
    // previously surfaced as an unhandled rejection, which crashes the
    // whole server on modern Node; answer 500 instead.
    console.error("chat request failed:", err);
    if (!res.headersSent) res.writeHead(500);
    res.end();
  }
});
const PORT = process.env.PORT || 5678;
server.listen(PORT, () => {
  console.log(`Server listening on port ${PORT}`);
});
@aynik
Copy link
Author

aynik commented Apr 8, 2023

Instructions:

  1. Just clone and build my fork of ChatGPT-electron for your platform of choice.
  2. Put chat-client.js as chat in your path and use it like: chat < prompt.txt (piped file) or chat your prompt here (arguments).
  3. Make a symbolic link to chat-client.js from chat-continue-client.js to continue conversations.
  4. You can also copy chat-serve.js and start it to run a service on top of this that can be called from other services or applications remotely (you can use Tailscale Funnel to accomplish this).

Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment