Motley is a marketplace to help developers access open-source LLMs.
We will use the amazing FastChat python library to host LLMs.
pip install fschat
const { Configuration, OpenAIApi } = require("openai"); | |
/**
 * Lists all models available on the platform.
 * @async
 * @returns {Promise<Object>} A promise that resolves to an object containing a list of models.
 * @example
 * listModels().then(models => console.log(models));
 */
async function listModels() { | |
const response = await fetch("https://api.airtable.com/v0/appwJMZ6IAUnKpSwV/all", { | |
headers: { |
import { createClient } from 'embedbase-js' | |
// initialize client | |
const embedbase = createClient( | |
'https://api.embedbase.xyz', | |
process.env.EMBEDBASE_API_KEY | |
) | |
/**
 * Next.js route segment configuration.
 * Runs this API route on the Edge runtime instead of the default Node.js runtime.
 * (Original lines carried " | " table-extraction residue that made the export
 * syntactically invalid; removed.)
 */
export const config = {
  runtime: "edge",
};
public function generateText(string $input, array $options = []): string | |
{ | |
$url = 'https://app.embedbase.xyz/api/chat'; | |
$data = [ | |
'prompt' => $input, | |
'model' => $this->modelName, | |
'stream' => false, | |
]; |
import torch | |
import torch.nn as nn | |
from transformers import AutoTokenizer, AutoModelForCausalLM | |
from models import imagebind_model | |
from models.imagebind_model import ModalityType | |
import data | |
class ImageBindGPTJ(nn.Module): | |
def __init__(self, imagebind): |
// Insert one document (text plus its metadata) into the embedbase dataset.
// NOTE(review): `embedbase` and `datasetId` must be defined by the surrounding
// file — presumably the client created via createClient(); confirm against caller.
// (Original lines carried " | " table-extraction residue that broke the syntax; removed.)
embedbase.dataset(datasetId).batchAdd([
  {
    data: 'some text',
    metadata: {
      path: 'google.com',
    },
  },
]);
#!/bin/bash
# This script runs a hello-world pod on a specific node of a Kubernetes cluster
# to test whether the connection to the cluster is working.
# Abort immediately on any command failure.
set -e
# Run clean_up (defined below) if the user interrupts with Ctrl-C.
trap clean_up INT
function clean_up() { |
import os | |
from embedbase import get_app | |
from fastapi.responses import JSONResponse | |
from embedbase.database.memory_db import MemoryDatabase | |
from embedbase.embedding.openai import OpenAI | |
app = ( | |
get_app(settings) | |
.use_embedder(OpenAI(os.environ["OPENAI_API_KEY"])) |
{
  "dependencies": {
    "@llama-node/llama-cpp": "^0.0.32",
    "llama-node": "^0.0.32"
  },
  "devDependencies": {
    "@types/node": "^18.16.3"
  }
}