Skip to content

Instantly share code, notes, and snippets.

import os
import re
import glob
import streamlit as st
from llama_index.core import VectorStoreIndex, SimpleDirectoryReader, ServiceContext, PromptTemplate
from llama_index.core.node_parser import SimpleNodeParser
from llama_index.embeddings.fastembed import FastEmbedEmbedding
from llama_index.vector_stores.qdrant import QdrantVectorStore
from llama_index.core.storage import StorageContext
from llama_index.llms.ollama import Ollama
#!/bin/bash
# Function to classify files in a given directory by type
classify_files() {
local directory="$1"
local target_type="$2"
local keyword="${3:-}" # Keyword is optional
# Validate if the directory exists
if [[ ! -d "$directory" ]]; then
import os
import sqlite3
import argparse
import logging
from datetime import datetime
import re
import hashlib
import json
import stat
from collections import Counter
import os
import gzip
import json
import argparse
import datetime
import subprocess
from multiprocessing import Pool, cpu_count
from tqdm import tqdm
import re
from dateutil import parser as date_parser
import os
import subprocess
import json
import argparse
import tiktoken
from multiprocessing import Pool, cpu_count
from langchain.text_splitter import RecursiveCharacterTextSplitter
from langchain_community.document_loaders import TextLoader
from tqdm import tqdm
import datetime
@vcappuccio
vcappuccio / ask_claude.py
Created June 21, 2024 20:21 — forked from p-i-/ask_claude.py
Minimal wrapper around Anthropic's Claude HTTP API
from pathlib import Path
from os import environ
from time import sleep
from textwrap import dedent
import nestedtext as nt
import httpx
import arrow
# Anthropic API key from the environment; None when unset, so callers must
# check before issuing requests.
API_KEY = environ.get('ANTHROPIC_API_KEY', None)
# IPython/Jupyter shell escape (not valid plain Python): downloads the Ollama
# install script and pipes it straight into sh.
!curl https://ollama.ai/install.sh | sh
import os
import asyncio
# Prepend-free overwrite of LD_LIBRARY_PATH so the dynamic linker searches
# /usr/lib64-nvidia — NOTE(review): this looks like the Google Colab NVIDIA
# library location; confirm the target environment.
os.environ.update({'LD_LIBRARY_PATH': '/usr/lib64-nvidia'})
async def run_process(cmd):
print('>>> starting', *cmd)
p = await asyncio.subprocess.create_subprocess_exec(
import urllib.request
import os
import json
from concurrent.futures import ThreadPoolExecutor, as_completed
from tqdm import tqdm
# JSON file consulted by load_checkpoint() (its existence is checked before
# loading); presumably persists download progress between runs — verify.
checkpoint_file = 'download_checkpoint.json'
def load_checkpoint():
if os.path.exists(checkpoint_file):
function prompt {
    # PowerShell calls this function to render each prompt line.
    # CONDA_PROMPT_MODIFIER carries the "(envname) " tag for the active
    # Conda environment (empty/absent when no environment is active).
    $envTag = $env:CONDA_PROMPT_MODIFIER
    # Conda tag, then the current location, then a newline and ">" marker.
    return "$envTag$(Get-Location)`n>"
}
# Make `python` invoke IPython in this session instead of the plain interpreter.
Set-Alias -Name python -Value ipython
# Read the image file raw and Base64-encode it for transport inside JSON.
$imageBytes = [System.IO.File]::ReadAllBytes("C:\Users\redzh\Documents\myStuff\NoName02\ant.jpg")
$imageBase64 = [System.Convert]::ToBase64String($imageBytes)
# Build a compact JSON request body asking the llava vision model to extract
# the text from the image; "stream" = $false asks for one complete response.
# NOTE(review): field shape (model/prompt/stream/images) matches Ollama's
# generate API, but the actual HTTP call is outside this view — confirm.
$body = @{
"model" = "llava:latest"
"prompt" = "Extract all the text in this picture"
"stream" = $false
"images" = @($imageBase64)
} | ConvertTo-Json -Compress