salloc / Dockerfile
Created February 28, 2025 01:54 — forked from rizbud/Dockerfile
Dockerfile for creating a new Laravel project
FROM php:8.4
# Install Composer
COPY --from=composer:latest /usr/bin/composer /usr/bin/composer
# Install Git
RUN apt-get update && apt-get install -y git unzip && apt-get clean
# Update PATH
ENV PATH="${PATH}:/root/.composer/vendor/bin"
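Usage sketch (not part of the gist; the image tag laravel-init and project name example-app are placeholders): build the image and run Composer inside it to scaffold the project into the current directory:
docker build -t laravel-init .
docker run --rm -v "$(pwd):/app" -w /app laravel-init \
  composer create-project laravel/laravel example-app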
salloc / llamastack-build.yaml
Created February 28, 2025 01:53 — forked from jameswnl/llamastack-build.yaml
As of llama-stack 0.1.2, using the ollama template
version: '2'
distribution_spec:
  description: Use (an external) Ollama server for running LLM inference
  container_image: null
  providers:
    inference:
    - remote::ollama
    vector_io:
    - inline::faiss
    - remote::chromadb
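Build sketch (assumed usage, not part of the gist): with the llama-stack CLI installed, a config like this is typically passed to the build command, e.g.:
pip install llama-stack==0.1.2
llama stack build --config llamastack-build.yaml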
#!/bin/sh
# Start tailscaled in userspace-networking mode and expose a local SOCKS5 proxy
/app/tailscaled --tun=userspace-networking --socks5-server=localhost:1055 &
# Join the tailnet using the auth key provided via environment variable
/app/tailscale up --auth-key=${TAILSCALE_AUTHKEY} --hostname=cloudrun-app
# Route the application's outbound traffic through the Tailscale SOCKS5 proxy
ALL_PROXY=socks5://localhost:1055/ /app/my-app
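Deployment sketch (assumed; the service name, image path, and key value are placeholders): the auth key is supplied to Cloud Run as an environment variable at deploy time:
gcloud run deploy my-app \
  --image gcr.io/PROJECT_ID/my-app \
  --set-env-vars TAILSCALE_AUTHKEY=tskey-auth-xxxxx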
graph TD
    A[LLM-Powered Applications] --> B[Generative AI Apps]
    A --> C[Chatbots/Virtual Assistants]
    A --> D[LLM Agents]
    A --> E[Specific Tools]
    A --> F[Technologies]
    B --> B1[Text Generation]
    B1 --> B1a((ChatGPT))
    B1 --> B1b((Jasper))
the past is only what you remember
salloc / vot-node-translate-flow
Last active February 10, 2025 12:30
vot node translate
// @vot.js/node client and video-data helpers
import { VOTWorkerClient, videoData as vd } from "@vot.js/node";

// Status codes used by the translation flow
const VOT_STATUS_FAILED = 0;
const VOT_STATUS_FINISHED = 1;
const VOT_STATUS_WAITING = 2;
const VOT_STATUS_LONG_WAITING = 3;
const VOT_STATUS_PART_CONTENT = 5;
const VOT_STATUS_AUDIO_REQUESTED = 6;
import matplotlib.pyplot as plt
import numpy as np
import time

def merge_sort_visualize(arr, left=0, right=None, depth=0):
    # Default to the full array bounds on the top-level call
    if right is None:
        right = len(arr) - 1
    if left < right:
        mid = (left + right) // 2