Using local LLMs anywhere (in a text editor) — example below with Obsidian.
Inspired by and adapted from LLM-automator.
Code example with mixtral.
import torch | |
from langchain_community.embeddings import ( | |
OpenAIEmbeddings, | |
OllamaEmbeddings, | |
HuggingFaceEmbeddings, | |
HuggingFaceBgeEmbeddings | |
) | |
def embedding_func(selected_embedding: str = "HuggingFaceEmbeddings"): | |
""" |
# Streamlit app header: offline code completion backed by a local Ollama model.
# Ollama usage reference: https://docs.llamaindex.ai/en/stable/examples/llm/ollama.html
import os
import json
import datetime
import sys
from io import StringIO

import streamlit as st  # pip install streamlit
from code_editor import code_editor  # pip install streamlit_code_editor
import ollama as ol  # pip install ollama
from llama_index.llms import Ollama
from llama_index.llms import ChatMessage

# Use the full browser width and set the page title.
st.set_page_config(layout='wide')
st.title('`Offline code completion`')
""" To use: install Ollama (or LM Studio), clone OpenVoice, then run this script in the OpenVoice directory:

git clone https://github.com/myshell-ai/OpenVoice
cd OpenVoice
git clone https://huggingface.co/myshell-ai/OpenVoice
cp -r OpenVoice/* .
pip install whisper pynput pyaudio streamlit ollama

script source: https://x.com/Thom_Wolf/status/1758140066285658351?s=20
"""
Using local LLMs anywhere (in a text editor) — example below with Obsidian.
Inspired by and adapted from LLM-automator.
Code example with mixtral.
import re | |
import subprocess | |
def _parse_names(data: str) -> list[str]: | |
""" | |
Parses names from a multi-line string where each line contains a name and other details. | |
Parameters: | |
data (str): A multi-line string containing names and other details. |
# based on: https://github.com/joaomdmoura/crewAI#getting-started
from crewai import Agent, Task, Crew
from langchain_community.llms import Ollama
from langchain_community.tools import DuckDuckGoSearchRun

# -- model selection: exactly one local Ollama model is active for the crew;
# the commented lines are alternative fine-tuned Arabic models.
# ollama_llm = Ollama(model="arabic_deepseek-llm")
# ollama_llm = Ollama(model="arabic_notux")
ollama_llm = Ollama(model="arabic_mixtral")