I have a parquet file named ah.parquet
.
It contains Apple Health data and has the following columns:
- type: Nullable(String)
- value: Nullable(String)
- start: Nullable(DateTime64(6))
- end: Nullable(DateTime64(6))
- created: Nullable(DateTime64(6))
I have a parquet file named ah.parquet
.
It contains Apple Health data and has the following columns:
import json | |
import urllib.parse | |
from dataclasses import InitVar, dataclass | |
from datetime import datetime, timedelta | |
from typing import Protocol | |
import httpx | |
import llm | |
import streamlit as st |
llmc() { | |
local system_prompt='Output a command that I can run in a ZSH terminal on macOS to accomplish the following task. Try to make the command self-documenting, using the long version of flags where possible. Output the command first enclosed in a "```zsh" codeblock followed by a concise explanation of how it accomplishes it.' | |
local temp_file=$(mktemp) | |
local capturing=true | |
local command_buffer="" | |
local first_line=true | |
local cleaned_up=false # Flag to indicate whether cleanup has been run | |
cleanup() { | |
# Only run cleanup if it hasn't been done yet |
This uses llm.datasette.io and OpenAI. | |
I use `git commit --template` to provide the output from the LLM to Git. This way, if you do not like the results, you | |
can quit your editor and no commit will be made. | |
# Shell function for generating a diff and editing it in your default editor: | |
gcllm() { | |
GIT_DIR="$(git rev-parse --git-dir)" | |
TEMPLATE="$GIT_DIR/COMMIT_EDITMSG_TEMPLATE" |
function speak_ollama() { | |
if ! command -v ollama &> /dev/null; then | |
echo "Error: ollama is not installed." | |
return 1 | |
fi | |
if [ "$#" -ne 2 ]; then | |
echo "Usage: speak_ollama <file_path> <model>" | |
return 1 | |
fi |
#!/bin/zsh | |
# #popclip | |
# name: Azure TTS | |
# icon: symbol:message.and.waveform | |
# Please apply for your own key | |
AZURE_REGION= | |
AZURE_SUBSCRIPTION_KEY= | |
# Create a temporary audio file |
from langchain.chat_models import ChatOpenAI | |
from pydantic import BaseModel, Field | |
from langchain.document_loaders import UnstructuredURLLoader | |
from langchain.chains.openai_functions import create_extraction_chain_pydantic | |
class LLMItem(BaseModel):
    """Pydantic schema for one product record extracted by the LLM.

    Used by ``create_extraction_chain_pydantic`` as the target shape for
    structured extraction from a loaded web page.
    """

    # Short, human-readable product name.
    title: str = Field(description="The simple and concise title of the product")
    # Free-text description of the product.
    description: str = Field(description="The description of the product")
def main(): |
#!/usr/bin/env bash | |
# sudo ln -s "$(pwd)/csv2datasette" /usr/bin/csv2datasette | |
# csv2datasette is meant to explore CSV data. It is not meant to create a persistent, long-lived database.
# csv2datasette is a bash script which opens CSV files directly in Datasette. It offers
# a number of options for reading and exploring CSV files, such as --stats, inspired by WTFCsv. | |
# | |
# `--stats` option includes, for each column: the column name, the number of unique values, | |
# the number of filled rows, the number of missing values, the minimum value, the maximum value,
# the average, the sum, the shortest string, the longest string, the number of numeric values, |