LLM chat example using the Llama 3B model.
Install Anaconda and create a new environment.
# Python application image: install deps first for cache-friendly rebuilds.
FROM python:3.10-slim

WORKDIR /apps

# git is required so pip can resolve any VCS-based requirements.
# Combine update+install in one layer and drop the apt lists to keep the
# image small and avoid the stale-cache bug.
RUN apt-get update && apt-get install -y --no-install-recommends \
      git \
    && rm -rf /var/lib/apt/lists/*

# Copy only the dependency manifest first so the pip layer stays cached
# until requirements.txt itself changes.
COPY ./requirements.txt requirements.txt
RUN pip install --no-cache-dir -r requirements.txt

# Application source last — the most frequently changing layer.
COPY . .
The Elasticsearch version used here is 6.3.1.
Install the latest version of Docker,
download docker-compose.yaml,
and start Elasticsearch.
# The .editorconfig is used to maintain consistent code style.
# The .editorconfig file is supported by most text editors.
# See https://editorconfig.org
root = true

[*]
insert_final_newline = true
trim_trailing_whitespace = true
indent_style = space
# Build stage: produce a trimmed custom Java runtime with jlink.
# Only the modules listed in --add-modules are kept, so the runtime copied
# into the final stage is much smaller than a full JDK.
FROM eclipse-temurin:17 AS jre-build

RUN $JAVA_HOME/bin/jlink \
      --add-modules java.base,java.sql,java.naming,java.desktop,java.management,java.security.jgss,java.instrument \
      --strip-debug \
      --no-man-pages \
      --no-header-files \
      --compress=2 \
      --output /javaruntime
Run `docker compose up -d`
if you have Docker CLI v2 enabled,
or `docker-compose up -d`
if you prefer the standalone docker-compose CLI.
-- Simple lookup table: an auto-incrementing id paired with a free-text name.
CREATE TABLE names (
    id   SERIAL PRIMARY KEY,
    name TEXT
);
Install wrk and run the performance test.
I hereby claim:
To claim this, I am signing this object:
/** Checked exception signaling a failed service/API call. */
class ApiException extends Exception {}
@FunctionalInterface | |
interface ServiceBlock<T> { | |
T call() throws ApiException; | |
} | |
@FunctionalInterface |