def lambda_handler(event, context):
    """AWS Lambda handler implementing a Marco/Polo echo.

    Parameters
    ----------
    event : dict
        Invocation payload; may carry a "name" key.
    context : object
        Lambda runtime context object (unused).

    Returns
    -------
    dict
        {"name": "Polo"} when event["name"] == "Marco",
        otherwise {"name": "No"}.
    """
    # .get() lets a payload without a "name" key fall through to the
    # default answer instead of raising KeyError inside the Lambda runtime.
    if event.get("name") == "Marco":
        return {"name": "Polo"}
    return {"name": "No"}
aws lambda invoke \
--cli-binary-format raw-in-base64-out \
def hello():
    """Print a short greeting to stdout."""
    # Original lines carried '|' table-extraction artifacts that made the
    # snippet invalid Python; this is the cleaned definition.
    print("Hello Cloud")
import time

from locust import HttpUser, task, between


class QuickstartUser(HttpUser):
    """Locust load-test user that repeatedly fetches two endpoints."""

    # Each simulated user pauses 1-5 seconds between task executions.
    wait_time = between(1, 5)

    @task
    def hello_world(self):
        """Request the root page and the /fruit endpoint once per run."""
        self.client.get("/")
        self.client.get("/fruit")
# Amazon Forecast Example
[source, python]
----
import boto3 | |
forecast = boto3.client('forecast') | |
response = forecast.create_dataset_group( | |
DatasetGroupName='forecast_dataset_group', | |
Domain='CUSTOM', |
2022-12-10 20:25:31.078Z: docker: Error response from daemon: failed to create shim: OCI runtime create failed: runc create failed: unable to start container process: error during container init: error running hook #1: error running hook: exit status 1, stdout: , stderr: Auto-detected mode as 'legacy' | |
2022-12-10 20:25:31.078Z: nvidia-container-cli: mount error: file creation failed: /var/lib/docker/overlay2/a77bbe6b76ade0966f66dc9df640032701292b1b70546365b6457b9054c67457/merged/run/nvidia-persistenced/socket: no such device or address: unknown. | |
2022-12-10 20:25:31.078Z: Stop (743 ms): Run: docker run --sig-proxy=false -a STDOUT -a STDERR --mount type=bind,src=/var/lib/docker/codespacemount/workspace,dst=/workspaces --mount type=volume,src=minikube-config,dst=/home/vscode/.minikube --mount source=codespaces-linux-var-lib-docker,target=/var/lib/docker,type=volume --mount source=/root/.codespaces/shared,target=/workspaces/.codespaces/shared,type=bind --mount source=/var/lib/docker/codespacemount/.persistedshare, |
#!/bin/bash | |
set -e | |
# This script installs a pip package in compute instance azureml_py38 environment. | |
sudo -u azureuser -i <<'EOF' | |
PACKAGE=numpy | |
ENVIRONMENT=azureml_py38 |
FROM mcr.microsoft.com/azureml/openmpi4.1.0-cuda11.1-cudnn8-ubuntu18.04:20221010.v1 | |
ENV AZUREML_CONDA_ENVIRONMENT_PATH /azureml-envs/pytorch-1.10 | |
# Create conda environment | |
RUN conda create -p $AZUREML_CONDA_ENVIRONMENT_PATH \ | |
python=3.8 \ | |
pip=22.1.2 \ | |
pytorch=1.10.0 \ | |
torchvision=0.11.1 \ | |
torchaudio=0.10.0 \ |
def lambda_handler(event, context):
    """Reply {"name": "Polo"} when the caller is "Marco"; otherwise {"name": "No"}.

    Note: like the original, a payload lacking the "name" key raises KeyError.
    """
    caller = event["name"]
    answer = "Polo" if caller == "Marco" else "No"
    return {"name": answer}
aws lambda invoke \
--cli-binary-format raw-in-base64-out \
2022-10-13 18:26:47.494Z: Configuration starting... | |
2022-10-13 18:26:48.368Z: @microsoft/vscode-dev-containers-cli 0.71.0. | |
2022-10-13 18:26:48.396Z: Start: Resolving Remote | |
2022-10-13 18:26:48.396Z: $ docker ps -q -a --filter label=Type=codespaces | |
2022-10-13 18:26:48.424Z: Stop (37 ms): Run: docker ps -q -a --filter label=Type=codespaces | |
2022-10-13 18:26:48.424Z: $ /usr/bin/node /usr/lib/node_modules/@microsoft/vscode-dev-containers-cli/dist/spec-node/devContainersSpecCLI.js up --user-data-folder /var/lib/docker/codespacemount/.persistedshare --container-data-folder .vscode-remote/data/Machine --container-system-data-folder /var/vscode-remote --workspace-folder /var/lib/docker/codespacemount/workspace/mlops-template --id-label Type=codespaces --log-level info --log-format json --config /var/lib/docker/codespacemount/workspace/mlops-template/.devcontainer/devcontainer.json --override-config /root/.codespaces/shared/merged_devcontainer.json --default-user-env-probe loginInteractiveShell --remove-existing-contai |
USER root | |
RUN apt update \ | |
&& apt install python3-pip -y && apt install python3-venv -y | |
USER coder | |
ENV VIRTUAL_ENV=/home/coder/venv | |
RUN python3 -m venv $VIRTUAL_ENV | |
ENV PATH="$VIRTUAL_ENV/bin:$PATH" | |
RUN pip install --no-cache-dir --upgrade pip \ | |
&& pip install --no-cache-dir black==22.3.0 click==8.1.3 pytest==7.1.3 \ |
lynx -dump https://en.wikipedia.org/wiki/Albert_Einstein | wc \
--bytes
The result shows:
432232