After compilation
apiVersion: tekton.dev/v1beta1
kind: PipelineRun
metadata:
  name: simple-pipeline
  annotations:
    tekton.dev/output_artifacts: '{"produce-output": [{"key": "artifacts/$PIPELINERUN/produce-output/mydestfile.tgz",
      "name": "produce-output-mydestfile", "path": "/tmp/outputs/mydestfile/data"}]}'
    tekton.dev/input_artifacts: '{}'
    tekton.dev/artifact_bucket: mlpipeline
    tekton.dev/artifact_endpoint: minio-service.kubeflow:9000
    tekton.dev/artifact_endpoint_scheme: http://
    tekton.dev/artifact_items: '{"produce-output": [["mydestfile", "$(workspaces.produce-output.path)/artifacts/$ORIG_PR_NAME/$(context.taskRun.name)/mydestfile"]]}'
    sidecar.istio.io/inject: "false"
    tekton.dev/template: ''
    pipelines.kubeflow.org/big_data_passing_format: $(workspaces.$TASK_NAME.path)/artifacts/$ORIG_PR_NAME/$TASKRUN_NAME/$TASK_PARAM_NAME
    pipelines.kubeflow.org/pipeline_spec: '{"name": "simple_pipeline"}'
  labels:
    pipelines.kubeflow.org/pipelinename: ''
    pipelines.kubeflow.org/generation: ''
spec:
  pipelineSpec:
    tasks:
    - name: produce-output
      taskSpec:
        steps:
        - name: main
          args:
          - --mydestfile
          - $(workspaces.produce-output.path)/artifacts/$ORIG_PR_NAME/$(context.taskRun.name)/mydestfile
          command:
          - sh
          - -ec
          - |
            program_path=$(mktemp)
            printf "%s" "$0" > "$program_path"
            python3 -u "$program_path" "$@"
          - |
            def _make_parent_dirs_and_return_path(file_path: str):
                import os
                os.makedirs(os.path.dirname(file_path), exist_ok=True)
                return file_path

            def produce_output(
                mydestfile,  # Note: _file is stripped
            ):
                import urllib.request
                print("starting download...")
                urllib.request.urlretrieve("http://212.183.159.230/20MB.zip", mydestfile)
                print("done")

            import argparse
            _parser = argparse.ArgumentParser(prog='Produce output', description='')
            _parser.add_argument("--mydestfile", dest="mydestfile", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)
            _parsed_args = vars(_parser.parse_args())

            _outputs = produce_output(**_parsed_args)
          image: quay.io/operate-first/opf-toolbox:v0.12.0
          env:
          - name: ORIG_PR_NAME
            valueFrom:
              fieldRef:
                fieldPath: metadata.labels['custom.tekton.dev/originalPipelineRun']
        - image: busybox
          name: output-taskrun-name
          command:
          - sh
          - -ec
          - echo -n "$(context.taskRun.name)" > "$(results.taskrun-name.path)"
        - image: busybox
          name: copy-results-artifacts
          command:
          - sh
          - -ec
          - |
            set -exo pipefail
            TOTAL_SIZE=0
            copy_artifact() {
              if [ -d "$1" ]; then
                tar -czvf "$1".tar.gz "$1"
                SUFFIX=".tar.gz"
              fi
              ARTIFACT_SIZE=`wc -c "$1"${SUFFIX} | awk '{print $1}'`
              TOTAL_SIZE=$( expr $TOTAL_SIZE + $ARTIFACT_SIZE)
              touch "$2"
              if [[ $TOTAL_SIZE -lt 3072 ]]; then
                if [ -d "$1" ]; then
                  tar -tzf "$1".tar.gz > "$2"
                elif ! awk "/[^[:print:]]/{f=1} END{exit !f}" "$1"; then
                  cp "$1" "$2"
                fi
              fi
            }
            copy_artifact $(workspaces.produce-output.path)/artifacts/$ORIG_PR_NAME/$(context.taskRun.name)/mydestfile $(results.mydestfile.path)
          onError: continue
          env:
          - name: ORIG_PR_NAME
            valueFrom:
              fieldRef:
                fieldPath: metadata.labels['custom.tekton.dev/originalPipelineRun']
        results:
        - name: mydestfile
          type: string
          description: /tmp/outputs/mydestfile/data
        - name: taskrun-name
          type: string
        metadata:
          labels:
            pipelines.kubeflow.org/cache_enabled: "true"
          annotations:
            artifact_outputs: '["mydestfile"]'
            pipelines.kubeflow.org/component_spec_digest: '{"name": "Produce output",
              "outputs": [{"name": "mydestfile"}], "version": "Produce output@sha256=ac34ebebe299a8eda0d9441960e6c889698e565204afdb9fad195417b7b9d2d3"}'
        workspaces:
        - name: produce-output
      workspaces:
      - name: produce-output
        workspace: simple-pipeline
    workspaces:
    - name: simple-pipeline
  workspaces:
  - name: simple-pipeline
    volumeClaimTemplate:
      spec:
        storageClassName: gp3
        accessModes:
        - ReadWriteOnce
        resources:
          requests:
            storage: 2Gi
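
For reference, below is a minimal sketch of the kind of KFP v1 pipeline source that kfp-tekton compiles into a PipelineRun like the one above. The produce_output body and the base image are taken from the embedded step code in the compiled YAML; the parameter name mydestfile_file, the use of create_component_from_func, the simple_pipeline function name, and the TektonCompiler invocation are assumptions for illustration, not the author's original source.

import kfp
from kfp.components import create_component_from_func, OutputPath
from kfp_tekton.compiler import TektonCompiler


def produce_output(mydestfile_file: OutputPath(str)):
    # Download a test file to the output path KFP provides.
    # The "_file" suffix is stripped, so the output is named "mydestfile".
    import urllib.request
    print("starting download...")
    urllib.request.urlretrieve("http://212.183.159.230/20MB.zip", mydestfile_file)
    print("done")


# Wrap the function as a lightweight component running in the toolbox image
# seen in the compiled step.
produce_output_op = create_component_from_func(
    produce_output,
    base_image="quay.io/operate-first/opf-toolbox:v0.12.0",
)


@kfp.dsl.pipeline(name="simple_pipeline")
def simple_pipeline():
    produce_output_op()


if __name__ == "__main__":
    # Compile to a Tekton PipelineRun; kfp-tekton adds the artifact-passing
    # steps, annotations, and workspace wiring shown above.
    TektonCompiler().compile(simple_pipeline, "simple_pipeline.yaml")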