Skip to content

Instantly share code, notes, and snippets.

@vuiseng9
Created January 19, 2022 21:32
Show Gist options
  • Save vuiseng9/d256ce9d8810ea3cf6d1b44b15013507 to your computer and use it in GitHub Desktop.
# Pull the OpenVINO dev image used for model conversion.
docker pull openvino/ubuntu18_dev:2021.4.2

# Convert every ONNX model under the repo tree to OpenVINO IR.
# For each model, the IR and the Model Optimizer log are written to an
# ir/ subdirectory next to the .onnx file.
# NOTE: glob replaces the original `$(ls …)` — parsing ls breaks on
# unusual filenames and is a well-known shell anti-pattern.
for onnx in /data1/vchua/tld-poc/repo/*/*.onnx; do
    [ -e "$onnx" ] || continue          # glob matched nothing: skip literal pattern
    base=$(basename "$onnx" .onnx)
    dir=$(dirname "$onnx")
    irdir=${dir}/ir

    echo "[Info] Processing ----"
    echo "$dir"
    echo "$base"
    printf "\n\n"

    mkdir -p "$irdir"

    # Model Optimizer: ONNX -> IR; capture stdout+stderr into a per-model log.
    mo_onnx.py --input_model "$onnx" --model_name "$base" --output_dir "$irdir" 2>&1 | tee "$irdir/mo.log"
done
@vuiseng9
Copy link
Author

vuiseng9 commented Feb 18, 2022

# Variant meant to run from inside a model directory: convert every *.onnx
# in the current directory into a local ./ir subdirectory.
for onnx in *.onnx; do
    [ -e "$onnx" ] || continue          # no *.onnx present: skip literal pattern
    base=$(basename "$onnx" .onnx)
    irdir=ir

    echo "[Info] Processing ----"
    # BUG FIX: the original echoed "$dir", but `dir` is never assigned in this
    # variant (its assignment was commented out), printing an empty/stale value.
    echo "$base"
    printf "\n\n"

    mkdir -p "$irdir"

    # tee -a: logs from all models in this directory accumulate in ir/mo.log.
    mo_onnx.py --input_model "$onnx" --model_name "$base" --output_dir "$irdir" 2>&1 | tee -a "$irdir/mo.log"
done

@vuiseng9
Copy link
Author

vuiseng9 commented Feb 18, 2022

# Benchmark every OpenVINO IR (*.xml) in the current directory with
# benchmark_app (sync API, batch 1, 1 infer request, 250 iterations).
# DNNL_VERBOSE=1 makes oneDNN print per-primitive timing, logged per model
# under ../benchlog/.
benchdir=../benchlog                    # no trailing slash: avoids '//' in log paths
mkdir -p "$benchdir"

for ir in *.xml; do
    [ -e "$ir" ] || continue            # no *.xml present: skip literal pattern
    base=$(basename "$ir" .xml)

    # assumes INTEL_OPENVINO_DIR is exported by OpenVINO's setupvars.sh — TODO confirm
    DNNL_VERBOSE=1 python3 "$INTEL_OPENVINO_DIR/deployment_tools/tools/benchmark_tool/benchmark_app.py" \
        -b 1 -api sync -nireq 1 -niter 250 -m "$ir" | tee "$benchdir/log.dnnl.$base"
done

@vuiseng9
Copy link
Author

vuiseng9 commented Feb 18, 2022

# One-shot Model Optimizer run inside the stock OpenVINO 2021.4.2 dev container.
# ${HOME} is bind-mounted at /hosthome so the host-side ONNX model and the
# output directory are visible inside the container.
docker run -v ${HOME}:/hosthome openvino/ubuntu18_dev:2021.4.2 python3 /opt/intel/openvino_2021.4.752/deployment_tools/model_optimizer/mo_onnx.py --input_model /hosthome/temp/sweep/BertIntermediate-bs001-sl384-l1023-i4096-fp32.onnx --model_name test_mo --output_dir  /hosthome/temp/open

# Same conversion via a custom image (vuiseng9/ov-ubt18_data_dev) and a
# different output dir. NOTE(review): --privileged grants broad host access —
# confirm it is actually required here.
docker run --privileged -v ${HOME}:/hosthome vuiseng9/ov-ubt18_data_dev:2021.4.2-bert-pydemo python3 /opt/intel/openvino_2021.4.752/deployment_tools/model_optimizer/mo_onnx.py --input_model /hosthome/temp/sweep/BertIntermediate-bs001-sl384-l1023-i4096-fp32.onnx --model_name test_mo --output_dir  /hosthome/temp/

@vuiseng9
Copy link
Author

mo --input_model squad-BertForQuestionAnswering.dense.fp32.onnx --model_name bert-large-fp32 --output_dir bert-large-fp32 --input input.0,input.1,input.2 --input_shape [1,-1],[1,-1],[1,-1]

Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment