YOLOv6 deployment: ONNX to TensorRT inference, COCO2017 evaluation, and DeepStream integration.
python deploy/TENSORRT/onnx-tensorrt.py --image_path path to Inference Image Folder (only folders are supported)
--result_path path to Inference Result
--onnx path to Yolov6 onnx file
--engine path to generate Yolov6 TensorRT engine
FP16 Inference
python deploy/TENSORRT/onnx-tensorrt.py --image_path path to Inference Image Folder (only folders are supported)
--result_path path to Inference Result
--onnx path to Yolov6 onnx file
--engine path to generate Yolov6 TensorRT engine
--half
INT8 Inference
python deploy/TENSORRT/onnx-tensorrt.py --image_path path to Inference Image Folder (only folders are supported)
--result_path path to Inference Result
--onnx path to Yolov6 onnx file
--engine path to generate Yolov6 TensorRT engine
--int8
Inference Demo
TensorRT COCO2017 Val Benchmark
FP32
python deploy/DEEPSTREAM/tensorrt_dynamic/eval_yolov6.py
--image_path path to COCO Eval Image Folder (only folders are supported)
--annotations path to COCO Annotations --onnx path to Yolov6 onnx file
--engine path to generate Yolov6 TensorRT engine
FP16
python deploy/DEEPSTREAM/tensorrt_dynamic/eval_yolov6.py
--image_path path to COCO Eval Image Folder (only folders are supported)
--annotations path to COCO Annotations --onnx path to Yolov6 onnx file
--engine path to generate Yolov6 TensorRT engine
--half
INT8
python deploy/DEEPSTREAM/tensorrt_dynamic/eval_yolov6.py
--image_path path to COCO Eval Image Folder (only folders are supported)
--annotations path to COCO Annotations --onnx path to Yolov6 onnx file
--engine path to generate Yolov6 TensorRT engine
--int8
--calib_data_path path to calibration Image Folder
--calib_file_path path to calibration table
cd deepstream
export CUDA_VER=11.4 # set CUDA_VER to match your platform's CUDA version (11.4 shown for dGPU; adjust for Jetson — verify against your JetPack release)
make -j32
cd ..
# video inference , output file: output_yolov6.mp4
deepstream-app -c deepstream_app_config_yoloV6.txt