-
Notifications
You must be signed in to change notification settings - Fork 30
/
start_client.sh
executable file
·114 lines (98 loc) · 4.02 KB
/
start_client.sh
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
#!/bin/bash
# Verify the NGC CLI (`ngc`) is reachable on PATH; abort the script otherwise.
# Fixes: the original used bare `exit`, which exits with the status of the last
# echo (0), so a missing CLI looked like success to any caller checking $?.
# Diagnostics now go to stderr, keeping stdout clean for command substitution.
function check_ngc_cli_installation {
    if ! command -v ngc > /dev/null; then
        echo "[ERROR] The NGC CLI tool not found on device in /usr/bin/ or PATH env var" >&2
        echo "[ERROR] Please follow: https://ngc.nvidia.com/setup/installers/cli" >&2
        exit 1
    fi
}
# Resolve the NGC API key and print it (possibly empty) on stdout.
# Precedence: the NGC_API_KEY environment variable, then the first "apikey"
# entry in ~/.ngc/config (the file written by `ngc config set`, whose lines
# look like "apikey = <value>" — the value is the third whitespace field).
# Fixes: useless `cat | grep`, unquoted $HOME, and unquoted `echo $ngc_key`
# (word-splitting could mangle a key containing whitespace or glob chars).
get_ngc_key_from_environment() {
    local ngc_key="${NGC_API_KEY:-}"
    # Fall back to the NGC CLI config only when the env var is unset/empty.
    if [ -z "$ngc_key" ] && [[ -f "$HOME/.ngc/config" ]]; then
        ngc_key=$(grep -m1 apikey "$HOME/.ngc/config" | awk '{print $3}')
    fi
    echo "$ngc_key"
}
# Resolve the NGC API key, prompting interactively as a last resort.
# Fixes: dropped the pointless `echo -e` (the message contains no escapes;
# the single quotes deliberately print the literal variable name), added -r
# to `read` so backslashes in a pasted key survive, and abort early if the
# key is still empty rather than failing obscurely at `docker login`.
check_ngc_cli_installation
NGC_API_KEY="$(get_ngc_key_from_environment)"
if [ -z "$NGC_API_KEY" ]; then
    echo 'Did not find environment variable "$NGC_API_KEY"'
    read -r -sp 'Please enter API key for ngc.nvidia.com: ' NGC_API_KEY
    echo
    if [ -z "$NGC_API_KEY" ]; then
        echo "[ERROR] No NGC API key provided; aborting." >&2
        exit 1
    fi
fi
set -e
# Docker login to Nvidia GPU Cloud (NGC).
# Fixes: `-p ${NGC_API_KEY}` put the secret on the command line, where it is
# visible in `ps` output and shell history (docker itself warns about this).
# Feed it via stdin instead. The literal username "$oauthtoken" is NGC's
# token-authentication convention, hence the single quotes.
printf '%s' "$NGC_API_KEY" | docker login nvcr.io -u '$oauthtoken' --password-stdin
# Load config file: default to ../config.sh next to this script, or take an
# explicit path as the first positional argument.
# Fixes: quoted $1 and $config_path (paths with spaces), `--` guard for
# readlink, and corrected the error message — the script takes a positional
# argument, not a -c flag as the old message claimed. Error goes to stderr.
script_path="$( cd "$(dirname "$0")" >/dev/null 2>&1 ; pwd -P )"
if [ -z "${1:-}" ]; then
    config_path="${script_path}/../config.sh"
else
    config_path=$(readlink -f -- "$1")
fi
# Configure the required environment variables.
if [[ ! -f "$config_path" ]]; then
    echo "Unable to load configuration file '${config_path}'. Pass the config path as the first argument to override." >&2
    exit 1
fi
source "$config_path"
# Clone the DeepStream reference apps and fetch the TAO models via NGC.
# Fixes: quoted every expansion so paths containing spaces don't word-split,
# and quoted the NGC version specs for the same reason.
# git clone https://github.com/NVIDIA-AI-IOT/deepstream_reference_apps.git ${tao_triton_root}/deepstream_reference_apps
git clone https://github.com/xunleiw/deepstream_reference_apps.git "${tao_triton_root}/deepstream_reference_apps"
export BODYPOSE3D_HOME="${tao_triton_root}/deepstream_reference_apps/deepstream-bodypose-3d"
# Download models using NGC
mkdir -p "$BODYPOSE3D_HOME/models"
cd "$BODYPOSE3D_HOME/models"
check_ngc_cli_installation
ngc registry model download-version "${pc_peoplenet_version}"
ngc registry model download-version "${pc_bodypose3dnet_version}"
# `tree` is only used to display the downloaded model layout.
apt-get install -y tree
tree "$BODYPOSE3D_HOME" -d
# Install Eigen 3.4.0 (header-only) and expose it as $BODYPOSE3D_HOME/eigen.
# Fixes: `ln eigen-3.4.0 eigen -s` placed the option after the operands,
# which only GNU ln tolerates (POSIX/BSD ln would create a hard link named
# "-s" target confusion); options now precede operands. Paths are quoted.
cd "$BODYPOSE3D_HOME"
wget https://gitlab.com/libeigen/eigen/-/archive/3.4.0/eigen-3.4.0.tar.gz
tar xvzf eigen-3.4.0.tar.gz
ln -s eigen-3.4.0 eigen
rm eigen-3.4.0.tar.gz
# Patch DeepStream's event-message payload schema and rebuild nvmsgconv,
# then build the 3D body pose custom inference plugin and the app itself.
# Fixes: quoted paths, and `make; make install` -> `make && make install` so
# the install ordering does not rely solely on `set -e` semantics.
cp "$BODYPOSE3D_HOME/sources/deepstream-sdk/eventmsg_payload.cpp" /opt/nvidia/deepstream/deepstream/sources/libs/nvmsgconv/deepstream_schema
apt-get install -y libjson-glib-dev uuid-dev
cd /opt/nvidia/deepstream/deepstream/sources/libs/nvmsgconv
make && make install
# Make 3D body pose sources
# CUDA_VER is read by the DeepStream makefiles; cuda_ver comes from config.sh.
export CUDA_VER="${cuda_ver}"
cd "$BODYPOSE3D_HOME/sources/nvdsinfer_custom_impl_BodyPose3DNet"
make
cd "$BODYPOSE3D_HOME/sources"
make
# Run the 3D body pose app on the demo clip, push the resulting poses through
# the Triton pose-classification client, plot the results, then clean up.
# Fixes: quoted every path expansion, and guarded the final recursive delete
# with ${var:?} so an unset/empty tao_triton_root aborts instead of deleting
# a path rooted at "/".
./deepstream-pose-estimation-app --input "file://${tao_triton_root}/scripts/pose_cls_e2e_inference/demo.mp4" \
  --output "${tao_triton_root}/scripts/pose_cls_e2e_inference/demo_3dbp.mp4" \
  --focal 1200.0 \
  --width 1920 \
  --height 1080 \
  --fps \
  --save-pose "${tao_triton_root}/scripts/pose_cls_e2e_inference/demo_3dbp.json"
# Run the Triton client
cd "${tao_triton_root}"
python3 -m tao_triton.python.entrypoints.tao_client "${tao_triton_root}/scripts/pose_cls_e2e_inference/demo_3dbp.json" \
  --dataset_convert_config "${tao_triton_root}/tao_triton/python/dataset_convert_specs/dataset_convert_config_pose_classification.yaml" \
  -m pose_classification_tao \
  -x 1 \
  -b 1 \
  --mode Pose_classification \
  -i https \
  -u localhost:8000 \
  --async \
  --output_path "${tao_triton_root}/scripts/pose_cls_e2e_inference"
# Plot inference results
python3 ./scripts/pose_cls_e2e_inference/plot_e2e_inference.py \
  ./scripts/pose_cls_e2e_inference/results.json \
  ./scripts/pose_cls_e2e_inference/demo.mp4 \
  ./scripts/pose_cls_e2e_inference/results.mp4
# Clean repo
rm -rf -- "${tao_triton_root:?}/deepstream_reference_apps"