dist_test.sh

#!/usr/bin/env bash

CONFIG=$1
CHECKPOINT=$2
GPUS=$3
NNODES=${NNODES:-1}
NODE_RANK=${NODE_RANK:-0}
PORT=${PORT:-29500}
MASTER_ADDR=${MASTER_ADDR:-"127.0.0.1"}

PYTHONPATH="$(dirname $0)/..":$PYTHONPATH \
python -m torch.distributed.launch \
    --nnodes=$NNODES \
    --node_rank=$NODE_RANK \
    --master_addr=$MASTER_ADDR \
    --nproc_per_node=$GPUS \
    --master_port=$PORT \
    $(dirname "$0")/test.py \
    $CONFIG \
    $CHECKPOINT \
    --launcher pytorch \
    ${@:4}
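
The script takes a config file, a checkpoint, and a GPU count as positional arguments; everything from the fourth argument onward (${@:4}) is forwarded to test.py, and NNODES, NODE_RANK, MASTER_ADDR, and PORT can be overridden via environment variables for multi-node runs. A minimal invocation sketch follows; the config and checkpoint paths are placeholders, not files shipped with this script.

    # single node, 8 GPUs, extra args passed through to test.py
    bash dist_test.sh configs/example_config.py checkpoints/example_checkpoint.pth 8 --eval bbox

    # multi-node example: set the rendezvous variables before launching on each node
    NNODES=2 NODE_RANK=0 MASTER_ADDR=10.0.0.1 PORT=29500 \
        bash dist_test.sh configs/example_config.py checkpoints/example_checkpoint.pth 8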