#!/usr/bin/env bash
# Copyright (c) OpenMMLab. All rights reserved.
#
# Distributed evaluation launcher: runs test.py under torch.distributed.launch.
#
# Usage:
#   dist_test.sh CONFIG CHECKPOINT GPUS [extra test.py args...]
#
# Positional arguments:
#   CONFIG      path to the model config file
#   CHECKPOINT  path to the checkpoint to evaluate
#   GPUS        number of processes (GPUs) to spawn on this node
# Environment overrides:
#   NNODES      total number of nodes (default: 1)
#   NODE_RANK   rank of this node (default: 0)
#   PORT        master port for rendezvous (default: 29500)
#   MASTER_ADDR address of the rank-0 node (default: 127.0.0.1)

if [ $# -lt 3 ]; then
    echo "Usage: $0 CONFIG CHECKPOINT GPUS [extra test.py args...]" >&2
    exit 1
fi

CONFIG=$1
CHECKPOINT=$2
GPUS=$3
NNODES=${NNODES:-1}
NODE_RANK=${NODE_RANK:-0}
PORT=${PORT:-29500}
MASTER_ADDR=${MASTER_ADDR:-"127.0.0.1"}

# Prepend the repo root (parent of this script's directory) so test.py can
# import the project package without installation.
PYTHONPATH="$(dirname "$0")/..":$PYTHONPATH \
python -m torch.distributed.launch \
    --nnodes="$NNODES" \
    --node_rank="$NODE_RANK" \
    --master_addr="$MASTER_ADDR" \
    --nproc_per_node="$GPUS" \
    --master_port="$PORT" \
    "$(dirname "$0")/test.py" \
    "$CONFIG" \
    "$CHECKPOINT" \
    --launcher pytorch \
    "${@:4}"