Fix distributed train script

pull/1/head
Ross Wightman 5 years ago
parent 183d8e4aef
commit b0158a593e

@@ -1,5 +1,5 @@
#!/bin/bash
NUM_PROC=$1
shift
-python -m torch.distributed.launch --nproc_per_node=$NUM_PROC dtrain.py "$@"
+python -m torch.distributed.launch --nproc_per_node=$NUM_PROC train.py "$@"

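The commit corrects a typo in the launched script name (dtrain.py → train.py). For reference, the wrapper takes the process count as its first argument, shifts it off, and forwards all remaining arguments to train.py via torch.distributed.launch. A minimal usage sketch, assuming the wrapper is saved as distributed_train.sh (the file name is not shown in this diff) and using illustrative train.py flags:

# Hypothetical invocation: launch 4 processes (one per GPU);
# everything after the process count is passed through to train.py.
# The script name and the train.py flags here are assumptions for illustration.
./distributed_train.sh 4 /data/imagenet --model resnet50 --batch-size 64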