Fix distributed train script

pull/1/head
Ross Wightman 6 years ago
parent 183d8e4aef
commit b0158a593e

@@ -1,5 +1,5 @@
 #!/bin/bash
 NUM_PROC=$1
 shift
-python -m torch.distributed.launch --nproc_per_node=$NUM_PROC dtrain.py "$@"
+python -m torch.distributed.launch --nproc_per_node=$NUM_PROC train.py "$@"

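As a quick illustration (not part of the commit), a hypothetical invocation of the fixed wrapper might look like the following; the dataset path, model name, and flags are placeholders for whatever train.py actually accepts:

# Hypothetical usage: the first argument is the number of processes
# (typically one per GPU); shift drops it, and "$@" forwards the
# remaining arguments unchanged to train.py.
./distributed_train.sh 4 /data/imagenet --model resnet50 -b 64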