
Commit

Remove mx.test_utils.list_gpus
larroy committed May 17, 2019
1 parent 9e9c5cd commit a37eb61
Showing 6 changed files with 10 additions and 21 deletions.
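
Every hunk below makes the same substitution: code that enumerated GPU ids with mx.test_utils.list_gpus() now queries a count through mx.context.num_gpus(). A minimal before/after sketch of that pattern, assuming MXNet is importable as mx (variable names are illustrative, not taken from the commit):

    import mxnet as mx

    # Old pattern (removed by this commit): enumerate device ids.
    # gpus = mx.test_utils.list_gpus()      # [0, 1, ..., n-1], or [] with no GPU

    # New pattern: ask for a count and derive ids or contexts as needed.
    num_gpus = mx.context.num_gpus()        # 0 when no GPU is visible
    ctxs = [mx.gpu(i) for i in range(num_gpus)] if num_gpus > 0 else [mx.cpu()]
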
2 changes: 1 addition & 1 deletion example/distributed_training-horovod/gluon_mnist.py
@@ -45,7 +45,7 @@

 if not args.no_cuda:
     # Disable CUDA if there are no GPUs.
-    if not mx.test_utils.list_gpus():
+    if mx.context.num_gpus() == 0:
         args.no_cuda = True

 logging.basicConfig(level=logging.INFO)
9 changes: 4 additions & 5 deletions example/image-classification/test_score.py
@@ -51,11 +51,10 @@ def test_imagenet1k_inception_bn(**kwargs):
     assert r > g and r < g + .1

 if __name__ == '__main__':
-    gpus = mx.test_utils.list_gpus()
-    assert len(gpus) > 0
-    batch_size = 16 * len(gpus)
-    gpus = ','.join([str(i) for i in gpus])
-
+    num_gpus = mx.context.num_gpus()
+    assert num_gpus > 0
+    batch_size = 16 * num_gpus
+    gpus = ','.join(map(str, range(num_gpus)))
     kwargs = {'gpus':gpus, 'batch_size':batch_size, 'max_num_examples':500}
     download_data()
     test_imagenet1k_resnet(**kwargs)
2 changes: 1 addition & 1 deletion python/mxnet/gluon/contrib/nn/basic_layers.py
@@ -233,7 +233,7 @@ def _get_num_devices(self):
         warnings.warn("Caution using SyncBatchNorm: "
                       "if not using all the GPUs, please mannually set num_devices",
                       UserWarning)
-        num_devices = len(test_utils.list_gpus())
+        num_devices = mx.context.num_gpus()
         num_devices = num_devices if num_devices > 0 else 1
         return num_devices

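
The warning kept in the hunk above still applies after the change: the default device count now comes from mx.context.num_gpus(), so jobs that use only a subset of the visible GPUs should pass num_devices explicitly. A hedged usage sketch (channel count and device subset are illustrative):

    import mxnet as mx
    from mxnet.gluon.contrib.nn import SyncBatchNorm

    # Train on an explicit subset of GPUs and tell SyncBatchNorm about it,
    # instead of relying on the num_gpus()-based default.
    ctxs = [mx.gpu(0), mx.gpu(1)]
    bn = SyncBatchNorm(in_channels=64, num_devices=len(ctxs))
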
10 changes: 0 additions & 10 deletions python/mxnet/test_utils.py
@@ -1431,16 +1431,6 @@ def check_consistency(sym, ctx_list, scale=1.0, grad_req='write',

     return gt

-def list_gpus():
-    """Return a list of GPUs
-    Returns
-    -------
-    list of int:
-        If there are n GPUs, then return a list [0,1,...,n-1]. Otherwise returns
-        [].
-    """
-    return range(mx.util.get_gpu_count())
-
 def download(url, fname=None, dirname=None, overwrite=False, retries=5):
     """Download an given URL
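
Code outside the repository that still expects the removed helper's list-of-ids return value can rebuild it from the surviving API. A small shim sketch, not part of this commit:

    import mxnet as mx

    # Stand-in for the removed mx.test_utils.list_gpus():
    # returns [0, 1, ..., n-1] for n visible GPUs, [] otherwise.
    def list_gpus():
        return list(range(mx.context.num_gpus()))
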
6 changes: 3 additions & 3 deletions tools/caffe_converter/test_converter.py
@@ -90,9 +90,9 @@ def main():
         gpus = [-1]
         default_batch_size = 32
     else:
-        gpus = mx.test_utils.list_gpus()
-        assert gpus, 'At least one GPU is needed to run test_converter in GPU mode'
-        default_batch_size = 32 * len(gpus)
+        num_gpus = mx.context.num_gpus()
+        assert num_gpus, 'At least one GPU is needed to run test_converter in GPU mode'
+        default_batch_size = 32 * num_gpus

     models = ['bvlc_googlenet', 'vgg-16', 'resnet-50']

