Skip to content

Commit 476c824

Browse files
authored
Fix a bug that occurs when batch_size is set to -1 to enable autobatch.
To reproduce:
1 parent 4fb6dd4 commit 476c824

1 file changed

Lines changed: 1 addition & 1 deletion

File tree

utils/torch_utils.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -68,7 +68,7 @@ def select_device(device='', batch_size=None, newline=True):
6868
if cuda:
6969
devices = device.split(',') if device else '0' # range(torch.cuda.device_count()) # i.e. 0,1,6,7
7070
n = len(devices) # device count
71-
if n > 1 and batch_size: # check batch_size is divisible by device_count
71+
if n > 1 and batch_size > 0: # check batch_size is divisible by device_count
7272
assert batch_size % n == 0, f'batch-size {batch_size} not multiple of GPU count {n}'
7373
space = ' ' * (len(s) + 1)
7474
for i, d in enumerate(devices):

0 commit comments

Comments (0)