Skip to content

Commit 095cada

Browse files
authored
Update bytes to GB with bitshift (#6886)
1 parent a1e1029 commit 095cada

File tree

4 files changed: +11 additions, −10 deletions

utils/__init__.py

Lines changed: 3 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -21,14 +21,13 @@ def notebook_init(verbose=True):
2121
if is_colab():
2222
shutil.rmtree('/content/sample_data', ignore_errors=True) # remove colab /sample_data directory
2323

24+
# System info
2425
if verbose:
25-
# System info
26-
# gb = 1 / 1000 ** 3 # bytes to GB
27-
gib = 1 / 1024 ** 3 # bytes to GiB
26+
gb = 1 << 30 # bytes to GiB (1024 ** 3)
2827
ram = psutil.virtual_memory().total
2928
total, used, free = shutil.disk_usage("/")
3029
display.clear_output()
31-
s = f'({os.cpu_count()} CPUs, {ram * gib:.1f} GB RAM, {(total - free) * gib:.1f}/{total * gib:.1f} GB disk)'
30+
s = f'({os.cpu_count()} CPUs, {ram / gb:.1f} GB RAM, {(total - free) / gb:.1f}/{total / gb:.1f} GB disk)'
3231
else:
3332
s = ''
3433

utils/autobatch.py

Lines changed: 4 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -34,11 +34,12 @@ def autobatch(model, imgsz=640, fraction=0.9, batch_size=16):
3434
LOGGER.info(f'{prefix}CUDA not detected, using default CPU batch-size {batch_size}')
3535
return batch_size
3636

37+
gb = 1 << 30 # bytes to GiB (1024 ** 3)
3738
d = str(device).upper() # 'CUDA:0'
3839
properties = torch.cuda.get_device_properties(device) # device properties
39-
t = properties.total_memory / 1024 ** 3 # (GiB)
40-
r = torch.cuda.memory_reserved(device) / 1024 ** 3 # (GiB)
41-
a = torch.cuda.memory_allocated(device) / 1024 ** 3 # (GiB)
40+
t = properties.total_memory / gb # (GiB)
41+
r = torch.cuda.memory_reserved(device) / gb # (GiB)
42+
a = torch.cuda.memory_allocated(device) / gb # (GiB)
4243
f = t - (r + a) # free inside reserved
4344
LOGGER.info(f'{prefix}{d} ({properties.name}) {t:.2f}G total, {r:.2f}G reserved, {a:.2f}G allocated, {f:.2f}G free')
4445

utils/general.py

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -223,11 +223,12 @@ def emojis(str=''):
223223

224224
def file_size(path):
225225
# Return file/dir size (MB)
226+
mb = 1 << 20 # bytes to MiB (1024 ** 2)
226227
path = Path(path)
227228
if path.is_file():
228-
return path.stat().st_size / 1E6
229+
return path.stat().st_size / mb
229230
elif path.is_dir():
230-
return sum(f.stat().st_size for f in path.glob('**/*') if f.is_file()) / 1E6
231+
return sum(f.stat().st_size for f in path.glob('**/*') if f.is_file()) / mb
231232
else:
232233
return 0.0
233234

utils/torch_utils.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -86,7 +86,7 @@ def select_device(device='', batch_size=0, newline=True):
8686
space = ' ' * (len(s) + 1)
8787
for i, d in enumerate(devices):
8888
p = torch.cuda.get_device_properties(i)
89-
s += f"{'' if i == 0 else space}CUDA:{d} ({p.name}, {p.total_memory / 1024 ** 2:.0f}MiB)\n" # bytes to MB
89+
s += f"{'' if i == 0 else space}CUDA:{d} ({p.name}, {p.total_memory / (1 << 20):.0f}MiB)\n" # bytes to MB
9090
else:
9191
s += 'CPU\n'
9292

0 commit comments — Comments (0)