get_model_size_runtime.py
"""Benchmark parameter counts and forward/backward runtimes for every
combination of supported spatial and temporal backbones.

Results are printed as comma-separated values; a time of -1 means the CUDA
test failed (most likely an out-of-memory error).
"""
from itertools import product
from time import time
import warnings

import torch

from models import SpatiotemporalModel, SUPPORTED_TEMPORAL_MODELS, SUPPORTED_SPATIAL_MODELS

warnings.filterwarnings("ignore")

# simulated input dimensions
ndims = 4
num_classes = 9
sequencelength = 365
batchsize = 2
imagesize = 32

print(f"# simulated batchsize={batchsize}, imagesize={imagesize}, sequencelength={sequencelength}; time=-1: test failed (likely OOM error)")
print("spatial_backbone, temporal_backbone, num_params, forward_time_cpu, backward_time_cpu, forward_time_cuda, backward_time_cuda, init_time")

for spatial_backbone, temporal_backbone in product(SUPPORTED_SPATIAL_MODELS, SUPPORTED_TEMPORAL_MODELS):
    # test __init__
    start = time()
    model = SpatiotemporalModel(spatial_backbone, temporal_backbone, input_dim=ndims, num_classes=num_classes,
                                sequencelength=sequencelength, pretrained_spatial=False)
    init_time = time() - start

    # test forward (CPU)
    start = time()
    y_pred = model(torch.ones(batchsize, sequencelength, ndims, imagesize, imagesize))
    forward_time_cpu = time() - start

    # test backward (CPU)
    start = time()
    y_pred.mean().backward()
    backward_time_cpu = time() - start

    try:
        model = SpatiotemporalModel(spatial_backbone, temporal_backbone, input_dim=ndims, num_classes=num_classes,
                                    sequencelength=sequencelength, pretrained_spatial=False, device="cuda")
        model = model.to("cuda")
        X = torch.ones(batchsize, sequencelength, ndims, imagesize, imagesize).to("cuda")

        # test forward (CUDA)
        start = time()
        y_pred = model(X)
        forward_time_cuda = time() - start

        # test backward (CUDA)
        start = time()
        y_pred.mean().backward()
        backward_time_cuda = time() - start
    except RuntimeError:
        # likely an out-of-memory error
        forward_time_cuda = -1
        backward_time_cuda = -1

    # number of trainable parameters
    num_params = sum(p.numel() for p in model.parameters() if p.requires_grad)

    print(f"{spatial_backbone}, {temporal_backbone}, {num_params}, {forward_time_cpu:.4f}, {backward_time_cpu:.4f}, {forward_time_cuda:.4f}, {backward_time_cuda:.4f}, {init_time:.4f}")