timing.py
"""Benchmark batched matrix multiplication on the CPU and GPU tensor backends."""

import time

import numpy as np

import minitorch

# CPU backend (Numba-compiled FastOps) and GPU backend (CUDA ops).
FastTensorBackend = minitorch.TensorBackend(minitorch.FastOps)
GPUBackend = minitorch.TensorBackend(minitorch.CudaOps)


def run_matmul(backend, size=16) -> None:
    """Multiply two random (batch, size, size) tensors on the given backend."""
    batch_size = 2
    x = minitorch.rand((batch_size, size, size), backend=backend)
    y = minitorch.rand((batch_size, size, size), backend=backend)
    x @ y  # Result is discarded; only the elapsed time matters.
if __name__ == "__main__":
    # Warmup: run each backend once so JIT compilation is not counted in the timings.
    run_matmul(FastTensorBackend)
    run_matmul(GPUBackend)

    ntrials = 3
    times = {}
    for size in [64, 128, 256, 512]:
        print(f"Running size {size}")
        times[size] = {}
        fast_times = []
        gpu_times = []
        for _ in range(ntrials):
            # Time the CPU (FastOps) backend.
            start_fast = time.time()
            run_matmul(FastTensorBackend, size)
            end_fast = time.time()

            # Time the GPU (CUDA) backend.
            start_gpu = time.time()
            run_matmul(GPUBackend, size)
            end_gpu = time.time()

            fast_times.append(end_fast - start_fast)
            gpu_times.append(end_gpu - start_gpu)

        # Average over trials, in seconds.
        times[size]["fast"] = np.mean(fast_times)
        times[size]["gpu"] = np.mean(gpu_times)
        print(times[size])

    print()
    print("Timing summary (seconds)")
    for size, stimes in times.items():
        print(f"Size: {size}")
        for b, t in stimes.items():
            print(f"    {b}: {t:.5f}")