Skip to content

Commit 6c25c72

Browse files
adding itr_suppress parameter and formatted printing
1 parent d194dbc commit 6c25c72

1 file changed

Lines changed: 9 additions & 5 deletions

File tree

analysis/timefit.py

Lines changed: 9 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -21,31 +21,35 @@ def inference(d, n_iter, lr, workload, sys, print_freq=10):
2121

2222
log_max_time = torch.rand(1, requires_grad=True)
2323
alpha = torch.rand(1, requires_grad=True)
24+
itr_suppress = torch.rand(1, requires_grad=True)
2425

2526
t_latency = d[:,0]
2627
itr = d[:,1]
2728
dvfs = d[:,2]
2829

2930
criterion = nn.MSELoss()
30-
optimizer = optim.Adam([log_max_time, alpha], lr=lr)
31+
optimizer = optim.Adam([log_max_time, alpha, itr_suppress], lr=lr)
3132

3233
print(f'---------------{workload} {sys} lr = {lr}---------------')
3334

3435
for _ in range(n_iter):
3536
max_time = torch.exp(log_max_time)
36-
pred = itr + max_time/dvfs**(1+alpha)
37+
pred = itr_suppress*itr + max_time/dvfs**(1+alpha)
3738
loss = criterion(pred, t_latency)
3839

3940
optimizer.zero_grad()
4041
loss.backward()
4142
optimizer.step()
4243

4344
if _ % print_freq == 0:
44-
print(max_time, alpha, loss)
45+
if _==0:
46+
print(f'{"max_time":^10} {"alpha":^10} {"itr_suppress":^10} {"loss":^10}')
47+
48+
print(f'{max_time.item():^10.3f} {alpha.item():^10.3f} {itr_suppress.item():^10.3f} {loss.item():^10.3f}')
4549

4650
return pred
4751

48-
def run(n_iter=2000, lr=1e-1):
52+
def run(n_iter=2000, lr=1e-1, target_col='read_99th_mean'):
4953
#read linux_mcd.csv
5054
for workload in ['mcd']:
5155
df_comb, _, _ = read_agg_data.start_analysis(workload) #DATA
@@ -56,7 +60,7 @@ def run(n_iter=2000, lr=1e-1):
5660

5761
for sys in ['ebbrt_tuned']:
5862
df = df_comb[(df_comb['sys']==sys)].copy()
59-
df = df[['read_99th_mean','itr', 'dvfs']]
63+
df = df[[target_col,'itr', 'dvfs']]
6064
d = df.values
6165
d = torch.tensor(d)
6266

0 commit comments

Comments (0)