import torch
from torch import distributed


# Utility class that stores detached, named losses in a rotating buffer for smoothed metric reporting.
class LossAccumulator:
    def __init__(self, buffer_sz=50):
        self.buffer_sz = buffer_sz
        # Maps a loss name to (next_write_index, rotating buffer, buffer_has_filled_once).
        self.buffers = {}
        # Maps a metric name to a monotonically increasing count.
        self.counters = {}

    def add_loss(self, name, tensor):
        # Can take tensors or just plain python numbers.
        if name not in self.buffers:
            if "_histogram" in name:
                # Histogram buffers store the full flattened tensor at every step.
                tensor = torch.flatten(tensor.detach().cpu())
                self.buffers[name] = (0, torch.zeros((self.buffer_sz, tensor.shape[0])), False)
            else:
                self.buffers[name] = (0, torch.zeros(self.buffer_sz), False)
        i, buf, filled = self.buffers[name]
        if '_histogram' in name:
            buf[i] = torch.flatten(tensor.detach().cpu())
        elif isinstance(tensor, torch.Tensor):
            buf[i] = tensor.detach().cpu()
        else:
            buf[i] = tensor
        filled = i + 1 >= self.buffer_sz or filled
        self.buffers[name] = ((i + 1) % self.buffer_sz, buf, filled)

    def increment_metric(self, name):
        if name not in self.counters:
            self.counters[name] = 1
        else:
            self.counters[name] += 1

    def as_dict(self):
        result = {}
        for k, v in self.buffers.items():
            i, buf, filled = v
            if '_histogram' in k:
                # Histograms are reported whole rather than averaged.
                result["loss_" + k] = torch.flatten(buf)
            elif filled:
                result["loss_" + k] = torch.mean(buf)
            else:
                # Only average the entries that have actually been written.
                result["loss_" + k] = torch.mean(buf[:i])
        for k, v in self.counters.items():
            result[k] = v
        return result
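
# A minimal usage sketch (illustrative only; the loss and metric names are hypothetical):
#
#   acc = LossAccumulator(buffer_sz=50)
#   acc.add_loss('gen', loss_tensor)           # tensors are detached and moved to cpu
#   acc.add_loss('grad_norm', 1.3)             # plain python numbers work too
#   acc.add_loss('fea_histogram', features)    # '_histogram' keys keep full flattened values
#   acc.increment_metric('steps')
#   acc.as_dict()  # {'loss_gen': <rolling mean>, 'loss_grad_norm': <rolling mean>,
#                  #  'loss_fea_histogram': <flattened buffer>, 'steps': <count>}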


# Stores losses in an unbounded list.
class InfStorageLossAccumulator:
    def __init__(self):
        self.buffers = {}  # name -> list of every recorded value

    def add_loss(self, name, tensor):
        # Can take tensors or just plain python numbers.
        if name not in self.buffers:
            self.buffers[name] = []
        buf = self.buffers[name]
        if '_histogram' in name:
            buf.append(torch.flatten(tensor.detach().cpu()))
        elif isinstance(tensor, torch.Tensor):
            buf.append(tensor.detach().cpu())
        else:
            buf.append(tensor)

    def increment_metric(self, name):
        # Counters are not tracked by this accumulator.
        pass

    def as_dict(self):
        result = {}
        for k, buf in self.buffers.items():
            if '_histogram' in k:
                # buf is a list of flattened tensors; concatenate them before reporting.
                result["loss_" + k] = torch.cat(buf)
            else:
                result["loss_" + k] = torch.mean(torch.stack(buf))
        return result
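

# A minimal, runnable sketch exercising both accumulators. Illustrative only: the loss and
# metric names below are made up and are not part of this module.
if __name__ == '__main__':
    rolling = LossAccumulator(buffer_sz=4)
    unbounded = InfStorageLossAccumulator()
    for step in range(10):
        fake_loss = torch.rand(())  # stand-in for a scalar training loss
        rolling.add_loss('gen', fake_loss)
        rolling.add_loss('disc', 0.5)  # plain python number
        rolling.add_loss('weights_histogram', torch.randn(8))
        rolling.increment_metric('steps')
        unbounded.add_loss('gen', fake_loss)
    print(rolling.as_dict())    # rolling means over the last buffer_sz entries, plus 'steps'
    print(unbounded.as_dict())  # mean over every value recorded so far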