import torch
import torch.nn.functional as F

from einops import rearrange

from torch import nn
from torch.distributions import Categorical
from torch.nn.utils.rnn import pad_sequence
from torchmetrics.classification import MulticlassAccuracy, MulticlassPrecision
from torchvision.models import resnet18

from ..data import get_symmap

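# Model: a fixed-length sequence classifier over images. A ResNet-18 backbone produces
# n_len * n_tokens logits per image, which are reshaped into one categorical distribution
# per sequence position, sampled, and decoded back to a string through the symbol map
# from ..data. When target text is provided, a per-position cross-entropy loss and
# accuracy/precision stats are stored on the module as `self.loss` / `self.stats`.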
class Model(nn.Module):
	def __init__(
		self,
		n_tokens: int = 0, # number of token types
		n_len: int = 6, # how long a sequence can be
		d_model: int = 512,
	):
		super().__init__()

		# map token ids back to their symbols for decoding; id 0 decodes to the empty string
		_symmap = get_symmap()
		self.symmap = { f'{v}': k for k, v in _symmap.items() }
		self.symmap['0'] = ""

		# infer the vocabulary size from the symbol map when not given explicitly
		if n_tokens == 0:
			n_tokens = len(_symmap.keys())

		self.n_tokens = n_tokens
		self.n_len = n_len + 2 # start/stop tokens
		self.d_model = d_model

		# ResNet-18 backbone with its final fully-connected layer replaced so the model emits
		# n_len * n_tokens logits per image (one distribution per sequence position).
		# Note: torchvision's resnet18 pools to a 512-dim feature, so d_model must stay 512
		# for this replacement head to line up.
		self.resnet = resnet18(weights=None) # `pretrained=False` is deprecated in newer torchvision
		self.resnet.fc = nn.Linear( self.d_model, self.n_tokens * self.n_len )

		self.accuracy_metric = MulticlassAccuracy(
			n_tokens,
			#top_k=10,
			average="micro",
			multidim_average="global",
		)

		self.precision_metric = MulticlassPrecision(
			n_tokens,
			#top_k=10,
			average="micro",
			multidim_average="global",
		)

	def forward(
		self,
		image,
		text = None,
		sampling_temperature: float = 1.0,
	):
		# `image` is a list of image tensors; stack it into a single [batch, C, H, W] tensor
		x_list = torch.stack( image, dim=0 )

		# one set of logits per sequence position: [batch, n_len, n_tokens]
		x = self.resnet( x_list )
		y = x.view(x.size(0), self.n_len, self.n_tokens)

		# either of these should do, but my VALL-E forward pass uses this, so might as well keep to it
		# pred = y.argmax(dim=2)
		pred = Categorical(logits=y / sampling_temperature).sample()

		# decode the sampled token ids back to strings through the symbol map
		answer = [ "".join([ self.symmap[f'{x.item()}'] for x in t ]) for t in pred ]

		if text is not None:
			# pad the target sequences and move the batch dimension first: [batch, time]
			y_list = rearrange(pad_sequence(text), "t b -> b t")

			# summed per-position cross-entropy over however many target positions exist
			loss = 0
			for i in range(self.n_len):
				if i >= y_list.shape[1]:
					break
				loss += F.cross_entropy( y[:, i], y_list[:, i] )

			self.loss = dict(
				nll=loss
			)

			self.stats = dict(
				acc = self.accuracy_metric( pred, y_list ),
				precision = self.precision_metric( pred, y_list ),
			)

		return answer
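
if __name__ == "__main__":
	# Minimal usage sketch, not part of the original module. Assumptions: the module is run
	# inside its package (so `from ..data import get_symmap` resolves, e.g. via `python -m ...`),
	# inputs are 3x224x224, and the all-zero target ids below are placeholders, not real labels.
	model = Model()

	# forward() expects a *list* of image tensors and stacks them itself
	images = [ torch.randn(3, 224, 224) for _ in range(2) ]

	# inference: returns one decoded string per image, sampled from the per-position logits
	answers = model( images, sampling_temperature=1.0 )
	print( answers )

	# training-style call: `text` is a list of 1D LongTensors of token ids (each id < n_tokens);
	# afterwards model.loss["nll"] holds the summed cross-entropy and model.stats the metrics,
	# and a training loop would call model.loss["nll"].backward()
	targets = [ torch.zeros(model.n_len, dtype=torch.long) for _ in range(2) ]
	_ = model( images, text=targets )
	print( model.loss["nll"].item(), model.stats )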