# DL-Art-School/codes/test.py
import argparse
import logging
import os
import os.path as osp
import time
from collections import OrderedDict

import torch
import torch.nn as nn
from tqdm import tqdm

# Most of the project imports below are unused by the checkpointing test in
# this file; they are retained from the original test harness.
import models.archs.SwitchedResidualGenerator_arch as srg
import models.networks as networks
import options.options as option
import utils.util as util
from data import create_dataset, create_dataloader
from data.util import bgr2ycbcr
from models import create_model
from models.archs.arch_util import ConvGnSilu, UpconvBlock
from switched_conv.switched_conv import compute_attention_specificity
from switched_conv.switched_conv_util import save_attention_to_image, save_attention_to_image_rgb

class CheckpointFunction(torch.autograd.Function):
    """Gradient checkpointing: runs `run_function` without storing intermediate
    activations on the forward pass, then recomputes them during backward."""

    @staticmethod
    def forward(ctx, run_function, length, *args):
        ctx.run_function = run_function
        # The first `length` args are the actual inputs; the rest are the
        # parameters of `run_function`, passed in so autograd routes their
        # gradients through this Function.
        ctx.input_tensors = list(args[:length])
        ctx.input_params = list(args[length:])
        # no_grad() prevents the graph (and its activations) from being stored.
        with torch.no_grad():
            output_tensors = ctx.run_function(*ctx.input_tensors)
        return output_tensors

    @staticmethod
    def backward(ctx, *output_grads):
        # Detach the saved inputs and re-enable grad so the recomputed forward
        # pass builds a fresh, local graph to differentiate through.
        for i in range(len(ctx.input_tensors)):
            temp = ctx.input_tensors[i]
            ctx.input_tensors[i] = temp.detach()
            ctx.input_tensors[i].requires_grad = True
        with torch.enable_grad():
            output_tensors = ctx.run_function(*ctx.input_tensors)
        print("Backpropping")  # Debug marker: fires once per checkpointed segment.
        input_grads = torch.autograd.grad(output_tensors, ctx.input_tensors + ctx.input_params,
                                          output_grads, allow_unused=True)
        # forward() took (run_function, length, *args); the first two get no grads.
        return (None, None) + input_grads
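

# Convenience wrapper (an added sketch, not part of the original file): it
# mirrors the call pattern used in the test below and is analogous in spirit
# to torch.utils.checkpoint.checkpoint. The name `checkpoint_module` is
# hypothetical, not an identifier from this repository.
def checkpoint_module(module, *inputs):
    """Run `module` through CheckpointFunction so its activations are
    recomputed on backward instead of being stored on forward."""
    args = inputs + tuple(module.parameters())
    return CheckpointFunction.apply(module, len(inputs), *args)
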
if __name__ == "__main__":
    # Smoke test: unroll a two-layer conv net recurrently for ten steps,
    # checkpointing each step so only the step inputs (not the intermediate
    # activations) are held in memory until backward.
    model = nn.Sequential(ConvGnSilu(3, 64, 3, norm=False),
                          ConvGnSilu(64, 3, 3, norm=False))
    model.train()

    seed = torch.randn(1, 3, 32, 32)
    recurrent = seed
    outs = []
    for i in range(10):
        # apply() takes the real inputs first, then the module parameters;
        # `1` tells CheckpointFunction that only the first arg is an input.
        args = (recurrent,) + tuple(model.parameters())
        recurrent = CheckpointFunction.apply(model, 1, *args)
        outs.append(recurrent)

    l = nn.L1Loss()(recurrent, torch.randn(1, 3, 32, 32))
    l.backward()
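
    # Sanity check (an added sketch, not part of the original script): gradients
    # from the checkpointed unroll should match an ordinary unroll, since the
    # model is deterministic and the backward pass replays the same ops.
    target = torch.randn(1, 3, 32, 32)
    model.zero_grad()
    r = seed
    for _ in range(10):
        r = checkpoint_module(model, r)
    nn.L1Loss()(r, target).backward()
    grads_ckpt = [p.grad.clone() for p in model.parameters()]

    model.zero_grad()
    r = seed
    for _ in range(10):
        r = model(r)
    nn.L1Loss()(r, target).backward()
    for g, p in zip(grads_ckpt, model.parameters()):
        assert torch.allclose(g, p.grad, rtol=1e-4, atol=1e-6), "checkpointed gradients diverged"
    print("Checkpointed gradients match the plain unroll.")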