조현아

run resnet & FAA getBraTS_5

@@ -54,10 +54,13 @@ def train_child(args, model, dataset, subset_indx, device=None):
    if torch.cuda.device_count() > 1:
        print('\n[+] Use {} GPUs'.format(torch.cuda.device_count()))
        model = nn.DataParallel(model)
    elif torch.cuda.device_count() == 1:
        print('\n[+] Use {} GPUs'.format(torch.cuda.device_count()))

    start_t = time.time()
    for step in range(args.start_step, args.max_step):
        batch = next(data_loader)
        _train_res = train_step(args, model, optimizer, scheduler, criterion, batch, step, None, device)

        if step % args.print_step == 0:
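The hunk above only wraps the child model in `nn.DataParallel` when more than one GPU is visible; on a single GPU it just logs the count. A minimal sketch of that pattern, using a toy `nn.Linear` as a stand-in for the child model (it runs as a no-op on CPU-only machines):

```python
import torch
import torch.nn as nn

model = nn.Linear(16, 2)                # stand-in for the child model
if torch.cuda.is_available():
    model = model.cuda()
    if torch.cuda.device_count() > 1:
        # replicate the module across visible GPUs; batches are split along dim 0
        model = nn.DataParallel(model)
        print('[+] Use {} GPUs'.format(torch.cuda.device_count()))
    elif torch.cuda.device_count() == 1:
        print('[+] Use 1 GPU')
```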
@@ -173,7 +176,7 @@ def process_fn(args_str, model, dataset, Dm_indx, Da_indx, T, transform_candidat
    device = torch.device('cuda:%d' % device_id)
    _transform = []

    print('[+] Child %d training strated (GPU: %d)' % (k, device_id))
    print('[+] Child %d training started (GPU: %d)' % (k, device_id))

    # train child model
    child_model = copy.deepcopy(model)
@@ -188,7 +191,7 @@ def process_fn(args_str, model, dataset, Dm_indx, Da_indx, T, transform_candidat
    return _transform


#fast_auto_augment(args, model, K=4, B=1, num_process=4)
def fast_auto_augment(args, model, transform_candidates=None, K=5, B=100, T=2, N=10, num_process=5):
    args_str = json.dumps(args._asdict())
    dataset = get_dataset(args, None, 'trainval')
......
@@ -4,6 +4,12 @@ class BaseNet(nn.Module):
    def __init__(self, backbone, args):
        super(BaseNet, self).__init__()

        #testing
        for layer in backbone.children():
            print("\nRESNET50 LAYERS\n")
            print(layer)

        # Separate layers
        self.first = nn.Sequential(*list(backbone.children())[:1])
        self.after = nn.Sequential(*list(backbone.children())[1:-1])
@@ -14,6 +20,20 @@ class BaseNet(nn.Module):
    def forward(self, x):
        f = self.first(x)
        x = self.after(f)
        x = x.reshape(x.size(0), -1)
        x = self.fc(x)
        return x, f

        """
        print("before reshape:\n", x.size())
        #[128, 2048, 4, 4]
        #cifar built-in: [128, 2048, 1, 1]
        x = x.reshape(x.size(0), -1)
        print("after reshape:\n", x.size())
        #[128, 32768]
        #cifar [128, 2048]
        #RuntimeError: size mismatch, m1: [128 x 32768], m2: [2048 x 10]
        print("fc :\n", self.fc)
        #Linear(in_features=2048, out_features=10, bias=True)
        #cifar Linear(in_features=2048, out_features=1000, bias=True)
        """
......
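The commented-out notes in `forward` above record the core shape problem: on the BraTS frames the backbone's last feature map is [128, 2048, 4, 4], so flattening yields 32768 features while `self.fc` expects 2048 (the size-mismatch RuntimeError), whereas CIFAR-sized inputs pool down to [128, 2048, 1, 1]. Below is a sketch of one way to make the flattened width independent of input resolution, using adaptive average pooling; the class name and sizes are illustrative and not part of this repo:

```python
import torch
import torch.nn as nn

class GlobalPoolHead(nn.Module):
    def __init__(self, in_channels=2048, num_classes=10):
        super().__init__()
        self.pool = nn.AdaptiveAvgPool2d(1)      # [N, C, H, W] -> [N, C, 1, 1]
        self.fc = nn.Linear(in_channels, num_classes)

    def forward(self, x):
        x = self.pool(x).flatten(1)              # always [N, C], regardless of H, W
        return self.fc(x)

head = GlobalPoolHead()
print(head(torch.randn(128, 2048, 4, 4)).shape)  # torch.Size([128, 10])
```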
import torch
import torch.nn as nn
import torch.nn.functional as F


class BasicBlock(nn.Module):
    expansion = 1

    def __init__(self, in_planes, planes, stride=1):
        super(BasicBlock, self).__init__()
        self.conv1 = nn.Conv2d(in_planes, planes, kernel_size=3, stride=stride, padding=1, bias=False)
        self.bn1 = nn.BatchNorm2d(planes)
        self.conv2 = nn.Conv2d(planes, planes, kernel_size=3, stride=1, padding=1, bias=False)
        self.bn2 = nn.BatchNorm2d(planes)

        self.shortcut = nn.Sequential()
        if stride != 1 or in_planes != self.expansion*planes:
            self.shortcut = nn.Sequential(
                nn.Conv2d(in_planes, self.expansion*planes, kernel_size=1, stride=stride, bias=False),
                nn.BatchNorm2d(self.expansion*planes)
            )

    def forward(self, x):
        out = F.relu(self.bn1(self.conv1(x)))
        out = self.bn2(self.conv2(out))
        out += self.shortcut(x)
        out = F.relu(out)
        return out


class Bottleneck(nn.Module):
    expansion = 4

    def __init__(self, in_planes, planes, stride=1):
        super(Bottleneck, self).__init__()
        self.conv1 = nn.Conv2d(in_planes, planes, kernel_size=1, bias=False)
        self.bn1 = nn.BatchNorm2d(planes)
        self.conv2 = nn.Conv2d(planes, planes, kernel_size=3, stride=stride, padding=1, bias=False)
        self.bn2 = nn.BatchNorm2d(planes)
        self.conv3 = nn.Conv2d(planes, self.expansion*planes, kernel_size=1, bias=False)
        self.bn3 = nn.BatchNorm2d(self.expansion*planes)

        self.shortcut = nn.Sequential()
        if stride != 1 or in_planes != self.expansion*planes:
            self.shortcut = nn.Sequential(
                nn.Conv2d(in_planes, self.expansion*planes, kernel_size=1, stride=stride, bias=False),
                nn.BatchNorm2d(self.expansion*planes)
            )

    def forward(self, x):
        out = F.relu(self.bn1(self.conv1(x)))
        out = F.relu(self.bn2(self.conv2(out)))
        out = self.bn3(self.conv3(out))
        out += self.shortcut(x)
        out = F.relu(out)
        return out


class ResNet(nn.Module):
    def __init__(self, block, num_blocks, num_classes=10):
        super(ResNet, self).__init__()
        self.in_planes = 64

        # single-channel (grayscale) stem instead of the usual 3-channel one
        self.conv1 = nn.Conv2d(1, 64, kernel_size=3, stride=1, padding=1, bias=False)
        self.bn1 = nn.BatchNorm2d(64)
        self.layer1 = self._make_layer(block, 64, num_blocks[0], stride=1)
        self.layer2 = self._make_layer(block, 128, num_blocks[1], stride=2)
        self.layer3 = self._make_layer(block, 256, num_blocks[2], stride=2)
        self.layer4 = self._make_layer(block, 512, num_blocks[3], stride=2)
        self.linear = nn.Linear(512*block.expansion, num_classes)

    def _make_layer(self, block, planes, num_blocks, stride):
        strides = [stride] + [1]*(num_blocks-1)
        layers = []
        for stride in strides:
            layers.append(block(self.in_planes, planes, stride))
            self.in_planes = planes * block.expansion
        return nn.Sequential(*layers)

    def forward(self, x):
        out = F.relu(self.bn1(self.conv1(x)))
        out = self.layer1(out)
        out = self.layer2(out)
        out = self.layer3(out)
        out = self.layer4(out)
        out = F.avg_pool2d(out, 4)
        out = out.view(out.size(0), -1)
        out = self.linear(out)
        return out


def ResNet18():
    return ResNet(BasicBlock, [2,2,2,2])

def ResNet34():
    return ResNet(BasicBlock, [3,4,6,3])

def ResNet50():
    return ResNet(Bottleneck, [3,4,6,3])

def ResNet101():
    return ResNet(Bottleneck, [3,4,23,3])

def ResNet152():
    return ResNet(Bottleneck, [3,8,36,3])


def test():
    # the stem above takes a single channel, so the dummy input must be grayscale
    net = ResNet18()
    y = net(torch.randn(1, 1, 32, 32))
    print(y.size())
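A short usage sketch for the single-channel network above; the sizes are illustrative (with a 32x32 grayscale input, ResNet-50 reaches a 4x4 map before the 4x4 average pool, so the head sees a 2048-dim vector):

```python
if __name__ == '__main__':
    net = ResNet50()                    # Bottleneck blocks, expansion 4
    x = torch.randn(2, 1, 32, 32)       # [batch, 1 channel, H, W]
    y = net(x)
    print(y.size())                     # expected: torch.Size([2, 10])
```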
@@ -6,6 +6,7 @@ import pickle as cp
import glob
import numpy as np
import pandas as pd
from natsort import natsorted
from PIL import Image
import torch
@@ -21,6 +22,7 @@ from sklearn.model_selection import train_test_split
from sklearn.model_selection import KFold
from networks import basenet
from networks import grayResNet, grayResNet2
DATASET_PATH = '/content/drive/My Drive/CD2 Project/data/BraTS_Training/train_frame/'
TRAIN_DATASET_PATH = '/content/drive/My Drive/CD2 Project/data/BraTS_Training/train_frame/'
@@ -55,40 +57,6 @@ def split_dataset(args, dataset, k):
    return Dm_indexes, Da_indexes


def split_dataset2222(args, dataset, k):
    # load dataset
    X = list(range(len(dataset)))

    # split to k-fold
    #assert len(X) == len(Y)

    def _it_to_list(_it):
        return list(zip(*list(_it)))

    x_train = ()
    x_test = ()
    for i in range(k):
        #xtr, xte = train_test_split(X, random_state=args.seed, test_size=0.1)
        xtr, xte = train_test_split(X, random_state=None, test_size=0.1)
        x_train.append(np.array(xtr))
        x_test.append(np.array(xte))

    y_train = np.array([0]* len(x_train))
    y_test = np.array([0]* len(x_test))

    x_train = tuple(x_train)
    x_test = tuple(x_test)

    trainset = (zip(x_train, y_train),)
    testset = (zip(x_test, y_test),)

    Dm_indexes, Da_indexes = trainset, testset

    print(type(Dm_indexes), np.shape(Dm_indexes))
    print("DM\n", np.shape(Dm_indexes), Dm_indexes, "\nDA\n", np.shape(Da_indexes), Da_indexes)

    return Dm_indexes, Da_indexes
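The removed `split_dataset2222` above built its folds by hand with repeated `train_test_split` calls and manual tuple bookkeeping; the `KFold` import that utils keeps supports the same Dm/Da index split more directly. A minimal sketch of that approach (the helper name `kfold_indexes` is hypothetical, not from the repo):

```python
import numpy as np
from sklearn.model_selection import KFold

def kfold_indexes(n_samples, k, seed=None):
    """Return per-fold (train, held-out) index arrays for the Dm / Da splits."""
    kf = KFold(n_splits=k, shuffle=True, random_state=seed)
    Dm_indexes, Da_indexes = [], []
    for train_idx, test_idx in kf.split(np.arange(n_samples)):
        Dm_indexes.append(train_idx)   # indices used to train the child model
        Da_indexes.append(test_idx)    # indices searched for augmentations
    return Dm_indexes, Da_indexes

# e.g. Dm, Da = kfold_indexes(len(dataset), k=5, seed=0)
```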
def concat_image_features(image, features, max_features=3):
    _, h, w = image.shape
@@ -159,8 +127,22 @@ def parse_args(kwargs):
def select_model(args):
    if args.network in models.__dict__:
        backbone = models.__dict__[args.network]()

        # resnet_dict = {'ResNet18':grayResNet.ResNet18(), 'ResNet34':grayResNet.ResNet34(),
        #                'ResNet50':grayResNet.ResNet50(), 'ResNet101':grayResNet.ResNet101(), 'ResNet152':grayResNet.ResNet152()}

        # grayResNet2
        resnet_dict = {'resnet18':grayResNet2.resnet18(), 'resnet34':grayResNet2.resnet34(),
                       'resnet50':grayResNet2.resnet50(), 'resnet101':grayResNet2.resnet101(), 'resnet152':grayResNet2.resnet152()}

        if args.network in resnet_dict:
            backbone = resnet_dict[args.network]

        #testing
        # print("\nRESNET50 LAYERS\n")
        # for layer in backbone.children():
        #     print(layer)
        # print("LAYER THE END\n")

        model = basenet.BaseNet(backbone, args)
    else:
        Net = getattr(importlib.import_module('networks.{}'.format(args.network)), 'Net')
......
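For reference, a hedged usage sketch of the path above that swaps in the grayscale backbone; the bare `SimpleNamespace` stands in for the real `args` produced by `parse_args`, which may carry more fields that `BaseNet` reads:

```python
from types import SimpleNamespace
from networks import grayResNet2, basenet

args = SimpleNamespace(network='resnet50')     # stand-in for parse_args output
backbone = grayResNet2.resnet50()              # 1-channel variant, not torchvision's resnet50
model = basenet.BaseNet(backbone, args)        # BaseNet splits the backbone into first / after / fc
```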