조현아

train: set max_step default to 6500 (was 64000); defaults now BraTS/resnet50, lr=0.0001, print/val step 500

1 +{
2 + "nbformat": 4,
3 + "nbformat_minor": 0,
4 + "metadata": {
5 + "colab": {
6 + "name": "FAA2.ipynb",
7 + "provenance": [],
8 + "collapsed_sections": [],
9 + "toc_visible": true
10 + },
11 + "kernelspec": {
12 + "name": "python3",
13 + "display_name": "Python 3"
14 + },
15 + "accelerator": "GPU"
16 + },
17 + "cells": [
18 + {
19 + "cell_type": "code",
20 + "metadata": {
21 + "id": "sWjZQ8LCWcZv",
22 + "colab_type": "code",
23 + "outputId": "3d4f5ec9-214c-4365-b43c-a3946f447631",
24 + "colab": {
25 + "base_uri": "https://localhost:8080/",
26 + "height": 35
27 + }
28 + },
29 + "source": [
30 + "from google.colab import drive\n",
31 + "drive.mount('/content/drive')"
32 + ],
33 + "execution_count": 0,
34 + "outputs": [
35 + {
36 + "output_type": "stream",
37 + "text": [
38 + "Drive already mounted at /content/drive; to attempt to forcibly remount, call drive.mount(\"/content/drive\", force_remount=True).\n"
39 + ],
40 + "name": "stdout"
41 + }
42 + ]
43 + },
44 + {
45 + "cell_type": "code",
46 + "metadata": {
47 + "id": "3arNqMB_Wgbx",
48 + "colab_type": "code",
49 + "outputId": "7f1de510-e87c-4a78-8f63-8349aeba3a8b",
50 + "colab": {
51 + "base_uri": "https://localhost:8080/",
52 + "height": 35
53 + }
54 + },
55 + "source": [
56 + "!git clone http://khuhub.khu.ac.kr/2020-1-capstone-design2/2016104167.git"
57 + ],
58 + "execution_count": 0,
59 + "outputs": [
60 + {
61 + "output_type": "stream",
62 + "text": [
63 + "fatal: destination path '2016104167' already exists and is not an empty directory.\n"
64 + ],
65 + "name": "stdout"
66 + }
67 + ]
68 + },
69 + {
70 + "cell_type": "code",
71 + "metadata": {
72 + "id": "ISXM-edL-lGF",
73 + "colab_type": "code",
74 + "outputId": "b3d9b459-bdbf-4bcf-8c23-3ae0dd99a913",
75 + "colab": {
76 + "base_uri": "https://localhost:8080/",
77 + "height": 35
78 + }
79 + },
80 + "source": [
81 + "%cd '2016104167/code/FAA2/'"
82 + ],
83 + "execution_count": 0,
84 + "outputs": [
85 + {
86 + "output_type": "stream",
87 + "text": [
88 + "/content/2016104167/code/FAA2\n"
89 + ],
90 + "name": "stdout"
91 + }
92 + ]
93 + },
94 + {
95 + "cell_type": "code",
96 + "metadata": {
97 + "id": "43zJwd05_Tst",
98 + "colab_type": "code",
99 + "outputId": "bb293b7c-5b79-4720-fff8-5bfe077b6694",
100 + "colab": {
101 + "base_uri": "https://localhost:8080/",
102 + "height": 718
103 + }
104 + },
105 + "source": [
106 + "!python -m pip install -r \"requirements.txt\""
107 + ],
108 + "execution_count": 0,
109 + "outputs": [
110 + {
111 + "output_type": "stream",
112 + "text": [
113 + "Requirement already satisfied: future in /usr/local/lib/python3.6/dist-packages (from -r requirements.txt (line 1)) (0.16.0)\n",
114 + "Requirement already satisfied: tb-nightly in /usr/local/lib/python3.6/dist-packages (from -r requirements.txt (line 2)) (2.3.0a20200331)\n",
115 + "Requirement already satisfied: torchvision in /usr/local/lib/python3.6/dist-packages (from -r requirements.txt (line 3)) (0.5.0)\n",
116 + "Requirement already satisfied: torch in /usr/local/lib/python3.6/dist-packages (from -r requirements.txt (line 4)) (1.4.0)\n",
117 + "Requirement already satisfied: hyperopt in /usr/local/lib/python3.6/dist-packages (from -r requirements.txt (line 5)) (0.1.2)\n",
118 + "Requirement already satisfied: pillow==6.2.1 in /usr/local/lib/python3.6/dist-packages (from -r requirements.txt (line 6)) (6.2.1)\n",
119 + "Requirement already satisfied: natsort in /usr/local/lib/python3.6/dist-packages (from -r requirements.txt (line 7)) (5.5.0)\n",
120 + "Requirement already satisfied: fire in /usr/local/lib/python3.6/dist-packages (from -r requirements.txt (line 8)) (0.3.0)\n",
121 + "Requirement already satisfied: werkzeug>=0.11.15 in /usr/local/lib/python3.6/dist-packages (from tb-nightly->-r requirements.txt (line 2)) (1.0.0)\n",
122 + "Requirement already satisfied: numpy>=1.12.0 in /usr/local/lib/python3.6/dist-packages (from tb-nightly->-r requirements.txt (line 2)) (1.18.2)\n",
123 + "Requirement already satisfied: requests<3,>=2.21.0 in /usr/local/lib/python3.6/dist-packages (from tb-nightly->-r requirements.txt (line 2)) (2.21.0)\n",
124 + "Requirement already satisfied: setuptools>=41.0.0 in /usr/local/lib/python3.6/dist-packages (from tb-nightly->-r requirements.txt (line 2)) (46.0.0)\n",
125 + "Requirement already satisfied: markdown>=2.6.8 in /usr/local/lib/python3.6/dist-packages (from tb-nightly->-r requirements.txt (line 2)) (3.2.1)\n",
126 + "Requirement already satisfied: protobuf>=3.6.0 in /usr/local/lib/python3.6/dist-packages (from tb-nightly->-r requirements.txt (line 2)) (3.10.0)\n",
127 + "Requirement already satisfied: google-auth-oauthlib<0.5,>=0.4.1 in /usr/local/lib/python3.6/dist-packages (from tb-nightly->-r requirements.txt (line 2)) (0.4.1)\n",
128 + "Requirement already satisfied: google-auth<2,>=1.6.3 in /usr/local/lib/python3.6/dist-packages (from tb-nightly->-r requirements.txt (line 2)) (1.7.2)\n",
129 + "Requirement already satisfied: grpcio>=1.24.3 in /usr/local/lib/python3.6/dist-packages (from tb-nightly->-r requirements.txt (line 2)) (1.27.2)\n",
130 + "Requirement already satisfied: wheel>=0.26; python_version >= \"3\" in /usr/local/lib/python3.6/dist-packages (from tb-nightly->-r requirements.txt (line 2)) (0.34.2)\n",
131 + "Requirement already satisfied: six>=1.10.0 in /usr/local/lib/python3.6/dist-packages (from tb-nightly->-r requirements.txt (line 2)) (1.12.0)\n",
132 + "Requirement already satisfied: tensorboard-plugin-wit>=1.6.0 in /usr/local/lib/python3.6/dist-packages (from tb-nightly->-r requirements.txt (line 2)) (1.6.0.post2)\n",
133 + "Requirement already satisfied: absl-py>=0.4 in /usr/local/lib/python3.6/dist-packages (from tb-nightly->-r requirements.txt (line 2)) (0.9.0)\n",
134 + "Requirement already satisfied: scipy in /usr/local/lib/python3.6/dist-packages (from hyperopt->-r requirements.txt (line 5)) (1.4.1)\n",
135 + "Requirement already satisfied: pymongo in /usr/local/lib/python3.6/dist-packages (from hyperopt->-r requirements.txt (line 5)) (3.10.1)\n",
136 + "Requirement already satisfied: networkx in /usr/local/lib/python3.6/dist-packages (from hyperopt->-r requirements.txt (line 5)) (2.4)\n",
137 + "Requirement already satisfied: tqdm in /usr/local/lib/python3.6/dist-packages (from hyperopt->-r requirements.txt (line 5)) (4.38.0)\n",
138 + "Requirement already satisfied: termcolor in /usr/local/lib/python3.6/dist-packages (from fire->-r requirements.txt (line 8)) (1.1.0)\n",
139 + "Requirement already satisfied: chardet<3.1.0,>=3.0.2 in /usr/local/lib/python3.6/dist-packages (from requests<3,>=2.21.0->tb-nightly->-r requirements.txt (line 2)) (3.0.4)\n",
140 + "Requirement already satisfied: urllib3<1.25,>=1.21.1 in /usr/local/lib/python3.6/dist-packages (from requests<3,>=2.21.0->tb-nightly->-r requirements.txt (line 2)) (1.24.3)\n",
141 + "Requirement already satisfied: certifi>=2017.4.17 in /usr/local/lib/python3.6/dist-packages (from requests<3,>=2.21.0->tb-nightly->-r requirements.txt (line 2)) (2019.11.28)\n",
142 + "Requirement already satisfied: idna<2.9,>=2.5 in /usr/local/lib/python3.6/dist-packages (from requests<3,>=2.21.0->tb-nightly->-r requirements.txt (line 2)) (2.8)\n",
143 + "Requirement already satisfied: requests-oauthlib>=0.7.0 in /usr/local/lib/python3.6/dist-packages (from google-auth-oauthlib<0.5,>=0.4.1->tb-nightly->-r requirements.txt (line 2)) (1.3.0)\n",
144 + "Requirement already satisfied: pyasn1-modules>=0.2.1 in /usr/local/lib/python3.6/dist-packages (from google-auth<2,>=1.6.3->tb-nightly->-r requirements.txt (line 2)) (0.2.8)\n",
145 + "Requirement already satisfied: rsa<4.1,>=3.1.4 in /usr/local/lib/python3.6/dist-packages (from google-auth<2,>=1.6.3->tb-nightly->-r requirements.txt (line 2)) (4.0)\n",
146 + "Requirement already satisfied: cachetools<3.2,>=2.0.0 in /usr/local/lib/python3.6/dist-packages (from google-auth<2,>=1.6.3->tb-nightly->-r requirements.txt (line 2)) (3.1.1)\n",
147 + "Requirement already satisfied: decorator>=4.3.0 in /usr/local/lib/python3.6/dist-packages (from networkx->hyperopt->-r requirements.txt (line 5)) (4.4.2)\n",
148 + "Requirement already satisfied: oauthlib>=3.0.0 in /usr/local/lib/python3.6/dist-packages (from requests-oauthlib>=0.7.0->google-auth-oauthlib<0.5,>=0.4.1->tb-nightly->-r requirements.txt (line 2)) (3.1.0)\n",
149 + "Requirement already satisfied: pyasn1<0.5.0,>=0.4.6 in /usr/local/lib/python3.6/dist-packages (from pyasn1-modules>=0.2.1->google-auth<2,>=1.6.3->tb-nightly->-r requirements.txt (line 2)) (0.4.8)\n"
150 + ],
151 + "name": "stdout"
152 + }
153 + ]
154 + },
155 + {
156 + "cell_type": "code",
157 + "metadata": {
158 + "id": "16kGbCYwfhYF",
159 + "colab_type": "code",
160 + "colab": {}
161 + },
162 + "source": [
163 + "# !pip3 install http://download.pytorch.org/whl/cu80/torch-0.3.0.post4-cp36-cp36m-linux_x86_64.whl\n",
164 + "# !pip3 install torchvision"
165 + ],
166 + "execution_count": 0,
167 + "outputs": []
168 + },
169 + {
170 + "cell_type": "code",
171 + "metadata": {
172 + "id": "hofwjBN3ZY_h",
173 + "colab_type": "code",
174 + "colab": {}
175 + },
176 + "source": [
177 + "use_cuda = True"
178 + ],
179 + "execution_count": 0,
180 + "outputs": []
181 + },
182 + {
183 + "cell_type": "code",
184 + "metadata": {
185 + "id": "0h78dEdg_Jsg",
186 + "colab_type": "code",
187 + "colab": {}
188 + },
189 + "source": [
190 + "# try CIFAR10\n",
191 + "#!python \"train.py\" --seed=24 --scale=3 --optimizer=sgd --fast_auto_augment=True --use_cuda=True --network=ResNet50"
192 + ],
193 + "execution_count": 0,
194 + "outputs": []
195 + },
196 + {
197 + "cell_type": "code",
198 + "metadata": {
199 + "id": "nz8P9CpzES4L",
200 + "colab_type": "code",
201 + "outputId": "913ec5c8-4a66-45fd-8f76-a8367376c270",
202 + "colab": {
203 + "base_uri": "https://localhost:8080/",
204 + "height": 1000
205 + }
206 + },
207 + "source": [
208 + "# BraTS, grayResNet2\n",
209 + "!python \"train.py\" --use_cuda=True --network=resnet50 --dataset=BraTS --optimizer=adam --fast_auto_augment=True"
210 + ],
211 + "execution_count": 0,
212 + "outputs": [
213 + {
214 + "output_type": "stream",
215 + "text": [
216 + "\n",
217 + "[+] Parse arguments\n",
218 + "Args(augment_path=None, batch_size=128, dataset='BraTS', fast_auto_augment=True, learning_rate=0.0001, max_step=10000, network='resnet50', num_workers=4, optimizer='adam', print_step=500, scheduler='exp', seed=None, start_step=0, use_cuda=True, val_step=500)\n",
219 + "\n",
220 + "[+] Create log dir\n",
221 + "2020-04-01 05:45:32.118038: I tensorflow/stream_executor/platform/default/dso_loader.cc:44] Successfully opened dynamic library libcudart.so.10.1\n",
222 + "\n",
223 + "[+] Create network\n",
224 + "BaseNet(\n",
225 + " (first): Sequential(\n",
226 + " (0): Conv2d(1, 64, kernel_size=(7, 7), stride=(2, 2), padding=(3, 3), bias=False)\n",
227 + " )\n",
228 + " (after): Sequential(\n",
229 + " (0): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
230 + " (1): ReLU(inplace=True)\n",
231 + " (2): MaxPool2d(kernel_size=3, stride=2, padding=1, dilation=1, ceil_mode=False)\n",
232 + " (3): Sequential(\n",
233 + " (0): Bottleneck(\n",
234 + " (conv1): Conv2d(64, 64, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
235 + " (bn1): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
236 + " (conv2): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
237 + " (bn2): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
238 + " (conv3): Conv2d(64, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
239 + " (bn3): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
240 + " (relu): ReLU(inplace=True)\n",
241 + " (downsample): Sequential(\n",
242 + " (0): Conv2d(64, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
243 + " (1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
244 + " )\n",
245 + " )\n",
246 + " (1): Bottleneck(\n",
247 + " (conv1): Conv2d(256, 64, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
248 + " (bn1): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
249 + " (conv2): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
250 + " (bn2): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
251 + " (conv3): Conv2d(64, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
252 + " (bn3): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
253 + " (relu): ReLU(inplace=True)\n",
254 + " )\n",
255 + " (2): Bottleneck(\n",
256 + " (conv1): Conv2d(256, 64, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
257 + " (bn1): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
258 + " (conv2): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
259 + " (bn2): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
260 + " (conv3): Conv2d(64, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
261 + " (bn3): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
262 + " (relu): ReLU(inplace=True)\n",
263 + " )\n",
264 + " )\n",
265 + " (4): Sequential(\n",
266 + " (0): Bottleneck(\n",
267 + " (conv1): Conv2d(256, 128, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
268 + " (bn1): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
269 + " (conv2): Conv2d(128, 128, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False)\n",
270 + " (bn2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
271 + " (conv3): Conv2d(128, 512, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
272 + " (bn3): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
273 + " (relu): ReLU(inplace=True)\n",
274 + " (downsample): Sequential(\n",
275 + " (0): Conv2d(256, 512, kernel_size=(1, 1), stride=(2, 2), bias=False)\n",
276 + " (1): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
277 + " )\n",
278 + " )\n",
279 + " (1): Bottleneck(\n",
280 + " (conv1): Conv2d(512, 128, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
281 + " (bn1): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
282 + " (conv2): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
283 + " (bn2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
284 + " (conv3): Conv2d(128, 512, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
285 + " (bn3): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
286 + " (relu): ReLU(inplace=True)\n",
287 + " )\n",
288 + " (2): Bottleneck(\n",
289 + " (conv1): Conv2d(512, 128, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
290 + " (bn1): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
291 + " (conv2): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
292 + " (bn2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
293 + " (conv3): Conv2d(128, 512, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
294 + " (bn3): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
295 + " (relu): ReLU(inplace=True)\n",
296 + " )\n",
297 + " (3): Bottleneck(\n",
298 + " (conv1): Conv2d(512, 128, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
299 + " (bn1): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
300 + " (conv2): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
301 + " (bn2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
302 + " (conv3): Conv2d(128, 512, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
303 + " (bn3): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
304 + " (relu): ReLU(inplace=True)\n",
305 + " )\n",
306 + " )\n",
307 + " (5): Sequential(\n",
308 + " (0): Bottleneck(\n",
309 + " (conv1): Conv2d(512, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
310 + " (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
311 + " (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False)\n",
312 + " (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
313 + " (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
314 + " (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
315 + " (relu): ReLU(inplace=True)\n",
316 + " (downsample): Sequential(\n",
317 + " (0): Conv2d(512, 1024, kernel_size=(1, 1), stride=(2, 2), bias=False)\n",
318 + " (1): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
319 + " )\n",
320 + " )\n",
321 + " (1): Bottleneck(\n",
322 + " (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
323 + " (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
324 + " (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
325 + " (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
326 + " (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
327 + " (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
328 + " (relu): ReLU(inplace=True)\n",
329 + " )\n",
330 + " (2): Bottleneck(\n",
331 + " (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
332 + " (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
333 + " (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
334 + " (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
335 + " (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
336 + " (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
337 + " (relu): ReLU(inplace=True)\n",
338 + " )\n",
339 + " (3): Bottleneck(\n",
340 + " (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
341 + " (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
342 + " (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
343 + " (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
344 + " (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
345 + " (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
346 + " (relu): ReLU(inplace=True)\n",
347 + " )\n",
348 + " (4): Bottleneck(\n",
349 + " (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
350 + " (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
351 + " (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
352 + " (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
353 + " (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
354 + " (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
355 + " (relu): ReLU(inplace=True)\n",
356 + " )\n",
357 + " (5): Bottleneck(\n",
358 + " (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
359 + " (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
360 + " (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
361 + " (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
362 + " (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
363 + " (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
364 + " (relu): ReLU(inplace=True)\n",
365 + " )\n",
366 + " )\n",
367 + " (6): Sequential(\n",
368 + " (0): Bottleneck(\n",
369 + " (conv1): Conv2d(1024, 512, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
370 + " (bn1): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
371 + " (conv2): Conv2d(512, 512, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False)\n",
372 + " (bn2): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
373 + " (conv3): Conv2d(512, 2048, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
374 + " (bn3): BatchNorm2d(2048, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
375 + " (relu): ReLU(inplace=True)\n",
376 + " (downsample): Sequential(\n",
377 + " (0): Conv2d(1024, 2048, kernel_size=(1, 1), stride=(2, 2), bias=False)\n",
378 + " (1): BatchNorm2d(2048, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
379 + " )\n",
380 + " )\n",
381 + " (1): Bottleneck(\n",
382 + " (conv1): Conv2d(2048, 512, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
383 + " (bn1): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
384 + " (conv2): Conv2d(512, 512, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
385 + " (bn2): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
386 + " (conv3): Conv2d(512, 2048, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
387 + " (bn3): BatchNorm2d(2048, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
388 + " (relu): ReLU(inplace=True)\n",
389 + " )\n",
390 + " (2): Bottleneck(\n",
391 + " (conv1): Conv2d(2048, 512, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
392 + " (bn1): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
393 + " (conv2): Conv2d(512, 512, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
394 + " (bn2): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
395 + " (conv3): Conv2d(512, 2048, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
396 + " (bn3): BatchNorm2d(2048, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
397 + " (relu): ReLU(inplace=True)\n",
398 + " )\n",
399 + " )\n",
400 + " (7): AdaptiveAvgPool2d(output_size=(1, 1))\n",
401 + " )\n",
402 + " (fc): Linear(in_features=2048, out_features=1000, bias=True)\n",
403 + ")\n",
404 + "\n",
405 + "[+] Load dataset\n",
406 + "[+] Child 0 training started (GPU: 0)\n",
407 + "\n",
408 + "[+] Training step: 0/10000\tElapsed time: 0.24min\tLearning rate: 9.999283e-05\tDevice name: Tesla P100-PCIE-16GB\n",
409 + " Acc@1 : 0.000%\n",
410 + " Acc@5 : 0.000%\n",
411 + " Loss : 7.242412567138672\n",
412 + "\n",
413 + "[+] Training step: 500/10000\tElapsed time: 9.44min\tLearning rate: 9.647145853624023e-05\tDevice name: Tesla P100-PCIE-16GB\n",
414 + " Acc@1 : 100.000%\n",
415 + " Acc@5 : 100.000%\n",
416 + " Loss : 0.00023103877902030945\n"
417 + ],
418 + "name": "stdout"
419 + }
420 + ]
421 + },
422 + {
423 + "cell_type": "code",
424 + "metadata": {
425 + "id": "3iBnXLMsES7H",
426 + "colab_type": "code",
427 + "colab": {}
428 + },
429 + "source": [
430 + ""
431 + ],
432 + "execution_count": 0,
433 + "outputs": []
434 + },
435 + {
436 + "cell_type": "code",
437 + "metadata": {
438 + "id": "Wc8cguWUhp9l",
439 + "colab_type": "code",
440 + "colab": {}
441 + },
442 + "source": [
443 + ""
444 + ],
445 + "execution_count": 0,
446 + "outputs": []
447 + }
448 + ]
449 +}
...\ No newline at end of file ...\ No newline at end of file
...@@ -104,20 +104,20 @@ def dict_to_namedtuple(d): ...@@ -104,20 +104,20 @@ def dict_to_namedtuple(d):
104 104
105 def parse_args(kwargs): 105 def parse_args(kwargs):
106 # combine with default args 106 # combine with default args
107 - kwargs['dataset'] = kwargs['dataset'] if 'dataset' in kwargs else 'cifar10' 107 + kwargs['dataset'] = kwargs['dataset'] if 'dataset' in kwargs else 'BraTS'
108 - kwargs['network'] = kwargs['network'] if 'network' in kwargs else 'resnet_cifar10' 108 + kwargs['network'] = kwargs['network'] if 'network' in kwargs else 'resnet50'
109 kwargs['optimizer'] = kwargs['optimizer'] if 'optimizer' in kwargs else 'adam' 109 kwargs['optimizer'] = kwargs['optimizer'] if 'optimizer' in kwargs else 'adam'
110 - kwargs['learning_rate'] = kwargs['learning_rate'] if 'learning_rate' in kwargs else 0.1 110 + kwargs['learning_rate'] = kwargs['learning_rate'] if 'learning_rate' in kwargs else 0.0001
111 kwargs['seed'] = kwargs['seed'] if 'seed' in kwargs else None 111 kwargs['seed'] = kwargs['seed'] if 'seed' in kwargs else None
112 kwargs['use_cuda'] = kwargs['use_cuda'] if 'use_cuda' in kwargs else True 112 kwargs['use_cuda'] = kwargs['use_cuda'] if 'use_cuda' in kwargs else True
113 kwargs['use_cuda'] = kwargs['use_cuda'] and torch.cuda.is_available() 113 kwargs['use_cuda'] = kwargs['use_cuda'] and torch.cuda.is_available()
114 kwargs['num_workers'] = kwargs['num_workers'] if 'num_workers' in kwargs else 4 114 kwargs['num_workers'] = kwargs['num_workers'] if 'num_workers' in kwargs else 4
115 - kwargs['print_step'] = kwargs['print_step'] if 'print_step' in kwargs else 2000 115 + kwargs['print_step'] = kwargs['print_step'] if 'print_step' in kwargs else 500
116 - kwargs['val_step'] = kwargs['val_step'] if 'val_step' in kwargs else 2000 116 + kwargs['val_step'] = kwargs['val_step'] if 'val_step' in kwargs else 500
117 kwargs['scheduler'] = kwargs['scheduler'] if 'scheduler' in kwargs else 'exp' 117 kwargs['scheduler'] = kwargs['scheduler'] if 'scheduler' in kwargs else 'exp'
118 kwargs['batch_size'] = kwargs['batch_size'] if 'batch_size' in kwargs else 128 118 kwargs['batch_size'] = kwargs['batch_size'] if 'batch_size' in kwargs else 128
119 kwargs['start_step'] = kwargs['start_step'] if 'start_step' in kwargs else 0 119 kwargs['start_step'] = kwargs['start_step'] if 'start_step' in kwargs else 0
120 - kwargs['max_step'] = kwargs['max_step'] if 'max_step' in kwargs else 64000 120 + kwargs['max_step'] = kwargs['max_step'] if 'max_step' in kwargs else 6500
121 kwargs['fast_auto_augment'] = kwargs['fast_auto_augment'] if 'fast_auto_augment' in kwargs else False 121 kwargs['fast_auto_augment'] = kwargs['fast_auto_augment'] if 'fast_auto_augment' in kwargs else False
122 kwargs['augment_path'] = kwargs['augment_path'] if 'augment_path' in kwargs else None 122 kwargs['augment_path'] = kwargs['augment_path'] if 'augment_path' in kwargs else None
123 123
......