// SimpleConvNet.hpp
#ifndef SIMPLECONV_
#define SIMPLECONV_
#include "Layers.hpp"
#include <iostream>
#include <cstdio>
#include <cstring>
#include <cstdlib>
#include <vector>
// Input dimensions: depth/channels (d1), rows (d2), columns (d3),
// matching Mat's (dim, row, col) layout.
struct input_dim {
	int d1, d2, d3;
	input_dim(int d1, int d2, int d3) : d1(d1), d2(d2), d3(d3) {}
};

// Convolution hyperparameters: filter counts for the three convolution
// stages (fn1..fn3), plus the filter size, padding, and stride.
struct conv_param {
	int fn1, fn2, fn3;
	int filtersize, pad, stride;
	conv_param(int ftnum1, int ftnum2, int ftnum3, int ftsize, int pad, int stride)
		: fn1(ftnum1), fn2(ftnum2), fn3(ftnum3), filtersize(ftsize), pad(pad), stride(stride) {}
};

class SimpleConvNet {
private:
	std::vector< Layer* > layers;

	std::vector<Mat> W[7];		// weight tensors for the 5 conv and 2 affine layers
	std::vector<int> shape[7];	// shape of each weight tensor
public:
	SimpleConvNet() {}
	// Frees the owned layers (assumes Layer declares a virtual destructor).
	~SimpleConvNet() { for (size_t i = 0; i < layers.size(); i++) delete layers[i]; }
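
	// Fixed architecture built below (LN = LightNormalization):
	//   Conv-LN-Relu-Pool -> Conv-LN-Relu -> DWConv-LN-Relu-Pool
	//   -> Conv-LN-Relu -> DWConv-LN-Relu-Pool -> Affine-Relu -> Affine
	// Every layer shape is taken from the loaded weight tensors W[0..6],
	// which is why load_trained() runs before any layer is constructed.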
	SimpleConvNet(input_dim id, conv_param cp, int hidden_size = 512, int output_size = 10, bool pretrained = true) {
		
		if (pretrained)
			load_trained("params_noLNORM.txt");

		layers.push_back(new Convolution(W[0], 1, 1));
		layers.push_back(new LightNormalization());
		layers.push_back(new Relu());
		layers.push_back(new Pooling(2, 2, 2));

		layers.push_back(new Convolution(W[1], 1, 0));
		layers.push_back(new LightNormalization());
		layers.push_back(new Relu());

		layers.push_back(new DW_Convolution(W[2], 1, 1));
		layers.push_back(new LightNormalization());
		layers.push_back(new Relu());
		layers.push_back(new Pooling(2, 2, 2));

		layers.push_back(new Convolution(W[3], 1, 0));
		layers.push_back(new LightNormalization());
		layers.push_back(new Relu());

		layers.push_back(new DW_Convolution(W[4], 1, 1));
		layers.push_back(new LightNormalization());
		layers.push_back(new Relu());
		layers.push_back(new Pooling(2, 2, 2));

		layers.push_back(new Affine(W[5]));
		//layers.push_back(new LightNormalization());
		layers.push_back(new Relu());

		layers.push_back(new Affine(W[6]));
	}

	std::vector< Mat > predict(std::vector<Mat>& x) {
		// Forward pass: each layer consumes the batch and replaces it with its output.
		for (size_t i = 0; i < layers.size(); i++) {
			//printf("%d Layer : (%d, %d, %d, %d)\n", i, x.size(), x[0].dim, x[0].row, x[0].col);
			x = layers[i]->forward(x);
		}
		//printf("Layer : (%d %d %d %d)\n", x.size(), x[0].dim, x[0].row, x[0].col);
		return x;
	}

	double accuracy(std::vector< std::vector< unsigned char > > x, std::vector< int > ans, int batch_size = 100) {
		// ...
		return 1.0;
	}

	std::vector<int> argmax(std::vector< Mat >& x) {
		// Index of the highest score in each output vector = predicted class.
		std::vector<int> pred;
		for (size_t n = 0; n < x.size(); n++) {
			int pid = 0;
			double pval = -1e9;
			for (size_t i = 0; i < x[n].mat.size(); i++) {
				if (pval < x[n].mat[i]) {
					pval = x[n].mat[i];
					pid = (int)i;
				}
			}
			pred.push_back(pid);
		}
		return pred;
	}

	void load_trained(const char* filename = "params.txt") {
		// Loads the weight tensors from a whitespace-separated text dump. Judging
		// by the parsing below, each tensor is stored as a key ("W1".."W7"), a
		// quoted shape tuple with two or four entries, e.g. "(10, 20)" or
		// "(10, 1, 3, 3)", followed by its values in row-major order.
		FILE *f = fopen(filename, "r");
		if (f == NULL) {
			printf("File not found: %s\n", filename);
			exit(1);
		}
		char line[10] = { 0 };
		int keynum;
		while (fscanf(f, "%9s", line) == 1) {
			char s[4][10] = { { 0 } };
			keynum = line[1] - '0' - 1;	// e.g. key "W3" -> index 2

			// Read the shape tuple: two tokens for an affine weight, four for a
			// convolution filter; the tuple ends at the closing quote.
			fscanf(f, "%9s", s[0]);
			fscanf(f, "%9s", s[1]);
			if (s[1][strlen(s[1]) - 1] != '\"') {
				fscanf(f, "%9s", s[2]);
				fscanf(f, "%9s", s[3]);
			}

			// nw = number of filters (shape[keynum][0])
			// size = total number of values in W[keynum]
			int size = 1, nw = 0;
			for (int i = 0; i < 4; i++) {
				int val = 0;
				for (size_t j = 0; j < strlen(s[i]); j++) {
					if ('0' <= s[i][j] && s[i][j] <= '9') {
						val = 10 * val + (s[i][j] - '0');
					}
				}
				if (val) {
					shape[keynum].push_back(val);
					size *= val;
					if (nw == 0)
						nw = val;
				}
			}
			// Read the data of W[keynum], one filter (fsize values) at a time
			int fsize = size / nw;
			double *mm = new double[fsize];
			for (int i = 0; i < size; i++) {
				fscanf(f, "%lf", &mm[i % fsize]);
				if (i % fsize == fsize - 1) {
					if (shape[keynum].size() == 2)
						W[keynum].push_back(Mat(1, 1, shape[keynum][1],
							std::vector<double>(mm, mm + fsize)));
					else if (shape[keynum].size() == 4)
						W[keynum].push_back(Mat(shape[keynum][1], shape[keynum][2],
							shape[keynum][3], std::vector<double>(mm, mm + fsize)));
				}
			}
			delete[] mm;	// release the per-filter read buffer
		}
		fclose(f);
		printf("Trained weights loading done\n");
	}
};
#endif
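
// Example usage (a minimal sketch; the input dimensions and conv_param values
// below are illustrative assumptions, as is the load_images() helper):
//
//     #include "SimpleConvNet.hpp"
//
//     int main() {
//         input_dim id(1, 28, 28);             // e.g. single-channel 28x28 images
//         conv_param cp(32, 32, 64, 3, 1, 1);  // hypothetical filter configuration
//         SimpleConvNet net(id, cp);           // reads "params_noLNORM.txt"
//
//         std::vector<Mat> batch = load_images();       // hypothetical input loader
//         std::vector<Mat> scores = net.predict(batch);
//         std::vector<int> labels = net.argmax(scores);
//         return 0;
//     }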