Hyunjun

add receipt code

@@ -72,4 +72,8 @@ image_process.py : make the image square via cropping and resize it to 32*32,
 
 
 15. Added params.pkl
-Added the params.pkl file, which stores the weights obtained by training the dataset we built
\ No newline at end of file
+Added the params.pkl file, which stores the weights obtained by training the dataset we built
+
+
+16. Added simple_convnet_rasberryPi code
+Added code so that a receipt is printed according to the recognition result
\ No newline at end of file
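
For reference, a minimal sketch (not part of this commit) for checking what the committed params.pkl contains from Python; it assumes the file pickles a dict-like object of NumPy-style weight arrays, which the diff above does not confirm:

# Hypothetical inspection helper: print the shape of each entry stored in params.pkl.
import pickle

with open('params.pkl', 'rb') as f:
    params = pickle.load(f)

for key, value in params.items():  # assumes a dict-like object of arrays
    print(key, getattr(value, 'shape', type(value)))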
...
@@ -18,7 +18,7 @@ struct conv_param {
 };
 
 class SimpleConvNet {
-private:
+private:
     std::vector< Layer* > layers;
 
     std::vector<Mat> W[7]; // weights
@@ -26,9 +26,10 @@ private:
 public:
     SimpleConvNet() {}
     ~SimpleConvNet() {}
-    SimpleConvNet(input_dim id, conv_param cp, int hidden_size=512, int output_size=10, bool pretrained=true) {
-        if (pretrained)
-            load_trained("params.txt");
+    SimpleConvNet(input_dim id, conv_param cp, int hidden_size = 512, int output_size = 10, bool pretrained = true) {
+
+        if (pretrained)
+            load_trained("params_noLNORM.txt");
 
         layers.push_back(new Convolution(W[0], 1, 1));
         layers.push_back(new LightNormalization());
@@ -47,14 +48,14 @@ public:
         layers.push_back(new Convolution(W[3], 1, 0));
         layers.push_back(new LightNormalization());
         layers.push_back(new Relu());
-
+
         layers.push_back(new DW_Convolution(W[4], 1, 1));
         layers.push_back(new LightNormalization());
         layers.push_back(new Relu());
         layers.push_back(new Pooling(2, 2, 2));
 
         layers.push_back(new Affine(W[5]));
-        layers.push_back(new LightNormalization());
+        //layers.push_back(new LightNormalization());
         layers.push_back(new Relu());
 
         layers.push_back(new Affine(W[6]));
@@ -62,12 +63,15 @@ public:
 
     std::vector< Mat > predict(std::vector<Mat>& x) {
         for (int i = 0; i < layers.size(); i++) {
+            //printf("%d Layer : (%d, %d, %d, %d)\n",i, x.size(), x[0].dim, x[0].row, x[0].col);
             x = layers[i]->forward(x);
         }
+        //printf("Layer : (%d %d %d %d)\n", x.size(), x[0].dim, x[0].row, x[0].col);
         return x;
     }
 
-    double accuracy(std::vector< std::vector< unsigned char > > x, std::vector< int > ans, int batch_size=100) {
+    double accuracy(std::vector< std::vector< unsigned char > > x, std::vector< int > ans, int batch_size = 100) {
+        // ...
         return 1.0;
     }
 
@@ -87,21 +91,21 @@ public:
         return pred;
     }
 
-    void load_trained(const char* filename="params.txt") {
+    void load_trained(const char* filename = "params.txt") {
         FILE *f = fopen(filename, "r");
         if (f == NULL) {
            printf("File not found\n");
            exit(1);
-        }
+        }
         char line[10] = { 0 };
         int keynum;
-        while (fscanf(f, "%s", line)==1) {
+        while (fscanf(f, "%s", line) == 1) {
             char s[4][10] = { 0 };
             keynum = line[1] - '0' - 1;
 
             // get shape
-            fscanf(f, "%s", s[0]);
-            fscanf(f, "%s", s[1]);
+            fscanf(f, "%s", s[0]); // "(num
+            fscanf(f, "%s", s[1]); // num
             if (s[1][strlen(s[1]) - 1] != '\"') {
                 fscanf(f, "%s", s[2]);
                 fscanf(f, "%s", s[3]);
@@ -109,7 +113,7 @@ public:
 
             // nw = number of weights : shape[0]
             // size = input size of W[key]
-            int size = 1, nw=0;
+            int size = 1, nw = 0;
             for (int i = 0; i < 4; i++) {
                 int val = 0;
                 for (int j = 0; j < strlen(s[i]); j++) {
@@ -130,9 +134,9 @@ public:
             for (int i = 0; i < size; i++) {
                 fscanf(f, "%lf", &mm[i%fsize]);
                 if (i%fsize == fsize - 1) {
-                    if(shape[keynum].size() == 2)
+                    if (shape[keynum].size() == 2)
                         W[keynum].push_back(Mat(1, 1, shape[keynum][1], std::vector<double>(mm, mm + fsize)));
-                    else if(shape[keynum].size() == 4)
+                    else if (shape[keynum].size() == 4)
                         W[keynum].push_back(Mat(shape[keynum][1], shape[keynum][2],
                             shape[keynum][3], std::vector<double>(mm, mm + fsize)));
                 }
...
1 #include"Layers.hpp" 1 #include"Layers.hpp"
2 #include"SimpleConvNet.hpp" 2 #include"SimpleConvNet.hpp"
3 +#include"fstream"
3 using namespace std; 4 using namespace std;
4 int main() { 5 int main() {
5 6
@@ -7,7 +8,7 @@ int main() {
     conv_param cp = { 32,32,64, 3,1,1 };
     SimpleConvNet SCN(id, cp);
 
-    freopen("input.txt", "r", stdin);
+    freopen("input.txt", "r", stdin);
     vector<Mat> X;
     int nx = 1, dim = 3, row = 32, col = 32;
     double tmp;
@@ -43,7 +44,93 @@ int main() {
     auto pred = SCN.argmax(x);
 
     int num = 0, pd;
+
+
+
+    char receipt[100];
+    int total = 0;
 
-    printf("predict : %d ", pred[0]);
+    ifstream fin;
+    ofstream fout;
+    fin.open("receipt.txt");
+    char c;
+    int cnt=0;
+
+    while(fin.get(c)){
+        receipt[cnt] = c;
+        cnt++;
+    }
+    fin.close();
+
+    fout.open("receipt.txt");
+    for(int i=0; i<cnt; i++){
+        fout << receipt[i];
+    }
+
+    switch(pred[0]){
+        case 0:
+            cout << "can\n";
+            fout << 0;
+            break;
+        case 1:
+            cout << "ramen\n";
+            fout << 1;
+            break;
+        case 2:
+            cout << "cigarette\n";
+            fout << 2;
+            break;
+        case 3:
+            cout << "instant rice\n";
+            fout << 3;
+            break;
+        case 4:
+            cout << "wet tissue\n";
+            fout << 4;
+            break;
+        default:
+            cout << "try again\n";
+            break;
+    }
+
+    fout.close();
+
+    fin.open("receipt.txt");
+    cnt=0;
+
+    while(fin.get(c)){
+        receipt[cnt] = c;
+        cnt++;
+    }
+    fin.close();
+    cout << "\n\n===================receipt===================\n\n\n";
+    for(int i = 0; i<cnt; i++){
+        switch(receipt[i]){
+            case '0':
+                cout << "can---------------------------------1,200Won\n";
+                total += 1200;
+                break;
+            case '1':
+                cout << "ramen-------------------------------1,000Won\n";
+                total += 1000;
+                break;
+            case '2':
+                cout << "cigarette---------------------------4,500Won\n";
+                total += 4500;
+                break;
+            case '3':
+                cout << "instant rice------------------------1,500Won\n";
+                total += 1500;
+                break;
+            case '4':
+                cout << "wet tissue--------------------------2,000Won\n";
+                total += 2000;
+                break;
+            default:
+                break;
+        }
+    }
+    cout << "\nTotal-------------------------------" << total << "Won\n";
+    cout << "\n\n=============================================\n\n\n";
     return 0;
-}
\ No newline at end of file
+}
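
In the added receipt flow above, receipt.txt acts as a running log of one digit per recognized item (0 = can, 1 = ramen, 2 = cigarette, 3 = instant rice, 4 = wet tissue): main() copies the existing contents back into the file, appends the digit for the new prediction, then re-reads the file and prints one price line per digit. As a small worked example (illustrative contents, not the committed file): if receipt.txt already holds "02" and the new prediction is 1, the file becomes "021" and the program prints the can, cigarette, and ramen lines with a total of 1,200 + 4,500 + 1,000 = 6,700 Won.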
...
@@ -2,9 +2,14 @@ from numpy import array, savetxt
 from PIL import Image
 import sys
 import picamera
+import time
 
 camera = picamera.PiCamera()
 
+camera.start_preview()
+time.sleep(3)
+camera.stop_preview()
+
 filename = 'image.jpg'
 camera.capture(filename)
 
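
The preview plus time.sleep(3) added above is presumably there to give the camera's automatic exposure and white balance a moment to settle before camera.capture() runs; the picamera documentation suggests waiting a couple of seconds after start_preview() for exactly this reason.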
...
This diff could not be displayed because it is too large.
+0201
+112
\ No newline at end of file