/*
 * Rick van der Zwet
 * 0433373
 * OS Assignment 3
 * Licence: BSD
 * $Id: nn.c 557 2008-04-08 22:57:09Z rick $
 */
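
/*
 * Build note (not part of the original header, added as a hint): the code
 * uses exp() and round(), so it is typically linked against the math
 * library, e.g. something like
 *   cc -O2 -o nn nn.c -lm
 * The #ifndef guards below allow the network dimensions and learning rate
 * to be overridden at compile time, e.g. -DINPUT_SIZE=3 -DHIDDEN_SIZE=3.
 */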

#include <sysexits.h>
#include <stdio.h>
#include <stdlib.h>
#include <math.h>
#include <time.h>

/* NOTE: Index 0 of every layer array is either a bias node or an unused
 * placeholder node.
 * - Validation is done by rounding, so keep the outputs discrete or
 *   alter the validation function.
 */

/* Boolean constants for uniform return values */
#define TRUE 1
#define FALSE 0


/* Network variables */
/* NOTE: node 0 is the 'hidden' bias node */
#ifndef INPUT_SIZE
#define INPUT_SIZE 11
#endif

/* NOTE: node 0 is the 'hidden' bias node */
#ifndef HIDDEN_SIZE
#define HIDDEN_SIZE 11
#endif

/* NOTE: node 0 is an unused placeholder node */
#ifndef OUTPUT_SIZE
#define OUTPUT_SIZE 11
#endif

/* Learning rate (alpha) of the network */
#ifndef LEARN_SPEED
#define LEARN_SPEED 0.5
#endif

/* Check network quality after every QUALITY_ROUND training patterns */
#define QUALITY_ROUND 100

/* Training set, used to train the network */
char * file_training = "data/training.txt";
/* Validation set, used to test the end result of the network */
char * file_validate = "data/validate.txt";
/* Quality set, used for quick checks whether the network is improving */
char * file_quality = "data/quality.txt";

/* Globally defined arrays, which represent the network */
double hidden[HIDDEN_SIZE];
double input[INPUT_SIZE];
double output[OUTPUT_SIZE];
double target[OUTPUT_SIZE];
double weight_HtoO[HIDDEN_SIZE][OUTPUT_SIZE];
double weight_ItoH[INPUT_SIZE][HIDDEN_SIZE];

#define WEIGHT_NOT_USED -99999
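
/*
 * Layout convention (summarising the code below): weight_ItoH[i][j] is the
 * weight from input node i to hidden node j, and weight_HtoO[i][j] the
 * weight from hidden node i to output node j. input[0] and hidden[0] act as
 * bias nodes (fixed to -1 in main()), so column 0 of both weight matrices is
 * never trained and is marked WEIGHT_NOT_USED by stdInit().
 */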

void stdInit() {
    int i;
    /* Weights leading into node 0 are never used; mark them as such */
    for (i = 0; i < INPUT_SIZE; i++)
        weight_ItoH[i][0] = WEIGHT_NOT_USED;
    for (i = 0; i < HIDDEN_SIZE; i++)
        weight_HtoO[i][0] = WEIGHT_NOT_USED;
}


/* Random init of weights */
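/* Weights are drawn uniformly from {0.00, 0.01, ..., 0.99} */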
void randInit() {
    int i,j;

    /* Different numbers every call */
    srandom(time(NULL));

    for (i = 0; i < INPUT_SIZE; i++)
        for (j = 1; j < HIDDEN_SIZE; j++) {
            weight_ItoH[i][j] = (double)(random() % 100) / 100;
        }

    for (i = 0; i < HIDDEN_SIZE; i++)
        for (j = 1; j < OUTPUT_SIZE; j++)
            weight_HtoO[i][j] = (double)(random() % 100) / 100;

    stdInit();
}

/* Fixed init of weights */
void fixedInit() {
    int i,j;
    for (i = 0; i < INPUT_SIZE; i++)
        for (j = 1; j < HIDDEN_SIZE; j++) {
            weight_ItoH[i][j] = 0.5;
        }

    for (i = 0; i < HIDDEN_SIZE; i++)
        for (j = 1; j < OUTPUT_SIZE; j++)
            weight_HtoO[i][j] = 0.5;

    stdInit();
}

/* Define exact weights, used for debugging calculations.
 * Intended for a small network: INPUT = 2, HIDDEN = 2, OUTPUT = 1
 * (real nodes, excluding the bias/placeholder node 0).
 */
void debugInit() {
    stdInit();
    weight_ItoH[0][1] = 1;
    weight_ItoH[0][2] = 1;
    weight_ItoH[1][1] = 0.62;
    weight_ItoH[1][2] = 0.42;
    weight_ItoH[2][1] = 0.55;
    weight_ItoH[2][2] = -0.17;

    weight_HtoO[0][1] = 1;
    weight_HtoO[1][1] = 0.35;
    weight_HtoO[2][1] = 0.81;
}

/* Calculate the hidden (Aj) and output (Ai) activations: the forward pass */
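/*
 * Sketch of the math implemented below: every non-bias node computes a
 * weighted sum of its inputs and squashes it with the logistic sigmoid
 * g(x) = 1 / (1 + e^-x), i.e.
 *   hidden[i] = g( sum_j weight_ItoH[j][i] * input[j] )
 *   output[i] = g( sum_j weight_HtoO[j][i] * hidden[j] )
 */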
void nnCalc() {
    int i,j;
    double total;
    for (i = 1; i < HIDDEN_SIZE; i++) {
        total = 0;
        for (j = 0; j < INPUT_SIZE; j++)
            total += weight_ItoH[j][i] * input[j];
        hidden[i] = 1 / ( 1 + exp(total * (-1)));
    }

    for (i = 1; i < OUTPUT_SIZE; i++) {
        total = 0;
        for (j = 0; j < HIDDEN_SIZE; j++)
            total += weight_HtoO[j][i] * hidden[j];
        output[i] = 1 / ( 1 + exp(total * (-1)));
    }
}

/* Train the network. NOTE: nnCalc() needs to be called first */
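/*
 * This is the standard backpropagation delta rule for a logistic network,
 * matching the loops below:
 *   output_delta[k] = (target[k] - output[k]) * output[k] * (1 - output[k])
 *   hidden_delta[j] = hidden[j] * (1 - hidden[j]) * sum_k weight_HtoO[j][k] * output_delta[k]
 *   weight += LEARN_SPEED * activation * delta
 */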
void nnTrain() {
    int i,j;
    double hidden_delta[HIDDEN_SIZE];
    double output_delta[OUTPUT_SIZE];
    double output_error[OUTPUT_SIZE];
    double hidden_sum_delta[HIDDEN_SIZE];

    for (i = 1; i < OUTPUT_SIZE; i++) {
        output_error[i] = target[i] - output[i];
        output_delta[i] = output_error[i] * output[i] * (1 - output[i]);
    }

    for (i = 0; i < HIDDEN_SIZE; i++) {
        hidden_sum_delta[i] = 0;
        for (j = 1; j < OUTPUT_SIZE; j++)
            hidden_sum_delta[i] += weight_HtoO[i][j] * output_delta[j];
        hidden_delta[i] = hidden[i] * (1 - hidden[i]) * hidden_sum_delta[i];
    }

    for (i = 0; i < HIDDEN_SIZE; i++)
        for (j = 1; j < OUTPUT_SIZE; j++) {
            weight_HtoO[i][j] = weight_HtoO[i][j] + LEARN_SPEED * hidden[i] * output_delta[j];
        }

    for (i = 0; i < INPUT_SIZE; i++)
        for (j = 1; j < HIDDEN_SIZE; j++) {
            weight_ItoH[i][j] = weight_ItoH[i][j] + LEARN_SPEED * input[i] * hidden_delta[j];
        }
}

/* Verify whether the target matches the output */
int nnValidate() {
    int i;
    //printf ("Rounding: %lf - %lf\n",output[1], target[1]);
    for (i = 1; i < OUTPUT_SIZE; i++)
        if (round(output[i]) != round(target[i]))
            return FALSE;
    return TRUE;
}

/* Pretty print of output */
void nnOutput() {
    int i;
    for(i = 0; i < INPUT_SIZE; i++)
        printf("%lf, ", input[i]);
    printf("= %lf - %lf - ", output[1], target[1]);
    if (nnValidate() == TRUE)
        printf("OK");
    else
        printf("ERROR");
    printf("\n");
}

/* Pretty print of hidden nodes */
void nnHiddenOutput() {
    int i;
    for(i = 0; i < HIDDEN_SIZE; i++)
        printf("%lf, ", hidden[i]);
    printf(" - HIDDEN\n");
}


/* Pretty print of all weights */
void nnNeuronOutput() {
    int i,j;
    for (i = 0; i < INPUT_SIZE; i++)
        for(j = 0; j < HIDDEN_SIZE; j++)
            if (weight_ItoH[i][j] != WEIGHT_NOT_USED)
                printf("weight_ItoH[%i][%i] = %lf\n", i, j,
                    weight_ItoH[i][j]);
    printf("---\n");
    for (i = 0; i < HIDDEN_SIZE; i++)
        for(j = 0; j < OUTPUT_SIZE; j++)
            if (weight_HtoO[i][j] != WEIGHT_NOT_USED)
                printf("weight_HtoO[%i][%i] = %lf\n", i, j,
                    weight_HtoO[i][j]);
}

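/*
 * Read one pattern from the data file: (INPUT_SIZE - 1) input values followed
 * by (OUTPUT_SIZE - 1) target values, whitespace separated. Index 0 of both
 * arrays is reserved (bias / unused node) and is therefore skipped.
 * Returns TRUE on a complete pattern, FALSE on EOF or a partial pattern.
 */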
int nnReadInput(FILE * handle) {
    int i = 1;
    double finput;
    while (fscanf(handle, "%lf", &finput) != EOF) {
        if (i < INPUT_SIZE)
            input[i] = finput;
        else if (i < (INPUT_SIZE + OUTPUT_SIZE))
            target[i - INPUT_SIZE] = finput;

        /* Advance to the next value */
        i++;
        /* Skip the unused output node 0 */
        if (i == INPUT_SIZE)
            i++;
        if (i == (INPUT_SIZE + OUTPUT_SIZE))
            return TRUE;
    }

    /* Input not complete */
    return FALSE;
}

/* Verify quality of the current network */
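/*
 * Runs a forward pass for every pattern in the given file and counts how many
 * are classified correctly (all outputs round to their targets). Prints and
 * returns the percentage correct.
 */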
double nnQualityCheck(char * file) {
    double validate_total = 0;
    double validate_ok = 0;
    double validate_percent = 0;
    FILE * handle;

    handle = fopen(file,"r");
    while (nnReadInput(handle) == TRUE) {
        validate_total++;
        nnCalc();
        if (nnValidate() == TRUE)
            validate_ok++;
        //else
        //    nnOutput();
    }
    fclose(handle);
    validate_percent = (validate_ok / validate_total) * 100;
    printf("Validating: %.0lf/%.0lf - %.2lf %%\n",
        validate_ok,validate_total,validate_percent);

    return(validate_percent);
}

/* Main program */
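/*
 * Overall flow: set the bias nodes, initialise the weights, measure the
 * baseline quality, then make a single pass over the training set, training
 * on every misclassified pattern. Every QUALITY_ROUND patterns the quality
 * set is re-checked and the best-scoring round is remembered; the final
 * score is taken on the validation set.
 */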
int main (int argc, char * argv[]) {
    int training_total, training_best;
    double quality_max, quality;
    FILE * handle;

    /* Set the bias nodes */
    input[0] = -1;
    hidden[0] = -1;

    /* Initialise all weights */
    //debugInit();
    fixedInit();
    //randInit();

    /* Set initial quality */
    quality_max = nnQualityCheck(file_quality);
    training_best = 0;
    training_total = 0;

    printf("Running neural network with following parameters\n");
    printf("Input nodes : %i\n", INPUT_SIZE);
    printf("Hidden nodes : %i\n", HIDDEN_SIZE);
    printf("Output nodes : %i\n", OUTPUT_SIZE);
    printf("Learning rate : %lf\n", LEARN_SPEED);
    printf("Quality check : %i\n", QUALITY_ROUND);
    printf("Initial quality : %lf %%\n", quality_max);
    /* Start training */
    //nnNeuronOutput();
    handle = fopen(file_training,"r");
    while ( nnReadInput(handle) == TRUE) {
        training_total++;
        nnCalc();
        //nnOutput();
        //nnHiddenOutput();

        if (nnValidate() == FALSE) {
            nnTrain();
            //nnNeuronOutput();
        }

        /* Check quality periodically and remember the best training round */
        if ((training_total % QUALITY_ROUND) == 0) {
            printf("Learned: %i - ", training_total);
            quality = nnQualityCheck(file_quality);
            if (quality > quality_max) {
                quality_max = quality;
                training_best = training_total;
            }
        }
    }
    fclose(handle);
    printf("Max quality: %.2lf%% at training round: %i\n", quality_max,
        training_best);
    quality = nnQualityCheck(file_validate);
    return(EX_OK);
}