/* Declarations for standard 2 layer networks */

#ifndef STDNET_H
#define STDNET_H

#include "defs.h"

char *StdOptStr();
char *StdUsage();
char StdName[80];

/* These and Nin,Nhid,Nout,Ntrain are to be defined */
/* in initStd() */

int Nbits;   /* bits for bias and weights (gen.alg.) */
float Width; /* weights in [-Width,Width] */

int handleStdOpt(char opt,char* arg);
int initStd();
void initTrain();

/* Standard Decoding */

int OffW1,OffW2,IncW1,IncW2,OffLR,OffIM;
float FGmult,FGadd;
int FGmax;

float *grey2float;
float *grey2lrate;
float *grey2imp;
word *int2grey;

int initDecoding();

#define Nlrate 8     /* bits of the learning rate field */
#define Nimp 8       /* bits of the impulse (momentum) field */
#define Llrate 0.02  /* decodable learning rates in [Llrate,Hlrate] */
#define Hlrate 20.0
#define Limp 0.0     /* decodable impulse values in [Limp,Himp] */
#define Himp 0.95

/* x is a chromosome; getbits(x,i,n) (defined elsewhere, presumably in
   defs.h) extracts the n-bit field starting at bit offset i */

#define decode(x,i,n) grey2float[getbits(x,i,n)]
#define weight1(x,i,h) decode(x,IncW1*(h)+Nbits*(i),Nbits)
#define weight2(x,h,o) decode(x,OffW2+IncW2*(o)+Nbits*(h),Nbits)
#define bias1(x,h) weight1(x,Nin,h)
#define bias2(x,o) weight2(x,Nhid,o)
#define lrate(x) grey2lrate[getbits(x,OffLR,Nlrate)]
#define imp(x) grey2imp[getbits(x,OffIM,Nimp)]
#define float2int(x) ((int)((x)*FGmult+FGadd))

#define sigma(y) (1.0/(1.0+exp(-(y))))

#endif
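The macros above fix the layout of a chromosome: each hidden neuron owns a block of (Nin+1) grey-coded fields of Nbits each (its input weights followed by the bias), the analogous blocks for the output neurons start at OffW2, and, when backpropagation is enabled, the learning rate and impulse fields sit at the very end of the string. The following standalone sketch is hypothetical (it is not part of the sources; it merely mirrors the offset formulas from initDecoding() and initInd() below) and prints this layout for a 2-3-1 network:

/* Hypothetical sketch: print the chromosome bit layout implied by the
   decoding macros for a 2-3-1 network with Nbits=8 and Nback>0. */
#include <stdio.h>

int main(void)
{
  int Nin=2,Nhid=3,Nout=1,Nbits=8,Nlrate=8,Nimp=8;
  int i,h,o;
  int IncW1=(Nin+1)*Nbits;               /* bits per hidden neuron  */
  int OffW2=IncW1*Nhid;                  /* start of layer-2 fields */
  int IncW2=(Nhid+1)*Nbits;              /* bits per output neuron  */
  int CrBits=IncW1*Nhid+IncW2*Nout+Nlrate+Nimp;
  int OffLR=CrBits-Nlrate-Nimp,OffIM=CrBits-Nimp;

  for(h=0;h<Nhid;h++)
    for(i=0;i<=Nin;i++)                  /* i==Nin is the bias field */
      printf("weight1(x,%d,%d) at bit %3d\n",i,h,IncW1*h+Nbits*i);
  for(o=0;o<Nout;o++)
    for(h=0;h<=Nhid;h++)                 /* h==Nhid is the bias field */
      printf("weight2(x,%d,%d) at bit %3d\n",h,o,OffW2+IncW2*o+Nbits*h);
  printf("lrate at bit %d, imp at bit %d, CrBits=%d\n",OffLR,OffIM,CrBits);
  return 0;
}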

/* Implementation of standard 2 layer networks */

#include <math.h>
#include <stdio.h>   /* sprintf, printf (may already be pulled in via defs.h) */
#include <stdlib.h>  /* malloc */
#include <string.h>  /* strcpy, strcat */

#include "defs.h"
#include "ind.h"
#include "stdnet.h"
#include "genback.h"

char IndStdOptStr[40]="B:W:E:b:";
char NetStdOptStr[40]="";

/* defaults; can be overridden by the options below */
int Nbits=8;
float Width=10.0;
float Estimate=0.0;
int Nback=0;

char *IndStdUsage=
"-B <no. of bits for weights>: 8\n"
"-W <interval for weights [-W,W]>: 10\n"
"-E <error estimation factor>: 0.0\n"
"-b <no. of backprop. iterations>: 0\n";

char *NetStdUsage="";

/* Combine the standard options/usage text with the module-specific
   ones. Note that the OptStr functions concatenate into static
   buffers and are therefore meant to be called only once. */

char* IndOptStr()
{
  strcat(IndStdOptStr,StdOptStr());
  return IndStdOptStr;
}
char* NetOptStr()
{
  strcat(NetStdOptStr,StdOptStr());
  return NetStdOptStr;
}
char* IndUsage()
{
  char *s;

  s=(char*)malloc(512);
  strcpy(s,StdUsage());
  strcat(s,IndStdUsage);
  return s;
}
char* NetUsage()
{
  char *s;

  s=(char*)malloc(512);
  strcpy(s,StdUsage());
  strcat(s,NetStdUsage);
  return s;
}

int handleIndOpt(char opt,char* arg)
{
  switch(opt)
  {
    case 'B': return (Nbits=getint(arg,1,24))<0;
    case 'W': return getfloat(&Width,arg,0.01,100);
    case 'E': return getfloat(&Estimate,arg,0.1,100);
    case 'b': return (Nback=getint(arg,0,100000))<0;
  };
  return handleStdOpt(opt,arg);
}

int handleNetOpt(char opt,char* arg)
{
  return handleStdOpt(opt,arg);
}

float *grey2float= (float*)0;
float *grey2lrate= (float*)0;
float *grey2imp=   (float*)0;
word  *int2grey=   (word*)0;

float *Hidden=(float*)0;      /* activations of the hidden layer */

float **TrainIn=  (float**)0; /* Training Set Input */
float **TrainOut= (float**)0; /* Training Set Output */

int OffW1,OffW2,IncW1,IncW2,OffLR,OffIM;

/* Build the table int2grey, with a[b] holding the grey code of b:
   the inner loop decodes grey code i to its binary value sum (par is
   the running parity of the bits already seen), so storing a[sum]=i
   fills in the inverse mapping. */
void intgrey(word *a,int n)
{
  int i;
  int sum,bit,par;

  for(i=0;i<n;i++)
  {
    bit=n>>1; sum=0; par=0;
    while(bit)
    {
      if(bit&i) par=!par;
      if(par) sum|=bit;
      bit>>=1;
    };
    a[sum]=i;
  };
}

/* Fill a with n equidistant values from [l,h], indexed by grey code:
   a[i] holds the value whose rank is the binary index sum of the
   grey code i (same decoding loop as in intgrey() above). */
void greyfloat(float *a,int n,float l,float h)
{
  int i;
  int sum,bit,par;

  for(i=0;i<n;i++)
  {
    bit=n>>1; sum=0; par=0;
    while(bit)
    {
      if(bit&i) par=!par;
      if(par) sum|=bit;
      bit>>=1;
    };
    a[i]=(float)sum*(h-l)/(n-1)+l;
  };
}

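Both tables rely on the same decoding loop: par carries the parity (running XOR) of the higher-order bits of i, which is exactly the standard inverse of the grey encoding g = b ^ (b>>1). A hypothetical standalone check of this round trip (not part of the sources):

/* Verify that the decoding loop used in intgrey()/greyfloat()
   inverts the usual grey encoding g = b^(b>>1). */
#include <stdio.h>

int main(void)
{
  int n=1<<4;                    /* a 4-bit table suffices here */
  int i,bit,sum,par,ok=1;

  for(i=0;i<n;i++)
  {
    bit=n>>1; sum=0; par=0;      /* same loop as in the module */
    while(bit)
    {
      if(bit&i) par=!par;
      if(par) sum|=bit;
      bit>>=1;
    };
    if((sum^(sum>>1))!=i) ok=0;  /* sum must grey-encode back to i */
    printf("grey %2d -> binary %2d\n",i,sum);
  };
  printf(ok?"round trip ok\n":"round trip FAILED\n");
  return 0;
}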
/* Allocate and fill the training set; TrainIn[i][Nin] is set to a
   constant 1.0 and serves as bias input. Afterwards the patterns
   are shuffled by 4*Ntrain random transpositions. */
int getTrainSet()
{
  int i,j,k;
  float *p,*q;

  if(TrainIn) return 0;
  if(!(TrainIn=(float**)malloc(Ntrain*PNTLEN))) return 1;
  if(!(TrainOut=(float**)malloc(Ntrain*PNTLEN))) return 1;
  if(!(p=(float*)malloc((Nin+1)*Ntrain*FLOATLEN))) return 1;
  if(!(q=(float*)malloc(Nout*Ntrain*FLOATLEN))) return 1;
  for(i=0;i<Ntrain;i++)
  {
    TrainIn[i]=p; p+=(Nin+1);
    TrainOut[i]=q; q+=Nout;
    TrainIn[i][Nin]=1.0;
  };
  initTrain();
  for(i=0;i<4*Ntrain;i++)
  {
    j=getrand()%Ntrain; k=getrand()%Ntrain;
    p=TrainIn[k]; q=TrainOut[k];
    TrainIn[k]=TrainIn[j]; TrainOut[k]=TrainOut[j];
    TrainIn[j]=p; TrainOut[j]=q;
  };
  return 0;
}

/* Compute the bit offsets of the weight fields within a chromosome
   and build the decoding tables. The learning rate table is filled
   on a logarithmic scale and then exponentiated, so the decodable
   rates are spaced geometrically in [Llrate,Hlrate]. */
int initDecoding()
{
  int i;

  OffW1=0;
  IncW1=(Nin+1)*Nbits;      /* bits per hidden neuron (weights+bias)   */
  OffW2=IncW1*Nhid;         /* layer-2 fields follow the layer-1 block */
  IncW2=(Nhid+1)*Nbits;     /* bits per output neuron (weights+bias)   */
  OffLR=CrBits-Nlrate-Nimp; /* learning rate and impulse at the end    */
  OffIM=CrBits-Nimp;
  FGmax=(1<<Nbits)-1;
  FGmult=0.5*(float)FGmax/Width;
  FGadd=0.5*(float)FGmax+0.5;
  if(!(grey2float=(float*)malloc((1<<Nbits)*FLOATLEN))) return 1;
  greyfloat(grey2float,1<<Nbits,-Width,Width);

  if(Nback)
  {
    if(int2grey) return 0;
    if(!(int2grey=(word*)malloc((1<<Nbits)*WORDLEN))) return 1;
    intgrey(int2grey,1<<Nbits);
    if(!(grey2lrate=(float*)malloc((1<<Nlrate)*FLOATLEN))) return 1;
    greyfloat(grey2lrate,1<<Nlrate,log(Llrate),log(Hlrate));
    for(i=0;i<(1<<Nlrate);i++) grey2lrate[i]=exp(grey2lrate[i]);
    if(!(grey2imp=(float*)malloc((1<<Nimp)*FLOATLEN))) return 1;
    greyfloat(grey2imp,1<<Nimp,Limp,Himp);
  };
  return 0;
}
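In closed form: writing g(k) for the binary index whose grey code is k (the value computed by the decoding loop in greyfloat()), the table built above is

  grey2lrate[k] = exp( ln(Llrate) + g(k)/(2^Nlrate-1) * (ln(Hlrate)-ln(Llrate)) )
                = Llrate * (Hlrate/Llrate)^(g(k)/(2^Nlrate-1)),

so the 256 decodable learning rates are spaced geometrically over [0.02,20.0], while grey2imp covers [Limp,Himp]=[0.0,0.95] linearly.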

int initInd()
{
  char s[80]="";
  char t[80]="";

  initStd();
  if(!(Ntrain && Nin && Nhid && Nout)) return 1;

  /* chromosome length: one Nbits field per weight and bias, plus the
     learning rate and impulse fields if backpropagation is used */
  CrBits=(Nin+1)*Nbits*Nhid+(Nhid+1)*Nbits*Nout;
  if(Nback) CrBits+=Nlrate+Nimp;
  CrBytes=(CrBits-1)/8+1;
  CrWords=(CrBytes-1)/WORDLEN+1;

  if(initDecoding()) return 1;
  if(getTrainSet()) return 1;

  if(!(Hidden=(float*)malloc(Nhid*FLOATLEN))) return 1;

  NoTrain=Ntrain;
  OffsTrain=0;

  if(Nback)
    sprintf(t,
      "  %d backpropagation steps per generation\n",Nback);
  if(Estimate!=0.0)
  {
    /* error estimation: use only a number of patterns proportional
       to the number of weights per pattern component */
    NoTrain=(Estimate*((Nin+1)*Nhid+(Nhid+1)*Nout))/(Nin+Nout)+1;
    NoTrain=min(NoTrain,Ntrain);
    sprintf(s,
      "Estimation: Initial factor %4.1f, %d patterns (%d %%) used\n",
      Estimate,NoTrain,(NoTrain*100)/Ntrain);
  };
  sprintf(IndParamStr,
    "Network: %s (%d patterns)\n"
    "  Topology %d-%d-%d, %d Neurons, %d bits (Weights %d)\n"
    "  Weights in [%6.2f,%6.2f]\n%s%s",
    StdName,Ntrain,Nin,Nhid,Nout,Nhid+Nout,CrBits,Nbits,
    -Width,Width,t,s);
  return 0;
}

/* Error of individual x on the current training window: forward pass
   through the 2 layer net, summed squared output error, averaged over
   the NoTrain patterns starting at OffsTrain. */
errtyp calcerr(ind x)
{
  int p,pp,i,j,k;
  float e,s,d;

  if(Nback)
    backsteps(x,Nback);        /* optional backpropagation steps first */
  p=OffsTrain; e=0.0;
  for(pp=0;pp<NoTrain;pp++)
  {
    for(j=0;j<Nhid;j++)        /* hidden layer activations */
    {
      s=0.0;
      for(i=0;i<Nin;i++)
        s+=weight1(x,i,j)*TrainIn[p][i];
      Hidden[j]=sigma(s+bias1(x,j));
    };
    for(k=0;k<Nout;k++)        /* output layer and error */
    {
      s=0.0;
      for(j=0;j<Nhid;j++) s+=weight2(x,j,k)*Hidden[j];
      d=sigma(s+bias2(x,k))-TrainOut[p][k];
      e+=d*d;
    };
    p++; if(p>=Ntrain) p=0;    /* wrap around the training set */
  };
  GenCalcs+=NoTrain;
  return (0.5*e)/((float)NoTrain);
}
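In formulas: with hidden activations h_j = sigma(sum_i w1[i][j]*in_i + b1_j) and outputs o_k = sigma(sum_j w2[j][k]*h_j + b2_k), calcerr() returns

  E(x) = 1/(2*NoTrain) * sum_p sum_k (o_k(p) - target_k(p))^2,

the halved summed squared error averaged over the NoTrain patterns of the current window.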

void printind(ind x)
{
  int i,j,k,c;

  c=0;
  for(j=0;j<Nhid;j++)
  {
    if(c>70-6*Nin) { printf("\n"); c=0; };
    printf("(");
    for(i=0;i<Nin;i++) printf("%5.2f,",weight1(x,i,j));
    printf("b:%5.2f)",bias1(x,j));
    c+=9+6*Nin;
  };
  printf("\n"); c=0;
  for(k=0;k<Nout;k++)
  {
    if(c>70-6*Nhid) { printf("\n"); c=0; };
    printf("(");
    for(j=0;j<Nhid;j++) printf("%5.2f,",weight2(x,j,k));
    printf("b:%5.2f)",bias2(x,k));
    c+=9+6*Nhid;
  };
  printf("\n");
}

/* Backpropagation */

int initNet()
{
  initStd();
  if(getTrainSet()) return 1;
  Nback=0;   /* not used in pure backpropagation mode */

  sprintf(NetParamStr,
    "Network: %s\n"
    "  Topology %d-%d-%d, %d Neurons, %d patterns\n",
    StdName,Nin,Nhid,Nout,Nhid+Nout,Ntrain);
  return 0;
}
(c) Bernhard Ömer - oemer@tph.tuwien.ac.at - http://tph.tuwien.ac.at/~oemer/