1 | // This file is a part of Framsticks SDK. http://www.framsticks.com/ |
---|
2 | // Copyright (C) 1999-2020 Maciej Komosinski and Szymon Ulatowski. |
---|
3 | // See LICENSE.txt for details. |
---|
4 | |
---|
5 | #include <ctype.h> //isupper() |
---|
6 | #include "genooperators.h" |
---|
7 | #include <common/log.h> |
---|
8 | #include <common/nonstd_math.h> |
---|
9 | #include <frams/util/rndutil.h> |
---|
10 | |
---|
//Custom random distributions used when mutating well-known neuron properties.
//Format presumably follows CustomRnd() (see rndutil.h): first value = number of
//[min,max] range pairs that follow — TODO confirm against CustomRnd docs.

static double distrib_force[] = // for '!' (Force)
{
	3, // distribution 0 -__/ +1
	0.001, 0.2, // "slow" neurons
	0.001, 1,
	1, 1, // "fast" neurons
};
static double distrib_inertia[] = // for '=' (Inertia)
{
	2, // distribution 0 |..- +1
	0, 0, // "fast" neurons
	0.7, 0.98,
};
static double distrib_sigmo[] = // for '/' (Sigmoid)
{
	5, // distribution -999 -..-^-..- +999
	-999, -999, //"perceptron"
	999, 999,
	-5, -1, // nonlinear
	1, 5,
	-1, 1, // ~linear
};
---|
33 | |
---|
34 | |
---|
35 | int GenoOperators::roulette(const double *probtab, const int count) |
---|
36 | { |
---|
37 | double sum = 0; |
---|
38 | int i; |
---|
39 | for (i = 0; i < count; i++) sum += probtab[i]; |
---|
40 | double sel = rndDouble(sum); |
---|
41 | for (sum = 0, i = 0; i < count; i++) { sum += probtab[i]; if (sel < sum) return i; } |
---|
42 | return -1; |
---|
43 | } |
---|
44 | |
---|
45 | bool GenoOperators::getMinMaxDef(ParamInterface *p, int i, double &mn, double &mx, double &def) |
---|
46 | { |
---|
47 | mn = mx = def = 0; |
---|
48 | int defined = 0; |
---|
49 | if (p->type(i)[0] == 'f') |
---|
50 | { |
---|
51 | double _mn = 0, _mx = 1, _def = 0.5; |
---|
52 | defined = p->getMinMaxDouble(i, _mn, _mx, _def); |
---|
53 | if (defined == 1) _mx = _mn + 1000.0; //only min was defined, so let's set some arbitrary range, just to have some freedom. Assumes _mn is not close to maxdouble... |
---|
54 | if (_mx < _mn && defined == 3) //only default was defined, so let's assume some arbitrary range. Again, no check for min/maxdouble... |
---|
55 | { |
---|
56 | _mn = _def - 500.0; |
---|
57 | _mx = _def + 500.0; |
---|
58 | } |
---|
59 | if (defined < 3) _def = (_mn + _mx) / 2.0; |
---|
60 | mn = _mn; mx = _mx; def = _def; |
---|
61 | } |
---|
62 | if (p->type(i)[0] == 'd') |
---|
63 | { |
---|
64 | paInt _mn = 0, _mx = 1, _def = 0; |
---|
65 | defined = p->getMinMaxInt(i, _mn, _mx, _def); |
---|
66 | if (defined == 1) _mx = _mn + 1000; //only min was defined, so let's set some arbitrary range, just to have some freedom. Assumes _mn is not close to maxint... |
---|
67 | if (_mx < _mn && defined == 3) //only default was defined, so let's assume some arbitrary range. Again, no check for min/maxint... |
---|
68 | { |
---|
69 | _mn = _def - 500; |
---|
70 | _mx = _def + 500; |
---|
71 | } |
---|
72 | if (defined < 3) _def = (_mn + _mx) / 2; |
---|
73 | mn = _mn; mx = _mx; def = _def; |
---|
74 | } |
---|
75 | return defined == 3; |
---|
76 | } |
---|
77 | |
---|
78 | bool GenoOperators::mutateRandomNeuronOrNeuroclassProperty(Neuro* n) |
---|
79 | { |
---|
80 | bool mutated = false; |
---|
81 | int prop = selectRandomNeuronProperty(n); |
---|
82 | if (prop >= 0) |
---|
83 | { |
---|
84 | if (prop >= GenoOperators::NEUROCLASS_PROP_OFFSET) |
---|
85 | { |
---|
86 | SyntParam par = n->classProperties(); //commits changes when p is destroyed |
---|
87 | mutated = mutateProperty(par, prop - GenoOperators::NEUROCLASS_PROP_OFFSET); |
---|
88 | } |
---|
89 | else |
---|
90 | { |
---|
91 | Param par = n->extraProperties(); |
---|
92 | mutated = mutateProperty(par, prop); |
---|
93 | } |
---|
94 | } |
---|
95 | return mutated; |
---|
96 | } |
---|
97 | |
---|
98 | int GenoOperators::selectRandomNeuronProperty(Neuro *n) |
---|
99 | { |
---|
100 | int neuext = n->extraProperties().getPropCount(), |
---|
101 | neucls = n->getClass() == NULL ? 0 : n->getClass()->getProperties().getPropCount(); |
---|
102 | if (neuext + neucls == 0) return -1; //no properties in this neuron |
---|
103 | int index = rndUint(neuext + neucls); |
---|
104 | if (index >= neuext) index = index - neuext + NEUROCLASS_PROP_OFFSET; |
---|
105 | return index; |
---|
106 | } |
---|
107 | |
---|
108 | double GenoOperators::mutateNeuronProperty(double current, Neuro *n, int i) |
---|
109 | { |
---|
110 | if (i == -1) return mutateCreepNoLimit('f', current, 2, true); //i==-1: mutating weight of neural connection |
---|
111 | Param p; |
---|
112 | if (i >= NEUROCLASS_PROP_OFFSET) { i -= NEUROCLASS_PROP_OFFSET; p = n->getClass()->getProperties(); } |
---|
113 | else p = n->extraProperties(); |
---|
114 | double newval = current; |
---|
115 | /*bool ok=*/getMutatedProperty(p, i, current, newval); |
---|
116 | return newval; |
---|
117 | } |
---|
118 | |
---|
119 | bool GenoOperators::mutatePropertyNaive(ParamInterface &p, int i) |
---|
120 | { |
---|
121 | double mn, mx, df; |
---|
122 | if (p.type(i)[0] != 'f' && p.type(i)[0] != 'd') return false; //don't know how to mutate |
---|
123 | getMinMaxDef(&p, i, mn, mx, df); |
---|
124 | |
---|
125 | ExtValue ev; |
---|
126 | p.get(i, ev); |
---|
127 | ev.setDouble(mutateCreep(p.type(i)[0], ev.getDouble(), mn, mx, true)); |
---|
128 | p.set(i, ev); |
---|
129 | return true; |
---|
130 | } |
---|
131 | |
---|
132 | bool GenoOperators::mutateProperty(ParamInterface &p, int i) |
---|
133 | { |
---|
134 | double newval; |
---|
135 | ExtValue ev; |
---|
136 | p.get(i, ev); |
---|
137 | bool ok = getMutatedProperty(p, i, ev.getDouble(), newval); |
---|
138 | if (ok) { ev.setDouble(newval); p.set(i, ev); } |
---|
139 | return ok; |
---|
140 | } |
---|
141 | |
---|
142 | bool GenoOperators::getMutatedProperty(ParamInterface &p, int i, double oldval, double &newval) |
---|
143 | { |
---|
144 | newval = 0; |
---|
145 | if (p.type(i)[0] != 'f' && p.type(i)[0] != 'd') return false; //don't know how to mutate |
---|
146 | const char *n = p.id(i), *na = p.name(i); |
---|
147 | if (strcmp(n, "si") == 0 && strcmp(na, "Sigmoid") == 0) newval = CustomRnd(distrib_sigmo); else |
---|
148 | if (strcmp(n, "in") == 0 && strcmp(na, "Inertia") == 0) newval = CustomRnd(distrib_inertia); else |
---|
149 | if (strcmp(n, "fo") == 0 && strcmp(na, "Force") == 0) newval = CustomRnd(distrib_force); else |
---|
150 | { |
---|
151 | double mn, mx, df; |
---|
152 | getMinMaxDef(&p, i, mn, mx, df); |
---|
153 | newval = mutateCreep(p.type(i)[0], oldval, mn, mx, true); |
---|
154 | } |
---|
155 | return true; |
---|
156 | } |
---|
157 | |
---|
158 | double GenoOperators::mutateCreepNoLimit(char type, double current, double stddev, bool limit_precision_3digits) |
---|
159 | { |
---|
160 | double result = RndGen.Gauss(current, stddev); |
---|
161 | if (type == 'd') |
---|
162 | { |
---|
163 | result = int(result + 0.5); |
---|
164 | if (result == current) result += rndUint(2) * 2 - 1; //force some change |
---|
165 | } |
---|
166 | else |
---|
167 | { |
---|
168 | if (limit_precision_3digits) |
---|
169 | result = floor(result * 1000 + 0.5) / 1000.0; //round |
---|
170 | } |
---|
171 | return result; |
---|
172 | } |
---|
173 | |
---|
174 | double GenoOperators::mutateCreep(char type, double current, double mn, double mx, double stddev, bool limit_precision_3digits) |
---|
175 | { |
---|
176 | double result = mutateCreepNoLimit(type, current, stddev, limit_precision_3digits); |
---|
177 | if (result<mn || result>mx) //exceeds boundary, so bring to the allowed range |
---|
178 | { |
---|
179 | //reflect: |
---|
180 | if (result > mx) result = mx - (result - mx); else |
---|
181 | if (result < mn) result = mn + (mn - result); |
---|
182 | //wrap (just in case 'result' exceeded the allowed range so much that after reflection above it exceeded the other boundary): |
---|
183 | if (result > mx) result = mn + fmod(result - mx, mx - mn); else |
---|
184 | if (result < mn) result = mn + fmod(mn - result, mx - mn); |
---|
185 | if (limit_precision_3digits) |
---|
186 | { |
---|
187 | //reflect and wrap above may have changed the (limited) precision, so try to round again (maybe unnecessarily, because we don't know if reflect+wrap above were triggered) |
---|
188 | double result_try = floor(result * 1000 + 0.5) / 1000.0; //round |
---|
189 | if (mn <= result_try && result_try <= mx) result = result_try; //after rounding still witin allowed range, so keep rounded value |
---|
190 | } |
---|
191 | } |
---|
192 | return result; |
---|
193 | } |
---|
194 | |
---|
195 | double GenoOperators::mutateCreep(char type, double current, double mn, double mx, bool limit_precision_3digits) |
---|
196 | { |
---|
197 | double stddev = (mx - mn) / 2 / 5; // magic arbitrary formula for stddev, which becomes /halfinterval, 5 times narrower |
---|
198 | return mutateCreep(type, current, mn, mx, stddev, limit_precision_3digits); |
---|
199 | } |
---|
200 | |
---|
201 | void GenoOperators::setIntFromDoubleWithProbabilisticDithering(ParamInterface &p, int index, double value) //TODO |
---|
202 | { |
---|
203 | p.setInt(index, (paInt)(value + 0.5)); //TODO value=2.499 will result in 2 and 2.5 will result in 3, but we want these cases to be 2 or 3 with almost equal probability. value=2.1 should be mostly 2, rarely 3. Careful with negative values (test it!) |
---|
204 | } |
---|
205 | |
---|
206 | void GenoOperators::linearMix(vector<double> &p1, vector<double> &p2, double proportion) |
---|
207 | { |
---|
208 | if (p1.size() != p2.size()) |
---|
209 | { |
---|
210 | logPrintf("GenoOperators", "linearMix", LOG_ERROR, "Cannot mix vectors of different length (%d and %d)", p1.size(), p2.size()); |
---|
211 | return; |
---|
212 | } |
---|
213 | for (unsigned int i = 0; i < p1.size(); i++) |
---|
214 | { |
---|
215 | double v1 = p1[i]; |
---|
216 | double v2 = p2[i]; |
---|
217 | p1[i] = v1 * proportion + v2 * (1 - proportion); |
---|
218 | p2[i] = v2 * proportion + v1 * (1 - proportion); |
---|
219 | } |
---|
220 | } |
---|
221 | |
---|
222 | void GenoOperators::linearMix(ParamInterface &p1, int i1, ParamInterface &p2, int i2, double proportion) |
---|
223 | { |
---|
224 | char type1 = p1.type(i1)[0]; |
---|
225 | char type2 = p2.type(i2)[0]; |
---|
226 | if (type1 == 'f' && type2 == 'f') |
---|
227 | { |
---|
228 | double v1 = p1.getDouble(i1); |
---|
229 | double v2 = p2.getDouble(i2); |
---|
230 | p1.setDouble(i1, v1 * proportion + v2 * (1 - proportion)); |
---|
231 | p2.setDouble(i2, v2 * proportion + v1 * (1 - proportion)); |
---|
232 | } |
---|
233 | else |
---|
234 | if (type1 == 'd' && type2 == 'd') |
---|
235 | { |
---|
236 | int v1 = p1.getInt(i1); |
---|
237 | int v2 = p2.getInt(i2); |
---|
238 | setIntFromDoubleWithProbabilisticDithering(p1, i1, v1 * proportion + v2 * (1 - proportion)); |
---|
239 | setIntFromDoubleWithProbabilisticDithering(p2, i2, v2 * proportion + v1 * (1 - proportion)); |
---|
240 | } |
---|
241 | else |
---|
242 | logPrintf("GenoOperators", "linearMix", LOG_WARN, "Cannot mix values of types '%c' and '%c'", type1, type2); |
---|
243 | } |
---|
244 | |
---|
245 | int GenoOperators::getActiveNeuroClassCount(Model::ShapeType for_shape_type) |
---|
246 | { |
---|
247 | int count = 0; |
---|
248 | for (int i = 0; i < Neuro::getClassCount(); i++) |
---|
249 | { |
---|
250 | NeuroClass *nc = Neuro::getClass(i); |
---|
251 | if (nc->isShapeTypeSupported(for_shape_type) && nc->genactive) |
---|
252 | count++; |
---|
253 | } |
---|
254 | return count; |
---|
255 | } |
---|
256 | |
---|
257 | NeuroClass *GenoOperators::getRandomNeuroClass(Model::ShapeType for_shape_type) |
---|
258 | { |
---|
259 | vector<NeuroClass *> active; |
---|
260 | for (int i = 0; i < Neuro::getClassCount(); i++) |
---|
261 | { |
---|
262 | NeuroClass *nc = Neuro::getClass(i); |
---|
263 | if (nc->isShapeTypeSupported(for_shape_type) && nc->genactive) |
---|
264 | active.push_back(nc); |
---|
265 | } |
---|
266 | if (active.size() == 0) return NULL; else return active[rndUint(active.size())]; |
---|
267 | } |
---|
268 | |
---|
269 | NeuroClass *GenoOperators::getRandomNeuroClassWithOutput(Model::ShapeType for_shape_type) |
---|
270 | { |
---|
271 | vector<NeuroClass *> active; |
---|
272 | for (int i = 0; i < Neuro::getClassCount(); i++) |
---|
273 | { |
---|
274 | NeuroClass *nc = Neuro::getClass(i); |
---|
275 | if (nc->isShapeTypeSupported(for_shape_type) && nc->genactive && nc->getPreferredOutput() != 0) |
---|
276 | active.push_back(nc); |
---|
277 | } |
---|
278 | if (active.size() == 0) return NULL; else return active[rndUint(active.size())]; |
---|
279 | } |
---|
280 | |
---|
281 | NeuroClass *GenoOperators::getRandomNeuroClassWithInput(Model::ShapeType for_shape_type) |
---|
282 | { |
---|
283 | vector<NeuroClass *> active; |
---|
284 | for (int i = 0; i < Neuro::getClassCount(); i++) |
---|
285 | { |
---|
286 | NeuroClass *nc = Neuro::getClass(i); |
---|
287 | if (nc->isShapeTypeSupported(for_shape_type) && nc->genactive && nc->getPreferredInputs() != 0) |
---|
288 | active.push_back(nc); |
---|
289 | } |
---|
290 | if (active.size() == 0) return NULL; else return active[rndUint(active.size())]; |
---|
291 | } |
---|
292 | |
---|
293 | NeuroClass *GenoOperators::getRandomNeuroClassWithOutputAndNoInputs(Model::ShapeType for_shape_type) |
---|
294 | { |
---|
295 | vector<NeuroClass *> active; |
---|
296 | for (int i = 0; i < Neuro::getClassCount(); i++) |
---|
297 | { |
---|
298 | NeuroClass *nc = Neuro::getClass(i); |
---|
299 | if (nc->isShapeTypeSupported(for_shape_type) && nc->genactive && nc->getPreferredOutput() != 0 && nc->getPreferredInputs() == 0) |
---|
300 | active.push_back(nc); |
---|
301 | } |
---|
302 | if (active.size() == 0) return NULL; else return active[rndUint(active.size())]; |
---|
303 | } |
---|
304 | |
---|
305 | int GenoOperators::getRandomNeuroClassWithOutput(const vector<NeuroClass *> &NClist) |
---|
306 | { |
---|
307 | vector<int> allowed; |
---|
308 | for (size_t i = 0; i < NClist.size(); i++) |
---|
309 | if (NClist[i]->getPreferredOutput() != 0) //this NeuroClass provides output |
---|
310 | allowed.push_back(i); |
---|
311 | if (allowed.size() == 0) return -1; else return allowed[rndUint(allowed.size())]; |
---|
312 | } |
---|
313 | |
---|
314 | int GenoOperators::getRandomNeuroClassWithInput(const vector<NeuroClass *> &NClist) |
---|
315 | { |
---|
316 | vector<int> allowed; |
---|
317 | for (size_t i = 0; i < NClist.size(); i++) |
---|
318 | if (NClist[i]->getPreferredInputs() != 0) //this NeuroClass wants one input connection or more |
---|
319 | allowed.push_back(i); |
---|
320 | if (allowed.size() == 0) return -1; else return allowed[rndUint(allowed.size())]; |
---|
321 | } |
---|
322 | |
---|
323 | int GenoOperators::getRandomChar(const char *choices, const char *excluded) |
---|
324 | { |
---|
325 | int allowed_count = 0; |
---|
326 | for (size_t i = 0; i < strlen(choices); i++) if (!strchrn0(excluded, choices[i])) allowed_count++; |
---|
327 | if (allowed_count == 0) return -1; //no char is allowed |
---|
328 | int rnd_index = rndUint(allowed_count) + 1; |
---|
329 | allowed_count = 0; |
---|
330 | for (size_t i = 0; i < strlen(choices); i++) |
---|
331 | { |
---|
332 | if (!strchrn0(excluded, choices[i])) allowed_count++; |
---|
333 | if (allowed_count == rnd_index) return i; |
---|
334 | } |
---|
335 | return -1; //never happens |
---|
336 | } |
---|
337 | |
---|
338 | NeuroClass *GenoOperators::parseNeuroClass(char *&s) |
---|
339 | { |
---|
340 | int maxlen = (int)strlen(s); |
---|
341 | int NClen = 0; |
---|
342 | NeuroClass *NC = NULL; |
---|
343 | for (int i = 0; i < Neuro::getClassCount(); i++) |
---|
344 | { |
---|
345 | const char *ncname = Neuro::getClass(i)->name.c_str(); |
---|
346 | int ncnamelen = (int)strlen(ncname); |
---|
347 | if (maxlen >= ncnamelen && ncnamelen > NClen && (strncmp(s, ncname, ncnamelen) == 0)) |
---|
348 | { |
---|
349 | NC = Neuro::getClass(i); |
---|
350 | NClen = ncnamelen; |
---|
351 | } |
---|
352 | } |
---|
353 | s += NClen; |
---|
354 | return NC; |
---|
355 | } |
---|
356 | |
---|
357 | Neuro *GenoOperators::findNeuro(const Model *m, const NeuroClass *nc) |
---|
358 | { |
---|
359 | if (!m) return NULL; |
---|
360 | for (int i = 0; i < m->getNeuroCount(); i++) |
---|
361 | if (m->getNeuro(i)->getClass() == nc) return m->getNeuro(i); |
---|
362 | return NULL; //neuron of class 'nc' was not found |
---|
363 | } |
---|
364 | |
---|
365 | int GenoOperators::neuroClassProp(char *&s, NeuroClass *nc, bool also_v1_N_props) |
---|
366 | { |
---|
367 | int len = (int)strlen(s); |
---|
368 | int Len = 0, I = -1; |
---|
369 | if (nc) |
---|
370 | { |
---|
371 | Param p = nc->getProperties(); |
---|
372 | for (int i = 0; i < p.getPropCount(); i++) |
---|
373 | { |
---|
374 | const char *n = p.id(i); |
---|
375 | int l = (int)strlen(n); |
---|
376 | if (len >= l && l > Len && (strncmp(s, n, l) == 0)) { I = NEUROCLASS_PROP_OFFSET + i; Len = l; } |
---|
377 | if (also_v1_N_props) //recognize old properties symbols /=! |
---|
378 | { |
---|
379 | if (strcmp(n, "si") == 0) n = "/"; else |
---|
380 | if (strcmp(n, "in") == 0) n = "="; else |
---|
381 | if (strcmp(n, "fo") == 0) n = "!"; |
---|
382 | l = (int)strlen(n); |
---|
383 | if (len >= l && l > Len && (strncmp(s, n, l) == 0)) { I = NEUROCLASS_PROP_OFFSET + i; Len = l; } |
---|
384 | } |
---|
385 | } |
---|
386 | } |
---|
387 | Neuro n; |
---|
388 | Param p = n.extraProperties(); |
---|
389 | for (int i = 0; i < p.getPropCount(); i++) |
---|
390 | { |
---|
391 | const char *n = p.id(i); |
---|
392 | int l = (int)strlen(n); |
---|
393 | if (len >= l && l > Len && (strncmp(s, n, l) == 0)) { I = i; Len = l; } |
---|
394 | } |
---|
395 | s += Len; |
---|
396 | return I; |
---|
397 | } |
---|
398 | |
---|
399 | bool GenoOperators::isWS(const char c) |
---|
400 | { |
---|
401 | return c == ' ' || c == '\n' || c == '\t' || c == '\r'; |
---|
402 | } |
---|
403 | |
---|
404 | void GenoOperators::skipWS(char *&s) |
---|
405 | { |
---|
406 | if (s == NULL) |
---|
407 | logMessage("GenoOperators", "skipWS", LOG_WARN, "NULL reference!"); |
---|
408 | else |
---|
409 | while (isWS(*s)) s++; |
---|
410 | } |
---|
411 | |
---|
412 | bool GenoOperators::areAlike(char *g1, char *g2) |
---|
413 | { |
---|
414 | while (*g1 || *g2) |
---|
415 | { |
---|
416 | skipWS(g1); |
---|
417 | skipWS(g2); |
---|
418 | if (*g1 != *g2) return false; //when difference |
---|
419 | if (!*g1 && !*g2) break; //both end |
---|
420 | g1++; |
---|
421 | g2++; |
---|
422 | } |
---|
423 | return true; //equal |
---|
424 | } |
---|
425 | |
---|
426 | char *GenoOperators::strchrn0(const char *str, char ch) |
---|
427 | { |
---|
428 | return ch == 0 ? NULL : strchr((char *)str, ch); |
---|
429 | } |
---|
430 | |
---|
431 | bool GenoOperators::canStartNeuroClassName(const char firstchar) |
---|
432 | { |
---|
433 | return isupper(firstchar) || firstchar == '|' || firstchar == '@' || firstchar == '*'; |
---|
434 | } |
---|