source: cpp/frams/genetics/genooperators.cpp @ 1305

Last change on this file since 1305 was 1287, checked in by Maciej Komosinski, 11 months ago

size_t <-> int type casts

  • Property svn:eol-style set to native
File size: 20.0 KB
Line 
1// This file is a part of Framsticks SDK.  http://www.framsticks.com/
2// Copyright (C) 1999-2023  Maciej Komosinski and Szymon Ulatowski.
3// See LICENSE.txt for details.
4
5#include <ctype.h>  //isupper()
6#include <algorithm> // std::min, std::max
7#include <cmath> // std::floor()
8#include "genooperators.h"
9#include <common/log.h>
10#include <common/nonstd_math.h>
11#include <frams/util/rndutil.h>
12
//
// custom distributions for mutations of various parameters
//
// Table format: the first element is the number of [lo, hi] pairs that follow
// (3, 2 and 5 pairs below, respectively). Presumably CustomRnd() picks one pair
// at random and draws a value from that interval, so listing a value range more
// than once increases its selection probability -- confirm against rndutil.
static double distrib_force[] =   // for '!'
{
	3,             // distribution 0 -__/ +1
	0.001, 0.2,    // "slow" neurons
	0.001, 1,
	1, 1,          // "fast" neurons
};
static double distrib_inertia[] =  // for '='
{
	2,             // distribution 0 |..- +1
	0, 0,          // "fast" neurons
	0.7, 0.98,
};
static double distrib_sigmo[] =  // for '/'
{
	5,             // distribution -999 -..-^-..- +999
	-999, -999,    //"perceptron"
	999, 999,
	-5, -1,        // nonlinear
	1, 5,
	-1, 1,         // ~linear
};
// kept for reference; connection weights are mutated by getMutatedNeuronConnectionWeight() instead
/*
static double distrib_weight[] =
{
5,                 // distribution -999 _-^_^-_ +999
-999, 999,         // each weight value may be useful, especially...
-5, -0.3,          // ...little non-zero values
-3, -0.6,
0.6, 3,
0.3, 5,
};
*/
49
50int GenoOperators::roulette(const double *probtab, const int count)
51{
52        double sum = 0;
53        int i;
54        for (i = 0; i < count; i++) sum += probtab[i];
55        double sel = rndDouble(sum);
56        for (sum = 0, i = 0; i < count; i++) { sum += probtab[i]; if (sel < sum) return i; }
57        return -1;
58}
59
// Obtains min, max and default value of property 'i' of 'p', substituting arbitrary
// sensible values for the pieces the property does not define itself.
// Only 'f' (floating point) and 'd' (integer) properties are handled; for other types
// mn/mx/def stay 0. Returns true only when min, max and default were all defined.
// NOTE(review): 'defined' is the value returned by getMinMaxDouble()/getMinMaxInt();
// the comments below suggest 1 = only min defined, 3 = all three fields present
// (possibly with an empty min..max range meaning "only default") -- confirm in ParamInterface.
bool GenoOperators::getMinMaxDef(ParamInterface *p, int i, double &mn, double &mx, double &def)
{
	mn = mx = def = 0;
	int defined = 0;
	if (p->type(i)[0] == 'f')
	{
		double _mn = 0, _mx = 1, _def = 0.5;
		defined = p->getMinMaxDouble(i, _mn, _mx, _def);
		if (defined == 1) _mx = _mn + 1000.0; //only min was defined, so let's set some arbitrary range, just to have some freedom. Assumes _mn is not close to maxdouble...
		if (_mx < _mn && defined == 3) //only default was defined, so let's assume some arbitrary range. Again, no check for min/maxdouble...
		{
			_mn = _def - 500.0;
			_mx = _def + 500.0;
		}
		if (defined < 3) _def = (_mn + _mx) / 2.0; //no default was given, so assume the middle of the range
		mn = _mn; mx = _mx; def = _def;
	}
	if (p->type(i)[0] == 'd')
	{
		paInt _mn = 0, _mx = 1, _def = 0;
		defined = p->getMinMaxInt(i, _mn, _mx, _def);
		if (defined == 1) _mx = _mn + 1000; //only min was defined, so let's set some arbitrary range, just to have some freedom. Assumes _mn is not close to maxint...
		if (_mx < _mn && defined == 3) //only default was defined, so let's assume some arbitrary range. Again, no check for min/maxint...
		{
			_mn = _def - 500;
			_mx = _def + 500;
		}
		if (defined < 3) _def = (_mn + _mx) / 2; //no default was given, so assume the middle of the range (integer division)
		mn = _mn; mx = _mx; def = _def;
	}
	return defined == 3;
}
92
93bool GenoOperators::mutateRandomNeuroClassProperty(Neuro* n)
94{
95        bool mutated = false;
96        int prop = selectRandomNeuroClassProperty(n);
97        if (prop >= 0)
98        {
99                if (prop >= GenoOperators::NEUROCLASS_PROP_OFFSET)
100                {
101                        SyntParam par = n->classProperties();   //commits changes when this object is destroyed
102                        mutated = mutateProperty(par, prop - GenoOperators::NEUROCLASS_PROP_OFFSET);
103                }
104                else
105                {
106                        Param par = n->extraProperties();
107                        mutated = mutateProperty(par, prop);
108                }
109        }
110        return mutated;
111}
112
113int GenoOperators::selectRandomNeuroClassProperty(Neuro *n)
114{
115        int neuext = n->extraProperties().getPropCount(),
116                neucls = n->getClass() == NULL ? 0 : n->getClass()->getProperties().getPropCount();
117        if (neuext + neucls == 0) return -1; //no properties in this neuron
118        int index = rndUint(neuext + neucls);
119        if (index >= neuext) index = index - neuext + NEUROCLASS_PROP_OFFSET;
120        return index;
121}
122
123double GenoOperators::getMutatedNeuroClassProperty(double current, Neuro *n, int i)
124{
125        if (i == -1)
126        {
127                logPrintf("GenoOperators", "getMutatedNeuroClassProperty", LOG_WARN, "Deprecated usage in C++ source: to mutate connection weight, use getMutatedNeuronConnectionWeight().");
128                return getMutatedNeuronConnectionWeight(current);
129        }
130        Param p;
131        if (i >= NEUROCLASS_PROP_OFFSET) { i -= NEUROCLASS_PROP_OFFSET; p = n->getClass()->getProperties(); }
132        else p = n->extraProperties();
133        double newval = current;
134        /*bool ok=*/getMutatedProperty(p, i, current, newval);
135        return newval;
136}
137
138double GenoOperators::getMutatedNeuronConnectionWeight(double current)
139{
140        return mutateCreepNoLimit('f', current, 2, true);
141}
142
143bool GenoOperators::mutatePropertyNaive(ParamInterface &p, int i)
144{
145        double mn, mx, df;
146        if (p.type(i)[0] != 'f' && p.type(i)[0] != 'd') return false; //don't know how to mutate
147        getMinMaxDef(&p, i, mn, mx, df);
148
149        ExtValue ev;
150        p.get(i, ev);
151        ev.setDouble(mutateCreep(p.type(i)[0], ev.getDouble(), mn, mx, true));
152        p.set(i, ev);
153        return true;
154}
155
156bool GenoOperators::mutateProperty(ParamInterface &p, int i)
157{
158        double newval;
159        ExtValue ev;
160        p.get(i, ev);
161        bool ok = getMutatedProperty(p, i, ev.getDouble(), newval);
162        if (ok) { ev.setDouble(newval); p.set(i, ev); }
163        return ok;
164}
165
166bool GenoOperators::getMutatedProperty(ParamInterface &p, int i, double oldval, double &newval)
167{
168        newval = 0;
169        if (p.type(i)[0] != 'f' && p.type(i)[0] != 'd') return false; //don't know how to mutate
170        const char *n = p.id(i), *na = p.name(i);
171        if (strcmp(n, "si") == 0 && strcmp(na, "Sigmoid") == 0) newval = round(CustomRnd(distrib_sigmo), 3); else
172                if (strcmp(n, "in") == 0 && strcmp(na, "Inertia") == 0) newval = round(CustomRnd(distrib_inertia), 3); else
173                        if (strcmp(n, "fo") == 0 && strcmp(na, "Force") == 0) newval = round(CustomRnd(distrib_force), 3); else
174                        {
175                                double mn, mx, df;
176                                getMinMaxDef(&p, i, mn, mx, df);
177                                newval = mutateCreep(p.type(i)[0], oldval, mn, mx, true);
178                        }
179        return true;
180}
181
182double GenoOperators::mutateCreepNoLimit(char type, double current, double stddev, bool limit_precision_3digits)
183{
184        double result = RndGen.Gauss(current, stddev);
185        if (type == 'd')
186        {
187                result = int(result + 0.5);
188                if (result == current) result += rndUint(2) * 2 - 1; //force some change
189        }
190        else
191        {
192                if (limit_precision_3digits)
193                        result = round(result, 3);
194        }
195        return result;
196}
197
// Creep-mutates 'current' within [mn, mx]: Gaussian perturbation (mutateCreepNoLimit()),
// then reflection at the boundaries and, as a fallback, wrapping bring the result back
// into the allowed range.
// NOTE(review): when mx == mn, an out-of-range result reaches fmod(..., mx - mn) == fmod(..., 0),
// which yields NaN -- presumably callers always provide mn < mx; confirm.
double GenoOperators::mutateCreep(char type, double current, double mn, double mx, double stddev, bool limit_precision_3digits)
{
	double result = mutateCreepNoLimit(type, current, stddev, limit_precision_3digits);
	if (result<mn || result>mx) //exceeds boundary, so bring to the allowed range
	{
		//reflect:
		if (result > mx) result = mx - (result - mx); else
			if (result < mn) result = mn + (mn - result);
		//wrap (just in case 'result' exceeded the allowed range so much that after the reflection above it exceeded the other boundary):
		if (result > mx) result = mn + fmod(result - mx, mx - mn); else
			if (result < mn) result = mn + fmod(mn - result, mx - mn);
		if (limit_precision_3digits)
		{
			//reflect and wrap above may have changed the (limited) precision, so try to round again (maybe unnecessarily, because we don't know if reflect+wrap above were triggered)
			double result_try = round(result, 3);
			if (mn <= result_try && result_try <= mx) result = result_try; //after rounding still within allowed range, so keep rounded value
		}
	}
	clipNegativeZeroIfNeeded(result, mn); //so we don't get -0.0 when minimum is 0.0
	return result;
}
219
220double GenoOperators::mutateCreep(char type, double current, double mn, double mx, bool limit_precision_3digits)
221{
222        double stddev = (mx - mn) / 2 / 5; // magic arbitrary formula for stddev, which becomes /halfinterval, 5 times narrower
223        return mutateCreep(type, current, mn, mx, stddev, limit_precision_3digits);
224}
225
226void GenoOperators::setIntFromDoubleWithProbabilisticDithering(ParamInterface &p, int index, double value)
227{
228        // Deterministic rounding to the closest integer:
229        //value += 0.5; // value==2.499 will become int 2 and value==2.5 will become int 3, but we want these cases to be 2 or 3 with almost equal probability (stochastic rounding).
230
231        //stochastic rounding (value==2.1 should turn in most cases to int 2, rarely to int 3; value==-2.1 should become mostly int -2, rarely int -3):
232        double lower = std::floor(value);
233        value = rndDouble(1) < (value - lower) ? lower + 1 : lower;
234
235        p.setInt(index, (paInt)value);
236}
237
238void GenoOperators::linearMix(vector<double> &p1, vector<double> &p2, double proportion)
239{
240        if (p1.size() != p2.size())
241        {
242                logPrintf("GenoOperators", "linearMix", LOG_ERROR, "Cannot mix vectors of different length (%d and %d)", p1.size(), p2.size());
243                return;
244        }
245        for (unsigned int i = 0; i < p1.size(); i++)
246        {
247                double v1 = p1[i];
248                double v2 = p2[i];
249                p1[i] = v1 * proportion + v2 * (1 - proportion);
250                p2[i] = v2 * proportion + v1 * (1 - proportion);
251        }
252}
253
254void GenoOperators::linearMix(ParamInterface &p1, int i1, ParamInterface &p2, int i2, double proportion)
255{
256        char type1 = p1.type(i1)[0];
257        char type2 = p2.type(i2)[0];
258        if (type1 == 'f' && type2 == 'f')
259        {
260                double v1 = p1.getDouble(i1);
261                double v2 = p2.getDouble(i2);
262                p1.setDouble(i1, v1 * proportion + v2 * (1 - proportion));
263                p2.setDouble(i2, v2 * proportion + v1 * (1 - proportion));
264        }
265        else
266                if (type1 == 'd' && type2 == 'd')
267                {
268                        int v1 = p1.getInt(i1);
269                        int v2 = p2.getInt(i2);
270                        setIntFromDoubleWithProbabilisticDithering(p1, i1, v1 * proportion + v2 * (1 - proportion));
271                        setIntFromDoubleWithProbabilisticDithering(p2, i2, v2 * proportion + v1 * (1 - proportion));
272                }
273                else
274                        logPrintf("GenoOperators", "linearMix", LOG_WARN, "Cannot mix values of types '%c' and '%c'", type1, type2);
275}
276
277int GenoOperators::getActiveNeuroClassCount(Model::ShapeType for_shape_type)
278{
279        int count = 0;
280        for (int i = 0; i < Neuro::getClassCount(); i++)
281        {
282                NeuroClass *nc = Neuro::getClass(i);
283                if (nc->isShapeTypeSupported(for_shape_type) && nc->genactive)
284                        count++;
285        }
286        return count;
287}
288
289NeuroClass *GenoOperators::getRandomNeuroClass(Model::ShapeType for_shape_type)
290{
291        vector<NeuroClass *> active;
292        for (int i = 0; i < Neuro::getClassCount(); i++)
293        {
294                NeuroClass *nc = Neuro::getClass(i);
295                if (nc->isShapeTypeSupported(for_shape_type) && nc->genactive)
296                        active.push_back(nc);
297        }
298        if (active.size() == 0) return NULL; else return active[rndUint(active.size())];
299}
300
301NeuroClass *GenoOperators::getRandomNeuroClassWithOutput(Model::ShapeType for_shape_type)
302{
303        vector<NeuroClass *> active;
304        for (int i = 0; i < Neuro::getClassCount(); i++)
305        {
306                NeuroClass *nc = Neuro::getClass(i);
307                if (nc->isShapeTypeSupported(for_shape_type) && nc->genactive && nc->getPreferredOutput() != 0)
308                        active.push_back(nc);
309        }
310        if (active.size() == 0) return NULL; else return active[rndUint(active.size())];
311}
312
313NeuroClass *GenoOperators::getRandomNeuroClassWithInput(Model::ShapeType for_shape_type)
314{
315        vector<NeuroClass *> active;
316        for (int i = 0; i < Neuro::getClassCount(); i++)
317        {
318                NeuroClass *nc = Neuro::getClass(i);
319                if (nc->isShapeTypeSupported(for_shape_type) && nc->genactive && nc->getPreferredInputs() != 0)
320                        active.push_back(nc);
321        }
322        if (active.size() == 0) return NULL; else return active[rndUint(active.size())];
323}
324
325NeuroClass *GenoOperators::getRandomNeuroClassWithOutputAndWantingNoInputs(Model::ShapeType for_shape_type)
326{
327        vector<NeuroClass *> active;
328        for (int i = 0; i < Neuro::getClassCount(); i++)
329        {
330                NeuroClass *nc = Neuro::getClass(i);
331                if (nc->isShapeTypeSupported(for_shape_type) && nc->genactive && nc->getPreferredOutput() != 0 && nc->getPreferredInputs() == 0)
332                        active.push_back(nc);
333        }
334        if (active.size() == 0) return NULL; else return active[rndUint(active.size())];
335}
336
337NeuroClass *GenoOperators::getRandomNeuroClassWithOutputAndWantingNoOrAnyInputs(Model::ShapeType for_shape_type)
338{
339        vector<NeuroClass *> active;
340        for (int i = 0; i < Neuro::getClassCount(); i++)
341        {
342                NeuroClass *nc = Neuro::getClass(i);
343                if (nc->isShapeTypeSupported(for_shape_type) && nc->genactive && nc->getPreferredOutput() != 0 && nc->getPreferredInputs() <= 0) // getPreferredInputs() should be 0 or -1 (any)
344                        active.push_back(nc);
345        }
346        if (active.size() == 0) return NULL; else return active[rndUint(active.size())];
347}
348
349int GenoOperators::getRandomNeuroClassWithOutput(const vector<NeuroClass *> &NClist)
350{
351        vector<int> allowed;
352        for (int i = 0; i < (int)NClist.size(); i++)
353                if (NClist[i]->getPreferredOutput() != 0) //this NeuroClass provides output
354                        allowed.push_back(i);
355        if (allowed.size() == 0) return -1; else return allowed[rndUint(allowed.size())];
356}
357
358int GenoOperators::getRandomNeuroClassWithInput(const vector<NeuroClass *> &NClist)
359{
360        vector<int> allowed;
361        for (int i = 0; i < (int)NClist.size(); i++)
362                if (NClist[i]->getPreferredInputs() != 0) //this NeuroClass wants one input connection or more                 
363                        allowed.push_back(i);
364        if (allowed.size() == 0) return -1; else return allowed[rndUint(allowed.size())];
365}
366
367NeuroClass *GenoOperators::parseNeuroClass(char *&s, ModelEnum::ShapeType for_shape_type)
368{
369        int maxlen = (int)strlen(s);
370        int NClen = 0;
371        NeuroClass *NC = NULL;
372        for (int i = 0; i < Neuro::getClassCount(); i++)
373        {
374                NeuroClass *nci = Neuro::getClass(i);
375                if (!nci->isShapeTypeSupported(for_shape_type))
376                        continue;
377                const char *nciname = nci->name.c_str();
378                int ncinamelen = (int)strlen(nciname);
379                if (maxlen >= ncinamelen && ncinamelen > NClen && (strncmp(s, nciname, ncinamelen) == 0))
380                {
381                        NC = nci;
382                        NClen = ncinamelen;
383                }
384        }
385        s += NClen;
386        return NC;
387}
388
389Neuro *GenoOperators::findNeuro(const Model *m, const NeuroClass *nc)
390{
391        if (!m) return NULL;
392        for (int i = 0; i < m->getNeuroCount(); i++)
393                if (m->getNeuro(i)->getClass() == nc) return m->getNeuro(i);
394        return NULL; //neuron of class 'nc' was not found
395}
396
// Recognizes a property name at the beginning of 's' (longest match wins) and advances
// 's' past the matched name. Candidates are: class properties of 'nc' (returned with
// NEUROCLASS_PROP_OFFSET added to the index) and the "extra" properties common to all
// neurons (returned as a plain 0-based index). When 'also_v1_N_props' is set, the old
// f1 one-character symbols '/', '=', '!' are accepted as aliases for "si", "in", "fo".
// Returns -1 when nothing matches (then 's' is not advanced).
int GenoOperators::neuroClassProp(char *&s, NeuroClass *nc, bool also_v1_N_props)
{
	int len = (int)strlen(s); //number of characters available in 's'
	int Len = 0, I = -1; //length and index of the longest match found so far
	if (nc)
	{
		Param p = nc->getProperties();
		for (int i = 0; i < p.getPropCount(); i++)
		{
			const char *n = p.id(i);
			int l = (int)strlen(n);
			if (len >= l && l > Len && (strncmp(s, n, l) == 0)) { I = NEUROCLASS_PROP_OFFSET + i; Len = l; }
			if (also_v1_N_props) //recognize old symbols of properties:  /=!
			{
				if (strcmp(n, "si") == 0) n = "/"; else
					if (strcmp(n, "in") == 0) n = "="; else
						if (strcmp(n, "fo") == 0) n = "!";
				l = (int)strlen(n);
				if (len >= l && l > Len && (strncmp(s, n, l) == 0)) { I = NEUROCLASS_PROP_OFFSET + i; Len = l; }
			}
		}
	}
	Neuro n; //temporary neuron, used only to enumerate the "extra" properties
	Param p = n.extraProperties();
	for (int i = 0; i < p.getPropCount(); i++)
	{
		const char *n = p.id(i); //NOTE: deliberately shadows the 'Neuro n' above
		int l = (int)strlen(n);
		if (len >= l && l > Len && (strncmp(s, n, l) == 0)) { I = i; Len = l; }
	}
	s += Len;
	return I;
}
430
431bool GenoOperators::canStartNeuroClassName(const char firstchar)
432{
433        return isupper(firstchar) || firstchar == '|' || firstchar == '@' || firstchar == '*';
434}
435
436bool GenoOperators::isWS(const char c)
437{
438        return c == ' ' || c == '\n' || c == '\t' || c == '\r';
439}
440
441void GenoOperators::skipWS(char *&s)
442{
443        if (s == NULL)
444                logMessage("GenoOperators", "skipWS", LOG_WARN, "NULL reference!");
445        else
446                while (isWS(*s)) s++;
447}
448
449bool GenoOperators::areAlike(char *g1, char *g2)
450{
451        while (*g1 || *g2)
452        {
453                skipWS(g1);
454                skipWS(g2);
455                if (*g1 != *g2) return false; //when difference
456                if (!*g1 && !*g2) break; //both end
457                g1++;
458                g2++;
459        }
460        return true; //equal
461}
462
463char *GenoOperators::strchrn0(const char *str, char ch)
464{
465        return ch == 0 ? NULL : strchr((char *)str, ch);
466}
467
468int GenoOperators::getRandomChar(const char *choices, const char *excluded)
469{
470        int allowed_count = 0;
471        for (size_t i = 0; i < strlen(choices); i++) if (!strchrn0(excluded, choices[i])) allowed_count++;
472        if (allowed_count == 0) return -1; //no char is allowed
473        int rnd_index = rndUint(allowed_count) + 1;
474        allowed_count = 0;
475        for (size_t i = 0; i < strlen(choices); i++)
476        {
477                if (!strchrn0(excluded, choices[i])) allowed_count++;
478                if (allowed_count == rnd_index) return int(i);
479        }
480        return -1; //never happens
481}
482
483string GenoOperators::simplifiedModifiers_rR(const string& str)
484{
485        int R = 0; //positive means more 'R', negative means more 'r'
486        for (char c : str)
487        {
488                if (c == 'R') R++; else
489                        if (c == 'r') R--;
490        }
491        R %= 8; // 8 * 45 degrees = 360 degrees. After this, we get R=-7..+7
492
493        /* now, simplify homogeneous sequences of rR longer than 4: for example, rrrrr == RRR and RRRRRR == rr
494        -7      1
495        -6      2
496        -5      3
497        -4      -4 (or 4; we choose +4 meaning we will never see rrrr)
498        -3..3      (no changes)
499        4       4 (or -4)
500        5       -3
501        6       -2
502        7       -1
503        */
504        if (R <= -4) R += 8; //-4 => +4
505        else if (R >= 5) R -= 8;
506
507        return R == 0 ? "" : (R > 0 ? string(R, 'R') : string(-R, 'r'));
508}
509
//#include <cassert>
// Builds a simplified modifier string from per-character counts.
// 'str_of_char_pairs' lists antagonistic modifiers in pairs (each even-index char and the
// following odd-index char have opposite influence); 'char_counts' holds the occurrence
// count of each of those characters. Opposite counts cancel out, the surplus is capped at
// MAX_NUMBER_SAME_TYPE per pair, and the resulting characters are emitted.
// NOTE: 'char_counts' is modified in place (odd entries become stale after cancellation).
// With CLUMP_IDENTICAL_MODIFIERS the output groups identical characters together; by default
// characters of different types are interleaved round-robin to equalize their effects.
string GenoOperators::simplifiedModifiersFixedOrder(const char *str_of_char_pairs, vector<int> &char_counts)
{
	//	assert(strlen(str_of_char_pairs) == char_counts.size());
	//	assert(char_counts.size() % 2 == 0);
	const int MAX_NUMBER_SAME_TYPE = 8; // max. number of modifiers of each type (case-sensitive) - mainly for rR, even though for rR, 4 would be sufficient if we assume lower or upper can be chosen as required for minimal length just as simplifiedModifiers_rR() does, e.g. rrrrr==RRR, RRRRRR==rr
	string simplified;
	//#define CLUMP_IDENTICAL_MODIFIERS //if GeneProps::normalizeBiol4() is used, this is not good because properties are calculated incrementally, non-linearly, their values are updated after each modifier character and some properties interact with each other due to normalization so they can saturate when clumped, therefore it is better keep the modifiers dispersed to equalize their effects
#ifdef CLUMP_IDENTICAL_MODIFIERS
	for (size_t i = 0; i < strlen(str_of_char_pairs); i++)
		if ((i % 2) == 0) //only even index "i" in str_of_char_pairs
			for (int j = 0; j < std::min(MAX_NUMBER_SAME_TYPE, abs(char_counts[i] - char_counts[i + 1])); j++) //assume that an even-index char and the following odd-index char have the opposite influence, so they cancel out.
				simplified += str_of_char_pairs[i + (char_counts[i + 1] > char_counts[i])]; //inner loop adds a sequence of same chars such as rrrrr or QQQ
#else
	for (size_t i = 0; i < strlen(str_of_char_pairs); i++)
		if ((i % 2) == 0) //only even index "i" in str_of_char_pairs
		{
			char_counts[i] -= char_counts[i + 1]; //from now on, even items in the vector store the difference between antagonistic modifier symbols; odd items are not needed
			char_counts[i] = std::min(std::max(char_counts[i], -MAX_NUMBER_SAME_TYPE), MAX_NUMBER_SAME_TYPE); //cap the surplus at +-MAX_NUMBER_SAME_TYPE
		}
	int remaining; //sum of |surplus| still to be emitted; loop ends when all counters reach zero
	do {
		remaining = 0;
		for (size_t i = 0; i < strlen(str_of_char_pairs); i++)
			if ((i % 2) == 0) //only even index "i" in str_of_char_pairs
				if (char_counts[i] != 0)
				{
					simplified += str_of_char_pairs[i + (char_counts[i] < 0)]; //emit the even-index char for a positive surplus, the odd-index char for a negative one
					char_counts[i] += char_counts[i] > 0 ? -1 : +1; //decrease the difference towards zero
					remaining += abs(char_counts[i]);
				}
	} while (remaining > 0);
#endif
	return simplified;
}
545
546string GenoOperators::simplifiedModifiers(const string & original)
547{
548        const int MAX_NUMBER_SAME_TYPE = 5; // max. number of modifiers of each type (case-insensitive). The more characters, the closer we can get to min and max values of a given property at the expense of the length of evolved genotypes. 5 is "close enough", but how close we get to the extreme also depends on the initial value of a given property, which is not always exactly in the middle of min and max. rR is treated separately in simplification because their influence follows different (i.e., simple additive) logic - so the simplifiedModifiersFixedOrder() logic with cancelling out antagonistic modifiers would be appropriate for rR.
549        int counter[256] = {}; //initialize with zeros; 256 is unnecessarily too big and redundant, but enables very fast access (indexed directly by the ascii code)
550        string simplified = "";
551        for (int i = int(original.size()) - 1; i >= 0; i--) //iterate from end to begin so it is easier to remove "oldest" = first modifiers
552        {
553                unsigned char c = original[i];
554                if (!std::isalpha(c) || c == 'r' || c == 'R') //ignore non-alphabet characters; also, 'r' and 'R' are handled separately by simplifiedModifiers_rR()
555                        continue;
556                unsigned char lower = std::tolower(c);
557                counter[lower]++;
558                if (counter[lower] <= MAX_NUMBER_SAME_TYPE) //get rid of modifiers that are too numerous, but get rid of the first ones in the string (="oldest", the last ones looking from the end), because their influence on the parameter value is the smallest
559                        simplified += c;
560        }
561        std::reverse(simplified.begin(), simplified.end()); //"simplified" was built in reverse order, so need to restore the order that corresponds to "original"
562        return simplifiedModifiers_rR(original) + simplified;
563}
Note: See TracBrowser for help on using the repository browser.