├── .gitignore
├── LICENSE
├── README.md
├── main.go
├── media
│   ├── first_example.png
│   └── second_example.png
├── model
│   └── neural
│       ├── multiLayerNetwork.go
│       ├── neuralLayer.go
│       ├── neuronUnit.go
│       ├── pattern.go
│       └── transferFunction.go
├── res
│   ├── iris.all_data.csv
│   └── sonar.all_data.csv
├── util
│   └── util.go
└── validation
    └── validation.go
/.gitignore:
--------------------------------------------------------------------------------
1 | # Binaries for programs and plugins
2 | *.exe
3 | *.dll
4 | *.so
5 | *.dylib
6 |
7 | # Test binary, build with `go test -c`
8 | *.test
9 |
10 | # Output of the go coverage tool, specifically when used with LiteIDE
11 | *.out
12 |
13 | # Project-local glide cache, RE: https://github.com/Masterminds/glide/issues/736
14 | .glide/
15 | .idea/
16 | run.sh
17 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | GNU GENERAL PUBLIC LICENSE
2 | Version 3, 29 June 2007
3 |
4 | Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>
5 | Everyone is permitted to copy and distribute verbatim copies
6 | of this license document, but changing it is not allowed.
7 |
8 | Preamble
9 |
10 | The GNU General Public License is a free, copyleft license for
11 | software and other kinds of works.
12 |
13 | The licenses for most software and other practical works are designed
14 | to take away your freedom to share and change the works. By contrast,
15 | the GNU General Public License is intended to guarantee your freedom to
16 | share and change all versions of a program--to make sure it remains free
17 | software for all its users. We, the Free Software Foundation, use the
18 | GNU General Public License for most of our software; it applies also to
19 | any other work released this way by its authors. You can apply it to
20 | your programs, too.
21 |
22 | When we speak of free software, we are referring to freedom, not
23 | price. Our General Public Licenses are designed to make sure that you
24 | have the freedom to distribute copies of free software (and charge for
25 | them if you wish), that you receive source code or can get it if you
26 | want it, that you can change the software or use pieces of it in new
27 | free programs, and that you know you can do these things.
28 |
29 | To protect your rights, we need to prevent others from denying you
30 | these rights or asking you to surrender the rights. Therefore, you have
31 | certain responsibilities if you distribute copies of the software, or if
32 | you modify it: responsibilities to respect the freedom of others.
33 |
34 | For example, if you distribute copies of such a program, whether
35 | gratis or for a fee, you must pass on to the recipients the same
36 | freedoms that you received. You must make sure that they, too, receive
37 | or can get the source code. And you must show them these terms so they
38 | know their rights.
39 |
40 | Developers that use the GNU GPL protect your rights with two steps:
41 | (1) assert copyright on the software, and (2) offer you this License
42 | giving you legal permission to copy, distribute and/or modify it.
43 |
44 | For the developers' and authors' protection, the GPL clearly explains
45 | that there is no warranty for this free software. For both users' and
46 | authors' sake, the GPL requires that modified versions be marked as
47 | changed, so that their problems will not be attributed erroneously to
48 | authors of previous versions.
49 |
50 | Some devices are designed to deny users access to install or run
51 | modified versions of the software inside them, although the manufacturer
52 | can do so. This is fundamentally incompatible with the aim of
53 | protecting users' freedom to change the software. The systematic
54 | pattern of such abuse occurs in the area of products for individuals to
55 | use, which is precisely where it is most unacceptable. Therefore, we
56 | have designed this version of the GPL to prohibit the practice for those
57 | products. If such problems arise substantially in other domains, we
58 | stand ready to extend this provision to those domains in future versions
59 | of the GPL, as needed to protect the freedom of users.
60 |
61 | Finally, every program is threatened constantly by software patents.
62 | States should not allow patents to restrict development and use of
63 | software on general-purpose computers, but in those that do, we wish to
64 | avoid the special danger that patents applied to a free program could
65 | make it effectively proprietary. To prevent this, the GPL assures that
66 | patents cannot be used to render the program non-free.
67 |
68 | The precise terms and conditions for copying, distribution and
69 | modification follow.
70 |
71 | TERMS AND CONDITIONS
72 |
73 | 0. Definitions.
74 |
75 | "This License" refers to version 3 of the GNU General Public License.
76 |
77 | "Copyright" also means copyright-like laws that apply to other kinds of
78 | works, such as semiconductor masks.
79 |
80 | "The Program" refers to any copyrightable work licensed under this
81 | License. Each licensee is addressed as "you". "Licensees" and
82 | "recipients" may be individuals or organizations.
83 |
84 | To "modify" a work means to copy from or adapt all or part of the work
85 | in a fashion requiring copyright permission, other than the making of an
86 | exact copy. The resulting work is called a "modified version" of the
87 | earlier work or a work "based on" the earlier work.
88 |
89 | A "covered work" means either the unmodified Program or a work based
90 | on the Program.
91 |
92 | To "propagate" a work means to do anything with it that, without
93 | permission, would make you directly or secondarily liable for
94 | infringement under applicable copyright law, except executing it on a
95 | computer or modifying a private copy. Propagation includes copying,
96 | distribution (with or without modification), making available to the
97 | public, and in some countries other activities as well.
98 |
99 | To "convey" a work means any kind of propagation that enables other
100 | parties to make or receive copies. Mere interaction with a user through
101 | a computer network, with no transfer of a copy, is not conveying.
102 |
103 | An interactive user interface displays "Appropriate Legal Notices"
104 | to the extent that it includes a convenient and prominently visible
105 | feature that (1) displays an appropriate copyright notice, and (2)
106 | tells the user that there is no warranty for the work (except to the
107 | extent that warranties are provided), that licensees may convey the
108 | work under this License, and how to view a copy of this License. If
109 | the interface presents a list of user commands or options, such as a
110 | menu, a prominent item in the list meets this criterion.
111 |
112 | 1. Source Code.
113 |
114 | The "source code" for a work means the preferred form of the work
115 | for making modifications to it. "Object code" means any non-source
116 | form of a work.
117 |
118 | A "Standard Interface" means an interface that either is an official
119 | standard defined by a recognized standards body, or, in the case of
120 | interfaces specified for a particular programming language, one that
121 | is widely used among developers working in that language.
122 |
123 | The "System Libraries" of an executable work include anything, other
124 | than the work as a whole, that (a) is included in the normal form of
125 | packaging a Major Component, but which is not part of that Major
126 | Component, and (b) serves only to enable use of the work with that
127 | Major Component, or to implement a Standard Interface for which an
128 | implementation is available to the public in source code form. A
129 | "Major Component", in this context, means a major essential component
130 | (kernel, window system, and so on) of the specific operating system
131 | (if any) on which the executable work runs, or a compiler used to
132 | produce the work, or an object code interpreter used to run it.
133 |
134 | The "Corresponding Source" for a work in object code form means all
135 | the source code needed to generate, install, and (for an executable
136 | work) run the object code and to modify the work, including scripts to
137 | control those activities. However, it does not include the work's
138 | System Libraries, or general-purpose tools or generally available free
139 | programs which are used unmodified in performing those activities but
140 | which are not part of the work. For example, Corresponding Source
141 | includes interface definition files associated with source files for
142 | the work, and the source code for shared libraries and dynamically
143 | linked subprograms that the work is specifically designed to require,
144 | such as by intimate data communication or control flow between those
145 | subprograms and other parts of the work.
146 |
147 | The Corresponding Source need not include anything that users
148 | can regenerate automatically from other parts of the Corresponding
149 | Source.
150 |
151 | The Corresponding Source for a work in source code form is that
152 | same work.
153 |
154 | 2. Basic Permissions.
155 |
156 | All rights granted under this License are granted for the term of
157 | copyright on the Program, and are irrevocable provided the stated
158 | conditions are met. This License explicitly affirms your unlimited
159 | permission to run the unmodified Program. The output from running a
160 | covered work is covered by this License only if the output, given its
161 | content, constitutes a covered work. This License acknowledges your
162 | rights of fair use or other equivalent, as provided by copyright law.
163 |
164 | You may make, run and propagate covered works that you do not
165 | convey, without conditions so long as your license otherwise remains
166 | in force. You may convey covered works to others for the sole purpose
167 | of having them make modifications exclusively for you, or provide you
168 | with facilities for running those works, provided that you comply with
169 | the terms of this License in conveying all material for which you do
170 | not control copyright. Those thus making or running the covered works
171 | for you must do so exclusively on your behalf, under your direction
172 | and control, on terms that prohibit them from making any copies of
173 | your copyrighted material outside their relationship with you.
174 |
175 | Conveying under any other circumstances is permitted solely under
176 | the conditions stated below. Sublicensing is not allowed; section 10
177 | makes it unnecessary.
178 |
179 | 3. Protecting Users' Legal Rights From Anti-Circumvention Law.
180 |
181 | No covered work shall be deemed part of an effective technological
182 | measure under any applicable law fulfilling obligations under article
183 | 11 of the WIPO copyright treaty adopted on 20 December 1996, or
184 | similar laws prohibiting or restricting circumvention of such
185 | measures.
186 |
187 | When you convey a covered work, you waive any legal power to forbid
188 | circumvention of technological measures to the extent such circumvention
189 | is effected by exercising rights under this License with respect to
190 | the covered work, and you disclaim any intention to limit operation or
191 | modification of the work as a means of enforcing, against the work's
192 | users, your or third parties' legal rights to forbid circumvention of
193 | technological measures.
194 |
195 | 4. Conveying Verbatim Copies.
196 |
197 | You may convey verbatim copies of the Program's source code as you
198 | receive it, in any medium, provided that you conspicuously and
199 | appropriately publish on each copy an appropriate copyright notice;
200 | keep intact all notices stating that this License and any
201 | non-permissive terms added in accord with section 7 apply to the code;
202 | keep intact all notices of the absence of any warranty; and give all
203 | recipients a copy of this License along with the Program.
204 |
205 | You may charge any price or no price for each copy that you convey,
206 | and you may offer support or warranty protection for a fee.
207 |
208 | 5. Conveying Modified Source Versions.
209 |
210 | You may convey a work based on the Program, or the modifications to
211 | produce it from the Program, in the form of source code under the
212 | terms of section 4, provided that you also meet all of these conditions:
213 |
214 | a) The work must carry prominent notices stating that you modified
215 | it, and giving a relevant date.
216 |
217 | b) The work must carry prominent notices stating that it is
218 | released under this License and any conditions added under section
219 | 7. This requirement modifies the requirement in section 4 to
220 | "keep intact all notices".
221 |
222 | c) You must license the entire work, as a whole, under this
223 | License to anyone who comes into possession of a copy. This
224 | License will therefore apply, along with any applicable section 7
225 | additional terms, to the whole of the work, and all its parts,
226 | regardless of how they are packaged. This License gives no
227 | permission to license the work in any other way, but it does not
228 | invalidate such permission if you have separately received it.
229 |
230 | d) If the work has interactive user interfaces, each must display
231 | Appropriate Legal Notices; however, if the Program has interactive
232 | interfaces that do not display Appropriate Legal Notices, your
233 | work need not make them do so.
234 |
235 | A compilation of a covered work with other separate and independent
236 | works, which are not by their nature extensions of the covered work,
237 | and which are not combined with it such as to form a larger program,
238 | in or on a volume of a storage or distribution medium, is called an
239 | "aggregate" if the compilation and its resulting copyright are not
240 | used to limit the access or legal rights of the compilation's users
241 | beyond what the individual works permit. Inclusion of a covered work
242 | in an aggregate does not cause this License to apply to the other
243 | parts of the aggregate.
244 |
245 | 6. Conveying Non-Source Forms.
246 |
247 | You may convey a covered work in object code form under the terms
248 | of sections 4 and 5, provided that you also convey the
249 | machine-readable Corresponding Source under the terms of this License,
250 | in one of these ways:
251 |
252 | a) Convey the object code in, or embodied in, a physical product
253 | (including a physical distribution medium), accompanied by the
254 | Corresponding Source fixed on a durable physical medium
255 | customarily used for software interchange.
256 |
257 | b) Convey the object code in, or embodied in, a physical product
258 | (including a physical distribution medium), accompanied by a
259 | written offer, valid for at least three years and valid for as
260 | long as you offer spare parts or customer support for that product
261 | model, to give anyone who possesses the object code either (1) a
262 | copy of the Corresponding Source for all the software in the
263 | product that is covered by this License, on a durable physical
264 | medium customarily used for software interchange, for a price no
265 | more than your reasonable cost of physically performing this
266 | conveying of source, or (2) access to copy the
267 | Corresponding Source from a network server at no charge.
268 |
269 | c) Convey individual copies of the object code with a copy of the
270 | written offer to provide the Corresponding Source. This
271 | alternative is allowed only occasionally and noncommercially, and
272 | only if you received the object code with such an offer, in accord
273 | with subsection 6b.
274 |
275 | d) Convey the object code by offering access from a designated
276 | place (gratis or for a charge), and offer equivalent access to the
277 | Corresponding Source in the same way through the same place at no
278 | further charge. You need not require recipients to copy the
279 | Corresponding Source along with the object code. If the place to
280 | copy the object code is a network server, the Corresponding Source
281 | may be on a different server (operated by you or a third party)
282 | that supports equivalent copying facilities, provided you maintain
283 | clear directions next to the object code saying where to find the
284 | Corresponding Source. Regardless of what server hosts the
285 | Corresponding Source, you remain obligated to ensure that it is
286 | available for as long as needed to satisfy these requirements.
287 |
288 | e) Convey the object code using peer-to-peer transmission, provided
289 | you inform other peers where the object code and Corresponding
290 | Source of the work are being offered to the general public at no
291 | charge under subsection 6d.
292 |
293 | A separable portion of the object code, whose source code is excluded
294 | from the Corresponding Source as a System Library, need not be
295 | included in conveying the object code work.
296 |
297 | A "User Product" is either (1) a "consumer product", which means any
298 | tangible personal property which is normally used for personal, family,
299 | or household purposes, or (2) anything designed or sold for incorporation
300 | into a dwelling. In determining whether a product is a consumer product,
301 | doubtful cases shall be resolved in favor of coverage. For a particular
302 | product received by a particular user, "normally used" refers to a
303 | typical or common use of that class of product, regardless of the status
304 | of the particular user or of the way in which the particular user
305 | actually uses, or expects or is expected to use, the product. A product
306 | is a consumer product regardless of whether the product has substantial
307 | commercial, industrial or non-consumer uses, unless such uses represent
308 | the only significant mode of use of the product.
309 |
310 | "Installation Information" for a User Product means any methods,
311 | procedures, authorization keys, or other information required to install
312 | and execute modified versions of a covered work in that User Product from
313 | a modified version of its Corresponding Source. The information must
314 | suffice to ensure that the continued functioning of the modified object
315 | code is in no case prevented or interfered with solely because
316 | modification has been made.
317 |
318 | If you convey an object code work under this section in, or with, or
319 | specifically for use in, a User Product, and the conveying occurs as
320 | part of a transaction in which the right of possession and use of the
321 | User Product is transferred to the recipient in perpetuity or for a
322 | fixed term (regardless of how the transaction is characterized), the
323 | Corresponding Source conveyed under this section must be accompanied
324 | by the Installation Information. But this requirement does not apply
325 | if neither you nor any third party retains the ability to install
326 | modified object code on the User Product (for example, the work has
327 | been installed in ROM).
328 |
329 | The requirement to provide Installation Information does not include a
330 | requirement to continue to provide support service, warranty, or updates
331 | for a work that has been modified or installed by the recipient, or for
332 | the User Product in which it has been modified or installed. Access to a
333 | network may be denied when the modification itself materially and
334 | adversely affects the operation of the network or violates the rules and
335 | protocols for communication across the network.
336 |
337 | Corresponding Source conveyed, and Installation Information provided,
338 | in accord with this section must be in a format that is publicly
339 | documented (and with an implementation available to the public in
340 | source code form), and must require no special password or key for
341 | unpacking, reading or copying.
342 |
343 | 7. Additional Terms.
344 |
345 | "Additional permissions" are terms that supplement the terms of this
346 | License by making exceptions from one or more of its conditions.
347 | Additional permissions that are applicable to the entire Program shall
348 | be treated as though they were included in this License, to the extent
349 | that they are valid under applicable law. If additional permissions
350 | apply only to part of the Program, that part may be used separately
351 | under those permissions, but the entire Program remains governed by
352 | this License without regard to the additional permissions.
353 |
354 | When you convey a copy of a covered work, you may at your option
355 | remove any additional permissions from that copy, or from any part of
356 | it. (Additional permissions may be written to require their own
357 | removal in certain cases when you modify the work.) You may place
358 | additional permissions on material, added by you to a covered work,
359 | for which you have or can give appropriate copyright permission.
360 |
361 | Notwithstanding any other provision of this License, for material you
362 | add to a covered work, you may (if authorized by the copyright holders of
363 | that material) supplement the terms of this License with terms:
364 |
365 | a) Disclaiming warranty or limiting liability differently from the
366 | terms of sections 15 and 16 of this License; or
367 |
368 | b) Requiring preservation of specified reasonable legal notices or
369 | author attributions in that material or in the Appropriate Legal
370 | Notices displayed by works containing it; or
371 |
372 | c) Prohibiting misrepresentation of the origin of that material, or
373 | requiring that modified versions of such material be marked in
374 | reasonable ways as different from the original version; or
375 |
376 | d) Limiting the use for publicity purposes of names of licensors or
377 | authors of the material; or
378 |
379 | e) Declining to grant rights under trademark law for use of some
380 | trade names, trademarks, or service marks; or
381 |
382 | f) Requiring indemnification of licensors and authors of that
383 | material by anyone who conveys the material (or modified versions of
384 | it) with contractual assumptions of liability to the recipient, for
385 | any liability that these contractual assumptions directly impose on
386 | those licensors and authors.
387 |
388 | All other non-permissive additional terms are considered "further
389 | restrictions" within the meaning of section 10. If the Program as you
390 | received it, or any part of it, contains a notice stating that it is
391 | governed by this License along with a term that is a further
392 | restriction, you may remove that term. If a license document contains
393 | a further restriction but permits relicensing or conveying under this
394 | License, you may add to a covered work material governed by the terms
395 | of that license document, provided that the further restriction does
396 | not survive such relicensing or conveying.
397 |
398 | If you add terms to a covered work in accord with this section, you
399 | must place, in the relevant source files, a statement of the
400 | additional terms that apply to those files, or a notice indicating
401 | where to find the applicable terms.
402 |
403 | Additional terms, permissive or non-permissive, may be stated in the
404 | form of a separately written license, or stated as exceptions;
405 | the above requirements apply either way.
406 |
407 | 8. Termination.
408 |
409 | You may not propagate or modify a covered work except as expressly
410 | provided under this License. Any attempt otherwise to propagate or
411 | modify it is void, and will automatically terminate your rights under
412 | this License (including any patent licenses granted under the third
413 | paragraph of section 11).
414 |
415 | However, if you cease all violation of this License, then your
416 | license from a particular copyright holder is reinstated (a)
417 | provisionally, unless and until the copyright holder explicitly and
418 | finally terminates your license, and (b) permanently, if the copyright
419 | holder fails to notify you of the violation by some reasonable means
420 | prior to 60 days after the cessation.
421 |
422 | Moreover, your license from a particular copyright holder is
423 | reinstated permanently if the copyright holder notifies you of the
424 | violation by some reasonable means, this is the first time you have
425 | received notice of violation of this License (for any work) from that
426 | copyright holder, and you cure the violation prior to 30 days after
427 | your receipt of the notice.
428 |
429 | Termination of your rights under this section does not terminate the
430 | licenses of parties who have received copies or rights from you under
431 | this License. If your rights have been terminated and not permanently
432 | reinstated, you do not qualify to receive new licenses for the same
433 | material under section 10.
434 |
435 | 9. Acceptance Not Required for Having Copies.
436 |
437 | You are not required to accept this License in order to receive or
438 | run a copy of the Program. Ancillary propagation of a covered work
439 | occurring solely as a consequence of using peer-to-peer transmission
440 | to receive a copy likewise does not require acceptance. However,
441 | nothing other than this License grants you permission to propagate or
442 | modify any covered work. These actions infringe copyright if you do
443 | not accept this License. Therefore, by modifying or propagating a
444 | covered work, you indicate your acceptance of this License to do so.
445 |
446 | 10. Automatic Licensing of Downstream Recipients.
447 |
448 | Each time you convey a covered work, the recipient automatically
449 | receives a license from the original licensors, to run, modify and
450 | propagate that work, subject to this License. You are not responsible
451 | for enforcing compliance by third parties with this License.
452 |
453 | An "entity transaction" is a transaction transferring control of an
454 | organization, or substantially all assets of one, or subdividing an
455 | organization, or merging organizations. If propagation of a covered
456 | work results from an entity transaction, each party to that
457 | transaction who receives a copy of the work also receives whatever
458 | licenses to the work the party's predecessor in interest had or could
459 | give under the previous paragraph, plus a right to possession of the
460 | Corresponding Source of the work from the predecessor in interest, if
461 | the predecessor has it or can get it with reasonable efforts.
462 |
463 | You may not impose any further restrictions on the exercise of the
464 | rights granted or affirmed under this License. For example, you may
465 | not impose a license fee, royalty, or other charge for exercise of
466 | rights granted under this License, and you may not initiate litigation
467 | (including a cross-claim or counterclaim in a lawsuit) alleging that
468 | any patent claim is infringed by making, using, selling, offering for
469 | sale, or importing the Program or any portion of it.
470 |
471 | 11. Patents.
472 |
473 | A "contributor" is a copyright holder who authorizes use under this
474 | License of the Program or a work on which the Program is based. The
475 | work thus licensed is called the contributor's "contributor version".
476 |
477 | A contributor's "essential patent claims" are all patent claims
478 | owned or controlled by the contributor, whether already acquired or
479 | hereafter acquired, that would be infringed by some manner, permitted
480 | by this License, of making, using, or selling its contributor version,
481 | but do not include claims that would be infringed only as a
482 | consequence of further modification of the contributor version. For
483 | purposes of this definition, "control" includes the right to grant
484 | patent sublicenses in a manner consistent with the requirements of
485 | this License.
486 |
487 | Each contributor grants you a non-exclusive, worldwide, royalty-free
488 | patent license under the contributor's essential patent claims, to
489 | make, use, sell, offer for sale, import and otherwise run, modify and
490 | propagate the contents of its contributor version.
491 |
492 | In the following three paragraphs, a "patent license" is any express
493 | agreement or commitment, however denominated, not to enforce a patent
494 | (such as an express permission to practice a patent or covenant not to
495 | sue for patent infringement). To "grant" such a patent license to a
496 | party means to make such an agreement or commitment not to enforce a
497 | patent against the party.
498 |
499 | If you convey a covered work, knowingly relying on a patent license,
500 | and the Corresponding Source of the work is not available for anyone
501 | to copy, free of charge and under the terms of this License, through a
502 | publicly available network server or other readily accessible means,
503 | then you must either (1) cause the Corresponding Source to be so
504 | available, or (2) arrange to deprive yourself of the benefit of the
505 | patent license for this particular work, or (3) arrange, in a manner
506 | consistent with the requirements of this License, to extend the patent
507 | license to downstream recipients. "Knowingly relying" means you have
508 | actual knowledge that, but for the patent license, your conveying the
509 | covered work in a country, or your recipient's use of the covered work
510 | in a country, would infringe one or more identifiable patents in that
511 | country that you have reason to believe are valid.
512 |
513 | If, pursuant to or in connection with a single transaction or
514 | arrangement, you convey, or propagate by procuring conveyance of, a
515 | covered work, and grant a patent license to some of the parties
516 | receiving the covered work authorizing them to use, propagate, modify
517 | or convey a specific copy of the covered work, then the patent license
518 | you grant is automatically extended to all recipients of the covered
519 | work and works based on it.
520 |
521 | A patent license is "discriminatory" if it does not include within
522 | the scope of its coverage, prohibits the exercise of, or is
523 | conditioned on the non-exercise of one or more of the rights that are
524 | specifically granted under this License. You may not convey a covered
525 | work if you are a party to an arrangement with a third party that is
526 | in the business of distributing software, under which you make payment
527 | to the third party based on the extent of your activity of conveying
528 | the work, and under which the third party grants, to any of the
529 | parties who would receive the covered work from you, a discriminatory
530 | patent license (a) in connection with copies of the covered work
531 | conveyed by you (or copies made from those copies), or (b) primarily
532 | for and in connection with specific products or compilations that
533 | contain the covered work, unless you entered into that arrangement,
534 | or that patent license was granted, prior to 28 March 2007.
535 |
536 | Nothing in this License shall be construed as excluding or limiting
537 | any implied license or other defenses to infringement that may
538 | otherwise be available to you under applicable patent law.
539 |
540 | 12. No Surrender of Others' Freedom.
541 |
542 | If conditions are imposed on you (whether by court order, agreement or
543 | otherwise) that contradict the conditions of this License, they do not
544 | excuse you from the conditions of this License. If you cannot convey a
545 | covered work so as to satisfy simultaneously your obligations under this
546 | License and any other pertinent obligations, then as a consequence you may
547 | not convey it at all. For example, if you agree to terms that obligate you
548 | to collect a royalty for further conveying from those to whom you convey
549 | the Program, the only way you could satisfy both those terms and this
550 | License would be to refrain entirely from conveying the Program.
551 |
552 | 13. Use with the GNU Affero General Public License.
553 |
554 | Notwithstanding any other provision of this License, you have
555 | permission to link or combine any covered work with a work licensed
556 | under version 3 of the GNU Affero General Public License into a single
557 | combined work, and to convey the resulting work. The terms of this
558 | License will continue to apply to the part which is the covered work,
559 | but the special requirements of the GNU Affero General Public License,
560 | section 13, concerning interaction through a network will apply to the
561 | combination as such.
562 |
563 | 14. Revised Versions of this License.
564 |
565 | The Free Software Foundation may publish revised and/or new versions of
566 | the GNU General Public License from time to time. Such new versions will
567 | be similar in spirit to the present version, but may differ in detail to
568 | address new problems or concerns.
569 |
570 | Each version is given a distinguishing version number. If the
571 | Program specifies that a certain numbered version of the GNU General
572 | Public License "or any later version" applies to it, you have the
573 | option of following the terms and conditions either of that numbered
574 | version or of any later version published by the Free Software
575 | Foundation. If the Program does not specify a version number of the
576 | GNU General Public License, you may choose any version ever published
577 | by the Free Software Foundation.
578 |
579 | If the Program specifies that a proxy can decide which future
580 | versions of the GNU General Public License can be used, that proxy's
581 | public statement of acceptance of a version permanently authorizes you
582 | to choose that version for the Program.
583 |
584 | Later license versions may give you additional or different
585 | permissions. However, no additional obligations are imposed on any
586 | author or copyright holder as a result of your choosing to follow a
587 | later version.
588 |
589 | 15. Disclaimer of Warranty.
590 |
591 | THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
592 | APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
593 | HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
594 | OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
595 | THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
596 | PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
597 | IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
598 | ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
599 |
600 | 16. Limitation of Liability.
601 |
602 | IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
603 | WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
604 | THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
605 | GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
606 | USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
607 | DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
608 | PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
609 | EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
610 | SUCH DAMAGES.
611 |
612 | 17. Interpretation of Sections 15 and 16.
613 |
614 | If the disclaimer of warranty and limitation of liability provided
615 | above cannot be given local legal effect according to their terms,
616 | reviewing courts shall apply local law that most closely approximates
617 | an absolute waiver of all civil liability in connection with the
618 | Program, unless a warranty or assumption of liability accompanies a
619 | copy of the Program in return for a fee.
620 |
621 | END OF TERMS AND CONDITIONS
622 |
623 | How to Apply These Terms to Your New Programs
624 |
625 | If you develop a new program, and you want it to be of the greatest
626 | possible use to the public, the best way to achieve this is to make it
627 | free software which everyone can redistribute and change under these terms.
628 |
629 | To do so, attach the following notices to the program. It is safest
630 | to attach them to the start of each source file to most effectively
631 | state the exclusion of warranty; and each file should have at least
632 | the "copyright" line and a pointer to where the full notice is found.
633 |
634 | {one line to give the program's name and a brief idea of what it does.}
635 | Copyright (C) {year} {name of author}
636 |
637 | This program is free software: you can redistribute it and/or modify
638 | it under the terms of the GNU General Public License as published by
639 | the Free Software Foundation, either version 3 of the License, or
640 | (at your option) any later version.
641 |
642 | This program is distributed in the hope that it will be useful,
643 | but WITHOUT ANY WARRANTY; without even the implied warranty of
644 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
645 | GNU General Public License for more details.
646 |
647 | You should have received a copy of the GNU General Public License
648 | along with this program. If not, see <https://www.gnu.org/licenses/>.
649 |
650 | Also add information on how to contact you by electronic and paper mail.
651 |
652 | If the program does terminal interaction, make it output a short
653 | notice like this when it starts in an interactive mode:
654 |
655 | {project} Copyright (C) {year} {fullname}
656 | This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
657 | This is free software, and you are welcome to redistribute it
658 | under certain conditions; type `show c' for details.
659 |
660 | The hypothetical commands `show w' and `show c' should show the appropriate
661 | parts of the General Public License. Of course, your program's commands
662 | might be different; for a GUI interface, you would use an "about box".
663 |
664 | You should also get your employer (if you work as a programmer) or school,
665 | if any, to sign a "copyright disclaimer" for the program, if necessary.
666 | For more information on this, and how to apply and follow the GNU GPL, see
667 | <https://www.gnu.org/licenses/>.
668 |
669 | The GNU General Public License does not permit incorporating your program
670 | into proprietary programs. If your program is a subroutine library, you
671 | may consider it more useful to permit linking proprietary applications with
672 | the library. If this is what you want to do, use the GNU Lesser General
673 | Public License instead of this License. But first, please read
674 | <https://www.gnu.org/philosophy/why-not-lgpl.html>.
675 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Go Perceptron
2 |
3 | A single / multi-layer perceptron classifier with weights estimated from the sonar training data set using stochastic gradient descent. A backpropagation algorithm over a multilayer perceptron network was recently added.
4 | The implementation is under development. Planned features:
5 |
6 | - complete the future features XD (see below)
7 | - find co-workers
8 | - create an ML library in OpenQASM (just kidding)
9 | - brainstorming / dev-testing other ANN models
10 |
11 | ### Updates
12 |
13 | 2017-10-04: Introduced a Recurrent Neural Network (Elman Network) with a "learn to sum integers" task. Big code refactoring (in progress).
14 |
15 | 2017-08-08: Introduced a multilayer perceptron network definition with a parametric number of hidden layers and neurons. Backpropagation algorithm with different transfer functions activated. I want to thank [dakk](https://github.com/dakk), because I was truly inspired by your code.
16 |
17 | 2017-08-01: Introduced the validation package and k-fold cross validation.
18 |
19 | 2017-07-31: I started working on the ```mlp``` branch for MLP + backprop. It doesn't work yet, but I pushed a first commit after some experiments in dev. I deleted ```dev``` because of some struct optimizations.
20 |
21 | 2017-07-31: We started working on k-fold cross validation.
22 |
23 | ### Dependencies
24 |
25 | - [logrus](https://github.com/sirupsen/logrus)
26 |
27 | ### Run test
28 |
29 | To run a simple test, just open a shell and run the following:
30 |
31 | ```
32 | git clone https://github.com/made2591/go-perceptron-go
33 | cd go-perceptron-go
34 | go get github.com/sirupsen/logrus
35 | go run main.go
36 | ```
37 |
38 | You can set up a ```MultiLayerNetwork``` using ```PrepareMLPNet```. The first parameter, a simple ```[]int```, defines the entire network structure. Examples (a usage sketch follows them):
39 |
40 | - [4, 3, 3] will define a network with 3 layers: input, hidden, and output, with 4, 3, and 3 neurons respectively. For classification problems, the input layer has to be defined with a number of neurons matching the number of features of the patterns shown to the network. Of course, the output layer should have a number of units equal to the number of classes in the training set.
41 | The network will have this topology:
42 |
43 | 
44 |
45 | - [4, 6, 4, 3] will have this topology:
46 |
47 | 
48 |
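A minimal usage sketch, based on the calls in ```main.go``` (the file path, hidden layer size, and training parameters below are just the example values used there):

```go
package main

import (
	mn "github.com/made2591/go-perceptron-go/model/neural"
	v "github.com/made2591/go-perceptron-go/validation"
)

func main() {
	// load patterns and the class value mapping from a CSV data set
	patterns, _, mapped := mn.LoadPatternsFromCSVFile("./res/iris.all_data.csv")

	// [input, hidden, output]: one input neuron per feature, one output neuron per class
	layers := []int{len(patterns[0].Features), 20, len(mapped)}

	// build the network and score it with 3-fold cross validation (500 epochs, shuffle on)
	mlp := mn.PrepareMLPNet(layers, 0.01, mn.SigmoidalTransfer, mn.SigmoidalTransferDerivate)
	scores := v.MLPKFoldValidation(&mlp, patterns, 500, 3, 1, mapped)
	_ = scores // e.g. log the per-fold scores
}
```
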
49 | ### Still to be completed
50 |
51 | - test methods
52 |
53 | ### Future features
54 |
55 | - [mathgl](https://github.com/go-gl/mathgl.git) for better vector space handling
56 | - some other cool neural model XD
57 |
--------------------------------------------------------------------------------
/main.go:
--------------------------------------------------------------------------------
1 | // Package main provides a main function to test the library.
2 | package main
3 |
4 | import (
5 | // sys import
6 | "os"
7 |
8 | // third party imports
9 | log "github.com/sirupsen/logrus"
10 |
11 | // internal imports from this repo
12 | mn "github.com/made2591/go-perceptron-go/model/neural"
13 | mu "github.com/made2591/go-perceptron-go/util"
14 | v "github.com/made2591/go-perceptron-go/validation"
15 | )
16 |
17 | func init() {
18 | // Output to stdout instead of the default stderr
19 | log.SetOutput(os.Stdout)
20 | // Only log the info severity or above.
21 | log.SetLevel(log.InfoLevel)
22 | }
23 |
24 | //############################ MAIN ############################
25 |
26 | func main() {
27 |
28 | // #############################################################################################################
29 | // ###################################### Single layer perceptron model ######################################
30 | // #############################################################################################################
31 |
32 | if true {
33 |
34 | log.WithFields(log.Fields{
35 | "level": "info",
36 | "place": "main",
37 | "msg": "single layer perceptron train and test over sonar dataset",
38 | }).Info("Compute single layer perceptron on sonar data set (binary classification problem)")
39 |
40 | // percentage and shuffling in dataset
41 | var filePath string = "./res/sonar.all_data.csv"
42 | var percentage float64 = 0.67
43 | var shuffle = 1
44 |
45 | // single layer neuron parameters
46 | var bias float64 = 0.0
47 | var learningRate float64 = 0.01
48 |
49 | // training parameters
50 | var epochs int = 500
51 | var folds int = 5
52 |
53 | // Patterns initialization
54 | var patterns, _, _ = mn.LoadPatternsFromCSVFile(filePath)
55 |
56 | // NeuronUnit initialization
57 | var neuron mn.NeuronUnit = mn.NeuronUnit{Weights: make([]float64, len(patterns[0].Features)), Bias: bias, Lrate: learningRate}
58 |
59 | // compute scores for each folds execution
60 | var scores []float64 = v.KFoldValidation(&neuron, patterns, epochs, folds, shuffle)
61 |
62 | // use simpler validation
63 | var neuron2 mn.NeuronUnit = mn.NeuronUnit{Weights: make([]float64, len(patterns[0].Features)), Bias: bias, Lrate: learningRate}
64 | var scores2 []float64 = v.RandomSubsamplingValidation(&neuron2, patterns, percentage, epochs, folds, shuffle)
65 |
66 | log.WithFields(log.Fields{
67 | "level": "info",
68 | "place": "main",
69 | "scores": scores,
70 | }).Info("Scores reached: ", scores)
71 |
72 | log.WithFields(log.Fields{
73 | "level": "info",
74 | "place": "main",
75 | "scores": scores2,
76 | }).Info("Scores reached: ", scores2)
77 |
78 | }
79 |
80 | // #############################################################################################################
81 | // ###################################### Multilayer perceptron model ########################################
82 | // #############################################################################################################
83 |
84 | if false {
85 |
86 | log.WithFields(log.Fields{
87 | "level": "info",
88 | "place": "main",
89 | "msg": "multi layer perceptron train and test over iris dataset",
90 | }).Info("Compute backpropagation multi layer perceptron on sonar data set (binary classification problem)")
91 |
92 | // percentage and shuffling in dataset
93 | var filePath = "./res/iris.all_data.csv"
94 | //filePath = "./res/sonar.all_data.csv"
95 |
96 | // multilayer network parameters
97 | var learningRate = 0.01
98 | var percentage = 0.67
99 | var shuffle = 1
100 |
101 | // training parameters
102 | var epochs = 500
103 | var folds = 3
104 |
105 | // Patterns initialization
106 | var patterns, _ , mapped = mn.LoadPatternsFromCSVFile(filePath)
107 |
108 | //input layer  : 4 neurons, represents the features of Iris, more in general the dimensions of the pattern
109 | //hidden layer : 20 neurons, activation using sigmoid, number of neurons in the hidden level
110 | //2nd hidden l : * neurons, insert as many levels as you want
111 | //output layer : 3 neurons, represents the classes of Iris, more in general the dimensions of the mapped values
112 | var layers []int = []int{len(patterns[0].Features), 20, len(mapped)}
113 |
114 | //Multilayer perceptron model, with one hidden layer.
115 | var mlp mn.MultiLayerNetwork = mn.PrepareMLPNet(layers, learningRate, mn.SigmoidalTransfer, mn.SigmoidalTransferDerivate)
116 |
117 | // compute scores for each folds execution
118 | var scores = v.MLPKFoldValidation(&mlp, patterns, epochs, folds, shuffle, mapped)
119 |
120 | // use simpler validation
121 | var mlp2 mn.MultiLayerNetwork = mn.PrepareMLPNet(layers, learningRate, mn.SigmoidalTransfer, mn.SigmoidalTransferDerivate)
122 | var scores2 = v.MLPRandomSubsamplingValidation(&mlp2, patterns, percentage, epochs, folds, shuffle, mapped)
123 |
124 | log.WithFields(log.Fields{
125 | "level": "info",
126 | "place": "main",
127 | "scores": scores,
128 | }).Info("Scores reached: ", scores)
129 |
130 | log.WithFields(log.Fields{
131 | "level": "info",
132 | "place": "main",
133 | "scores": scores2,
134 | }).Info("Scores reached: ", scores2)
135 |
136 | }
137 |
138 | // #############################################################################################################
139 | // ######################################### Recurrent Neural Network ########################################
140 | // #############################################################################################################
141 |
142 | if true {
143 |
144 | log.WithFields(log.Fields{
145 | "level": "info",
146 | "place": "main",
147 | "msg": "multi layer perceptron train and test over iris dataset",
148 | }).Info("Compute training algorithm on elman network using iris data set (binary classification problem)")
149 |
150 | // percentage and shuffling in dataset
151 | //var filePath = ".\\res\\iris.all_data.csv"
152 | //var filePath = "./res/sonar.all_data.csv"
153 |
154 | // network parameters
155 | var learningRate = 0.01
156 | var shuffle = 1
157 |
158 | // training parameters
159 | var epochs = 500
160 |
161 | // Patterns initialization
162 | var patterns = mn.CreateRandomPatternArray(8, 30)
163 |
164 | //log.Info(patterns[0].Features[:int(len(patterns[0].Features)/2)])
165 | //n := mu.ConvertBinToInt(patterns[0].Features[:int(len(patterns[0].Features)/2)])
166 | //log.Info(n)
167 | //os.Exit(1)
168 |
169 | //input layer  : pattern features plus 10 context neurons
170 | //hidden layer : 10 neurons, activation using sigmoid
171 | //context      : the hidden layer output is copied back to the context neurons
172 | //output layer : as many neurons as the dimensions of the expected output (MultipleExpectation)
173 |
174 | //Elman recurrent network model, with one hidden layer.
175 | var mlp mn.MultiLayerNetwork =
176 | mn.PrepareElmanNet(len(patterns[0].Features)+10,
177 | 10, len(patterns[0].MultipleExpectation), learningRate,
178 | mn.SigmoidalTransfer, mn.SigmoidalTransferDerivate)
179 |
180 | // compute scores for each folds execution
181 | var mean, _ = v.RNNValidation(&mlp, patterns, epochs, shuffle)
182 |
183 | log.WithFields(log.Fields{
184 | "level": "info",
185 | "place": "main",
186 | "precision": mu.Round(mean, .5, 2),
187 | }).Info("Scores reached: ", mu.Round(mean, .5, 2))
188 |
189 | }
190 |
191 | }
192 |
--------------------------------------------------------------------------------
/media/first_example.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/made2591/go-perceptron-go/f9cb1d41709be57f3f5d407c39e08defd021f9ce/media/first_example.png
--------------------------------------------------------------------------------
/media/second_example.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/made2591/go-perceptron-go/f9cb1d41709be57f3f5d407c39e08defd021f9ce/media/second_example.png
--------------------------------------------------------------------------------
/model/neural/multiLayerNetwork.go:
--------------------------------------------------------------------------------
1 | // Package neural provides structs that represent the most common neural network models and algorithms to train / test them.
2 | package neural
3 |
4 | import (
5 |
6 | // sys import
7 | "os"
8 | //"fmt"
9 | "math"
10 |
11 | // third party imports
12 | log "github.com/sirupsen/logrus"
13 | mu "github.com/made2591/go-perceptron-go/util"
14 |
15 | "time"
16 | "math/rand"
17 | )
18 |
19 | func init() {
20 | // Output to stdout instead of the default stderr
21 | log.SetOutput(os.Stdout)
22 | // Only log the info severity or above.
23 | log.SetLevel(log.InfoLevel)
24 | }
25 |
26 | type MultiLayerNetwork struct {
27 |
28 | // L_rate represents the learning rate of the network
29 | L_rate float64
30 |
31 | // NeuralLayers represents the layers of neurons
32 | NeuralLayers []NeuralLayer
33 |
34 | // Transfer function
35 | T_func transferFunction
36 |
37 | // Transfer function derivative
38 | T_func_d transferFunction
39 |
40 | }
41 |
42 | // PrepareMLPNet creates a multilayer Perceptron neural network.
43 | // [l:[]int] is an int array with the number of neurons for each layer [input, ..., output]
44 | // [lr:float64] is the learning rate of the neural network
45 | // [tf:transferFunction] is a transfer function
46 | // [trd:transferFunction] is the respective transfer function derivative
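// Example (a minimal sketch, with the [4, 3, 3] layer sizes from the README example):
//
//	mlp := PrepareMLPNet([]int{4, 3, 3}, 0.01, SigmoidalTransfer, SigmoidalTransferDerivate)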
47 | func PrepareMLPNet(l []int, lr float64, tf transferFunction, trd transferFunction) (mlp MultiLayerNetwork) {
48 |
49 | // setup learning rate and transfer function
50 | mlp.L_rate = lr
51 | mlp.T_func = tf
52 | mlp.T_func_d = trd
53 |
54 | // setup layers
55 | mlp.NeuralLayers = make([]NeuralLayer, len(l))
56 |
57 | // for each layers specified
58 | for il, ql := range l {
59 |
60 | // if it is not the first
61 | if il != 0 {
62 |
63 | // prepare the GENERIC layer with specific dimension and correct number of links for each NeuronUnits
64 | mlp.NeuralLayers[il] = PrepareLayer(ql, l[il-1])
65 |
66 | } else {
67 |
68 | // prepare the INPUT layer with specific dimension and No links to previous.
69 | mlp.NeuralLayers[il] = PrepareLayer(ql, 0)
70 |
71 | }
72 |
73 | }
74 |
75 | log.WithFields(log.Fields{
76 | "level": "info",
77 | "msg": "multilayer perceptron init completed",
78 | "layers": len(mlp.NeuralLayers),
79 | "learningRate: ": mlp.L_rate,
80 | }).Info("Complete Multilayer Perceptron init.")
81 |
82 | return
83 |
84 | }
85 |
86 |
87 | // PrepareElmanNet creates an Elman recurrent neural network.
88 | // [i:int] is the input layer size (pattern features plus context neurons)
89 | // [h:int] is the hidden layer size, [o:int] the output layer size
90 | // [lr:float64] is the learning rate of the neural network
91 | // [tf:transferFunction] is a transfer function, [trd:transferFunction] the respective derivative
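// Example (a minimal sketch; as in main.go, the input size is the pattern feature
// count plus the 10 context neurons; "features" and "outputs" are placeholder names):
//
//	rnn := PrepareElmanNet(len(features)+10, 10, outputs, 0.01, SigmoidalTransfer, SigmoidalTransferDerivate)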
92 | func PrepareElmanNet(i int, h int, o int, lr float64, tf transferFunction, trd transferFunction) (rnn MultiLayerNetwork) {
93 |
94 | // setup a three layer network with Input Context dimension
95 | rnn = PrepareMLPNet([]int{i, h, o}, lr, tf, trd)
96 |
97 | log.WithFields(log.Fields{
98 | "level": "info",
99 | "msg": "recurrent neural network init completed",
100 | "inputLayer": i,
101 | "hiddenLayer": h,
102 | "outputLayer": o,
103 | "learningRate: ": rnn.L_rate,
104 | }).Info("Complete RNN init.")
105 |
106 | return
107 |
108 | }
109 |
110 | // Execute runs a multilayer Perceptron neural network over a pattern.
111 | // [mlp:MultiLayerNetwork] multilayer perceptron network pointer, [s:Pattern] input pattern
112 | // [options ...int] pass 1 to update the Elman context; it returns the output values computed by the network
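// Example (sketch):
//
//	out := Execute(&mlp, &pattern)    // plain feed-forward pass
//	out = Execute(&mlp, &pattern, 1)  // recurrent pass: the hidden output is copied to the context neurons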
113 | func Execute(mlp *MultiLayerNetwork, s *Pattern, options ...int) (r []float64) {
114 |
115 | // new value
116 | nv := 0.0
117 |
118 | // result of execution for each OUTPUT NeuronUnit in OUTPUT NeuralLayer
119 | r = make([]float64, mlp.NeuralLayers[len(mlp.NeuralLayers)-1].Length)
120 |
121 | // show pattern to network =>
122 | for i := 0; i < len(s.Features); i++ {
123 |
124 | // setup value of each neurons in first layers to respective features of pattern
125 | mlp.NeuralLayers[0].NeuronUnits[i].Value = s.Features[i]
126 |
127 | }
128 |
129 | // init context
130 | for i := len(s.Features); i < mlp.NeuralLayers[0].Length; i++ {
131 |
132 | // setup value of each neurons in context layers to 0.5
133 | mlp.NeuralLayers[0].NeuronUnits[i].Value = 0.5
134 |
135 | }
136 |
146 |
147 | // execute - hiddens + output
148 | // for each layers from first hidden to output
149 | for k := 1; k < len(mlp.NeuralLayers); k++ {
150 |
151 | // for each neurons in focused level
152 | for i := 0; i < mlp.NeuralLayers[k].Length; i++ {
153 |
154 | // init new value
155 | nv = 0.0
156 |
157 | // for each neurons in previous level (for k = 1, INPUT)
158 | for j := 0; j < mlp.NeuralLayers[k - 1].Length; j++ {
159 |
160 | // sum output value of previous neurons multiplied by weight between previous and focused neuron
161 | nv += mlp.NeuralLayers[k].NeuronUnits[i].Weights[j] * mlp.NeuralLayers[k - 1].NeuronUnits[j].Value
162 |
163 | log.WithFields(log.Fields{
164 | "level": "debug",
165 | "msg": "multilayer perceptron execution",
166 | "len(mlp.NeuralLayers)": len(mlp.NeuralLayers),
167 | "layer: ": k,
168 | "neuron: ": i,
169 | "previous neuron: ": j,
170 | }).Debug("Compute output propagation.")
171 |
172 | }
173 |
174 | // add neuron bias
175 | nv += mlp.NeuralLayers[k].NeuronUnits[i].Bias
176 |
177 | // compute activation function to new output value
178 | mlp.NeuralLayers[k].NeuronUnits[i].Value = mlp.T_func(nv)
179 |
180 | // save output of hidden layer to context if network is RECURRENT
181 | if k == 1 && len(options) > 0 && options[0] == 1 {
182 |
183 | for z := len(s.Features); z < mlp.NeuralLayers[0].Length; z++ {
184 |
185 | log.WithFields(log.Fields{
186 | "level" : "debug",
187 | "len z" : z,
188 | "s.Features" : s.Features,
189 | "len(s.Features)" : len(s.Features),
190 | "len mlp.NeuralLayers[0].NeuronUnits" : len(mlp.NeuralLayers[0].NeuronUnits),
191 | "len mlp.NeuralLayers[k].NeuronUnits" : len(mlp.NeuralLayers[k].NeuronUnits),
192 | }).Debug("Save output of hidden layer to context.")
193 |
194 | mlp.NeuralLayers[0].NeuronUnits[z].Value = mlp.NeuralLayers[k].NeuronUnits[z-len(s.Features)].Value
195 |
196 | }
197 |
198 | }
199 |
200 | log.WithFields(log.Fields{
201 | "level": "debug",
202 | "msg": "setup new neuron output value after transfer function application",
203 | "len(mlp.NeuralLayers)": len(mlp.NeuralLayers),
204 | "layer: ": k,
205 | "neuron: ": i,
206 | "outputvalue" : mlp.NeuralLayers[k].NeuronUnits[i].Value,
207 | }).Debug("Setup new neuron output value after transfer function application.")
208 |
209 | }
210 |
211 | }
212 |
213 |
214 | // get output values
215 | for i := 0; i < mlp.NeuralLayers[len(mlp.NeuralLayers)-1].Length; i++ {
216 |
217 | // copy the values of all neurons in the last layer
218 | r[i] = mlp.NeuralLayers[len(mlp.NeuralLayers)-1].NeuronUnits[i].Value
219 |
220 | }
221 |
222 | return r
223 |
224 | }
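
// A minimal, illustrative forward-pass sketch — sketchForwardPass is a hypothetical
// helper, not part of the original API; it assumes the MultiLayerNetwork literal only
// needs the fields Execute actually reads (any other fields keep their zero values):
func sketchForwardPass() {
	layers := []NeuralLayer{
		PrepareLayer(2, 0), // input layer: 2 units, no incoming weights
		PrepareLayer(3, 2), // hidden layer: 3 units, 2 incoming weights each
		PrepareLayer(1, 3), // output layer: 1 unit, 3 incoming weights
	}
	mlp := MultiLayerNetwork{NeuralLayers: layers, L_rate: 0.01,
		T_func: SigmoidalTransfer, T_func_d: SigmoidalTransferDerivate}
	// a 2-feature pattern fills the whole input layer, so no context units remain
	out := Execute(&mlp, &Pattern{Features: []float64{0.2, 0.8}})
	log.WithFields(log.Fields{"out": out}).Info("toy 2-3-1 forward pass")
}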
225 |
226 |
227 |
228 | // BackPropagate applies the BackPropagation algorithm for supervised learning. Convergence is not guaranteed and is typically slow;
229 | // use as a stopping criterion the average of previous and current errors together with a maximum number of iterations.
230 | // [mlp:MultiLayerNetwork] network pointer, [s:Pattern] input pattern (scaled between 0 and 1)
231 | // [o:[]float64] expected output values (scaled between 0 and 1)
232 | // It returns [r:float64] the mean absolute error between generated and expected output.
233 | func BackPropagate(mlp *MultiLayerNetwork, s *Pattern, o []float64, options ...int) (r float64) {
234 |
235 | var no []float64
236 | // execute the network on the pattern, propagating through each layer to the output
237 | if len(options) == 1 {
238 | no = Execute(mlp, s, options[0])
239 | } else {
240 | no = Execute(mlp, s)
241 | }
242 |
243 | // init error
244 | e := 0.0
245 |
246 | // compute output error and delta in output layer
247 | for i := 0; i < mlp.NeuralLayers[len(mlp.NeuralLayers)-1].Length; i++ {
248 |
249 | // compute error in output: output for given pattern - output computed by network
250 | e = o[i] - no[i]
251 |
252 | // compute delta for each neuron in output layer as:
253 | // error in output * derivative of transfer function of network output
254 | mlp.NeuralLayers[len(mlp.NeuralLayers)-1].NeuronUnits[i].Delta = e * mlp.T_func_d(no[i])
255 |
256 | }
257 |
258 | // backpropagate error to previous layers
259 | // for each layers starting from the last hidden (len(mlp.NeuralLayers)-2)
260 | for k := len(mlp.NeuralLayers)-2; k >= 0; k-- {
261 |
262 | // compute actual layer errors and re-compute delta
263 | for i := 0; i < mlp.NeuralLayers[k].Length; i++ {
264 |
265 | // reset error accumulator
266 | e = 0.0
267 |
268 | // for each link to next layer
269 | for j := 0; j < mlp.NeuralLayers[k + 1].Length; j++ {
270 |
271 | // sum the delta of each next-layer neuron multiplied by the weight between the focused neuron and that neuron
272 | e += mlp.NeuralLayers[k + 1].NeuronUnits[j].Delta * mlp.NeuralLayers[k + 1].NeuronUnits[j].Weights[i]
273 |
274 | }
275 |
276 | // compute delta for each neuron in focused layer as error * derivative of transfer function
277 | mlp.NeuralLayers[k].NeuronUnits[i].Delta = e * mlp.T_func_d(mlp.NeuralLayers[k].NeuronUnits[i].Value)
278 |
279 | }
280 |
281 | // update weights entering the next layer
282 | // for each neuron in the next layer
283 | for i := 0; i < mlp.NeuralLayers[k + 1].Length; i++ {
284 |
285 | // for each neuron in the actual layer (for k = 0, INPUT)
286 | for j := 0; j < mlp.NeuralLayers[k].Length; j++ {
287 |
288 | // add learning rate * next-layer neuron Delta * actual-layer neuron output value
289 | mlp.NeuralLayers[k + 1].NeuronUnits[i].Weights[j] +=
290 | mlp.L_rate * mlp.NeuralLayers[k + 1].NeuronUnits[i].Delta * mlp.NeuralLayers[k].NeuronUnits[j].Value
291 |
292 | }
293 |
294 | // update bias: learning rate * next-layer neuron Delta
295 | mlp.NeuralLayers[k + 1].NeuronUnits[i].Bias += mlp.L_rate * mlp.NeuralLayers[k + 1].NeuronUnits[i].Delta
296 |
297 | }
298 |
299 | // copy hidden output to context
300 | if k == 1 && len(options) > 0 && options[0] == 1 {
301 |
302 | for z := len(s.Features); z < mlp.NeuralLayers[0].Length; z++ {
303 |
304 | // save output of hidden layer to context
305 | mlp.NeuralLayers[0].NeuronUnits[z].Value = mlp.NeuralLayers[k].NeuronUnits[z-len(s.Features)].Value
306 |
307 | }
308 |
309 | }
310 |
311 | }
312 |
313 | // compute global error as the sum of absolute differences between the computed
314 | // and the desired value of each neuron in the output layer
315 | for i := 0; i < len(o); i++ {
316 |
317 | r += math.Abs(no[i] - o[i])
318 |
319 | }
320 |
321 | // average error
322 | r = r / float64(len(o))
323 |
324 | return
325 |
326 | }
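
// For reference, the loops above implement the standard gradient-descent updates
// (a notation sketch matching the code: v is a unit's output Value, f' is T_func_d,
// eta is L_rate):
//
//	output layer:   delta_i = (o_i - no_i) * f'(no_i)
//	hidden layers:  delta_i = f'(v_i) * sum_j delta_j * w_ji   (j over the next layer)
//	weight update:  w_ij += eta * delta_i * v_j
//	bias update:    b_i  += eta * delta_i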
327 |
328 | // MLPTrain trains a MultiLayerNetwork with the BackPropagation algorithm (supervised learning).
329 | func MLPTrain(mlp *MultiLayerNetwork, patterns []Pattern, mapped []string, epochs int) {
330 |
331 | epoch := 0
332 | output := make([]float64, len(mapped))
333 |
334 | // for a fixed number of epochs
335 | for epoch < epochs {
336 |
337 | // for each pattern in training set
338 | for _, pattern := range patterns {
339 |
340 | // reset desired output for each unit
341 | for io := range output {
342 | output[io] = 0.0
343 | }
344 | // set desired output to 1.0 for the class of the focused pattern
345 | output[int(pattern.SingleExpectation)] = 1.0
346 | // back propagation
347 | BackPropagate(mlp, &pattern, output)
348 |
349 | }
350 |
351 | log.WithFields(log.Fields{
352 | "level": "info",
353 | "place": "validation",
354 | "method": "MLPTrain",
355 | "epoch": epoch,
356 | }).Debug("Training epoch completed.")
357 |
358 | // increment epoch counter
359 | epoch++
365 |
366 | }
367 |
368 | }
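
// An end-to-end training sketch on the bundled iris dataset — sketchTrainIris is a
// hypothetical helper; the hidden size, learning rate and epoch count are arbitrary
// illustrative choices, not values prescribed by this package:
func sketchTrainIris() {
	patterns, err, mapped := LoadPatternsFromCSVFile("./res/iris.all_data.csv")
	if err != nil {
		return
	}
	// 4 features in, one output unit per class found in the dataset
	layers := []NeuralLayer{
		PrepareLayer(4, 0),
		PrepareLayer(10, 4),
		PrepareLayer(len(mapped), 10),
	}
	mlp := MultiLayerNetwork{NeuralLayers: layers, L_rate: 0.1,
		T_func: SigmoidalTransfer, T_func_d: SigmoidalTransferDerivate}
	MLPTrain(&mlp, patterns, mapped, 500)
}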
369 |
370 | // ElmanTrain trains an Elman recurrent MultiLayerNetwork with the BackPropagation algorithm (supervised learning).
371 | func ElmanTrain(mlp *MultiLayerNetwork, patterns []Pattern, epochs int) {
372 |
373 | epoch := 0
374 |
375 | // for a fixed number of epochs
376 | for epoch < epochs {
377 |
378 | rand.Seed(time.Now().UTC().UnixNano())
379 | p_i_r := rand.Intn(len(patterns)) // random pattern index whose prediction is logged every 100 epochs
380 |
381 | // for each pattern in training set
382 | for p_i, pattern := range patterns {
383 |
384 | // back propagation
385 | BackPropagate(mlp, &pattern, pattern.MultipleExpectation, 1)
386 |
387 | if epoch%100 == 0 && p_i == p_i_r {
388 |
389 | // get output from network
390 | o_out := Execute(mlp, &pattern, 1)
391 | for o_out_i, o_out_v := range o_out {
392 | o_out[o_out_i] = mu.Round(o_out_v, .5, 0)
393 | }
394 | log.WithFields(log.Fields{
395 | "SUM": " ==========================",
396 | }).Info()
397 | log.WithFields(log.Fields{
398 | "a_n_1": mu.ConvertBinToInt(pattern.Features[0:int(len(pattern.Features)/2)]),
399 | "a_n_2": pattern.Features[0:int(len(pattern.Features)/2)],
400 | }).Info()
401 | log.WithFields(log.Fields{
402 | "b_n_1": mu.ConvertBinToInt(pattern.Features[int(len(pattern.Features)/2):]),
403 | "b_n_2": pattern.Features[int(len(pattern.Features)/2):],
404 | }).Info()
405 | log.WithFields(log.Fields{
406 | "sum_1": mu.ConvertBinToInt(pattern.MultipleExpectation),
407 | "sum_2": pattern.MultipleExpectation,
408 | }).Info()
409 | log.WithFields(log.Fields{
410 | "sum_1": mu.ConvertBinToInt(o_out),
411 | "sum_2": o_out,
412 | }).Info()
413 | log.WithFields(log.Fields{
414 | "END": " ==========================",
415 | }).Info()
416 |
417 | }
418 |
419 | }
420 |
421 | log.WithFields(log.Fields{
422 | "level": "info",
423 | "place": "validation",
424 | "method": "ElmanTrain",
425 | "epoch": epoch,
426 | }).Debug("Training epoch completed.")
427 |
428 | // increment epoch counter
429 | epoch++
435 |
436 | }
437 |
438 | }
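
// A recurrent-training sketch on the binary-sum task — sketchTrainElman is a
// hypothetical helper and all sizes are illustrative. The input layer must hold the
// 2*d feature bits plus one context unit per hidden unit, because Execute copies the
// hidden layer back into input positions len(Features)..Length-1 when option 1 is set:
func sketchTrainElman() {
	d, hidden := 4, 10
	patterns := CreateRandomPatternArray(d, 1000) // features: 2*d bits, target: d+1 bits
	layers := []NeuralLayer{
		PrepareLayer(2*d+hidden, 0),      // input features + context units
		PrepareLayer(hidden, 2*d+hidden), // hidden layer
		PrepareLayer(d+1, hidden),        // output layer
	}
	mlp := MultiLayerNetwork{NeuralLayers: layers, L_rate: 0.1,
		T_func: SigmoidalTransfer, T_func_d: SigmoidalTransferDerivate}
	ElmanTrain(&mlp, patterns, 500)
}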
--------------------------------------------------------------------------------
/model/neural/neuralLayer.go:
--------------------------------------------------------------------------------
1 | // Package neural provides structs to represent the most common neural network models and algorithms to train / test them.
2 | package neural
3 |
4 | import (
5 |
6 | // sys import
7 | "os"
8 |
9 | // third party import
10 | log "github.com/sirupsen/logrus"
11 |
12 | )
13 |
14 | // NeuralLayer struct represents a single layer of the network: a slice of n NeuronUnits.
15 | type NeuralLayer struct {
16 |
17 | // NeuronUnits represents NeuronUnits in layer
18 | NeuronUnits []NeuronUnit
19 | // Length represents the number of NeuronUnit in the layer
20 | Length int
21 |
22 | }
23 |
24 | // #######################################################################################
25 |
26 | func init() {
27 |
28 | // Output to stdout instead of the default stderr
29 | log.SetOutput(os.Stdout)
30 | // Only log the info severity or above.
31 | log.SetLevel(log.InfoLevel)
32 |
33 | }
34 |
35 | // PrepareLayer create a NeuralLayer with n NeuronUnits inside
36 | // [n:int] is an int that specifies the number of neurons in the NeuralLayer
37 | // [p:int] is an int that specifies the number of neurons in the previous NeuralLayer
38 | // It returns a NeuralLayer object
39 | func PrepareLayer(n int, p int) (l NeuralLayer) {
40 |
41 | l = NeuralLayer{NeuronUnits: make([]NeuronUnit, n), Length: n}
42 |
43 | for i := 0; i < n; i++ {
44 | RandomNeuronInit(&l.NeuronUnits[i], p)
45 | }
46 |
47 | log.WithFields(log.Fields{
48 | "level": "info",
49 | "msg": "neural layer init completed",
50 | "neurons": len(l.NeuronUnits),
51 | "lengthPreviousLayer": p,
52 | }).Info("Complete NeuralLayer init.")
53 |
54 | return
55 |
56 | }
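
// Dimensioning note (a hedged sketch of the contract above): the second argument
// sizes each unit's incoming weight vector, so for an input layer p should be 0.
//
//	l := PrepareLayer(3, 2)
//	// len(l.NeuronUnits) == 3, l.Length == 3, len(l.NeuronUnits[0].Weights) == 2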
57 |
--------------------------------------------------------------------------------
/model/neural/neuronUnit.go:
--------------------------------------------------------------------------------
1 | // Package neural provides structs to represent the most common neural network models and algorithms to train / test them.
2 | package neural
3 |
4 | import (
5 |
6 | // sys import
7 | "math/rand"
8 | "os"
9 |
10 | // third party import
11 | log "github.com/sirupsen/logrus"
12 |
13 | // this repo internal import
14 | mu "github.com/made2591/go-perceptron-go/util"
15 | )
16 |
17 | const (
18 |
19 | SCALING_FACTOR = 0.0000000000001
20 |
21 | )
22 |
23 | // NeuronUnit struct represents a single computational unit of the network, with a slice of n weights.
24 | type NeuronUnit struct {
25 |
26 | // Weights represents the incoming connection weights of the NeuronUnit
27 | Weights []float64
28 | // Bias represents NeuronUnit natural propensity to spread signal
29 | Bias float64
30 | // Lrate represents learning rate of neuron
31 | Lrate float64
32 |
33 | // Value represents the output value of the unit (for input-layer units, the loaded feature value)
34 | Value float64
35 | // Delta represents delta error for unit
36 | Delta float64
37 |
38 | }
39 |
40 | // #######################################################################################
41 |
42 | func init() {
43 | // Output to stdout instead of the default stderr
44 | log.SetOutput(os.Stdout)
45 | // Only log the info severity or above.
46 | log.SetLevel(log.InfoLevel)
47 | }
48 |
49 | // RandomNeuronInit initializes neuron weights, bias, learning rate, value and delta using scaled NormFloat64 random values.
50 | func RandomNeuronInit(neuron *NeuronUnit, dim int) {
51 |
52 | neuron.Weights = make([]float64, dim)
53 |
54 | // init random weights
55 | for index := range neuron.Weights {
56 | // init random weight
57 | neuron.Weights[index] = rand.NormFloat64() * SCALING_FACTOR
58 | }
59 |
60 | // init random bias, lrate, value and delta
61 | neuron.Bias = rand.NormFloat64() * SCALING_FACTOR
62 | neuron.Lrate = rand.NormFloat64() * SCALING_FACTOR
63 | neuron.Value = rand.NormFloat64() * SCALING_FACTOR
64 | neuron.Delta = rand.NormFloat64() * SCALING_FACTOR
65 |
66 | log.WithFields(log.Fields{
67 | "level": "debug",
68 | "place": "neuron",
69 | "func": "RandomNeuronInit",
70 | "msg": "random neuron weights init",
71 | "weights": neuron.Weights,
72 | }).Debug()
73 |
74 | }
75 |
76 | // UpdateWeights updates the neuron weights with respect to the passed pattern.
77 | // It returns the prediction error before and after the update.
78 | func UpdateWeights(neuron *NeuronUnit, pattern *Pattern) (float64, float64) {
79 |
80 | // compute prediction value and error for pattern given neuron BEFORE update (actual state)
81 | var predictedValue, prevError, postError float64 = Predict(neuron, pattern), 0.0, 0.0
82 | prevError = pattern.SingleExpectation - predictedValue
83 |
84 | // update neuron bias
85 | neuron.Bias = neuron.Bias + neuron.Lrate*prevError
86 |
87 | // performs weights update for neuron
88 | for index := range neuron.Weights {
89 | neuron.Weights[index] = neuron.Weights[index] + neuron.Lrate*prevError*pattern.Features[index]
90 | }
91 |
92 | // compute prediction value and error for pattern given neuron AFTER update (actual state)
93 | predictedValue = Predict(neuron, pattern)
94 | postError = pattern.SingleExpectation - predictedValue
95 |
96 | log.WithFields(log.Fields{
97 | "level": "debug",
98 | "place": "neuron",
99 | "func": "UpdateWeights",
100 | "msg": "updating weights of neuron",
101 | "weights": neuron.Weights,
102 | }).Debug()
103 |
104 | // return errors
105 | return prevError, postError
106 |
107 | }
108 |
109 | // TrainNeuron trains the passed neuron on the given patterns for the specified number of epochs.
110 | // If init is 0, it leaves weights unchanged before training.
111 | // If init is 1, it resets weights and bias of the neuron before training.
112 | func TrainNeuron(neuron *NeuronUnit, patterns []Pattern, epochs int, init int) {
113 |
114 | // init weights if specified
115 | if init == 1 {
116 | neuron.Weights = make([]float64, len(patterns[0].Features))
117 | neuron.Bias = 0.0
118 | }
119 |
120 | // init counter
121 | var epoch int = 0
122 |
123 | // error accumulators before and after weight updates
124 | var squaredPrevError, squaredPostError float64 = 0.0, 0.0
125 |
126 | // in each epoch
127 | for epoch < epochs {
128 |
129 | // update weight using each pattern in training set
130 | for _, pattern := range patterns {
131 | prevError, postError := UpdateWeights(neuron, &pattern)
132 | // NOTE: each step uses the weights already updated by the previous one
133 | squaredPrevError = squaredPrevError + (prevError * prevError)
134 | squaredPostError = squaredPostError + (postError * postError)
135 | }
136 |
137 | log.WithFields(log.Fields{
138 | "level": "debug",
139 | "place": "error evolution in epoch",
140 | "method": "TrainNeuron",
141 | "msg": "epoch and squared errors reached before and after updating weights",
142 | "epochReached": epoch + 1,
143 | "squaredErrorPrev": squaredPrevError,
144 | "squaredErrorPost": squaredPostError,
145 | }).Debug()
146 |
147 | // increment epoch counter
148 | epoch++
149 |
150 | }
151 |
152 | }
153 |
154 | // Predict computes the neuron prediction for the passed pattern.
155 | // It returns a binary float64 predicted value.
156 | func Predict(neuron *NeuronUnit, pattern *Pattern) float64 {
157 |
158 | if mu.ScalarProduct(neuron.Weights, pattern.Features)+neuron.Bias < 0.0 {
159 | return 0.0
160 | }
161 | return 1.0
162 |
163 | }
164 |
165 | // Accuracy calculates the percentage of equal values between two float64 slices.
166 | // It returns the number and the percentage of correct values.
167 | func Accuracy(actual []float64, predicted []float64) (int, float64) {
168 |
169 | // if slices have different number of elements
170 | if len(actual) != len(predicted) {
171 | log.WithFields(log.Fields{
172 | "level": "error",
173 | "place": "neuron",
174 | "method": "Accuracy",
175 | "msg": "accuracy between actual and predicted slices of values",
176 | "actualLen": len(actual),
177 | "predictedLen": len(predicted),
178 | }).Error("Failed to compute accuracy between actual values and predictions: different length.")
179 | return -1, -1.0
180 | }
181 |
182 | // init result
183 | var correct int = 0
184 |
185 | for index, value := range actual {
186 | if value == predicted[index] {
187 | correct++
188 | }
189 | }
190 |
191 | // return correct
192 | return correct, float64(correct) / float64(len(actual)) * 100.0
193 |
194 | }
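
// A single-neuron perceptron sketch tying this file's API together — sketchPerceptron
// is a hypothetical helper; the learning rate and epoch count are illustrative, and
// evaluating on the training set itself is only for brevity:
func sketchPerceptron(patterns []Pattern) (int, float64) {
	var neuron NeuronUnit
	RandomNeuronInit(&neuron, len(patterns[0].Features))
	neuron.Lrate = 0.01 // RandomNeuronInit leaves Lrate near zero, so pick a usable rate
	TrainNeuron(&neuron, patterns, 100, 0)
	var actual, predicted []float64
	for _, p := range patterns {
		actual = append(actual, p.SingleExpectation)
		predicted = append(predicted, Predict(&neuron, &p))
	}
	return Accuracy(actual, predicted)
}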
195 |
--------------------------------------------------------------------------------
/model/neural/pattern.go:
--------------------------------------------------------------------------------
1 | // Package neural provides structs to represent the most common neural network models and algorithms to train / test them.
2 | package neural
3 |
4 | import (
5 | // sys import
6 | "encoding/csv"
7 | "io"
8 | "io/ioutil"
9 | "os"
10 | "strings"
11 |
12 | // third party import
13 | log "github.com/sirupsen/logrus"
14 |
15 | // this repo internal import
16 | mu "github.com/made2591/go-perceptron-go/util"
17 | )
18 |
19 | func init() {
20 | // Output to stdout instead of the default stderr
21 | log.SetOutput(os.Stdout)
22 | // Only log the info severity or above.
23 | log.SetLevel(log.InfoLevel)
24 | }
25 |
26 | // Pattern struct represents one pattern with its features and desired value(s)
27 | type Pattern struct {
28 |
29 | // Features that describe the pattern
30 | Features []float64
31 | // Raw (usually string) expected value
32 | SingleRawExpectation string
33 | // Numeric representation of expected value
34 | SingleExpectation float64
35 | // Numeric representation of expected values (multi-output tasks)
36 | MultipleExpectation []float64
37 |
38 | }
39 |
40 | // #######################################################################################
41 |
42 | // LoadPatternsFromCSVFile loads a CSV dataset into a slice of Pattern. It returns the patterns, an error and the distinct raw class values found.
43 | func LoadPatternsFromCSVFile(filePath string) ([]Pattern, error, []string) {
44 |
45 | // init patterns
46 | var patterns []Pattern
47 |
48 | // read content ([]byte), check error (error)
49 | fileContent, err := ioutil.ReadFile(filePath)
50 |
51 | if err != nil {
52 | log.WithFields(log.Fields{
53 | "level": "fatal",
54 | "place": "patterns",
55 | "method": "LoadPatternsFromCSVFile",
56 | "msg": "reading file in specific path",
57 | "filePath": filePath,
58 | "error": err,
59 | }).Fatal("Failed to read file in specified path.")
60 | return patterns, err, nil
61 | }
62 |
63 | // create pointer to read file
64 | pointer := csv.NewReader(strings.NewReader(string(fileContent)))
65 |
66 | var lineCounter int = 0
67 | // for each record in file
68 | for {
69 |
70 | // read line, check error
71 | line, err := pointer.Read()
72 |
73 | log.WithFields(log.Fields{
74 | "level": "debug",
75 | "place": "patterns",
76 | "method": "LoadPatternsFromCSVFile",
77 | "line": line,
78 | }).Debug()
79 |
80 | // if end of file reached, exit loop
81 | if err == io.EOF {
82 | log.WithFields(log.Fields{
83 | "level": "info",
84 | "place": "patterns",
85 | "method": "LoadPatternsFromCSVFile",
86 | "readData": len(patterns),
87 | "msg": "File reading completed.",
88 | }).Info("File reading completed.")
89 | break
90 | }
91 |
92 | // if another error is encountered, stop and return
93 | if err != nil {
94 | log.WithFields(log.Fields{
95 | "level": "error",
96 | "place": "patterns",
97 | "method": "LoadPatternsFromCSVFile",
98 | "msg": "parsing file in specific line number",
99 | "lineCounter": lineCounter,
100 | "error": err,
101 | }).Error("Failed to parse line.")
102 | return patterns, err, nil
103 | }
104 |
105 | // line values cast to float64
106 | var floatingValues []float64 = mu.StringToFloat(line, 1, -1.0)
107 |
108 | // add casted pattern to training set
109 | patterns = append(
110 | patterns,
111 | Pattern{Features: floatingValues, SingleRawExpectation: line[len(line)-1]})
112 |
113 | lineCounter = lineCounter + 1
114 |
115 | }
116 |
117 | // cast expected values to float64 numeric values
118 | mapped := RawExpectedConversion(patterns)
119 |
120 | // return patterns
121 | return patterns, nil, mapped
122 |
123 | }
124 |
125 | // RawExpectedConversion converts the (string) raw expected values of patterns in
126 | // training / testing sets to float64 numeric values.
127 | // It mutates the passed patterns in place and returns the distinct raw values found.
128 | func RawExpectedConversion(patterns []Pattern) []string {
129 |
130 | // collect expected string values
131 | var rawExpectedValues []string
132 |
133 | // for each pattern in training set
134 | for _, pattern := range patterns {
135 | check, _ := mu.StringInSlice(pattern.SingleRawExpectation, rawExpectedValues)
136 | if !check {
137 | rawExpectedValues = append(rawExpectedValues, pattern.SingleRawExpectation)
138 | }
139 | log.WithFields(log.Fields{
140 | "level": "debug",
141 | "place": "patterns",
142 | "msg": "raw class exctraction",
143 | "rawExpectedAdded": pattern.SingleRawExpectation,
144 | }).Debug()
145 | }
146 |
147 | log.WithFields(log.Fields{
148 | "level": "info",
149 | "place": "patterns",
150 | "msg": "raw class exctraction completed",
151 | "numberOfRawUnique": len(rawExpectedValues),
152 | }).Info("Complete SingleRawExpectation value set filling.")
153 |
154 | // for each pattern in training set
155 | for index := range patterns {
156 | // find mapped int value (using index of rawExpectedValues slice)
157 | for mapped, value := range rawExpectedValues {
158 | if strings.Compare(value, patterns[index].SingleRawExpectation) == 0 {
159 | // conversion to float64 value
160 | patterns[index].SingleExpectation = float64(mapped)
161 | }
162 | }
163 | }
164 |
165 | return rawExpectedValues
166 |
167 | }
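
// Worked example of the mapping above: for raw labels
// ["Iris-setosa", "Iris-virginica", "Iris-setosa"] the function returns
// ["Iris-setosa", "Iris-virginica"] and sets SingleExpectation to 0.0, 1.0, 0.0
// respectively: each label maps to the index of its first appearance.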
168 |
169 | // CreateRandomPatternArray creates k random Patterns: each concatenates two random d-bit binary integers as Features and carries their (d+1)-bit binary sum as MultipleExpectation.
170 | func CreateRandomPatternArray(d int, k int) []Pattern {
171 |
172 | // init patterns
173 | var patterns []Pattern
174 |
175 | // for k times
176 | var i = 0
177 | for i < k {
178 |
179 | a := mu.GenerateRandomIntWithBinaryDim(d)
180 | b := mu.GenerateRandomIntWithBinaryDim(d)
181 | c := a + b
182 |
183 | log.WithFields(log.Fields{
184 | "ai": a,
185 | "as": mu.ConvertIntToBinary(a, d),
186 | "bi": b,
187 | "bs": mu.ConvertIntToBinary(b, d),
188 | "ci": c,
189 | "cs": mu.ConvertIntToBinary(c, d+1),
190 | }).Debug()
191 |
192 | ab := mu.ConvertIntToBinary(a, d)
193 | bb := mu.ConvertIntToBinary(b, d)
194 | ab = append(ab, bb...)
197 |
198 | // add casted pattern to training set
199 | patterns = append(
200 | patterns,
201 | Pattern{Features: ab,
202 | MultipleExpectation: mu.ConvertIntToBinary(c, d+1)})
203 |
204 | i = i + 1
205 |
206 | }
207 |
208 | // return patterns
209 | return patterns
210 |
211 | }
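
// Worked example (assuming mu.ConvertIntToBinary emits bits most significant first):
// with d = 2, a = 1 and b = 2 give Features = [0 1] + [1 0] = [0 1 1 0] and
// MultipleExpectation = ConvertIntToBinary(3, 3) = [0 1 1].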
--------------------------------------------------------------------------------
/model/neural/transferFunction.go:
--------------------------------------------------------------------------------
1 | // Package neural provides structs to represent the most common neural network models and algorithms to train / test them.
2 | package neural
3 |
4 | import (
5 |
6 | "math"
7 |
8 | )
9 |
10 | // transferFunction defines the signature of a transfer (activation) function
11 | type transferFunction func(float64) float64
12 |
13 | // different types of transfer functions and their derivatives
14 |
15 | func HeavysideTransfer(d float64) float64 {
16 |
17 | if d >= 0.0 {
18 | return 1.0
19 | }
20 | return 0.0
21 |
22 | }
23 |
24 | func HeavysideTransferDerivate(d float64) float64 {
25 |
26 | return 1.0 // surrogate derivative: the true derivative is 0 almost everywhere
27 |
28 | }
29 |
30 | func SigmoidalTransfer(d float64) float64 {
31 |
32 | return 1 / (1 + math.Exp(-d))
33 |
34 | }
35 |
36 | func SigmoidalTransferDerivate(d float64) float64 {
37 |
38 | return d * (1 - d) // sigmoid derivative expressed in its output d: s'(x) = s(x) * (1 - s(x))
39 |
40 | }
41 |
42 | func HyperbolicTransfer(d float64) float64 {
43 |
44 | return math.Tanh(d)
45 |
46 | }
47 |
48 | func HyperbolicTransferDerivate(d float64) float64 {
49 |
50 | return 1 - math.Pow(d, 2)
51 |
52 | }
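
// Note on the convention used throughout: both derivative functions take the unit's
// already-activated output d, not the pre-activation input. tanh'(x) = 1 - tanh(x)^2
// yields 1 - d^2 when d = tanh(x), and s'(x) = s(x) * (1 - s(x)) yields d * (1 - d)
// when d = s(x). BackPropagate relies on this when it applies T_func_d to output Values.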
--------------------------------------------------------------------------------
/res/iris.all_data.csv:
--------------------------------------------------------------------------------
1 | 5.1,3.4,1.5,0.2,Iris-setosa
2 | 6.3,2.5,5,1.9,Iris-virginica
3 | 5.9,3,4.2,1.5,Iris-versicolor
4 | 5.7,2.6,3.5,1,Iris-versicolor
5 | 7.7,2.8,6.7,2,Iris-virginica
6 | 6,3,4.8,1.8,Iris-virginica
7 | 6.7,3.3,5.7,2.5,Iris-virginica
8 | 5.8,2.7,4.1,1,Iris-versicolor
9 | 5.5,2.3,4,1.3,Iris-versicolor
10 | 6.6,3,4.4,1.4,Iris-versicolor
11 | 5.1,3.7,1.5,0.4,Iris-setosa
12 | 4.6,3.6,1,0.2,Iris-setosa
13 | 7.7,2.6,6.9,2.3,Iris-virginica
14 | 6.1,2.8,4,1.3,Iris-versicolor
15 | 4.9,2.4,3.3,1,Iris-versicolor
16 | 6.8,3,5.5,2.1,Iris-virginica
17 | 4.9,3.1,1.5,0.1,Iris-setosa
18 | 6.4,2.7,5.3,1.9,Iris-virginica
19 | 5.5,2.4,3.8,1.1,Iris-versicolor
20 | 5.6,3,4.5,1.5,Iris-versicolor
21 | 5.8,2.8,5.1,2.4,Iris-virginica
22 | 5.8,2.7,5.1,1.9,Iris-virginica
23 | 6.4,2.8,5.6,2.2,Iris-virginica
24 | 6.7,3,5.2,2.3,Iris-virginica
25 | 4.6,3.1,1.5,0.2,Iris-setosa
26 | 6.9,3.1,5.4,2.1,Iris-virginica
27 | 5.9,3,5.1,1.8,Iris-virginica
28 | 6.2,2.8,4.8,1.8,Iris-virginica
29 | 5.1,3.5,1.4,0.3,Iris-setosa
30 | 5.8,2.7,3.9,1.2,Iris-versicolor
31 | 6.7,3.1,4.7,1.5,Iris-versicolor
32 | 6.3,2.5,4.9,1.5,Iris-versicolor
33 | 6.8,2.8,4.8,1.4,Iris-versicolor
34 | 6.2,2.2,4.5,1.5,Iris-versicolor
35 | 4.9,3.1,1.5,0.1,Iris-setosa
36 | 5.2,4.1,1.5,0.1,Iris-setosa
37 | 6.4,3.2,5.3,2.3,Iris-virginica
38 | 5,2.3,3.3,1,Iris-versicolor
39 | 5.6,3,4.1,1.3,Iris-versicolor
40 | 6.3,3.4,5.6,2.4,Iris-virginica
41 | 6.3,2.3,4.4,1.3,Iris-versicolor
42 | 6.3,2.8,5.1,1.5,Iris-virginica
43 | 5.1,3.3,1.7,0.5,Iris-setosa
44 | 6.3,2.9,5.6,1.8,Iris-virginica
45 | 5,3.4,1.5,0.2,Iris-setosa
46 | 7.2,3,5.8,1.6,Iris-virginica
47 | 6.4,3.2,4.5,1.5,Iris-versicolor
48 | 5,2,3.5,1,Iris-versicolor
49 | 5.5,2.5,4,1.3,Iris-versicolor
50 | 5.1,3.8,1.6,0.2,Iris-setosa
51 | 6.1,2.6,5.6,1.4,Iris-virginica
52 | 6.2,2.9,4.3,1.3,Iris-versicolor
53 | 7.6,3,6.6,2.1,Iris-virginica
54 | 5.3,3.7,1.5,0.2,Iris-setosa
55 | 5.4,3.9,1.3,0.4,Iris-setosa
56 | 6,3.4,4.5,1.6,Iris-versicolor
57 | 5.2,3.5,1.5,0.2,Iris-setosa
58 | 4.6,3.4,1.4,0.3,Iris-setosa
59 | 4.9,2.5,4.5,1.7,Iris-virginica
60 | 6.1,3,4.6,1.4,Iris-versicolor
61 | 5.1,3.5,1.4,0.2,Iris-setosa
62 | 5.4,3.7,1.5,0.2,Iris-setosa
63 | 6.8,3.2,5.9,2.3,Iris-virginica
64 | 6.4,2.8,5.6,2.1,Iris-virginica
65 | 5,3.5,1.6,0.6,Iris-setosa
66 | 5,3,1.6,0.2,Iris-setosa
67 | 4.9,3,1.4,0.2,Iris-setosa
68 | 4.8,3.4,1.6,0.2,Iris-setosa
69 | 6.5,3,5.2,2,Iris-virginica
70 | 5.4,3.9,1.7,0.4,Iris-setosa
71 | 5,3.2,1.2,0.2,Iris-setosa
72 | 7.7,3.8,6.7,2.2,Iris-virginica
73 | 7.3,2.9,6.3,1.8,Iris-virginica
74 | 6.7,3.1,5.6,2.4,Iris-virginica
75 | 5.7,2.8,4.1,1.3,Iris-versicolor
76 | 5.5,2.4,3.7,1,Iris-versicolor
77 | 6.5,3,5.8,2.2,Iris-virginica
78 | 6.6,2.9,4.6,1.3,Iris-versicolor
79 | 5.7,3.8,1.7,0.3,Iris-setosa
80 | 4.8,3.1,1.6,0.2,Iris-setosa
81 | 5,3.3,1.4,0.2,Iris-setosa
82 | 5.7,2.9,4.2,1.3,Iris-versicolor
83 | 4.7,3.2,1.3,0.2,Iris-setosa
84 | 4.4,3.2,1.3,0.2,Iris-setosa
85 | 4.8,3,1.4,0.1,Iris-setosa
86 | 6.4,3.1,5.5,1.8,Iris-virginica
87 | 4.3,3,1.1,0.1,Iris-setosa
88 | 5.8,2.7,5.1,1.9,Iris-virginica
89 | 5.6,2.9,3.6,1.3,Iris-versicolor
90 | 7.2,3.6,6.1,2.5,Iris-virginica
91 | 5.7,2.5,5,2,Iris-virginica
92 | 6.9,3.1,4.9,1.5,Iris-versicolor
93 | 6.5,3.2,5.1,2,Iris-virginica
94 | 6,2.2,4,1,Iris-versicolor
95 | 4.9,3.1,1.5,0.1,Iris-setosa
96 | 5.1,3.8,1.9,0.4,Iris-setosa
97 | 5.5,4.2,1.4,0.2,Iris-setosa
98 | 5.8,2.6,4,1.2,Iris-versicolor
99 | 7,3.2,4.7,1.4,Iris-versicolor
100 | 5.2,3.4,1.4,0.2,Iris-setosa
101 | 5.2,2.7,3.9,1.4,Iris-versicolor
102 | 6.3,3.3,6,2.5,Iris-virginica
103 | 6.7,3,5,1.7,Iris-versicolor
104 | 6,2.7,5.1,1.6,Iris-versicolor
105 | 7.2,3.2,6,1.8,Iris-virginica
106 | 6.7,2.5,5.8,1.8,Iris-virginica
107 | 5.7,3,4.2,1.2,Iris-versicolor
108 | 5.6,2.7,4.2,1.3,Iris-versicolor
109 | 6.5,2.8,4.6,1.5,Iris-versicolor
110 | 6.1,2.9,4.7,1.4,Iris-versicolor
111 | 6,2.9,4.5,1.5,Iris-versicolor
112 | 6.9,3.2,5.7,2.3,Iris-virginica
113 | 5.8,4,1.2,0.2,Iris-setosa
114 | 5.9,3.2,4.8,1.8,Iris-versicolor
115 | 5.4,3.4,1.7,0.2,Iris-setosa
116 | 5.6,2.5,3.9,1.1,Iris-versicolor
117 | 6.4,2.9,4.3,1.3,Iris-versicolor
118 | 7.9,3.8,6.4,2,Iris-virginica
119 | 5.4,3,4.5,1.5,Iris-versicolor
120 | 5.5,3.5,1.3,0.2,Iris-setosa
121 | 5.7,4.4,1.5,0.4,Iris-setosa
122 | 5,3.4,1.6,0.4,Iris-setosa
123 | 6.1,2.8,4.7,1.2,Iris-versicolor
124 | 6.3,3.3,4.7,1.6,Iris-versicolor
125 | 5,3.6,1.4,0.2,Iris-setosa
126 | 4.4,3,1.3,0.2,Iris-setosa
127 | 4.7,3.2,1.6,0.2,Iris-setosa
128 | 5.1,2.5,3,1.1,Iris-versicolor
129 | 6.5,3,5.5,1.8,Iris-virginica
130 | 7.4,2.8,6.1,1.9,Iris-virginica
131 | 5.5,2.6,4.4,1.2,Iris-versicolor
132 | 6.2,3.4,5.4,2.3,Iris-virginica
133 | 5,3.5,1.3,0.3,Iris-setosa
134 | 7.7,3,6.1,2.3,Iris-virginica
135 | 4.6,3.2,1.4,0.2,Iris-setosa
136 | 4.8,3.4,1.9,0.2,Iris-setosa
137 | 5.7,2.8,4.5,1.3,Iris-versicolor
138 | 6.7,3.3,5.7,2.1,Iris-virginica
139 | 5.4,3.4,1.5,0.4,Iris-setosa
140 | 4.4,2.9,1.4,0.2,Iris-setosa
141 | 4.5,2.3,1.3,0.3,Iris-setosa
142 | 6,2.2,5,1.5,Iris-virginica
143 | 6.9,3.1,5.1,2.3,Iris-virginica
144 | 6.3,2.7,4.9,1.8,Iris-virginica
145 | 6.7,3.1,4.4,1.4,Iris-versicolor
146 | 5.6,2.8,4.9,2,Iris-virginica
147 | 5.1,3.8,1.5,0.3,Iris-setosa
148 | 4.8,3,1.4,0.3,Iris-setosa
149 | 7.1,3,5.9,2.1,Iris-virginica
150 | 6.1,3,4.9,1.8,Iris-virginica
--------------------------------------------------------------------------------
/res/sonar.all_data.csv:
--------------------------------------------------------------------------------
1 | 0.0200,0.0371,0.0428,0.0207,0.0954,0.0986,0.1539,0.1601,0.3109,0.2111,0.1609,0.1582,0.2238,0.0645,0.0660,0.2273,0.3100,0.2999,0.5078,0.4797,0.5783,0.5071,0.4328,0.5550,0.6711,0.6415,0.7104,0.8080,0.6791,0.3857,0.1307,0.2604,0.5121,0.7547,0.8537,0.8507,0.6692,0.6097,0.4943,0.2744,0.0510,0.2834,0.2825,0.4256,0.2641,0.1386,0.1051,0.1343,0.0383,0.0324,0.0232,0.0027,0.0065,0.0159,0.0072,0.0167,0.0180,0.0084,0.0090,0.0032,R
2 | 0.0453,0.0523,0.0843,0.0689,0.1183,0.2583,0.2156,0.3481,0.3337,0.2872,0.4918,0.6552,0.6919,0.7797,0.7464,0.9444,1.0000,0.8874,0.8024,0.7818,0.5212,0.4052,0.3957,0.3914,0.3250,0.3200,0.3271,0.2767,0.4423,0.2028,0.3788,0.2947,0.1984,0.2341,0.1306,0.4182,0.3835,0.1057,0.1840,0.1970,0.1674,0.0583,0.1401,0.1628,0.0621,0.0203,0.0530,0.0742,0.0409,0.0061,0.0125,0.0084,0.0089,0.0048,0.0094,0.0191,0.0140,0.0049,0.0052,0.0044,R
3 | 0.0262,0.0582,0.1099,0.1083,0.0974,0.2280,0.2431,0.3771,0.5598,0.6194,0.6333,0.7060,0.5544,0.5320,0.6479,0.6931,0.6759,0.7551,0.8929,0.8619,0.7974,0.6737,0.4293,0.3648,0.5331,0.2413,0.5070,0.8533,0.6036,0.8514,0.8512,0.5045,0.1862,0.2709,0.4232,0.3043,0.6116,0.6756,0.5375,0.4719,0.4647,0.2587,0.2129,0.2222,0.2111,0.0176,0.1348,0.0744,0.0130,0.0106,0.0033,0.0232,0.0166,0.0095,0.0180,0.0244,0.0316,0.0164,0.0095,0.0078,R
4 | 0.0100,0.0171,0.0623,0.0205,0.0205,0.0368,0.1098,0.1276,0.0598,0.1264,0.0881,0.1992,0.0184,0.2261,0.1729,0.2131,0.0693,0.2281,0.4060,0.3973,0.2741,0.3690,0.5556,0.4846,0.3140,0.5334,0.5256,0.2520,0.2090,0.3559,0.6260,0.7340,0.6120,0.3497,0.3953,0.3012,0.5408,0.8814,0.9857,0.9167,0.6121,0.5006,0.3210,0.3202,0.4295,0.3654,0.2655,0.1576,0.0681,0.0294,0.0241,0.0121,0.0036,0.0150,0.0085,0.0073,0.0050,0.0044,0.0040,0.0117,R
5 | 0.0762,0.0666,0.0481,0.0394,0.0590,0.0649,0.1209,0.2467,0.3564,0.4459,0.4152,0.3952,0.4256,0.4135,0.4528,0.5326,0.7306,0.6193,0.2032,0.4636,0.4148,0.4292,0.5730,0.5399,0.3161,0.2285,0.6995,1.0000,0.7262,0.4724,0.5103,0.5459,0.2881,0.0981,0.1951,0.4181,0.4604,0.3217,0.2828,0.2430,0.1979,0.2444,0.1847,0.0841,0.0692,0.0528,0.0357,0.0085,0.0230,0.0046,0.0156,0.0031,0.0054,0.0105,0.0110,0.0015,0.0072,0.0048,0.0107,0.0094,R
6 | 0.0286,0.0453,0.0277,0.0174,0.0384,0.0990,0.1201,0.1833,0.2105,0.3039,0.2988,0.4250,0.6343,0.8198,1.0000,0.9988,0.9508,0.9025,0.7234,0.5122,0.2074,0.3985,0.5890,0.2872,0.2043,0.5782,0.5389,0.3750,0.3411,0.5067,0.5580,0.4778,0.3299,0.2198,0.1407,0.2856,0.3807,0.4158,0.4054,0.3296,0.2707,0.2650,0.0723,0.1238,0.1192,0.1089,0.0623,0.0494,0.0264,0.0081,0.0104,0.0045,0.0014,0.0038,0.0013,0.0089,0.0057,0.0027,0.0051,0.0062,R
7 | 0.0317,0.0956,0.1321,0.1408,0.1674,0.1710,0.0731,0.1401,0.2083,0.3513,0.1786,0.0658,0.0513,0.3752,0.5419,0.5440,0.5150,0.4262,0.2024,0.4233,0.7723,0.9735,0.9390,0.5559,0.5268,0.6826,0.5713,0.5429,0.2177,0.2149,0.5811,0.6323,0.2965,0.1873,0.2969,0.5163,0.6153,0.4283,0.5479,0.6133,0.5017,0.2377,0.1957,0.1749,0.1304,0.0597,0.1124,0.1047,0.0507,0.0159,0.0195,0.0201,0.0248,0.0131,0.0070,0.0138,0.0092,0.0143,0.0036,0.0103,R
8 | 0.0519,0.0548,0.0842,0.0319,0.1158,0.0922,0.1027,0.0613,0.1465,0.2838,0.2802,0.3086,0.2657,0.3801,0.5626,0.4376,0.2617,0.1199,0.6676,0.9402,0.7832,0.5352,0.6809,0.9174,0.7613,0.8220,0.8872,0.6091,0.2967,0.1103,0.1318,0.0624,0.0990,0.4006,0.3666,0.1050,0.1915,0.3930,0.4288,0.2546,0.1151,0.2196,0.1879,0.1437,0.2146,0.2360,0.1125,0.0254,0.0285,0.0178,0.0052,0.0081,0.0120,0.0045,0.0121,0.0097,0.0085,0.0047,0.0048,0.0053,R
9 | 0.0223,0.0375,0.0484,0.0475,0.0647,0.0591,0.0753,0.0098,0.0684,0.1487,0.1156,0.1654,0.3833,0.3598,0.1713,0.1136,0.0349,0.3796,0.7401,0.9925,0.9802,0.8890,0.6712,0.4286,0.3374,0.7366,0.9611,0.7353,0.4856,0.1594,0.3007,0.4096,0.3170,0.3305,0.3408,0.2186,0.2463,0.2726,0.1680,0.2792,0.2558,0.1740,0.2121,0.1099,0.0985,0.1271,0.1459,0.1164,0.0777,0.0439,0.0061,0.0145,0.0128,0.0145,0.0058,0.0049,0.0065,0.0093,0.0059,0.0022,R
10 | 0.0164,0.0173,0.0347,0.0070,0.0187,0.0671,0.1056,0.0697,0.0962,0.0251,0.0801,0.1056,0.1266,0.0890,0.0198,0.1133,0.2826,0.3234,0.3238,0.4333,0.6068,0.7652,0.9203,0.9719,0.9207,0.7545,0.8289,0.8907,0.7309,0.6896,0.5829,0.4935,0.3101,0.0306,0.0244,0.1108,0.1594,0.1371,0.0696,0.0452,0.0620,0.1421,0.1597,0.1384,0.0372,0.0688,0.0867,0.0513,0.0092,0.0198,0.0118,0.0090,0.0223,0.0179,0.0084,0.0068,0.0032,0.0035,0.0056,0.0040,R
11 | 0.0039,0.0063,0.0152,0.0336,0.0310,0.0284,0.0396,0.0272,0.0323,0.0452,0.0492,0.0996,0.1424,0.1194,0.0628,0.0907,0.1177,0.1429,0.1223,0.1104,0.1847,0.3715,0.4382,0.5707,0.6654,0.7476,0.7654,0.8555,0.9720,0.9221,0.7502,0.7209,0.7757,0.6055,0.5021,0.4499,0.3947,0.4281,0.4427,0.3749,0.1972,0.0511,0.0793,0.1269,0.1533,0.0690,0.0402,0.0534,0.0228,0.0073,0.0062,0.0062,0.0120,0.0052,0.0056,0.0093,0.0042,0.0003,0.0053,0.0036,R
12 | 0.0123,0.0309,0.0169,0.0313,0.0358,0.0102,0.0182,0.0579,0.1122,0.0835,0.0548,0.0847,0.2026,0.2557,0.1870,0.2032,0.1463,0.2849,0.5824,0.7728,0.7852,0.8515,0.5312,0.3653,0.5973,0.8275,1.0000,0.8673,0.6301,0.4591,0.3940,0.2576,0.2817,0.2641,0.2757,0.2698,0.3994,0.4576,0.3940,0.2522,0.1782,0.1354,0.0516,0.0337,0.0894,0.0861,0.0872,0.0445,0.0134,0.0217,0.0188,0.0133,0.0265,0.0224,0.0074,0.0118,0.0026,0.0092,0.0009,0.0044,R
13 | 0.0079,0.0086,0.0055,0.0250,0.0344,0.0546,0.0528,0.0958,0.1009,0.1240,0.1097,0.1215,0.1874,0.3383,0.3227,0.2723,0.3943,0.6432,0.7271,0.8673,0.9674,0.9847,0.9480,0.8036,0.6833,0.5136,0.3090,0.0832,0.4019,0.2344,0.1905,0.1235,0.1717,0.2351,0.2489,0.3649,0.3382,0.1589,0.0989,0.1089,0.1043,0.0839,0.1391,0.0819,0.0678,0.0663,0.1202,0.0692,0.0152,0.0266,0.0174,0.0176,0.0127,0.0088,0.0098,0.0019,0.0059,0.0058,0.0059,0.0032,R
14 | 0.0090,0.0062,0.0253,0.0489,0.1197,0.1589,0.1392,0.0987,0.0955,0.1895,0.1896,0.2547,0.4073,0.2988,0.2901,0.5326,0.4022,0.1571,0.3024,0.3907,0.3542,0.4438,0.6414,0.4601,0.6009,0.8690,0.8345,0.7669,0.5081,0.4620,0.5380,0.5375,0.3844,0.3601,0.7402,0.7761,0.3858,0.0667,0.3684,0.6114,0.3510,0.2312,0.2195,0.3051,0.1937,0.1570,0.0479,0.0538,0.0146,0.0068,0.0187,0.0059,0.0095,0.0194,0.0080,0.0152,0.0158,0.0053,0.0189,0.0102,R
15 | 0.0124,0.0433,0.0604,0.0449,0.0597,0.0355,0.0531,0.0343,0.1052,0.2120,0.1640,0.1901,0.3026,0.2019,0.0592,0.2390,0.3657,0.3809,0.5929,0.6299,0.5801,0.4574,0.4449,0.3691,0.6446,0.8940,0.8978,0.4980,0.3333,0.2350,0.1553,0.3666,0.4340,0.3082,0.3024,0.4109,0.5501,0.4129,0.5499,0.5018,0.3132,0.2802,0.2351,0.2298,0.1155,0.0724,0.0621,0.0318,0.0450,0.0167,0.0078,0.0083,0.0057,0.0174,0.0188,0.0054,0.0114,0.0196,0.0147,0.0062,R
16 | 0.0298,0.0615,0.0650,0.0921,0.1615,0.2294,0.2176,0.2033,0.1459,0.0852,0.2476,0.3645,0.2777,0.2826,0.3237,0.4335,0.5638,0.4555,0.4348,0.6433,0.3932,0.1989,0.3540,0.9165,0.9371,0.4620,0.2771,0.6613,0.8028,0.4200,0.5192,0.6962,0.5792,0.8889,0.7863,0.7133,0.7615,0.4401,0.3009,0.3163,0.2809,0.2898,0.0526,0.1867,0.1553,0.1633,0.1252,0.0748,0.0452,0.0064,0.0154,0.0031,0.0153,0.0071,0.0212,0.0076,0.0152,0.0049,0.0200,0.0073,R
17 | 0.0352,0.0116,0.0191,0.0469,0.0737,0.1185,0.1683,0.1541,0.1466,0.2912,0.2328,0.2237,0.2470,0.1560,0.3491,0.3308,0.2299,0.2203,0.2493,0.4128,0.3158,0.6191,0.5854,0.3395,0.2561,0.5599,0.8145,0.6941,0.6985,0.8660,0.5930,0.3664,0.6750,0.8697,0.7837,0.7552,0.5789,0.4713,0.1252,0.6087,0.7322,0.5977,0.3431,0.1803,0.2378,0.3424,0.2303,0.0689,0.0216,0.0469,0.0426,0.0346,0.0158,0.0154,0.0109,0.0048,0.0095,0.0015,0.0073,0.0067,R
18 | 0.0192,0.0607,0.0378,0.0774,0.1388,0.0809,0.0568,0.0219,0.1037,0.1186,0.1237,0.1601,0.3520,0.4479,0.3769,0.5761,0.6426,0.6790,0.7157,0.5466,0.5399,0.6362,0.7849,0.7756,0.5780,0.4862,0.4181,0.2457,0.0716,0.0613,0.1816,0.4493,0.5976,0.3785,0.2495,0.5771,0.8852,0.8409,0.3570,0.3133,0.6096,0.6378,0.2709,0.1419,0.1260,0.1288,0.0790,0.0829,0.0520,0.0216,0.0360,0.0331,0.0131,0.0120,0.0108,0.0024,0.0045,0.0037,0.0112,0.0075,R
19 | 0.0270,0.0092,0.0145,0.0278,0.0412,0.0757,0.1026,0.1138,0.0794,0.1520,0.1675,0.1370,0.1361,0.1345,0.2144,0.5354,0.6830,0.5600,0.3093,0.3226,0.4430,0.5573,0.5782,0.6173,0.8132,0.9819,0.9823,0.9166,0.7423,0.7736,0.8473,0.7352,0.6671,0.6083,0.6239,0.5972,0.5715,0.5242,0.2924,0.1536,0.2003,0.2031,0.2207,0.1778,0.1353,0.1373,0.0749,0.0472,0.0325,0.0179,0.0045,0.0084,0.0010,0.0018,0.0068,0.0039,0.0120,0.0132,0.0070,0.0088,R
20 | 0.0126,0.0149,0.0641,0.1732,0.2565,0.2559,0.2947,0.4110,0.4983,0.5920,0.5832,0.5419,0.5472,0.5314,0.4981,0.6985,0.8292,0.7839,0.8215,0.9363,1.0000,0.9224,0.7839,0.5470,0.4562,0.5922,0.5448,0.3971,0.0882,0.2385,0.2005,0.0587,0.2544,0.2009,0.0329,0.1547,0.1212,0.2446,0.3171,0.3195,0.3051,0.0836,0.1266,0.1381,0.1136,0.0516,0.0073,0.0278,0.0372,0.0121,0.0153,0.0092,0.0035,0.0098,0.0121,0.0006,0.0181,0.0094,0.0116,0.0063,R
21 | 0.0473,0.0509,0.0819,0.1252,0.1783,0.3070,0.3008,0.2362,0.3830,0.3759,0.3021,0.2909,0.2301,0.1411,0.1582,0.2430,0.4474,0.5964,0.6744,0.7969,0.8319,0.7813,0.8626,0.7369,0.4122,0.2596,0.3392,0.3788,0.4488,0.6281,0.7449,0.7328,0.7704,0.7870,0.6048,0.5860,0.6385,0.7279,0.6286,0.5316,0.4069,0.1791,0.1625,0.2527,0.1903,0.1643,0.0604,0.0209,0.0436,0.0175,0.0107,0.0193,0.0118,0.0064,0.0042,0.0054,0.0049,0.0082,0.0028,0.0027,R
22 | 0.0664,0.0575,0.0842,0.0372,0.0458,0.0771,0.0771,0.1130,0.2353,0.1838,0.2869,0.4129,0.3647,0.1984,0.2840,0.4039,0.5837,0.6792,0.6086,0.4858,0.3246,0.2013,0.2082,0.1686,0.2484,0.2736,0.2984,0.4655,0.6990,0.7474,0.7956,0.7981,0.6715,0.6942,0.7440,0.8169,0.8912,1.0000,0.8753,0.7061,0.6803,0.5898,0.4618,0.3639,0.1492,0.1216,0.1306,0.1198,0.0578,0.0235,0.0135,0.0141,0.0190,0.0043,0.0036,0.0026,0.0024,0.0162,0.0109,0.0079,R
23 | 0.0099,0.0484,0.0299,0.0297,0.0652,0.1077,0.2363,0.2385,0.0075,0.1882,0.1456,0.1892,0.3176,0.1340,0.2169,0.2458,0.2589,0.2786,0.2298,0.0656,0.1441,0.1179,0.1668,0.1783,0.2476,0.2570,0.1036,0.5356,0.7124,0.6291,0.4756,0.6015,0.7208,0.6234,0.5725,0.7523,0.8712,0.9252,0.9709,0.9297,0.8995,0.7911,0.5600,0.2838,0.4407,0.5507,0.4331,0.2905,0.1981,0.0779,0.0396,0.0173,0.0149,0.0115,0.0202,0.0139,0.0029,0.0160,0.0106,0.0134,R
24 | 0.0115,0.0150,0.0136,0.0076,0.0211,0.1058,0.1023,0.0440,0.0931,0.0734,0.0740,0.0622,0.1055,0.1183,0.1721,0.2584,0.3232,0.3817,0.4243,0.4217,0.4449,0.4075,0.3306,0.4012,0.4466,0.5218,0.7552,0.9503,1.0000,0.9084,0.8283,0.7571,0.7262,0.6152,0.5680,0.5757,0.5324,0.3672,0.1669,0.0866,0.0646,0.1891,0.2683,0.2887,0.2341,0.1668,0.1015,0.1195,0.0704,0.0167,0.0107,0.0091,0.0016,0.0084,0.0064,0.0026,0.0029,0.0037,0.0070,0.0041,R
25 | 0.0293,0.0644,0.0390,0.0173,0.0476,0.0816,0.0993,0.0315,0.0736,0.0860,0.0414,0.0472,0.0835,0.0938,0.1466,0.0809,0.1179,0.2179,0.3326,0.3258,0.2111,0.2302,0.3361,0.4259,0.4609,0.2606,0.0874,0.2862,0.5606,0.8344,0.8096,0.7250,0.8048,0.9435,1.0000,0.8960,0.5516,0.3037,0.2338,0.2382,0.3318,0.3821,0.1575,0.2228,0.1582,0.1433,0.1634,0.1133,0.0567,0.0133,0.0170,0.0035,0.0052,0.0083,0.0078,0.0075,0.0105,0.0160,0.0095,0.0011,R
26 | 0.0201,0.0026,0.0138,0.0062,0.0133,0.0151,0.0541,0.0210,0.0505,0.1097,0.0841,0.0942,0.1204,0.0420,0.0031,0.0162,0.0624,0.2127,0.3436,0.3813,0.3825,0.4764,0.6313,0.7523,0.8675,0.8788,0.7901,0.8357,0.9631,0.9619,0.9236,0.8903,0.9708,0.9647,0.7892,0.5307,0.2718,0.1953,0.1374,0.3105,0.3790,0.4105,0.3355,0.2998,0.2748,0.2024,0.1043,0.0453,0.0337,0.0122,0.0072,0.0108,0.0070,0.0063,0.0030,0.0011,0.0007,0.0024,0.0057,0.0044,R
27 | 0.0151,0.0320,0.0599,0.1050,0.1163,0.1734,0.1679,0.1119,0.0889,0.1205,0.0847,0.1518,0.2305,0.2793,0.3404,0.4527,0.6950,0.8807,0.9154,0.7542,0.6736,0.7146,0.8335,0.7701,0.6993,0.6543,0.5040,0.4926,0.4992,0.4161,0.1631,0.0404,0.0637,0.2962,0.3609,0.1866,0.0476,0.1497,0.2405,0.1980,0.3175,0.2379,0.1716,0.1559,0.1556,0.0422,0.0493,0.0476,0.0219,0.0059,0.0086,0.0061,0.0015,0.0084,0.0128,0.0054,0.0011,0.0019,0.0023,0.0062,R
28 | 0.0177,0.0300,0.0288,0.0394,0.0630,0.0526,0.0688,0.0633,0.0624,0.0613,0.1680,0.3476,0.4561,0.5188,0.6308,0.7201,0.5153,0.3818,0.2644,0.3345,0.4865,0.6628,0.7389,0.9213,1.0000,0.7750,0.5593,0.6172,0.8635,0.6592,0.4770,0.4983,0.3330,0.3076,0.2876,0.2226,0.0794,0.0603,0.1049,0.0606,0.1530,0.0983,0.1643,0.1901,0.1107,0.1917,0.1467,0.0392,0.0356,0.0270,0.0168,0.0102,0.0122,0.0044,0.0075,0.0124,0.0099,0.0057,0.0032,0.0019,R
29 | 0.0100,0.0275,0.0190,0.0371,0.0416,0.0201,0.0314,0.0651,0.1896,0.2668,0.3376,0.3282,0.2432,0.1268,0.1278,0.4441,0.6795,0.7051,0.7966,0.9401,0.9857,0.8193,0.5789,0.6394,0.7043,0.6875,0.4081,0.1811,0.2064,0.3917,0.3791,0.2042,0.2227,0.3341,0.3984,0.5077,0.5534,0.3352,0.2723,0.2278,0.2044,0.1986,0.0835,0.0908,0.1380,0.1948,0.1211,0.0843,0.0589,0.0247,0.0118,0.0088,0.0104,0.0036,0.0088,0.0047,0.0117,0.0020,0.0091,0.0058,R
30 | 0.0189,0.0308,0.0197,0.0622,0.0080,0.0789,0.1440,0.1451,0.1789,0.2522,0.2607,0.3710,0.3906,0.2672,0.2716,0.4183,0.6988,0.5733,0.2226,0.2631,0.7473,0.7263,0.3393,0.2824,0.6053,0.5897,0.4967,0.8616,0.8339,0.4084,0.2268,0.1745,0.0507,0.1588,0.3040,0.1369,0.1605,0.2061,0.0734,0.0202,0.1638,0.1583,0.1830,0.1886,0.1008,0.0663,0.0183,0.0404,0.0108,0.0143,0.0091,0.0038,0.0096,0.0142,0.0190,0.0140,0.0099,0.0092,0.0052,0.0075,R
31 | 0.0240,0.0218,0.0324,0.0569,0.0330,0.0513,0.0897,0.0713,0.0569,0.0389,0.1934,0.2434,0.2906,0.2606,0.3811,0.4997,0.3015,0.3655,0.6791,0.7307,0.5053,0.4441,0.6987,0.8133,0.7781,0.8943,0.8929,0.8913,0.8610,0.8063,0.5540,0.2446,0.3459,0.1615,0.2467,0.5564,0.4681,0.0979,0.1582,0.0751,0.3321,0.3745,0.2666,0.1078,0.1418,0.1687,0.0738,0.0634,0.0144,0.0226,0.0061,0.0162,0.0146,0.0093,0.0112,0.0094,0.0054,0.0019,0.0066,0.0023,R
32 | 0.0084,0.0153,0.0291,0.0432,0.0951,0.0752,0.0414,0.0259,0.0692,0.1753,0.1970,0.1167,0.1683,0.0814,0.2179,0.5121,0.7231,0.7776,0.6222,0.3501,0.3733,0.2622,0.3776,0.7361,0.8673,0.8223,0.7772,0.7862,0.5652,0.3635,0.3534,0.3865,0.3370,0.1693,0.2627,0.3195,0.1388,0.1048,0.1681,0.1910,0.1174,0.0933,0.0856,0.0951,0.0986,0.0956,0.0426,0.0407,0.0106,0.0179,0.0056,0.0236,0.0114,0.0136,0.0117,0.0060,0.0058,0.0031,0.0072,0.0045,R
33 | 0.0195,0.0213,0.0058,0.0190,0.0319,0.0571,0.1004,0.0668,0.0691,0.0242,0.0728,0.0639,0.3002,0.3854,0.4767,0.4602,0.3175,0.4160,0.6428,1.0000,0.8631,0.5212,0.3156,0.5952,0.7732,0.6042,0.4375,0.5487,0.4720,0.6235,0.3851,0.1590,0.3891,0.5294,0.3504,0.4480,0.4041,0.5031,0.6475,0.5493,0.3548,0.2028,0.1882,0.0845,0.1315,0.1590,0.0562,0.0617,0.0343,0.0370,0.0261,0.0157,0.0074,0.0271,0.0203,0.0089,0.0095,0.0095,0.0021,0.0053,R
34 | 0.0442,0.0477,0.0049,0.0581,0.0278,0.0678,0.1664,0.1490,0.0974,0.1268,0.1109,0.2375,0.2007,0.2140,0.1109,0.2036,0.2468,0.6682,0.8345,0.8252,0.8017,0.8982,0.9664,0.8515,0.6626,0.3241,0.2054,0.5669,0.5726,0.4877,0.7532,0.7600,0.5185,0.4120,0.5560,0.5569,0.1336,0.3831,0.4611,0.4330,0.2556,0.1466,0.3489,0.2659,0.0944,0.1370,0.1344,0.0416,0.0719,0.0637,0.0210,0.0204,0.0216,0.0135,0.0055,0.0073,0.0080,0.0105,0.0059,0.0105,R
35 | 0.0311,0.0491,0.0692,0.0831,0.0079,0.0200,0.0981,0.1016,0.2025,0.0767,0.1767,0.2555,0.2812,0.2722,0.3227,0.3463,0.5395,0.7911,0.9064,0.8701,0.7672,0.2957,0.4148,0.6043,0.3178,0.3482,0.6158,0.8049,0.6289,0.4999,0.5830,0.6660,0.4124,0.1260,0.2487,0.4676,0.5382,0.3150,0.2139,0.1848,0.1679,0.2328,0.1015,0.0713,0.0615,0.0779,0.0761,0.0845,0.0592,0.0068,0.0089,0.0087,0.0032,0.0130,0.0188,0.0101,0.0229,0.0182,0.0046,0.0038,R
36 | 0.0206,0.0132,0.0533,0.0569,0.0647,0.1432,0.1344,0.2041,0.1571,0.1573,0.2327,0.1785,0.1507,0.1916,0.2061,0.2307,0.2360,0.1299,0.3812,0.5858,0.4497,0.4876,1.0000,0.8675,0.4718,0.5341,0.6197,0.7143,0.5605,0.3728,0.2481,0.1921,0.1386,0.3325,0.2883,0.3228,0.2607,0.2040,0.2396,0.1319,0.0683,0.0334,0.0716,0.0976,0.0787,0.0522,0.0500,0.0231,0.0221,0.0144,0.0307,0.0386,0.0147,0.0018,0.0100,0.0096,0.0077,0.0180,0.0109,0.0070,R
37 | 0.0094,0.0166,0.0398,0.0359,0.0681,0.0706,0.1020,0.0893,0.0381,0.1328,0.1303,0.0273,0.0644,0.0712,0.1204,0.0717,0.1224,0.2349,0.3684,0.3918,0.4925,0.8793,0.9606,0.8786,0.6905,0.6937,0.5674,0.6540,0.7802,0.7575,0.5836,0.6316,0.8108,0.9039,0.8647,0.6695,0.4027,0.2370,0.2685,0.3662,0.3267,0.2200,0.2996,0.2205,0.1163,0.0635,0.0465,0.0422,0.0174,0.0172,0.0134,0.0141,0.0191,0.0145,0.0065,0.0129,0.0217,0.0087,0.0077,0.0122,R
38 | 0.0333,0.0221,0.0270,0.0481,0.0679,0.0981,0.0843,0.1172,0.0759,0.0920,0.1475,0.0522,0.1119,0.0970,0.1174,0.1678,0.1642,0.1205,0.0494,0.1544,0.3485,0.6146,0.9146,0.9364,0.8677,0.8772,0.8553,0.8833,1.0000,0.8296,0.6601,0.5499,0.5716,0.6859,0.6825,0.5142,0.2750,0.1358,0.1551,0.2646,0.1994,0.1883,0.2746,0.1651,0.0575,0.0695,0.0598,0.0456,0.0021,0.0068,0.0036,0.0022,0.0032,0.0060,0.0054,0.0063,0.0143,0.0132,0.0051,0.0041,R
39 | 0.0123,0.0022,0.0196,0.0206,0.0180,0.0492,0.0033,0.0398,0.0791,0.0475,0.1152,0.0520,0.1192,0.1943,0.1840,0.2077,0.1956,0.1630,0.1218,0.1017,0.1354,0.3157,0.4645,0.5906,0.6776,0.8119,0.8594,0.9228,0.8387,0.7238,0.6292,0.5181,0.4629,0.5255,0.5147,0.3929,0.1279,0.0411,0.0859,0.1131,0.1306,0.1757,0.2648,0.1955,0.0656,0.0580,0.0319,0.0301,0.0272,0.0074,0.0149,0.0125,0.0134,0.0026,0.0038,0.0018,0.0113,0.0058,0.0047,0.0071,R
40 | 0.0091,0.0213,0.0206,0.0505,0.0657,0.0795,0.0970,0.0872,0.0743,0.0837,0.1579,0.0898,0.0309,0.1856,0.2969,0.2032,0.1264,0.1655,0.1661,0.2091,0.2310,0.4460,0.6634,0.6933,0.7663,0.8206,0.7049,0.7560,0.7466,0.6387,0.4846,0.3328,0.5356,0.8741,0.8573,0.6718,0.3446,0.3150,0.2702,0.2598,0.2742,0.3594,0.4382,0.2460,0.0758,0.0187,0.0797,0.0748,0.0367,0.0155,0.0300,0.0112,0.0112,0.0102,0.0026,0.0097,0.0098,0.0043,0.0071,0.0108,R
41 | 0.0068,0.0232,0.0513,0.0444,0.0249,0.0637,0.0422,0.1130,0.1911,0.2475,0.1606,0.0922,0.2398,0.3220,0.4295,0.2652,0.0666,0.1442,0.2373,0.2595,0.2493,0.3903,0.6384,0.8037,0.7026,0.6874,0.6997,0.8558,1.0000,0.9621,0.8996,0.7575,0.6902,0.5686,0.4396,0.4546,0.2959,0.1587,0.1681,0.0842,0.1173,0.1754,0.2728,0.1705,0.0194,0.0213,0.0354,0.0420,0.0093,0.0204,0.0199,0.0173,0.0163,0.0055,0.0045,0.0068,0.0041,0.0052,0.0194,0.0105,R
42 | 0.0093,0.0185,0.0056,0.0064,0.0260,0.0458,0.0470,0.0057,0.0425,0.0640,0.0888,0.1599,0.1541,0.2768,0.2176,0.2799,0.3491,0.2824,0.2479,0.3005,0.4300,0.4684,0.4520,0.5026,0.6217,0.6571,0.6632,0.7321,0.8534,1.0000,0.8448,0.6354,0.6308,0.6211,0.6976,0.5868,0.4889,0.3683,0.2043,0.1469,0.2220,0.1449,0.1490,0.1211,0.1144,0.0791,0.0365,0.0152,0.0085,0.0120,0.0022,0.0069,0.0064,0.0129,0.0114,0.0054,0.0089,0.0050,0.0058,0.0025,R
43 | 0.0211,0.0319,0.0415,0.0286,0.0121,0.0438,0.1299,0.1390,0.0695,0.0568,0.0869,0.1935,0.1478,0.1871,0.1994,0.3283,0.6861,0.5814,0.2500,0.1734,0.3363,0.5588,0.6592,0.7012,0.8099,0.8901,0.8745,0.7887,0.8725,0.9376,0.8920,0.7508,0.6832,0.7610,0.9017,1.0000,0.9123,0.7388,0.5915,0.4057,0.3019,0.2331,0.2931,0.2298,0.2391,0.1910,0.1096,0.0300,0.0171,0.0383,0.0053,0.0090,0.0042,0.0153,0.0106,0.0020,0.0105,0.0049,0.0070,0.0080,R
44 | 0.0093,0.0269,0.0217,0.0339,0.0305,0.1172,0.1450,0.0638,0.0740,0.1360,0.2132,0.3738,0.3738,0.2673,0.2333,0.5367,0.7312,0.7659,0.6271,0.4395,0.4330,0.4326,0.5544,0.7360,0.8589,0.8989,0.9420,0.9401,0.9379,0.8575,0.7284,0.6700,0.7547,0.8773,0.9919,0.9922,0.9419,0.8388,0.6605,0.4816,0.2917,0.1769,0.1136,0.0701,0.1578,0.1938,0.1106,0.0693,0.0176,0.0205,0.0309,0.0212,0.0091,0.0056,0.0086,0.0092,0.0070,0.0116,0.0060,0.0110,R
45 | 0.0257,0.0447,0.0388,0.0239,0.1315,0.1323,0.1608,0.2145,0.0847,0.0561,0.0891,0.0861,0.1531,0.1524,0.1849,0.2871,0.2009,0.2748,0.5017,0.2172,0.4978,0.5265,0.3647,0.5768,0.5161,0.5715,0.4006,0.3650,0.6685,0.8659,0.8052,0.4082,0.3379,0.5092,0.6776,0.7313,0.6062,0.7040,0.8849,0.8979,0.7751,0.7247,0.7733,0.7762,0.6009,0.4514,0.3096,0.1859,0.0956,0.0206,0.0206,0.0096,0.0153,0.0096,0.0131,0.0198,0.0025,0.0199,0.0255,0.0180,R
46 | 0.0408,0.0653,0.0397,0.0604,0.0496,0.1817,0.1178,0.1024,0.0583,0.2176,0.2459,0.3332,0.3087,0.2613,0.3232,0.3731,0.4203,0.5364,0.7062,0.8196,0.8835,0.8299,0.7609,0.7605,0.8367,0.8905,0.7652,0.5897,0.3037,0.0823,0.2787,0.7241,0.8032,0.8050,0.7676,0.7468,0.6253,0.1730,0.2916,0.5003,0.5220,0.4824,0.4004,0.3877,0.1651,0.0442,0.0663,0.0418,0.0475,0.0235,0.0066,0.0062,0.0129,0.0184,0.0069,0.0198,0.0199,0.0102,0.0070,0.0055,R
47 | 0.0308,0.0339,0.0202,0.0889,0.1570,0.1750,0.0920,0.1353,0.1593,0.2795,0.3336,0.2940,0.1608,0.3335,0.4985,0.7295,0.7350,0.8253,0.8793,0.9657,1.0000,0.8707,0.6471,0.5973,0.8218,0.7755,0.6111,0.4195,0.2990,0.1354,0.2438,0.5624,0.5555,0.6963,0.7298,0.7022,0.5468,0.1421,0.4738,0.6410,0.4375,0.3178,0.2377,0.2808,0.1374,0.1136,0.1034,0.0688,0.0422,0.0117,0.0070,0.0167,0.0127,0.0138,0.0090,0.0051,0.0029,0.0122,0.0056,0.0020,R
48 | 0.0373,0.0281,0.0232,0.0225,0.0179,0.0733,0.0841,0.1031,0.0993,0.0802,0.1564,0.2565,0.2624,0.1179,0.0597,0.1563,0.2241,0.3586,0.1792,0.3256,0.6079,0.6988,0.8391,0.8553,0.7710,0.6215,0.5736,0.4402,0.4056,0.4411,0.5130,0.5965,0.7272,0.6539,0.5902,0.5393,0.4897,0.4081,0.4145,0.6003,0.7196,0.6633,0.6287,0.4087,0.3212,0.2518,0.1482,0.0988,0.0317,0.0269,0.0066,0.0008,0.0045,0.0024,0.0006,0.0073,0.0096,0.0054,0.0085,0.0060,R
49 | 0.0190,0.0038,0.0642,0.0452,0.0333,0.0690,0.0901,0.1454,0.0740,0.0349,0.1459,0.3473,0.3197,0.2823,0.0166,0.0572,0.2164,0.4563,0.3819,0.5627,0.6484,0.7235,0.8242,0.8766,1.0000,0.8582,0.6563,0.5087,0.4817,0.4530,0.4521,0.4532,0.5385,0.5308,0.5356,0.5271,0.4260,0.2436,0.1205,0.3845,0.4107,0.5067,0.4216,0.2479,0.1586,0.1124,0.0651,0.0789,0.0325,0.0070,0.0026,0.0093,0.0118,0.0112,0.0094,0.0140,0.0072,0.0022,0.0055,0.0122,R
50 | 0.0119,0.0582,0.0623,0.0600,0.1397,0.1883,0.1422,0.1447,0.0487,0.0864,0.2143,0.3720,0.2665,0.2113,0.1103,0.1136,0.1934,0.4142,0.3279,0.6222,0.7468,0.7676,0.7867,0.8253,1.0000,0.9481,0.7539,0.6008,0.5437,0.5387,0.5619,0.5141,0.6084,0.5621,0.5956,0.6078,0.5025,0.2829,0.0477,0.2811,0.3422,0.5147,0.4372,0.2470,0.1708,0.1343,0.0838,0.0755,0.0304,0.0074,0.0069,0.0025,0.0103,0.0074,0.0123,0.0069,0.0076,0.0073,0.0030,0.0138,R
51 | 0.0353,0.0713,0.0326,0.0272,0.0370,0.0792,0.1083,0.0687,0.0298,0.0880,0.1078,0.0979,0.2250,0.2819,0.2099,0.1240,0.1699,0.0939,0.1091,0.1410,0.1268,0.3151,0.1430,0.2264,0.5756,0.7876,0.7158,0.5998,0.5583,0.6295,0.7659,0.8940,0.8436,0.6807,0.8380,1.0000,0.9497,0.7866,0.5647,0.3480,0.2585,0.2304,0.2948,0.3363,0.3017,0.2193,0.1316,0.1078,0.0559,0.0035,0.0098,0.0163,0.0242,0.0043,0.0202,0.0108,0.0037,0.0096,0.0093,0.0053,R
52 | 0.0131,0.0068,0.0308,0.0311,0.0085,0.0767,0.0771,0.0640,0.0726,0.0901,0.0750,0.0844,0.1226,0.1619,0.2317,0.2934,0.3526,0.3657,0.3221,0.3093,0.4084,0.4285,0.4663,0.5956,0.6948,0.8386,0.8875,0.6404,0.3308,0.3425,0.4920,0.4592,0.3034,0.4366,0.5175,0.5122,0.4746,0.4902,0.4603,0.4460,0.4196,0.2873,0.2296,0.0949,0.0095,0.0527,0.0383,0.0107,0.0108,0.0077,0.0109,0.0062,0.0028,0.0040,0.0075,0.0039,0.0053,0.0013,0.0052,0.0023,R
53 | 0.0087,0.0046,0.0081,0.0230,0.0586,0.0682,0.0993,0.0717,0.0576,0.0818,0.1315,0.1862,0.2789,0.2579,0.2240,0.2568,0.2933,0.2991,0.3924,0.4691,0.5665,0.6464,0.6774,0.7577,0.8856,0.9419,1.0000,0.8564,0.6790,0.5587,0.4147,0.2946,0.2025,0.0688,0.1171,0.2157,0.2216,0.2776,0.2309,0.1444,0.1513,0.1745,0.1756,0.1424,0.0908,0.0138,0.0469,0.0480,0.0159,0.0045,0.0015,0.0052,0.0038,0.0079,0.0114,0.0050,0.0030,0.0064,0.0058,0.0030,R
54 | 0.0293,0.0378,0.0257,0.0062,0.0130,0.0612,0.0895,0.1107,0.0973,0.0751,0.0528,0.1209,0.1763,0.2039,0.2727,0.2321,0.2676,0.2934,0.3295,0.4910,0.5402,0.6257,0.6826,0.7527,0.8504,0.8938,0.9928,0.9134,0.7080,0.6318,0.6126,0.4638,0.2797,0.1721,0.1665,0.2561,0.2735,0.3209,0.2724,0.1880,0.1552,0.2522,0.2121,0.1801,0.1473,0.0681,0.1091,0.0919,0.0397,0.0093,0.0076,0.0065,0.0072,0.0108,0.0051,0.0102,0.0041,0.0055,0.0050,0.0087,R
55 | 0.0132,0.0080,0.0188,0.0141,0.0436,0.0668,0.0609,0.0131,0.0899,0.0922,0.1445,0.1475,0.2087,0.2558,0.2603,0.1985,0.2394,0.3134,0.4077,0.4529,0.4893,0.5666,0.6234,0.6741,0.8282,0.8823,0.9196,0.8965,0.7549,0.6736,0.6463,0.5007,0.3663,0.2298,0.1362,0.2123,0.2395,0.2673,0.2865,0.2060,0.1659,0.2633,0.2552,0.1696,0.1467,0.1286,0.0926,0.0716,0.0325,0.0258,0.0136,0.0044,0.0028,0.0021,0.0022,0.0048,0.0138,0.0140,0.0028,0.0064,R
56 | 0.0201,0.0116,0.0123,0.0245,0.0547,0.0208,0.0891,0.0836,0.1335,0.1199,0.1742,0.1387,0.2042,0.2580,0.2616,0.2097,0.2532,0.3213,0.4327,0.4760,0.5328,0.6057,0.6696,0.7476,0.8930,0.9405,1.0000,0.9785,0.8473,0.7639,0.6701,0.4989,0.3718,0.2196,0.1416,0.2680,0.2630,0.3104,0.3392,0.2123,0.1170,0.2655,0.2203,0.1541,0.1464,0.1044,0.1225,0.0745,0.0490,0.0224,0.0032,0.0076,0.0045,0.0056,0.0075,0.0037,0.0045,0.0029,0.0008,0.0018,R
57 | 0.0152,0.0102,0.0113,0.0263,0.0097,0.0391,0.0857,0.0915,0.0949,0.1504,0.1911,0.2115,0.2249,0.2573,0.1701,0.2023,0.2538,0.3417,0.4026,0.4553,0.5525,0.5991,0.5854,0.7114,0.9500,0.9858,1.0000,0.9578,0.8642,0.7128,0.5893,0.4323,0.2897,0.1744,0.0770,0.2297,0.2459,0.3101,0.3312,0.2220,0.0871,0.2064,0.1808,0.1624,0.1120,0.0815,0.1117,0.0950,0.0412,0.0120,0.0048,0.0049,0.0041,0.0036,0.0013,0.0046,0.0037,0.0011,0.0034,0.0033,R
58 | 0.0216,0.0124,0.0174,0.0152,0.0608,0.1026,0.1139,0.0877,0.1160,0.0866,0.1564,0.0780,0.0997,0.0915,0.0662,0.1134,0.1740,0.2573,0.3294,0.3910,0.5438,0.6115,0.7022,0.7610,0.7973,0.9105,0.8807,0.7949,0.7990,0.7180,0.6407,0.6312,0.5929,0.6168,0.6498,0.6764,0.6253,0.5117,0.3890,0.3273,0.2509,0.1530,0.1323,0.1657,0.1215,0.0978,0.0452,0.0273,0.0179,0.0092,0.0018,0.0052,0.0049,0.0096,0.0134,0.0122,0.0047,0.0018,0.0006,0.0023,R
59 | 0.0225,0.0019,0.0075,0.0097,0.0445,0.0906,0.0889,0.0655,0.1624,0.1452,0.1442,0.0948,0.0618,0.1641,0.0708,0.0844,0.2590,0.2679,0.3094,0.4678,0.5958,0.7245,0.8773,0.9214,0.9282,0.9942,1.0000,0.9071,0.8545,0.7293,0.6499,0.6071,0.5588,0.5967,0.6275,0.5459,0.4786,0.3965,0.2087,0.1651,0.1836,0.0652,0.0758,0.0486,0.0353,0.0297,0.0241,0.0379,0.0119,0.0073,0.0051,0.0034,0.0129,0.0100,0.0044,0.0057,0.0030,0.0035,0.0021,0.0027,R
60 | 0.0125,0.0152,0.0218,0.0175,0.0362,0.0696,0.0873,0.0616,0.1252,0.1302,0.0888,0.0500,0.0628,0.1274,0.0801,0.0742,0.2048,0.2950,0.3193,0.4567,0.5959,0.7101,0.8225,0.8425,0.9065,0.9802,1.0000,0.8752,0.7583,0.6616,0.5786,0.5128,0.4776,0.4994,0.5197,0.5071,0.4577,0.3505,0.1845,0.1890,0.1967,0.1041,0.0550,0.0492,0.0622,0.0505,0.0247,0.0219,0.0102,0.0047,0.0019,0.0041,0.0074,0.0030,0.0050,0.0048,0.0017,0.0041,0.0086,0.0058,R
61 | 0.0130,0.0006,0.0088,0.0456,0.0525,0.0778,0.0931,0.0941,0.1711,0.1483,0.1532,0.1100,0.0890,0.1236,0.1197,0.1145,0.2137,0.2838,0.3640,0.5430,0.6673,0.7979,0.9273,0.9027,0.9192,1.0000,0.9821,0.9092,0.8184,0.6962,0.5900,0.5447,0.5142,0.5389,0.5531,0.5318,0.4826,0.3790,0.1831,0.1750,0.1679,0.0674,0.0609,0.0375,0.0533,0.0278,0.0179,0.0114,0.0073,0.0116,0.0092,0.0078,0.0041,0.0013,0.0011,0.0045,0.0039,0.0022,0.0023,0.0016,R
62 | 0.0135,0.0045,0.0051,0.0289,0.0561,0.0929,0.1031,0.0883,0.1596,0.1908,0.1576,0.1112,0.1197,0.1174,0.1415,0.2215,0.2658,0.2713,0.3862,0.5717,0.6797,0.8747,1.0000,0.8948,0.8420,0.9174,0.9307,0.9050,0.8228,0.6986,0.5831,0.4924,0.4563,0.5159,0.5670,0.5284,0.5144,0.3742,0.2282,0.1193,0.1088,0.0431,0.1070,0.0583,0.0046,0.0473,0.0408,0.0290,0.0192,0.0094,0.0025,0.0037,0.0084,0.0102,0.0096,0.0024,0.0037,0.0028,0.0030,0.0030,R
63 | 0.0086,0.0215,0.0242,0.0445,0.0667,0.0771,0.0499,0.0906,0.1229,0.1185,0.0775,0.1101,0.1042,0.0853,0.0456,0.1304,0.2690,0.2947,0.3669,0.4948,0.6275,0.8162,0.9237,0.8710,0.8052,0.8756,1.0000,0.9858,0.9427,0.8114,0.6987,0.6810,0.6591,0.6954,0.7290,0.6680,0.5917,0.4899,0.3439,0.2366,0.1716,0.1013,0.0766,0.0845,0.0260,0.0333,0.0205,0.0309,0.0101,0.0095,0.0047,0.0072,0.0054,0.0022,0.0016,0.0029,0.0058,0.0050,0.0024,0.0030,R
64 | 0.0067,0.0096,0.0024,0.0058,0.0197,0.0618,0.0432,0.0951,0.0836,0.1180,0.0978,0.0909,0.0656,0.0593,0.0832,0.1297,0.2038,0.3811,0.4451,0.5224,0.5911,0.6566,0.6308,0.5998,0.4958,0.5647,0.6906,0.8513,1.0000,0.9166,0.7676,0.6177,0.5468,0.5516,0.5463,0.5515,0.4561,0.3466,0.3384,0.2853,0.2502,0.1641,0.1605,0.1491,0.1326,0.0687,0.0602,0.0561,0.0306,0.0154,0.0029,0.0048,0.0023,0.0020,0.0040,0.0019,0.0034,0.0034,0.0051,0.0031,R
65 | 0.0071,0.0103,0.0135,0.0494,0.0253,0.0806,0.0701,0.0738,0.0117,0.0898,0.0289,0.1554,0.1437,0.1035,0.1424,0.1227,0.0892,0.2047,0.0827,0.1524,0.3031,0.1608,0.0667,0.1426,0.0395,0.1653,0.3399,0.4855,0.5206,0.5508,0.6102,0.5989,0.6764,0.8897,1.0000,0.9517,0.8459,0.7073,0.6697,0.6326,0.5102,0.4161,0.2816,0.1705,0.1421,0.0971,0.0879,0.0863,0.0355,0.0233,0.0252,0.0043,0.0048,0.0076,0.0124,0.0105,0.0054,0.0032,0.0073,0.0063,R
66 | 0.0176,0.0172,0.0501,0.0285,0.0262,0.0351,0.0362,0.0535,0.0258,0.0474,0.0526,0.1854,0.1040,0.0948,0.0912,0.1688,0.1568,0.0375,0.1316,0.2086,0.1976,0.0946,0.1965,0.1242,0.0616,0.2141,0.4642,0.6471,0.6340,0.6107,0.7046,0.5376,0.5934,0.8443,0.9481,0.9705,0.7766,0.6313,0.5760,0.6148,0.5450,0.4813,0.3406,0.1916,0.1134,0.0640,0.0911,0.0980,0.0563,0.0187,0.0088,0.0042,0.0175,0.0171,0.0079,0.0050,0.0112,0.0179,0.0294,0.0063,R
67 | 0.0265,0.0440,0.0137,0.0084,0.0305,0.0438,0.0341,0.0780,0.0844,0.0779,0.0327,0.2060,0.1908,0.1065,0.1457,0.2232,0.2070,0.1105,0.1078,0.1165,0.2224,0.0689,0.2060,0.2384,0.0904,0.2278,0.5872,0.8457,0.8467,0.7679,0.8055,0.6260,0.6545,0.8747,0.9885,0.9348,0.6960,0.5733,0.5872,0.6663,0.5651,0.5247,0.3684,0.1997,0.1512,0.0508,0.0931,0.0982,0.0524,0.0188,0.0100,0.0038,0.0187,0.0156,0.0068,0.0097,0.0073,0.0081,0.0086,0.0095,R
68 | 0.0368,0.0403,0.0317,0.0293,0.0820,0.1342,0.1161,0.0663,0.0155,0.0506,0.0906,0.2545,0.1464,0.1272,0.1223,0.1669,0.1424,0.1285,0.1857,0.1136,0.2069,0.0219,0.2400,0.2547,0.0240,0.1923,0.4753,0.7003,0.6825,0.6443,0.7063,0.5373,0.6601,0.8708,0.9518,0.9605,0.7712,0.6772,0.6431,0.6720,0.6035,0.5155,0.3802,0.2278,0.1522,0.0801,0.0804,0.0752,0.0566,0.0175,0.0058,0.0091,0.0160,0.0160,0.0081,0.0070,0.0135,0.0067,0.0078,0.0068,R
69 | 0.0195,0.0142,0.0181,0.0406,0.0391,0.0249,0.0892,0.0973,0.0840,0.1191,0.1522,0.1322,0.1434,0.1244,0.0653,0.0890,0.1226,0.1846,0.3880,0.3658,0.2297,0.2610,0.4193,0.5848,0.5643,0.5448,0.4772,0.6897,0.9797,1.0000,0.9546,0.8835,0.7662,0.6547,0.5447,0.4593,0.4679,0.1987,0.0699,0.1493,0.1713,0.1654,0.2600,0.3846,0.3754,0.2414,0.1077,0.0224,0.0155,0.0187,0.0125,0.0028,0.0067,0.0120,0.0012,0.0022,0.0058,0.0042,0.0067,0.0012,R
70 | 0.0216,0.0215,0.0273,0.0139,0.0357,0.0785,0.0906,0.0908,0.1151,0.0973,0.1203,0.1102,0.1192,0.1762,0.2390,0.2138,0.1929,0.1765,0.0746,0.1265,0.2005,0.1571,0.2605,0.5386,0.8440,1.0000,0.8684,0.6742,0.5537,0.4638,0.3609,0.2055,0.1620,0.2092,0.3100,0.2344,0.1058,0.0383,0.0528,0.1291,0.2241,0.1915,0.1587,0.0942,0.0840,0.0670,0.0342,0.0469,0.0357,0.0136,0.0082,0.0140,0.0044,0.0052,0.0073,0.0021,0.0047,0.0024,0.0009,0.0017,R
71 | 0.0065,0.0122,0.0068,0.0108,0.0217,0.0284,0.0527,0.0575,0.1054,0.1109,0.0937,0.0827,0.0920,0.0911,0.1487,0.1666,0.1268,0.1374,0.1095,0.1286,0.2146,0.2889,0.4238,0.6168,0.8167,0.9622,0.8280,0.5816,0.4667,0.3539,0.2727,0.1410,0.1863,0.2176,0.2360,0.1725,0.0589,0.0621,0.1847,0.2452,0.2984,0.3041,0.2275,0.1480,0.1102,0.1178,0.0608,0.0333,0.0276,0.0100,0.0023,0.0069,0.0025,0.0027,0.0052,0.0036,0.0026,0.0036,0.0006,0.0035,R
72 | 0.0036,0.0078,0.0092,0.0387,0.0530,0.1197,0.1243,0.1026,0.1239,0.0888,0.0937,0.1245,0.1599,0.1542,0.1846,0.1732,0.1477,0.1748,0.1455,0.1579,0.2257,0.1975,0.3368,0.5828,0.8505,1.0000,0.8457,0.6624,0.5564,0.3925,0.3233,0.2054,0.1920,0.2227,0.3147,0.2268,0.0795,0.0748,0.1166,0.1969,0.2619,0.2507,0.1983,0.0948,0.0931,0.0965,0.0381,0.0435,0.0336,0.0055,0.0079,0.0119,0.0055,0.0035,0.0036,0.0004,0.0018,0.0049,0.0024,0.0016,R
73 | 0.0208,0.0186,0.0131,0.0211,0.0610,0.0613,0.0612,0.0506,0.0989,0.1093,0.1063,0.1179,0.1291,0.1591,0.1680,0.1918,0.1615,0.1647,0.1397,0.1426,0.2429,0.2816,0.4290,0.6443,0.9061,1.0000,0.8087,0.6119,0.5260,0.3677,0.2746,0.1020,0.1339,0.1582,0.1952,0.1787,0.0429,0.1096,0.1762,0.2481,0.3150,0.2920,0.1902,0.0696,0.0758,0.0910,0.0441,0.0244,0.0265,0.0095,0.0140,0.0074,0.0063,0.0081,0.0087,0.0044,0.0028,0.0019,0.0049,0.0023,R
74 | 0.0139,0.0222,0.0089,0.0108,0.0215,0.0136,0.0659,0.0954,0.0786,0.1015,0.1261,0.0828,0.0493,0.0848,0.1514,0.1396,0.1066,0.1923,0.2991,0.3247,0.3797,0.5658,0.7483,0.8757,0.9048,0.7511,0.6858,0.7043,0.5864,0.3773,0.2206,0.2628,0.2672,0.2907,0.1982,0.2288,0.3186,0.2871,0.2921,0.2806,0.2682,0.2112,0.1513,0.1789,0.1850,0.1717,0.0898,0.0656,0.0445,0.0110,0.0024,0.0062,0.0072,0.0113,0.0012,0.0022,0.0025,0.0059,0.0039,0.0048,R
75 | 0.0109,0.0093,0.0121,0.0378,0.0679,0.0863,0.1004,0.0664,0.0941,0.1036,0.0972,0.0501,0.1546,0.3404,0.4804,0.6570,0.7738,0.7827,0.8152,0.8129,0.8297,0.8535,0.8870,0.8894,0.8980,0.9667,1.0000,0.9134,0.6762,0.4659,0.2895,0.2959,0.1746,0.2112,0.2569,0.2276,0.2149,0.1601,0.0371,0.0117,0.0488,0.0288,0.0597,0.0431,0.0369,0.0025,0.0327,0.0257,0.0182,0.0108,0.0124,0.0077,0.0023,0.0117,0.0053,0.0077,0.0076,0.0056,0.0055,0.0039,R
76 | 0.0202,0.0104,0.0325,0.0239,0.0807,0.1529,0.1154,0.0608,0.1317,0.1370,0.0843,0.0269,0.1254,0.3046,0.5584,0.7973,0.8341,0.8057,0.8616,0.8769,0.9413,0.9403,0.9409,1.0000,0.9725,0.9309,0.9351,0.7317,0.4421,0.3244,0.4161,0.4611,0.4031,0.3000,0.2459,0.1348,0.2541,0.2255,0.1598,0.1485,0.0845,0.0569,0.0855,0.1262,0.1153,0.0570,0.0426,0.0425,0.0235,0.0006,0.0188,0.0127,0.0081,0.0067,0.0043,0.0065,0.0049,0.0054,0.0073,0.0054,R
77 | 0.0239,0.0189,0.0466,0.0440,0.0657,0.0742,0.1380,0.1099,0.1384,0.1376,0.0938,0.0259,0.1499,0.2851,0.5743,0.8278,0.8669,0.8131,0.9045,0.9046,1.0000,0.9976,0.9872,0.9761,0.9009,0.9724,0.9675,0.7633,0.4434,0.3822,0.4727,0.4007,0.3381,0.3172,0.2222,0.0733,0.2692,0.1888,0.0712,0.1062,0.0694,0.0300,0.0893,0.1459,0.1348,0.0391,0.0546,0.0469,0.0201,0.0095,0.0155,0.0091,0.0151,0.0080,0.0018,0.0078,0.0045,0.0026,0.0036,0.0024,R
78 | 0.0336,0.0294,0.0476,0.0539,0.0794,0.0804,0.1136,0.1228,0.1235,0.0842,0.0357,0.0689,0.1705,0.3257,0.4602,0.6225,0.7327,0.7843,0.7988,0.8261,1.0000,0.9814,0.9620,0.9601,0.9118,0.9086,0.7931,0.5877,0.3474,0.4235,0.4633,0.3410,0.2849,0.2847,0.1742,0.0549,0.1192,0.1154,0.0855,0.1811,0.1264,0.0799,0.0378,0.1268,0.1125,0.0505,0.0949,0.0677,0.0259,0.0170,0.0033,0.0150,0.0111,0.0032,0.0035,0.0169,0.0137,0.0015,0.0069,0.0051,R
79 | 0.0231,0.0351,0.0030,0.0304,0.0339,0.0860,0.1738,0.1351,0.1063,0.0347,0.0575,0.1382,0.2274,0.4038,0.5223,0.6847,0.7521,0.7760,0.7708,0.8627,1.0000,0.8873,0.8057,0.8760,0.9066,0.9430,0.8846,0.6500,0.2970,0.2423,0.2992,0.2285,0.2277,0.1529,0.1037,0.0352,0.1073,0.1373,0.1331,0.1454,0.1115,0.0440,0.0762,0.1381,0.0831,0.0654,0.0844,0.0595,0.0497,0.0313,0.0154,0.0106,0.0097,0.0022,0.0052,0.0072,0.0056,0.0038,0.0043,0.0030,R
80 | 0.0108,0.0086,0.0058,0.0460,0.0752,0.0887,0.1015,0.0494,0.0472,0.0393,0.1106,0.1412,0.2202,0.2976,0.4116,0.4754,0.5390,0.6279,0.7060,0.7918,0.9493,1.0000,0.9645,0.9432,0.8658,0.7895,0.6501,0.4492,0.4739,0.6153,0.4929,0.3195,0.3735,0.3336,0.1052,0.0671,0.0379,0.0461,0.1694,0.2169,0.1677,0.0644,0.0159,0.0778,0.0653,0.0210,0.0509,0.0387,0.0262,0.0101,0.0161,0.0029,0.0078,0.0114,0.0083,0.0058,0.0003,0.0023,0.0026,0.0027,R
81 | 0.0229,0.0369,0.0040,0.0375,0.0455,0.1452,0.2211,0.1188,0.0750,0.1631,0.2709,0.3358,0.4091,0.4400,0.5485,0.7213,0.8137,0.9185,1.0000,0.9418,0.9116,0.9349,0.7484,0.5146,0.4106,0.3443,0.6981,0.8713,0.9013,0.8014,0.4380,0.1319,0.1709,0.2484,0.3044,0.2312,0.1338,0.2056,0.2474,0.2790,0.1610,0.0056,0.0351,0.1148,0.1331,0.0276,0.0763,0.0631,0.0309,0.0240,0.0115,0.0064,0.0022,0.0122,0.0151,0.0056,0.0026,0.0029,0.0104,0.0163,R
82 | 0.0100,0.0194,0.0155,0.0489,0.0839,0.1009,0.1627,0.2071,0.2696,0.2990,0.3242,0.3565,0.3951,0.5201,0.6953,0.8468,1.0000,0.9278,0.8510,0.8010,0.8142,0.8825,0.7302,0.6107,0.7159,0.8458,0.6319,0.4808,0.6291,0.7152,0.6005,0.4235,0.4106,0.3992,0.1730,0.1975,0.2370,0.1339,0.1583,0.3151,0.1968,0.2054,0.1272,0.1129,0.1946,0.2195,0.1930,0.1498,0.0773,0.0196,0.0122,0.0130,0.0073,0.0077,0.0075,0.0060,0.0080,0.0019,0.0053,0.0019,R
83 | 0.0409,0.0421,0.0573,0.0130,0.0183,0.1019,0.1054,0.1070,0.2302,0.2259,0.2373,0.3323,0.3827,0.4840,0.6812,0.7555,0.9522,0.9826,0.8871,0.8268,0.7561,0.8217,0.6967,0.6444,0.6948,0.8014,0.6053,0.6084,0.8877,0.8557,0.5563,0.2897,0.3638,0.4786,0.2908,0.0899,0.2043,0.1707,0.0407,0.1286,0.1581,0.2191,0.1701,0.0971,0.2217,0.2732,0.1874,0.1062,0.0665,0.0405,0.0113,0.0028,0.0036,0.0105,0.0120,0.0087,0.0061,0.0061,0.0030,0.0078,R
84 | 0.0217,0.0340,0.0392,0.0236,0.1081,0.1164,0.1398,0.1009,0.1147,0.1777,0.4079,0.4113,0.3973,0.5078,0.6509,0.8073,0.9819,1.0000,0.9407,0.8452,0.8106,0.8460,0.6212,0.5815,0.7745,0.8204,0.5601,0.2989,0.5009,0.6628,0.5753,0.4055,0.3746,0.3481,0.1580,0.1422,0.2130,0.1866,0.1003,0.2396,0.2241,0.2029,0.0710,0.1606,0.1669,0.1700,0.1829,0.1403,0.0506,0.0224,0.0095,0.0031,0.0103,0.0078,0.0077,0.0094,0.0031,0.0030,0.0013,0.0069,R
85 | 0.0378,0.0318,0.0423,0.0350,0.1787,0.1635,0.0887,0.0817,0.1779,0.2053,0.3135,0.3118,0.3686,0.3885,0.5850,0.7868,0.9739,1.0000,0.9843,0.8610,0.8443,0.9061,0.5847,0.4033,0.5946,0.6793,0.6389,0.5002,0.5578,0.4831,0.4729,0.3318,0.3969,0.3894,0.2314,0.1036,0.1312,0.0864,0.2569,0.3179,0.2649,0.2714,0.1713,0.0584,0.1230,0.2200,0.2198,0.1074,0.0423,0.0162,0.0093,0.0046,0.0044,0.0078,0.0102,0.0065,0.0061,0.0062,0.0043,0.0053,R
86 | 0.0365,0.1632,0.1636,0.1421,0.1130,0.1306,0.2112,0.2268,0.2992,0.3735,0.3042,0.0387,0.2679,0.5397,0.6204,0.7257,0.8350,0.6888,0.4450,0.3921,0.5605,0.7545,0.8311,1.0000,0.8762,0.7092,0.7009,0.5014,0.3942,0.4456,0.4072,0.0773,0.1423,0.0401,0.3597,0.6847,0.7076,0.3597,0.0612,0.3027,0.3966,0.3868,0.2380,0.2059,0.2288,0.1704,0.1587,0.1792,0.1022,0.0151,0.0223,0.0110,0.0071,0.0205,0.0164,0.0063,0.0078,0.0094,0.0110,0.0068,R
87 | 0.0188,0.0370,0.0953,0.0824,0.0249,0.0488,0.1424,0.1972,0.1873,0.1806,0.2139,0.1523,0.1975,0.4844,0.7298,0.7807,0.7906,0.6122,0.4200,0.2807,0.5148,0.7569,0.8596,1.0000,0.8457,0.6797,0.6971,0.5843,0.4772,0.5201,0.4241,0.1592,0.1668,0.0588,0.3967,0.7147,0.7319,0.3509,0.0589,0.2690,0.4200,0.3874,0.2440,0.2000,0.2307,0.1886,0.1960,0.1701,0.1366,0.0398,0.0143,0.0093,0.0033,0.0113,0.0030,0.0057,0.0090,0.0057,0.0068,0.0024,R
88 | 0.0856,0.0454,0.0382,0.0203,0.0385,0.0534,0.2140,0.3110,0.2837,0.2751,0.2707,0.0946,0.1020,0.4519,0.6737,0.6699,0.7066,0.5632,0.3785,0.2721,0.5297,0.7697,0.8643,0.9304,0.9372,0.6247,0.6024,0.6810,0.5047,0.5775,0.4754,0.2400,0.2779,0.1997,0.5305,0.7409,0.7775,0.4424,0.1416,0.3508,0.4482,0.4208,0.3054,0.2235,0.2611,0.2798,0.2392,0.2021,0.1326,0.0358,0.0128,0.0172,0.0138,0.0079,0.0037,0.0051,0.0258,0.0102,0.0037,0.0037,R
89 | 0.0274,0.0242,0.0621,0.0560,0.1129,0.0973,0.1823,0.1745,0.1440,0.1808,0.2366,0.0906,0.1749,0.4012,0.5187,0.7312,0.9062,0.9260,0.7434,0.4463,0.5103,0.6952,0.7755,0.8364,0.7283,0.6399,0.5759,0.4146,0.3495,0.4437,0.2665,0.2024,0.1942,0.0765,0.3725,0.5843,0.4827,0.2347,0.0999,0.3244,0.3990,0.2975,0.1684,0.1761,0.1683,0.0729,0.1190,0.1297,0.0748,0.0067,0.0255,0.0113,0.0108,0.0085,0.0047,0.0074,0.0104,0.0161,0.0220,0.0173,R
90 | 0.0235,0.0291,0.0749,0.0519,0.0227,0.0834,0.0677,0.2002,0.2876,0.3674,0.2974,0.0837,0.1912,0.5040,0.6352,0.6804,0.7505,0.6595,0.4509,0.2964,0.4019,0.6794,0.8297,1.0000,0.8240,0.7115,0.7726,0.6124,0.4936,0.5648,0.4906,0.1820,0.1811,0.1107,0.4603,0.6650,0.6423,0.2166,0.1951,0.4947,0.4925,0.4041,0.2402,0.1392,0.1779,0.1946,0.1723,0.1522,0.0929,0.0179,0.0242,0.0083,0.0037,0.0095,0.0105,0.0030,0.0132,0.0068,0.0108,0.0090,R
91 | 0.0126,0.0519,0.0621,0.0518,0.1072,0.2587,0.2304,0.2067,0.3416,0.4284,0.3015,0.1207,0.3299,0.5707,0.6962,0.9751,1.0000,0.9293,0.6210,0.4586,0.5001,0.5032,0.7082,0.8420,0.8109,0.7690,0.8105,0.6203,0.2356,0.2595,0.6299,0.6762,0.2903,0.4393,0.8529,0.7180,0.4801,0.5856,0.4993,0.2866,0.0601,0.1167,0.2737,0.2812,0.2078,0.0660,0.0491,0.0345,0.0172,0.0287,0.0027,0.0208,0.0048,0.0199,0.0126,0.0022,0.0037,0.0034,0.0114,0.0077,R
92 | 0.0253,0.0808,0.0507,0.0244,0.1724,0.3823,0.3729,0.3583,0.3429,0.2197,0.2653,0.3223,0.5582,0.6916,0.7943,0.7152,0.3512,0.2008,0.2676,0.4299,0.5280,0.3489,0.1430,0.5453,0.6338,0.7712,0.6838,0.8015,0.8073,0.8310,0.7792,0.5049,0.1413,0.2767,0.5084,0.4787,0.1356,0.2299,0.2789,0.3833,0.2933,0.1155,0.1705,0.1294,0.0909,0.0800,0.0567,0.0198,0.0114,0.0151,0.0085,0.0178,0.0073,0.0079,0.0038,0.0116,0.0033,0.0039,0.0081,0.0053,R
93 | 0.0260,0.0192,0.0254,0.0061,0.0352,0.0701,0.1263,0.1080,0.1523,0.1630,0.1030,0.2187,0.1542,0.2630,0.2940,0.2978,0.0699,0.1401,0.2990,0.3915,0.3598,0.2403,0.4208,0.5675,0.6094,0.6323,0.6549,0.7673,1.0000,0.8463,0.5509,0.4444,0.5169,0.4268,0.1802,0.0791,0.0535,0.1906,0.2561,0.2153,0.2769,0.2841,0.1733,0.0815,0.0335,0.0933,0.1018,0.0309,0.0208,0.0318,0.0132,0.0118,0.0120,0.0051,0.0070,0.0015,0.0035,0.0008,0.0044,0.0077,R
94 | 0.0459,0.0437,0.0347,0.0456,0.0067,0.0890,0.1798,0.1741,0.1598,0.1408,0.2693,0.3259,0.4545,0.5785,0.4471,0.2231,0.2164,0.3201,0.2915,0.4235,0.4460,0.2380,0.6415,0.8966,0.8918,0.7529,0.6838,0.8390,1.0000,0.8362,0.5427,0.4577,0.8067,0.6973,0.3915,0.1558,0.1598,0.2161,0.5178,0.4782,0.2344,0.3599,0.2785,0.1807,0.0352,0.0473,0.0322,0.0408,0.0163,0.0088,0.0121,0.0067,0.0032,0.0109,0.0164,0.0151,0.0070,0.0085,0.0117,0.0056,R
95 | 0.0025,0.0309,0.0171,0.0228,0.0434,0.1224,0.1947,0.1661,0.1368,0.1430,0.0994,0.2250,0.2444,0.3239,0.3039,0.2410,0.0367,0.1672,0.3038,0.4069,0.3613,0.1994,0.4611,0.6849,0.7272,0.7152,0.7102,0.8516,1.0000,0.7690,0.4841,0.3717,0.6096,0.5110,0.2586,0.0916,0.0947,0.2287,0.3480,0.2095,0.1901,0.2941,0.2211,0.1524,0.0746,0.0606,0.0692,0.0446,0.0344,0.0082,0.0108,0.0149,0.0077,0.0036,0.0114,0.0085,0.0101,0.0016,0.0028,0.0014,R
96 | 0.0291,0.0400,0.0771,0.0809,0.0521,0.1051,0.0145,0.0674,0.1294,0.1146,0.0942,0.0794,0.0252,0.1191,0.1045,0.2050,0.1556,0.2690,0.3784,0.4024,0.3470,0.1395,0.1208,0.2827,0.1500,0.2626,0.4468,0.7520,0.9036,0.7812,0.4766,0.2483,0.5372,0.6279,0.3647,0.4572,0.6359,0.6474,0.5520,0.3253,0.2292,0.0653,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0056,0.0237,0.0204,0.0050,0.0137,0.0164,0.0081,0.0139,0.0111,R
97 | 0.0181,0.0146,0.0026,0.0141,0.0421,0.0473,0.0361,0.0741,0.1398,0.1045,0.0904,0.0671,0.0997,0.1056,0.0346,0.1231,0.1626,0.3652,0.3262,0.2995,0.2109,0.2104,0.2085,0.2282,0.0747,0.1969,0.4086,0.6385,0.7970,0.7508,0.5517,0.2214,0.4672,0.4479,0.2297,0.3235,0.4480,0.5581,0.6520,0.5354,0.2478,0.2268,0.1788,0.0898,0.0536,0.0374,0.0990,0.0956,0.0317,0.0142,0.0076,0.0223,0.0255,0.0145,0.0233,0.0041,0.0018,0.0048,0.0089,0.0085,R
98 | 0.0491,0.0279,0.0592,0.1270,0.1772,0.1908,0.2217,0.0768,0.1246,0.2028,0.0947,0.2497,0.2209,0.3195,0.3340,0.3323,0.2780,0.2975,0.2948,0.1729,0.3264,0.3834,0.3523,0.5410,0.5228,0.4475,0.5340,0.5323,0.3907,0.3456,0.4091,0.4639,0.5580,0.5727,0.6355,0.7563,0.6903,0.6176,0.5379,0.5622,0.6508,0.4797,0.3736,0.2804,0.1982,0.2438,0.1789,0.1706,0.0762,0.0238,0.0268,0.0081,0.0129,0.0161,0.0063,0.0119,0.0194,0.0140,0.0332,0.0439,M
99 | 0.1313,0.2339,0.3059,0.4264,0.4010,0.1791,0.1853,0.0055,0.1929,0.2231,0.2907,0.2259,0.3136,0.3302,0.3660,0.3956,0.4386,0.4670,0.5255,0.3735,0.2243,0.1973,0.4337,0.6532,0.5070,0.2796,0.4163,0.5950,0.5242,0.4178,0.3714,0.2375,0.0863,0.1437,0.2896,0.4577,0.3725,0.3372,0.3803,0.4181,0.3603,0.2711,0.1653,0.1951,0.2811,0.2246,0.1921,0.1500,0.0665,0.0193,0.0156,0.0362,0.0210,0.0154,0.0180,0.0013,0.0106,0.0127,0.0178,0.0231,M
100 | 0.0201,0.0423,0.0554,0.0783,0.0620,0.0871,0.1201,0.2707,0.1206,0.0279,0.2251,0.2615,0.1770,0.3709,0.4533,0.5553,0.4616,0.3797,0.3450,0.2665,0.2395,0.1127,0.2556,0.5169,0.3779,0.4082,0.5353,0.5116,0.4544,0.4258,0.3869,0.3939,0.4661,0.3974,0.2194,0.1816,0.1023,0.2108,0.3253,0.3697,0.2912,0.3010,0.2563,0.1927,0.2062,0.1751,0.0841,0.1035,0.0641,0.0153,0.0081,0.0191,0.0182,0.0160,0.0290,0.0090,0.0242,0.0224,0.0190,0.0096,M
101 | 0.0629,0.1065,0.1526,0.1229,0.1437,0.1190,0.0884,0.0907,0.2107,0.3597,0.5466,0.5205,0.5127,0.5395,0.6558,0.8705,0.9786,0.9335,0.7917,0.7383,0.6908,0.3850,0.0671,0.0502,0.2717,0.2839,0.2234,0.1911,0.0408,0.2531,0.1979,0.1891,0.2433,0.1956,0.2667,0.1340,0.1073,0.2023,0.1794,0.0227,0.1313,0.1775,0.1549,0.1626,0.0708,0.0129,0.0795,0.0762,0.0117,0.0061,0.0257,0.0089,0.0262,0.0108,0.0138,0.0187,0.0230,0.0057,0.0113,0.0131,M
102 | 0.0335,0.0134,0.0696,0.1180,0.0348,0.1180,0.1948,0.1607,0.3036,0.4372,0.5533,0.5771,0.7022,0.7067,0.7367,0.7391,0.8622,0.9458,0.8782,0.7913,0.5760,0.3061,0.0563,0.0239,0.2554,0.4862,0.5027,0.4402,0.2847,0.1797,0.3560,0.3522,0.3321,0.3112,0.3638,0.0754,0.1834,0.1820,0.1815,0.1593,0.0576,0.0954,0.1086,0.0812,0.0784,0.0487,0.0439,0.0586,0.0370,0.0185,0.0302,0.0244,0.0232,0.0093,0.0159,0.0193,0.0032,0.0377,0.0126,0.0156,M
103 | 0.0587,0.1210,0.1268,0.1498,0.1436,0.0561,0.0832,0.0672,0.1372,0.2352,0.3208,0.4257,0.5201,0.4914,0.5950,0.7221,0.9039,0.9111,0.8723,0.7686,0.7326,0.5222,0.3097,0.3172,0.2270,0.1640,0.1746,0.1835,0.2048,0.1674,0.2767,0.3104,0.3399,0.4441,0.5046,0.2814,0.1681,0.2633,0.3198,0.1933,0.0934,0.0443,0.0780,0.0722,0.0405,0.0553,0.1081,0.1139,0.0767,0.0265,0.0215,0.0331,0.0111,0.0088,0.0158,0.0122,0.0038,0.0101,0.0228,0.0124,M
104 | 0.0162,0.0253,0.0262,0.0386,0.0645,0.0472,0.1056,0.1388,0.0598,0.1334,0.2969,0.4754,0.5677,0.5690,0.6421,0.7487,0.8999,1.0000,0.9690,0.9032,0.7685,0.6998,0.6644,0.5964,0.3711,0.0921,0.0481,0.0876,0.1040,0.1714,0.3264,0.4612,0.3939,0.5050,0.4833,0.3511,0.2319,0.4029,0.3676,0.1510,0.0745,0.1395,0.1552,0.0377,0.0636,0.0443,0.0264,0.0223,0.0187,0.0077,0.0137,0.0071,0.0082,0.0232,0.0198,0.0074,0.0035,0.0100,0.0048,0.0019,M
105 | 0.0307,0.0523,0.0653,0.0521,0.0611,0.0577,0.0665,0.0664,0.1460,0.2792,0.3877,0.4992,0.4981,0.4972,0.5607,0.7339,0.8230,0.9173,0.9975,0.9911,0.8240,0.6498,0.5980,0.4862,0.3150,0.1543,0.0989,0.0284,0.1008,0.2636,0.2694,0.2930,0.2925,0.3998,0.3660,0.3172,0.4609,0.4374,0.1820,0.3376,0.6202,0.4448,0.1863,0.1420,0.0589,0.0576,0.0672,0.0269,0.0245,0.0190,0.0063,0.0321,0.0189,0.0137,0.0277,0.0152,0.0052,0.0121,0.0124,0.0055,M
106 | 0.0116,0.0179,0.0449,0.1096,0.1913,0.0924,0.0761,0.1092,0.0757,0.1006,0.2500,0.3988,0.3809,0.4753,0.6165,0.6464,0.8024,0.9208,0.9832,0.9634,0.8646,0.8325,0.8276,0.8007,0.6102,0.4853,0.4355,0.4307,0.4399,0.3833,0.3032,0.3035,0.3197,0.2292,0.2131,0.2347,0.3201,0.4455,0.3655,0.2715,0.1747,0.1781,0.2199,0.1056,0.0573,0.0307,0.0237,0.0470,0.0102,0.0057,0.0031,0.0163,0.0099,0.0084,0.0270,0.0277,0.0097,0.0054,0.0148,0.0092,M
107 | 0.0331,0.0423,0.0474,0.0818,0.0835,0.0756,0.0374,0.0961,0.0548,0.0193,0.0897,0.1734,0.1936,0.2803,0.3313,0.5020,0.6360,0.7096,0.8333,0.8730,0.8073,0.7507,0.7526,0.7298,0.6177,0.4946,0.4531,0.4099,0.4540,0.4124,0.3139,0.3194,0.3692,0.3776,0.4469,0.4777,0.4716,0.4664,0.3893,0.4255,0.4064,0.3712,0.3863,0.2802,0.1283,0.1117,0.1303,0.0787,0.0436,0.0224,0.0133,0.0078,0.0174,0.0176,0.0038,0.0129,0.0066,0.0044,0.0134,0.0092,M
108 | 0.0428,0.0555,0.0708,0.0618,0.1215,0.1524,0.1543,0.0391,0.0610,0.0113,0.1255,0.2473,0.3011,0.3747,0.4520,0.5392,0.6588,0.7113,0.7602,0.8672,0.8416,0.7974,0.8385,0.9317,0.8555,0.6162,0.4139,0.3269,0.3108,0.2554,0.3367,0.4465,0.5000,0.5111,0.5194,0.4619,0.4234,0.4372,0.4277,0.4433,0.3700,0.3324,0.2564,0.2527,0.2137,0.1789,0.1010,0.0528,0.0453,0.0118,0.0009,0.0142,0.0179,0.0079,0.0060,0.0131,0.0089,0.0084,0.0113,0.0049,M
109 | 0.0599,0.0474,0.0498,0.0387,0.1026,0.0773,0.0853,0.0447,0.1094,0.0351,0.1582,0.2023,0.2268,0.2829,0.3819,0.4665,0.6687,0.8647,0.9361,0.9367,0.9144,0.9162,0.9311,0.8604,0.7327,0.5763,0.4162,0.4113,0.4146,0.3149,0.2936,0.3169,0.3149,0.4132,0.3994,0.4195,0.4532,0.4419,0.4737,0.3431,0.3194,0.3370,0.2493,0.2650,0.1748,0.0932,0.0530,0.0081,0.0342,0.0137,0.0028,0.0013,0.0005,0.0227,0.0209,0.0081,0.0117,0.0114,0.0112,0.0100,M
110 | 0.0264,0.0071,0.0342,0.0793,0.1043,0.0783,0.1417,0.1176,0.0453,0.0945,0.1132,0.0840,0.0717,0.1968,0.2633,0.4191,0.5050,0.6711,0.7922,0.8381,0.8759,0.9422,1.0000,0.9931,0.9575,0.8647,0.7215,0.5801,0.4964,0.4886,0.4079,0.2443,0.1768,0.2472,0.3518,0.3762,0.2909,0.2311,0.3168,0.3554,0.3741,0.4443,0.3261,0.1963,0.0864,0.1688,0.1991,0.1217,0.0628,0.0323,0.0253,0.0214,0.0262,0.0177,0.0037,0.0068,0.0121,0.0077,0.0078,0.0066,M
111 | 0.0210,0.0121,0.0203,0.1036,0.1675,0.0418,0.0723,0.0828,0.0494,0.0686,0.1125,0.1741,0.2710,0.3087,0.3575,0.4998,0.6011,0.6470,0.8067,0.9008,0.8906,0.9338,1.0000,0.9102,0.8496,0.7867,0.7688,0.7718,0.6268,0.4301,0.2077,0.1198,0.1660,0.2618,0.3862,0.3958,0.3248,0.2302,0.3250,0.4022,0.4344,0.4008,0.3370,0.2518,0.2101,0.1181,0.1150,0.0550,0.0293,0.0183,0.0104,0.0117,0.0101,0.0061,0.0031,0.0099,0.0080,0.0107,0.0161,0.0133,M
112 | 0.0530,0.0885,0.1997,0.2604,0.3225,0.2247,0.0617,0.2287,0.0950,0.0740,0.1610,0.2226,0.2703,0.3365,0.4266,0.4144,0.5655,0.6921,0.8547,0.9234,0.9171,1.0000,0.9532,0.9101,0.8337,0.7053,0.6534,0.4483,0.2460,0.2020,0.1446,0.0994,0.1510,0.2392,0.4434,0.5023,0.4441,0.4571,0.3927,0.2900,0.3408,0.4990,0.3632,0.1387,0.1800,0.1299,0.0523,0.0817,0.0469,0.0114,0.0299,0.0244,0.0199,0.0257,0.0082,0.0151,0.0171,0.0146,0.0134,0.0056,M
113 | 0.0454,0.0472,0.0697,0.1021,0.1397,0.1493,0.1487,0.0771,0.1171,0.1675,0.2799,0.3323,0.4012,0.4296,0.5350,0.5411,0.6870,0.8045,0.9194,0.9169,1.0000,0.9972,0.9093,0.7918,0.6705,0.5324,0.3572,0.2484,0.3161,0.3775,0.3138,0.1713,0.2937,0.5234,0.5926,0.5437,0.4516,0.3379,0.3215,0.2178,0.1674,0.2634,0.2980,0.2037,0.1155,0.0919,0.0882,0.0228,0.0380,0.0142,0.0137,0.0120,0.0042,0.0238,0.0129,0.0084,0.0218,0.0321,0.0154,0.0053,M
114 | 0.0283,0.0599,0.0656,0.0229,0.0839,0.1673,0.1154,0.1098,0.1370,0.1767,0.1995,0.2869,0.3275,0.3769,0.4169,0.5036,0.6180,0.8025,0.9333,0.9399,0.9275,0.9450,0.8328,0.7773,0.7007,0.6154,0.5810,0.4454,0.3707,0.2891,0.2185,0.1711,0.3578,0.3947,0.2867,0.2401,0.3619,0.3314,0.3763,0.4767,0.4059,0.3661,0.2320,0.1450,0.1017,0.1111,0.0655,0.0271,0.0244,0.0179,0.0109,0.0147,0.0170,0.0158,0.0046,0.0073,0.0054,0.0033,0.0045,0.0079,M
115 | 0.0114,0.0222,0.0269,0.0384,0.1217,0.2062,0.1489,0.0929,0.1350,0.1799,0.2486,0.2973,0.3672,0.4394,0.5258,0.6755,0.7402,0.8284,0.9033,0.9584,1.0000,0.9982,0.8899,0.7493,0.6367,0.6744,0.7207,0.6821,0.5512,0.4789,0.3924,0.2533,0.1089,0.1390,0.2551,0.3301,0.2818,0.2142,0.2266,0.2142,0.2354,0.2871,0.2596,0.1925,0.1256,0.1003,0.0951,0.1210,0.0728,0.0174,0.0213,0.0269,0.0152,0.0257,0.0097,0.0041,0.0050,0.0145,0.0103,0.0025,M
116 | 0.0414,0.0436,0.0447,0.0844,0.0419,0.1215,0.2002,0.1516,0.0818,0.1975,0.2309,0.3025,0.3938,0.5050,0.5872,0.6610,0.7417,0.8006,0.8456,0.7939,0.8804,0.8384,0.7852,0.8479,0.7434,0.6433,0.5514,0.3519,0.3168,0.3346,0.2056,0.1032,0.3168,0.4040,0.4282,0.4538,0.3704,0.3741,0.3839,0.3494,0.4380,0.4265,0.2854,0.2808,0.2395,0.0369,0.0805,0.0541,0.0177,0.0065,0.0222,0.0045,0.0136,0.0113,0.0053,0.0165,0.0141,0.0077,0.0246,0.0198,M
117 | 0.0094,0.0333,0.0306,0.0376,0.1296,0.1795,0.1909,0.1692,0.1870,0.1725,0.2228,0.3106,0.4144,0.5157,0.5369,0.5107,0.6441,0.7326,0.8164,0.8856,0.9891,1.0000,0.8750,0.8631,0.9074,0.8674,0.7750,0.6600,0.5615,0.4016,0.2331,0.1164,0.1095,0.0431,0.0619,0.1956,0.2120,0.3242,0.4102,0.2939,0.1911,0.1702,0.1010,0.1512,0.1427,0.1097,0.1173,0.0972,0.0703,0.0281,0.0216,0.0153,0.0112,0.0241,0.0164,0.0055,0.0078,0.0055,0.0091,0.0067,M
118 | 0.0228,0.0106,0.0130,0.0842,0.1117,0.1506,0.1776,0.0997,0.1428,0.2227,0.2621,0.3109,0.2859,0.3316,0.3755,0.4499,0.4765,0.6254,0.7304,0.8702,0.9349,0.9614,0.9126,0.9443,1.0000,0.9455,0.8815,0.7520,0.7068,0.5986,0.3857,0.2510,0.2162,0.0968,0.1323,0.1344,0.2250,0.3244,0.3939,0.3806,0.3258,0.3654,0.2983,0.1779,0.1535,0.1199,0.0959,0.0765,0.0649,0.0313,0.0185,0.0098,0.0178,0.0077,0.0074,0.0095,0.0055,0.0045,0.0063,0.0039,M
119 | 0.0363,0.0478,0.0298,0.0210,0.1409,0.1916,0.1349,0.1613,0.1703,0.1444,0.1989,0.2154,0.2863,0.3570,0.3980,0.4359,0.5334,0.6304,0.6995,0.7435,0.8379,0.8641,0.9014,0.9432,0.9536,1.0000,0.9547,0.9745,0.8962,0.7196,0.5462,0.3156,0.2525,0.1969,0.2189,0.1533,0.0711,0.1498,0.1755,0.2276,0.1322,0.1056,0.1973,0.1692,0.1881,0.1177,0.0779,0.0495,0.0492,0.0194,0.0250,0.0115,0.0190,0.0055,0.0096,0.0050,0.0066,0.0114,0.0073,0.0033,M
120 | 0.0261,0.0266,0.0223,0.0749,0.1364,0.1513,0.1316,0.1654,0.1864,0.2013,0.2890,0.3650,0.3510,0.3495,0.4325,0.5398,0.6237,0.6876,0.7329,0.8107,0.8396,0.8632,0.8747,0.9607,0.9716,0.9121,0.8576,0.8798,0.7720,0.5711,0.4264,0.2860,0.3114,0.2066,0.1165,0.0185,0.1302,0.2480,0.1637,0.1103,0.2144,0.2033,0.1887,0.1370,0.1376,0.0307,0.0373,0.0606,0.0399,0.0169,0.0135,0.0222,0.0175,0.0127,0.0022,0.0124,0.0054,0.0021,0.0028,0.0023,M
121 | 0.0346,0.0509,0.0079,0.0243,0.0432,0.0735,0.0938,0.1134,0.1228,0.1508,0.1809,0.2390,0.2947,0.2866,0.4010,0.5325,0.5486,0.5823,0.6041,0.6749,0.7084,0.7890,0.9284,0.9781,0.9738,1.0000,0.9702,0.9956,0.8235,0.6020,0.5342,0.4867,0.3526,0.1566,0.0946,0.1613,0.2824,0.3390,0.3019,0.2945,0.2978,0.2676,0.2055,0.2069,0.1625,0.1216,0.1013,0.0744,0.0386,0.0050,0.0146,0.0040,0.0122,0.0107,0.0112,0.0102,0.0052,0.0024,0.0079,0.0031,M
122 | 0.0162,0.0041,0.0239,0.0441,0.0630,0.0921,0.1368,0.1078,0.1552,0.1779,0.2164,0.2568,0.3089,0.3829,0.4393,0.5335,0.5996,0.6728,0.7309,0.8092,0.8941,0.9668,1.0000,0.9893,0.9376,0.8991,0.9184,0.9128,0.7811,0.6018,0.3765,0.3300,0.2280,0.0212,0.1117,0.1788,0.2373,0.2843,0.2241,0.2715,0.3363,0.2546,0.1867,0.2160,0.1278,0.0768,0.1070,0.0946,0.0636,0.0227,0.0128,0.0173,0.0135,0.0114,0.0062,0.0157,0.0088,0.0036,0.0053,0.0030,M
123 | 0.0249,0.0119,0.0277,0.0760,0.1218,0.1538,0.1192,0.1229,0.2119,0.2531,0.2855,0.2961,0.3341,0.4287,0.5205,0.6087,0.7236,0.7577,0.7726,0.8098,0.8995,0.9247,0.9365,0.9853,0.9776,1.0000,0.9896,0.9076,0.7306,0.5758,0.4469,0.3719,0.2079,0.0955,0.0488,0.1406,0.2554,0.2054,0.1614,0.2232,0.1773,0.2293,0.2521,0.1464,0.0673,0.0965,0.1492,0.1128,0.0463,0.0193,0.0140,0.0027,0.0068,0.0150,0.0012,0.0133,0.0048,0.0244,0.0077,0.0074,M
124 | 0.0270,0.0163,0.0341,0.0247,0.0822,0.1256,0.1323,0.1584,0.2017,0.2122,0.2210,0.2399,0.2964,0.4061,0.5095,0.5512,0.6613,0.6804,0.6520,0.6788,0.7811,0.8369,0.8969,0.9856,1.0000,0.9395,0.8917,0.8105,0.6828,0.5572,0.4301,0.3339,0.2035,0.0798,0.0809,0.1525,0.2626,0.2456,0.1980,0.2412,0.2409,0.1901,0.2077,0.1767,0.1119,0.0779,0.1344,0.0960,0.0598,0.0330,0.0197,0.0189,0.0204,0.0085,0.0043,0.0092,0.0138,0.0094,0.0105,0.0093,M
125 | 0.0388,0.0324,0.0688,0.0898,0.1267,0.1515,0.2134,0.2613,0.2832,0.2718,0.3645,0.3934,0.3843,0.4677,0.5364,0.4823,0.4835,0.5862,0.7579,0.6997,0.6918,0.8633,0.9107,0.9346,0.7884,0.8585,0.9261,0.7080,0.5779,0.5215,0.4505,0.3129,0.1448,0.1046,0.1820,0.1519,0.1017,0.1438,0.1986,0.2039,0.2778,0.2879,0.1331,0.1140,0.1310,0.1433,0.0624,0.0100,0.0098,0.0131,0.0152,0.0255,0.0071,0.0263,0.0079,0.0111,0.0107,0.0068,0.0097,0.0067,M
126 | 0.0228,0.0853,0.1000,0.0428,0.1117,0.1651,0.1597,0.2116,0.3295,0.3517,0.3330,0.3643,0.4020,0.4731,0.5196,0.6573,0.8426,0.8476,0.8344,0.8453,0.7999,0.8537,0.9642,1.0000,0.9357,0.9409,0.9070,0.7104,0.6320,0.5667,0.3501,0.2447,0.1698,0.3290,0.3674,0.2331,0.2413,0.2556,0.1892,0.1940,0.3074,0.2785,0.0308,0.1238,0.1854,0.1753,0.1079,0.0728,0.0242,0.0191,0.0159,0.0172,0.0191,0.0260,0.0140,0.0125,0.0116,0.0093,0.0012,0.0036,M
127 | 0.0715,0.0849,0.0587,0.0218,0.0862,0.1801,0.1916,0.1896,0.2960,0.4186,0.4867,0.5249,0.5959,0.6855,0.8573,0.9718,0.8693,0.8711,0.8954,0.9922,0.8980,0.8158,0.8373,0.7541,0.5893,0.5488,0.5643,0.5406,0.4783,0.4439,0.3698,0.2574,0.1478,0.1743,0.1229,0.1588,0.1803,0.1436,0.1667,0.2630,0.2234,0.1239,0.0869,0.2092,0.1499,0.0676,0.0899,0.0927,0.0658,0.0086,0.0216,0.0153,0.0121,0.0096,0.0196,0.0042,0.0066,0.0099,0.0083,0.0124,M
128 | 0.0209,0.0261,0.0120,0.0768,0.1064,0.1680,0.3016,0.3460,0.3314,0.4125,0.3943,0.1334,0.4622,0.9970,0.9137,0.8292,0.6994,0.7825,0.8789,0.8501,0.8920,0.9473,1.0000,0.8975,0.7806,0.8321,0.6502,0.4548,0.4732,0.3391,0.2747,0.0978,0.0477,0.1403,0.1834,0.2148,0.1271,0.1912,0.3391,0.3444,0.2369,0.1195,0.2665,0.2587,0.1393,0.1083,0.1383,0.1321,0.1069,0.0325,0.0316,0.0057,0.0159,0.0085,0.0372,0.0101,0.0127,0.0288,0.0129,0.0023,M
129 | 0.0374,0.0586,0.0628,0.0534,0.0255,0.1422,0.2072,0.2734,0.3070,0.2597,0.3483,0.3999,0.4574,0.5950,0.7924,0.8272,0.8087,0.8977,0.9828,0.8982,0.8890,0.9367,0.9122,0.7936,0.6718,0.6318,0.4865,0.3388,0.4832,0.3822,0.3075,0.1267,0.0743,0.1510,0.1906,0.1817,0.1709,0.0946,0.2829,0.3006,0.1602,0.1483,0.2875,0.2047,0.1064,0.1395,0.1065,0.0527,0.0395,0.0183,0.0353,0.0118,0.0063,0.0237,0.0032,0.0087,0.0124,0.0113,0.0098,0.0126,M
130 | 0.1371,0.1226,0.1385,0.1484,0.1776,0.1428,0.1773,0.2161,0.1630,0.2067,0.4257,0.5484,0.7131,0.7003,0.6777,0.7939,0.9382,0.8925,0.9146,0.7832,0.7960,0.7983,0.7716,0.6615,0.4860,0.5572,0.4697,0.5640,0.4517,0.3369,0.2684,0.2339,0.3052,0.3016,0.2753,0.1041,0.1757,0.3156,0.3603,0.2736,0.1301,0.2458,0.3404,0.1753,0.0679,0.1062,0.0643,0.0532,0.0531,0.0272,0.0171,0.0118,0.0129,0.0344,0.0065,0.0067,0.0022,0.0079,0.0146,0.0051,M
131 | 0.0443,0.0446,0.0235,0.1008,0.2252,0.2611,0.2061,0.1668,0.1801,0.3083,0.3794,0.5364,0.6173,0.7842,0.8392,0.9016,1.0000,0.8911,0.8753,0.7886,0.7156,0.7581,0.6372,0.3210,0.2076,0.2279,0.3309,0.2847,0.1949,0.1671,0.1025,0.1362,0.2212,0.1124,0.1677,0.1039,0.2562,0.2624,0.2236,0.1180,0.1103,0.2831,0.2385,0.0255,0.1967,0.1483,0.0434,0.0627,0.0513,0.0473,0.0248,0.0274,0.0205,0.0141,0.0185,0.0055,0.0045,0.0115,0.0152,0.0100,M
132 | 0.1150,0.1163,0.0866,0.0358,0.0232,0.1267,0.2417,0.2661,0.4346,0.5378,0.3816,0.0991,0.0616,0.1795,0.3907,0.3602,0.3041,0.2428,0.4060,0.8395,0.9777,0.4680,0.0610,0.2143,0.1348,0.2854,0.1617,0.2649,0.4565,0.6502,0.2848,0.3296,0.5370,0.6627,0.8626,0.8547,0.7848,0.9016,0.8827,0.6086,0.2810,0.0906,0.1177,0.2694,0.5214,0.4232,0.2340,0.1928,0.1092,0.0507,0.0228,0.0099,0.0065,0.0085,0.0166,0.0110,0.0190,0.0141,0.0068,0.0086,M
133 | 0.0968,0.0821,0.0629,0.0608,0.0617,0.1207,0.0944,0.4223,0.5744,0.5025,0.3488,0.1700,0.2076,0.3087,0.4224,0.5312,0.2436,0.1884,0.1908,0.8321,1.0000,0.4076,0.0960,0.1928,0.2419,0.3790,0.2893,0.3451,0.3777,0.5213,0.2316,0.3335,0.4781,0.6116,0.6705,0.7375,0.7356,0.7792,0.6788,0.5259,0.2762,0.1545,0.2019,0.2231,0.4221,0.3067,0.1329,0.1349,0.1057,0.0499,0.0206,0.0073,0.0081,0.0303,0.0190,0.0212,0.0126,0.0201,0.0210,0.0041,M
134 | 0.0790,0.0707,0.0352,0.1660,0.1330,0.0226,0.0771,0.2678,0.5664,0.6609,0.5002,0.2583,0.1650,0.4347,0.4515,0.4579,0.3366,0.4000,0.5325,0.9010,0.9939,0.3689,0.1012,0.0248,0.2318,0.3981,0.2259,0.5247,0.6898,0.8316,0.4326,0.3741,0.5756,0.8043,0.7963,0.7174,0.7056,0.8148,0.7601,0.6034,0.4554,0.4729,0.4478,0.3722,0.4693,0.3839,0.0768,0.1467,0.0777,0.0469,0.0193,0.0298,0.0390,0.0294,0.0175,0.0249,0.0141,0.0073,0.0025,0.0101,M
135 | 0.1083,0.1070,0.0257,0.0837,0.0748,0.1125,0.3322,0.4590,0.5526,0.5966,0.5304,0.2251,0.2402,0.2689,0.6646,0.6632,0.1674,0.0837,0.4331,0.8718,0.7992,0.3712,0.1703,0.1611,0.2086,0.2847,0.2211,0.6134,0.5807,0.6925,0.3825,0.4303,0.7791,0.8703,1.0000,0.9212,0.9386,0.9303,0.7314,0.4791,0.2087,0.2016,0.1669,0.2872,0.4374,0.3097,0.1578,0.0553,0.0334,0.0209,0.0172,0.0180,0.0110,0.0234,0.0276,0.0032,0.0084,0.0122,0.0082,0.0143,M
136 | 0.0094,0.0611,0.1136,0.1203,0.0403,0.1227,0.2495,0.4566,0.6587,0.5079,0.3350,0.0834,0.3004,0.3957,0.3769,0.3828,0.1247,0.1363,0.2678,0.9188,0.9779,0.3236,0.1944,0.1874,0.0885,0.3443,0.2953,0.5908,0.4564,0.7334,0.1969,0.2790,0.6212,0.8681,0.8621,0.9380,0.8327,0.9480,0.6721,0.4436,0.5163,0.3809,0.1557,0.1449,0.2662,0.1806,0.1699,0.2559,0.1129,0.0201,0.0480,0.0234,0.0175,0.0352,0.0158,0.0326,0.0201,0.0168,0.0245,0.0154,M
137 | 0.1088,0.1278,0.0926,0.1234,0.1276,0.1731,0.1948,0.4262,0.6828,0.5761,0.4733,0.2362,0.1023,0.2904,0.4713,0.4659,0.1415,0.0849,0.3257,0.9007,0.9312,0.4856,0.1346,0.1604,0.2737,0.5609,0.3654,0.6139,0.5470,0.8474,0.5638,0.5443,0.5086,0.6253,0.8497,0.8406,0.8420,0.9136,0.7713,0.4882,0.3724,0.4469,0.4586,0.4491,0.5616,0.4305,0.0945,0.0794,0.0274,0.0154,0.0140,0.0455,0.0213,0.0082,0.0124,0.0167,0.0103,0.0205,0.0178,0.0187,M
138 | 0.0430,0.0902,0.0833,0.0813,0.0165,0.0277,0.0569,0.2057,0.3887,0.7106,0.7342,0.5033,0.3000,0.1951,0.2767,0.3737,0.2507,0.2507,0.3292,0.4871,0.6527,0.8454,0.9739,1.0000,0.6665,0.5323,0.4024,0.3444,0.4239,0.4182,0.4393,0.1162,0.4336,0.6553,0.6172,0.4373,0.4118,0.3641,0.4572,0.4367,0.2964,0.4312,0.4155,0.1824,0.1487,0.0138,0.1164,0.2052,0.1069,0.0199,0.0208,0.0176,0.0197,0.0210,0.0141,0.0049,0.0027,0.0162,0.0059,0.0021,M
139 | 0.0731,0.1249,0.1665,0.1496,0.1443,0.2770,0.2555,0.1712,0.0466,0.1114,0.1739,0.3160,0.3249,0.2164,0.2031,0.2580,0.1796,0.2422,0.3609,0.1810,0.2604,0.6572,0.9734,0.9757,0.8079,0.6521,0.4915,0.5363,0.7649,0.5250,0.5101,0.4219,0.4160,0.1906,0.0223,0.4219,0.5496,0.2483,0.2034,0.2729,0.2837,0.4463,0.3178,0.0807,0.1192,0.2134,0.3241,0.2945,0.1474,0.0211,0.0361,0.0444,0.0230,0.0290,0.0141,0.0161,0.0177,0.0194,0.0207,0.0057,M
140 | 0.0164,0.0627,0.0738,0.0608,0.0233,0.1048,0.1338,0.0644,0.1522,0.0780,0.1791,0.2681,0.1788,0.1039,0.1980,0.3234,0.3748,0.2586,0.3680,0.3508,0.5606,0.5231,0.5469,0.6954,0.6352,0.6757,0.8499,0.8025,0.6563,0.8591,0.6655,0.5369,0.3118,0.3763,0.2801,0.0875,0.3319,0.4237,0.1801,0.3743,0.4627,0.1614,0.2494,0.3202,0.2265,0.1146,0.0476,0.0943,0.0824,0.0171,0.0244,0.0258,0.0143,0.0226,0.0187,0.0185,0.0110,0.0094,0.0078,0.0112,M
141 | 0.0412,0.1135,0.0518,0.0232,0.0646,0.1124,0.1787,0.2407,0.2682,0.2058,0.1546,0.2671,0.3141,0.2904,0.3531,0.5079,0.4639,0.1859,0.4474,0.4079,0.5400,0.4786,0.4332,0.6113,0.5091,0.4606,0.7243,0.8987,0.8826,0.9201,0.8005,0.6033,0.2120,0.2866,0.4033,0.2803,0.3087,0.3550,0.2545,0.1432,0.5869,0.6431,0.5826,0.4286,0.4894,0.5777,0.4315,0.2640,0.1794,0.0772,0.0798,0.0376,0.0143,0.0272,0.0127,0.0166,0.0095,0.0225,0.0098,0.0085,M
142 | 0.0707,0.1252,0.1447,0.1644,0.1693,0.0844,0.0715,0.0947,0.1583,0.1247,0.2340,0.1764,0.2284,0.3115,0.4725,0.5543,0.5386,0.3746,0.4583,0.5961,0.7464,0.7644,0.5711,0.6257,0.6695,0.7131,0.7567,0.8077,0.8477,0.9289,0.9513,0.7995,0.4362,0.4048,0.4952,0.1712,0.3652,0.3763,0.2841,0.0427,0.5331,0.6952,0.4288,0.3063,0.5835,0.5692,0.2630,0.1196,0.0983,0.0374,0.0291,0.0156,0.0197,0.0135,0.0127,0.0138,0.0133,0.0131,0.0154,0.0218,M
143 | 0.0526,0.0563,0.1219,0.1206,0.0246,0.1022,0.0539,0.0439,0.2291,0.1632,0.2544,0.2807,0.3011,0.3361,0.3024,0.2285,0.2910,0.1316,0.1151,0.3404,0.5562,0.6379,0.6553,0.7384,0.6534,0.5423,0.6877,0.7325,0.7726,0.8229,0.8787,0.9108,0.6705,0.6092,0.7505,0.4775,0.1666,0.3749,0.3776,0.2106,0.5886,0.5628,0.2577,0.5245,0.6149,0.5123,0.3385,0.1499,0.0546,0.0270,0.0380,0.0339,0.0149,0.0335,0.0376,0.0174,0.0132,0.0103,0.0364,0.0208,M
144 | 0.0516,0.0944,0.0622,0.0415,0.0995,0.2431,0.1777,0.2018,0.2611,0.1294,0.2646,0.2778,0.4432,0.3672,0.2035,0.2764,0.3252,0.1536,0.2784,0.3508,0.5187,0.7052,0.7143,0.6814,0.5100,0.5308,0.6131,0.8388,0.9031,0.8607,0.9656,0.9168,0.7132,0.6898,0.7310,0.4134,0.1580,0.1819,0.1381,0.2960,0.6935,0.8246,0.5351,0.4403,0.6448,0.6214,0.3016,0.1379,0.0364,0.0355,0.0456,0.0432,0.0274,0.0152,0.0120,0.0129,0.0020,0.0109,0.0074,0.0078,M
145 | 0.0299,0.0688,0.0992,0.1021,0.0800,0.0629,0.0130,0.0813,0.1761,0.0998,0.0523,0.0904,0.2655,0.3099,0.3520,0.3892,0.3962,0.2449,0.2355,0.3045,0.3112,0.4698,0.5534,0.4532,0.4464,0.4670,0.4621,0.6988,0.7626,0.7025,0.7382,0.7446,0.7927,0.5227,0.3967,0.3042,0.1309,0.2408,0.1780,0.1598,0.5657,0.6443,0.4241,0.4567,0.5760,0.5293,0.3287,0.1283,0.0698,0.0334,0.0342,0.0459,0.0277,0.0172,0.0087,0.0046,0.0203,0.0130,0.0115,0.0015,M
146 | 0.0721,0.1574,0.1112,0.1085,0.0666,0.1800,0.1108,0.2794,0.1408,0.0795,0.2534,0.3920,0.3375,0.1610,0.1889,0.3308,0.2282,0.2177,0.1853,0.5167,0.5342,0.6298,0.8437,0.6756,0.5825,0.6141,0.8809,0.8375,0.3869,0.5051,0.5455,0.4241,0.1534,0.4950,0.6983,0.7109,0.5647,0.4870,0.5515,0.4433,0.5250,0.6075,0.5251,0.1359,0.4268,0.4442,0.2193,0.0900,0.1200,0.0628,0.0234,0.0309,0.0127,0.0082,0.0281,0.0117,0.0092,0.0147,0.0157,0.0129,M
147 | 0.1021,0.0830,0.0577,0.0627,0.0635,0.1328,0.0988,0.1787,0.1199,0.1369,0.2509,0.2631,0.2796,0.2977,0.3823,0.3129,0.3956,0.2093,0.3218,0.3345,0.3184,0.2887,0.3610,0.2566,0.4106,0.4591,0.4722,0.7278,0.7591,0.6579,0.7514,0.6666,0.4903,0.5962,0.6552,0.4014,0.1188,0.3245,0.3107,0.1354,0.5109,0.7988,0.7517,0.5508,0.5858,0.7292,0.5522,0.3339,0.1608,0.0475,0.1004,0.0709,0.0317,0.0309,0.0252,0.0087,0.0177,0.0214,0.0227,0.0106,M
148 | 0.0654,0.0649,0.0737,0.1132,0.2482,0.1257,0.1797,0.0989,0.2460,0.3422,0.2128,0.1377,0.4032,0.5684,0.2398,0.4331,0.5954,0.5772,0.8176,0.8835,0.5248,0.6373,0.8375,0.6699,0.7756,0.8750,0.8300,0.6896,0.3372,0.6405,0.7138,0.8202,0.6657,0.5254,0.2960,0.0704,0.0970,0.3941,0.6028,0.3521,0.3924,0.4808,0.4602,0.4164,0.5438,0.5649,0.3195,0.2484,0.1299,0.0825,0.0243,0.0210,0.0361,0.0239,0.0447,0.0394,0.0355,0.0440,0.0243,0.0098,M
149 | 0.0712,0.0901,0.1276,0.1497,0.1284,0.1165,0.1285,0.1684,0.1830,0.2127,0.2891,0.3985,0.4576,0.5821,0.5027,0.1930,0.2579,0.3177,0.2745,0.6186,0.8958,0.7442,0.5188,0.2811,0.1773,0.6607,0.7576,0.5122,0.4701,0.5479,0.4347,0.1276,0.0846,0.0927,0.0313,0.0998,0.1781,0.1586,0.3001,0.2208,0.1455,0.2895,0.3203,0.1414,0.0629,0.0734,0.0805,0.0608,0.0565,0.0286,0.0154,0.0154,0.0156,0.0054,0.0030,0.0048,0.0087,0.0101,0.0095,0.0068,M
150 | 0.0207,0.0535,0.0334,0.0818,0.0740,0.0324,0.0918,0.1070,0.1553,0.1234,0.1796,0.1787,0.1247,0.2577,0.3370,0.3990,0.1647,0.2266,0.3219,0.5356,0.8159,1.0000,0.8701,0.6889,0.6299,0.5738,0.5707,0.5976,0.4301,0.2058,0.1000,0.2247,0.2308,0.3977,0.3317,0.1726,0.1429,0.2168,0.1967,0.2140,0.3674,0.2023,0.0778,0.0925,0.2388,0.3400,0.2594,0.1102,0.0911,0.0462,0.0171,0.0033,0.0050,0.0190,0.0103,0.0121,0.0042,0.0090,0.0070,0.0099,M
151 | 0.0209,0.0278,0.0115,0.0445,0.0427,0.0766,0.1458,0.1430,0.1894,0.1853,0.1748,0.1556,0.1476,0.1378,0.2584,0.3827,0.4784,0.5360,0.6192,0.7912,0.9264,1.0000,0.9080,0.7435,0.5557,0.3172,0.1295,0.0598,0.2722,0.3616,0.3293,0.4855,0.3936,0.1845,0.0342,0.2489,0.3837,0.3514,0.2654,0.1760,0.1599,0.0866,0.0590,0.0813,0.0492,0.0417,0.0495,0.0367,0.0115,0.0118,0.0133,0.0096,0.0014,0.0049,0.0039,0.0029,0.0078,0.0047,0.0021,0.0011,M
152 | 0.0231,0.0315,0.0170,0.0226,0.0410,0.0116,0.0223,0.0805,0.2365,0.2461,0.2245,0.1520,0.1732,0.3099,0.4380,0.5595,0.6820,0.6164,0.6803,0.8435,0.9921,1.0000,0.7983,0.5426,0.3952,0.5179,0.5650,0.3042,0.1881,0.3960,0.2286,0.3544,0.4187,0.2398,0.1847,0.3760,0.4331,0.3626,0.2519,0.1870,0.1046,0.2339,0.1991,0.1100,0.0684,0.0303,0.0674,0.0785,0.0455,0.0246,0.0151,0.0125,0.0036,0.0123,0.0043,0.0114,0.0052,0.0091,0.0008,0.0092,M
153 | 0.0131,0.0201,0.0045,0.0217,0.0230,0.0481,0.0742,0.0333,0.1369,0.2079,0.2295,0.1990,0.1184,0.1891,0.2949,0.5343,0.6850,0.7923,0.8220,0.7290,0.7352,0.7918,0.8057,0.4898,0.1934,0.2924,0.6255,0.8546,0.8966,0.7821,0.5168,0.4840,0.4038,0.3411,0.2849,0.2353,0.2699,0.4442,0.4323,0.3314,0.1195,0.1669,0.3702,0.3072,0.0945,0.1545,0.1394,0.0772,0.0615,0.0230,0.0111,0.0168,0.0086,0.0045,0.0062,0.0065,0.0030,0.0066,0.0029,0.0053,M
154 | 0.0233,0.0394,0.0416,0.0547,0.0993,0.1515,0.1674,0.1513,0.1723,0.2078,0.1239,0.0236,0.1771,0.3115,0.4990,0.6707,0.7655,0.8485,0.9805,1.0000,1.0000,0.9992,0.9067,0.6803,0.5103,0.4716,0.4980,0.6196,0.7171,0.6316,0.3554,0.2897,0.4316,0.3791,0.2421,0.0944,0.0351,0.0844,0.0436,0.1130,0.2045,0.1937,0.0834,0.1502,0.1675,0.1058,0.1111,0.0849,0.0596,0.0201,0.0071,0.0104,0.0062,0.0026,0.0025,0.0061,0.0038,0.0101,0.0078,0.0006,M
155 | 0.0117,0.0069,0.0279,0.0583,0.0915,0.1267,0.1577,0.1927,0.2361,0.2169,0.1180,0.0754,0.2782,0.3758,0.5093,0.6592,0.7071,0.7532,0.8357,0.8593,0.9615,0.9838,0.8705,0.6403,0.5067,0.5395,0.6934,0.8487,0.8213,0.5962,0.2950,0.2758,0.2885,0.1893,0.1446,0.0955,0.0888,0.0836,0.0894,0.1547,0.2318,0.2225,0.1035,0.1721,0.2017,0.1787,0.1112,0.0398,0.0305,0.0084,0.0039,0.0053,0.0029,0.0020,0.0013,0.0029,0.0020,0.0062,0.0026,0.0052,M
156 | 0.0211,0.0128,0.0015,0.0450,0.0711,0.1563,0.1518,0.1206,0.1666,0.1345,0.0785,0.0367,0.1227,0.2614,0.4280,0.6122,0.7435,0.8130,0.9006,0.9603,0.9162,0.9140,0.7851,0.5134,0.3439,0.3290,0.2571,0.3685,0.5765,0.6190,0.4613,0.3615,0.4434,0.3864,0.3093,0.2138,0.1112,0.1386,0.1523,0.0996,0.1644,0.1902,0.1313,0.1776,0.2000,0.0765,0.0727,0.0749,0.0449,0.0134,0.0174,0.0117,0.0023,0.0047,0.0049,0.0031,0.0024,0.0039,0.0051,0.0015,M
157 | 0.0047,0.0059,0.0080,0.0554,0.0883,0.1278,0.1674,0.1373,0.2922,0.3469,0.3265,0.3263,0.2301,0.1253,0.2102,0.2401,0.1928,0.1673,0.1228,0.0902,0.1557,0.3291,0.5268,0.6740,0.7906,0.8938,0.9395,0.9493,0.9040,0.9151,0.8828,0.8086,0.7180,0.6720,0.6447,0.6879,0.6241,0.4936,0.4144,0.4240,0.4546,0.4392,0.4323,0.4921,0.4710,0.3196,0.2241,0.1806,0.0990,0.0251,0.0129,0.0095,0.0126,0.0069,0.0039,0.0068,0.0060,0.0045,0.0002,0.0029,M
158 | 0.0201,0.0178,0.0274,0.0232,0.0724,0.0833,0.1232,0.1298,0.2085,0.2720,0.2188,0.3037,0.2959,0.2059,0.0906,0.1610,0.1800,0.2180,0.2026,0.1506,0.0521,0.2143,0.4333,0.5943,0.6926,0.7576,0.8787,0.9060,0.8528,0.9087,0.9657,0.9306,0.7774,0.6643,0.6604,0.6884,0.6938,0.5932,0.5774,0.6223,0.5841,0.4527,0.4911,0.5762,0.5013,0.4042,0.3123,0.2232,0.1085,0.0414,0.0253,0.0131,0.0049,0.0104,0.0102,0.0092,0.0083,0.0020,0.0048,0.0036,M
159 | 0.0107,0.0453,0.0289,0.0713,0.1075,0.1019,0.1606,0.2119,0.3061,0.2936,0.3104,0.3431,0.2456,0.1887,0.1184,0.2080,0.2736,0.3274,0.2344,0.1260,0.0576,0.1241,0.3239,0.4357,0.5734,0.7825,0.9252,0.9349,0.9348,1.0000,0.9308,0.8478,0.7605,0.7040,0.7539,0.7990,0.7673,0.5955,0.4731,0.4840,0.4340,0.3954,0.4837,0.5379,0.4485,0.2674,0.1541,0.1359,0.0941,0.0261,0.0079,0.0164,0.0120,0.0113,0.0021,0.0097,0.0072,0.0060,0.0017,0.0036,M
160 | 0.0235,0.0220,0.0167,0.0516,0.0746,0.1121,0.1258,0.1717,0.3074,0.3199,0.2946,0.2484,0.2510,0.1806,0.1413,0.3019,0.3635,0.3887,0.2980,0.2219,0.1624,0.1343,0.2046,0.3791,0.5771,0.7545,0.8406,0.8547,0.9036,1.0000,0.9646,0.7912,0.6412,0.5986,0.6835,0.7771,0.8084,0.7426,0.6295,0.5708,0.4433,0.3361,0.3795,0.4950,0.4373,0.2404,0.1128,0.1654,0.0933,0.0225,0.0214,0.0221,0.0152,0.0083,0.0058,0.0023,0.0057,0.0052,0.0027,0.0021,M
161 | 0.0258,0.0433,0.0547,0.0681,0.0784,0.1250,0.1296,0.1729,0.2794,0.2954,0.2506,0.2601,0.2249,0.2115,0.1270,0.1193,0.1794,0.2185,0.1646,0.0740,0.0625,0.2381,0.4824,0.6372,0.7531,0.8959,0.9941,0.9957,0.9328,0.9344,0.8854,0.7690,0.6865,0.6390,0.6378,0.6629,0.5983,0.4565,0.3129,0.4158,0.4325,0.4031,0.4201,0.4557,0.3955,0.2966,0.2095,0.1558,0.0884,0.0265,0.0121,0.0091,0.0062,0.0019,0.0045,0.0079,0.0031,0.0063,0.0048,0.0050,M
162 | 0.0305,0.0363,0.0214,0.0227,0.0456,0.0665,0.0939,0.0972,0.2535,0.3127,0.2192,0.2621,0.2419,0.2179,0.1159,0.1237,0.0886,0.1755,0.1758,0.1540,0.0512,0.1805,0.4039,0.5697,0.6577,0.7474,0.8543,0.9085,0.8668,0.8892,0.9065,0.8522,0.7204,0.6200,0.6253,0.6848,0.7337,0.6281,0.5725,0.6119,0.5597,0.4965,0.5027,0.5772,0.5907,0.4803,0.3877,0.2779,0.1427,0.0424,0.0271,0.0200,0.0070,0.0070,0.0086,0.0089,0.0074,0.0042,0.0055,0.0021,M
163 | 0.0217,0.0152,0.0346,0.0346,0.0484,0.0526,0.0773,0.0862,0.1451,0.2110,0.2343,0.2087,0.1645,0.1689,0.1650,0.1967,0.2934,0.3709,0.4309,0.4161,0.5116,0.6501,0.7717,0.8491,0.9104,0.8912,0.8189,0.6779,0.5368,0.5207,0.5651,0.5749,0.5250,0.4255,0.3330,0.2331,0.1451,0.1648,0.2694,0.3730,0.4467,0.4133,0.3743,0.3021,0.2069,0.1790,0.1689,0.1341,0.0769,0.0222,0.0205,0.0123,0.0067,0.0011,0.0026,0.0049,0.0029,0.0022,0.0022,0.0032,M
164 | 0.0072,0.0027,0.0089,0.0061,0.0420,0.0865,0.1182,0.0999,0.1976,0.2318,0.2472,0.2880,0.2126,0.0708,0.1194,0.2808,0.4221,0.5279,0.5857,0.6153,0.6753,0.7873,0.8974,0.9828,1.0000,0.8460,0.6055,0.3036,0.0144,0.2526,0.4335,0.4918,0.5409,0.5961,0.5248,0.3777,0.2369,0.1720,0.1878,0.3250,0.2575,0.2423,0.2706,0.2323,0.1724,0.1457,0.1175,0.0868,0.0392,0.0131,0.0092,0.0078,0.0071,0.0081,0.0034,0.0064,0.0037,0.0036,0.0012,0.0037,M
165 | 0.0163,0.0198,0.0202,0.0386,0.0752,0.1444,0.1487,0.1484,0.2442,0.2822,0.3691,0.3750,0.3927,0.3308,0.1085,0.1139,0.3446,0.5441,0.6470,0.7276,0.7894,0.8264,0.8697,0.7836,0.7140,0.5698,0.2908,0.4636,0.6409,0.7405,0.8069,0.8420,1.0000,0.9536,0.6755,0.3905,0.1249,0.3629,0.6356,0.8116,0.7664,0.5417,0.2614,0.1723,0.2814,0.2764,0.1985,0.1502,0.1219,0.0493,0.0027,0.0077,0.0026,0.0031,0.0083,0.0020,0.0084,0.0108,0.0083,0.0033,M
166 | 0.0221,0.0065,0.0164,0.0487,0.0519,0.0849,0.0812,0.1833,0.2228,0.1810,0.2549,0.2984,0.2624,0.1893,0.0668,0.2666,0.4274,0.6291,0.7782,0.7686,0.8099,0.8493,0.9440,0.9450,0.9655,0.8045,0.4969,0.3960,0.3856,0.5574,0.7309,0.8549,0.9425,0.8726,0.6673,0.4694,0.1546,0.1748,0.3607,0.5208,0.5177,0.3702,0.2240,0.0816,0.0395,0.0785,0.1052,0.1034,0.0764,0.0216,0.0167,0.0089,0.0051,0.0015,0.0075,0.0058,0.0016,0.0070,0.0074,0.0038,M
167 | 0.0411,0.0277,0.0604,0.0525,0.0489,0.0385,0.0611,0.1117,0.1237,0.2300,0.1370,0.1335,0.2137,0.1526,0.0775,0.1196,0.0903,0.0689,0.2071,0.2975,0.2836,0.3353,0.3622,0.3202,0.3452,0.3562,0.3892,0.6622,0.9254,1.0000,0.8528,0.6297,0.5250,0.4012,0.2901,0.2007,0.3356,0.4799,0.6147,0.6246,0.4973,0.3492,0.2662,0.3137,0.4282,0.4262,0.3511,0.2458,0.1259,0.0327,0.0181,0.0217,0.0038,0.0019,0.0065,0.0132,0.0108,0.0050,0.0085,0.0044,M
168 | 0.0137,0.0297,0.0116,0.0082,0.0241,0.0253,0.0279,0.0130,0.0489,0.0874,0.1100,0.1084,0.1094,0.1023,0.0601,0.0906,0.1313,0.2758,0.3660,0.5269,0.5810,0.6181,0.5875,0.4639,0.5424,0.7367,0.9089,1.0000,0.8247,0.5441,0.3349,0.0877,0.1600,0.4169,0.6576,0.7390,0.7963,0.7493,0.6795,0.4713,0.2355,0.1704,0.2728,0.4016,0.4125,0.3470,0.2739,0.1790,0.0922,0.0276,0.0169,0.0081,0.0040,0.0025,0.0036,0.0058,0.0067,0.0035,0.0043,0.0033,M
169 | 0.0015,0.0186,0.0289,0.0195,0.0515,0.0817,0.1005,0.0124,0.1168,0.1476,0.2118,0.2575,0.2354,0.1334,0.0092,0.1951,0.3685,0.4646,0.5418,0.6260,0.7420,0.8257,0.8609,0.8400,0.8949,0.9945,1.0000,0.9649,0.8747,0.6257,0.2184,0.2945,0.3645,0.5012,0.7843,0.9361,0.8195,0.6207,0.4513,0.3004,0.2674,0.2241,0.3141,0.3693,0.2986,0.2226,0.0849,0.0359,0.0289,0.0122,0.0045,0.0108,0.0075,0.0089,0.0036,0.0029,0.0013,0.0010,0.0032,0.0047,M
170 | 0.0130,0.0120,0.0436,0.0624,0.0428,0.0349,0.0384,0.0446,0.1318,0.1375,0.2026,0.2389,0.2112,0.1444,0.0742,0.1533,0.3052,0.4116,0.5466,0.5933,0.6663,0.7333,0.7136,0.7014,0.7758,0.9137,0.9964,1.0000,0.8881,0.6585,0.2707,0.1746,0.2709,0.4853,0.7184,0.8209,0.7536,0.6496,0.4708,0.3482,0.3508,0.3181,0.3524,0.3659,0.2846,0.1714,0.0694,0.0303,0.0292,0.0116,0.0024,0.0084,0.0100,0.0018,0.0035,0.0058,0.0011,0.0009,0.0033,0.0026,M
171 | 0.0134,0.0172,0.0178,0.0363,0.0444,0.0744,0.0800,0.0456,0.0368,0.1250,0.2405,0.2325,0.2523,0.1472,0.0669,0.1100,0.2353,0.3282,0.4416,0.5167,0.6508,0.7793,0.7978,0.7786,0.8587,0.9321,0.9454,0.8645,0.7220,0.4850,0.1357,0.2951,0.4715,0.6036,0.8083,0.9870,0.8800,0.6411,0.4276,0.2702,0.2642,0.3342,0.4335,0.4542,0.3960,0.2525,0.1084,0.0372,0.0286,0.0099,0.0046,0.0094,0.0048,0.0047,0.0016,0.0008,0.0042,0.0024,0.0027,0.0041,M
172 | 0.0179,0.0136,0.0408,0.0633,0.0596,0.0808,0.2090,0.3465,0.5276,0.5965,0.6254,0.4507,0.3693,0.2864,0.1635,0.0422,0.1785,0.4394,0.6950,0.8097,0.8550,0.8717,0.8601,0.9201,0.8729,0.8084,0.8694,0.8411,0.5793,0.3754,0.3485,0.4639,0.6495,0.6901,0.5666,0.5188,0.5060,0.3885,0.3762,0.3738,0.2605,0.1591,0.1875,0.2267,0.1577,0.1211,0.0883,0.0850,0.0355,0.0219,0.0086,0.0123,0.0060,0.0187,0.0111,0.0126,0.0081,0.0155,0.0160,0.0085,M
173 | 0.0180,0.0444,0.0476,0.0698,0.1615,0.0887,0.0596,0.1071,0.3175,0.2918,0.3273,0.3035,0.3033,0.2587,0.1682,0.1308,0.2803,0.4519,0.6641,0.7683,0.6960,0.4393,0.2432,0.2886,0.4974,0.8172,1.0000,0.9238,0.8519,0.7722,0.5772,0.5190,0.6824,0.6220,0.5054,0.3578,0.3809,0.3813,0.3359,0.2771,0.3648,0.3834,0.3453,0.2096,0.1031,0.0798,0.0701,0.0526,0.0241,0.0117,0.0122,0.0122,0.0114,0.0098,0.0027,0.0025,0.0026,0.0050,0.0073,0.0022,M
174 | 0.0329,0.0216,0.0386,0.0627,0.1158,0.1482,0.2054,0.1605,0.2532,0.2672,0.3056,0.3161,0.2314,0.2067,0.1804,0.2808,0.4423,0.5947,0.6601,0.5844,0.4539,0.4789,0.5646,0.5281,0.7115,1.0000,0.9564,0.6090,0.5112,0.4000,0.0482,0.1852,0.2186,0.1436,0.1757,0.1428,0.1644,0.3089,0.3648,0.4441,0.3859,0.2813,0.1238,0.0953,0.1201,0.0825,0.0618,0.0141,0.0108,0.0124,0.0104,0.0095,0.0151,0.0059,0.0015,0.0053,0.0016,0.0042,0.0053,0.0074,M
175 | 0.0191,0.0173,0.0291,0.0301,0.0463,0.0690,0.0576,0.1103,0.2423,0.3134,0.4786,0.5239,0.4393,0.3440,0.2869,0.3889,0.4420,0.3892,0.4088,0.5006,0.7271,0.9385,1.0000,0.9831,0.9932,0.9161,0.8237,0.6957,0.4536,0.3281,0.2522,0.3964,0.4154,0.3308,0.1445,0.1923,0.3208,0.3367,0.5683,0.5505,0.3231,0.0448,0.3131,0.3387,0.4130,0.3639,0.2069,0.0859,0.0600,0.0267,0.0125,0.0040,0.0136,0.0137,0.0172,0.0132,0.0110,0.0122,0.0114,0.0068,M
176 | 0.0294,0.0123,0.0117,0.0113,0.0497,0.0998,0.1326,0.1117,0.2984,0.3473,0.4231,0.5044,0.5237,0.4398,0.3236,0.2956,0.3286,0.3231,0.4528,0.6339,0.7044,0.8314,0.8449,0.8512,0.9138,0.9985,1.0000,0.7544,0.4661,0.3924,0.3849,0.4674,0.4245,0.3095,0.0752,0.2885,0.4072,0.3170,0.2863,0.2634,0.0541,0.1874,0.3459,0.4646,0.4366,0.2581,0.1319,0.0505,0.0112,0.0059,0.0041,0.0056,0.0104,0.0079,0.0014,0.0054,0.0015,0.0006,0.0081,0.0043,M
177 | 0.0635,0.0709,0.0453,0.0333,0.0185,0.1260,0.1015,0.1918,0.3362,0.3900,0.4674,0.5632,0.5506,0.4343,0.3052,0.3492,0.3975,0.3875,0.5280,0.7198,0.7702,0.8562,0.8688,0.9236,1.0000,0.9662,0.9822,0.7360,0.4158,0.2918,0.3280,0.3690,0.3450,0.2863,0.0864,0.3724,0.4649,0.3488,0.1817,0.1142,0.1220,0.2621,0.4461,0.4726,0.3263,0.1423,0.0390,0.0406,0.0311,0.0086,0.0154,0.0048,0.0025,0.0087,0.0072,0.0095,0.0086,0.0085,0.0040,0.0051,M
178 | 0.0201,0.0165,0.0344,0.0330,0.0397,0.0443,0.0684,0.0903,0.1739,0.2571,0.2931,0.3108,0.3603,0.3002,0.2718,0.2007,0.1801,0.2234,0.3568,0.5492,0.7209,0.8318,0.8864,0.9520,0.9637,1.0000,0.9673,0.8664,0.7896,0.6345,0.5351,0.4056,0.2563,0.2894,0.3588,0.4296,0.4773,0.4516,0.3765,0.3051,0.1921,0.1184,0.1984,0.1570,0.0660,0.1294,0.0797,0.0052,0.0233,0.0152,0.0125,0.0054,0.0057,0.0137,0.0109,0.0035,0.0056,0.0105,0.0082,0.0036,M
179 | 0.0197,0.0394,0.0384,0.0076,0.0251,0.0629,0.0747,0.0578,0.1357,0.1695,0.1734,0.2470,0.3141,0.3297,0.2759,0.2056,0.1162,0.1884,0.3390,0.3926,0.4282,0.5418,0.6448,0.7223,0.7853,0.7984,0.8847,0.9582,0.8990,0.6831,0.6108,0.5480,0.5058,0.4476,0.2401,0.1405,0.1772,0.1742,0.3326,0.4021,0.3009,0.2075,0.1206,0.0255,0.0298,0.0691,0.0781,0.0777,0.0369,0.0057,0.0091,0.0134,0.0097,0.0042,0.0058,0.0072,0.0041,0.0045,0.0047,0.0054,M
180 | 0.0394,0.0420,0.0446,0.0551,0.0597,0.1416,0.0956,0.0802,0.1618,0.2558,0.3078,0.3404,0.3400,0.3951,0.3352,0.2252,0.2086,0.2248,0.3382,0.4578,0.6474,0.6708,0.7007,0.7619,0.7745,0.6767,0.7373,0.7834,0.9619,1.0000,0.8086,0.5558,0.5409,0.4988,0.3108,0.2897,0.2244,0.0960,0.2287,0.3228,0.3454,0.3882,0.3240,0.0926,0.1173,0.0566,0.0766,0.0969,0.0588,0.0050,0.0118,0.0146,0.0040,0.0114,0.0032,0.0062,0.0101,0.0068,0.0053,0.0087,M
181 | 0.0310,0.0221,0.0433,0.0191,0.0964,0.1827,0.1106,0.1702,0.2804,0.4432,0.5222,0.5611,0.5379,0.4048,0.2245,0.1784,0.2297,0.2720,0.5209,0.6898,0.8202,0.8780,0.7600,0.7616,0.7152,0.7288,0.8686,0.9509,0.8348,0.5730,0.4363,0.4289,0.4240,0.3156,0.1287,0.1477,0.2062,0.2400,0.5173,0.5168,0.1491,0.2407,0.3415,0.4494,0.4624,0.2001,0.0775,0.1232,0.0783,0.0089,0.0249,0.0204,0.0059,0.0053,0.0079,0.0037,0.0015,0.0056,0.0067,0.0054,M
182 | 0.0423,0.0321,0.0709,0.0108,0.1070,0.0973,0.0961,0.1323,0.2462,0.2696,0.3412,0.4292,0.3682,0.3940,0.2965,0.3172,0.2825,0.3050,0.2408,0.5420,0.6802,0.6320,0.5824,0.6805,0.5984,0.8412,0.9911,0.9187,0.8005,0.6713,0.5632,0.7332,0.6038,0.2575,0.0349,0.1799,0.3039,0.4760,0.5756,0.4254,0.5046,0.7179,0.6163,0.5663,0.5749,0.3593,0.2526,0.2299,0.1271,0.0356,0.0367,0.0176,0.0035,0.0093,0.0121,0.0075,0.0056,0.0021,0.0043,0.0017,M
183 | 0.0095,0.0308,0.0539,0.0411,0.0613,0.1039,0.1016,0.1394,0.2592,0.3745,0.4229,0.4499,0.5404,0.4303,0.3333,0.3496,0.3426,0.2851,0.4062,0.6833,0.7650,0.6670,0.5703,0.5995,0.6484,0.8614,0.9819,0.9380,0.8435,0.6074,0.5403,0.6890,0.5977,0.3244,0.0516,0.3157,0.3590,0.3881,0.5716,0.4314,0.3051,0.4393,0.4302,0.4831,0.5084,0.1952,0.1539,0.2037,0.1054,0.0251,0.0357,0.0181,0.0019,0.0102,0.0133,0.0040,0.0042,0.0030,0.0031,0.0033,M
184 | 0.0096,0.0404,0.0682,0.0688,0.0887,0.0932,0.0955,0.2140,0.2546,0.2952,0.4025,0.5148,0.4901,0.4127,0.3575,0.3447,0.3068,0.2945,0.4351,0.7264,0.8147,0.8103,0.6665,0.6958,0.7748,0.8688,1.0000,0.9941,0.8793,0.6482,0.5876,0.6408,0.4972,0.2755,0.0300,0.3356,0.3167,0.4133,0.6281,0.4977,0.2613,0.4697,0.4806,0.4921,0.5294,0.2216,0.1401,0.1888,0.0947,0.0134,0.0310,0.0237,0.0078,0.0144,0.0170,0.0012,0.0109,0.0036,0.0043,0.0018,M
185 | 0.0269,0.0383,0.0505,0.0707,0.1313,0.2103,0.2263,0.2524,0.3595,0.5915,0.6675,0.5679,0.5175,0.3334,0.2002,0.2856,0.2937,0.3424,0.5949,0.7526,0.8959,0.8147,0.7109,0.7378,0.7201,0.8254,0.8917,0.9820,0.8179,0.4848,0.3203,0.2775,0.2382,0.2911,0.1675,0.3156,0.1869,0.3391,0.5993,0.4124,0.1181,0.3651,0.4655,0.4777,0.3517,0.0920,0.1227,0.1785,0.1085,0.0300,0.0346,0.0167,0.0199,0.0145,0.0081,0.0045,0.0043,0.0027,0.0055,0.0057,M
186 | 0.0340,0.0625,0.0381,0.0257,0.0441,0.1027,0.1287,0.1850,0.2647,0.4117,0.5245,0.5341,0.5554,0.3915,0.2950,0.3075,0.3021,0.2719,0.5443,0.7932,0.8751,0.8667,0.7107,0.6911,0.7287,0.8792,1.0000,0.9816,0.8984,0.6048,0.4934,0.5371,0.4586,0.2908,0.0774,0.2249,0.1602,0.3958,0.6117,0.5196,0.2321,0.4370,0.3797,0.4322,0.4892,0.1901,0.0940,0.1364,0.0906,0.0144,0.0329,0.0141,0.0019,0.0067,0.0099,0.0042,0.0057,0.0051,0.0033,0.0058,M
187 | 0.0209,0.0191,0.0411,0.0321,0.0698,0.1579,0.1438,0.1402,0.3048,0.3914,0.3504,0.3669,0.3943,0.3311,0.3331,0.3002,0.2324,0.1381,0.3450,0.4428,0.4890,0.3677,0.4379,0.4864,0.6207,0.7256,0.6624,0.7689,0.7981,0.8577,0.9273,0.7009,0.4851,0.3409,0.1406,0.1147,0.1433,0.1820,0.3605,0.5529,0.5988,0.5077,0.5512,0.5027,0.7034,0.5904,0.4069,0.2761,0.1584,0.0510,0.0054,0.0078,0.0201,0.0104,0.0039,0.0031,0.0062,0.0087,0.0070,0.0042,M
188 | 0.0368,0.0279,0.0103,0.0566,0.0759,0.0679,0.0970,0.1473,0.2164,0.2544,0.2936,0.2935,0.2657,0.3187,0.2794,0.2534,0.1980,0.1929,0.2826,0.3245,0.3504,0.3324,0.4217,0.4774,0.4808,0.6325,0.8334,0.9458,1.0000,0.8425,0.5524,0.4795,0.5200,0.3968,0.1940,0.1519,0.2010,0.1736,0.1029,0.2244,0.3717,0.4449,0.3939,0.2030,0.2010,0.2187,0.1840,0.1477,0.0971,0.0224,0.0151,0.0105,0.0024,0.0018,0.0057,0.0092,0.0009,0.0086,0.0110,0.0052,M
189 | 0.0089,0.0274,0.0248,0.0237,0.0224,0.0845,0.1488,0.1224,0.1569,0.2119,0.3003,0.3094,0.2743,0.2547,0.1870,0.1452,0.1457,0.2429,0.3259,0.3679,0.3355,0.3100,0.3914,0.5280,0.6409,0.7707,0.8754,1.0000,0.9806,0.6969,0.4973,0.5020,0.5359,0.3842,0.1848,0.1149,0.1570,0.1311,0.1583,0.2631,0.3103,0.4512,0.3785,0.1269,0.1459,0.1092,0.1485,0.1385,0.0716,0.0176,0.0199,0.0096,0.0103,0.0093,0.0025,0.0044,0.0021,0.0069,0.0060,0.0018,M
190 | 0.0158,0.0239,0.0150,0.0494,0.0988,0.1425,0.1463,0.1219,0.1697,0.1923,0.2361,0.2719,0.3049,0.2986,0.2226,0.1745,0.2459,0.3100,0.3572,0.4283,0.4268,0.3735,0.4585,0.6094,0.7221,0.7595,0.8706,1.0000,0.9815,0.7187,0.5848,0.4192,0.3756,0.3263,0.1944,0.1394,0.1670,0.1275,0.1666,0.2574,0.2258,0.2777,0.1613,0.1335,0.1976,0.1234,0.1554,0.1057,0.0490,0.0097,0.0223,0.0121,0.0108,0.0057,0.0028,0.0079,0.0034,0.0046,0.0022,0.0021,M
191 | 0.0156,0.0210,0.0282,0.0596,0.0462,0.0779,0.1365,0.0780,0.1038,0.1567,0.2476,0.2783,0.2896,0.2956,0.3189,0.1892,0.1730,0.2226,0.2427,0.3149,0.4102,0.3808,0.4896,0.6292,0.7519,0.7985,0.8830,0.9915,0.9223,0.6981,0.6167,0.5069,0.3921,0.3524,0.2183,0.1245,0.1592,0.1626,0.2356,0.2483,0.2437,0.2715,0.1184,0.1157,0.1449,0.1883,0.1954,0.1492,0.0511,0.0155,0.0189,0.0150,0.0060,0.0082,0.0091,0.0038,0.0056,0.0056,0.0048,0.0024,M
192 | 0.0315,0.0252,0.0167,0.0479,0.0902,0.1057,0.1024,0.1209,0.1241,0.1533,0.2128,0.2536,0.2686,0.2803,0.1886,0.1485,0.2160,0.2417,0.2989,0.3341,0.3786,0.3956,0.5232,0.6913,0.7868,0.8337,0.9199,1.0000,0.8990,0.6456,0.5967,0.4355,0.2997,0.2294,0.1866,0.0922,0.1829,0.1743,0.2452,0.2407,0.2518,0.3184,0.1685,0.0675,0.1186,0.1833,0.1878,0.1114,0.0310,0.0143,0.0138,0.0108,0.0062,0.0044,0.0072,0.0007,0.0054,0.0035,0.0001,0.0055,M
193 | 0.0056,0.0267,0.0221,0.0561,0.0936,0.1146,0.0706,0.0996,0.1673,0.1859,0.2481,0.2712,0.2934,0.2637,0.1880,0.1405,0.2028,0.2613,0.2778,0.3346,0.3830,0.4003,0.5114,0.6860,0.7490,0.7843,0.9021,1.0000,0.8888,0.6511,0.6083,0.4463,0.2948,0.1729,0.1488,0.0801,0.1770,0.1382,0.2404,0.2046,0.1970,0.2778,0.1377,0.0685,0.0664,0.1665,0.1807,0.1245,0.0516,0.0044,0.0185,0.0072,0.0055,0.0074,0.0068,0.0084,0.0037,0.0024,0.0034,0.0007,M
194 | 0.0203,0.0121,0.0380,0.0128,0.0537,0.0874,0.1021,0.0852,0.1136,0.1747,0.2198,0.2721,0.2105,0.1727,0.2040,0.1786,0.1318,0.2260,0.2358,0.3107,0.3906,0.3631,0.4809,0.6531,0.7812,0.8395,0.9180,0.9769,0.8937,0.7022,0.6500,0.5069,0.3903,0.3009,0.1565,0.0985,0.2200,0.2243,0.2736,0.2152,0.2438,0.3154,0.2112,0.0991,0.0594,0.1940,0.1937,0.1082,0.0336,0.0177,0.0209,0.0134,0.0094,0.0047,0.0045,0.0042,0.0028,0.0036,0.0013,0.0016,M
195 | 0.0392,0.0108,0.0267,0.0257,0.0410,0.0491,0.1053,0.1690,0.2105,0.2471,0.2680,0.3049,0.2863,0.2294,0.1165,0.2127,0.2062,0.2222,0.3241,0.4330,0.5071,0.5944,0.7078,0.7641,0.8878,0.9711,0.9880,0.9812,0.9464,0.8542,0.6457,0.3397,0.3828,0.3204,0.1331,0.0440,0.1234,0.2030,0.1652,0.1043,0.1066,0.2110,0.2417,0.1631,0.0769,0.0723,0.0912,0.0812,0.0496,0.0101,0.0089,0.0083,0.0080,0.0026,0.0079,0.0042,0.0071,0.0044,0.0022,0.0014,M
196 | 0.0129,0.0141,0.0309,0.0375,0.0767,0.0787,0.0662,0.1108,0.1777,0.2245,0.2431,0.3134,0.3206,0.2917,0.2249,0.2347,0.2143,0.2939,0.4898,0.6127,0.7531,0.7718,0.7432,0.8673,0.9308,0.9836,1.0000,0.9595,0.8722,0.6862,0.4901,0.3280,0.3115,0.1969,0.1019,0.0317,0.0756,0.0907,0.1066,0.1380,0.0665,0.1475,0.2470,0.2788,0.2709,0.2283,0.1818,0.1185,0.0546,0.0219,0.0204,0.0124,0.0093,0.0072,0.0019,0.0027,0.0054,0.0017,0.0024,0.0029,M
197 | 0.0050,0.0017,0.0270,0.0450,0.0958,0.0830,0.0879,0.1220,0.1977,0.2282,0.2521,0.3484,0.3309,0.2614,0.1782,0.2055,0.2298,0.3545,0.6218,0.7265,0.8346,0.8268,0.8366,0.9408,0.9510,0.9801,0.9974,1.0000,0.9036,0.6409,0.3857,0.2908,0.2040,0.1653,0.1769,0.1140,0.0740,0.0941,0.0621,0.0426,0.0572,0.1068,0.1909,0.2229,0.2203,0.2265,0.1766,0.1097,0.0558,0.0142,0.0281,0.0165,0.0056,0.0010,0.0027,0.0062,0.0024,0.0063,0.0017,0.0028,M
198 | 0.0366,0.0421,0.0504,0.0250,0.0596,0.0252,0.0958,0.0991,0.1419,0.1847,0.2222,0.2648,0.2508,0.2291,0.1555,0.1863,0.2387,0.3345,0.5233,0.6684,0.7766,0.7928,0.7940,0.9129,0.9498,0.9835,1.0000,0.9471,0.8237,0.6252,0.4181,0.3209,0.2658,0.2196,0.1588,0.0561,0.0948,0.1700,0.1215,0.1282,0.0386,0.1329,0.2331,0.2468,0.1960,0.1985,0.1570,0.0921,0.0549,0.0194,0.0166,0.0132,0.0027,0.0022,0.0059,0.0016,0.0025,0.0017,0.0027,0.0027,M
199 | 0.0238,0.0318,0.0422,0.0399,0.0788,0.0766,0.0881,0.1143,0.1594,0.2048,0.2652,0.3100,0.2381,0.1918,0.1430,0.1735,0.1781,0.2852,0.5036,0.6166,0.7616,0.8125,0.7793,0.8788,0.8813,0.9470,1.0000,0.9739,0.8446,0.6151,0.4302,0.3165,0.2869,0.2017,0.1206,0.0271,0.0580,0.1262,0.1072,0.1082,0.0360,0.1197,0.2061,0.2054,0.1878,0.2047,0.1716,0.1069,0.0477,0.0170,0.0186,0.0096,0.0071,0.0084,0.0038,0.0026,0.0028,0.0013,0.0035,0.0060,M
200 | 0.0116,0.0744,0.0367,0.0225,0.0076,0.0545,0.1110,0.1069,0.1708,0.2271,0.3171,0.2882,0.2657,0.2307,0.1889,0.1791,0.2298,0.3715,0.6223,0.7260,0.7934,0.8045,0.8067,0.9173,0.9327,0.9562,1.0000,0.9818,0.8684,0.6381,0.3997,0.3242,0.2835,0.2413,0.2321,0.1260,0.0693,0.0701,0.1439,0.1475,0.0438,0.0469,0.1476,0.1742,0.1555,0.1651,0.1181,0.0720,0.0321,0.0056,0.0202,0.0141,0.0103,0.0100,0.0034,0.0026,0.0037,0.0044,0.0057,0.0035,M
201 | 0.0131,0.0387,0.0329,0.0078,0.0721,0.1341,0.1626,0.1902,0.2610,0.3193,0.3468,0.3738,0.3055,0.1926,0.1385,0.2122,0.2758,0.4576,0.6487,0.7154,0.8010,0.7924,0.8793,1.0000,0.9865,0.9474,0.9474,0.9315,0.8326,0.6213,0.3772,0.2822,0.2042,0.2190,0.2223,0.1327,0.0521,0.0618,0.1416,0.1460,0.0846,0.1055,0.1639,0.1916,0.2085,0.2335,0.1964,0.1300,0.0633,0.0183,0.0137,0.0150,0.0076,0.0032,0.0037,0.0071,0.0040,0.0009,0.0015,0.0085,M
202 | 0.0335,0.0258,0.0398,0.0570,0.0529,0.1091,0.1709,0.1684,0.1865,0.2660,0.3188,0.3553,0.3116,0.1965,0.1780,0.2794,0.2870,0.3969,0.5599,0.6936,0.7969,0.7452,0.8203,0.9261,0.8810,0.8814,0.9301,0.9955,0.8576,0.6069,0.3934,0.2464,0.1645,0.1140,0.0956,0.0080,0.0702,0.0936,0.0894,0.1127,0.0873,0.1020,0.1964,0.2256,0.1814,0.2012,0.1688,0.1037,0.0501,0.0136,0.0130,0.0120,0.0039,0.0053,0.0062,0.0046,0.0045,0.0022,0.0005,0.0031,M
203 | 0.0272,0.0378,0.0488,0.0848,0.1127,0.1103,0.1349,0.2337,0.3113,0.3997,0.3941,0.3309,0.2926,0.1760,0.1739,0.2043,0.2088,0.2678,0.2434,0.1839,0.2802,0.6172,0.8015,0.8313,0.8440,0.8494,0.9168,1.0000,0.7896,0.5371,0.6472,0.6505,0.4959,0.2175,0.0990,0.0434,0.1708,0.1979,0.1880,0.1108,0.1702,0.0585,0.0638,0.1391,0.0638,0.0581,0.0641,0.1044,0.0732,0.0275,0.0146,0.0091,0.0045,0.0043,0.0043,0.0098,0.0054,0.0051,0.0065,0.0103,M
204 | 0.0187,0.0346,0.0168,0.0177,0.0393,0.1630,0.2028,0.1694,0.2328,0.2684,0.3108,0.2933,0.2275,0.0994,0.1801,0.2200,0.2732,0.2862,0.2034,0.1740,0.4130,0.6879,0.8120,0.8453,0.8919,0.9300,0.9987,1.0000,0.8104,0.6199,0.6041,0.5547,0.4160,0.1472,0.0849,0.0608,0.0969,0.1411,0.1676,0.1200,0.1201,0.1036,0.1977,0.1339,0.0902,0.1085,0.1521,0.1363,0.0858,0.0290,0.0203,0.0116,0.0098,0.0199,0.0033,0.0101,0.0065,0.0115,0.0193,0.0157,M
205 | 0.0323,0.0101,0.0298,0.0564,0.0760,0.0958,0.0990,0.1018,0.1030,0.2154,0.3085,0.3425,0.2990,0.1402,0.1235,0.1534,0.1901,0.2429,0.2120,0.2395,0.3272,0.5949,0.8302,0.9045,0.9888,0.9912,0.9448,1.0000,0.9092,0.7412,0.7691,0.7117,0.5304,0.2131,0.0928,0.1297,0.1159,0.1226,0.1768,0.0345,0.1562,0.0824,0.1149,0.1694,0.0954,0.0080,0.0790,0.1255,0.0647,0.0179,0.0051,0.0061,0.0093,0.0135,0.0063,0.0063,0.0034,0.0032,0.0062,0.0067,M
206 | 0.0522,0.0437,0.0180,0.0292,0.0351,0.1171,0.1257,0.1178,0.1258,0.2529,0.2716,0.2374,0.1878,0.0983,0.0683,0.1503,0.1723,0.2339,0.1962,0.1395,0.3164,0.5888,0.7631,0.8473,0.9424,0.9986,0.9699,1.0000,0.8630,0.6979,0.7717,0.7305,0.5197,0.1786,0.1098,0.1446,0.1066,0.1440,0.1929,0.0325,0.1490,0.0328,0.0537,0.1309,0.0910,0.0757,0.1059,0.1005,0.0535,0.0235,0.0155,0.0160,0.0029,0.0051,0.0062,0.0089,0.0140,0.0138,0.0077,0.0031,M
207 | 0.0303,0.0353,0.0490,0.0608,0.0167,0.1354,0.1465,0.1123,0.1945,0.2354,0.2898,0.2812,0.1578,0.0273,0.0673,0.1444,0.2070,0.2645,0.2828,0.4293,0.5685,0.6990,0.7246,0.7622,0.9242,1.0000,0.9979,0.8297,0.7032,0.7141,0.6893,0.4961,0.2584,0.0969,0.0776,0.0364,0.1572,0.1823,0.1349,0.0849,0.0492,0.1367,0.1552,0.1548,0.1319,0.0985,0.1258,0.0954,0.0489,0.0241,0.0042,0.0086,0.0046,0.0126,0.0036,0.0035,0.0034,0.0079,0.0036,0.0048,M
208 | 0.0260,0.0363,0.0136,0.0272,0.0214,0.0338,0.0655,0.1400,0.1843,0.2354,0.2720,0.2442,0.1665,0.0336,0.1302,0.1708,0.2177,0.3175,0.3714,0.4552,0.5700,0.7397,0.8062,0.8837,0.9432,1.0000,0.9375,0.7603,0.7123,0.8358,0.7622,0.4567,0.1715,0.1549,0.1641,0.1869,0.2655,0.1713,0.0959,0.0768,0.0847,0.2076,0.2505,0.1862,0.1439,0.1470,0.0991,0.0041,0.0154,0.0116,0.0181,0.0146,0.0129,0.0047,0.0039,0.0061,0.0040,0.0036,0.0061,0.0115,M
209 |
--------------------------------------------------------------------------------
/util/util.go:
--------------------------------------------------------------------------------
1 | // Package util provides helpers for common tasks: file and struct operations, string manipulation, etc.
2 | package util
3 |
4 | import (
5 |
6 | // sys import
7 | "os"
8 | "time"
9 | "strconv"
10 | "math"
11 | "math/rand"
12 |
13 | // github import
14 | log "github.com/sirupsen/logrus"
15 |
16 | )
17 |
18 | func init() {
19 | // Output to stdout instead of the default stderr
20 | log.SetOutput(os.Stdout)
21 | 	// Log the debug severity or above.
22 | 	log.SetLevel(log.DebugLevel)
23 | }
24 |
25 | // Random returns a pseudo-random integer in the inclusive range [min, max].
26 | func Random(min, max int) int {
27 | 	// seed with nanoseconds so repeated calls within the same second differ
28 | 	rand.Seed(time.Now().UTC().UnixNano())
29 | 	return rand.Intn(max-min+1) + min
30 | }
31 |
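// exampleRandom is an editor-added sketch, not part of the original source:
// it shows that both bounds are inclusive, so a call like this simulates a
// die roll returning a value in {1, 2, 3, 4, 5, 6}.
func exampleRandom() int {
	return Random(1, 6)
}
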
32 | // StringInSlice looks for a string in a slice.
33 | // It returns whether the string was found and its position in the slice (false, -1 if not found).
34 | func StringInSlice(element string, slice []string) (bool, int) {
35 |
36 | // for element in slice
37 | for index, value := range slice {
38 | if value == element {
39 | return true, index
40 | }
41 | }
42 |
43 | // return false, placeholder
44 | return false, -1
45 |
46 | }
47 |
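// exampleStringInSlice is an editor-added sketch, not part of the original
// source: looking up "B" below yields (true, 1), while a missing string
// would yield (false, -1).
func exampleStringInSlice() (bool, int) {
	return StringInSlice("B", []string{"A", "B", "C"})
}
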
48 | // StringToFloat converts a slice of strings to a slice of float64 values.
49 | // If mode is 0, the passed default value is inserted in place of each element
50 | // that fails to parse, so positions are preserved.
51 | // If mode is 1, the output slice contains only the correctly parsed elements.
52 | // It returns the converted slice.
53 | func StringToFloat(slice []string, mode int, def float64) []float64 {
54 |
55 | // result declaration
56 | var result []float64
57 |
58 | // if mode is 0, pre-alloc struct
59 | if mode == 0 {
60 | // pre-alloc struct
61 | result = make([]float64, len(slice))
62 | }
63 |
64 | for index, value := range slice {
65 |
66 | // cast and error
67 | casted, err := strconv.ParseFloat(value, 64)
68 |
69 | if mode == 0 {
70 | if err == nil {
71 | // add casted
72 | result[index] = casted
73 | } else {
74 | // add default
75 | result[index] = def
76 | }
77 | } else {
78 | if err == nil {
79 | // add casted
80 | result = append(result, casted)
81 | }
82 | }
83 | }
84 |
85 | // return result
86 | return result
87 |
88 | }
89 |
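// exampleStringToFloat is an editor-added sketch, not part of the original
// source: it contrasts the two modes on an input with one malformed field.
// Mode 0 preserves positions and falls back to the default (-> [0.5 0 1.5]),
// while mode 1 drops the bad field instead (-> [0.5 1.5]).
func exampleStringToFloat() ([]float64, []float64) {
	raw := []string{"0.5", "oops", "1.5"}
	withDefault := StringToFloat(raw, 0, 0.0)
	onlyValid := StringToFloat(raw, 1, 0.0)
	return withDefault, onlyValid
}
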
90 | // ScalarProduct computes the scalar (dot) product of two float64 slices.
91 | // It returns the product, or -1.0 if the slices have different lengths.
92 | func ScalarProduct(a []float64, b []float64) float64 {
93 |
94 | // if slices have different number of elements
95 | if len(a) != len(b) {
96 | log.WithFields(log.Fields{
97 | "level": "error",
98 | "place": "mixed",
99 | "method": "ScalarProduct",
100 | "msg": "scalar product between slices",
101 | "aLen": len(a),
102 | "bLen": len(b),
103 | }).Error("Failed to compute scalar product between slices: different length.")
104 | return -1.0
105 | }
106 |
107 | // init result
108 | var result float64 = 0.0
109 |
110 | // for each element compute product
111 | for index, value := range a {
112 | result = result + (value * b[index])
113 | }
114 |
115 | // return value
116 | return result
117 |
118 | }
119 |
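// exampleScalarProduct is an editor-added sketch, not part of the original
// source: 1*4 + 2*5 + 3*6 = 32.
func exampleScalarProduct() float64 {
	return ScalarProduct([]float64{1, 2, 3}, []float64{4, 5, 6})
}
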
120 | // MaxInSlice returns the max value in a float64 slice and the index at
121 | // which it occurs (for an empty slice it returns -Inf, 0).
122 | func MaxInSlice(v []float64) (float64, int) {
123 | 	mv := math.Inf(-1) // start from -Inf so slices of only negative values work too
124 | 	mi := 0
125 | 	for i, e := range v {
126 | 		if e > mv {
127 | 			mv = e
128 | 			mi = i
129 | 		}
130 | 	}
131 | 	return mv, mi
132 | }
133 |
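// exampleMaxInSlice is an editor-added sketch, not part of the original
// source: it returns (0.9, 2), the max value and the index it occurs at.
func exampleMaxInSlice() (float64, int) {
	return MaxInSlice([]float64{0.1, 0.4, 0.9, 0.3})
}
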
134 | // GenerateRandomIntWithBinaryDim returns a random integer in [0, 2^d).
135 | func GenerateRandomIntWithBinaryDim(d int) int64 {
136 | 	rand.Seed(time.Now().UTC().UnixNano())
137 | 	// in Go ^ is XOR, not exponentiation, so shift to obtain 2 to the power d
138 | 	return rand.Int63n(int64(1) << uint(d))
139 | }
140 |
141 | // GenerateRandomBinaryInt returns a random d-digit binary number
142 | // as a slice of float64 digits (each element 0.0 or 1.0).
143 | func GenerateRandomBinaryInt(d int) []float64 {
144 | 	rand.Seed(time.Now().UTC().UnixNano())
145 | 	// draw from [0, 2^d) (^ is XOR in Go, hence the shift) and encode it
146 | 	bn := rand.Int63n(int64(1) << uint(d))
147 | 	bi := make([]float64, d)
148 | 	bs := strconv.FormatInt(bn, 2)
149 | 	zn := d - len(bs)
150 | 	for zIndex := 0; zIndex < d; zIndex++ {
151 | 		if zIndex < zn {
152 | 			// left-pad with zeros up to d digits
153 | 			bi[zIndex] = 0.0
154 | 		} else {
155 | 			bi[zIndex], _ = strconv.ParseFloat(string(bs[zIndex-zn]), 64)
156 | 		}
157 | 	}
158 | 	return bi
159 | }
162 |
163 | // ConvertIntToBinary converts n to its binary representation as a slice of
164 | // float64 digits of length d (or longer, if n does not fit in d digits).
165 | func ConvertIntToBinary(n int64, d int) []float64 {
166 | 	bs := strconv.FormatInt(n, 2)
167 | 	if len(bs) > d {
168 | 		// widen d so the loop below fills every digit of the representation
169 | 		log.Warning("Too small base")
170 | 		d = len(bs)
171 | 	}
172 | 	bi := make([]float64, d)
173 | 	zn := d - len(bs)
174 | 	for zIndex := 0; zIndex < d; zIndex++ {
175 | 		if zIndex < zn {
176 | 			bi[zIndex] = 0.0
177 | 		} else {
178 | 			bi[zIndex], _ = strconv.ParseFloat(string(bs[zIndex-zn]), 64)
179 | 		}
180 | 	}
181 | 	return bi
182 | }
183 |
184 | // ConvertBinToInt converts a binary number, given as a slice of float64
185 | // digits, back to its integer value.
186 | func ConvertBinToInt(n []float64) int {
187 | 	bi := 0
188 | 	for i := len(n) - 1; i >= 0; i-- {
189 | 		// the digit at position i weighs 2^(len(n)-i-1)
190 | 		bi = bi + int(n[i])*int(math.Pow(2, float64(len(n)-i-1)))
191 | 	}
192 | 	return bi
193 | }
195 |
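// exampleBinaryRoundTrip is an editor-added sketch, not part of the original
// source: encoding 5 on 4 digits gives [0 1 0 1], and decoding that slice
// returns 5 again.
func exampleBinaryRoundTrip() int {
	return ConvertBinToInt(ConvertIntToBinary(5, 4))
}
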
196 | // Round rounds val to places decimal digits; the next fractional digit is
197 | // rounded up when it is >= roundOn (use 0.5 for conventional rounding).
198 | func Round(val float64, roundOn float64, places int) (newVal float64) {
199 | 	var round float64
200 | 	pow := math.Pow(10, float64(places))
201 | 	digit := pow * val
202 | 	_, div := math.Modf(digit)
203 | 	if div >= roundOn {
204 | 		round = math.Ceil(digit)
205 | 	} else {
206 | 		round = math.Floor(digit)
207 | 	}
208 | 	newVal = round / pow
209 | 	return
210 | }
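
// exampleRound is an editor-added sketch, not part of the original source:
// with roundOn = 0.5 this is conventional rounding, so 0.7368 rounded to two
// places yields 0.74 (exact halves such as 2.345 may still round down due to
// float64 representation).
func exampleRound() float64 {
	return Round(0.7368, 0.5, 2)
}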
--------------------------------------------------------------------------------
/validation/validation.go:
--------------------------------------------------------------------------------
1 | // Package validation provides hold-out, random subsampling and k-fold
2 | // validation strategies for the models defined in the neural package.
3 | package validation
2 |
3 | import (
4 | "math/rand"
5 | "time"
6 |
7 | 	// third party import
8 | log "github.com/sirupsen/logrus"
9 |
10 | // internal import
11 | mn "github.com/made2591/go-perceptron-go/model/neural"
12 | mu "github.com/made2591/go-perceptron-go/util"
13 |
14 | //"fmt"
15 | )
16 |
17 | // TrainTestPatternsSplit splits an array of patterns into a training and a testing set.
18 | // If shuffle is 0 the function takes the first percentage of items as train and the rest as test;
19 | // otherwise the patterns array is shuffled before partitioning.
20 | func TrainTestPatternsSplit(patterns []mn.Pattern, percentage float64, shuffle int) (train []mn.Pattern, test []mn.Pattern) {
21 |
22 | // create splitting pivot
23 | var splitPivot int = int(float64(len(patterns)) * percentage)
24 | train = make([]mn.Pattern, splitPivot)
25 | test = make([]mn.Pattern, len(patterns)-splitPivot)
26 |
27 | // if mixed mode, split with shuffling
28 | if shuffle == 1 {
29 | // create random indexes permutation
30 | rand.Seed(time.Now().UTC().UnixNano())
31 | perm := rand.Perm(len(patterns))
32 |
33 | // copy training data
34 | for i := 0; i < splitPivot; i++ {
35 | train[i] = patterns[perm[i]]
36 | }
37 | 		// copy test data, starting after the training block of the permutation
38 | 		for i := 0; i < len(patterns)-splitPivot; i++ {
39 | 			test[i] = patterns[perm[splitPivot+i]]
40 | 		}
41 |
42 | } else {
43 | // else, split without shuffle
44 | train = patterns[:splitPivot]
45 | test = patterns[splitPivot:]
46 | }
47 |
48 | log.WithFields(log.Fields{
49 | "level": "info",
50 | "msg": "splitting completed",
51 | "trainSet": len(train),
52 | "testSet: ": len(test),
53 | }).Info("Complete splitting train/test set.")
54 |
55 | return train, test
56 | }
57 |
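// exampleHoldOutSplit is an editor-added sketch, not part of the original
// source: it builds a toy dataset of ten patterns and splits it 70/30
// without shuffling, so the first seven patterns become the training set.
func exampleHoldOutSplit() (train []mn.Pattern, test []mn.Pattern) {
	patterns := make([]mn.Pattern, 10)
	for i := range patterns {
		patterns[i] = mn.Pattern{Features: []float64{float64(i)}, SingleExpectation: float64(i % 2)}
	}
	return TrainTestPatternsSplit(patterns, 0.7, 0)
}
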
58 | // TrainTestPatternSplit is kept for backward compatibility:
59 | // it delegates to TrainTestPatternsSplit, which has the same semantics.
60 | func TrainTestPatternSplit(patterns []mn.Pattern, percentage float64, shuffle int) (train []mn.Pattern, test []mn.Pattern) {
61 | 	return TrainTestPatternsSplit(patterns, percentage, shuffle)
62 | }
98 |
99 | // KFoldPatternsSplit splits an array of patterns into k subsets (folds).
100 | // If shuffle is 0 the function partitions the items keeping their order;
101 | // otherwise the patterns array is shuffled before partitioning.
102 | func KFoldPatternsSplit(patterns []mn.Pattern, k int, shuffle int) [][]mn.Pattern {
103 |
104 | // get the size of each fold
105 | var size = int(len(patterns) / k)
106 | var freeElements = int(len(patterns) % k)
107 |
108 | folds := make([][]mn.Pattern, k)
109 |
110 | var perm []int
111 | // if mixed mode, split with shuffling
112 | if shuffle == 1 {
113 | // create random indexes permutation
114 | rand.Seed(time.Now().UTC().UnixNano())
115 | perm = rand.Perm(len(patterns))
116 | }
117 |
118 | // start splitting
119 | currSize := 0
120 | foldStart := 0
121 | curr := 0
122 | for f := 0; f < k; f++ {
123 | curr = foldStart
124 | currSize = size
125 | if f < freeElements {
126 | // add another
127 | currSize++
128 | }
129 |
130 | // create array
131 | folds[f] = make([]mn.Pattern, currSize)
132 |
133 | // copy elements
134 |
135 | for i := 0; i < currSize; i++ {
136 | if shuffle == 1 {
137 | folds[f][i] = patterns[perm[curr]]
138 | } else {
139 | folds[f][i] = patterns[curr]
140 | }
141 | curr++
142 | }
143 |
144 | foldStart = curr
145 |
146 | }
147 |
148 | log.WithFields(log.Fields{
149 | "level": "info",
150 | "msg": "splitting completed",
151 | "numberOfFolds": k,
152 | "meanFoldSize: ": size,
153 | "consideredElements": (size * k) + freeElements,
154 | }).Info("Complete folds splitting.")
155 |
156 | return folds
157 | }
158 |
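// exampleKFoldSizes is an editor-added sketch, not part of the original
// source: with 10 patterns and k = 3 the folds get sizes 4, 3 and 3, since
// the remainder (10 % 3 = 1) is spread over the first folds.
func exampleKFoldSizes(patterns []mn.Pattern) []int {
	folds := KFoldPatternsSplit(patterns, 3, 0)
	sizes := make([]int, len(folds))
	for i, fold := range folds {
		sizes[i] = len(fold)
	}
	return sizes
}
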
159 | // RandomSubsamplingValidation evaluates a single neuron by repeated random train/test splits.
160 | // It returns the score reached for each iteration.
161 | func RandomSubsamplingValidation(neuron *mn.NeuronUnit, patterns []mn.Pattern, percentage float64, epochs int, folds int, shuffle int) []float64 {
162 |
163 | // results and predictions vars init
164 | var scores, actual, predicted []float64
165 | var train, test []mn.Pattern
166 |
167 | scores = make([]float64, folds)
168 |
169 | 	for t := 0; t < folds; t++ {
170 | 		// reset per-iteration accumulators, so each score covers only this split
171 | 		actual, predicted = nil, nil
172 | 		// split the dataset with shuffling
173 | 		train, test = TrainTestPatternsSplit(patterns, percentage, shuffle)
172 |
173 | // train neuron with set of patterns, for specified number of epochs
174 | mn.TrainNeuron(neuron, train, epochs, 1)
175 |
176 | // compute predictions for each pattern in testing set
177 | for _, pattern := range test {
178 | actual = append(actual, pattern.SingleExpectation)
179 | predicted = append(predicted, mn.Predict(neuron, &pattern))
180 | }
181 |
182 | // compute score
183 | _, percentageCorrect := mn.Accuracy(actual, predicted)
184 | scores[t] = percentageCorrect
185 |
186 | log.WithFields(log.Fields{
187 | "level": "info",
188 | "place": "validation",
189 | "method": "RandomSubsamplingValidation",
190 | "foldNumber": t,
191 | "trainSetLen": len(train),
192 | "testSetLen": len(test),
193 | "percentageCorrect": percentageCorrect,
194 | }).Info("Evaluation completed for current fold.")
195 | }
196 |
197 | // compute average score
198 | acc := 0.0
199 | for i := 0; i < len(scores); i++ {
200 | acc += scores[i]
201 | }
202 |
203 | mean := acc / float64(len(scores))
204 |
205 | log.WithFields(log.Fields{
206 | "level": "info",
207 | "place": "validation",
208 | "method": "RandomSubsamplingValidation",
209 | "folds": folds,
210 | "trainSetLen": len(train),
211 | "testSetLen": len(test),
212 | "meanScore": mean,
213 | }).Info("Evaluation completed for all folds.")
214 |
215 | return scores
216 | }
217 |
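// exampleRandomSubsampling is an editor-added sketch, not part of the
// original source: given an already-initialised neuron and dataset, it runs
// five random subsampling iterations, each with a shuffled 67/33 split and
// 100 training epochs.
func exampleRandomSubsampling(neuron *mn.NeuronUnit, patterns []mn.Pattern) []float64 {
	return RandomSubsamplingValidation(neuron, patterns, 0.67, 100, 5, 1)
}
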
218 | // KFoldValidation performs k-fold cross validation of a single neuron.
219 | // It returns the score reached for each fold.
220 | func KFoldValidation(neuron *mn.NeuronUnit, patterns []mn.Pattern, epochs int, k int, shuffle int) []float64 {
221 |
222 | // results and predictions vars init
223 | var scores, actual, predicted []float64
224 | var train, test []mn.Pattern
225 |
226 | scores = make([]float64, k)
227 |
228 | // split the dataset with shuffling
229 | folds := KFoldPatternsSplit(patterns, k, shuffle)
230 |
231 | // the t-th fold is used as test
232 | 	for t := 0; t < k; t++ {
233 | 		// reset per-fold accumulators and prepare the training set
234 | 		train, actual, predicted = nil, nil, nil
235 | for i := 0; i < k; i++ {
236 | if i != t {
237 | train = append(train, folds[i]...)
238 | }
239 | }
240 | test = folds[t]
241 |
242 | // train neuron with set of patterns, for specified number of epochs
243 | mn.TrainNeuron(neuron, train, epochs, 1)
244 |
245 | // compute predictions for each pattern in testing set
246 | for _, pattern := range test {
247 | actual = append(actual, pattern.SingleExpectation)
248 | predicted = append(predicted, mn.Predict(neuron, &pattern))
249 | }
250 |
251 | // compute score
252 | _, percentageCorrect := mn.Accuracy(actual, predicted)
253 | scores[t] = percentageCorrect
254 |
255 | log.WithFields(log.Fields{
256 | "level": "info",
257 | "place": "validation",
258 | "method": "KFoldValidation",
259 | "foldNumber": t,
260 | "trainSetLen": len(train),
261 | "testSetLen": len(test),
262 | "percentageCorrect": percentageCorrect,
263 | }).Info("Evaluation completed for current fold.")
264 | }
265 |
266 | // compute average score
267 | acc := 0.0
268 | for i := 0; i < len(scores); i++ {
269 | acc += scores[i]
270 | }
271 |
272 | mean := acc / float64(len(scores))
273 |
274 | log.WithFields(log.Fields{
275 | "level": "info",
276 | "place": "validation",
277 | "method": "KFoldValidation",
278 | "folds": k,
279 | "trainSetLen": len(train),
280 | "testSetLen": len(test),
281 | "meanScore": mean,
282 | }).Info("Evaluation completed for all folds.")
283 |
284 | return scores
285 |
286 | }
287 |
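// exampleKFoldValidation is an editor-added sketch, not part of the original
// source: 10-fold cross validation with shuffling for a pre-built neuron,
// training 100 epochs per fold.
func exampleKFoldValidation(neuron *mn.NeuronUnit, patterns []mn.Pattern) []float64 {
	return KFoldValidation(neuron, patterns, 100, 10, 1)
}
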
288 | // MLPRandomSubsamplingValidation evaluates a multilayer perceptron by
289 | // repeated random train/test splits. It returns the score of each iteration.
289 | func MLPRandomSubsamplingValidation(mlp *mn.MultiLayerNetwork, patterns []mn.Pattern, percentage float64, epochs int, folds int, shuffle int, mapped []string) []float64 {
290 |
291 | // results and predictions vars init
292 | var scores, actual, predicted []float64
293 | var train, test []mn.Pattern
294 |
295 | scores = make([]float64, folds)
296 |
297 | 	for t := 0; t < folds; t++ {
298 | 		// reset per-iteration accumulators, so each score covers only this split
299 | 		actual, predicted = nil, nil
300 | 		// split the dataset with shuffling
301 | 		train, test = TrainTestPatternsSplit(patterns, percentage, shuffle)
302 |
303 | 		// train mlp on the training patterns only, for the specified number of epochs
304 | 		mn.MLPTrain(mlp, train, mapped, epochs)
303 |
304 | // compute predictions for each pattern in testing set
305 | for _, pattern := range test {
306 | // get actual
307 | actual = append(actual, pattern.SingleExpectation)
308 | // get output from network
309 | o_out := mn.Execute(mlp, &pattern)
310 | // get index of max output
311 | _, indexMaxOut := mu.MaxInSlice(o_out)
312 | // add to predicted values
313 | predicted = append(predicted, float64(indexMaxOut))
314 | }
315 |
316 | // compute score
317 | _, percentageCorrect := mn.Accuracy(actual, predicted)
318 | scores[t] = percentageCorrect
319 |
320 | log.WithFields(log.Fields{
321 | "level": "info",
322 | "place": "validation",
323 | "method": "MLPRandomSubsamplingValidation",
324 | "foldNumber": t,
325 | "trainSetLen": len(train),
326 | "testSetLen": len(test),
327 | "percentageCorrect": percentageCorrect,
328 | }).Info("Evaluation completed for current fold.")
329 | }
330 |
331 | // compute average score
332 | acc := 0.0
333 | for i := 0; i < len(scores); i++ {
334 | acc += scores[i]
335 | }
336 |
337 | mean := acc / float64(len(scores))
338 |
339 | log.WithFields(log.Fields{
340 | "level": "info",
341 | "place": "validation",
342 | "method": "MLPRandomSubsamplingValidation",
343 | "folds": folds,
344 | "trainSetLen": len(train),
345 | "testSetLen": len(test),
346 | "meanScore": mean,
347 | }).Info("Evaluation completed for all folds.")
348 |
349 | return scores
350 | }
351 |
352 | // MLPKFoldValidation performs k-fold cross validation of a multilayer perceptron.
353 | // It returns the score reached for each fold.
354 | func MLPKFoldValidation(mlp *mn.MultiLayerNetwork, patterns []mn.Pattern, epochs int, k int, shuffle int, mapped []string) []float64 {
355 |
356 | // results and predictions vars init
357 | var scores, actual, predicted []float64
358 | var train, test []mn.Pattern
359 |
360 | scores = make([]float64, k)
361 |
362 | // split the dataset with shuffling
363 | folds := KFoldPatternsSplit(patterns, k, shuffle)
364 |
365 | // the t-th fold is used as test
366 | 	for t := 0; t < k; t++ {
367 | 		// reset per-fold accumulators and prepare the training set
368 | 		train, actual, predicted = nil, nil, nil
369 | for i := 0; i < k; i++ {
370 | if i != t {
371 | train = append(train, folds[i]...)
372 | }
373 | }
374 | test = folds[t]
375 |
376 | 		// train mlp on the training patterns only, for the specified number of epochs
377 | 		mn.MLPTrain(mlp, train, mapped, epochs)
378 |
379 | // compute predictions for each pattern in testing set
380 | for _, pattern := range test {
381 | // get actual
382 | actual = append(actual, pattern.SingleExpectation)
383 | // get output from network
384 | o_out := mn.Execute(mlp, &pattern)
385 | // get index of max output
386 | _, indexMaxOut := mu.MaxInSlice(o_out)
387 | // add to predicted values
388 | predicted = append(predicted, float64(indexMaxOut))
389 | }
390 |
391 | // compute score
392 | _, percentageCorrect := mn.Accuracy(actual, predicted)
393 | scores[t] = percentageCorrect
394 |
395 | log.WithFields(log.Fields{
396 | "level": "info",
397 | "place": "validation",
398 | "method": "MLPKFoldValidation",
399 | "foldNumber": t,
400 | "trainSetLen": len(train),
401 | "testSetLen": len(test),
402 | "percentageCorrect": percentageCorrect,
403 | }).Info("Evaluation completed for current fold.")
404 | }
405 |
406 | // compute average score
407 | acc := 0.0
408 | for i := 0; i < len(scores); i++ {
409 | acc += scores[i]
410 | }
411 |
412 | mean := acc / float64(len(scores))
413 |
414 | log.WithFields(log.Fields{
415 | "level": "info",
416 | "place": "validation",
417 | "method": "MLPKFoldValidation",
418 | "folds": k,
419 | "trainSetLen": len(train),
420 | "testSetLen": len(test),
421 | "meanScore": mean,
422 | }).Info("Evaluation completed for all folds.")
423 |
424 | return scores
425 |
426 | }
427 |
428 | // RNNValidation trains an Elman recurrent network on the given patterns and
429 | // evaluates it on the same set (no train/test split is performed).
430 | // It returns the mean score and the score reached for each pattern.
429 | func RNNValidation(mlp *mn.MultiLayerNetwork, patterns []mn.Pattern, epochs int, shuffle int) (float64, []float64) {
430 |
431 | // results and predictions vars init
432 | var scores []float64
433 | scores = make([]float64, len(patterns))
434 |
435 | // train mlp with set of patterns, for specified number of epochs
436 | mn.ElmanTrain(mlp, patterns, epochs)
437 | p_cor := 0.0
438 |
439 | // compute predictions for each pattern in testing set
440 | for p_i, pattern := range patterns {
441 | // get output from network
442 | o_out := mn.Execute(mlp, &pattern, 1)
443 | 		for o_out_i, o_out_v := range o_out {
444 | o_out[o_out_i] = mu.Round(o_out_v, .5, 0)
445 | }
446 | log.WithFields(log.Fields{
447 | "a_p_b": pattern.Features,
448 | "rea_c": pattern.MultipleExpectation,
449 | "pre_c": o_out,
450 | }).Debug()
451 |
452 | // add to predicted values
453 | _, p_cor = mn.Accuracy(pattern.MultipleExpectation, o_out)
454 | // compute score
455 | 		scores[p_i] = p_cor
456 | }
457 |
458 | // compute average score
459 | acc := 0.0
460 | for i := 0; i < len(scores); i++ {
461 | acc += scores[i]
462 | }
463 |
464 | mean := acc / float64(len(scores))
465 |
466 | log.WithFields(log.Fields{
467 | "level": "info",
468 | "place": "validation",
469 | "method": "RNNValidation",
470 | "trainSetLen": len(patterns),
471 | "testSetLen": len(patterns),
472 | "meanScore": mean,
473 | }).Info("Evaluation completed for all patterns.")
474 |
475 | return mean, scores
476 |
477 | }
--------------------------------------------------------------------------------