// https://learn.sparkfun.com/tutorials/efficient-arduino-programming-with-arduino-cli-and-visual-studio-code/all
// https://www.arduino.cc/en/Hacking/libraryTutorial
/*
NeuralNetwork.h - Library for MLP Neural Networks.
Created by George Chousos, April 11, 2019. *0
Released into the public domain.
*0: Mainly Based On https://www.youtube.com/watch?v=L_PByyJ9g-I
*/
/*
[On some Arduino boards, such as the UNO, double is equal to float]
- https://www.arduino.cc/reference/en/language/variables/data-types/double/
- https://forum.arduino.cc/index.php?topic=613873.0
*/
/*
[Error #777]
NeuralNetwork(const unsigned int *layer_, const float *default_Weights, const float *default_Bias, const unsigned int &NumberOflayers , bool isProgmem)
Because someone might want to have default or pretrained weights and biases in SRAM rather than in PROGMEM.
- https://stackoverflow.com/questions/56024569/arduino-compiler-takes-into-account-const-progmem-type-as-equal-to-const-type
- https://forum.arduino.cc/index.php?topic=614438.0
*/
// STR(MSGX) | pragma message
#define MSG0
// It would be nice if there were a list of microcontrollers, or a way to determine the SRAM size etc. with defined(..).
// Defines the ATtiny series of microcontrollers as As__AVR_ATtinyX__
#if defined(__AVR_ATtiny2313__) || defined(__AVR_ATtiny4313__) || defined(__AVR_ATtiny24__) || defined(__AVR_ATtiny44__) || defined(__AVR_ATtiny84__) || defined(__AVR_ATtiny25__) || defined(__AVR_ATtiny45__) || defined(__AVR_ATtiny85__)
#define As__AVR_ATtinyX__
#if defined(__AVR_ATtiny85__)
#undef MSG0
#define MSG0 \n⌥▌"////////////// [⚠] [𝗥𝗲𝗺𝗶𝗻𝗱𝗲𝗿] Last time I tried backpropagation on an ATtiny85 I had issues [...]"
#endif
#endif
// Defines a list of microcontroller series that (at the moment) lack common Serial support, as As__No_Common_Serial_Support
#if defined(As__AVR_ATtinyX__) // or etc.
#define As__No_Common_Serial_Support
#endif
#if defined(ESP32)
#define AS_SOFTWARE_EMULATED_EEPROM
#endif
// - This prevents problems if someone accidentally #includes your library twice.
#ifndef NeuralNetwork_h
#define NeuralNetwork_h
// - That gives you access to the standard types and constants of the Arduino language.
#include "Arduino.h"
// https://arduino.stackexchange.com/questions/94743/is-ifdef-sd-h-considered-a-bad-practice/
#define SD_LIB_NAME <SD.h>
#if defined(__SD_H__) || defined(SD_h)
#define SUPPORTS_SD_FUNCTIONALITY
#elif defined __has_include
#if __has_include(SD_LIB_NAME)
#include SD_LIB_NAME
#define SUPPORTS_SD_FUNCTIONALITY
#endif
#endif
#define EEPROM_LIB_NAME <EEPROM.h>
#if defined(EEPROM_h) || defined(__EEPROM_H__)
#define INCLUDES_EEPROM_H
#elif defined __has_include
#if __has_include(EEPROM_LIB_NAME)
#include EEPROM_LIB_NAME
#define INCLUDES_EEPROM_H
#endif
#endif
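/*
   A minimal sketch-side illustration of how the optional-library detection above is expected
   to behave (assuming a core whose SD library defines __SD_H__ or SD_h, or a compiler that
   supports __has_include): including the optional library before this header is enough to
   turn the corresponding functionality on.

   #include <SD.h>            // defines __SD_H__ / SD_h  -> SUPPORTS_SD_FUNCTIONALITY
   #include <EEPROM.h>        // defines EEPROM_h / __EEPROM_H__ -> INCLUDES_EEPROM_H
   #include <NeuralNetwork.h> // this header, included after the optional libraries
*/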
// STR(MSGX) | pragma message
#define MSG1
#define MSG2
#define MSG3
#define MSG4
#define MSG5
#define MSG6
#define MSG7
#define MSG8
#define MSG9
#define MSG10
#define LOVE \n❤ 𝖀𝖓𝖈𝖔𝖓𝖉𝖎𝖙𝖎𝖔𝖓𝖆𝖑 𝕷𝖔𝖛𝖊 ❤\n
#define ATOL atol
#define LLONG long
#define DFLOAT float
#define DFLOAT_LEN 7
#define PGM_READ_DFLOAT pgm_read_float
#define IS_CONST
#if defined(_1_OPTIMIZE)
#if ((_1_OPTIMIZE bitor B01111111) == B11111111)
#define USE_PROGMEM
#define NO_BACKPROP
#undef IS_CONST
#define IS_CONST const
#undef MSG1
#define MSG1 \n⌥▌" [1] B10000000 [⚠] [𝗥𝗲𝗺𝗶𝗻𝗱𝗲𝗿] Backpropagation is not Allowed with (USE_PROGMEM)."
#endif
#if ((_1_OPTIMIZE bitor B10111111) == B11111111)
#define REDUCE_RAM_DELETE_OUTPUTS
#define NO_BACKPROP
#undef MSG2
#define MSG2 \n⌥▌" [1] B01000000 [⚠] [𝗥𝗲𝗺𝗶𝗻𝗱𝗲𝗿] Backpropagation is not Allowed with (REDUCE_RAM_DELETE_OUTPUTS)."
#endif
#if ((_1_OPTIMIZE bitor B11101111) == B11111111)
#define REDUCE_RAM_WEIGHTS_COMMON
#define REDUCE_RAM_WEIGHTS_LVL2
//#warning [⚠] Backpropagating more than once after a FeedForward [...]
#elif ((_1_OPTIMIZE bitor B11011111) == B11111111)
#undef MSG3
#define MSG3 \n⌥▌" [1] B00100000 [⚠] [𝗥𝗲𝗺𝗶𝗻𝗱𝗲𝗿] Is not implemented yet."
//#define REDUCE_RAM_WEIGHTS_COMMON
//#define REDUCE_RAM_WEIGHTS_LVL1
#endif
#if ((_1_OPTIMIZE bitor B11110111) == B11111111)
#define REDUCE_RAM_DELETE_PREVIOUS_LAYER_GAMMA
#undef MSG4
#define MSG4 \n⌥▌" [1] B00001000 [⚠] [𝗥𝗲𝗺𝗶𝗻𝗱𝗲𝗿] Always enabled, not switchable yet."
#endif
#if ((_1_OPTIMIZE bitor B11111011) == B11111111)
#define REDUCE_RAM_STATIC_REFERENCE
#undef MSG5
#define MSG5 \n⌥▌" [1] B00000100 [⚠] [𝗥𝗲𝗺𝗶𝗻𝗱𝗲𝗿] Be careful with multiple NN objects."
#endif
#if ((_1_OPTIMIZE bitor B11111101) == B11111111)
#define DISABLE_MSE
#undef MSG6
#define MSG6 \n⌥▌" [1] B00000010 [⚠] [𝗥𝗲𝗺𝗶𝗻𝗱𝗲𝗿] MSE is disabled (DISABLE_MSE) (DEFAULT_LOSS)"
#endif
#if ((_1_OPTIMIZE bitor B11111110) == B11111111)
#undef ATOL
#undef LLONG
#undef DFLOAT_LEN
#undef DFLOAT
#undef PGM_READ_DFLOAT
#define USE_64_BIT_DOUBLE
#define ATOL atoll
#define LLONG long long
#define DFLOAT_LEN 15
#define DFLOAT double
#define PGM_READ_DFLOAT pgm_read_double
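// Reads an 8-byte double out of PROGMEM by copying it with memcpy_P
// (defined here on the assumption that the core does not provide a pgm_read_double()).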
double pgm_read_double(const double* address) {
double result;
memcpy_P(&result, address, sizeof(double));
return result;
}
#undef MSG7
#define MSG7 \n⌥▌" [1] B00000001 [⚠] [𝗥𝗲𝗺𝗶𝗻𝗱𝗲𝗿] Not all MCUs support a 64-bit/8-byte double (USE_64_BIT_DOUBLE)."
#endif
// If I make most of these things static/global, I can significantly reduce ROM usage, but with the "limitation" of one NN per sketch.
#endif
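/*
   A hedged sketch of how the _1_OPTIMIZE bitmask above is meant to be set in a sketch
   (the combination below is only an illustration, not a recommendation; the bit meanings
   come from the #if blocks above):

   #define _1_OPTIMIZE B00010100   // B00010000: REDUCE_RAM_WEIGHTS_COMMON / _LVL2
                                   // B00000100: REDUCE_RAM_STATIC_REFERENCE
   #include <NeuralNetwork.h>      // the #define must come before this include
*/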
// Disable SIMD parallel processing if double precision is enabled
#if defined(CONFIG_IDF_TARGET_ESP32S3) || defined(USE_ESP_SIMD)
#if defined(USE_64_BIT_DOUBLE)
#undef MSG7
#define MSG7 \n⌥▌" [1] B00000001 [⚠] [𝗥𝗲𝗺𝗶𝗻𝗱𝗲𝗿] SIMD disabled; it is not supported with double precision."
#else
#define ESP_SUPPORTS_SIMD
#include "esp_dsp.h"
#endif
#endif
#if defined(_2_OPTIMIZE)
#if ((_2_OPTIMIZE bitor B01111111) == B11111111)
#if defined(REDUCE_RAM_WEIGHTS_COMMON)
#undef MSG3
#define MSG3 \n⌥▌" [_] B00110000 [⚠] [𝗥𝗲𝗺𝗶𝗻𝗱𝗲𝗿] There is no need for (REDUCE_RAM_WEIGHTS_LVLX)"
#endif
#define NO_BACKPROP
#define USE_INTERNAL_EEPROM
#if defined(ACTIVATION__PER_LAYER)
#define SIZEOF_FX sizeof(byte)
#else
#define SIZEOF_FX 0
#endif
#if defined(AS_SOFTWARE_EMULATED_EEPROM)
#undef MSG9
#define MSG9 \n⌥▌" [2] B10000000 [⚠] [𝗪𝗔𝗥𝗡𝗜𝗡𝗚] ESP32 MCUs are defined (AS_SOFTWARE_EMULATED_EEPROM)."
#endif
#undef MSG8
#define MSG8 \n⌥▌" [2] B10000000 [⚠] [𝗥𝗲𝗺𝗶𝗻𝗱𝗲𝗿] Backpropagation is not Allowed with (USE_INTERNAL_EEPROM)."
#if !defined(EEPROM_h) && !defined(__EEPROM_H__)
// For some reason the compiler reports 'EEPROM' was not declared in this scope even though it is #included below, so it apparently has to be #included in the sketch as well.
#include <EEPROM.h>
#define INCLUDES_EEPROM_H
#endif
#endif
#endif
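/*
   Similarly for _2_OPTIMIZE: a sketch that wants to read the network from the MCU's
   internal EEPROM would, as a hedged illustration based on the block above, do:

   #define _2_OPTIMIZE B10000000   // USE_INTERNAL_EEPROM (disables backpropagation)
   #include <NeuralNetwork.h>
*/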
#define ACT1 0
#define ACT2 0
#define ACT3 0
#define ACT4 0
#define ACT5 0
#define ACT6 0
#define ACT7 0
#define ACT8 0
#define ACT9 0
#define ACT10 0
#define ACT11 0
#define ACT12 0
#define ACT13 0
#define ACT14 0
// Custom Activation Functions
#define CACT1 0
#define CACT2 0
#define CACT3 0
#define CACT4 0
#define CACT5 0
// Custom Activation Function Definitions (e.g. DFLOAT CUSTOM_AFX(...);)
#define CUSTOM_AF1_DEFINITION
#define CUSTOM_AF2_DEFINITION
#define CUSTOM_AF3_DEFINITION
#define CUSTOM_AF4_DEFINITION
#define CUSTOM_AF5_DEFINITION
#define CUSTOM_DF1_DEFINITION
#define CUSTOM_DF2_DEFINITION
#define CUSTOM_DF3_DEFINITION
#define CUSTOM_DF4_DEFINITION
#define CUSTOM_DF5_DEFINITION
// STR(AX) | pragma message | A = Activation | AL = All | CA = Custom Activation
#define A1
#define A2
#define A3
#define A4
#define A5
#define A6
#define A7
#define A8
#define A9
#define A10
#define A11
#define A12
#define A13
#define A14
#define AL
#define CA1
#define CA2
#define CA3
#define CA4
#define CA5
// NB = NO BACKPROP | CSTA = Custom Activation message | NB_CA5 = NB message for CAs
#define NB_CA1
#define NB_CA2
#define NB_CA3
#define NB_CA4
#define NB_CA5
#define CSTA
#define NB
#if defined(Sigmoid) && (defined(ACTIVATION__PER_LAYER) || !defined(ACTIVATION))
#undef A1
#undef ACT1
#undef Sigmoid
#undef DEFAULT_ACTIVATION_FUNCTION
#define ACT1 1
#define ACTIVATION
#define ACTIVATION_FUNCTION Sigmoid
#define Sigmoid Sigmoid
#define A1 |‣ Sigmoid
#endif
#if defined(Tanh) && (defined(ACTIVATION__PER_LAYER) || !defined(ACTIVATION))
#undef A2
#undef ACT2
#undef Tanh
#undef ACTIVATION
#undef ACTIVATION_FUNCTION
#undef DEFAULT_ACTIVATION_FUNCTION
#define ACT2 1
#define ACTIVATION
#define ACTIVATION_FUNCTION Tanh
#define Tanh Tanh
#define A2 |‣ Tanh
#endif
#if defined(ReLU) && (defined(ACTIVATION__PER_LAYER) || !defined(ACTIVATION))
#undef A3
#undef ACT3
#undef ReLU
#undef ACTIVATION
#undef ACTIVATION_FUNCTION
#undef DEFAULT_ACTIVATION_FUNCTION
#define ACT3 1
#define ACTIVATION
#define ACTIVATION_FUNCTION ReLU
#define SUPPORTS_CLIPPING // "supports" here really means "usually needs"
#define ReLU ReLU
#define A3 |‣ ReLU
#endif
#if defined(LeakyELU) && (defined(ACTIVATION__PER_LAYER) || !defined(ACTIVATION))
#undef A4
#undef ACT4
#undef LeakyELU
#undef ACTIVATION
#undef ACTIVATION_FUNCTION
#undef DEFAULT_ACTIVATION_FUNCTION
#define ACT4 1
#define ACTIVATION
#define ACTIVATION_FUNCTION LeakyELU
#define SUPPORTS_CLIPPING // "supports" here really means "usually needs"
#define LeakyELU LeakyELU
#define A4 |‣ LeakyELU
#endif
#if defined(ELU) && (defined(ACTIVATION__PER_LAYER) || !defined(ACTIVATION))
#undef A5
#undef ACT5
#undef ELU
#undef ACTIVATION
#undef ACTIVATION_FUNCTION
#undef DEFAULT_ACTIVATION_FUNCTION
#define ACT5 1
#define ACTIVATION
#define ACTIVATION_FUNCTION ELU
#define SUPPORTS_CLIPPING // "supports" here really means "usually needs"
#define ELU ELU
#define A5 |‣ ELU
#endif
#if defined(SELU) && (defined(ACTIVATION__PER_LAYER) || !defined(ACTIVATION))
#undef A6
#undef ACT6
#undef SELU
#undef ACTIVATION
#undef ACTIVATION_FUNCTION
#undef DEFAULT_ACTIVATION_FUNCTION
#define ACT6 1
#define ACTIVATION
#define ACTIVATION_FUNCTION SELU
#define SELU SELU
#define A6 |‣ SELU
#endif
#if defined(Softmax) && (defined(ACTIVATION__PER_LAYER) || !defined(ACTIVATION))
#undef A7
#undef ACT7
#undef Softmax
#undef ACTIVATION
#undef ACTIVATION_FUNCTION
#undef DEFAULT_ACTIVATION_FUNCTION
#define ACT7 1
#define ACTIVATION
#define ACTIVATION_FUNCTION Softmax
#define Softmax Softmax
#define A7 |‣ Softmax
#endif
#if defined(Identity) && (defined(ACTIVATION__PER_LAYER) || !defined(ACTIVATION))
#undef A8
#undef ACT8
#undef Identity
#undef ACTIVATION
#undef ACTIVATION_FUNCTION
#undef DEFAULT_ACTIVATION_FUNCTION
#define ACT8 1
#define ACTIVATION
#define ACTIVATION_FUNCTION Identity
#define Identity Identity
#define A8 |‣ Identity
#endif
#if defined(BinaryStep) && (defined(ACTIVATION__PER_LAYER) || !defined(ACTIVATION))
#undef NB
#undef A9
#undef ACT9
#undef BinaryStep
#undef ACTIVATION
#undef ACTIVATION_FUNCTION
#undef DEFAULT_ACTIVATION_FUNCTION
#define NO_BACKPROP
#define NB | 𝗡𝗢_𝗕𝗔𝗖𝗞𝗣𝗥𝗢𝗣 SUPPORT FOR:
#define ACT9 1
#define ACTIVATION
#define ACTIVATION_FUNCTION BinaryStep
#define BinaryStep BinaryStep
#define A9 |‣ BinaryStep
#endif
#if defined(Softplus) && (defined(ACTIVATION__PER_LAYER) || !defined(ACTIVATION))
#undef NB
#undef A10
#undef ACT10
#undef Softplus
#undef ACTIVATION
#undef ACTIVATION_FUNCTION
#undef DEFAULT_ACTIVATION_FUNCTION
#define NO_BACKPROP
#define NB | 𝗡𝗢_𝗕𝗔𝗖𝗞𝗣𝗥𝗢𝗣 SUPPORT FOR:
#define ACT10 1
#define ACTIVATION
#define ACTIVATION_FUNCTION Softplus
#define Softplus Softplus
#define A10 |‣ Softplus
#endif
#if defined(SiLU) && (defined(ACTIVATION__PER_LAYER) || !defined(ACTIVATION))
#undef NB
#undef A11
#undef ACT11
#undef SiLU
#undef ACTIVATION
#undef ACTIVATION_FUNCTION
#undef DEFAULT_ACTIVATION_FUNCTION
#define NO_BACKPROP
#define NB | 𝗡𝗢_𝗕𝗔𝗖𝗞𝗣𝗥𝗢𝗣 SUPPORT FOR:
#define ACT11 1
#define ACTIVATION
#define ACTIVATION_FUNCTION SiLU
#define SiLU SiLU
#define A11 |‣ SiLU
#endif
#if defined(GELU) && (defined(ACTIVATION__PER_LAYER) || !defined(ACTIVATION))
#undef NB
#undef A12
#undef ACT12
#undef GELU
#undef ACTIVATION
#undef ACTIVATION_FUNCTION
#undef DEFAULT_ACTIVATION_FUNCTION
#define NO_BACKPROP
#define NB | 𝗡𝗢_𝗕𝗔𝗖𝗞𝗣𝗥𝗢𝗣 SUPPORT FOR:
#define ACT12 1
#define ACTIVATION
#define ACTIVATION_FUNCTION GELU
#define GELU GELU
#define A12 |‣ GELU
#endif
#if defined(Mish) && (defined(ACTIVATION__PER_LAYER) || !defined(ACTIVATION))
#undef NB
#undef A13
#undef ACT13
#undef Mish
#undef ACTIVATION
#undef ACTIVATION_FUNCTION
#undef DEFAULT_ACTIVATION_FUNCTION
#define NO_BACKPROP
#define NB | 𝗡𝗢_𝗕𝗔𝗖𝗞𝗣𝗥𝗢𝗣 SUPPORT FOR:
#define ACT13 1
#define ACTIVATION
#define ACTIVATION_FUNCTION Mish
#define Mish Mish
#define A13 |‣ Mish
#endif
#if defined(Gaussian) && (defined(ACTIVATION__PER_LAYER) || !defined(ACTIVATION))
#undef NB
#undef A14
#undef ACT14
#undef Gaussian
#undef ACTIVATION
#undef ACTIVATION_FUNCTION
#undef DEFAULT_ACTIVATION_FUNCTION
#define NO_BACKPROP
#define NB | 𝗡𝗢_𝗕𝗔𝗖𝗞𝗣𝗥𝗢𝗣 SUPPORT FOR:
#define ACT14 1
#define ACTIVATION
#define ACTIVATION_FUNCTION Gaussian
#define Gaussian Gaussian
#define A14 |‣ Gaussian
#endif
#define CONCATENATE_WITHOUT_SPACE(x, y) CONCATENATE_IMPL(x, y)
#define CONCATENATE_IMPL(x, y) x ## y
#if defined(CUSTOM_AF1) && (defined(ACTIVATION__PER_LAYER) || !defined(ACTIVATION))
#undef CA1
#undef CSTA
#undef CACT1
#undef ACTIVATION
#undef ACTIVATION_FUNCTION
#undef CUSTOM_AF1_DEFINITION
#undef DEFAULT_ACTIVATION_FUNCTION
#define ACTIVATION
#define CUSTOM_AF1_DEFINITION DFLOAT CUSTOM_AF1(const DFLOAT &x);
#define CSTA ||| (𝗖𝗨𝗦𝗧𝗢𝗠)
#if defined(CUSTOM_DF1)
#undef CUSTOM_DF1
#undef CUSTOM_DF1_DEFINITION
#define CUSTOM_DF1 CONCATENATE_WITHOUT_SPACE(CUSTOM_AF1, Der)
#define CUSTOM_DF1_DEFINITION DFLOAT CUSTOM_DF1(const DFLOAT &fx);
#else
#define NO_BACKPROP
#undef NB
#undef NB_CA1
#define NB | 𝗡𝗢_𝗕𝗔𝗖𝗞𝗣𝗥𝗢𝗣 SUPPORT FOR:
#define NB_CA1 |‣ CUSTOM_AF1
#endif
#define CACT1 1
#define ACTIVATION_FUNCTION CUSTOM_AF1
#define CA1 |‣ CUSTOM_AF1
#endif
#if defined(CUSTOM_AF2) && (defined(ACTIVATION__PER_LAYER) || !defined(ACTIVATION))
#undef CA2
#undef CSTA
#undef CACT2
#undef ACTIVATION
#undef ACTIVATION_FUNCTION
#undef CUSTOM_AF2_DEFINITION
#undef DEFAULT_ACTIVATION_FUNCTION
#define ACTIVATION
#define CUSTOM_AF2_DEFINITION DFLOAT CUSTOM_AF2(const DFLOAT &x);
#define CSTA ||| (𝗖𝗨𝗦𝗧𝗢𝗠)
#if defined(CUSTOM_DF2)
#undef CUSTOM_DF2
#undef CUSTOM_DF2_DEFINITION
#define CUSTOM_DF2 CONCATENATE_WITHOUT_SPACE(CUSTOM_AF2, Der)
#define CUSTOM_DF2_DEFINITION DFLOAT CUSTOM_DF2(const DFLOAT &fx);
#else
#define NO_BACKPROP
#undef NB
#undef NB_CA2
#define NB | 𝗡𝗢_𝗕𝗔𝗖𝗞𝗣𝗥𝗢𝗣 SUPPORT FOR:
#define NB_CA2 |‣ CUSTOM_AF2
#endif
#define CACT2 1
#define ACTIVATION_FUNCTION CUSTOM_AF2
#define CA2 |‣ CUSTOM_AF2
#endif
#if defined(CUSTOM_AF3) && (defined(ACTIVATION__PER_LAYER) || !defined(ACTIVATION))
#undef CA3
#undef CSTA
#undef CACT3
#undef ACTIVATION
#undef CUSTOM_AF3_DEFINITION
#undef ACTIVATION_FUNCTION
#undef DEFAULT_ACTIVATION_FUNCTION
#define ACTIVATION
#define CUSTOM_AF3_DEFINITION DFLOAT CUSTOM_AF3(const DFLOAT &x);
#define CSTA ||| (𝗖𝗨𝗦𝗧𝗢𝗠)
#if defined(CUSTOM_DF3)
#undef CUSTOM_DF3
#undef CUSTOM_DF3_DEFINITION
#define CUSTOM_DF3 CONCATENATE_WITHOUT_SPACE(CUSTOM_AF3, Der)
#define CUSTOM_DF3_DEFINITION DFLOAT CUSTOM_DF3(const DFLOAT &fx);
#else
#define NO_BACKPROP
#undef NB
#undef NB_CA3
#define NB | 𝗡𝗢_𝗕𝗔𝗖𝗞𝗣𝗥𝗢𝗣 SUPPORT FOR:
#define NB_CA3 |‣ CUSTOM_AF3
#endif
#define CACT3 1
#define ACTIVATION_FUNCTION CUSTOM_AF3
#define CA3 |‣ CUSTOM_AF3
#endif
#if defined(CUSTOM_AF4) && (defined(ACTIVATION__PER_LAYER) || !defined(ACTIVATION))
#undef CA4
#undef CSTA
#undef CACT4
#undef ACTIVATION
#undef CUSTOM_AF4_DEFINITION
#undef ACTIVATION_FUNCTION
#undef DEFAULT_ACTIVATION_FUNCTION
#define ACTIVATION
#define CUSTOM_AF4_DEFINITION DFLOAT CUSTOM_AF4(const DFLOAT &x);
#define CSTA ||| (𝗖𝗨𝗦𝗧𝗢𝗠)
#if defined(CUSTOM_DF4)
#undef CUSTOM_DF4
#undef CUSTOM_DF4_DEFINITION
#define CUSTOM_DF4 CONCATENATE_WITHOUT_SPACE(CUSTOM_AF4, Der)
#define CUSTOM_DF4_DEFINITION DFLOAT CUSTOM_DF4(const DFLOAT &fx);
#else
#define NO_BACKPROP
#undef NB
#undef NB_CA4
#define NB | 𝗡𝗢_𝗕𝗔𝗖𝗞𝗣𝗥𝗢𝗣 SUPPORT FOR:
#define NB_CA4 |‣ CUSTOM_AF4
#endif
#define CACT4 1
#define ACTIVATION_FUNCTION CUSTOM_AF4
#define CA4 |‣ CUSTOM_AF4
#endif
#if defined(CUSTOM_AF5) && (defined(ACTIVATION__PER_LAYER) || !defined(ACTIVATION))
#undef CA5
#undef CSTA
#undef CACT5
#undef ACTIVATION
#undef CUSTOM_AF5_DEFINITION
#undef ACTIVATION_FUNCTION
#undef DEFAULT_ACTIVATION_FUNCTION
#define ACTIVATION
#define CUSTOM_AF5_DEFINITION DFLOAT CUSTOM_AF5(const DFLOAT &x);
#define CSTA ||| (𝗖𝗨𝗦𝗧𝗢𝗠)
#if defined(CUSTOM_DF5)
#undef CUSTOM_DF5
#undef CUSTOM_DF5_DEFINITION
#define CUSTOM_DF5 CONCATENATE_WITHOUT_SPACE(CUSTOM_AF5, Der)
#define CUSTOM_DF5_DEFINITION DFLOAT CUSTOM_DF5(const DFLOAT &fx);
#else
#define NO_BACKPROP
#undef NB
#undef NB_CA5
#define NB | 𝗡𝗢_𝗕𝗔𝗖𝗞𝗣𝗥𝗢𝗣 SUPPORT FOR:
#define NB_CA5 |‣ CUSTOM_AF5
#endif
#define CACT5 1
#define ACTIVATION_FUNCTION CUSTOM_AF5
#define CA5 |‣ CUSTOM_AF5
#endif
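/*
   A hedged illustration of how the CUSTOM_AFx hooks above are meant to be used (the name
   MySwish is hypothetical): the macro is defined before the include, and a function with
   the signature declared by CUSTOM_AF1_DEFINITION must then be provided. Defining
   CUSTOM_DF1 additionally expects a derivative named CUSTOM_AF1##Der (here MySwishDer);
   without it, NO_BACKPROP is set for this activation.

   #define CUSTOM_AF1 MySwish
   #define CUSTOM_DF1
   #include <NeuralNetwork.h>
   // ... then implement: DFLOAT MySwish(const DFLOAT &x); DFLOAT MySwishDer(const DFLOAT &fx);
*/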
#define NUM_OF_USED_ACTIVATION_FUNCTIONS (ACT1 + ACT2 + ACT3 + ACT4 + ACT5 + ACT6 + ACT7 + ACT8 + ACT9 + ACT10 + ACT11 + ACT12 + ACT13 + ACT14 + CACT1 + CACT2 + CACT3 + CACT4 + CACT5)
#if !defined(ACTIVATION)
#if defined(ACTIVATION__PER_LAYER)
// ACTIVATE ALL FUNCTIONS
#define NO_BACKPROP
#define ALL_ACTIVATION_FUNCTIONS
#define AL |‣ "(ALL_ACTIVATION_FUNCTIONS)"
#undef NUM_OF_USED_ACTIVATION_FUNCTIONS
#define NUM_OF_USED_ACTIVATION_FUNCTIONS (14 + CACT1 + CACT2 + CACT3 + CACT4 + CACT5)
#undef MSG10
#define MSG10 \n⌥▌"////////////// [⚠] [𝗥𝗲𝗺𝗶𝗻𝗱𝗲𝗿] Backpropagation is not Allowed With (ALL_ACTIVATION_FUNCTIONS)."
#else
//ENABLE DEFAULT ACTIVATION FUNCTION
// I will also create a mechanism to raise #error if more than one is defined, using bit operations?
#undef A1
#undef Sigmoid
#define DEFAULT_ACTIVATION_FUNCTION
#define ACTIVATION // Sigmoid is the default, but for anything else (or more than one) you must declare it
#define ACTIVATION_FUNCTION Sigmoid
#define Sigmoid Sigmoid
#define A1 |‣ Sigmoid
#endif
#endif
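/*
   A hedged example of the activation-selection macros above. A single function is chosen by
   defining its name before the include (Sigmoid is the fallback default); ACTIVATION__PER_LAYER
   enables several at once, selected per layer through the byte *ActFunctionPerLayer array passed
   to the constructor (the index order presumably follows the order in which the enabled
   functions appear in activation_Function_ptrs further below).

   // single activation for the whole network:
   #define Tanh
   #include <NeuralNetwork.h>

   // or, several activations selectable per layer:
   #define ACTIVATION__PER_LAYER
   #define Tanh
   #define ReLU
   #include <NeuralNetwork.h>
*/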
#define MAKE_FUN_NAME1(actname,value) actname(value)
#define MAKE_FUN_NAME2(actname,value) actname ## Der(value)
#define ACTIVATE_WITH(actname,value) MAKE_FUN_NAME1(actname,value)
#define DERIVATIVE_OF(actname,value) MAKE_FUN_NAME2(actname,value)
//LOSS | If there's no Loss function definition and no DISABLE_MSE definition, define the Default | MEAN_SQUARED_ERROR = Default
#if !defined(CATEGORICAL_CROSS_ENTROPY) and !defined(BINARY_CROSS_ENTROPY) and !defined(MEAN_SQUARED_ERROR) and !defined(DISABLE_MSE)
#define DEFAULT_LOSS
#endif
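/*
   Loss selection works the same way (MEAN_SQUARED_ERROR is the implicit default unless
   DISABLE_MSE is defined); e.g. a sketch wanting binary cross-entropy would use:

   #define BINARY_CROSS_ENTROPY
   #include <NeuralNetwork.h>
*/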
#define STR_HELPER(x) #x
#define STR(x) STR_HELPER(x)
#define INFORMATION LOVE MSG0 MSG1 MSG2 MSG3 MSG4 MSG5 MSG6 MSG7 MSG8 MSG9 MSG10 \n\n 𝗨𝗦𝗜𝗡𝗚 [ƒx] AL A1 A2 A3 A4 A5 A6 A7 A8 A9 A10 A11 A12 A13 A14 CSTA CA1 CA2 CA3 CA4 CA5 |⌥|\n\n NB A9 A10 A11 A12 A13 A14 NB_CA1 NB_CA2 NB_CA3 NB_CA4 NB_CA5
#pragma message( STR(INFORMATION) )
// I might change the static variables to plain variables and just pass a pointer from the outer class?
class NeuralNetwork
{
private:
#if defined(USE_INTERNAL_EEPROM)
unsigned int address = 0;
#if defined(ACTIVATION__PER_LAYER)
byte F1; // first activation function only for use in FdF_Individual_iEEPROM
#endif
#endif
int Individual_Input = 0;
bool isAllocdWithNew = true; // Whether weights and biases are allocated with new, for the destructor later | TODO: #if !defined(USE_PROGMEM) etc. in constructors
bool FIRST_TIME_FDFp = false; // Determines whether garbage values are left in the last outputs.
const DFLOAT *_inputs; // Pointer to the primary/first inputs array from the sketch
// (used for backpropagation).
#if defined(SUPPORTS_SD_FUNCTIONALITY)
bool isAlreadyLoadedOnce = false; // Determines whether load() has already been called, so the next call will clean up first | I mean... if you use the SD library then you have a spare byte, right?
#endif
class Layer
{
public:
#if !defined(REDUCE_RAM_STATIC_REFERENCE)
NeuralNetwork *me;
#endif
unsigned int _numberOfInputs; // # of neurons in the previous layer.
unsigned int _numberOfOutputs; // # of neurons in the current layer.
IS_CONST DFLOAT *bias; // bias of this layer || Please do not wrap it in #ifdef USE_INTERNAL_EEPROM because it is also used by FdF_Individual_iEEPROM
DFLOAT *outputs; // outputs of this layer [1D Array] pointers.
//#if defined(REDUCE_RAM_WEIGHTS_LVL1)
// DFLOAT *weights; // weights of this layer [1D Array] pointers. #(used if #REDUCE_RAM_WEIGHTS_LVL1 defined)
//#endif
#if !defined(REDUCE_RAM_WEIGHTS_COMMON)
IS_CONST DFLOAT **weights; // weights of this layer [2D Array] pointers. #(used if NOT #REDUCE_RAM_WEIGHTS_COMMON defined)
#endif
#if !defined(NO_BACKPROP)
DFLOAT *preLgamma; // gamma of previous layer [1D Array] pointers.
#endif
// Default Constructor.
// #0 Constructor.
// #1 Constructor with default/("probably") pretrained weights and biases.
Layer();
#if !defined(USE_PROGMEM)
// ^^^^^ I keep this USE_PROGMEM instead of NO_BACKPROP because that way, if I add a NeuralNetwork::feedforward_PROGMEM, with -fpermissive someone will be able to use both a RAM-NN and a PROGMEM-NN at the same time
Layer(const unsigned int &NumberOfInputs, const unsigned int &NumberOfOutputs, NeuralNetwork * const NN = NULL); // #0
#endif
Layer(const unsigned int &NumberOfInputs, const unsigned int &NumberOfOutputs, IS_CONST DFLOAT *default_Bias, NeuralNetwork * const NN = NULL); // #(used if #REDUCE_RAM_WEIGHTS_LVL2 defined)
Layer(const unsigned int &NumberOfInputs, const unsigned int &NumberOfOutputs, IS_CONST DFLOAT *default_Weights, IS_CONST DFLOAT *default_Bias, NeuralNetwork * const NN = NULL); // #1 #(used if NOT #REDUCE_RAM_WEIGHTS_LVL2 defined)
void FeedForward_Individual(const DFLOAT &input, const int &j);
void FdF_Individual_PROGMEM(const DFLOAT &input, const int &j);
#if defined(USE_INTERNAL_EEPROM)
void FdF_Individual_iEEPROM(const DFLOAT &input, const int &j);
#endif
void FeedForward(const DFLOAT *inputs); // Calculates the outputs() of layer.
void FdF_PROGMEM(const DFLOAT *inputs);
#if defined(USE_INTERNAL_EEPROM)
void FdF_IN_EEPROM(const DFLOAT *inputs);
#endif
#if !defined (NO_BACKPROP)
void BackPropOutput(const DFLOAT *_expected_, const DFLOAT *inputs);
void BackPropHidden(const Layer *frontLayer, const DFLOAT *inputs);
#endif
// "Extra Math"
DFLOAT erf(DFLOAT x);
// If I actually consider using other complicated activation functions, I might need to rethink the before_Activation_output array [...]
DFLOAT Sigmoid (const DFLOAT &x ); // Sigmoid Activation Function 1/(1+e^(-x)) .
DFLOAT SigmoidDer (const DFLOAT &fx); // Derivative of Sigmoid Activation Function.
DFLOAT Tanh (const DFLOAT &x );
DFLOAT TanhDer (const DFLOAT &fx);
DFLOAT ReLU (const DFLOAT &x );
DFLOAT ReLUDer (const DFLOAT &fx); // x is also fx on ReLU
DFLOAT LeakyELU (const DFLOAT &x );
DFLOAT LeakyELUDer(const DFLOAT &fx);
DFLOAT ELU (const DFLOAT &x ); // α = 1
DFLOAT ELUDer (const DFLOAT &fx);
DFLOAT SELU (const DFLOAT &x ); // Maybe use https://stackoverflow.com/a/42264773/11465149
DFLOAT SELUDer (const DFLOAT &fx);
void Softmax ();
DFLOAT SoftmaxSum (const DFLOAT &x ); // computes exp(outputs[i] + (*bias)) for each output and sums it into sumOfSoftmax
DFLOAT SoftmaxDer (const DFLOAT &fx);
DFLOAT Identity (const DFLOAT &x );
DFLOAT IdentityDer(const DFLOAT &x );
// Custom Activation Function Definitions (e.g. DFLOAT CUSTOM_AFX(...);)
CUSTOM_AF1_DEFINITION
CUSTOM_AF2_DEFINITION
CUSTOM_AF3_DEFINITION
CUSTOM_AF4_DEFINITION
CUSTOM_AF5_DEFINITION
CUSTOM_DF1_DEFINITION
CUSTOM_DF2_DEFINITION
CUSTOM_DF3_DEFINITION
CUSTOM_DF4_DEFINITION
CUSTOM_DF5_DEFINITION
// NO_BACKPROP support
DFLOAT BinaryStep (const DFLOAT &x );
DFLOAT Softplus (const DFLOAT &x );
DFLOAT SiLU (const DFLOAT &x );
DFLOAT GELU (const DFLOAT &x );
DFLOAT Mish (const DFLOAT &x );
DFLOAT Gaussian (const DFLOAT &x );
#if defined(USE_INTERNAL_EEPROM)
void print_INTERNAL_EEPROM();
#endif
void print_PROGMEM();
void print();
};
//just like "static DFLOAT *wights" [...] i might have a function to switch?
#if defined(ACTIVATION__PER_LAYER)
typedef DFLOAT (Layer::*method_function) (const DFLOAT &);
inline static const method_function (activation_Function_ptrs)[NUM_OF_USED_ACTIVATION_FUNCTIONS] = {
#if defined(ALL_ACTIVATION_FUNCTIONS) or defined(Sigmoid)
&Layer::Sigmoid,
#endif
#if defined(ALL_ACTIVATION_FUNCTIONS) or defined(Tanh)
&Layer::Tanh,
#endif
#if defined(ALL_ACTIVATION_FUNCTIONS) or defined(ReLU)
&Layer::ReLU,
#endif
#if defined(ALL_ACTIVATION_FUNCTIONS) or defined(LeakyELU)
&Layer::LeakyELU,
#endif
#if defined(ALL_ACTIVATION_FUNCTIONS) or defined(ELU)
&Layer::ELU,
#endif
#if defined(ALL_ACTIVATION_FUNCTIONS) or defined(SELU)
&Layer::SELU,
#endif
#if defined(ALL_ACTIVATION_FUNCTIONS) or defined(Softmax)
&Layer::SoftmaxSum,
#endif
#if defined(ALL_ACTIVATION_FUNCTIONS) or defined(Identity)
&Layer::Identity,
#endif
#if defined(ALL_ACTIVATION_FUNCTIONS) or defined(BinaryStep)
&Layer::BinaryStep,
#endif
#if defined(ALL_ACTIVATION_FUNCTIONS) or defined(Softplus)
&Layer::Softplus,
#endif
#if defined(ALL_ACTIVATION_FUNCTIONS) or defined(SiLU)
&Layer::SiLU,
#endif
#if defined(ALL_ACTIVATION_FUNCTIONS) or defined(GELU)
&Layer::GELU,
#endif
#if defined(ALL_ACTIVATION_FUNCTIONS) or defined(Mish)
&Layer::Mish,
#endif
#if defined(ALL_ACTIVATION_FUNCTIONS) or defined(Gaussian)
&Layer::Gaussian,
#endif
#if defined(CUSTOM_AF1)
&Layer::CUSTOM_AF1,
#endif
#if defined(CUSTOM_AF2)
&Layer::CUSTOM_AF2,
#endif
#if defined(CUSTOM_AF3)
&Layer::CUSTOM_AF3,
#endif
#if defined(CUSTOM_AF4)
&Layer::CUSTOM_AF4,
#endif
#if defined(CUSTOM_AF5)
&Layer::CUSTOM_AF5,
#endif
};
#if !defined(NO_BACKPROP)
inline static const method_function (derivative_Function_ptrs)[NUM_OF_USED_ACTIVATION_FUNCTIONS] = {
#if defined(Sigmoid)
&Layer::SigmoidDer,
#endif
#if defined(Tanh)
&Layer::TanhDer,
#endif
#if defined(ReLU)
&Layer::ReLUDer,
#endif
#if defined(LeakyELU)
&Layer::LeakyELUDer,
#endif
#if defined(ELU)
&Layer::ELUDer,
#endif
#if defined(SELU)
&Layer::SELUDer,
#endif
#if defined(Softmax)
&Layer::SoftmaxDer,
#endif
#if defined(Identity)
&Layer::IdentityDer,
#endif
#if defined(CUSTOM_DF1)
&Layer::CUSTOM_DF1,
#endif
#if defined(CUSTOM_DF2)
&Layer::CUSTOM_DF2,
#endif
#if defined(CUSTOM_DF3)
&Layer::CUSTOM_DF3,
#endif
#if defined(CUSTOM_DF4)
&Layer::CUSTOM_DF4,
#endif
#if defined(CUSTOM_DF5)
&Layer::CUSTOM_DF5,
#endif
};
#endif
//https://stackoverflow.com/a/31708674/11465149
//http://www.cs.technion.ac.il/users/yechiel/c++-faq/array-memfnptrs.html // ??? [x]
#endif
public:
#if defined(REDUCE_RAM_STATIC_REFERENCE)
static NeuralNetwork *me;
#endif
// Assuming BackProp won't be called more than once right after a FeedForward call IF REDUCE_RAM_WEIGHTS_LVL2 is defined; otherwise I would need a temporary weights-size variable or something.
// Need to add a function for those who want to switch/redirect the pointer to a different weights array... maybe? Why not?!
// There are issues with multiple NNs too...
#if defined(REDUCE_RAM_WEIGHTS_LVL2)
IS_CONST DFLOAT *weights; // pointer to sketch's Array of Weights. #(used if #REDUCE_RAM_WEIGHTS_LVL2 defined)
int i_j = 0;
#endif
#if defined(ACTIVATION__PER_LAYER)
unsigned int AtlayerIndex; // Who is going to make a network with more than 255 layers?! For now this stays an unsigned int; I may add a byte variant too, via a property definition along with a bunch of other things like this, for maximum optimization.
byte *ActFunctionPerLayer; // Let's be realistic... a byte is enough.
#endif
// #5 This is the sum of the exp(outputs) of the previous layer (for All and each layer)
#if defined(ALL_ACTIVATION_FUNCTIONS) or defined(Softmax)
DFLOAT sumOfSoftmax = 0;
#endif
// No negative values allowed (just saying...).
// Alphas and Lambdas of Activation Functions | #6 MACROS.
#if defined(ALL_ACTIVATION_FUNCTIONS) or defined(LeakyELU)
DFLOAT AlphaLeaky = 0.01 ;
#endif
#if defined(ALL_ACTIVATION_FUNCTIONS) or defined(SELU)
DFLOAT AlphaSELU = 1.6733 ;
DFLOAT LamdaSELU = 1.0507 ;
#endif
#if defined(ALL_ACTIVATION_FUNCTIONS) or defined(ELU)
DFLOAT AlphaELU = 1 ;
#endif
Layer *layers; // layers in the network [1D Array].
unsigned int numberOflayers = 0; // Number of layers.
// unsigned float doesn't exist..? lol
#if !defined (NO_BACKPROP)
DFLOAT LearningRateOfWeights = 0.33 ; // Learning Rate of Weights.
DFLOAT LearningRateOfBiases = 0.066; // Learning Rate of Biases .
#endif
// LOSS VARIABLES | V Because #6
#if defined(MEAN_SQUARED_ERROR) or defined(DEFAULT_LOSS)
DFLOAT sumSquaredError = 0;
DFLOAT MeanSqrdError = 0;
#endif
#if defined(CATEGORICAL_CROSS_ENTROPY)
DFLOAT sumOfCategoricalCrossEntropy = 0;
DFLOAT CategoricalCrossEntropy = 0;
#endif
#if defined(BINARY_CROSS_ENTROPY)
DFLOAT sumOfBinaryCrossEntropy = 0;
DFLOAT BinaryCrossEntropy = 0;
#endif
void pdestract(); // Partial destruction.
~NeuralNetwork(); // Destructor.
NeuralNetwork();
#if defined(USE_INTERNAL_EEPROM)
NeuralNetwork(unsigned int address);
#endif
#if !defined(NO_BACKPROP)
NeuralNetwork(const unsigned int *layer_, const unsigned int &NumberOflayers, byte *_ActFunctionPerLayer = NULL); // #0
NeuralNetwork(const unsigned int *layer_, const unsigned int &NumberOflayers, const DFLOAT &LRw, const DFLOAT &LRb, byte *_ActFunctionPerLayer = NULL); // #0
#endif
NeuralNetwork(const unsigned int *layer_, IS_CONST DFLOAT *default_Weights, IS_CONST DFLOAT *default_Bias, const unsigned int &NumberOflayers, byte *_ActFunctionPerLayer = NULL); // #1
// NeuralNetwork(const unsigned int *layer_, const PROGMEM DFLOAT *default_Weights, const PROGMEM DFLOAT *default_Bias, const unsigned int &NumberOflayers , bool isProgmem); // isProgmem (because of the Error #777) ? i get it in a way but ..
void reset_Individual_Input_Counter();
DFLOAT *FeedForward_Individual(const DFLOAT &input);
DFLOAT *FeedForward(const DFLOAT *inputs); // Moves Calculated outputs as inputs to next layer.
//LOSS FUNCTIONS +common
DFLOAT getMeanSqrdError (unsigned int inputsPerEpoch);
DFLOAT getBinaryCrossEntropy (unsigned int inputsPerEpoch);
DFLOAT getCategoricalCrossEntropy (unsigned int inputsPerEpoch);
DFLOAT loss (DFLOAT &sum, DFLOAT &loss, unsigned int batch_size);
#if !defined (NO_BACKPROP)
void BackProp(const DFLOAT *expected); // Backpropagation (error, delta-weights, etc.).
#endif
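// A hedged usage sketch based only on the declarations above (topology, data and names
// are hypothetical): construct the network from a layer-size array, then call
// FeedForward() and BackProp() for each training pair, e.g.
//   unsigned int topology[] = {2, 4, 1};
//   NeuralNetwork NN(topology, 3);                 // #0 constructor
//   DFLOAT *outputs = NN.FeedForward(inputArray);  // inputArray: DFLOAT[2]
//   NN.BackProp(expectedArray);                    // expectedArray: DFLOAT[1]
//   DFLOAT mse = NN.getMeanSqrdError(numberOfTrainingPairs);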
#if defined(SUPPORTS_SD_FUNCTIONALITY)