<!doctype html>
<html class="no-js" lang="en">
<head>
<meta charset="utf-8" />
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
<title>
CNN Training Notes for a Low-Quality Danmaku Classifier - 雪地
</title>
<link href="atom.xml" rel="alternate" title="雪地" type="application/atom+xml">
<link rel="stylesheet" href="asset/css/foundation.min.css" />
<link rel="stylesheet" href="asset/css/docs.css" />
<link rel="icon" href="asset/img/favicon.ico" />
<script src="asset/js/vendor/modernizr.js"></script>
<script src="asset/js/vendor/jquery.js"></script>
<script src="asset/highlightjs/highlight.pack.js"></script>
<link href="asset/highlightjs/styles/github.css" media="screen, projection" rel="stylesheet" type="text/css">
<script>hljs.initHighlightingOnLoad();</script>
<script type="text/javascript">
function before_search(){
var searchVal = 'site:yinzo.github.io ' + document.getElementById('search_input').value;
document.getElementById('search_q').value = searchVal;
return true;
}
</script>
</head>
<body class="antialiased hide-extras">
<div class="marketing off-canvas-wrap" data-offcanvas>
<div class="inner-wrap">
<nav class="top-bar docs-bar hide-for-small" data-topbar>
<section class="top-bar-section">
<div class="row">
<div style="position: relative;width:100%;"><div style="position: absolute; width:100%;">
<ul id="main-menu" class="left">
<li id="menu_item_index"><a href="index.html">Blog</a></li>
<li id="menu_item_archives"><a href="archives.html">Archives</a></li>
<li id="menu_item_about"><a href="http://yinz.xyz/">Home</a></li>
</ul>
<ul class="right" id="search-wrap">
<li>
<form target="_blank" onsubmit="return before_search();" action="http://google.com/search" method="get">
<input type="hidden" id="search_q" name="q" value="" />
<input tabindex="1" type="search" id="search_input" placeholder="Search"/>
</form>
</li>
</ul>
</div></div>
</div>
</section>
</nav>
<nav class="tab-bar show-for-small">
<a href="javascript:void(0)" class="left-off-canvas-toggle menu-icon">
<span> 雪地</span>
</a>
</nav>
<aside class="left-off-canvas-menu">
<ul class="off-canvas-list">
<li><a href="index.html">Blog</a></li>
<li><a href="archives.html">Archives</a></li>
<li><a href="http://yinz.xyz/">Home</a></li>
<li><label>Categories</label></li>
<li><a href="Security%20Info.html">Security Info</a></li>
<li><a href="Adversary%20Learning.html">Adversary Learning</a></li>
<li><a href="TCPIP.html">TCP/IP</a></li>
<li><a href="Pattern%20Recognition.html">Pattern Recognition</a></li>
<li><a href="Python.html">Python</a></li>
<li><a href="OS.html">OS</a></li>
<li><a href="Deep%20Learning.html">Deep Learning</a></li>
<li><a href="Machine%20Learning.html">Machine Learning</a></li>
</ul>
</aside>
<a class="exit-off-canvas" href="#"></a>
<section id="main-content" role="main" class="scroll-container">
<script type="text/javascript">
$(function(){
$('#menu_item_index').addClass('is_active');
});
</script>
<div class="row">
<div class="large-8 medium-8 columns">
<div class="markdown-body article-wrap">
<div class="article">
<h1>CNN Training Notes for a Low-Quality Danmaku Classifier</h1>
<div class="read-more clearfix">
<span class="date">2017/2/6 14:48 下午</span>
<span>posted in </span>
<span class="posted-in"><a href='Deep%20Learning.html'>Deep Learning</a></span>
<span class="comments">
</span>
</div>
</div><!-- article -->
<div class="article-content">
<p>To start, I used the following architecture and trained for 10 epochs.</p>
<pre><code class="language-python">model = Sequential()
model.add(Convolution1D(100, 4, border_mode='valid', input_shape=(100, word_model.vector_size)))
model.add(Activation('relu'))
model.add(Convolution1D(5, 4, border_mode='valid'))
model.add(Activation('relu'))
model.add(Flatten())
model.add(Dense(32, activation='relu'))
model.add(Dense(2, activation='softmax'))
model.compile(loss='categorical_crossentropy',
optimizer='adam',
metrics=['accuracy']
)
</code></pre>
<p>After training, the accuracy looked great at first glance, but printing the predictions showed that none of the low-quality danmaku were being filtered: the high accuracy (0.98) came entirely from dumping every sample into the positive class, which really is a meaningless classification.<br/>
This was also obvious from the loss, which stayed above 15 the whole time.</p>
<span id="more"></span><!-- more -->
<p>I tried increasing the number of filters in the second convolutional layer, but training did not improve noticeably.</p>
<pre><code class="language-python">model = Sequential()
model.add(Convolution1D(100, 4, border_mode='valid', input_shape=(100, word_model.vector_size)))
model.add(Activation('relu'))
model.add(Convolution1D(100, 4, border_mode='valid'))
model.add(Activation('relu'))
model.add(Flatten())
model.add(Dense(32, activation='relu'))
model.add(Dense(2, activation='softmax'))
model.compile(loss='categorical_crossentropy',
              optimizer='adam',
              metrics=['accuracy'])
</code></pre>
<p>Then I enlarged the fully-connected hidden layer, and the loss instantly started plummeting, eventually settling around 0.3. Printing the predictions showed the results were indeed decent, but some of the shorter low-quality danmaku were still slipping through unrecognized.</p>
<pre><code class="language-python">model = Sequential()
model.add(Convolution1D(100, 4, border_mode='valid', input_shape=(100, word_model.vector_size)))
model.add(Activation('relu'))
model.add(Convolution1D(100, 4, border_mode='valid', input_shape=(100, word_model.vector_size)))
model.add(Activation('relu'))
model.add(Flatten())
model.add(Dense(100, activation='relu'))
model.add(Dense(2, activation='softmax'))
model.compile(loss='categorical_crossentropy',
              optimizer='adam',
              metrics=['accuracy'])
</code></pre>
<p>Since the last run did not seem to have converged all the way, I increased the number of epochs to see where this model's ceiling is.</p>
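<p>The only change is the epoch count in the fit call; a sketch assuming the Keras 1 API used above (nb_epoch; in Keras 2 the argument is spelled epochs), with placeholder array names and batch size:</p>
<pre><code class="language-python"># Same model as above, trained for 100 epochs instead of 10.
model.fit(x_train, y_train, batch_size=32, nb_epoch=100)
</code></pre>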
<p>After setting it to 100 epochs:</p>
<pre><code>Epoch 100/100
2999/2999 [==============================] - 9s - loss: 3.7457e-04 - acc: 1.0000
</code></pre>
<p>Although both acc and loss reached outrageous levels, I realized I had forgotten to shuffle the training and test sets.<br/>
I still took a quick look at the test results: the detection rate for abusive danmaku was, as expected, 0, since everything got classified as normal danmaku. Time to shuffle and retrain: first 10 epochs to see how it does, then 100 epochs to gauge how badly it overfits.</p>
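<p>The fix is just a shared permutation before splitting; a sketch assuming the samples and their one-hot labels live in two aligned NumPy arrays (names and the ratio variable are placeholders):</p>
<pre><code class="language-python">import numpy as np

# Shuffle samples and labels with the same permutation, then split.
perm = np.random.permutation(len(samples))
samples, labels = samples[perm], labels[perm]

train_ratio = 0.6   # matches the 2999 train / 2001 test split seen in the logs
split = int(len(samples) * train_ratio)
x_train, y_train = samples[:split], labels[:split]
x_test,  y_test  = samples[split:], labels[split:]
</code></pre>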
<p>With 10 epochs the accuracy was around 92.45. Manual inspection of the output was passable: it caught some of the bad danmaku, but not enough, so I adjusted it to 20 epochs to see.</p>
<pre><code>Epoch 20/20
2999/2999 [==============================] - 9s - loss: 0.0689 - acc: 0.9810
</code></pre>
<pre><code>Correct: 1918
Incorrect: 83
Accuracy: 95.852
</code></pre>
<p>Manual inspection improved somewhat, but it is still not good enough; raising it to 50 epochs.</p>
<pre><code>Epoch 50/50
2999/2999 [==============================] - 9s - loss: 6.4179e-04 - acc: 1.0000
</code></pre>
<pre><code>Correct: 1963
Incorrect: 38
Accuracy: 98.101
</code></pre>
<p>The training numbers look great, but checking how the danmaku are actually classified shows the model has overfit: it labels essentially every low-quality danmaku as a normal one.</p>
<pre><code>Negative damku accuracy: 7.692
True negative: 2
False negative: 24
</code></pre>
<p>Going back to 10 epochs, and trying to plot an ROC curve.</p>
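<p>A sketch of the ROC computation with scikit-learn, scoring each test sample by the predicted probability of the low-quality class (which column corresponds to that class is an assumption about the label encoding, and the array names are placeholders):</p>
<pre><code class="language-python">import matplotlib.pyplot as plt
from sklearn.metrics import roc_curve, auc

# Score each test sample by the predicted probability of the "low-quality" class.
probs = model.predict(x_test)
scores = probs[:, 0]            # assumes column 0 is the low-quality class
y_true = y_test[:, 0]           # one-hot labels: 1 if low-quality

fpr, tpr, thresholds = roc_curve(y_true, scores)
print('AUC:', auc(fpr, tpr))
plt.plot(fpr, tpr)
plt.xlabel('False positive rate')
plt.ylabel('True positive rate')
plt.show()
</code></pre>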
<pre><code>Epoch 10/10
2999/2999 [==============================] - 9s - loss: 0.7608 - acc: 0.8943
Correct: 1886
Incorrect: 115
Overall accuracy: 94.253
Negative damku accuracy: 30.769
True negative: 8
False negative: 18
</code></pre>
<p>Then 20 epochs:</p>
<pre><code>Epoch 20/20
2999/2999 [==============================] - 10s - loss: 0.1805 - acc: 0.9710
Correct: 1908
Incorrect: 93
Overall accuracy: 95.352
Negative damku accuracy: 19.231
True negative: 5
False negative: 21
</code></pre>
<p>15 epochs:</p>
<pre><code>Epoch 15/15
2999/2999 [==============================] - 9s - loss: 0.3569 - acc: 0.9650
Correct: 1782
Incorrect: 219
Overall accuracy: 89.055
Negative damku accuracy: 46.154
True negative: 12
False negative: 14
</code></pre>
<pre><code>Epoch 17/17
2999/2999 [==============================] - 9s - loss: 0.3631 - acc: 0.9847
Correct: 1893
Incorrect: 108
Overall accuracy: 94.603
Negative damku accuracy: 26.923
True negative: 7
False negative: 19
</code></pre>
<pre><code>2999/2999 [==============================] - 10s - loss: 0.2556 - acc: 0.9760
Correct: 1816
Incorrect: 185
Overall accuracy: 90.755
Negative damku accuracy: 30.769
True negative: 8
False negative: 18
</code></pre>
<p>It suddenly occurred to me that the training data does not actually have to follow the real-world class distribution, so I can simply reuse the danmaku data from the earlier Bayes classifier (when training that Bayes classifier I had not paid attention to the class distribution of the training samples, which was a mistake). After importing the new samples, I ran an epoch-by-epoch test, as sketched below.</p>
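<p>The logs that follow interleave one epoch of training with a manual evaluation; a minimal sketch of what such a loop can look like (the metric names mirror the printout, the array names are placeholders, and class 0 is assumed to be the low-quality class):</p>
<pre><code class="language-python">import numpy as np

for epoch in range(1, 21):
    # One epoch of training at a time, then evaluate by hand.
    model.fit(x_train, y_train, batch_size=32, nb_epoch=1)

    pred = np.argmax(model.predict(x_test), axis=1)
    true = np.argmax(y_test, axis=1)

    correct = int(np.sum(pred == true))
    neg_mask = (true == 0)                          # assumed: class 0 = low-quality danmaku
    true_neg = int(np.sum(pred[neg_mask] == 0))     # low-quality caught as low-quality
    false_neg = int(np.sum(neg_mask)) - true_neg    # low-quality missed as normal

    print('Train epoch:', epoch)
    print('Correct:', correct)
    print('Incorrect:', len(true) - correct)
    print('Overall accuracy: %.3f' % (100.0 * correct / len(true)))
    print('Negative danmaku accuracy: %.3f' % (100.0 * true_neg / np.sum(neg_mask)))
    print('True negative:', true_neg)
    print('False negative:', false_neg)
    print('==========')
</code></pre>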
<pre><code>Epoch 1/1
5405/5405 [==============================] - 19s - loss: 0.6693 - acc: 0.6699
Train epoch: 1
Correct: 2912
Incorrect: 692
Overall accuracy: 80.799
Negative damku accuracy: 81.522
True negative: 1328
False negative: 301
==========
Epoch 1/1
5405/5405 [==============================] - 19s - loss: 0.4421 - acc: 0.8344
Train epoch: 2
Correct: 3133
Incorrect: 471
Overall accuracy: 86.931
Negative damku accuracy: 82.627
True negative: 1346
False negative: 283
==========
Epoch 1/1
5405/5405 [==============================] - 19s - loss: 0.2734 - acc: 0.9075
Train epoch: 3
Correct: 3294
Incorrect: 310
Overall accuracy: 91.398
Negative damku accuracy: 86.618
True negative: 1411
False negative: 218
==========
Epoch 1/1
5405/5405 [==============================] - 20s - loss: 0.1724 - acc: 0.9404
Train epoch: 4
Correct: 3365
Incorrect: 239
Overall accuracy: 93.368
Negative damku accuracy: 90.117
True negative: 1468
False negative: 161
==========
Epoch 1/1
5405/5405 [==============================] - 22s - loss: 0.1117 - acc: 0.9641
Train epoch: 5
Correct: 3390
Incorrect: 214
Overall accuracy: 94.062
Negative damku accuracy: 92.879
True negative: 1513
False negative: 116
==========
Epoch 1/1
5405/5405 [==============================] - 18s - loss: 0.0770 - acc: 0.9771
Train epoch: 6
Correct: 3416
Incorrect: 188
Overall accuracy: 94.784
Negative damku accuracy: 94.598
True negative: 1541
False negative: 88
==========
Epoch 1/1
5405/5405 [==============================] - 18s - loss: 0.0505 - acc: 0.9858
Train epoch: 7
Correct: 3403
Incorrect: 201
Overall accuracy: 94.423
Negative damku accuracy: 91.590
True negative: 1492
False negative: 137
==========
Epoch 1/1
5405/5405 [==============================] - 18s - loss: 0.0414 - acc: 0.9900
Train epoch: 8
Correct: 3426
Incorrect: 178
Overall accuracy: 95.061
Negative damku accuracy: 94.905
True negative: 1546
False negative: 83
==========
Epoch 1/1
5405/5405 [==============================] - 18s - loss: 0.0365 - acc: 0.9902
Train epoch: 9
Correct: 3417
Incorrect: 187
Overall accuracy: 94.811
Negative damku accuracy: 92.756
True negative: 1511
False negative: 118
==========
Epoch 1/1
5405/5405 [==============================] - 18s - loss: 0.0231 - acc: 0.9943
Train epoch: 10
Correct: 3415
Incorrect: 189
Overall accuracy: 94.756
Negative damku accuracy: 92.449
True negative: 1506
False negative: 123
==========
Epoch 1/1
5405/5405 [==============================] - 18s - loss: 0.0320 - acc: 0.9906
Train epoch: 11
Correct: 3396
Incorrect: 208
Overall accuracy: 94.229
Negative damku accuracy: 94.905
True negative: 1546
False negative: 83
==========
Epoch 1/1
5405/5405 [==============================] - 18s - loss: 0.0158 - acc: 0.9959
Train epoch: 12
Correct: 3416
Incorrect: 188
Overall accuracy: 94.784
Negative damku accuracy: 93.738
True negative: 1527
False negative: 102
==========
Epoch 1/1
5405/5405 [==============================] - 18s - loss: 0.0093 - acc: 0.9983
Train epoch: 13
Correct: 3415
Incorrect: 189
Overall accuracy: 94.756
Negative damku accuracy: 95.212
True negative: 1551
False negative: 78
==========
Epoch 1/1
5405/5405 [==============================] - 18s - loss: 0.0048 - acc: 0.9991
Train epoch: 14
Correct: 3421
Incorrect: 183
Overall accuracy: 94.922
Negative damku accuracy: 94.843
True negative: 1545
False negative: 84
==========
Epoch 1/1
5405/5405 [==============================] - 18s - loss: 0.0052 - acc: 0.9989
Train epoch: 15
Correct: 3421
Incorrect: 183
Overall accuracy: 94.922
Negative damku accuracy: 93.923
True negative: 1530
False negative: 99
==========
Epoch 1/1
5405/5405 [==============================] - 20s - loss: 0.0024 - acc: 0.9998
Train epoch: 16
Correct: 3413
Incorrect: 191
Overall accuracy: 94.700
Negative damku accuracy: 94.291
True negative: 1536
False negative: 93
==========
Epoch 1/1
5405/5405 [==============================] - 18s - loss: 0.0028 - acc: 0.9998
Train epoch: 17
Correct: 3418
Incorrect: 186
Overall accuracy: 94.839
Negative damku accuracy: 93.186
True negative: 1518
False negative: 111
==========
Epoch 1/1
5405/5405 [==============================] - 21s - loss: 0.0024 - acc: 0.9996
Train epoch: 18
Correct: 3415
Incorrect: 189
Overall accuracy: 94.756
Negative damku accuracy: 94.352
True negative: 1537
False negative: 92
==========
Epoch 1/1
5405/5405 [==============================] - 19s - loss: 0.0013 - acc: 0.9996
Train epoch: 19
Correct: 3425
Incorrect: 179
Overall accuracy: 95.033
Negative damku accuracy: 94.475
True negative: 1539
False negative: 90
==========
Epoch 1/1
5405/5405 [==============================] - 18s - loss: 9.6297e-04 - acc: 0.9998
Train epoch: 20
Correct: 3417
Incorrect: 187
Overall accuracy: 94.811
Negative damku accuracy: 93.493
True negative: 1523
False negative: 106
==========
</code></pre>
<p>Then I remembered that the word2vec model was originally trained on danmaku from the entertainment section, which does not quite match this setting. I exported danmaku from the gaming section and retrained it.</p>
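<p>A sketch of retraining the word vectors with gensim on danmaku that have already been segmented into token lists (the corpus variable and the parameters are placeholders; in the gensim versions of that era the vector_size argument was spelled size):</p>
<pre><code class="language-python">from gensim.models import Word2Vec

# game_danmaku: list of tokenized danmaku, e.g. [['这', '游戏', '真', '好玩'], ...]
word_model = Word2Vec(game_danmaku, vector_size=100, window=5, min_count=1, workers=4)
word_model.save('word2vec_game.model')
</code></pre>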
<pre><code>Epoch 1/1
5405/5405 [==============================] - 17s - loss: 0.5780 - acc: 0.7441
Train epoch: 1
Correct: 3140
Incorrect: 464
Overall accuracy: 87.125
Negative damku accuracy: 89.134
True negative: 1452
False negative: 177
==========
Epoch 1/1
5405/5405 [==============================] - 17s - loss: 0.2168 - acc: 0.9258
Train epoch: 2
Correct: 3444
Incorrect: 160
Overall accuracy: 95.560
Negative damku accuracy: 93.738
True negative: 1527
False negative: 102
==========
Epoch 1/1
5405/5405 [==============================] - 18s - loss: 0.0978 - acc: 0.9697
Train epoch: 3
Correct: 3459
Incorrect: 145
Overall accuracy: 95.977
Negative damku accuracy: 95.887
True negative: 1562
False negative: 67
==========
Epoch 1/1
5405/5405 [==============================] - 22s - loss: 0.0606 - acc: 0.9824
Train epoch: 4
Correct: 3426
Incorrect: 178
Overall accuracy: 95.061
Negative damku accuracy: 96.746
True negative: 1576
False negative: 53
==========
Epoch 1/1
5405/5405 [==============================] - 23s - loss: 0.1076 - acc: 0.9678
Train epoch: 5
Correct: 3468
Incorrect: 136
Overall accuracy: 96.226
Negative damku accuracy: 94.537
True negative: 1540
False negative: 89
==========
Epoch 1/1
5405/5405 [==============================] - 20s - loss: 0.0476 - acc: 0.9856
Train epoch: 6
Correct: 3465
Incorrect: 139
Overall accuracy: 96.143
Negative damku accuracy: 95.028
True negative: 1548
False negative: 81
==========
Epoch 1/1
5405/5405 [==============================] - 19s - loss: 0.0285 - acc: 0.9911
Train epoch: 7
Correct: 3472
Incorrect: 132
Overall accuracy: 96.337
Negative damku accuracy: 95.150
True negative: 1550
False negative: 79
==========
Epoch 1/1
5405/5405 [==============================] - 18s - loss: 0.0192 - acc: 0.9943
Train epoch: 8
Correct: 3473
Incorrect: 131
Overall accuracy: 96.365
Negative damku accuracy: 96.010
True negative: 1564
False negative: 65
==========
Epoch 1/1
5405/5405 [==============================] - 18s - loss: 0.0128 - acc: 0.9956
Train epoch: 9
Correct: 3472
Incorrect: 132
Overall accuracy: 96.337
Negative damku accuracy: 95.580
True negative: 1557
False negative: 72
==========
Epoch 1/1
5405/5405 [==============================] - 17s - loss: 0.0079 - acc: 0.9972
Train epoch: 10
Correct: 3474
Incorrect: 130
Overall accuracy: 96.393
Negative damku accuracy: 95.580
True negative: 1557
False negative: 72
==========
Epoch 1/1
5405/5405 [==============================] - 20s - loss: 0.0060 - acc: 0.9981
Train epoch: 11
Correct: 3476
Incorrect: 128
Overall accuracy: 96.448
Negative damku accuracy: 95.396
True negative: 1554
False negative: 75
==========
Epoch 1/1
5405/5405 [==============================] - 27s - loss: 0.0045 - acc: 0.9989
Train epoch: 12
Correct: 3478
Incorrect: 126
Overall accuracy: 96.504
Negative damku accuracy: 95.089
True negative: 1549
False negative: 80
==========
Epoch 1/1
5405/5405 [==============================] - 22s - loss: 0.0031 - acc: 0.9994
Train epoch: 13
Correct: 3476
Incorrect: 128
Overall accuracy: 96.448
Negative damku accuracy: 95.150
True negative: 1550
False negative: 79
==========
Epoch 1/1
5405/5405 [==============================] - 19s - loss: 0.0024 - acc: 0.9994
Train epoch: 14
Correct: 3479
Incorrect: 125
Overall accuracy: 96.532
Negative damku accuracy: 95.089
True negative: 1549
False negative: 80
==========
Epoch 1/1
5405/5405 [==============================] - 19s - loss: 0.0020 - acc: 0.9994
Train epoch: 15
Correct: 3476
Incorrect: 128
Overall accuracy: 96.448
Negative damku accuracy: 94.966
True negative: 1547
False negative: 82
==========
Epoch 1/1
5405/5405 [==============================] - 22s - loss: 0.0018 - acc: 0.9994
Train epoch: 16
Correct: 3474
Incorrect: 130
Overall accuracy: 96.393
Negative damku accuracy: 95.150
True negative: 1550
False negative: 79
==========
Epoch 1/1
5405/5405 [==============================] - 19s - loss: 0.0016 - acc: 0.9994
Train epoch: 17
Correct: 3475
Incorrect: 129
Overall accuracy: 96.421
Negative damku accuracy: 95.457
True negative: 1555
False negative: 74
==========
Epoch 1/1
5405/5405 [==============================] - 21s - loss: 0.0014 - acc: 0.9994
Train epoch: 18
Correct: 3474
Incorrect: 130
Overall accuracy: 96.393
Negative damku accuracy: 95.150
True negative: 1550
False negative: 79
==========
Epoch 1/1
5405/5405 [==============================] - 24s - loss: 0.0013 - acc: 0.9996
Train epoch: 19
Correct: 3474
Incorrect: 130
Overall accuracy: 96.393
Negative damku accuracy: 95.089
True negative: 1549
False negative: 80
==========
Epoch 1/1
5405/5405 [==============================] - 21s - loss: 0.0037 - acc: 0.9991
Train epoch: 20
Correct: 3469
Incorrect: 135
Overall accuracy: 96.254
Negative damku accuracy: 96.624
True negative: 1574
False negative: 55
==========
</code></pre>
<p>A clear improvement.</p>
<p>Next, trying to raise the training ratio to 0.8.</p>
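<p>Same placeholder arrays as in the shuffling sketch, with the split ratio raised:</p>
<pre><code class="language-python">train_ratio = 0.8
split = int(len(samples) * train_ratio)   # with the ~9000 labeled danmaku this gives the 7206 / 1803 split below
x_train, y_train = samples[:split], labels[:split]
x_test,  y_test  = samples[split:], labels[split:]
</code></pre>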
<pre><code>Epoch 1/1
7206/7206 [==============================] - 24s - loss: 0.5097 - acc: 0.7778
Train epoch: 1
Correct: 1673
Incorrect: 130
Overall accuracy: 92.790
Negative damku accuracy: 91.779
True negative: 748
False negative: 67
==========
Epoch 1/1
7206/7206 [==============================] - 23s - loss: 0.1654 - acc: 0.9455
Train epoch: 2
Correct: 1745
Incorrect: 58
Overall accuracy: 96.783
Negative damku accuracy: 95.092
True negative: 775
False negative: 40
==========
Epoch 1/1
7206/7206 [==============================] - 24s - loss: 0.0891 - acc: 0.9732
Train epoch: 3
Correct: 1750
Incorrect: 53
Overall accuracy: 97.060
Negative damku accuracy: 97.055
True negative: 791
False negative: 24
==========
Epoch 1/1
7206/7206 [==============================] - 23s - loss: 0.0570 - acc: 0.9829
Train epoch: 4
Correct: 1739
Incorrect: 64
Overall accuracy: 96.450
Negative damku accuracy: 96.933
True negative: 790
False negative: 25
==========
Epoch 1/1
7206/7206 [==============================] - 22s - loss: 0.0394 - acc: 0.9878
Train epoch: 5
Correct: 1754
Incorrect: 49
Overall accuracy: 97.282
Negative damku accuracy: 96.074
True negative: 783
False negative: 32
==========
Epoch 1/1
7206/7206 [==============================] - 22s - loss: 0.0471 - acc: 0.9872
Train epoch: 6
Correct: 1747
Incorrect: 56
Overall accuracy: 96.894
Negative damku accuracy: 95.706
True negative: 780
False negative: 35
==========
Epoch 1/1
7206/7206 [==============================] - 22s - loss: 0.0266 - acc: 0.9926
Train epoch: 7
Correct: 1735
Incorrect: 68
Overall accuracy: 96.229
Negative damku accuracy: 95.706
True negative: 780
False negative: 35
==========
Epoch 1/1
7206/7206 [==============================] - 26s - loss: 0.0235 - acc: 0.9921
Train epoch: 8
Correct: 1742
Incorrect: 61
Overall accuracy: 96.617
Negative damku accuracy: 95.706
True negative: 780
False negative: 35
==========
Epoch 1/1
7206/7206 [==============================] - 22s - loss: 0.0211 - acc: 0.9928
Train epoch: 9
Correct: 1753
Incorrect: 50
Overall accuracy: 97.227
Negative damku accuracy: 96.074
True negative: 783
False negative: 32
==========
Epoch 1/1
7206/7206 [==============================] - 22s - loss: 0.0207 - acc: 0.9929
Train epoch: 10
Correct: 1750
Incorrect: 53
Overall accuracy: 97.060
Negative damku accuracy: 95.951
True negative: 782
False negative: 33
==========
Epoch 1/1
7206/7206 [==============================] - 22s - loss: 0.0282 - acc: 0.9913
Train epoch: 11
Correct: 1743
Incorrect: 60
Overall accuracy: 96.672
Negative damku accuracy: 96.442
True negative: 786
False negative: 29
==========
Epoch 1/1
7206/7206 [==============================] - 22s - loss: 0.0174 - acc: 0.9947
Train epoch: 12
Correct: 1737
Incorrect: 66
Overall accuracy: 96.339
Negative damku accuracy: 96.564
True negative: 787
False negative: 28
==========
Epoch 1/1
7206/7206 [==============================] - 22s - loss: 0.0135 - acc: 0.9965
Train epoch: 13
Correct: 1741
Incorrect: 62
Overall accuracy: 96.561
Negative damku accuracy: 96.933
True negative: 790
False negative: 25
==========
Epoch 1/1
7206/7206 [==============================] - 22s - loss: 0.0106 - acc: 0.9965
Train epoch: 14
Correct: 1743
Incorrect: 60
Overall accuracy: 96.672
Negative damku accuracy: 96.687
True negative: 788
False negative: 27
==========
Epoch 1/1
7206/7206 [==============================] - 22s - loss: 0.0068 - acc: 0.9975
Train epoch: 15
Correct: 1751
Incorrect: 52
Overall accuracy: 97.116
Negative damku accuracy: 95.460
True negative: 778
False negative: 37
==========
Epoch 1/1
7206/7206 [==============================] - 22s - loss: 0.0053 - acc: 0.9982
Train epoch: 16
Correct: 1748
Incorrect: 55
Overall accuracy: 96.950
Negative damku accuracy: 96.564
True negative: 787
False negative: 28
==========
Epoch 1/1
7206/7206 [==============================] - 23s - loss: 0.0051 - acc: 0.9986
Train epoch: 17
Correct: 1751
Incorrect: 52
Overall accuracy: 97.116
Negative damku accuracy: 95.460
True negative: 778
False negative: 37
==========
Epoch 1/1
7206/7206 [==============================] - 24s - loss: 0.0038 - acc: 0.9989
Train epoch: 18
Correct: 1749
Incorrect: 54
Overall accuracy: 97.005
Negative damku accuracy: 96.319
True negative: 785
False negative: 30
==========
Epoch 1/1
7206/7206 [==============================] - 22s - loss: 0.0036 - acc: 0.9990
Train epoch: 19
Correct: 1747
Incorrect: 56
Overall accuracy: 96.894
Negative damku accuracy: 95.583
True negative: 779
False negative: 36
==========
Epoch 1/1
7206/7206 [==============================] - 23s - loss: 0.0035 - acc: 0.9989
Train epoch: 20
Correct: 1746
Incorrect: 57
Overall accuracy: 96.839
Negative damku accuracy: 95.215
True negative: 776
False negative: 39
==========
</code></pre>
<p>Test performance improved by roughly 1 to 2 percentage points.</p>
<p>I cannot think of anything else worth optimizing for now, so the model from epoch 3 is chosen as the final model.</p>
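<p>To actually keep the epoch-3 weights, one option (a sketch, not necessarily what was done here) is to save a snapshot after every epoch of the evaluation loop and reload the chosen one afterwards:</p>
<pre><code class="language-python"># Inside the per-epoch loop shown earlier: save a snapshot each epoch.
model.save('cnn_danmaku_epoch_%02d.h5' % epoch)

# Later, reload the chosen snapshot as the final model.
from keras.models import load_model
final_model = load_model('cnn_danmaku_epoch_03.h5')
</code></pre>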
</div>
<div class="row">
<div class="large-6 columns">
<p class="text-left" style="padding:15px 0px;">
<a href="14863637393852.html"
title="Previous Post: 低素质弹幕分类器的CNN实现">« 低素质弹幕分类器的CNN实现</a>
</p>
</div>
<div class="large-6 columns">
<p class="text-right" style="padding:15px 0px;">
<a href="14828521678770.html"
title="Next Post: 在 Linode 上编译 hybla 模块">在 Linode 上编译 hybla 模块 »</a>
</p>
</div>
</div>
<div class="comments-wrap">
<div class="share-comments">
<div id="disqus_thread"></div>
<script>
/**
* RECOMMENDED CONFIGURATION VARIABLES: EDIT AND UNCOMMENT THE SECTION BELOW TO INSERT DYNAMIC VALUES FROM YOUR PLATFORM OR CMS.
* LEARN WHY DEFINING THESE VARIABLES IS IMPORTANT: https://disqus.com/admin/universalcode/#configuration-variables
*/
/*
var disqus_config = function () {
this.page.url = PAGE_URL; // Replace PAGE_URL with your page's canonical URL variable
this.page.identifier = PAGE_IDENTIFIER; // Replace PAGE_IDENTIFIER with your page's unique identifier variable
};
*/
(function() { // DON'T EDIT BELOW THIS LINE
var d = document, s = d.createElement('script');
s.src = '//yinzo.disqus.com/embed.js';
s.setAttribute('data-timestamp', +new Date());
(d.head || d.body).appendChild(s);
})();
</script>
<noscript>Please enable JavaScript to view the <a href="https://disqus.com/?ref_noscript" rel="nofollow">comments powered by Disqus.</a></noscript>
</div>
</div>
</div><!-- article-wrap -->
</div><!-- large 8 -->
<div class="large-4 medium-4 columns">
<div class="hide-for-small">
<div id="sidebar" class="sidebar">
<div id="site-info" class="site-info">
<div class="site-a-logo"><img src="asset/img/3.png" /></div>
<h1>雪地</h1>
<div class="site-des"></div>
<div class="social">
<a class="github" target="_blank" href="https://github.com/Yinzo" title="GitHub">GitHub</a>
<a class="email" href="mailto:yinz995-1@yahoo.com" title="Email">Email</a>
<a class="rss" href="atom.xml" title="RSS">RSS</a>
</div>
</div>
<div id="site-categories" class="side-item ">
<div class="side-header">
<h2>Categories</h2>
</div>
<div class="side-content">
<p class="cat-list">
<a href="Security%20Info.html"><strong>Security Info</strong></a>
<a href="Adversary%20Learning.html"><strong>Adversary Learning</strong></a>