http://numericalbrain.org/lectures/deep-learning/

Forward propagation: $z^{l}_{i} = f\left(\sum_{j} w^{l-1}_{ij}\, z^{l-1}_{j}\right)$

Sigmoid: $f(x) = \dfrac{1}{1 + e^{-x}}$

ReLU: $f(x) = \begin{cases} x & (x > 0) \\ 0 & (x \le 0) \end{cases}$
typedef struct Layer *layer; Coecto *coecto; sfmt_t rg; t ; Networ; double sel.c typedef struct Neuro *z; t ; Layer; typedef struct Weght *w; t _pre; t _post; Coecto; typedef double Neuro, Weght; vod createnetwor Networ *etwor, cost t umber_of_layers, cost sfmt_t rg etwor -> layer = Layer * malloc umber_of_layers * szeof Layer ; etwor -> coecto = Coecto * malloc umber_of_layers * szeof Coecto ; etwor -> = umber_of_layers; etwor -> rg = rg; vod createlayer Networ *etwor, cost t layer_d, cost t umber_of_euros Layer *layer = &etwor -> layer [ layer_d ]; layer -> = umber_of_euros; layer -> z = Neuro * malloc umber_of_euros * szeof Neuro ; for t = 0; < layer -> ; ++ layer -> z [ ] = 0.; vod deletenetwor Networ *etwor free etwor -> layer ; free etwor -> coecto ; vod deletelayer Networ *etwor, cost t layer_d Layer *layer = &etwor -> layer [ layer_d ]; free layer -> z ; vod createlayer Networ *etwor, cost t layer_d, cost t umber_of_euros Layer *layer = &etwor -> layer [ layer_d ]; layer -> = umber_of_euros; layer -> z = Neuro * malloc umber_of_euros * szeof Neuro ; for t = 0; < layer -> ; ++ layer -> z [ ] = 0.; vod createcoecto Networ *etwor, cost t layer_d, double *fuc Networ *, cost t, cost t Coecto *coecto = &etwor -> coecto [ layer_d ]; cost t _pre = etwor -> layer [ layer_d ]. + 1; // +1 cost t _post = layer_d == etwor -> - 1? 1 etwor -> layer [ layer_d + 1 ]. ; coecto -> w = Weght * malloc _pre * _post * szeof Weght ; vod deletelayer Networ *etwor, cost t layer_d Layer *layer = &etwor -> layer [ layer_d ]; free layer -> z ; vod createlayer Networ *etwor, cost t layer_d, cost t umber_of_euros Layer *layer = &etwor -> layer [ layer_d ]; layer -> = umber_of_euros; t bas = layer_d < etwor -> - 1? 
1 0; // for t = 0; < _post; ++ for t = 0; < _pre; ++ coecto -> w [ + _pre * ] = fuc etwor,, ; coecto -> _pre = _pre; coecto -> _post = _post; vod deletecoecto Networ *etwor, cost t layer_d Coecto *coecto = &etwor -> coecto [ layer_d ]; free coecto -> w ; layer -> z = Neuro * malloc umber_of_euros + bas * szeof Neuro ; for t = 0; < layer -> ; ++ layer -> z [ ] = 0.; f bas layer -> z [ layer -> ] = +1.; //
ma double all_to_all Networ *, cost t, cost t retur 1.; t ma vod sfmt_t rg; sfmt_t_ge_rad &rg, getpd ; // Networ etwor; createnetwor &etwor, 3, rg ; createlayer &etwor, 0, 2 ; createlayer &etwor, 1, 2 ; createlayer &etwor, 2, 1 ; createcoecto &etwor, 0, all_to_all ; createcoecto &etwor, 1, all_to_all ; // deletecoecto &etwor, 1 ; deletecoecto &etwor, 0 ; deletelayer &etwor, 2 ; deletelayer &etwor, 1 ; deletelayer &etwor, 0 ; deletenetwor &etwor ; retur 0; [ya000836@purple99 ~/dl]$ gcc -O3 -std=c99 -Wall -I SFMT-src-1.5.1 -D SFMT_MEXP=19937 -o sel sel.c SFMT-src-1.5.1/SFMT.c [ya000836@purple99 ~/dl]$./sel [ya000836@purple99 ~/dl]$ vod setiput Networ *etwor, Neuro x [ ] Layer *put_layer = &etwor -> layer [ 0 ]; for t = 0; < put_layer -> ; ++ put_layer -> z [ ] = x [ ]; vod forwardpropagato Networ *etwor, double *actvato double for t = 0; < etwor -> - 1; ++ Layer *l_pre = &etwor -> layer [ ]; Layer *l_post = &etwor -> layer [ + 1 ]; Coecto *c = &etwor -> coecto [ ]; for t = 0; < c -> _post; ++ Neuro u = 0.; for t = 0; < c -> _pre; ++ u += c -> w [ + c -> _pre * ] * l_pre -> z [ ] ; l_post -> z [ ] = actvato u ; double sgmod double x retur 1. / 1. + exp - x ; Neuro x [] = 0., 1. ; setiput &etwor, x ; forwardpropagato &etwor, sgmod ; z l = f w l 1 z l 1 vod dump Networ * ;
スケジュール 第1回 10/03 ガイダンス ニューラルネット概論と肩慣らし 第2回 10/08 誤差逆伝播のミニ講義と実装 作ってわかる深層学習 山 匡 MIコース 第3回 10/10 手書き文字認識のテスト 第4回 10/15 自己符号化器のミニ講義と実装 第5回 10/17 実装 第6回 10/22 実装 2回目スライド資料 第7回 10/24 実装 2018年度 MI/CS実験第二 パーセプトロン Roseblat 1958 パーセプトロン パーセプトロン パーセプトロンの特徴 Roseblat 1958 世界で最初のニューラルネット 中間層に多数のニューロン 第一次ブームの火付け役 入力層 中間層の結合は スパースかつランダムで固定 3層構造 入力 中間 出力 中間層 出力層の結合重みを 教師信号を使って変化させる 教師付学習 教師信号 教師付学習機械 教師信号 教師付学習 学習ルール!パーセプトロン則 " " E= 誤差 ベクトル D = x1, z1,, x, z 文脈信号 x と教師信号 zのペア 種類 x を入力層に入れるとzを出力 するように重みを変える 教師付学習 dw 計算する式 E = 2 を定義して これを減らす w hdde = E whdde ベクトル 学習係数 E whdde hdde =w t η Ewhdde t + 1 = w # 1! " teacher output 2 z z 2 誤差! " 1" "zteacher zoutput"2 教師信号E = hdde dt エポック数に 関して離散化 1 " teacher 2 zoutput" z 2 hdde t η! E 個々のサンプル hdde w t + 1 = whdde t η hdde に関して書くと w!! E= E = whdde t η E,whdde
w hdde t + 1 = w hdde t η E = E = w hdde t η w 0.. w l 1 w hdde E,w hdde w hdde w hdde w hdde = z teacher w hdde z output t + 1 = w hdde t η w hdde = w hdde t + η z teacher z output z output 1 z output z output 1 z output z hdde z hdde E = 1 2 z output w hdde = f w hdde z teacher w hdde f x = w hdde = z output z output w hdde z output 2 E = z output z hdde zoutput w hdde = f z teacher w hdde 1 1 + e x f x = f x 1 f x t + 1 = w hdde t η w hdde = w hdde t + η z teacher z output z output z output z hdde z hdde 1 z output Layer *output_layer = &etwor -> layer [ etwor -> - 1 ]; Layer *hdde_layer = &etwor -> layer [ etwor -> - 2 ]; Coecto *c = &etwor -> coecto [ etwor -> - 2 ]; for t = 0; < c -> _post; ++ for t = 0; < c -> _pre; ++ double o = output_layer -> z [ ]; double d = z [ ] - o * dff_actvato o ; c -> dw [ + c -> _pre * ] += Eta * d * hdde_layer -> z [ ] ; z hdde double updatebyperceptrorule Networ *etwor, Neuro z [ ], double *dff_actvato double ; x1 x0 z 0 0 0 Networ etwor; createnetwor &etwor, 3, rg ; createlayer &etwor, 0, 2 ; createlayer &etwor, 1, 128 ; createlayer &etwor, 2, 1 ; createcoecto &etwor, 0, sparse_radom ; createcoecto &etwor, 1, uform_radom ; 0 1 1 1 0 1 1 1 0 double uform_radom Networ *, cost t, cost t retur 1. - 2. * sfmt_gerad_real2 & -> rg ; double sparse_radom Networ *, cost t, cost t retur sfmt_gerad_real2 & -> rg < 0.5? uform_radom,, 0.; vod createcoecto Networ *etwor, cost t layer_d, double *fuc Networ *, cost t, cost t Coecto *coecto = &etwor -> coecto [ layer_d ]; cost t _pre = etwor -> layer [ layer_d ]. + 1; // +1 cost t _post = layer_d == etwor -> - 1? 1 etwor -> layer [ layer_d + 1 ]. 
; coecto -> w = Weght * malloc _pre * _post * szeof Weght ; for t = 0; < _post; ++ for t = 0; < _pre; ++ coecto -> w [ + _pre * ] = fuc etwor,, ; coecto -> dw = Weght * malloc _pre * _post * szeof Weght ; for t = 0; < _post; ++ for t = 0; < _pre; ++ coecto -> dw [ + _pre * ] = 0.; coecto -> _pre = _pre; coecto -> _post = _post; vod deletecoecto Networ *etwor, cost t layer_d Coecto *coecto = &etwor -> coecto [ layer_d ]; free coecto -> w ; free coecto -> dw ; Neuro x [ 4 ][ 2 ] = 0., 0., 0., 1., 1., 0., 1., 1. ; Neuro z [ 4 ][ 1 ] = 0., 1., 1., 0. ; cost t umber_of_trag_data = 4; // Trag double error = 1.0; // arbtrary large umber cost double Epslo = 0.001; // tolerat error rate t = 0; whle error > Epslo error = 0.; talzedw &etwor ; for t = 0; < umber_of_trag_data; ++ //t = t umber_of_trag_data * sfmt_gerad_real2 &rg ; t = ; setiput &etwor, x [ ] ; forwardpropagato &etwor, sgmod ; error += updatebyperceptrorule &etwor, z [ ], dff_sgmod ; updatew &etwor ; prtf "%d %f\",, error ; ++; fprtf stderr, "# of epochs = %d\", ; double dff_sgmod double z retur z * 1. - z ; // f'x = fx 1 - fx = z 1 - z
// Test Layer *output_layer = &etwor. layer [ etwor. - 1 ]; cost t = output_layer -> ; for t = 0; < umber_of_trag_data; ++ setiput &etwor, x [ ] ; forwardpropagato &etwor, sgmod ; for t = 0; < ; ++ fprtf stderr, "%f%s", output_layer -> z [ ], == - 1? "\" " " ; vod talzedw Networ *etwor Coecto *c = &etwor -> coecto [ etwor -> - 2 ]; for t = 0; < c -> _post; ++ for t = 0; < c -> _pre; ++ c -> dw [ + c -> _pre * ] = 0.; vod updatew Networ *etwor Coecto *c = &etwor -> coecto [ etwor -> - 2 ]; for t = 0; < c -> _post; ++ for t = 0; < c -> _pre; ++ c -> w [ + c -> _pre * ] += c -> dw [ + c -> _pre * ]; [ya000836@purple99 ~/dl]$ gcc -O3 -std=c99 -Wall -I SFMT-src-1.5.1 -D SFMT_MEXP=19937 -o perceptro perceptro.c SFMT-src-1.5.1/SFMT.c [ya000836@purple99 ~/dl]$./perceptro 0 0.879170 1 0.836272 2 0.766234 3 0.658244 4 0.543239 4302001 0.001000 4302002 0.001000 4302003 0.001000 4302004 0.001000 4302005 0.001000 # of epochs = 4302006 0.022023 0.977739 0.977538 0.022691 [ya000836@purple99 ~/dl]$./perceptro > perceptro.dat 0.022023 0.977739 0.977538 0.022691 [ya000836@purple99 ~/dl]$ guplot G N U P L O T Verso 4.6 patchlevel 6 last modfed September 2014 Buld System Darw x86_64 Copyrght C 1986-1993, 1998, 2004, 2007-2014 Thomas Wllams, Col Kelley ad may others guplot home faq, bugs, etc mmedate help Termal type set to 'x11' guplot> plot 'perceptro.dat' http//www.guplot.fo type "help FAQ" type "help" plot wdow ht 'h'
l-1 l w l 1 w l 1 z l w l 1 t + 1 = w l 1 t η = z l z l w l 1 = z l 1 z l z l 1 w l 1 l E = z teacher z l z l z l = w l 1 z l+1 z l+1 z l t + 1 = w l 1 t + η E = z l z teacher z l+1 z l z l+1 1 z l+1 z l w l z l 1 z l l s output layer otherwse z l 1 z l+1 z l+1 z l z l = z l+1 z l+1 z l = f = z l+1 w l zl 1 z l+1 w l w l z l = z l+1 = f z teacher z l w l zl f updatebybacpropagato typedef double Neuro, Delta, Weght; typedef struct Neuro *z; Delta *delta; t ; Layer; vod createlayer Networ *etwor, cost t layer_d, cost t umber_of_euros layer -> delta = Delta * malloc umber_of_euros + bas * szeof Delta ; for t = 0; < layer -> ; ++ layer -> delta [ ] = 0.; f bas layer -> delta [ layer -> ] = 0.; // vod deletelayer Networ *etwor, cost t layer_d free layer -> delta ; perceptro.c updatebyperceptrorule updatebybacpropagato updatebybacpropagato dl/code/bp.c
code/mst.c, h code/mst_test.c code/mst/ mae mst_test mdr pg./mst_test./pg E = 1 z teacher z output 2 2 =4 =60000 whle error > Epslo error = 0.; talzedw &etwor ; for t = 0; < umber_of_trag_data; ++ //t = t umber_of_trag_data * sfmt_gerad_real2 &rg ; t = ; setiput &etwor, x [ ] ; forwardpropagato &etwor, sgmod ; error += updatebybacpropagato &etwor, z [ ], dff_sgmod ; whle error > Epslo error = 0.; talzedw &etwor ; for t = 0; < MINI_BATCH_SIZE; ++ t = t umber_of_trag_data * sfmt_gerad_real2 &rg ; setiput &etwor, x [ ] ; forwardpropagato &etwor, sgmod ; error += updatebybacpropagato &etwor, z [ ], dff_sgmod ;
mnist_generate_png

$z^{l}_{i} = f\left(\sum_{j} w^{l-1}_{ij}\, z^{l-1}_{j} - \theta^{l}_{i}\right)$

$\dfrac{dw^{l}_{ij}}{dt} = -\dfrac{\partial E}{\partial w^{l}_{ij}}, \qquad \dfrac{d\theta^{l}_{i}}{dt} = -\dfrac{\partial E}{\partial \theta^{l}_{i}}$
スケジュール 第1回 10/03 ガイダンス ニューラルネット概論と肩慣らし 第2回 10/08 誤差逆伝播のミニ講義と実装 作ってわかる深層学習 山 匡 MIコース 第3回 10/10 手書き文字認識のテスト 第4回 10/15 自己符号化器のミニ講義と実装 第5回 10/17 実装 第6回 10/22 実装 3回目スライド資料 第7回 10/24 実装 2018年度 MI/CS実験第二 自己符号化器 オートエンコーダ 復号化 自己符号化器 符号化 例 画像圧縮 深層化と勾配消失問題 復号化 フーリエ変換と画像圧縮 武田祐一 https//www.sldeshare.et/grou799/ss-46355460 符号化 画像をうまく表現する基底を構成する 深層化と勾配消失問題 f x = 1 1 + e x f x = f x 1 f x < 1/4 逆伝播する度にデルタの値が指数的に小さくなる 深層化と勾配消失問題! x f x = 0 x>0 x 0! 1 x>0 f x = 0 x 0 ReLUはその点は性質がいい 一気に普及
vod copycoecto Networ *etwor_src, t layer_d_src, Networ *etwor_dst, t layer_d_dst Networ etwor; createnetwor &etwor1, 3, rg ; createlayer &etwor1, 0, MNIST_IMAGE_SIZE ; createlayer &etwor1, 1, 128 ; createlayer &etwor1, 2, MNIST_IMAGE_SIZE ; createcoecto &etwor1, 0, uform_radom ; createcoecto &etwor1, 1, uform_radom ; copycoectowthtraspose &etwor, 0, &etwor, 1 ; vod copycoectowthtraspose Networ *etwor_src, t layer_d_src, Networ *etwor_dst, t layer_d_dst ; double updatebybacpropagatopartal Networ *etwor, Neuro z [ ], double *dff_actvato double ; // Trag for t = 0; < MNIST_TRAINING_DATA_SIZE; ++ talzedw &etwor1 ; double error = 0.; copycoectowthtraspose &etwor, 0, &etwor, 1 ; for t = 0; < 1; ++ //t = t MNIST_TRAINING_DATA_SIZE * sfmt_gerad_real2 &rg ; t = ; // Use all trag data oe by oe sequetally setiput &etwor1, trag_mage [ ] ; forwardpropagato &etwor1, sgmod ; error += updatebybacpropagatopartal &etwor1, etwor1. layer [ 0 ]. z, dff_sgmod ; updatew &etwor1 ; f % 100 == 0 prtf "%d %f\",, error ; Networ etwor2; createnetwor &etwor2, 4, rg ; createlayer &etwor2, 0, MNIST_IMAGE_SIZE ; createlayer &etwor2, 1, 128 ; createlayer &etwor2, 2, 64 ; createlayer &etwor2, 3, 128 ; createcoecto &etwor2, 0, uform_radom ; createcoecto &etwor2, 1, uform_radom ; createcoecto &etwor2, 2, uform_radom ; copycoecto &etwor1, 0, &etwor2, 0 ; copycoectowthtraspose &etwor2, 1, &etwor2, 2 ; deletecoecto &etwor1, 1 ; deletecoecto &etwor1, 0 ; deletelayer &etwor1, 2 ; deletelayer &etwor1, 1 ; deletelayer &etwor1, 0 ; deletenetwor &etwor1 ; // Trag for t = 0; < MNIST_TRAINING_DATA_SIZE; ++ talzedw &etwor2 ; double error = 0.; copycoectowthtraspose &etwor2, 1, &etwor2, 2 ; for t = 0; < 1; ++ //t = t MNIST_TRAINING_DATA_SIZE * sfmt_gerad_real2 &rg ; t = ; // Use all trag data oe by oe sequetally setiput &etwor2, trag_mage [ ] ; forwardpropagato &etwor2, sgmod ; error += updatebybacpropagatopartal &etwor2, etwor2. layer [ 1 ]. z, dff_sgmod ; updatew &etwor2 ; f % 100 == 0 prtf "%d %f\",, error ;