! Copyright (C) 2011 Roman Maksymchuk. ! See http://factorcode.org/license.txt for BSD license.
! NOTE(review): this file appears to have been flattened by a text
! extraction pass. Several word-like tokens that resemble markup tags
! ( e.g. <array> ) and at least one closing string quote look stripped.
! Comments below describe only what the visible tokens do.
USING: accessors alien.c-types arrays colors.constants combinators
combinators.smart fry google.charts grouping io kernel locals math
math.functions math.parser math.vectors models namespaces prettyprint
random sequences specialized-arrays strings tools.time ui ui.gadgets
ui.gadgets.labels ui.gadgets.tracks ui.gadgets.worlds
prettyprint.config ;
IN: data-mining.neural-nets
FROM: alien.c-types => float ;
SPECIALIZED-ARRAY: float
FROM: alien.c-types => int ;
SPECIALIZED-ARRAY: int

! Feed-forward network state: layer sizes, one weight matrix per layer,
! the activation words and their derivative words, and the per-layer
! outputs recorded by the most recent forward pass ( see net-prod ).
TUPLE: net { #inputs integer } { #outputs integer } { #layers integer }
{ weights array } { act-funcs array } { act-deriv-funcs array }
{ outputs array } ;

! Global knobs: act-mult is the activation steepness multiplier a ;
! dWeights holds the previous weight deltas ( momentum term ) ; err
! holds the last computed error ; echo? / graph? / calc-err? toggle
! logging, plotting and error tracking during training.
SYMBOLS: act-mult dWeights err echo? graph? calc-err? mod-epoch mod-error ;

2.0 act-mult set-global
f not echo? set-global ! f not == t , so these default to on
f not graph? set-global
f not calc-err? set-global

! One-element 1.0 sequence appended to layer inputs as the bias term.
CONSTANT: el-1 { 1.0 } inline

! Activation functions and their derivatives ; a == act-mult get.
! tanh( a x )
: act-tanh ( x -- y ) act-mult get * tanh ; inline
! a ( 1 - tanh( a x ) ^ 2 ) -- derivative of act-tanh w.r.t. the
! pre-activation x ( recomputes act-tanh from x ).
: act-tanh-deriv ( x -- y ) act-tanh sq 1 swap - act-mult get * ; inline
! a x -- linear activation
: act-lin ( x -- y ) act-mult get * ; inline
! constant a -- derivative of act-lin
: act-lin-deriv ( x -- y ) drop act-mult get ; inline
! 2 / ( 1 + exp( - a x ) ) - 1 -- logistic rescaled into ( -1 , 1 )
: act-logist ( x -- y ) act-mult get * neg exp 1 + 2 swap / 1 - ; inline
! 2 a x ( 1 - x ) -- NOTE(review): unlike act-tanh-deriv this formula
! looks like it expects the activation OUTPUT rather than the
! pre-activation x ; confirm which value the callers pass in.
: act-logist-deriv ( x -- y ) 1 over - * 2 * act-mult get * ; inline

! Element-wise error measures between a target and an output vector.
! squared difference per component
: err-sq ( seq1 seq2 -- seq ) v- dup v* ; inline
! signed difference per component
: err-diff ( seq1 seq2 -- seq ) v- ; inline
! absolute difference per component
: err-abs ( seq1 seq2 -- seq ) v- vabs ; inline

! Build a net from a seq of layer sizes and a seq of activation words
! ( one per non-input layer ; defaults to act-tanh when empty ). The
! matching derivative word is looked up for each activation via case.
! NOTE(review): the string literal below has lost its closing quote to
! extraction damage ; the original presumably ended it before throw.
: net-new ( layers-seq act-func-seq -- net )
[ dup length 1 - \ act-tanh ] when-empty dup
[ { { \ act-tanh [ \ act-tanh-deriv ] }
{ \ act-lin [ \ act-lin-deriv ] }
{ \ act-logist [ \ act-logist-deriv ] }
[ . "No such activation function! 
throw ] } case ] map
net new [ [ act-deriv-funcs<< ] [ act-funcs<< ] bi ] keep
over [ first >>#inputs ] [ last >>#outputs ] [ length 1 - >>#layers ] tri
! Pair adjacent layer sizes ( 2 clump ) to shape one weight matrix per
! layer ; the 1 + row width accounts for the appended bias element.
! NOTE(review): a constructor word ( e.g. <array> ) appears stripped
! from the replicate quotation here by extraction damage.
swap 2 clump [ [ last ] [ first ] bi 1 + '[ _ ] replicate ] map >>weights ; inline

! Fill every weight with a uniform random value in ( -0.1 , 0.1 ).
: net-init ( net -- net ) dup weights>>
[ [ [ drop 2000 random 1000 - 10000 /f ] map ] map ] map >>weights ; inline

! Forward pass: append the bias element, take the dot product with each
! row of the layer matrix, apply that layer's activation word, and
! record every layer's output into the outputs slot.
: net-prod ( input net -- ) [ el-1 append ] dip
[ [ weights>> ] [ act-funcs>> ] bi
[| act | [ v. act execute( x -- y ) ] with map dup
[ el-1 append ] dip ] 2map nip ] keep
outputs<< ; inline

! On-line back-propagation: after each sample, run the forward pass,
! compute the output error with err-func, propagate it backwards
! through the layers using the derivative words, build weight deltas
! from the layer inputs and the per-layer params ( rate , decay ,
! momentum ), blend in the previous deltas from dWeights, and update
! the net's weights in place.
! NOTE(review): extraction damage is suspected here too -- the bare 0
! before 2reduce presumably once built a zero vector accumulator.
:: lern-backprop ( input-seq output-seq err-func param-seq epoch net -- )
output-seq input-seq [| inp |
inp net [ net-prod ] [ outputs>> last ] bi
err-func execute( x1 x2 -- y )
net [ outputs>> reverse ] [ weights>> reverse ]
[ act-deriv-funcs>> reverse ] tri
[| act | [ [ act execute( x -- y ) * ] 2map dup ] dip
dup first length 1 - 0 [ but-last-slice n*v v+ ] 2reduce swap ] 3map nip
net outputs>> but-last-slice inp prefix reverse swap
param-seq [ first3 [ epoch ^ * 1 ] dip - * :> step
[ el-1 append ] dip [ step * v*n ] with map ] 3map reverse
dWeights get param-seq [ third :> mom [ mom v*n v+ ] 2map ] 3map
dup dWeights set
net weights>> [ [ v+ ] 2map ] 2map net weights<< ] 2each ; inline

! Batch back-propagation: same error propagation as lern-backprop, but
! the deltas are accumulated over the whole training set and the
! weights are updated once at the end ( with rate decay by epoch and
! momentum from the previous batch's dWeights ).
:: lern-backprop-batch ( input-seq output-seq err-func param-seq epoch net -- )
dWeights get [ [ length 0 ] map ] map
output-seq input-seq [| inp |
inp net [ net-prod ] [ outputs>> last ] bi
err-func execute( x1 x2 -- y )
net [ outputs>> reverse ] [ weights>> reverse ]
[ act-deriv-funcs>> reverse ] tri
[| act | [ [ act execute( x -- y ) * ] 2map dup ] dip
dup first length 1 - 0 [ but-last-slice n*v v+ ] 2reduce swap ] 3map nip
net outputs>> but-last-slice inp prefix reverse swap
[ [ el-1 append ] dip [ v*n ] with map ] 2map reverse
[ [ v+ ] 2map ] 2map ] 2each
dWeights get param-seq [ first3 :> mom epoch ^ * 1 mom - * :> step
[ [ step v*n ] [ mom v*n ] bi* v+ ] 2map ] 3map
dup dWeights set
net weights>> [ [ v+ ] 2map ] 2map net weights<< ; inline
! Root-mean-square error of the net over a data set, normalized by the
! sample count and the number of output units ; the value is also
! stored into the err global before being returned.
:: calc-rmse-error ( input-seq output-seq net -- error ) 0
output-seq input-seq
[ net [ net-prod ] [ outputs>> last ] bi err-sq sum + ] 2each
sqrt output-seq length / net #outputs>> / dup err set ; inline

! Classification error rate: for each sample, compare the index of the
! largest target component against the index of the largest net output
! and count mismatches ; per-class counts are accumulated in the err
! global. Returns mismatches divided by the number of samples.
! NOTE(review): as elsewhere in this file, a constructor word seems to
! have been stripped from the replicate quotation below.
:: calc-class-error ( input-seq output-seq net -- error )
net #outputs>> dup '[ _ ] replicate err set 0
output-seq input-seq [ net
[ net-prod [ supremum ] [ index ] bi ]
[ outputs>> last [ supremum ] [ index ] bi ] bi
2dup swap err get nth [ nth 1 + ] [ set-nth ] 2bi
= not [ 1 + ] when ] 2each
output-seq length /f ; inline

! Train the net for epochs epochs with lern-func / err-func, resetting
! dWeights first and expanding a single param triple to all layers.
! NOTE(review): this definition continues past the visible chunk ; the
! tail below is reproduced verbatim, unmodified.
:: net-learn ( train-input-seq train-output-seq test-input-seq test-output-seq err-func lern-func param-seq calc-error epochs net -- )
net weights>> [ [ length 0 ] map ] map dWeights set
param-seq dup length 1 = [ net #layers>> swap first ] when :> params
echo? get [ [ vertical "Net: " net #inputs>> number>string append
net weights>> [ length neg number>string append ] each