4 "Protocol implemented by any element of a neural network."
5 (run [this x] "Evaluates the net")
6 (train [this x y d] "Trains the net returning the updated net and deltas")
7 (collect [this key] "Collect key from the network")
8 (inputs [this] "Number of inputs")
9 (outputs [this] "Number of outputs")
10 (check [this] "Ensure that the number of inputs matches the outputs"))
15 ;; a map initializes a single neural element
16 clojure.lang.IPersistentMap
17 (run [this x] ((this :phi) ((this :accum) x (this :weights))))
18 (train [this x y d] ((fn [delta] [delta ((this :train) this delta)])
19 ((this :learn) this x (or y (run this x)) d)))
20 (collect [this key] (this key))
21 (inputs [this] (count (this :weights)))
25 ;; a list of many ns in the same layer
27 (run [this x] (vec (map (fn [n] (run n x)) this)))
29 (let [trained (map (fn [n d] (train n x (run n x) d)) this d)]
30 [(vec (apply map (comp vec list) (vec (map first trained))))
31 (map second trained)]))
32 (collect [this key] (map (fn [el] (collect el key)) this))
33 (inputs [this] (apply max (map inputs this)))
34 (outputs [this] (reduce + (map outputs this)))
37 (map-indexed (fn [i err] (when (not (empty? err)) {i err}))
40 ;; a vector of ns in different layers
41 clojure.lang.IPersistentVector
43 (reduce (fn [x layer] (run layer (if (vector? x) x [x]))) x this))
45 (let [xs (reverse (reduce
46 (fn [xs layer] (cons (run layer (first xs)) xs))
48 trained (reduce (fn [ds [x n y]]
49 (cons (train n x y (first (first ds))) ds))
51 (reverse (map list xs this (rest xs))))
52 deltas (map first trained)
53 net (map second trained)]
54 [(first (first trained)) (vec (map second (butlast trained)))]))
55 (collect [this key] (vec (map (fn [el] (collect el key)) this)))
56 (inputs [this] (inputs (first this)))
57 (outputs [this] (outputs (last this)))
59 (let [boundries (map (fn [el] [(inputs el) (outputs el)]) this)]
63 (when (not (= (second a) (first b)))
64 {:at i :from (second a) :to (first b)}))
65 (map (fn [a b] [a b]) boundries (rest boundries)))))))
68 ;; run a single simple neuron
69 (let [n {:phi identity
70 :accum (comp (partial reduce +) (partial map *))
74 ;; show the numbers of inputs and outputs of some networks
75 (let [n {:weights [1 2 1 4 1]}]
76 (outputs [n (list n n) n]) ; 1
77 (outputs (list n (list n n) n)) ; 4
78 (inputs [n (list n n) n]) ; 5
79 (inputs (list n (list n n) n))) ; 5
81 ;; evaluate a single neuron
83 :accum (comp (partial reduce +) (partial map (fn [x w] (* x w))))
88 (let [n {:phi identity
89 :accum (comp (partial reduce +) (partial map (fn [x w] (* x w))))}
90 n1 (assoc n :weights [1 1])
91 n2 (assoc n :weights [2 2])]
92 (run [(list n1 n2) n2] [1 2])) ; 18
94 ;; learning on a perceptron
(defn perceptron:learn
  "Perceptron learning rule: returns the vector of weight deltas for one
   update step.
   this - neuron map; reads :eta (the learning rate)
   x    - input vector
   y    - actual output of the neuron
   d    - desired output
   The step direction is the sign of the error (d - y); each delta is
   dir * eta * x_i."
  [this x y d]
  ;; BUG FIX: the original cond tested `d` (the desired output) instead of
  ;; the error `dif`, so the learning direction ignored the actual output y
  ;; (e.g. y=1, d=0 produced zero deltas instead of negative ones).
  (let [dif (- d y)
        dir (cond (pos? dif) 1
                  (neg? dif) -1
                  :else 0)]
    (vec (map (fn [xi] (* dir (this :eta) xi)) x))))
99 (let [n {:phi identity
101 :accum (comp (partial reduce +) (partial map (fn [x w] (* x w))))
102 :learn perceptron:learn
104 (assoc n :weights (vec (map + (n :weights) delta))))
112 (= d (run (second (train n x (run n x) d)) x)) ; true
113 ((second (train n [0 1] (run n [0 1]) 0)) :weights) ; [1.0 0.9]
114 ;; converting binary to decimal
118 (second (train n (first p) (run n (first p)) (second p))))
119 m epic)) n (range 100))
120 :weights)) ; [2.0000000000000013 1.0000000000000007]
122 ;; back propagation learning
123 (defn back-prop:run [v]
126 (defn back-prop:learn [this x res d]
130 {:delta-w (* (this :eta) gradient y)
133 (map :y x) (this :weights))))
134 (if (and (map? d) (get d :desired))
135 (* (- (d :desired) (res :y)) ; output layer
136 ((this :d-phi) (res :v)))
137 (* ((this :d-phi) (get res :v 1)) ; hidden layer
138 (reduce + (map (fn [a] (* (a :gradient) (a :weight)))
139 (if (vector? d) d (vec (list d)))))))))
141 (let [n {:phi back-prop:run
143 :accum (comp (partial reduce +)
144 (partial map (fn [x w] (* (x :y) w))))
145 :learn back-prop:learn
147 n1 (assoc n :weights [1 1])
148 n2 (assoc n :weights [2 2])
151 (run n1 x) ; {:v 2, :y 2}
152 (run (list n1 n2) x) ; [{:v 2, :y 2} {:v 4, :y 4}]
153 (run [(list n1 n2) n1] x)) ; {:v 6, :y 6}
155 ;; learning binary representations with back-propagation
156 (let [n {:phi back-prop:run
158 :accum (comp (partial reduce +)
159 (partial map (fn [x w] (* (x :y) w))))
160 :learn back-prop:learn
161 :train (fn [n delta] (assoc n :weights
162 (vec (map + (n :weights)
163 (map :delta-w delta)))))
165 :weights [0.5 0.5 0.5]}
179 (let [x (vec (map (fn [el] {:y el}) (first p)))
180 d {:desired (second p)}]
181 ;; (println (format "x:%S->%S run:%S"
182 ;; x (d :desired) ((run n x) :y)))
183 (second (train n x (run n x) d))))
184 m epic)) net (range 20))]
186 ;; converting binary to decimal
189 [(first p) (second p)
190 ((run trained (map (fn [el] {:y el}) (first p))) :y)])
194 ;; [[0 0 1] 1 1.0000000325609912]
195 ;; [[0 1 0] 2 2.0000000163062657]
196 ;; [[0 1 1] 3 3.000000048867257]
197 ;; [[1 0 0] 4 3.9999998651699054]
198 ;; [[1 0 1] 5 4.999999897730897]
199 ;; [[1 1 0] 6 5.999999881476171]
200 ;; [[1 1 1] 7 6.999999914037161])
202 ;; check if inputs match the outputs
203 (let [n {:phi back-prop:run
205 :accum (comp (partial reduce +)
206 (partial map (fn [x w] (* (x :y) w))))
207 :learn back-prop:learn
209 n0 (assoc n :weights [1])
210 n1 (assoc n :weights [1 1])
211 n2 (assoc n :weights [2 2])
214 (check [(list n1 n2) n2
215 (list n1 n1) (list n1 n1 n2 n2 n1)])) ; ({:at 1, :from 1, :to 2})