9 from data_about_players
import Data
def __init__(self, filename):
    """Train a gnet network on *filename* and start gnet_run over pipes.

    First runs ./gnet/gnet_train on the given pattern file to produce
    gonet.net, then launches ./gnet/gnet_run as a long-lived child process
    whose stdin/stdout this object talks to (see __call__).
    """
    s = "./gnet/gnet_train -l 3 -n 30 -p 1000 -e 0.0005 -o gonet.net ./" + filename
    # Build the argv list from the command string -- the original code passed
    # an unbound name `args` to subprocess.call (NameError).  A list argv also
    # avoids shell interpretation of the filename.
    args = s.split()
    # Blocks until training finishes; return code kept for inspection.
    ret = subprocess.call(args)
    s = "./gnet/gnet_run gonet.net"
    args = s.split()
    # Keep the runner alive; queries are written to its stdin and answers are
    # read back from its stdout one line at a time.
    self.p = subprocess.Popen(args, stdin=subprocess.PIPE, stdout=subprocess.PIPE)
def __call__(self, vector):
    """Send *vector* to the running gnet_run process and return its reply.

    The vector is serialised as a single space-separated line; the child's
    reply line is parsed back into a list of floats.
    """
    self.p.stdin.write(' '.join([str(a) for a in vector]) + '\n')
    # Without an explicit flush the query can sit in the pipe's buffer while
    # the readline() below blocks waiting for the child's answer -- deadlock.
    self.p.stdin.flush()
    a = self.p.stdout.readline()
    return [ float(num) for num in a.split()]
if __name__ == '__main__':
    # NOTE(review): this script section is fragmentary -- several original
    # lines are missing from this chunk (initialisations of `input_vectors`,
    # `bounds`, `prev`, `data`, and the definitions of InputVectorGenerator,
    # num_features, PCA, r, Combinator, print_set_to_file, NeuralNet, errs,
    # es).  Comments below describe only what the visible code does.

    # Pattern file and the per-player questionnaire data come from the
    # data_about_players.Data module.
    main_pat_filename = Data.main_pat_filename
    player_vector = Data.questionare_total
    # Players excluded from the experiment (rest of the list kept commented out).
    players_ignore = [ "Yi Ch'ang-ho 2004-" ]#, "Fujisawa Hideyuki","Yuki Satoshi", "Otake Hideo", "Yi Ch'ang-ho 2005+","Takao Shinji","Hane Naoki","Kobayashi Koichi" ]
    players_all = [ p for p in player_vector.keys() if p not in players_ignore ]

    ### Object creating input vector when called
    print "Creating input vector generator from main pat file:", main_pat_filename
    # `num_features` is not defined in this chunk -- presumably a module-level
    # constant; verify against the full file.
    i = InputVectorGenerator(main_pat_filename, num_features)

    # Create list of input vectors
    # `input_vectors` must be initialised (e.g. to []) in a line missing above.
    for name in players_all:
        input_vectors += [i(Data.pat_files_folder + name)]

    #print '"%s"'%(players_all[2],)
    #print input_vectors[2]

    if len(input_vectors) == 0:
        print >>sys.stderr, "No reference vectors."

    # Change this to False, if you do not want to use PCA
    # Create PCA object, trained on input_vectors
    print >>sys.stderr, "Running PCA."
    pca = PCA(input_vectors, reduce=True)
    # Perform a PCA on input vectors
    input_vectors = pca.process_list_of_vectors(input_vectors)
    # Creates a Composed object that first generates an input vector
    # and then performs a PCA analysis on it.

    ### n/4-fold cross validation
    #bounds = random.sample(range(1,len(players_all)), len(players_all) / 10 )
    # Build fold boundaries at every multiple of 4 below the population size.
    # `bounds` must be initialised in a line missing from this chunk.
    for x in range(1,len(players_all)/4):
        bounds += [4*x for _ in [1] if 4*x < len(players_all)]
    # NOTE(review): this print is unconditional as shown; in the full file it
    # is presumably guarded by a population-size check -- confirm.
    print >>sys.stderr, "Pop too small."

    sentinel=len(players_all)
    # One iteration per fold; `prev` is presumably initialised to 0 before the
    # loop and advanced to `b` at its end (both missing from this chunk).
    for b in bounds+[sentinel]:
        validation_set = range(prev, b)
        reference_set = range(0,prev) + range(b,sentinel)
        print "Reference set :",
        # NOTE(review): the bodies of the three `for pr` loops below are
        # missing from this chunk; as shown they are syntax errors.
        for pr in range(0, prev):
        for pr in validation_set:
        for pr in range(b, sentinel):

        ### Object creating output vector when called;
        # Pair each reference player's input vector with its (r-transformed)
        # questionnaire vector; `data` and `r` come from missing lines.
        for index in reference_set:
            data.append( (input_vectors[index], r(player_vector[players_all[index]])) )

        ### We can enlarge the data set by adding linear combinations of input and output vectors
        use_lin_combinations = True
        if use_lin_combinations:
            data += Combinator().combine(data)

        print_set_to_file(data,'nn_cross.data')
        nn = NeuralNet('nn_cross.data')

        # Create list of output vectors using weighted kNN algorithm approximating output_vector
        # NOTE(review): this bare `return` appears to be the tail of a helper
        # (likely `rand_vect`, see the commented line below) whose `def` line
        # is missing from this chunk; standing alone it is a syntax error.
        return list(10*numpy.random.random(k))
        # Evaluate the trained net on the held-out validation players.
        output_vectors = [ nn(input_vectors[index]) for index in validation_set ]
        #output_vectors = [ r(rand_vect(4)) for index in validation_set ]
        desired_vectors = [ r(player_vector[players_all[index]]) for index in validation_set ]

        # NOTE(review): the bodies of the two loops below (printing of the
        # vector sets and accumulation of squared errors) are missing.
        for vec_set,text in [(output_vectors, "Output: "), (desired_vectors, "Desired:")]:
        for o,d in zip(output_vectors, desired_vectors):
        #			print "%2.3f"%(e,),

        # `errs` / `es` presumably accumulate per-player / per-style squared
        # errors in the missing loop bodies above -- confirm.
        print "Total square err: %2.3f"%( sum(errs),)
        mean = numpy.array(errs).mean()
        print "Mean square err per player: " + u"%2.3f ( = sd \u00B1 %2.3f) "%(mean, sqrt(mean))
        mean = numpy.array(es).mean()
        print "Mean square err per style: " + u"%2.3f ( = sd \u00B1 %2.3f) "%(mean, sqrt(mean))

        print "Players sorted by mean square error:"
        p = zip(errs, players_all)
        # NOTE(review): the sort of `p` and the loop header that binds
        # `err, name` are missing from this chunk.
        print "%2.3f %s"%(err,name)