# loc_h_dmv.py
#
# dmv reestimation and inside-outside probabilities using loc_h, and
# no CNF-style rules

#import numpy # numpy provides Fast Arrays, for future optimization
import io
from common_dmv import *

### todo: debug with @accepts once in a while, but it's SLOW
# from typecheck import accepts, Any

if __name__ == "__main__":
    print "loc_h_dmv module tests:"

def adj(middle, loc_h):
    "middle is eg. k when rewriting for i<k<j (inside probabilities)."
    return middle == loc_h or middle == loc_h+1 # ADJ == True
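
# Example (added for illustration): with the head at loc_h == 1, adj(1, 1) and
# adj(2, 1) are True (the boundary touches the head word), while adj(3, 1) is
# False, i.e. the non-adjacent case NON.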

def make_GO_AT(p_STOP,p_ATTACH):
    p_GO_AT = {}
    for (a,h,dir), p_ah in p_ATTACH.iteritems():
        p_GO_AT[a,h,dir, NON] = p_ah * (1-p_STOP[h, dir, NON])
        p_GO_AT[a,h,dir, ADJ] = p_ah * (1-p_STOP[h, dir, ADJ])
    return p_GO_AT
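
# Worked example (added for illustration, numbers made up): if
# p_ATTACH[a,h,RIGHT] were 0.5 and p_STOP[h,RIGHT,ADJ] were 0.3, make_GO_AT
# would give p_GO_AT[a,h,RIGHT,ADJ] = 0.5 * (1-0.3) = 0.35, i.e. the
# probability of not stopping in the adjacent case and then attaching a.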

class DMV_Grammar(io.Grammar):
    def __str__(self):
        def t(n):
            return "%d=%s" % (n, self.numtag(n))
        def p(dict,key):
            if key in dict:
                return dict[key]
            else:
                return 0.0
        def p_a(a,h):
            p_L = p(self.p_ATTACH,(a,h,LEFT))
            p_R = p(self.p_ATTACH,(a,h,RIGHT))
            if p_L == 0.0 and p_R == 0.0:
                return ''
            else:
                if p_L > 0.0:
                    str = "p_ATTACH[ %s|%s,L] = %.4f" % (t(a), t(h), p_L)
                else:
                    str = ''
                if p_R > 0.0:
                    str = str.ljust(40)
                    str += "p_ATTACH[ %s|%s,R] = %.4f" % (t(a), t(h), p_R)
                return str+'\n'

        root, stop, att, ord = "","","",""
        for h in self.headnums():
            root += "p_ROOT[%s] = %.4f\n" % (t(h), p(self.p_ROOT, (h)))
            stop += "p_STOP[stop|%s,L,adj] = %.4f\t" % (t(h), p(self.p_STOP, (h,LEFT,ADJ)))
            stop += "p_STOP[stop|%s,R,adj] = %.4f\n" % (t(h), p(self.p_STOP, (h,RIGHT,ADJ)))
            stop += "p_STOP[stop|%s,L,non] = %.4f\t" % (t(h), p(self.p_STOP, (h,LEFT,NON)))
            stop += "p_STOP[stop|%s,R,non] = %.4f\n" % (t(h), p(self.p_STOP, (h,RIGHT,NON)))
            att += ''.join([p_a(a,h) for a in self.headnums()])
            ord += "p_ORDER[ left-first|%s ] = %.4f\t" % (t(h), p(self.p_ORDER, (GOL,h)))
            ord += "p_ORDER[right-first|%s ] = %.4f\n" % (t(h), p(self.p_ORDER, (GOR,h)))
        return root + stop + att + ord

    def __init__(self, numtag, tagnum, p_ROOT, p_STOP, p_ATTACH, p_ORDER):
        io.Grammar.__init__(self, numtag, tagnum)
        self.p_ROOT = p_ROOT     # p_ROOT[w] = p
        self.p_ORDER = p_ORDER   # p_ORDER[seals, w] = p
        self.p_STOP = p_STOP     # p_STOP[w, LEFT, NON] = p (etc. for LA,RN,RA)
        self.p_ATTACH = p_ATTACH # p_ATTACH[a, h, LEFT] = p (etc. for R)
        # p_GO_AT[a, h, LEFT, NON] = p (etc. for LA,RN,RA)
        self.p_GO_AT = make_GO_AT(self.p_STOP, self.p_ATTACH)

    def p_GO_AT_or0(self, a, h, dir, adj):
        try:
            return self.p_GO_AT[a, h, dir, adj]
        except:
            return 0.0

def locs(sent_nums, start, stop):
    '''Return the between-word locations of all words in some fragment of
    sent. We make sure to offset the locations correctly so that for
    any pair (loc_w, w) yielded, sent_nums[loc_w]==w.

    start is inclusive, stop is exclusive, as in klein-thesis and
    Python's list-slicing.'''
    for i0,w in enumerate(sent_nums[start:stop]):
        loc_w = i0+start
        yield (loc_w, w)
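
# Example (added for illustration): for sent_nums == [4, 5, 6],
# locs(sent_nums, 1, 3) yields (1, 5) and (2, 6) -- each word of the fragment
# paired with its sentence-wide location.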

###################################################
#            P_INSIDE (dmv-specific)              #
###################################################

#@accepts(int, int, (int, int), int, Any(), [str], {tuple:float}, IsOneOf(None,{}))
def inner(i, j, node, loc_h, g, sent, ichart={}, mpptree=None):
    ''' The ichart is of this form:
    ichart[i,j,LHS, loc_h]
    where i and j are between-word positions.

    loc_h gives adjacency (along with k for attachment rules), and is
    needed in P_STOP reestimation. '''
    sent_nums = g.sent_nums(sent)

    def terminal(i,j,node, loc_h, tabs):
        if not i <= loc_h < j:
            if 'INNER' in DEBUG:
                print "%s*= 0.0 (wrong loc_h)" % tabs
            return 0.0
        elif POS(node) == sent_nums[i] and node in g.p_ORDER:
            # todo: add to ichart perhaps? Although, it _is_ simple lookup..
            prob = g.p_ORDER[node]
        else:
            if 'INNER' in DEBUG:
                print "%sLACKING TERMINAL:" % tabs
            prob = 0.0
        if 'INNER' in DEBUG:
            print "%s*= %.4f (terminal: %s -> %s_%d)" % (tabs,prob, node_str(node), sent[i], loc_h)
        return prob

    def e(i,j, (s_h,h), loc_h, n_t):
        def to_mpp(p, L, R):
            if mpptree:
                key = (i,j, (s_h,h), loc_h)
                if key not in mpptree:
                    mpptree[key] = (p, L, R)
                elif mpptree[key][0] < p:
                    mpptree[key] = (p, L, R)

        def tab():
            "Tabs for debug output"
            return "\t"*n_t

        if (i, j, (s_h,h), loc_h) in ichart:
            if 'INNER' in DEBUG:
                print "%s*= %.4f in ichart: i:%d j:%d node:%s loc:%s" % (tab(),ichart[i, j, (s_h,h), loc_h], i, j,
                                                                         node_str((s_h,h)), loc_h)
            return ichart[i, j, (s_h,h), loc_h]
        else:
            # Either terminal rewrites, using p_ORDER:
            if i+1 == j and (s_h == GOR or s_h == GOL):
                return terminal(i, j, (s_h,h), loc_h, tab())
            else: # Or not at terminal level yet:
                if 'INNER' in DEBUG:
                    print "%s%s (%.1f) from %d to %d" % (tab(),node_str((s_h,h)),loc_h,i,j)
                if s_h == SEAL:
                    p_RGOL = g.p_STOP[h, LEFT, adj(i,loc_h)] * e(i,j,(RGOL,h),loc_h,n_t+1)
                    p_LGOR = g.p_STOP[h, RIGHT, adj(j,loc_h)] * e(i,j,(LGOR,h),loc_h,n_t+1)
                    p = p_RGOL + p_LGOR
                    to_mpp(p_RGOL, STOPKEY, (i,j, (RGOL,h),loc_h))
                    to_mpp(p_LGOR, (i,j, (RGOL,h),loc_h), STOPKEY )
                    if 'INNER' in DEBUG:
                        print "%sp= %.4f (STOP)" % (tab(), p)
                elif s_h == RGOL or s_h == GOL:
                    p = 0.0
                    if s_h == RGOL:
                        p = g.p_STOP[h, RIGHT, adj(j,loc_h)] * e(i,j, (GOR,h),loc_h,n_t+1)
                        to_mpp(p, (i,j, (GOR,h),loc_h), STOPKEY)
                    for k in xgo_left(i, loc_h): # i < k <= loc_l(h)
                        p_R = e(k, j, ( s_h,h), loc_h, n_t+1)
                        if p_R > 0.0:
                            for loc_a,a in locs(sent_nums, i, k):
                                p_ah = g.p_GO_AT_or0(a, h, LEFT, adj(k,loc_h))
                                if p_ah > 0.0:
                                    p_L = e(i, k, (SEAL,a), loc_a, n_t+1)
                                    p_add = p_L * p_ah * p_R
                                    p += p_add
                                    to_mpp(p_add,
                                           (i, k, (SEAL,a), loc_a),
                                           (k, j, ( s_h,h), loc_h))
                    if 'INNER' in DEBUG:
                        print "%sp= %.4f (ATTACH)" % (tab(), p)
                elif s_h == GOR or s_h == LGOR:
                    p = 0.0
                    if s_h == LGOR:
                        p = g.p_STOP[h, LEFT, adj(i,loc_h)] * e(i,j, (GOL,h),loc_h,n_t+1)
                        to_mpp(p, (i,j, (GOL,h),loc_h), STOPKEY)
                    for k in xgo_right(loc_h, j): # loc_l(h) < k < j
                        p_L = e(i, k, ( s_h,h), loc_h, n_t+1)
                        if p_L > 0.0:
                            for loc_a,a in locs(sent_nums,k,j):
                                p_ah = g.p_GO_AT_or0(a, h, RIGHT, adj(k,loc_h))
                                p_R = e(k, j, (SEAL,a), loc_a, n_t+1)
                                p_add = p_L * p_ah * p_R
                                p += p_add
                                to_mpp(p_add,
                                       (i, k, ( s_h,h), loc_h),
                                       (k, j, (SEAL,a), loc_a))
                    if 'INNER' in DEBUG:
                        print "%sp= %.4f (ATTACH)" % (tab(), p)
                # elif s_h == GOL: # todo

                ichart[i, j, (s_h,h), loc_h] = p
                return p
    # end of e-function

    inner_prob = e(i,j,node,loc_h, 0)
    if 'INNER' in DEBUG:
        print debug_ichart(g,sent,ichart)
    return inner_prob
# end of dmv.inner(i, j, node, loc_h, g, sent, ichart={})
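
# Usage sketch (added for illustration, not part of the original module): an
# inside probability under the one-tag grammar testgrammar_h() defined in the
# testing section below. regression_tests() checks this value against 0.120.
def example_inner_h():
    h = 0
    return inner(0, 2, (SEAL,h), 0, testgrammar_h(), 'h h'.split(), {})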

def debug_ichart(g,sent,ichart):
    str = "---ICHART:---\n"
    for (s,t,LHS,loc_h),v in ichart.iteritems():
        str += "%s -> %s_%d ... %s_%d (loc_h:%s):\t%.4f\n" % (node_str(LHS,g.numtag),
                                                              sent[s], s, sent[s], t, loc_h, v)
    str += "---ICHART:end---\n"
    return str

def inner_sent(g, sent, ichart={}):
    return sum([g.p_ROOT[w] * inner(0, len(sent), (SEAL,w), loc_w, g, sent, ichart)
                for loc_w,w in locs(g.sent_nums(sent),0,len(sent))])
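
# Sketch (added for illustration): the sentence probability is the sum over
# every choice of root; for testgrammar_h() and the sentence 'h h' this is
# p_ROOT[h]*0.120 + p_ROOT[h]*0.063, i.e. roughly 0.183 given the inside
# probabilities checked in regression_tests() below.
def example_inner_sent_h():
    return inner_sent(testgrammar_h(), 'h h'.split(), {})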

###################################################
#            P_OUTSIDE (dmv-specific)             #
###################################################

#@accepts(int, int, (int, int), int, Any(), [str], {tuple:float}, {tuple:float})
def outer(i,j,w_node,loc_w, g, sent, ichart={}, ochart={}):
    ''' http://www.student.uib.no/~kun041/dmvccm/DMVCCM.html#outer

    w_node is a pair (seals,POS); the w in klein-thesis is made up of
    POS(w) and loc_w '''
    sent_nums = g.sent_nums(sent)

    # local functions:
    def e(i,j,LHS,loc_h): # P_{INSIDE}
        try:
            return ichart[i,j,LHS,loc_h]
        except:
            return inner(i,j,LHS,loc_h,g,sent,ichart)

    def f(i,j,w_node,loc_w):
        if not (i <= loc_w < j):
            return 0.0
        if (i,j,w_node,loc_w) in ochart:
            return ochart[i,j, w_node,loc_w]
        if w_node == ROOT:
            if i == 0 and j == len(sent):
                return 1.0
            else: # ROOT may only be used on full sentence
                return 0.0
        # but we may have non-ROOTs (stops) over full sentence too:
        w = POS(w_node)
        s_w = seals(w_node)

        # todo: try either if p_M > 0.0: or sum(), and speed-test them

        if s_w == SEAL: # w == a
            # todo: do the i<sent<j check here to save on calls?
            p = g.p_ROOT[w] * f(i,j,ROOT,loc_w)
            # left attach
            for k in xgt(j, sent): # j<k<len(sent)+1
                for loc_h,h in locs(sent_nums,j,k):
                    p_wh = g.p_GO_AT_or0(w, h, LEFT, adj(j, loc_h))
                    for s_h in [RGOL, GOL]:
                        p += f(i,k,(s_h,h),loc_h) * p_wh * e(j,k,(s_h,h),loc_h)
            # right attach
            for k in xlt(i): # k<i
                for loc_h,h in locs(sent_nums,k,i):
                    p_wh = g.p_GO_AT_or0(w, h, RIGHT, adj(i, loc_h))
                    for s_h in [LGOR, GOR]:
                        p += e(k,i,(s_h,h), loc_h) * p_wh * f(k,j,(s_h,h), loc_h)

        elif s_w == RGOL or s_w == GOL: # w == h, left stop + left attach
            if s_w == RGOL:
                s_h = SEAL
            else: # s_w == GOL
                s_h = LGOR
            p = g.p_STOP[w, LEFT, adj(i,loc_w)] * f(i,j,( s_h,w),loc_w)
            for k in xlt(i): # k<i
                for loc_a,a in locs(sent_nums,k,i):
                    p_aw = g.p_GO_AT_or0(a, w, LEFT, adj(i, loc_w))
                    p += e(k,i, (SEAL,a),loc_a) * p_aw * f(k,j,w_node,loc_w)

        elif s_w == GOR or s_w == LGOR: # w == h, right stop + right attach
            if s_w == GOR:
                s_h = RGOL
            else: # s_w == LGOR
                s_h = SEAL
            p = g.p_STOP[w, RIGHT, adj(j,loc_w)] * f(i,j,( s_h,w),loc_w)
            for k in xgt(j, sent): # j<k<len(sent)+1
                for loc_a,a in locs(sent_nums,j,k):
                    p_ah = g.p_GO_AT_or0(a, w, RIGHT, adj(j, loc_w))
                    p += f(i,k,w_node,loc_w) * p_ah * e(j,k,(SEAL,a),loc_a)

        ochart[i,j,w_node,loc_w] = p
        return p
    # end outer.f()

    return f(i,j,w_node,loc_w)
# end outer(i,j,w_node,loc_w, g,sent, ichart,ochart)
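
# Usage sketch (added for illustration): an outside probability for the
# half-sealed node (RGOL,h) spanning words 1..3 of 'h h h' with the head at
# position 2; regression_tests() below checks this against 0.58.
def example_outer_h():
    h = 0
    return outer(1, 3, (RGOL,h), 2, testgrammar_h(), 'h h h'.split(), {}, {})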

###################################################
#            Reestimation:                        #
###################################################

def reest_zeros(h_nums):
    '''A dict to hold numerators and denominators for our 6+ reestimation
    formulas. '''
    # todo: p_ORDER?
    fr = { ('ROOT','den'):0.0 } # holds sum over p_sent
    for h in h_nums:
        fr['ROOT','num',h] = 0.0
        for s_h in [GOR,GOL,RGOL,LGOR]:
            x = (s_h,h)
            fr['hat_a','den',x] = 0.0 # = c()
            # not all arguments are attached to, so we just initialize
            # fr['hat_a','num',a,(s_h,h)] as they show up, in reest_freq
            for adj in [NON, ADJ]:
                for nd in ['num','den']:
                    fr['STOP',nd,x,adj] = 0.0
    return fr

def reest_freq(g, corpus):
    fr = reest_zeros(g.headnums())
    ichart = {}
    ochart = {}
    p_sent = None # 50 % speed increase on storing this locally

    # local functions altogether 2x faster than global
    def c(i,j,LHS,loc_h,sent):
        if not p_sent > 0.0:
            return p_sent

        p_in = e(i,j, LHS,loc_h,sent)
        if not p_in > 0.0:
            return p_in

        p_out = f(i,j, LHS,loc_h,sent)
        return p_in * p_out / p_sent
    # end reest_freq.c()

    def f(i,j,LHS,loc_h,sent): # P_{OUTSIDE}
        try:
            return ochart[i,j,LHS,loc_h]
        except:
            return outer(i,j,LHS,loc_h,g,sent,ichart,ochart)
    # end reest_freq.f()

    def e(i,j,LHS,loc_h,sent): # P_{INSIDE}
        try:
            return ichart[i,j,LHS,loc_h]
        except:
            return inner(i,j,LHS,loc_h,g,sent,ichart)
    # end reest_freq.e()

    def w_left(i,j, x,loc_h,sent,sent_nums):
        h = POS(x)
        if not p_sent > 0.0:
            return p_sent

        for k in xtween(i, j):
            p_out = f(i,j, x,loc_h, sent)
            if not p_out > 0.0:
                continue
            p_R = e(k,j, x,loc_h, sent)
            if not p_R > 0.0:
                continue

            for loc_a,a in locs(sent_nums, i,k): # i<=loc_l(a)<k
                p_rule = g.p_GO_AT_or0(a, h, LEFT, adj(k, loc_h))
                p_L = e(i,k, (SEAL,a), loc_a, sent)
                p = p_L * p_out * p_R * p_rule / p_sent
                try:
                    fr['hat_a','num',a,x] += p
                except:
                    fr['hat_a','num',a,x] = p
    # end reest_freq.w_left()

    def w_right(i,j, x,loc_h,sent,sent_nums):
        h = POS(x)
        if not p_sent > 0.0:
            return p_sent

        for k in xtween(i, j):
            p_out = f(i,j, x,loc_h, sent)
            if not p_out > 0.0:
                continue
            p_L = e(i,k, x,loc_h, sent)
            if not p_L > 0.0:
                continue

            for loc_a,a in locs(sent_nums, k,j): # k<=loc_l(a)<j
                p_rule = g.p_GO_AT_or0(a, h, RIGHT, adj(k, loc_h))
                p_R = e(k,j, (SEAL,a),loc_a, sent)
                p = p_L * p_out * p_R * p_rule / p_sent
                try:
                    fr['hat_a','num',a,x] += p
                except:
                    fr['hat_a','num',a,x] = p
    # end reest_freq.w_right()

    # in reest_freq:
    for sent in corpus:
        if 'REEST' in DEBUG:
            print sent
        ichart = {}
        ochart = {}
        p_sent = inner_sent(g, sent, ichart)
        fr['ROOT','den'] += p_sent

        sent_nums = g.sent_nums(sent)

        for loc_h,h in locs(sent_nums,0,len(sent)+1): # locs-stop is exclusive, thus +1
            # root:
            fr['ROOT','num',h] += g.p_ROOT[h] * e(0,len(sent), (SEAL,h),loc_h, sent)

            loc_l_h = loc_h
            loc_r_h = loc_l_h+1

            # left non-adjacent stop:
            for i in xlt(loc_l_h):
                fr['STOP','num',(GOL,h),NON] += c(loc_l_h, j, (LGOR, h),loc_h, sent)
                fr['STOP','den',(GOL,h),NON] += c(loc_l_h, j, (GOL, h),loc_h, sent)
                for j in xgteq(loc_r_h, sent):
                    fr['STOP','num',(RGOL,h),NON] += c(i, j, (SEAL, h),loc_h, sent)
                    fr['STOP','den',(RGOL,h),NON] += c(i, j, (RGOL, h),loc_h, sent)
            # left adjacent stop, i = loc_l_h
            fr['STOP','num',(GOL,h),ADJ] += c(loc_l_h, loc_r_h, (LGOR, h),loc_h, sent)
            fr['STOP','den',(GOL,h),ADJ] += c(loc_l_h, loc_r_h, (GOL, h),loc_h, sent)
            for j in xgteq(loc_r_h, sent):
                fr['STOP','num',(RGOL,h),ADJ] += c(loc_l_h, j, (SEAL, h),loc_h, sent)
                fr['STOP','den',(RGOL,h),ADJ] += c(loc_l_h, j, (RGOL, h),loc_h, sent)
            # right non-adjacent stop:
            for j in xgt(loc_r_h, sent):
                fr['STOP','num',(GOR,h),NON] += c(loc_l_h, j, (RGOL, h),loc_h, sent)
                fr['STOP','den',(GOR,h),NON] += c(loc_l_h, j, (GOR, h),loc_h, sent)
                for i in xlteq(loc_l_h):
                    fr['STOP','num',(LGOR,h),NON] += c(loc_l_h, j, (SEAL, h),loc_h, sent)
                    fr['STOP','den',(LGOR,h),NON] += c(loc_l_h, j, (LGOR, h),loc_h, sent)
            # right adjacent stop, j = loc_r_h
            fr['STOP','num',(GOR,h),ADJ] += c(loc_l_h, loc_r_h, (RGOL, h),loc_h, sent)
            fr['STOP','den',(GOR,h),ADJ] += c(loc_l_h, loc_r_h, (GOR, h),loc_h, sent)
            for i in xlteq(loc_l_h):
                fr['STOP','num',(LGOR,h),ADJ] += c(loc_l_h, j, (SEAL, h),loc_h, sent)
                fr['STOP','den',(LGOR,h),ADJ] += c(loc_l_h, j, (LGOR, h),loc_h, sent)

            # left attachment:
            if 'REEST_ATTACH' in DEBUG:
                print "Lattach %s: for i < %s"%(g.numtag(h),sent[0:loc_h+1])
            for s_h in [RGOL, GOL]:
                x = (s_h, h)
                for i in xlt(loc_l_h): # i < loc_l(h)
                    if 'REEST_ATTACH' in DEBUG:
                        print "\tfor j >= %s"%sent[loc_h:len(sent)]
                    for j in xgteq(loc_r_h, sent): # j >= loc_r(h)
                        fr['hat_a','den',x] += c(i,j, x,loc_h, sent) # v_q in L&Y
                        if 'REEST_ATTACH' in DEBUG:
                            print "\t\tc( %d , %d, %s, %s, sent)=%.4f"%(i,j,node_str(x),loc_h,fr['hat_a','den',x])
                        w_left(i, j, x,loc_h, sent,sent_nums) # compute w for all a in sent

            # right attachment:
            if 'REEST_ATTACH' in DEBUG:
                print "Rattach %s: for i <= %s"%(g.numtag(h),sent[0:loc_h+1])
            for s_h in [GOR, LGOR]:
                x = (s_h, h)
                for i in xlteq(loc_l_h): # i <= loc_l(h)
                    if 'REEST_ATTACH' in DEBUG:
                        print "\tfor j > %s"%sent[loc_h:len(sent)]
                    for j in xgt(loc_r_h, sent): # j > loc_r(h)
                        fr['hat_a','den',x] += c(i,j, x,loc_h, sent) # v_q in L&Y
                        if 'REEST_ATTACH' in DEBUG:
                            print "\t\tc( %d , %d, %s, %s, sent)=%.4f"%(loc_h,j,node_str(x),loc_h,fr['hat_a','den',x])
                        w_right(loc_l_h,j, x,loc_h, sent,sent_nums) # compute w for all a in sent
        # end for loc_h,h
    # end for sent

    return fr

def reestimate(g, corpus):
    fr = reest_freq(g, corpus)
    p_ROOT, p_STOP, p_ATTACH = {},{},{}

    for h in g.headnums():
        reest_head(h, fr, g, p_ROOT, p_STOP, p_ATTACH)
    g.p_STOP = p_STOP
    g.p_ATTACH = p_ATTACH
    g.p_GO_AT = make_GO_AT(p_STOP,p_ATTACH)
    g.p_ROOT = p_ROOT
    return fr
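
# EM-loop sketch (added for illustration; the function name and iteration
# count are arbitrary): reestimate() performs one EM step over the corpus, so
# training is just calling it repeatedly, here while watching the corpus
# log-likelihood computed from inner_sent().
def example_em(iterations=5):
    from math import log
    g = testgrammar()
    for it in range(iterations):
        probs = [inner_sent(g, sent, {}) for sent in testcorpus]
        logprob = sum([log(p) for p in probs if p > 0.0])
        print "EM iteration %d, corpus log-likelihood: %.4f" % (it, logprob)
        reestimate(g, testcorpus)
    return g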

def reest_head(h, fr, g, p_ROOT, p_STOP, p_ATTACH):
    "Given a single head, update g with the reestimated probability."
    # remove 0-prob stuff? todo
    try:
        p_ROOT[h] = fr['ROOT','num',h] / fr['ROOT','den']
    except:
        p_ROOT[h] = fr['ROOT','den']

    for dir in [LEFT,RIGHT]:
        for adj in [ADJ, NON]: # p_STOP
            p_STOP[h, dir, adj] = 0.0
            for s_h in dirseal(dir):
                x = (s_h,h)
                p = fr['STOP','den', x, adj]
                if p > 0.0:
                    p = fr['STOP', 'num', x, adj] / p
                p_STOP[h, dir, adj] += p

        for s_h in dirseal(dir): # make hat_a for p_ATTACH
            x = (s_h,h)
            hat_a = {}

            p_c = fr['hat_a','den',x]
            for a in g.headnums():
                try:
                    hat_a[a,x] = fr['hat_a','num',a,x] / p_c
                except:
                    pass

            sum_hat_a = sum([hat_a[w,x] for w in g.headnums()
                             if (w,x) in hat_a])

            for a in g.headnums():
                if (a,h,dir) not in p_ATTACH:
                    p_ATTACH[a,h,dir] = 0.0
                try: # (a,x) might not be in hat_a
                    p_ATTACH[a,h,dir] += hat_a[a,x] / sum_hat_a
                except:
                    pass

###################################################
#            Most Probable Parse:                 #
###################################################

STOPKEY = (-1,-1,STOP,-1)
ROOTKEY = (-1,-1,ROOT,-1)

def make_mpptree(g, sent):
    '''Tell inner() to make an mpptree, connect ROOT to this. (Logically,
    this should be part of inner_sent though...)'''
    ichart = {}
    mpptree = { ROOTKEY:(0.0, ROOTKEY, None) }
    for loc_w,w in locs(g.sent_nums(sent),0,len(sent)):
        p = g.p_ROOT[w] * inner(0, len(sent), (SEAL,w), loc_w, g, sent, ichart, mpptree)
        L = ROOTKEY
        R = (0,len(sent), (SEAL,w), loc_w)
        if mpptree[ROOTKEY][0] < p:
            mpptree[ROOTKEY] = (p, L, R)
    return mpptree

def parse_mpptree(mpptree, sent):
    '''mpptree is a dict of the form {k:(p,L,R),...}; where k, L and R
    are `keys' of the form (i,j,node,loc).

    returns an mpp of the form [((head, loc_h),(arg, loc_a)), ...],
    where head and arg are tags.'''
    # local functions for clear access to mpptree:
    def k_node(key):
        return key[2]
    def k_POS(key):
        return POS(k_node(key))
    def k_seals(key):
        return seals(k_node(key))
    def k_locnode(key):
        return (k_node(key),key[3])
    def k_locPOS(key):
        return (k_POS(key),key[3])
    def k_terminal(key):
        s_k = k_seals(key) # i+1 == j
        return key[0] + 1 == key[1] and (s_k == GOR or s_k == GOL)
    def t_L(tree_entry):
        return tree_entry[1]
    def t_R(tree_entry):
        return tree_entry[2]

    # arbitrarily, "ROOT attaches to right". We add it here to
    # avoid further complications:
    firstkey = t_R(mpptree[ROOTKEY])
    deps = set([ (k_locPOS(ROOTKEY), k_locPOS(firstkey), RIGHT) ])
    q = [firstkey]

    while len(q) > 0:
        k = q.pop()
        if k_terminal(k):
            continue
        else:
            L = t_L( mpptree[k] )
            R = t_R( mpptree[k] )
            if k_locnode( k ) == k_locnode( L ): # Rattach
                deps.add((k_locPOS( k ), k_locPOS( R ), LEFT))
                q.extend( [L, R] )
            elif k_locnode( k ) == k_locnode( R ): # Lattach
                deps.add((k_locPOS( k ), k_locPOS( L ), RIGHT))
                q.extend( [L, R] )
            elif R == STOPKEY:
                q.append( L )
            elif L == STOPKEY:
                q.append( R )
    return deps

def mpp(g, sent):
    tagf = g.numtag # localized function, todo: speed-test
    mpptree = make_mpptree(g, sent)
    return set([((tagf(h), loc_h), (tagf(a), loc_a))
                for (h, loc_h),(a,loc_a),dir in parse_mpptree(mpptree,sent)])
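
# Usage sketch (added for illustration): print the most probable parse of each
# test sentence as ((head, loc_h), (arg, loc_a)) pairs; this mirrors the
# commented-out mpp-test in the __main__ block at the bottom of the file.
def example_mpp():
    g = testgrammar()
    for s in testcorpus:
        print "sent: %s\nparse: %s" % (s, mpp(g, s))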

########################################################################
# testing functions:                                                   #
########################################################################

testcorpus = [s.split() for s in ['det nn vbd c vbd','vbd nn c vbd',
                                  'det nn vbd', 'det nn vbd c pp',
                                  'det nn vbd', 'det vbd vbd c pp',
                                  'det nn vbd', 'det nn vbd c vbd',
                                  'det nn vbd', 'det nn vbd c vbd',
                                  'det nn vbd', 'det nn vbd c vbd',
                                  'det nn vbd', 'det nn vbd c pp',
                                  'det nn vbd pp', 'det nn vbd', ]]

def testgrammar():
    import loc_h_harmonic
    reload(loc_h_harmonic)

    # make sure these are the way they were when setting up the tests:
    loc_h_harmonic.HARMONIC_C = 0.0
    loc_h_harmonic.FNONSTOP_MIN = 25
    loc_h_harmonic.FSTOP_MIN = 5
    loc_h_harmonic.RIGHT_FIRST = 1.0

    return loc_h_harmonic.initialize(testcorpus)

def ig(s,t,LHS,loc_h):
    return inner(s,t,LHS,loc_h,testgrammar(),'det nn vbd'.split(),{})

def testreestimation():
    g = testgrammar()
    print g
    # DEBUG.add('REEST_ATTACH')
    f = reestimate(g, testcorpus)
    print g
    testreestimation_regression(f)
    return f

def testreestimation_regression(fr):
    f_stops = {('STOP', 'den', (RGOL,3),NON): 12.212773236178391, ('STOP', 'den', (GOR,2),ADJ): 4.0, ('STOP', 'num', (GOR,4),NON): 2.5553487221351365, ('STOP', 'den', (RGOL,2),NON): 1.274904052793207, ('STOP', 'num', (RGOL,1),ADJ): 14.999999999999995, ('STOP', 'den', (GOR,3),ADJ): 15.0, ('STOP', 'num', (RGOL,4),ADJ): 16.65701084787457, ('STOP', 'num', (RGOL,0),ADJ): 4.1600647714443468, ('STOP', 'den', (RGOL,4),NON): 6.0170669155897105, ('STOP', 'num', (RGOL,3),ADJ): 2.7872267638216113, ('STOP', 'num', (RGOL,2),ADJ): 2.9723139990470515, ('STOP', 'den', (RGOL,2),ADJ): 4.0, ('STOP', 'den', (GOR,3),NON): 12.945787931730905, ('STOP', 'den', (RGOL,3),ADJ): 14.999999999999996, ('STOP', 'den', (GOR,2),NON): 0.0, ('STOP', 'den', (RGOL,0),ADJ): 8.0, ('STOP', 'num', (GOR,4),ADJ): 19.44465127786486, ('STOP', 'den', (GOR,1),NON): 3.1966410324085777, ('STOP', 'den', (RGOL,1),ADJ): 14.999999999999995, ('STOP', 'num', (GOR,3),ADJ): 4.1061665495365558, ('STOP', 'den', (GOR,0),NON): 4.8282499043902476, ('STOP', 'num', (RGOL,4),NON): 5.3429891521254289, ('STOP', 'num', (GOR,2),ADJ): 4.0, ('STOP', 'den', (RGOL,4),ADJ): 22.0, ('STOP', 'num', (GOR,1),ADJ): 12.400273895299103, ('STOP', 'num', (RGOL,2),NON): 1.0276860009529487, ('STOP', 'num', (GOR,0),ADJ): 3.1717500956097533, ('STOP', 'num', (RGOL,3),NON): 12.212773236178391, ('STOP', 'den', (GOR,4),ADJ): 22.0, ('STOP', 'den', (GOR,4),NON): 2.8705211946979836, ('STOP', 'num', (RGOL,0),NON): 3.8399352285556518, ('STOP', 'num', (RGOL,1),NON): 0.0, ('STOP', 'num', (GOR,0),NON): 4.8282499043902476, ('STOP', 'num', (GOR,1),NON): 2.5997261047008959, ('STOP', 'den', (RGOL,1),NON): 0.0, ('STOP', 'den', (GOR,0),ADJ): 8.0, ('STOP', 'num', (GOR,2),NON): 0.0, ('STOP', 'den', (RGOL,0),NON): 4.6540557322109795, ('STOP', 'den', (GOR,1),ADJ): 15.0, ('STOP', 'num', (GOR,3),NON): 10.893833450463443}
    for k,v in f_stops.iteritems():
        if not k in fr:
            print '''Regression in P_STOP reestimation, should be fr[%s]=%.4f,
            but %s not in fr'''%(k,v,k)
        elif not "%.10f"%fr[k] == "%.10f"%v:
            print '''Regression in P_STOP reestimation, should be fr[%s]=%.4f,
            got fr[%s]=%.4f.'''%(k,v,k,fr[k])

def testmpp_regression(mpptree,k_n):
    mpp = {ROOTKEY: (2.877072116829971e-05, STOPKEY, (0, 3, (2, 3), 1)),
           (0, 1, (1, 1), 0): (0.1111111111111111, (0, 1, (0, 1), 0), STOPKEY),
           (0, 1, (2, 1), 0): (0.049382716049382713, STOPKEY, (0, 1, (1, 1), 0)),
           (0, 3, (1, 3), 1): (0.00027619892321567721,
                               (0, 1, (2, 1), 0),
                               (1, 3, (1, 3), 1)),
           (0, 3, (2, 3), 1): (0.00012275507698474543, STOPKEY, (0, 3, (1, 3), 1)),
           (1, 3, (0, 3), 1): (0.025280986819448362,
                               (1, 2, (0, 3), 1),
                               (2, 3, (2, 4), 2)),
           (1, 3, (1, 3), 1): (0.0067415964851862296, (1, 3, (0, 3), 1), STOPKEY),
           (2, 3, (1, 4), 2): (0.32692307692307693, (2, 3, (0, 4), 2), STOPKEY),
           (2, 3, (2, 4), 2): (0.037721893491124266, STOPKEY, (2, 3, (1, 4), 2))}
    for k,(v,L,R) in mpp.iteritems():
        k2 = k[0:k_n] # 3 if the new does not check loc_h
        if type(k)==str:
            k2 = k
        if k2 not in mpptree:
            print "mpp regression, %s missing"%(k2,)
        else:
            vnew = mpptree[k2][0]
            if not "%.10f"%vnew == "%.10f"%v:
                print "mpp regression, wanted %s=%.5f, got %.5f"%(k2,v,vnew)

def testgrammar_a():
    h, a = 0, 1
    p_ROOT, p_STOP, p_ATTACH, p_ORDER = {},{},{},{}
    p_ROOT[h] = 0.9
    p_ROOT[a] = 0.1
    p_STOP[h,LEFT,NON] = 1.0
    p_STOP[h,LEFT,ADJ] = 1.0
    p_STOP[h,RIGHT,NON] = 0.4 # RSTOP
    p_STOP[h,RIGHT,ADJ] = 0.3 # RSTOP
    p_STOP[a,LEFT,NON] = 1.0
    p_STOP[a,LEFT,ADJ] = 1.0
    p_STOP[a,RIGHT,NON] = 0.4 # RSTOP
    p_STOP[a,RIGHT,ADJ] = 0.3 # RSTOP
    p_ATTACH[a,h,LEFT] = 1.0 # not used
    p_ATTACH[a,h,RIGHT] = 1.0 # not used
    p_ATTACH[h,a,LEFT] = 1.0 # not used
    p_ATTACH[h,a,RIGHT] = 1.0 # not used
    p_ATTACH[h,h,LEFT] = 1.0 # not used
    p_ATTACH[h,h,RIGHT] = 1.0 # not used
    p_ORDER[(GOR, h)] = 1.0
    p_ORDER[(GOL, h)] = 0.0
    p_ORDER[(GOR, a)] = 1.0
    p_ORDER[(GOL, a)] = 0.0
    g = DMV_Grammar({h:'h',a:'a'}, {'h':h,'a':a}, p_ROOT, p_STOP, p_ATTACH, p_ORDER)
    # these probabilities are impossible so add them manually:
    g.p_GO_AT[a,a,LEFT,NON] = 0.4 # Lattach
    g.p_GO_AT[a,a,LEFT,ADJ] = 0.6 # Lattach
    g.p_GO_AT[h,a,LEFT,NON] = 0.2 # Lattach to h
    g.p_GO_AT[h,a,LEFT,ADJ] = 0.1 # Lattach to h
    g.p_GO_AT[a,a,RIGHT,NON] = 1.0 # Rattach
    g.p_GO_AT[a,a,RIGHT,ADJ] = 1.0 # Rattach
    g.p_GO_AT[h,a,RIGHT,NON] = 1.0 # Rattach to h
    g.p_GO_AT[h,a,RIGHT,ADJ] = 1.0 # Rattach to h
    g.p_GO_AT[h,h,LEFT,NON] = 0.2 # Lattach
    g.p_GO_AT[h,h,LEFT,ADJ] = 0.1 # Lattach
    g.p_GO_AT[a,h,LEFT,NON] = 0.4 # Lattach to a
    g.p_GO_AT[a,h,LEFT,ADJ] = 0.6 # Lattach to a
    g.p_GO_AT[h,h,RIGHT,NON] = 1.0 # Rattach
    g.p_GO_AT[h,h,RIGHT,ADJ] = 1.0 # Rattach
    g.p_GO_AT[a,h,RIGHT,NON] = 1.0 # Rattach to a
    g.p_GO_AT[a,h,RIGHT,ADJ] = 1.0 # Rattach to a
    return g

def testgrammar_h():
    h = 0
    p_ROOT, p_STOP, p_ATTACH, p_ORDER = {},{},{},{}
    p_ROOT[h] = 1.0
    p_STOP[h,LEFT,NON] = 1.0
    p_STOP[h,LEFT,ADJ] = 1.0
    p_STOP[h,RIGHT,NON] = 0.4
    p_STOP[h,RIGHT,ADJ] = 0.3
    p_ATTACH[h,h,LEFT] = 1.0 # not used
    p_ATTACH[h,h,RIGHT] = 1.0 # not used
    p_ORDER[(GOR, h)] = 1.0
    p_ORDER[(GOL, h)] = 0.0
    g = DMV_Grammar({h:'h'}, {'h':h}, p_ROOT, p_STOP, p_ATTACH, p_ORDER)
    g.p_GO_AT[h,h,LEFT,NON] = 0.6 # these probabilities are impossible
    g.p_GO_AT[h,h,LEFT,ADJ] = 0.7 # so add them manually...
    g.p_GO_AT[h,h,RIGHT,NON] = 1.0
    g.p_GO_AT[h,h,RIGHT,ADJ] = 1.0
    return g

def testreestimation_h():
    DEBUG.add('REEST')
    g = testgrammar_h()
    reestimate(g,['h h h'.split()])

def test(wanted, got):
    if not wanted == got:
        raise Warning, "Regression! Should be %s: %s" % (wanted, got)

def regression_tests():
    testmpp_regression(make_mpptree(testgrammar(), testcorpus[2]),4)
    h = 0

    test("0.120",
         "%.3f" % inner(0, 2, (SEAL,h), 0, testgrammar_h(), 'h h'.split(),{}))
    test("0.063",
         "%.3f" % inner(0, 2, (SEAL,h), 1, testgrammar_h(), 'h h'.split(),{}))

    test("0.1092",
         "%.4f" % inner(0, 3, (SEAL,0), 0, testgrammar_h(), 'h h h'.split(),{}))
    test("0.0252",
         "%.4f" % inner(0, 3, (SEAL,0), 1, testgrammar_h(), 'h h h'.split(),{}))
    test("0.0498",
         "%.4f" % inner(0, 3, (SEAL,h), 2, testgrammar_h(), 'h h h'.split(),{}))

    test("0.58" ,
         "%.2f" % outer(1, 3, (RGOL,h), 2, testgrammar_h(),'h h h'.split(),{},{}))
    test("0.61" , # ftw? can't be right... there's an 0.4 shared between these two...
         "%.2f" % outer(1, 3, (RGOL,h), 1, testgrammar_h(),'h h h'.split(),{},{}))

    test("0.00" ,
         "%.2f" % outer(1, 3, (RGOL,h), 0, testgrammar_h(),'h h h'.split(),{},{}))
    test("0.00" ,
         "%.2f" % outer(1, 3, (RGOL,h), 3, testgrammar_h(),'h h h'.split(),{},{}))

    test("0.1089" ,
         "%.4f" % outer(0, 1, (GOR,h), 0,testgrammar_a(),'h a'.split(),{},{}))
    test("0.3600" ,
         "%.4f" % outer(0, 2, (GOR,h), 0,testgrammar_a(),'h a'.split(),{},{}))
    test("0.0000" ,
         "%.4f" % outer(0, 3, (GOR,h), 0,testgrammar_a(),'h a'.split(),{},{}))

    # todo: add more of these tests...

if __name__ == "__main__":
    DEBUG.clear()

#     import profile
#     profile.run('testreestimation()')

#     import timeit
#     print timeit.Timer("loc_h_dmv.testreestimation()",'''import loc_h_dmv
# reload(loc_h_dmv)''').timeit(1)

    regression_tests()

#     print "mpp-test:"
#     import pprint
#     for s in testcorpus:
#         print "sent:%s\nparse:set(\n%s)"%(s,pprint.pformat(list(mpp(testgrammar(), s)),
#                                                            width=40))

#     import pprint
#     pprint.pprint( testreestimation())

def testIO():
    g = testgrammar()
    inners = [(sent, inner_sent(g, sent, {})) for sent in testcorpus]
    return inners