# -*- encoding: binary -*-

# Copyright (c) 2005 Zed A. Shaw
# You can redistribute it and/or modify it under the same terms as Ruby.

# Additional work donated by contributors.  See http://mongrel.rubyforge.org/attributions.html
# for more information.

require 'test/test_helper'

# these are likely pulled in by test_helper already; required here so this
# file stands alone:
require 'digest/sha1' # Digest::SHA1 is used by rand_data below
require 'uri'         # URI::REGEXP is used by test_absolute_uri_uri_parse

include Unicorn
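
# All of these tests drive HttpParser#headers(req, buf) directly.  The
# contract exercised below: headers returns the env hash (req) once a
# complete request head has been parsed, returns nil while more data is
# still needed, and trims the parsed bytes off the front of buf in place,
# leaving any body bytes behind.  A minimal sketch of how a caller might
# drive it (hypothetical "sock" object, not part of this test suite):
#
#   parser, env, buf = HttpParser.new, {}, ""
#   buf << sock.readpartial(16384) until parser.headers(env, buf)
#   # env now holds the parsed request; buf holds any leftover body bytes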

class HttpParserTest < Test::Unit::TestCase

  def test_parse_simple
    parser = HttpParser.new
    req = {}
    http = "GET / HTTP/1.1\r\n\r\n"
    assert_equal req, parser.headers(req, http)
    assert_equal '', http

    assert_equal 'HTTP/1.1', req['SERVER_PROTOCOL']
    assert_equal '/', req['REQUEST_PATH']
    assert_equal 'HTTP/1.1', req['HTTP_VERSION']
    assert_equal '/', req['REQUEST_URI']
    assert_equal 'GET', req['REQUEST_METHOD']
    assert_nil req['FRAGMENT']
    assert_equal '', req['QUERY_STRING']

    assert parser.keepalive?
    parser.reset
    req.clear

    http = "G"
    assert_nil parser.headers(req, http)
    assert_equal "G", http
    assert req.empty?

    # try parsing again to ensure we were reset correctly
    http = "GET /hello-world HTTP/1.1\r\n\r\n"
    assert parser.headers(req, http)

    assert_equal 'HTTP/1.1', req['SERVER_PROTOCOL']
    assert_equal '/hello-world', req['REQUEST_PATH']
    assert_equal 'HTTP/1.1', req['HTTP_VERSION']
    assert_equal '/hello-world', req['REQUEST_URI']
    assert_equal 'GET', req['REQUEST_METHOD']
    assert_nil req['FRAGMENT']
    assert_equal '', req['QUERY_STRING']
    assert_equal '', http
    assert parser.keepalive?
  end
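
  # keepalive? behavior pinned down by the next four tests: HTTP/1.1
  # GET/HEAD requests default to keep-alive, "Connection: close" turns it
  # off, a non-idempotent method (POST) does not get it, and an HTTP/1.0
  # request gets it when the client explicitly sends
  # "Connection: keep-alive"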

  def test_connection_close_no_ka
    parser = HttpParser.new
    req = {}
    tmp = "GET / HTTP/1.1\r\nConnection: close\r\n\r\n"
    assert_equal req.object_id, parser.headers(req, tmp).object_id
    assert_equal "GET", req['REQUEST_METHOD']
    assert ! parser.keepalive?
  end

  def test_connection_keep_alive_ka
    parser = HttpParser.new
    req = {}
    tmp = "HEAD / HTTP/1.1\r\nConnection: keep-alive\r\n\r\n"
    assert_equal req.object_id, parser.headers(req, tmp).object_id
    assert parser.keepalive?
  end

  def test_connection_keep_alive_ka_bad_method
    parser = HttpParser.new
    req = {}
    tmp = "POST / HTTP/1.1\r\nConnection: keep-alive\r\n\r\n"
    assert_equal req.object_id, parser.headers(req, tmp).object_id
    assert ! parser.keepalive?
  end

  def test_connection_keep_alive_ka_bad_version
    parser = HttpParser.new
    req = {}
    tmp = "GET / HTTP/1.0\r\nConnection: keep-alive\r\n\r\n"
    assert_equal req.object_id, parser.headers(req, tmp).object_id
    assert parser.keepalive?
  end

  def test_parse_server_host_default_port
    parser = HttpParser.new
    req = {}
    tmp = "GET / HTTP/1.1\r\nHost: foo\r\n\r\n"
    assert_equal req, parser.headers(req, tmp)
    assert_equal 'foo', req['SERVER_NAME']
    assert_equal '80', req['SERVER_PORT']
    assert_equal '', tmp
    assert parser.keepalive?
  end

  def test_parse_server_host_alt_port
    parser = HttpParser.new
    req = {}
    tmp = "GET / HTTP/1.1\r\nHost: foo:999\r\n\r\n"
    assert_equal req, parser.headers(req, tmp)
    assert_equal 'foo', req['SERVER_NAME']
    assert_equal '999', req['SERVER_PORT']
    assert_equal '', tmp
    assert parser.keepalive?
  end

  def test_parse_server_host_empty_port
    parser = HttpParser.new
    req = {}
    tmp = "GET / HTTP/1.1\r\nHost: foo:\r\n\r\n"
    assert_equal req, parser.headers(req, tmp)
    assert_equal 'foo', req['SERVER_NAME']
    assert_equal '80', req['SERVER_PORT']
    assert_equal '', tmp
    assert parser.keepalive?
  end

  def test_parse_server_host_xfp_https
    parser = HttpParser.new
    req = {}
    tmp = "GET / HTTP/1.1\r\nHost: foo:\r\n" \
          "X-Forwarded-Proto: https\r\n\r\n"
    assert_equal req, parser.headers(req, tmp)
    assert_equal 'foo', req['SERVER_NAME']
    assert_equal '443', req['SERVER_PORT']
    assert_equal '', tmp
    assert parser.keepalive?
  end

  def test_parse_strange_headers
    parser = HttpParser.new
    req = {}
    should_be_good = "GET / HTTP/1.1\r\naaaaaaaaaaaaa:++++++++++\r\n\r\n"
    assert_equal req, parser.headers(req, should_be_good)
    assert_equal '', should_be_good
    assert parser.keepalive?
  end

  # legacy test case from Mongrel that we never supported before...
  # I still consider Pound irrelevant, but unfortunately stupid clients
  # that send extremely big headers do exist and they've managed to find
  # Unicorn...
  def test_nasty_pound_header
    parser = HttpParser.new
    nasty_pound_header = "GET / HTTP/1.1\r\nX-SSL-Bullshit:   -----BEGIN CERTIFICATE-----\r\n\tMIIFbTCCBFWgAwIBAgICH4cwDQYJKoZIhvcNAQEFBQAwcDELMAkGA1UEBhMCVUsx\r\n\tETAPBgNVBAoTCGVTY2llbmNlMRIwEAYDVQQLEwlBdXRob3JpdHkxCzAJBgNVBAMT\r\n\tAkNBMS0wKwYJKoZIhvcNAQkBFh5jYS1vcGVyYXRvckBncmlkLXN1cHBvcnQuYWMu\r\n\tdWswHhcNMDYwNzI3MTQxMzI4WhcNMDcwNzI3MTQxMzI4WjBbMQswCQYDVQQGEwJV\r\n\tSzERMA8GA1UEChMIZVNjaWVuY2UxEzARBgNVBAsTCk1hbmNoZXN0ZXIxCzAJBgNV\r\n\tBAcTmrsogriqMWLAk1DMRcwFQYDVQQDEw5taWNoYWVsIHBhcmQYJKoZIhvcNAQEB\r\n\tBQADggEPADCCAQoCggEBANPEQBgl1IaKdSS1TbhF3hEXSl72G9J+WC/1R64fAcEF\r\n\tW51rEyFYiIeZGx/BVzwXbeBoNUK41OK65sxGuflMo5gLflbwJtHBRIEKAfVVp3YR\r\n\tgW7cMA/s/XKgL1GEC7rQw8lIZT8RApukCGqOVHSi/F1SiFlPDxuDfmdiNzL31+sL\r\n\t0iwHDdNkGjy5pyBSB8Y79dsSJtCW/iaLB0/n8Sj7HgvvZJ7x0fr+RQjYOUUfrePP\r\n\tu2MSpFyf+9BbC/aXgaZuiCvSR+8Snv3xApQY+fULK/xY8h8Ua51iXoQ5jrgu2SqR\r\n\twgA7BUi3G8LFzMBl8FRCDYGUDy7M6QaHXx1ZWIPWNKsCAwEAAaOCAiQwggIgMAwG\r\n\tA1UdEwEB/wQCMAAwEQYJYIZIAYb4QgEBBAQDAgWgMA4GA1UdDwEB/wQEAwID6DAs\r\n\tBglghkgBhvhCAQ0EHxYdVUsgZS1TY2llbmNlIFVzZXIgQ2VydGlmaWNhdGUwHQYD\r\n\tVR0OBBYEFDTt/sf9PeMaZDHkUIldrDYMNTBZMIGaBgNVHSMEgZIwgY+AFAI4qxGj\r\n\tloCLDdMVKwiljjDastqooXSkcjBwMQswCQYDVQQGEwJVSzERMA8GA1UEChMIZVNj\r\n\taWVuY2UxEjAQBgNVBAsTCUF1dGhvcml0eTELMAkGA1UEAxMCQ0ExLTArBgkqhkiG\r\n\t9w0BCQEWHmNhLW9wZXJhdG9yQGdyaWQtc3VwcG9ydC5hYy51a4IBADApBgNVHRIE\r\n\tIjAggR5jYS1vcGVyYXRvckBncmlkLXN1cHBvcnQuYWMudWswGQYDVR0gBBIwEDAO\r\n\tBgwrBgEEAdkvAQEBAQYwPQYJYIZIAYb4QgEEBDAWLmh0dHA6Ly9jYS5ncmlkLXN1\r\n\tcHBvcnQuYWMudmT4sopwqlBWsvcHViL2NybC9jYWNybC5jcmwwPQYJYIZIAYb4QgEDBDAWLmh0\r\n\tdHA6Ly9jYS5ncmlkLXN1cHBvcnQuYWMudWsvcHViL2NybC9jYWNybC5jcmwwPwYD\r\n\tVR0fBDgwNjA0oDKgMIYuaHR0cDovL2NhLmdyaWQt5hYy51ay9wdWIv\r\n\tY3JsL2NhY3JsLmNybDANBgkqhkiG9w0BAQUFAAOCAQEAS/U4iiooBENGW/Hwmmd3\r\n\tXCy6Zrt08YjKCzGNjorT98g8uGsqYjSxv/hmi0qlnlHs+k/3Iobc3LjS5AMYr5L8\r\n\tUO7OSkgFFlLHQyC9JzPfmLCAugvzEbyv4Olnsr8hbxF1MbKZoQxUZtMVu29wjfXk\r\n\thTeApBv7eaKCWpSp7MCbvgzm74izKhu3vlDk9w6qVrxePfGgpKPqfHiOoGhFnbTK\r\n\twTC6o2xq5y0qZ03JonF7OJspEd3I5zKY3E+ov7/ZhW6DqT8UFvsAdjvQbXyhV8Eu\r\n\tYhixw1aKEPzNjNowuIseVogKOLXxWI5vAi5HgXdS0/ES5gDGsABo4fqovUKlgop3\r\n\tRA==\r\n\t-----END CERTIFICATE-----\r\n\r\n"
    req = {}
    buf = nasty_pound_header.dup

    assert nasty_pound_header =~ /(-----BEGIN .*--END CERTIFICATE-----)/m
    expect = $1.dup
    expect.gsub!(/\r\n\t/, ' ')
    assert_equal req, parser.headers(req, buf)
    assert_equal '', buf
    assert_equal expect, req['HTTP_X_SSL_BULLSHIT']
  end
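
  # header continuation lines (a leading tab or space) get folded into the
  # previous header's value, joined by a single space; the gsub! above and
  # the X-ASDF tests below both rely on that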

  def test_continuation_eats_leading_spaces
    parser = HttpParser.new
    header = "GET / HTTP/1.1\r\n" \
             "X-ASDF:      \r\n" \
             "\t\r\n" \
             "    \r\n" \
             "  ASDF\r\n\r\n"
    req = {}
    assert_equal req, parser.headers(req, header)
    assert_equal '', header
    assert_equal 'ASDF', req['HTTP_X_ASDF']
  end

  def test_continuation_eats_scattered_leading_spaces
    parser = HttpParser.new
    header = "GET / HTTP/1.1\r\n" \
             "X-ASDF:   hi\r\n" \
             "    y\r\n" \
             "\t\r\n" \
             "       x\r\n" \
             "  ASDF\r\n\r\n"
    req = {}
    assert_equal req, parser.headers(req, header)
    assert_equal '', header
    assert_equal 'hi y x ASDF', req['HTTP_X_ASDF']
  end

  def test_continuation_with_absolute_uri_and_ignored_host_header
    parser = HttpParser.new
    header = "GET http://example.com/ HTTP/1.1\r\n" \
             "Host: \r\n" \
             "    YHBT.net\r\n" \
             "\r\n"
    req = {}
    assert_equal req, parser.headers(req, header)
    assert_equal 'example.com', req['HTTP_HOST']
  end

  # this may seem to be testing more of an implementation detail, but
  # it also helps ensure we're safe in the presence of multiple parsers
  # in case we ever go multithreaded/evented...
  def test_resumable_continuations
    nr = 1000
    req = {}
    header = "GET / HTTP/1.1\r\n" \
             "X-ASDF:      \r\n" \
             "  hello\r\n"
    tmp = []
    nr.times { |i|
      parser = HttpParser.new
      assert parser.headers(req, "#{header} #{i}\r\n").nil?
      asdf = req['HTTP_X_ASDF']
      assert_equal "hello #{i}", asdf
      tmp << [ parser, asdf ]
      req.clear
    }
    tmp.each_with_index { |(parser, asdf), i|
      assert_equal req, parser.headers(req, "#{header} #{i}\r\n .\r\n\r\n")
      assert_equal "hello #{i} .", asdf
    }
  end

  def test_invalid_continuation
    parser = HttpParser.new
    header = "GET / HTTP/1.1\r\n" \
             "    y\r\n" \
             "Host: hello\r\n" \
             "\r\n"
    req = {}
    assert_raises(HttpParserError) { parser.headers(req, header) }
  end

  def test_parse_ie6_urls
    %w(/some/random/path"
       /some/random/path>
       /some/random/path<
       /we/love/you/ie6?q=<"">
       /url?<="&>="
       /mal"formed"?
    ).each do |path|
      parser = HttpParser.new
      req = {}
      sorta_safe = %(GET #{path} HTTP/1.1\r\n\r\n)
      assert_equal req, parser.headers(req, sorta_safe)
      assert_equal path, req['REQUEST_URI']
      assert_equal '', sorta_safe
      assert parser.keepalive?
    end
  end

  def test_parse_error
    parser = HttpParser.new
    req = {}
    bad_http = "GET / SsUTF/1.1"

    assert_raises(HttpParserError) { parser.headers(req, bad_http) }

    # make sure we can recover
    parser.reset
    req.clear
    assert_equal req, parser.headers(req, "GET / HTTP/1.0\r\n\r\n")
    assert ! parser.keepalive?
  end

  def test_piecemeal
    parser = HttpParser.new
    req = {}
    http = "GET"
    assert_nil parser.headers(req, http)
    assert_nil parser.headers(req, http)
    assert_nil parser.headers(req, http << " / HTTP/1.0")
    assert_equal '/', req['REQUEST_PATH']
    assert_equal '/', req['REQUEST_URI']
    assert_equal 'GET', req['REQUEST_METHOD']
    assert_nil parser.headers(req, http << "\r\n")
    assert_equal 'HTTP/1.0', req['HTTP_VERSION']
    assert_nil parser.headers(req, http << "\r")
    assert_equal req, parser.headers(req, http << "\n")
    assert_equal 'HTTP/1.0', req['SERVER_PROTOCOL']
    assert_nil req['FRAGMENT']
    assert_equal '', req['QUERY_STRING']
    assert_equal "", http
    assert ! parser.keepalive?
  end

  # not common, but underscores do appear in practice
  def test_absolute_uri_underscores
    parser = HttpParser.new
    req = {}
    http = "GET http://under_score.example.com/foo?q=bar HTTP/1.0\r\n\r\n"
    assert_equal req, parser.headers(req, http)
    assert_equal 'http', req['rack.url_scheme']
    assert_equal '/foo?q=bar', req['REQUEST_URI']
    assert_equal '/foo', req['REQUEST_PATH']
    assert_equal 'q=bar', req['QUERY_STRING']

    assert_equal 'under_score.example.com', req['HTTP_HOST']
    assert_equal 'under_score.example.com', req['SERVER_NAME']
    assert_equal '80', req['SERVER_PORT']
    assert_equal "", http
    assert ! parser.keepalive?
  end

  # some dumb clients put userinfo in the URI because they're stupid
  def test_absolute_uri_w_user
    parser = HttpParser.new
    req = {}
    http = "GET http://user%20space@example.com/foo?q=bar HTTP/1.0\r\n\r\n"
    assert_equal req, parser.headers(req, http)
    assert_equal 'http', req['rack.url_scheme']
    assert_equal '/foo?q=bar', req['REQUEST_URI']
    assert_equal '/foo', req['REQUEST_PATH']
    assert_equal 'q=bar', req['QUERY_STRING']

    assert_equal 'example.com', req['HTTP_HOST']
    assert_equal 'example.com', req['SERVER_NAME']
    assert_equal '80', req['SERVER_PORT']
    assert_equal "", http
    assert ! parser.keepalive?
  end

  # since Mongrel supported anything URI.parse supported, we're stuck
  # supporting everything URI.parse supports
  def test_absolute_uri_uri_parse
    "#{URI::REGEXP::PATTERN::UNRESERVED};:&=+$,".split(//).each do |char|
      parser = HttpParser.new
      req = {}
      http = "GET http://#{char}@example.com/ HTTP/1.0\r\n\r\n"
      assert_equal req, parser.headers(req, http)
      assert_equal 'http', req['rack.url_scheme']
      assert_equal '/', req['REQUEST_URI']
      assert_equal '/', req['REQUEST_PATH']
      assert_equal '', req['QUERY_STRING']

      assert_equal 'example.com', req['HTTP_HOST']
      assert_equal 'example.com', req['SERVER_NAME']
      assert_equal '80', req['SERVER_PORT']
      assert_equal "", http
      assert ! parser.keepalive?
    end
  end

  def test_absolute_uri
    parser = HttpParser.new
    req = {}
    http = "GET http://example.com/foo?q=bar HTTP/1.0\r\n\r\n"
    assert_equal req, parser.headers(req, http)
    assert_equal 'http', req['rack.url_scheme']
    assert_equal '/foo?q=bar', req['REQUEST_URI']
    assert_equal '/foo', req['REQUEST_PATH']
    assert_equal 'q=bar', req['QUERY_STRING']

    assert_equal 'example.com', req['HTTP_HOST']
    assert_equal 'example.com', req['SERVER_NAME']
    assert_equal '80', req['SERVER_PORT']
    assert_equal "", http
    assert ! parser.keepalive?
  end

  # X-Forwarded-Proto is not in rfc2616, absolute URIs are, however...
  def test_absolute_uri_https
    parser = HttpParser.new
    req = {}
    http = "GET https://example.com/foo?q=bar HTTP/1.1\r\n" \
           "X-Forwarded-Proto: http\r\n\r\n"
    assert_equal req, parser.headers(req, http)
    assert_equal 'https', req['rack.url_scheme']
    assert_equal '/foo?q=bar', req['REQUEST_URI']
    assert_equal '/foo', req['REQUEST_PATH']
    assert_equal 'q=bar', req['QUERY_STRING']

    assert_equal 'example.com', req['HTTP_HOST']
    assert_equal 'example.com', req['SERVER_NAME']
    assert_equal '443', req['SERVER_PORT']
    assert_equal "", http
    assert parser.keepalive?
  end

  # Host: header should be ignored for absolute URIs
  def test_absolute_uri_with_port
    parser = HttpParser.new
    req = {}
    http = "GET http://example.com:8080/foo?q=bar HTTP/1.2\r\n" \
           "Host: bad.example.com\r\n\r\n"
    assert_equal req, parser.headers(req, http)
    assert_equal 'http', req['rack.url_scheme']
    assert_equal '/foo?q=bar', req['REQUEST_URI']
    assert_equal '/foo', req['REQUEST_PATH']
    assert_equal 'q=bar', req['QUERY_STRING']

    assert_equal 'example.com:8080', req['HTTP_HOST']
    assert_equal 'example.com', req['SERVER_NAME']
    assert_equal '8080', req['SERVER_PORT']
    assert_equal "", http
    assert ! parser.keepalive? # TODO: read HTTP/1.2 when it's final
  end

  def test_absolute_uri_with_empty_port
    parser = HttpParser.new
    req = {}
    http = "GET https://example.com:/foo?q=bar HTTP/1.1\r\n" \
           "Host: bad.example.com\r\n\r\n"
    assert_equal req, parser.headers(req, http)
    assert_equal 'https', req['rack.url_scheme']
    assert_equal '/foo?q=bar', req['REQUEST_URI']
    assert_equal '/foo', req['REQUEST_PATH']
    assert_equal 'q=bar', req['QUERY_STRING']

    assert_equal 'example.com:', req['HTTP_HOST']
    assert_equal 'example.com', req['SERVER_NAME']
    assert_equal '443', req['SERVER_PORT']
    assert_equal "", http
    assert parser.keepalive? # TODO: read HTTP/1.2 when it's final
  end

  def test_put_body_oneshot
    parser = HttpParser.new
    req = {}
    http = "PUT / HTTP/1.0\r\nContent-Length: 5\r\n\r\nabcde"
    assert_equal req, parser.headers(req, http)
    assert_equal '/', req['REQUEST_PATH']
    assert_equal '/', req['REQUEST_URI']
    assert_equal 'PUT', req['REQUEST_METHOD']
    assert_equal 'HTTP/1.0', req['HTTP_VERSION']
    assert_equal 'HTTP/1.0', req['SERVER_PROTOCOL']
    assert_equal "abcde", http
    assert ! parser.keepalive? # TODO: read HTTP/1.2 when it's final
  end

  def test_put_body_later
    parser = HttpParser.new
    req = {}
    http = "PUT /l HTTP/1.0\r\nContent-Length: 5\r\n\r\n"
    assert_equal req, parser.headers(req, http)
    assert_equal '/l', req['REQUEST_PATH']
    assert_equal '/l', req['REQUEST_URI']
    assert_equal 'PUT', req['REQUEST_METHOD']
    assert_equal 'HTTP/1.0', req['HTTP_VERSION']
    assert_equal 'HTTP/1.0', req['SERVER_PROTOCOL']
    assert_equal "", http
    assert ! parser.keepalive? # TODO: read HTTP/1.2 when it's final
  end

  def test_unknown_methods
    %w(GETT HEADR XGET XHEAD).each { |m|
      parser = HttpParser.new
      req = {}
      s = "#{m} /forums/1/topics/2375?page=1#posts-17408 HTTP/1.1\r\n\r\n"
      ok = false
      assert_nothing_raised do
        ok = parser.headers(req, s)
      end
      assert ok
      assert_equal '/forums/1/topics/2375?page=1', req['REQUEST_URI']
      assert_equal 'posts-17408', req['FRAGMENT']
      assert_equal 'page=1', req['QUERY_STRING']
      assert_equal "", s
      assert_equal m, req['REQUEST_METHOD']
      assert ! parser.keepalive? # TODO: read HTTP/1.2 when it's final
    }
  end

  def test_fragment_in_uri
    parser = HttpParser.new
    req = {}
    get = "GET /forums/1/topics/2375?page=1#posts-17408 HTTP/1.1\r\n\r\n"
    ok = false
    assert_nothing_raised do
      ok = parser.headers(req, get)
    end
    assert ok
    assert_equal '/forums/1/topics/2375?page=1', req['REQUEST_URI']
    assert_equal 'posts-17408', req['FRAGMENT']
    assert_equal 'page=1', req['QUERY_STRING']
    assert_equal '', get
    assert parser.keepalive?
  end

  # lame random garbage maker
  def rand_data(min, max, readable = true)
    count = min + ((rand(max) + 1) * 10).to_i
    res = count.to_s + "/"

    if readable
      res << Digest::SHA1.hexdigest(rand(count * 100).to_s) * (count / 40)
    else
      res << Digest::SHA1.digest(rand(count * 100).to_s) * (count / 20)
    end

    return res
  end

  def test_horrible_queries
    parser = HttpParser.new

    # check that large header names are caught
    10.times do |c|
      get = "GET /#{rand_data(10,120)} HTTP/1.1\r\nX-#{rand_data(1024, 1024+(c*1024))}: Test\r\n\r\n"
      assert_raises Unicorn::HttpParserError do
        parser.headers({}, get)
        parser.reset
      end
    end

    # check that large mangled field values are caught
    10.times do |c|
      get = "GET /#{rand_data(10,120)} HTTP/1.1\r\nX-Test: #{rand_data(1024, 1024+(c*1024), false)}\r\n\r\n"
      assert_raises Unicorn::HttpParserError do
        parser.headers({}, get)
        parser.reset
      end
    end

    # check that an oversized header block is rejected, too
    get = "GET /#{rand_data(10,120)} HTTP/1.1\r\n"
    get << "X-Test: test\r\n" * (80 * 1024)
    assert_raises Unicorn::HttpParserError do
      parser.headers({}, get)
      parser.reset
    end

    # finally, check that random garbage gets blocked every time
    10.times do |c|
      get = "GET #{rand_data(1024, 1024+(c*1024), false)} #{rand_data(1024, 1024+(c*1024), false)}\r\n\r\n"
      assert_raises Unicorn::HttpParserError do
        parser.headers({}, get)
        parser.reset
      end
    end
  end