1 # -*- encoding: binary -*-
3 # Copyright (c) 2005 Zed A. Shaw
4 # You can redistribute it and/or modify it under the same terms as Ruby.
6 # Additional work donated by contributors. See http://mongrel.rubyforge.org/attributions.html
7 # for more information.
9 require 'test/test_helper'
13 class HttpParserTest < Test::Unit::TestCase
16 parser = HttpParser.new
18 http = "GET / HTTP/1.1\r\n\r\n"
19 assert_equal req, parser.headers(req, http)
22 assert_equal 'HTTP/1.1', req['SERVER_PROTOCOL']
23 assert_equal '/', req['REQUEST_PATH']
24 assert_equal 'HTTP/1.1', req['HTTP_VERSION']
25 assert_equal '/', req['REQUEST_URI']
26 assert_equal 'GET', req['REQUEST_METHOD']
27 assert_nil req['FRAGMENT']
28 assert_equal '', req['QUERY_STRING']
30 assert parser.keepalive?
35 assert_nil parser.headers(req, http)
36 assert_equal "G", http
39 # try parsing again to ensure we were reset correctly
40 http = "GET /hello-world HTTP/1.1\r\n\r\n"
41 assert parser.headers(req, http)
43 assert_equal 'HTTP/1.1', req['SERVER_PROTOCOL']
44 assert_equal '/hello-world', req['REQUEST_PATH']
45 assert_equal 'HTTP/1.1', req['HTTP_VERSION']
46 assert_equal '/hello-world', req['REQUEST_URI']
47 assert_equal 'GET', req['REQUEST_METHOD']
48 assert_nil req['FRAGMENT']
49 assert_equal '', req['QUERY_STRING']
51 assert parser.keepalive?
55 parser = HttpParser.new
57 tmp = "GET / HTTP/1.1\r\nHost:\tfoo.bar\r\n\r\n"
58 assert_equal req.object_id, parser.headers(req, tmp).object_id
59 assert_equal "foo.bar", req['HTTP_HOST']
# "Connection: close" must defeat the HTTP/1.1 default of keep-alive.
def test_connection_close_no_ka
  parser = HttpParser.new
  req = {}
  tmp = "GET / HTTP/1.1\r\nConnection: close\r\n\r\n"
  assert_equal req.object_id, parser.headers(req, tmp).object_id
  assert_equal "GET", req['REQUEST_METHOD']
  assert ! parser.keepalive?
end
# Explicit "Connection: keep-alive" on a bodiless HEAD request keeps the
# connection alive.
def test_connection_keep_alive_ka
  parser = HttpParser.new
  req = {}
  tmp = "HEAD / HTTP/1.1\r\nConnection: keep-alive\r\n\r\n"
  assert_equal req.object_id, parser.headers(req, tmp).object_id
  assert parser.keepalive?
end
# A POST with no body (no Content-Length) can still be kept alive when the
# client asks for it.
def test_connection_keep_alive_no_body
  parser = HttpParser.new
  req = {}
  tmp = "POST / HTTP/1.1\r\nConnection: keep-alive\r\n\r\n"
  assert_equal req.object_id, parser.headers(req, tmp).object_id
  assert parser.keepalive?
end
# Same as above, but with an explicit empty body (Content-Length: 0).
def test_connection_keep_alive_no_body_empty
  parser = HttpParser.new
  req = {}
  tmp = "POST / HTTP/1.1\r\n" \
        "Content-Length: 0\r\n" \
        "Connection: keep-alive\r\n\r\n"
  assert_equal req.object_id, parser.headers(req, tmp).object_id
  assert parser.keepalive?
end
# HTTP/1.0 defaults to close, but an explicit "Connection: keep-alive"
# header overrides that default.
def test_connection_keep_alive_ka_bad_version
  parser = HttpParser.new
  req = {}
  tmp = "GET / HTTP/1.0\r\nConnection: keep-alive\r\n\r\n"
  assert_equal req.object_id, parser.headers(req, tmp).object_id
  assert parser.keepalive?
end
# A Host header without a port yields SERVER_PORT '80' (http default).
def test_parse_server_host_default_port
  parser = HttpParser.new
  req = {}
  tmp = "GET / HTTP/1.1\r\nHost: foo\r\n\r\n"
  assert_equal req, parser.headers(req, tmp)
  assert_equal 'foo', req['SERVER_NAME']
  assert_equal '80', req['SERVER_PORT']
  assert parser.keepalive?
end
# An explicit port in the Host header is split out into SERVER_PORT.
def test_parse_server_host_alt_port
  parser = HttpParser.new
  req = {}
  tmp = "GET / HTTP/1.1\r\nHost: foo:999\r\n\r\n"
  assert_equal req, parser.headers(req, tmp)
  assert_equal 'foo', req['SERVER_NAME']
  assert_equal '999', req['SERVER_PORT']
  assert parser.keepalive?
end
# A trailing colon with no port ("Host: foo:") falls back to port 80.
def test_parse_server_host_empty_port
  parser = HttpParser.new
  req = {}
  tmp = "GET / HTTP/1.1\r\nHost: foo:\r\n\r\n"
  assert_equal req, parser.headers(req, tmp)
  assert_equal 'foo', req['SERVER_NAME']
  assert_equal '80', req['SERVER_PORT']
  assert parser.keepalive?
end
# X-Forwarded-Proto: https switches the default port to 443 even when the
# Host header itself has an empty port.
def test_parse_server_host_xfp_https
  parser = HttpParser.new
  req = {}
  tmp = "GET / HTTP/1.1\r\nHost: foo:\r\n" \
        "X-Forwarded-Proto: https\r\n\r\n"
  assert_equal req, parser.headers(req, tmp)
  assert_equal 'foo', req['SERVER_NAME']
  assert_equal '443', req['SERVER_PORT']
  assert parser.keepalive?
end
# With a chained X-Forwarded-Proto list, the first (client-facing) proto
# wins: "https,http" means the original request was https.
def test_parse_xfp_https_chained
  parser = HttpParser.new
  req = {}
  tmp = "GET / HTTP/1.0\r\n" \
        "X-Forwarded-Proto: https,http\r\n\r\n"
  assert_equal req, parser.headers(req, tmp)
  assert_equal '443', req['SERVER_PORT'], req.inspect
  assert_equal 'https', req['rack.url_scheme'], req.inspect
end
# "http,https" (https NOT first) is treated as plain http.
def test_parse_xfp_https_chained_backwards
  parser = HttpParser.new
  req = {}
  tmp = "GET / HTTP/1.0\r\n" \
        "X-Forwarded-Proto: http,https\r\n\r\n"
  assert_equal req, parser.headers(req, tmp)
  assert_equal '80', req['SERVER_PORT'], req.inspect
  assert_equal 'http', req['rack.url_scheme'], req.inspect
end
# Unknown schemes in X-Forwarded-Proto are ignored; defaults stay http/80.
def test_parse_xfp_gopher_is_ignored
  parser = HttpParser.new
  req = {}
  tmp = "GET / HTTP/1.0\r\n" \
        "X-Forwarded-Proto: gopher\r\n\r\n"
  assert_equal req, parser.headers(req, tmp)
  assert_equal '80', req['SERVER_PORT'], req.inspect
  assert_equal 'http', req['rack.url_scheme'], req.inspect
end
# X-Forwarded-Ssl: on implies https (port 443).
def test_parse_x_forwarded_ssl_on
  parser = HttpParser.new
  req = {}
  tmp = "GET / HTTP/1.0\r\n" \
        "X-Forwarded-Ssl: on\r\n\r\n"
  assert_equal req, parser.headers(req, tmp)
  assert_equal '443', req['SERVER_PORT'], req.inspect
  assert_equal 'https', req['rack.url_scheme'], req.inspect
end
# X-Forwarded-Ssl: off keeps the plain-http defaults.
def test_parse_x_forwarded_ssl_off
  parser = HttpParser.new
  req = {}
  tmp = "GET / HTTP/1.0\r\n" \
        "X-Forwarded-Ssl: off\r\n\r\n"
  assert_equal req, parser.headers(req, tmp)
  assert_equal '80', req['SERVER_PORT'], req.inspect
  assert_equal 'http', req['rack.url_scheme'], req.inspect
end
# Unusual-but-legal header names/values must parse and consume the buffer.
def test_parse_strange_headers
  parser = HttpParser.new
  req = {}
  should_be_good = "GET / HTTP/1.1\r\naaaaaaaaaaaaa:++++++++++\r\n\r\n"
  assert_equal req, parser.headers(req, should_be_good)
  assert_equal '', should_be_good
  assert parser.keepalive?
end
214 # legacy test case from Mongrel that we never supported before...
215 # I still consider Pound irrelevant, unfortunately stupid clients that
216 # send extremely big headers do exist and they've managed to find Unicorn...
# A multi-line (continuation-folded) header value, as emitted by the Pound
# load balancer for client certificates, must be unfolded into one value
# with the "\r\n\t" folds collapsed to single spaces.
# NOTE(review): `expect = $1.dup` and the final buffer-consumed assertion
# were lost in the numbered paste and restored here — verify upstream.
def test_nasty_pound_header
  parser = HttpParser.new
  nasty_pound_header = "GET / HTTP/1.1\r\nX-SSL-Bullshit: -----BEGIN CERTIFICATE-----\r\n\tMIIFbTCCBFWgAwIBAgICH4cwDQYJKoZIhvcNAQEFBQAwcDELMAkGA1UEBhMCVUsx\r\n\tETAPBgNVBAoTCGVTY2llbmNlMRIwEAYDVQQLEwlBdXRob3JpdHkxCzAJBgNVBAMT\r\n\tAkNBMS0wKwYJKoZIhvcNAQkBFh5jYS1vcGVyYXRvckBncmlkLXN1cHBvcnQuYWMu\r\n\tdWswHhcNMDYwNzI3MTQxMzI4WhcNMDcwNzI3MTQxMzI4WjBbMQswCQYDVQQGEwJV\r\n\tSzERMA8GA1UEChMIZVNjaWVuY2UxEzARBgNVBAsTCk1hbmNoZXN0ZXIxCzAJBgNV\r\n\tBAcTmrsogriqMWLAk1DMRcwFQYDVQQDEw5taWNoYWVsIHBhcmQYJKoZIhvcNAQEB\r\n\tBQADggEPADCCAQoCggEBANPEQBgl1IaKdSS1TbhF3hEXSl72G9J+WC/1R64fAcEF\r\n\tW51rEyFYiIeZGx/BVzwXbeBoNUK41OK65sxGuflMo5gLflbwJtHBRIEKAfVVp3YR\r\n\tgW7cMA/s/XKgL1GEC7rQw8lIZT8RApukCGqOVHSi/F1SiFlPDxuDfmdiNzL31+sL\r\n\t0iwHDdNkGjy5pyBSB8Y79dsSJtCW/iaLB0/n8Sj7HgvvZJ7x0fr+RQjYOUUfrePP\r\n\tu2MSpFyf+9BbC/aXgaZuiCvSR+8Snv3xApQY+fULK/xY8h8Ua51iXoQ5jrgu2SqR\r\n\twgA7BUi3G8LFzMBl8FRCDYGUDy7M6QaHXx1ZWIPWNKsCAwEAAaOCAiQwggIgMAwG\r\n\tA1UdEwEB/wQCMAAwEQYJYIZIAYb4QgEBBAQDAgWgMA4GA1UdDwEB/wQEAwID6DAs\r\n\tBglghkgBhvhCAQ0EHxYdVUsgZS1TY2llbmNlIFVzZXIgQ2VydGlmaWNhdGUwHQYD\r\n\tVR0OBBYEFDTt/sf9PeMaZDHkUIldrDYMNTBZMIGaBgNVHSMEgZIwgY+AFAI4qxGj\r\n\tloCLDdMVKwiljjDastqooXSkcjBwMQswCQYDVQQGEwJVSzERMA8GA1UEChMIZVNj\r\n\taWVuY2UxEjAQBgNVBAsTCUF1dGhvcml0eTELMAkGA1UEAxMCQ0ExLTArBgkqhkiG\r\n\t9w0BCQEWHmNhLW9wZXJhdG9yQGdyaWQtc3VwcG9ydC5hYy51a4IBADApBgNVHRIE\r\n\tIjAggR5jYS1vcGVyYXRvckBncmlkLXN1cHBvcnQuYWMudWswGQYDVR0gBBIwEDAO\r\n\tBgwrBgEEAdkvAQEBAQYwPQYJYIZIAYb4QgEEBDAWLmh0dHA6Ly9jYS5ncmlkLXN1\r\n\tcHBvcnQuYWMudmT4sopwqlBWsvcHViL2NybC9jYWNybC5jcmwwPQYJYIZIAYb4QgEDBDAWLmh0\r\n\tdHA6Ly9jYS5ncmlkLXN1cHBvcnQuYWMudWsvcHViL2NybC9jYWNybC5jcmwwPwYD\r\n\tVR0fBDgwNjA0oDKgMIYuaHR0cDovL2NhLmdyaWQt5hYy51ay9wdWIv\r\n\tY3JsL2NhY3JsLmNybDANBgkqhkiG9w0BAQUFAAOCAQEAS/U4iiooBENGW/Hwmmd3\r\n\tXCy6Zrt08YjKCzGNjorT98g8uGsqYjSxv/hmi0qlnlHs+k/3Iobc3LjS5AMYr5L8\r\n\tUO7OSkgFFlLHQyC9JzPfmLCAugvzEbyv4Olnsr8hbxF1MbKZoQxUZtMVu29wjfXk\r\n\thTeApBv7eaKCWpSp7MCbvgzm74izKhu3vlDk9w6qVrxePfGgpKPqfHiOoGhFnbTK\r\n\twTC6o2xq5y0qZ03JonF7OJspEd3I5zKY3E+ov7/ZhW6DqT8UFvsAdjvQbXyhV8Eu\r\n\tYhixw1aKEPzNjNowuIseVogKOLXxWI5vAi5HgXdS0/ES5gDGsABo4fqovUKlgop3\r\n\tRA==\r\n\t-----END CERTIFICATE-----\r\n\r\n"
  req = {}
  buf = nasty_pound_header.dup

  assert nasty_pound_header =~ /(-----BEGIN .*--END CERTIFICATE-----)/m
  expect = $1.dup
  expect.gsub!(/\r\n\t/, ' ')
  assert_equal req, parser.headers(req, buf)
  assert_equal '', buf
  assert_equal expect, req['HTTP_X_SSL_BULLSHIT']
end
231 def test_continuation_eats_leading_spaces
232 parser = HttpParser.new
233 header = "GET / HTTP/1.1\r\n" \
239 assert_equal req, parser.headers(req, header)
240 assert_equal '', header
241 assert_equal 'ASDF', req['HTTP_X_ASDF']
244 def test_continuation_eats_scattered_leading_spaces
245 parser = HttpParser.new
246 header = "GET / HTTP/1.1\r\n" \
253 assert_equal req, parser.headers(req, header)
254 assert_equal '', header
255 assert_equal 'hi y x ASDF', req['HTTP_X_ASDF']
258 def test_continuation_with_absolute_uri_and_ignored_host_header
259 parser = HttpParser.new
260 header = "GET http://example.com/ HTTP/1.1\r\n" \
265 assert_equal req, parser.headers(req, header)
266 assert_equal 'example.com', req['HTTP_HOST']
269 # this may seem to be testing more of an implementation detail, but
270 # it also helps ensure we're safe in the presence of multiple parsers
271 # in case we ever go multithreaded/evented...
272 def test_resumable_continuations
275 header = "GET / HTTP/1.1\r\n" \
280 parser = HttpParser.new
281 assert parser.headers(req, "#{header} #{i}\r\n").nil?
282 asdf = req['HTTP_X_ASDF']
283 assert_equal "hello #{i}", asdf
284 tmp << [ parser, asdf ]
287 tmp.each_with_index { |(parser, asdf), i|
288 assert_equal req, parser.headers(req, "#{header} #{i}\r\n .\r\n\r\n")
289 assert_equal "hello #{i} .", asdf
293 def test_invalid_continuation
294 parser = HttpParser.new
295 header = "GET / HTTP/1.1\r\n" \
300 assert_raises(HttpParserError) { parser.headers(req, header) }
303 def test_parse_ie6_urls
304 %w(/some/random/path"
307 /we/love/you/ie6?q=<"">
311 parser = HttpParser.new
313 sorta_safe = %(GET #{path} HTTP/1.1\r\n\r\n)
314 assert_equal req, parser.headers(req, sorta_safe)
315 assert_equal path, req['REQUEST_URI']
316 assert_equal '', sorta_safe
317 assert parser.keepalive?
322 parser = HttpParser.new
324 bad_http = "GET / SsUTF/1.1"
326 assert_raises(HttpParserError) { parser.headers(req, bad_http) }
328 # make sure we can recover
331 assert_equal req, parser.headers(req, "GET / HTTP/1.0\r\n\r\n")
332 assert ! parser.keepalive?
336 parser = HttpParser.new
339 assert_nil parser.headers(req, http)
340 assert_nil parser.headers(req, http)
341 assert_nil parser.headers(req, http << " / HTTP/1.0")
342 assert_equal '/', req['REQUEST_PATH']
343 assert_equal '/', req['REQUEST_URI']
344 assert_equal 'GET', req['REQUEST_METHOD']
345 assert_nil parser.headers(req, http << "\r\n")
346 assert_equal 'HTTP/1.0', req['HTTP_VERSION']
347 assert_nil parser.headers(req, http << "\r")
348 assert_equal req, parser.headers(req, http << "\n")
349 assert_equal 'HTTP/1.0', req['SERVER_PROTOCOL']
350 assert_nil req['FRAGMENT']
351 assert_equal '', req['QUERY_STRING']
352 assert_equal "", http
353 assert ! parser.keepalive?
356 # not common, but underscores do appear in practice
def test_absolute_uri_underscores
  parser = HttpParser.new
  req = {}
  http = "GET http://under_score.example.com/foo?q=bar HTTP/1.0\r\n\r\n"
  assert_equal req, parser.headers(req, http)
  assert_equal 'http', req['rack.url_scheme']
  assert_equal '/foo?q=bar', req['REQUEST_URI']
  assert_equal '/foo', req['REQUEST_PATH']
  assert_equal 'q=bar', req['QUERY_STRING']

  # the host in an absolute URI populates both HTTP_HOST and SERVER_NAME
  assert_equal 'under_score.example.com', req['HTTP_HOST']
  assert_equal 'under_score.example.com', req['SERVER_NAME']
  assert_equal '80', req['SERVER_PORT']
  assert_equal "", http
  assert ! parser.keepalive?
end
374 # some dumb clients add users because they're stupid
def test_absolute_uri_w_user
  parser = HttpParser.new
  req = {}
  http = "GET http://user%20space@example.com/foo?q=bar HTTP/1.0\r\n\r\n"
  assert_equal req, parser.headers(req, http)
  assert_equal 'http', req['rack.url_scheme']
  assert_equal '/foo?q=bar', req['REQUEST_URI']
  assert_equal '/foo', req['REQUEST_PATH']
  assert_equal 'q=bar', req['QUERY_STRING']

  # the userinfo part is stripped from the host
  assert_equal 'example.com', req['HTTP_HOST']
  assert_equal 'example.com', req['SERVER_NAME']
  assert_equal '80', req['SERVER_PORT']
  assert_equal "", http
  assert ! parser.keepalive?
end
392 # since Mongrel supported anything URI.parse supported, we're stuck
393 # supporting everything URI.parse supports
def test_absolute_uri_uri_parse
  # every character URI.parse accepts in userinfo must be accepted here
  "#{URI::REGEXP::PATTERN::UNRESERVED};:&=+$,".split(//).each do |char|
    parser = HttpParser.new
    req = {}
    http = "GET http://#{char}@example.com/ HTTP/1.0\r\n\r\n"
    assert_equal req, parser.headers(req, http)
    assert_equal 'http', req['rack.url_scheme']
    assert_equal '/', req['REQUEST_URI']
    assert_equal '/', req['REQUEST_PATH']
    assert_equal '', req['QUERY_STRING']

    assert_equal 'example.com', req['HTTP_HOST']
    assert_equal 'example.com', req['SERVER_NAME']
    assert_equal '80', req['SERVER_PORT']
    assert_equal "", http
    assert ! parser.keepalive?
  end
end
# plain absolute URI in the request line, no Host header needed
def test_absolute_uri
  parser = HttpParser.new
  req = {}
  http = "GET http://example.com/foo?q=bar HTTP/1.0\r\n\r\n"
  assert_equal req, parser.headers(req, http)
  assert_equal 'http', req['rack.url_scheme']
  assert_equal '/foo?q=bar', req['REQUEST_URI']
  assert_equal '/foo', req['REQUEST_PATH']
  assert_equal 'q=bar', req['QUERY_STRING']

  assert_equal 'example.com', req['HTTP_HOST']
  assert_equal 'example.com', req['SERVER_NAME']
  assert_equal '80', req['SERVER_PORT']
  assert_equal "", http
  assert ! parser.keepalive?
end
430 # X-Forwarded-Proto is not in rfc2616, absolute URIs are, however...
# an https absolute URI beats a conflicting X-Forwarded-Proto header
def test_absolute_uri_https
  parser = HttpParser.new
  req = {}
  http = "GET https://example.com/foo?q=bar HTTP/1.1\r\n" \
         "X-Forwarded-Proto: http\r\n\r\n"
  assert_equal req, parser.headers(req, http)
  assert_equal 'https', req['rack.url_scheme']
  assert_equal '/foo?q=bar', req['REQUEST_URI']
  assert_equal '/foo', req['REQUEST_PATH']
  assert_equal 'q=bar', req['QUERY_STRING']

  assert_equal 'example.com', req['HTTP_HOST']
  assert_equal 'example.com', req['SERVER_NAME']
  assert_equal '443', req['SERVER_PORT']
  assert_equal "", http
  assert parser.keepalive?
end
449 # Host: header should be ignored for absolute URIs
def test_absolute_uri_with_port
  parser = HttpParser.new
  req = {}
  http = "GET http://example.com:8080/foo?q=bar HTTP/1.2\r\n" \
         "Host: bad.example.com\r\n\r\n"
  assert_equal req, parser.headers(req, http)
  assert_equal 'http', req['rack.url_scheme']
  assert_equal '/foo?q=bar', req['REQUEST_URI']
  assert_equal '/foo', req['REQUEST_PATH']
  assert_equal 'q=bar', req['QUERY_STRING']

  # the absolute URI's host:port wins over the Host: header
  assert_equal 'example.com:8080', req['HTTP_HOST']
  assert_equal 'example.com', req['SERVER_NAME']
  assert_equal '8080', req['SERVER_PORT']
  assert_equal "", http
  assert ! parser.keepalive? # TODO: read HTTP/1.2 when it's final
end
# empty port in an absolute https URI falls back to 443
def test_absolute_uri_with_empty_port
  parser = HttpParser.new
  req = {}
  http = "GET https://example.com:/foo?q=bar HTTP/1.1\r\n" \
         "Host: bad.example.com\r\n\r\n"
  assert_equal req, parser.headers(req, http)
  assert_equal 'https', req['rack.url_scheme']
  assert_equal '/foo?q=bar', req['REQUEST_URI']
  assert_equal '/foo', req['REQUEST_PATH']
  assert_equal 'q=bar', req['QUERY_STRING']

  assert_equal 'example.com:', req['HTTP_HOST']
  assert_equal 'example.com', req['SERVER_NAME']
  assert_equal '443', req['SERVER_PORT']
  assert_equal "", http
  assert parser.keepalive? # TODO: read HTTP/1.2 when it's final
end
# NOTE(review): `req = {}` and `uri = URI.parse(url)` restored — they were
# lost in the numbered paste but are required by the assertions below.
def test_absolute_ipv6_uri
  parser = HttpParser.new
  req = {}
  url = "http://[::1]/foo?q=bar"
  http = "GET #{url} HTTP/1.1\r\n" \
         "Host: bad.example.com\r\n\r\n"
  assert_equal req, parser.headers(req, http)
  assert_equal 'http', req['rack.url_scheme']
  assert_equal '/foo?q=bar', req['REQUEST_URI']
  assert_equal '/foo', req['REQUEST_PATH']
  assert_equal 'q=bar', req['QUERY_STRING']

  uri = URI.parse(url)
  assert_equal "[::1]", uri.host,
               "URI.parse changed upstream for #{url}? host=#{uri.host}"
  assert_equal "[::1]", req['HTTP_HOST']
  assert_equal "[::1]", req['SERVER_NAME']
  assert_equal '80', req['SERVER_PORT']
  assert_equal "", http
  assert parser.keepalive? # TODO: read HTTP/1.2 when it's final
end
# hex letters are valid inside an IPv6 literal
def test_absolute_ipv6_uri_alpha
  parser = HttpParser.new
  req = {}
  url = "http://[::a]/"
  http = "GET #{url} HTTP/1.1\r\n" \
         "Host: bad.example.com\r\n\r\n"
  assert_equal req, parser.headers(req, http)
  assert_equal 'http', req['rack.url_scheme']

  uri = URI.parse(url)
  assert_equal "[::a]", uri.host,
               "URI.parse changed upstream for #{url}? host=#{uri.host}"
  assert_equal "[::a]", req['HTTP_HOST']
  assert_equal "[::a]", req['SERVER_NAME']
  assert_equal '80', req['SERVER_PORT']
end
# uppercase hex letters inside an IPv6 literal are preserved as-is
def test_absolute_ipv6_uri_alpha_2
  parser = HttpParser.new
  req = {}
  url = "http://[::B]/"
  http = "GET #{url} HTTP/1.1\r\n" \
         "Host: bad.example.com\r\n\r\n"
  assert_equal req, parser.headers(req, http)
  assert_equal 'http', req['rack.url_scheme']

  uri = URI.parse(url)
  assert_equal "[::B]", uri.host,
               "URI.parse changed upstream for #{url}? host=#{uri.host}"
  assert_equal "[::B]", req['HTTP_HOST']
  assert_equal "[::B]", req['SERVER_NAME']
  assert_equal '80', req['SERVER_PORT']
end
def test_absolute_ipv6_uri_with_empty_port
  parser = HttpParser.new
  req = {}
  url = "https://[::1]:/foo?q=bar"
  http = "GET #{url} HTTP/1.1\r\n" \
         "Host: bad.example.com\r\n\r\n"
  assert_equal req, parser.headers(req, http)
  assert_equal 'https', req['rack.url_scheme']
  assert_equal '/foo?q=bar', req['REQUEST_URI']
  assert_equal '/foo', req['REQUEST_PATH']
  assert_equal 'q=bar', req['QUERY_STRING']

  uri = URI.parse(url)
  assert_equal "[::1]", uri.host,
               "URI.parse changed upstream for #{url}? host=#{uri.host}"
  assert_equal "[::1]:", req['HTTP_HOST']
  assert_equal "[::1]", req['SERVER_NAME']
  assert_equal '443', req['SERVER_PORT']
  assert_equal "", http
  assert parser.keepalive? # TODO: read HTTP/1.2 when it's final
end
def test_absolute_ipv6_uri_with_port
  parser = HttpParser.new
  req = {}
  url = "https://[::1]:666/foo?q=bar"
  http = "GET #{url} HTTP/1.1\r\n" \
         "Host: bad.example.com\r\n\r\n"
  assert_equal req, parser.headers(req, http)
  assert_equal 'https', req['rack.url_scheme']
  assert_equal '/foo?q=bar', req['REQUEST_URI']
  assert_equal '/foo', req['REQUEST_PATH']
  assert_equal 'q=bar', req['QUERY_STRING']

  uri = URI.parse(url)
  assert_equal "[::1]", uri.host,
               "URI.parse changed upstream for #{url}? host=#{uri.host}"
  assert_equal "[::1]:666", req['HTTP_HOST']
  assert_equal "[::1]", req['SERVER_NAME']
  assert_equal '666', req['SERVER_PORT']
  assert_equal "", http
  assert parser.keepalive? # TODO: read HTTP/1.2 when it's final
end
# an IPv6 literal in the Host: header keeps its brackets in SERVER_NAME
def test_ipv6_host_header
  parser = HttpParser.new
  req = {}
  http = "GET / HTTP/1.1\r\n" \
         "Host: [::1]\r\n\r\n"
  assert_equal req, parser.headers(req, http)
  assert_equal "[::1]", req['HTTP_HOST']
  assert_equal "[::1]", req['SERVER_NAME']
  assert_equal '80', req['SERVER_PORT']
  assert_equal "", http
  assert parser.keepalive? # TODO: read HTTP/1.2 when it's final
end
def test_ipv6_host_header_with_port
  parser = HttpParser.new
  req = {}
  http = "GET / HTTP/1.1\r\n" \
         "Host: [::1]:666\r\n\r\n"
  assert_equal req, parser.headers(req, http)
  assert_equal "[::1]", req['SERVER_NAME']
  assert_equal '666', req['SERVER_PORT']
  assert_equal "[::1]:666", req['HTTP_HOST']
  assert_equal "", http
  assert parser.keepalive? # TODO: read HTTP/1.2 when it's final
end
def test_ipv6_host_header_with_empty_port
  parser = HttpParser.new
  req = {}
  http = "GET / HTTP/1.1\r\n" \
         "Host: [::1]:\r\n\r\n"
  assert_equal req, parser.headers(req, http)
  assert_equal "[::1]", req['SERVER_NAME']
  assert_equal '80', req['SERVER_PORT']
  assert_equal "[::1]:", req['HTTP_HOST']
  assert_equal "", http
  assert parser.keepalive? # TODO: read HTTP/1.2 when it's final
end
625 # XXX Highly unlikely..., just make sure we don't segfault or assert on it
def test_broken_ipv6_host_header
  parser = HttpParser.new
  req = {}
  http = "GET / HTTP/1.1\r\n" \
         "Host: [::1:\r\n\r\n"
  assert_equal req, parser.headers(req, http)
  # garbage in, garbage out: we only care that parsing doesn't crash
  assert_equal "[", req['SERVER_NAME']
  assert_equal ':1:', req['SERVER_PORT']
  assert_equal "[::1:", req['HTTP_HOST']
  assert_equal "", http
end
# headers plus the whole body in one buffer: headers are consumed,
# the body ("abcde") remains in the buffer for the caller.
def test_put_body_oneshot
  parser = HttpParser.new
  req = {}
  http = "PUT / HTTP/1.0\r\nContent-Length: 5\r\n\r\nabcde"
  assert_equal req, parser.headers(req, http)
  assert_equal '/', req['REQUEST_PATH']
  assert_equal '/', req['REQUEST_URI']
  assert_equal 'PUT', req['REQUEST_METHOD']
  assert_equal 'HTTP/1.0', req['HTTP_VERSION']
  assert_equal 'HTTP/1.0', req['SERVER_PROTOCOL']
  assert_equal "abcde", http
  assert ! parser.keepalive? # TODO: read HTTP/1.2 when it's final
end
# headers arrive before any body bytes: the buffer is fully consumed.
def test_put_body_later
  parser = HttpParser.new
  req = {}
  http = "PUT /l HTTP/1.0\r\nContent-Length: 5\r\n\r\n"
  assert_equal req, parser.headers(req, http)
  assert_equal '/l', req['REQUEST_PATH']
  assert_equal '/l', req['REQUEST_URI']
  assert_equal 'PUT', req['REQUEST_METHOD']
  assert_equal 'HTTP/1.0', req['HTTP_VERSION']
  assert_equal 'HTTP/1.0', req['SERVER_PROTOCOL']
  assert_equal "", http
  assert ! parser.keepalive? # TODO: read HTTP/1.2 when it's final
end
# unknown request methods must still parse (CONNECT-alikes, WebDAV, etc.)
# NOTE(review): `ok = false` / `assert ok` scaffolding restored — verify
# against upstream.
def test_unknown_methods
  %w(GETT HEADR XGET XHEAD).each { |m|
    parser = HttpParser.new
    req = {}
    s = "#{m} /forums/1/topics/2375?page=1#posts-17408 HTTP/1.1\r\n\r\n"
    ok = false
    assert_nothing_raised do
      ok = parser.headers(req, s)
    end
    assert ok
    assert_equal '/forums/1/topics/2375?page=1', req['REQUEST_URI']
    assert_equal 'posts-17408', req['FRAGMENT']
    assert_equal 'page=1', req['QUERY_STRING']
    assert_equal "", s
    assert_equal m, req['REQUEST_METHOD']
    assert parser.keepalive? # TODO: read HTTP/1.2 when it's final
  }
end
# the URI fragment is split into req['FRAGMENT'], not left in REQUEST_URI
def test_fragment_in_uri
  parser = HttpParser.new
  req = {}
  get = "GET /forums/1/topics/2375?page=1#posts-17408 HTTP/1.1\r\n\r\n"
  ok = false
  assert_nothing_raised do
    ok = parser.headers(req, get)
  end
  assert ok
  assert_equal '/forums/1/topics/2375?page=1', req['REQUEST_URI']
  assert_equal 'posts-17408', req['FRAGMENT']
  assert_equal 'page=1', req['QUERY_STRING']
  assert_equal '', get
  assert parser.keepalive?
end
701 # lame random garbage maker
# Build a pseudo-random test payload of roughly min..min+max*10 characters,
# prefixed with its size and a "/" separator.
# readable: true  => hex digest characters only
#           false => raw binary digest bytes
def rand_data(min, max, readable=true)
  count = min + ((rand(max)+1) *10).to_i
  res = count.to_s + "/"

  if readable
    res << Digest::SHA1.hexdigest(rand(count * 100).to_s) * (count / 40)
  else
    res << Digest::SHA1.digest(rand(count * 100).to_s) * (count / 20)
  end

  res
end
# oversized/garbage requests must raise HttpParserError, never crash
# NOTE(review): the `10.times do |c|` loop scaffolding and `parser.clear`
# resets were lost in the numbered paste and restored — verify upstream.
def test_horrible_queries
  parser = HttpParser.new

  # then that large header names are caught
  10.times do |c|
    get = "GET /#{rand_data(10,120)} HTTP/1.1\r\nX-#{rand_data(1024, 1024+(c*1024))}: Test\r\n\r\n"
    assert_raises Unicorn::HttpParserError do
      parser.headers({}, get)
    end
    parser.clear
  end

  # then that large mangled field values are caught
  10.times do |c|
    get = "GET /#{rand_data(10,120)} HTTP/1.1\r\nX-Test: #{rand_data(1024, 1024+(c*1024), false)}\r\n\r\n"
    assert_raises Unicorn::HttpParserError do
      parser.headers({}, get)
    end
    parser.clear
  end

  # then large headers are rejected too
  get = "GET /#{rand_data(10,120)} HTTP/1.1\r\n"
  get << "X-Test: test\r\n" * (80 * 1024)
  assert_raises Unicorn::HttpParserError do
    parser.headers({}, get)
  end
  parser.clear

  # finally just that random garbage gets blocked all the time
  10.times do |c|
    get = "GET #{rand_data(1024, 1024+(c*1024), false)} #{rand_data(1024, 1024+(c*1024), false)}\r\n\r\n"
    assert_raises Unicorn::HttpParserError do
      parser.headers({}, get)
    end
    parser.clear
  end
end
756 # so we don't care about the portability of this test
757 # if it doesn't leak on Linux, it won't leak anywhere else
758 # unless your C compiler or platform is otherwise broken
759 LINUX_PROC_PID_STATUS = "/proc/self/status"
761 match_rss = /^VmRSS:\s+(\d+)/
762 if File.read(LINUX_PROC_PID_STATUS) =~ match_rss
764 1000000.times { Unicorn::HttpParser.new }
765 File.read(LINUX_PROC_PID_STATUS) =~ match_rss
767 diff = after - before
768 assert(diff < 10000, "memory grew more than 10M: #{diff}")
770 end if RUBY_PLATFORM =~ /linux/ &&
771 File.readable?(LINUX_PROC_PID_STATUS) &&
772 !defined?(RUBY_ENGINE)