# -*- encoding: binary -*-

# Copyright (c) 2005 Zed A. Shaw
# You can redistribute it and/or modify it under the same terms as Ruby 1.8 or
# the GPLv2+ (GPLv3+ preferred).
#
# Additional work donated by contributors.  See http://mongrel.rubyforge.org/attributions.html
# for more information.
# Loads Test::Unit plus the Unicorn extension under test.
require 'test/test_helper'

# Tests below reference HttpParser/HttpParserError unqualified.
include Unicorn
14 class HttpParserTest < Test::Unit::TestCase
17 parser = HttpParser.new
20 http << "GET / HTTP/1.1\r\n\r\n"
21 assert_equal req, parser.parse
24 assert_equal 'HTTP/1.1', req['SERVER_PROTOCOL']
25 assert_equal '/', req['REQUEST_PATH']
26 assert_equal 'HTTP/1.1', req['HTTP_VERSION']
27 assert_equal '/', req['REQUEST_URI']
28 assert_equal 'GET', req['REQUEST_METHOD']
29 assert_nil req['FRAGMENT']
30 assert_equal '', req['QUERY_STRING']
32 assert parser.keepalive?
37 assert_nil parser.parse
38 assert_equal "G", http
41 # try parsing again to ensure we were reset correctly
42 http << "ET /hello-world HTTP/1.1\r\n\r\n"
45 assert_equal 'HTTP/1.1', req['SERVER_PROTOCOL']
46 assert_equal '/hello-world', req['REQUEST_PATH']
47 assert_equal 'HTTP/1.1', req['HTTP_VERSION']
48 assert_equal '/hello-world', req['REQUEST_URI']
49 assert_equal 'GET', req['REQUEST_METHOD']
50 assert_nil req['FRAGMENT']
51 assert_equal '', req['QUERY_STRING']
53 assert parser.keepalive?
# a tab is valid linear whitespace between header name and value
def test_tab_lws
  parser = HttpParser.new
  req = parser.env
  parser.buf << "GET / HTTP/1.1\r\nHost:\tfoo.bar\r\n\r\n"
  assert_equal req.object_id, parser.parse.object_id
  assert_equal "foo.bar", req['HTTP_HOST']
end
# "Connection: close" must disable keep-alive even on HTTP/1.1
def test_connection_close_no_ka
  parser = HttpParser.new
  req = parser.env
  parser.buf << "GET / HTTP/1.1\r\nConnection: close\r\n\r\n"
  assert_equal req.object_id, parser.parse.object_id
  assert_equal "GET", req['REQUEST_METHOD']
  assert ! parser.keepalive?
end

# explicit "Connection: keep-alive" keeps the connection open
def test_connection_keep_alive_ka
  parser = HttpParser.new
  req = parser.env
  parser.buf << "HEAD / HTTP/1.1\r\nConnection: keep-alive\r\n\r\n"
  assert_equal req.object_id, parser.parse.object_id
  assert parser.keepalive?
end

# a bodyless POST can still be kept alive
def test_connection_keep_alive_no_body
  parser = HttpParser.new
  req = parser.env
  parser.buf << "POST / HTTP/1.1\r\nConnection: keep-alive\r\n\r\n"
  assert_equal req.object_id, parser.parse.object_id
  assert parser.keepalive?
end

# same as above, but with an explicit zero Content-Length
def test_connection_keep_alive_no_body_empty
  parser = HttpParser.new
  req = parser.env
  parser.buf << "POST / HTTP/1.1\r\n" \
                "Content-Length: 0\r\n" \
                "Connection: keep-alive\r\n\r\n"
  assert_equal req.object_id, parser.parse.object_id
  assert parser.keepalive?
end

# "Connection: keep-alive" overrides the HTTP/1.0 default of closing
def test_connection_keep_alive_ka_bad_version
  parser = HttpParser.new
  req = parser.env
  parser.buf << "GET / HTTP/1.0\r\nConnection: keep-alive\r\n\r\n"
  assert_equal req.object_id, parser.parse.object_id
  assert parser.keepalive?
end
# SERVER_NAME/SERVER_PORT derive from the Host: header; port defaults to 80
def test_parse_server_host_default_port
  parser = HttpParser.new
  req = parser.env
  parser.buf << "GET / HTTP/1.1\r\nHost: foo\r\n\r\n"
  assert_equal req, parser.parse
  assert_equal 'foo', req['SERVER_NAME']
  assert_equal '80', req['SERVER_PORT']
  assert_equal '', parser.buf
  assert parser.keepalive?
end

# an explicit port in Host: is propagated to SERVER_PORT
def test_parse_server_host_alt_port
  parser = HttpParser.new
  req = parser.env
  parser.buf << "GET / HTTP/1.1\r\nHost: foo:999\r\n\r\n"
  assert_equal req, parser.parse
  assert_equal 'foo', req['SERVER_NAME']
  assert_equal '999', req['SERVER_PORT']
  assert_equal '', parser.buf
  assert parser.keepalive?
end

# "Host: foo:" (trailing colon, no digits) falls back to the default port
def test_parse_server_host_empty_port
  parser = HttpParser.new
  req = parser.env
  parser.buf << "GET / HTTP/1.1\r\nHost: foo:\r\n\r\n"
  assert_equal req, parser.parse
  assert_equal 'foo', req['SERVER_NAME']
  assert_equal '80', req['SERVER_PORT']
  assert_equal '', parser.buf
  assert parser.keepalive?
end

# X-Forwarded-Proto: https switches the default port to 443
def test_parse_server_host_xfp_https
  parser = HttpParser.new
  req = parser.env
  parser.buf << "GET / HTTP/1.1\r\nHost: foo:\r\n" \
                "X-Forwarded-Proto: https\r\n\r\n"
  assert_equal req, parser.parse
  assert_equal 'foo', req['SERVER_NAME']
  assert_equal '443', req['SERVER_PORT']
  assert_equal '', parser.buf
  assert parser.keepalive?
end
# only the FIRST element of a chained X-Forwarded-Proto list counts
def test_parse_xfp_https_chained
  parser = HttpParser.new
  req = parser.env
  parser.buf << "GET / HTTP/1.0\r\n" \
                "X-Forwarded-Proto: https,http\r\n\r\n"
  assert_equal req, parser.parse
  assert_equal '443', req['SERVER_PORT'], req.inspect
  assert_equal 'https', req['rack.url_scheme'], req.inspect
  assert_equal '', parser.buf
end

# a leading "http" in the chain means the request stays plain http
def test_parse_xfp_https_chained_backwards
  parser = HttpParser.new
  req = parser.env
  parser.buf << "GET / HTTP/1.0\r\n" \
                "X-Forwarded-Proto: http,https\r\n\r\n"
  assert_equal req, parser.parse
  assert_equal '80', req['SERVER_PORT'], req.inspect
  assert_equal 'http', req['rack.url_scheme'], req.inspect
  assert_equal '', parser.buf
end

# unknown schemes in X-Forwarded-Proto are ignored entirely
def test_parse_xfp_gopher_is_ignored
  parser = HttpParser.new
  req = parser.env
  parser.buf << "GET / HTTP/1.0\r\n" \
                "X-Forwarded-Proto: gopher\r\n\r\n"
  assert_equal req, parser.parse
  assert_equal '80', req['SERVER_PORT'], req.inspect
  assert_equal 'http', req['rack.url_scheme'], req.inspect
  assert_equal '', parser.buf
end

# X-Forwarded-Ssl: on implies https/443
def test_parse_x_forwarded_ssl_on
  parser = HttpParser.new
  req = parser.env
  parser.buf << "GET / HTTP/1.0\r\n" \
                "X-Forwarded-Ssl: on\r\n\r\n"
  assert_equal req, parser.parse
  assert_equal '443', req['SERVER_PORT'], req.inspect
  assert_equal 'https', req['rack.url_scheme'], req.inspect
  assert_equal '', parser.buf
end

# X-Forwarded-Ssl: off leaves the request as plain http
def test_parse_x_forwarded_ssl_off
  parser = HttpParser.new
  req = parser.env
  parser.buf << "GET / HTTP/1.0\r\nX-Forwarded-Ssl: off\r\n\r\n"
  assert_equal req, parser.parse
  assert_equal '80', req['SERVER_PORT'], req.inspect
  assert_equal 'http', req['rack.url_scheme'], req.inspect
  assert_equal '', parser.buf
end
# unusual but legal token characters in header names/values must parse
def test_parse_strange_headers
  parser = HttpParser.new
  req = parser.env
  should_be_good = "GET / HTTP/1.1\r\naaaaaaaaaaaaa:++++++++++\r\n\r\n"
  parser.buf << should_be_good
  assert_equal req, parser.parse
  assert_equal '', parser.buf
  assert parser.keepalive?
end
# legacy test case from Mongrel that we never supported before...
# I still consider Pound irrelevant, unfortunately stupid clients that
# send extremely big headers do exist and they've managed to find Unicorn...
# Folded continuation lines ("\r\n\t") must be joined with single spaces.
def test_nasty_pound_header
  parser = HttpParser.new
  req = parser.env
  nasty_pound_header = "GET / HTTP/1.1\r\nX-SSL-Bullshit: -----BEGIN CERTIFICATE-----\r\n\tMIIFbTCCBFWgAwIBAgICH4cwDQYJKoZIhvcNAQEFBQAwcDELMAkGA1UEBhMCVUsx\r\n\tETAPBgNVBAoTCGVTY2llbmNlMRIwEAYDVQQLEwlBdXRob3JpdHkxCzAJBgNVBAMT\r\n\tAkNBMS0wKwYJKoZIhvcNAQkBFh5jYS1vcGVyYXRvckBncmlkLXN1cHBvcnQuYWMu\r\n\tdWswHhcNMDYwNzI3MTQxMzI4WhcNMDcwNzI3MTQxMzI4WjBbMQswCQYDVQQGEwJV\r\n\tSzERMA8GA1UEChMIZVNjaWVuY2UxEzARBgNVBAsTCk1hbmNoZXN0ZXIxCzAJBgNV\r\n\tBAcTmrsogriqMWLAk1DMRcwFQYDVQQDEw5taWNoYWVsIHBhcmQYJKoZIhvcNAQEB\r\n\tBQADggEPADCCAQoCggEBANPEQBgl1IaKdSS1TbhF3hEXSl72G9J+WC/1R64fAcEF\r\n\tW51rEyFYiIeZGx/BVzwXbeBoNUK41OK65sxGuflMo5gLflbwJtHBRIEKAfVVp3YR\r\n\tgW7cMA/s/XKgL1GEC7rQw8lIZT8RApukCGqOVHSi/F1SiFlPDxuDfmdiNzL31+sL\r\n\t0iwHDdNkGjy5pyBSB8Y79dsSJtCW/iaLB0/n8Sj7HgvvZJ7x0fr+RQjYOUUfrePP\r\n\tu2MSpFyf+9BbC/aXgaZuiCvSR+8Snv3xApQY+fULK/xY8h8Ua51iXoQ5jrgu2SqR\r\n\twgA7BUi3G8LFzMBl8FRCDYGUDy7M6QaHXx1ZWIPWNKsCAwEAAaOCAiQwggIgMAwG\r\n\tA1UdEwEB/wQCMAAwEQYJYIZIAYb4QgEBBAQDAgWgMA4GA1UdDwEB/wQEAwID6DAs\r\n\tBglghkgBhvhCAQ0EHxYdVUsgZS1TY2llbmNlIFVzZXIgQ2VydGlmaWNhdGUwHQYD\r\n\tVR0OBBYEFDTt/sf9PeMaZDHkUIldrDYMNTBZMIGaBgNVHSMEgZIwgY+AFAI4qxGj\r\n\tloCLDdMVKwiljjDastqooXSkcjBwMQswCQYDVQQGEwJVSzERMA8GA1UEChMIZVNj\r\n\taWVuY2UxEjAQBgNVBAsTCUF1dGhvcml0eTELMAkGA1UEAxMCQ0ExLTArBgkqhkiG\r\n\t9w0BCQEWHmNhLW9wZXJhdG9yQGdyaWQtc3VwcG9ydC5hYy51a4IBADApBgNVHRIE\r\n\tIjAggR5jYS1vcGVyYXRvckBncmlkLXN1cHBvcnQuYWMudWswGQYDVR0gBBIwEDAO\r\n\tBgwrBgEEAdkvAQEBAQYwPQYJYIZIAYb4QgEEBDAWLmh0dHA6Ly9jYS5ncmlkLXN1\r\n\tcHBvcnQuYWMudmT4sopwqlBWsvcHViL2NybC9jYWNybC5jcmwwPQYJYIZIAYb4QgEDBDAWLmh0\r\n\tdHA6Ly9jYS5ncmlkLXN1cHBvcnQuYWMudWsvcHViL2NybC9jYWNybC5jcmwwPwYD\r\n\tVR0fBDgwNjA0oDKgMIYuaHR0cDovL2NhLmdyaWQt5hYy51ay9wdWIv\r\n\tY3JsL2NhY3JsLmNybDANBgkqhkiG9w0BAQUFAAOCAQEAS/U4iiooBENGW/Hwmmd3\r\n\tXCy6Zrt08YjKCzGNjorT98g8uGsqYjSxv/hmi0qlnlHs+k/3Iobc3LjS5AMYr5L8\r\n\tUO7OSkgFFlLHQyC9JzPfmLCAugvzEbyv4Olnsr8hbxF1MbKZoQxUZtMVu29wjfXk\r\n\thTeApBv7eaKCWpSp7MCbvgzm74izKhu3vlDk9w6qVrxePfGgpKPqfHiOoGhFnbTK\r\n\twTC6o2xq5y0qZ03JonF7OJspEd3I5zKY3E+ov7/ZhW6DqT8UFvsAdjvQbXyhV8Eu\r\n\tYhixw1aKEPzNjNowuIseVogKOLXxWI5vAi5HgXdS0/ES5gDGsABo4fqovUKlgop3\r\n\tRA==\r\n\t-----END CERTIFICATE-----\r\n\r\n"
  parser.buf << nasty_pound_header.dup

  assert nasty_pound_header =~ /(-----BEGIN .*--END CERTIFICATE-----)/m
  expect = $1.dup
  expect.gsub!(/\r\n\t/, ' ')
  assert_equal req, parser.parse
  assert_equal '', parser.buf
  assert_equal expect, req['HTTP_X_SSL_BULLSHIT']
end
# blank continuation lines and leading LWS collapse away entirely
# NOTE(review): continuation payload lines reconstructed from the asserted
# results below — verify against upstream history
def test_continuation_eats_leading_spaces
  parser = HttpParser.new
  header = "GET / HTTP/1.1\r\n" \
           "X-ASDF:      \r\n" \
           "\t\r\n" \
           " \r\n" \
           " ASDF\r\n\r\n"
  parser.buf << header
  req = parser.env
  assert_equal req, parser.parse
  assert_equal '', parser.buf
  assert_equal 'ASDF', req['HTTP_X_ASDF']
end

# scattered continuation fragments join with single spaces
def test_continuation_eats_scattered_leading_spaces
  parser = HttpParser.new
  header = "GET / HTTP/1.1\r\n" \
           "X-ASDF:   hi\r\n" \
           "    y\r\n" \
           "\t\r\n" \
           "       x\r\n" \
           " ASDF\r\n\r\n"
  req = parser.env
  parser.buf << header
  assert_equal req, parser.parse
  assert_equal '', parser.buf
  assert_equal 'hi y x ASDF', req['HTTP_X_ASDF']
end

# trailing LWS on a continued header value is stripped
def test_continuation_eats_trailing_spaces
  parser = HttpParser.new
  header = "GET / HTTP/1.1\r\n" \
           "X-ASDF: b  \t \r\n" \
           "\t\r\n" \
           "  ASDF\r\n\r\n"
  parser.buf << header
  req = parser.env
  assert_equal req, parser.parse
  assert_equal '', parser.buf
  assert_equal 'b ASDF', req['HTTP_X_ASDF']
end

# with an absolute request-URI, a (folded) Host: header is ignored
def test_continuation_with_absolute_uri_and_ignored_host_header
  parser = HttpParser.new
  header = "GET http://example.com/ HTTP/1.1\r\n" \
           "Host: \r\n" \
           " YHBT.net\r\n" \
           "\r\n"
  parser.buf << header
  req = parser.env
  assert_equal req, parser.parse
  assert_equal 'example.com', req['HTTP_HOST']
end
# this may seem to be testing more of an implementation detail, but
# it also helps ensure we're safe in the presence of multiple parsers
# in case we ever go multithreaded/evented...
def test_resumable_continuations
  nr = 1000
  header = "GET / HTTP/1.1\r\n" \
           "X-ASDF: \r\n" \
           " hello\r\n"
  tmp = []
  nr.times { |i|
    parser = HttpParser.new
    req = parser.env
    parser.buf << "#{header} #{i}\r\n"
    assert parser.parse.nil?
    asdf = req['HTTP_X_ASDF']
    assert_equal "hello #{i}", asdf
    tmp << [ parser, asdf ]
  }
  tmp.each_with_index { |(parser, asdf), i|
    parser.buf << " .\r\n\r\n"
    assert_equal parser.env, parser.parse
    assert_equal "hello #{i} .", asdf
  }
end
# a continuation line with no preceding header is a parse error
def test_invalid_continuation
  parser = HttpParser.new
  header = "GET / HTTP/1.1\r\n" \
           " y\r\nHost: hello\r\n\r\n"
  parser.buf << header
  assert_raises(HttpParserError) { parser.parse }
end
# IE6 sends raw quotes/angle brackets in request URIs; accept them as-is
# NOTE(review): middle entries of this path list reconstructed — confirm
# against upstream history
def test_parse_ie6_urls
  %w(/some/random/path"
     /some/random/path>
     /some/random/path<
     /we/love/you/ie6?q=<"">
     /url?<="&>="
     /mal"formed"?
  ).each do |path|
    parser = HttpParser.new
    req = parser.env
    sorta_safe = %(GET #{path} HTTP/1.1\r\n\r\n)
    assert_equal req, parser.headers(req, sorta_safe)
    assert_equal path, req['REQUEST_URI']
    assert_equal '', sorta_safe
    assert parser.keepalive?
  end
end
# a bogus protocol token raises, and the parser recovers after clear
def test_parse_error
  parser = HttpParser.new
  req = parser.env
  bad_http = "GET / SsUTF/1.1"

  assert_raises(HttpParserError) { parser.headers(req, bad_http) }

  # make sure we can recover
  parser.clear
  req.clear
  assert_equal req, parser.headers(req, "GET / HTTP/1.0\r\n\r\n")
  assert ! parser.keepalive?
end
# the parser accepts a request trickled in a few bytes at a time,
# returning nil until the terminating blank line arrives
def test_piecemeal
  parser = HttpParser.new
  req = parser.env
  http = "GET"
  assert_nil parser.headers(req, http)
  assert_nil parser.headers(req, http)
  assert_nil parser.headers(req, http << " / HTTP/1.0")
  assert_equal '/', req['REQUEST_PATH']
  assert_equal '/', req['REQUEST_URI']
  assert_equal 'GET', req['REQUEST_METHOD']
  assert_nil parser.headers(req, http << "\r\n")
  assert_equal 'HTTP/1.0', req['HTTP_VERSION']
  assert_nil parser.headers(req, http << "\r")
  assert_equal req, parser.headers(req, http << "\n")
  assert_equal 'HTTP/1.0', req['SERVER_PROTOCOL']
  assert_nil req['FRAGMENT']
  assert_equal '', req['QUERY_STRING']
  assert_equal "", http
  assert ! parser.keepalive?
end
# not common, but underscores do appear in practice
def test_absolute_uri_underscores
  parser = HttpParser.new
  req = parser.env
  http = "GET http://under_score.example.com/foo?q=bar HTTP/1.0\r\n\r\n"
  parser.buf << http
  assert_equal req, parser.parse
  assert_equal 'http', req['rack.url_scheme']
  assert_equal '/foo?q=bar', req['REQUEST_URI']
  assert_equal '/foo', req['REQUEST_PATH']
  assert_equal 'q=bar', req['QUERY_STRING']

  assert_equal 'under_score.example.com', req['HTTP_HOST']
  assert_equal 'under_score.example.com', req['SERVER_NAME']
  assert_equal '80', req['SERVER_PORT']
  assert_equal "", parser.buf
  assert ! parser.keepalive?
end

# some dumb clients add users because they're stupid
def test_absolute_uri_w_user
  parser = HttpParser.new
  req = parser.env
  http = "GET http://user%20space@example.com/foo?q=bar HTTP/1.0\r\n\r\n"
  parser.buf << http
  assert_equal req, parser.parse
  assert_equal 'http', req['rack.url_scheme']
  assert_equal '/foo?q=bar', req['REQUEST_URI']
  assert_equal '/foo', req['REQUEST_PATH']
  assert_equal 'q=bar', req['QUERY_STRING']

  assert_equal 'example.com', req['HTTP_HOST']
  assert_equal 'example.com', req['SERVER_NAME']
  assert_equal '80', req['SERVER_PORT']
  assert_equal "", parser.buf
  assert ! parser.keepalive?
end
# since Mongrel supported anything URI.parse supported, we're stuck
# supporting everything URI.parse supports
def test_absolute_uri_uri_parse
  "#{URI::REGEXP::PATTERN::UNRESERVED};:&=+$,".split(//).each do |char|
    parser = HttpParser.new
    req = parser.env
    http = "GET http://#{char}@example.com/ HTTP/1.0\r\n\r\n"
    assert_equal req, parser.headers(req, http)
    assert_equal 'http', req['rack.url_scheme']
    assert_equal '/', req['REQUEST_URI']
    assert_equal '/', req['REQUEST_PATH']
    assert_equal '', req['QUERY_STRING']

    assert_equal 'example.com', req['HTTP_HOST']
    assert_equal 'example.com', req['SERVER_NAME']
    assert_equal '80', req['SERVER_PORT']
    assert_equal "", http
    assert ! parser.keepalive?
  end
end
# an absolute request-URI provides scheme, host, port, path and query
def test_absolute_uri
  parser = HttpParser.new
  req = parser.env
  parser.buf << "GET http://example.com/foo?q=bar HTTP/1.0\r\n\r\n"
  assert_equal req, parser.parse
  assert_equal 'http', req['rack.url_scheme']
  assert_equal '/foo?q=bar', req['REQUEST_URI']
  assert_equal '/foo', req['REQUEST_PATH']
  assert_equal 'q=bar', req['QUERY_STRING']

  assert_equal 'example.com', req['HTTP_HOST']
  assert_equal 'example.com', req['SERVER_NAME']
  assert_equal '80', req['SERVER_PORT']
  assert_equal "", parser.buf
  assert ! parser.keepalive?
end
# X-Forwarded-Proto is not in rfc2616, absolute URIs are, however...
# the https scheme in the URI wins over a conflicting X-Forwarded-Proto
def test_absolute_uri_https
  parser = HttpParser.new
  req = parser.env
  http = "GET https://example.com/foo?q=bar HTTP/1.1\r\n" \
         "X-Forwarded-Proto: http\r\n\r\n"
  parser.buf << http
  assert_equal req, parser.parse
  assert_equal 'https', req['rack.url_scheme']
  assert_equal '/foo?q=bar', req['REQUEST_URI']
  assert_equal '/foo', req['REQUEST_PATH']
  assert_equal 'q=bar', req['QUERY_STRING']

  assert_equal 'example.com', req['HTTP_HOST']
  assert_equal 'example.com', req['SERVER_NAME']
  assert_equal '443', req['SERVER_PORT']
  assert_equal "", parser.buf
  assert parser.keepalive?
end
# Host: header should be ignored for absolute URIs
def test_absolute_uri_with_port
  parser = HttpParser.new
  req = parser.env
  parser.buf << "GET http://example.com:8080/foo?q=bar HTTP/1.2\r\n" \
                "Host: bad.example.com\r\n\r\n"
  assert_equal req, parser.parse
  assert_equal 'http', req['rack.url_scheme']
  assert_equal '/foo?q=bar', req['REQUEST_URI']
  assert_equal '/foo', req['REQUEST_PATH']
  assert_equal 'q=bar', req['QUERY_STRING']

  assert_equal 'example.com:8080', req['HTTP_HOST']
  assert_equal 'example.com', req['SERVER_NAME']
  assert_equal '8080', req['SERVER_PORT']
  assert_equal "", parser.buf
  assert ! parser.keepalive? # TODO: read HTTP/1.2 when it's final
end

# an empty port in an absolute URI falls back to the scheme default
def test_absolute_uri_with_empty_port
  parser = HttpParser.new
  req = parser.env
  parser.buf << "GET https://example.com:/foo?q=bar HTTP/1.1\r\n" \
                "Host: bad.example.com\r\n\r\n"
  assert_equal req, parser.parse
  assert_equal 'https', req['rack.url_scheme']
  assert_equal '/foo?q=bar', req['REQUEST_URI']
  assert_equal '/foo', req['REQUEST_PATH']
  assert_equal 'q=bar', req['QUERY_STRING']

  assert_equal 'example.com:', req['HTTP_HOST']
  assert_equal 'example.com', req['SERVER_NAME']
  assert_equal '443', req['SERVER_PORT']
  assert_equal "", parser.buf
  assert parser.keepalive? # TODO: read HTTP/1.2 when it's final
end
# bracketed IPv6 literals in absolute URIs must parse like URI.parse does
def test_absolute_ipv6_uri
  parser = HttpParser.new
  req = parser.env
  url = "http://[::1]/foo?q=bar"
  http = "GET #{url} HTTP/1.1\r\n" \
         "Host: bad.example.com\r\n\r\n"
  assert_equal req, parser.headers(req, http)
  assert_equal 'http', req['rack.url_scheme']
  assert_equal '/foo?q=bar', req['REQUEST_URI']
  assert_equal '/foo', req['REQUEST_PATH']
  assert_equal 'q=bar', req['QUERY_STRING']

  uri = URI.parse(url)
  assert_equal "[::1]", uri.host,
               "URI.parse changed upstream for #{url}? host=#{uri.host}"
  assert_equal "[::1]", req['HTTP_HOST']
  assert_equal "[::1]", req['SERVER_NAME']
  assert_equal '80', req['SERVER_PORT']
  assert_equal "", http
  assert parser.keepalive? # TODO: read HTTP/1.2 when it's final
end
# IPv6 literals with lowercase hex digits
def test_absolute_ipv6_uri_alpha
  parser = HttpParser.new
  req = parser.env
  url = "http://[::a]/"
  http = "GET #{url} HTTP/1.1\r\n" \
         "Host: bad.example.com\r\n\r\n"
  assert_equal req, parser.headers(req, http)
  assert_equal 'http', req['rack.url_scheme']

  uri = URI.parse(url)
  assert_equal "[::a]", uri.host,
               "URI.parse changed upstream for #{url}? host=#{uri.host}"
  assert_equal "[::a]", req['HTTP_HOST']
  assert_equal "[::a]", req['SERVER_NAME']
  assert_equal '80', req['SERVER_PORT']
end

# IPv6 literals with uppercase hex digits
def test_absolute_ipv6_uri_alpha_2
  parser = HttpParser.new
  req = parser.env
  url = "http://[::B]/"
  http = "GET #{url} HTTP/1.1\r\n" \
         "Host: bad.example.com\r\n\r\n"
  assert_equal req, parser.headers(req, http)
  assert_equal 'http', req['rack.url_scheme']

  uri = URI.parse(url)
  assert_equal "[::B]", uri.host,
               "URI.parse changed upstream for #{url}? host=#{uri.host}"
  assert_equal "[::B]", req['HTTP_HOST']
  assert_equal "[::B]", req['SERVER_NAME']
  assert_equal '80', req['SERVER_PORT']
end
# IPv6 absolute URI with a trailing colon but no port digits
def test_absolute_ipv6_uri_with_empty_port
  parser = HttpParser.new
  req = parser.env
  url = "https://[::1]:/foo?q=bar"
  http = "GET #{url} HTTP/1.1\r\n" \
         "Host: bad.example.com\r\n\r\n"
  assert_equal req, parser.headers(req, http)
  assert_equal 'https', req['rack.url_scheme']
  assert_equal '/foo?q=bar', req['REQUEST_URI']
  assert_equal '/foo', req['REQUEST_PATH']
  assert_equal 'q=bar', req['QUERY_STRING']

  uri = URI.parse(url)
  assert_equal "[::1]", uri.host,
               "URI.parse changed upstream for #{url}? host=#{uri.host}"
  assert_equal "[::1]:", req['HTTP_HOST']
  assert_equal "[::1]", req['SERVER_NAME']
  assert_equal '443', req['SERVER_PORT']
  assert_equal "", http
  assert parser.keepalive? # TODO: read HTTP/1.2 when it's final
end

# IPv6 absolute URI with an explicit port
def test_absolute_ipv6_uri_with_port
  parser = HttpParser.new
  req = parser.env
  url = "https://[::1]:666/foo?q=bar"
  http = "GET #{url} HTTP/1.1\r\n" \
         "Host: bad.example.com\r\n\r\n"
  assert_equal req, parser.headers(req, http)
  assert_equal 'https', req['rack.url_scheme']
  assert_equal '/foo?q=bar', req['REQUEST_URI']
  assert_equal '/foo', req['REQUEST_PATH']
  assert_equal 'q=bar', req['QUERY_STRING']

  uri = URI.parse(url)
  assert_equal "[::1]", uri.host,
               "URI.parse changed upstream for #{url}? host=#{uri.host}"
  assert_equal "[::1]:666", req['HTTP_HOST']
  assert_equal "[::1]", req['SERVER_NAME']
  assert_equal '666', req['SERVER_PORT']
  assert_equal "", http
  assert parser.keepalive? # TODO: read HTTP/1.2 when it's final
end
# bracketed IPv6 literal in the Host: header, default port
def test_ipv6_host_header
  parser = HttpParser.new
  req = parser.env
  parser.buf << "GET / HTTP/1.1\r\n" \
                "Host: [::1]\r\n\r\n"
  assert_equal req, parser.parse
  assert_equal "[::1]", req['HTTP_HOST']
  assert_equal "[::1]", req['SERVER_NAME']
  assert_equal '80', req['SERVER_PORT']
  assert_equal "", parser.buf
  assert parser.keepalive? # TODO: read HTTP/1.2 when it's final
end

# bracketed IPv6 literal with an explicit port
def test_ipv6_host_header_with_port
  parser = HttpParser.new
  req = parser.env
  parser.buf << "GET / HTTP/1.1\r\n" \
                "Host: [::1]:666\r\n\r\n"
  assert_equal req, parser.parse
  assert_equal "[::1]", req['SERVER_NAME']
  assert_equal '666', req['SERVER_PORT']
  assert_equal "[::1]:666", req['HTTP_HOST']
  assert_equal "", parser.buf
  assert parser.keepalive? # TODO: read HTTP/1.2 when it's final
end

# bracketed IPv6 literal with a trailing colon but no port digits
def test_ipv6_host_header_with_empty_port
  parser = HttpParser.new
  req = parser.env
  parser.buf << "GET / HTTP/1.1\r\nHost: [::1]:\r\n\r\n"
  assert_equal req, parser.parse
  assert_equal "[::1]", req['SERVER_NAME']
  assert_equal '80', req['SERVER_PORT']
  assert_equal "[::1]:", req['HTTP_HOST']
  assert_equal "", parser.buf
  assert parser.keepalive? # TODO: read HTTP/1.2 when it's final
end
# XXX Highly unlikely..., just make sure we don't segfault or assert on it
def test_broken_ipv6_host_header
  parser = HttpParser.new
  req = parser.env
  parser.buf << "GET / HTTP/1.1\r\nHost: [::1:\r\n\r\n"
  assert_equal req, parser.parse
  assert_equal "[", req['SERVER_NAME']
  assert_equal ':1:', req['SERVER_PORT']
  assert_equal "[::1:", req['HTTP_HOST']
  assert_equal "", parser.buf
end
# request headers and body arriving in one read: body stays in the buffer
def test_put_body_oneshot
  parser = HttpParser.new
  req = parser.env
  parser.buf << "PUT / HTTP/1.0\r\nContent-Length: 5\r\n\r\nabcde"
  assert_equal req, parser.parse
  assert_equal '/', req['REQUEST_PATH']
  assert_equal '/', req['REQUEST_URI']
  assert_equal 'PUT', req['REQUEST_METHOD']
  assert_equal 'HTTP/1.0', req['HTTP_VERSION']
  assert_equal 'HTTP/1.0', req['SERVER_PROTOCOL']
  assert_equal "abcde", parser.buf
  assert ! parser.keepalive? # TODO: read HTTP/1.2 when it's final
end

# headers alone parse fine; the body may arrive in a later read
def test_put_body_later
  parser = HttpParser.new
  req = parser.env
  parser.buf << "PUT /l HTTP/1.0\r\nContent-Length: 5\r\n\r\n"
  assert_equal req, parser.parse
  assert_equal '/l', req['REQUEST_PATH']
  assert_equal '/l', req['REQUEST_URI']
  assert_equal 'PUT', req['REQUEST_METHOD']
  assert_equal 'HTTP/1.0', req['HTTP_VERSION']
  assert_equal 'HTTP/1.0', req['SERVER_PROTOCOL']
  assert_equal "", parser.buf
  assert ! parser.keepalive? # TODO: read HTTP/1.2 when it's final
end
# unrecognized (but well-formed) request methods are passed through
def test_unknown_methods
  %w(GETT HEADR XGET XHEAD).each { |m|
    parser = HttpParser.new
    req = parser.env
    s = "#{m} /forums/1/topics/2375?page=1#posts-17408 HTTP/1.1\r\n\r\n"
    ok = parser.headers(req, s)
    assert ok
    assert_equal '/forums/1/topics/2375?page=1', req['REQUEST_URI']
    assert_equal 'posts-17408', req['FRAGMENT']
    assert_equal 'page=1', req['QUERY_STRING']
    assert_equal "", s
    assert_equal m, req['REQUEST_METHOD']
    assert parser.keepalive? # TODO: read HTTP/1.2 when it's final
  }
end
# a #fragment in the request-URI is split out into FRAGMENT
def test_fragment_in_uri
  parser = HttpParser.new
  req = parser.env
  get = "GET /forums/1/topics/2375?page=1#posts-17408 HTTP/1.1\r\n\r\n"
  parser.buf << get

  assert_equal req, parser.parse
  assert_equal '/forums/1/topics/2375?page=1', req['REQUEST_URI']
  assert_equal 'posts-17408', req['FRAGMENT']
  assert_equal 'page=1', req['QUERY_STRING']
  assert_equal '', parser.buf
  assert parser.keepalive?
end
# lame random garbage maker
# min/max bound the size factor; readable selects hex digest output
# (ASCII-safe) vs. raw binary digest output
def rand_data(min, max, readable = true)
  count = min + ((rand(max)+1) *10).to_i
  res = count.to_s + "/"
  if readable
    res << Digest::SHA1.hexdigest(rand(count * 100).to_s) * (count / 40)
  else
    res << Digest::SHA1.digest(rand(count * 100).to_s) * (count / 20)
  end
  res
end
# oversized/garbage requests must raise instead of crashing the extension
def test_horrible_queries
  parser = HttpParser.new
  req = parser.env

  # then that large header names are caught
  10.times do |c|
    get = "GET /#{rand_data(10,120)} HTTP/1.1\r\nX-#{rand_data(1024, 1024+(c*1024))}: Test\r\n\r\n"
    assert_raises(Unicorn::HttpParserError,Unicorn::RequestURITooLongError) do
      parser.buf << get
      parser.parse
    end
    parser.clear
    req.clear
  end

  # then that large mangled field values are caught
  10.times do |c|
    get = "GET /#{rand_data(10,120)} HTTP/1.1\r\nX-Test: #{rand_data(1024, 1024+(c*1024), false)}\r\n\r\n"
    assert_raises(Unicorn::HttpParserError,Unicorn::RequestURITooLongError) do
      parser.buf << get
      parser.parse
    end
    parser.clear
    req.clear
  end

  # then large headers are rejected too
  get = "GET /#{rand_data(10,120)} HTTP/1.1\r\n"
  get << "X-Test: test\r\n" * (80 * 1024)
  parser.buf << get
  assert_raises(Unicorn::HttpParserError,Unicorn::RequestURITooLongError) do
    parser.parse
  end
  parser.clear
  req.clear

  # finally just that random garbage gets blocked all the time
  10.times do |c|
    get = "GET #{rand_data(1024, 1024+(c*1024), false)} #{rand_data(1024, 1024+(c*1024), false)}\r\n\r\n"
    assert_raises(Unicorn::HttpParserError,Unicorn::RequestURITooLongError) do
      parser.buf << get
      parser.parse
    end
    parser.clear
    req.clear
  end
end
# a tab immediately after the header colon is treated as LWS
def test_leading_tab
  parser = HttpParser.new
  get = "GET / HTTP/1.1\r\nHost:\texample.com\r\n\r\n"
  assert parser.add_parse(get)
  assert_equal 'example.com', parser.env['HTTP_HOST']
end

# trailing spaces after a header value are stripped
def test_trailing_whitespace
  parser = HttpParser.new
  get = "GET / HTTP/1.1\r\nHost: example.com \r\n\r\n"
  assert parser.add_parse(get)
  assert_equal 'example.com', parser.env['HTTP_HOST']
end

# a trailing tab after a header value is stripped
def test_trailing_tab
  parser = HttpParser.new
  get = "GET / HTTP/1.1\r\nHost: example.com\t\r\n\r\n"
  assert parser.add_parse(get)
  assert_equal 'example.com', parser.env['HTTP_HOST']
end

# any mix of trailing tabs and spaces is stripped
def test_trailing_multiple_linear_whitespace
  parser = HttpParser.new
  get = "GET / HTTP/1.1\r\nHost: example.com\t \t \t\r\n\r\n"
  assert parser.add_parse(get)
  assert_equal 'example.com', parser.env['HTTP_HOST']
end

# embedded (non-edge) LWS inside a value is preserved
def test_embedded_linear_whitespace_ok
  parser = HttpParser.new
  get = "GET / HTTP/1.1\r\nX-Space: hello\t world\t \r\n\r\n"
  assert parser.add_parse(get)
  assert_equal "hello\t world", parser.env["HTTP_X_SPACE"]
end
# NUL bytes are never valid in header values, wherever they appear
def test_null_byte_header
  parser = HttpParser.new
  get = "GET / HTTP/1.1\r\nHost: \0\r\n\r\n"
  assert_raises(HttpParserError) { parser.add_parse(get) }
end

def test_null_byte_in_middle
  parser = HttpParser.new
  get = "GET / HTTP/1.1\r\nHost: hello\0world\r\n\r\n"
  assert_raises(HttpParserError) { parser.add_parse(get) }
end

def test_null_byte_at_end
  parser = HttpParser.new
  get = "GET / HTTP/1.1\r\nHost: hello\0\r\n\r\n"
  assert_raises(HttpParserError) { parser.add_parse(get) }
end

# an empty header value is legal and yields an empty string
def test_empty_header
  parser = HttpParser.new
  get = "GET / HTTP/1.1\r\nHost: \r\n\r\n"
  assert parser.add_parse(get)
  assert_equal '', parser.env['HTTP_HOST']
end
# so we don't care about the portability of this test
# if it doesn't leak on Linux, it won't leak anywhere else
# unless your C compiler or platform is otherwise broken
LINUX_PROC_PID_STATUS = "/proc/self/status"
def test_memory_leak
  match_rss = /^VmRSS:\s+(\d+)/
  if File.read(LINUX_PROC_PID_STATUS) =~ match_rss
    before = $1.to_i
    1000000.times { Unicorn::HttpParser.new }
    File.read(LINUX_PROC_PID_STATUS) =~ match_rss
    after = $1.to_i
    diff = after - before
    assert(diff < 10000, "memory grew more than 10M: #{diff}")
  end
end if RUBY_PLATFORM =~ /linux/ &&
       File.readable?(LINUX_PROC_PID_STATUS) &&
       !defined?(RUBY_ENGINE)