From 18e93c034db0c08bcc5b5f7a72a8fb6e909c009c Mon Sep 17 00:00:00 2001
From: evanweaver
Date: Mon, 24 Mar 2008 03:48:10 +0000
Subject: Backport Eric's changes to the http parser from trunk (Eric Wong).
 Apply fix for Ragel 6 (Eric Wong, Ry Dahl).

Two tests fail with the new parser (one failed with the old parser). Needs
investigation.

Close #12 (mongrel_rails send_signal leaves a filehandle open until gc).
Close #14 (mongrel_rails command line option --num-procs does not change the max number of procs).
Close #15 (mongrel squashes helpful exception in register method).
Close #16, XXX needs audit! (CGIWrapper "options" attr_reader has no corresponding @options variable).
Close #20 (Mongrel doesn't erase temporary files during its operation on Windows).
Close #19, XXX needs audit! (HttpResponse#reset does not properly reset HeaderOut).
Close #22 (gem_plugin should load gems from Gem.path, not Gem.dir).
Close #23 (mongrel_cluster's mongrel_rails configuration option isn't fully respected).

If I had git, being offline wouldn't have resulted in one massive commit.

git-svn-id: svn+ssh://rubyforge.org/var/svn/mongrel/branches/stable_1-2@995 19e92222-5c0b-0410-8929-a290d50e31e9
---
 test/unit/test_handlers.rb    |  10 +++
 test/unit/test_http11.rb      | 156 ------------------------------------------
 test/unit/test_http_parser.rb | 156 ++++++++++++++++++++++++++++++++++++++++++
 3 files changed, 166 insertions(+), 156 deletions(-)
 delete mode 100644 test/unit/test_http11.rb
 create mode 100644 test/unit/test_http_parser.rb

(limited to 'test')

diff --git a/test/unit/test_handlers.rb b/test/unit/test_handlers.rb
index 1d316e5..27c0c08 100644
--- a/test/unit/test_handlers.rb
+++ b/test/unit/test_handlers.rb
@@ -61,6 +61,16 @@ class HandlersTest < Test::Unit::TestCase
     @config.stop(false, true)
     File.delete "/tmp/testfile"
   end
+
+  def test_registration_exception_is_not_lost
+    assert_raises(Mongrel::URIClassifier::RegistrationError) do
+      @config = Mongrel::Configurator.new do
+        listener do
+          uri "bogus", :handler => SimpleHandler.new
+        end
+      end
+    end
+  end
 
   def test_more_web_server
     res = hit([ "http://localhost:9998/test",
diff --git a/test/unit/test_http11.rb b/test/unit/test_http11.rb
deleted file mode 100644
index e083abf..0000000
--- a/test/unit/test_http11.rb
+++ /dev/null
@@ -1,156 +0,0 @@
-# Copyright (c) 2005 Zed A. Shaw
-# You can redistribute it and/or modify it under the same terms as Ruby.
-#
-# Additional work donated by contributors. See http://mongrel.rubyforge.org/attributions.html
-# for more information.
-
-require 'test/test_helper'
-
-include Mongrel
-
-class HttpParserTest < Test::Unit::TestCase
-
-  def test_parse_simple
-    parser = HttpParser.new
-    req = {}
-    http = "GET / HTTP/1.1\r\n\r\n"
-    nread = parser.execute(req, http, 0)
-
-    assert nread == http.length, "Failed to parse the full HTTP request"
-    assert parser.finished?, "Parser didn't finish"
-    assert !parser.error?, "Parser had error"
-    assert nread == parser.nread, "Number read returned from execute does not match"
-
-    assert_equal 'HTTP/1.1', req['SERVER_PROTOCOL']
-    assert_equal '/', req['REQUEST_PATH']
-    assert_equal 'HTTP/1.1', req['HTTP_VERSION']
-    assert_equal '/', req['REQUEST_URI']
-    assert_equal 'CGI/1.2', req['GATEWAY_INTERFACE']
-    assert_equal 'GET', req['REQUEST_METHOD']
-    assert_nil req['FRAGMENT']
-    assert_nil req['QUERY_STRING']
-
-    parser.reset
-    assert parser.nread == 0, "Number read after reset should be 0"
-  end
-
-  def test_parse_dumbfuck_headers
-    parser = HttpParser.new
-    req = {}
-    should_be_good = "GET / HTTP/1.1\r\naaaaaaaaaaaaa:++++++++++\r\n\r\n"
-    nread = parser.execute(req, should_be_good, 0)
-    assert_equal should_be_good.length, nread
-    assert parser.finished?
-    assert !parser.error?
-
-    nasty_pound_header = "GET / HTTP/1.1\r\nX-SSL-Bullshit: -----BEGIN CERTIFICATE-----\r\n\tMIIFbTCCBFWgAwIBAgICH4cwDQYJKoZIhvcNAQEFBQAwcDELMAkGA1UEBhMCVUsx\r\n\tETAPBgNVBAoTCGVTY2llbmNlMRIwEAYDVQQLEwlBdXRob3JpdHkxCzAJBgNVBAMT\r\n\tAkNBMS0wKwYJKoZIhvcNAQkBFh5jYS1vcGVyYXRvckBncmlkLXN1cHBvcnQuYWMu\r\n\tdWswHhcNMDYwNzI3MTQxMzI4WhcNMDcwNzI3MTQxMzI4WjBbMQswCQYDVQQGEwJV\r\n\tSzERMA8GA1UEChMIZVNjaWVuY2UxEzARBgNVBAsTCk1hbmNoZXN0ZXIxCzAJBgNV\r\n\tBAcTmrsogriqMWLAk1DMRcwFQYDVQQDEw5taWNoYWVsIHBhcmQYJKoZIhvcNAQEB\r\n\tBQADggEPADCCAQoCggEBANPEQBgl1IaKdSS1TbhF3hEXSl72G9J+WC/1R64fAcEF\r\n\tW51rEyFYiIeZGx/BVzwXbeBoNUK41OK65sxGuflMo5gLflbwJtHBRIEKAfVVp3YR\r\n\tgW7cMA/s/XKgL1GEC7rQw8lIZT8RApukCGqOVHSi/F1SiFlPDxuDfmdiNzL31+sL\r\n\t0iwHDdNkGjy5pyBSB8Y79dsSJtCW/iaLB0/n8Sj7HgvvZJ7x0fr+RQjYOUUfrePP\r\n\tu2MSpFyf+9BbC/aXgaZuiCvSR+8Snv3xApQY+fULK/xY8h8Ua51iXoQ5jrgu2SqR\r\n\twgA7BUi3G8LFzMBl8FRCDYGUDy7M6QaHXx1ZWIPWNKsCAwEAAaOCAiQwggIgMAwG\r\n\tA1UdEwEB/wQCMAAwEQYJYIZIAYb4QgEBBAQDAgWgMA4GA1UdDwEB/wQEAwID6DAs\r\n\tBglghkgBhvhCAQ0EHxYdVUsgZS1TY2llbmNlIFVzZXIgQ2VydGlmaWNhdGUwHQYD\r\n\tVR0OBBYEFDTt/sf9PeMaZDHkUIldrDYMNTBZMIGaBgNVHSMEgZIwgY+AFAI4qxGj\r\n\tloCLDdMVKwiljjDastqooXSkcjBwMQswCQYDVQQGEwJVSzERMA8GA1UEChMIZVNj\r\n\taWVuY2UxEjAQBgNVBAsTCUF1dGhvcml0eTELMAkGA1UEAxMCQ0ExLTArBgkqhkiG\r\n\t9w0BCQEWHmNhLW9wZXJhdG9yQGdyaWQtc3VwcG9ydC5hYy51a4IBADApBgNVHRIE\r\n\tIjAggR5jYS1vcGVyYXRvckBncmlkLXN1cHBvcnQuYWMudWswGQYDVR0gBBIwEDAO\r\n\tBgwrBgEEAdkvAQEBAQYwPQYJYIZIAYb4QgEEBDAWLmh0dHA6Ly9jYS5ncmlkLXN1\r\n\tcHBvcnQuYWMudmT4sopwqlBWsvcHViL2NybC9jYWNybC5jcmwwPQYJYIZIAYb4QgEDBDAWLmh0\r\n\tdHA6Ly9jYS5ncmlkLXN1cHBvcnQuYWMudWsvcHViL2NybC9jYWNybC5jcmwwPwYD\r\n\tVR0fBDgwNjA0oDKgMIYuaHR0cDovL2NhLmdyaWQt5hYy51ay9wdWIv\r\n\tY3JsL2NhY3JsLmNybDANBgkqhkiG9w0BAQUFAAOCAQEAS/U4iiooBENGW/Hwmmd3\r\n\tXCy6Zrt08YjKCzGNjorT98g8uGsqYjSxv/hmi0qlnlHs+k/3Iobc3LjS5AMYr5L8\r\n\tUO7OSkgFFlLHQyC9JzPfmLCAugvzEbyv4Olnsr8hbxF1MbKZoQxUZtMVu29wjfXk\r\n\thTeApBv7eaKCWpSp7MCbvgzm74izKhu3vlDk9w6qVrxePfGgpKPqfHiOoGhFnbTK\r\n\twTC6o2xq5y0qZ03JonF7OJspEd3I5zKY3E+ov7/ZhW6DqT8UFvsAdjvQbXyhV8Eu\r\n\tYhixw1aKEPzNjNowuIseVogKOLXxWI5vAi5HgXdS0/ES5gDGsABo4fqovUKlgop3\r\n\tRA==\r\n\t-----END CERTIFICATE-----\r\n\r\n"
-    parser = HttpParser.new
-    req = {}
-    #nread = parser.execute(req, nasty_pound_header, 0)
-    #assert_equal nasty_pound_header.length, nread
-    #assert parser.finished?
-    #assert !parser.error?
-  end
-
-  def test_parse_error
-    parser = HttpParser.new
-    req = {}
-    bad_http = "GET / SsUTF/1.1"
-
-    error = false
-    begin
-      nread = parser.execute(req, bad_http, 0)
-    rescue => details
-      error = true
-    end
-
-    assert error, "failed to throw exception"
-    assert !parser.finished?, "Parser shouldn't be finished"
-    assert parser.error?, "Parser SHOULD have error"
-  end
-
-  def test_fragment_in_uri
-    parser = HttpParser.new
-    req = {}
-    get = "GET /forums/1/topics/2375?page=1#posts-17408 HTTP/1.1\r\n\r\n"
-    assert_nothing_raised do
-      parser.execute(req, get, 0)
-    end
-    assert parser.finished?
-    assert_equal '/forums/1/topics/2375?page=1', req['REQUEST_URI']
-    assert_equal 'posts-17408', req['FRAGMENT']
-  end
-
-  # lame random garbage maker
-  def rand_data(min, max, readable=true)
-    count = min + ((rand(max)+1) *10).to_i
-    res = count.to_s + "/"
-
-    if readable
-      res << Digest::SHA1.hexdigest(rand(count * 100).to_s) * (count / 40)
-    else
-      res << Digest::SHA1.digest(rand(count * 100).to_s) * (count / 20)
-    end
-
-    return res
-  end
-
-
-  def test_horrible_queries
-    parser = HttpParser.new
-
-    # then that large header names are caught
-    10.times do |c|
-      get = "GET /#{rand_data(10,120)} HTTP/1.1\r\nX-#{rand_data(1024, 1024+(c*1024))}: Test\r\n\r\n"
-      assert_raises Mongrel::HttpParserError do
-        parser.execute({}, get, 0)
-        parser.reset
-      end
-    end
-
-    # then that large mangled field values are caught
-    10.times do |c|
-      get = "GET /#{rand_data(10,120)} HTTP/1.1\r\nX-Test: #{rand_data(1024, 1024+(c*1024), false)}\r\n\r\n"
-      assert_raises Mongrel::HttpParserError do
-        parser.execute({}, get, 0)
-        parser.reset
-      end
-    end
-
-    # then large headers are rejected too
-    get = "GET /#{rand_data(10,120)} HTTP/1.1\r\n"
-    get << "X-Test: test\r\n" * (80 * 1024)
-    assert_raises Mongrel::HttpParserError do
-      parser.execute({}, get, 0)
-      parser.reset
-    end
-
-    # finally just that random garbage gets blocked all the time
-    10.times do |c|
-      get = "GET #{rand_data(1024, 1024+(c*1024), false)} #{rand_data(1024, 1024+(c*1024), false)}\r\n\r\n"
-      assert_raises Mongrel::HttpParserError do
-        parser.execute({}, get, 0)
-        parser.reset
-      end
-    end
-
-  end
-
-
-
-  def test_query_parse
-    res = HttpRequest.query_parse("zed=1&frank=#{HttpRequest.escape('&&& ')}")
-    assert res["zed"], "didn't get the request right"
-    assert res["frank"], "no frank"
-    assert_equal "1", res["zed"], "wrong result"
-    assert_equal "&&& ", HttpRequest.unescape(res["frank"]), "wrong result"
-
-    res = HttpRequest.query_parse("zed=1&zed=2&zed=3&frank=11;zed=45")
-    assert res["zed"], "didn't get the request right"
-    assert res["frank"], "no frank"
-    assert_equal 4,res["zed"].length, "wrong number for zed"
-    assert_equal "11",res["frank"], "wrong number for frank"
-  end
-
-end
-
diff --git a/test/unit/test_http_parser.rb b/test/unit/test_http_parser.rb
new file mode 100644
index 0000000..d8f3fe8
--- /dev/null
+++ b/test/unit/test_http_parser.rb
@@ -0,0 +1,156 @@
+# Copyright (c) 2005 Zed A. Shaw
+# You can redistribute it and/or modify it under the same terms as Ruby.
+#
+# Additional work donated by contributors. See http://mongrel.rubyforge.org/attributions.html
+# for more information.
+
+require 'test/test_helper'
+
+include Mongrel
+
+class HttpParserTest < Test::Unit::TestCase
+
+  def test_parse_simple
+    parser = HttpParser.new
+    req = {}
+    http = "GET / HTTP/1.1\r\n\r\n"
+    nread = parser.execute(req, http, 0)
+
+    assert nread == http.length, "Failed to parse the full HTTP request"
+    assert parser.finished?, "Parser didn't finish"
+    assert !parser.error?, "Parser had error"
+    assert nread == parser.nread, "Number read returned from execute does not match"
+
+    assert_equal 'HTTP/1.1', req['SERVER_PROTOCOL']
+    assert_equal '/', req['REQUEST_PATH']
+    assert_equal 'HTTP/1.1', req['HTTP_VERSION']
+    assert_equal '/', req['REQUEST_URI']
+    assert_equal 'CGI/1.2', req['GATEWAY_INTERFACE']
+    assert_equal 'GET', req['REQUEST_METHOD']
+    assert_nil req['FRAGMENT']
+    assert_nil req['QUERY_STRING']
+
+    parser.reset
+    assert parser.nread == 0, "Number read after reset should be 0"
+  end
+
+  def test_parse_strange_headers
+    parser = HttpParser.new
+    req = {}
+    should_be_good = "GET / HTTP/1.1\r\naaaaaaaaaaaaa:++++++++++\r\n\r\n"
+    nread = parser.execute(req, should_be_good, 0)
+    assert_equal should_be_good.length, nread
+    assert parser.finished?
+    assert !parser.error?
+
+    nasty_pound_header = "GET / HTTP/1.1\r\nX-SSL-Bullshit: -----BEGIN CERTIFICATE-----\r\n\tMIIFbTCCBFWgAwIBAgICH4cwDQYJKoZIhvcNAQEFBQAwcDELMAkGA1UEBhMCVUsx\r\n\tETAPBgNVBAoTCGVTY2llbmNlMRIwEAYDVQQLEwlBdXRob3JpdHkxCzAJBgNVBAMT\r\n\tAkNBMS0wKwYJKoZIhvcNAQkBFh5jYS1vcGVyYXRvckBncmlkLXN1cHBvcnQuYWMu\r\n\tdWswHhcNMDYwNzI3MTQxMzI4WhcNMDcwNzI3MTQxMzI4WjBbMQswCQYDVQQGEwJV\r\n\tSzERMA8GA1UEChMIZVNjaWVuY2UxEzARBgNVBAsTCk1hbmNoZXN0ZXIxCzAJBgNV\r\n\tBAcTmrsogriqMWLAk1DMRcwFQYDVQQDEw5taWNoYWVsIHBhcmQYJKoZIhvcNAQEB\r\n\tBQADggEPADCCAQoCggEBANPEQBgl1IaKdSS1TbhF3hEXSl72G9J+WC/1R64fAcEF\r\n\tW51rEyFYiIeZGx/BVzwXbeBoNUK41OK65sxGuflMo5gLflbwJtHBRIEKAfVVp3YR\r\n\tgW7cMA/s/XKgL1GEC7rQw8lIZT8RApukCGqOVHSi/F1SiFlPDxuDfmdiNzL31+sL\r\n\t0iwHDdNkGjy5pyBSB8Y79dsSJtCW/iaLB0/n8Sj7HgvvZJ7x0fr+RQjYOUUfrePP\r\n\tu2MSpFyf+9BbC/aXgaZuiCvSR+8Snv3xApQY+fULK/xY8h8Ua51iXoQ5jrgu2SqR\r\n\twgA7BUi3G8LFzMBl8FRCDYGUDy7M6QaHXx1ZWIPWNKsCAwEAAaOCAiQwggIgMAwG\r\n\tA1UdEwEB/wQCMAAwEQYJYIZIAYb4QgEBBAQDAgWgMA4GA1UdDwEB/wQEAwID6DAs\r\n\tBglghkgBhvhCAQ0EHxYdVUsgZS1TY2llbmNlIFVzZXIgQ2VydGlmaWNhdGUwHQYD\r\n\tVR0OBBYEFDTt/sf9PeMaZDHkUIldrDYMNTBZMIGaBgNVHSMEgZIwgY+AFAI4qxGj\r\n\tloCLDdMVKwiljjDastqooXSkcjBwMQswCQYDVQQGEwJVSzERMA8GA1UEChMIZVNj\r\n\taWVuY2UxEjAQBgNVBAsTCUF1dGhvcml0eTELMAkGA1UEAxMCQ0ExLTArBgkqhkiG\r\n\t9w0BCQEWHmNhLW9wZXJhdG9yQGdyaWQtc3VwcG9ydC5hYy51a4IBADApBgNVHRIE\r\n\tIjAggR5jYS1vcGVyYXRvckBncmlkLXN1cHBvcnQuYWMudWswGQYDVR0gBBIwEDAO\r\n\tBgwrBgEEAdkvAQEBAQYwPQYJYIZIAYb4QgEEBDAWLmh0dHA6Ly9jYS5ncmlkLXN1\r\n\tcHBvcnQuYWMudmT4sopwqlBWsvcHViL2NybC9jYWNybC5jcmwwPQYJYIZIAYb4QgEDBDAWLmh0\r\n\tdHA6Ly9jYS5ncmlkLXN1cHBvcnQuYWMudWsvcHViL2NybC9jYWNybC5jcmwwPwYD\r\n\tVR0fBDgwNjA0oDKgMIYuaHR0cDovL2NhLmdyaWQt5hYy51ay9wdWIv\r\n\tY3JsL2NhY3JsLmNybDANBgkqhkiG9w0BAQUFAAOCAQEAS/U4iiooBENGW/Hwmmd3\r\n\tXCy6Zrt08YjKCzGNjorT98g8uGsqYjSxv/hmi0qlnlHs+k/3Iobc3LjS5AMYr5L8\r\n\tUO7OSkgFFlLHQyC9JzPfmLCAugvzEbyv4Olnsr8hbxF1MbKZoQxUZtMVu29wjfXk\r\n\thTeApBv7eaKCWpSp7MCbvgzm74izKhu3vlDk9w6qVrxePfGgpKPqfHiOoGhFnbTK\r\n\twTC6o2xq5y0qZ03JonF7OJspEd3I5zKY3E+ov7/ZhW6DqT8UFvsAdjvQbXyhV8Eu\r\n\tYhixw1aKEPzNjNowuIseVogKOLXxWI5vAi5HgXdS0/ES5gDGsABo4fqovUKlgop3\r\n\tRA==\r\n\t-----END CERTIFICATE-----\r\n\r\n"
+    parser = HttpParser.new
+    req = {}
+    nread = parser.execute(req, nasty_pound_header, 0)
+    assert_equal nasty_pound_header.length, nread
+    assert parser.finished?
+    assert !parser.error?
+  end
+
+  def test_parse_error
+    parser = HttpParser.new
+    req = {}
+    bad_http = "GET / SsUTF/1.1"
+
+    error = false
+    begin
+      nread = parser.execute(req, bad_http, 0)
+    rescue => details
+      error = true
+    end
+
+    assert error, "failed to throw exception"
+    assert !parser.finished?, "Parser shouldn't be finished"
+    assert parser.error?, "Parser SHOULD have error"
+  end
+
+  def test_fragment_in_uri
+    parser = HttpParser.new
+    req = {}
+    get = "GET /forums/1/topics/2375?page=1#posts-17408 HTTP/1.1\r\n\r\n"
+    assert_nothing_raised do
+      parser.execute(req, get, 0)
+    end
+    assert parser.finished?
+    assert_equal '/forums/1/topics/2375?page=1', req['REQUEST_URI']
+    assert_equal 'posts-17408', req['FRAGMENT']
+  end
+
+  # lame random garbage maker
+  def rand_data(min, max, readable=true)
+    count = min + ((rand(max)+1) *10).to_i
+    res = count.to_s + "/"
+
+    if readable
+      res << Digest::SHA1.hexdigest(rand(count * 100).to_s) * (count / 40)
+    else
+      res << Digest::SHA1.digest(rand(count * 100).to_s) * (count / 20)
+    end
+
+    return res
+  end
+
+
+  def test_horrible_queries
+    parser = HttpParser.new
+
+    # then that large header names are caught
+    10.times do |c|
+      get = "GET /#{rand_data(10,120)} HTTP/1.1\r\nX-#{rand_data(1024, 1024+(c*1024))}: Test\r\n\r\n"
+      assert_raises Mongrel::HttpParserError do
+        parser.execute({}, get, 0)
+        parser.reset
+      end
+    end
+
+    # then that large mangled field values are caught
+    10.times do |c|
+      get = "GET /#{rand_data(10,120)} HTTP/1.1\r\nX-Test: #{rand_data(1024, 1024+(c*1024), false)}\r\n\r\n"
+      assert_raises Mongrel::HttpParserError do
+        parser.execute({}, get, 0)
+        parser.reset
+      end
+    end
+
+    # then large headers are rejected too
+    get = "GET /#{rand_data(10,120)} HTTP/1.1\r\n"
+    get << "X-Test: test\r\n" * (80 * 1024)
+    assert_raises Mongrel::HttpParserError do
+      parser.execute({}, get, 0)
+      parser.reset
+    end
+
+    # finally just that random garbage gets blocked all the time
+    10.times do |c|
+      get = "GET #{rand_data(1024, 1024+(c*1024), false)} #{rand_data(1024, 1024+(c*1024), false)}\r\n\r\n"
+      assert_raises Mongrel::HttpParserError do
+        parser.execute({}, get, 0)
+        parser.reset
+      end
+    end
+
+  end
+
+
+
+  def test_query_parse
+    res = HttpRequest.query_parse("zed=1&frank=#{HttpRequest.escape('&&& ')}")
+    assert res["zed"], "didn't get the request right"
+    assert res["frank"], "no frank"
+    assert_equal "1", res["zed"], "wrong result"
+    assert_equal "&&& ", HttpRequest.unescape(res["frank"]), "wrong result"
+
+    res = HttpRequest.query_parse("zed=1&zed=2&zed=3&frank=11;zed=45")
+    assert res["zed"], "didn't get the request right"
+    assert res["frank"], "no frank"
+    assert_equal 4,res["zed"].length, "wrong number for zed"
+    assert_equal "11",res["frank"], "wrong number for frank"
+  end
+
+end
+
--
cgit v1.2.3-24-ge0c7
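
For readers following the backport without the C extension handy, the parser API these tests exercise is small: HttpParser#execute fills a CGI-style env hash and returns the number of bytes consumed, #finished?/#error?/#nread report parser state, HttpParserError is raised on malformed or oversized input, and #reset readies the parser for reuse. The snippet below is a minimal sketch assembled only from the calls shown in the tests above; the require line and the sample request are assumptions for illustration, not part of this patch.

    require 'mongrel'

    # Minimal usage sketch of the parser exercised by test_http_parser.rb.
    parser = Mongrel::HttpParser.new
    env    = {}                                # execute fills in CGI-style keys
    data   = "GET /demo?x=1 HTTP/1.1\r\n\r\n"  # hypothetical request

    begin
      nread = parser.execute(env, data, 0)     # start parsing at offset 0
      if parser.finished?
        # e.g. env['REQUEST_METHOD'] => "GET", env['QUERY_STRING'] => "x=1"
        puts "parsed #{nread} of #{data.length} bytes for #{env['REQUEST_URI']}"
      end
    rescue Mongrel::HttpParserError
      warn "malformed request (parser.error? => #{parser.error?})"
    end

    parser.reset                               # parser.nread returns to 0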