From 347c41d307f85b1264fbf1c06fff9e5c8cb2e091 Mon Sep 17 00:00:00 2001
From: Eric Wong
Date: Sun, 6 Nov 2011 04:39:08 +0000
Subject: [PATCH] tests: integration tests for mogtool --bigfile

We need to test deprecated behavior to not break expectations
for large, archived files in the future.
---
 test/integration.rb          |  82 ++++++++++++++++++++++
 test/test_mogtool_bigfile.rb | 116 +++++++++++++++++++++++++++++++++++++++
 2 files changed, 198 insertions(+)
 create mode 100644 test/integration.rb
 create mode 100644 test/test_mogtool_bigfile.rb

diff --git a/test/integration.rb b/test/integration.rb
new file mode 100644
index 0000000..548cb02
--- /dev/null
+++ b/test/integration.rb
@@ -0,0 +1,82 @@
+# -*- encoding: binary -*-
+$stdout.sync = $stderr.sync = true
+require 'test/unit'
+require 'securerandom'
+require 'tempfile'
+require 'digest'
+require 'mogilefs'
+
+class TestMogIntegration < Test::Unit::TestCase
+  def uuid
+    SecureRandom.uuid
+  rescue NoMethodError
+    require "uuid"
+    UUID.generate
+  end
+
+  def setup
+    @to_close = []
+    @trackers = ENV["MOG_TEST_TRACKERS"].split(/,/)
+    now = Time.now
+    domain = "rbmogtest#{now.strftime('%Y%m%d%H%M%S')}.#{uuid}"
+    mogadm!("domain", "add", domain)
+    sleep 4 # wait for monitor to refresh caches
+    @domain = domain
+  end
+
+  def mogadm(*args)
+    x("mogadm", "--trackers=#{@trackers.join(',')}", *args)
+  end
+
+  def x(*cmd)
+    out, err = tmpfile("out"), tmpfile("err")
+    puts cmd.join(' ') if $VERBOSE
+    pid = fork do
+      $stderr.reopen(err.path, "a")
+      $stdout.reopen(out.path, "a")
+      out.close
+      err.close
+      exec(*cmd)
+    end
+    _, status = Process.waitpid2(pid)
+    out.rewind
+    err.rewind
+    [ status, out, err ]
+  end
+
+  def mogadm!(*args)
+    status, out, err = mogadm(*args)
+    assert status.success?, "#{status.inspect} / #{out.read} / #{err.read}"
+    [ status, out, err ]
+  end
+
+  def x!(*cmd)
+    status, out, err = x(*cmd)
+    assert status.success?, "#{status.inspect} / #{out.read} / #{err.read}"
+    [ status, out, err ]
+  end
+
+  def tmpfile(name)
+    tmp = Tempfile.new(name)
+    @to_close << tmp
+    tmp
+  end
+
+  def teardown
+    if defined?(@domain)
+      client = MogileFS::MogileFS.new :hosts => @trackers, :domain => @domain
+      client.each_key("") { |key|
+        p [ :delete, key ] if $VERBOSE
+        client.delete(key)
+      }
+      mogadm!("domain", "delete", @domain)
+    end
+    @to_close.each do |io|
+      io.closed? or io.close
+    end
+  end
+end if ENV["MOG_TEST_TRACKERS"]
+
+class TestMogIntegration
+  warn "MOG_TEST_TRACKERS not defined"
+end unless ENV["MOG_TEST_TRACKERS"]
diff --git a/test/test_mogtool_bigfile.rb b/test/test_mogtool_bigfile.rb
new file mode 100644
index 0000000..c268184
--- /dev/null
+++ b/test/test_mogtool_bigfile.rb
@@ -0,0 +1,116 @@
+# -*- encoding: binary -*-
+require "./test/integration"
+require "net/http"
+ok = true
+unless File.executable?(`which mogtool 2>/dev/null`.strip)
+  warn "mogtool not found, skipping #{__FILE__}"
+  ok = false
+end
+
+class TestMogtoolBigfile < TestMogIntegration
+  buf = File.open("/dev/urandom") { |fp| fp.read(1024) }
+  buf *= 1024
+  RAND = Tempfile.new("rand")
+  RAND.sync = true
+  sha1 = Digest::SHA1.new
+  100.times { sha1 << buf; RAND.write(buf) }
+  buf = nil
+  RAND_SHA1 = sha1.hexdigest
+
+  def setup
+    super
+    RAND.rewind
+    @big_uuid = "big-#{uuid}"
+    @client = MogileFS::MogileFS.new(:hosts => @trackers, :domain => @domain)
+  end
+
+  def mogtool!(*args)
+    x!("mogtool", "--trackers=#{@trackers.join(',')}",
+       "--domain=#@domain", *args)
+  end
+
+  # the mogtool definition of gzip is wrong and just raw zlib deflate
+  def test_bigfile_gzip_mogtool
+    mogtool!("inject", "--gzip", "--bigfile", RAND.path, @big_uuid)
+    sha1_check
+  end
+
+  def test_bigfile_mogtool
+    mogtool!("inject", "--bigfile", RAND.path, @big_uuid)
+    sha1_check
+
+    # ensure fallback works for rebalanced/replaced files
+    part1 = "#@big_uuid,1"
+    tmp = tmpfile("part1")
+    before_uris = @client.get_uris(part1)
+    @client.get_file_data(part1) { |sock| MogileFS::X.copy_stream(sock, tmp) }
+    @client.delete(part1)
+    @client.store_file(part1, nil, tmp.path)
+    wait_for_DELETE(before_uris)
+    sha1_check
+
+    # corrupt the existing data in part1
+    @client.store_content(part1, nil, "HELLO")
+    after_uris = @client.get_uris(part1)
+
+    # corruption is detected on verify
+    junk = tmpfile("junk")
+    assert_raises(MogileFS::ChecksumMismatchError) do
+      @client.bigfile_write("_big_info:#@big_uuid", junk, :verify => true)
+    end
+
+    # corruption is NOT detected when verify is disabled
+    junk = tmpfile("junk")
+    assert_nothing_raised do
+      @client.bigfile_write("_big_info:#@big_uuid", junk, :verify => false)
+    end
+
+    # restoring non-corrupted data succeeds!
+    @client.store_file(part1, nil, tmp.path)
+    sha1_check
+
+    # missing parts fail
+    before_uris = @client.get_uris(part1)
+    @client.delete(part1)
+    junk = tmpfile("junk")
+    assert_raises(MogileFS::Backend::UnknownKeyError) do
+      @client.bigfile_write("_big_info:#@big_uuid", junk, :verify => true)
+    end
+  end
+
+  def wait_for_DELETE(uris)
+    uris.each do |uri|
+      tries = 0
+      begin
+        Net::HTTP.start(uri.host, uri.port) do |http|
+          sleep(0.1) while Net::HTTPOK === http.head(uri.path)
+        end
+      rescue
+        if (tries += 1) < 666
+          sleep(0.1)
+          retry
+        end
+        raise
+      end
+    end
+  end
+
+  def sha1_check
+    r, w = IO.pipe
+    @to_close << r
+    @to_close << w
+    th = Thread.new do
+      sha1 = Digest::SHA1.new
+      buf = ""
+      while r.read(16384, buf)
+        sha1 << buf
+      end
+      sha1.hexdigest
+    end
+    res = @client.bigfile_write("_big_info:#@big_uuid", w, :verify => true)
+    w.close
+    read_sha1 = th.value
+    assert_equal RAND_SHA1, read_sha1
+    assert_equal RAND.size, res[0]
+  end
+end if ok
-- 
2.11.4.GIT