# -*- encoding: binary -*-
require "./test/fresh"
require "net/http"
# Skip this entire suite when the mogtool executable is not on PATH;
# `ok` gates the class definition at the bottom of the file.
ok = true
mogtool = `which mogtool 2>/dev/null`.strip
unless File.executable?(mogtool)
  warn "mogtool not found, skipping #{__FILE__}"
  ok = false
end
# Integration tests for mogtool's --bigfile support: mogtool splits a large
# file into "<uuid>,N" part keys plus a "_big_info:<uuid>" manifest, and
# MogileFS::MogileFS#bigfile_write reassembles it.  Requires a live
# tracker/storage setup (provided by TestFreshSetup) and the mogtool
# executable on PATH.
class TestMogtoolBigfile < Test::Unit::TestCase
  include TestFreshSetup

  # Generate ~100 MiB of random data once at class-definition time:
  # 1 KiB of urandom repeated into a 1 MiB chunk, written 100 times.
  buf = File.open("/dev/urandom") { |fp| fp.read(1024) }
  buf *= 1024
  RAND = Tempfile.new("rand")
  RAND.sync = true
  sha1 = Digest::SHA1.new
  100.times { sha1 << buf; RAND.write(buf) }
  buf = nil # drop the 1 MiB chunk so it can be GC-ed
  # SHA-1 of the entire random file; every read-back is checked against it.
  RAND_SHA1 = sha1.hexdigest

  # Boot a fresh MogileFS instance, rewind the shared random fixture, and
  # build a client plus a unique per-test bigfile key prefix.
  def setup
    setup_mogilefs
    add_host_device_domain
    RAND.rewind
    @big_uuid = "big-#{uuid}"
    @client = MogileFS::MogileFS.new(:hosts => @trackers, :domain => @domain)
  end
  alias teardown teardown_mogilefs

  # Run the mogtool CLI against this instance's trackers/domain; x! (from
  # TestFreshSetup) raises if the command exits non-zero.
  def mogtool!(*args)
    x!("mogtool", "--trackers=#{@trackers.join(',')}",
       "--domain=#@domain", *args)
  end

  # the mogtool definition of gzip is wrong and just raw zlib deflate
  def test_bigfile_gzip_mogtool
    mogtool!("inject", "--gzip", "--bigfile", RAND.path, @big_uuid)
    sha1_check
  end

  def test_bigfile_mogtool
    mogtool!("inject", "--bigfile", RAND.path, @big_uuid)
    sha1_check

    # ensure fallback works for rebalanced/replaced files: delete part 1,
    # re-store it under the same key, and wait for the old copies to be
    # reaped before re-verifying the whole bigfile
    part1 = "#@big_uuid,1"
    tmp = tmpfile("part1")
    before_uris = @client.get_uris(part1)
    @client.get_file_data(part1, tmp)
    @client.delete(part1)
    @client.store_file(part1, nil, tmp.path)
    wait_for_DELETE(before_uris)
    sha1_check

    # corrupt the existing data in part1
    @client.store_content(part1, nil, "HELLO")
    @client.get_uris(part1)

    # corruption is detected on verify
    junk = tmpfile("junk")
    assert_raises(MogileFS::ChecksumMismatchError) do
      @client.bigfile_write("_big_info:#@big_uuid", junk, :verify => true)
    end

    # corruption is NOT detected when :verify is false
    junk = tmpfile("junk")
    @client.bigfile_write("_big_info:#@big_uuid", junk, :verify => false)

    # restoring non-corrupted data succeeds!
    @client.store_file(part1, nil, tmp.path)
    sha1_check

    # missing parts fail, even before checksumming can happen
    before_uris = @client.get_uris(part1)
    @client.delete(part1)
    junk = tmpfile("junk")
    assert_raises(MogileFS::Backend::UnknownKeyError) do
      @client.bigfile_write("_big_info:#@big_uuid", junk, :verify => true)
    end
  end

  # Block until every URI in +uris+ stops answering HEAD with 200 OK,
  # i.e. until the queued DELETE has actually been processed by the
  # storage node.  Connection errors are retried (up to 666 * 0.1s)
  # before re-raising, since the server may transiently refuse.
  def wait_for_DELETE(uris)
    uris.each do |uri|
      tries = 0
      begin
        Net::HTTP.start(uri.host, uri.port) do |http|
          sleep(0.1) while Net::HTTPOK === http.head(uri.path)
        end
      rescue
        if (tries += 1) < 666
          sleep(0.1)
          retry
        end
        raise
      end
    end
  end

  # Stream the reassembled bigfile through a pipe, SHA-1-ing it in a
  # reader thread, then assert both the digest and the byte count
  # (res[0], returned by bigfile_write) match the original fixture.
  def sha1_check
    r, w = IO.pipe
    @to_close << r
    @to_close << w
    th = Thread.new do
      sha1 = Digest::SHA1.new
      buf = ""
      while r.read(16384, buf)
        sha1 << buf
      end
      sha1.hexdigest
    end
    res = @client.bigfile_write("_big_info:#@big_uuid", w, :verify => true)
    w.close # EOF for the reader thread so th.value can return
    read_sha1 = th.value
    assert_equal RAND_SHA1, read_sha1
    assert_equal RAND.size, res[0]
  end
end if ok