1 # -*- encoding: binary -*-
4 # Used for reading deprecated "bigfile" objects generated by the deprecated
5 # mogtool(1) utility. This is for reading legacy data and not recommended for
6 # new projects. MogileFS itself is capable of storing standalone objects
7 # of arbitrary length (as long as the underlying database and underlying
8 # filesystem on the DAV devices accept them).
module MogileFS::Bigfile
# Kept for historical reference: the "type" values mogtool wrote into
# _big_info entries ("file", "tarball", "partition"); never enforced here.
# VALID_TYPES = %w(file tarball partition).map { |x| x.freeze }.freeze
# returns a big_info hash if successful
# NOTE(review): the enclosing "def bigfile_stat(key)" line is elided from
# this chunk; the visible body fetches the raw _big_info contents for the
# given key and parses them with bigfile_parse_info.
bigfile_parse_info(get_file_data(key))
# returns total bytes written and the big_info hash if successful, raises an
# exception if not. wr_io is expected to be an IO-like object capable of
# receiving the write method.
#
# NOTE(review): several lines of this method are elided in this chunk
# (e.g. the initialization of +total+, the guard condition before the
# NoDevices raise, the closing "end"s and the return value); the comments
# below describe only what the visible code establishes.
def bigfile_write(key, wr_io, opts = { :verify => false })
info = bigfile_stat(key)
# per-request timeout reused for every chunk fetch below
t = @get_file_data_timeout
# we only decode raw zlib deflated streams that mogtool (unfortunately)
# generates. tarballs and gzip(1) are up to the application to decrypt.
if info[:compressed] || opts[:verify]
# lazy require: the zlib/md5 filter wrapper is only needed when
# decompressing or verifying, so plain copies never pay this cost
require 'mogilefs/bigfile/filter'
# wrap the caller-supplied IO; Filter exposes md5_check!/flushed_bytes
# which the respond_to? checks below rely on
wr_io = MogileFS::Bigfile::Filter.new(wr_io, info, opts)
info[:parts].each_with_index do |part,part_nr|
next if part_nr == 0 # info[:parts][0] is always empty
# first reachable path from the receipt recorded by mogtool
sock = MogileFS::HTTPReader.first(part[:paths], t)
# part[:paths] may not be valid anymore due to rebalancing, however we
# can get_keys on key,<part_nr> and retry paths if all paths fail
part_key = "#{key.sub(/^_big_info:/, '')},#{part_nr}"
paths = get_paths(part_key)
# NOTE(review): the guard condition for this raise (presumably "paths
# empty") is elided from this chunk — confirm against the full source
raise MogileFS::Backend::NoDevices,
"no device for key=#{part_key.inspect}", []
sock = MogileFS::HTTPReader.first(paths, t)
w = MogileFS.io.copy_stream(sock, wr_io)
# verify the chunk's md5 when the filter wrapper is in use
wr_io.respond_to?(:md5_check!) and wr_io.md5_check!(part[:md5])
# account for bytes buffered by the filter that copy_stream did not see
total += wr_io.flushed_bytes if wr_io.respond_to?(:flushed_bytes)
# parses the contents of a _big_info: string or IO object
# NOTE(review): this chunk elides several lines (the initialization of the
# +rv+ result hash, the "case line" dispatch header, the bodies of some
# branches, and the closing "end"s); comments below describe only the
# visible branches. Returns the populated +rv+ hash.
def bigfile_parse_info(info) # :nodoc:
info.each_line do |line|
# free-form string fields ("des", "type", "filename") copied verbatim
when /^(des|type|filename)\s+(.+)$/
# "compressed" is a 0/1 flag; convert to a Ruby boolean
when /^compressed\s+([01])$/
rv[:compressed] = ($1 == '1')
# numeric receipt fields stored as Integers under symbol keys
when /^(chunks|size)\s+(\d+)$/
rv[$1.to_sym] = $2.to_i
# per-chunk line: 1-based index, byte count, md5 hex digest, and a
# comma-separated list of HTTP paths where the chunk may be fetched
when /^part\s+(\d+)\s+bytes=(\d+)\s+md5=(.+)\s+paths:\s+(.+)$/
rv[:parts][$1.to_i] = {
:paths => $4.split(/\s*,\s*/),
91 # Copied from mogtool:
92 # http://code.sixapart.com/svn/mogilefs/utils/mogtool, r1221
# this is a temporary file that we delete when we're done recording all chunks
98 starttime=UNIXTIMESTAMP
100 # when done, we write the _info file and delete the _pre.
104 des Cow's ljdb backup as of 2004-11-17
105 type { partition, file, tarball }
107 filename ljbinlog.305.gz
108 partblocks 234324324324
111 part 1 <bytes> <md5hex>
112 part 2 <bytes> <md5hex>
113 part 3 <bytes> <md5hex>
114 part 4 <bytes> <md5hex>
115 part 5 <bytes> <md5hex>
BEGIN MOGTOOL RECEIPT
129 part 1 bytes=23423432 md5=2349823948239423984 paths: http://dev5/2/23/23/.fid, http://dev6/23/423/4/324.fid
130 part 1 bytes=23423432 md5=2349823948239423984 paths: http://dev5/2/23/23/.fid, http://dev6/23/423/4/324.fid
131 part 1 bytes=23423432 md5=2349823948239423984 paths: http://dev5/2/23/23/.fid, http://dev6/23/423/4/324.fid
132 part 1 bytes=23423432 md5=2349823948239423984 paths: http://dev5/2/23/23/.fid, http://dev6/23/423/4/324.fid