1 # -*- encoding: binary -*-
3 # \MogileFS file manipulation client.
5 # Create a new instance that will communicate with these trackers:
6 # hosts = %w[192.168.1.69:6001 192.168.1.70:6001]
7 # mg = MogileFS::MogileFS.new(:domain => 'test', :hosts => hosts)
9 # # Stores "A bunch of text to store" into 'some_key' with a class of 'text'.
10 # mg.store_content('some_key', 'text', "A bunch of text to store")
12 # # Retrieve data from 'some_key' as a string
13 # data = mg.get_file_data('some_key')
15 # # Store the contents of 'image.jpeg' into the key 'my_image' with a
16 # # class of 'image'.
17 # mg.store_file('my_image', 'image', 'image.jpeg')
19 # # Store the contents of 'image.jpeg' into the key 'my_image' with a
20 # # class of 'image' using an open IO object.
21 # File.open('image.jpeg') { |fp| mg.store_file('my_image', 'image', fp) }
23 # # Retrieve the contents of 'my_image' into '/path/to/huge_file'
24 # # without slurping the entire contents into memory:
25 # mg.get_file_data('my_image', '/path/to/huge_file')
27 # # Remove the key 'my_image' and 'some_key'.
28 # mg.delete('my_image')
29 # mg.delete('some_key')
31 class MogileFS::MogileFS < MogileFS::Client
# Pulls in large-file helpers (store_big / bigfile key handling).
32 include MogileFS::Bigfile

34 # The domain of keys for this MogileFS client.
# NOTE(review): the attr_reader for @domain (orig ~line 35) is not visible
# in this extract; methods below call bare `domain`, so confirm
# `attr_reader :domain` exists nearby.

37 # The timeout for get_file_data. Defaults to five seconds.
38 attr_accessor :get_file_data_timeout
40 # Creates a new MogileFS::MogileFS instance. +args+ must include a key
41 # :domain specifying the domain of this client.
# Raises ArgumentError when :domain is missing.
42 def initialize(args = {})
43 @domain = args[:domain]
# Read timeout (seconds) used by #get_file_data; overridable via the
# attr_accessor declared above.
45 @get_file_data_timeout = 5
47 raise ArgumentError, "you must specify a domain" unless @domain
# Optional SQL backend that bypasses the tracker for read-only queries.
# NOTE(review): the branch body and the call to super (orig lines 50-55)
# are missing from this extract.
49 if @backend = args[:db_backend]
56 # Enumerates keys, limited by optional +prefix+
# Pages through the keyspace 1000 keys per tracker round-trip, yielding
# each key to +block+ via list_keys, until a chunk comes back empty.
57 def each_key(prefix = "", &block)
# NOTE(review): the initialization of +after+ and the opening `begin`
# (orig lines 58-59) are missing from this extract; +after+ carries the
# pagination cursor returned by list_keys between iterations.
60 keys, after = list_keys(prefix, after, 1000, &block)
61 end while keys && keys[0]
65 # Retrieves the contents of +key+. If +dst+ is specified, +dst+
66 # should be an IO-like object capable of receiving the +write+ method
67 # or a path name. +copy_length+ may be specified to limit the number of
68 # bytes to retrieve, and +src_offset+ can be specified to specify the
69 # start position of the copy.
70 def get_file_data(key, dst = nil, copy_length = nil, src_offset = nil)
# Resolve replica URLs for the key, then connect to the first readable one
# within @get_file_data_timeout seconds.
71 paths = get_paths(key)
72 sock = MogileFS::HTTPReader.first(paths, @get_file_data_timeout,
73 copy_length, src_offset)
# NOTE(review): the read/copy body and its begin/ensure wrapper
# (orig lines 74-81) are missing from this extract; the line below is the
# cleanup that closes the socket if it is still open.
82 sock.close if sock && ! sock.closed?
85 # Get the paths (URLs as strings) for +key+. If +args+ is specified,
87 # - :noverify -> boolean, whether or not the tracker checks (default: true)
88 # - :pathcount -> a positive integer of URLs to retrieve (default: 2)
89 # - :zone -> "alt" or nil (default: nil)
91 # :noverify defaults to true because this client library is capable of
92 # verifying paths for readability itself. It is also faster and more
93 # reliable to verify paths on the client.
94 def get_paths(key, *args)
# NOTE(review): the normalization of +args+ and the construction of the
# +opts+ request hash (orig lines 95-102) are missing from this extract;
# below, +args+ has already become a single options hash.
103 opts[:noverify] = args[:noverify]
104 opts[:zone] = args[:zone]
105 pathcount = args[:pathcount] and opts[:pathcount] = pathcount.to_i
# Tracker protocol wants 0/1: send 0 only when the caller passed an
# explicit false; any other value (including nil) becomes 1 (no verify).
108 opts[:noverify] = false == opts[:noverify] ? 0 : 1
# A DB backend answers directly, skipping the tracker round-trip.
109 @backend.respond_to?(:_get_paths) and return @backend._get_paths(opts)
110 res = @backend.get_paths(opts)
# Response is flat: {"paths"=>N, "path1"=>..., "pathN"=>...}; collect the
# URLs in order.
111 (1..res['paths'].to_i).map { |i| res["path#{i}"] }
114 # Returns +true+ if +key+ exists, +false+ if not
# NOTE(review): the `def exist?(key)` line and the initialization of +rv+
# (orig lines 115-116) are missing from this extract, as is the trailing
# return of +rv+.
117 args = { :key => key, :domain => @domain }
# Issue a pipelined get_paths request; the callback marks +rv+ true only
# when the tracker answers with a Hash (i.e. the key resolved to paths).
118 @backend.pipeline_dispatch(:get_paths, args) { |x| rv = (Hash === x) }
# Block until that single pipelined response has been consumed.
119 @backend.pipeline_wait(1)
123 # Get the URIs for +key+ (paths) as URI::HTTP objects
# Accepts the same optional +args+ as #get_paths; each returned URL
# string is parsed in place (map! on the fresh array from get_paths).
124 def get_uris(key, *args)
125 get_paths(key, *args).map! { |path| URI.parse(path) }
128 # Creates a new file +key+ in +klass+. +bytes+ is currently unused.
129 # Consider using store_file instead of this method for large files.
130 # This requires a block passed to it and operates like File.open.
131 # This atomically replaces existing data stored as +key+ when
# the upload is committed. NOTE(review): this doc sentence is truncated
# in this extract — confirm the exact wording upstream.
132 def new_file(key, klass = nil, bytes = 0) # :yields: file
133 raise MogileFS::ReadOnlyError if readonly?
# Ask the tracker to open a new fid; :multi_dest requests several
# candidate upload destinations in one response.
134 opts = { :domain => @domain, :key => key, :multi_dest => 1 }
# "default" is the implicit class; omit it from the request.
135 opts[:class] = klass if klass && klass != "default"
136 res = @backend.create_open(opts)
# Normalize the response into [[devid, path], ...] pairs.
138 dests = if dev_count = res['dev_count'] # multi_dest succeeded
139 (1..dev_count.to_i).map do |i|
140 [res["devid_#{i}"], res["path_#{i}"]]
# NOTE(review): the closing `end` of the map block (orig line 141) is
# missing from this extract.
142 else # single destination returned
143 # 0x0040: d0e4 4f4b 2064 6576 6964 3d31 2666 6964 ..OK.devid=1&fid
144 # 0x0050: 3d33 2670 6174 683d 6874 7470 3a2f 2f31 =3&path=http://1
145 # 0x0060: 3932 2e31 3638 2e31 2e37 323a 3735 3030 92.168.1.72:7500
146 # 0x0070: 2f64 6576 312f 302f 3030 302f 3030 302f /dev1/0/000/000/
147 # 0x0080: 3030 3030 3030 3030 3033 2e66 6964 0d0a 0000000003.fid..
149 [[res['devid'], res['path']]]
# Dispatch on the scheme of the first destination; `rescue nil` tolerates
# a malformed/empty dests structure and falls through to the error arms.
152 case (dests[0][1] rescue nil)
153 when /^http:\/\// then
154 http_file = MogileFS::HTTPFile.new(dests, bytes)
# NOTE(review): the yield of +http_file+ to the caller's block
# (orig line 155) is missing from this extract.
156 rv = http_file.commit
# Tell the tracker which device/path actually received the upload so it
# can link the new fid to +key+.
157 @backend.create_close(:fid => res['fid'],
158 :devid => http_file.devid,
161 :path => http_file.uri.to_s,
# NOTE(review): the remaining create_close arguments, the return of +rv+,
# and the `when nil, ''` case arm (orig lines 159-164 and 167) are
# missing from this extract; the raises below are the error arms.
165 raise MogileFS::EmptyPathError,
166 "Empty path for mogile upload res=#{res.inspect}"
# Non-HTTP destinations (e.g. NFS paths) are not supported by this client.
168 raise MogileFS::UnsupportedPathError,
169 "paths '#{dests.inspect}' returned by backend is not supported"
173 # Copies the contents of +file+ into +key+ in class +klass+. +file+ can be
174 # either a path name (String or Pathname object) or an IO-like object that
175 # responds to #read or #readpartial. Returns size of +file+ stored.
176 # This atomically replaces existing data stored as +key+
177 def store_file(key, klass, file)
178 raise MogileFS::ReadOnlyError if readonly?
# Delegate to new_file; assigning big_io streams +file+ to the storage
# node without slurping the whole thing into memory.
180 new_file(key, klass) { |mfp| mfp.big_io = file }
183 # Stores +content+ into +key+ in class +klass+, where +content+ is a String
184 # This atomically replaces existing data stored as +key+
185 def store_content(key, klass, content)
186 raise MogileFS::ReadOnlyError if readonly?
188 new_file key, klass do |mfp|
# A StoreContent wrapper carries a streaming callback; hand it to the
# HTTP file object instead of treating it as a plain String body.
189 if content.is_a?(MogileFS::Util::StoreContent)
190 mfp.streaming_io = content
# NOTE(review): the else branch (writing a plain String body) and the
# closing lines of this method (orig lines 191-195) are missing from
# this extract.
# Removes +key+ from the domain via the tracker's delete command.
# NOTE(review): the `def delete(key)` line and its doc comment
# (orig lines 196-198) are missing from this extract.
199 raise MogileFS::ReadOnlyError if readonly?
201 @backend.delete :domain => @domain, :key => key
205 # Sleeps +duration+, only used for testing
# Delegates to the tracker's sleep command rather than Kernel#sleep,
# which is why this intentionally shadows Object#sleep.
206 def sleep(duration) # :nodoc:
207 @backend.sleep :duration => duration
210 # Renames a key +from+ to key +to+.
# NOTE(review): the `def rename(from, to)` line (orig ~line 211) is
# missing from this extract.
212 raise MogileFS::ReadOnlyError if readonly?
214 @backend.rename :domain => @domain, :from_key => from, :to_key => to
218 # Returns the size of +key+.
# NOTE(review): the `def size(key)` line (orig ~line 219) is missing from
# this extract.
# A DB backend can answer the size query directly from the database.
220 @backend.respond_to?(:_size) and return @backend._size(domain, key)
# Prefer the file_info tracker command...
222 file_info(key)["length"].to_i
# ...falling back to sizing the file over HTTP for older trackers that
# do not implement file_info (see rescue in file_info callers).
223 rescue MogileFS::Backend::UnknownCommandError
224 paths_size(get_paths(key))
# Determines the stored size of a file from its replica +paths+
# (presumably via HTTP requests — see mogilefs/paths_size). Lazily
# required since it is only needed as a fallback for old trackers.
228 def paths_size(paths) # :nodoc:
229 require "mogilefs/paths_size"
230 MogileFS::PathsSize.call(paths)
233 # Lists keys starting with +prefix+ following +after+ up to +limit+. If
234 # +after+ is nil the list starts at the beginning.
# Returns [keys_array, next_after] for pagination (see #each_key).
235 def list_keys(prefix = "", after = nil, limit = 1000, &block)
# DB backend shortcut: query the database directly when available.
236 @backend.respond_to?(:_list_keys) and
237 return @backend._list_keys(domain, prefix, after, limit, &block)
# NOTE(review): the `begin` wrapper (orig lines 238-239) is missing from
# this extract; the rescue below handles the tracker's "none_match" error
# (no keys under this prefix).
240 res = @backend.list_keys(:domain => domain, :prefix => prefix,
241 :after => after, :limit => limit)
242 rescue MogileFS::Backend::NoneMatchError
# NOTE(review): the rescue body and its `end` (orig lines 243-245) are
# missing from this extract.
# Unpack the flat key_1..key_N response into an ordered Array.
246 keys = (1..res['key_count'].to_i).map { |i| res["key_#{i}"] }
# NOTE(review): the branch selecting between the two block styles below
# (orig lines 247-250) is missing — presumably chosen by block arity:
# bare keys vs. verbose (key, length, devcount). Confirm upstream.
249 keys.each { |key| block.call(key) }
251 list_keys_verbose(keys, block)
255 [ keys, res['next_after'] ]
# Yields (key, length, devcount) for each of +keys+ to +block+, using
# pipelined file_info requests against the tracker.
258 def list_keys_verbose(keys, block) # :nodoc:
259 # emulate the MogileFS::Mysql interface, slowly...
# NOTE(review): the setup of +ordered+ (keys awaiting responses, in
# request order) and +ready+ (out-of-order responses received so far)
# — orig lines 260-261 — is missing from this extract.
262 on_file_info = lambda do |info|
# The backend delivers either a response Hash or an exception object;
# re-raise anything that is not a Hash.
263 Hash === info or raise info
264 file_info_cleanup(info)
266 # deal with trackers with multiple queryworkers responding out-of-order
# Stash this response, then flush every response now at the head of the
# original key order, so the caller always sees keys in request order.
267 ready[info["key"]] = info
268 while info = ready.delete(ordered[0])
269 block.call(ordered.shift, info["length"], info["devcount"])
# NOTE(review): the closing `end`s of the while loop and of the lambda
# (orig lines 270-271) are missing from this extract.
272 opts = { :domain => @domain }
# NOTE(review): the per-key loop header that sets opts[:key] and opens
# the begin block (orig lines 273-275) is missing; each key gets its own
# pipelined :file_info request handled by on_file_info above.
276 @backend.pipeline_dispatch(:file_info, opts, &on_file_info)
# Drain all outstanding pipelined responses.
278 @backend.pipeline_wait
279 rescue MogileFS::Backend::UnknownCommandError # MogileFS < 2.45
280 @backend.shutdown # reset the socket
# Fallback for old trackers: fetch every replica path and size the file
# over HTTP instead of using file_info.
281 args = { :pathcount => 0x7fffffff }
283 paths = get_paths(key, args)
284 block.call(key, paths_size(paths), paths.size)
# Transient transport failures: the recovery logic below retries the
# keys that never received a response.
286 rescue MogileFS::PipelineError, SystemCallError,
287 MogileFS::RequestTruncatedError,
288 MogileFS::UnreadableSocketError,
289 MogileFS::InvalidResponseError, # truncated response
# NOTE(review): the rest of this rescue (orig lines 290-299) is missing
# from this extract; the line below recomputes the still-unanswered keys.
292 keys = ordered - ready.keys
300 # Return metadata about a file as a hash.
301 # Returns the domain, class, length, devcount, etc. as keys.
302 # Optionally, device ids (not paths) can be returned as
303 # well if :devices is specified and +true+.
305 # This should only be used for informational purposes, and not usually
306 # for dynamically serving files.
308 # mg.file_info("bar")
# NOTE(review): most of the example response hash (orig lines 309-318)
# is missing from this extract.
315 # "class" => "default",
319 def file_info(key, args = nil)
320 opts = { :domain => @domain, :key => key }
# NOTE(review): the third clause only runs when +devices+ is truthy, so
# `devices ? 1 : 0` can only ever yield 1; a falsy :devices simply omits
# the option from the request.
321 args and devices = args[:devices] and opts[:devices] = devices ? 1 : 0
# Coerce numeric fields of the tracker response before returning.
322 file_info_cleanup(@backend.file_info(opts))
# Normalizes a raw file_info response in place: integerizes the numeric
# fields and splits the comma-separated "devids" list (present only when
# :devices was requested) into an Integer array.
325 def file_info_cleanup(rv) # :nodoc:
326 %w(fid length devcount).each { |f| rv[f] = rv[f].to_i }
327 devids = rv["devids"] and
328 rv["devids"] = devids.split(/,/).map! { |x| x.to_i }
# NOTE(review): the trailing return of +rv+ (orig ~line 329) is missing
# from this extract.
332 # Given an Integer +fid+ or String +key+ and domain, thoroughly search
333 # the database for all occurrences of a particular fid.
335 # Use this sparingly, this command hits the master database numerous
336 # times and is very expensive. This is not for production use, only
337 # troubleshooting and debugging.
339 # Searches for fid=666:
341 # client.file_debug(666)
343 # Search for key=foo using the default domain for this object:
345 # client.file_debug("foo")
347 # Search for key=foo in domain="bar":
349 # client.file_debug(:key => "foo", :domain => "bar")
# NOTE(review): the `def file_debug(args)` line and the opening
# `case args` (orig ~lines 350-352) are missing from this extract.
# Scalar arguments are wrapped into a Hash under the matching field name.
353 when Integer then args = { "fid" => args }
354 when String then args = { "key" => args }
356 opts = { :domain => args[:domain] || @domain }.merge!(args)
358 rv = @backend.file_debug(opts)
# Post-process the response fields by name suffix.
# NOTE(review): the iteration header over rv's pairs and the start of the
# case on the field name (orig lines 359-360) are missing from this
# extract; the regex arm below matches integer-valued fields.
361 when /_(?:classid|devcount|dmid|fid|length|
362 nexttry|fromdevid|failcount|flags|devid|type)\z/x
# NOTE(review): intermediate lines (orig 363-364) are missing; the line
# below belongs to a later arm that splits comma-separated id lists into
# Integer arrays.
365 rv[k] = v.split(/,/).map! { |x| x.to_i }