about summary refs log tree commit homepage
path: root/lib/mogilefs/mogilefs.rb
diff options
context:
space:
mode:
author    Eric Wong <normalperson@yhbt.net>  2012-10-31 19:38:09 +0000
committer Eric Wong <normalperson@yhbt.net>  2012-10-31 19:47:37 +0000
commit    a8bc9cd0fce3169eccc67b191fdb40abfae59547 (patch)
tree      53ba22b5d2dddc5bfe328e297badadfa49e985e5 /lib/mogilefs/mogilefs.rb
parent    3e0b092efc2c5a5383a5d2063b830fe5d5e652f9 (diff)
download  mogilefs-client-a8bc9cd0fce3169eccc67b191fdb40abfae59547.tar.gz
We now have separate Net::HTTP::Persistent instances
between clients that may have different timeouts and
also between GET and PUT requests.  This hurts our
ability to reuse sockets, but correctness is probably
more important.
Diffstat (limited to 'lib/mogilefs/mogilefs.rb')
-rw-r--r--  lib/mogilefs/mogilefs.rb  39
1 file changed, 33 insertions(+), 6 deletions(-)
diff --git a/lib/mogilefs/mogilefs.rb b/lib/mogilefs/mogilefs.rb
index 7503bba..8bab9ef 100644
--- a/lib/mogilefs/mogilefs.rb
+++ b/lib/mogilefs/mogilefs.rb
@@ -69,6 +69,10 @@ class MogileFS::MogileFS < MogileFS::Client
 
     @get_file_data_timeout = args[:get_file_data_timeout] || 5
     @new_file_max_time = args[:new_file_max_time] || 3600.0
+    @nhp_get = MogileFS::NHP.new('get')
+    @nhp_get.open_timeout = @nhp_get.read_timeout = @get_file_data_timeout
+    @nhp_put = MogileFS::NHP.new('put')
+    @nhp_put.open_timeout = @nhp_put.read_timeout = @new_file_max_time
 
     raise ArgumentError, "you must specify a domain" unless @domain
 
@@ -146,17 +150,40 @@ class MogileFS::MogileFS < MogileFS::Client
   # start position of the copy.
   def get_file_data(key, dst = nil, copy_length = nil, src_offset = nil)
     paths = get_paths(key)
-    sock = MogileFS::HTTPReader.first(paths, @get_file_data_timeout,
-                                      copy_length, src_offset)
+    if src_offset || copy_length
+      src_offset ||= 0
+      range_end = copy_length ? src_offset + copy_length - 1 : nil
+      range = [ src_offset, range_end ]
+    end
+
     if dst
+      sock = MogileFS::HTTPReader.first(paths, @get_file_data_timeout, range)
       sock.stream_to(dst)
     elsif block_given?
+      sock = MogileFS::HTTPReader.first(paths, @get_file_data_timeout, range)
       yield(sock)
     else
-      sock.to_s
+      errors = nil
+      paths.each do |path|
+        uri = URI.parse(path)
+        get = Net::HTTP::Get.new(uri.path)
+        get["range"] = "bytes=#{range[0]}-#{range[1]}" if range
+        begin
+          res = @nhp_get.request(uri, get)
+          case res.code.to_i
+          when 200, 206
+            return res.body
+          end
+          (errors ||= []) << "#{path} - #{res.message} (#{res.class})"
+        rescue => e
+          (errors ||= []) << "#{path} - #{e.message} (#{e.class})"
+        end
+      end
+      raise MogileFS::Error,
+            "all paths failed with GET: #{errors.join(', ')}", []
     end
-    ensure
-      sock.close if sock && ! sock.closed?
+  ensure
+    sock.close if sock && ! sock.closed?
   end
 
   # Get the paths (URLs as strings) for +key+.  If +args+ is specified,
@@ -253,7 +280,7 @@ class MogileFS::MogileFS < MogileFS::Client
   #   an array of URI::HTTP objects to the stored destinations
   def new_file(key, args = nil, bytes = nil) # :yields: file
     raise MogileFS::ReadOnlyError if readonly?
-    opts = { :key => key, :multi_dest => 1 }
+    opts = { :key => key, :multi_dest => 1, :nhp_put => @nhp_put }
     case args
     when Hash
       opts[:domain] = args[:domain]