From 6c8f2db31097998aaab21c05cab53b50bfc243c5 Mon Sep 17 00:00:00 2001
From: Eric Wong
Date: Thu, 4 Sep 2008 18:53:14 -0700
Subject: [PATCH] Lower the large-file threshold to 64K

Yes, I'm quite miserly when it comes to memory usage.

Since the file is already on disk, just read it incrementally and
stream it out to avoid having to deal with potential memory
exhaustion issues on busy systems.

There's also no benefit to slurping 256MB and anything above 64K
leads to diminishing returns on most systems I've seen.
---
 lib/mogilefs/mogilefs.rb | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/lib/mogilefs/mogilefs.rb b/lib/mogilefs/mogilefs.rb
index b1fe57d..aac7e3d 100644
--- a/lib/mogilefs/mogilefs.rb
+++ b/lib/mogilefs/mogilefs.rb
@@ -172,7 +172,7 @@ class MogileFS::MogileFS < MogileFS::Client
     if file.respond_to? :sysread then
       return sysrwloop(file, mfp)
     else
-      if File.size(file) > (256 * 1024 * 1024) # Bigass file, handle differently
+      if File.size(file) > 0x10000 # Bigass file, handle differently
        mfp.bigfile = file
        return mfp.close
      else
--
2.11.4.GIT
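
For reference, a minimal Ruby sketch of the incremental read-and-stream
approach the commit message describes. This is not the library's actual
sysrwloop implementation; the helper name and the plain IO#read loop are
illustrative assumptions, with only the 64K chunk size taken from the patch:

CHUNK_SIZE = 0x10000 # 64K, matching the new threshold in the patch

# Copy the on-disk file at src_path to dst_io in fixed-size chunks so
# memory use stays bounded no matter how large the file is.
def copy_in_chunks(src_path, dst_io)
  File.open(src_path, 'rb') do |src|
    buf = ''
    while src.read(CHUNK_SIZE, buf) # IO#read returns nil at EOF
      dst_io.write(buf)
    end
  end
end

# Usage sketch (paths are hypothetical):
#   File.open('/tmp/out', 'wb') { |dst| copy_in_chunks('/tmp/big.file', dst) }

The point of the fixed chunk size is the same as in the patch: beyond
roughly 64K per read there is little throughput gain, while slurping the
whole file risks memory exhaustion on busy systems.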