Tests for GridIO#each

Kyle Banker 2010-12-01 13:30:57 -05:00
parent c80cd285fa
commit 9a8d83ad24
6 changed files with 43 additions and 11 deletions

View File

@@ -45,7 +45,7 @@ module Mongo
     end

     # Store a file in the file store. This method is designed only for writing new files;
-    # if you need to update a given file, first delete it using #Grid#delete.
+    # if you need to update a given file, first delete it using Grid#delete.
     #
     # Note that arbitary metadata attributes can be saved to the file by passing
     # them in as options.
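
For context, here is a minimal usage sketch of the Grid#put behavior documented above. It is not part of this commit; the database name and data are illustrative, and a running MongoDB instance with the 1.x Ruby driver is assumed.

    require 'rubygems'
    require 'mongo'

    db   = Mongo::Connection.new.db('grid-example')   # hypothetical database name
    grid = Mongo::Grid.new(db, 'test-fs')

    # Arbitrary metadata attributes ride along as options when storing a new file.
    id = grid.put("GRIDDATA" * 50_000,
                  :filename => 'sample',
                  :metadata => {'app' => 'photos'})

    # Grid#put is for new files only; to replace a file, delete it first, then store again.
    grid.delete(id)
    id = grid.put("NEWDATA" * 50_000, :filename => 'sample')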

View File

@@ -191,11 +191,14 @@ module Mongo
       end
       id
     end

     # Read a chunk of the data from the file and yield it to the given
-    # block. It will read from the current file position.
+    # block.
     #
-    # @param [Block] A block called with each chunk
+    # Note that this method reads from the current file position.
+    #
+    # @yield Yields on chunk per iteration as defined by this file's
+    #   chunk size.
     #
     # @return [Mongo::GridIO] self
     def each
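
A hedged sketch of the #each behavior described above, not part of this commit: it streams a stored file chunk by chunk, assuming the grid handle and file id from the Grid#put sketch earlier.

    file = grid.get(id)        # => Mongo::GridIO

    # #each yields one chunk per iteration, sized by the file's chunk size,
    # reading from the current file position.
    file.each do |chunk|
      $stdout.write(chunk)
    end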

Binary file not shown.

BIN
test/data/sample_data Normal file

Binary file not shown.

View File

@@ -62,7 +62,6 @@ class GridIOTest < Test::Unit::TestCase
     end

     context "Content types" do
       if defined?(MIME)
         should "determine common content types from the extension" do
           file = GridIO.new(@files, @chunks, 'sample.pdf', 'w')

View File

@@ -18,7 +18,8 @@ class GridTest < Test::Unit::TestCase
     setup do
       @data = "GRIDDATA" * 50000
       @grid = Grid.new(@db, 'test-fs')
-      @id = @grid.put(@data, :filename => 'sample', :metadata => {'app' => 'photos'})
+      @id = @grid.put(@data, :filename => 'sample',
+                      :metadata => {'app' => 'photos'})
     end

     should "check existence" do
@@ -120,7 +121,8 @@ class GridTest < Test::Unit::TestCase
   context "Storing data with a length of zero" do
     setup do
       @grid = Grid.new(@db, 'test-fs')
-      @id = @grid.put('', :filename => 'sample', :metadata => {'app' => 'photos'})
+      @id = @grid.put('', :filename => 'sample',
+                      :metadata => {'app' => 'photos'})
     end

     should "return the zero length" do
@@ -129,6 +131,34 @@ class GridTest < Test::Unit::TestCase
     end
   end

+  context "Grid streaming: " do
+    setup do
+      @grid = Grid.new(@db, 'test-fs')
+      filename = 'sample_data'
+      @io = File.open(File.join(File.dirname(__FILE__), 'data', filename), 'r')
+      id = @grid.put(@io, :filename => filename)
+      @file = @grid.get(id)
+      @io.rewind
+      @data = @io.read
+      if @data.respond_to?(:force_encoding)
+        @data.force_encoding("binary")
+      end
+    end
+
+    should "read the file" do
+      read_data = ""
+      @file.each do |chunk|
+        read_data << chunk
+      end
+      assert_equal @data.length, read_data.length
+    end
+
+    should "read the file if no block is given" do
+      read_data = @file.each
+      assert_equal @data.length, read_data.length
+    end
+  end
+
   context "Streaming: " do
     setup do
       def read_and_write_stream(filename, read_length, opts={})
@@ -158,12 +188,12 @@ class GridTest < Test::Unit::TestCase
       read_and_write_stream('small_data.txt', 1)
     end

-    should "put and get a large io object when reading smaller than the chunk size" do
-      read_and_write_stream('sample_file.pdf', 256 * 1024)
+    should "put and get a large io object if reading less than the chunk size" do
+      read_and_write_stream('sample_data', 256 * 1024)
     end

-    should "put and get a large io object when reading larger than the chunk size" do
-      read_and_write_stream('sample_file.pdf', 300 * 1024)
+    should "put and get a large io object if reading more than the chunk size" do
+      read_and_write_stream('sample_data', 300 * 1024)
     end
   end
 end
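
Not part of the commit: a rough sketch of the streaming round trip these tests exercise, writing an IO object into GridFS and reading it back in fixed-size slices. The source path and database name are made up for illustration.

    require 'rubygems'
    require 'mongo'

    grid = Mongo::Grid.new(Mongo::Connection.new.db('grid-example'), 'test-fs')

    File.open('/tmp/example.bin', 'rb') do |io|       # hypothetical source file
      id   = grid.put(io, :filename => 'example.bin')
      file = grid.get(id)

      # Read back in 256 KB slices, mirroring the read lengths used by the tests.
      while (slice = file.read(256 * 1024)) && slice.length > 0
        # process each slice here...
      end
    end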