# mongo-ruby-driver/test/grid_test.rb
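#
# Tests for the GridFS Grid class: storing and retrieving file data, checking
# existence, deleting files and their chunks, filename and metadata handling,
# arbitrary file-document fields, zero-length data, and streamed IO.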
require 'test/test_helper'
include Mongo
class GridTest < Test::Unit::TestCase
context "Tests:" do
2010-02-12 23:03:07 +00:00
setup do
2010-02-22 20:49:04 +00:00
@db ||= Connection.new(ENV['MONGO_RUBY_DRIVER_HOST'] || 'localhost',
2010-04-05 19:48:35 +00:00
ENV['MONGO_RUBY_DRIVER_PORT'] || Connection::DEFAULT_PORT).db(MONGO_TEST_DB)
2010-02-22 20:49:04 +00:00
@files = @db.collection('test-fs.files')
@chunks = @db.collection('test-fs.chunks')
2010-02-12 23:03:07 +00:00
end

    teardown do
      @files.remove
      @chunks.remove
    end

    context "A basic grid-stored file" do
      setup do
        @data = "GRIDDATA" * 50000
        @grid = Grid.new(@db, 'test-fs')
        @id   = @grid.put(@data, :filename => 'sample', :metadata => {'app' => 'photos'})
      end

      should "check existence" do
        file = @grid.exist?(:filename => 'sample')
        assert_equal 'sample', file['filename']
      end
should "return nil if it doesn't exist" do
assert_nil @grid.exist?(:metadata => 'foo')
end
2010-02-22 20:49:04 +00:00
should "retrieve the stored data" do
data = @grid.get(@id).data
assert_equal @data, data
end
2010-02-12 23:03:07 +00:00
should "have a unique index on chunks" do
assert @chunks.index_information['files_id_1_n_1']['unique']
end

      should "store the filename" do
        file = @grid.get(@id)
        assert_equal 'sample', file.filename
      end

      should "store any relevant metadata" do
        file = @grid.get(@id)
        assert_equal 'photos', file.metadata['app']
      end
should "delete the file and any chunks" do
@grid.delete(@id)
assert_raise GridFileNotFound do
2010-02-22 20:49:04 +00:00
@grid.get(@id)
end
2010-03-23 21:00:31 +00:00
assert_equal nil, @db['test-fs']['chunks'].find_one({:files_id => @id})
2010-02-22 20:49:04 +00:00
end
2010-02-19 21:20:46 +00:00
end
context "Filename not required" do
setup do
@data = "GRIDDATA" * 50000
@grid = Grid.new(@db, 'test-fs')
@metadata = {'app' => 'photos'}
end
2010-03-30 17:51:05 +00:00
should "store the file with the old filename api" do
id = @grid.put(@data, :filename => 'sample', :metadata => @metadata)
file = @grid.get(id)
assert_equal 'sample', file.filename
assert_equal @metadata, file.metadata
end
should "store without a filename" do
id = @grid.put(@data, :metadata => @metadata)
file = @grid.get(id)
assert_nil file.filename
file_doc = @files.find_one({'_id' => id})
assert !file_doc.has_key?('filename')
assert_equal @metadata, file.metadata
end
2010-03-30 17:51:05 +00:00
should "store with filename and metadata with the new api" do
id = @grid.put(@data, :filename => 'sample', :metadata => @metadata)
file = @grid.get(id)
assert_equal 'sample', file.filename
assert_equal @metadata, file.metadata
end
end
context "Writing arbitrary data fields" do
setup do
@data = "GRIDDATA" * 50000
@grid = Grid.new(@db, 'test-fs')
end
should "write random keys to the files collection" do
id = @grid.put(@data, :phrases => ["blimey", "ahoy!"])
file = @grid.get(id)
assert_equal ["blimey", "ahoy!"], file['phrases']
end
should "ignore special keys" do
id = @grid.put(@data, :file_length => 100, :phrase => "blimey")
file = @grid.get(id)
assert_equal "blimey", file['phrase']
assert_equal 400_000, file.file_length
end
end

    context "Storing data with a length of zero" do
      setup do
        @grid = Grid.new(@db, 'test-fs')
        @id   = @grid.put('', :filename => 'sample', :metadata => {'app' => 'photos'})
      end

      should "return the zero length" do
        data = @grid.get(@id)
        assert_equal 0, data.read.length
      end
    end
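
    # Streaming tests: Grid#put also accepts an IO object. These cases write
    # fixture files from test/data into the grid and read them back using a
    # variety of read lengths and chunk sizes.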
context "Streaming: " do || {}
setup do
def read_and_write_stream(filename, read_length, opts={})
io = File.open(File.join(File.dirname(__FILE__), 'data', filename), 'r')
id = @grid.put(io, opts.merge!(:filename => filename + read_length.to_s))
2010-02-22 20:49:04 +00:00
file = @grid.get(id)
io.rewind
data = io.read
if data.respond_to?(:force_encoding)
data.force_encoding(:binary)
end
read_data = ""
while(chunk = file.read(read_length))
read_data << chunk
end
assert_equal data.length, read_data.length
end
2010-02-19 21:20:46 +00:00
2010-02-22 20:49:04 +00:00
@grid = Grid.new(@db, 'test-fs')
end

      should "put and get a small io object with a small chunk size" do
        read_and_write_stream('small_data.txt', 1, :chunk_size => 2)
      end

      should "put and get a small io object" do
        read_and_write_stream('small_data.txt', 1)
      end

      should "put and get a large io object when reading smaller than the chunk size" do
        read_and_write_stream('sample_file.pdf', 256 * 1024)
      end

      should "put and get a large io object when reading larger than the chunk size" do
        read_and_write_stream('sample_file.pdf', 300 * 1024)
      end
    end
  end
end