From 89fe06250e186812e95ac0542bd5c4da63826f32 Mon Sep 17 00:00:00 2001 From: Kyle Banker Date: Mon, 22 Feb 2010 15:49:04 -0500 Subject: [PATCH] re-require shoulda; gridfs decoupling --- README.rdoc | 5 +- lib/mongo/gridfs/grid.rb | 17 +- lib/mongo/gridfs/grid_file_system.rb | 28 ++- lib/mongo/gridfs/grid_io.rb | 49 ++--- test/binary_test.rb | 16 +- test/collection_test.rb | 49 +++-- test/grid_file_system_test.rb | 298 ++++++++++++++------------- test/grid_io_test.rb | 46 +++-- test/grid_test.rb | 135 ++++++------ test/test_helper.rb | 34 +-- test/unit/collection_test.rb | 91 ++++---- test/unit/connection_test.rb | 186 ++++++++--------- test/unit/cursor_test.rb | 166 +++++++-------- test/unit/db_test.rb | 144 ++++++------- 14 files changed, 635 insertions(+), 629 deletions(-) diff --git a/README.rdoc b/README.rdoc index 5277845..b18ffb5 100644 --- a/README.rdoc +++ b/README.rdoc @@ -292,10 +292,11 @@ It's also possible to test replica pairs with connection pooling: $ rake test:pooled_pair_insert -===Mocha +===Shoulda and Mocha -Running the test suite requires mocha. You can install it as follows: +Running the test suite requires shoulda and mocha. You can install them as follows: + $ gem install shoulda $ gem install mocha The tests assume that the Mongo database is running on the default port. You diff --git a/lib/mongo/gridfs/grid.rb b/lib/mongo/gridfs/grid.rb index e040e9c..7b02be4 100644 --- a/lib/mongo/gridfs/grid.rb +++ b/lib/mongo/gridfs/grid.rb @@ -22,22 +22,25 @@ module Mongo def initialize(db, fs_name=DEFAULT_FS_NAME) check_params(db) - @db = db - @files = @db["#{fs_name}.files"] - @chunks = @db["#{fs_name}.chunks"] + @db = db + @files = @db["#{fs_name}.files"] + @chunks = @db["#{fs_name}.chunks"] + @fs_name = fs_name @chunks.create_index([['files_id', Mongo::ASCENDING], ['n', Mongo::ASCENDING]]) end def put(data, filename, opts={}) - file = GridIO.new(@files, @chunks, filename, 'w', false, opts=opts) + opts.merge!(default_grid_io_opts) + file = GridIO.new(@files, @chunks, filename, 'w', opts=opts) file.write(data) file.close file.files_id end def get(id) - GridIO.new(@files, @chunks, nil, 'r', false, :_id => id) + opts = {:query => {'_id' => id}}.merge!(default_grid_io_opts) + GridIO.new(@files, @chunks, nil, 'r', opts) end def delete(id) @@ -47,6 +50,10 @@ module Mongo private + def default_grid_io_opts + {:fs_name => @fs_name} + end + def check_params(db) if !db.is_a?(Mongo::DB) raise MongoArgumentError, "db must be an instance of Mongo::DB." diff --git a/lib/mongo/gridfs/grid_file_system.rb b/lib/mongo/gridfs/grid_file_system.rb index 1281ddd..990d9ac 100644 --- a/lib/mongo/gridfs/grid_file_system.rb +++ b/lib/mongo/gridfs/grid_file_system.rb @@ -23,10 +23,12 @@ module Mongo super @files.create_index([['filename', 1], ['uploadDate', -1]]) + @default_query_opts = {:sort => [['filename', 1], ['uploadDate', -1]], :limit => 1} end def open(filename, mode, opts={}) - file = GridIO.new(@files, @chunks, filename, mode, true, opts) + opts.merge!(default_grid_io_opts(filename)) + file = GridIO.new(@files, @chunks, filename, mode, opts) return file unless block_given? 
result = nil begin @@ -37,15 +39,31 @@ module Mongo result end - def put(data, filename) + def put(data, filename, opts={}) + opts.merge!(default_grid_io_opts(filename)) + file = GridIO.new(@files, @chunks, filename, 'w', opts) + file.write(data) + file.close + file.files_id end - def get(id) + def get(filename, opts={}) + opts.merge!(default_grid_io_opts(filename)) + GridIO.new(@files, @chunks, filename, 'r', opts) end - # Deletes all files matching the given criteria. - def delete(criteria) + def delete(filename, opts={}) + ids = @files.find({'filename' => filename}, ['_id']) + ids.each do |id| + @files.remove({'_id' => id}) + @chunks.remove('files_id' => id) + end end + private + + def default_grid_io_opts(filename=nil) + {:fs_name => @fs_name, :query => {'filename' => filename}, :query_opts => @default_query_opts} + end end end diff --git a/lib/mongo/gridfs/grid_io.rb b/lib/mongo/gridfs/grid_io.rb index 20068c5..2eb5c89 100644 --- a/lib/mongo/gridfs/grid_io.rb +++ b/lib/mongo/gridfs/grid_io.rb @@ -23,20 +23,20 @@ module Mongo attr_reader :content_type, :chunk_size, :upload_date, :files_id, :filename, :metadata - def initialize(files, chunks, filename, mode, filesystem, opts={}) - @files = files - @chunks = chunks - @filename = filename - @mode = mode - @content_type = opts[:content_type] || DEFAULT_CONTENT_TYPE - @chunk_size = opts[:chunk_size] || DEFAULT_CHUNK_SIZE - @files_id = opts[:_id] + def initialize(files, chunks, filename, mode, opts={}) + @files = files + @chunks = chunks + @filename = filename + @mode = mode + @query = opts[:query] || {} + @query_opts = opts[:query_opts] || {} + @fs_name = opts[:fs_name] || Grid::DEFAULT_FS_NAME case @mode - when 'r' then init_read(filesystem, opts) + when 'r' then init_read(opts) when 'w' then init_write(opts) else - raise GridError, "Invalid file mode #{@mode}. Valid options include 'r' and 'w'." + raise GridError, "Invalid file mode #{@mode}. Mode should be 'r' or 'w'." end end @@ -147,9 +147,7 @@ module Mongo chunk end - # TODO: Perhaps use an upsert here instead? def save_chunk(chunk) - @chunks.remove('_id' => chunk['_id']) @chunks.insert(chunk) end @@ -159,22 +157,17 @@ module Mongo chunk end - def get_chunk_for_read(n) - chunk = get_chunk(n) - return nil unless chunk - end - def last_chunk_number (@file_length / @chunk_size).to_i end - # Read a file in its entirety (optimized). + # Read a file in its entirety. def read_all buf = '' while true buf << @current_chunk['data'].to_s - break if @current_chunk['n'] == last_chunk_number @current_chunk = get_chunk(@current_chunk['n'] + 1) + break unless @current_chunk end buf end @@ -232,15 +225,10 @@ module Mongo string.length - to_write end - # Initialize based on whether the supplied file exists. - def init_read(filesystem, opts) - if filesystem - doc = @files.find({'filename' => @filename}, :sort => [["uploadDate", -1]], :limit => 1).next_document - raise GridError, "Could not open file with filename #{@filename}" unless doc - else - doc = @files.find({'_id' => @files_id}).next_document - raise GridError, "Could not open file with id #{@files_id}" unless doc - end + # Initialize the class for reading a file. 
+ def init_read(opts) + doc = @files.find(@query, @query_opts).next_document + raise GridError, "Could not open file matching #{@query.inspect} #{@query_opts.inspect}" unless doc @files_id = doc['_id'] @content_type = doc['contentType'] @@ -251,11 +239,12 @@ module Mongo @metadata = doc['metadata'] @md5 = doc['md5'] @filename = doc['filename'] + @current_chunk = get_chunk(0) @file_position = 0 end - # Validates and sets up the class for the given file mode. + # Initialize the class for writing a file. def init_write(opts) @files_id = opts[:_id] || Mongo::ObjectID.new @content_type = opts[:content_type] || @content_type || DEFAULT_CONTENT_TYPE @@ -281,7 +270,7 @@ module Mongo # Get a server-side md5. md5_command = OrderedHash.new md5_command['filemd5'] = @files_id - md5_command['root'] = 'fs' + md5_command['root'] = @fs_name h['md5'] = @files.db.command(md5_command)['md5'] h diff --git a/test/binary_test.rb b/test/binary_test.rb index 87c6769..cc84126 100644 --- a/test/binary_test.rb +++ b/test/binary_test.rb @@ -1,13 +1,15 @@ # encoding:utf-8 require 'test/test_helper' -context "Inspecting" do - setup do - @data = ("THIS IS BINARY " * 50).unpack("c*") - end +class BinaryTest < Test::Unit::TestCase + context "Inspecting" do + setup do + @data = ("THIS IS BINARY " * 50).unpack("c*") + end - should "not display actual data" do - binary = Mongo::Binary.new(@data) - assert_equal "", binary.inspect + should "not display actual data" do + binary = Mongo::Binary.new(@data) + assert_equal "", binary.inspect + end end end diff --git a/test/collection_test.rb b/test/collection_test.rb index eb0ed4e..7e5440e 100644 --- a/test/collection_test.rb +++ b/test/collection_test.rb @@ -1,7 +1,7 @@ require 'test/test_helper' class TestCollection < Test::Unit::TestCase - @@connection = Connection.new(ENV['MONGO_RUBY_DRIVER_HOST'] || 'localhost', ENV['MONGO_RUBY_DRIVER_PORT'] || Connection::DEFAULT_PORT) + @@connection ||= Connection.new(ENV['MONGO_RUBY_DRIVER_HOST'] || 'localhost', ENV['MONGO_RUBY_DRIVER_PORT'] || Connection::DEFAULT_PORT) @@db = @@connection.db('ruby-mongo-test') @@test = @@db.collection("test") @@version = @@connection.server_version @@ -75,32 +75,31 @@ class TestCollection < Test::Unit::TestCase end if @@version > "1.1" - context "distinct queries" do - setup do - @@test.remove - @@test.insert([{:a => 0, :b => {:c => "a"}}, - {:a => 1, :b => {:c => "b"}}, - {:a => 1, :b => {:c => "c"}}, - {:a => 2, :b => {:c => "a"}}, - {:a => 3}, - {:a => 3}]) + def setup_for_distinct + @@test.remove + @@test.insert([{:a => 0, :b => {:c => "a"}}, + {:a => 1, :b => {:c => "b"}}, + {:a => 1, :b => {:c => "c"}}, + {:a => 2, :b => {:c => "a"}}, + {:a => 3}, + {:a => 3}]) + end + + def test_distinct_queries + setup_for_distinct + assert_equal [0, 1, 2, 3], @@test.distinct(:a).sort + assert_equal ["a", "b", "c"], @@test.distinct("b.c").sort + end + + if @@version >= "1.2" + def test_filter_collection_with_query + setup_for_distinct + assert_equal [2, 3], @@test.distinct(:a, {:a => {"$gt" => 1}}).sort end - should "return distinct values" do - assert_equal [0, 1, 2, 3], @@test.distinct(:a).sort - assert_equal ["a", "b", "c"], @@test.distinct("b.c").sort - end - - if @@version >= "1.2" - - should "filter collection with query" do - assert_equal [2, 3], @@test.distinct(:a, {:a => {"$gt" => 1}}).sort - end - - should "filter nested objects" do - assert_equal ["a", "b"], @@test.distinct("b.c", {"b.c" => {"$ne" => "c"}}).sort - end - + def test_filter_nested_objects + setup_for_distinct + assert_equal ["a", "b"], 
@@test.distinct("b.c", {"b.c" => {"$ne" => "c"}}).sort end end end diff --git a/test/grid_file_system_test.rb b/test/grid_file_system_test.rb index a3832bc..99628e8 100644 --- a/test/grid_file_system_test.rb +++ b/test/grid_file_system_test.rb @@ -1,184 +1,186 @@ require 'test/test_helper' include Mongo -context "GridFileSystem:" do - setup do - @db ||= Connection.new(ENV['MONGO_RUBY_DRIVER_HOST'] || 'localhost', - ENV['MONGO_RUBY_DRIVER_PORT'] || Connection::DEFAULT_PORT).db('ruby-mongo-test') - @files = @db.collection('fs.files') - @chunks = @db.collection('fs.chunks') - end - - teardown do - @files.remove - @chunks.remove - end - - context "When reading:" do +class GridFileSystemTest < Test::Unit::TestCase + context "GridFileSystem:" do setup do - @data = "CHUNKS" * 50000 - @grid = GridFileSystem.new(@db) - @grid.open('sample', 'w') do |f| - f.write @data - end - - @grid = GridFileSystem.new(@db) + @con = Connection.new(ENV['MONGO_RUBY_DRIVER_HOST'] || 'localhost', + ENV['MONGO_RUBY_DRIVER_PORT'] || Connection::DEFAULT_PORT) + @db = @con.db('mongo-ruby-test') end - should "read sample data" do - data = @grid.open('sample', 'r') { |f| f.read } - assert_equal data.length, @data.length + teardown do + @db['fs.files'].remove + @db['fs.chunks'].remove end - should "return an empty string if length is zero" do - data = @grid.open('sample', 'r') { |f| f.read(0) } - assert_equal '', data - end - - should "return the first n bytes" do - data = @grid.open('sample', 'r') {|f| f.read(288888) } - assert_equal 288888, data.length - assert_equal @data[0...288888], data - end - - should "return the first n bytes even with an offset" do - data = @grid.open('sample', 'r') do |f| - f.seek(1000) - f.read(288888) - end - assert_equal 288888, data.length - assert_equal @data[1000...289888], data - end - end - - context "When writing:" do - setup do - @data = "BYTES" * 50000 - @grid = GridFileSystem.new(@db) - @grid.open('sample', 'w') do |f| - f.write @data - end - end - - should "read sample data" do - data = @grid.open('sample', 'r') { |f| f.read } - assert_equal data.length, @data.length - end - - should "return the total number of bytes written" do - data = 'a' * 300000 - assert_equal 300000, @grid.open('write', 'w') {|f| f.write(data) } - end - - should "more read sample data" do - data = @grid.open('sample', 'r') { |f| f.read } - assert_equal data.length, @data.length - end - - should "raise exception if not opened for write" do - assert_raise GridError do - @grid.open('io', 'r') { |f| f.write('hello') } - end - end - - context "and when overwriting the file" do + context "When reading:" do setup do - @old = @grid.open('sample', 'r') - - @new_data = "DATA" * 1000 - @grid.open('sample', 'w') do |f| - f.write @new_data + @chunks_data = "CHUNKS" * 50000 + @grid = GridFileSystem.new(@db) + @grid.open('sample.file', 'w') do |f| + f.write @chunks_data end - @new = @grid.open('sample', 'r') + @grid = GridFileSystem.new(@db) end - should "have a newer upload date" do - assert @new.upload_date > @old.upload_date + should "read sample data" do + data = @grid.open('sample.file', 'r') { |f| f.read } + assert_equal data.length, @chunks_data.length end - should "have a different files_id" do - assert_not_equal @new.files_id, @old.files_id + should "return an empty string if length is zero" do + data = @grid.open('sample.file', 'r') { |f| f.read(0) } + assert_equal '', data end - should "contain the new data" do - assert_equal @new_data, @new.read + should "return the first n bytes" do + data = 
@grid.open('sample.file', 'r') {|f| f.read(288888) } + assert_equal 288888, data.length + assert_equal @chunks_data[0...288888], data end - end - end - context "When writing chunks:" do - setup do - data = "B" * 50000 - @grid = GridFileSystem.new(@db) - @grid.open('sample', 'w', :chunk_size => 1000) do |f| - f.write data + should "return the first n bytes even with an offset" do + data = @grid.open('sample.file', 'r') do |f| + f.seek(1000) + f.read(288888) + end + assert_equal 288888, data.length + assert_equal @chunks_data[1000...289888], data end end - should "write the correct number of chunks" do - file = @files.find_one({:filename => 'sample'}) - chunks = @chunks.find({'files_id' => file['_id']}).to_a - assert_equal 50, chunks.length - end - end + context "When writing:" do + setup do + @data = "BYTES" * 50 + @grid = GridFileSystem.new(@db) + @grid.open('sample', 'w') do |f| + f.write @data + end + end - context "Positioning:" do - setup do - data = 'hello, world' + '1' * 5000 + 'goodbye!' + '2' * 1000 + '!' - @grid = GridFileSystem.new(@db) - @grid.open('hello', 'w', :chunk_size => 1000) do |f| - f.write data + should "read sample data" do + data = @grid.open('sample', 'r') { |f| f.read } + assert_equal data.length, @data.length + end + + should "return the total number of bytes written" do + data = 'a' * 300000 + assert_equal 300000, @grid.open('sample', 'w') {|f| f.write(data) } + end + + should "more read sample data" do + data = @grid.open('sample', 'r') { |f| f.read } + assert_equal data.length, @data.length + end + + should "raise exception if not opened for write" do + assert_raise GridError do + @grid.open('io', 'r') { |f| f.write('hello') } + end + end + + context "and when overwriting the file" do + setup do + @old = @grid.open('sample', 'r') + + @new_data = "DATA" * 10 + sleep(2) + @grid.open('sample', 'w') do |f| + f.write @new_data + end + + @new = @grid.open('sample', 'r') + end + + should "have a newer upload date" do + assert @new.upload_date > @old.upload_date, "New data is not greater than old date." + end + + should "have a different files_id" do + assert_not_equal @new.files_id, @old.files_id + end + + should "contain the new data" do + assert_equal @new_data, @new.read, "Expected DATA" + end + end + end + + context "When writing chunks:" do + setup do + data = "B" * 50000 + @grid = GridFileSystem.new(@db) + @grid.open('sample', 'w', :chunk_size => 1000) do |f| + f.write data + end + end + + should "write the correct number of chunks" do + file = @db['fs.files'].find_one({:filename => 'sample'}) + chunks = @db['fs.chunks'].find({'files_id' => file['_id']}).to_a + assert_equal 50, chunks.length end end - should "seek within chunks" do - @grid.open('hello', 'r') do |f| - f.seek(0) - assert_equal 'h', f.read(1) - f.seek(7) - assert_equal 'w', f.read(1) - f.seek(4) - assert_equal 'o', f.read(1) - f.seek(0) - f.seek(7, IO::SEEK_CUR) - assert_equal 'w', f.read(1) - f.seek(-2, IO::SEEK_CUR) - assert_equal ' ', f.read(1) - f.seek(-4, IO::SEEK_CUR) - assert_equal 'l', f.read(1) - f.seek(3, IO::SEEK_CUR) - assert_equal 'w', f.read(1) + context "Positioning:" do + setup do + data = 'hello, world' + '1' * 5000 + 'goodbye!' + '2' * 1000 + '!' 
+ @grid = GridFileSystem.new(@db) + @grid.open('hello', 'w', :chunk_size => 1000) do |f| + f.write data + end end - end - should "seek between chunks" do - @grid.open('hello', 'r') do |f| - f.seek(1000) - assert_equal '11111', f.read(5) - - f.seek(5009) - assert_equal '111goodbye!222', f.read(14) - - f.seek(-1, IO::SEEK_END) - assert_equal '!', f.read(1) - f.seek(-6, IO::SEEK_END) - assert_equal '2', f.read(1) + should "seek within chunks" do + @grid.open('hello', 'r') do |f| + f.seek(0) + assert_equal 'h', f.read(1) + f.seek(7) + assert_equal 'w', f.read(1) + f.seek(4) + assert_equal 'o', f.read(1) + f.seek(0) + f.seek(7, IO::SEEK_CUR) + assert_equal 'w', f.read(1) + f.seek(-2, IO::SEEK_CUR) + assert_equal ' ', f.read(1) + f.seek(-4, IO::SEEK_CUR) + assert_equal 'l', f.read(1) + f.seek(3, IO::SEEK_CUR) + assert_equal 'w', f.read(1) + end end - end - should "tell the current position" do - @grid.open('hello', 'r') do |f| - assert_equal 0, f.tell + should "seek between chunks" do + @grid.open('hello', 'r') do |f| + f.seek(1000) + assert_equal '11111', f.read(5) - f.seek(999) - assert_equal 999, f.tell + f.seek(5009) + assert_equal '111goodbye!222', f.read(14) + + f.seek(-1, IO::SEEK_END) + assert_equal '!', f.read(1) + f.seek(-6, IO::SEEK_END) + assert_equal '2', f.read(1) + end end - end - should "seek only in read mode" do - assert_raise GridError do - @grid.open('hello', 'w') {|f| f.seek(0) } + should "tell the current position" do + @grid.open('hello', 'r') do |f| + assert_equal 0, f.tell + + f.seek(999) + assert_equal 999, f.tell + end + end + + should "seek only in read mode" do + assert_raise GridError do + @grid.open('hello', 'w') {|f| f.seek(0) } + end end end end diff --git a/test/grid_io_test.rb b/test/grid_io_test.rb index 955758a..dcf15df 100644 --- a/test/grid_io_test.rb +++ b/test/grid_io_test.rb @@ -1,33 +1,37 @@ require 'test/test_helper' include Mongo -context "" do - setup do - @db ||= Connection.new(ENV['MONGO_RUBY_DRIVER_HOST'] || 'localhost', - ENV['MONGO_RUBY_DRIVER_PORT'] || Connection::DEFAULT_PORT).db('ruby-mongo-test') - @files = @db.collection('fs.files') - @chunks = @db.collection('fs.chunks') - end +class GridIOTest < Test::Unit::TestCase - teardown do - @files.remove - @chunks.remove - end - - context "Options" do + context "GridIO" do setup do - @filename = 'test' - @mode = 'w' + @db ||= Connection.new(ENV['MONGO_RUBY_DRIVER_HOST'] || 'localhost', + ENV['MONGO_RUBY_DRIVER_PORT'] || Connection::DEFAULT_PORT).db('ruby-mongo-test') + @files = @db.collection('fs.files') + @chunks = @db.collection('fs.chunks') end - should "set default 256k chunk size" do - file = GridIO.new(@files, @chunks, @filename, @mode, false) - assert_equal 256 * 1024, file.chunk_size + teardown do + @files.remove + @chunks.remove end - should "set chunk size" do - file = GridIO.new(@files, @chunks, @filename, @mode, false, :chunk_size => 1000) - assert_equal 1000, file.chunk_size + context "Options" do + setup do + @filename = 'test' + @mode = 'w' + end + + should "set default 256k chunk size" do + file = GridIO.new(@files, @chunks, @filename, @mode) + assert_equal 256 * 1024, file.chunk_size + end + + should "set chunk size" do + file = GridIO.new(@files, @chunks, @filename, @mode, :chunk_size => 1000) + assert_equal 1000, file.chunk_size + end end end + end diff --git a/test/grid_test.rb b/test/grid_test.rb index 296c5e7..fa25d15 100644 --- a/test/grid_test.rb +++ b/test/grid_test.rb @@ -1,84 +1,87 @@ require 'test/test_helper' include Mongo -context "Tests:" do - setup do - @db ||= 
Connection.new(ENV['MONGO_RUBY_DRIVER_HOST'] || 'localhost', - ENV['MONGO_RUBY_DRIVER_PORT'] || Connection::DEFAULT_PORT).db('ruby-mongo-test') - @files = @db.collection('test-fs.files') - @chunks = @db.collection('test-fs.chunks') - end - - teardown do - @files.remove - @chunks.remove - end - - context "A basic grid-stored file" do +class GridTest < Test::Unit::TestCase + context "Tests:" do setup do - @data = "GRIDDATA" * 50000 - @grid = Grid.new(@db, 'test-fs') - @id = @grid.put(@data, 'sample', :metadata => {'app' => 'photos'}) + @db ||= Connection.new(ENV['MONGO_RUBY_DRIVER_HOST'] || 'localhost', + ENV['MONGO_RUBY_DRIVER_PORT'] || Connection::DEFAULT_PORT).db('ruby-mongo-test') + @files = @db.collection('test-fs.files') + @chunks = @db.collection('test-fs.chunks') end - should "retrieve the stored data" do - data = @grid.get(@id).data - assert_equal @data, data + teardown do + @files.remove + @chunks.remove end - should "store the filename" do - file = @grid.get(@id) - assert_equal 'sample', file.filename - end - - should "store any relevant metadata" do - file = @grid.get(@id) - assert_equal 'photos', file.metadata['app'] - end - - should "delete the file and any chunks" do - @grid.delete(@id) - assert_raise GridError do - @grid.get(@id) - end - end - end - - context "Streaming: " do - setup do - def read_and_write_stream(filename, read_length, opts={}) - io = File.open(File.join(File.dirname(__FILE__), 'data', filename), 'r') - id = @grid.put(io, filename + read_length.to_s, opts) - file = @grid.get(id) - io.rewind - data = io.read - if data.respond_to?(:force_encoding) - data.force_encoding(:binary) - end - read_data = "" - while(chunk = file.read(read_length)) - read_data << chunk - end - assert_equal data.length, read_data.length + context "A basic grid-stored file" do + setup do + @data = "GRIDDATA" * 50000 + @grid = Grid.new(@db, 'test-fs') + @id = @grid.put(@data, 'sample', :metadata => {'app' => 'photos'}) end - @grid = Grid.new(@db, 'test-fs') + should "retrieve the stored data" do + data = @grid.get(@id).data + assert_equal @data, data + end + + should "store the filename" do + file = @grid.get(@id) + assert_equal 'sample', file.filename + end + + should "store any relevant metadata" do + file = @grid.get(@id) + assert_equal 'photos', file.metadata['app'] + end + + should "delete the file and any chunks" do + @grid.delete(@id) + assert_raise GridError do + @grid.get(@id) + end + end end - should "put and get a small io object with a small chunk size" do - read_and_write_stream('small_data.txt', 1, :chunk_size => 2) - end + context "Streaming: " do + setup do + def read_and_write_stream(filename, read_length, opts={}) + io = File.open(File.join(File.dirname(__FILE__), 'data', filename), 'r') + id = @grid.put(io, filename + read_length.to_s, opts) + file = @grid.get(id) + io.rewind + data = io.read + if data.respond_to?(:force_encoding) + data.force_encoding(:binary) + end + read_data = "" + while(chunk = file.read(read_length)) + read_data << chunk + end + assert_equal data.length, read_data.length + assert_equal data, read_data, "Unequal!" 
+ end - should "put and get a small io object" do - read_and_write_stream('small_data.txt', 1) - end + @grid = Grid.new(@db, 'test-fs') + end - should "put and get a large io object when reading smaller than the chunk size" do - read_and_write_stream('sample_file.pdf', 256 * 1024) - end + should "put and get a small io object with a small chunk size" do + read_and_write_stream('small_data.txt', 1, :chunk_size => 2) + end - should "put and get a large io object when reading larger than the chunk size" do - read_and_write_stream('sample_file.pdf', 300 * 1024) + should "put and get a small io object" do + read_and_write_stream('small_data.txt', 1) + end + + should "put and get a large io object when reading smaller than the chunk size" do + read_and_write_stream('sample_file.pdf', 256 * 1024) + end + + should "put and get a large io object when reading larger than the chunk size" do + read_and_write_stream('sample_file.pdf', 300 * 1024) + end end end end diff --git a/test/test_helper.rb b/test/test_helper.rb index 3e33a6e..06b1ca7 100644 --- a/test/test_helper.rb +++ b/test/test_helper.rb @@ -5,12 +5,14 @@ require 'test/unit' begin require 'rubygems' + require 'shoulda' require 'mocha' rescue LoadError puts < @logger, :connect => false) - @db = @conn['testing'] - @coll = @db.collection('books') - @conn.expects(:send_message).with do |op, msg, log| - op == 2001 && log.include?("db.books.update") + context "Basic operations: " do + setup do + @logger = mock() end - @coll.update({}, {:title => 'Moby Dick'}) - end - should "send insert message" do - @conn = Connection.new('localhost', 27017, :logger => @logger, :connect => false) - @db = @conn['testing'] - @coll = @db.collection('books') - @conn.expects(:send_message).with do |op, msg, log| - op == 2002 && log.include?("db.books.insert") + should "send update message" do + @conn = Connection.new('localhost', 27017, :logger => @logger, :connect => false) + @db = @conn['testing'] + @coll = @db.collection('books') + @conn.expects(:send_message).with do |op, msg, log| + op == 2001 && log.include?("db.books.update") + end + @coll.update({}, {:title => 'Moby Dick'}) end - @coll.insert({:title => 'Moby Dick'}) - end - should "not log binary data" do - @conn = Connection.new('localhost', 27017, :logger => @logger, :connect => false) - @db = @conn['testing'] - @coll = @db.collection('books') - data = Mongo::Binary.new(("BINARY " * 1000).unpack("c*")) - @conn.expects(:send_message).with do |op, msg, log| - op == 2002 && log.include?("Mongo::Binary") + should "send insert message" do + @conn = Connection.new('localhost', 27017, :logger => @logger, :connect => false) + @db = @conn['testing'] + @coll = @db.collection('books') + @conn.expects(:send_message).with do |op, msg, log| + op == 2002 && log.include?("db.books.insert") + end + @coll.insert({:title => 'Moby Dick'}) end - @coll.insert({:data => data}) - end - should "send safe update message" do - @conn = Connection.new('localhost', 27017, :logger => @logger, :connect => false) - @db = @conn['testing'] - @coll = @db.collection('books') - @conn.expects(:send_message_with_safe_check).with do |op, msg, db_name, log| - op == 2001 && log.include?("db.books.update") + should "not log binary data" do + @conn = Connection.new('localhost', 27017, :logger => @logger, :connect => false) + @db = @conn['testing'] + @coll = @db.collection('books') + data = Mongo::Binary.new(("BINARY " * 1000).unpack("c*")) + @conn.expects(:send_message).with do |op, msg, log| + op == 2002 && log.include?("Mongo::Binary") + end + 
@coll.insert({:data => data}) end - @coll.update({}, {:title => 'Moby Dick'}, :safe => true) - end - should "send safe insert message" do - @conn = Connection.new('localhost', 27017, :logger => @logger, :connect => false) - @db = @conn['testing'] - @coll = @db.collection('books') - @conn.expects(:send_message_with_safe_check).with do |op, msg, db_name, log| - op == 2001 && log.include?("db.books.update") + should "send safe update message" do + @conn = Connection.new('localhost', 27017, :logger => @logger, :connect => false) + @db = @conn['testing'] + @coll = @db.collection('books') + @conn.expects(:send_message_with_safe_check).with do |op, msg, db_name, log| + op == 2001 && log.include?("db.books.update") + end + @coll.update({}, {:title => 'Moby Dick'}, :safe => true) + end + + should "send safe insert message" do + @conn = Connection.new('localhost', 27017, :logger => @logger, :connect => false) + @db = @conn['testing'] + @coll = @db.collection('books') + @conn.expects(:send_message_with_safe_check).with do |op, msg, db_name, log| + op == 2001 && log.include?("db.books.update") + end + @coll.update({}, {:title => 'Moby Dick'}, :safe => true) end - @coll.update({}, {:title => 'Moby Dick'}, :safe => true) end end diff --git a/test/unit/connection_test.rb b/test/unit/connection_test.rb index a718b11..ec0a5f7 100644 --- a/test/unit/connection_test.rb +++ b/test/unit/connection_test.rb @@ -1,114 +1,116 @@ require 'test/test_helper' include Mongo -context "Initialization: " do - setup do - def new_mock_socket - socket = Object.new - socket.stubs(:setsockopt).with(Socket::IPPROTO_TCP, Socket::TCP_NODELAY, 1) - socket - end - - def new_mock_db - db = Object.new - end - end - - context "given a single node" do +class ConnectionTest < Test::Unit::TestCase + context "Initialization: " do setup do - TCPSocket.stubs(:new).returns(new_mock_socket) - @conn = Connection.new('localhost', 27017, :connect => false) - - admin_db = new_mock_db - admin_db.expects(:command).returns({'ok' => 1, 'ismaster' => 1}) - @conn.expects(:[]).with('admin').returns(admin_db) - @conn.connect_to_master - end - - should "set localhost and port to master" do - assert_equal 'localhost', @conn.host - assert_equal 27017, @conn.port - end - - should "set connection pool to 1" do - assert_equal 1, @conn.size - end - - should "default slave_ok to false" do - assert !@conn.slave_ok? 
- end - end - - context "initializing a paired connection" do - should "require left and right nodes" do - assert_raise MongoArgumentError do - Connection.paired(['localhost', 27018], :connect => false) + def new_mock_socket + socket = Object.new + socket.stubs(:setsockopt).with(Socket::IPPROTO_TCP, Socket::TCP_NODELAY, 1) + socket end - assert_raise MongoArgumentError do - Connection.paired(['localhost', 27018], :connect => false) + def new_mock_db + db = Object.new end end - should "store both nodes" do - @conn = Connection.paired([['localhost', 27017], ['localhost', 27018]], :connect => false) + context "given a single node" do + setup do + TCPSocket.stubs(:new).returns(new_mock_socket) + @conn = Connection.new('localhost', 27017, :connect => false) - assert_equal ['localhost', 27017], @conn.nodes[0] - assert_equal ['localhost', 27018], @conn.nodes[1] - end - end - - context "initializing with a mongodb uri" do - should "parse a simple uri" do - @conn = Connection.from_uri("mongodb://localhost", :connect => false) - assert_equal ['localhost', 27017], @conn.nodes[0] - end - - should "parse a uri specifying multiple nodes" do - @conn = Connection.from_uri("mongodb://localhost:27017,mydb.com:27018", :connect => false) - assert_equal ['localhost', 27017], @conn.nodes[0] - assert_equal ['mydb.com', 27018], @conn.nodes[1] - end - - should "parse a uri specifying multiple nodes with auth" do - @conn = Connection.from_uri("mongodb://kyle:s3cr3t@localhost:27017/app,mickey:m0u5e@mydb.com:27018/dsny", :connect => false) - assert_equal ['localhost', 27017], @conn.nodes[0] - assert_equal ['mydb.com', 27018], @conn.nodes[1] - assert_equal ['kyle', 's3cr3t', 'app'], @conn.auths[0] - assert_equal ['mickey', 'm0u5e', 'dsny'], @conn.auths[1] - end - - should "attempt to connect" do - TCPSocket.stubs(:new).returns(new_mock_socket) - @conn = Connection.from_uri("mongodb://localhost", :connect => false) - - admin_db = new_mock_db - admin_db.expects(:command).returns({'ok' => 1, 'ismaster' => 1}) - @conn.expects(:[]).with('admin').returns(admin_db) - @conn.connect_to_master - end - - should "raise an error on invalid uris" do - assert_raise MongoArgumentError do - Connection.from_uri("mongo://localhost", :connect => false) + admin_db = new_mock_db + admin_db.expects(:command).returns({'ok' => 1, 'ismaster' => 1}) + @conn.expects(:[]).with('admin').returns(admin_db) + @conn.connect_to_master end - assert_raise MongoArgumentError do - Connection.from_uri("mongodb://localhost:abc", :connect => false) + should "set localhost and port to master" do + assert_equal 'localhost', @conn.host + assert_equal 27017, @conn.port end - assert_raise MongoArgumentError do - Connection.from_uri("mongodb://localhost:27017, my.db.com:27018, ", :connect => false) + should "set connection pool to 1" do + assert_equal 1, @conn.size + end + + should "default slave_ok to false" do + assert !@conn.slave_ok? 
end end - should "require all of username, password, and database if any one is specified" do - assert_raise MongoArgumentError do - Connection.from_uri("mongodb://localhost/db", :connect => false) + context "initializing a paired connection" do + should "require left and right nodes" do + assert_raise MongoArgumentError do + Connection.paired(['localhost', 27018], :connect => false) + end + + assert_raise MongoArgumentError do + Connection.paired(['localhost', 27018], :connect => false) + end end - assert_raise MongoArgumentError do - Connection.from_uri("mongodb://kyle:password@localhost", :connect => false) + should "store both nodes" do + @conn = Connection.paired([['localhost', 27017], ['localhost', 27018]], :connect => false) + + assert_equal ['localhost', 27017], @conn.nodes[0] + assert_equal ['localhost', 27018], @conn.nodes[1] + end + end + + context "initializing with a mongodb uri" do + should "parse a simple uri" do + @conn = Connection.from_uri("mongodb://localhost", :connect => false) + assert_equal ['localhost', 27017], @conn.nodes[0] + end + + should "parse a uri specifying multiple nodes" do + @conn = Connection.from_uri("mongodb://localhost:27017,mydb.com:27018", :connect => false) + assert_equal ['localhost', 27017], @conn.nodes[0] + assert_equal ['mydb.com', 27018], @conn.nodes[1] + end + + should "parse a uri specifying multiple nodes with auth" do + @conn = Connection.from_uri("mongodb://kyle:s3cr3t@localhost:27017/app,mickey:m0u5e@mydb.com:27018/dsny", :connect => false) + assert_equal ['localhost', 27017], @conn.nodes[0] + assert_equal ['mydb.com', 27018], @conn.nodes[1] + assert_equal ['kyle', 's3cr3t', 'app'], @conn.auths[0] + assert_equal ['mickey', 'm0u5e', 'dsny'], @conn.auths[1] + end + + should "attempt to connect" do + TCPSocket.stubs(:new).returns(new_mock_socket) + @conn = Connection.from_uri("mongodb://localhost", :connect => false) + + admin_db = new_mock_db + admin_db.expects(:command).returns({'ok' => 1, 'ismaster' => 1}) + @conn.expects(:[]).with('admin').returns(admin_db) + @conn.connect_to_master + end + + should "raise an error on invalid uris" do + assert_raise MongoArgumentError do + Connection.from_uri("mongo://localhost", :connect => false) + end + + assert_raise MongoArgumentError do + Connection.from_uri("mongodb://localhost:abc", :connect => false) + end + + assert_raise MongoArgumentError do + Connection.from_uri("mongodb://localhost:27017, my.db.com:27018, ", :connect => false) + end + end + + should "require all of username, password, and database if any one is specified" do + assert_raise MongoArgumentError do + Connection.from_uri("mongodb://localhost/db", :connect => false) + end + + assert_raise MongoArgumentError do + Connection.from_uri("mongodb://kyle:password@localhost", :connect => false) + end end end end diff --git a/test/unit/cursor_test.rb b/test/unit/cursor_test.rb index 28cacc5..3be5839 100644 --- a/test/unit/cursor_test.rb +++ b/test/unit/cursor_test.rb @@ -1,91 +1,93 @@ require 'test/test_helper' -context "Cursor options" do - setup do - @connection = stub(:class => Connection) - @db = stub(:name => "testing", :slave_ok? => false, :connection => @connection) - @collection = stub(:db => @db, :name => "items") - @cursor = Cursor.new(@collection) +class CursorTest < Test::Unit::TestCase + context "Cursor options" do + setup do + @connection = stub(:class => Connection) + @db = stub(:name => "testing", :slave_ok? 
=> false, :connection => @connection) + @collection = stub(:db => @db, :name => "items") + @cursor = Cursor.new(@collection) + end + + should "set admin to false" do + assert_equal false, @cursor.admin + + @cursor = Cursor.new(@collection, :admin => true) + assert_equal true, @cursor.admin + end + + should "set selector" do + assert @cursor.selector == {} + + @cursor = Cursor.new(@collection, :selector => {:name => "Jones"}) + assert @cursor.selector == {:name => "Jones"} + end + + should "set fields" do + assert_nil @cursor.fields + + @cursor = Cursor.new(@collection, :fields => [:name, :date]) + assert @cursor.fields == {:name => 1, :date => 1} + end + + should "set limit" do + assert_equal 0, @cursor.limit + + @cursor = Cursor.new(@collection, :limit => 10) + assert_equal 10, @cursor.limit + end + + + should "set skip" do + assert_equal 0, @cursor.skip + + @cursor = Cursor.new(@collection, :skip => 5) + assert_equal 5, @cursor.skip + end + + should "set sort order" do + assert_nil @cursor.order + + @cursor = Cursor.new(@collection, :order => "last_name") + assert_equal "last_name", @cursor.order + end + + should "set hint" do + assert_nil @cursor.hint + + @cursor = Cursor.new(@collection, :hint => "name") + assert_equal "name", @cursor.hint + end + + should "cache full collection name" do + assert_equal "testing.items", @cursor.full_collection_name + end end - should "set admin to false" do - assert_equal false, @cursor.admin + context "Query fields" do + setup do + @connection = stub(:class => Collection) + @db = stub(:slave_ok? => true, :name => "testing", :connection => @connection) + @collection = stub(:db => @db, :name => "items") + end - @cursor = Cursor.new(@collection, :admin => true) - assert_equal true, @cursor.admin - end + should "when an array should return a hash with each key" do + @cursor = Cursor.new(@collection, :fields => [:name, :age]) + result = @cursor.fields + assert_equal result.keys.sort{|a,b| a.to_s <=> b.to_s}, [:age, :name].sort{|a,b| a.to_s <=> b.to_s} + assert result.values.all? {|v| v == 1} + end - should "set selector" do - assert @cursor.selector == {} + should "when a string, return a hash with just the key" do + @cursor = Cursor.new(@collection, :fields => "name") + result = @cursor.fields + assert_equal result.keys.sort, ["name"] + assert result.values.all? {|v| v == 1} + end - @cursor = Cursor.new(@collection, :selector => {:name => "Jones"}) - assert @cursor.selector == {:name => "Jones"} - end - - should "set fields" do - assert_nil @cursor.fields - - @cursor = Cursor.new(@collection, :fields => [:name, :date]) - assert @cursor.fields == {:name => 1, :date => 1} - end - - should "set limit" do - assert_equal 0, @cursor.limit - - @cursor = Cursor.new(@collection, :limit => 10) - assert_equal 10, @cursor.limit - end - - - should "set skip" do - assert_equal 0, @cursor.skip - - @cursor = Cursor.new(@collection, :skip => 5) - assert_equal 5, @cursor.skip - end - - should "set sort order" do - assert_nil @cursor.order - - @cursor = Cursor.new(@collection, :order => "last_name") - assert_equal "last_name", @cursor.order - end - - should "set hint" do - assert_nil @cursor.hint - - @cursor = Cursor.new(@collection, :hint => "name") - assert_equal "name", @cursor.hint - end - - should "cache full collection name" do - assert_equal "testing.items", @cursor.full_collection_name - end -end - -context "Query fields" do - setup do - @connection = stub(:class => Collection) - @db = stub(:slave_ok? 
=> true, :name => "testing", :connection => @connection) - @collection = stub(:db => @db, :name => "items") - end - - should "when an array should return a hash with each key" do - @cursor = Cursor.new(@collection, :fields => [:name, :age]) - result = @cursor.fields - assert_equal result.keys.sort{|a,b| a.to_s <=> b.to_s}, [:age, :name].sort{|a,b| a.to_s <=> b.to_s} - assert result.values.all? {|v| v == 1} - end - - should "when a string, return a hash with just the key" do - @cursor = Cursor.new(@collection, :fields => "name") - result = @cursor.fields - assert_equal result.keys.sort, ["name"] - assert result.values.all? {|v| v == 1} - end - - should "return nil when neither hash nor string nor symbol" do - @cursor = Cursor.new(@collection, :fields => 1234567) - assert_nil @cursor.fields + should "return nil when neither hash nor string nor symbol" do + @cursor = Cursor.new(@collection, :fields => 1234567) + assert_nil @cursor.fields + end end end diff --git a/test/unit/db_test.rb b/test/unit/db_test.rb index 097e3cc..7f3c9f8 100644 --- a/test/unit/db_test.rb +++ b/test/unit/db_test.rb @@ -1,98 +1,98 @@ require 'test/test_helper' -context "DBTest: " do - setup do - def insert_message(db, documents) - documents = [documents] unless documents.is_a?(Array) - message = ByteBuffer.new - message.put_int(0) - BSON.serialize_cstr(message, "#{db.name}.test") - documents.each { |doc| message.put_array(BSON.new.serialize(doc, true).to_a) } - message = db.add_message_headers(Mongo::Constants::OP_INSERT, message) - end - end - - context "DB commands" do +class DBTest < Test::Unit::TestCase + context "DBTest: " do setup do - @conn = stub() - @db = DB.new("testing", @conn) - @collection = mock() - @db.stubs(:system_command_collection).returns(@collection) - end - - should "raise an error if given a hash with more than one key" do - assert_raise MongoArgumentError do - @db.command(:buildinfo => 1, :somekey => 1) + def insert_message(db, documents) + documents = [documents] unless documents.is_a?(Array) + message = ByteBuffer.new + message.put_int(0) + BSON.serialize_cstr(message, "#{db.name}.test") + documents.each { |doc| message.put_array(BSON.new.serialize(doc, true).to_a) } + message = db.add_message_headers(Mongo::Constants::OP_INSERT, message) end end - should "raise an error if the selector is omitted" do - assert_raise MongoArgumentError do - @db.command({}, true) + context "DB commands" do + setup do + @conn = stub() + @db = DB.new("testing", @conn) + @collection = mock() + @db.stubs(:system_command_collection).returns(@collection) end - end - should "create the proper cursor" do - @cursor = mock(:next_document => {"ok" => 1}) - Cursor.expects(:new).with(@collection, :admin => true, - :limit => -1, :selector => {:buildinfo => 1}, :socket => nil).returns(@cursor) - command = {:buildinfo => 1} - @db.command(command, true) - end + should "raise an error if given a hash with more than one key" do + assert_raise MongoArgumentError do + @db.command(:buildinfo => 1, :somekey => 1) + end + end - should "raise an error when the command fails" do - @cursor = mock(:next_document => {"ok" => 0}) - Cursor.expects(:new).with(@collection, :admin => true, - :limit => -1, :selector => {:buildinfo => 1}, :socket => nil).returns(@cursor) - assert_raise OperationFailure do + should "raise an error if the selector is omitted" do + assert_raise MongoArgumentError do + @db.command({}, true) + end + end + + should "create the proper cursor" do + @cursor = mock(:next_document => {"ok" => 1}) + 
Cursor.expects(:new).with(@collection, :admin => true, + :limit => -1, :selector => {:buildinfo => 1}, :socket => nil).returns(@cursor) command = {:buildinfo => 1} - @db.command(command, true, true) + @db.command(command, true) end - end - should "raise an error if logging out fails" do - @db.expects(:command).returns({}) - assert_raise MongoDBError do - @db.logout + should "raise an error when the command fails" do + @cursor = mock(:next_document => {"ok" => 0}) + Cursor.expects(:new).with(@collection, :admin => true, + :limit => -1, :selector => {:buildinfo => 1}, :socket => nil).returns(@cursor) + assert_raise OperationFailure do + command = {:buildinfo => 1} + @db.command(command, true, true) + end end - end - should "raise an error if collection creation fails" do - @db.expects(:collection_names).returns([]) - @db.expects(:command).returns({}) - assert_raise MongoDBError do - @db.create_collection("foo") + should "raise an error if logging out fails" do + @db.expects(:command).returns({}) + assert_raise MongoDBError do + @db.logout + end end - end - should "raise an error if getlasterror fails" do - @db.expects(:command).returns({}) - assert_raise MongoDBError do - @db.error + should "raise an error if collection creation fails" do + @db.expects(:collection_names).returns([]) + @db.expects(:command).returns({}) + assert_raise MongoDBError do + @db.create_collection("foo") + end end - end - should "raise an error if rename fails" do - @db.expects(:command).returns({}) - assert_raise MongoDBError do - @db.rename_collection("foo", "bar") + should "raise an error if getlasterror fails" do + @db.expects(:command).returns({}) + assert_raise MongoDBError do + @db.error + end end - end - should "raise an error if drop_index fails" do - @db.expects(:command).returns({}) - assert_raise MongoDBError do - @db.drop_index("foo", "bar") + should "raise an error if rename fails" do + @db.expects(:command).returns({}) + assert_raise MongoDBError do + @db.rename_collection("foo", "bar") + end end - end - should "raise an error if set_profiling_level fails" do - @db.expects(:command).returns({}) - assert_raise MongoDBError do - @db.profiling_level = :slow_only + should "raise an error if drop_index fails" do + @db.expects(:command).returns({}) + assert_raise MongoDBError do + @db.drop_index("foo", "bar") + end + end + + should "raise an error if set_profiling_level fails" do + @db.expects(:command).returns({}) + assert_raise MongoDBError do + @db.profiling_level = :slow_only + end end end end end - -
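For reference only (not part of the commit): a minimal usage sketch of the decoupled GridFS API this patch introduces, based on the Grid, GridFileSystem, and GridIO changes above and their tests. It assumes a mongod running on localhost:27017; the database name 'grid-demo', fs name 'my-fs', and the filenames are illustrative.

  require 'rubygems'
  require 'mongo'
  include Mongo

  db = Connection.new('localhost', 27017).db('grid-demo')  # assumes a local mongod; db name is illustrative

  # Grid: files are addressed by the _id that Grid#put returns.
  grid = Grid.new(db, 'my-fs')                              # 'my-fs' is an illustrative fs_name
  id   = grid.put("Hello, GridFS", 'greeting.txt', :content_type => 'text/plain')
  puts grid.get(id).read                                    # => "Hello, GridFS"
  grid.delete(id)

  # GridFileSystem: files are addressed by filename; reads return the newest revision.
  fs = GridFileSystem.new(db)
  fs.open('log.txt', 'w') { |f| f.write("first revision") }
  sleep(2)                                                  # as in the overwrite test above, give the new revision a later uploadDate
  fs.open('log.txt', 'w') { |f| f.write("second revision") }
  puts fs.open('log.txt', 'r') { |f| f.read }               # => "second revision"
  fs.delete('log.txt')                                      # removes every revision and its chunks

The split mirrors the patch's design: Grid keys files by _id for simple blob storage, while GridFileSystem keys them by filename and keeps older revisions, sorting on filename and uploadDate to serve the most recent one.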