re-require shoulda; gridfs decoupling

This commit is contained in:
Kyle Banker 2010-02-22 15:49:04 -05:00
parent 54a68c7438
commit 89fe06250e
14 changed files with 635 additions and 629 deletions

View File

@ -292,10 +292,11 @@ It's also possible to test replica pairs with connection pooling:
$ rake test:pooled_pair_insert
===Mocha
===Shoulda and Mocha
Running the test suite requires mocha. You can install it as follows:
Running the test suite requires shoulda and mocha. You can install them as follows:
$ gem install shoulda
$ gem install mocha
The tests assume that the Mongo database is running on the default port. You

View File

@ -25,19 +25,22 @@ module Mongo
@db = db
@files = @db["#{fs_name}.files"]
@chunks = @db["#{fs_name}.chunks"]
@fs_name = fs_name
@chunks.create_index([['files_id', Mongo::ASCENDING], ['n', Mongo::ASCENDING]])
end
def put(data, filename, opts={})
file = GridIO.new(@files, @chunks, filename, 'w', false, opts=opts)
opts.merge!(default_grid_io_opts)
file = GridIO.new(@files, @chunks, filename, 'w', opts=opts)
file.write(data)
file.close
file.files_id
end
def get(id)
GridIO.new(@files, @chunks, nil, 'r', false, :_id => id)
opts = {:query => {'_id' => id}}.merge!(default_grid_io_opts)
GridIO.new(@files, @chunks, nil, 'r', opts)
end
def delete(id)
@ -47,6 +50,10 @@ module Mongo
private
def default_grid_io_opts
{:fs_name => @fs_name}
end
def check_params(db)
if !db.is_a?(Mongo::DB)
raise MongoArgumentError, "db must be an instance of Mongo::DB."
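
For context, a minimal sketch of how the refactored Grid API above might be used; the connection details, database name, and data here are illustrative assumptions, not part of the commit:

require 'mongo'
include Mongo

db   = Connection.new('localhost', 27017).db('ruby-mongo-test')  # assumed host/port/db
grid = Grid.new(db)                     # defaults to the 'fs' collection prefix

# put writes the data and returns the new file's _id; get now builds
# {:query => {'_id' => id}} internally via default_grid_io_opts.
id   = grid.put("Hello, GridFS", 'hello.txt', :content_type => 'text/plain')
file = grid.get(id)
puts file.read
grid.delete(id)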

View File

@ -23,10 +23,12 @@ module Mongo
super
@files.create_index([['filename', 1], ['uploadDate', -1]])
@default_query_opts = {:sort => [['filename', 1], ['uploadDate', -1]], :limit => 1}
end
def open(filename, mode, opts={})
file = GridIO.new(@files, @chunks, filename, mode, true, opts)
opts.merge!(default_grid_io_opts(filename))
file = GridIO.new(@files, @chunks, filename, mode, opts)
return file unless block_given?
result = nil
begin
@ -37,15 +39,31 @@ module Mongo
result
end
def put(data, filename)
def put(data, filename, opts={})
opts.merge!(default_grid_io_opts(filename))
file = GridIO.new(@files, @chunks, filename, 'w', opts)
file.write(data)
file.close
file.files_id
end
def get(id)
def get(filename, opts={})
opts.merge!(default_grid_io_opts(filename))
GridIO.new(@files, @chunks, filename, 'r', opts)
end
# Deletes all files matching the given criteria.
def delete(criteria)
def delete(filename, opts={})
ids = @files.find({'filename' => filename}, ['_id'])
ids.each do |id|
@files.remove({'_id' => id})
@chunks.remove('files_id' => id)
end
end
private
def default_grid_io_opts(filename=nil)
{:fs_name => @fs_name, :query => {'filename' => filename}, :query_opts => @default_query_opts}
end
end
end
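
A hedged sketch of the filename-oriented GridFileSystem interface implied by the diff above; the db handle and file contents are assumed:

fs = GridFileSystem.new(db)

# Each open(..., 'w') stores a new version of the file; open(..., 'r') returns
# the newest version, following the default sort on filename and uploadDate.
fs.open('sample.file', 'w') { |f| f.write('first version') }
fs.open('sample.file', 'w') { |f| f.write('second version') }

data = fs.open('sample.file', 'r') { |f| f.read }   # => 'second version'

fs.delete('sample.file')                             # removes every stored version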

View File

@ -23,20 +23,20 @@ module Mongo
attr_reader :content_type, :chunk_size, :upload_date, :files_id, :filename, :metadata
def initialize(files, chunks, filename, mode, filesystem, opts={})
def initialize(files, chunks, filename, mode, opts={})
@files = files
@chunks = chunks
@filename = filename
@mode = mode
@content_type = opts[:content_type] || DEFAULT_CONTENT_TYPE
@chunk_size = opts[:chunk_size] || DEFAULT_CHUNK_SIZE
@files_id = opts[:_id]
@query = opts[:query] || {}
@query_opts = opts[:query_opts] || {}
@fs_name = opts[:fs_name] || Grid::DEFAULT_FS_NAME
case @mode
when 'r' then init_read(filesystem, opts)
when 'r' then init_read(opts)
when 'w' then init_write(opts)
else
raise GridError, "Invalid file mode #{@mode}. Valid options include 'r' and 'w'."
raise GridError, "Invalid file mode #{@mode}. Mode should be 'r' or 'w'."
end
end
@ -147,9 +147,7 @@ module Mongo
chunk
end
# TODO: Perhaps use an upsert here instead?
def save_chunk(chunk)
@chunks.remove('_id' => chunk['_id'])
@chunks.insert(chunk)
end
@ -159,22 +157,17 @@ module Mongo
chunk
end
def get_chunk_for_read(n)
chunk = get_chunk(n)
return nil unless chunk
end
def last_chunk_number
(@file_length / @chunk_size).to_i
end
# Read a file in its entirety (optimized).
# Read a file in its entirety.
def read_all
buf = ''
while true
buf << @current_chunk['data'].to_s
break if @current_chunk['n'] == last_chunk_number
@current_chunk = get_chunk(@current_chunk['n'] + 1)
break unless @current_chunk
end
buf
end
@ -232,15 +225,10 @@ module Mongo
string.length - to_write
end
# Initialize based on whether the supplied file exists.
def init_read(filesystem, opts)
if filesystem
doc = @files.find({'filename' => @filename}, :sort => [["uploadDate", -1]], :limit => 1).next_document
raise GridError, "Could not open file with filename #{@filename}" unless doc
else
doc = @files.find({'_id' => @files_id}).next_document
raise GridError, "Could not open file with id #{@files_id}" unless doc
end
# Initialize the class for reading a file.
def init_read(opts)
doc = @files.find(@query, @query_opts).next_document
raise GridError, "Could not open file matching #{@query.inspect} #{@query_opts.inspect}" unless doc
@files_id = doc['_id']
@content_type = doc['contentType']
@ -251,11 +239,12 @@ module Mongo
@metadata = doc['metadata']
@md5 = doc['md5']
@filename = doc['filename']
@current_chunk = get_chunk(0)
@file_position = 0
end
# Validates and sets up the class for the given file mode.
# Initialize the class for writing a file.
def init_write(opts)
@files_id = opts[:_id] || Mongo::ObjectID.new
@content_type = opts[:content_type] || @content_type || DEFAULT_CONTENT_TYPE
@ -281,7 +270,7 @@ module Mongo
# Get a server-side md5.
md5_command = OrderedHash.new
md5_command['filemd5'] = @files_id
md5_command['root'] = 'fs'
md5_command['root'] = @fs_name
h['md5'] = @files.db.command(md5_command)['md5']
h
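
With the boolean filesystem flag gone, reads are driven entirely by the :query and :query_opts options. A rough sketch of constructing a GridIO directly under that assumption (normally Grid or GridFileSystem supplies these values; the names below are illustrative):

files  = db['fs.files']
chunks = db['fs.chunks']

io = GridIO.new(files, chunks, nil, 'r',
                :query      => {'filename' => 'sample.file'},
                :query_opts => {:sort => [['uploadDate', -1]], :limit => 1},
                :fs_name    => 'fs')
puts io.read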

View File

@ -1,7 +1,8 @@
# encoding:utf-8
require 'test/test_helper'
context "Inspecting" do
class BinaryTest < Test::Unit::TestCase
context "Inspecting" do
setup do
@data = ("THIS IS BINARY " * 50).unpack("c*")
end
@ -10,4 +11,5 @@ context "Inspecting" do
binary = Mongo::Binary.new(@data)
assert_equal "<Mongo::Binary:#{binary.object_id}>", binary.inspect
end
end
end

View File

@ -1,7 +1,7 @@
require 'test/test_helper'
class TestCollection < Test::Unit::TestCase
@@connection = Connection.new(ENV['MONGO_RUBY_DRIVER_HOST'] || 'localhost', ENV['MONGO_RUBY_DRIVER_PORT'] || Connection::DEFAULT_PORT)
@@connection ||= Connection.new(ENV['MONGO_RUBY_DRIVER_HOST'] || 'localhost', ENV['MONGO_RUBY_DRIVER_PORT'] || Connection::DEFAULT_PORT)
@@db = @@connection.db('ruby-mongo-test')
@@test = @@db.collection("test")
@@version = @@connection.server_version
@ -75,8 +75,7 @@ class TestCollection < Test::Unit::TestCase
end
if @@version > "1.1"
context "distinct queries" do
setup do
def setup_for_distinct
@@test.remove
@@test.insert([{:a => 0, :b => {:c => "a"}},
{:a => 1, :b => {:c => "b"}},
@ -86,22 +85,22 @@ class TestCollection < Test::Unit::TestCase
{:a => 3}])
end
should "return distinct values" do
def test_distinct_queries
setup_for_distinct
assert_equal [0, 1, 2, 3], @@test.distinct(:a).sort
assert_equal ["a", "b", "c"], @@test.distinct("b.c").sort
end
if @@version >= "1.2"
should "filter collection with query" do
def test_filter_collection_with_query
setup_for_distinct
assert_equal [2, 3], @@test.distinct(:a, {:a => {"$gt" => 1}}).sort
end
should "filter nested objects" do
def test_filter_nested_objects
setup_for_distinct
assert_equal ["a", "b"], @@test.distinct("b.c", {"b.c" => {"$ne" => "c"}}).sort
end
end
end
end

View File

@ -1,59 +1,59 @@
require 'test/test_helper'
include Mongo
context "GridFileSystem:" do
class GridFileSystemTest < Test::Unit::TestCase
context "GridFileSystem:" do
setup do
@db ||= Connection.new(ENV['MONGO_RUBY_DRIVER_HOST'] || 'localhost',
ENV['MONGO_RUBY_DRIVER_PORT'] || Connection::DEFAULT_PORT).db('ruby-mongo-test')
@files = @db.collection('fs.files')
@chunks = @db.collection('fs.chunks')
@con = Connection.new(ENV['MONGO_RUBY_DRIVER_HOST'] || 'localhost',
ENV['MONGO_RUBY_DRIVER_PORT'] || Connection::DEFAULT_PORT)
@db = @con.db('mongo-ruby-test')
end
teardown do
@files.remove
@chunks.remove
@db['fs.files'].remove
@db['fs.chunks'].remove
end
context "When reading:" do
setup do
@data = "CHUNKS" * 50000
@chunks_data = "CHUNKS" * 50000
@grid = GridFileSystem.new(@db)
@grid.open('sample', 'w') do |f|
f.write @data
@grid.open('sample.file', 'w') do |f|
f.write @chunks_data
end
@grid = GridFileSystem.new(@db)
end
should "read sample data" do
data = @grid.open('sample', 'r') { |f| f.read }
assert_equal data.length, @data.length
data = @grid.open('sample.file', 'r') { |f| f.read }
assert_equal data.length, @chunks_data.length
end
should "return an empty string if length is zero" do
data = @grid.open('sample', 'r') { |f| f.read(0) }
data = @grid.open('sample.file', 'r') { |f| f.read(0) }
assert_equal '', data
end
should "return the first n bytes" do
data = @grid.open('sample', 'r') {|f| f.read(288888) }
data = @grid.open('sample.file', 'r') {|f| f.read(288888) }
assert_equal 288888, data.length
assert_equal @data[0...288888], data
assert_equal @chunks_data[0...288888], data
end
should "return the first n bytes even with an offset" do
data = @grid.open('sample', 'r') do |f|
data = @grid.open('sample.file', 'r') do |f|
f.seek(1000)
f.read(288888)
end
assert_equal 288888, data.length
assert_equal @data[1000...289888], data
assert_equal @chunks_data[1000...289888], data
end
end
context "When writing:" do
setup do
@data = "BYTES" * 50000
@data = "BYTES" * 50
@grid = GridFileSystem.new(@db)
@grid.open('sample', 'w') do |f|
f.write @data
@ -67,7 +67,7 @@ context "GridFileSystem:" do
should "return the total number of bytes written" do
data = 'a' * 300000
assert_equal 300000, @grid.open('write', 'w') {|f| f.write(data) }
assert_equal 300000, @grid.open('sample', 'w') {|f| f.write(data) }
end
should "more read sample data" do
@ -85,7 +85,8 @@ context "GridFileSystem:" do
setup do
@old = @grid.open('sample', 'r')
@new_data = "DATA" * 1000
@new_data = "DATA" * 10
sleep(2)
@grid.open('sample', 'w') do |f|
f.write @new_data
end
@ -94,7 +95,7 @@ context "GridFileSystem:" do
end
should "have a newer upload date" do
assert @new.upload_date > @old.upload_date
assert @new.upload_date > @old.upload_date, "New data is not greater than old date."
end
should "have a different files_id" do
@ -102,7 +103,7 @@ context "GridFileSystem:" do
end
should "contain the new data" do
assert_equal @new_data, @new.read
assert_equal @new_data, @new.read, "Expected DATA"
end
end
end
@ -117,8 +118,8 @@ context "GridFileSystem:" do
end
should "write the correct number of chunks" do
file = @files.find_one({:filename => 'sample'})
chunks = @chunks.find({'files_id' => file['_id']}).to_a
file = @db['fs.files'].find_one({:filename => 'sample'})
chunks = @db['fs.chunks'].find({'files_id' => file['_id']}).to_a
assert_equal 50, chunks.length
end
end
@ -182,4 +183,5 @@ context "GridFileSystem:" do
end
end
end
end
end

View File

@ -1,7 +1,9 @@
require 'test/test_helper'
include Mongo
context "" do
class GridIOTest < Test::Unit::TestCase
context "GridIO" do
setup do
@db ||= Connection.new(ENV['MONGO_RUBY_DRIVER_HOST'] || 'localhost',
ENV['MONGO_RUBY_DRIVER_PORT'] || Connection::DEFAULT_PORT).db('ruby-mongo-test')
@ -21,13 +23,15 @@ context "" do
end
should "set default 256k chunk size" do
file = GridIO.new(@files, @chunks, @filename, @mode, false)
file = GridIO.new(@files, @chunks, @filename, @mode)
assert_equal 256 * 1024, file.chunk_size
end
should "set chunk size" do
file = GridIO.new(@files, @chunks, @filename, @mode, false, :chunk_size => 1000)
file = GridIO.new(@files, @chunks, @filename, @mode, :chunk_size => 1000)
assert_equal 1000, file.chunk_size
end
end
end
end

View File

@ -1,7 +1,8 @@
require 'test/test_helper'
include Mongo
context "Tests:" do
class GridTest < Test::Unit::TestCase
context "Tests:" do
setup do
@db ||= Connection.new(ENV['MONGO_RUBY_DRIVER_HOST'] || 'localhost',
ENV['MONGO_RUBY_DRIVER_PORT'] || Connection::DEFAULT_PORT).db('ruby-mongo-test')
@ -44,7 +45,7 @@ context "Tests:" do
end
end
context "Streaming: " do
context "Streaming: " do || {}
setup do
def read_and_write_stream(filename, read_length, opts={})
io = File.open(File.join(File.dirname(__FILE__), 'data', filename), 'r')
@ -60,6 +61,7 @@ context "Tests:" do
read_data << chunk
end
assert_equal data.length, read_data.length
assert_equal data, read_data, "Unequal!"
end
@grid = Grid.new(@db, 'test-fs')
@ -81,4 +83,5 @@ context "Tests:" do
read_and_write_stream('sample_file.pdf', 300 * 1024)
end
end
end
end

View File

@ -5,12 +5,14 @@ require 'test/unit'
begin
require 'rubygems'
require 'shoulda'
require 'mocha'
rescue LoadError
puts <<MSG
This test suite requires mocha.
You can install it as follows:
This test suite requires shoulda and mocha.
You can install them as follows:
gem install shoulda
gem install mocha
MSG
@ -38,31 +40,3 @@ class Test::Unit::TestCase
end
end
end
# shoulda-mini
# based on test/spec/mini 5
# http://gist.github.com/307649
# chris@ozmm.org
#
def context(*args, &block)
return super unless (name = args.first) && block
require 'test/unit'
klass = Class.new(Test::Unit::TestCase) do
def self.should(name, &block)
define_method("test_#{name.to_s.gsub(/\W/,'_')}", &block) if block
end
def self.xshould(*args) end
def self.context(*args, &block) instance_eval(&block) end
def self.setup(&block)
define_method(:setup) { self.class.setups.each { |s| instance_eval(&s) } }
setups << block
end
def self.setups; @setups ||= [] end
def self.teardown(&block) define_method(:teardown, &block) end
end
(class << klass; self end).send(:define_method, :name) { name.gsub(/\W/,'_') }
klass.class_eval do
include Mongo
end
klass.class_eval &block
end
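
The shoulda-mini shim removed above is replaced by the real shoulda gem, and each test file now wraps its contexts in a Test::Unit::TestCase subclass. A minimal sketch of that pattern (the class name and assertion are illustrative only):

require 'test/test_helper'

class ExampleTest < Test::Unit::TestCase
  context "An example" do
    setup do
      @value = 1
    end

    should "see state prepared in setup" do
      assert_equal 1, @value
    end
  end
end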

View File

@ -1,6 +1,8 @@
require 'test/test_helper'
context "Basic operations: " do
class CollectionTest < Test::Unit::TestCase
context "Basic operations: " do
setup do
@logger = mock()
end
@ -55,4 +57,5 @@ context "Basic operations: " do
end
@coll.update({}, {:title => 'Moby Dick'}, :safe => true)
end
end
end

View File

@ -1,7 +1,8 @@
require 'test/test_helper'
include Mongo
context "Initialization: " do
class ConnectionTest < Test::Unit::TestCase
context "Initialization: " do
setup do
def new_mock_socket
socket = Object.new
@ -112,4 +113,5 @@ context "Initialization: " do
end
end
end
end
end

View File

@ -1,6 +1,7 @@
require 'test/test_helper'
context "Cursor options" do
class CursorTest < Test::Unit::TestCase
context "Cursor options" do
setup do
@connection = stub(:class => Connection)
@db = stub(:name => "testing", :slave_ok? => false, :connection => @connection)
@ -61,9 +62,9 @@ context "Cursor options" do
should "cache full collection name" do
assert_equal "testing.items", @cursor.full_collection_name
end
end
end
context "Query fields" do
context "Query fields" do
setup do
@connection = stub(:class => Collection)
@db = stub(:slave_ok? => true, :name => "testing", :connection => @connection)
@ -88,4 +89,5 @@ context "Query fields" do
@cursor = Cursor.new(@collection, :fields => 1234567)
assert_nil @cursor.fields
end
end
end

View File

@ -1,6 +1,7 @@
require 'test/test_helper'
context "DBTest: " do
class DBTest < Test::Unit::TestCase
context "DBTest: " do
setup do
def insert_message(db, documents)
documents = [documents] unless documents.is_a?(Array)
@ -93,6 +94,5 @@ context "DBTest: " do
end
end
end
end
end