RUBY-422 Cleanup testing output
commit 6944794fb2 (parent ec86275b60)

@@ -59,7 +59,7 @@ module BSON
     def self.to_utf8_binary(str)
       begin
         str.unpack("U*")
-      rescue => ex
+      rescue
        raise InvalidStringEncoding, "String not valid utf-8: #{str.inspect}"
      end
      str.encode(UTF8_ENCODING).force_encoding(BINARY_ENCODING)

@@ -70,7 +70,7 @@ module BSON
     def self.to_utf8_binary(str)
       begin
         str.unpack("U*")
-      rescue => ex
+      rescue
        raise InvalidStringEncoding, "String not valid utf-8: #{str.inspect}"
      end
      str

@@ -57,7 +57,7 @@ module BSON
     def self.to_utf8_binary(str)
       begin
         str.unpack("U*")
-      rescue => ex
+      rescue
        raise InvalidStringEncoding, "String not valid utf-8: #{str.inspect}"
      end
      str

@@ -192,6 +192,9 @@ module BSON
     @@index = 0
     @@machine_id = Digest::MD5.digest(Socket.gethostname)[0, 3]

+    # We need to check whether BSON_CODER is defined because it's required by
+    # the BSON C extensions.
+    if defined?(BSON::BSON_CODER) && BSON::BSON_CODER == BSON::BSON_RUBY
     # This gets overwritten by the C extension if it loads.
     def generate(oid_time=nil)
       oid = ''

@@ -223,4 +226,5 @@ module BSON
       end
     end
   end
+  end
 end
@@ -961,7 +961,7 @@ module Mongo
        begin
          message.put_binary(BSON::BSON_CODER.serialize(doc, check_keys, true, @connection.max_bson_size).to_s)
          true
-        rescue StandardError => e # StandardError will be replaced with BSONError
+        rescue StandardError # StandardError will be replaced with BSONError
          doc.delete(:_id)
          error_docs << doc
          false

@@ -639,7 +639,7 @@ module Mongo
      else
        config = self['admin'].command({:ismaster => 1}, :socket => socket)
      end
-    rescue OperationFailure, SocketError, SystemCallError, IOError => ex
+    rescue OperationFailure, SocketError, SystemCallError, IOError
      close
    ensure
      socket.close if socket

@@ -115,7 +115,7 @@ module Mongo
      id = file.close
      if versions
        self.delete do
-          @files.find({'filename' => filename, '_id' => {'$ne' => id}}, :fields => ['_id'], :sort => ['uploadDate', -1], :skip => (versions -1))
+          @files.find({'filename' => filename, '_id' => {'$ne' => id}}, :fields => ['_id'], :sort => ['uploadDate', -1], :skip => (versions - 1))
        end
      end
    end
@@ -186,7 +186,10 @@ module Mongo

    def receive_header(sock, expected_response, exhaust=false)
      header = receive_message_on_socket(16, sock)
-      size, request_id, response_to = header.unpack('VVV')
+
+      # unpacks to size, request_id, response_to
+      response_to = header.unpack('VVV')[2]
+
      if !exhaust && expected_response != response_to
        raise Mongo::ConnectionFailure, "Expected response #{expected_response} but got #{response_to}"
      end

@@ -204,7 +207,10 @@ module Mongo
        raise "Short read for DB response header; " +
          "expected #{RESPONSE_HEADER_SIZE} bytes, saw #{header_buf.length}"
      end
-      flags, cursor_id_a, cursor_id_b, starting_from, number_remaining = header_buf.unpack('VVVVV')
+
+      # unpacks to flags, cursor_id_a, cursor_id_b, starting_from, number_remaining
+      flags, cursor_id_a, cursor_id_b, _, number_remaining = header_buf.unpack('VVVVV')
+
      check_response_flags(flags)
      cursor_id = (cursor_id_b << 32) + cursor_id_a
      [number_remaining, cursor_id]
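For background on the two networking hunks above (an illustrative aside, not taken from the commit itself): the standard 16-byte wire-protocol header is four little-endian 32-bit integers, so unpack('VVV') reads the first three and index 2 is the responseTo value that receive_header actually checks; dropping the names of the unused fields is what silences Ruby's "assigned but never used" warnings under -w. A minimal Ruby sketch with made-up values:

    header = [36, 42, 7, 1].pack('VVVV')   # messageLength, requestID, responseTo, opCode
    response_to = header.unpack('VVV')[2]  # => 7; no unused locals are introduced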
@@ -74,7 +74,7 @@ module Mongo
      begin
        result = @connection['admin'].command({:ping => 1}, :socket => @socket)
        return result['ok'] == 1
-      rescue OperationFailure, SocketError, SystemCallError, IOError => ex
+      rescue OperationFailure, SocketError, SystemCallError, IOError
        return nil
      end
    end

@@ -133,7 +133,7 @@ module Mongo
    def ping
      begin
        return self.connection['admin'].command({:ping => 1}, :socket => @node.socket)
-      rescue OperationFailure, SocketError, SystemCallError, IOError => ex
+      rescue OperationFailure, SocketError, SystemCallError, IOError
        return false
      end
    end
@@ -74,24 +74,22 @@ class BSONTest < Test::Unit::TestCase
  end

  def test_limit_max_bson_size
-    doc = {'name' => 'a' * BSON_CODER.max_bson_size}
+    doc = {'name' => 'a' * BSON::DEFAULT_MAX_BSON_SIZE}
    assert_raise InvalidDocument do
      assert @encoder.serialize(doc)
    end
  end

-  def test_max_bson_size
-    assert BSON_CODER.max_bson_size >= BSON::DEFAULT_MAX_BSON_SIZE
-  end
-
  def test_update_max_bson_size
    require 'ostruct'
    mock_conn = OpenStruct.new
    size = 7 * 1024 * 1024
    mock_conn.max_bson_size = size
+    silently do
      assert_equal size, BSON_CODER.update_max_bson_size(mock_conn)
      assert_equal size, BSON_CODER.max_bson_size
    end
+    end
  end

  def test_round_trip
    doc = {'doc' => 123}

@@ -217,10 +215,10 @@ class BSONTest < Test::Unit::TestCase
    doc = {'doc' => {'age' => 42, 'date' => Time.now.utc, 'shoe_size' => 9.5}}
    bson = @encoder.serialize(doc)
    doc2 = @encoder.deserialize(bson)
-    assert doc['doc']
-    assert_equal 42, doc['doc']['age']
-    assert_equal 9.5, doc['doc']['shoe_size']
-    assert_in_delta Time.now, doc['doc']['date'], 1
+    assert doc2['doc']
+    assert_equal 42, doc2['doc']['age']
+    assert_equal 9.5, doc2['doc']['shoe_size']
+    assert_in_delta Time.now, doc2['doc']['date'], 1
  end

  def test_oid

@@ -269,6 +267,7 @@ class BSONTest < Test::Unit::TestCase
    doc = {'date' => [Time.now.utc]}
    bson = @encoder.serialize(doc)
    doc2 = @encoder.deserialize(bson)
+    assert doc2
  end

  def test_date_returns_as_utc

@@ -297,7 +296,7 @@ class BSONTest < Test::Unit::TestCase
    [DateTime.now, Date.today, Zone].each do |invalid_date|
      doc = {:date => invalid_date}
      begin
-        bson = BSON::BSON_CODER.serialize(doc)
+        BSON::BSON_CODER.serialize(doc)
      rescue => e
      ensure
        if !invalid_date.is_a? Time
@@ -431,7 +430,7 @@ class BSONTest < Test::Unit::TestCase

  if !(RUBY_PLATFORM =~ /java/)
    def test_timestamp
-      val = {"test" => [4, 20]}
+      # val = {"test" => [4, 20]}
      result = @encoder.deserialize([0x13, 0x00, 0x00, 0x00,
                                     0x11, 0x74, 0x65, 0x73,
                                     0x74, 0x00, 0x04, 0x00,

@@ -455,7 +454,7 @@ class BSONTest < Test::Unit::TestCase
  def test_overflow
    doc = {"x" => 2**75}
    assert_raise RangeError do
-      bson = @encoder.serialize(doc)
+      @encoder.serialize(doc)
    end

    doc = {"x" => 9223372036854775}

@@ -466,7 +465,7 @@ class BSONTest < Test::Unit::TestCase

    doc["x"] = doc["x"] + 1
    assert_raise RangeError do
-      bson = @encoder.serialize(doc)
+      @encoder.serialize(doc)
    end

    doc = {"x" => -9223372036854775}

@@ -477,7 +476,7 @@ class BSONTest < Test::Unit::TestCase

    doc["x"] = doc["x"] - 1
    assert_raise RangeError do
-      bson = BSON::BSON_CODER.serialize(doc)
+      BSON::BSON_CODER.serialize(doc)
    end
  end


@@ -529,7 +528,7 @@ class BSONTest < Test::Unit::TestCase
    #one = {"_foo" => "foo"}

    #assert_equal @encoder.serialize(one).to_a, @encoder.serialize(dup).to_a
-    warn "Pending test for duplicate keys"
+    #warn "Pending test for duplicate keys"
  end

  def test_no_duplicate_id_when_moving_id
@@ -1,6 +1,5 @@
 # encoding:utf-8
 require './test/bson/test_helper'
-require './test/support/hash_with_indifferent_access'

 class HashWithIndifferentAccessTest < Test::Unit::TestCase
   include BSON

@@ -7,7 +7,7 @@ class JSONTest < Test::Unit::TestCase
  # This test passes when run by itself but fails
  # when run as part of the whole test suite.
  def test_object_id_as_json
-    warn "Pending test object id as json"
+    #warn "Pending test object id as json"
    #id = BSON::ObjectId.new

    #obj = {'_id' => id}
@@ -42,8 +42,10 @@ class TestCollection < Test::Unit::TestCase
  end

  def test_pk_factory_on_collection
+    silently do
      @coll = Collection.new('foo', @@db, TestPK)
      assert_equal TestPK, @coll.pk_factory
+    end


    @coll2 = Collection.new('foo', @@db, :pk => TestPK)

@@ -197,6 +199,7 @@ class TestCollection < Test::Unit::TestCase
    docs << {:bar => 1}
    doc_ids, error_docs = @@test.insert(docs, :collect_on_error => true)
    assert_equal 2, @@test.count
+    assert_equal 2, doc_ids.count
    assert_equal error_docs, []
  end


@@ -215,6 +218,7 @@ class TestCollection < Test::Unit::TestCase

    doc_ids, error_docs = @@test.insert(docs, :collect_on_error => true)
    assert_equal 2, @@test.count
+    assert_equal 2, doc_ids.count
    assert_equal error_docs, invalid_docs
  end


@@ -232,6 +236,7 @@ class TestCollection < Test::Unit::TestCase

    doc_ids, error_docs = @@test.insert(docs, :collect_on_error => true)
    assert_equal 2, @@test.count
+    assert_equal 2, doc_ids.count
    assert_equal error_docs, invalid_docs
  end

@@ -133,7 +133,7 @@ class TestConnection < Test::Unit::TestCase
    output = StringIO.new
    logger = Logger.new(output)
    logger.level = Logger::DEBUG
-    connection = standard_connection(:logger => logger).db(MONGO_TEST_DB)
+    standard_connection(:logger => logger).db(MONGO_TEST_DB)
    assert output.string.include?("admin['$cmd'].find")
  end


@@ -141,8 +141,8 @@ class TestConnection < Test::Unit::TestCase
    output = StringIO.new
    logger = Logger.new(output)
    logger.level = Logger::DEBUG
-    connection = standard_connection(:logger => logger).db(MONGO_TEST_DB)
-    assert_match /\(\d+ms\)/, output.string
+    standard_connection(:logger => logger).db(MONGO_TEST_DB)
+    assert_match(/\(\d+ms\)/, output.string)
    assert output.string.include?("admin['$cmd'].find")
  end


@@ -170,8 +170,10 @@ class TestConnection < Test::Unit::TestCase
  end

  def test_nodes
-    conn = Connection.multi([['foo', 27017], ['bar', 27018]], :connect => false)
-    nodes = conn.nodes
+    silently do
+      @conn = Connection.multi([['foo', 27017], ['bar', 27018]], :connect => false)
+    end
+    nodes = @conn.seeds
    assert_equal 2, nodes.length
    assert_equal ['foo', 27017], nodes[0]
    assert_equal ['bar', 27018], nodes[1]

@@ -227,7 +229,7 @@ class TestConnection < Test::Unit::TestCase
    conn.expects(:[]).with('admin').returns(admin_db)

    conn.connect
-    assert_equal Mongo::DEFAULT_MAX_BSON_SIZE, BSON::BSON_CODER.max_bson_size
+    assert_equal Mongo::DEFAULT_MAX_BSON_SIZE, conn.max_bson_size
  end

  def test_connection_activity

@@ -290,7 +292,7 @@ class TestConnection < Test::Unit::TestCase
  context "Socket pools" do
    context "checking out writers" do
      setup do
-        @con = standard_connection(:pool_size => 10, :timeout => 10)
+        @con = standard_connection(:pool_size => 10, :pool_timeout => 10)
        @coll = @con[MONGO_TEST_DB]['test-connection-exceptions']
      end


@@ -325,7 +327,7 @@ class TestConnection < Test::Unit::TestCase

  context "Connection exceptions" do
    setup do
-      @con = standard_connection(:pool_size => 10, :timeout => 10)
+      @con = standard_connection(:pool_size => 10, :pool_timeout => 10)
      @coll = @con[MONGO_TEST_DB]['test-connection-exceptions']
    end


@@ -363,6 +365,7 @@ class TestConnection < Test::Unit::TestCase
      TCPSocket.stubs(:new).returns(fake_socket)

      @con.primary_pool.checkout_new_socket
+      @con.primary_pool.expects(:warn)
      assert @con.primary_pool.close
    end
  end
@@ -238,17 +238,17 @@ class CursorTest < Test::Unit::TestCase
  end

  def test_timeout
-    opts = Cursor.new(@@coll).query_opts
+    opts = Cursor.new(@@coll).options
    assert_equal 0, opts & Mongo::Constants::OP_QUERY_NO_CURSOR_TIMEOUT

-    opts = Cursor.new(@@coll, :timeout => false).query_opts
+    opts = Cursor.new(@@coll, :timeout => false).options
    assert_equal Mongo::Constants::OP_QUERY_NO_CURSOR_TIMEOUT,
      opts & Mongo::Constants::OP_QUERY_NO_CURSOR_TIMEOUT
  end

  def test_limit_exceptions
    cursor = @@coll.find()
-    firstResult = cursor.next_document
+    cursor.next_document
    assert_raise InvalidOperation, "Cannot modify the query once it has been run or closed." do
      cursor.limit(1)
    end

@@ -278,7 +278,7 @@ class CursorTest < Test::Unit::TestCase

  def test_skip_exceptions
    cursor = @@coll.find()
-    firstResult = cursor.next_document
+    cursor.next_document
    assert_raise InvalidOperation, "Cannot modify the query once it has been run or closed." do
      cursor.skip(1)
    end
@@ -263,7 +263,9 @@ class GridFileSystemTest < Test::Unit::TestCase

    should "seek only in read mode" do
      assert_raise GridError do
-        @grid.open('hello', 'w') {|f| f.seek(0) }
+        silently do
+          @grid.open('hello', 'w') { |f| f.seek(0) }
+        end
      end
    end
  end
@@ -45,7 +45,7 @@ class GridIOTest < Test::Unit::TestCase
    should "read data character by character using" do
      bytes = 0
      file = GridIO.new(@files, @chunks, nil, "r", :query => {:_id => @file.files_id})
-      while char = file.getc
+      while file.getc
        bytes += 1
      end
      assert_equal bytes, 1_000_000

@@ -111,7 +111,7 @@ class GridIOTest < Test::Unit::TestCase

    should "tell position, eof, and rewind" do
      file = GridIO.new(@files, @chunks, nil, "r", :query => {:_id => @file.files_id})
-      string = file.read(1000)
+      file.read(1000)
      assert_equal 1000, file.pos
      assert !file.eof?
      file.read
@@ -142,7 +142,9 @@ class GridTest < Test::Unit::TestCase
    end

    should "ignore special keys" do
-      id = @grid.put(@data, :file_length => 100, :phrase => "blimey")
+      id = silently do
+        @grid.put(@data, :file_length => 100, :phrase => "blimey")
+      end
      file = @grid.get(id)

      assert_equal "blimey", file['phrase']

@@ -153,8 +155,9 @@ class GridTest < Test::Unit::TestCase
  context "Storing data with a length of zero" do
    setup do
      @grid = Grid.new(@db, 'test-fs')
-      @id = @grid.put('', :filename => 'sample',
-                      :metadata => {'app' => 'photos'})
+      @id = silently do
+        @grid.put('', :filename => 'sample', :metadata => {'app' => 'photos'})
+      end
    end

    should "return the zero length" do

@@ -201,7 +204,9 @@ class GridTest < Test::Unit::TestCase
      @grid = Grid.new(@db, 'test-fs')
      filename = 'empty_data'
      @io = File.open(File.join(File.dirname(__FILE__), 'data', filename), 'r')
-      id = @grid.put(@io, :filename => filename)
+      id = silently do
+        @grid.put(@io, :filename => filename)
+      end
      @file = @grid.get(id)
      @io.rewind
      @data = @io.read

@@ -239,8 +244,10 @@ class GridTest < Test::Unit::TestCase
    end

    should "put and get an empty io object" do
+      silently do
        read_and_write_stream('empty_data', 1)
      end
+      end

    should "put and get a small io object" do
      read_and_write_stream('small_data.txt', 1)
@@ -14,7 +14,9 @@ class ConnectTest < Test::Unit::TestCase
  # TODO: test connect timeout.

  def test_connect_with_deprecated_multi
+    silently do
      @conn = Connection.multi([[@rs.host, @rs.ports[0]], [@rs.host, @rs.ports[1]]], :name => @rs.name)
+    end
    assert @conn.is_a?(ReplSetConnection)
    assert @conn.connected?
  end
@@ -7,7 +7,7 @@ class ReplicaSetPooledInsertTest < Test::Unit::TestCase

  def setup
    ensure_rs
-    @conn = ReplSetConnection.new(build_seeds(3), :pool_size => 10, :timeout => 5, :refresh_mode => false)
+    @conn = ReplSetConnection.new(build_seeds(3), :pool_size => 10, :pool_timeout => 5, :refresh_mode => false)
    @db = @conn.db(MONGO_TEST_DB)
    @db.drop_collection("test-sets")
    @coll = @db.collection("test-sets")
@@ -21,7 +21,7 @@ class Test::Unit::TestCase
    begin
      yield
    rescue Mongo::ConnectionFailure => ex
-      puts "Rescue attempt #{retries}: from #{ex}"
+      #puts "Rescue attempt #{retries}: from #{ex}"
      retries += 1
      raise ex if retries > max_retries
      sleep(2)
@@ -7,8 +7,11 @@ require 'test/unit'
 def silently
   warn_level = $VERBOSE
   $VERBOSE = nil
+  begin
     result = yield
+  ensure
     $VERBOSE = warn_level
+  end
   result
 end

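A note on the silently helper above (illustration only, not part of the commit): wrapping the yield in begin/ensure guarantees that $VERBOSE is restored even when the block raises, so a failing test can no longer leave Ruby warnings disabled for the rest of the suite. The test hunks in this commit use the helper like this:

    silently do
      @conn = Connection.multi([['foo', 27017], ['bar', 27018]], :connect => false)  # deprecation warning suppressed
    end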
@@ -25,7 +28,6 @@ You can install them as follows:
   gem install mocha
-
 MSG

   exit
 end


@@ -88,7 +90,7 @@ class Test::Unit::TestCase
  end

  def new_mock_db
-    db = Object.new
+    Object.new
  end

  def assert_raise_error(klass, message)
@@ -6,7 +6,7 @@ class TestThreadingLargePool < Test::Unit::TestCase

  include Mongo

-  @@db = standard_connection(:pool_size => 50, :timeout => 60).db(MONGO_TEST_DB)
+  @@db = standard_connection(:pool_size => 50, :pool_timeout => 60).db(MONGO_TEST_DB)
  @@coll = @@db.collection('thread-test-collection')

  def set_up_safe_data

@@ -4,7 +4,7 @@ class TestThreading < Test::Unit::TestCase

  include Mongo

-  @@con = standard_connection(:pool_size => 10, :timeout => 30)
+  @@con = standard_connection(:pool_size => 10, :pool_timeout => 30)
  @@db = @@con[MONGO_TEST_DB]
  @@coll = @@db.collection('thread-test-collection')

@@ -81,13 +81,6 @@ class ConnectionTest < Test::Unit::TestCase
      assert_equal({:w => 2, :wtimeout => 1000, :fsync => true, :j => true}, @conn.safe)
    end

-    should "have wtimeoutMS take precidence over the depricated wtimeout" do
-      host_name = "localhost"
-      opts = "safe=true&wtimeout=100&wtimeoutMS=500"
-      @conn = Connection.from_uri("mongodb://#{host_name}/foo?#{opts}", :connect => false)
-      assert_equal({:wtimeout => 500}, @conn.safe)
-    end
-
    should "set timeout options on connection" do
      host_name = "localhost"
      opts = "connectTimeoutMS=1000&socketTimeoutMS=5000"
@@ -78,10 +78,9 @@ class URITest < Test::Unit::TestCase
  end

  def test_opts_safe
-    parser = Mongo::URIParser.new('mongodb://localhost:27018?safe=true;w=2;journal=true;wtimeout=200;fsync=true;wtimeoutMS=200')
+    parser = Mongo::URIParser.new('mongodb://localhost:27018?safe=true;w=2;journal=true;fsync=true;wtimeoutMS=200')
    assert parser.safe
    assert_equal 2, parser.w
-    assert_equal 200, parser.wtimeout
    assert parser.fsync
    assert parser.journal
    assert_equal 200, parser.wtimeoutms