2009-12-01 22:29:22 +00:00
|
|
|
# encoding:utf-8
|
2009-12-01 18:49:57 +00:00
|
|
|
require 'test/test_helper'
|
2010-01-21 19:49:20 +00:00
|
|
|
require 'complex'
|
|
|
|
require 'bigdecimal'
|
|
|
|
require 'rational'
|
2008-12-16 12:20:20 +00:00
|
|
|
|
2010-03-02 16:11:07 +00:00
|
|
|
# Optionally pull in ActiveSupport so we can exercise BSON against
# HashWithIndifferentAccess and TimeWithZone. When ActiveSupport is not
# installed we warn, then define a stand-in TimeWithZone so that the
# Zone constant (used by the date-class tests) still exists.
begin
  require 'active_support/core_ext'
  require 'active_support/hash_with_indifferent_access'
  Time.zone = "Pacific Time (US & Canada)"
  # A real zone-aware time instance for the unsupported-date-class tests.
  Zone = Time.zone.now
rescue LoadError
  warn 'Could not test BSON with HashWithIndifferentAccess.'

  # Minimal stub standing in for ActiveSupport::TimeWithZone.
  module ActiveSupport
    class TimeWithZone
    end
  end

  Zone = ActiveSupport::TimeWithZone.new
end
|
|
|
|
|
2008-12-16 12:20:20 +00:00
|
|
|
# Round-trip and edge-case tests for the BSON codec: primitive types,
# UTF-8 handling, _id placement, overflow limits, and invalid documents.
class BSONTest < Test::Unit::TestCase

  include BSON

  def test_serialize_returns_byte_buffer
    doc = {'doc' => 'hello, world'}
    bson = BSON.serialize(doc)
    assert bson.is_a?(ByteBuffer)
  end

  def test_deprecated_bson_module
    doc = {'doc' => 'hello, world'}
    bson = BSON.serialize(doc)
    assert_equal doc, BSON.deserialize(bson)
  end

  def test_string
    doc = {'doc' => 'hello, world'}
    # FIX: was `bson = bson = ...` (accidental duplicated assignment).
    bson = BSON::BSON_CODER.serialize(doc)
    assert_equal doc, BSON::BSON_CODER.deserialize(bson)
  end

  def test_valid_utf8_string
    doc = {'doc' => 'aé'}
    bson = BSON::BSON_CODER.serialize(doc)
    assert_equal doc, BSON::BSON_CODER.deserialize(bson)
  end

  def test_valid_utf8_key
    doc = {'aé' => 'hello'}
    bson = BSON::BSON_CODER.serialize(doc)
    assert_equal doc, BSON::BSON_CODER.deserialize(bson)
  end

  def test_document_length
    # Documents over the size limit must raise InvalidDocument.
    doc = {'name' => 'a' * 5 * 1024 * 1024}
    assert_raise InvalidDocument do
      assert BSON::BSON_CODER.serialize(doc)
    end
  end

  # In 1.8 we test that other string encodings raise an exception.
  # In 1.9 we test that they get auto-converted.
  if RUBY_VERSION < '1.9'
    require 'iconv'

    def test_invalid_string
      string = Iconv.conv('iso-8859-1', 'utf-8', 'aé')
      doc = {'doc' => string}
      assert_raise InvalidStringEncoding do
        BSON::BSON_CODER.serialize(doc)
      end
    end

    def test_invalid_key
      key = Iconv.conv('iso-8859-1', 'utf-8', 'aé')
      doc = {key => 'hello'}
      assert_raise InvalidStringEncoding do
        BSON::BSON_CODER.serialize(doc)
      end
    end
  else
    def test_non_utf8_string
      bson = BSON::BSON_CODER.serialize({'str' => 'aé'.encode('iso-8859-1')})
      result = BSON::BSON_CODER.deserialize(bson)['str']
      assert_equal 'aé', result
      assert_equal 'UTF-8', result.encoding.name
    end

    def test_non_utf8_key
      bson = BSON::BSON_CODER.serialize({'aé'.encode('iso-8859-1') => 'hello'})
      assert_equal 'hello', BSON::BSON_CODER.deserialize(bson)['aé']
    end
  end

  def test_code
    doc = {'$where' => Code.new('this.a.b < this.b')}
    bson = BSON::BSON_CODER.serialize(doc)
    assert_equal doc, BSON::BSON_CODER.deserialize(bson)
  end

  def test_number
    doc = {'doc' => 41.99}
    bson = BSON::BSON_CODER.serialize(doc)
    assert_equal doc, BSON::BSON_CODER.deserialize(bson)
  end

  def test_int
    doc = {'doc' => 42}
    bson = BSON::BSON_CODER.serialize(doc)
    assert_equal doc, BSON::BSON_CODER.deserialize(bson)

    doc = {"doc" => -5600}
    bson = BSON::BSON_CODER.serialize(doc)
    assert_equal doc, BSON::BSON_CODER.deserialize(bson)

    # 32-bit int boundaries.
    doc = {"doc" => 2147483647}
    bson = BSON::BSON_CODER.serialize(doc)
    assert_equal doc, BSON::BSON_CODER.deserialize(bson)

    doc = {"doc" => -2147483648}
    bson = BSON::BSON_CODER.serialize(doc)
    assert_equal doc, BSON::BSON_CODER.deserialize(bson)
  end

  def test_ordered_hash
    # Insertion order must survive a round trip.
    doc = OrderedHash.new
    doc["b"] = 1
    doc["a"] = 2
    doc["c"] = 3
    doc["d"] = 4
    bson = BSON::BSON_CODER.serialize(doc)
    assert_equal doc, BSON::BSON_CODER.deserialize(bson)
  end

  def test_object
    doc = {'doc' => {'age' => 42, 'name' => 'Spongebob', 'shoe_size' => 9.5}}
    bson = BSON::BSON_CODER.serialize(doc)
    assert_equal doc, BSON::BSON_CODER.deserialize(bson)
  end

  def test_oid
    doc = {'doc' => ObjectID.new}
    bson = BSON::BSON_CODER.serialize(doc)
    assert_equal doc, BSON::BSON_CODER.deserialize(bson)
  end

  def test_array
    doc = {'doc' => [1, 2, 'a', 'b']}
    bson = BSON::BSON_CODER.serialize(doc)
    assert_equal doc, BSON::BSON_CODER.deserialize(bson)
  end

  def test_regex
    doc = {'doc' => /foobar/i}
    bson = BSON::BSON_CODER.serialize(doc)
    doc2 = BSON::BSON_CODER.deserialize(bson)
    assert_equal doc, doc2

    # The deserialized regexp must itself survive a second round trip.
    r = doc2['doc']
    assert_kind_of Regexp, r

    doc = {'doc' => r}
    bson_doc = BSON::BSON_CODER.serialize(doc)
    doc2 = nil
    doc2 = BSON::BSON_CODER.deserialize(bson_doc)
    assert_equal doc, doc2
  end

  def test_boolean
    doc = {'doc' => true}
    bson = BSON::BSON_CODER.serialize(doc)
    assert_equal doc, BSON::BSON_CODER.deserialize(bson)
  end

  def test_date
    doc = {'date' => Time.now}
    bson = BSON::BSON_CODER.serialize(doc)
    doc2 = BSON::BSON_CODER.deserialize(bson)
    # Mongo only stores up to the millisecond
    assert_in_delta doc['date'], doc2['date'], 0.001
  end

  def test_date_returns_as_utc
    doc = {'date' => Time.now}
    bson = BSON::BSON_CODER.serialize(doc)
    doc2 = BSON::BSON_CODER.deserialize(bson)
    assert doc2['date'].utc?
  end

  def test_date_before_epoch
    begin
      doc = {'date' => Time.utc(1600)}
      bson = BSON::BSON_CODER.serialize(doc)
      doc2 = BSON::BSON_CODER.deserialize(bson)
      # Mongo only stores up to the millisecond
      assert_in_delta doc['date'], doc2['date'], 0.001
    rescue ArgumentError
      # some versions of Ruby won't let you create pre-epoch Time instances
      #
      # TODO figure out how that will work if somebady has saved data
      # w/ early dates already and is just querying for it.
    end
  end

  # FIX: method name typo corrected ("exeption" -> "exception").
  def test_exception_on_using_unsupported_date_class
    [DateTime.now, Date.today, Zone].each do |invalid_date|
      doc = {:date => invalid_date}
      begin
        bson = BSON::BSON_CODER.serialize(doc)
      rescue => e
      ensure
        unless invalid_date.is_a?(Time)
          assert_equal InvalidDocument, e.class
          # FIX: parenthesized to avoid the ambiguous-regexp warning.
          assert_match(/UTC Time/, e.message)
        end
      end
    end
  end

  def test_dbref
    oid = ObjectID.new
    doc = {}
    doc['dbref'] = DBRef.new('namespace', oid)
    bson = BSON::BSON_CODER.serialize(doc)
    doc2 = BSON::BSON_CODER.deserialize(bson)
    assert_equal 'namespace', doc2['dbref'].namespace
    assert_equal oid, doc2['dbref'].object_id
  end

  def test_symbol
    doc = {'sym' => :foo}
    bson = BSON::BSON_CODER.serialize(doc)
    doc2 = BSON::BSON_CODER.deserialize(bson)
    assert_equal :foo, doc2['sym']
  end

  def test_binary
    bin = Binary.new
    'binstring'.each_byte { |b| bin.put(b) }

    doc = {'bin' => bin}
    bson = BSON::BSON_CODER.serialize(doc)
    doc2 = BSON::BSON_CODER.deserialize(bson)
    bin2 = doc2['bin']
    assert_kind_of Binary, bin2
    assert_equal 'binstring', bin2.to_s
    assert_equal Binary::SUBTYPE_BYTES, bin2.subtype
  end

  def test_binary_with_string
    b = Binary.new('somebinarystring')
    doc = {'bin' => b}
    bson = BSON::BSON_CODER.serialize(doc)
    doc2 = BSON::BSON_CODER.deserialize(bson)
    bin2 = doc2['bin']
    assert_kind_of Binary, bin2
    assert_equal 'somebinarystring', bin2.to_s
    assert_equal Binary::SUBTYPE_BYTES, bin2.subtype
  end

  def test_binary_type
    bin = Binary.new([1, 2, 3, 4, 5], Binary::SUBTYPE_USER_DEFINED)

    doc = {'bin' => bin}
    bson = BSON::BSON_CODER.serialize(doc)
    doc2 = BSON::BSON_CODER.deserialize(bson)
    bin2 = doc2['bin']
    assert_kind_of Binary, bin2
    assert_equal [1, 2, 3, 4, 5], bin2.to_a
    assert_equal Binary::SUBTYPE_USER_DEFINED, bin2.subtype
  end

  def test_binary_byte_buffer
    bb = Binary.new
    5.times { |i| bb.put(i + 1) }

    doc = {'bin' => bb}
    bson = BSON::BSON_CODER.serialize(doc)
    doc2 = BSON::BSON_CODER.deserialize(bson)
    bin2 = doc2['bin']
    assert_kind_of Binary, bin2
    assert_equal [1, 2, 3, 4, 5], bin2.to_a
    assert_equal Binary::SUBTYPE_BYTES, bin2.subtype
  end

  def test_put_id_first
    val = OrderedHash.new
    val['not_id'] = 1
    val['_id'] = 2
    roundtrip = BSON::BSON_CODER.deserialize(BSON::BSON_CODER.serialize(val, false, true).to_a)
    assert_kind_of OrderedHash, roundtrip
    assert_equal '_id', roundtrip.keys.first

    val = {'a' => 'foo', 'b' => 'bar', :_id => 42, 'z' => 'hello'}
    roundtrip = BSON::BSON_CODER.deserialize(BSON::BSON_CODER.serialize(val, false, true).to_a)
    assert_kind_of OrderedHash, roundtrip
    assert_equal '_id', roundtrip.keys.first
  end

  def test_nil_id
    doc = {"_id" => nil}
    # FIX: dropped the useless inline `bson =` assignment inside the argument.
    assert_equal doc, BSON::BSON_CODER.deserialize(BSON::BSON_CODER.serialize(doc, false, true).to_a)
  end

  def test_timestamp
    val = {"test" => [4, 20]}
    assert_equal val, BSON::BSON_CODER.deserialize([0x13, 0x00, 0x00, 0x00,
                                                    0x11, 0x74, 0x65, 0x73,
                                                    0x74, 0x00, 0x04, 0x00,
                                                    0x00, 0x00, 0x14, 0x00,
                                                    0x00, 0x00, 0x00])
  end

  def test_overflow
    # Anything outside the signed 64-bit range must raise RangeError.
    doc = {"x" => 2**75}
    assert_raise RangeError do
      bson = BSON::BSON_CODER.serialize(doc)
    end

    doc = {"x" => 9223372036854775}
    assert_equal doc, BSON::BSON_CODER.deserialize(BSON::BSON_CODER.serialize(doc).to_a)

    doc = {"x" => 9223372036854775807}
    assert_equal doc, BSON::BSON_CODER.deserialize(BSON::BSON_CODER.serialize(doc).to_a)

    doc["x"] = doc["x"] + 1
    assert_raise RangeError do
      bson = BSON::BSON_CODER.serialize(doc)
    end

    doc = {"x" => -9223372036854775}
    assert_equal doc, BSON::BSON_CODER.deserialize(BSON::BSON_CODER.serialize(doc).to_a)

    doc = {"x" => -9223372036854775808}
    assert_equal doc, BSON::BSON_CODER.deserialize(BSON::BSON_CODER.serialize(doc).to_a)

    doc["x"] = doc["x"] - 1
    assert_raise RangeError do
      bson = BSON::BSON_CODER.serialize(doc)
    end
  end

  def test_invalid_numeric_types
    # FIX: BigDecimal("1.0") instead of the deprecated BigDecimal.new form.
    [BigDecimal("1.0"), Complex(0, 1), Rational(2, 3)].each do |type|
      doc = {"x" => type}
      begin
        BSON::BSON_CODER.serialize(doc)
      rescue => e
      ensure
        assert_equal InvalidDocument, e.class
        # FIX: parenthesized to avoid the ambiguous-regexp warning.
        assert_match(/Cannot serialize/, e.message)
      end
    end
  end

  def test_do_not_change_original_object
    val = OrderedHash.new
    val['not_id'] = 1
    val['_id'] = 2
    assert val.keys.include?('_id')
    BSON::BSON_CODER.serialize(val)
    assert val.keys.include?('_id')

    val = {'a' => 'foo', 'b' => 'bar', :_id => 42, 'z' => 'hello'}
    assert val.keys.include?(:_id)
    BSON::BSON_CODER.serialize(val)
    assert val.keys.include?(:_id)
  end

  # note we only test for _id here because in the general case we will
  # write duplicates for :key and "key". _id is a special case because
  # we call has_key? to check for it's existance rather than just iterating
  # over it like we do for the rest of the keys. thus, things like
  # HashWithIndifferentAccess can cause problems for _id but not for other
  # keys. rather than require rails to test with HWIA directly, we do this
  # somewhat hacky test.
  def test_no_duplicate_id
    dup = {"_id" => "foo", :_id => "foo"}
    one = {"_id" => "foo"}

    assert_equal BSON::BSON_CODER.serialize(one).to_a, BSON::BSON_CODER.serialize(dup).to_a
  end

  def test_no_duplicate_id_when_moving_id
    dup = {"_id" => "foo", :_id => "foo"}
    one = {:_id => "foo"}

    assert_equal BSON::BSON_CODER.serialize(one, false, true).to_s, BSON::BSON_CODER.serialize(dup, false, true).to_s
  end

  def test_null_character
    # NUL is legal inside values but not inside keys or regexp sources.
    doc = {"a" => "\x00"}

    assert_equal doc, BSON::BSON_CODER.deserialize(BSON::BSON_CODER.serialize(doc).to_a)

    assert_raise InvalidDocument do
      BSON::BSON_CODER.serialize({"\x00" => "a"})
    end

    assert_raise InvalidDocument do
      BSON::BSON_CODER.serialize({"a" => (Regexp.compile "ab\x00c")})
    end
  end

  def test_max_key
    doc = {"a" => MaxKey.new}

    assert_equal doc, BSON::BSON_CODER.deserialize(BSON::BSON_CODER.serialize(doc).to_a)
  end

  def test_min_key
    doc = {"a" => MinKey.new}

    assert_equal doc, BSON::BSON_CODER.deserialize(BSON::BSON_CODER.serialize(doc).to_a)
  end

  def test_invalid_object
    o = Object.new
    assert_raise InvalidDocument do
      BSON::BSON_CODER.serialize({:foo => o})
    end

    assert_raise InvalidDocument do
      BSON::BSON_CODER.serialize({:foo => Date.today})
    end
  end

  def test_move_id
    a = OrderedHash.new
    a['text'] = 'abc'
    a['key'] = 'abc'
    a['_id'] = 1

    assert_equal ")\000\000\000\020_id\000\001\000\000\000\002text" +
                 "\000\004\000\000\000abc\000\002key\000\004\000\000\000abc\000\000",
                 BSON::BSON_CODER.serialize(a, false, true).to_s
    assert_equal ")\000\000\000\002text\000\004\000\000\000abc\000\002key" +
                 "\000\004\000\000\000abc\000\020_id\000\001\000\000\000\000",
                 BSON::BSON_CODER.serialize(a, false, false).to_s
  end

  def test_move_id_with_nested_doc
    b = OrderedHash.new
    b['text'] = 'abc'
    b['_id'] = 2
    c = OrderedHash.new
    c['text'] = 'abc'
    c['hash'] = b
    c['_id'] = 3
    # Only the top-level _id moves to the front; nested _id stays in place.
    assert_equal ">\000\000\000\020_id\000\003\000\000\000\002text" +
                 "\000\004\000\000\000abc\000\003hash\000\034\000\000" +
                 "\000\002text\000\004\000\000\000abc\000\020_id\000\002\000\000\000\000\000",
                 BSON::BSON_CODER.serialize(c, false, true).to_s
    assert_equal ">\000\000\000\002text\000\004\000\000\000abc\000\003hash" +
                 "\000\034\000\000\000\002text\000\004\000\000\000abc\000\020_id" +
                 "\000\002\000\000\000\000\020_id\000\003\000\000\000\000",
                 BSON::BSON_CODER.serialize(c, false, false).to_s
  end

  if defined?(HashWithIndifferentAccess)
    def test_keep_id_with_hash_with_indifferent_access
      doc = HashWithIndifferentAccess.new
      embedded = HashWithIndifferentAccess.new
      embedded['_id'] = ObjectID.new
      doc['_id'] = ObjectID.new
      doc['embedded'] = [embedded]
      BSON::BSON_CODER.serialize(doc, false, true).to_a
      assert doc.has_key?("_id")
      assert doc['embedded'][0].has_key?("_id")

      doc['_id'] = ObjectID.new
      BSON::BSON_CODER.serialize(doc, false, true).to_a
      assert doc.has_key?("_id")
    end
  end
end
|