Try again: Split the BSON logic out to a separate gem
- changed the BSON constant to BSON_CODER and scoped it inside a module
- changed the directory layout for all of the BSON-related files
- updated the C extension to find the BSON files at their new directory locations
- updated the C extension to use better/safer macros for accessing the C API; the extension now compiles cleanly under Rubinius (rbx)
- changed the directory layout for the BSON-related tests
- modified the Rakefile to understand the new layout
Parent: 18d7d1c699
Commit: a9b3c8e7a5
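The most visible effect of the rename is that call sites now reach the coder through a module-scoped constant instead of the bare top-level BSON constant. A minimal round-trip sketch of the new call sites, mirroring the changes below (how Mongo::BSON_CODER itself gets bound to the C or pure-Ruby implementation is left to the new mongo_bson gem and is not shown in this commit):

    require 'mongo'

    doc  = {'_id' => 1, 'name' => 'spongebob'}            # sample document
    bson = Mongo::BSON_CODER.serialize(doc, false, true)  # was: BSON.serialize(doc, false, true)
    back = Mongo::BSON_CODER.deserialize(bson)            # was: BSON.deserialize(bson)
    raise 'round-trip failed' unless back['name'] == doc['name']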
Rakefile (9 changed lines)

@@ -27,6 +27,7 @@ namespace :test do
     ENV['C_EXT'] = 'TRUE'
     Rake::Task['test:unit'].invoke
     Rake::Task['test:functional'].invoke
+    Rake::Task['test:bson'].invoke
     Rake::Task['test:pooled_threading'].invoke
     Rake::Task['test:drop_databases'].invoke
     ENV['C_EXT'] = nil
@@ -37,10 +38,11 @@ namespace :test do
     ENV['C_EXT'] = nil
     Rake::Task['test:unit'].invoke
     Rake::Task['test:functional'].invoke
+    Rake::Task['test:bson'].invoke
     Rake::Task['test:pooled_threading'].invoke
     Rake::Task['test:drop_databases'].invoke
   end

   Rake::TestTask.new(:unit) do |t|
     t.test_files = FileList['test/unit/*_test.rb']
     t.verbose = true
@@ -86,6 +88,11 @@ namespace :test do
     t.verbose = true
   end

+  Rake::TestTask.new(:bson) do |t|
+    t.test_files = FileList['test/mongo_bson/*_test.rb']
+    t.verbose = true
+  end
+
   task :drop_databases do |t|
     puts "Dropping test database..."
     require File.join(File.dirname(__FILE__), 'lib', 'mongo')
@@ -9,9 +9,9 @@ TRIALS = 100000

 def encode(doc)
   t0 = Time.new
-  b = BSON.new
+  b = Mongo::BSON_CODER.new
   TRIALS.times { |i|
-    b = BSON.new
+    b = Mongo::BSON_CODER.new
     b.serialize doc
   }
   print "took: #{Time.now.to_f - t0.to_f}\n"
@@ -128,9 +128,22 @@ static void write_utf8(buffer_t buffer, VALUE string, char check_null) {
 #define INT2STRING(buffer, i) asprintf(buffer, "%d", i);
 #endif

-// this sucks too.
-#ifndef RREGEXP_SRC
-#define RREGEXP_SRC(r) rb_str_new(RREGEXP((r))->str, RREGEXP((r))->len)
+/* for rubinius compatibility, use the RREGEXP_SOURCE macro to retrieve
+ * the regex's source pattern. MRI 1.8 and 1.9 both have RREGEXP_SRC
+ * defined, but the underlying structure is different, so the second
+ * if/else takes care of that.
+ */
+#ifndef RREGEXP_SOURCE
+#ifdef RREGEXP_SRC
+#define RREGEXP_SOURCE(r) RREGEXP_SRC(r)
+#else
+#define RREGEXP_SOURCE(r) rb_str_new(RREGEXP((r))->str, RREGEXP((r))->len)
+#endif
+#endif
+
+// rubinius compatibility
+#ifndef RREGEXP_OPTIONS
+#define RREGEXP_OPTIONS(r) RREGEXP(value)->ptr->options
 #endif

 static char zero = 0;
@@ -275,7 +288,7 @@ static int write_element(VALUE key, VALUE value, VALUE extra, int allow_id) {
        }
    case T_STRING:
        {
-           if (strcmp(rb_class2name(RBASIC(value)->klass),
+           if (strcmp(rb_obj_classname(value),
                       "Mongo::Code") == 0) {
                buffer_position length_location, start_position, total_length;
                int length;
@@ -319,7 +332,7 @@ static int write_element(VALUE key, VALUE value, VALUE extra, int allow_id) {
    case T_OBJECT:
        {
            // TODO there has to be a better way to do these checks...
-           const char* cls = rb_class2name(RBASIC(value)->klass);
+           const char* cls = rb_obj_classname(value);
            if (strcmp(cls, "Mongo::Binary") == 0 ||
                strcmp(cls, "ByteBuffer") == 0) {
                const char subtype = strcmp(cls, "ByteBuffer") ?
@@ -397,7 +410,7 @@ static int write_element(VALUE key, VALUE value, VALUE extra, int allow_id) {
        }
    case T_DATA:
        {
-           const char* cls = rb_class2name(RBASIC(value)->klass);
+           const char* cls = rb_obj_classname(value);
            if (strcmp(cls, "Time") == 0) {
                double t = NUM2DBL(rb_funcall(value, rb_intern("to_f"), 0));
                long long time_since_epoch = (long long)round(t * 1000);
@@ -416,8 +429,8 @@ static int write_element(VALUE key, VALUE value, VALUE extra, int allow_id) {
        }
    case T_REGEXP:
        {
-           VALUE pattern = RREGEXP_SRC(value);
-           long flags = RREGEXP(value)->ptr->options;
+           VALUE pattern = RREGEXP_SOURCE(value);
+           long flags = RREGEXP_OPTIONS(value);
            VALUE has_extra;

            write_name_and_type(buffer, key, 0x0B);
@@ -452,7 +465,7 @@ static int write_element(VALUE key, VALUE value, VALUE extra, int allow_id) {
        }
    default:
        {
-           const char* cls = rb_class2name(RBASIC(value)->klass);
+           const char* cls = rb_obj_classname(value);
            buffer_free(buffer);
            rb_raise(InvalidDocument, "Cannot serialize an object of class %s (type %d) into BSON.", cls, TYPE(value));
            break;
@@ -495,7 +508,7 @@ static void write_doc(buffer_t buffer, VALUE hash, VALUE check_keys, VALUE move_
    }
    else {
        allow_id = 1;
-       if (strcmp(rb_class2name(RBASIC(hash)->klass), "Hash") == 0) {
+       if (strcmp(rb_obj_classname(hash), "Hash") == 0) {
            if ((rb_funcall(hash, rb_intern("has_key?"), 1, id_str) == Qtrue) &&
                (rb_funcall(hash, rb_intern("has_key?"), 1, id_sym) == Qtrue)) {
                VALUE oid_sym = rb_hash_delete(hash, id_sym);
@@ -512,7 +525,7 @@ static void write_doc(buffer_t buffer, VALUE hash, VALUE check_keys, VALUE move_
    }

    // we have to check for an OrderedHash and handle that specially
-   if (strcmp(rb_class2name(RBASIC(hash)->klass), "OrderedHash") == 0) {
+   if (strcmp(rb_obj_classname(hash), "OrderedHash") == 0) {
        VALUE keys = rb_funcall(hash, rb_intern("keys"), 0);
        int i;
        for(i = 0; i < RARRAY_LEN(keys); i++) {
@@ -883,25 +896,25 @@ void Init_cbson() {
     Time = rb_const_get(rb_cObject, rb_intern("Time"));

     mongo = rb_const_get(rb_cObject, rb_intern("Mongo"));
-    rb_require("mongo/types/binary");
+    rb_require("mongo_bson/types/binary");
     Binary = rb_const_get(mongo, rb_intern("Binary"));
-    rb_require("mongo/types/objectid");
+    rb_require("mongo_bson/types/objectid");
     ObjectID = rb_const_get(mongo, rb_intern("ObjectID"));
-    rb_require("mongo/types/dbref");
+    rb_require("mongo_bson/types/dbref");
     DBRef = rb_const_get(mongo, rb_intern("DBRef"));
-    rb_require("mongo/types/code");
+    rb_require("mongo_bson/types/code");
     Code = rb_const_get(mongo, rb_intern("Code"));
-    rb_require("mongo/types/min_max_keys");
+    rb_require("mongo_bson/types/min_max_keys");
     MinKey = rb_const_get(mongo, rb_intern("MinKey"));
     MaxKey = rb_const_get(mongo, rb_intern("MaxKey"));
-    rb_require("mongo/types/regexp_of_holding");
+    rb_require("mongo_bson/types/regexp_of_holding");
     Regexp = rb_const_get(rb_cObject, rb_intern("Regexp"));
     RegexpOfHolding = rb_const_get(mongo, rb_intern("RegexpOfHolding"));
-    rb_require("mongo/exceptions");
+    rb_require("mongo_bson/exceptions");
     InvalidName = rb_const_get(mongo, rb_intern("InvalidName"));
     InvalidStringEncoding = rb_const_get(mongo, rb_intern("InvalidStringEncoding"));
     InvalidDocument = rb_const_get(mongo, rb_intern("InvalidDocument"));
-    rb_require("mongo/util/ordered_hash");
+    rb_require("mongo_bson/ordered_hash");
     OrderedHash = rb_const_get(rb_cObject, rb_intern("OrderedHash"));

     CBson = rb_define_module("CBson");
lib/mongo.rb (25 changed lines)

@@ -4,21 +4,8 @@ module Mongo
   VERSION = "0.19.2"
 end

-begin
-  # Need this for running test with and without c ext in Ruby 1.9.
-  raise LoadError if ENV['TEST_MODE'] && !ENV['C_EXT']
-  require 'mongo_ext/cbson'
-  raise LoadError unless defined?(CBson::VERSION) && CBson::VERSION == Mongo::VERSION
-  require 'mongo/util/bson_c'
-  BSON = BSON_C
-rescue LoadError
-  require 'mongo/util/bson_ruby'
-  BSON = BSON_RUBY
-  warn "\n**Notice: C extension not loaded. This is required for optimum MongoDB Ruby driver performance."
-  warn " You can install the extension as follows:\n gem install mongo_ext\n"
-  warn " If you continue to receive this message after installing, make sure that the"
-  warn " mongo_ext gem is in your load path and that the mongo_ext and mongo gems are of the same version.\n"
-end
+require 'mongo_bson'

 module Mongo
   ASCENDING = 1
@@ -40,17 +27,9 @@ module Mongo

 end

-require 'mongo/types/binary'
-require 'mongo/types/code'
-require 'mongo/types/dbref'
-require 'mongo/types/objectid'
-require 'mongo/types/regexp_of_holding'
-require 'mongo/types/min_max_keys'

 require 'mongo/util/support'
 require 'mongo/util/conversions'
 require 'mongo/util/server_version'
-require 'mongo/util/bson_ruby'

 require 'mongo/collection'
 require 'mongo/connection'
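With the begin/rescue block gone from lib/mongo.rb, picking between the C-backed and pure-Ruby serializers presumably becomes the job of the new mongo_bson entry point required above. A rough sketch of what that selection could look like, based only on the logic removed in this hunk (the file name mongo_bson.rb and the require paths inside it are assumptions; they are not shown in this commit):

    # mongo_bson.rb (hypothetical) -- roughly the fallback logic deleted from
    # lib/mongo.rb, relocated into the new gem: prefer the C extension and
    # fall back to the pure-Ruby coder if it cannot be loaded.
    module Mongo
      begin
        require 'mongo_ext/cbson'
        require 'mongo_bson/bson_c'      # assumed new location of bson_c.rb
        BSON_CODER = BSON_C
      rescue LoadError
        require 'mongo_bson/bson_ruby'   # assumed new location of bson_ruby.rb
        BSON_CODER = BSON_RUBY
        warn "C extension not loaded; falling back to the pure-Ruby BSON coder."
      end
    end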
@@ -261,7 +261,7 @@ module Mongo
       message = ByteBuffer.new([0, 0, 0, 0])
       BSON_RUBY.serialize_cstr(message, "#{@db.name}.#{@name}")
       message.put_int(0)
-      message.put_array(BSON.serialize(selector, false, true).to_a)
+      message.put_array(BSON_CODER.serialize(selector, false, true).to_a)

       if opts[:safe]
         @connection.send_message_with_safe_check(Mongo::Constants::OP_DELETE, message, @db.name,
@@ -303,8 +303,8 @@ module Mongo
       update_options += 1 if options[:upsert]
       update_options += 2 if options[:multi]
       message.put_int(update_options)
-      message.put_array(BSON.serialize(selector, false, true).to_a)
-      message.put_array(BSON.serialize(document, false, true).to_a)
+      message.put_array(BSON_CODER.serialize(selector, false, true).to_a)
+      message.put_array(BSON_CODER.serialize(document, false, true).to_a)
       if options[:safe]
         @connection.send_message_with_safe_check(Mongo::Constants::OP_UPDATE, message, @db.name,
           "db.#{@name}.update(#{selector.inspect}, #{document.inspect})")
@@ -590,7 +590,7 @@ module Mongo
       # Initial byte is 0.
       message = ByteBuffer.new([0, 0, 0, 0])
       BSON_RUBY.serialize_cstr(message, "#{@db.name}.#{collection_name}")
-      documents.each { |doc| message.put_array(BSON.serialize(doc, check_keys, true).to_a) }
+      documents.each { |doc| message.put_array(BSON_CODER.serialize(doc, check_keys, true).to_a) }
       if safe
         @connection.send_message_with_safe_check(Mongo::Constants::OP_INSERT, message, @db.name,
           "db.#{collection_name}.insert(#{documents.inspect})")
@@ -650,7 +650,7 @@ module Mongo
         buf.put_array(receive_message_on_socket(size - 4, sock).unpack("C*"), 4)
         number_remaining -= 1
         buf.rewind
-        docs << BSON.deserialize(buf)
+        docs << BSON_CODER.deserialize(buf)
       end
       [docs, number_received, cursor_id]
     end
@@ -661,7 +661,7 @@ module Mongo
       BSON_RUBY.serialize_cstr(message, "#{db_name}.$cmd")
       message.put_int(0)
       message.put_int(-1)
-      message.put_array(BSON.serialize({:getlasterror => 1}, false).unpack("C*"))
+      message.put_array(BSON_CODER.serialize({:getlasterror => 1}, false).unpack("C*"))
       add_message_headers(Mongo::Constants::OP_QUERY, message)
     end

@@ -356,8 +356,8 @@ module Mongo
       if query_contains_special_fields?
         selector = selector_with_special_query_fields
       end
-      message.put_array(BSON.serialize(selector, false).to_a)
-      message.put_array(BSON.serialize(@fields, false).to_a) if @fields
+      message.put_array(BSON_CODER.serialize(selector, false).to_a)
+      message.put_array(BSON_CODER.serialize(@fields, false).to_a) if @fields
       message
     end

@@ -14,9 +14,9 @@
 # limitations under the License.
 # ++

-require 'mongo/types/objectid'
-require 'mongo/util/byte_buffer'
-require 'mongo/util/ordered_hash'
+require 'mongo_bson/types/objectid'
+require 'mongo_bson/byte_buffer'
+require 'mongo_bson/ordered_hash'

 module GridFS

@@ -14,8 +14,8 @@
 # limitations under the License.
 # ++

-require 'mongo/types/objectid'
-require 'mongo/util/ordered_hash'
+require 'mongo_bson/types/objectid'
+require 'mongo_bson/ordered_hash'
 require 'mongo/gridfs/chunk'

 module GridFS
@@ -1,18 +1,20 @@
 # A thin wrapper for the CBson class
-class BSON_C
+module Mongo
+  class BSON_C

     def self.serialize(obj, check_keys=false, move_id=false)
       ByteBuffer.new(CBson.serialize(obj, check_keys, move_id))
     end

     def self.deserialize(buf=nil)
       if buf.is_a? String
         to_deserialize = ByteBuffer.new(buf) if buf
       else
         buf = ByteBuffer.new(buf.to_a) if buf
       end
       buf.rewind
       CBson.deserialize(buf.to_s)
     end

+  end
 end
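For reference, a small usage sketch of the wrapper as re-scoped above (assumes the CBson C extension is compiled and loadable):

    buf = Mongo::BSON_C.serialize({'n' => 1})  # ByteBuffer wrapping CBson.serialize output
    doc = Mongo::BSON_C.deserialize(buf)       # rewinds the buffer and calls CBson.deserialize
    doc['n']                                   # => 1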
(One file's diff is suppressed because it is too large.)
@@ -14,7 +14,7 @@
 # limitations under the License.
 # ++

-require 'mongo/util/byte_buffer'
+require 'mongo_bson/byte_buffer'

 module Mongo

@@ -12,11 +12,12 @@ Gem::Specification.new do |s|
   s.require_paths = ['lib']

   s.files = ['README.rdoc', 'Rakefile', 'mongo-ruby-driver.gemspec', 'LICENSE.txt']
-  s.files += Dir['lib/**/*.rb'] + Dir['examples/**/*.rb'] + Dir['bin/**/*.rb']
+  s.files += Dir['lib/mongo/*.rb'] + Dir['examples/**/*.rb'] + Dir['bin/**/*.rb']
   s.test_files = Dir['test/**/*.rb']

   s.has_rdoc = true
   s.test_files = Dir['test/**/*.rb']
+  s.test_files -= Dir['test/mongo_bson/*.rb'] # remove these files from the manifest

   s.has_rdoc = true
   s.rdoc_options = ['--main', 'README.rdoc', '--inline-source']
@@ -1,3 +1,4 @@
+$:.unshift(File.join(File.dirname(__FILE__), '..', 'lib'))
 require 'test/test_helper'
 require 'mongo/gridfs'

@@ -24,26 +24,26 @@ class BSONTest < Test::Unit::TestCase

   def test_string
     doc = {'doc' => 'hello, world'}
-    bson = bson = BSON.serialize(doc)
-    assert_equal doc, BSON.deserialize(bson)
+    bson = bson = Mongo::BSON_CODER.serialize(doc)
+    assert_equal doc, Mongo::BSON_CODER.deserialize(bson)
   end

   def test_valid_utf8_string
     doc = {'doc' => 'aé'}
-    bson = bson = BSON.serialize(doc)
-    assert_equal doc, BSON.deserialize(bson)
+    bson = bson = Mongo::BSON_CODER.serialize(doc)
+    assert_equal doc, Mongo::BSON_CODER.deserialize(bson)
   end

   def test_valid_utf8_key
     doc = {'aé' => 'hello'}
-    bson = bson = BSON.serialize(doc)
-    assert_equal doc, BSON.deserialize(bson)
+    bson = bson = Mongo::BSON_CODER.serialize(doc)
+    assert_equal doc, Mongo::BSON_CODER.deserialize(bson)
   end

   def test_document_length
     doc = {'name' => 'a' * 5 * 1024 * 1024}
     assert_raise InvalidDocument do
-      assert BSON.serialize(doc)
+      assert Mongo::BSON_CODER.serialize(doc)
     end
   end

@@ -55,7 +55,7 @@ class BSONTest < Test::Unit::TestCase
       string = Iconv.conv('iso-8859-1', 'utf-8', 'aé')
       doc = {'doc' => string}
       assert_raise InvalidStringEncoding do
-        BSON.serialize(doc)
+        Mongo::BSON_CODER.serialize(doc)
       end
     end

@@ -63,51 +63,51 @@ class BSONTest < Test::Unit::TestCase
       key = Iconv.conv('iso-8859-1', 'utf-8', 'aé')
       doc = {key => 'hello'}
       assert_raise InvalidStringEncoding do
-        BSON.serialize(doc)
+        Mongo::BSON_CODER.serialize(doc)
       end
     end
   else
     def test_non_utf8_string
-      bson = BSON.serialize({'str' => 'aé'.encode('iso-8859-1')})
-      result = BSON.deserialize(bson)['str']
+      bson = Mongo::BSON_CODER.serialize({'str' => 'aé'.encode('iso-8859-1')})
+      result = Mongo::BSON_CODER.deserialize(bson)['str']
       assert_equal 'aé', result
      assert_equal 'UTF-8', result.encoding.name
     end

     def test_non_utf8_key
-      bson = BSON.serialize({'aé'.encode('iso-8859-1') => 'hello'})
-      assert_equal 'hello', BSON.deserialize(bson)['aé']
+      bson = Mongo::BSON_CODER.serialize({'aé'.encode('iso-8859-1') => 'hello'})
+      assert_equal 'hello', Mongo::BSON_CODER.deserialize(bson)['aé']
     end
   end

   def test_code
     doc = {'$where' => Code.new('this.a.b < this.b')}
-    bson = BSON.serialize(doc)
-    assert_equal doc, BSON.deserialize(bson)
+    bson = Mongo::BSON_CODER.serialize(doc)
+    assert_equal doc, Mongo::BSON_CODER.deserialize(bson)
   end

   def test_number
     doc = {'doc' => 41.99}
-    bson = BSON.serialize(doc)
-    assert_equal doc, BSON.deserialize(bson)
+    bson = Mongo::BSON_CODER.serialize(doc)
+    assert_equal doc, Mongo::BSON_CODER.deserialize(bson)
   end

   def test_int
     doc = {'doc' => 42}
-    bson = BSON.serialize(doc)
-    assert_equal doc, BSON.deserialize(bson)
+    bson = Mongo::BSON_CODER.serialize(doc)
+    assert_equal doc, Mongo::BSON_CODER.deserialize(bson)

     doc = {"doc" => -5600}
-    bson = BSON.serialize(doc)
-    assert_equal doc, BSON.deserialize(bson)
+    bson = Mongo::BSON_CODER.serialize(doc)
+    assert_equal doc, Mongo::BSON_CODER.deserialize(bson)

     doc = {"doc" => 2147483647}
-    bson = BSON.serialize(doc)
-    assert_equal doc, BSON.deserialize(bson)
+    bson = Mongo::BSON_CODER.serialize(doc)
+    assert_equal doc, Mongo::BSON_CODER.deserialize(bson)

     doc = {"doc" => -2147483648}
-    bson = BSON.serialize(doc)
-    assert_equal doc, BSON.deserialize(bson)
+    bson = Mongo::BSON_CODER.serialize(doc)
+    assert_equal doc, Mongo::BSON_CODER.deserialize(bson)
   end

   def test_ordered_hash
@@ -116,32 +116,32 @@ class BSONTest < Test::Unit::TestCase
     doc["a"] = 2
     doc["c"] = 3
     doc["d"] = 4
-    bson = BSON.serialize(doc)
-    assert_equal doc, BSON.deserialize(bson)
+    bson = Mongo::BSON_CODER.serialize(doc)
+    assert_equal doc, Mongo::BSON_CODER.deserialize(bson)
   end

   def test_object
     doc = {'doc' => {'age' => 42, 'name' => 'Spongebob', 'shoe_size' => 9.5}}
-    bson = BSON.serialize(doc)
-    assert_equal doc, BSON.deserialize(bson)
+    bson = Mongo::BSON_CODER.serialize(doc)
+    assert_equal doc, Mongo::BSON_CODER.deserialize(bson)
   end

   def test_oid
     doc = {'doc' => ObjectID.new}
-    bson = BSON.serialize(doc)
-    assert_equal doc, BSON.deserialize(bson)
+    bson = Mongo::BSON_CODER.serialize(doc)
+    assert_equal doc, Mongo::BSON_CODER.deserialize(bson)
   end

   def test_array
     doc = {'doc' => [1, 2, 'a', 'b']}
-    bson = BSON.serialize(doc)
-    assert_equal doc, BSON.deserialize(bson)
+    bson = Mongo::BSON_CODER.serialize(doc)
+    assert_equal doc, Mongo::BSON_CODER.deserialize(bson)
   end

   def test_regex
     doc = {'doc' => /foobar/i}
-    bson = BSON.serialize(doc)
-    doc2 = BSON.deserialize(bson)
+    bson = Mongo::BSON_CODER.serialize(doc)
+    doc2 = Mongo::BSON_CODER.deserialize(bson)
     assert_equal doc, doc2

     r = doc2['doc']
@@ -151,9 +151,9 @@ class BSONTest < Test::Unit::TestCase
     assert_equal 'zywcab', r.extra_options_str

     doc = {'doc' => r}
-    bson_doc = BSON.serialize(doc)
+    bson_doc = Mongo::BSON_CODER.serialize(doc)
     doc2 = nil
-    doc2 = BSON.deserialize(bson_doc)
+    doc2 = Mongo::BSON_CODER.deserialize(bson_doc)
     assert_equal doc, doc2

     r = doc2['doc']
@@ -163,30 +163,30 @@ class BSONTest < Test::Unit::TestCase

   def test_boolean
     doc = {'doc' => true}
-    bson = BSON.serialize(doc)
-    assert_equal doc, BSON.deserialize(bson)
+    bson = Mongo::BSON_CODER.serialize(doc)
+    assert_equal doc, Mongo::BSON_CODER.deserialize(bson)
   end

   def test_date
     doc = {'date' => Time.now}
-    bson = BSON.serialize(doc)
-    doc2 = BSON.deserialize(bson)
+    bson = Mongo::BSON_CODER.serialize(doc)
+    doc2 = Mongo::BSON_CODER.deserialize(bson)
     # Mongo only stores up to the millisecond
     assert_in_delta doc['date'], doc2['date'], 0.001
   end

   def test_date_returns_as_utc
     doc = {'date' => Time.now}
-    bson = BSON.serialize(doc)
-    doc2 = BSON.deserialize(bson)
+    bson = Mongo::BSON_CODER.serialize(doc)
+    doc2 = Mongo::BSON_CODER.deserialize(bson)
     assert doc2['date'].utc?
   end

   def test_date_before_epoch
     begin
       doc = {'date' => Time.utc(1600)}
-      bson = BSON.serialize(doc)
-      doc2 = BSON.deserialize(bson)
+      bson = Mongo::BSON_CODER.serialize(doc)
+      doc2 = Mongo::BSON_CODER.deserialize(bson)
       # Mongo only stores up to the millisecond
       assert_in_delta doc['date'], doc2['date'], 0.001
     rescue ArgumentError
@@ -201,7 +201,7 @@ class BSONTest < Test::Unit::TestCase
     [DateTime.now, Date.today, Zone].each do |invalid_date|
       doc = {:date => invalid_date}
       begin
-        bson = BSON.serialize(doc)
+        bson = Mongo::BSON_CODER.serialize(doc)
       rescue => e
       ensure
         assert_equal InvalidDocument, e.class
@@ -214,16 +214,16 @@ class BSONTest < Test::Unit::TestCase
     oid = ObjectID.new
     doc = {}
     doc['dbref'] = DBRef.new('namespace', oid)
-    bson = BSON.serialize(doc)
-    doc2 = BSON.deserialize(bson)
+    bson = Mongo::BSON_CODER.serialize(doc)
+    doc2 = Mongo::BSON_CODER.deserialize(bson)
     assert_equal 'namespace', doc2['dbref'].namespace
     assert_equal oid, doc2['dbref'].object_id
   end

   def test_symbol
     doc = {'sym' => :foo}
-    bson = BSON.serialize(doc)
-    doc2 = BSON.deserialize(bson)
+    bson = Mongo::BSON_CODER.serialize(doc)
+    doc2 = Mongo::BSON_CODER.deserialize(bson)
     assert_equal :foo, doc2['sym']
   end

@@ -232,8 +232,8 @@ class BSONTest < Test::Unit::TestCase
     'binstring'.each_byte { |b| bin.put(b) }

     doc = {'bin' => bin}
-    bson = BSON.serialize(doc)
-    doc2 = BSON.deserialize(bson)
+    bson = Mongo::BSON_CODER.serialize(doc)
+    doc2 = Mongo::BSON_CODER.deserialize(bson)
     bin2 = doc2['bin']
     assert_kind_of Binary, bin2
     assert_equal 'binstring', bin2.to_s
@@ -244,8 +244,8 @@ class BSONTest < Test::Unit::TestCase
     bin = Binary.new([1, 2, 3, 4, 5], Binary::SUBTYPE_USER_DEFINED)

     doc = {'bin' => bin}
-    bson = BSON.serialize(doc)
-    doc2 = BSON.deserialize(bson)
+    bson = Mongo::BSON_CODER.serialize(doc)
+    doc2 = Mongo::BSON_CODER.deserialize(bson)
     bin2 = doc2['bin']
     assert_kind_of Binary, bin2
     assert_equal [1, 2, 3, 4, 5], bin2.to_a
@@ -257,8 +257,8 @@ class BSONTest < Test::Unit::TestCase
     5.times { |i| bb.put(i + 1) }

     doc = {'bin' => bb}
-    bson = BSON.serialize(doc)
-    doc2 = BSON.deserialize(bson)
+    bson = Mongo::BSON_CODER.serialize(doc)
+    doc2 = Mongo::BSON_CODER.deserialize(bson)
     bin2 = doc2['bin']
     assert_kind_of Binary, bin2
     assert_equal [1, 2, 3, 4, 5], bin2.to_a
@@ -269,24 +269,24 @@ class BSONTest < Test::Unit::TestCase
     val = OrderedHash.new
     val['not_id'] = 1
     val['_id'] = 2
-    roundtrip = BSON.deserialize(BSON.serialize(val, false, true).to_a)
+    roundtrip = Mongo::BSON_CODER.deserialize(Mongo::BSON_CODER.serialize(val, false, true).to_a)
     assert_kind_of OrderedHash, roundtrip
     assert_equal '_id', roundtrip.keys.first

     val = {'a' => 'foo', 'b' => 'bar', :_id => 42, 'z' => 'hello'}
-    roundtrip = BSON.deserialize(BSON.serialize(val, false, true).to_a)
+    roundtrip = Mongo::BSON_CODER.deserialize(Mongo::BSON_CODER.serialize(val, false, true).to_a)
     assert_kind_of OrderedHash, roundtrip
     assert_equal '_id', roundtrip.keys.first
   end

   def test_nil_id
     doc = {"_id" => nil}
-    assert_equal doc, BSON.deserialize(bson = BSON.serialize(doc, false, true).to_a)
+    assert_equal doc, Mongo::BSON_CODER.deserialize(bson = Mongo::BSON_CODER.serialize(doc, false, true).to_a)
   end

   def test_timestamp
     val = {"test" => [4, 20]}
-    assert_equal val, BSON.deserialize([0x13, 0x00, 0x00, 0x00,
+    assert_equal val, Mongo::BSON_CODER.deserialize([0x13, 0x00, 0x00, 0x00,
                                         0x11, 0x74, 0x65, 0x73,
                                         0x74, 0x00, 0x04, 0x00,
                                         0x00, 0x00, 0x14, 0x00,
@@ -296,29 +296,29 @@ class BSONTest < Test::Unit::TestCase
   def test_overflow
     doc = {"x" => 2**75}
     assert_raise RangeError do
-      bson = BSON.serialize(doc)
+      bson = Mongo::BSON_CODER.serialize(doc)
     end

     doc = {"x" => 9223372036854775}
-    assert_equal doc, BSON.deserialize(BSON.serialize(doc).to_a)
+    assert_equal doc, Mongo::BSON_CODER.deserialize(Mongo::BSON_CODER.serialize(doc).to_a)

     doc = {"x" => 9223372036854775807}
-    assert_equal doc, BSON.deserialize(BSON.serialize(doc).to_a)
+    assert_equal doc, Mongo::BSON_CODER.deserialize(Mongo::BSON_CODER.serialize(doc).to_a)

     doc["x"] = doc["x"] + 1
     assert_raise RangeError do
-      bson = BSON.serialize(doc)
+      bson = Mongo::BSON_CODER.serialize(doc)
     end

     doc = {"x" => -9223372036854775}
-    assert_equal doc, BSON.deserialize(BSON.serialize(doc).to_a)
+    assert_equal doc, Mongo::BSON_CODER.deserialize(Mongo::BSON_CODER.serialize(doc).to_a)

     doc = {"x" => -9223372036854775808}
-    assert_equal doc, BSON.deserialize(BSON.serialize(doc).to_a)
+    assert_equal doc, Mongo::BSON_CODER.deserialize(Mongo::BSON_CODER.serialize(doc).to_a)

     doc["x"] = doc["x"] - 1
     assert_raise RangeError do
-      bson = BSON.serialize(doc)
+      bson = Mongo::BSON_CODER.serialize(doc)
     end
   end

@@ -326,7 +326,7 @@ class BSONTest < Test::Unit::TestCase
     [BigDecimal.new("1.0"), Complex(0, 1), Rational(2, 3)].each do |type|
       doc = {"x" => type}
       begin
-        BSON.serialize(doc)
+        Mongo::BSON_CODER.serialize(doc)
       rescue => e
       ensure
         assert_equal InvalidDocument, e.class
@@ -340,12 +340,12 @@ class BSONTest < Test::Unit::TestCase
     val['not_id'] = 1
     val['_id'] = 2
     assert val.keys.include?('_id')
-    BSON.serialize(val)
+    Mongo::BSON_CODER.serialize(val)
     assert val.keys.include?('_id')

     val = {'a' => 'foo', 'b' => 'bar', :_id => 42, 'z' => 'hello'}
     assert val.keys.include?(:_id)
-    BSON.serialize(val)
+    Mongo::BSON_CODER.serialize(val)
     assert val.keys.include?(:_id)
   end

@@ -360,50 +360,50 @@ class BSONTest < Test::Unit::TestCase
     dup = {"_id" => "foo", :_id => "foo"}
     one = {"_id" => "foo"}

-    assert_equal BSON.serialize(one).to_a, BSON.serialize(dup).to_a
+    assert_equal Mongo::BSON_CODER.serialize(one).to_a, Mongo::BSON_CODER.serialize(dup).to_a
   end

   def test_no_duplicate_id_when_moving_id
     dup = {"_id" => "foo", :_id => "foo"}
     one = {:_id => "foo"}

-    assert_equal BSON.serialize(one, false, true).to_s, BSON.serialize(dup, false, true).to_s
+    assert_equal Mongo::BSON_CODER.serialize(one, false, true).to_s, Mongo::BSON_CODER.serialize(dup, false, true).to_s
   end

   def test_null_character
     doc = {"a" => "\x00"}

-    assert_equal doc, BSON.deserialize(BSON.serialize(doc).to_a)
+    assert_equal doc, Mongo::BSON_CODER.deserialize(Mongo::BSON_CODER.serialize(doc).to_a)

     assert_raise InvalidDocument do
-      BSON.serialize({"\x00" => "a"})
+      Mongo::BSON_CODER.serialize({"\x00" => "a"})
     end

     assert_raise InvalidDocument do
-      BSON.serialize({"a" => (Regexp.compile "ab\x00c")})
+      Mongo::BSON_CODER.serialize({"a" => (Regexp.compile "ab\x00c")})
     end
   end

   def test_max_key
     doc = {"a" => MaxKey.new}

-    assert_equal doc, BSON.deserialize(BSON.serialize(doc).to_a)
+    assert_equal doc, Mongo::BSON_CODER.deserialize(Mongo::BSON_CODER.serialize(doc).to_a)
   end

   def test_min_key
     doc = {"a" => MinKey.new}

-    assert_equal doc, BSON.deserialize(BSON.serialize(doc).to_a)
+    assert_equal doc, Mongo::BSON_CODER.deserialize(Mongo::BSON_CODER.serialize(doc).to_a)
   end

   def test_invalid_object
     o = Object.new
     assert_raise InvalidDocument do
-      BSON.serialize({:foo => o})
+      Mongo::BSON_CODER.serialize({:foo => o})
     end

     assert_raise InvalidDocument do
-      BSON.serialize({:foo => Date.today})
+      Mongo::BSON_CODER.serialize({:foo => Date.today})
     end
   end

@@ -416,10 +416,10 @@ class BSONTest < Test::Unit::TestCase

     assert_equal ")\000\000\000\020_id\000\001\000\000\000\002text" +
                  "\000\004\000\000\000abc\000\002key\000\004\000\000\000abc\000\000",
-                 BSON.serialize(a, false, true).to_s
+                 Mongo::BSON_CODER.serialize(a, false, true).to_s
     assert_equal ")\000\000\000\002text\000\004\000\000\000abc\000\002key" +
                  "\000\004\000\000\000abc\000\020_id\000\001\000\000\000\000",
-                 BSON.serialize(a, false, false).to_s
+                 Mongo::BSON_CODER.serialize(a, false, false).to_s
   end

   def test_move_id_with_nested_doc
@@ -433,11 +433,11 @@ class BSONTest < Test::Unit::TestCase
     assert_equal ">\000\000\000\020_id\000\003\000\000\000\002text" +
                  "\000\004\000\000\000abc\000\003hash\000\034\000\000" +
                  "\000\002text\000\004\000\000\000abc\000\020_id\000\002\000\000\000\000\000",
-                 BSON.serialize(c, false, true).to_s
+                 Mongo::BSON_CODER.serialize(c, false, true).to_s
     assert_equal ">\000\000\000\002text\000\004\000\000\000abc\000\003hash" +
                  "\000\034\000\000\000\002text\000\004\000\000\000abc\000\020_id" +
                  "\000\002\000\000\000\000\020_id\000\003\000\000\000\000",
-                 BSON.serialize(c, false, false).to_s
+                 Mongo::BSON_CODER.serialize(c, false, false).to_s
   end

   if defined?(HashWithIndifferentAccess)
@@ -447,12 +447,12 @@ class BSONTest < Test::Unit::TestCase
       embedded['_id'] = ObjectID.new
       doc['_id'] = ObjectID.new
       doc['embedded'] = [embedded]
-      BSON.serialize(doc, false, true).to_a
+      Mongo::BSON_CODER.serialize(doc, false, true).to_a
       assert doc.has_key?("_id")
       assert doc['embedded'][0].has_key?("_id")

       doc['_id'] = ObjectID.new
-      BSON.serialize(doc, false, true).to_a
+      Mongo::BSON_CODER.serialize(doc, false, true).to_a
       assert doc.has_key?("_id")
     end
   end
@@ -7,8 +7,8 @@ class DBTest < Test::Unit::TestCase
       documents = [documents] unless documents.is_a?(Array)
       message = ByteBuffer.new
       message.put_int(0)
-      BSON.serialize_cstr(message, "#{db.name}.test")
-      documents.each { |doc| message.put_array(BSON.new.serialize(doc, true).to_a) }
+      Mongo::BSON_CODER..serialize_cstr(message, "#{db.name}.test")
+      documents.each { |doc| message.put_array(Mongo::BSON_CODER.new.serialize(doc, true).to_a) }
       message = db.add_message_headers(Mongo::Constants::OP_INSERT, message)
     end
   end
 end