merge new bson commits
This commit is contained in:
commit
f2bc05f51c
9
Rakefile
9
Rakefile
|
@ -27,6 +27,7 @@ namespace :test do
|
|||
ENV['C_EXT'] = 'TRUE'
|
||||
Rake::Task['test:unit'].invoke
|
||||
Rake::Task['test:functional'].invoke
|
||||
Rake::Task['test:bson'].invoke
|
||||
Rake::Task['test:pooled_threading'].invoke
|
||||
Rake::Task['test:drop_databases'].invoke
|
||||
ENV['C_EXT'] = nil
|
||||
|
@ -37,10 +38,11 @@ namespace :test do
|
|||
ENV['C_EXT'] = nil
|
||||
Rake::Task['test:unit'].invoke
|
||||
Rake::Task['test:functional'].invoke
|
||||
Rake::Task['test:bson'].invoke
|
||||
Rake::Task['test:pooled_threading'].invoke
|
||||
Rake::Task['test:drop_databases'].invoke
|
||||
end
|
||||
|
||||
|
||||
Rake::TestTask.new(:unit) do |t|
|
||||
t.test_files = FileList['test/unit/*_test.rb']
|
||||
t.verbose = true
|
||||
|
@ -91,6 +93,11 @@ namespace :test do
|
|||
t.verbose = true
|
||||
end
|
||||
|
||||
Rake::TestTask.new(:bson) do |t|
|
||||
t.test_files = FileList['test/mongo_bson/*_test.rb']
|
||||
t.verbose = true
|
||||
end
|
||||
|
||||
task :drop_databases do |t|
|
||||
puts "Dropping test database..."
|
||||
require File.join(File.dirname(__FILE__), 'lib', 'mongo')
|
||||
|
|
|
@ -9,9 +9,9 @@ TRIALS = 100000
|
|||
|
||||
def encode(doc)
|
||||
t0 = Time.new
|
||||
b = BSON.new
|
||||
b = Mongo::BSON_CODER.new
|
||||
TRIALS.times { |i|
|
||||
b = BSON.new
|
||||
b = Mongo::BSON_CODER.new
|
||||
b.serialize doc
|
||||
}
|
||||
print "took: #{Time.now.to_f - t0.to_f}\n"
|
||||
|
|
|
@ -128,11 +128,15 @@ static void write_utf8(buffer_t buffer, VALUE string, char check_null) {
|
|||
#define INT2STRING(buffer, i) asprintf(buffer, "%d", i);
|
||||
#endif
|
||||
|
||||
// this sucks too.
|
||||
#ifndef RREGEXP_SRC
|
||||
#define RREGEXP_SRC(r) rb_str_new(RREGEXP((r))->str, RREGEXP((r))->len)
|
||||
#endif
|
||||
|
||||
// rubinius compatibility
|
||||
#ifndef RREGEXP_OPTIONS
|
||||
#define RREGEXP_OPTIONS(r) RREGEXP(value)->ptr->options
|
||||
#endif
|
||||
|
||||
static char zero = 0;
|
||||
static char one = 1;
|
||||
|
||||
|
@ -275,7 +279,7 @@ static int write_element(VALUE key, VALUE value, VALUE extra, int allow_id) {
|
|||
}
|
||||
case T_STRING:
|
||||
{
|
||||
if (strcmp(rb_class2name(RBASIC(value)->klass),
|
||||
if (strcmp(rb_obj_classname(value),
|
||||
"Mongo::Code") == 0) {
|
||||
buffer_position length_location, start_position, total_length;
|
||||
int length;
|
||||
|
@ -319,7 +323,7 @@ static int write_element(VALUE key, VALUE value, VALUE extra, int allow_id) {
|
|||
case T_OBJECT:
|
||||
{
|
||||
// TODO there has to be a better way to do these checks...
|
||||
const char* cls = rb_class2name(RBASIC(value)->klass);
|
||||
const char* cls = rb_obj_classname(value);
|
||||
if (strcmp(cls, "Mongo::Binary") == 0 ||
|
||||
strcmp(cls, "ByteBuffer") == 0) {
|
||||
const char subtype = strcmp(cls, "ByteBuffer") ?
|
||||
|
@ -397,7 +401,7 @@ static int write_element(VALUE key, VALUE value, VALUE extra, int allow_id) {
|
|||
}
|
||||
case T_DATA:
|
||||
{
|
||||
const char* cls = rb_class2name(RBASIC(value)->klass);
|
||||
const char* cls = rb_obj_classname(value);
|
||||
if (strcmp(cls, "Time") == 0) {
|
||||
double t = NUM2DBL(rb_funcall(value, rb_intern("to_f"), 0));
|
||||
long long time_since_epoch = (long long)round(t * 1000);
|
||||
|
@ -417,7 +421,7 @@ static int write_element(VALUE key, VALUE value, VALUE extra, int allow_id) {
|
|||
case T_REGEXP:
|
||||
{
|
||||
VALUE pattern = RREGEXP_SRC(value);
|
||||
long flags = RREGEXP(value)->ptr->options;
|
||||
long flags = RREGEXP_OPTIONS(value);
|
||||
VALUE has_extra;
|
||||
|
||||
write_name_and_type(buffer, key, 0x0B);
|
||||
|
@ -452,7 +456,7 @@ static int write_element(VALUE key, VALUE value, VALUE extra, int allow_id) {
|
|||
}
|
||||
default:
|
||||
{
|
||||
const char* cls = rb_class2name(RBASIC(value)->klass);
|
||||
const char* cls = rb_obj_classname(value);
|
||||
buffer_free(buffer);
|
||||
rb_raise(InvalidDocument, "Cannot serialize an object of class %s (type %d) into BSON.", cls, TYPE(value));
|
||||
break;
|
||||
|
@ -495,7 +499,7 @@ static void write_doc(buffer_t buffer, VALUE hash, VALUE check_keys, VALUE move_
|
|||
}
|
||||
else {
|
||||
allow_id = 1;
|
||||
if (strcmp(rb_class2name(RBASIC(hash)->klass), "Hash") == 0) {
|
||||
if (strcmp(rb_obj_classname(hash), "Hash") == 0) {
|
||||
if ((rb_funcall(hash, rb_intern("has_key?"), 1, id_str) == Qtrue) &&
|
||||
(rb_funcall(hash, rb_intern("has_key?"), 1, id_sym) == Qtrue)) {
|
||||
VALUE oid_sym = rb_hash_delete(hash, id_sym);
|
||||
|
@ -512,7 +516,7 @@ static void write_doc(buffer_t buffer, VALUE hash, VALUE check_keys, VALUE move_
|
|||
}
|
||||
|
||||
// we have to check for an OrderedHash and handle that specially
|
||||
if (strcmp(rb_class2name(RBASIC(hash)->klass), "OrderedHash") == 0) {
|
||||
if (strcmp(rb_obj_classname(hash), "OrderedHash") == 0) {
|
||||
VALUE keys = rb_funcall(hash, rb_intern("keys"), 0);
|
||||
int i;
|
||||
for(i = 0; i < RARRAY_LEN(keys); i++) {
|
||||
|
@ -883,25 +887,25 @@ void Init_cbson() {
|
|||
Time = rb_const_get(rb_cObject, rb_intern("Time"));
|
||||
|
||||
mongo = rb_const_get(rb_cObject, rb_intern("Mongo"));
|
||||
rb_require("mongo/types/binary");
|
||||
rb_require("mongo_bson/types/binary");
|
||||
Binary = rb_const_get(mongo, rb_intern("Binary"));
|
||||
rb_require("mongo/types/objectid");
|
||||
rb_require("mongo_bson/types/objectid");
|
||||
ObjectID = rb_const_get(mongo, rb_intern("ObjectID"));
|
||||
rb_require("mongo/types/dbref");
|
||||
rb_require("mongo_bson/types/dbref");
|
||||
DBRef = rb_const_get(mongo, rb_intern("DBRef"));
|
||||
rb_require("mongo/types/code");
|
||||
rb_require("mongo_bson/types/code");
|
||||
Code = rb_const_get(mongo, rb_intern("Code"));
|
||||
rb_require("mongo/types/min_max_keys");
|
||||
rb_require("mongo_bson/types/min_max_keys");
|
||||
MinKey = rb_const_get(mongo, rb_intern("MinKey"));
|
||||
MaxKey = rb_const_get(mongo, rb_intern("MaxKey"));
|
||||
rb_require("mongo/types/regexp_of_holding");
|
||||
rb_require("mongo_bson/types/regexp_of_holding");
|
||||
Regexp = rb_const_get(rb_cObject, rb_intern("Regexp"));
|
||||
RegexpOfHolding = rb_const_get(mongo, rb_intern("RegexpOfHolding"));
|
||||
rb_require("mongo/exceptions");
|
||||
rb_require("mongo_bson/exceptions");
|
||||
InvalidName = rb_const_get(mongo, rb_intern("InvalidName"));
|
||||
InvalidStringEncoding = rb_const_get(mongo, rb_intern("InvalidStringEncoding"));
|
||||
InvalidDocument = rb_const_get(mongo, rb_intern("InvalidDocument"));
|
||||
rb_require("mongo/util/ordered_hash");
|
||||
rb_require("mongo_bson/ordered_hash");
|
||||
OrderedHash = rb_const_get(rb_cObject, rb_intern("OrderedHash"));
|
||||
|
||||
CBson = rb_define_module("CBson");
|
||||
|
|
25
lib/mongo.rb
25
lib/mongo.rb
|
@ -4,21 +4,8 @@ module Mongo
|
|||
VERSION = "0.19.2"
|
||||
end
|
||||
|
||||
begin
|
||||
# Need this for running test with and without c ext in Ruby 1.9.
|
||||
raise LoadError if ENV['TEST_MODE'] && !ENV['C_EXT']
|
||||
require 'mongo_ext/cbson'
|
||||
raise LoadError unless defined?(CBson::VERSION) && CBson::VERSION == Mongo::VERSION
|
||||
require 'mongo/util/bson_c'
|
||||
BSON = BSON_C
|
||||
rescue LoadError
|
||||
require 'mongo/util/bson_ruby'
|
||||
BSON = BSON_RUBY
|
||||
warn "\n**Notice: C extension not loaded. This is required for optimum MongoDB Ruby driver performance."
|
||||
warn " You can install the extension as follows:\n gem install mongo_ext\n"
|
||||
warn " If you continue to receive this message after installing, make sure that the"
|
||||
warn " mongo_ext gem is in your load path and that the mongo_ext and mongo gems are of the same version.\n"
|
||||
end
|
||||
require 'mongo_bson'
|
||||
|
||||
|
||||
module Mongo
|
||||
ASCENDING = 1
|
||||
|
@ -41,18 +28,10 @@ module Mongo
|
|||
|
||||
end
|
||||
|
||||
require 'mongo/types/binary'
|
||||
require 'mongo/types/code'
|
||||
require 'mongo/types/dbref'
|
||||
require 'mongo/types/objectid'
|
||||
require 'mongo/types/regexp_of_holding'
|
||||
require 'mongo/types/min_max_keys'
|
||||
|
||||
require 'mongo/util/support'
|
||||
require 'mongo/util/core_ext'
|
||||
require 'mongo/util/conversions'
|
||||
require 'mongo/util/server_version'
|
||||
require 'mongo/util/bson_ruby'
|
||||
|
||||
require 'mongo/collection'
|
||||
require 'mongo/connection'
|
||||
|
|
|
@ -261,7 +261,7 @@ module Mongo
|
|||
message = ByteBuffer.new([0, 0, 0, 0])
|
||||
BSON_RUBY.serialize_cstr(message, "#{@db.name}.#{@name}")
|
||||
message.put_int(0)
|
||||
message.put_array(BSON.serialize(selector, false, true).to_a)
|
||||
message.put_array(BSON_CODER.serialize(selector, false, true).to_a)
|
||||
|
||||
if opts[:safe]
|
||||
@connection.send_message_with_safe_check(Mongo::Constants::OP_DELETE, message, @db.name,
|
||||
|
@ -303,8 +303,8 @@ module Mongo
|
|||
update_options += 1 if options[:upsert]
|
||||
update_options += 2 if options[:multi]
|
||||
message.put_int(update_options)
|
||||
message.put_array(BSON.serialize(selector, false, true).to_a)
|
||||
message.put_array(BSON.serialize(document, false, true).to_a)
|
||||
message.put_array(BSON_CODER.serialize(selector, false, true).to_a)
|
||||
message.put_array(BSON_CODER.serialize(document, false, true).to_a)
|
||||
if options[:safe]
|
||||
@connection.send_message_with_safe_check(Mongo::Constants::OP_UPDATE, message, @db.name,
|
||||
"db.#{@name}.update(#{selector.inspect}, #{document.inspect})")
|
||||
|
@ -631,7 +631,7 @@ module Mongo
|
|||
# Initial byte is 0.
|
||||
message = ByteBuffer.new([0, 0, 0, 0])
|
||||
BSON_RUBY.serialize_cstr(message, "#{@db.name}.#{collection_name}")
|
||||
documents.each { |doc| message.put_array(BSON.serialize(doc, check_keys, true).to_a) }
|
||||
documents.each { |doc| message.put_array(BSON_CODER.serialize(doc, check_keys, true).to_a) }
|
||||
if safe
|
||||
@connection.send_message_with_safe_check(Mongo::Constants::OP_INSERT, message, @db.name,
|
||||
"db.#{collection_name}.insert(#{documents.inspect})")
|
||||
|
|
|
@ -664,7 +664,7 @@ module Mongo
|
|||
buf.put_array(receive_message_on_socket(size - 4, sock).unpack("C*"), 4)
|
||||
number_remaining -= 1
|
||||
buf.rewind
|
||||
docs << BSON.deserialize(buf)
|
||||
docs << BSON_CODER.deserialize(buf)
|
||||
end
|
||||
[docs, number_received, cursor_id]
|
||||
end
|
||||
|
@ -675,7 +675,7 @@ module Mongo
|
|||
BSON_RUBY.serialize_cstr(message, "#{db_name}.$cmd")
|
||||
message.put_int(0)
|
||||
message.put_int(-1)
|
||||
message.put_array(BSON.serialize({:getlasterror => 1}, false).unpack("C*"))
|
||||
message.put_array(BSON_CODER.serialize({:getlasterror => 1}, false).unpack("C*"))
|
||||
add_message_headers(Mongo::Constants::OP_QUERY, message)
|
||||
end
|
||||
|
||||
|
|
|
@ -356,8 +356,8 @@ module Mongo
|
|||
if query_contains_special_fields?
|
||||
selector = selector_with_special_query_fields
|
||||
end
|
||||
message.put_array(BSON.serialize(selector, false).to_a)
|
||||
message.put_array(BSON.serialize(@fields, false).to_a) if @fields
|
||||
message.put_array(BSON_CODER.serialize(selector, false).to_a)
|
||||
message.put_array(BSON_CODER.serialize(@fields, false).to_a) if @fields
|
||||
message
|
||||
end
|
||||
|
||||
|
|
|
@ -14,9 +14,9 @@
|
|||
# limitations under the License.
|
||||
# ++
|
||||
|
||||
require 'mongo/types/objectid'
|
||||
require 'mongo/util/byte_buffer'
|
||||
require 'mongo/util/ordered_hash'
|
||||
require 'mongo_bson/types/objectid'
|
||||
require 'mongo_bson/byte_buffer'
|
||||
require 'mongo_bson/ordered_hash'
|
||||
|
||||
module GridFS
|
||||
|
||||
|
|
|
@ -14,8 +14,8 @@
|
|||
# limitations under the License.
|
||||
# ++
|
||||
|
||||
require 'mongo/types/objectid'
|
||||
require 'mongo/util/ordered_hash'
|
||||
require 'mongo_bson/types/objectid'
|
||||
require 'mongo_bson/ordered_hash'
|
||||
require 'mongo/gridfs/chunk'
|
||||
|
||||
module GridFS
|
||||
|
|
|
@ -1,18 +0,0 @@
|
|||
# A thin wrapper for the CBson class
|
||||
class BSON_C
|
||||
|
||||
def self.serialize(obj, check_keys=false, move_id=false)
|
||||
ByteBuffer.new(CBson.serialize(obj, check_keys, move_id))
|
||||
end
|
||||
|
||||
def self.deserialize(buf=nil)
|
||||
if buf.is_a? String
|
||||
to_deserialize = ByteBuffer.new(buf) if buf
|
||||
else
|
||||
buf = ByteBuffer.new(buf.to_a) if buf
|
||||
end
|
||||
buf.rewind
|
||||
CBson.deserialize(buf.to_s)
|
||||
end
|
||||
|
||||
end
|
|
@ -1,606 +0,0 @@
|
|||
# --
|
||||
# Copyright (C) 2008-2010 10gen Inc.
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify it
|
||||
# under the terms of the GNU Affero General Public License, version 3, as
|
||||
# published by the Free Software Foundation.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful, but WITHOUT
|
||||
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
||||
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License
|
||||
# for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Affero General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
# ++
|
||||
|
||||
require 'base64'
|
||||
require 'mongo/util/byte_buffer'
|
||||
require 'mongo/util/ordered_hash'
|
||||
require 'mongo/types/binary'
|
||||
require 'mongo/types/dbref'
|
||||
require 'mongo/types/objectid'
|
||||
require 'mongo/types/regexp_of_holding'
|
||||
|
||||
# A BSON seralizer/deserializer in pure Ruby.
|
||||
class BSON_RUBY
|
||||
|
||||
include Mongo
|
||||
|
||||
MINKEY = -1
|
||||
EOO = 0
|
||||
NUMBER = 1
|
||||
STRING = 2
|
||||
OBJECT = 3
|
||||
ARRAY = 4
|
||||
BINARY = 5
|
||||
UNDEFINED = 6
|
||||
OID = 7
|
||||
BOOLEAN = 8
|
||||
DATE = 9
|
||||
NULL = 10
|
||||
REGEX = 11
|
||||
REF = 12
|
||||
CODE = 13
|
||||
SYMBOL = 14
|
||||
CODE_W_SCOPE = 15
|
||||
NUMBER_INT = 16
|
||||
TIMESTAMP = 17
|
||||
NUMBER_LONG = 18
|
||||
MAXKEY = 127
|
||||
|
||||
def initialize
|
||||
@buf = ByteBuffer.new
|
||||
end
|
||||
|
||||
if RUBY_VERSION >= '1.9'
|
||||
def self.to_utf8(str)
|
||||
str.encode("utf-8")
|
||||
end
|
||||
else
|
||||
def self.to_utf8(str)
|
||||
begin
|
||||
str.unpack("U*")
|
||||
rescue => ex
|
||||
raise InvalidStringEncoding, "String not valid utf-8: #{str}"
|
||||
end
|
||||
str
|
||||
end
|
||||
end
|
||||
|
||||
def self.serialize_cstr(buf, val)
|
||||
buf.put_array(to_utf8(val.to_s).unpack("C*") << 0)
|
||||
end
|
||||
|
||||
def self.serialize_key(buf, key)
|
||||
raise InvalidDocument, "Key names / regex patterns must not contain the NULL byte" if key.include? "\x00"
|
||||
self.serialize_cstr(buf, key)
|
||||
end
|
||||
|
||||
def to_a
|
||||
@buf.to_a
|
||||
end
|
||||
|
||||
def to_s
|
||||
@buf.to_s
|
||||
end
|
||||
|
||||
# Serializes an object.
|
||||
# Implemented to ensure an API compatible with BSON extension.
|
||||
def self.serialize(obj, check_keys=false, move_id=false)
|
||||
new.serialize(obj, check_keys, move_id)
|
||||
end
|
||||
|
||||
def self.deserialize(buf=nil)
|
||||
new.deserialize(buf)
|
||||
end
|
||||
|
||||
def serialize(obj, check_keys=false, move_id=false)
|
||||
raise "Document is null" unless obj
|
||||
|
||||
@buf.rewind
|
||||
# put in a placeholder for the total size
|
||||
@buf.put_int(0)
|
||||
|
||||
# Write key/value pairs. Always write _id first if it exists.
|
||||
if move_id
|
||||
if obj.has_key? '_id'
|
||||
serialize_key_value('_id', obj['_id'], false)
|
||||
elsif obj.has_key? :_id
|
||||
serialize_key_value('_id', obj[:_id], false)
|
||||
end
|
||||
obj.each {|k, v| serialize_key_value(k, v, check_keys) unless k == '_id' || k == :_id }
|
||||
else
|
||||
if obj.has_key?('_id') && obj.has_key?(:_id)
|
||||
obj['_id'] = obj.delete(:_id)
|
||||
end
|
||||
obj.each {|k, v| serialize_key_value(k, v, check_keys) }
|
||||
end
|
||||
|
||||
serialize_eoo_element(@buf)
|
||||
if @buf.size > 4 * 1024 * 1024
|
||||
raise InvalidDocument, "Document is too large (#{@buf.size}). BSON documents are limited to 4MB (#{4 * 1024 * 1024})."
|
||||
end
|
||||
@buf.put_int(@buf.size, 0)
|
||||
self
|
||||
end
|
||||
|
||||
# Returns the array stored in the buffer.
|
||||
# Implemented to ensure an API compatible with BSON extension.
|
||||
def unpack(arg)
|
||||
@buf.to_a
|
||||
end
|
||||
|
||||
def serialize_key_value(k, v, check_keys)
|
||||
k = k.to_s
|
||||
if check_keys
|
||||
if k[0] == ?$
|
||||
raise InvalidName.new("key #{k} must not start with '$'")
|
||||
end
|
||||
if k.include? ?.
|
||||
raise InvalidName.new("key #{k} must not contain '.'")
|
||||
end
|
||||
end
|
||||
type = bson_type(v)
|
||||
case type
|
||||
when STRING, SYMBOL
|
||||
serialize_string_element(@buf, k, v, type)
|
||||
when NUMBER, NUMBER_INT
|
||||
serialize_number_element(@buf, k, v, type)
|
||||
when OBJECT
|
||||
serialize_object_element(@buf, k, v, check_keys)
|
||||
when OID
|
||||
serialize_oid_element(@buf, k, v)
|
||||
when ARRAY
|
||||
serialize_array_element(@buf, k, v, check_keys)
|
||||
when REGEX
|
||||
serialize_regex_element(@buf, k, v)
|
||||
when BOOLEAN
|
||||
serialize_boolean_element(@buf, k, v)
|
||||
when DATE
|
||||
serialize_date_element(@buf, k, v)
|
||||
when NULL
|
||||
serialize_null_element(@buf, k)
|
||||
when REF
|
||||
serialize_dbref_element(@buf, k, v)
|
||||
when BINARY
|
||||
serialize_binary_element(@buf, k, v)
|
||||
when UNDEFINED
|
||||
serialize_null_element(@buf, k)
|
||||
when CODE_W_SCOPE
|
||||
serialize_code_w_scope(@buf, k, v)
|
||||
when MAXKEY
|
||||
serialize_max_key_element(@buf, k)
|
||||
when MINKEY
|
||||
serialize_min_key_element(@buf, k)
|
||||
else
|
||||
raise "unhandled type #{type}"
|
||||
end
|
||||
end
|
||||
|
||||
def deserialize(buf=nil)
|
||||
# If buf is nil, use @buf, assumed to contain already-serialized BSON.
|
||||
# This is only true during testing.
|
||||
if buf.is_a? String
|
||||
@buf = ByteBuffer.new(buf) if buf
|
||||
else
|
||||
@buf = ByteBuffer.new(buf.to_a) if buf
|
||||
end
|
||||
@buf.rewind
|
||||
@buf.get_int # eat message size
|
||||
doc = OrderedHash.new
|
||||
while @buf.more?
|
||||
type = @buf.get
|
||||
case type
|
||||
when STRING, CODE
|
||||
key = deserialize_cstr(@buf)
|
||||
doc[key] = deserialize_string_data(@buf)
|
||||
when SYMBOL
|
||||
key = deserialize_cstr(@buf)
|
||||
doc[key] = deserialize_string_data(@buf).intern
|
||||
when NUMBER
|
||||
key = deserialize_cstr(@buf)
|
||||
doc[key] = deserialize_number_data(@buf)
|
||||
when NUMBER_INT
|
||||
key = deserialize_cstr(@buf)
|
||||
doc[key] = deserialize_number_int_data(@buf)
|
||||
when NUMBER_LONG
|
||||
key = deserialize_cstr(@buf)
|
||||
doc[key] = deserialize_number_long_data(@buf)
|
||||
when OID
|
||||
key = deserialize_cstr(@buf)
|
||||
doc[key] = deserialize_oid_data(@buf)
|
||||
when ARRAY
|
||||
key = deserialize_cstr(@buf)
|
||||
doc[key] = deserialize_array_data(@buf)
|
||||
when REGEX
|
||||
key = deserialize_cstr(@buf)
|
||||
doc[key] = deserialize_regex_data(@buf)
|
||||
when OBJECT
|
||||
key = deserialize_cstr(@buf)
|
||||
doc[key] = deserialize_object_data(@buf)
|
||||
when BOOLEAN
|
||||
key = deserialize_cstr(@buf)
|
||||
doc[key] = deserialize_boolean_data(@buf)
|
||||
when DATE
|
||||
key = deserialize_cstr(@buf)
|
||||
doc[key] = deserialize_date_data(@buf)
|
||||
when NULL
|
||||
key = deserialize_cstr(@buf)
|
||||
doc[key] = nil
|
||||
when UNDEFINED
|
||||
key = deserialize_cstr(@buf)
|
||||
doc[key] = nil
|
||||
when REF
|
||||
key = deserialize_cstr(@buf)
|
||||
doc[key] = deserialize_dbref_data(@buf)
|
||||
when BINARY
|
||||
key = deserialize_cstr(@buf)
|
||||
doc[key] = deserialize_binary_data(@buf)
|
||||
when CODE_W_SCOPE
|
||||
key = deserialize_cstr(@buf)
|
||||
doc[key] = deserialize_code_w_scope_data(@buf)
|
||||
when TIMESTAMP
|
||||
key = deserialize_cstr(@buf)
|
||||
doc[key] = [deserialize_number_int_data(@buf),
|
||||
deserialize_number_int_data(@buf)]
|
||||
when MAXKEY
|
||||
key = deserialize_cstr(@buf)
|
||||
doc[key] = MaxKey.new
|
||||
when MINKEY, 255 # This is currently easier than unpack the type byte as an unsigned char.
|
||||
key = deserialize_cstr(@buf)
|
||||
doc[key] = MinKey.new
|
||||
when EOO
|
||||
break
|
||||
else
|
||||
raise "Unknown type #{type}, key = #{key}"
|
||||
end
|
||||
end
|
||||
@buf.rewind
|
||||
doc
|
||||
end
|
||||
|
||||
# For debugging.
|
||||
def hex_dump
|
||||
str = ''
|
||||
@buf.to_a.each_with_index { |b,i|
|
||||
if (i % 8) == 0
|
||||
str << "\n" if i > 0
|
||||
str << '%4d: ' % i
|
||||
else
|
||||
str << ' '
|
||||
end
|
||||
str << '%02X' % b
|
||||
}
|
||||
str
|
||||
end
|
||||
|
||||
def deserialize_date_data(buf)
|
||||
unsigned = buf.get_long()
|
||||
# see note for deserialize_number_long_data below
|
||||
milliseconds = unsigned >= 2 ** 64 / 2 ? unsigned - 2**64 : unsigned
|
||||
Time.at(milliseconds.to_f / 1000.0).utc # at() takes fractional seconds
|
||||
end
|
||||
|
||||
def deserialize_boolean_data(buf)
|
||||
buf.get == 1
|
||||
end
|
||||
|
||||
def deserialize_number_data(buf)
|
||||
buf.get_double
|
||||
end
|
||||
|
||||
def deserialize_number_int_data(buf)
|
||||
# sometimes ruby makes me angry... why would the same code pack as signed
|
||||
# but unpack as unsigned
|
||||
unsigned = buf.get_int
|
||||
unsigned >= 2**32 / 2 ? unsigned - 2**32 : unsigned
|
||||
end
|
||||
|
||||
def deserialize_number_long_data(buf)
|
||||
# same note as above applies here...
|
||||
unsigned = buf.get_long
|
||||
unsigned >= 2 ** 64 / 2 ? unsigned - 2**64 : unsigned
|
||||
end
|
||||
|
||||
def deserialize_object_data(buf)
|
||||
size = buf.get_int
|
||||
buf.position -= 4
|
||||
object = BSON.new().deserialize(buf.get(size))
|
||||
if object.has_key? "$ref"
|
||||
DBRef.new(object["$ref"], object["$id"])
|
||||
else
|
||||
object
|
||||
end
|
||||
end
|
||||
|
||||
def deserialize_array_data(buf)
|
||||
h = deserialize_object_data(buf)
|
||||
a = []
|
||||
h.each { |k, v| a[k.to_i] = v }
|
||||
a
|
||||
end
|
||||
|
||||
def deserialize_regex_data(buf)
|
||||
str = deserialize_cstr(buf)
|
||||
options_str = deserialize_cstr(buf)
|
||||
options = 0
|
||||
options |= Regexp::IGNORECASE if options_str.include?('i')
|
||||
options |= Regexp::MULTILINE if options_str.include?('m')
|
||||
options |= Regexp::EXTENDED if options_str.include?('x')
|
||||
options_str.gsub!(/[imx]/, '') # Now remove the three we understand
|
||||
if options_str == ''
|
||||
Regexp.new(str, options)
|
||||
else
|
||||
warn("Using deprecated Regexp options #{options_str}; future versions of this MongoDB driver will support only i, m, and x. See deprecated class RegexpOfHolding for more info.")
|
||||
RegexpOfHolding.new(str, options, options_str)
|
||||
end
|
||||
end
|
||||
|
||||
def deserialize_string_data(buf)
|
||||
len = buf.get_int
|
||||
bytes = buf.get(len)
|
||||
str = bytes[0..-2]
|
||||
if str.respond_to? "pack"
|
||||
str = str.pack("C*")
|
||||
end
|
||||
if RUBY_VERSION >= '1.9'
|
||||
str.force_encoding("utf-8")
|
||||
end
|
||||
str
|
||||
end
|
||||
|
||||
def deserialize_code_w_scope_data(buf)
|
||||
buf.get_int
|
||||
len = buf.get_int
|
||||
code = buf.get(len)[0..-2]
|
||||
if code.respond_to? "pack"
|
||||
code = code.pack("C*")
|
||||
end
|
||||
if RUBY_VERSION >= '1.9'
|
||||
code.force_encoding("utf-8")
|
||||
end
|
||||
|
||||
scope_size = buf.get_int
|
||||
buf.position -= 4
|
||||
scope = BSON.new().deserialize(buf.get(scope_size))
|
||||
|
||||
Code.new(code, scope)
|
||||
end
|
||||
|
||||
def deserialize_oid_data(buf)
|
||||
ObjectID.new(buf.get(12))
|
||||
end
|
||||
|
||||
def deserialize_dbref_data(buf)
|
||||
ns = deserialize_string_data(buf)
|
||||
oid = deserialize_oid_data(buf)
|
||||
DBRef.new(ns, oid)
|
||||
end
|
||||
|
||||
def deserialize_binary_data(buf)
|
||||
len = buf.get_int
|
||||
type = buf.get
|
||||
len = buf.get_int if type == Binary::SUBTYPE_BYTES
|
||||
Binary.new(buf.get(len), type)
|
||||
end
|
||||
|
||||
def serialize_eoo_element(buf)
|
||||
buf.put(EOO)
|
||||
end
|
||||
|
||||
def serialize_null_element(buf, key)
|
||||
buf.put(NULL)
|
||||
self.class.serialize_key(buf, key)
|
||||
end
|
||||
|
||||
def serialize_dbref_element(buf, key, val)
|
||||
oh = OrderedHash.new
|
||||
oh['$ref'] = val.namespace
|
||||
oh['$id'] = val.object_id
|
||||
serialize_object_element(buf, key, oh, false)
|
||||
end
|
||||
|
||||
def serialize_binary_element(buf, key, val)
|
||||
buf.put(BINARY)
|
||||
self.class.serialize_key(buf, key)
|
||||
|
||||
bytes = val.to_a
|
||||
num_bytes = bytes.length
|
||||
subtype = val.respond_to?(:subtype) ? val.subtype : Binary::SUBTYPE_BYTES
|
||||
if subtype == Binary::SUBTYPE_BYTES
|
||||
buf.put_int(num_bytes + 4)
|
||||
buf.put(subtype)
|
||||
buf.put_int(num_bytes)
|
||||
buf.put_array(bytes)
|
||||
else
|
||||
buf.put_int(num_bytes)
|
||||
buf.put(subtype)
|
||||
buf.put_array(bytes)
|
||||
end
|
||||
end
|
||||
|
||||
def serialize_boolean_element(buf, key, val)
|
||||
buf.put(BOOLEAN)
|
||||
self.class.serialize_key(buf, key)
|
||||
buf.put(val ? 1 : 0)
|
||||
end
|
||||
|
||||
def serialize_date_element(buf, key, val)
|
||||
buf.put(DATE)
|
||||
self.class.serialize_key(buf, key)
|
||||
millisecs = (val.to_f * 1000).to_i
|
||||
buf.put_long(millisecs)
|
||||
end
|
||||
|
||||
def serialize_number_element(buf, key, val, type)
|
||||
if type == NUMBER
|
||||
buf.put(type)
|
||||
self.class.serialize_key(buf, key)
|
||||
buf.put_double(val)
|
||||
else
|
||||
if val > 2**64 / 2 - 1 or val < -2**64 / 2
|
||||
raise RangeError.new("MongoDB can only handle 8-byte ints")
|
||||
end
|
||||
if val > 2**32 / 2 - 1 or val < -2**32 / 2
|
||||
buf.put(NUMBER_LONG)
|
||||
self.class.serialize_key(buf, key)
|
||||
buf.put_long(val)
|
||||
else
|
||||
buf.put(type)
|
||||
self.class.serialize_key(buf, key)
|
||||
buf.put_int(val)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
def serialize_object_element(buf, key, val, check_keys, opcode=OBJECT)
|
||||
buf.put(opcode)
|
||||
self.class.serialize_key(buf, key)
|
||||
buf.put_array(BSON.new.serialize(val, check_keys).to_a)
|
||||
end
|
||||
|
||||
def serialize_array_element(buf, key, val, check_keys)
|
||||
# Turn array into hash with integer indices as keys
|
||||
h = OrderedHash.new
|
||||
i = 0
|
||||
val.each { |v| h[i] = v; i += 1 }
|
||||
serialize_object_element(buf, key, h, check_keys, ARRAY)
|
||||
end
|
||||
|
||||
def serialize_regex_element(buf, key, val)
|
||||
buf.put(REGEX)
|
||||
self.class.serialize_key(buf, key)
|
||||
|
||||
str = val.source
|
||||
# We use serialize_key here since regex patterns aren't prefixed with
|
||||
# length (can't contain the NULL byte).
|
||||
self.class.serialize_key(buf, str)
|
||||
|
||||
options = val.options
|
||||
options_str = ''
|
||||
options_str << 'i' if ((options & Regexp::IGNORECASE) != 0)
|
||||
options_str << 'm' if ((options & Regexp::MULTILINE) != 0)
|
||||
options_str << 'x' if ((options & Regexp::EXTENDED) != 0)
|
||||
options_str << val.extra_options_str if val.respond_to?(:extra_options_str)
|
||||
# Must store option chars in alphabetical order
|
||||
self.class.serialize_cstr(buf, options_str.split(//).sort.uniq.join)
|
||||
end
|
||||
|
||||
def serialize_max_key_element(buf, key)
|
||||
buf.put(MAXKEY)
|
||||
self.class.serialize_key(buf, key)
|
||||
end
|
||||
|
||||
def serialize_min_key_element(buf, key)
|
||||
buf.put(MINKEY)
|
||||
self.class.serialize_key(buf, key)
|
||||
end
|
||||
|
||||
def serialize_oid_element(buf, key, val)
|
||||
buf.put(OID)
|
||||
self.class.serialize_key(buf, key)
|
||||
|
||||
buf.put_array(val.to_a)
|
||||
end
|
||||
|
||||
def serialize_string_element(buf, key, val, type)
|
||||
buf.put(type)
|
||||
self.class.serialize_key(buf, key)
|
||||
|
||||
# Make a hole for the length
|
||||
len_pos = buf.position
|
||||
buf.put_int(0)
|
||||
|
||||
# Save the string
|
||||
start_pos = buf.position
|
||||
self.class.serialize_cstr(buf, val)
|
||||
end_pos = buf.position
|
||||
|
||||
# Put the string size in front
|
||||
buf.put_int(end_pos - start_pos, len_pos)
|
||||
|
||||
# Go back to where we were
|
||||
buf.position = end_pos
|
||||
end
|
||||
|
||||
def serialize_code_w_scope(buf, key, val)
|
||||
buf.put(CODE_W_SCOPE)
|
||||
self.class.serialize_key(buf, key)
|
||||
|
||||
# Make a hole for the length
|
||||
len_pos = buf.position
|
||||
buf.put_int(0)
|
||||
|
||||
buf.put_int(val.length + 1)
|
||||
self.class.serialize_cstr(buf, val)
|
||||
buf.put_array(BSON.new.serialize(val.scope).to_a)
|
||||
|
||||
end_pos = buf.position
|
||||
buf.put_int(end_pos - len_pos, len_pos)
|
||||
buf.position = end_pos
|
||||
end
|
||||
|
||||
def deserialize_cstr(buf)
|
||||
chars = ""
|
||||
while true
|
||||
b = buf.get
|
||||
break if b == 0
|
||||
chars << b.chr
|
||||
end
|
||||
if RUBY_VERSION >= '1.9'
|
||||
chars.force_encoding("utf-8") # Mongo stores UTF-8
|
||||
end
|
||||
chars
|
||||
end
|
||||
|
||||
def bson_type(o)
|
||||
case o
|
||||
when nil
|
||||
NULL
|
||||
when Integer
|
||||
NUMBER_INT
|
||||
when Float
|
||||
NUMBER
|
||||
when ByteBuffer
|
||||
BINARY
|
||||
when Code
|
||||
CODE_W_SCOPE
|
||||
when String
|
||||
STRING
|
||||
when Array
|
||||
ARRAY
|
||||
when Regexp
|
||||
REGEX
|
||||
when ObjectID
|
||||
OID
|
||||
when DBRef
|
||||
REF
|
||||
when true, false
|
||||
BOOLEAN
|
||||
when Time
|
||||
DATE
|
||||
when Hash
|
||||
OBJECT
|
||||
when Symbol
|
||||
SYMBOL
|
||||
when MaxKey
|
||||
MAXKEY
|
||||
when MinKey
|
||||
MINKEY
|
||||
when Numeric
|
||||
raise InvalidDocument, "Cannot serialize the Numeric type #{o.class} as BSON; only Fixum, Bignum, and Float are supported."
|
||||
when Date, DateTime
|
||||
raise InvalidDocument, "#{o.class} is not currently supported; " +
|
||||
"use a UTC Time instance instead."
|
||||
else
|
||||
if defined?(ActiveSupport::TimeWithZone) && o.is_a?(ActiveSupport::TimeWithZone)
|
||||
raise InvalidDocument, "ActiveSupport::TimeWithZone is not currently supported; " +
|
||||
"use a UTC Time instance instead."
|
||||
else
|
||||
raise InvalidDocument, "Cannot serialize #{o.class} as a BSON type; it either isn't supported or won't translate to BSON."
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
end
|
|
@ -0,0 +1,40 @@
|
|||
$:.unshift(File.join(File.dirname(__FILE__), '..', 'lib'))
|
||||
|
||||
module Mongo
|
||||
module BSON
|
||||
VERSION = "0.19.2"
|
||||
end
|
||||
end
|
||||
|
||||
begin
|
||||
# Need this for running test with and without c ext in Ruby 1.9.
|
||||
raise LoadError if ENV['TEST_MODE'] && !ENV['C_EXT']
|
||||
require 'mongo_ext/cbson'
|
||||
raise LoadError unless defined?(CBson::VERSION) && CBson::VERSION == Mongo::BSON::VERSION
|
||||
require 'mongo_bson/bson_c'
|
||||
module Mongo
|
||||
BSON_CODER = BSON_C
|
||||
end
|
||||
rescue LoadError
|
||||
require 'mongo_bson/bson_ruby'
|
||||
module Mongo
|
||||
BSON_CODER = BSON_RUBY
|
||||
end
|
||||
warn "\n**Notice: C extension not loaded. This is required for optimum MongoDB Ruby driver performance."
|
||||
warn " You can install the extension as follows:\n gem install mongo_ext\n"
|
||||
warn " If you continue to receive this message after installing, make sure that the"
|
||||
warn " mongo_ext gem is in your load path and that the mongo_ext and mongo gems are of the same version.\n"
|
||||
end
|
||||
|
||||
require 'mongo_bson/types/binary'
|
||||
require 'mongo_bson/types/code'
|
||||
require 'mongo_bson/types/dbref'
|
||||
require 'mongo_bson/types/objectid'
|
||||
require 'mongo_bson/types/regexp_of_holding'
|
||||
require 'mongo_bson/types/min_max_keys'
|
||||
|
||||
require 'base64'
|
||||
require 'mongo_bson/ordered_hash'
|
||||
require 'mongo_bson/byte_buffer'
|
||||
require 'mongo_bson/bson_ruby'
|
||||
require 'mongo_bson/exceptions'
|
|
@ -0,0 +1,20 @@
|
|||
# A thin wrapper for the CBson class
module Mongo

  # Delegates BSON encoding/decoding to the compiled C extension (CBson),
  # exposing the same class-level API as the pure-Ruby BSON_RUBY coder so
  # the two are interchangeable behind Mongo::BSON_CODER.
  class BSON_C

    # Encodes +obj+ (a Hash-like document) as BSON.
    # Returns a ByteBuffer wrapping the raw bytes produced by the extension.
    def self.serialize(obj, check_keys=false, move_id=false)
      ByteBuffer.new(CBson.serialize(obj, check_keys, move_id))
    end

    # Decodes +buf+ into a document. +buf+ may be a String of raw BSON
    # bytes or any object responding to #to_a (e.g. a ByteBuffer).
    def self.deserialize(buf=nil)
      if buf.is_a? String
        # Bug fix: the wrapped buffer used to be assigned to an unused
        # local (`to_deserialize`), so the #rewind below was invoked on
        # the String itself and raised NoMethodError.
        buf = ByteBuffer.new(buf) if buf
      else
        buf = ByteBuffer.new(buf.to_a) if buf
      end
      buf.rewind
      CBson.deserialize(buf.to_s)
    end

  end
end
|
|
@ -0,0 +1,601 @@
|
|||
# --
|
||||
# Copyright (C) 2008-2010 10gen Inc.
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify it
|
||||
# under the terms of the GNU Affero General Public License, version 3, as
|
||||
# published by the Free Software Foundation.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful, but WITHOUT
|
||||
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
||||
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License
|
||||
# for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Affero General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
# ++
|
||||
|
||||
module Mongo
|
||||
# A BSON seralizer/deserializer in pure Ruby.
|
||||
class BSON_RUBY
|
||||
|
||||
# why was this necessary?
|
||||
#include Mongo
|
||||
|
||||
MINKEY = -1
|
||||
EOO = 0
|
||||
NUMBER = 1
|
||||
STRING = 2
|
||||
OBJECT = 3
|
||||
ARRAY = 4
|
||||
BINARY = 5
|
||||
UNDEFINED = 6
|
||||
OID = 7
|
||||
BOOLEAN = 8
|
||||
DATE = 9
|
||||
NULL = 10
|
||||
REGEX = 11
|
||||
REF = 12
|
||||
CODE = 13
|
||||
SYMBOL = 14
|
||||
CODE_W_SCOPE = 15
|
||||
NUMBER_INT = 16
|
||||
TIMESTAMP = 17
|
||||
NUMBER_LONG = 18
|
||||
MAXKEY = 127
|
||||
|
||||
def initialize
|
||||
@buf = ByteBuffer.new
|
||||
end
|
||||
|
||||
# Ruby-version-specific UTF-8 validation/conversion helper.
if RUBY_VERSION >= '1.9'
  # 1.9+: rely on the built-in transcoding machinery.
  def self.to_utf8(str)
    str.encode("utf-8")
  end
else
  # 1.8: unpacking as UTF-8 codepoints validates the bytes; the original
  # string is returned unchanged.
  def self.to_utf8(str)
    begin
      str.unpack("U*")
    rescue => ex
      raise InvalidStringEncoding, "String not valid utf-8: #{str}"
    end
    str
  end
end

# Appends +val+ (coerced via #to_s) to +buf+ as a NUL-terminated UTF-8
# C string.
def self.serialize_cstr(buf, val)
  buf.put_array(to_utf8(val.to_s).unpack("C*") << 0)
end

# Like serialize_cstr, but rejects keys containing a NUL byte, which
# would otherwise silently truncate the key.
def self.serialize_key(buf, key)
  raise InvalidDocument, "Key names / regex patterns must not contain the NULL byte" if key.include? "\x00"
  self.serialize_cstr(buf, key)
end
|
||||
|
||||
def to_a
|
||||
@buf.to_a
|
||||
end
|
||||
|
||||
def to_s
|
||||
@buf.to_s
|
||||
end
|
||||
|
||||
# Serializes an object.
|
||||
# Implemented to ensure an API compatible with BSON extension.
|
||||
def self.serialize(obj, check_keys=false, move_id=false)
|
||||
new.serialize(obj, check_keys, move_id)
|
||||
end
|
||||
|
||||
def self.deserialize(buf=nil)
|
||||
new.deserialize(buf)
|
||||
end
|
||||
|
||||
def serialize(obj, check_keys=false, move_id=false)
|
||||
raise "Document is null" unless obj
|
||||
|
||||
@buf.rewind
|
||||
# put in a placeholder for the total size
|
||||
@buf.put_int(0)
|
||||
|
||||
# Write key/value pairs. Always write _id first if it exists.
|
||||
if move_id
|
||||
if obj.has_key? '_id'
|
||||
serialize_key_value('_id', obj['_id'], false)
|
||||
elsif obj.has_key? :_id
|
||||
serialize_key_value('_id', obj[:_id], false)
|
||||
end
|
||||
obj.each {|k, v| serialize_key_value(k, v, check_keys) unless k == '_id' || k == :_id }
|
||||
else
|
||||
if obj.has_key?('_id') && obj.has_key?(:_id)
|
||||
obj['_id'] = obj.delete(:_id)
|
||||
end
|
||||
obj.each {|k, v| serialize_key_value(k, v, check_keys) }
|
||||
end
|
||||
|
||||
serialize_eoo_element(@buf)
|
||||
if @buf.size > 4 * 1024 * 1024
|
||||
raise InvalidDocument, "Document is too large (#{@buf.size}). BSON documents are limited to 4MB (#{4 * 1024 * 1024})."
|
||||
end
|
||||
@buf.put_int(@buf.size, 0)
|
||||
self
|
||||
end
|
||||
|
||||
# Returns the array stored in the buffer.
|
||||
# Implemented to ensure an API compatible with BSON extension.
|
||||
def unpack(arg)
|
||||
@buf.to_a
|
||||
end
|
||||
|
||||
def serialize_key_value(k, v, check_keys)
|
||||
k = k.to_s
|
||||
if check_keys
|
||||
if k[0] == ?$
|
||||
raise InvalidName.new("key #{k} must not start with '$'")
|
||||
end
|
||||
if k.include? ?.
|
||||
raise InvalidName.new("key #{k} must not contain '.'")
|
||||
end
|
||||
end
|
||||
type = bson_type(v)
|
||||
case type
|
||||
when STRING, SYMBOL
|
||||
serialize_string_element(@buf, k, v, type)
|
||||
when NUMBER, NUMBER_INT
|
||||
serialize_number_element(@buf, k, v, type)
|
||||
when OBJECT
|
||||
serialize_object_element(@buf, k, v, check_keys)
|
||||
when OID
|
||||
serialize_oid_element(@buf, k, v)
|
||||
when ARRAY
|
||||
serialize_array_element(@buf, k, v, check_keys)
|
||||
when REGEX
|
||||
serialize_regex_element(@buf, k, v)
|
||||
when BOOLEAN
|
||||
serialize_boolean_element(@buf, k, v)
|
||||
when DATE
|
||||
serialize_date_element(@buf, k, v)
|
||||
when NULL
|
||||
serialize_null_element(@buf, k)
|
||||
when REF
|
||||
serialize_dbref_element(@buf, k, v)
|
||||
when BINARY
|
||||
serialize_binary_element(@buf, k, v)
|
||||
when UNDEFINED
|
||||
serialize_null_element(@buf, k)
|
||||
when CODE_W_SCOPE
|
||||
serialize_code_w_scope(@buf, k, v)
|
||||
when MAXKEY
|
||||
serialize_max_key_element(@buf, k)
|
||||
when MINKEY
|
||||
serialize_min_key_element(@buf, k)
|
||||
else
|
||||
raise "unhandled type #{type}"
|
||||
end
|
||||
end
|
||||
|
||||
def deserialize(buf=nil)
|
||||
# If buf is nil, use @buf, assumed to contain already-serialized BSON.
|
||||
# This is only true during testing.
|
||||
if buf.is_a? String
|
||||
@buf = ByteBuffer.new(buf) if buf
|
||||
else
|
||||
@buf = ByteBuffer.new(buf.to_a) if buf
|
||||
end
|
||||
@buf.rewind
|
||||
@buf.get_int # eat message size
|
||||
doc = OrderedHash.new
|
||||
while @buf.more?
|
||||
type = @buf.get
|
||||
case type
|
||||
when STRING, CODE
|
||||
key = deserialize_cstr(@buf)
|
||||
doc[key] = deserialize_string_data(@buf)
|
||||
when SYMBOL
|
||||
key = deserialize_cstr(@buf)
|
||||
doc[key] = deserialize_string_data(@buf).intern
|
||||
when NUMBER
|
||||
key = deserialize_cstr(@buf)
|
||||
doc[key] = deserialize_number_data(@buf)
|
||||
when NUMBER_INT
|
||||
key = deserialize_cstr(@buf)
|
||||
doc[key] = deserialize_number_int_data(@buf)
|
||||
when NUMBER_LONG
|
||||
key = deserialize_cstr(@buf)
|
||||
doc[key] = deserialize_number_long_data(@buf)
|
||||
when OID
|
||||
key = deserialize_cstr(@buf)
|
||||
doc[key] = deserialize_oid_data(@buf)
|
||||
when ARRAY
|
||||
key = deserialize_cstr(@buf)
|
||||
doc[key] = deserialize_array_data(@buf)
|
||||
when REGEX
|
||||
key = deserialize_cstr(@buf)
|
||||
doc[key] = deserialize_regex_data(@buf)
|
||||
when OBJECT
|
||||
key = deserialize_cstr(@buf)
|
||||
doc[key] = deserialize_object_data(@buf)
|
||||
when BOOLEAN
|
||||
key = deserialize_cstr(@buf)
|
||||
doc[key] = deserialize_boolean_data(@buf)
|
||||
when DATE
|
||||
key = deserialize_cstr(@buf)
|
||||
doc[key] = deserialize_date_data(@buf)
|
||||
when NULL
|
||||
key = deserialize_cstr(@buf)
|
||||
doc[key] = nil
|
||||
when UNDEFINED
|
||||
key = deserialize_cstr(@buf)
|
||||
doc[key] = nil
|
||||
when REF
|
||||
key = deserialize_cstr(@buf)
|
||||
doc[key] = deserialize_dbref_data(@buf)
|
||||
when BINARY
|
||||
key = deserialize_cstr(@buf)
|
||||
doc[key] = deserialize_binary_data(@buf)
|
||||
when CODE_W_SCOPE
|
||||
key = deserialize_cstr(@buf)
|
||||
doc[key] = deserialize_code_w_scope_data(@buf)
|
||||
when TIMESTAMP
|
||||
key = deserialize_cstr(@buf)
|
||||
doc[key] = [deserialize_number_int_data(@buf),
|
||||
deserialize_number_int_data(@buf)]
|
||||
when MAXKEY
|
||||
key = deserialize_cstr(@buf)
|
||||
doc[key] = MaxKey.new
|
||||
when MINKEY, 255 # This is currently easier than unpack the type byte as an unsigned char.
|
||||
key = deserialize_cstr(@buf)
|
||||
doc[key] = MinKey.new
|
||||
when EOO
|
||||
break
|
||||
else
|
||||
raise "Unknown type #{type}, key = #{key}"
|
||||
end
|
||||
end
|
||||
@buf.rewind
|
||||
doc
|
||||
end
|
||||
|
||||
# For debugging.
|
||||
# For debugging: renders the buffer as a hex dump, eight bytes per row,
# each row prefixed with its starting offset.
def hex_dump
  str = ''
  @buf.to_a.each_with_index do |byte, i|
    if (i % 8) == 0
      str << "\n" if i > 0
      str << '%4d: ' % i
    else
      str << ' '
    end
    str << '%02X' % byte
  end
  str
end
|
||||
|
||||
def deserialize_date_data(buf)
|
||||
unsigned = buf.get_long()
|
||||
# see note for deserialize_number_long_data below
|
||||
milliseconds = unsigned >= 2 ** 64 / 2 ? unsigned - 2**64 : unsigned
|
||||
Time.at(milliseconds.to_f / 1000.0).utc # at() takes fractional seconds
|
||||
end
|
||||
|
||||
# Reads a BSON boolean: a single byte, 1 for true.
def deserialize_boolean_data(buf)
  buf.get == 1
end

# Reads a BSON double (8-byte IEEE-754 float).
def deserialize_number_data(buf)
  buf.get_double
end

# Reads a BSON int32. ByteBuffer unpacks the bytes as an unsigned
# value, so anything >= 2**31 is reinterpreted as a negative
# signed 32-bit integer.
def deserialize_number_int_data(buf)
  unsigned = buf.get_int
  unsigned >= 2**32 / 2 ? unsigned - 2**32 : unsigned
end

# Reads a BSON int64, with the same unsigned-to-signed
# reinterpretation as deserialize_number_int_data.
def deserialize_number_long_data(buf)
  unsigned = buf.get_long
  unsigned >= 2**64 / 2 ? unsigned - 2**64 : unsigned
end
|
||||
|
||||
def deserialize_object_data(buf)
|
||||
size = buf.get_int
|
||||
buf.position -= 4
|
||||
object = BSON_CODER.new().deserialize(buf.get(size))
|
||||
if object.has_key? "$ref"
|
||||
DBRef.new(object["$ref"], object["$id"])
|
||||
else
|
||||
object
|
||||
end
|
||||
end
|
||||
|
||||
def deserialize_array_data(buf)
|
||||
h = deserialize_object_data(buf)
|
||||
a = []
|
||||
h.each { |k, v| a[k.to_i] = v }
|
||||
a
|
||||
end
|
||||
|
||||
def deserialize_regex_data(buf)
|
||||
str = deserialize_cstr(buf)
|
||||
options_str = deserialize_cstr(buf)
|
||||
options = 0
|
||||
options |= Regexp::IGNORECASE if options_str.include?('i')
|
||||
options |= Regexp::MULTILINE if options_str.include?('m')
|
||||
options |= Regexp::EXTENDED if options_str.include?('x')
|
||||
options_str.gsub!(/[imx]/, '') # Now remove the three we understand
|
||||
if options_str == ''
|
||||
Regexp.new(str, options)
|
||||
else
|
||||
warn("Using deprecated Regexp options #{options_str}; future versions of this MongoDB driver will support only i, m, and x. See deprecated class RegexpOfHolding for more info.")
|
||||
RegexpOfHolding.new(str, options, options_str)
|
||||
end
|
||||
end
|
||||
|
||||
# Reads a length-prefixed BSON string: an int32 byte count (which
# includes the trailing NUL), followed by the bytes themselves.
# On Ruby 1.8 ByteBuffer#get returns an Array of byte values, hence
# the pack("C*") step; on 1.9+ the result is tagged as UTF-8, which
# is what MongoDB stores.
def deserialize_string_data(buf)
  len = buf.get_int
  bytes = buf.get(len)
  str = bytes[0..-2] # drop the trailing NUL terminator
  if str.respond_to? "pack"
    str = str.pack("C*")
  end
  if RUBY_VERSION >= '1.9'
    str.force_encoding("utf-8")
  end
  str
end
|
||||
|
||||
def deserialize_code_w_scope_data(buf)
|
||||
buf.get_int
|
||||
len = buf.get_int
|
||||
code = buf.get(len)[0..-2]
|
||||
if code.respond_to? "pack"
|
||||
code = code.pack("C*")
|
||||
end
|
||||
if RUBY_VERSION >= '1.9'
|
||||
code.force_encoding("utf-8")
|
||||
end
|
||||
|
||||
scope_size = buf.get_int
|
||||
buf.position -= 4
|
||||
scope = BSON_CODER.new().deserialize(buf.get(scope_size))
|
||||
|
||||
Code.new(code, scope)
|
||||
end
|
||||
|
||||
def deserialize_oid_data(buf)
|
||||
ObjectID.new(buf.get(12))
|
||||
end
|
||||
|
||||
def deserialize_dbref_data(buf)
|
||||
ns = deserialize_string_data(buf)
|
||||
oid = deserialize_oid_data(buf)
|
||||
DBRef.new(ns, oid)
|
||||
end
|
||||
|
||||
def deserialize_binary_data(buf)
|
||||
len = buf.get_int
|
||||
type = buf.get
|
||||
len = buf.get_int if type == Binary::SUBTYPE_BYTES
|
||||
Binary.new(buf.get(len), type)
|
||||
end
|
||||
|
||||
def serialize_eoo_element(buf)
|
||||
buf.put(EOO)
|
||||
end
|
||||
|
||||
def serialize_null_element(buf, key)
|
||||
buf.put(NULL)
|
||||
self.class.serialize_key(buf, key)
|
||||
end
|
||||
|
||||
def serialize_dbref_element(buf, key, val)
|
||||
oh = OrderedHash.new
|
||||
oh['$ref'] = val.namespace
|
||||
oh['$id'] = val.object_id
|
||||
serialize_object_element(buf, key, oh, false)
|
||||
end
|
||||
|
||||
def serialize_binary_element(buf, key, val)
|
||||
buf.put(BINARY)
|
||||
self.class.serialize_key(buf, key)
|
||||
|
||||
bytes = val.to_a
|
||||
num_bytes = bytes.length
|
||||
subtype = val.respond_to?(:subtype) ? val.subtype : Binary::SUBTYPE_BYTES
|
||||
if subtype == Binary::SUBTYPE_BYTES
|
||||
buf.put_int(num_bytes + 4)
|
||||
buf.put(subtype)
|
||||
buf.put_int(num_bytes)
|
||||
buf.put_array(bytes)
|
||||
else
|
||||
buf.put_int(num_bytes)
|
||||
buf.put(subtype)
|
||||
buf.put_array(bytes)
|
||||
end
|
||||
end
|
||||
|
||||
# Writes a boolean element: type tag, key, then a single byte
# (1 for true, 0 for false).
def serialize_boolean_element(buf, key, val)
  buf.put(BOOLEAN)
  self.class.serialize_key(buf, key)
  buf.put(val ? 1 : 0)
end

# Writes a date element: type tag, key, then the time as a signed
# 64-bit count of milliseconds since the epoch (sub-millisecond
# precision is truncated).
def serialize_date_element(buf, key, val)
  buf.put(DATE)
  self.class.serialize_key(buf, key)
  millisecs = (val.to_f * 1000).to_i
  buf.put_long(millisecs)
end
|
||||
|
||||
# Writes a numeric element. Floats (+type+ == NUMBER) are emitted as
# BSON doubles; integers are emitted as 32-bit ints when they fit and
# as 64-bit longs otherwise. Values outside the signed 64-bit range
# raise RangeError.
def serialize_number_element(buf, key, val, type)
  if type == NUMBER
    buf.put(type)
    self.class.serialize_key(buf, key)
    buf.put_double(val)
  else
    # `||` instead of the low-precedence `or` keyword (precedence fix).
    if val > 2**64 / 2 - 1 || val < -2**64 / 2
      raise RangeError.new("MongoDB can only handle 8-byte ints")
    end
    if val > 2**32 / 2 - 1 || val < -2**32 / 2
      # Too big for int32: promote to int64.
      buf.put(NUMBER_LONG)
      self.class.serialize_key(buf, key)
      buf.put_long(val)
    else
      buf.put(type)
      self.class.serialize_key(buf, key)
      buf.put_int(val)
    end
  end
end
|
||||
|
||||
def serialize_object_element(buf, key, val, check_keys, opcode=OBJECT)
|
||||
buf.put(opcode)
|
||||
self.class.serialize_key(buf, key)
|
||||
buf.put_array(BSON_CODER.new.serialize(val, check_keys).to_a)
|
||||
end
|
||||
|
||||
def serialize_array_element(buf, key, val, check_keys)
|
||||
# Turn array into hash with integer indices as keys
|
||||
h = OrderedHash.new
|
||||
i = 0
|
||||
val.each { |v| h[i] = v; i += 1 }
|
||||
serialize_object_element(buf, key, h, check_keys, ARRAY)
|
||||
end
|
||||
|
||||
def serialize_regex_element(buf, key, val)
|
||||
buf.put(REGEX)
|
||||
self.class.serialize_key(buf, key)
|
||||
|
||||
str = val.source
|
||||
# We use serialize_key here since regex patterns aren't prefixed with
|
||||
# length (can't contain the NULL byte).
|
||||
self.class.serialize_key(buf, str)
|
||||
|
||||
options = val.options
|
||||
options_str = ''
|
||||
options_str << 'i' if ((options & Regexp::IGNORECASE) != 0)
|
||||
options_str << 'm' if ((options & Regexp::MULTILINE) != 0)
|
||||
options_str << 'x' if ((options & Regexp::EXTENDED) != 0)
|
||||
options_str << val.extra_options_str if val.respond_to?(:extra_options_str)
|
||||
# Must store option chars in alphabetical order
|
||||
self.class.serialize_cstr(buf, options_str.split(//).sort.uniq.join)
|
||||
end
|
||||
|
||||
# Writes a MaxKey element: type tag and key only (no payload).
def serialize_max_key_element(buf, key)
  buf.put(MAXKEY)
  self.class.serialize_key(buf, key)
end

# Writes a MinKey element: type tag and key only (no payload).
def serialize_min_key_element(buf, key)
  buf.put(MINKEY)
  self.class.serialize_key(buf, key)
end
|
||||
|
||||
def serialize_oid_element(buf, key, val)
|
||||
buf.put(OID)
|
||||
self.class.serialize_key(buf, key)
|
||||
|
||||
buf.put_array(val.to_a)
|
||||
end
|
||||
|
||||
def serialize_string_element(buf, key, val, type)
|
||||
buf.put(type)
|
||||
self.class.serialize_key(buf, key)
|
||||
|
||||
# Make a hole for the length
|
||||
len_pos = buf.position
|
||||
buf.put_int(0)
|
||||
|
||||
# Save the string
|
||||
start_pos = buf.position
|
||||
self.class.serialize_cstr(buf, val)
|
||||
end_pos = buf.position
|
||||
|
||||
# Put the string size in front
|
||||
buf.put_int(end_pos - start_pos, len_pos)
|
||||
|
||||
# Go back to where we were
|
||||
buf.position = end_pos
|
||||
end
|
||||
|
||||
def serialize_code_w_scope(buf, key, val)
|
||||
buf.put(CODE_W_SCOPE)
|
||||
self.class.serialize_key(buf, key)
|
||||
|
||||
# Make a hole for the length
|
||||
len_pos = buf.position
|
||||
buf.put_int(0)
|
||||
|
||||
buf.put_int(val.length + 1)
|
||||
self.class.serialize_cstr(buf, val)
|
||||
buf.put_array(BSON_CODER.new.serialize(val.scope).to_a)
|
||||
|
||||
end_pos = buf.position
|
||||
buf.put_int(end_pos - len_pos, len_pos)
|
||||
buf.position = end_pos
|
||||
end
|
||||
|
||||
# Reads a NUL-terminated C string from +buf+, one byte at a time.
# MongoDB stores UTF-8, so the result is tagged accordingly on 1.9+.
def deserialize_cstr(buf)
  chars = ""
  while true
    b = buf.get
    break if b == 0 # NUL terminator ends the string
    chars << b.chr
  end
  if RUBY_VERSION >= '1.9'
    chars.force_encoding("utf-8") # Mongo stores UTF-8
  end
  chars
end
|
||||
|
||||
# Maps a Ruby object to the BSON type tag used to serialize it, or
# raises InvalidDocument for unsupported types. Branch order matters:
# e.g. Integer must precede the generic Numeric catch-all, and the
# driver-specific classes (Code, ObjectID, DBRef, ...) come before the
# broader core classes they might otherwise match.
def bson_type(o)
  case o
  when nil
    NULL
  when Integer
    NUMBER_INT
  when Float
    NUMBER
  when ByteBuffer
    BINARY
  when Code
    CODE_W_SCOPE
  when String
    STRING
  when Array
    ARRAY
  when Regexp
    REGEX
  when ObjectID
    OID
  when DBRef
    REF
  when true, false
    BOOLEAN
  when Time
    DATE
  when Hash
    OBJECT
  when Symbol
    SYMBOL
  when MaxKey
    MAXKEY
  when MinKey
    MINKEY
  when Numeric
    # Message typo fixed: "Fixum" -> "Fixnum".
    raise InvalidDocument, "Cannot serialize the Numeric type #{o.class} as BSON; only Fixnum, Bignum, and Float are supported."
  when Date, DateTime
    raise InvalidDocument, "#{o.class} is not currently supported; " +
      "use a UTC Time instance instead."
  else
    if defined?(ActiveSupport::TimeWithZone) && o.is_a?(ActiveSupport::TimeWithZone)
      raise InvalidDocument, "ActiveSupport::TimeWithZone is not currently supported; " +
        "use a UTC Time instance instead."
    else
      raise InvalidDocument, "Cannot serialize #{o.class} as a BSON type; it either isn't supported or won't translate to BSON."
    end
  end
end
|
||||
|
||||
end
|
||||
end
|
|
@ -0,0 +1,36 @@
|
|||
# --
|
||||
# Copyright (C) 2008-2010 10gen Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
# ++
|
||||
|
||||
module Mongo
  # Generic Mongo Ruby Driver exception class.
  class MongoRubyError < StandardError; end

  # Raised when MongoDB itself has returned an error.
  class MongoDBError < RuntimeError; end

  # Raised when a given string is not valid utf-8 (Ruby 1.8 only).
  class InvalidStringEncoding < MongoRubyError; end

  # Raised when attempting to initialize an invalid ObjectID.
  class InvalidObjectID < MongoRubyError; end

  # Raised when trying to insert a document that exceeds the 4MB limit or
  # when the document contains objects that can't be serialized as BSON.
  class InvalidDocument < MongoDBError; end

  # Raised when an invalid name is used.
  class InvalidName < RuntimeError; end
end
|
|
@ -14,7 +14,7 @@
|
|||
# limitations under the License.
|
||||
# ++
|
||||
|
||||
require 'mongo/util/byte_buffer'
|
||||
require 'mongo_bson/byte_buffer'
|
||||
|
||||
module Mongo
|
||||
|
|
@ -0,0 +1,23 @@
|
|||
require "lib/mongo_bson"
|
||||
|
||||
Gem::Specification.new do |s|
|
||||
s.name = 'mongo_bson'
|
||||
|
||||
s.version = Mongo::BSON::VERSION
|
||||
|
||||
s.platform = Gem::Platform::RUBY
|
||||
s.summary = 'Ruby implementation of BSON'
|
||||
s.description = 'A Ruby BSON implementation for MongoDB. For more information about Mongo, see http://www.mongodb.org. For more information on BSON, see http://www.bsonspec.org.'
|
||||
|
||||
s.require_paths = ['lib']
|
||||
|
||||
s.files = ['Rakefile', 'mongo-bson.gemspec', 'LICENSE.txt']
|
||||
s.files += ['lib/mongo_bson.rb'] + Dir['lib/mongo_bson/**/*.rb']
|
||||
s.test_files = Dir['test/mongo_bson/*.rb']
|
||||
|
||||
s.has_rdoc = true
|
||||
|
||||
s.authors = ['Jim Menard', 'Mike Dirolf', 'Kyle Banker']
|
||||
s.email = 'mongodb-dev@googlegroups.com'
|
||||
s.homepage = 'http://www.mongodb.org'
|
||||
end
|
|
@ -12,11 +12,13 @@ Gem::Specification.new do |s|
|
|||
s.require_paths = ['lib']
|
||||
|
||||
s.files = ['README.rdoc', 'Rakefile', 'mongo-ruby-driver.gemspec', 'LICENSE.txt']
|
||||
s.files += Dir['lib/**/*.rb'] + Dir['examples/**/*.rb'] + Dir['bin/**/*.rb']
|
||||
s.files += ['lib/mongo.rb'] + Dir['lib/mongo/**/*.rb']
|
||||
s.files += Dir['examples/**/*.rb'] + Dir['bin/**/*.rb']
|
||||
s.test_files = Dir['test/**/*.rb']
|
||||
|
||||
s.has_rdoc = true
|
||||
s.test_files = Dir['test/**/*.rb']
|
||||
s.test_files -= Dir['test/mongo_bson/*.rb'] # remove these files from the manifest
|
||||
|
||||
s.has_rdoc = true
|
||||
s.rdoc_options = ['--main', 'README.rdoc', '--inline-source']
|
||||
|
|
|
@ -1,3 +1,4 @@
|
|||
$:.unshift(File.join(File.dirname(__FILE__), '..', 'lib'))
|
||||
require 'test/test_helper'
|
||||
require 'mongo/gridfs'
|
||||
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
require 'test/test_helper'
|
||||
require 'mongo/exceptions'
|
||||
require 'mongo/util/conversions'
|
||||
require 'mongo/util/ordered_hash'
|
||||
require 'mongo_bson/ordered_hash'
|
||||
|
||||
class ConversionsTest < Test::Unit::TestCase
|
||||
include Mongo::Conversions
|
||||
|
|
|
@ -24,26 +24,26 @@ class BSONTest < Test::Unit::TestCase
|
|||
|
||||
def test_string
|
||||
doc = {'doc' => 'hello, world'}
|
||||
bson = bson = BSON.serialize(doc)
|
||||
assert_equal doc, BSON.deserialize(bson)
|
||||
bson = bson = Mongo::BSON_CODER.serialize(doc)
|
||||
assert_equal doc, Mongo::BSON_CODER.deserialize(bson)
|
||||
end
|
||||
|
||||
def test_valid_utf8_string
|
||||
doc = {'doc' => 'aé'}
|
||||
bson = bson = BSON.serialize(doc)
|
||||
assert_equal doc, BSON.deserialize(bson)
|
||||
bson = bson = Mongo::BSON_CODER.serialize(doc)
|
||||
assert_equal doc, Mongo::BSON_CODER.deserialize(bson)
|
||||
end
|
||||
|
||||
def test_valid_utf8_key
|
||||
doc = {'aé' => 'hello'}
|
||||
bson = bson = BSON.serialize(doc)
|
||||
assert_equal doc, BSON.deserialize(bson)
|
||||
bson = bson = Mongo::BSON_CODER.serialize(doc)
|
||||
assert_equal doc, Mongo::BSON_CODER.deserialize(bson)
|
||||
end
|
||||
|
||||
def test_document_length
|
||||
doc = {'name' => 'a' * 5 * 1024 * 1024}
|
||||
assert_raise InvalidDocument do
|
||||
assert BSON.serialize(doc)
|
||||
assert Mongo::BSON_CODER.serialize(doc)
|
||||
end
|
||||
end
|
||||
|
||||
|
@ -55,7 +55,7 @@ class BSONTest < Test::Unit::TestCase
|
|||
string = Iconv.conv('iso-8859-1', 'utf-8', 'aé')
|
||||
doc = {'doc' => string}
|
||||
assert_raise InvalidStringEncoding do
|
||||
BSON.serialize(doc)
|
||||
Mongo::BSON_CODER.serialize(doc)
|
||||
end
|
||||
end
|
||||
|
||||
|
@ -63,51 +63,51 @@ class BSONTest < Test::Unit::TestCase
|
|||
key = Iconv.conv('iso-8859-1', 'utf-8', 'aé')
|
||||
doc = {key => 'hello'}
|
||||
assert_raise InvalidStringEncoding do
|
||||
BSON.serialize(doc)
|
||||
Mongo::BSON_CODER.serialize(doc)
|
||||
end
|
||||
end
|
||||
else
|
||||
def test_non_utf8_string
|
||||
bson = BSON.serialize({'str' => 'aé'.encode('iso-8859-1')})
|
||||
result = BSON.deserialize(bson)['str']
|
||||
bson = Mongo::BSON_CODER.serialize({'str' => 'aé'.encode('iso-8859-1')})
|
||||
result = Mongo::BSON_CODER.deserialize(bson)['str']
|
||||
assert_equal 'aé', result
|
||||
assert_equal 'UTF-8', result.encoding.name
|
||||
end
|
||||
|
||||
def test_non_utf8_key
|
||||
bson = BSON.serialize({'aé'.encode('iso-8859-1') => 'hello'})
|
||||
assert_equal 'hello', BSON.deserialize(bson)['aé']
|
||||
bson = Mongo::BSON_CODER.serialize({'aé'.encode('iso-8859-1') => 'hello'})
|
||||
assert_equal 'hello', Mongo::BSON_CODER.deserialize(bson)['aé']
|
||||
end
|
||||
end
|
||||
|
||||
def test_code
|
||||
doc = {'$where' => Code.new('this.a.b < this.b')}
|
||||
bson = BSON.serialize(doc)
|
||||
assert_equal doc, BSON.deserialize(bson)
|
||||
bson = Mongo::BSON_CODER.serialize(doc)
|
||||
assert_equal doc, Mongo::BSON_CODER.deserialize(bson)
|
||||
end
|
||||
|
||||
def test_number
|
||||
doc = {'doc' => 41.99}
|
||||
bson = BSON.serialize(doc)
|
||||
assert_equal doc, BSON.deserialize(bson)
|
||||
bson = Mongo::BSON_CODER.serialize(doc)
|
||||
assert_equal doc, Mongo::BSON_CODER.deserialize(bson)
|
||||
end
|
||||
|
||||
def test_int
|
||||
doc = {'doc' => 42}
|
||||
bson = BSON.serialize(doc)
|
||||
assert_equal doc, BSON.deserialize(bson)
|
||||
bson = Mongo::BSON_CODER.serialize(doc)
|
||||
assert_equal doc, Mongo::BSON_CODER.deserialize(bson)
|
||||
|
||||
doc = {"doc" => -5600}
|
||||
bson = BSON.serialize(doc)
|
||||
assert_equal doc, BSON.deserialize(bson)
|
||||
bson = Mongo::BSON_CODER.serialize(doc)
|
||||
assert_equal doc, Mongo::BSON_CODER.deserialize(bson)
|
||||
|
||||
doc = {"doc" => 2147483647}
|
||||
bson = BSON.serialize(doc)
|
||||
assert_equal doc, BSON.deserialize(bson)
|
||||
bson = Mongo::BSON_CODER.serialize(doc)
|
||||
assert_equal doc, Mongo::BSON_CODER.deserialize(bson)
|
||||
|
||||
doc = {"doc" => -2147483648}
|
||||
bson = BSON.serialize(doc)
|
||||
assert_equal doc, BSON.deserialize(bson)
|
||||
bson = Mongo::BSON_CODER.serialize(doc)
|
||||
assert_equal doc, Mongo::BSON_CODER.deserialize(bson)
|
||||
end
|
||||
|
||||
def test_ordered_hash
|
||||
|
@ -116,32 +116,32 @@ class BSONTest < Test::Unit::TestCase
|
|||
doc["a"] = 2
|
||||
doc["c"] = 3
|
||||
doc["d"] = 4
|
||||
bson = BSON.serialize(doc)
|
||||
assert_equal doc, BSON.deserialize(bson)
|
||||
bson = Mongo::BSON_CODER.serialize(doc)
|
||||
assert_equal doc, Mongo::BSON_CODER.deserialize(bson)
|
||||
end
|
||||
|
||||
def test_object
|
||||
doc = {'doc' => {'age' => 42, 'name' => 'Spongebob', 'shoe_size' => 9.5}}
|
||||
bson = BSON.serialize(doc)
|
||||
assert_equal doc, BSON.deserialize(bson)
|
||||
bson = Mongo::BSON_CODER.serialize(doc)
|
||||
assert_equal doc, Mongo::BSON_CODER.deserialize(bson)
|
||||
end
|
||||
|
||||
def test_oid
|
||||
doc = {'doc' => ObjectID.new}
|
||||
bson = BSON.serialize(doc)
|
||||
assert_equal doc, BSON.deserialize(bson)
|
||||
bson = Mongo::BSON_CODER.serialize(doc)
|
||||
assert_equal doc, Mongo::BSON_CODER.deserialize(bson)
|
||||
end
|
||||
|
||||
def test_array
|
||||
doc = {'doc' => [1, 2, 'a', 'b']}
|
||||
bson = BSON.serialize(doc)
|
||||
assert_equal doc, BSON.deserialize(bson)
|
||||
bson = Mongo::BSON_CODER.serialize(doc)
|
||||
assert_equal doc, Mongo::BSON_CODER.deserialize(bson)
|
||||
end
|
||||
|
||||
def test_regex
|
||||
doc = {'doc' => /foobar/i}
|
||||
bson = BSON.serialize(doc)
|
||||
doc2 = BSON.deserialize(bson)
|
||||
bson = Mongo::BSON_CODER.serialize(doc)
|
||||
doc2 = Mongo::BSON_CODER.deserialize(bson)
|
||||
assert_equal doc, doc2
|
||||
|
||||
r = doc2['doc']
|
||||
|
@ -151,9 +151,9 @@ class BSONTest < Test::Unit::TestCase
|
|||
assert_equal 'zywcab', r.extra_options_str
|
||||
|
||||
doc = {'doc' => r}
|
||||
bson_doc = BSON.serialize(doc)
|
||||
bson_doc = Mongo::BSON_CODER.serialize(doc)
|
||||
doc2 = nil
|
||||
doc2 = BSON.deserialize(bson_doc)
|
||||
doc2 = Mongo::BSON_CODER.deserialize(bson_doc)
|
||||
assert_equal doc, doc2
|
||||
|
||||
r = doc2['doc']
|
||||
|
@ -163,30 +163,30 @@ class BSONTest < Test::Unit::TestCase
|
|||
|
||||
def test_boolean
|
||||
doc = {'doc' => true}
|
||||
bson = BSON.serialize(doc)
|
||||
assert_equal doc, BSON.deserialize(bson)
|
||||
bson = Mongo::BSON_CODER.serialize(doc)
|
||||
assert_equal doc, Mongo::BSON_CODER.deserialize(bson)
|
||||
end
|
||||
|
||||
def test_date
|
||||
doc = {'date' => Time.now}
|
||||
bson = BSON.serialize(doc)
|
||||
doc2 = BSON.deserialize(bson)
|
||||
bson = Mongo::BSON_CODER.serialize(doc)
|
||||
doc2 = Mongo::BSON_CODER.deserialize(bson)
|
||||
# Mongo only stores up to the millisecond
|
||||
assert_in_delta doc['date'], doc2['date'], 0.001
|
||||
end
|
||||
|
||||
def test_date_returns_as_utc
|
||||
doc = {'date' => Time.now}
|
||||
bson = BSON.serialize(doc)
|
||||
doc2 = BSON.deserialize(bson)
|
||||
bson = Mongo::BSON_CODER.serialize(doc)
|
||||
doc2 = Mongo::BSON_CODER.deserialize(bson)
|
||||
assert doc2['date'].utc?
|
||||
end
|
||||
|
||||
def test_date_before_epoch
|
||||
begin
|
||||
doc = {'date' => Time.utc(1600)}
|
||||
bson = BSON.serialize(doc)
|
||||
doc2 = BSON.deserialize(bson)
|
||||
bson = Mongo::BSON_CODER.serialize(doc)
|
||||
doc2 = Mongo::BSON_CODER.deserialize(bson)
|
||||
# Mongo only stores up to the millisecond
|
||||
assert_in_delta doc['date'], doc2['date'], 0.001
|
||||
rescue ArgumentError
|
||||
|
@ -201,7 +201,7 @@ class BSONTest < Test::Unit::TestCase
|
|||
[DateTime.now, Date.today, Zone].each do |invalid_date|
|
||||
doc = {:date => invalid_date}
|
||||
begin
|
||||
bson = BSON.serialize(doc)
|
||||
bson = Mongo::BSON_CODER.serialize(doc)
|
||||
rescue => e
|
||||
ensure
|
||||
assert_equal InvalidDocument, e.class
|
||||
|
@ -214,16 +214,16 @@ class BSONTest < Test::Unit::TestCase
|
|||
oid = ObjectID.new
|
||||
doc = {}
|
||||
doc['dbref'] = DBRef.new('namespace', oid)
|
||||
bson = BSON.serialize(doc)
|
||||
doc2 = BSON.deserialize(bson)
|
||||
bson = Mongo::BSON_CODER.serialize(doc)
|
||||
doc2 = Mongo::BSON_CODER.deserialize(bson)
|
||||
assert_equal 'namespace', doc2['dbref'].namespace
|
||||
assert_equal oid, doc2['dbref'].object_id
|
||||
end
|
||||
|
||||
def test_symbol
|
||||
doc = {'sym' => :foo}
|
||||
bson = BSON.serialize(doc)
|
||||
doc2 = BSON.deserialize(bson)
|
||||
bson = Mongo::BSON_CODER.serialize(doc)
|
||||
doc2 = Mongo::BSON_CODER.deserialize(bson)
|
||||
assert_equal :foo, doc2['sym']
|
||||
end
|
||||
|
||||
|
@ -232,8 +232,8 @@ class BSONTest < Test::Unit::TestCase
|
|||
'binstring'.each_byte { |b| bin.put(b) }
|
||||
|
||||
doc = {'bin' => bin}
|
||||
bson = BSON.serialize(doc)
|
||||
doc2 = BSON.deserialize(bson)
|
||||
bson = Mongo::BSON_CODER.serialize(doc)
|
||||
doc2 = Mongo::BSON_CODER.deserialize(bson)
|
||||
bin2 = doc2['bin']
|
||||
assert_kind_of Binary, bin2
|
||||
assert_equal 'binstring', bin2.to_s
|
||||
|
@ -244,8 +244,8 @@ class BSONTest < Test::Unit::TestCase
|
|||
bin = Binary.new([1, 2, 3, 4, 5], Binary::SUBTYPE_USER_DEFINED)
|
||||
|
||||
doc = {'bin' => bin}
|
||||
bson = BSON.serialize(doc)
|
||||
doc2 = BSON.deserialize(bson)
|
||||
bson = Mongo::BSON_CODER.serialize(doc)
|
||||
doc2 = Mongo::BSON_CODER.deserialize(bson)
|
||||
bin2 = doc2['bin']
|
||||
assert_kind_of Binary, bin2
|
||||
assert_equal [1, 2, 3, 4, 5], bin2.to_a
|
||||
|
@ -257,8 +257,8 @@ class BSONTest < Test::Unit::TestCase
|
|||
5.times { |i| bb.put(i + 1) }
|
||||
|
||||
doc = {'bin' => bb}
|
||||
bson = BSON.serialize(doc)
|
||||
doc2 = BSON.deserialize(bson)
|
||||
bson = Mongo::BSON_CODER.serialize(doc)
|
||||
doc2 = Mongo::BSON_CODER.deserialize(bson)
|
||||
bin2 = doc2['bin']
|
||||
assert_kind_of Binary, bin2
|
||||
assert_equal [1, 2, 3, 4, 5], bin2.to_a
|
||||
|
@ -269,24 +269,24 @@ class BSONTest < Test::Unit::TestCase
|
|||
val = OrderedHash.new
|
||||
val['not_id'] = 1
|
||||
val['_id'] = 2
|
||||
roundtrip = BSON.deserialize(BSON.serialize(val, false, true).to_a)
|
||||
roundtrip = Mongo::BSON_CODER.deserialize(Mongo::BSON_CODER.serialize(val, false, true).to_a)
|
||||
assert_kind_of OrderedHash, roundtrip
|
||||
assert_equal '_id', roundtrip.keys.first
|
||||
|
||||
val = {'a' => 'foo', 'b' => 'bar', :_id => 42, 'z' => 'hello'}
|
||||
roundtrip = BSON.deserialize(BSON.serialize(val, false, true).to_a)
|
||||
roundtrip = Mongo::BSON_CODER.deserialize(Mongo::BSON_CODER.serialize(val, false, true).to_a)
|
||||
assert_kind_of OrderedHash, roundtrip
|
||||
assert_equal '_id', roundtrip.keys.first
|
||||
end
|
||||
|
||||
def test_nil_id
|
||||
doc = {"_id" => nil}
|
||||
assert_equal doc, BSON.deserialize(bson = BSON.serialize(doc, false, true).to_a)
|
||||
assert_equal doc, Mongo::BSON_CODER.deserialize(bson = Mongo::BSON_CODER.serialize(doc, false, true).to_a)
|
||||
end
|
||||
|
||||
def test_timestamp
|
||||
val = {"test" => [4, 20]}
|
||||
assert_equal val, BSON.deserialize([0x13, 0x00, 0x00, 0x00,
|
||||
assert_equal val, Mongo::BSON_CODER.deserialize([0x13, 0x00, 0x00, 0x00,
|
||||
0x11, 0x74, 0x65, 0x73,
|
||||
0x74, 0x00, 0x04, 0x00,
|
||||
0x00, 0x00, 0x14, 0x00,
|
||||
|
@ -296,29 +296,29 @@ class BSONTest < Test::Unit::TestCase
|
|||
def test_overflow
|
||||
doc = {"x" => 2**75}
|
||||
assert_raise RangeError do
|
||||
bson = BSON.serialize(doc)
|
||||
bson = Mongo::BSON_CODER.serialize(doc)
|
||||
end
|
||||
|
||||
doc = {"x" => 9223372036854775}
|
||||
assert_equal doc, BSON.deserialize(BSON.serialize(doc).to_a)
|
||||
assert_equal doc, Mongo::BSON_CODER.deserialize(Mongo::BSON_CODER.serialize(doc).to_a)
|
||||
|
||||
doc = {"x" => 9223372036854775807}
|
||||
assert_equal doc, BSON.deserialize(BSON.serialize(doc).to_a)
|
||||
assert_equal doc, Mongo::BSON_CODER.deserialize(Mongo::BSON_CODER.serialize(doc).to_a)
|
||||
|
||||
doc["x"] = doc["x"] + 1
|
||||
assert_raise RangeError do
|
||||
bson = BSON.serialize(doc)
|
||||
bson = Mongo::BSON_CODER.serialize(doc)
|
||||
end
|
||||
|
||||
doc = {"x" => -9223372036854775}
|
||||
assert_equal doc, BSON.deserialize(BSON.serialize(doc).to_a)
|
||||
assert_equal doc, Mongo::BSON_CODER.deserialize(Mongo::BSON_CODER.serialize(doc).to_a)
|
||||
|
||||
doc = {"x" => -9223372036854775808}
|
||||
assert_equal doc, BSON.deserialize(BSON.serialize(doc).to_a)
|
||||
assert_equal doc, Mongo::BSON_CODER.deserialize(Mongo::BSON_CODER.serialize(doc).to_a)
|
||||
|
||||
doc["x"] = doc["x"] - 1
|
||||
assert_raise RangeError do
|
||||
bson = BSON.serialize(doc)
|
||||
bson = Mongo::BSON_CODER.serialize(doc)
|
||||
end
|
||||
end
|
||||
|
||||
|
@ -326,7 +326,7 @@ class BSONTest < Test::Unit::TestCase
|
|||
[BigDecimal.new("1.0"), Complex(0, 1), Rational(2, 3)].each do |type|
|
||||
doc = {"x" => type}
|
||||
begin
|
||||
BSON.serialize(doc)
|
||||
Mongo::BSON_CODER.serialize(doc)
|
||||
rescue => e
|
||||
ensure
|
||||
assert_equal InvalidDocument, e.class
|
||||
|
@ -340,12 +340,12 @@ class BSONTest < Test::Unit::TestCase
|
|||
val['not_id'] = 1
|
||||
val['_id'] = 2
|
||||
assert val.keys.include?('_id')
|
||||
BSON.serialize(val)
|
||||
Mongo::BSON_CODER.serialize(val)
|
||||
assert val.keys.include?('_id')
|
||||
|
||||
val = {'a' => 'foo', 'b' => 'bar', :_id => 42, 'z' => 'hello'}
|
||||
assert val.keys.include?(:_id)
|
||||
BSON.serialize(val)
|
||||
Mongo::BSON_CODER.serialize(val)
|
||||
assert val.keys.include?(:_id)
|
||||
end
|
||||
|
||||
|
@ -360,50 +360,50 @@ class BSONTest < Test::Unit::TestCase
|
|||
dup = {"_id" => "foo", :_id => "foo"}
|
||||
one = {"_id" => "foo"}
|
||||
|
||||
assert_equal BSON.serialize(one).to_a, BSON.serialize(dup).to_a
|
||||
assert_equal Mongo::BSON_CODER.serialize(one).to_a, Mongo::BSON_CODER.serialize(dup).to_a
|
||||
end
|
||||
|
||||
def test_no_duplicate_id_when_moving_id
|
||||
dup = {"_id" => "foo", :_id => "foo"}
|
||||
one = {:_id => "foo"}
|
||||
|
||||
assert_equal BSON.serialize(one, false, true).to_s, BSON.serialize(dup, false, true).to_s
|
||||
assert_equal Mongo::BSON_CODER.serialize(one, false, true).to_s, Mongo::BSON_CODER.serialize(dup, false, true).to_s
|
||||
end
|
||||
|
||||
def test_null_character
|
||||
doc = {"a" => "\x00"}
|
||||
|
||||
assert_equal doc, BSON.deserialize(BSON.serialize(doc).to_a)
|
||||
assert_equal doc, Mongo::BSON_CODER.deserialize(Mongo::BSON_CODER.serialize(doc).to_a)
|
||||
|
||||
assert_raise InvalidDocument do
|
||||
BSON.serialize({"\x00" => "a"})
|
||||
Mongo::BSON_CODER.serialize({"\x00" => "a"})
|
||||
end
|
||||
|
||||
assert_raise InvalidDocument do
|
||||
BSON.serialize({"a" => (Regexp.compile "ab\x00c")})
|
||||
Mongo::BSON_CODER.serialize({"a" => (Regexp.compile "ab\x00c")})
|
||||
end
|
||||
end
|
||||
|
||||
def test_max_key
|
||||
doc = {"a" => MaxKey.new}
|
||||
|
||||
assert_equal doc, BSON.deserialize(BSON.serialize(doc).to_a)
|
||||
assert_equal doc, Mongo::BSON_CODER.deserialize(Mongo::BSON_CODER.serialize(doc).to_a)
|
||||
end
|
||||
|
||||
def test_min_key
|
||||
doc = {"a" => MinKey.new}
|
||||
|
||||
assert_equal doc, BSON.deserialize(BSON.serialize(doc).to_a)
|
||||
assert_equal doc, Mongo::BSON_CODER.deserialize(Mongo::BSON_CODER.serialize(doc).to_a)
|
||||
end
|
||||
|
||||
def test_invalid_object
|
||||
o = Object.new
|
||||
assert_raise InvalidDocument do
|
||||
BSON.serialize({:foo => o})
|
||||
Mongo::BSON_CODER.serialize({:foo => o})
|
||||
end
|
||||
|
||||
assert_raise InvalidDocument do
|
||||
BSON.serialize({:foo => Date.today})
|
||||
Mongo::BSON_CODER.serialize({:foo => Date.today})
|
||||
end
|
||||
end
|
||||
|
||||
|
@ -416,10 +416,10 @@ class BSONTest < Test::Unit::TestCase
|
|||
|
||||
assert_equal ")\000\000\000\020_id\000\001\000\000\000\002text" +
|
||||
"\000\004\000\000\000abc\000\002key\000\004\000\000\000abc\000\000",
|
||||
BSON.serialize(a, false, true).to_s
|
||||
Mongo::BSON_CODER.serialize(a, false, true).to_s
|
||||
assert_equal ")\000\000\000\002text\000\004\000\000\000abc\000\002key" +
|
||||
"\000\004\000\000\000abc\000\020_id\000\001\000\000\000\000",
|
||||
BSON.serialize(a, false, false).to_s
|
||||
Mongo::BSON_CODER.serialize(a, false, false).to_s
|
||||
end
|
||||
|
||||
def test_move_id_with_nested_doc
|
||||
|
@ -433,11 +433,11 @@ class BSONTest < Test::Unit::TestCase
|
|||
assert_equal ">\000\000\000\020_id\000\003\000\000\000\002text" +
|
||||
"\000\004\000\000\000abc\000\003hash\000\034\000\000" +
|
||||
"\000\002text\000\004\000\000\000abc\000\020_id\000\002\000\000\000\000\000",
|
||||
BSON.serialize(c, false, true).to_s
|
||||
Mongo::BSON_CODER.serialize(c, false, true).to_s
|
||||
assert_equal ">\000\000\000\002text\000\004\000\000\000abc\000\003hash" +
|
||||
"\000\034\000\000\000\002text\000\004\000\000\000abc\000\020_id" +
|
||||
"\000\002\000\000\000\000\020_id\000\003\000\000\000\000",
|
||||
BSON.serialize(c, false, false).to_s
|
||||
Mongo::BSON_CODER.serialize(c, false, false).to_s
|
||||
end
|
||||
|
||||
if defined?(HashWithIndifferentAccess)
|
||||
|
@ -447,12 +447,12 @@ class BSONTest < Test::Unit::TestCase
|
|||
embedded['_id'] = ObjectID.new
|
||||
doc['_id'] = ObjectID.new
|
||||
doc['embedded'] = [embedded]
|
||||
BSON.serialize(doc, false, true).to_a
|
||||
Mongo::BSON_CODER.serialize(doc, false, true).to_a
|
||||
assert doc.has_key?("_id")
|
||||
assert doc['embedded'][0].has_key?("_id")
|
||||
|
||||
doc['_id'] = ObjectID.new
|
||||
BSON.serialize(doc, false, true).to_a
|
||||
Mongo::BSON_CODER.serialize(doc, false, true).to_a
|
||||
assert doc.has_key?("_id")
|
||||
end
|
||||
end
|
|
@ -7,8 +7,8 @@ class DBTest < Test::Unit::TestCase
|
|||
documents = [documents] unless documents.is_a?(Array)
|
||||
message = ByteBuffer.new
|
||||
message.put_int(0)
|
||||
BSON.serialize_cstr(message, "#{db.name}.test")
|
||||
documents.each { |doc| message.put_array(BSON.new.serialize(doc, true).to_a) }
|
||||
Mongo::BSON_CODER..serialize_cstr(message, "#{db.name}.test")
|
||||
documents.each { |doc| message.put_array(Mongo::BSON_CODER.new.serialize(doc, true).to_a) }
|
||||
message = db.add_message_headers(Mongo::Constants::OP_INSERT, message)
|
||||
end
|
||||
end
|
||||
|
|
Loading…
Reference in New Issue