renamed mongo_bson to bson
This commit is contained in:
parent
40b481ad3c
commit
c4d5cb641b
@ -280,7 +280,7 @@ static int write_element(VALUE key, VALUE value, VALUE extra, int allow_id) {
|
||||
case T_STRING:
|
||||
{
|
||||
if (strcmp(rb_obj_classname(value),
|
||||
"Mongo::Code") == 0) {
|
||||
"BSON::Code") == 0) {
|
||||
buffer_position length_location, start_position, total_length;
|
||||
int length;
|
||||
write_name_and_type(buffer, key, 0x0F);
|
||||
@ -324,7 +324,7 @@ static int write_element(VALUE key, VALUE value, VALUE extra, int allow_id) {
|
||||
{
|
||||
// TODO there has to be a better way to do these checks...
|
||||
const char* cls = rb_obj_classname(value);
|
||||
if (strcmp(cls, "Mongo::Binary") == 0 ||
|
||||
if (strcmp(cls, "BSON::Binary") == 0 ||
|
||||
strcmp(cls, "ByteBuffer") == 0) {
|
||||
const char subtype = strcmp(cls, "ByteBuffer") ?
|
||||
(const char)FIX2INT(rb_funcall(value, rb_intern("subtype"), 0)) : 2;
|
||||
@ -343,7 +343,7 @@ static int write_element(VALUE key, VALUE value, VALUE extra, int allow_id) {
|
||||
SAFE_WRITE(buffer, RSTRING_PTR(string_data), length);
|
||||
break;
|
||||
}
|
||||
if (strcmp(cls, "Mongo::ObjectID") == 0) {
|
||||
if (strcmp(cls, "BSON::ObjectID") == 0) {
|
||||
VALUE as_array = rb_funcall(value, rb_intern("to_a"), 0);
|
||||
int i;
|
||||
write_name_and_type(buffer, key, 0x07);
|
||||
@ -353,7 +353,7 @@ static int write_element(VALUE key, VALUE value, VALUE extra, int allow_id) {
|
||||
}
|
||||
break;
|
||||
}
|
||||
if (strcmp(cls, "Mongo::DBRef") == 0) {
|
||||
if (strcmp(cls, "BSON::DBRef") == 0) {
|
||||
buffer_position length_location, start_position, obj_length;
|
||||
VALUE ns, oid;
|
||||
write_name_and_type(buffer, key, 0x03);
|
||||
@ -377,11 +377,11 @@ static int write_element(VALUE key, VALUE value, VALUE extra, int allow_id) {
|
||||
SAFE_WRITE_AT_POS(buffer, length_location, (const char*)&obj_length, 4);
|
||||
break;
|
||||
}
|
||||
if (strcmp(cls, "Mongo::MaxKey") == 0) {
|
||||
if (strcmp(cls, "BSON::MaxKey") == 0) {
|
||||
write_name_and_type(buffer, key, 0x7f);
|
||||
break;
|
||||
}
|
||||
if (strcmp(cls, "Mongo::MinKey") == 0) {
|
||||
if (strcmp(cls, "BSON::MinKey") == 0) {
|
||||
write_name_and_type(buffer, key, 0xff);
|
||||
break;
|
||||
}
|
||||
@ -883,29 +883,29 @@ static VALUE objectid_generate(VALUE self)
|
||||
|
||||
|
||||
void Init_cbson() {
|
||||
VALUE mongo, CBson, Digest, ext_version;
|
||||
VALUE bson, CBson, Digest, ext_version;
|
||||
Time = rb_const_get(rb_cObject, rb_intern("Time"));
|
||||
|
||||
mongo = rb_const_get(rb_cObject, rb_intern("Mongo"));
|
||||
rb_require("mongo_bson/types/binary");
|
||||
Binary = rb_const_get(mongo, rb_intern("Binary"));
|
||||
rb_require("mongo_bson/types/objectid");
|
||||
ObjectID = rb_const_get(mongo, rb_intern("ObjectID"));
|
||||
rb_require("mongo_bson/types/dbref");
|
||||
DBRef = rb_const_get(mongo, rb_intern("DBRef"));
|
||||
rb_require("mongo_bson/types/code");
|
||||
Code = rb_const_get(mongo, rb_intern("Code"));
|
||||
rb_require("mongo_bson/types/min_max_keys");
|
||||
MinKey = rb_const_get(mongo, rb_intern("MinKey"));
|
||||
MaxKey = rb_const_get(mongo, rb_intern("MaxKey"));
|
||||
rb_require("mongo_bson/types/regexp_of_holding");
|
||||
bson = rb_const_get(rb_cObject, rb_intern("BSON"));
|
||||
rb_require("bson/types/binary");
|
||||
Binary = rb_const_get(bson, rb_intern("Binary"));
|
||||
rb_require("bson/types/objectid");
|
||||
ObjectID = rb_const_get(bson, rb_intern("ObjectID"));
|
||||
rb_require("bson/types/dbref");
|
||||
DBRef = rb_const_get(bson, rb_intern("DBRef"));
|
||||
rb_require("bson/types/code");
|
||||
Code = rb_const_get(bson, rb_intern("Code"));
|
||||
rb_require("bson/types/min_max_keys");
|
||||
MinKey = rb_const_get(bson, rb_intern("MinKey"));
|
||||
MaxKey = rb_const_get(bson, rb_intern("MaxKey"));
|
||||
rb_require("bson/types/regexp_of_holding");
|
||||
Regexp = rb_const_get(rb_cObject, rb_intern("Regexp"));
|
||||
RegexpOfHolding = rb_const_get(mongo, rb_intern("RegexpOfHolding"));
|
||||
rb_require("mongo_bson/exceptions");
|
||||
InvalidName = rb_const_get(mongo, rb_intern("InvalidName"));
|
||||
InvalidStringEncoding = rb_const_get(mongo, rb_intern("InvalidStringEncoding"));
|
||||
InvalidDocument = rb_const_get(mongo, rb_intern("InvalidDocument"));
|
||||
rb_require("mongo_bson/ordered_hash");
|
||||
RegexpOfHolding = rb_const_get(bson, rb_intern("RegexpOfHolding"));
|
||||
rb_require("bson/exceptions");
|
||||
InvalidName = rb_const_get(bson, rb_intern("InvalidName"));
|
||||
InvalidStringEncoding = rb_const_get(bson, rb_intern("InvalidStringEncoding"));
|
||||
InvalidDocument = rb_const_get(bson, rb_intern("InvalidDocument"));
|
||||
rb_require("bson/ordered_hash");
|
||||
OrderedHash = rb_const_get(rb_cObject, rb_intern("OrderedHash"));
|
||||
|
||||
CBson = rb_define_module("CBson");
|
||||
|
@ -7,4 +7,4 @@ have_header("ruby/regex.h") || have_header("regex.h")
|
||||
have_header("ruby/encoding.h")
|
||||
|
||||
dir_config('cbson')
|
||||
create_makefile('mongo_ext/cbson')
|
||||
create_makefile('bson_ext/cbson')
|
||||
|
@ -4,9 +4,6 @@ module Mongo
|
||||
VERSION = "0.19.2"
|
||||
end
|
||||
|
||||
require 'mongo_bson'
|
||||
|
||||
|
||||
module Mongo
|
||||
ASCENDING = 1
|
||||
DESCENDING = -1
|
||||
@ -28,6 +25,8 @@ module Mongo
|
||||
|
||||
end
|
||||
|
||||
require 'bson'
|
||||
|
||||
require 'mongo/util/support'
|
||||
require 'mongo/util/core_ext'
|
||||
require 'mongo/util/conversions'
|
||||
|
@ -45,18 +45,18 @@ module Mongo
|
||||
name = name.to_s
|
||||
|
||||
if name.empty? or name.include? ".."
|
||||
raise InvalidName, "collection names cannot be empty"
|
||||
raise Mongo::InvalidName, "collection names cannot be empty"
|
||||
end
|
||||
if name.include? "$"
|
||||
raise InvalidName, "collection names must not contain '$'" unless name =~ /((^\$cmd)|(oplog\.\$main))/
|
||||
raise Mongo::InvalidName, "collection names must not contain '$'" unless name =~ /((^\$cmd)|(oplog\.\$main))/
|
||||
end
|
||||
if name.match(/^\./) or name.match(/\.$/)
|
||||
raise InvalidName, "collection names must not start or end with '.'"
|
||||
raise Mongo::InvalidName, "collection names must not start or end with '.'"
|
||||
end
|
||||
|
||||
@db, @name = db, name
|
||||
@connection = @db.connection
|
||||
@pk_factory = pk_factory || ObjectID
|
||||
@pk_factory = pk_factory || BSON::ObjectID
|
||||
@hint = nil
|
||||
end
|
||||
|
||||
@ -181,7 +181,7 @@ module Mongo
|
||||
spec = case spec_or_object_id
|
||||
when nil
|
||||
{}
|
||||
when ObjectID
|
||||
when BSON::ObjectID
|
||||
{:_id => spec_or_object_id}
|
||||
when Hash
|
||||
spec_or_object_id
|
||||
@ -260,10 +260,10 @@ module Mongo
|
||||
# @core remove remove-instance_method
|
||||
def remove(selector={}, opts={})
|
||||
# Initial byte is 0.
|
||||
message = ByteBuffer.new([0, 0, 0, 0])
|
||||
BSON_RUBY.serialize_cstr(message, "#{@db.name}.#{@name}")
|
||||
message = BSON::ByteBuffer.new([0, 0, 0, 0])
|
||||
BSON::BSON_RUBY.serialize_cstr(message, "#{@db.name}.#{@name}")
|
||||
message.put_int(0)
|
||||
message.put_array(BSON_CODER.serialize(selector, false, true).to_a)
|
||||
message.put_array(BSON::BSON_CODER.serialize(selector, false, true).to_a)
|
||||
|
||||
if opts[:safe]
|
||||
@connection.send_message_with_safe_check(Mongo::Constants::OP_DELETE, message, @db.name,
|
||||
@ -299,14 +299,14 @@ module Mongo
|
||||
# @core update update-instance_method
|
||||
def update(selector, document, options={})
|
||||
# Initial byte is 0.
|
||||
message = ByteBuffer.new([0, 0, 0, 0])
|
||||
BSON_RUBY.serialize_cstr(message, "#{@db.name}.#{@name}")
|
||||
message = BSON::ByteBuffer.new([0, 0, 0, 0])
|
||||
BSON::BSON_RUBY.serialize_cstr(message, "#{@db.name}.#{@name}")
|
||||
update_options = 0
|
||||
update_options += 1 if options[:upsert]
|
||||
update_options += 2 if options[:multi]
|
||||
message.put_int(update_options)
|
||||
message.put_array(BSON_CODER.serialize(selector, false, true).to_a)
|
||||
message.put_array(BSON_CODER.serialize(document, false, true).to_a)
|
||||
message.put_array(BSON::BSON_CODER.serialize(selector, false, true).to_a)
|
||||
message.put_array(BSON::BSON_CODER.serialize(document, false, true).to_a)
|
||||
if options[:safe]
|
||||
@connection.send_message_with_safe_check(Mongo::Constants::OP_UPDATE, message, @db.name,
|
||||
"#{@db.name}['#{@name}'].update(#{selector.inspect}, #{document.inspect})")
|
||||
@ -415,15 +415,15 @@ module Mongo
|
||||
|
||||
# Perform a map/reduce operation on the current collection.
|
||||
#
|
||||
# @param [String, Code] map a map function, written in JavaScript.
|
||||
# @param [String, Code] reduce a reduce function, written in JavaScript.
|
||||
# @param [String, BSON::Code] map a map function, written in JavaScript.
|
||||
# @param [String, BSON::Code] reduce a reduce function, written in JavaScript.
|
||||
#
|
||||
# @option opts [Hash] :query ({}) a query selector document, like what's passed to #find, to limit
|
||||
# the operation to a subset of the collection.
|
||||
# @option opts [Array] :sort ([]) an array of [key, direction] pairs to sort by. Direction should
|
||||
# be specified as Mongo::ASCENDING (or :ascending / :asc) or Mongo::DESCENDING (or :descending / :desc)
|
||||
# @option opts [Integer] :limit (nil) if passing a query, number of objects to return from the collection.
|
||||
# @option opts [String, Code] :finalize (nil) a javascript function to apply to the result set after the
|
||||
# @option opts [String, BSON::Code] :finalize (nil) a javascript function to apply to the result set after the
|
||||
# map/reduce operation has finished.
|
||||
# @option opts [String] :out (nil) the name of the output collection. If specified, the collection will not be treated as temporary.
|
||||
# @option opts [Boolean] :keeptemp (false) if true, the generated collection will be persisted. default is false.
|
||||
@ -435,8 +435,8 @@ module Mongo
|
||||
#
|
||||
# @core mapreduce map_reduce-instance_method
|
||||
def map_reduce(map, reduce, opts={})
|
||||
map = Code.new(map) unless map.is_a?(Code)
|
||||
reduce = Code.new(reduce) unless reduce.is_a?(Code)
|
||||
map = BSON::Code.new(map) unless map.is_a?(BSON::Code)
|
||||
reduce = BSON::Code.new(reduce) unless reduce.is_a?(BSON::Code)
|
||||
|
||||
hash = OrderedHash.new
|
||||
hash['mapreduce'] = self.name
|
||||
@ -454,12 +454,12 @@ module Mongo
|
||||
|
||||
# Perform a group aggregation.
|
||||
#
|
||||
# @param [Array, String, Code, Nil] :key either 1) an array of fields to group by,
|
||||
# @param [Array, String, BSON::Code, Nil] :key either 1) an array of fields to group by,
|
||||
# 2) a javascript function to generate the key object, or 3) nil.
|
||||
# @param [Hash] condition an optional document specifying a query to limit the documents over which group is run.
|
||||
# @param [Hash] initial initial value of the aggregation counter object
|
||||
# @param [String, Code] reduce aggregation function, in JavaScript
|
||||
# @param [String, Code] finalize :: optional. a JavaScript function that receives and modifies
|
||||
# @param [String, BSON::Code] reduce aggregation function, in JavaScript
|
||||
# @param [String, BSON::Code] finalize :: optional. a JavaScript function that receives and modifies
|
||||
# each of the resultant grouped objects. Available only when group is run
|
||||
# with command set to true.
|
||||
# @param [Nil] deprecated this param in a placeholder for a deprecated param. It will be removed
|
||||
@ -475,7 +475,7 @@ module Mongo
|
||||
"See http://api.mongodb.org/ruby/current/Mongo/Collection.html#group-instance_method for details."
|
||||
end
|
||||
|
||||
reduce = Code.new(reduce) unless reduce.is_a?(Code)
|
||||
reduce = BSON::Code.new(reduce) unless reduce.is_a?(BSON::Code)
|
||||
|
||||
group_command = {
|
||||
"group" => {
|
||||
@ -493,7 +493,7 @@ module Mongo
|
||||
key.each { |k| key_value[k] = 1 }
|
||||
else
|
||||
key_type = "$keyf"
|
||||
key_value = key.is_a?(Code) ? key : Code.new(key)
|
||||
key_value = key.is_a?(BSON::Code) ? key : BSON::Code.new(key)
|
||||
end
|
||||
|
||||
group_command["group"][key_type] = key_value
|
||||
@ -501,9 +501,9 @@ module Mongo
|
||||
|
||||
# only add finalize if specified
|
||||
# check to see if users have sent the finalizer as the last argument.
|
||||
finalize = deprecated if deprecated.is_a?(String) || deprecated.is_a?(Code)
|
||||
finalize = Code.new(finalize) if finalize.is_a?(String)
|
||||
if finalize.is_a?(Code)
|
||||
finalize = deprecated if deprecated.is_a?(String) || deprecated.is_a?(BSON::Code)
|
||||
finalize = BSON::Code.new(finalize) if finalize.is_a?(String)
|
||||
if finalize.is_a?(BSON::Code)
|
||||
group_command['group']['finalize'] = finalize
|
||||
end
|
||||
|
||||
@ -631,9 +631,9 @@ module Mongo
|
||||
# +check_keys+ setting.
|
||||
def insert_documents(documents, collection_name=@name, check_keys=true, safe=false)
|
||||
# Initial byte is 0.
|
||||
message = ByteBuffer.new([0, 0, 0, 0])
|
||||
BSON_RUBY.serialize_cstr(message, "#{@db.name}.#{collection_name}")
|
||||
documents.each { |doc| message.put_array(BSON_CODER.serialize(doc, check_keys, true).to_a) }
|
||||
message = BSON::ByteBuffer.new([0, 0, 0, 0])
|
||||
BSON::BSON_RUBY.serialize_cstr(message, "#{@db.name}.#{collection_name}")
|
||||
documents.each { |doc| message.put_array(BSON::BSON_CODER.serialize(doc, check_keys, true).to_a) }
|
||||
if safe
|
||||
@connection.send_message_with_safe_check(Mongo::Constants::OP_INSERT, message, @db.name,
|
||||
"#{@db.name}['#{collection_name}'].insert(#{documents.inspect})")
|
||||
|
@ -344,7 +344,7 @@ module Mongo
|
||||
# Send a message to MongoDB, adding the necessary headers.
|
||||
#
|
||||
# @param [Integer] operation a MongoDB opcode.
|
||||
# @param [ByteBuffer] message a message to send to the database.
|
||||
# @param [BSON::ByteBuffer] message a message to send to the database.
|
||||
# @param [String] log_message text version of +message+ for logging.
|
||||
#
|
||||
# @return [True]
|
||||
@ -363,7 +363,7 @@ module Mongo
|
||||
# an exception if the operation has failed.
|
||||
#
|
||||
# @param [Integer] operation a MongoDB opcode.
|
||||
# @param [ByteBuffer] message a message to send to the database.
|
||||
# @param [BSON::ByteBuffer] message a message to send to the database.
|
||||
# @param [String] db_name the name of the database. used on call to get_last_error.
|
||||
# @param [String] log_message text version of +message+ for logging.
|
||||
#
|
||||
@ -394,7 +394,7 @@ module Mongo
|
||||
# Sends a message to the database and waits for the response.
|
||||
#
|
||||
# @param [Integer] operation a MongoDB opcode.
|
||||
# @param [ByteBuffer] message a message to send to the database.
|
||||
# @param [BSON::ByteBuffer] message a message to send to the database.
|
||||
# @param [String] log_message text version of +message+ for logging.
|
||||
# @param [Socket] socket a socket to use in lieu of checking out a new one.
|
||||
#
|
||||
@ -625,7 +625,7 @@ module Mongo
|
||||
end
|
||||
|
||||
def receive_header(sock)
|
||||
header = ByteBuffer.new
|
||||
header = BSON::ByteBuffer.new
|
||||
header.put_array(receive_message_on_socket(16, sock).unpack("C*"))
|
||||
unless header.size == STANDARD_HEADER_SIZE
|
||||
raise "Short read for DB response header: " +
|
||||
@ -639,7 +639,7 @@ module Mongo
|
||||
end
|
||||
|
||||
def receive_response_header(sock)
|
||||
header_buf = ByteBuffer.new
|
||||
header_buf = BSON::ByteBuffer.new
|
||||
header_buf.put_array(receive_message_on_socket(RESPONSE_HEADER_SIZE, sock).unpack("C*"))
|
||||
if header_buf.length != RESPONSE_HEADER_SIZE
|
||||
raise "Short read for DB response header; " +
|
||||
@ -657,32 +657,32 @@ module Mongo
|
||||
docs = []
|
||||
number_remaining = number_received
|
||||
while number_remaining > 0 do
|
||||
buf = ByteBuffer.new
|
||||
buf = BSON::ByteBuffer.new
|
||||
buf.put_array(receive_message_on_socket(4, sock).unpack("C*"))
|
||||
buf.rewind
|
||||
size = buf.get_int
|
||||
buf.put_array(receive_message_on_socket(size - 4, sock).unpack("C*"), 4)
|
||||
number_remaining -= 1
|
||||
buf.rewind
|
||||
docs << BSON_CODER.deserialize(buf)
|
||||
docs << BSON::BSON_CODER.deserialize(buf)
|
||||
end
|
||||
[docs, number_received, cursor_id]
|
||||
end
|
||||
|
||||
def last_error_message(db_name)
|
||||
message = ByteBuffer.new
|
||||
message = BSON::ByteBuffer.new
|
||||
message.put_int(0)
|
||||
BSON_RUBY.serialize_cstr(message, "#{db_name}.$cmd")
|
||||
BSON::BSON_RUBY.serialize_cstr(message, "#{db_name}.$cmd")
|
||||
message.put_int(0)
|
||||
message.put_int(-1)
|
||||
message.put_array(BSON_CODER.serialize({:getlasterror => 1}, false).unpack("C*"))
|
||||
message.put_array(BSON::BSON_CODER.serialize({:getlasterror => 1}, false).unpack("C*"))
|
||||
add_message_headers(Mongo::Constants::OP_QUERY, message)
|
||||
end
|
||||
|
||||
# Prepares a message for transmission to MongoDB by
|
||||
# constructing a valid message header.
|
||||
def add_message_headers(operation, message)
|
||||
headers = ByteBuffer.new
|
||||
headers = BSON::ByteBuffer.new
|
||||
|
||||
# Message size.
|
||||
headers.put_int(16 + message.size)
|
||||
|
@ -227,7 +227,7 @@ module Mongo
|
||||
# @return [True]
|
||||
def close
|
||||
if @cursor_id
|
||||
message = ByteBuffer.new([0, 0, 0, 0])
|
||||
message = BSON::ByteBuffer.new([0, 0, 0, 0])
|
||||
message.put_int(1)
|
||||
message.put_long(@cursor_id)
|
||||
@connection.send_message(Mongo::Constants::OP_KILL_CURSORS, message, "cursor.close")
|
||||
@ -311,11 +311,11 @@ module Mongo
|
||||
|
||||
def refill_via_get_more
|
||||
return if send_initial_query || @cursor_id.zero?
|
||||
message = ByteBuffer.new([0, 0, 0, 0])
|
||||
message = BSON::ByteBuffer.new([0, 0, 0, 0])
|
||||
|
||||
# DB name.
|
||||
db_name = @admin ? 'admin' : @db.name
|
||||
BSON_RUBY.serialize_cstr(message, "#{db_name}.#{@collection.name}")
|
||||
BSON::BSON_RUBY.serialize_cstr(message, "#{db_name}.#{@collection.name}")
|
||||
|
||||
# Number of results to return; db decides for now.
|
||||
message.put_int(0)
|
||||
@ -343,15 +343,15 @@ module Mongo
|
||||
end
|
||||
|
||||
def construct_query_message
|
||||
message = ByteBuffer.new
|
||||
message = BSON::ByteBuffer.new
|
||||
message.put_int(query_opts)
|
||||
db_name = @admin ? 'admin' : @db.name
|
||||
BSON_RUBY.serialize_cstr(message, "#{db_name}.#{@collection.name}")
|
||||
BSON::BSON_RUBY.serialize_cstr(message, "#{db_name}.#{@collection.name}")
|
||||
message.put_int(@skip)
|
||||
message.put_int(@limit)
|
||||
spec = query_contains_special_fields? ? construct_query_spec : @selector
|
||||
message.put_array(BSON_CODER.serialize(spec, false).to_a)
|
||||
message.put_array(BSON_CODER.serialize(@fields, false).to_a) if @fields
|
||||
message.put_array(BSON::BSON_CODER.serialize(spec, false).to_a)
|
||||
message.put_array(BSON::BSON_CODER.serialize(@fields, false).to_a) if @fields
|
||||
message
|
||||
end
|
||||
|
||||
|
@ -14,9 +14,9 @@
|
||||
# limitations under the License.
|
||||
# ++
|
||||
|
||||
require 'mongo_bson/types/objectid'
|
||||
require 'mongo_bson/byte_buffer'
|
||||
require 'mongo_bson/ordered_hash'
|
||||
require 'bson/types/objectid'
|
||||
require 'bson/byte_buffer'
|
||||
require 'bson/ordered_hash'
|
||||
|
||||
module GridFS
|
||||
|
||||
|
@ -163,7 +163,7 @@ module Mongo
|
||||
# This method will be invoked automatically when
|
||||
# on GridIO#open is passed a block. Otherwise, it must be called manually.
|
||||
#
|
||||
# @return [Mongo::ObjectID]
|
||||
# @return [BSON::ObjectID]
|
||||
def close
|
||||
if @mode[0] == ?w
|
||||
if @current_chunk['n'].zero? && @chunk_position.zero?
|
||||
@ -183,7 +183,7 @@ module Mongo
|
||||
|
||||
def create_chunk(n)
|
||||
chunk = OrderedHash.new
|
||||
chunk['_id'] = Mongo::ObjectID.new
|
||||
chunk['_id'] = BSON::ObjectID.new
|
||||
chunk['n'] = n
|
||||
chunk['files_id'] = @files_id
|
||||
chunk['data'] = ''
|
||||
@ -261,7 +261,7 @@ module Mongo
|
||||
end
|
||||
chunk_available = @chunk_size - @chunk_position
|
||||
step_size = (to_write > chunk_available) ? chunk_available : to_write
|
||||
@current_chunk['data'] = Binary.new((@current_chunk['data'].to_s << string[-to_write, step_size]).unpack("c*"))
|
||||
@current_chunk['data'] = BSON::Binary.new((@current_chunk['data'].to_s << string[-to_write, step_size]).unpack("c*"))
|
||||
@chunk_position += step_size
|
||||
to_write -= step_size
|
||||
save_chunk(@current_chunk)
|
||||
@ -290,7 +290,7 @@ module Mongo
|
||||
|
||||
# Initialize the class for writing a file.
|
||||
def init_write(opts)
|
||||
@files_id = opts[:_id] || Mongo::ObjectID.new
|
||||
@files_id = opts[:_id] || BSON::ObjectID.new
|
||||
@content_type = opts[:content_type] || (defined? MIME) && get_content_type || DEFAULT_CONTENT_TYPE
|
||||
@chunk_size = opts[:chunk_size] || DEFAULT_CHUNK_SIZE
|
||||
@metadata = opts[:metadata] if opts[:metadata]
|
||||
|
@ -14,8 +14,8 @@
|
||||
# limitations under the License.
|
||||
# ++
|
||||
|
||||
require 'mongo_bson/types/objectid'
|
||||
require 'mongo_bson/ordered_hash'
|
||||
require 'bson/types/objectid'
|
||||
require 'bson/ordered_hash'
|
||||
require 'mongo/gridfs/chunk'
|
||||
|
||||
module GridFS
|
||||
|
@ -1,65 +0,0 @@
|
||||
$:.unshift(File.join(File.dirname(__FILE__), '..', 'lib'))
|
||||
|
||||
module Mongo
|
||||
module BSON
|
||||
VERSION = "0.19.2"
|
||||
def self.serialize(obj, check_keys=false, move_id=false)
|
||||
warn "BSON has been deprecated. Use Mongo::BSON_CODER instead."
|
||||
BSON_CODER.serialize(obj, check_keys, move_id)
|
||||
end
|
||||
|
||||
|
||||
def self.deserialize(buf=nil)
|
||||
warn "BSON has been deprecated. Use Mongo::BSON_CODER instead."
|
||||
BSON_CODER.deserialize(buf)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
# This just exists for deprecation warnings. Will be removed in an upcoming version.
|
||||
module BSON
|
||||
def self.serialize(obj, check_keys=false, move_id=false)
|
||||
warn "BSON has been deprecated. Use Mongo::BSON_CODER instead."
|
||||
BSON_CODER.serialize(obj, check_keys, move_id)
|
||||
end
|
||||
|
||||
|
||||
def self.deserialize(buf=nil)
|
||||
warn "BSON has been deprecated. Use Mongo::BSON_CODER instead."
|
||||
BSON_CODER.deserialize(buf)
|
||||
end
|
||||
end
|
||||
|
||||
|
||||
begin
|
||||
# Need this for running test with and without c ext in Ruby 1.9.
|
||||
raise LoadError if ENV['TEST_MODE'] && !ENV['C_EXT']
|
||||
require 'mongo_ext/cbson'
|
||||
raise LoadError unless defined?(CBson::VERSION) && CBson::VERSION == Mongo::BSON::VERSION
|
||||
require 'mongo_bson/bson_c'
|
||||
module Mongo
|
||||
BSON_CODER = BSON_C
|
||||
end
|
||||
rescue LoadError
|
||||
require 'mongo_bson/bson_ruby'
|
||||
module Mongo
|
||||
BSON_CODER = BSON_RUBY
|
||||
end
|
||||
warn "\n**Notice: C extension not loaded. This is required for optimum MongoDB Ruby driver performance."
|
||||
warn " You can install the extension as follows:\n gem install mongo_ext\n"
|
||||
warn " If you continue to receive this message after installing, make sure that the"
|
||||
warn " mongo_ext gem is in your load path and that the mongo_ext and mongo gems are of the same version.\n"
|
||||
end
|
||||
|
||||
require 'mongo_bson/types/binary'
|
||||
require 'mongo_bson/types/code'
|
||||
require 'mongo_bson/types/dbref'
|
||||
require 'mongo_bson/types/objectid'
|
||||
require 'mongo_bson/types/regexp_of_holding'
|
||||
require 'mongo_bson/types/min_max_keys'
|
||||
|
||||
require 'base64'
|
||||
require 'mongo_bson/ordered_hash'
|
||||
require 'mongo_bson/byte_buffer'
|
||||
require 'mongo_bson/bson_ruby'
|
||||
require 'mongo_bson/exceptions'
|
@ -1,20 +0,0 @@
|
||||
# A thin wrapper for the CBson class
|
||||
module Mongo
|
||||
class BSON_C
|
||||
|
||||
def self.serialize(obj, check_keys=false, move_id=false)
|
||||
ByteBuffer.new(CBson.serialize(obj, check_keys, move_id))
|
||||
end
|
||||
|
||||
def self.deserialize(buf=nil)
|
||||
if buf.is_a? String
|
||||
to_deserialize = ByteBuffer.new(buf) if buf
|
||||
else
|
||||
buf = ByteBuffer.new(buf.to_a) if buf
|
||||
end
|
||||
buf.rewind
|
||||
CBson.deserialize(buf.to_s)
|
||||
end
|
||||
|
||||
end
|
||||
end
|
@ -1,601 +0,0 @@
|
||||
# --
|
||||
# Copyright (C) 2008-2010 10gen Inc.
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify it
|
||||
# under the terms of the GNU Affero General Public License, version 3, as
|
||||
# published by the Free Software Foundation.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful, but WITHOUT
|
||||
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
||||
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License
|
||||
# for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Affero General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
# ++
|
||||
|
||||
module Mongo
|
||||
# A BSON seralizer/deserializer in pure Ruby.
|
||||
class BSON_RUBY
|
||||
|
||||
# why was this necessary?
|
||||
#include Mongo
|
||||
|
||||
MINKEY = -1
|
||||
EOO = 0
|
||||
NUMBER = 1
|
||||
STRING = 2
|
||||
OBJECT = 3
|
||||
ARRAY = 4
|
||||
BINARY = 5
|
||||
UNDEFINED = 6
|
||||
OID = 7
|
||||
BOOLEAN = 8
|
||||
DATE = 9
|
||||
NULL = 10
|
||||
REGEX = 11
|
||||
REF = 12
|
||||
CODE = 13
|
||||
SYMBOL = 14
|
||||
CODE_W_SCOPE = 15
|
||||
NUMBER_INT = 16
|
||||
TIMESTAMP = 17
|
||||
NUMBER_LONG = 18
|
||||
MAXKEY = 127
|
||||
|
||||
def initialize
|
||||
@buf = ByteBuffer.new
|
||||
end
|
||||
|
||||
if RUBY_VERSION >= '1.9'
|
||||
def self.to_utf8(str)
|
||||
str.encode("utf-8")
|
||||
end
|
||||
else
|
||||
def self.to_utf8(str)
|
||||
begin
|
||||
str.unpack("U*")
|
||||
rescue => ex
|
||||
raise InvalidStringEncoding, "String not valid utf-8: #{str}"
|
||||
end
|
||||
str
|
||||
end
|
||||
end
|
||||
|
||||
def self.serialize_cstr(buf, val)
|
||||
buf.put_array(to_utf8(val.to_s).unpack("C*") << 0)
|
||||
end
|
||||
|
||||
def self.serialize_key(buf, key)
|
||||
raise InvalidDocument, "Key names / regex patterns must not contain the NULL byte" if key.include? "\x00"
|
||||
self.serialize_cstr(buf, key)
|
||||
end
|
||||
|
||||
def to_a
|
||||
@buf.to_a
|
||||
end
|
||||
|
||||
def to_s
|
||||
@buf.to_s
|
||||
end
|
||||
|
||||
# Serializes an object.
|
||||
# Implemented to ensure an API compatible with BSON extension.
|
||||
def self.serialize(obj, check_keys=false, move_id=false)
|
||||
new.serialize(obj, check_keys, move_id)
|
||||
end
|
||||
|
||||
def self.deserialize(buf=nil)
|
||||
new.deserialize(buf)
|
||||
end
|
||||
|
||||
def serialize(obj, check_keys=false, move_id=false)
|
||||
raise "Document is null" unless obj
|
||||
|
||||
@buf.rewind
|
||||
# put in a placeholder for the total size
|
||||
@buf.put_int(0)
|
||||
|
||||
# Write key/value pairs. Always write _id first if it exists.
|
||||
if move_id
|
||||
if obj.has_key? '_id'
|
||||
serialize_key_value('_id', obj['_id'], false)
|
||||
elsif obj.has_key? :_id
|
||||
serialize_key_value('_id', obj[:_id], false)
|
||||
end
|
||||
obj.each {|k, v| serialize_key_value(k, v, check_keys) unless k == '_id' || k == :_id }
|
||||
else
|
||||
if obj.has_key?('_id') && obj.has_key?(:_id)
|
||||
obj['_id'] = obj.delete(:_id)
|
||||
end
|
||||
obj.each {|k, v| serialize_key_value(k, v, check_keys) }
|
||||
end
|
||||
|
||||
serialize_eoo_element(@buf)
|
||||
if @buf.size > 4 * 1024 * 1024
|
||||
raise InvalidDocument, "Document is too large (#{@buf.size}). BSON documents are limited to 4MB (#{4 * 1024 * 1024})."
|
||||
end
|
||||
@buf.put_int(@buf.size, 0)
|
||||
self
|
||||
end
|
||||
|
||||
# Returns the array stored in the buffer.
|
||||
# Implemented to ensure an API compatible with BSON extension.
|
||||
def unpack(arg)
|
||||
@buf.to_a
|
||||
end
|
||||
|
||||
def serialize_key_value(k, v, check_keys)
|
||||
k = k.to_s
|
||||
if check_keys
|
||||
if k[0] == ?$
|
||||
raise InvalidName.new("key #{k} must not start with '$'")
|
||||
end
|
||||
if k.include? ?.
|
||||
raise InvalidName.new("key #{k} must not contain '.'")
|
||||
end
|
||||
end
|
||||
type = bson_type(v)
|
||||
case type
|
||||
when STRING, SYMBOL
|
||||
serialize_string_element(@buf, k, v, type)
|
||||
when NUMBER, NUMBER_INT
|
||||
serialize_number_element(@buf, k, v, type)
|
||||
when OBJECT
|
||||
serialize_object_element(@buf, k, v, check_keys)
|
||||
when OID
|
||||
serialize_oid_element(@buf, k, v)
|
||||
when ARRAY
|
||||
serialize_array_element(@buf, k, v, check_keys)
|
||||
when REGEX
|
||||
serialize_regex_element(@buf, k, v)
|
||||
when BOOLEAN
|
||||
serialize_boolean_element(@buf, k, v)
|
||||
when DATE
|
||||
serialize_date_element(@buf, k, v)
|
||||
when NULL
|
||||
serialize_null_element(@buf, k)
|
||||
when REF
|
||||
serialize_dbref_element(@buf, k, v)
|
||||
when BINARY
|
||||
serialize_binary_element(@buf, k, v)
|
||||
when UNDEFINED
|
||||
serialize_null_element(@buf, k)
|
||||
when CODE_W_SCOPE
|
||||
serialize_code_w_scope(@buf, k, v)
|
||||
when MAXKEY
|
||||
serialize_max_key_element(@buf, k)
|
||||
when MINKEY
|
||||
serialize_min_key_element(@buf, k)
|
||||
else
|
||||
raise "unhandled type #{type}"
|
||||
end
|
||||
end
|
||||
|
||||
def deserialize(buf=nil)
|
||||
# If buf is nil, use @buf, assumed to contain already-serialized BSON.
|
||||
# This is only true during testing.
|
||||
if buf.is_a? String
|
||||
@buf = ByteBuffer.new(buf) if buf
|
||||
else
|
||||
@buf = ByteBuffer.new(buf.to_a) if buf
|
||||
end
|
||||
@buf.rewind
|
||||
@buf.get_int # eat message size
|
||||
doc = OrderedHash.new
|
||||
while @buf.more?
|
||||
type = @buf.get
|
||||
case type
|
||||
when STRING, CODE
|
||||
key = deserialize_cstr(@buf)
|
||||
doc[key] = deserialize_string_data(@buf)
|
||||
when SYMBOL
|
||||
key = deserialize_cstr(@buf)
|
||||
doc[key] = deserialize_string_data(@buf).intern
|
||||
when NUMBER
|
||||
key = deserialize_cstr(@buf)
|
||||
doc[key] = deserialize_number_data(@buf)
|
||||
when NUMBER_INT
|
||||
key = deserialize_cstr(@buf)
|
||||
doc[key] = deserialize_number_int_data(@buf)
|
||||
when NUMBER_LONG
|
||||
key = deserialize_cstr(@buf)
|
||||
doc[key] = deserialize_number_long_data(@buf)
|
||||
when OID
|
||||
key = deserialize_cstr(@buf)
|
||||
doc[key] = deserialize_oid_data(@buf)
|
||||
when ARRAY
|
||||
key = deserialize_cstr(@buf)
|
||||
doc[key] = deserialize_array_data(@buf)
|
||||
when REGEX
|
||||
key = deserialize_cstr(@buf)
|
||||
doc[key] = deserialize_regex_data(@buf)
|
||||
when OBJECT
|
||||
key = deserialize_cstr(@buf)
|
||||
doc[key] = deserialize_object_data(@buf)
|
||||
when BOOLEAN
|
||||
key = deserialize_cstr(@buf)
|
||||
doc[key] = deserialize_boolean_data(@buf)
|
||||
when DATE
|
||||
key = deserialize_cstr(@buf)
|
||||
doc[key] = deserialize_date_data(@buf)
|
||||
when NULL
|
||||
key = deserialize_cstr(@buf)
|
||||
doc[key] = nil
|
||||
when UNDEFINED
|
||||
key = deserialize_cstr(@buf)
|
||||
doc[key] = nil
|
||||
when REF
|
||||
key = deserialize_cstr(@buf)
|
||||
doc[key] = deserialize_dbref_data(@buf)
|
||||
when BINARY
|
||||
key = deserialize_cstr(@buf)
|
||||
doc[key] = deserialize_binary_data(@buf)
|
||||
when CODE_W_SCOPE
|
||||
key = deserialize_cstr(@buf)
|
||||
doc[key] = deserialize_code_w_scope_data(@buf)
|
||||
when TIMESTAMP
|
||||
key = deserialize_cstr(@buf)
|
||||
doc[key] = [deserialize_number_int_data(@buf),
|
||||
deserialize_number_int_data(@buf)]
|
||||
when MAXKEY
|
||||
key = deserialize_cstr(@buf)
|
||||
doc[key] = MaxKey.new
|
||||
when MINKEY, 255 # This is currently easier than unpack the type byte as an unsigned char.
|
||||
key = deserialize_cstr(@buf)
|
||||
doc[key] = MinKey.new
|
||||
when EOO
|
||||
break
|
||||
else
|
||||
raise "Unknown type #{type}, key = #{key}"
|
||||
end
|
||||
end
|
||||
@buf.rewind
|
||||
doc
|
||||
end
|
||||
|
||||
# For debugging.
|
||||
# For debugging: render the buffer as a hex dump, eight bytes per
# line, each line prefixed with its starting offset.
def hex_dump
  lines = []
  @buf.to_a.each_with_index do |byte, index|
    if (index % 8).zero?
      lines << ('%4d: ' % index)
    else
      lines.last << ' '
    end
    lines.last << ('%02X' % byte)
  end
  lines.join("\n")
end
|
||||
|
||||
# Read an 8-byte millisecond timestamp from +buf+ and convert it to
# a UTC Time.
def deserialize_date_data(buf)
  raw = buf.get_long()
  # get_long unpacks as unsigned; fold values >= 2**63 back into the
  # signed range (see note for deserialize_number_long_data).
  millis = raw >= 2**63 ? raw - 2**64 : raw
  Time.at(millis / 1000.0).utc # Time.at accepts fractional seconds
end
|
||||
|
||||
# Read a single byte; BSON encodes true as 0x01.
def deserialize_boolean_data(buf)
  1 == buf.get
end
|
||||
|
||||
# Read an 8-byte IEEE-754 double from +buf+.
def deserialize_number_data(buf)
  buf.get_double
end
|
||||
|
||||
# Read a 4-byte integer. ByteBuffer#get_int unpacks unsigned, so map
# values >= 2**31 back into the signed 32-bit range.
def deserialize_number_int_data(buf)
  raw = buf.get_int
  raw < 2**31 ? raw : raw - 2**32
end
|
||||
|
||||
# Read an 8-byte integer, folding the unsigned value returned by
# ByteBuffer#get_long back into the signed 64-bit range.
def deserialize_number_long_data(buf)
  raw = buf.get_long
  raw < 2**63 ? raw : raw - 2**64
end
|
||||
|
||||
# Read an embedded BSON document. Documents of the {"$ref" => ...,
# "$id" => ...} shape are returned as DBRef objects; everything else
# comes back as the decoded hash.
def deserialize_object_data(buf)
  size = buf.get_int
  buf.position -= 4 # the embedded document's bytes include this length
  decoded = BSON_CODER.new.deserialize(buf.get(size))
  if decoded.has_key?("$ref")
    DBRef.new(decoded["$ref"], decoded["$id"])
  else
    decoded
  end
end
|
||||
|
||||
# BSON arrays are documents keyed by stringified indices
# ("0", "1", ...); decode the document and rebuild the array.
def deserialize_array_data(buf)
  deserialize_object_data(buf).inject([]) do |array, (key, value)|
    array[key.to_i] = value
    array
  end
end
|
||||
|
||||
# Read a regex element: two consecutive cstrings (pattern, option
# chars). Options i, m, and x map onto Ruby's Regexp flags; any other
# option characters are preserved via the deprecated RegexpOfHolding.
def deserialize_regex_data(buf)
  pattern  = deserialize_cstr(buf)
  opts_str = deserialize_cstr(buf)

  flags = 0
  flags |= Regexp::IGNORECASE if opts_str.include?('i')
  flags |= Regexp::MULTILINE  if opts_str.include?('m')
  flags |= Regexp::EXTENDED   if opts_str.include?('x')

  leftover = opts_str.gsub(/[imx]/, '') # strip the three we understand
  if leftover.empty?
    Regexp.new(pattern, flags)
  else
    warn("Using deprecated Regexp options #{leftover}; future versions of this MongoDB driver will support only i, m, and x. See deprecated class RegexpOfHolding for more info.")
    RegexpOfHolding.new(pattern, flags, leftover)
  end
end
|
||||
|
||||
# Read a length-prefixed UTF-8 string (the length includes the
# trailing NUL byte, which is dropped).
def deserialize_string_data(buf)
  len = buf.get_int
  str = buf.get(len)[0..-2]
  # Under Ruby 1.8 ByteBuffer#get returns an array of bytes.
  str = str.pack("C*") if str.respond_to?("pack")
  str.force_encoding("utf-8") if RUBY_VERSION >= '1.9'
  str
end
|
||||
|
||||
# Read a BSON "JavaScript code with scope" element: a total element
# length (skipped), a length-prefixed code string, then an embedded
# scope document. Returns a Code object.
def deserialize_code_w_scope_data(buf)
  buf.get_int # total element length; not needed here
  len = buf.get_int
  code = buf.get(len)[0..-2] # drop the trailing NUL
  # Under Ruby 1.8 ByteBuffer#get returns an array of bytes.
  if code.respond_to? "pack"
    code = code.pack("C*")
  end
  if RUBY_VERSION >= '1.9'
    code.force_encoding("utf-8")
  end

  scope_size = buf.get_int
  buf.position -= 4 # the scope document's bytes include this length
  scope = BSON_CODER.new().deserialize(buf.get(scope_size))

  Code.new(code, scope)
end
|
||||
|
||||
# Read a 12-byte BSON ObjectID from +buf+.
def deserialize_oid_data(buf)
  ObjectID.new(buf.get(12))
end
|
||||
|
||||
# Read a (deprecated) DBRef element: a namespace string followed by
# a 12-byte ObjectID. Argument order guarantees the namespace is
# consumed before the id.
def deserialize_dbref_data(buf)
  DBRef.new(deserialize_string_data(buf), deserialize_oid_data(buf))
end
|
||||
|
||||
# Read a BSON binary element. For the default subtype (2, "bytes")
# the payload carries a second, inner length prefix which replaces
# the outer one.
def deserialize_binary_data(buf)
  len = buf.get_int
  type = buf.get
  len = buf.get_int if type == Binary::SUBTYPE_BYTES
  Binary.new(buf.get(len), type)
end
|
||||
|
||||
# Write the end-of-object marker that terminates a BSON document.
def serialize_eoo_element(buf)
  buf.put(EOO)
end
|
||||
|
||||
# Write a null element: type byte followed by the key; no payload.
def serialize_null_element(buf, key)
  buf.put(NULL)
  self.class.serialize_key(buf, key)
end
|
||||
|
||||
# Write a DBRef as an embedded document of the form
# {"$ref" => namespace, "$id" => object_id}.
def serialize_dbref_element(buf, key, val)
  ref = OrderedHash.new
  ref['$ref'] = val.namespace
  ref['$id'] = val.object_id
  serialize_object_element(buf, key, ref, false)
end
|
||||
|
||||
# Write a binary element. The default subtype (2, "bytes") carries an
# extra inner length prefix, so its outer length is four bytes larger
# than the payload.
def serialize_binary_element(buf, key, val)
  buf.put(BINARY)
  self.class.serialize_key(buf, key)

  bytes = val.to_a
  count = bytes.length
  subtype = val.respond_to?(:subtype) ? val.subtype : Binary::SUBTYPE_BYTES
  if subtype == Binary::SUBTYPE_BYTES
    buf.put_int(count + 4) # outer length covers the inner prefix too
    buf.put(subtype)
    buf.put_int(count)
  else
    buf.put_int(count)
    buf.put(subtype)
  end
  buf.put_array(bytes)
end
|
||||
|
||||
# Write a boolean element: 0x01 for truthy values, 0x00 otherwise.
def serialize_boolean_element(buf, key, val)
  buf.put(BOOLEAN)
  self.class.serialize_key(buf, key)
  byte = val ? 1 : 0
  buf.put(byte)
end
|
||||
|
||||
# Write a datetime element as milliseconds since the Unix epoch.
def serialize_date_element(buf, key, val)
  buf.put(DATE)
  self.class.serialize_key(buf, key)
  buf.put_long((val.to_f * 1000).to_i)
end
|
||||
|
||||
# Write a numeric element. Floats are stored as 8-byte doubles;
# integers outside the 32-bit range are widened to NUMBER_LONG, and
# integers beyond 64 bits are rejected.
def serialize_number_element(buf, key, val, type)
  if type == NUMBER
    buf.put(type)
    self.class.serialize_key(buf, key)
    buf.put_double(val)
    return
  end

  if val > 2**63 - 1 or val < -2**63
    raise RangeError.new("MongoDB can only handle 8-byte ints")
  end

  if val > 2**31 - 1 or val < -2**31
    buf.put(NUMBER_LONG)
    self.class.serialize_key(buf, key)
    buf.put_long(val)
  else
    buf.put(type)
    self.class.serialize_key(buf, key)
    buf.put_int(val)
  end
end
|
||||
|
||||
# Write an embedded document (or, via +opcode+, an array) by
# recursively serializing +val+ with a fresh coder instance.
def serialize_object_element(buf, key, val, check_keys, opcode=OBJECT)
  buf.put(opcode)
  self.class.serialize_key(buf, key)
  buf.put_array(BSON_CODER.new.serialize(val, check_keys).to_a)
end
|
||||
|
||||
# BSON has no first-class array type; write the array as an embedded
# document whose keys are the element indices.
def serialize_array_element(buf, key, val, check_keys)
  as_hash = OrderedHash.new
  val.each_with_index { |element, index| as_hash[index] = element }
  serialize_object_element(buf, key, as_hash, check_keys, ARRAY)
end
|
||||
|
||||
# Write a regex element: the pattern and its option characters are
# stored as two consecutive cstrings.
def serialize_regex_element(buf, key, val)
  buf.put(REGEX)
  self.class.serialize_key(buf, key)

  # serialize_key is reused for the pattern because regex patterns are
  # NUL-terminated rather than length-prefixed (and so cannot contain
  # a NUL byte).
  self.class.serialize_key(buf, val.source)

  flags = val.options
  opts = ''
  opts << 'i' if (flags & Regexp::IGNORECASE) != 0
  opts << 'm' if (flags & Regexp::MULTILINE) != 0
  opts << 'x' if (flags & Regexp::EXTENDED) != 0
  opts << val.extra_options_str if val.respond_to?(:extra_options_str)
  # Option characters must be stored sorted and de-duplicated.
  self.class.serialize_cstr(buf, opts.split(//).sort.uniq.join)
end
|
||||
|
||||
# Write a MaxKey element: type byte and key only; no payload.
def serialize_max_key_element(buf, key)
  buf.put(MAXKEY)
  self.class.serialize_key(buf, key)
end
|
||||
|
||||
# Write a MinKey element: type byte and key only; no payload.
def serialize_min_key_element(buf, key)
  buf.put(MINKEY)
  self.class.serialize_key(buf, key)
end
|
||||
|
||||
# Write an ObjectID element: type byte, key, then the id's 12 bytes.
def serialize_oid_element(buf, key, val)
  buf.put(OID)
  self.class.serialize_key(buf, key)

  buf.put_array(val.to_a)
end
|
||||
|
||||
# Write a length-prefixed string element (type STRING or SYMBOL).
# The 4-byte length is not known until after the cstring is written,
# so a hole is reserved and back-patched.
def serialize_string_element(buf, key, val, type)
  buf.put(type)
  self.class.serialize_key(buf, key)

  # Make a hole for the length
  len_pos = buf.position
  buf.put_int(0)

  # Save the string
  start_pos = buf.position
  self.class.serialize_cstr(buf, val)
  end_pos = buf.position

  # Put the string size in front (the size includes the trailing NUL)
  buf.put_int(end_pos - start_pos, len_pos)

  # Go back to where we were
  buf.position = end_pos
end
|
||||
|
||||
# Write a "JavaScript code with scope" element: a back-patched total
# element length, a length-prefixed code string, then the serialized
# scope document.
def serialize_code_w_scope(buf, key, val)
  buf.put(CODE_W_SCOPE)
  self.class.serialize_key(buf, key)

  # Make a hole for the total element length
  len_pos = buf.position
  buf.put_int(0)

  # Code string length includes the trailing NUL.
  # NOTE(review): under Ruby 1.9 String#length counts characters, not
  # bytes, while serialize_cstr writes bytes — for multibyte code these
  # differ; verify whether bytesize is intended here.
  buf.put_int(val.length + 1)
  self.class.serialize_cstr(buf, val)
  buf.put_array(BSON_CODER.new.serialize(val.scope).to_a)

  # Back-patch the total length, then restore the cursor.
  end_pos = buf.position
  buf.put_int(end_pos - len_pos, len_pos)
  buf.position = end_pos
end
|
||||
|
||||
# Read a NUL-terminated string from +buf+, one byte at a time.
def deserialize_cstr(buf)
  chars = ""
  until (b = buf.get) == 0
    chars << b.chr
  end
  chars.force_encoding("utf-8") if RUBY_VERSION >= '1.9' # MongoDB stores UTF-8
  chars
end
|
||||
|
||||
# Map a Ruby object to its BSON type constant, raising InvalidDocument
# for types BSON cannot represent.
#
# NOTE: branch order matters. Code subclasses String and Binary
# subclasses ByteBuffer, so Code must be tested before String (and
# ByteBuffer before String-like payloads); Numeric is reached only by
# numeric types that are neither Integer nor Float.
#
# Fix: the Numeric error message read "Fixum" — corrected to "Fixnum".
def bson_type(o)
  case o
  when nil
    NULL
  when Integer
    NUMBER_INT
  when Float
    NUMBER
  when ByteBuffer
    BINARY
  when Code
    CODE_W_SCOPE
  when String
    STRING
  when Array
    ARRAY
  when Regexp
    REGEX
  when ObjectID
    OID
  when DBRef
    REF
  when true, false
    BOOLEAN
  when Time
    DATE
  when Hash
    OBJECT
  when Symbol
    SYMBOL
  when MaxKey
    MAXKEY
  when MinKey
    MINKEY
  when Numeric
    raise InvalidDocument, "Cannot serialize the Numeric type #{o.class} as BSON; only Fixnum, Bignum, and Float are supported."
  when Date, DateTime
    raise InvalidDocument, "#{o.class} is not currently supported; " +
    "use a UTC Time instance instead."
  else
    if defined?(ActiveSupport::TimeWithZone) && o.is_a?(ActiveSupport::TimeWithZone)
      raise InvalidDocument, "ActiveSupport::TimeWithZone is not currently supported; " +
      "use a UTC Time instance instead."
    else
      raise InvalidDocument, "Cannot serialize #{o.class} as a BSON type; it either isn't supported or won't translate to BSON."
    end
  end
end
|
||||
|
||||
end
|
||||
end
|
@ -1,222 +0,0 @@
|
||||
# --
|
||||
# Copyright (C) 2008-2010 10gen Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
# ++
|
||||
|
||||
# A byte buffer.
|
||||
# A growable byte buffer with a movable read/write cursor, used as the
# backing store for BSON serialization and deserialization. Default
# byte order is little-endian, matching the BSON wire format.
class ByteBuffer

  # Precomputed little-endian encodings of commonly-used integers
  # (opcodes and small lengths), avoiding repeated Array#pack calls.
  INT_LOOKUP = {
    0 => [0, 0, 0, 0],
    1 => [1, 0, 0, 0],
    2 => [2, 0, 0, 0],
    3 => [3, 0, 0, 0],
    4 => [4, 0, 0, 0],
    2001 => [209, 7, 0, 0],
    2002 => [210, 7, 0, 0],
    2004 => [212, 7, 0, 0],
    2005 => [213, 7, 0, 0],
    2006 => [214, 7, 0, 0]
  }

  # Byte order currently in effect (:little_endian or :big_endian).
  attr_reader :order

  def initialize(initial_data=[])
    @buf = initial_data
    @cursor = @buf.length      # start writing at the end of the seed data
    @order = :little_endian
    @int_pack_order = 'V'      # Array#pack code: 32-bit little-endian
    @double_pack_order = 'E'   # Array#pack code: little-endian double
  end

  if RUBY_VERSION >= '1.9'
    # Validate/convert +str+ to UTF-8 using the 1.9 encoding API.
    def self.to_utf8(str)
      str.encode("utf-8")
    end
  else
    # Ruby 1.8 has no String encodings; unpacking as UTF-8 codepoints
    # is used purely as a validity check and the original is returned.
    def self.to_utf8(str)
      begin
        str.unpack("U*")
      rescue => ex
        raise InvalidStringEncoding, "String not valid utf-8: #{str}"
      end
      str
    end
  end

  # Append +val+ to +buf+ as a NUL-terminated UTF-8 byte sequence.
  def self.serialize_cstr(buf, val)
    buf.put_array(to_utf8(val.to_s).unpack("C*") + [0])
  end

  # +endianness+ should be :little_endian or :big_endian. Default is :little_endian
  def order=(endianness)
    @order = endianness
    @int_pack_order = endianness == :little_endian ? 'V' : 'N'
    @double_pack_order = endianness == :little_endian ? 'E' : 'G'
  end

  # Move the cursor back to the start of the buffer.
  def rewind
    @cursor = 0
  end

  # Current cursor position.
  def position
    @cursor
  end

  def position=(val)
    @cursor = val
  end

  # Empty the buffer and reset the cursor.
  def clear
    @buf = []
    rewind
  end

  def size
    @buf.size
  end
  alias_method :length, :size

  # Appends a second ByteBuffer object, +buffer+, to the current buffer.
  def append!(buffer)
    @buf = @buf + buffer.to_a
    self
  end

  # Prepends a second ByteBuffer object, +buffer+, to the current buffer.
  def prepend!(buffer)
    @buf = buffer.to_a + @buf
    self
  end

  # Write one byte at +offset+ (default: the cursor) and advance the
  # cursor past it.
  def put(byte, offset=nil)
    @cursor = offset if offset
    @buf[@cursor] = byte
    @cursor += 1
  end

  # Write an array of bytes at +offset+ (default: the cursor).
  def put_array(array, offset=nil)
    @cursor = offset if offset
    @buf[@cursor, array.length] = array
    @cursor += array.length
  end

  # Write a 4-byte integer, using INT_LOOKUP when possible.
  def put_int(i, offset=nil)
    unless a = INT_LOOKUP[i]
      a = []
      [i].pack(@int_pack_order).each_byte { |b| a << b }
    end
    put_array(a, offset)
  end

  # Write an 8-byte integer as two 4-byte halves, ordered to match the
  # buffer's endianness.
  def put_long(i, offset=nil)
    offset = @cursor unless offset
    if @int_pack_order == 'N'
      put_int(i >> 32, offset)
      put_int(i & 0xffffffff, offset + 4)
    else
      put_int(i & 0xffffffff, offset)
      put_int(i >> 32, offset + 4)
    end
  end

  # Write an 8-byte float.
  def put_double(d, offset=nil)
    a = []
    [d].pack(@double_pack_order).each_byte { |b| a << b }
    put_array(a, offset)
  end

  # If +size+ == nil, returns one byte. Else returns array of bytes of length
  # +size+.
  def get(len=nil)
    one_byte = len.nil?
    len ||= 1
    check_read_length(len)
    start = @cursor
    @cursor += len
    if one_byte
      @buf[start]
    else
      # When @buf is a String (rather than an Array) convert the slice
      # to an array of byte values.
      if @buf.respond_to? "unpack"
        @buf[start, len].unpack("C*")
      else
        @buf[start, len]
      end
    end
  end

  # Read a 4-byte integer (unsigned; callers fold into the signed range).
  def get_int
    check_read_length(4)
    vals = ""
    (@cursor..@cursor+3).each { |i| vals << @buf[i].chr }
    @cursor += 4
    vals.unpack(@int_pack_order)[0]
  end

  # Read an 8-byte integer as two 4-byte halves.
  def get_long
    i1 = get_int
    i2 = get_int
    if @int_pack_order == 'N'
      (i1 << 32) + i2
    else
      (i2 << 32) + i1
    end
  end

  # Read an 8-byte float.
  def get_double
    check_read_length(8)
    vals = ""
    (@cursor..@cursor+7).each { |i| vals << @buf[i].chr }
    @cursor += 8
    vals.unpack(@double_pack_order)[0]
  end

  # True if the cursor has not yet reached the end of the buffer.
  def more?
    @cursor < @buf.size
  end

  # Buffer contents as an array of byte values.
  def to_a
    if @buf.respond_to? "unpack"
      @buf.unpack("C*")
    else
      @buf
    end
  end

  def unpack(args)
    to_a
  end

  # Buffer contents as a binary string.
  def to_s
    if @buf.respond_to? :fast_pack
      @buf.fast_pack
    elsif @buf.respond_to? "pack"
      @buf.pack("C*")
    else
      @buf
    end
  end

  # For debugging: print each byte's offset, hex, octal, char, and
  # decimal representations to stderr.
  def dump
    @buf.each_with_index { |c, i| $stderr.puts "#{'%04d' % i}: #{'%02x' % c} #{'%03o' % c} #{'%s' % c.chr} #{'%3d' % c}" }
  end

  private

  def check_read_length(len)
    raise "attempt to read past end of buffer" if @cursor + len > @buf.length
  end

end
|
@ -1,36 +0,0 @@
|
||||
# --
|
||||
# Copyright (C) 2008-2010 10gen Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
# ++
|
||||
|
||||
# Exception hierarchy for the driver: driver-side failures descend from
# MongoRubyError; server-reported failures descend from MongoDBError.
module Mongo
  # Generic Mongo Ruby Driver exception class.
  class MongoRubyError < StandardError; end

  # Raised when MongoDB itself has returned an error.
  class MongoDBError < RuntimeError; end

  # Raised when given a string is not valid utf-8 (Ruby 1.8 only).
  class InvalidStringEncoding < MongoRubyError; end

  # Raised when attempting to initialize an invalid ObjectID.
  class InvalidObjectID < MongoRubyError; end

  # Raised when trying to insert a document that exceeds the 4MB limit or
  # when the document contains objects that can't be serialized as BSON.
  class InvalidDocument < MongoDBError; end

  # Raised when an invalid name is used.
  class InvalidName < RuntimeError; end
end
|
@ -1,140 +0,0 @@
|
||||
# --
|
||||
# Copyright (C) 2008-2010 10gen Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
# ++
|
||||
|
||||
# A hash in which the order of keys are preserved.
|
||||
#
|
||||
# Under Ruby 1.9 and greater, this class has no added methods because Ruby's
|
||||
# Hash already keeps its keys ordered by order of insertion.
|
||||
# A hash in which the order of keys are preserved.
#
# Under Ruby 1.9 and greater, this class has no added methods because Ruby's
# Hash already keeps its keys ordered by order of insertion.
class OrderedHash < Hash

  # Two ordered hashes are equal when their keys (in order) and their
  # values (in order) match; any comparison error yields false.
  def ==(other)
    begin
      !other.nil? &&
        keys == other.keys &&
        values == other.values
    rescue
      false
    end
  end

  # We only need the body of this class if the RUBY_VERSION is before 1.9
  if RUBY_VERSION < '1.9'
    # Insertion-ordered list of keys, maintained by []=, merge!, delete.
    attr_accessor :ordered_keys

    # OrderedHash[...] constructor mirroring Hash.[]: accepts either a
    # Hash or a flat list of key/value pairs.
    def self.[] *args
      oh = OrderedHash.new
      if Hash === args[0]
        oh.merge! args[0]
      elsif (args.size % 2) != 0
        raise ArgumentError, "odd number of elements for Hash"
      else
        0.step(args.size - 1, 2) do |key|
          value = key + 1
          oh[args[key]] = args[value]
        end
      end
      oh
    end

    def initialize(*a, &b)
      super
      @ordered_keys = []
    end

    # Keys in insertion order.
    def keys
      @ordered_keys || []
    end

    # Store a value, recording the key's position on first insertion.
    def []=(key, value)
      @ordered_keys ||= []
      @ordered_keys << key unless @ordered_keys.include?(key)
      super(key, value)
    end

    # Yields key/value pairs in insertion order.
    def each
      @ordered_keys ||= []
      @ordered_keys.each { |k| yield k, self[k] }
      self
    end
    alias :each_pair :each

    def to_a
      @ordered_keys ||= []
      @ordered_keys.map { |k| [k, self[k]] }
    end

    # Values in insertion order.
    def values
      collect { |k, v| v }
    end

    # Non-destructive merge; returns a new OrderedHash.
    def merge(other)
      oh = self.dup
      oh.merge!(other)
      oh
    end

    def merge!(other)
      @ordered_keys ||= []
      @ordered_keys += other.keys # unordered if not an OrderedHash
      @ordered_keys.uniq!
      super(other)
    end

    alias :update :merge!

    def inspect
      str = '{'
      str << (@ordered_keys || []).collect { |k| "\"#{k}\"=>#{self.[](k).inspect}" }.join(", ")
      str << '}'
    end

    def delete(key, &block)
      @ordered_keys.delete(key) if @ordered_keys
      super
    end

    def delete_if(&block)
      self.each { |k,v|
        if yield k, v
          delete(k)
        end
      }
    end

    def clear
      super
      @ordered_keys = []
    end

    # Order-sensitive hash code: pairs are folded in insertion order,
    # so equal contents in a different order hash differently.
    def hash
      code = 17
      each_pair do |key, value|
        code = 37 * code + key.hash
        code = 37 * code + value.hash
      end
      code & 0x7fffffff
    end

    def eql?(o)
      if o.instance_of? OrderedHash
        self.hash == o.hash
      else
        false
      end
    end

  end
end
|
@ -1,54 +0,0 @@
|
||||
# --
|
||||
# Copyright (C) 2008-2010 10gen Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
# ++
|
||||
|
||||
require 'mongo_bson/byte_buffer'
|
||||
|
||||
module Mongo

  # An array of binary bytes with a MongoDB subtype. See the subtype
  # constants for reference.
  #
  # Use this class when storing binary data in documents.
  class Binary < ByteBuffer

    SUBTYPE_BYTES = 0x02
    SUBTYPE_UUID = 0x03
    SUBTYPE_MD5 = 0x05
    SUBTYPE_USER_DEFINED = 0x80

    # One of the SUBTYPE_* constants. Default is SUBTYPE_BYTES.
    attr_accessor :subtype

    # Create a buffer for storing binary data in MongoDB.
    #
    # @param [Array, String] data to store as BSON binary. If a string is given, the value will be
    #   converted to an array of bytes using String#unpack("c*").
    # @param [Fixnum] one of four values specifying a BSON binary subtype. Possible values are
    #   SUBTYPE_BYTES, SUBTYPE_UUID, SUBTYPE_MD5, and SUBTYPE_USER_DEFINED.
    #
    # @see http://www.mongodb.org/display/DOCS/BSON#BSON-noteondatabinary BSON binary subtypes.
    def initialize(data=[], subtype=SUBTYPE_BYTES)
      data = data.unpack("c*") if data.is_a?(String)
      super(data)
      @subtype = subtype
    end

    def inspect
      "<Mongo::Binary:#{object_id}>"
    end

  end
end
|
@ -1,36 +0,0 @@
|
||||
# --
|
||||
# Copyright (C) 2008-2010 10gen Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
# ++
|
||||
|
||||
module Mongo

  # JavaScript code to be evaluated by MongoDB.
  #
  # NOTE: Code subclasses String, so type dispatch (e.g. bson_type's
  # case statement) must test for Code before String.
  class Code < String

    # Hash mapping identifiers to their values
    attr_accessor :scope

    # Wrap code to be evaluated by MongoDB.
    #
    # @param [String] code the JavaScript code.
    # @param [Hash] a document mapping identifiers to values, which
    #   represent the scope in which the code is to be executed.
    def initialize(code, scope={})
      super(code)
      @scope = scope
    end

  end
end
|
@ -1,40 +0,0 @@
|
||||
# --
|
||||
# Copyright (C) 2008-2010 10gen Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
# ++
|
||||
|
||||
module Mongo

  # A reference to another object in a MongoDB database.
  class DBRef

    # NOTE: the object_id reader shadows Ruby's built-in
    # Object#object_id for instances of this class.
    attr_reader :namespace, :object_id

    # Create a DBRef. Use this class in conjunction with DB#dereference.
    #
    # @param [String] a collection name
    # @param [ObjectID] an object id
    #
    # @core dbrefs constructor_details
    def initialize(namespace, object_id)
      @namespace = namespace
      @object_id = object_id
    end

    def to_s
      "ns: #{namespace}, id: #{object_id}"
    end

  end
end
|
@ -1,58 +0,0 @@
|
||||
# --
|
||||
# Copyright (C) 2008-2010 10gen Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
# ++
|
||||
|
||||
module Mongo

  # A class representing the BSON MaxKey type. MaxKey will always compare greater than
  # all other BSON types and values.
  #
  # @example Sorting (assume @numbers is a collection):
  #
  #   >> @numbers.save({"n" => Mongo::MaxKey.new})
  #   >> @numbers.save({"n" => 0})
  #   >> @numbers.save({"n" => 5_000_000})
  #   >> @numbers.find.sort("n").to_a
  #   => [{"_id"=>4b5a050c238d3bace2000004, "n"=>0},
  #       {"_id"=>4b5a04e6238d3bace2000002, "n"=>5_000_000},
  #       {"_id"=>4b5a04ea238d3bace2000003, "n"=>#<Mongo::MaxKey:0x1014ef410>}]
  class MaxKey

    # All MaxKey instances are interchangeable: equality holds exactly
    # when the other object is itself an instance of MaxKey.
    def ==(obj)
      MaxKey == obj.class
    end
  end

  # A class representing the BSON MinKey type. MinKey will always compare less than
  # all other BSON types and values.
  #
  # @example Sorting (assume @numbers is a collection):
  #
  #   >> @numbers.save({"n" => Mongo::MinKey.new})
  #   >> @numbers.save({"n" => -1_000_000})
  #   >> @numbers.save({"n" => 1_000_000})
  #   >> @numbers.find.sort("n").to_a
  #   => [{"_id"=>4b5a050c238d3bace2000004, "n"=>#<Mongo::MinKey:0x1014ef410>},
  #       {"_id"=>4b5a04e6238d3bace2000002, "n"=>-1_000_000},
  #       {"_id"=>4b5a04ea238d3bace2000003, "n"=>1_000_000}]
  class MinKey

    # All MinKey instances are interchangeable: equality holds exactly
    # when the other object is itself an instance of MinKey.
    def ==(obj)
      MinKey == obj.class
    end
  end
end
|
@ -1,180 +0,0 @@
|
||||
# --
|
||||
# Copyright (C) 2008-2010 10gen Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
# ++
|
||||
|
||||
require 'thread'
|
||||
require 'socket'
|
||||
require 'digest/md5'
|
||||
|
||||
module Mongo

  # Generates MongoDB object ids.
  #
  # @core objectids
  class ObjectID
    # Mutex guarding the shared per-process counter below.
    @@lock = Mutex.new
    # Monotonically increasing counter; wraps at 2**24 (see get_inc).
    @@index = 0

    # Create a new object id. If no parameter is given, an id corresponding
    # to the ObjectID BSON data type will be created. This is a 12-byte value
    # consisting of a 4-byte timestamp, a 3-byte machine id, a 2-byte process id,
    # and a 3-byte counter.
    #
    # @param [Array] data should be an array of bytes. If you want
    #   to generate a standard MongoDB object id, leave this argument blank.
    def initialize(data=nil)
      @data = data || generate
    end

    # Determine if the supplied string is legal. Legal strings will
    # consist of 24 hexadecimal characters.
    #
    # @param [String] str
    #
    # @return [Boolean]
    def self.legal?(str)
      len = 24
      str =~ /([0-9a-f]+)/i
      match = $1
      # The regex is unanchored; requiring the captured run to equal the
      # whole 24-character string rules out partial matches.
      str && str.length == len && match == str
    end

    # Create an object id from the given time. This is useful for doing range
    # queries; it works because MongoDB's object ids begin
    # with a timestamp.
    #
    # @param [Time] time a utc time to encode as an object id.
    #
    # @return [Mongo::ObjectID]
    #
    # @example Return all document created before Jan 1, 2010.
    #   time = Time.utc(2010, 1, 1)
    #   time_id = ObjectID.from_time(time)
    #   collection.find({'_id' => {'$lt' => time_id}})
    def self.from_time(time)
      # Timestamp in the first 4 bytes; remaining 8 bytes zeroed.
      self.new([time.to_i,0,0].pack("NNN").unpack("C12"))
    end

    # Adds a primary key to the given document if needed.
    #
    # @param [Hash] doc a document requiring an _id.
    #
    # @return [Mongo::ObjectID, Object] returns a newly-created or
    #   current _id for the given document.
    def self.create_pk(doc)
      doc.has_key?(:_id) || doc.has_key?('_id') ? doc : doc.merge!(:_id => self.new)
    end

    # Check equality of this object id with another.
    #
    # @param [Mongo::ObjectID] object_id
    def eql?(object_id)
      @data == object_id.instance_variable_get("@data")
    end
    alias_method :==, :eql?

    # Get a unique hashcode for this object.
    # This is required since we've defined an #eql? method.
    #
    # @return [Integer]
    def hash
      @data.hash
    end

    # Get an array representation of the object id.
    #
    # @return [Array]
    def to_a
      @data.dup
    end

    # Given a string representation of an ObjectID, return a new ObjectID
    # with that value.
    #
    # @param [String] str
    #
    # @return [Mongo::ObjectID]
    def self.from_string(str)
      raise InvalidObjectID, "illegal ObjectID format" unless legal?(str)
      data = []
      12.times do |i|
        data[i] = str[i * 2, 2].to_i(16)
      end
      self.new(data)
    end

    # Get a string representation of this object id.
    #
    # @return [String] 24 lowercase hex characters.
    def to_s
      str = ' ' * 24
      12.times do |i|
        str[i * 2, 2] = '%02x' % @data[i]
      end
      str
    end

    def inspect
      "ObjectID('#{to_s}')"
    end

    # Convert to MongoDB extended JSON format. Since JSON includes type information,
    # but lacks an ObjectID type, this JSON format encodes the type using an $id key.
    #
    # Note: +escaped+ is accepted for API compatibility and is ignored.
    #
    # @return [String] the object id represented as MongoDB extended JSON.
    def to_json(escaped=false)
      "{\"$oid\": \"#{to_s}\"}"
    end

    # Return the UTC time at which this ObjectID was generated. This may
    # be used in lieu of a created_at timestamp since this information
    # is always encoded in the object id.
    #
    # @return [Time] the time at which this object was created.
    def generation_time
      Time.at(@data.pack("C4").unpack("N")[0]).utc
    end

    private

    # We need to define this method only if CBson isn't loaded.
    unless defined? CBson
      # Build the 12 id bytes: timestamp, machine, pid, counter.
      def generate
        oid = ''

        # 4 bytes current time
        time = Time.new.to_i
        oid += [time].pack("N")

        # 3 bytes machine
        oid += Digest::MD5.digest(Socket.gethostname)[0, 3]

        # 2 bytes pid
        oid += [Process.pid % 0xFFFF].pack("n")

        # 3 bytes inc
        oid += [get_inc].pack("N")[1, 3]

        oid.unpack("C12")
      end
    end

    # Next counter value, modulo 2**24, taken under the class-wide lock.
    def get_inc
      @@lock.synchronize do
        @@index = (@@index + 1) % 0xFFFFFF
      end
    end
  end
end
|
@ -1,45 +0,0 @@
|
||||
# --
|
||||
# Copyright (C) 2008-2010 10gen Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
# ++
|
||||
|
||||
module Mongo
|
||||
|
||||
# A Regexp that can hold on to extra options and ignore them. Mongo
|
||||
# regexes may contain option characters beyond 'i', 'm', and 'x'. (Note
|
||||
# that Mongo only uses those three, but that regexes coming from other
|
||||
# languages may store different option characters.)
|
||||
#
|
||||
# Note that you do not have to use this class at all if you wish to
|
||||
# store regular expressions in Mongo. The Mongo and Ruby regex option
|
||||
# flags are the same. Storing regexes is discouraged, in any case.
|
||||
#
|
||||
# @deprecated
|
||||
class RegexpOfHolding < Regexp
|
||||
|
||||
attr_accessor :extra_options_str
|
||||
|
||||
# @deprecated we're no longer supporting this.
|
||||
# +str+ and +options+ are the same as Regexp. +extra_options_str+
|
||||
# contains all the other flags that were in Mongo but we do not use or
|
||||
# understand.
|
||||
def initialize(str, options, extra_options_str)
|
||||
warn "RegexpOfHolding is deprecated; the modifiers i, m, and x will be stored automatically as BSON." +
|
||||
"If you're only storing the options i, m, and x, you can safely ignore this message."
|
||||
super(str, options)
|
||||
@extra_options_str = extra_options_str
|
||||
end
|
||||
end
|
||||
|
||||
end
|
@ -12,9 +12,9 @@ class TestCollection < Test::Unit::TestCase
|
||||
|
||||
def test_optional_pk_factory
|
||||
@coll_default_pk = @@db.collection('stuff')
|
||||
assert_equal Mongo::ObjectID, @coll_default_pk.pk_factory
|
||||
assert_equal BSON::ObjectID, @coll_default_pk.pk_factory
|
||||
@coll_default_pk = @@db.create_collection('more-stuff')
|
||||
assert_equal Mongo::ObjectID, @coll_default_pk.pk_factory
|
||||
assert_equal BSON::ObjectID, @coll_default_pk.pk_factory
|
||||
|
||||
# Create a db with a pk_factory.
|
||||
@db = Connection.new(ENV['MONGO_RUBY_DRIVER_HOST'] || 'localhost',
|
||||
@ -27,11 +27,11 @@ class TestCollection < Test::Unit::TestCase
|
||||
end
|
||||
|
||||
def test_valid_names
|
||||
assert_raise InvalidName do
|
||||
assert_raise Mongo::InvalidName do
|
||||
@@db["te$t"]
|
||||
end
|
||||
|
||||
assert_raise InvalidName do
|
||||
assert_raise Mongo::InvalidName do
|
||||
@@db['$main']
|
||||
end
|
||||
|
||||
|
@ -1,7 +1,7 @@
|
||||
require 'test/test_helper'
|
||||
require 'mongo/exceptions'
|
||||
require 'mongo/util/conversions'
|
||||
require 'mongo_bson/ordered_hash'
|
||||
require 'bson/ordered_hash'
|
||||
|
||||
class ConversionsTest < Test::Unit::TestCase
|
||||
include Mongo::Conversions
|
||||
|
@ -8,7 +8,7 @@ class BinaryTest < Test::Unit::TestCase
|
||||
end
|
||||
|
||||
should "not display actual data" do
|
||||
binary = Mongo::Binary.new(@data)
|
||||
binary = BSON::Binary.new(@data)
|
||||
assert_equal "<Mongo::Binary:#{binary.object_id}>", binary.inspect
|
||||
end
|
||||
end
|
||||
|
@ -20,8 +20,7 @@ end
|
||||
|
||||
class BSONTest < Test::Unit::TestCase
|
||||
|
||||
include Mongo
|
||||
|
||||
include BSON
|
||||
|
||||
def test_deprecated_bson_module
|
||||
doc = {'doc' => 'hello, world'}
|
||||
@ -31,26 +30,26 @@ class BSONTest < Test::Unit::TestCase
|
||||
|
||||
def test_string
|
||||
doc = {'doc' => 'hello, world'}
|
||||
bson = bson = Mongo::BSON_CODER.serialize(doc)
|
||||
assert_equal doc, Mongo::BSON_CODER.deserialize(bson)
|
||||
bson = bson = BSON::BSON_CODER.serialize(doc)
|
||||
assert_equal doc, BSON::BSON_CODER.deserialize(bson)
|
||||
end
|
||||
|
||||
def test_valid_utf8_string
|
||||
doc = {'doc' => 'aé'}
|
||||
bson = bson = Mongo::BSON_CODER.serialize(doc)
|
||||
assert_equal doc, Mongo::BSON_CODER.deserialize(bson)
|
||||
bson = bson = BSON::BSON_CODER.serialize(doc)
|
||||
assert_equal doc, BSON::BSON_CODER.deserialize(bson)
|
||||
end
|
||||
|
||||
def test_valid_utf8_key
|
||||
doc = {'aé' => 'hello'}
|
||||
bson = bson = Mongo::BSON_CODER.serialize(doc)
|
||||
assert_equal doc, Mongo::BSON_CODER.deserialize(bson)
|
||||
bson = bson = BSON::BSON_CODER.serialize(doc)
|
||||
assert_equal doc, BSON::BSON_CODER.deserialize(bson)
|
||||
end
|
||||
|
||||
def test_document_length
|
||||
doc = {'name' => 'a' * 5 * 1024 * 1024}
|
||||
assert_raise InvalidDocument do
|
||||
assert Mongo::BSON_CODER.serialize(doc)
|
||||
assert BSON::BSON_CODER.serialize(doc)
|
||||
end
|
||||
end
|
||||
|
||||
@ -62,7 +61,7 @@ class BSONTest < Test::Unit::TestCase
|
||||
string = Iconv.conv('iso-8859-1', 'utf-8', 'aé')
|
||||
doc = {'doc' => string}
|
||||
assert_raise InvalidStringEncoding do
|
||||
Mongo::BSON_CODER.serialize(doc)
|
||||
BSON::BSON_CODER.serialize(doc)
|
||||
end
|
||||
end
|
||||
|
||||
@ -70,51 +69,51 @@ class BSONTest < Test::Unit::TestCase
|
||||
key = Iconv.conv('iso-8859-1', 'utf-8', 'aé')
|
||||
doc = {key => 'hello'}
|
||||
assert_raise InvalidStringEncoding do
|
||||
Mongo::BSON_CODER.serialize(doc)
|
||||
BSON::BSON_CODER.serialize(doc)
|
||||
end
|
||||
end
|
||||
else
|
||||
def test_non_utf8_string
|
||||
bson = Mongo::BSON_CODER.serialize({'str' => 'aé'.encode('iso-8859-1')})
|
||||
result = Mongo::BSON_CODER.deserialize(bson)['str']
|
||||
bson = BSON::BSON_CODER.serialize({'str' => 'aé'.encode('iso-8859-1')})
|
||||
result = BSON::BSON_CODER.deserialize(bson)['str']
|
||||
assert_equal 'aé', result
|
||||
assert_equal 'UTF-8', result.encoding.name
|
||||
end
|
||||
|
||||
def test_non_utf8_key
|
||||
bson = Mongo::BSON_CODER.serialize({'aé'.encode('iso-8859-1') => 'hello'})
|
||||
assert_equal 'hello', Mongo::BSON_CODER.deserialize(bson)['aé']
|
||||
bson = BSON::BSON_CODER.serialize({'aé'.encode('iso-8859-1') => 'hello'})
|
||||
assert_equal 'hello', BSON::BSON_CODER.deserialize(bson)['aé']
|
||||
end
|
||||
end
|
||||
|
||||
def test_code
|
||||
doc = {'$where' => Code.new('this.a.b < this.b')}
|
||||
bson = Mongo::BSON_CODER.serialize(doc)
|
||||
assert_equal doc, Mongo::BSON_CODER.deserialize(bson)
|
||||
bson = BSON::BSON_CODER.serialize(doc)
|
||||
assert_equal doc, BSON::BSON_CODER.deserialize(bson)
|
||||
end
|
||||
|
||||
def test_number
|
||||
doc = {'doc' => 41.99}
|
||||
bson = Mongo::BSON_CODER.serialize(doc)
|
||||
assert_equal doc, Mongo::BSON_CODER.deserialize(bson)
|
||||
bson = BSON::BSON_CODER.serialize(doc)
|
||||
assert_equal doc, BSON::BSON_CODER.deserialize(bson)
|
||||
end
|
||||
|
||||
def test_int
|
||||
doc = {'doc' => 42}
|
||||
bson = Mongo::BSON_CODER.serialize(doc)
|
||||
assert_equal doc, Mongo::BSON_CODER.deserialize(bson)
|
||||
bson = BSON::BSON_CODER.serialize(doc)
|
||||
assert_equal doc, BSON::BSON_CODER.deserialize(bson)
|
||||
|
||||
doc = {"doc" => -5600}
|
||||
bson = Mongo::BSON_CODER.serialize(doc)
|
||||
assert_equal doc, Mongo::BSON_CODER.deserialize(bson)
|
||||
bson = BSON::BSON_CODER.serialize(doc)
|
||||
assert_equal doc, BSON::BSON_CODER.deserialize(bson)
|
||||
|
||||
doc = {"doc" => 2147483647}
|
||||
bson = Mongo::BSON_CODER.serialize(doc)
|
||||
assert_equal doc, Mongo::BSON_CODER.deserialize(bson)
|
||||
bson = BSON::BSON_CODER.serialize(doc)
|
||||
assert_equal doc, BSON::BSON_CODER.deserialize(bson)
|
||||
|
||||
doc = {"doc" => -2147483648}
|
||||
bson = Mongo::BSON_CODER.serialize(doc)
|
||||
assert_equal doc, Mongo::BSON_CODER.deserialize(bson)
|
||||
bson = BSON::BSON_CODER.serialize(doc)
|
||||
assert_equal doc, BSON::BSON_CODER.deserialize(bson)
|
||||
end
|
||||
|
||||
def test_ordered_hash
|
||||
@ -123,32 +122,32 @@ class BSONTest < Test::Unit::TestCase
|
||||
doc["a"] = 2
|
||||
doc["c"] = 3
|
||||
doc["d"] = 4
|
||||
bson = Mongo::BSON_CODER.serialize(doc)
|
||||
assert_equal doc, Mongo::BSON_CODER.deserialize(bson)
|
||||
bson = BSON::BSON_CODER.serialize(doc)
|
||||
assert_equal doc, BSON::BSON_CODER.deserialize(bson)
|
||||
end
|
||||
|
||||
def test_object
|
||||
doc = {'doc' => {'age' => 42, 'name' => 'Spongebob', 'shoe_size' => 9.5}}
|
||||
bson = Mongo::BSON_CODER.serialize(doc)
|
||||
assert_equal doc, Mongo::BSON_CODER.deserialize(bson)
|
||||
bson = BSON::BSON_CODER.serialize(doc)
|
||||
assert_equal doc, BSON::BSON_CODER.deserialize(bson)
|
||||
end
|
||||
|
||||
def test_oid
|
||||
doc = {'doc' => ObjectID.new}
|
||||
bson = Mongo::BSON_CODER.serialize(doc)
|
||||
assert_equal doc, Mongo::BSON_CODER.deserialize(bson)
|
||||
bson = BSON::BSON_CODER.serialize(doc)
|
||||
assert_equal doc, BSON::BSON_CODER.deserialize(bson)
|
||||
end
|
||||
|
||||
def test_array
|
||||
doc = {'doc' => [1, 2, 'a', 'b']}
|
||||
bson = Mongo::BSON_CODER.serialize(doc)
|
||||
assert_equal doc, Mongo::BSON_CODER.deserialize(bson)
|
||||
bson = BSON::BSON_CODER.serialize(doc)
|
||||
assert_equal doc, BSON::BSON_CODER.deserialize(bson)
|
||||
end
|
||||
|
||||
def test_regex
|
||||
doc = {'doc' => /foobar/i}
|
||||
bson = Mongo::BSON_CODER.serialize(doc)
|
||||
doc2 = Mongo::BSON_CODER.deserialize(bson)
|
||||
bson = BSON::BSON_CODER.serialize(doc)
|
||||
doc2 = BSON::BSON_CODER.deserialize(bson)
|
||||
assert_equal doc, doc2
|
||||
|
||||
r = doc2['doc']
|
||||
@ -158,9 +157,9 @@ class BSONTest < Test::Unit::TestCase
|
||||
assert_equal 'zywcab', r.extra_options_str
|
||||
|
||||
doc = {'doc' => r}
|
||||
bson_doc = Mongo::BSON_CODER.serialize(doc)
|
||||
bson_doc = BSON::BSON_CODER.serialize(doc)
|
||||
doc2 = nil
|
||||
doc2 = Mongo::BSON_CODER.deserialize(bson_doc)
|
||||
doc2 = BSON::BSON_CODER.deserialize(bson_doc)
|
||||
assert_equal doc, doc2
|
||||
|
||||
r = doc2['doc']
|
||||
@ -170,30 +169,30 @@ class BSONTest < Test::Unit::TestCase
|
||||
|
||||
def test_boolean
|
||||
doc = {'doc' => true}
|
||||
bson = Mongo::BSON_CODER.serialize(doc)
|
||||
assert_equal doc, Mongo::BSON_CODER.deserialize(bson)
|
||||
bson = BSON::BSON_CODER.serialize(doc)
|
||||
assert_equal doc, BSON::BSON_CODER.deserialize(bson)
|
||||
end
|
||||
|
||||
def test_date
|
||||
doc = {'date' => Time.now}
|
||||
bson = Mongo::BSON_CODER.serialize(doc)
|
||||
doc2 = Mongo::BSON_CODER.deserialize(bson)
|
||||
bson = BSON::BSON_CODER.serialize(doc)
|
||||
doc2 = BSON::BSON_CODER.deserialize(bson)
|
||||
# Mongo only stores up to the millisecond
|
||||
assert_in_delta doc['date'], doc2['date'], 0.001
|
||||
end
|
||||
|
||||
def test_date_returns_as_utc
|
||||
doc = {'date' => Time.now}
|
||||
bson = Mongo::BSON_CODER.serialize(doc)
|
||||
doc2 = Mongo::BSON_CODER.deserialize(bson)
|
||||
bson = BSON::BSON_CODER.serialize(doc)
|
||||
doc2 = BSON::BSON_CODER.deserialize(bson)
|
||||
assert doc2['date'].utc?
|
||||
end
|
||||
|
||||
def test_date_before_epoch
|
||||
begin
|
||||
doc = {'date' => Time.utc(1600)}
|
||||
bson = Mongo::BSON_CODER.serialize(doc)
|
||||
doc2 = Mongo::BSON_CODER.deserialize(bson)
|
||||
bson = BSON::BSON_CODER.serialize(doc)
|
||||
doc2 = BSON::BSON_CODER.deserialize(bson)
|
||||
# Mongo only stores up to the millisecond
|
||||
assert_in_delta doc['date'], doc2['date'], 0.001
|
||||
rescue ArgumentError
|
||||
@ -208,7 +207,7 @@ class BSONTest < Test::Unit::TestCase
|
||||
[DateTime.now, Date.today, Zone].each do |invalid_date|
|
||||
doc = {:date => invalid_date}
|
||||
begin
|
||||
bson = Mongo::BSON_CODER.serialize(doc)
|
||||
bson = BSON::BSON_CODER.serialize(doc)
|
||||
rescue => e
|
||||
ensure
|
||||
if !invalid_date.is_a? Time
|
||||
@ -223,16 +222,16 @@ class BSONTest < Test::Unit::TestCase
|
||||
oid = ObjectID.new
|
||||
doc = {}
|
||||
doc['dbref'] = DBRef.new('namespace', oid)
|
||||
bson = Mongo::BSON_CODER.serialize(doc)
|
||||
doc2 = Mongo::BSON_CODER.deserialize(bson)
|
||||
bson = BSON::BSON_CODER.serialize(doc)
|
||||
doc2 = BSON::BSON_CODER.deserialize(bson)
|
||||
assert_equal 'namespace', doc2['dbref'].namespace
|
||||
assert_equal oid, doc2['dbref'].object_id
|
||||
end
|
||||
|
||||
def test_symbol
|
||||
doc = {'sym' => :foo}
|
||||
bson = Mongo::BSON_CODER.serialize(doc)
|
||||
doc2 = Mongo::BSON_CODER.deserialize(bson)
|
||||
bson = BSON::BSON_CODER.serialize(doc)
|
||||
doc2 = BSON::BSON_CODER.deserialize(bson)
|
||||
assert_equal :foo, doc2['sym']
|
||||
end
|
||||
|
||||
@ -241,8 +240,8 @@ class BSONTest < Test::Unit::TestCase
|
||||
'binstring'.each_byte { |b| bin.put(b) }
|
||||
|
||||
doc = {'bin' => bin}
|
||||
bson = Mongo::BSON_CODER.serialize(doc)
|
||||
doc2 = Mongo::BSON_CODER.deserialize(bson)
|
||||
bson = BSON::BSON_CODER.serialize(doc)
|
||||
doc2 = BSON::BSON_CODER.deserialize(bson)
|
||||
bin2 = doc2['bin']
|
||||
assert_kind_of Binary, bin2
|
||||
assert_equal 'binstring', bin2.to_s
|
||||
@ -252,8 +251,8 @@ class BSONTest < Test::Unit::TestCase
|
||||
def test_binary_with_string
|
||||
b = Binary.new('somebinarystring')
|
||||
doc = {'bin' => b}
|
||||
bson = Mongo::BSON_CODER.serialize(doc)
|
||||
doc2 = Mongo::BSON_CODER.deserialize(bson)
|
||||
bson = BSON::BSON_CODER.serialize(doc)
|
||||
doc2 = BSON::BSON_CODER.deserialize(bson)
|
||||
bin2 = doc2['bin']
|
||||
assert_kind_of Binary, bin2
|
||||
assert_equal 'somebinarystring', bin2.to_s
|
||||
@ -264,8 +263,8 @@ class BSONTest < Test::Unit::TestCase
|
||||
bin = Binary.new([1, 2, 3, 4, 5], Binary::SUBTYPE_USER_DEFINED)
|
||||
|
||||
doc = {'bin' => bin}
|
||||
bson = Mongo::BSON_CODER.serialize(doc)
|
||||
doc2 = Mongo::BSON_CODER.deserialize(bson)
|
||||
bson = BSON::BSON_CODER.serialize(doc)
|
||||
doc2 = BSON::BSON_CODER.deserialize(bson)
|
||||
bin2 = doc2['bin']
|
||||
assert_kind_of Binary, bin2
|
||||
assert_equal [1, 2, 3, 4, 5], bin2.to_a
|
||||
@ -277,8 +276,8 @@ class BSONTest < Test::Unit::TestCase
|
||||
5.times { |i| bb.put(i + 1) }
|
||||
|
||||
doc = {'bin' => bb}
|
||||
bson = Mongo::BSON_CODER.serialize(doc)
|
||||
doc2 = Mongo::BSON_CODER.deserialize(bson)
|
||||
bson = BSON::BSON_CODER.serialize(doc)
|
||||
doc2 = BSON::BSON_CODER.deserialize(bson)
|
||||
bin2 = doc2['bin']
|
||||
assert_kind_of Binary, bin2
|
||||
assert_equal [1, 2, 3, 4, 5], bin2.to_a
|
||||
@ -289,24 +288,24 @@ class BSONTest < Test::Unit::TestCase
|
||||
val = OrderedHash.new
|
||||
val['not_id'] = 1
|
||||
val['_id'] = 2
|
||||
roundtrip = Mongo::BSON_CODER.deserialize(Mongo::BSON_CODER.serialize(val, false, true).to_a)
|
||||
roundtrip = BSON::BSON_CODER.deserialize(BSON::BSON_CODER.serialize(val, false, true).to_a)
|
||||
assert_kind_of OrderedHash, roundtrip
|
||||
assert_equal '_id', roundtrip.keys.first
|
||||
|
||||
val = {'a' => 'foo', 'b' => 'bar', :_id => 42, 'z' => 'hello'}
|
||||
roundtrip = Mongo::BSON_CODER.deserialize(Mongo::BSON_CODER.serialize(val, false, true).to_a)
|
||||
roundtrip = BSON::BSON_CODER.deserialize(BSON::BSON_CODER.serialize(val, false, true).to_a)
|
||||
assert_kind_of OrderedHash, roundtrip
|
||||
assert_equal '_id', roundtrip.keys.first
|
||||
end
|
||||
|
||||
def test_nil_id
|
||||
doc = {"_id" => nil}
|
||||
assert_equal doc, Mongo::BSON_CODER.deserialize(bson = Mongo::BSON_CODER.serialize(doc, false, true).to_a)
|
||||
assert_equal doc, BSON::BSON_CODER.deserialize(bson = BSON::BSON_CODER.serialize(doc, false, true).to_a)
|
||||
end
|
||||
|
||||
def test_timestamp
|
||||
val = {"test" => [4, 20]}
|
||||
assert_equal val, Mongo::BSON_CODER.deserialize([0x13, 0x00, 0x00, 0x00,
|
||||
assert_equal val, BSON::BSON_CODER.deserialize([0x13, 0x00, 0x00, 0x00,
|
||||
0x11, 0x74, 0x65, 0x73,
|
||||
0x74, 0x00, 0x04, 0x00,
|
||||
0x00, 0x00, 0x14, 0x00,
|
||||
@ -316,29 +315,29 @@ class BSONTest < Test::Unit::TestCase
|
||||
def test_overflow
|
||||
doc = {"x" => 2**75}
|
||||
assert_raise RangeError do
|
||||
bson = Mongo::BSON_CODER.serialize(doc)
|
||||
bson = BSON::BSON_CODER.serialize(doc)
|
||||
end
|
||||
|
||||
doc = {"x" => 9223372036854775}
|
||||
assert_equal doc, Mongo::BSON_CODER.deserialize(Mongo::BSON_CODER.serialize(doc).to_a)
|
||||
assert_equal doc, BSON::BSON_CODER.deserialize(BSON::BSON_CODER.serialize(doc).to_a)
|
||||
|
||||
doc = {"x" => 9223372036854775807}
|
||||
assert_equal doc, Mongo::BSON_CODER.deserialize(Mongo::BSON_CODER.serialize(doc).to_a)
|
||||
assert_equal doc, BSON::BSON_CODER.deserialize(BSON::BSON_CODER.serialize(doc).to_a)
|
||||
|
||||
doc["x"] = doc["x"] + 1
|
||||
assert_raise RangeError do
|
||||
bson = Mongo::BSON_CODER.serialize(doc)
|
||||
bson = BSON::BSON_CODER.serialize(doc)
|
||||
end
|
||||
|
||||
doc = {"x" => -9223372036854775}
|
||||
assert_equal doc, Mongo::BSON_CODER.deserialize(Mongo::BSON_CODER.serialize(doc).to_a)
|
||||
assert_equal doc, BSON::BSON_CODER.deserialize(BSON::BSON_CODER.serialize(doc).to_a)
|
||||
|
||||
doc = {"x" => -9223372036854775808}
|
||||
assert_equal doc, Mongo::BSON_CODER.deserialize(Mongo::BSON_CODER.serialize(doc).to_a)
|
||||
assert_equal doc, BSON::BSON_CODER.deserialize(BSON::BSON_CODER.serialize(doc).to_a)
|
||||
|
||||
doc["x"] = doc["x"] - 1
|
||||
assert_raise RangeError do
|
||||
bson = Mongo::BSON_CODER.serialize(doc)
|
||||
bson = BSON::BSON_CODER.serialize(doc)
|
||||
end
|
||||
end
|
||||
|
||||
@ -346,7 +345,7 @@ class BSONTest < Test::Unit::TestCase
|
||||
[BigDecimal.new("1.0"), Complex(0, 1), Rational(2, 3)].each do |type|
|
||||
doc = {"x" => type}
|
||||
begin
|
||||
Mongo::BSON_CODER.serialize(doc)
|
||||
BSON::BSON_CODER.serialize(doc)
|
||||
rescue => e
|
||||
ensure
|
||||
assert_equal InvalidDocument, e.class
|
||||
@ -360,12 +359,12 @@ class BSONTest < Test::Unit::TestCase
|
||||
val['not_id'] = 1
|
||||
val['_id'] = 2
|
||||
assert val.keys.include?('_id')
|
||||
Mongo::BSON_CODER.serialize(val)
|
||||
BSON::BSON_CODER.serialize(val)
|
||||
assert val.keys.include?('_id')
|
||||
|
||||
val = {'a' => 'foo', 'b' => 'bar', :_id => 42, 'z' => 'hello'}
|
||||
assert val.keys.include?(:_id)
|
||||
Mongo::BSON_CODER.serialize(val)
|
||||
BSON::BSON_CODER.serialize(val)
|
||||
assert val.keys.include?(:_id)
|
||||
end
|
||||
|
||||
@ -380,50 +379,50 @@ class BSONTest < Test::Unit::TestCase
|
||||
dup = {"_id" => "foo", :_id => "foo"}
|
||||
one = {"_id" => "foo"}
|
||||
|
||||
assert_equal Mongo::BSON_CODER.serialize(one).to_a, Mongo::BSON_CODER.serialize(dup).to_a
|
||||
assert_equal BSON::BSON_CODER.serialize(one).to_a, BSON::BSON_CODER.serialize(dup).to_a
|
||||
end
|
||||
|
||||
def test_no_duplicate_id_when_moving_id
|
||||
dup = {"_id" => "foo", :_id => "foo"}
|
||||
one = {:_id => "foo"}
|
||||
|
||||
assert_equal Mongo::BSON_CODER.serialize(one, false, true).to_s, Mongo::BSON_CODER.serialize(dup, false, true).to_s
|
||||
assert_equal BSON::BSON_CODER.serialize(one, false, true).to_s, BSON::BSON_CODER.serialize(dup, false, true).to_s
|
||||
end
|
||||
|
||||
def test_null_character
|
||||
doc = {"a" => "\x00"}
|
||||
|
||||
assert_equal doc, Mongo::BSON_CODER.deserialize(Mongo::BSON_CODER.serialize(doc).to_a)
|
||||
assert_equal doc, BSON::BSON_CODER.deserialize(BSON::BSON_CODER.serialize(doc).to_a)
|
||||
|
||||
assert_raise InvalidDocument do
|
||||
Mongo::BSON_CODER.serialize({"\x00" => "a"})
|
||||
BSON::BSON_CODER.serialize({"\x00" => "a"})
|
||||
end
|
||||
|
||||
assert_raise InvalidDocument do
|
||||
Mongo::BSON_CODER.serialize({"a" => (Regexp.compile "ab\x00c")})
|
||||
BSON::BSON_CODER.serialize({"a" => (Regexp.compile "ab\x00c")})
|
||||
end
|
||||
end
|
||||
|
||||
def test_max_key
|
||||
doc = {"a" => MaxKey.new}
|
||||
|
||||
assert_equal doc, Mongo::BSON_CODER.deserialize(Mongo::BSON_CODER.serialize(doc).to_a)
|
||||
assert_equal doc, BSON::BSON_CODER.deserialize(BSON::BSON_CODER.serialize(doc).to_a)
|
||||
end
|
||||
|
||||
def test_min_key
|
||||
doc = {"a" => MinKey.new}
|
||||
|
||||
assert_equal doc, Mongo::BSON_CODER.deserialize(Mongo::BSON_CODER.serialize(doc).to_a)
|
||||
assert_equal doc, BSON::BSON_CODER.deserialize(BSON::BSON_CODER.serialize(doc).to_a)
|
||||
end
|
||||
|
||||
def test_invalid_object
|
||||
o = Object.new
|
||||
assert_raise InvalidDocument do
|
||||
Mongo::BSON_CODER.serialize({:foo => o})
|
||||
BSON::BSON_CODER.serialize({:foo => o})
|
||||
end
|
||||
|
||||
assert_raise InvalidDocument do
|
||||
Mongo::BSON_CODER.serialize({:foo => Date.today})
|
||||
BSON::BSON_CODER.serialize({:foo => Date.today})
|
||||
end
|
||||
end
|
||||
|
||||
@ -436,10 +435,10 @@ class BSONTest < Test::Unit::TestCase
|
||||
|
||||
assert_equal ")\000\000\000\020_id\000\001\000\000\000\002text" +
|
||||
"\000\004\000\000\000abc\000\002key\000\004\000\000\000abc\000\000",
|
||||
Mongo::BSON_CODER.serialize(a, false, true).to_s
|
||||
BSON::BSON_CODER.serialize(a, false, true).to_s
|
||||
assert_equal ")\000\000\000\002text\000\004\000\000\000abc\000\002key" +
|
||||
"\000\004\000\000\000abc\000\020_id\000\001\000\000\000\000",
|
||||
Mongo::BSON_CODER.serialize(a, false, false).to_s
|
||||
BSON::BSON_CODER.serialize(a, false, false).to_s
|
||||
end
|
||||
|
||||
def test_move_id_with_nested_doc
|
||||
@ -453,11 +452,11 @@ class BSONTest < Test::Unit::TestCase
|
||||
assert_equal ">\000\000\000\020_id\000\003\000\000\000\002text" +
|
||||
"\000\004\000\000\000abc\000\003hash\000\034\000\000" +
|
||||
"\000\002text\000\004\000\000\000abc\000\020_id\000\002\000\000\000\000\000",
|
||||
Mongo::BSON_CODER.serialize(c, false, true).to_s
|
||||
BSON::BSON_CODER.serialize(c, false, true).to_s
|
||||
assert_equal ">\000\000\000\002text\000\004\000\000\000abc\000\003hash" +
|
||||
"\000\034\000\000\000\002text\000\004\000\000\000abc\000\020_id" +
|
||||
"\000\002\000\000\000\000\020_id\000\003\000\000\000\000",
|
||||
Mongo::BSON_CODER.serialize(c, false, false).to_s
|
||||
BSON::BSON_CODER.serialize(c, false, false).to_s
|
||||
end
|
||||
|
||||
if defined?(HashWithIndifferentAccess)
|
||||
@ -467,12 +466,12 @@ class BSONTest < Test::Unit::TestCase
|
||||
embedded['_id'] = ObjectID.new
|
||||
doc['_id'] = ObjectID.new
|
||||
doc['embedded'] = [embedded]
|
||||
Mongo::BSON_CODER.serialize(doc, false, true).to_a
|
||||
BSON::BSON_CODER.serialize(doc, false, true).to_a
|
||||
assert doc.has_key?("_id")
|
||||
assert doc['embedded'][0].has_key?("_id")
|
||||
|
||||
doc['_id'] = ObjectID.new
|
||||
Mongo::BSON_CODER.serialize(doc, false, true).to_a
|
||||
BSON::BSON_CODER.serialize(doc, false, true).to_a
|
||||
assert doc.has_key?("_id")
|
||||
end
|
||||
end
|
||||
|
@ -1,6 +1,7 @@
|
||||
require 'test/test_helper'
|
||||
|
||||
class ByteBufferTest < Test::Unit::TestCase
|
||||
include BSON
|
||||
|
||||
def setup
|
||||
@buf = ByteBuffer.new
|
||||
|
@ -3,6 +3,7 @@ require 'test/test_helper'
|
||||
class ObjectIDTest < Test::Unit::TestCase
|
||||
|
||||
include Mongo
|
||||
include BSON
|
||||
|
||||
def setup
|
||||
@o = ObjectID.new
|
||||
|
@ -19,11 +19,12 @@ MSG
|
||||
exit
|
||||
end
|
||||
|
||||
require 'mongo_ext/cbson' if ENV['C_EXT']
|
||||
require 'bson_ext/cbson' if ENV['C_EXT']
|
||||
|
||||
# NOTE: most tests assume that MongoDB is running.
|
||||
class Test::Unit::TestCase
|
||||
include Mongo
|
||||
include BSON
|
||||
|
||||
# Generic code for rescuing connection failures and retrying operations.
|
||||
# This could be combined with some timeout functionality.
|
||||
|
@ -41,9 +41,9 @@ class CollectionTest < Test::Unit::TestCase
|
||||
@conn = Connection.new('localhost', 27017, :logger => @logger, :connect => false)
|
||||
@db = @conn['testing']
|
||||
@coll = @db.collection('books')
|
||||
data = Mongo::Binary.new(("BINARY " * 1000).unpack("c*"))
|
||||
data = BSON::Binary.new(("BINARY " * 1000).unpack("c*"))
|
||||
@conn.expects(:send_message).with do |op, msg, log|
|
||||
op == 2002 && log.include?("Mongo::Binary")
|
||||
op == 2002 && log.include?("BSON::Binary")
|
||||
end
|
||||
@coll.insert({:data => data})
|
||||
end
|
||||
|
@ -53,7 +53,7 @@ class DBTest < Test::Unit::TestCase
|
||||
|
||||
should "raise an error if logging out fails" do
|
||||
@db.expects(:command).returns({})
|
||||
assert_raise MongoDBError do
|
||||
assert_raise Mongo::MongoDBError do
|
||||
@db.logout
|
||||
end
|
||||
end
|
||||
@ -61,35 +61,35 @@ class DBTest < Test::Unit::TestCase
|
||||
should "raise an error if collection creation fails" do
|
||||
@db.expects(:collection_names).returns([])
|
||||
@db.expects(:command).returns({})
|
||||
assert_raise MongoDBError do
|
||||
assert_raise Mongo::MongoDBError do
|
||||
@db.create_collection("foo")
|
||||
end
|
||||
end
|
||||
|
||||
should "raise an error if getlasterror fails" do
|
||||
@db.expects(:command).returns({})
|
||||
assert_raise MongoDBError do
|
||||
assert_raise Mongo::MongoDBError do
|
||||
@db.error
|
||||
end
|
||||
end
|
||||
|
||||
should "raise an error if rename fails" do
|
||||
@db.expects(:command).returns({})
|
||||
assert_raise MongoDBError do
|
||||
assert_raise Mongo::MongoDBError do
|
||||
@db.rename_collection("foo", "bar")
|
||||
end
|
||||
end
|
||||
|
||||
should "raise an error if drop_index fails" do
|
||||
@db.expects(:command).returns({})
|
||||
assert_raise MongoDBError do
|
||||
assert_raise Mongo::MongoDBError do
|
||||
@db.drop_index("foo", "bar")
|
||||
end
|
||||
end
|
||||
|
||||
should "raise an error if set_profiling_level fails" do
|
||||
@db.expects(:command).returns({})
|
||||
assert_raise MongoDBError do
|
||||
assert_raise Mongo::MongoDBError do
|
||||
@db.profiling_level = :slow_only
|
||||
end
|
||||
end
|
||||
|
Loading…
Reference in New Issue
Block a user