RUBY-158 deprecate BSON::ObjectID for BSON::ObjectId
commit c7c309b00d
parent 4bebf72fd3
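Before the diff itself, a quick sketch of the user-facing change (loading the types via `require 'mongo'` is an assumption; the warning text is taken verbatim from the deprecation hunk below):

    require 'mongo'                     # assumed entry point; loads the BSON types

    id = BSON::ObjectId.new             # new, preferred constant
    id.inspect                          # => "BSON::ObjectId('...24 hex chars...')"
    BSON::ObjectId.legal?(id.to_s)      # => true

    legacy = BSON::ObjectID.new         # old constant still works, but #initialize now warns:
    # "BSON::ObjectID is deprecated. Please use BSON::ObjectId instead."

    # The C extension below serializes either class as BSON type 0x07.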
@@ -75,6 +75,7 @@
 static VALUE Binary;
 static VALUE Time;
 static VALUE ObjectID;
+static VALUE ObjectId;
 static VALUE DBRef;
 static VALUE Code;
 static VALUE MinKey;

@@ -379,7 +380,7 @@ static int write_element(VALUE key, VALUE value, VALUE extra, int allow_id) {
             SAFE_WRITE(buffer, RSTRING_PTR(string_data), length);
             break;
         }
-        if (strcmp(cls, "BSON::ObjectID") == 0) {
+        if ((strcmp(cls, "BSON::ObjectId") == 0) || (strcmp(cls, "BSON::ObjectID") == 0)) {
             VALUE as_array = rb_funcall(value, rb_intern("to_a"), 0);
             int i;
             write_name_and_type(buffer, key, 0x07);

@@ -691,7 +692,7 @@ static VALUE get_value(const char* buffer, int* position, int type) {
        {
            VALUE str = rb_str_new(buffer + *position, 12);
            VALUE oid = rb_funcall(str, rb_intern("unpack"), 1, rb_str_new2("C*"));
-           value = rb_class_new_instance(1, &oid, ObjectID);
+           value = rb_class_new_instance(1, &oid, ObjectId);
            *position += 12;
            break;
        }

@@ -756,7 +757,7 @@ static VALUE get_value(const char* buffer, int* position, int type) {

            str = rb_str_new(buffer + *position, 12);
            oid = rb_funcall(str, rb_intern("unpack"), 1, rb_str_new2("C*"));
-           id = rb_class_new_instance(1, &oid, ObjectID);
+           id = rb_class_new_instance(1, &oid, ObjectId);
            *position += 12;

            argv[0] = collection;

@@ -916,6 +917,8 @@ void Init_cbson() {
     Binary = rb_const_get(bson, rb_intern("Binary"));
     rb_require("bson/types/objectid");
     ObjectID = rb_const_get(bson, rb_intern("ObjectID"));
+    rb_require("bson/types/object_id");
+    ObjectId = rb_const_get(bson, rb_intern("ObjectId"));
     rb_require("bson/types/dbref");
     DBRef = rb_const_get(bson, rb_intern("DBRef"));
     rb_require("bson/types/code");
@@ -60,6 +60,7 @@ require 'bson/types/binary'
 require 'bson/types/code'
 require 'bson/types/dbref'
 require 'bson/types/objectid'
+require 'bson/types/object_id'
 require 'bson/types/min_max_keys'

 require 'base64'

@@ -360,7 +360,7 @@ module BSON
     end

     def deserialize_oid_data(buf)
-      ObjectID.new(buf.get(12))
+      ObjectId.new(buf.get(12))
     end

     def deserialize_dbref_data(buf)

@@ -562,6 +562,8 @@ module BSON
         REGEX
       when ObjectID
         OID
+      when ObjectId
+        OID
       when DBRef
         REF
       when true, false
@@ -32,6 +32,9 @@ module BSON
   # Raised when attempting to initialize an invalid ObjectID.
   class InvalidObjectID < BSONError; end

+  # Raised when attempting to initialize an invalid ObjectId.
+  class InvalidObjectId < BSONError; end
+
   # Raised when trying to insert a document that exceeds the 4MB limit or
   # when the document contains objects that can't be serialized as BSON.
   class InvalidDocument < BSONError; end
@@ -0,0 +1,186 @@
+# encoding: UTF-8
+
+# --
+# Copyright (C) 2008-2010 10gen Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ++
+
+require 'thread'
+require 'socket'
+require 'digest/md5'
+
+module BSON
+
+  def BSON::ObjectId(s)
+    ObjectId.from_string(s)
+  end
+
+  # Generates MongoDB object ids.
+  #
+  # @core objectids
+  class ObjectId
+    @@lock = Mutex.new
+    @@index = 0
+
+    # Create a new object id. If no parameter is given, an id corresponding
+    # to the ObjectId BSON data type will be created. This is a 12-byte value
+    # consisting of a 4-byte timestamp, a 3-byte machine id, a 2-byte process id,
+    # and a 3-byte counter.
+    #
+    # @param [Array] data should be an array of bytes. If you want
+    #   to generate a standard MongoDB object id, leave this argument blank.
+    def initialize(data=nil)
+      @data = data || generate
+    end
+
+    # Determine if the supplied string is legal. Legal strings will
+    # consist of 24 hexadecimal characters.
+    #
+    # @param [String] str
+    #
+    # @return [Boolean]
+    def self.legal?(str)
+      len = 24
+      str =~ /([0-9a-f]+)/i
+      match = $1
+      str && str.length == len && match == str
+    end
+
+    # Create an object id from the given time. This is useful for doing range
+    # queries; it works because MongoDB's object ids begin
+    # with a timestamp.
+    #
+    # @param [Time] time a utc time to encode as an object id.
+    #
+    # @return [Mongo::ObjectId]
+    #
+    # @example Return all documents created before Jan 1, 2010.
+    #   time = Time.utc(2010, 1, 1)
+    #   time_id = ObjectId.from_time(time)
+    #   collection.find({'_id' => {'$lt' => time_id}})
+    def self.from_time(time)
+      self.new([time.to_i,0,0].pack("NNN").unpack("C12"))
+    end
+
+    # Adds a primary key to the given document if needed.
+    #
+    # @param [Hash] doc a document requiring an _id.
+    #
+    # @return [Mongo::ObjectId, Object] returns a newly-created or
+    #   current _id for the given document.
+    def self.create_pk(doc)
+      doc.has_key?(:_id) || doc.has_key?('_id') ? doc : doc.merge!(:_id => self.new)
+    end
+
+    # Check equality of this object id with another.
+    #
+    # @param [Mongo::ObjectId] object_id
+    def eql?(object_id)
+      @data == object_id.instance_variable_get("@data")
+    end
+    alias_method :==, :eql?
+
+    # Get a unique hashcode for this object.
+    # This is required since we've defined an #eql? method.
+    #
+    # @return [Integer]
+    def hash
+      @data.hash
+    end
+
+    # Get an array representation of the object id.
+    #
+    # @return [Array]
+    def to_a
+      @data.dup
+    end
+
+    # Given a string representation of an ObjectId, return a new ObjectId
+    # with that value.
+    #
+    # @param [String] str
+    #
+    # @return [Mongo::ObjectId]
+    def self.from_string(str)
+      raise InvalidObjectId, "illegal ObjectId format" unless legal?(str)
+      data = []
+      12.times do |i|
+        data[i] = str[i * 2, 2].to_i(16)
+      end
+      self.new(data)
+    end
+
+    # Get a string representation of this object id.
+    #
+    # @return [String]
+    def to_s
+      str = ' ' * 24
+      12.times do |i|
+        str[i * 2, 2] = '%02x' % @data[i]
+      end
+      str
+    end
+
+    def inspect
+      "BSON::ObjectId('#{to_s}')"
+    end
+
+    # Convert to MongoDB extended JSON format. Since JSON includes type information,
+    # but lacks an ObjectId type, this JSON format encodes the type using an $oid key.
+    #
+    # @return [String] the object id represented as MongoDB extended JSON.
+    def to_json(*a)
+      "{\"$oid\": \"#{to_s}\"}"
+    end
+
+    # Return the UTC time at which this ObjectId was generated. This may
+    # be used in lieu of a created_at timestamp since this information
+    # is always encoded in the object id.
+    #
+    # @return [Time] the time at which this object was created.
+    def generation_time
+      Time.at(@data.pack("C4").unpack("N")[0]).utc
+    end
+
+    private
+
+    # We need to define this method only if CBson isn't loaded.
+    unless defined? CBson
+      def generate
+        oid = ''
+
+        # 4 bytes current time
+        time = Time.new.to_i
+        oid += [time].pack("N")
+
+        # 3 bytes machine
+        oid += Digest::MD5.digest(Socket.gethostname)[0, 3]
+
+        # 2 bytes pid
+        oid += [Process.pid % 0xFFFF].pack("n")
+
+        # 3 bytes inc
+        oid += [get_inc].pack("N")[1, 3]
+
+        oid.unpack("C12")
+      end
+    end
+
+    def get_inc
+      @@lock.synchronize do
+        @@index = (@@index + 1) % 0xFFFFFF
+      end
+    end
+  end
+end
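For readers skimming the new class: `generate` above lays the id out as 4 bytes of epoch seconds, 3 bytes of hostname hash, 2 bytes of pid, and 3 bytes of counter. A hedged sketch of pulling those fields back out of `ObjectId#to_a` (the `decode_object_id` helper is illustrative, not part of the driver API):

    # Decode the 12-byte layout produced by the pure-Ruby generator above.
    def decode_object_id(oid)
      bytes = oid.to_a                                                    # 12 integers, big-endian fields
      {
        :time    => Time.at(bytes[0, 4].pack("C4").unpack("N")[0]).utc,   # same math as #generation_time
        :machine => bytes[4, 3],                                          # first 3 bytes of MD5(hostname)
        :pid     => (bytes[7] << 8) | bytes[8],                           # Process.pid % 0xFFFF
        :counter => (bytes[9] << 16) | (bytes[10] << 8) | bytes[11]
      }
    end

    id = BSON::ObjectId.new
    decode_object_id(id)[:time]   # => within a couple of seconds of Time.now.utc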
@@ -33,6 +33,8 @@ module BSON
     @@lock = Mutex.new
     @@index = 0

+    # @deprecated
+    #
     # Create a new object id. If no parameter is given, an id corresponding
     # to the ObjectID BSON data type will be created. This is a 12-byte value
     # consisting of a 4-byte timestamp, a 3-byte machine id, a 2-byte process id,

@@ -41,6 +43,7 @@ module BSON
     # @param [Array] data should be an array of bytes. If you want
     #   to generate a standard MongoDB object id, leave this argument blank.
     def initialize(data=nil)
+      warn "BSON::ObjectID is deprecated. Please use BSON::ObjectId instead."
       @data = data || generate
     end

@@ -58,7 +58,7 @@ module Mongo

       @db, @name = db, name
       @connection = @db.connection
-      @pk_factory = pk_factory || BSON::ObjectID
+      @pk_factory = pk_factory || BSON::ObjectId
       @hint = nil
     end

@@ -145,15 +145,19 @@ module Mongo
       hint = opts.delete(:hint)
       snapshot = opts.delete(:snapshot)
       batch_size = opts.delete(:batch_size)

-      timeout = block_given? ? opts.fetch(:timeout, true) : true
+      if opts[:timeout] == false && !block_given?
+        raise ArgumentError, "Timeout can be set to false only when #find is invoked with a block."
+      else
+        timeout = opts.delete(:timeout) || false
+      end

       if hint
         hint = normalize_hint_fields(hint)
       else
         hint = @hint # assumed to be normalized already
       end

       raise RuntimeError, "Unknown options [#{opts.inspect}]" unless opts.empty?

       cursor = Cursor.new(self, :selector => selector, :fields => fields, :skip => skip, :limit => limit,
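A sketch of what the new guard above means for callers (collection and selector names are illustrative; `#find` yielding its cursor to the block is the behaviour the error message implies):

    people = db.collection('people')    # 'db' assumed to be an open Mongo::DB

    # Allowed: with a block the driver controls the cursor's lifetime,
    # so a no-timeout server cursor cannot be leaked.
    people.find({'active' => true}, :timeout => false) do |cursor|
      cursor.each { |doc| puts doc['_id'] }
    end

    # Raises ArgumentError: "Timeout can be set to false only when #find is
    # invoked with a block."
    people.find({'active' => true}, :timeout => false)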
@@ -173,9 +177,9 @@ module Mongo
     # @return [OrderedHash, Nil]
     #   a single document or nil if no result is found.
     #
-    # @param [Hash, ObjectID, Nil] spec_or_object_id a hash specifying elements
+    # @param [Hash, ObjectId, Nil] spec_or_object_id a hash specifying elements
     #   which must be present for a document to be included in the result set or an
-    #   instance of ObjectID to be used as the value for an _id query.
+    #   instance of ObjectId to be used as the value for an _id query.
     #   If nil, an empty selector, {}, will be used.
     #
     # @option opts [Hash]

@@ -187,12 +191,12 @@ module Mongo
       spec = case spec_or_object_id
              when nil
                {}
-             when BSON::ObjectID
+             when BSON::ObjectId
                {:_id => spec_or_object_id}
              when Hash
                spec_or_object_id
              else
-               raise TypeError, "spec_or_object_id must be an instance of ObjectID or Hash, or nil"
+               raise TypeError, "spec_or_object_id must be an instance of ObjectId or Hash, or nil"
              end
       find(spec, opts.merge(:limit => -1)).next_document
     end

@@ -204,7 +208,7 @@ module Mongo
     # then an update (upsert) operation will be performed, and any existing
     # document with that _id is overwritten. Otherwise an insert operation is performed.
     #
-    # @return [ObjectID] the _id of the saved document.
+    # @return [ObjectId] the _id of the saved document.
     #
     # @option opts [Boolean, Hash] :safe (+false+)
     #   run the operation in safe mode, which run a getlasterror command on the

@@ -230,7 +234,7 @@ module Mongo
     # @param [Hash, Array] doc_or_docs
     #   a document (as a hash) or array of documents to be inserted.
     #
-    # @return [ObjectID, Array]
+    # @return [ObjectId, Array]
     #   the _id of the inserted document or a list of _ids of all inserted documents.
     #   Note: the object may have been modified by the database's PK factory, if it has one.
     #
@@ -60,7 +60,7 @@ module Mongo
     # @option options [Boolean] :strict (False) If true, collections must exist to be accessed and must
     #   not exist to be created. See DB#collection and DB#create_collection.
     #
-    # @option options [Object, #create_pk(doc)] :pk (Mongo::ObjectID) A primary key factory object,
+    # @option options [Object, #create_pk(doc)] :pk (Mongo::ObjectId) A primary key factory object,
     #   which should take a hash and return a hash which merges the original hash with any primary key
     #   fields the factory wishes to inject. (NOTE: if the object already has a primary key,
     #   the factory should not inject a new key).
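The `:pk` factory described above only needs to respond to `create_pk(doc)`; the shape below mirrors the `TestPKFactory` that appears later in this diff. How the factory is passed in (a `:pk` option accepted when opening the database) is assumed from these docs rather than shown in this hunk:

    # A primary-key factory: given the document hash, inject an _id if missing.
    class MyPKFactory
      def create_pk(doc)
        doc['_id'] ||= BSON::ObjectId.new
        doc
      end
    end

    # Assumed wiring, based on the option docs above.
    db = Mongo::Connection.new('localhost', 27017).db('driver-test', :pk => MyPKFactory.new)
    db.collection('things').insert('name' => 'widget')   # factory supplies the _id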
@@ -51,7 +51,7 @@ module Mongo
     #
     # @option opts [String] :filename (nil) a name for the file.
     # @option opts [Hash] :metadata ({}) any additional data to store with the file.
-    # @option opts [ObjectID] :_id (ObjectID) a unique id for
+    # @option opts [ObjectId] :_id (ObjectId) a unique id for
     #   the file to be use in lieu of an automatically generated one.
     # @option opts [String] :content_type ('binary/octet-stream') If no content type is specified,
     #   the content type will may be inferred from the filename extension if the mime-types gem can be

@@ -60,7 +60,7 @@ module Mongo
     # @option opts [Boolean] :safe (false) When safe mode is enabled, the chunks sent to the server
     #   will be validated using an md5 hash. If validation fails, an exception will be raised.
     #
-    # @return [Mongo::ObjectID] the file's id.
+    # @return [Mongo::ObjectId] the file's id.
     def put(data, opts={})
       filename = opts[:filename]
       opts.merge!(default_grid_io_opts)

@@ -42,7 +42,7 @@ module Mongo
     #
     #   # Check for existence by _id
     #   @grid = Grid.new(@db)
-    #   @grid.exist?(:_id => BSON::ObjectID.from_string('4bddcd24beffd95a7db9b8c8'))
+    #   @grid.exist?(:_id => BSON::ObjectId.from_string('4bddcd24beffd95a7db9b8c8'))
     #
     #   # Check for existence by an arbitrary attribute.
     #   @grid = Grid.new(@db)

@@ -55,7 +55,7 @@ module Mongo
     # @param [Hash] opts see GridIO#new
     #
     # @option opts [Hash] :metadata ({}) any additional data to store with the file.
-    # @option opts [ObjectID] :_id (ObjectID) a unique id for
+    # @option opts [ObjectId] :_id (ObjectId) a unique id for
     #   the file to be use in lieu of an automatically generated one.
     # @option opts [String] :content_type ('binary/octet-stream') If no content type is specified,
     #   the content type will may be inferred from the filename extension if the mime-types gem can be

@@ -47,7 +47,7 @@ module Mongo
     # @option opts [String] :fs_name the file system prefix.
     # @option opts [Integer] (262144) :chunk_size size of file chunks in bytes.
     # @option opts [Hash] :metadata ({}) any additional data to store with the file.
-    # @option opts [ObjectID] :_id (ObjectID) a unique id for
+    # @option opts [ObjectId] :_id (ObjectId) a unique id for
     #   the file to be use in lieu of an automatically generated one.
     # @option opts [String] :content_type ('binary/octet-stream') If no content type is specified,
     #   the content type will may be inferred from the filename extension if the mime-types gem can be

@@ -180,7 +180,7 @@ module Mongo
     # This method will be invoked automatically when
     # on GridIO#open is passed a block. Otherwise, it must be called manually.
     #
-    # @return [BSON::ObjectID]
+    # @return [BSON::ObjectId]
     def close
       if @mode[0] == ?w
         if @current_chunk['n'].zero? && @chunk_position.zero?

@@ -200,7 +200,7 @@ module Mongo

     def create_chunk(n)
       chunk = BSON::OrderedHash.new
-      chunk['_id'] = BSON::ObjectID.new
+      chunk['_id'] = BSON::ObjectId.new
       chunk['n'] = n
       chunk['files_id'] = @files_id
       chunk['data'] = ''

@@ -308,7 +308,7 @@ module Mongo

     # Initialize the class for writing a file.
     def init_write(opts)
-      @files_id = opts.delete(:_id) || BSON::ObjectID.new
+      @files_id = opts.delete(:_id) || BSON::ObjectId.new
       @content_type = opts.delete(:content_type) || (defined? MIME) && get_content_type || DEFAULT_CONTENT_TYPE
       @chunk_size = opts.delete(:chunk_size) || DEFAULT_CHUNK_SIZE
       @metadata = opts.delete(:metadata) if opts[:metadata]
@@ -12,9 +12,9 @@ class TestCollection < Test::Unit::TestCase

   def test_optional_pk_factory
     @coll_default_pk = @@db.collection('stuff')
-    assert_equal BSON::ObjectID, @coll_default_pk.pk_factory
+    assert_equal BSON::ObjectId, @coll_default_pk.pk_factory
     @coll_default_pk = @@db.create_collection('more-stuff')
-    assert_equal BSON::ObjectID, @coll_default_pk.pk_factory
+    assert_equal BSON::ObjectId, @coll_default_pk.pk_factory

     # Create a db with a pk_factory.
     @db = Connection.new(ENV['MONGO_RUBY_DRIVER_HOST'] || 'localhost',

@@ -334,7 +334,7 @@ class TestCollection < Test::Unit::TestCase

     assert_equal nil, @@test.find_one("hello" => "foo")
     assert_equal nil, @@test.find_one(BSON::OrderedHash["hello", "foo"])
-    assert_equal nil, @@test.find_one(ObjectID.new)
+    assert_equal nil, @@test.find_one(ObjectId.new)

     assert_raise TypeError do
       @@test.find_one(6)

@@ -29,8 +29,8 @@ class DBAPITest < Test::Unit::TestCase
   end

   def test_insert
-    assert_kind_of BSON::ObjectID, @@coll.insert('a' => 2)
-    assert_kind_of BSON::ObjectID, @@coll.insert('b' => 3)
+    assert_kind_of BSON::ObjectId, @@coll.insert('a' => 2)
+    assert_kind_of BSON::ObjectId, @@coll.insert('b' => 3)

     assert_equal 3, @@coll.count
     docs = @@coll.find().to_a

@@ -62,7 +62,7 @@ class DBAPITest < Test::Unit::TestCase
     ids = @@coll.insert([{'a' => 2}, {'b' => 3}])

     ids.each do |i|
-      assert_kind_of BSON::ObjectID, i
+      assert_kind_of BSON::ObjectId, i
     end

     assert_equal 3, @@coll.count

@@ -575,7 +575,7 @@ class DBAPITest < Test::Unit::TestCase
   def test_deref
     @@coll.remove

-    assert_equal nil, @@db.dereference(DBRef.new("test", ObjectID.new))
+    assert_equal nil, @@db.dereference(DBRef.new("test", ObjectId.new))
     @@coll.insert({"x" => "hello"})
     key = @@coll.find_one()["_id"]
     assert_equal "hello", @@db.dereference(DBRef.new("test", key))["x"]

@@ -596,7 +596,7 @@ class DBAPITest < Test::Unit::TestCase
     a = {"hello" => "world"}

     id = @@coll.save(a)
-    assert_kind_of ObjectID, id
+    assert_kind_of ObjectId, id
     assert_equal 1, @@coll.count

     assert_equal id, @@coll.save(a)

@@ -625,14 +625,14 @@ class DBAPITest < Test::Unit::TestCase

     @@coll.save("hello" => "mike")
     id = @@coll.save("hello" => "world")
-    assert_kind_of ObjectID, id
+    assert_kind_of ObjectId, id

     assert_equal "world", @@coll.find_one(:_id => id)["hello"]
     @@coll.find(:_id => id).to_a.each do |doc|
       assert_equal "world", doc["hello"]
     end

-    id = ObjectID.from_string(id.to_s)
+    id = ObjectId.from_string(id.to_s)
     assert_equal "world", @@coll.find_one(:_id => id)["hello"]
   end

@@ -6,7 +6,7 @@ require 'logger'

 class TestPKFactory
   def create_pk(row)
-    row['_id'] ||= BSON::ObjectID.new
+    row['_id'] ||= BSON::ObjectId.new
     row
   end
 end

@@ -102,7 +102,7 @@ class DBTest < Test::Unit::TestCase
     assert_not_nil oid
     assert_equal insert_id, oid

-    oid = BSON::ObjectID.new
+    oid = BSON::ObjectId.new
     data = {'_id' => oid, 'name' => 'Barney', 'age' => 41}
     coll.insert(data)
     row = coll.find_one({'name' => data['name']})
@@ -0,0 +1,132 @@
+require 'test/test_helper'
+require 'rubygems'
+require 'json'
+
+class ObjectIdTest < Test::Unit::TestCase
+
+  include Mongo
+  include BSON
+
+  def setup
+    @o = ObjectId.new
+  end
+
+  def test_hashcode
+    assert_equal @o.instance_variable_get(:@data).hash, @o.hash
+  end
+
+  def test_array_uniq_for_equilavent_ids
+    a = ObjectId.new('123')
+    b = ObjectId.new('123')
+    assert_equal a, b
+    assert_equal 1, [a, b].uniq.size
+  end
+
+  def test_create_pk_method
+    doc = {:name => 'Mongo'}
+    doc = ObjectId.create_pk(doc)
+    assert doc[:_id]
+
+    doc = {:name => 'Mongo', :_id => '12345'}
+    doc = ObjectId.create_pk(doc)
+    assert_equal '12345', doc[:_id]
+  end
+
+  def test_different
+    a = ObjectId.new
+    b = ObjectId.new
+    assert_not_equal a.to_a, b.to_a
+    assert_not_equal a, b
+  end
+
+  def test_eql?
+    o2 = ObjectId.new(@o.to_a)
+    assert_equal @o, o2
+  end
+
+  def test_to_s
+    s = @o.to_s
+    assert_equal 24, s.length
+    s =~ /^([0-9a-f]+)$/
+    assert_equal 24, $1.length
+  end
+
+  def test_method
+    assert_equal ObjectId.from_string(@o.to_s), BSON::ObjectId(@o.to_s)
+  end
+
+  def test_inspect
+    assert_equal "BSON::ObjectId('#{@o.to_s}')", @o.inspect
+  end
+
+  def test_save_and_restore
+    host = ENV['MONGO_RUBY_DRIVER_HOST'] || 'localhost'
+    port = ENV['MONGO_RUBY_DRIVER_PORT'] || Connection::DEFAULT_PORT
+    db = Connection.new(host, port).db(MONGO_TEST_DB)
+    coll = db.collection('test')

+    coll.remove
+    coll << {'a' => 1, '_id' => @o}
+
+    row = coll.find().collect.first
+    assert_equal 1, row['a']
+    assert_equal @o, row['_id']
+  end
+
+  def test_from_string
+    hex_str = @o.to_s
+    o2 = ObjectId.from_string(hex_str)
+    assert_equal hex_str, o2.to_s
+    assert_equal @o, o2
+    assert_equal @o.to_s, o2.to_s
+  end
+
+  def test_illegal_from_string
+    assert_raise InvalidObjectId do
+      ObjectId.from_string("")
+    end
+  end
+
+  def test_legal
+    assert !ObjectId.legal?(nil)
+    assert !ObjectId.legal?("fred")
+    assert !ObjectId.legal?("0000")
+    assert !ObjectId.legal?('000102030405060708090A0')
+    assert ObjectId.legal?('000102030405060708090A0B')
+    assert ObjectId.legal?('abcdefABCDEF123456789012')
+    assert !ObjectId.legal?('abcdefABCDEF12345678901x')
+  end
+
+  def test_from_string_leading_zeroes
+    hex_str = '000000000000000000000000'
+    o = ObjectId.from_string(hex_str)
+    assert_equal hex_str, o.to_s
+  end
+
+  def test_byte_order
+    hex_str = '000102030405060708090A0B'
+    o = ObjectId.from_string(hex_str)
+    assert_equal [0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0a, 0x0b], o.to_a
+  end
+
+  def test_generation_time
+    time = Time.now
+    id = ObjectId.new
+    generated_time = id.generation_time
+
+    assert_in_delta time.to_i, generated_time.to_i, 2
+    assert_equal "UTC", generated_time.zone
+  end
+
+  def test_from_time
+    time = Time.now.utc
+    id = ObjectId.from_time(time)
+
+    assert_equal time.to_i, id.generation_time.to_i
+  end
+
+  def test_json
+    id = ObjectId.new
+    assert_equal "{\"$oid\": \"#{id}\"}", id.to_json
+  end
+end