Cleanup w/r/t loading c ext and testing it
parent 7709e127bf
commit 05b5fc576b

README.rdoc (16 lines changed)

@@ -254,16 +254,23 @@ Random cursor fun facts:
= Testing

-If you have the source code, you can run the tests.
+If you have the source code, you can run the tests. There's a separate rake task for testing with
+the mongo_ext c extension enabled.

-  $ rake test
+  $ rake test:c

-This will run both unit and functional tests. If you want to run these
-individually:
+Or, to test without the extension:
+
+  $ rake test:ruby
+
+These will run both unit and functional tests. To run these tests alone:

  $ rake test:unit
  $ rake test:functional

+To run any individual rake tasks with the C extension enabled, just pass C_EXT=true to the task:
+
+  $ rake test:unit C_EXT=true
+
If you want to test replica pairs, you can run the following tests
individually:

@@ -276,6 +283,7 @@ It's also possible to test replica pairs with connection pooling:

  $ rake test:pooled_pair_insert

===Shoulda and Mocha

All tests now require shoulda and mocha. You can install these gems as
follows:
Rakefile (25 lines changed)

@@ -14,15 +14,32 @@ include Config
gem_command = "gem"
gem_command = "gem1.9" if $0.match(/1\.9$/) # use gem1.9 if we used rake1.9

# NOTE: the functional tests assume MongoDB is running.
desc "Test the MongoDB Ruby driver."
task :test do
-  Rake::Task['test:unit'].invoke
-  Rake::Task['test:functional'].invoke
-  Rake::Task['test:pooled_threading'].invoke
+  puts "\nThis option has changed."
+  puts "\nTo test the driver with the c-extensions:\nrake test:c\n"
+  puts "To test the pure ruby driver: \nrake test:ruby"
end

namespace :test do
+
+  desc "Test the driver with the c extension enabled."
+  task :c do
+    ENV['C_EXT'] = 'TRUE'
+    Rake::Task['test:unit'].invoke
+    Rake::Task['test:functional'].invoke
+    Rake::Task['test:pooled_threading'].invoke
+    ENV['C_EXT'] = nil
+  end
+
+  desc "Test the driver using pure ruby (no c extension)"
+  task :ruby do
+    ENV['C_EXT'] = nil
+    Rake::Task['test:unit'].invoke
+    Rake::Task['test:functional'].invoke
+    Rake::Task['test:pooled_threading'].invoke
+  end

  Rake::TestTask.new(:unit) do |t|
    t.test_files = FileList['test/unit/*_test.rb']
    t.verbose = true
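The new tasks hand the choice of serializer to the test suite through the C_EXT environment variable alone. As a minimal sketch (not part of this diff, and assuming the mongo_ext gem is installed), any script can opt in the same way the test helper later in this commit does:

  # Sketch: honor the C_EXT environment variable before loading the driver,
  # mirroring what rake test:c arranges for the test suite.
  if ENV['C_EXT']
    require 'rubygems'        # mongo_ext is installed as a gem
    require 'mongo_ext/cbson' # the compiled BSON serializer
  end
  require 'mongo'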
lib/mongo.rb (38 lines changed)

@@ -1,5 +1,27 @@
$:.unshift(File.join(File.dirname(__FILE__), '..', 'lib'))

+module Mongo
+  ASCENDING = 1
+  DESCENDING = -1
+
+  VERSION = "0.18"
+end
+
+begin
+  require 'mongo_ext/cbson'
+  require 'mongo/util/bson_c'
+  BSON = BSON_C
+  BSON_SERIALIZER = CBson
+rescue LoadError
+  require 'mongo/util/bson_ruby'
+  BSON = BSON_RUBY
+  BSON_SERIALIZER = BSON
+  warn "\n**Notice: C extension not loaded. This is required for optimum MongoDB Ruby driver performance."
+  warn " You can install the extension as follows:\n gem install mongo_ext\n"
+  warn " If you continue to receive this message after installing, make sure that the"
+  warn " mongo_ext gem is in your load path. This may mean requiring rubygems.\n"
+end
+
require 'mongo/types/binary'
require 'mongo/types/code'
require 'mongo/types/dbref'

@@ -17,19 +39,3 @@ require 'mongo/db'
require 'mongo/cursor'
require 'mongo/collection'
require 'mongo/admin'
-
-begin
-  require 'mongo_ext/cbson'
-  BSON_SERIALIZER = CBson
-rescue LoadError
-  BSON_SERIALIZER = BSON
-  warn "\n**Notice: C extension not detected. This is required for optimum MongoDB Ruby driver performance."
-  warn " You can install the extension as follows:\n gem install mongo_ext\n"
-end
-
-module Mongo
-  ASCENDING = 1
-  DESCENDING = -1
-
-  VERSION = "0.18"
-end
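With the begin/rescue above, require 'mongo' settles on a serializer once, at load time. A small sketch (not part of the commit) of how an application can report which implementation it ended up with, using only the constants defined in this file:

  require 'mongo'

  # BSON and BSON_SERIALIZER are set up by lib/mongo.rb above.
  if BSON_SERIALIZER.to_s == 'CBson'
    puts "mongo_ext loaded: BSON is backed by the C extension (BSON_C)"
  else
    puts "pure Ruby serializer in use (BSON_RUBY); see the warning it prints"
  end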
@@ -14,8 +14,6 @@
# limitations under the License.
# ++

require 'mongo/util/ordered_hash'

module Mongo

  # Provide administrative database methods: those having to do with

@@ -459,7 +459,7 @@ module Mongo
      buf.put_array(receive_message_on_socket(size - 4, sock).unpack("C*"), 4)
      number_remaining -= 1
      buf.rewind
-      docs << BSON.new.deserialize(buf)
+      docs << BSON.deserialize(buf)
    end
    [docs, number_received, cursor_id]
  end
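The one-line change above is the pattern repeated throughout this commit: the driver now calls class-level BSON.serialize and BSON.deserialize instead of instantiating a serializer first. A short sketch of that calling convention, in the round-trip style the updated tests below use:

  doc  = {'doc' => 'hello, world'}
  bson = BSON.serialize(doc)      # class-level call; no BSON.new needed
  back = BSON.deserialize(bson)   # accepts the serialized form returned above
  raise "round-trip failed" unless back == doc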
@@ -12,9 +12,6 @@
# See the License for the specific language governing permissions and
# limitations under the License.

require 'mongo/util/byte_buffer'
require 'mongo/util/bson'

module Mongo

  # A cursor over query results. Returned objects are hashes.

@@ -18,9 +18,6 @@ require 'socket'
require 'timeout'
require 'digest/md5'
require 'thread'
require 'mongo/collection'
require 'mongo/util/ordered_hash.rb'
require 'mongo/admin'

module Mongo

@@ -18,7 +18,6 @@ require 'mongo/types/objectid'
require 'mongo/util/byte_buffer'
require 'mongo/util/ordered_hash'

module GridFS

  # A chunk stores a portion of GridStore data.
@ -1,563 +0,0 @@
|
|||
# --
|
||||
# Copyright (C) 2008-2009 10gen Inc.
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify it
|
||||
# under the terms of the GNU Affero General Public License, version 3, as
|
||||
# published by the Free Software Foundation.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful, but WITHOUT
|
||||
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
||||
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License
|
||||
# for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Affero General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
# ++
|
||||
|
||||
require 'base64'
|
||||
require 'mongo/util/byte_buffer'
|
||||
require 'mongo/util/ordered_hash'
|
||||
require 'mongo/types/binary'
|
||||
require 'mongo/types/dbref'
|
||||
require 'mongo/types/objectid'
|
||||
require 'mongo/types/regexp_of_holding'
|
||||
|
||||
# A BSON seralizer/deserializer.
|
||||
class BSON
|
||||
|
||||
include Mongo
|
||||
|
||||
MINKEY = -1
|
||||
EOO = 0
|
||||
NUMBER = 1
|
||||
STRING = 2
|
||||
OBJECT = 3
|
||||
ARRAY = 4
|
||||
BINARY = 5
|
||||
UNDEFINED = 6
|
||||
OID = 7
|
||||
BOOLEAN = 8
|
||||
DATE = 9
|
||||
NULL = 10
|
||||
REGEX = 11
|
||||
REF = 12
|
||||
CODE = 13
|
||||
SYMBOL = 14
|
||||
CODE_W_SCOPE = 15
|
||||
NUMBER_INT = 16
|
||||
TIMESTAMP = 17
|
||||
NUMBER_LONG = 18
|
||||
MAXKEY = 127
|
||||
|
||||
if RUBY_VERSION >= '1.9'
|
||||
def self.to_utf8(str)
|
||||
str.encode("utf-8")
|
||||
end
|
||||
else
|
||||
def self.to_utf8(str)
|
||||
begin
|
||||
str.unpack("U*")
|
||||
rescue => ex
|
||||
raise InvalidStringEncoding, "String not valid utf-8: #{str}"
|
||||
end
|
||||
str
|
||||
end
|
||||
end
|
||||
|
||||
def self.serialize_cstr(buf, val)
|
||||
buf.put_array(to_utf8(val.to_s).unpack("C*") + [0])
|
||||
end
|
||||
|
||||
def initialize()
|
||||
@buf = ByteBuffer.new
|
||||
end
|
||||
|
||||
def to_a
|
||||
@buf.to_a
|
||||
end
|
||||
|
||||
# Serializes an object.
|
||||
# Implemented to ensure an API compatible with BSON extension.
|
||||
def self.serialize(obj, check_keys)
|
||||
new.serialize(obj, check_keys)
|
||||
end
|
||||
|
||||
begin
|
||||
require 'mongo_ext/cbson'
|
||||
def serialize(obj, check_keys=false)
|
||||
@buf = ByteBuffer.new(CBson.serialize(obj, check_keys))
|
||||
end
|
||||
rescue LoadError
|
||||
def serialize(obj, check_keys=false)
|
||||
raise "Document is null" unless obj
|
||||
|
||||
@buf.rewind
|
||||
# put in a placeholder for the total size
|
||||
@buf.put_int(0)
|
||||
|
||||
# Write key/value pairs. Always write _id first if it exists.
|
||||
if obj.has_key? '_id'
|
||||
serialize_key_value('_id', obj['_id'], check_keys)
|
||||
elsif obj.has_key? :_id
|
||||
serialize_key_value('_id', obj[:_id], check_keys)
|
||||
end
|
||||
|
||||
obj.each {|k, v| serialize_key_value(k, v, check_keys) unless k == '_id' || k == :_id }
|
||||
|
||||
serialize_eoo_element(@buf)
|
||||
@buf.put_int(@buf.size, 0)
|
||||
self
|
||||
end
|
||||
end
|
||||
|
||||
# Returns the array stored in the buffer.
|
||||
# Implemented to ensure an API compatible with BSON extension.
|
||||
def unpack(arg)
|
||||
@buf.to_a
|
||||
end
|
||||
|
||||
def serialize_key_value(k, v, check_keys)
|
||||
k = k.to_s
|
||||
if check_keys
|
||||
if k[0] == ?$
|
||||
raise InvalidName.new("key #{k} must not start with '$'")
|
||||
end
|
||||
if k.include? ?.
|
||||
raise InvalidName.new("key #{k} must not contain '.'")
|
||||
end
|
||||
end
|
||||
type = bson_type(v)
|
||||
case type
|
||||
when STRING, SYMBOL
|
||||
serialize_string_element(@buf, k, v, type)
|
||||
when NUMBER, NUMBER_INT
|
||||
serialize_number_element(@buf, k, v, type)
|
||||
when OBJECT
|
||||
serialize_object_element(@buf, k, v, check_keys)
|
||||
when OID
|
||||
serialize_oid_element(@buf, k, v)
|
||||
when ARRAY
|
||||
serialize_array_element(@buf, k, v, check_keys)
|
||||
when REGEX
|
||||
serialize_regex_element(@buf, k, v)
|
||||
when BOOLEAN
|
||||
serialize_boolean_element(@buf, k, v)
|
||||
when DATE
|
||||
serialize_date_element(@buf, k, v)
|
||||
when NULL
|
||||
serialize_null_element(@buf, k)
|
||||
when REF
|
||||
serialize_dbref_element(@buf, k, v)
|
||||
when BINARY
|
||||
serialize_binary_element(@buf, k, v)
|
||||
when UNDEFINED
|
||||
serialize_null_element(@buf, k)
|
||||
when CODE_W_SCOPE
|
||||
serialize_code_w_scope(@buf, k, v)
|
||||
else
|
||||
raise "unhandled type #{type}"
|
||||
end
|
||||
end
|
||||
|
||||
begin
|
||||
require 'mongo_ext/cbson'
|
||||
def deserialize(buf=nil)
|
||||
if buf.is_a? String
|
||||
@buf = ByteBuffer.new(buf) if buf
|
||||
else
|
||||
@buf = ByteBuffer.new(buf.to_a) if buf
|
||||
end
|
||||
@buf.rewind
|
||||
CBson.deserialize(@buf.to_s)
|
||||
end
|
||||
rescue LoadError
|
||||
def deserialize(buf=nil)
|
||||
# If buf is nil, use @buf, assumed to contain already-serialized BSON.
|
||||
# This is only true during testing.
|
||||
if buf.is_a? String
|
||||
@buf = ByteBuffer.new(buf) if buf
|
||||
else
|
||||
@buf = ByteBuffer.new(buf.to_a) if buf
|
||||
end
|
||||
@buf.rewind
|
||||
@buf.get_int # eat message size
|
||||
doc = OrderedHash.new
|
||||
while @buf.more?
|
||||
type = @buf.get
|
||||
case type
|
||||
when STRING, CODE
|
||||
key = deserialize_cstr(@buf)
|
||||
doc[key] = deserialize_string_data(@buf)
|
||||
when SYMBOL
|
||||
key = deserialize_cstr(@buf)
|
||||
doc[key] = deserialize_string_data(@buf).intern
|
||||
when NUMBER
|
||||
key = deserialize_cstr(@buf)
|
||||
doc[key] = deserialize_number_data(@buf)
|
||||
when NUMBER_INT
|
||||
key = deserialize_cstr(@buf)
|
||||
doc[key] = deserialize_number_int_data(@buf)
|
||||
when NUMBER_LONG
|
||||
key = deserialize_cstr(@buf)
|
||||
doc[key] = deserialize_number_long_data(@buf)
|
||||
when OID
|
||||
key = deserialize_cstr(@buf)
|
||||
doc[key] = deserialize_oid_data(@buf)
|
||||
when ARRAY
|
||||
key = deserialize_cstr(@buf)
|
||||
doc[key] = deserialize_array_data(@buf)
|
||||
when REGEX
|
||||
key = deserialize_cstr(@buf)
|
||||
doc[key] = deserialize_regex_data(@buf)
|
||||
when OBJECT
|
||||
key = deserialize_cstr(@buf)
|
||||
doc[key] = deserialize_object_data(@buf)
|
||||
when BOOLEAN
|
||||
key = deserialize_cstr(@buf)
|
||||
doc[key] = deserialize_boolean_data(@buf)
|
||||
when DATE
|
||||
key = deserialize_cstr(@buf)
|
||||
doc[key] = deserialize_date_data(@buf)
|
||||
when NULL
|
||||
key = deserialize_cstr(@buf)
|
||||
doc[key] = nil
|
||||
when UNDEFINED
|
||||
key = deserialize_cstr(@buf)
|
||||
doc[key] = nil
|
||||
when REF
|
||||
key = deserialize_cstr(@buf)
|
||||
doc[key] = deserialize_dbref_data(@buf)
|
||||
when BINARY
|
||||
key = deserialize_cstr(@buf)
|
||||
doc[key] = deserialize_binary_data(@buf)
|
||||
when CODE_W_SCOPE
|
||||
key = deserialize_cstr(@buf)
|
||||
doc[key] = deserialize_code_w_scope_data(@buf)
|
||||
when TIMESTAMP
|
||||
key = deserialize_cstr(@buf)
|
||||
doc[key] = [deserialize_number_int_data(@buf),
|
||||
deserialize_number_int_data(@buf)]
|
||||
when EOO
|
||||
break
|
||||
else
|
||||
raise "Unknown type #{type}, key = #{key}"
|
||||
end
|
||||
end
|
||||
@buf.rewind
|
||||
doc
|
||||
end
|
||||
end
|
||||
|
||||
# For debugging.
|
||||
def hex_dump
|
||||
str = ''
|
||||
@buf.to_a.each_with_index { |b,i|
|
||||
if (i % 8) == 0
|
||||
str << "\n" if i > 0
|
||||
str << '%4d: ' % i
|
||||
else
|
||||
str << ' '
|
||||
end
|
||||
str << '%02X' % b
|
||||
}
|
||||
str
|
||||
end
|
||||
|
||||
def deserialize_date_data(buf)
|
||||
unsigned = buf.get_long()
|
||||
# see note for deserialize_number_long_data below
|
||||
milliseconds = unsigned >= 2 ** 64 / 2 ? unsigned - 2**64 : unsigned
|
||||
Time.at(milliseconds.to_f / 1000.0).utc # at() takes fractional seconds
|
||||
end
|
||||
|
||||
def deserialize_boolean_data(buf)
|
||||
buf.get == 1
|
||||
end
|
||||
|
||||
def deserialize_number_data(buf)
|
||||
buf.get_double
|
||||
end
|
||||
|
||||
def deserialize_number_int_data(buf)
|
||||
# sometimes ruby makes me angry... why would the same code pack as signed
|
||||
# but unpack as unsigned
|
||||
unsigned = buf.get_int
|
||||
unsigned >= 2**32 / 2 ? unsigned - 2**32 : unsigned
|
||||
end
|
||||
|
||||
def deserialize_number_long_data(buf)
|
||||
# same note as above applies here...
|
||||
unsigned = buf.get_long
|
||||
unsigned >= 2 ** 64 / 2 ? unsigned - 2**64 : unsigned
|
||||
end
|
||||
|
||||
def deserialize_object_data(buf)
|
||||
size = buf.get_int
|
||||
buf.position -= 4
|
||||
object = BSON.new().deserialize(buf.get(size))
|
||||
if object.has_key? "$ref"
|
||||
DBRef.new(object["$ref"], object["$id"])
|
||||
else
|
||||
object
|
||||
end
|
||||
end
|
||||
|
||||
def deserialize_array_data(buf)
|
||||
h = deserialize_object_data(buf)
|
||||
a = []
|
||||
h.each { |k, v| a[k.to_i] = v }
|
||||
a
|
||||
end
|
||||
|
||||
def deserialize_regex_data(buf)
|
||||
str = deserialize_cstr(buf)
|
||||
options_str = deserialize_cstr(buf)
|
||||
options = 0
|
||||
options |= Regexp::IGNORECASE if options_str.include?('i')
|
||||
options |= Regexp::MULTILINE if options_str.include?('m')
|
||||
options |= Regexp::EXTENDED if options_str.include?('x')
|
||||
options_str.gsub!(/[imx]/, '') # Now remove the three we understand
|
||||
RegexpOfHolding.new(str, options, options_str)
|
||||
end
|
||||
|
||||
def deserialize_string_data(buf)
|
||||
len = buf.get_int
|
||||
bytes = buf.get(len)
|
||||
str = bytes[0..-2]
|
||||
if str.respond_to? "pack"
|
||||
str = str.pack("C*")
|
||||
end
|
||||
if RUBY_VERSION >= '1.9'
|
||||
str.force_encoding("utf-8")
|
||||
end
|
||||
str
|
||||
end
|
||||
|
||||
def deserialize_code_w_scope_data(buf)
|
||||
buf.get_int
|
||||
len = buf.get_int
|
||||
code = buf.get(len)[0..-2]
|
||||
if code.respond_to? "pack"
|
||||
code = code.pack("C*")
|
||||
end
|
||||
if RUBY_VERSION >= '1.9'
|
||||
code.force_encoding("utf-8")
|
||||
end
|
||||
|
||||
scope_size = buf.get_int
|
||||
buf.position -= 4
|
||||
scope = BSON.new().deserialize(buf.get(scope_size))
|
||||
|
||||
Code.new(code, scope)
|
||||
end
|
||||
|
||||
def deserialize_oid_data(buf)
|
||||
ObjectID.new(buf.get(12))
|
||||
end
|
||||
|
||||
def deserialize_dbref_data(buf)
|
||||
ns = deserialize_string_data(buf)
|
||||
oid = deserialize_oid_data(buf)
|
||||
DBRef.new(ns, oid)
|
||||
end
|
||||
|
||||
def deserialize_binary_data(buf)
|
||||
len = buf.get_int
|
||||
type = buf.get
|
||||
len = buf.get_int if type == Binary::SUBTYPE_BYTES
|
||||
Binary.new(buf.get(len), type)
|
||||
end
|
||||
|
||||
def serialize_eoo_element(buf)
|
||||
buf.put(EOO)
|
||||
end
|
||||
|
||||
def serialize_null_element(buf, key)
|
||||
buf.put(NULL)
|
||||
self.class.serialize_cstr(buf, key)
|
||||
end
|
||||
|
||||
def serialize_dbref_element(buf, key, val)
|
||||
oh = OrderedHash.new
|
||||
oh['$ref'] = val.namespace
|
||||
oh['$id'] = val.object_id
|
||||
serialize_object_element(buf, key, oh, false)
|
||||
end
|
||||
|
||||
def serialize_binary_element(buf, key, val)
|
||||
buf.put(BINARY)
|
||||
self.class.serialize_cstr(buf, key)
|
||||
|
||||
bytes = val.to_a
|
||||
num_bytes = bytes.length
|
||||
subtype = val.respond_to?(:subtype) ? val.subtype : Binary::SUBTYPE_BYTES
|
||||
if subtype == Binary::SUBTYPE_BYTES
|
||||
buf.put_int(num_bytes + 4)
|
||||
buf.put(subtype)
|
||||
buf.put_int(num_bytes)
|
||||
buf.put_array(bytes)
|
||||
else
|
||||
buf.put_int(num_bytes)
|
||||
buf.put(subtype)
|
||||
buf.put_array(bytes)
|
||||
end
|
||||
end
|
||||
|
||||
def serialize_boolean_element(buf, key, val)
|
||||
buf.put(BOOLEAN)
|
||||
self.class.serialize_cstr(buf, key)
|
||||
buf.put(val ? 1 : 0)
|
||||
end
|
||||
|
||||
def serialize_date_element(buf, key, val)
|
||||
buf.put(DATE)
|
||||
self.class.serialize_cstr(buf, key)
|
||||
millisecs = (val.to_f * 1000).to_i
|
||||
buf.put_long(millisecs)
|
||||
end
|
||||
|
||||
def serialize_number_element(buf, key, val, type)
|
||||
if type == NUMBER
|
||||
buf.put(type)
|
||||
self.class.serialize_cstr(buf, key)
|
||||
buf.put_double(val)
|
||||
else
|
||||
if val > 2**64 / 2 - 1 or val < -2**64 / 2
|
||||
raise RangeError.new("MongoDB can only handle 8-byte ints")
|
||||
end
|
||||
if val > 2**32 / 2 - 1 or val < -2**32 / 2
|
||||
buf.put(NUMBER_LONG)
|
||||
self.class.serialize_cstr(buf, key)
|
||||
buf.put_long(val)
|
||||
else
|
||||
buf.put(type)
|
||||
self.class.serialize_cstr(buf, key)
|
||||
buf.put_int(val)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
def serialize_object_element(buf, key, val, check_keys, opcode=OBJECT)
|
||||
buf.put(opcode)
|
||||
self.class.serialize_cstr(buf, key)
|
||||
buf.put_array(BSON.new.serialize(val, check_keys).to_a)
|
||||
end
|
||||
|
||||
def serialize_array_element(buf, key, val, check_keys)
|
||||
# Turn array into hash with integer indices as keys
|
||||
h = OrderedHash.new
|
||||
i = 0
|
||||
val.each { |v| h[i] = v; i += 1 }
|
||||
serialize_object_element(buf, key, h, check_keys, ARRAY)
|
||||
end
|
||||
|
||||
def serialize_regex_element(buf, key, val)
|
||||
buf.put(REGEX)
|
||||
self.class.serialize_cstr(buf, key)
|
||||
|
||||
str = val.to_s.sub(/.*?:/, '')[0..-2] # Turn "(?xxx:yyy)" into "yyy"
|
||||
self.class.serialize_cstr(buf, str)
|
||||
|
||||
options = val.options
|
||||
options_str = ''
|
||||
options_str << 'i' if ((options & Regexp::IGNORECASE) != 0)
|
||||
options_str << 'm' if ((options & Regexp::MULTILINE) != 0)
|
||||
options_str << 'x' if ((options & Regexp::EXTENDED) != 0)
|
||||
options_str << val.extra_options_str if val.respond_to?(:extra_options_str)
|
||||
# Must store option chars in alphabetical order
|
||||
self.class.serialize_cstr(buf, options_str.split(//).sort.uniq.join)
|
||||
end
|
||||
|
||||
def serialize_oid_element(buf, key, val)
|
||||
buf.put(OID)
|
||||
self.class.serialize_cstr(buf, key)
|
||||
|
||||
buf.put_array(val.to_a)
|
||||
end
|
||||
|
||||
def serialize_string_element(buf, key, val, type)
|
||||
buf.put(type)
|
||||
self.class.serialize_cstr(buf, key)
|
||||
|
||||
# Make a hole for the length
|
||||
len_pos = buf.position
|
||||
buf.put_int(0)
|
||||
|
||||
# Save the string
|
||||
start_pos = buf.position
|
||||
self.class.serialize_cstr(buf, val)
|
||||
end_pos = buf.position
|
||||
|
||||
# Put the string size in front
|
||||
buf.put_int(end_pos - start_pos, len_pos)
|
||||
|
||||
# Go back to where we were
|
||||
buf.position = end_pos
|
||||
end
|
||||
|
||||
def serialize_code_w_scope(buf, key, val)
|
||||
buf.put(CODE_W_SCOPE)
|
||||
self.class.serialize_cstr(buf, key)
|
||||
|
||||
# Make a hole for the length
|
||||
len_pos = buf.position
|
||||
buf.put_int(0)
|
||||
|
||||
buf.put_int(val.length + 1)
|
||||
self.class.serialize_cstr(buf, val)
|
||||
buf.put_array(BSON.new.serialize(val.scope).to_a)
|
||||
|
||||
end_pos = buf.position
|
||||
buf.put_int(end_pos - len_pos, len_pos)
|
||||
buf.position = end_pos
|
||||
end
|
||||
|
||||
def deserialize_cstr(buf)
|
||||
chars = ""
|
||||
while true
|
||||
b = buf.get
|
||||
break if b == 0
|
||||
chars << b.chr
|
||||
end
|
||||
if RUBY_VERSION >= '1.9'
|
||||
chars.force_encoding("utf-8") # Mongo stores UTF-8
|
||||
end
|
||||
chars
|
||||
end
|
||||
|
||||
def bson_type(o)
|
||||
case o
|
||||
when nil
|
||||
NULL
|
||||
when Integer
|
||||
NUMBER_INT
|
||||
when Numeric
|
||||
NUMBER
|
||||
when ByteBuffer
|
||||
BINARY
|
||||
when Code
|
||||
CODE_W_SCOPE
|
||||
when String
|
||||
STRING
|
||||
when Array
|
||||
ARRAY
|
||||
when Regexp
|
||||
REGEX
|
||||
when ObjectID
|
||||
OID
|
||||
when DBRef
|
||||
REF
|
||||
when true, false
|
||||
BOOLEAN
|
||||
when Time
|
||||
DATE
|
||||
when Hash
|
||||
OBJECT
|
||||
when Symbol
|
||||
SYMBOL
|
||||
else
|
||||
raise "Unknown type of object: #{o.class.name}"
|
||||
end
|
||||
end
|
||||
|
||||
end
@@ -27,6 +27,25 @@ class ByteBuffer
    @double_pack_order = 'E'
  end

+  if RUBY_VERSION >= '1.9'
+    def self.to_utf8(str)
+      str.encode("utf-8")
+    end
+  else
+    def self.to_utf8(str)
+      begin
+        str.unpack("U*")
+      rescue => ex
+        raise InvalidStringEncoding, "String not valid utf-8: #{str}"
+      end
+      str
+    end
+  end
+
+  def self.serialize_cstr(buf, val)
+    buf.put_array(to_utf8(val.to_s).unpack("C*") + [0])
+  end
+
  # +endianness+ should be :little_endian or :big_endian. Default is :little_endian
  def order=(endianness)
    @order = endianness
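to_utf8 and serialize_cstr now live on ByteBuffer, so both BSON implementations share one place that validates UTF-8 and appends the NUL terminator. A quick sketch (not part of the diff) of what serialize_cstr leaves in a buffer:

  require 'mongo/util/byte_buffer'

  buf = ByteBuffer.new
  ByteBuffer.serialize_cstr(buf, "abc")
  buf.to_a   # => [97, 98, 99, 0] -- UTF-8 bytes plus the trailing NUL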
@@ -1,6 +1,4 @@
$LOAD_PATH[0,0] = File.join(File.dirname(__FILE__), '..', 'lib')
require 'mongo'
require 'test/unit'
require 'test/test_helper'

# NOTE: assumes Mongo is running
class AdminTest < Test::Unit::TestCase

@@ -1,9 +1,5 @@
# -*- coding: utf-8 -*-
$LOAD_PATH[0,0] = File.join(File.dirname(__FILE__), '..', 'lib')
require 'mongo'
require 'mongo/util/ordered_hash'
require 'test/test_helper'
require 'iconv'
require 'test/unit'

class BSONTest < Test::Unit::TestCase
@ -13,58 +9,58 @@ class BSONTest < Test::Unit::TestCase
|
|||
# We don't pass a DB to the constructor, even though we are about to test
|
||||
# deserialization. This means that when we deserialize, any DBRefs will
|
||||
# have nil @db ivars. That's fine for now.
|
||||
@b = BSON.new
|
||||
#BSON = BSON.new
|
||||
end
|
||||
|
||||
def test_string
|
||||
doc = {'doc' => 'hello, world'}
|
||||
@b.serialize(doc)
|
||||
assert_equal doc, @b.deserialize
|
||||
bson = bson = BSON.serialize(doc)
|
||||
assert_equal doc, BSON.deserialize(bson)
|
||||
end
|
||||
|
||||
|
||||
def test_valid_utf8_string
|
||||
doc = {'doc' => "aéあ"}
|
||||
@b.serialize(doc)
|
||||
assert_equal doc, @b.deserialize
|
||||
bson = bson = BSON.serialize(doc)
|
||||
assert_equal doc, BSON.deserialize(bson)
|
||||
end
|
||||
|
||||
def test_invalid_string
|
||||
string = Iconv.conv('iso-8859-1', 'utf-8', 'aé').first
|
||||
doc = {'doc' => string}
|
||||
doc = {'doc' => string}
|
||||
assert_raise InvalidStringEncoding do
|
||||
@b.serialize(doc)
|
||||
BSON.serialize(doc)
|
||||
end
|
||||
end
|
||||
|
||||
def test_code
|
||||
doc = {'$where' => Code.new('this.a.b < this.b')}
|
||||
@b.serialize(doc)
|
||||
assert_equal doc, @b.deserialize
|
||||
bson = BSON.serialize(doc)
|
||||
assert_equal doc, BSON.deserialize(bson)
|
||||
end
|
||||
|
||||
def test_number
|
||||
doc = {'doc' => 41.99}
|
||||
@b.serialize(doc)
|
||||
assert_equal doc, @b.deserialize
|
||||
bson = BSON.serialize(doc)
|
||||
assert_equal doc, BSON.deserialize(bson)
|
||||
end
|
||||
|
||||
def test_int
|
||||
doc = {'doc' => 42}
|
||||
@b.serialize(doc)
|
||||
assert_equal doc, @b.deserialize
|
||||
bson = BSON.serialize(doc)
|
||||
assert_equal doc, BSON.deserialize(bson)
|
||||
|
||||
doc = {"doc" => -5600}
|
||||
@b.serialize(doc)
|
||||
assert_equal doc, @b.deserialize
|
||||
bson = BSON.serialize(doc)
|
||||
assert_equal doc, BSON.deserialize(bson)
|
||||
|
||||
doc = {"doc" => 2147483647}
|
||||
@b.serialize(doc)
|
||||
assert_equal doc, @b.deserialize
|
||||
bson = BSON.serialize(doc)
|
||||
assert_equal doc, BSON.deserialize(bson)
|
||||
|
||||
doc = {"doc" => -2147483648}
|
||||
@b.serialize(doc)
|
||||
assert_equal doc, @b.deserialize
|
||||
bson = BSON.serialize(doc)
|
||||
assert_equal doc, BSON.deserialize(bson)
|
||||
end
|
||||
|
||||
def test_ordered_hash
|
||||
|
@ -73,32 +69,32 @@ class BSONTest < Test::Unit::TestCase
|
|||
doc["a"] = 2
|
||||
doc["c"] = 3
|
||||
doc["d"] = 4
|
||||
@b.serialize(doc)
|
||||
assert_equal doc, @b.deserialize
|
||||
bson = BSON.serialize(doc)
|
||||
assert_equal doc, BSON.deserialize(bson)
|
||||
end
|
||||
|
||||
def test_object
|
||||
doc = {'doc' => {'age' => 42, 'name' => 'Spongebob', 'shoe_size' => 9.5}}
|
||||
@b.serialize(doc)
|
||||
assert_equal doc, @b.deserialize
|
||||
bson = BSON.serialize(doc)
|
||||
assert_equal doc, BSON.deserialize(bson)
|
||||
end
|
||||
|
||||
def test_oid
|
||||
doc = {'doc' => ObjectID.new}
|
||||
@b.serialize(doc)
|
||||
assert_equal doc, @b.deserialize
|
||||
bson = BSON.serialize(doc)
|
||||
assert_equal doc, BSON.deserialize(bson)
|
||||
end
|
||||
|
||||
def test_array
|
||||
doc = {'doc' => [1, 2, 'a', 'b']}
|
||||
@b.serialize(doc)
|
||||
assert_equal doc, @b.deserialize
|
||||
bson = BSON.serialize(doc)
|
||||
assert_equal doc, BSON.deserialize(bson)
|
||||
end
|
||||
|
||||
def test_regex
|
||||
doc = {'doc' => /foobar/i}
|
||||
@b.serialize(doc)
|
||||
doc2 = @b.deserialize
|
||||
bson = BSON.serialize(doc)
|
||||
doc2 = BSON.deserialize(bson)
|
||||
assert_equal doc, doc2
|
||||
|
||||
r = doc2['doc']
|
||||
|
@ -108,11 +104,10 @@ class BSONTest < Test::Unit::TestCase
|
|||
r.extra_options_str << 'zywcab'
|
||||
assert_equal 'zywcab', r.extra_options_str
|
||||
|
||||
b = BSON.new
|
||||
doc = {'doc' => r}
|
||||
b.serialize(doc)
|
||||
bson_doc = BSON.serialize(doc)
|
||||
doc2 = nil
|
||||
doc2 = b.deserialize
|
||||
doc2 = BSON.deserialize(bson_doc)
|
||||
assert_equal doc, doc2
|
||||
|
||||
r = doc2['doc']
|
||||
|
@ -122,30 +117,30 @@ class BSONTest < Test::Unit::TestCase
|
|||
|
||||
def test_boolean
|
||||
doc = {'doc' => true}
|
||||
@b.serialize(doc)
|
||||
assert_equal doc, @b.deserialize
|
||||
bson = BSON.serialize(doc)
|
||||
assert_equal doc, BSON.deserialize(bson)
|
||||
end
|
||||
|
||||
def test_date
|
||||
doc = {'date' => Time.now}
|
||||
@b.serialize(doc)
|
||||
doc2 = @b.deserialize
|
||||
bson = BSON.serialize(doc)
|
||||
doc2 = BSON.deserialize(bson)
|
||||
# Mongo only stores up to the millisecond
|
||||
assert_in_delta doc['date'], doc2['date'], 0.001
|
||||
end
|
||||
|
||||
def test_date_returns_as_utc
|
||||
doc = {'date' => Time.now}
|
||||
@b.serialize(doc)
|
||||
doc2 = @b.deserialize
|
||||
bson = BSON.serialize(doc)
|
||||
doc2 = BSON.deserialize(bson)
|
||||
assert doc2['date'].utc?
|
||||
end
|
||||
|
||||
def test_date_before_epoch
|
||||
begin
|
||||
doc = {'date' => Time.utc(1600)}
|
||||
@b.serialize(doc)
|
||||
doc2 = @b.deserialize
|
||||
bson = BSON.serialize(doc)
|
||||
doc2 = BSON.deserialize(bson)
|
||||
# Mongo only stores up to the millisecond
|
||||
assert_in_delta doc['date'], doc2['date'], 0.001
|
||||
rescue ArgumentError
|
||||
|
@ -160,16 +155,16 @@ class BSONTest < Test::Unit::TestCase
|
|||
oid = ObjectID.new
|
||||
doc = {}
|
||||
doc['dbref'] = DBRef.new('namespace', oid)
|
||||
@b.serialize(doc)
|
||||
doc2 = @b.deserialize
|
||||
bson = BSON.serialize(doc)
|
||||
doc2 = BSON.deserialize(bson)
|
||||
assert_equal 'namespace', doc2['dbref'].namespace
|
||||
assert_equal oid, doc2['dbref'].object_id
|
||||
end
|
||||
|
||||
def test_symbol
|
||||
doc = {'sym' => :foo}
|
||||
@b.serialize(doc)
|
||||
doc2 = @b.deserialize
|
||||
bson = BSON.serialize(doc)
|
||||
doc2 = BSON.deserialize(bson)
|
||||
assert_equal :foo, doc2['sym']
|
||||
end
|
||||
|
||||
|
@ -178,8 +173,8 @@ class BSONTest < Test::Unit::TestCase
|
|||
'binstring'.each_byte { |b| bin.put(b) }
|
||||
|
||||
doc = {'bin' => bin}
|
||||
@b.serialize(doc)
|
||||
doc2 = @b.deserialize
|
||||
bson = BSON.serialize(doc)
|
||||
doc2 = BSON.deserialize(bson)
|
||||
bin2 = doc2['bin']
|
||||
assert_kind_of Binary, bin2
|
||||
assert_equal 'binstring', bin2.to_s
|
||||
|
@ -190,8 +185,8 @@ class BSONTest < Test::Unit::TestCase
|
|||
bin = Binary.new([1, 2, 3, 4, 5], Binary::SUBTYPE_USER_DEFINED)
|
||||
|
||||
doc = {'bin' => bin}
|
||||
@b.serialize(doc)
|
||||
doc2 = @b.deserialize
|
||||
bson = BSON.serialize(doc)
|
||||
doc2 = BSON.deserialize(bson)
|
||||
bin2 = doc2['bin']
|
||||
assert_kind_of Binary, bin2
|
||||
assert_equal [1, 2, 3, 4, 5], bin2.to_a
|
||||
|
@ -203,8 +198,8 @@ class BSONTest < Test::Unit::TestCase
|
|||
5.times { |i| bb.put(i + 1) }
|
||||
|
||||
doc = {'bin' => bb}
|
||||
@b.serialize(doc)
|
||||
doc2 = @b.deserialize
|
||||
bson = BSON.serialize(doc)
|
||||
doc2 = BSON.deserialize(bson)
|
||||
bin2 = doc2['bin']
|
||||
assert_kind_of Binary, bin2
|
||||
assert_equal [1, 2, 3, 4, 5], bin2.to_a
|
||||
|
@ -215,24 +210,24 @@ class BSONTest < Test::Unit::TestCase
|
|||
val = OrderedHash.new
|
||||
val['not_id'] = 1
|
||||
val['_id'] = 2
|
||||
roundtrip = @b.deserialize(@b.serialize(val).to_a)
|
||||
roundtrip = BSON.deserialize(BSON.serialize(val).to_a)
|
||||
assert_kind_of OrderedHash, roundtrip
|
||||
assert_equal '_id', roundtrip.keys.first
|
||||
|
||||
val = {'a' => 'foo', 'b' => 'bar', :_id => 42, 'z' => 'hello'}
|
||||
roundtrip = @b.deserialize(@b.serialize(val).to_a)
|
||||
roundtrip = BSON.deserialize(BSON.serialize(val).to_a)
|
||||
assert_kind_of OrderedHash, roundtrip
|
||||
assert_equal '_id', roundtrip.keys.first
|
||||
end
|
||||
|
||||
def test_nil_id
|
||||
doc = {"_id" => nil}
|
||||
assert_equal doc, @b.deserialize(@b.serialize(doc).to_a)
|
||||
assert_equal doc, BSON.deserialize(bson = BSON.serialize(doc).to_a)
|
||||
end
|
||||
|
||||
def test_timestamp
|
||||
val = {"test" => [4, 20]}
|
||||
assert_equal val, @b.deserialize([0x13, 0x00, 0x00, 0x00,
|
||||
assert_equal val, BSON.deserialize([0x13, 0x00, 0x00, 0x00,
|
||||
0x11, 0x74, 0x65, 0x73,
|
||||
0x74, 0x00, 0x04, 0x00,
|
||||
0x00, 0x00, 0x14, 0x00,
|
||||
|
@ -242,29 +237,29 @@ class BSONTest < Test::Unit::TestCase
|
|||
def test_overflow
|
||||
doc = {"x" => 2**75}
|
||||
assert_raise RangeError do
|
||||
@b.serialize(doc)
|
||||
bson = BSON.serialize(doc)
|
||||
end
|
||||
|
||||
doc = {"x" => 9223372036854775}
|
||||
assert_equal doc, @b.deserialize(@b.serialize(doc).to_a)
|
||||
assert_equal doc, BSON.deserialize(BSON.serialize(doc).to_a)
|
||||
|
||||
doc = {"x" => 9223372036854775807}
|
||||
assert_equal doc, @b.deserialize(@b.serialize(doc).to_a)
|
||||
assert_equal doc, BSON.deserialize(BSON.serialize(doc).to_a)
|
||||
|
||||
doc["x"] = doc["x"] + 1
|
||||
assert_raise RangeError do
|
||||
@b.serialize(doc)
|
||||
bson = BSON.serialize(doc)
|
||||
end
|
||||
|
||||
doc = {"x" => -9223372036854775}
|
||||
assert_equal doc, @b.deserialize(@b.serialize(doc).to_a)
|
||||
assert_equal doc, BSON.deserialize(BSON.serialize(doc).to_a)
|
||||
|
||||
doc = {"x" => -9223372036854775808}
|
||||
assert_equal doc, @b.deserialize(@b.serialize(doc).to_a)
|
||||
assert_equal doc, BSON.deserialize(BSON.serialize(doc).to_a)
|
||||
|
||||
doc["x"] = doc["x"] - 1
|
||||
assert_raise RangeError do
|
||||
@b.serialize(doc)
|
||||
bson = BSON.serialize(doc)
|
||||
end
|
||||
end
|
||||
|
||||
|
@ -273,12 +268,12 @@ class BSONTest < Test::Unit::TestCase
|
|||
val['not_id'] = 1
|
||||
val['_id'] = 2
|
||||
assert val.keys.include?('_id')
|
||||
@b.serialize(val)
|
||||
BSON.serialize(val)
|
||||
assert val.keys.include?('_id')
|
||||
|
||||
val = {'a' => 'foo', 'b' => 'bar', :_id => 42, 'z' => 'hello'}
|
||||
assert val.keys.include?(:_id)
|
||||
@b.serialize(val)
|
||||
BSON.serialize(val)
|
||||
assert val.keys.include?(:_id)
|
||||
end
|
||||
|
||||
|
@ -293,7 +288,7 @@ class BSONTest < Test::Unit::TestCase
|
|||
dup = {"_id" => "foo", :_id => "foo"}
|
||||
one = {"_id" => "foo"}
|
||||
|
||||
assert_equal @b.serialize(one).to_a, @b.serialize(dup).to_a
|
||||
assert_equal BSON.serialize(one).to_a, BSON.serialize(dup).to_a
|
||||
end
|
||||
|
||||
end
@@ -1,6 +1,4 @@
$LOAD_PATH[0,0] = File.join(File.dirname(__FILE__), '..', 'lib')
require 'mongo'
require 'test/unit'
require 'test/test_helper'

class ByteBufferTest < Test::Unit::TestCase

@@ -1,6 +1,4 @@
$LOAD_PATH[0,0] = File.join(File.dirname(__FILE__), '..', 'lib')
require 'test/unit'
require 'mongo'
require 'test/test_helper'
require 'mongo/gridfs'

class ChunkTest < Test::Unit::TestCase

@@ -1,20 +1,5 @@
# --
# Copyright (C) 2008-2009 10gen Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ++

require 'test/test_helper'

class TestCollection < Test::Unit::TestCase
  @@connection = Connection.new(ENV['MONGO_RUBY_DRIVER_HOST'] || 'localhost', ENV['MONGO_RUBY_DRIVER_PORT'] || Connection::DEFAULT_PORT)
  @@db = @@connection.db('ruby-mongo-test')

@@ -1,6 +1,4 @@
$LOAD_PATH[0,0] = File.join(File.dirname(__FILE__), '..', 'lib')
require 'mongo'
require 'test/unit'
require 'test/test_helper'
require 'logger'
require 'stringio'

@@ -1,8 +1,7 @@
$LOAD_PATH[0,0] = File.join(File.dirname(__FILE__), '..', 'lib')
require 'test/test_helper'
require 'mongo/errors'
require 'mongo/util/conversions'
require 'mongo/util/ordered_hash'
require 'test/unit'

class ConversionsTest < Test::Unit::TestCase
  include Mongo::Conversions

@@ -1,6 +1,4 @@
$LOAD_PATH[0,0] = File.join(File.dirname(__FILE__), '..', 'lib')
require 'mongo'
require 'test/unit'
require 'test/test_helper'

# NOTE: assumes Mongo is running
class CursorTest < Test::Unit::TestCase

@@ -1,7 +1,5 @@
$LOAD_PATH[0,0] = File.join(File.dirname(__FILE__), '..', 'lib')
require 'test/test_helper'
require 'digest/md5'
require 'mongo'
require 'test/unit'
require 'stringio'
require 'logger'

@@ -1,6 +1,4 @@
$LOAD_PATH[0,0] = File.join(File.dirname(__FILE__), '..', 'lib')
require 'mongo'
require 'test/unit'
require 'test/test_helper'

# NOTE: assumes Mongo is running
class DBAPITest < Test::Unit::TestCase

@@ -1,6 +1,4 @@
$LOAD_PATH[0,0] = File.join(File.dirname(__FILE__), '..', 'lib')
require 'mongo'
require 'test/unit'
require 'test/test_helper'

# NOTE: assumes Mongo is running
class DBConnectionTest < Test::Unit::TestCase

@@ -1,6 +1,4 @@
$LOAD_PATH[0,0] = File.join(File.dirname(__FILE__), '..', 'lib')
require 'test/unit'
require 'mongo'
require 'test/test_helper'
require 'mongo/gridfs'

class GridStoreTest < Test::Unit::TestCase
@@ -1,9 +1,10 @@
-$LOAD_PATH[0,0] = File.join(File.dirname(__FILE__), '..', 'lib')
-require 'rubygems'
+$:.unshift(File.join(File.dirname(__FILE__), '..', 'lib'))
+require 'rubygems' if ENV['C_EXT']
require 'mongo'
require 'test/unit'

begin
+  require 'rubygems'
  require 'shoulda'
  require 'mocha'
rescue LoadError

@@ -18,6 +19,8 @@ MSG
  exit
end

+require 'mongo_ext/cbson' if ENV['C_EXT']
+
# NOTE: most tests assume that MongoDB is running.
class Test::Unit::TestCase
  include Mongo
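Because test/test_helper.rb now keys everything off C_EXT, a single test file can also be run against the C extension without the aggregate rake tasks. A sketch only; the file path and test class here are hypothetical, not part of the diff:

  # $ C_EXT=true ruby test/unit/bson_test.rb
  #
  # Inside any test case, the active serializer can be asserted explicitly:
  class SerializerSanityTest < Test::Unit::TestCase
    def test_c_extension_active_when_requested
      assert_equal 'CBson', BSON_SERIALIZER.to_s if ENV['C_EXT']
    end
  end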
@@ -1,6 +1,4 @@
$LOAD_PATH[0,0] = File.join(File.dirname(__FILE__), '..', 'lib')
require 'mongo'
require 'test/unit'
require 'test/test_helper'

class ObjectIDTest < Test::Unit::TestCase

@@ -1,6 +1,4 @@
$LOAD_PATH[0,0] = File.join(File.dirname(__FILE__), '..', 'lib')
require 'mongo/util/ordered_hash'
require 'test/unit'
require 'test/test_helper'

class OrderedHashTest < Test::Unit::TestCase

@@ -1,8 +1,7 @@
HERE = File.dirname(__FILE__)
$LOAD_PATH[0,0] = File.join(HERE, '..', 'lib')
require 'mongo'
require 'test/test_helper'
require 'mongo/util/xml_to_ruby'
require 'test/unit'

# For each xml/bson file in the data subdirectory, we turn the XML into an
# OrderedHash and then test both Ruby-to-BSON and BSON-to-Ruby translations.

@@ -1,6 +1,4 @@
$LOAD_PATH[0,0] = File.join(File.dirname(__FILE__), '..', 'lib')
require 'mongo'
require 'test/unit'
require 'test/test_helper'

# NOTE: these tests are run only if we can connect to a single MongoDB in slave mode.
class SlaveConnectionTest < Test::Unit::TestCase