HERE = File.dirname(__FILE__)
$LOAD_PATH[0,0] = File.join(HERE, '..', 'lib')
require 'mongo'
require 'mongo/util/xml_to_ruby'
require 'test/unit'

# For each xml/bson file in the data subdirectory, we turn the XML into an
# OrderedHash and then test both Ruby-to-BSON and BSON-to-Ruby translations.
#
# There is a whole other project that includes similar tests
# (http://github.com/mongodb/mongo-qa). If the directory ../../mongo-qa
# exists (that is, the top-level dir of mongo-qa is next to the top-level dir
# of this project), then we find the BSON test files there and use those, too.
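#
# Each round-trip case is a pair of files in the same directory: <name>.xson
# (an XML description of the document) and <name>.bson (the expected raw
# BSON bytes for that document).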
class RoundTripTest < Test::Unit::TestCase

  include XGen::Mongo::Driver

  @@ruby = nil
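
  # setup parses each data/*.xml file once, on the first run, and caches the
  # resulting OrderedHash objects in @@ruby, keyed by file basename.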
  def setup
    unless @@ruby
      names = Dir[File.join(HERE, 'data', '*.xml')].collect {|f| File.basename(f).sub(/\.xml$/, '') }
      @@ruby = {}
      names.each { |name|
        File.open(File.join(HERE, 'data', "#{name}.xml")) { |f|
          @@ruby[name] = XMLToRuby.new.xml_to_ruby(f)
        }
      }
    end
  end
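
  # Keep one trivial test so this TestCase is never empty (Test::Unit
  # complains about a TestCase with no test methods), which could otherwise
  # happen if no data files are found and no tests get generated.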
  def test_dummy
    assert true
  end
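
  # For each *.xson file in dir, eval a test method that calls one_round_trip
  # on the matching <name>.xson / <name>.bson pair. The directory path is
  # mangled into the method name so that files with the same basename in
  # different test directories get distinct tests. As a purely hypothetical
  # example, a file named data/string.xson would produce a method named
  # something like test_string_<mangled_path_to_data>.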
  def self.create_test_for_round_trip_files_in_dir(dir)
    names = Dir[File.join(dir, '*.xson')].collect {|f| File.basename(f).sub(/\.xson$/, '') }
    names.each { |name|
      eval <<EOS
def test_#{name}_#{dir.gsub(/[^a-zA-Z0-9_]/, '_')}
  one_round_trip("#{dir}", "#{name}")
end
EOS
    }
  end

  # Dynamically generate one test for each test file. This way, if one test
  # fails the others will still run.
  create_test_for_round_trip_files_in_dir(File.join(HERE, 'data'))

  mongo_qa_dir = File.join(HERE, '../..', 'mongo-qa/modules/bson_tests/tests')
  if File.exist?(mongo_qa_dir)
    %w(basic_types complex single_types).each { |subdir_name|
      create_test_for_round_trip_files_in_dir(File.join(mongo_qa_dir, subdir_name))
    }
  end

  # Round-trip comparisons of Ruby-to-BSON and back.
  # * Take the objects that were read from XML
  # * Turn them into BSON bytes
  # * Compare that with the BSON files we have
  # * Turn those BSON bytes back into Ruby objects
  # * Turn them back into BSON bytes
  # * Compare that with the BSON files we have (or the bytes that were already
  #   generated)
  def one_round_trip(dir, name)
    obj = File.open(File.join(dir, "#{name}.xson")) { |f|
      XMLToRuby.new.xml_to_ruby(f)
    }

    File.open(File.join(dir, "#{name}.bson"), 'rb') { |f|
      # Read the BSON from the file
      bson = f.read
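      # On 1.9, String#bytes gives the raw byte values directly; on 1.8,
      # indexing a one-character string returns its byte value, hence the
      # split/collect fallback. Either branch leaves bson as an array of
      # integer byte values.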
      bson = if RUBY_VERSION >= '1.9'
               bson.bytes.to_a
             else
               bson.split(//).collect { |c| c[0] }
             end

      # Turn the Ruby object into BSON bytes and compare with the BSON bytes
      # from the file.
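      # (serialize appears to return a byte-buffer object here; to_a converts
      # it to a plain array of byte values so it can be compared directly
      # against the bytes read from the file.)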
      bson_from_ruby = BSON.new.serialize(obj).to_a
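
      # The rescue below exists only so the commented-out debugging dump can
      # be re-enabled easily; the assertion failure is always re-raised.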
      begin
        assert_equal bson.length, bson_from_ruby.length
        assert_equal bson, bson_from_ruby
      rescue => ex
        # File.open(File.join(dir, "#{name}_out_a.bson"), 'wb') { |f| # DEBUG
        #   bson_from_ruby.each { |b| f.putc(b) }
        # }
        raise ex
      end

      # Turn those BSON bytes back into a Ruby object.
      #
      # We're passing a nil db to the constructor here, but that's OK because
      # the BSON DBRef bytes don't contain the db object in any case, and we
      # don't care what the database is.
      obj_from_bson = BSON.new.deserialize(ByteBuffer.new(bson_from_ruby))
      assert_kind_of OrderedHash, obj_from_bson

      # Turn that Ruby object into BSON and compare it to the original BSON
      # bytes.
      bson_from_ruby = BSON.new.serialize(obj_from_bson).to_a
      begin
        assert_equal bson.length, bson_from_ruby.length
        assert_equal bson, bson_from_ruby
      rescue => ex
        # File.open(File.join(dir, "#{name}_out_b.bson"), 'wb') { |f| # DEBUG
        #   bson_from_ruby.each { |b| f.putc(b) }
        # }
        raise ex
      end
    }
  end
end