Create the logging message only when a logger is passed to the Connection.
This results in a significant performance improvement. Many thanks to Matt Taylor for noticing the unusual glut of calls to ObjectId#to_s. See: http://unhalting.com/?p=18
commit 300b442a94
parent c2955239a4
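The core of the change is a pattern shift at each call site: instead of always interpolating `selector.inspect` into a log string and handing it to the Connection, the interpolation now sits behind an `if @logger` guard, so documents full of ObjectIds are never stringified unless logging is actually enabled. The sketch below illustrates the before/after shapes with a hypothetical FakeCollection class; it is not the driver's real API, just the pattern under discussion.

    # Minimal sketch of the before/after pattern; FakeCollection and the
    # remove_* methods are illustrative only, not the driver's real code.
    require 'logger'

    class FakeCollection
      def initialize(name, logger = nil)
        @name   = name
        @logger = logger
      end

      # Old shape: the log string (and every #inspect inside it) is built on
      # every call, whether or not a logger exists.
      def remove_eager(selector)
        log_message = "MONGODB #{@name}.remove(#{selector.inspect})"
        @logger.debug(log_message) if @logger
        # ... build and send OP_DELETE here ...
      end

      # New shape: the interpolation is guarded, so the string is only built
      # when a logger was actually supplied.
      def remove_lazy(selector)
        @logger.debug("MONGODB #{@name}.remove(#{selector.inspect})") if @logger
        # ... build and send OP_DELETE here ...
      end
    end

    FakeCollection.new("books").remove_lazy(:_id => 1)                       # no logging work at all
    FakeCollection.new("books", Logger.new($stdout)).remove_lazy(:_id => 1)  # logs as before

Because Ruby's trailing `if` modifier skips evaluation of the entire statement when the condition is false, the guarded form never calls #inspect (and therefore never calls ObjectId#to_s) when no logger is configured.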
@@ -58,6 +58,7 @@ module Mongo
       @db, @name = db, name
       @connection = @db.connection
+      @logger = @connection.logger
       @pk_factory = pk_factory || BSON::ObjectId
       @hint = nil
     end
 
@@ -288,15 +289,14 @@ module Mongo
       message.put_int(0)
       message.put_array(BSON::BSON_CODER.serialize(selector, false, true).to_a)
 
+      @logger.debug("MONGODB #{@db.name}['#{@name}'].remove(#{selector.inspect})") if @logger
       if opts[:safe]
-        @connection.send_message_with_safe_check(Mongo::Constants::OP_DELETE, message, @db.name,
-          "#{@db.name}['#{@name}'].remove(#{selector.inspect})", opts[:safe])
+        @connection.send_message_with_safe_check(Mongo::Constants::OP_DELETE, message, @db.name, nil, opts[:safe])
         # the return value of send_message_with_safe_check isn't actually meaningful --
         # only the fact that it didn't raise an error is -- so just return true
         true
       else
-        @connection.send_message(Mongo::Constants::OP_DELETE, message,
-          "#{@db.name}['#{@name}'].remove(#{selector.inspect})")
+        @connection.send_message(Mongo::Constants::OP_DELETE, message)
       end
     end
 
@@ -330,12 +330,11 @@ module Mongo
       message.put_int(update_options)
       message.put_array(BSON::BSON_CODER.serialize(selector, false, true).to_a)
       message.put_array(BSON::BSON_CODER.serialize(document, false, true).to_a)
+      @logger.debug("MONGODB #{@db.name}['#{@name}'].update(#{selector.inspect}, #{document.inspect})") if @logger
       if options[:safe]
-        @connection.send_message_with_safe_check(Mongo::Constants::OP_UPDATE, message, @db.name,
-          "#{@db.name}['#{@name}'].update(#{selector.inspect}, #{document.inspect})", options[:safe])
+        @connection.send_message_with_safe_check(Mongo::Constants::OP_UPDATE, message, @db.name, nil, options[:safe])
       else
-        @connection.send_message(Mongo::Constants::OP_UPDATE, message,
-          "#{@db.name}['#{@name}'].update(#{selector.inspect}, #{document.inspect})")
+        @connection.send_message(Mongo::Constants::OP_UPDATE, message, nil)
       end
     end
 
@@ -685,12 +684,11 @@ module Mongo
       documents.each { |doc| message.put_array(BSON::BSON_CODER.serialize(doc, check_keys, true).to_a) }
       raise InvalidOperation, "Exceded maximum insert size of 16,000,000 bytes" if message.size > 16_000_000
 
+      @logger.debug("MONGODB #{@db.name}['#{collection_name}'].insert(#{documents.inspect})") if @logger
       if safe
-        @connection.send_message_with_safe_check(Mongo::Constants::OP_INSERT, message, @db.name,
-          "#{@db.name}['#{collection_name}'].insert(#{documents.inspect})", safe)
+        @connection.send_message_with_safe_check(Mongo::Constants::OP_INSERT, message, @db.name, nil, safe)
       else
-        @connection.send_message(Mongo::Constants::OP_INSERT, message,
-          "#{@db.name}['#{collection_name}'].insert(#{documents.inspect})")
+        @connection.send_message(Mongo::Constants::OP_INSERT, message, nil)
       end
       documents.collect { |o| o[:_id] || o['_id'] }
     end
@@ -298,7 +298,7 @@ module Mongo
     #
     # @core databases db-instance_method
     def db(db_name, options={})
-      DB.new(db_name, self, options.merge(:logger => @logger))
+      DB.new(db_name, self)
     end
 
     # Shortcut for returning a database. Use DB#db to accept options.
@@ -309,7 +309,7 @@ module Mongo
     #
     # @core databases []-instance_method
     def [](db_name)
-      DB.new(db_name, self, :logger => @logger)
+      DB.new(db_name, self)
     end
 
     # Drop a database.
@@ -388,11 +388,9 @@ module Mongo
     #
     # @param [Integer] operation a MongoDB opcode.
     # @param [BSON::ByteBuffer] message a message to send to the database.
-    # @param [String] log_message text version of +message+ for logging.
     #
     # @return [Integer] number of bytes sent
     def send_message(operation, message, log_message=nil)
-      @logger.debug(" MONGODB #{log_message || message}") if @logger
       begin
         packed_message = add_message_headers(operation, message).to_s
         socket = checkout
@@ -408,7 +406,6 @@ module Mongo
     # @param [Integer] operation a MongoDB opcode.
     # @param [BSON::ByteBuffer] message a message to send to the database.
     # @param [String] db_name the name of the database. used on call to get_last_error.
-    # @param [String] log_message text version of +message+ for logging.
     # @param [Hash] last_error_params parameters to be sent to getLastError. See DB#error for
     #   available options.
     #
@@ -420,7 +417,6 @@ module Mongo
     def send_message_with_safe_check(operation, message, db_name, log_message=nil, last_error_params=false)
       message_with_headers = add_message_headers(operation, message)
       message_with_check = last_error_message(db_name, last_error_params)
-      @logger.debug(" MONGODB #{log_message || message}") if @logger
       begin
         sock = checkout
         packed_message = message_with_headers.append!(message_with_check).to_s
@@ -442,7 +438,6 @@ module Mongo
     #
     # @param [Integer] operation a MongoDB opcode.
     # @param [BSON::ByteBuffer] message a message to send to the database.
-    # @param [String] log_message text version of +message+ for logging.
     # @param [Socket] socket a socket to use in lieu of checking out a new one.
     #
     # @return [Array]
@@ -450,7 +445,6 @@ module Mongo
     #   and [3] a cursor_id.
     def receive_message(operation, message, log_message=nil, socket=nil)
       packed_message = add_message_headers(operation, message).to_s
-      @logger.debug(" MONGODB #{log_message || message}") if @logger
       begin
         sock = socket || checkout
 
@@ -626,7 +620,7 @@ module Mongo
       if config
         update_node_list(config['hosts']) if config['hosts']
         if @logger
-          @logger.warn(config['msg']) if config['msg']
+          @logger.warn("MONGODB #{config['msg']}") if config['msg']
         end
       end
 
@@ -737,7 +731,7 @@ module Mongo
 
         # Otherwise, wait
         if @logger
-          @logger.warn "Waiting for available connection; #{@checked_out.size} of #{@size} connections checked out."
+          @logger.warn "MONGODB Waiting for available connection; #{@checked_out.size} of #{@size} connections checked out."
         end
         @queue.wait(@connection_mutex)
       end
@@ -37,6 +37,7 @@ module Mongo
       @db = collection.db
       @collection = collection
       @connection = @db.connection
+      @logger = @connection.logger
 
       @selector = convert_selector_for_query(options[:selector])
       @fields = convert_fields_for_query(options[:fields])
@@ -241,7 +242,8 @@ module Mongo
         message = BSON::ByteBuffer.new([0, 0, 0, 0])
         message.put_int(1)
         message.put_long(@cursor_id)
-        @connection.send_message(Mongo::Constants::OP_KILL_CURSORS, message, "cursor.close #{@cursor_id}")
+        @logger.debug("MONGODB cursor.close #{@cursor_id}") if @logger
+        @connection.send_message(Mongo::Constants::OP_KILL_CURSORS, message, nil)
       end
       @cursor_id = 0
       @closed = true
@@ -343,7 +345,9 @@ module Mongo
 
       # Cursor id.
       message.put_long(@cursor_id)
-      results, @n_received, @cursor_id = @connection.receive_message(Mongo::Constants::OP_GET_MORE, message, "cursor.get_more()", @socket)
+      @logger.debug("MONGODB cursor.refresh() for cursor #{@cursor_id}") if @logger
+      results, @n_received, @cursor_id = @connection.receive_message(Mongo::Constants::OP_GET_MORE,
+        message, nil, @socket)
       @cache += results
       close_cursor_if_query_complete
     end
@@ -354,8 +358,8 @@ module Mongo
         false
       else
         message = construct_query_message
-        results, @n_received, @cursor_id = @connection.receive_message(Mongo::Constants::OP_QUERY, message,
-          (query_log_message if @connection.logger), @socket)
+        @logger.debug query_log_message if @logger
+        results, @n_received, @cursor_id = @connection.receive_message(Mongo::Constants::OP_QUERY, message, nil, @socket)
         @cache += results
         @query_run = true
         close_cursor_if_query_complete
@@ -12,8 +12,9 @@ class CollectionTest < Test::Unit::TestCase
       @db = @conn['testing']
       @coll = @db.collection('books')
       @conn.expects(:send_message).with do |op, msg, log|
-        op == 2001 && log.include?("testing['books'].update")
+        op == 2001
       end
+      @logger.stubs(:debug)
       @coll.update({}, {:title => 'Moby Dick'})
     end
 
@@ -22,7 +23,10 @@ class CollectionTest < Test::Unit::TestCase
       @db = @conn['testing']
       @coll = @db.collection('books')
       @conn.expects(:send_message).with do |op, msg, log|
-        op == 2002 && log.include?("testing['books'].insert")
+        op == 2002
       end
+      @logger.expects(:debug).with do |msg|
+        msg.include?("Moby")
+      end
       @coll.insert({:title => 'Moby Dick'})
     end
@@ -32,8 +36,11 @@ class CollectionTest < Test::Unit::TestCase
       @db = @conn['testing']
       @coll = @db.collection('books')
       @conn.expects(:receive_message).with do |op, msg, log, sock|
-        op == 2004 && log.include?("sort")
+        op == 2004
       end.returns([[], 0, 0])
+      @logger.expects(:debug).with do |msg|
+        msg.include?('Moby')
+      end
       @coll.find({:title => 'Moby Dick'}).sort([['title', 1], ['author', 1]]).next_document
     end
 
@@ -43,7 +50,10 @@ class CollectionTest < Test::Unit::TestCase
       @coll = @db.collection('books')
       data = BSON::Binary.new(("BINARY " * 1000).unpack("c*"))
       @conn.expects(:send_message).with do |op, msg, log|
-        op == 2002 && log.include?("BSON::Binary")
+        op == 2002
       end
+      @logger.expects(:debug).with do |msg|
+        msg.include?("Binary")
+      end
       @coll.insert({:data => data})
     end
@@ -53,7 +63,10 @@ class CollectionTest < Test::Unit::TestCase
       @db = @conn['testing']
       @coll = @db.collection('books')
       @conn.expects(:send_message_with_safe_check).with do |op, msg, db_name, log|
-        op == 2001 && log.include?("testing['books'].update")
+        op == 2001
       end
+      @logger.expects(:debug).with do |msg|
+        msg.include?("testing['books'].update")
+      end
       @coll.update({}, {:title => 'Moby Dick'}, :safe => true)
     end
@@ -63,8 +76,9 @@ class CollectionTest < Test::Unit::TestCase
       @db = @conn['testing']
       @coll = @db.collection('books')
       @conn.expects(:send_message_with_safe_check).with do |op, msg, db_name, log|
-        op == 2001 && log.include?("testing['books'].update")
+        op == 2001
       end
+      @logger.stubs(:debug)
       @coll.update({}, {:title => 'Moby Dick'}, :safe => true)
     end
   end
@@ -3,7 +3,9 @@ require 'test/test_helper'
 class CursorTest < Test::Unit::TestCase
   context "Cursor options" do
     setup do
-      @connection = stub(:class => Connection)
+      @logger = mock()
+      @logger.stubs(:debug)
+      @connection = stub(:class => Connection, :logger => @logger)
       @db = stub(:name => "testing", :slave_ok? => false, :connection => @connection)
       @collection = stub(:db => @db, :name => "items")
       @cursor = Cursor.new(@collection)
@@ -70,7 +72,7 @@ class CursorTest < Test::Unit::TestCase
 
   context "Query fields" do
     setup do
-      @connection = stub(:class => Collection)
+      @connection = stub(:class => Collection, :logger => @logger)
      @db = stub(:slave_ok? => true, :name => "testing", :connection => @connection)
       @collection = stub(:db => @db, :name => "items")
     end