reorder benchmark output, update readme

Brian Lopez 2010-04-04 22:51:38 -07:00
parent dd60ede9d1
commit c1c31c50c7
2 changed files with 16 additions and 15 deletions

@@ -69,10 +69,11 @@ Me: Yep, but its API is considerably more complex *and* is 3x slower.
 Performing a basic "SELECT * FROM" query on a table with ~31k rows and fields of nearly every representable data type,
 then iterating over every row using an #each like method yielding a block:
 
+  # The Mysql gem appears faster because it only gives back nil and strings.
        user     system      total        real
  Mysql
   1.050000   0.090000   1.140000 (  1.441405)
- do_mysql
- 11.440000   0.260000  11.700000 ( 11.951023)
  Mysql2
   3.660000   0.170000   3.830000 (  4.082238)
+ do_mysql
+ 11.440000   0.260000  11.700000 ( 11.951023)
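The new comment explains the ordering: the Mysql gem looks fastest partly because it hands back only strings and nils, while Mysql2 spends time casting values to native Ruby types. A minimal, illustrative comparison of the two iteration styles (host, credentials and the mysql2_test table are hypothetical):

    require 'mysql'
    require 'mysql2'

    sql = "SELECT * FROM mysql2_test"   # hypothetical table

    # mysql: every value comes back as a String (or nil)
    mysql = Mysql.new("localhost", "root")
    mysql.query "USE mysql2_test"
    mysql.query(sql).each_hash do |row|
      # row["id"] is a String such as "1"
    end

    # mysql2: values arrive already cast (Integer, Time, BigDecimal, ...)
    mysql2 = Mysql2::Client.new(:host => "localhost", :username => "root")
    mysql2.query "USE mysql2_test"
    mysql2.query(sql).each do |row|
      # row["id"] is an Integer
    end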

@@ -23,18 +23,6 @@ Benchmark.bmbm do |x|
     end
   end
 
-  do_mysql = DataObjects::Connection.new("mysql://localhost/#{database}")
-  command = DataObjects::Mysql::Command.new do_mysql, sql
-  x.report do
-    puts "do_mysql"
-    number_of.times do
-      do_result = command.execute_reader
-      do_result.each do |res|
-        # puts res.inspect
-      end
-    end
-  end
-
   mysql2 = Mysql2::Client.new(:host => "localhost", :username => "root")
   mysql2.query "USE #{database}"
   x.report do
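The trailing `x.report do` line above opens the Mysql2 timing block, which this commit does not touch, so the diff cuts off there. Judging from the surrounding code it presumably mirrors the do_mysql block being moved, roughly:

    # Sketch of the unchanged Mysql2 report block (not part of this diff).
    x.report do
      puts "Mysql2"
      number_of.times do
        mysql2_result = mysql2.query sql
        mysql2_result.each do |res|
          # puts res.inspect
        end
      end
    end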
@@ -46,4 +34,16 @@ Benchmark.bmbm do |x|
       end
     end
   end
+
+  do_mysql = DataObjects::Connection.new("mysql://localhost/#{database}")
+  command = DataObjects::Mysql::Command.new do_mysql, sql
+  x.report do
+    puts "do_mysql"
+    number_of.times do
+      do_result = command.execute_reader
+      do_result.each do |res|
+        # puts res.inspect
+      end
+    end
+  end
 end
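With the do_mysql block moved to the bottom of the file, the report blocks now run in the same order the README lists them: Mysql, Mysql2, then do_mysql. A condensed sketch of the resulting Benchmark.bmbm skeleton (database, sql and number_of are illustrative; bmbm runs each block twice, a rehearsal pass and then the measured pass):

    require 'benchmark'
    require 'mysql'
    require 'mysql2'
    require 'do_mysql'

    database  = "mysql2_test"                 # illustrative
    sql       = "SELECT * FROM mysql2_test"   # illustrative
    number_of = 10                            # illustrative

    Benchmark.bmbm do |x|
      mysql = Mysql.new("localhost", "root")
      mysql.query "USE #{database}"
      x.report("Mysql")    { number_of.times { mysql.query(sql).each_hash { |r| } } }

      mysql2 = Mysql2::Client.new(:host => "localhost", :username => "root")
      mysql2.query "USE #{database}"
      x.report("Mysql2")   { number_of.times { mysql2.query(sql).each { |r| } } }

      do_mysql = DataObjects::Connection.new("mysql://localhost/#{database}")
      command  = DataObjects::Mysql::Command.new(do_mysql, sql)
      x.report("do_mysql") { number_of.times { command.execute_reader.each { |r| } } }
    end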