diff --git a/ext/mysql.c b/ext/mysql.c index 2388158..297c4d6 100644 --- a/ext/mysql.c +++ b/ext/mysql.c @@ -248,6 +248,11 @@ typedef struct } arg_holder, *arg_holder2; +// here's the call to make rb_thread_blocking_region much cleaner and easier +// syntax: param_count+2, func_pointer to call, [RUBY_UBF_IO or RUBY_UBF_PROCESS], param1, param2... +// the third parameter is the interrupter--possible values appear to be RUBY_UBF_IO or RUBY_UBF_PROCESS http://groups.google.com/group/comp.lang.ruby/browse_thread/thread/ad8c1326b2a8e404/00447b9aa15979be?lnk=raot +// ex: (int) returned_this = rb_thread_blocking_region_variable_params(10, &method_name, RUBY_UBF_IO, param1, param2, param3, param4, param5, param6, param7, param8) + static void call_single_function_rb_thread_blocking_region(void *arg_holder_in); void *rb_thread_blocking_region_variable_params(int number, ...) diff --git a/test/create_test_db.rb b/test/create_test_db.rb new file mode 100644 index 0000000..c68029d --- /dev/null +++ b/test/create_test_db.rb @@ -0,0 +1,22 @@ +# To run first execute: +=begin +create database local_test_db; +use local_test_db; +CREATE TABLE test_table ( + c1 INT, + c2 VARCHAR(20) + ); +=end +# This script shows the effect of using .all_hashes instead of looping on each hash +# run it by substituting in a 'long' [many row] query for the query variable and toggling use_all_hashes here at the top +# note that we load all the rows first, then run .all_hashes on the result [to see more easily the effect of all hashes] +# on my machine and a 200_000 row table, it took 3.38s versus 3.65s for the old .each_hash way [note also that .each_hash is +# almost as fast, now, as .all_hashes--they've both been optimized] +require 'mysqlplus' + +puts 'initing db' +# init the DB +conn = Mysql.real_connect('localhost', 'root', '', 'local_test_db') +conn.query("delete from test_table") +200_000.times {conn.query(" insert into test_table (c1, c2) values (3, 'ABCDEFG')")} +puts 'connection pool ready' diff 
--git a/test/test_all_hashes.rb b/test/test_all_hashes.rb index 4d9f755..b809043 100644 --- a/test/test_all_hashes.rb +++ b/test/test_all_hashes.rb @@ -1,18 +1,4 @@ -# To run first execute: -=begin -create database local_test_db; -use local_test_db; -CREATE TABLE test_table ( - c1 INT, - c2 VARCHAR(20) - ); -=end -# This script shows the effect of using .all_hashes instead of looping on each hash -# run it by substiting in a 'long' [many row] query for the query variable and toggling use_all_hashes here at the top -# note that we load all the rows first, then run .all_hashes on the result [to see more easily the effect of all hashes] -# on my machine and a 200_000 row table, it took 3.38s versus 3.65s for the old .each_hash way [note also that .each_hash is -# almost as fast, now, as .all_hashes--they've both been optimized] -require 'mysqlplus' +require 'create_test_db' use_the_all_hashes_method = true @@ -25,13 +11,6 @@ $count.times do $connections << Mysql.real_connect('localhost','root', '', 'local_test_db') end -puts 'initing db' -# init the DB -conn = Mysql.real_connect('localhost', 'root', '', 'local_test_db') -conn.query("delete from test_table") -200_000.times {conn.query(" insert into test_table (c1, c2) values (3, 'ABCDEFG')")} -puts 'connection pool ready' - $threads = [] $count.times do |i| diff --git a/test/test_parsing_while_response_is_being_read.rb b/test/test_parsing_while_response_is_being_read.rb index a44d28a..f32511c 100644 --- a/test/test_parsing_while_response_is_being_read.rb +++ b/test/test_parsing_while_response_is_being_read.rb @@ -5,7 +5,7 @@ # from .82s to .62s # you can experiment with it by changing the query here to be a long one, and toggling the do_the_use_query_optimization variable # this also has the interesting property of 'freeing' Ruby to do thread changes mid-query. 
-require 'mysqlplus' +require 'create_test_db' do_the_use_query_optimization = true @@ -15,7 +15,7 @@ $start = Time.now $connections = [] $count.times do - $connections << Mysql.real_connect('localhost','root', '', 'local_leadgen_dev') + $connections << Mysql.real_connect('localhost','root', '', 'local_test_db') end puts 'connection pool ready' @@ -27,7 +27,7 @@ $count.times do |i| puts "sending query on connection #{i}" conn = $connections[i] saved = [] - query = "select * from campus_zips" + query = "select * from test_table" if do_the_use_query_optimization conn.query_with_result=false result = conn.async_query(query)