#!/usr/bin/env ruby
#$LOAD_PATH[0,0] = File.join(File.dirname(__FILE__), '..', 'lib')
#
# Note: Ruby 1.9 is faster than 1.8, as expected.
# This suite runs against the installed version of the Ruby MongoDB driver
# (mongo-ruby-driver). The C extension, bson_ext, will be used if installed.

require 'rubygems'
require 'mongo'

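# A representative "large" document: nested metadata hashes plus a
# harvested_words array of 280 entries (14 words repeated 20 times).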
large = {
  'base_url' => 'http://www.example.com/test-me',
  'total_word_count' => 6743,
  'access_time' => Time.now,
  'meta_tags' => {
    'description' => 'i am a long description string',
    'author' => 'Holly Man',
    'dynamically_created_meta_tag' => 'who knows\n what'
  },
  'page_structure' => {
    'counted_tags' => 3450,
    'no_of_js_attached' => 10,
    'no_of_images' => 6
  },
  'harvested_words' => ['10gen','web','open','source','application','paas',
                        'platform-as-a-service','technology','helps',
                        'developers','focus','building','mongodb','mongo'] * 20
}


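# Connect to a local MongoDB instance and use the 'bigdocs' collection
# in the 'benchmark' database.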
con = Mongo::Connection.new

db  = con['benchmark']
col = db['bigdocs']

# Clear out any documents left over from a previous run.
col.remove

puts "Inserting 100,000 large documents..."

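# Returns 100 shallow copies of the document. Each copy is a distinct hash,
# so the driver can assign every document in the batch its own _id.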
def get_batch(large)
  batch = []
  100.times do
    batch << large.dup
  end
  batch
end

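# Insert 1,000 batches of 100 documents each: 100,000 documents in total.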
1000.times do
  col.insert(get_batch(large))
end
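# Run a GC pass now so garbage from the insert phase is less likely to
# skew the read timings below.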
GC.start

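# First pass: a :batch_size of 0 lets the server decide how many documents
# to return per batch; read 200 documents one at a time.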
t1 = Time.now
cursor = col.find({}, :batch_size => 0)
200.times do
  cursor.next_document
end
t2 = Time.now
puts "Database decides batch size: #{t2 - t1}"


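# Second pass: the same 200 reads, but with the cursor fetching
# 100 documents per batch from the server.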
t1 = Time.now
cursor = col.find({}, :batch_size => 100)
200.times do
  cursor.next_document
end
t2 = Time.now
puts "Batch size of 100: #{t2 - t1}"