allow arbitrary attributes in files collection

parent a3f31208e6
commit 3133380341

HISTORY

@@ -1,5 +1,4 @@
 0.20

 * Support for new commands:
   * Collection#find_and_modify
   * Collection#stats
@@ -16,6 +15,7 @@
 * GridFS
   * Option to delete old versions of GridFileSystem entries.
   * Filename is now optional for Grid#put.
+  * Option to write arbitrary attributes to a file: @grid.put(@data, :favorite_phrase => "blimey!")
 * Removed the following deprecated items:
   * GridStore class
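The HISTORY entry above describes the new behaviour; here is a minimal usage sketch, assuming @db is an open Mongo::DB handle and @data is a string of bytes (the names mirror the HISTORY example and the tests further down):

    @grid = Grid.new(@db, 'test-fs')
    id    = @grid.put(@data, :favorite_phrase => "blimey!")  # extra key is written to the files collection
    file  = @grid.get(id)
    file['favorite_phrase']                                   # => "blimey!"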

@@ -27,6 +27,7 @@ module Mongo
   class GridIO
     DEFAULT_CHUNK_SIZE = 256 * 1024
     DEFAULT_CONTENT_TYPE = 'binary/octet-stream'
+    PROTECTED_ATTRS = [:files_id, :file_length, :client_md5, :server_md5]

     attr_reader :content_type, :chunk_size, :upload_date, :files_id, :filename,
       :metadata, :server_md5, :client_md5, :file_length
@@ -52,15 +53,16 @@ module Mongo
     # @options opts [Boolean] :safe (false) When safe mode is enabled, the chunks sent to the server
     #   will be validated using an md5 hash. If validation fails, an exception will be raised.
     def initialize(files, chunks, filename, mode, opts={})
-      @files = files
-      @chunks = chunks
-      @filename = filename
-      @mode = mode
-      @query = opts[:query] || {}
-      @query_opts = opts[:query_opts] || {}
-      @fs_name = opts[:fs_name] || Grid::DEFAULT_FS_NAME
-      @safe = opts[:safe] || false
-      @local_md5 = Digest::MD5.new if @safe
+      @files = files
+      @chunks = chunks
+      @filename = filename
+      @mode = mode
+      @query = opts.delete(:query) || {}
+      @query_opts = opts.delete(:query_opts) || {}
+      @fs_name = opts.delete(:fs_name) || Grid::DEFAULT_FS_NAME
+      @safe = opts.delete(:safe) || false
+      @local_md5 = Digest::MD5.new if @safe
+      @custom_attrs = {}

       case @mode
       when 'r' then init_read
@@ -70,6 +72,19 @@ module Mongo
       end
     end

+    def [](key)
+      @custom_attrs[key] || instance_variable_get("@#{key.to_s}")
+    end
+
+    def []=(key, value)
+      if PROTECTED_ATTRS.include?(key.to_sym)
+        warn "Attempting to overwrite protected value."
+        return nil
+      else
+        @custom_attrs[key] = value
+      end
+    end
+
     # Read the data from the file. If a length is specified, will read from the
     # current file position.
     #
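A note on the accessor pair added above: GridIO#[] checks the custom attributes first and falls back to the instance variable of the same name, while GridIO#[]= refuses any key listed in PROTECTED_ATTRS, printing a warning and returning nil instead. A rough sketch of the expected behaviour, assuming file is a GridIO instance returned by Grid#get (the 'rating' and 'some_custom_key' names are made up for illustration):

    file['some_custom_key']    # custom attribute, if one was stored
    file['content_type']       # falls back to @content_type via instance_variable_get
    file['rating'] = 9         # allowed: stored in @custom_attrs
    file['file_length'] = 1    # protected: warns and returns nil, @file_length is untouched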
@@ -283,6 +298,7 @@ module Mongo
       @metadata = doc['metadata']
       @md5 = doc['md5']
       @filename = doc['filename']
+      @custom_attrs = doc

       @current_chunk = get_chunk(0)
       @file_position = 0
@@ -290,12 +306,13 @@ module Mongo

     # Initialize the class for writing a file.
     def init_write(opts)
-      @files_id = opts[:_id] || BSON::ObjectID.new
-      @content_type = opts[:content_type] || (defined? MIME) && get_content_type || DEFAULT_CONTENT_TYPE
-      @chunk_size = opts[:chunk_size] || DEFAULT_CHUNK_SIZE
-      @metadata = opts[:metadata] if opts[:metadata]
-      @aliases = opts[:aliases] if opts[:aliases]
+      @files_id = opts.delete(:_id) || BSON::ObjectID.new
+      @content_type = opts.delete(:content_type) || (defined? MIME) && get_content_type || DEFAULT_CONTENT_TYPE
+      @chunk_size = opts.delete(:chunk_size) || DEFAULT_CHUNK_SIZE
+      @metadata = opts.delete(:metadata) if opts[:metadata]
+      @aliases = opts.delete(:aliases) if opts[:aliases]
       @file_length = 0
+      opts.each {|k, v| self[k] = v}

       @current_chunk = create_chunk(0)
       @file_position = 0
@@ -304,14 +321,15 @@ module Mongo
     def to_mongo_object
       h = OrderedHash.new
       h['_id'] = @files_id
-      h['filename'] = @filename
+      h['filename'] = @filename if @filename
       h['contentType'] = @content_type
       h['length'] = @current_chunk ? @current_chunk['n'] * @chunk_size + @chunk_position : 0
       h['chunkSize'] = @chunk_size
       h['uploadDate'] = @upload_date
-      h['aliases'] = @aliases
-      h['metadata'] = @metadata
+      h['aliases'] = @aliases if @aliases
+      h['metadata'] = @metadata if @metadata
       h['md5'] = get_md5
+      h.merge!(@custom_attrs)
       h
     end

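Taken together, the GridIO changes above are what make arbitrary attributes round-trip: init_write hands every option it does not consume to self[k] = v, to_mongo_object merges @custom_attrs into the files document, and init_read keeps the whole document so [] can find the extra keys again. A hedged sketch of that flow, with a purely illustrative :color key:

    id = @grid.put(@data, :color => "red")
    # init_write:      opts.each {|k, v| self[k] = v}  -> @custom_attrs[:color] = "red"
    # to_mongo_object: h.merge!(@custom_attrs)         -> the files doc gains a 'color' field
    # init_read:       @custom_attrs = doc             -> @grid.get(id)['color'] returns "red"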
@@ -64,6 +64,8 @@ class GridTest < Test::Unit::TestCase
       id = @grid.put(@data, :metadata => @metadata)
       file = @grid.get(id)
       assert_nil file.filename
+      file_doc = @files.find_one({'_id' => id})
+      assert !file_doc.has_key?('filename')
       assert_equal @metadata, file.metadata
     end

@@ -75,6 +77,28 @@ class GridTest < Test::Unit::TestCase
       end
     end

+    context "Writing arbitrary data fields" do
+      setup do
+        @data = "GRIDDATA" * 50000
+        @grid = Grid.new(@db, 'test-fs')
+      end
+
+      should "write random keys to the files collection" do
+        id = @grid.put(@data, :phrases => ["blimey", "ahoy!"])
+        file = @grid.get(id)
+
+        assert_equal ["blimey", "ahoy!"], file['phrases']
+      end
+
+      should "ignore special keys" do
+        id = @grid.put(@data, :file_length => 100, :phrase => "blimey")
+        file = @grid.get(id)
+
+        assert_equal "blimey", file['phrase']
+        assert_equal 400_000, file.file_length
+      end
+    end
+
     context "Storing data with a length of zero" do
       setup do
         @grid = Grid.new(@db, 'test-fs')