rename bucket/object to directory/file

This commit is contained in:
Wesley Beary 2010-01-14 20:44:39 -08:00
Родитель 15df1eb96e
Коммит 4b8dff3cf7
14 изменённых файлов: 443 добавления и 528 удалений

Просмотреть файл

@ -48,8 +48,8 @@ module AWS
connections[:ec2].addresses
end
def buckets
connections[:s3].buckets
def directories
connections[:s3].directories
end
def flavors

Просмотреть файл

@ -2,13 +2,13 @@ module Fog
module AWS
class S3
def buckets
Fog::AWS::S3::Buckets.new(:connection => self)
def directories
Fog::AWS::S3::Directories.new(:connection => self)
end
class Buckets < Fog::Collection
class Directories < Fog::Collection
model Fog::AWS::S3::Bucket
model Fog::AWS::S3::Directory
def all
if @loaded
@ -16,8 +16,8 @@ module Fog
end
@loaded = true
data = connection.get_service.body
data['Buckets'].each do |bucket|
self << new(bucket)
data['Buckets'].each do |directory|
self << new(directory)
end
self
end
@ -30,19 +30,19 @@ module Fog
:prefix => 'prefix'
})
data = connection.get_bucket(name, options).body
bucket = new(:name => data['Name'])
directory = new(:name => data['Name'])
options = {}
for key, value in data
if ['Delimiter', 'IsTruncated', 'Marker', 'MaxKeys', 'Prefix'].include?(key)
options[key] = value
end
end
bucket.objects.merge_attributes(options)
bucket.objects.instance_variable_set(:@loaded, true)
data['Contents'].each do |object|
bucket.objects << bucket.objects.new(object)
directory.files.merge_attributes(options)
directory.files.instance_variable_set(:@loaded, true)
data['Contents'].each do |file|
directory.files << directory.files.new(file)
end
bucket
directory
rescue Excon::Errors::NotFound
nil
end

Просмотреть файл

@ -2,7 +2,7 @@ module Fog
module AWS
class S3
class Bucket < Fog::Model
class Directory < Fog::Model
identity :name, 'Name'
@ -10,7 +10,6 @@ module Fog
def destroy
requires :name
connection.delete_bucket(@name)
true
rescue Excon::Errors::NotFound
@ -19,7 +18,6 @@ module Fog
def location
requires :name
data = connection.get_bucket_location(@name)
data.body['LocationConstraint']
end
@ -28,10 +26,10 @@ module Fog
@location = new_location
end
def objects
@objects ||= begin
Fog::AWS::S3::Objects.new(
:bucket => self,
def files
@files ||= begin
Fog::AWS::S3::Files.new(
:directory => self,
:connection => connection
)
end
@ -39,21 +37,18 @@ module Fog
def payer
requires :name
data = connection.get_request_payment(@name)
data.body['Payer']
end
def payer=(new_payer)
requires :name
connection.put_request_payment(@name, new_payer)
@payer = new_payer
end
def save
requires :name
options = {}
if @location
options['LocationConstraint'] = @location

Просмотреть файл

@ -2,7 +2,7 @@ module Fog
module AWS
class S3
class Object < Fog::Model
class File < Fog::Model
identity :key, 'Key'
@ -15,30 +15,28 @@ module Fog
attribute :size, 'Size'
attribute :storage_class, 'StorageClass'
def bucket
@bucket
def directory
@directory
end
def copy(target_bucket_name, target_object_key)
requires :bucket, :key
data = connection.copy_object(bucket.name, @key, target_bucket_name, target_object_key).body
target_bucket = connection.buckets.new(:name => target_bucket_name)
target_object = target_bucket.objects.new(attributes.merge!(:key => target_object_key))
def copy(target_directory_name, target_file_key)
requires :directory, :key
data = connection.copy_object(directory.name, @key, target_directory_name, target_file_key).body
target_directory = connection.directories.new(:name => target_directory_name)
target_file = target_directory.files.new(attributes.merge!(:key => target_file_key))
copy_data = {}
for key, value in data
if ['ETag', 'LastModified'].include?(key)
copy_data[key] = value
end
end
target_object.merge_attributes(copy_data)
target_object
target_file.merge_attributes(copy_data)
target_file
end
def destroy
requires :bucket, :key
connection.delete_object(bucket.name, @key)
requires :directory, :key
connection.delete_object(directory.name, @key)
true
end
@ -52,16 +50,16 @@ module Fog
end
def save(options = {})
requires :body, :bucket, :key
data = connection.put_object(bucket.name, @key, @body, options)
requires :body, :directory, :key
data = connection.put_object(directory.name, @key, @body, options)
@etag = data.headers['ETag']
true
end
private
def bucket=(new_bucket)
@bucket = new_bucket
def directory=(new_directory)
@directory = new_directory
end
end

Просмотреть файл

@ -2,7 +2,7 @@ module Fog
module AWS
class S3
class Objects < Fog::Collection
class Files < Fog::Collection
attribute :delimiter, 'Delimiter'
attribute :is_truncated, 'IsTruncated'
@ -10,7 +10,7 @@ module Fog
attribute :max_keys, 'MaxKeys'
attribute :prefix, 'Prefix'
model Fog::AWS::S3::Object
model Fog::AWS::S3::File
def all(options = {})
merge_attributes(options)
@ -18,19 +18,19 @@ module Fog
clear
end
@loaded = true
collection = bucket.collection.get(
bucket.name,
collection = directory.collection.get(
directory.name,
options
)
if collection
self.replace(collection.objects)
self.replace(collection.files)
else
nil
end
end
def bucket
@bucket
def directory
@directory
end
def get(key, options = {}, &block)
@ -40,48 +40,48 @@ module Fog
'max-keys' => @max_keys,
'prefix' => @prefix
}.merge!(options)
data = connection.get_object(bucket.name, key, options, &block)
object_data = {
data = connection.get_object(directory.name, key, options, &block)
file_data = {
:body => data.body,
:key => key
}
for key, value in data.headers
if ['Content-Length', 'Content-Type', 'ETag', 'Last-Modified'].include?(key)
object_data[key] = value
file_data[key] = value
end
end
new(object_data)
new(file_data)
rescue Excon::Errors::NotFound
nil
end
def get_url(key, expires)
connection.get_object_url(bucket.name, key, expires)
connection.get_object_url(directory.name, key, expires)
end
def head(key, options = {})
data = connection.head_object(bucket.name, key, options)
object_data = {
data = connection.head_object(directory.name, key, options)
file_data = {
:key => key
}
for key, value in data.headers
if ['Content-Length', 'Content-Type', 'ETag', 'Last-Modified'].include?(key)
object_data[key] = value
file_data[key] = value
end
end
new(object_data)
new(file_data)
rescue Excon::Errors::NotFound
nil
end
def new(attributes = {})
super({ :bucket => bucket }.merge!(attributes))
super({ :directory => directory }.merge!(attributes))
end
private
def bucket=(new_bucket)
@bucket = new_bucket
def directory=(new_directory)
@directory = new_directory
end
end

Просмотреть файл

@ -12,10 +12,10 @@ module Fog
end
def self.reload
load "fog/aws/models/s3/bucket.rb"
load "fog/aws/models/s3/buckets.rb"
load "fog/aws/models/s3/object.rb"
load "fog/aws/models/s3/objects.rb"
load "fog/aws/models/s3/directory.rb"
load "fog/aws/models/s3/directories.rb"
load "fog/aws/models/s3/file.rb"
load "fog/aws/models/s3/files.rb"
load "fog/aws/parsers/s3/copy_object.rb"
load "fog/aws/parsers/s3/get_bucket.rb"
@ -80,12 +80,12 @@ module Fog
metadata[:body] = data
metadata[:headers]['Content-Length'] = metadata[:body].size.to_s
else
filename = File.basename(data.path)
filename = ::File.basename(data.path)
unless (mime_types = MIME::Types.of(filename)).empty?
metadata[:headers]['Content-Type'] = mime_types.first.content_type
end
metadata[:body] = data.read
metadata[:headers]['Content-Length'] = File.size(data.path).to_s
metadata[:headers]['Content-Length'] = ::File.size(data.path).to_s
end
# metadata[:headers]['Content-MD5'] = Base64.encode64(Digest::MD5.digest(metadata[:body])).strip
metadata

Просмотреть файл

@ -1,129 +0,0 @@
require File.dirname(__FILE__) + '/../../../spec_helper'
describe 'Fog::AWS::S3::Bucket' do
describe "#initialize" do
it "should remap attributes from parser" do
now = Time.now
bucket = Fog::AWS::S3::Bucket.new(
'CreationDate' => now,
'Name' => 'bucketname'
)
bucket.creation_date.should == now
bucket.name.should == 'bucketname'
end
end
describe "#collection" do
it "should return a Fog::AWS::S3::Buckets" do
s3.buckets.new.collection.should be_a(Fog::AWS::S3::Buckets)
end
it "should be the buckets the bucket is related to" do
buckets = s3.buckets
buckets.new.collection.should == buckets
end
end
describe "#destroy" do
it "should return true if the bucket is deleted" do
bucket = s3.buckets.create(:name => 'fogmodelbucket')
bucket.destroy.should be_true
end
it "should return false if the bucket does not exist" do
bucket = s3.buckets.new(:name => 'fogmodelbucket')
bucket.destroy.should be_false
end
end
describe "#location" do
it "should return the location constraint" do
bucket = s3.buckets.create(:name => 'fogmodeleubucket', :location => 'EU')
bucket.location.should == 'EU'
eu_s3.buckets.new(:name => 'fogmodeleubucket').destroy
end
end
describe "#objects" do
it "should return a Fog::AWS::S3::Objects" do
bucket = s3.buckets.new(:name => 'fogmodelbucket')
bucket.objects.should be_an(Fog::AWS::S3::Objects)
end
end
describe "#payer" do
it "should return the request payment value" do
bucket = s3.buckets.create(:name => 'fogmodelbucket')
bucket.payer.should == 'BucketOwner'
bucket.destroy.should be_true
end
end
describe "#payer=" do
it "should set the request payment value" do
bucket = s3.buckets.create(:name => 'fogmodelbucket')
(bucket.payer = 'Requester').should == 'Requester'
bucket.destroy.should
end
end
describe "#reload" do
before(:each) do
@bucket = s3.buckets.create(:name => 'fogmodelbucket')
@reloaded = @bucket.reload
end
after(:each) do
@bucket.destroy
end
it "should return a Fog::AWS::S3::Bucket" do
@reloaded.should be_a(Fog::AWS::S3::Bucket)
end
it "should reset attributes to remote state" do
@bucket.attributes.should == @reloaded.attributes
end
end
describe "#save" do
before(:each) do
@bucket = s3.buckets.new(:name => 'fogmodelbucket')
end
it "should return true when it succeeds" do
@bucket.save.should be_true
@bucket.destroy
end
it "should not exist in buckets before save" do
s3.buckets.all.map {|bucket| bucket.name}.include?(@bucket.name).should be_false
end
it "should exist in buckets after save" do
@bucket.save
s3.buckets.all.map {|bucket| bucket.name}.include?(@bucket.name).should be_true
@bucket.destroy
end
end
end

Просмотреть файл

@ -1,70 +0,0 @@
require File.dirname(__FILE__) + '/../../../spec_helper'
describe 'Fog::AWS::S3::Buckets' do
describe "#all" do
it "should return a Fog::AWS::S3::Buckets" do
s3.buckets.all.should be_a(Fog::AWS::S3::Buckets)
end
it "should include persisted buckets" do
bucket = s3.buckets.create(:name => 'fogbucketname')
s3.buckets.all.map {|bucket| bucket.name}.should include('fogbucketname')
bucket.destroy
end
end
describe "#create" do
before(:each) do
@bucket = s3.buckets.create(:name => 'fogbucketname')
end
after(:each) do
@bucket.destroy
end
it "should return a Fog::AWS::S3::Bucket" do
@bucket.should be_a(Fog::AWS::S3::Bucket)
end
it "should exist on s3" do
s3.buckets.get(@bucket.name).should_not be_nil
end
end
describe "#get" do
it "should return a Fog::AWS::S3::Bucket if a matching bucket exists" do
bucket = s3.buckets.create(:name => 'fogbucketname')
get = s3.buckets.get('fogbucketname')
bucket.attributes.should == get.attributes
bucket.destroy
end
it "should return nil if no matching bucket exists" do
s3.buckets.get('fogbucketname').should be_nil
end
end
describe "#new" do
it "should return a Fog::AWS::S3::Bucket" do
s3.buckets.new.should be_a(Fog::AWS::S3::Bucket)
end
end
describe "#reload" do
it "should return a Fog::AWS::S3::Buckets" do
s3.buckets.all.should be_a(Fog::AWS::S3::Buckets)
end
end
end

Просмотреть файл

@ -0,0 +1,49 @@
require File.dirname(__FILE__) + '/../../../spec_helper'

# Specs for Fog::AWS::S3::Directories — the collection of S3 buckets exposed
# under the directory/file naming. Uses the `s3` connection helper provided
# by spec_helper; each example cleans up the directories it creates.
describe 'Fog::AWS::S3::Directories' do

  describe "#all" do
    it "should include persisted directories" do
      @directory = s3.directories.create(:name => 'fogdirectoryname')
      # Map over the block parameter, not @directory: using @directory.name
      # would turn every element into 'fogdirectoryname' and make the
      # assertion pass vacuously even if the directory was never persisted.
      s3.directories.all.map {|directory| directory.name}.should include('fogdirectoryname')
      @directory.destroy
    end
  end

  describe "#create" do
    it "should exist on s3" do
      directory = s3.directories.create(:name => 'fogdirectoryname')
      s3.directories.get(directory.name).should_not be_nil
      directory.destroy
    end
  end

  describe "#get" do
    it "should return a Fog::AWS::S3::Directory if a matching directory exists" do
      directory = s3.directories.create(:name => 'fogdirectoryname')
      get = s3.directories.get('fogdirectoryname')
      directory.attributes.should == get.attributes
      directory.destroy
    end

    it "should return nil if no matching directory exists" do
      s3.directories.get('fogdirectoryname').should be_nil
    end
  end

  describe "#reload" do
    it "should reload data" do
      directories = s3.directories
      directories.should == directories.reload
    end
  end

end

Просмотреть файл

@ -0,0 +1,112 @@
require File.dirname(__FILE__) + '/../../../spec_helper'

# Specs for Fog::AWS::S3::Directory (an S3 bucket modeled as a directory).
# Uses the `s3` / `eu_s3` connection helpers provided by spec_helper.
describe 'Fog::AWS::S3::Directory' do

  describe "#initialize" do
    it "should remap attributes from parser" do
      now = Time.now
      directory = Fog::AWS::S3::Directory.new(
        'CreationDate' => now,
        'Name'         => 'directoryname'
      )
      directory.creation_date.should == now
      directory.name.should == 'directoryname'
    end
  end

  describe "#collection" do
    it "should be the directories the directory is related to" do
      directories = s3.directories
      directories.new.collection.should == directories
    end
  end

  describe "#destroy" do
    it "should return true if the directory is deleted" do
      directory = s3.directories.create(:name => 'fogmodeldirectory')
      directory.destroy.should be_true
    end

    it "should return false if the directory does not exist" do
      directory = s3.directories.new(:name => 'fogmodeldirectory')
      directory.destroy.should be_false
    end
  end

  describe "#location" do
    it "should return the location constraint" do
      directory = s3.directories.create(:name => 'fogmodeleudirectory', :location => 'EU')
      directory.location.should == 'EU'
      # EU buckets must be deleted through an EU endpoint connection.
      eu_s3.directories.get('fogmodeleudirectory').destroy
    end
  end

  describe "#payer" do
    it "should return the request payment value" do
      directory = s3.directories.create(:name => 'fogmodeldirectory')
      directory.payer.should == 'BucketOwner'
      directory.destroy.should be_true
    end
  end

  describe "#payer=" do
    it "should set the request payment value" do
      directory = s3.directories.create(:name => 'fogmodeldirectory')
      (directory.payer = 'Requester').should == 'Requester'
      # Was a dangling `.should` with no matcher, which asserts nothing;
      # assert the destroy result like the #payer example above does.
      directory.destroy.should be_true
    end
  end

  describe "#reload" do
    before(:each) do
      @directory = s3.directories.create(:name => 'fogmodeldirectory')
      @reloaded = @directory.reload
    end

    after(:each) do
      @directory.destroy
    end

    it "should reset attributes to remote state" do
      @directory.attributes.should == @reloaded.attributes
    end
  end

  describe "#save" do
    before(:each) do
      @directory = s3.directories.new(:name => 'fogmodeldirectory')
    end

    it "should return true when it succeeds" do
      @directory.save.should be_true
      @directory.destroy
    end

    it "should not exist in directories before save" do
      s3.directories.all.map {|directory| directory.name}.include?(@directory.name).should be_false
    end

    it "should exist in directories after save" do
      @directory.save
      s3.directories.all.map {|directory| directory.name}.include?(@directory.name).should be_true
      @directory.destroy
    end
  end

end

Просмотреть файл

@ -0,0 +1,106 @@
require File.dirname(__FILE__) + '/../../../spec_helper'

# Specs for Fog::AWS::S3::File (an S3 object modeled as a file).
# Uses the `s3` connection helper from spec_helper and the lorem.txt fixture.
describe 'Fog::AWS::S3::File' do

  before(:each) do
    @directory = s3.directories.create(:name => 'fogdirectoryname')
  end

  after(:each) do
    @directory.destroy
  end

  describe "#initialize" do
    it "should remap attributes from parser" do
      now = Time.now
      # Renamed misleading local `directory` -> `file`: this is a File model,
      # a leftover from the mechanical bucket/object -> directory/file rename.
      file = Fog::AWS::S3::File.new(
        'Content-Length' => 10,
        'Content-Type'   => 'contenttype',
        'Etag'           => 'etag',
        'Key'            => 'key',
        'Last-Modified'  => now,
        'Size'           => 10,
        'StorageClass'   => 'storageclass'
      )
      # Was a bare `== 10` comparison that asserted nothing; `.should` added.
      file.content_length.should == 10
      file.content_type.should == 'contenttype'
      file.etag.should == 'etag'
      file.key.should == 'key'
      file.last_modified.should == now
      file.size.should == 10
      file.storage_class.should == 'storageclass'
      file = Fog::AWS::S3::File.new(
        'ETag'         => 'etag',
        'LastModified' => now
      )
      file.etag.should == 'etag'
      file.last_modified.should == now
    end
  end

  describe "#directory" do
    it "should be the directory the file is related to" do
      @file = @directory.files.new(:key => 'foo')
      @file.directory.should == @directory
    end
  end

  describe "#copy" do
    it "should return a Fog::AWS::S3::File with matching attributes" do
      other_directory = s3.directories.create(:name => 'fogotherdirectoryname')
      data = File.open(File.dirname(__FILE__) + '/../../../lorem.txt', 'r')
      file = @directory.files.create(:key => 'fogfilename', :body => data)
      other_file = file.copy('fogotherdirectoryname', 'fogotherfilename')
      # Key and last_modified legitimately differ between source and copy.
      file.reload.attributes.reject{|key,value| [:key, :last_modified].include?(key)}.should == other_file.reload.attributes.reject{|key,value| [:key, :last_modified].include?(key)}
      other_file.destroy
      file.destroy
      other_directory.destroy
    end
  end

  describe "#destroy" do
    it "should return true if the file is deleted" do
      data = File.open(File.dirname(__FILE__) + '/../../../lorem.txt', 'r')
      file = @directory.files.create(:key => 'fogfilename', :body => data)
      file.destroy.should be_true
    end

    it "should return true if the file does not exist" do
      # S3 DELETE is idempotent, so destroying a missing key still succeeds.
      file = @directory.files.new(:key => 'fogfilename')
      file.destroy.should be_true
    end
  end

  describe "#reload" do
    it "should reset attributes to remote state" do
      data = File.open(File.dirname(__FILE__) + '/../../../lorem.txt', 'r')
      file = @directory.files.create(:key => 'fogfilename', :body => data)
      file.last_modified = Time.now
      file.reload.attributes.should == file.attributes
      file.destroy
    end
  end

  describe "#save" do
    it "should return the success value" do
      data = File.open(File.dirname(__FILE__) + '/../../../lorem.txt', 'r')
      file = @directory.files.new(:key => 'fogfilename', :body => data)
      file.save.should be_true
      file.destroy
    end
  end

end

Просмотреть файл

@ -0,0 +1,116 @@
require File.dirname(__FILE__) + '/../../../spec_helper'

# Specs for Fog::AWS::S3::Files — the collection of S3 objects (files)
# within a directory. Uses the `s3` helper and the lorem.txt fixture.
describe 'Fog::AWS::S3::Files' do

  before(:each) do
    @directory = s3.directories.create(:name => 'fogdirectoryname')
  end

  after(:each) do
    @directory.destroy
  end

  describe "#initialize" do
    it "should remap attributes from parser" do
      files = Fog::AWS::S3::Files.new(
        'IsTruncated' => true,
        'Marker'      => 'marker',
        'MaxKeys'     => 1,
        'Prefix'      => 'prefix'
      )
      files.is_truncated.should == true
      files.marker.should == 'marker'
      files.max_keys.should == 1
      files.prefix.should == 'prefix'
    end
  end

  describe "#all" do
    it "should return nil if the directory does not exist" do
      directory = s3.directories.new(:name => 'notadirectory')
      directory.files.all.should be_nil
    end
  end

  describe "#create" do
    it "should exist on s3" do
      data = File.open(File.dirname(__FILE__) + '/../../../lorem.txt', 'r')
      file = @directory.files.create(:key => 'fogfilename', :body => data)
      @directory.files.get('fogfilename').should_not be_nil
      file.destroy
    end
  end

  describe "#get" do
    before(:each) do
      data = File.open(File.dirname(__FILE__) + '/../../../lorem.txt', 'r')
      @file = @directory.files.create(:key => 'fogfilename', :body => data)
    end

    after(:each) do
      @file.destroy
    end

    it "should return a Fog::AWS::S3::File with metadata and data" do
      @file.reload
      @file.body.should_not be_nil
      @file.content_length.should_not be_nil
      @file.etag.should_not be_nil
      @file.last_modified.should_not be_nil
      @file.destroy
    end

    it "should return chunked data if given a block" do
      data = ''
      @directory.files.get('fogfilename') do |chunk|
        data << chunk
      end
      data.should == File.open(File.dirname(__FILE__) + '/../../../lorem.txt', 'r').read
    end
  end

  describe "#get_url" do
    it "should return a signed expiring url" do
      data = File.open(File.dirname(__FILE__) + '/../../../lorem.txt', 'r')
      file = @directory.files.create(:key => 'fogfilename', :body => data)
      url = @directory.files.get_url('fogfilename', Time.now + 60 * 10)
      # Only hit the signed url against real S3; mocks have nothing to serve.
      unless Fog.mocking?
        open(url).read.should == File.open(File.dirname(__FILE__) + '/../../../lorem.txt', 'r').read
      end
      file.destroy
    end
  end

  describe "#head" do
    it "should return a Fog::AWS::S3::File with metadata" do
      data = File.open(File.dirname(__FILE__) + '/../../../lorem.txt', 'r')
      created = @directory.files.create(:key => 'fogfilename', :body => data)
      # Was calling #get here, so the HEAD code path was never exercised.
      file = @directory.files.head('fogfilename')
      file.content_length.should_not be_nil
      file.etag.should_not be_nil
      file.last_modified.should_not be_nil
      created.destroy
    end
  end

  describe "#reload" do
    it "should reload data" do
      @directory.files.reload.should == @directory.files
    end
  end

end

Просмотреть файл

@ -1,121 +0,0 @@
require File.dirname(__FILE__) + '/../../../spec_helper'
describe 'S3::Object' do
before(:each) do
@bucket = s3.buckets.create(:name => 'fogbucketname')
end
after(:each) do
@bucket.destroy
end
describe "#initialize" do
it "should remap attributes from parser" do
now = Time.now
bucket = Fog::AWS::S3::Object.new(
'Content-Length' => 10,
'Content-Type' => 'contenttype',
'Etag' => 'etag',
'Key' => 'key',
'Last-Modified' => now,
'Size' => 10,
'StorageClass' => 'storageclass'
)
bucket.content_length == 10
bucket.content_type.should == 'contenttype'
bucket.etag.should == 'etag'
bucket.key.should == 'key'
bucket.last_modified.should == now
bucket.size.should == 10
bucket.storage_class.should == 'storageclass'
bucket = Fog::AWS::S3::Object.new(
'ETag' => 'etag',
'LastModified' => now
)
bucket.etag.should == 'etag'
bucket.last_modified.should == now
end
end
describe "#bucket" do
before(:each) do
@object = @bucket.objects.new(:key => 'foo')
end
it "should return an S3::Bucket" do
@object.bucket.should be_a(Fog::AWS::S3::Bucket)
end
it "should be the bucket the object is related to" do
@object.bucket.should == @bucket
end
end
describe "#copy" do
it "should return a Fog::AWS::S3::Object with matching attributes" do
other_bucket = s3.buckets.create(:name => 'fogotherbucketname')
file = File.open(File.dirname(__FILE__) + '/../../../lorem.txt', 'r')
object = @bucket.objects.create(:key => 'fogobjectname', :body => file)
other_object = object.copy('fogotherbucketname', 'fogotherobjectname')
object.reload.attributes.reject{|key,value| [:key, :last_modified].include?(key)}.should == other_object.reload.attributes.reject{|key,value| [:key, :last_modified].include?(key)}
other_object.destroy
object.destroy
other_bucket.destroy
end
end
describe "#destroy" do
it "should return true if the object is deleted" do
file = File.open(File.dirname(__FILE__) + '/../../../lorem.txt', 'r')
object = @bucket.objects.create(:key => 'fogobjectname', :body => file)
object.destroy.should be_true
end
it "should return true if the object does not exist" do
object = @bucket.objects.new(:key => 'fogobjectname')
object.destroy.should be_true
end
end
describe "#reload" do
it "should return a Fog::AWS::S3::Object" do
file = File.open(File.dirname(__FILE__) + '/../../../lorem.txt', 'r')
object = @bucket.objects.create(:key => 'fogobjectname', :body => file)
# object.reload.should be_a(Fog::AWS::S3::Object)
object.destroy
end
it "should reset attributes to remote state" do
file = File.open(File.dirname(__FILE__) + '/../../../lorem.txt', 'r')
object = @bucket.objects.create(:key => 'fogobjectname', :body => file)
object.last_modified = Time.now
# object.reload.attributes.should == object.attributes
object.destroy
end
end
describe "#save" do
it "should return the success value" do
file = File.open(File.dirname(__FILE__) + '/../../../lorem.txt', 'r')
object = @bucket.objects.new(:key => 'fogobjectname', :body => file)
object.save.should be_true
object.destroy
end
end
end

Просмотреть файл

@ -1,141 +0,0 @@
require File.dirname(__FILE__) + '/../../../spec_helper'
describe 'Fog::AWS::S3::Objects' do
before(:each) do
@bucket = s3.buckets.create(:name => 'fogbucketname')
end
after(:each) do
@bucket.destroy
end
describe "#initialize" do
it "should remap attributes from parser" do
objects = Fog::AWS::S3::Objects.new(
'IsTruncated' => true,
'Marker' => 'marker',
'MaxKeys' => 1,
'Prefix' => 'prefix'
)
objects.is_truncated.should == true
objects.marker.should == 'marker'
objects.max_keys.should == 1
objects.prefix.should == 'prefix'
end
end
describe "#all" do
it "should return a Fog::AWS::S3::Objects" do
@bucket.objects.all.should be_a(Fog::AWS::S3::Objects)
end
it "should return nil if the bucket does not exist" do
bucket = s3.buckets.new(:name => 'notabucket')
bucket.objects.all.should be_nil
end
end
describe "#bucket" do
it "should return a Fog::AWS::S3::Bucket" do
@bucket.objects.bucket.should be_a(Fog::AWS::S3::Bucket)
end
it "should be the bucket the objects are related to" do
@bucket.objects.bucket.should == @bucket
end
end
describe "#create" do
it "should return a Fog::AWS::S3::Object" do
file = File.open(File.dirname(__FILE__) + '/../../../lorem.txt', 'r')
object = @bucket.objects.create(:key => 'fogobjectname', :body => file)
object.should be_a(Fog::AWS::S3::Object)
object.destroy
end
it "should exist on s3" do
file = File.open(File.dirname(__FILE__) + '/../../../lorem.txt', 'r')
object = @bucket.objects.create(:key => 'fogobjectname', :body => file)
@bucket.objects.get('fogobjectname').should_not be_nil
object.destroy
end
end
describe "#get" do
it "should return a Fog::AWS::S3::Object with metadata and data" do
file = File.open(File.dirname(__FILE__) + '/../../../lorem.txt', 'r')
object = @bucket.objects.create(:key => 'fogobjectname', :body => file)
object = @bucket.objects.get('fogobjectname')
object.body.should_not be_nil
object.content_length.should_not be_nil
object.etag.should_not be_nil
object.last_modified.should_not be_nil
object.destroy
end
it "should return chunked data if given a block" do
file = File.open(File.dirname(__FILE__) + '/../../../lorem.txt', 'r')
object = @bucket.objects.create(:key => 'fogobjectname', :body => file)
data = ''
@bucket.objects.get('fogobjectname') do |chunk|
data << chunk
end
data.should == File.open(File.dirname(__FILE__) + '/../../../lorem.txt', 'r').read
object.destroy
end
end
describe "#get_url" do
it "should return a signed expiring url" do
file = File.open(File.dirname(__FILE__) + '/../../../lorem.txt', 'r')
object = @bucket.objects.create(:key => 'fogobjectname', :body => file)
url = @bucket.objects.get_url('fogobjectname', Time.now + 60 * 10)
unless Fog.mocking?
open(url).read.should == File.open(File.dirname(__FILE__) + '/../../../lorem.txt', 'r').read
end
object.destroy
end
end
describe "#head" do
it "should return a Fog::AWS::S3::Object with metadata" do
file = File.open(File.dirname(__FILE__) + '/../../../lorem.txt', 'r')
object = @bucket.objects.create(:key => 'fogobjectname', :body => file)
object = @bucket.objects.get('fogobjectname')
object.content_length.should_not be_nil
object.etag.should_not be_nil
object.last_modified.should_not be_nil
object.destroy
end
end
describe "#new" do
it "should return a Fog::AWS::S3::Object" do
@bucket.objects.new.should be_a(Fog::AWS::S3::Object)
end
end
describe "#reload" do
it "should reload from s3" do
@bucket.objects.reload.should == @bucket.objects
end
end
end