Commit d1ce855d by LI Daobing

sounds works

parent 1bbdc0dd
module Paperclip
  module Storage
    # fog is a modern and versatile cloud computing library for Ruby.
    # Among others, it supports Amazon S3 to store your files. In
    # contrast to the outdated AWS-S3 gem it is actively maintained and
    # supports multiple locations.
    # Amazon's S3 file hosting service is a scalable, easy place to
    # store files for distribution. You can find out more about it at
    # http://aws.amazon.com/s3
    # There are a few fog-specific options for has_attached_file, which
    # will be explained below using S3 as an example (an illustrative
    # usage sketch follows the list):
    # * +fog_credentials+: Takes a Hash with your credentials. For S3,
    #   you can use the following format:
    #     aws_access_key_id: '<your aws_access_key_id>'
    #     aws_secret_access_key: '<your aws_secret_access_key>'
    #     provider: 'AWS'
    #     region: 'eu-west-1'
    # * +fog_directory+: This is the name of the S3 bucket that will
    #   store your files. Remember that the bucket must be unique across
    #   all of Amazon S3. If the bucket does not exist, Paperclip will
    #   attempt to create it.
    # * +path+: This is the key under the bucket in which the file will
    #   be stored. The URL will be constructed from the bucket and the
    #   path. This is what you will want to interpolate. Keys should be
    #   unique, like filenames, and despite the fact that S3 (strictly
    #   speaking) does not support directories, you can still use a / to
    #   separate parts of your file name.
    # * +fog_public+: (optional, defaults to true) Should the uploaded
    #   files be public or not? (true/false)
    # * +fog_host+: (optional) The fully-qualified domain name (FQDN)
    #   that is the alias to the S3 domain of your bucket, e.g.
    #   'http://images.example.com'. This can also be used in
    #   conjunction with CloudFront (http://aws.amazon.com/cloudfront).
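    #
    # A minimal usage sketch for the options above. The model name and
    # every value here are illustrative placeholders, not part of this
    # commit:
    #
    #   class Photo < ActiveRecord::Base
    #     has_attached_file :image,
    #       :storage         => :fog,
    #       :fog_credentials => { :provider              => 'AWS',
    #                             :aws_access_key_id     => '<your aws_access_key_id>',
    #                             :aws_secret_access_key => '<your aws_secret_access_key>',
    #                             :region                => 'eu-west-1' },
    #       :fog_directory   => 'my-bucket',
    #       :fog_public      => true,
    #       :fog_host        => 'http://images.example.com',
    #       :path            => ':attachment/:id/:style/:filename'
    #   end
    #
    # With this commit's module, :storage => :qiniu would be used to
    # select Paperclip::Storage::Qiniu instead of the fog backend.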
    module Qiniu
      def self.extended base
        begin
......@@ -43,34 +12,20 @@ module Paperclip
        base.instance_eval do
          unless @options[:url].to_s.match(/^:fog.*url$/)
            @options[:path] = @options[:path].gsub(/:url/, @options[:url])
            @options[:url] = ':fog_public_url'
            @options[:url] = ':qiniu_public_url'
          end
          Paperclip.interpolates(:fog_public_url) do |attachment, style|
          Paperclip.interpolates(:qiniu_public_url) do |attachment, style|
            attachment.public_url(style)
          end unless Paperclip::Interpolations.respond_to? :fog_public_url
          end unless Paperclip::Interpolations.respond_to? :qiniu_public_url
        end
      end

      AWS_BUCKET_SUBDOMAIN_RESTRICTON_REGEX = /^(?:[a-z]|\d(?!\d{0,2}(?:\.\d{1,3}){3}$))(?:[a-z0-9]|\.(?![\.\-])|\-(?![\.])){1,61}[a-z0-9]$/
      end
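
      # Checks whether the file for +style+ exists by asking Qiniu for
      # its stat info; the double-bang coerces the result to a boolean.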
      def exists?(style = default_style)
        init
        !!::Qiniu::RS.stat(bucket, path(style))
      end

      def fog_credentials
        @fog_credentials ||= parse_credentials(@options[:fog_credentials])
      end

      def fog_file
        @fog_file ||= @options[:fog_file] || {}
      end

      def fog_public
        return @fog_public if defined?(@fog_public)
        @fog_public = defined?(@options[:fog_public]) ? @options[:fog_public] : true
      end

      def flush_writes
        init
        for style, file in @queued_for_write do
......@@ -103,33 +58,6 @@ module Paperclip
        nil
      end

      def expiring_url(time = 3600, style = default_style)
        expiring_url = directory.files.get_http_url(path(style), time)
        if @options[:fog_host]
          expiring_url.gsub!(/#{host_name_for_directory}/, dynamic_fog_host_for_style(style))
        end
        return expiring_url
      end

      def parse_credentials(creds)
        creds = find_credentials(creds).stringify_keys
        env = Object.const_defined?(:Rails) ? Rails.env : nil
        (creds[env] || creds).symbolize_keys
      end

      def copy_to_local_file(style, local_dest_path)
        log("copying #{path(style)} to local file #{local_dest_path}")
        local_file = ::File.open(local_dest_path, 'wb')
        file = directory.files.get(path(style))
        local_file.write(file.body)
        local_file.close
      rescue Fog::Errors::Error => e
        warn("#{e} - cannot copy #{path(style)} to local file #{local_dest_path}")
        false
      end

      private

      def init
......@@ -153,58 +81,6 @@ module Paperclip
      def bucket
        @options[:bucket] || raise("bucket is nil")
      end

      def dynamic_fog_host_for_style(style)
        if @options[:fog_host].respond_to?(:call)
          @options[:fog_host].call(self)
        else
          (@options[:fog_host] =~ /%d/) ? @options[:fog_host] % (path(style).hash % 4) : @options[:fog_host]
        end
      end

      def host_name_for_directory
        if @options[:fog_directory].to_s =~ Fog::AWS_BUCKET_SUBDOMAIN_RESTRICTON_REGEX
          # This:
          "#{@options[:fog_directory]}."
          # Should be modified to this:
          # "#{@options[:fog_directory]}.s3.amazonaws.com"
          # When fog with https://github.com/fog/fog/pull/857 gets released
        else
          "s3.amazonaws.com/#{@options[:fog_directory]}"
        end
      end

      def find_credentials(creds)
        case creds
        when File
          YAML::load(ERB.new(File.read(creds.path)).result)
        when String, Pathname
          YAML::load(ERB.new(File.read(creds)).result)
        when Hash
          creds
        else
          if creds.respond_to?(:call)
            creds.call(self)
          else
            raise ArgumentError, "Credentials are not a path, file, hash or proc."
          end
        end
      end

      def connection
        @connection ||= ::Fog::Storage.new(fog_credentials)
      end

      def directory
        dir = if @options[:fog_directory].respond_to?(:call)
          @options[:fog_directory].call(self)
        else
          @options[:fog_directory]
        end
        @directory ||= connection.directories.new(:key => dir)
      end
    end
  end
end