Merge pull request #30020 from rails/active-storage-import

Add Active Storage to Rails
Authored by David Heinemeier Hansson on 2017-08-04 18:05:13 -05:00, committed via GitHub (commit 5528406603)
144 changed files with 7217 additions and 26 deletions

@ -28,10 +28,6 @@ Layout/CommentIndentation:
Layout/EmptyLineAfterMagicComment:
Enabled: true
# No extra empty lines.
Layout/EmptyLines:
Enabled: true
# In a regular class definition, no empty lines around the body.
Layout/EmptyLinesAroundClassBody:
Enabled: true

@ -40,6 +40,8 @@ script: 'ci/travis.rb'
env:
global:
- "JRUBY_OPTS='--dev -J-Xmx1024M'"
- "AWS_ACCESS_KEY_ID=AKIAIDIA2E7SSMYGNB7A"
- secure: "XohvFnYff1yf8qlawCujI+CIqHK08KOw34pPprd4QYuG0SJCzBdNN7efBBj5gLX1PI6DwkDLNv51Oi31xPh7yJFuzRAkB0FPdyKM7UyYZ7BMaTqx8LVC89lZJ8VIu19kDP/8sdOm0HN/huOM5kO3jZJFLpi2Tj313TjmzWZFPq0="
matrix:
- "GEM=railties"
- "GEM=ap,ac"

@ -92,6 +92,14 @@ group :cable do
gem "sprockets-export", require: false
end
group :storage do
gem "aws-sdk", "~> 2", require: false
gem "google-cloud-storage", "~> 1.3", require: false
gem "azure-storage", require: false
gem "mini_magick"
end
# Add your own local bundler stuff.
local_gemfile = File.expand_path(".Gemfile", __dir__)
instance_eval File.read local_gemfile if File.exist? local_gemfile

@ -82,6 +82,9 @@ PATH
activemodel (= 5.2.0.alpha)
activesupport (= 5.2.0.alpha)
arel (= 9.0.0.alpha)
activestorage (5.2.0.alpha)
actionpack (= 5.2.0.alpha)
activerecord (= 5.2.0.alpha)
activesupport (5.2.0.alpha)
concurrent-ruby (~> 1.0, >= 1.0.2)
i18n (~> 0.7)
@ -95,6 +98,7 @@ PATH
activejob (= 5.2.0.alpha)
activemodel (= 5.2.0.alpha)
activerecord (= 5.2.0.alpha)
activestorage (= 5.2.0.alpha)
activesupport (= 5.2.0.alpha)
bundler (>= 1.3.0)
railties (= 5.2.0.alpha)
@ -113,6 +117,23 @@ GEM
public_suffix (~> 2.0, >= 2.0.2)
amq-protocol (2.2.0)
ast (2.3.0)
aws-sdk (2.10.19)
aws-sdk-resources (= 2.10.19)
aws-sdk-core (2.10.19)
aws-sigv4 (~> 1.0)
jmespath (~> 1.0)
aws-sdk-resources (2.10.19)
aws-sdk-core (= 2.10.19)
aws-sigv4 (1.0.1)
azure-core (0.1.9)
faraday (~> 0.9)
faraday_middleware (~> 0.10)
nokogiri (~> 1.7)
azure-storage (0.11.4.preview)
azure-core (~> 0.1)
faraday (~> 0.9)
faraday_middleware (~> 0.10)
nokogiri (~> 1.6)
backburner (1.4.1)
beaneater (~> 1.0)
concurrent-ruby (~> 1.0.1)
@ -168,11 +189,14 @@ GEM
daemons (1.2.4)
dalli (2.7.6)
dante (0.2.0)
declarative (0.0.9)
declarative-option (0.1.0)
delayed_job (4.1.3)
activesupport (>= 3.0, < 5.2)
delayed_job_active_record (4.1.2)
activerecord (>= 3.0, < 5.2)
delayed_job (>= 3.0, < 5)
digest-crc (0.4.1)
em-hiredis (0.3.1)
eventmachine (~> 1.0)
hiredis (~> 0.6.0)
@ -195,6 +219,8 @@ GEM
execjs (2.7.0)
faraday (0.12.2)
multipart-post (>= 1.2, < 3)
faraday_middleware (0.12.0)
faraday (>= 0.7.4, < 1.0)
faye (1.2.4)
cookiejar (>= 0.3.0)
em-http-request (>= 0.3.0)
@ -211,14 +237,41 @@ GEM
ffi (1.9.18-x86-mingw32)
globalid (0.4.0)
activesupport (>= 4.2.0)
google-api-client (0.13.1)
addressable (~> 2.5, >= 2.5.1)
googleauth (~> 0.5)
httpclient (>= 2.8.1, < 3.0)
mime-types (~> 3.0)
representable (~> 3.0)
retriable (>= 2.0, < 4.0)
google-cloud-core (1.0.0)
google-cloud-env (~> 1.0)
googleauth (~> 0.5.1)
google-cloud-env (1.0.1)
faraday (~> 0.11)
google-cloud-storage (1.3.0)
digest-crc (~> 0.4)
google-api-client (~> 0.13.0)
google-cloud-core (~> 1.0)
googleauth (0.5.3)
faraday (~> 0.12)
jwt (~> 1.4)
logging (~> 2.0)
memoist (~> 0.12)
multi_json (~> 1.11)
os (~> 0.9)
signet (~> 0.7)
hiredis (0.6.1)
http_parser.rb (0.6.0)
httpclient (2.8.3)
i18n (0.8.6)
jmespath (1.3.1)
jquery-rails (4.3.1)
rails-dom-testing (>= 1, < 3)
railties (>= 4.2.0)
thor (>= 0.14, < 2.0)
json (2.1.0)
jwt (1.5.6)
kindlerb (1.2.0)
mustache
nokogiri
@ -227,15 +280,21 @@ GEM
rb-fsevent (~> 0.9, >= 0.9.4)
rb-inotify (~> 0.9, >= 0.9.7)
ruby_dep (~> 1.2)
little-plugger (1.1.4)
logging (2.2.2)
little-plugger (~> 1.1)
multi_json (~> 1.10)
loofah (2.0.3)
nokogiri (>= 1.5.9)
mail (2.6.6)
mime-types (>= 1.16, < 4)
memoist (0.16.0)
metaclass (0.0.4)
method_source (0.8.2)
mime-types (3.1)
mime-types-data (~> 3.2015)
mime-types-data (3.2016.0521)
mini_magick (4.8.0)
mini_portile2 (2.2.0)
minitest (5.10.2)
minitest-bisect (1.4.0)
@ -263,6 +322,7 @@ GEM
mini_portile2 (~> 2.2.0)
nokogiri (1.8.0-x86-mingw32)
mini_portile2 (~> 2.2.0)
os (0.9.6)
parallel (1.11.2)
parser (2.4.0.0)
ast (~> 2.2)
@ -303,6 +363,10 @@ GEM
redis (3.3.3)
redis-namespace (1.5.3)
redis (~> 3.0, >= 3.0.4)
representable (3.0.4)
declarative (< 0.1.0)
declarative-option (< 0.2.0)
uber (< 0.2.0)
resque (1.27.4)
mono_logger (~> 1.0)
multi_json (~> 1.0)
@ -314,6 +378,7 @@ GEM
redis (~> 3.3)
resque (~> 1.26)
rufus-scheduler (~> 3.2)
retriable (3.1.1)
rubocop (0.49.1)
parallel (~> 1.10)
parser (>= 2.3.3.1, < 3.0)
@ -345,6 +410,11 @@ GEM
rack-protection (>= 1.5.0)
redis (~> 3.3, >= 3.3.3)
sigdump (0.2.4)
signet (0.7.3)
addressable (~> 2.3)
faraday (~> 0.9)
jwt (~> 1.5)
multi_json (~> 1.10)
simple_uuid (0.4.0)
sinatra (2.0.0)
mustermann (~> 1.0)
@ -384,6 +454,7 @@ GEM
thread_safe (~> 0.1)
tzinfo-data (1.2017.2)
tzinfo (>= 1.0.0)
uber (0.1.0)
uglifier (3.2.0)
execjs (>= 0.3.0, < 3)
unicode-display_width (1.3.0)
@ -411,6 +482,8 @@ DEPENDENCIES
activerecord-jdbcpostgresql-adapter (>= 1.3.0)
activerecord-jdbcsqlite3-adapter (>= 1.3.0)
arel!
aws-sdk (~> 2)
azure-storage
backburner
bcrypt (~> 3.1.11)
benchmark-ips
@ -425,12 +498,14 @@ DEPENDENCIES
delayed_job_active_record
em-hiredis
erubis (~> 2.7.0)
google-cloud-storage (~> 1.3)
hiredis
jquery-rails
json (>= 2.0.0)
kindlerb (~> 1.2.0)
libxml-ruby
listen (>= 3.0.5, < 3.2)
mini_magick
minitest-bisect
mocha (~> 0.14)
mysql2 (>= 0.4.4)

@ -40,6 +40,8 @@ to generate and send emails; Active Job ([README](activejob/README.md)), a
framework for declaring jobs and making them run on a variety of queueing
backends; Action Cable ([README](actioncable/README.md)), a framework to
integrate WebSockets with a Rails application;
Active Storage ([README](activestorage/README.md)), a library to attach cloud
and local files to Rails applications;
and Active Support ([README](activesupport/README.rdoc)), a collection
of utility classes and standard library extensions that are useful for Rails,
and may also be used independently outside Rails.

@ -1193,7 +1193,7 @@ def hidden_field(object_name, method, options = {})
# file_field(:attachment, :file, class: 'file_input')
# # => <input type="file" id="attachment_file" name="attachment[file]" class="file_input" />
def file_field(object_name, method, options = {})
Tags::FileField.new(object_name, method, self, options).render
Tags::FileField.new(object_name, method, self, convert_direct_upload_option_to_url(options.dup)).render
end
# Returns a textarea opening and closing tag set tailored for accessing a specified attribute (identified by +method+)

@ -274,7 +274,7 @@ def hidden_field_tag(name, value = nil, options = {})
# file_field_tag 'file', accept: 'text/html', class: 'upload', value: 'index.html'
# # => <input accept="text/html" class="upload" id="file" name="file" type="file" value="index.html" />
def file_field_tag(name, options = {})
text_field_tag(name, nil, options.merge(type: :file))
text_field_tag(name, nil, convert_direct_upload_option_to_url(options.merge(type: :file)))
end
# Creates a password field, a masked text field that will hide the users input behind a mask character.
@ -904,6 +904,13 @@ def set_default_disable_with(value, tag_options)
tag_options.delete("data-disable-with")
end
def convert_direct_upload_option_to_url(options)
if options.delete(:direct_upload) && respond_to?(:rails_direct_uploads_url)
options["data-direct-upload-url"] = rails_direct_uploads_url
end
options
end
end
end
end

@ -281,6 +281,18 @@ def method_missing(selector, *args)
super
end
end
def respond_to_missing?(name, include_private = false)
begin
routes = @controller.respond_to?(:_routes) && @controller._routes
rescue
# Don't use routes if an error is raised by the _routes call
end
routes &&
(routes.named_routes.route_defined?(name) ||
routes.mounted_helpers.method_defined?(name))
end
end
include Behavior

@ -8,6 +8,16 @@ class FormHelperTest < ActionView::TestCase
tests ActionView::Helpers::FormHelper
class WithActiveStorageRoutesControllers < ActionController::Base
test_routes do
post "/rails/active_storage/direct_uploads" => "active_storage/direct_uploads#create", as: :rails_direct_uploads
end
def url_options
{ host: "testtwo.host" }
end
end
def form_for(*)
@output_buffer = super
end
@ -542,6 +552,27 @@ def test_file_field_with_multiple_behavior_and_explicit_name
assert_dom_equal expected, file_field("import", "file", multiple: true, name: "custom")
end
def test_file_field_with_direct_upload_when_rails_direct_uploads_url_is_not_defined
expected = '<input type="file" name="import[file]" id="import_file" />'
assert_dom_equal expected, file_field("import", "file", direct_upload: true)
end
def test_file_field_with_direct_upload_when_rails_direct_uploads_url_is_defined
@controller = WithActiveStorageRoutesControllers.new
expected = '<input data-direct-upload-url="http://testtwo.host/rails/active_storage/direct_uploads" type="file" name="import[file]" id="import_file" />'
assert_dom_equal expected, file_field("import", "file", direct_upload: true)
end
def test_file_field_with_direct_upload_dont_mutate_arguments
original_options = { class: "pix", direct_upload: true }
expected = '<input class="pix" type="file" name="import[file]" id="import_file" />'
assert_dom_equal expected, file_field("import", "file", original_options)
assert_equal({ class: "pix", direct_upload: true }, original_options)
end
def test_hidden_field
assert_dom_equal(
'<input id="post_title" name="post[title]" type="hidden" value="Hello World" />',

@ -7,6 +7,16 @@ class FormTagHelperTest < ActionView::TestCase
tests ActionView::Helpers::FormTagHelper
class WithActiveStorageRoutesControllers < ActionController::Base
test_routes do
post "/rails/active_storage/direct_uploads" => "active_storage/direct_uploads#create", as: :rails_direct_uploads
end
def url_options
{ host: "testtwo.host" }
end
end
def setup
super
@controller = BasicController.new
@ -178,6 +188,33 @@ def test_file_field_tag_with_options
assert_dom_equal "<input name=\"picsplz\" type=\"file\" id=\"picsplz\" class=\"pix\"/>", file_field_tag("picsplz", class: "pix")
end
def test_file_field_tag_with_direct_upload_when_rails_direct_uploads_url_is_not_defined
assert_dom_equal(
"<input name=\"picsplz\" type=\"file\" id=\"picsplz\" class=\"pix\"/>",
file_field_tag("picsplz", class: "pix", direct_upload: true)
)
end
def test_file_field_tag_with_direct_upload_when_rails_direct_uploads_url_is_defined
@controller = WithActiveStorageRoutesControllers.new
assert_dom_equal(
"<input name=\"picsplz\" type=\"file\" id=\"picsplz\" class=\"pix\" data-direct-upload-url=\"http://testtwo.host/rails/active_storage/direct_uploads\"/>",
file_field_tag("picsplz", class: "pix", direct_upload: true)
)
end
def test_file_field_tag_with_direct_upload_dont_mutate_arguments
original_options = { class: "pix", direct_upload: true }
assert_dom_equal(
"<input name=\"picsplz\" type=\"file\" id=\"picsplz\" class=\"pix\"/>",
file_field_tag("picsplz", original_options)
)
assert_equal({ class: "pix", direct_upload: true }, original_options)
end
def test_password_field_tag
actual = password_field_tag
expected = %(<input id="password" name="password" type="password" />)

activestorage/.babelrc Normal file (+5 lines)

@ -0,0 +1,5 @@
{
"presets": [
["env", { "modules": false } ]
]
}

@ -0,0 +1,7 @@
engines:
rubocop:
enabled: true
ratings:
paths:
- "**.rb"

activestorage/.eslintrc Normal file (+19 lines)

@ -0,0 +1,19 @@
{
"extends": "eslint:recommended",
"rules": {
"semi": ["error", "never"],
"quotes": ["error", "double"],
"no-unused-vars": ["error", { "vars": "all", "args": "none" }]
},
"plugins": [
"import"
],
"env": {
"browser": true,
"es6": true
},
"parserOptions": {
"ecmaVersion": 6,
"sourceType": "module"
}
}

activestorage/.gitignore vendored Normal file (+6 lines)

@ -0,0 +1,6 @@
.byebug_history
node_modules
test/dummy/db/*.sqlite3
test/dummy/db/*.sqlite3-journal
test/dummy/log/*.log
test/dummy/tmp/

@ -0,0 +1,3 @@
* Added to Rails.
*DHH*

activestorage/MIT-LICENSE Normal file (+20 lines)

@ -0,0 +1,20 @@
Copyright (c) 2017 David Heinemeier Hansson, Basecamp
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

activestorage/README.md Normal file (+134 lines)

@ -0,0 +1,134 @@
# Active Storage
Active Storage makes it simple to upload and reference files in cloud services, like Amazon S3, Google Cloud Storage, or Microsoft Azure Storage, and attach those files to Active Record models. It also provides a disk service for testing or local deployments, but the focus is on cloud storage.
Files can be uploaded from the server to the cloud or directly from the client to the cloud.
Image files can furthermore be transformed using on-demand variants for quality, aspect ratio, size, or any other
MiniMagick-supported transformation.
## Compared to other storage solutions
A key difference between Active Storage and other attachment solutions in Rails is its use of built-in [Blob](https://github.com/rails/activestorage/blob/master/app/models/active_storage/blob.rb) and [Attachment](https://github.com/rails/activestorage/blob/master/app/models/active_storage/attachment.rb) models (backed by Active Record). This means existing application models do not need to be modified with additional columns to associate with files. Active Storage uses polymorphic associations via the `Attachment` join model, which then connects to the actual `Blob`.
These `Blob` models are intended to be immutable in spirit. One file, one blob. You can associate the same blob with multiple application models as well. And if you want to transform a given `Blob`, the idea is that you'll simply create a new one, rather than attempt to mutate the existing one (though of course you can delete the original later if you don't need it).
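For instance, because blobs are decoupled from your application models, a single blob can back attachments on several records at once. A minimal sketch, assuming a `User` with `has_one_attached :avatar` (as in the examples below) and a hypothetical `Company` with `has_one_attached :logo`:
```ruby
blob = ActiveStorage::Blob.create_after_upload!(
  io: File.open("/path/to/logo.png"),
  filename: "logo.png",
  content_type: "image/png"
)

user.avatar.attach(blob)  # both attachments reference the same underlying blob
company.logo.attach(blob)
```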
## Examples
One attachment:
```ruby
class User < ApplicationRecord
has_one_attached :avatar
end
user.avatar.attach io: File.open("~/face.jpg"), filename: "avatar.jpg", content_type: "image/jpg"
user.avatar.attached? # => true
user.avatar.purge
user.avatar.attached? # => false
url_for(user.avatar) # Generate a permanent URL for the blob, which upon access will redirect to a temporary service URL.
class AvatarsController < ApplicationController
def update
# params[:avatar] contains an ActionDispatch::Http::UploadedFile object
Current.user.avatar.attach(params.require(:avatar))
redirect_to Current.user
end
end
```
Many attachments:
```ruby
class Message < ApplicationRecord
has_many_attached :images
end
```
```erb
<%= form_with model: @message do |form| %>
<%= form.text_field :title, placeholder: "Title" %><br>
<%= form.text_area :content %><br><br>
<%= form.file_field :images, multiple: true %><br>
<%= form.submit %>
<% end %>
```
```ruby
class MessagesController < ApplicationController
def index
# Use the built-in with_attached_images scope to avoid N+1
@messages = Message.all.with_attached_images
end
def create
message = Message.create! params.require(:message).permit(:title, :content)
message.images.attach(params[:message][:images])
redirect_to message
end
def show
@message = Message.find(params[:id])
end
end
```
Variation of image attachment:
```erb
<%# Hitting the variant URL will lazily transform the original blob and then redirect to its new service location %>
<%= image_tag url_for(user.avatar.variant(resize: "100x100")) %>
```
## Installation
1. Run `rails activestorage:install` to create needed directories, migrations, and configuration.
2. Optional: Add `gem "aws-sdk", "~> 2"` to your Gemfile if you want to use AWS S3.
3. Optional: Add `gem "google-cloud-storage", "~> 1.3"` to your Gemfile if you want to use Google Cloud Storage.
4. Optional: Add `gem "azure-storage"` to your Gemfile if you want to use Microsoft Azure.
5. Optional: Add `gem "mini_magick"` to your Gemfile if you want to use variants.
## Direct uploads
Active Storage, with its included JavaScript library, supports uploading directly from the client to the cloud.
### Direct upload installation
1. Include `activestorage.js` in your application's JavaScript bundle.
Using the asset pipeline:
```js
//= require activestorage
```
Using the npm package:
```js
import * as ActiveStorage from "activestorage"
ActiveStorage.start()
```
2. Annotate file inputs with the direct upload URL.
```erb
<%= form.file_field :attachments, multiple: true, direct_upload: true %>
```
3. That's it! Uploads begin upon form submission.
### Direct upload JavaScript events
| Event name | Event target | Event data (`event.detail`) | Description |
| --- | --- | --- | --- |
| `direct-uploads:start` | `<form>` | None | A form containing files for direct upload fields was submitted. |
| `direct-upload:initialize` | `<input>` | `{id, file}` | Dispatched for every file after form submission. |
| `direct-upload:start` | `<input>` | `{id, file}` | A direct upload is starting. |
| `direct-upload:before-blob-request` | `<input>` | `{id, file, xhr}` | Before making a request to your application for direct upload metadata. |
| `direct-upload:before-storage-request` | `<input>` | `{id, file, xhr}` | Before making a request to store a file. |
| `direct-upload:progress` | `<input>` | `{id, file, progress}` | As requests to store files progress. |
| `direct-upload:error` | `<input>` | `{id, file, error}` | An error occurred. An `alert` will display unless this event is canceled. |
| `direct-upload:end` | `<input>` | `{id, file}` | A direct upload has ended. |
| `direct-uploads:end` | `<form>` | None | All direct uploads have ended. |
## License
Active Storage is released under the [MIT License](https://opensource.org/licenses/MIT).

activestorage/Rakefile Normal file (+12 lines)

@ -0,0 +1,12 @@
require "bundler/setup"
require "bundler/gem_tasks"
require "rake/testtask"
Rake::TestTask.new do |test|
test.libs << "app/controllers"
test.libs << "test"
test.test_files = FileList["test/**/*_test.rb"]
test.warning = false
end
task default: :test

@ -0,0 +1,30 @@
# frozen_string_literal: true
version = File.read(File.expand_path("../RAILS_VERSION", __dir__)).strip
Gem::Specification.new do |s|
s.platform = Gem::Platform::RUBY
s.name = "activestorage"
s.version = version
s.summary = "Local and cloud file storage framework."
s.description = "Attach cloud and local files in Rails applications."
s.required_ruby_version = ">= 2.2.2"
s.license = "MIT"
s.author = "David Heinemeier Hansson"
s.email = "david@loudthinking.com"
s.homepage = "http://rubyonrails.org"
s.files = Dir["CHANGELOG.md", "MIT-LICENSE", "README.md", "lib/**/*", "app/**/*", "config/**/*"]
s.require_path = "lib"
s.metadata = {
"source_code_uri" => "https://github.com/rails/rails/tree/v#{version}/activestorage",
"changelog_uri" => "https://github.com/rails/rails/blob/v#{version}/activestorage/CHANGELOG.md"
}
s.add_dependency "actionpack", version
s.add_dependency "activerecord", version
end

File diff suppressed because one or more lines are too long

@ -0,0 +1,22 @@
# Take a signed permanent reference for a blob and turn it into an expiring service URL for download.
# Note: These URLs are publicly accessible. If you need to enforce access protection beyond the
# security-through-obscurity factor of the signed blob references, you'll need to implement your own
# authenticated redirection controller.
class ActiveStorage::BlobsController < ActionController::Base
def show
if blob = find_signed_blob
redirect_to blob.service_url(disposition: disposition_param)
else
head :not_found
end
end
private
def find_signed_blob
ActiveStorage::Blob.find_signed(params[:signed_id])
end
def disposition_param
params[:disposition].presence_in(%w( inline attachment )) || "inline"
end
end
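# A hedged sketch of the "authenticated redirection controller" suggested above;
# SecureBlobsController and require_login are hypothetical, not part of this commit.
class SecureBlobsController < ApplicationController
  before_action :require_login # assumed authentication filter provided by the app
  def show
    if blob = ActiveStorage::Blob.find_signed(params[:signed_id])
      # Same redirect as ActiveStorage::BlobsController, but behind authentication.
      redirect_to blob.service_url(disposition: params[:disposition].presence_in(%w( inline attachment )) || "inline")
    else
      head :not_found
    end
  end
end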

@ -0,0 +1,21 @@
# Creates a new blob on the server side in anticipation of a direct-to-service upload from the client side.
# When the client-side upload is completed, the signed_blob_id can be submitted as part of the form to reference
# the blob that was created up front.
class ActiveStorage::DirectUploadsController < ActionController::Base
def create
blob = ActiveStorage::Blob.create_before_direct_upload!(blob_args)
render json: direct_upload_json(blob)
end
private
def blob_args
params.require(:blob).permit(:filename, :byte_size, :checksum, :content_type, :metadata).to_h.symbolize_keys
end
def direct_upload_json(blob)
blob.as_json(methods: :signed_id).merge(direct_upload: {
url: blob.service_url_for_direct_upload,
headers: blob.service_headers_for_direct_upload
})
end
end
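# A sketch of exercising this endpoint from a Rails integration test; the test class
# and all field values below are illustrative assumptions, not part of this commit.
class DirectUploadsSketchTest < ActionDispatch::IntegrationTest
  test "creating a blob ahead of a direct upload" do
    post rails_direct_uploads_url, params: { blob: {
      filename: "racecar.jpg", byte_size: 1_124_062,
      checksum: "7llaDWWaYfWZr8dS7Dlrfg==", # base64-encoded MD5 of the file's contents
      content_type: "image/jpeg"
    } }
    # The JSON response carries the blob's attributes plus its signed_id and a
    # direct_upload hash with the service URL and headers for the subsequent PUT.
    assert_response :success
  end
end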

@ -0,0 +1,51 @@
# Serves files stored with the disk service in the same way that the cloud services do.
# This means using expiring, signed URLs that are meant for immediate access, not permanent linking.
# Always go through the BlobsController, or your own authenticated controller, rather than directly
# to the service URL.
class ActiveStorage::DiskController < ActionController::Base
def show
if key = decode_verified_key
send_data disk_service.download(key),
filename: params[:filename], disposition: disposition_param, content_type: params[:content_type]
else
head :not_found
end
end
def update
if token = decode_verified_token
if acceptable_content?(token)
disk_service.upload token[:key], request.body, checksum: token[:checksum]
else
head :unprocessable_entity
end
else
head :not_found
end
rescue ActiveStorage::IntegrityError
head :unprocessable_entity
end
private
def disk_service
ActiveStorage::Blob.service
end
def decode_verified_key
ActiveStorage.verifier.verified(params[:encoded_key], purpose: :blob_key)
end
def disposition_param
params[:disposition].presence_in(%w( inline attachment )) || "inline"
end
def decode_verified_token
ActiveStorage.verifier.verified(params[:encoded_token], purpose: :blob_token)
end
def acceptable_content?(token)
token[:content_type] == request.content_type && token[:content_length] == request.content_length
end
end
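# A sketch of the verifier round trip this controller relies on; the key value is
# illustrative, and the purpose mirrors decode_verified_key above.
encoded_key = ActiveStorage.verifier.generate("XTAPjJCJiuDrLk3TmwyJGpUo", purpose: :blob_key)
ActiveStorage.verifier.verified(encoded_key, purpose: :blob_key)
# => "XTAPjJCJiuDrLk3TmwyJGpUo", or nil if the signature is invalid or the purpose differs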

@ -0,0 +1,26 @@
# Take a signed permanent reference for a variant and turn it into an expiring service URL for download.
# Note: These URLs are publicly accessible. If you need to enforce access protection beyond the
# security-through-obscurity factor of the signed blob and variation reference, you'll need to implement your own
# authenticated redirection controller.
class ActiveStorage::VariantsController < ActionController::Base
def show
if blob = find_signed_blob
redirect_to ActiveStorage::Variant.new(blob, decoded_variation).processed.service_url(disposition: disposition_param)
else
head :not_found
end
end
private
def find_signed_blob
ActiveStorage::Blob.find_signed(params[:signed_blob_id])
end
def decoded_variation
ActiveStorage::Variation.decode(params[:variation_key])
end
def disposition_param
params[:disposition].presence_in(%w( inline attachment )) || "inline"
end
end

@ -0,0 +1,54 @@
import { getMetaValue } from "./helpers"
export class BlobRecord {
constructor(file, checksum, url) {
this.file = file
this.attributes = {
filename: file.name,
content_type: file.type,
byte_size: file.size,
checksum: checksum
}
this.xhr = new XMLHttpRequest
this.xhr.open("POST", url, true)
this.xhr.responseType = "json"
this.xhr.setRequestHeader("Content-Type", "application/json")
this.xhr.setRequestHeader("Accept", "application/json")
this.xhr.setRequestHeader("X-Requested-With", "XMLHttpRequest")
this.xhr.setRequestHeader("X-CSRF-Token", getMetaValue("csrf-token"))
this.xhr.addEventListener("load", event => this.requestDidLoad(event))
this.xhr.addEventListener("error", event => this.requestDidError(event))
}
create(callback) {
this.callback = callback
this.xhr.send(JSON.stringify({ blob: this.attributes }))
}
requestDidLoad(event) {
const { status, response } = this.xhr
if (status >= 200 && status < 300) {
const { direct_upload } = response
delete response.direct_upload
this.attributes = response
this.directUploadData = direct_upload
this.callback(null, this.toJSON())
} else {
this.requestDidError(event)
}
}
requestDidError(event) {
this.callback(`Error creating Blob for "${this.file.name}". Status: ${this.xhr.status}`)
}
toJSON() {
const result = {}
for (const key in this.attributes) {
result[key] = this.attributes[key]
}
return result
}
}

@ -0,0 +1,34 @@
export class BlobUpload {
constructor(blob) {
this.blob = blob
this.file = blob.file
const { url, headers } = blob.directUploadData
this.xhr = new XMLHttpRequest
this.xhr.open("PUT", url, true)
for (const key in headers) {
this.xhr.setRequestHeader(key, headers[key])
}
this.xhr.addEventListener("load", event => this.requestDidLoad(event))
this.xhr.addEventListener("error", event => this.requestDidError(event))
}
create(callback) {
this.callback = callback
this.xhr.send(this.file)
}
requestDidLoad(event) {
const { status, response } = this.xhr
if (status >= 200 && status < 300) {
this.callback(null, response)
} else {
this.requestDidError(event)
}
}
requestDidError(event) {
this.callback(`Error storing "${this.file.name}". Status: ${this.xhr.status}`)
}
}

@ -0,0 +1,42 @@
import { FileChecksum } from "./file_checksum"
import { BlobRecord } from "./blob_record"
import { BlobUpload } from "./blob_upload"
let id = 0
export class DirectUpload {
constructor(file, url, delegate) {
this.id = ++id
this.file = file
this.url = url
this.delegate = delegate
}
create(callback) {
FileChecksum.create(this.file, (error, checksum) => {
const blob = new BlobRecord(this.file, checksum, this.url)
notify(this.delegate, "directUploadWillCreateBlobWithXHR", blob.xhr)
blob.create(error => {
if (error) {
callback(error)
} else {
const upload = new BlobUpload(blob)
notify(this.delegate, "directUploadWillStoreFileWithXHR", upload.xhr)
upload.create(error => {
if (error) {
callback(error)
} else {
callback(null, blob.toJSON())
}
})
}
})
})
}
}
function notify(object, methodName, ...messages) {
if (object && typeof object[methodName] == "function") {
return object[methodName](...messages)
}
}

@ -0,0 +1,67 @@
import { DirectUpload } from "./direct_upload"
import { dispatchEvent } from "./helpers"
export class DirectUploadController {
constructor(input, file) {
this.input = input
this.file = file
this.directUpload = new DirectUpload(this.file, this.url, this)
this.dispatch("initialize")
}
start(callback) {
const hiddenInput = document.createElement("input")
hiddenInput.type = "hidden"
hiddenInput.name = this.input.name
this.input.insertAdjacentElement("beforebegin", hiddenInput)
this.dispatch("start")
this.directUpload.create((error, attributes) => {
if (error) {
hiddenInput.parentNode.removeChild(hiddenInput)
this.dispatchError(error)
} else {
hiddenInput.value = attributes.signed_id
}
this.dispatch("end")
callback(error)
})
}
uploadRequestDidProgress(event) {
const progress = event.loaded / event.total * 100
if (progress) {
this.dispatch("progress", { progress })
}
}
get url() {
return this.input.getAttribute("data-direct-upload-url")
}
dispatch(name, detail = {}) {
detail.file = this.file
detail.id = this.directUpload.id
return dispatchEvent(this.input, `direct-upload:${name}`, { detail })
}
dispatchError(error) {
const event = this.dispatch("error", { error })
if (!event.defaultPrevented) {
alert(error)
}
}
// DirectUpload delegate
directUploadWillCreateBlobWithXHR(xhr) {
this.dispatch("before-blob-request", { xhr })
}
directUploadWillStoreFileWithXHR(xhr) {
this.dispatch("before-storage-request", { xhr })
xhr.upload.addEventListener("progress", event => this.uploadRequestDidProgress(event))
}
}

@ -0,0 +1,50 @@
import { DirectUploadController } from "./direct_upload_controller"
import { findElements, dispatchEvent, toArray } from "./helpers"
const inputSelector = "input[type=file][data-direct-upload-url]:not([disabled])"
export class DirectUploadsController {
constructor(form) {
this.form = form
this.inputs = findElements(form, inputSelector).filter(input => input.files.length)
}
start(callback) {
const controllers = this.createDirectUploadControllers()
const startNextController = () => {
const controller = controllers.shift()
if (controller) {
controller.start(error => {
if (error) {
callback(error)
this.dispatch("end")
} else {
startNextController()
}
})
} else {
callback()
this.dispatch("end")
}
}
this.dispatch("start")
startNextController()
}
createDirectUploadControllers() {
const controllers = []
this.inputs.forEach(input => {
toArray(input.files).forEach(file => {
const controller = new DirectUploadController(input, file)
controllers.push(controller)
})
})
return controllers
}
dispatch(name, detail = {}) {
return dispatchEvent(this.form, `direct-uploads:${name}`, { detail })
}
}

@ -0,0 +1,53 @@
import SparkMD5 from "spark-md5"
const fileSlice = File.prototype.slice || File.prototype.mozSlice || File.prototype.webkitSlice
export class FileChecksum {
static create(file, callback) {
const instance = new FileChecksum(file)
instance.create(callback)
}
constructor(file) {
this.file = file
this.chunkSize = 2097152 // 2MB
this.chunkCount = Math.ceil(this.file.size / this.chunkSize)
this.chunkIndex = 0
}
create(callback) {
this.callback = callback
this.md5Buffer = new SparkMD5.ArrayBuffer
this.fileReader = new FileReader
this.fileReader.addEventListener("load", event => this.fileReaderDidLoad(event))
this.fileReader.addEventListener("error", event => this.fileReaderDidError(event))
this.readNextChunk()
}
fileReaderDidLoad(event) {
this.md5Buffer.append(event.target.result)
if (!this.readNextChunk()) {
const binaryDigest = this.md5Buffer.end(true)
const base64digest = btoa(binaryDigest)
this.callback(null, base64digest)
}
}
fileReaderDidError(event) {
this.callback(`Error reading ${this.file.name}`)
}
readNextChunk() {
if (this.chunkIndex < this.chunkCount) {
const start = this.chunkIndex * this.chunkSize
const end = Math.min(start + this.chunkSize, this.file.size)
const bytes = fileSlice.call(this.file, start, end)
this.fileReader.readAsArrayBuffer(bytes)
this.chunkIndex++
return true
} else {
return false
}
}
}

@ -0,0 +1,42 @@
export function getMetaValue(name) {
const element = findElement(document.head, `meta[name="${name}"]`)
if (element) {
return element.getAttribute("content")
}
}
export function findElements(root, selector) {
if (typeof root == "string") {
selector = root
root = document
}
const elements = root.querySelectorAll(selector)
return toArray(elements)
}
export function findElement(root, selector) {
if (typeof root == "string") {
selector = root
root = document
}
return root.querySelector(selector)
}
export function dispatchEvent(element, type, eventInit = {}) {
const { bubbles, cancelable, detail } = eventInit
const event = document.createEvent("Event")
event.initEvent(type, bubbles !== false, cancelable !== false) // default both to true while honoring an explicit false
event.detail = detail || {}
element.dispatchEvent(event)
return event
}
export function toArray(value) {
if (Array.isArray(value)) {
return value
} else if (Array.from) {
return Array.from(value)
} else {
return [].slice.call(value)
}
}

@ -0,0 +1,11 @@
import { start } from "./ujs"
import { DirectUpload } from "./direct_upload"
export { start, DirectUpload }
function autostart() {
if (window.ActiveStorage) {
start()
}
}
setTimeout(autostart, 1)

@ -0,0 +1,74 @@
import { DirectUploadsController } from "./direct_uploads_controller"
import { findElement } from "./helpers"
const processingAttribute = "data-direct-uploads-processing"
let started = false
export function start() {
if (!started) {
started = true
document.addEventListener("submit", didSubmitForm)
document.addEventListener("ajax:before", didSubmitRemoteElement)
}
}
function didSubmitForm(event) {
handleFormSubmissionEvent(event)
}
function didSubmitRemoteElement(event) {
if (event.target.tagName == "FORM") {
handleFormSubmissionEvent(event)
}
}
function handleFormSubmissionEvent(event) {
const form = event.target
if (form.hasAttribute(processingAttribute)) {
event.preventDefault()
return
}
const controller = new DirectUploadsController(form)
const { inputs } = controller
if (inputs.length) {
event.preventDefault()
form.setAttribute(processingAttribute, "")
inputs.forEach(disable)
controller.start(error => {
form.removeAttribute(processingAttribute)
if (error) {
inputs.forEach(enable)
} else {
submitForm(form)
}
})
}
}
function submitForm(form) {
let button = findElement(form, "input[type=submit]")
if (button) {
const { disabled } = button
button.disabled = false
button.click()
button.disabled = disabled
} else {
button = document.createElement("input")
button.type = "submit"
button.style = "display:none"
form.appendChild(button)
button.click()
form.removeChild(button)
}
}
function disable(input) {
input.disabled = true
}
function enable(input) {
input.disabled = false
}

@ -0,0 +1,9 @@
# Provides delayed purging of attachments or blobs using their `#purge_later` method.
class ActiveStorage::PurgeJob < ActiveJob::Base
# FIXME: Limit this to a custom ActiveStorage error
retry_on StandardError
def perform(attachment_or_blob)
attachment_or_blob.purge
end
end

@ -0,0 +1,28 @@
require "active_support/core_ext/module/delegation"
# Attachments associate records with blobs. Usually that's a one-record-to-many-blobs relationship,
# but it is possible to associate many different records with the same blob. If you're doing that,
# you'll want to declare with `has_one/many_attached :thingy, dependent: false`, so that destroying
# any one record won't destroy the blob as well. (Then you'll need to do your own garbage collecting, though).
class ActiveStorage::Attachment < ActiveRecord::Base
self.table_name = "active_storage_attachments"
belongs_to :record, polymorphic: true
belongs_to :blob, class_name: "ActiveStorage::Blob"
delegate_missing_to :blob
# Purging an attachment will purge the blob (delete the file on the service, then destroy the record)
# and then destroy the attachment itself.
def purge
blob.purge
destroy
end
# Purging an attachment means purging the blob, which means talking to the service, which means
# talking over the internet. Whenever you're doing that, it's a good idea to put that work in a job,
# so it doesn't hold up other operations. That's what #purge_later provides.
def purge_later
ActiveStorage::PurgeJob.perform_later(self)
end
end
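# A sketch of the `dependent: false` declaration mentioned above; `Report` is a
# hypothetical model. With this option, destroying a record leaves the blob (and
# its file on the service) in place, so purging becomes your responsibility.
class Report < ApplicationRecord
  has_one_attached :document, dependent: false
end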

@ -0,0 +1,195 @@
# A blob is a record that contains the metadata about a file and a key for where that file resides on the service.
# Blobs can be created in two ways:
#
# 1) Subsequent to the file being uploaded server-side to the service via #create_after_upload!
# 2) Ahead of the file being directly uploaded client-side to the service via #create_before_direct_upload!
#
# The first option doesn't require any client-side JavaScript integration, and can be used by any other back-end
# service that deals with files. The second option is faster, since you're not using your own server as a staging
# point for uploads, and can work with deployments like Heroku that do not provide large amounts of disk space.
#
# Blobs are intended to be immutable insofar as their reference to a specific file goes. You're allowed to
# update a blob's metadata on a subsequent pass, but you should not update the key or change the uploaded file.
# If you need to create a derivative or otherwise change the blob, simply create a new blob and purge the old.
class ActiveStorage::Blob < ActiveRecord::Base
self.table_name = "active_storage_blobs"
has_secure_token :key
store :metadata, coder: JSON
class_attribute :service
class << self
# You can use the signed id of a blob to refer to it on the client side without fear of tampering.
# This is particularly helpful for direct uploads where the client side needs to refer to the blob
# that was created ahead of the upload itself on form submission.
#
# The signed id is also used to create stable URLs for the blob through the BlobsController.
def find_signed(id)
find ActiveStorage.verifier.verify(id, purpose: :blob_id)
end
# Returns a new, unsaved blob instance after the `io` has been uploaded to the service.
def build_after_upload(io:, filename:, content_type: nil, metadata: nil)
new.tap do |blob|
blob.filename = filename
blob.content_type = content_type
blob.metadata = metadata
blob.upload io
end
end
# Returns a saved blob instance after the `io` has been uploaded to the service. Note, the blob is first built,
# then the `io` is uploaded, then the blob is saved. This is done to avoid opening a transaction and talking to
# the service during that (which is a bad idea and leads to deadlocks).
def create_after_upload!(io:, filename:, content_type: nil, metadata: nil)
build_after_upload(io: io, filename: filename, content_type: content_type, metadata: metadata).tap(&:save!)
end
# Returns a saved blob _without_ uploading a file to the service. This blob will point to a key where there is
# no file yet. It's intended to be used together with a client-side upload, which will first create the blob
# in order to produce the signed URL for uploading. This signed URL points to the key generated by the blob.
# Once the form using the direct upload is submitted, the blob can be associated with the right record using
# the signed ID.
def create_before_direct_upload!(filename:, byte_size:, checksum:, content_type: nil, metadata: nil)
create! filename: filename, byte_size: byte_size, checksum: checksum, content_type: content_type, metadata: metadata
end
end
# Returns a signed ID for this blob that's suitable for reference on the client-side without fear of tampering.
# It uses the framework-wide verifier on `ActiveStorage.verifier`, but with a dedicated purpose.
def signed_id
ActiveStorage.verifier.generate(id, purpose: :blob_id)
end
# Returns the key pointing to the file on the service that's associated with this blob. The key is in the
# standard secure-token format from Rails. So it'll look like: XTAPjJCJiuDrLk3TmwyJGpUo. This key is not intended
# to be revealed directly to the user. Always refer to blobs using the signed_id or a verified form of the key.
def key
# We can't wait until the record is first saved to have a key for it
self[:key] ||= self.class.generate_unique_secure_token
end
# Returns an `ActiveStorage::Filename` instance of the filename that can be queried for basename, extension, and
# a sanitized version of the filename that's safe to use in URLs.
def filename
ActiveStorage::Filename.new(self[:filename])
end
# Returns true if the content_type of this blob is in the image range, like image/png.
def image?
content_type =~ /^image/
end
# Returns true if the content_type of this blob is in the audio range, like audio/mpeg.
def audio?
content_type =~ /^audio/
end
# Returns true if the content_type of this blob is in the video range, like video/mp4.
def video?
content_type =~ /^video/
end
# Returns true if the content_type of this blob is in the text range, like text/plain.
def text?
content_type =~ /^text/
end
# Returns an `ActiveStorage::Variant` instance with the set of `transformations` passed in. This is only relevant
# for image files, and it allows any image to be transformed for size, colors, and the like. Example:
#
# avatar.variant(resize: "100x100").processed.service_url
#
# This will create and process a variant of the avatar blob that's constrained to a height and width of 100.
# Then it'll upload said variant to the service according to a derivative key of the blob and the transformations.
#
# Frequently, though, you don't actually want to transform the variant right away, but rather simply refer to a
# specific variant that can be created by a controller on-demand. Like so:
#
# <%= image_tag url_for(Current.user.avatar.variant(resize: "100x100")) %>
#
# This will create a URL for that specific blob with that specific variant, which the `ActiveStorage::VariantsController`
# can then produce on-demand.
def variant(transformations)
ActiveStorage::Variant.new(self, ActiveStorage::Variation.new(transformations))
end
# Returns the URL of the blob on the service. This URL is intended to be short-lived for security and not used directly
# with users. Instead, the `service_url` should only be exposed as a redirect from a stable, possibly authenticated URL.
# Hiding the `service_url` behind a redirect also gives you the power to change services without updating all URLs. And
# it allows permanent URLs that redirect to the `service_url` to be cached in the view.
def service_url(expires_in: 5.minutes, disposition: :inline)
service.url key, expires_in: expires_in, disposition: disposition, filename: filename, content_type: content_type
end
# Returns a URL that can be used to directly upload a file for this blob on the service. This URL is intended to be
# short-lived for security and only generated on-demand by the client-side JavaScript responsible for doing the uploading.
def service_url_for_direct_upload(expires_in: 5.minutes)
service.url_for_direct_upload key, expires_in: expires_in, content_type: content_type, content_length: byte_size, checksum: checksum
end
# Returns a Hash of headers for `service_url_for_direct_upload` requests.
def service_headers_for_direct_upload
service.headers_for_direct_upload key, filename: filename, content_type: content_type, content_length: byte_size, checksum: checksum
end
# Uploads the `io` to the service on the `key` for this blob. Blobs are intended to be immutable, so you shouldn't be
# using this method after a file has already been uploaded for a given blob. If you want to create a derivative blob,
# you should instead simply create a new blob based on the old one.
#
# Prior to uploading, we compute the checksum, which is sent to the service for transit integrity validation. If the
# checksum does not match what the service receives, an exception will be raised. We also measure the size of the `io`
# and store that in `byte_size` on the blob record.
#
# Normally, you do not have to call this method directly at all. Use the factory class methods of `build_after_upload`
# and `create_after_upload!`.
def upload(io)
self.checksum = compute_checksum_in_chunks(io)
self.byte_size = io.size
service.upload(key, io, checksum: checksum)
end
# Downloads the file associated with this blob. If no block is given, the entire file is read into memory and returned.
# That'll use a lot of RAM for very large files. If a block is given, then the download is streamed and yielded in chunks.
def download(&block)
service.download key, &block
end
# Deletes the file on the service that's associated with this blob. This should only be done if the blob is going to be
# deleted as well or you will essentially have a dead reference. It's recommended to use the `#purge` and `#purge_later`
# methods in most circumstances.
def delete
service.delete key
end
# Deletes the file on the service and then destroys the blob record. This is the recommended way to dispose of unwanted
# blobs. Note, though, that deleting the file off the service will initiate an HTTP connection to the service, which may
# be slow or prevented, so you should not use this method inside a transaction or in callbacks. Use `#purge_later` instead.
def purge
delete
destroy
end
# Enqueues an `ActiveStorage::PurgeJob` job that'll call `#purge`. This is the recommended way to purge blobs when the call
# needs to be made from a transaction, a callback, or any other real-time scenario.
def purge_later
ActiveStorage::PurgeJob.perform_later(self)
end
private
def compute_checksum_in_chunks(io)
Digest::MD5.new.tap do |checksum|
while chunk = io.read(5.megabytes)
checksum << chunk
end
io.rewind
end.base64digest
end
end
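# A minimal usage sketch with assumed paths: create a blob by uploading server-side,
# then stream the download in chunks rather than reading it all into memory.
blob = ActiveStorage::Blob.create_after_upload!(
  io: File.open("/tmp/report.pdf"),
  filename: "report.pdf",
  content_type: "application/pdf"
)

File.open("/tmp/copy.pdf", "wb") do |file|
  blob.download { |chunk| file.write(chunk) } # chunks are yielded by the service
end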

@ -0,0 +1,49 @@
# Encapsulates a string representing a filename to provide convenience access to parts of it and a sanitized version.
# This is what's returned by `ActiveStorage::Blob#filename`. A Filename instance is comparable so it can be used for sorting.
class ActiveStorage::Filename
include Comparable
def initialize(filename)
@filename = filename
end
# Filename.new("racecar.jpg").extname # => ".jpg"
def extname
File.extname(@filename)
end
# Filename.new("racecar.jpg").extension # => "jpg"
def extension
extname.from(1)
end
# Filename.new("racecar.jpg").base # => "racecar"
def base
File.basename(@filename, extname)
end
# Filename.new("foo:bar.jpg").sanitized # => "foo-bar.jpg"
# Filename.new("foo/bar.jpg").sanitized # => "foo-bar.jpg"
#
# ...and any other character unsafe for URLs or storage is converted or stripped.
def sanitized
@filename.encode(Encoding::UTF_8, invalid: :replace, undef: :replace, replace: "�").strip.tr("\u{202E}%$|:;/\t\r\n\\", "-")
end
# Returns the sanitized version of the filename.
def to_s
sanitized.to_s
end
def as_json(*)
to_s
end
def to_json
to_s
end
def <=>(other)
to_s.downcase <=> other.to_s.downcase
end
end
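# A sketch of the API above; the filenames are illustrative.
filename = ActiveStorage::Filename.new("foo:bar.jpg")
filename.base      # => "foo:bar"
filename.extension # => "jpg"
filename.sanitized # => "foo-bar.jpg"

# Because Filename is Comparable, collections sort case-insensitively:
[ActiveStorage::Filename.new("B.png"), ActiveStorage::Filename.new("a.jpg")].sort.map(&:to_s)
# => ["a.jpg", "B.png"]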

@ -0,0 +1,80 @@
# Image blobs can have variants that are the result of a set of transformations applied to the original.
# These variants are used to create thumbnails, fixed-size avatars, or any other derivative image from the
# original.
#
# Variants rely on `MiniMagick` for the actual transformations of the file, so you must add `gem "mini_magick"`
# to your Gemfile if you wish to use variants.
#
# Note that to create a variant it's necessary to download the entire blob file from the service and load it
# into memory. The larger the image, the more memory is used. Because of this process, you also want to be
# considerate about when the variant is actually processed. You shouldn't be processing variants inline in a
# template, for example. Delay the processing to an on-demand controller, like the one provided in
# `ActiveStorage::VariantsController`.
#
# To refer to such a delayed on-demand variant, simply link to the variant through the resolved route provided
# by Active Storage like so:
#
# <%= image_tag url_for(Current.user.avatar.variant(resize: "100x100")) %>
#
# This will create a URL for that specific blob with that specific variant, which the `ActiveStorage::VariantsController`
# can then produce on-demand.
#
# When you do want to actually produce the variant needed, call `#processed`. This will check that the variant
# has already been processed and uploaded to the service, and, if so, just return that. Otherwise it will perform
# the transformations, upload the variant to the service, and return itself again. Example:
#
# avatar.variant(resize: "100x100").processed.service_url
#
# This will create and process a variant of the avatar blob that's constrained to a height and width of 100.
# Then it'll upload said variant to the service according to a derivative key of the blob and the transformations.
#
# A list of all possible transformations is available at https://www.imagemagick.org/script/mogrify.php. You can
# combine as many as you like freely:
#
# avatar.variant(resize: "100x100", monochrome: true, flip: "-90")
class ActiveStorage::Variant
attr_reader :blob, :variation
delegate :service, to: :blob
def initialize(blob, variation)
@blob, @variation = blob, variation
end
# Returns the variant instance itself after it's been processed, or after an already-processed version has been found on the service.
def processed
process unless processed?
self
end
# Returns a combination key of the blob and the variation that together identifies a specific variant.
def key
"variants/#{blob.key}/#{variation.key}"
end
# Returns the URL of the variant on the service. This URL is intended to be short-lived for security and not used directly
# with users. Instead, the `service_url` should only be exposed as a redirect from a stable, possibly authenticated URL.
# Hiding the `service_url` behind a redirect also gives you the power to change services without updating all URLs. And
# it allows permanent URLs that redirect to the `service_url` to be cached in the view.
#
# Use `url_for(variant)` (or the implied form, like `link_to variant` or `redirect_to variant`) to get the stable URL
# for a variant that points to the `ActiveStorage::VariantsController`, which in turn will use this `#service_url` method
# for its redirection.
def service_url(expires_in: 5.minutes, disposition: :inline)
service.url key, expires_in: expires_in, disposition: disposition, filename: blob.filename, content_type: blob.content_type
end
private
def processed?
service.exist?(key)
end
def process
service.upload key, transform(service.download(blob.key))
end
def transform(io)
require "mini_magick"
File.open MiniMagick::Image.read(io).tap { |image| variation.transform(image) }.path
end
end

@ -0,0 +1,53 @@
require "active_support/core_ext/object/inclusion"
# A set of transformations that can be applied to a blob to create a variant. This class is exposed via
# the `ActiveStorage::Blob#variant` method and should rarely be used directly.
#
# In case you do need to use this directly, it's instantiated using a hash of transformations where
# the key is the command and the value is the arguments. Example:
#
# ActiveStorage::Variation.new(resize: "100x100", monochrome: true, trim: true, rotate: "-90")
#
# A list of all possible transformations is available at https://www.imagemagick.org/script/mogrify.php.
class ActiveStorage::Variation
attr_reader :transformations
class << self
# Returns a variation instance with the transformations that were encoded by `#encode`.
def decode(key)
new ActiveStorage.verifier.verify(key, purpose: :variation)
end
# Returns a signed key for the `transformations`, which can be used to refer to a specific
# variation in a URL or combined key (like `ActiveStorage::Variant#key`).
def encode(transformations)
ActiveStorage.verifier.generate(transformations, purpose: :variation)
end
end
def initialize(transformations)
@transformations = transformations
end
# Accepts an open MiniMagick image instance, like what's returned by `MiniMagick::Image.read(io)`,
# and performs the `transformations` against it. The transformed image instance is then returned.
def transform(image)
transformations.each do |(method, argument)|
if eligible_argument?(argument)
image.public_send(method, argument)
else
image.public_send(method)
end
end
end
# Returns a signed key for all the `transformations` that this variation was instantiated with.
def key
self.class.encode(transformations)
end
private
def eligible_argument?(argument)
argument.present? && argument != true
end
end
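# A sketch of the encode/decode round trip, assuming ActiveStorage.verifier has been
# configured by the host application.
variation = ActiveStorage::Variation.new(resize: "100x100", monochrome: true)
key = variation.key # signed representation, safe to embed in URLs
ActiveStorage::Variation.decode(key).transformations # => { resize: "100x100", monochrome: true }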

@ -0,0 +1,28 @@
Rails.application.routes.draw do
get "/rails/active_storage/blobs/:signed_id/*filename" => "active_storage/blobs#show", as: :rails_service_blob
direct :rails_blob do |blob|
route_for(:rails_service_blob, blob.signed_id, blob.filename)
end
resolve("ActiveStorage::Blob") { |blob| route_for(:rails_blob, blob) }
resolve("ActiveStorage::Attachment") { |attachment| route_for(:rails_blob, attachment.blob) }
get "/rails/active_storage/variants/:signed_blob_id/:variation_key/*filename" => "active_storage/variants#show", as: :rails_blob_variation
direct :rails_variant do |variant|
signed_blob_id = variant.blob.signed_id
variation_key = variant.variation.key
filename = variant.blob.filename
route_for(:rails_blob_variation, signed_blob_id, variation_key, filename)
end
resolve("ActiveStorage::Variant") { |variant| route_for(:rails_variant, variant) }
get "/rails/active_storage/disk/:encoded_key/*filename" => "active_storage/disk#show", as: :rails_disk_service
put "/rails/active_storage/disk/:encoded_token" => "active_storage/disk#update", as: :update_rails_disk_service
post "/rails/active_storage/direct_uploads" => "active_storage/direct_uploads#create", as: :rails_direct_uploads
end
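# With the direct and resolve declarations above, blobs can be linked to through
# stable, signed URLs; a sketch assuming a `user` record with an attached avatar:
#
#   url_for(user.avatar.blob)        # resolves via resolve("ActiveStorage::Blob")
#   rails_blob_url(user.avatar.blob) # uses the direct :rails_blob route
#
# Both point at ActiveStorage::BlobsController, which redirects to the expiring service URL.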

@ -0,0 +1,36 @@
#--
# Copyright (c) 2017 David Heinemeier Hansson
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#++
require "active_record"
require "active_support"
require "active_support/rails"
require_relative "active_storage/version"
module ActiveStorage
extend ActiveSupport::Autoload
autoload :Attached
autoload :Service
mattr_accessor :verifier
end
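# A sketch of how a host application might wire up the verifier; using Rails'
# built-in message verifier here is an assumption, not part of this file.
ActiveStorage.verifier = Rails.application.message_verifier("ActiveStorage")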

@ -0,0 +1,36 @@
require "action_dispatch"
require "action_dispatch/http/upload"
require "active_support/core_ext/module/delegation"
# Abstract base class for the concrete `ActiveStorage::Attached::One` and `ActiveStorage::Attached::Many`
# classes that both provide proxy access to the blob association for a record.
class ActiveStorage::Attached
attr_reader :name, :record
def initialize(name, record)
@name, @record = name, record
end
private
def create_blob_from(attachable)
case attachable
when ActiveStorage::Blob
attachable
when ActionDispatch::Http::UploadedFile
ActiveStorage::Blob.create_after_upload! \
io: attachable.open,
filename: attachable.original_filename,
content_type: attachable.content_type
when Hash
ActiveStorage::Blob.create_after_upload!(attachable)
when String
ActiveStorage::Blob.find_signed(attachable)
else
nil
end
end
end
require "active_storage/attached/one"
require "active_storage/attached/many"
require "active_storage/attached/macros"

@ -0,0 +1,76 @@
# Provides the class-level DSL for declaring that an Active Record model has attached blobs.
module ActiveStorage::Attached::Macros
# Specifies the relation between a single attachment and the model.
#
# class User < ActiveRecord::Base
# has_one_attached :avatar
# end
#
# There is no column defined on the model side; Active Storage takes
# care of the mapping between your records and the attachment.
#
# Under the covers, this relationship is implemented as a `has_one` association to a
# `ActiveStorage::Attachment` record and a `has_one-through` association to a
# `ActiveStorage::Blob` record. These associations are available as `avatar_attachment`
# and `avatar_blob`. But you shouldn't need to work with these associations directly in
# most circumstances.
#
# The system has been designed to have you go through the `ActiveStorage::Attached::One`
# proxy that provides the dynamic proxy to the associations and factory methods, like `#attach`.
#
# If the +:dependent+ option isn't set, the attachment will be purged
# (i.e. destroyed) whenever the record is destroyed.
def has_one_attached(name, dependent: :purge_later)
define_method(name) do
instance_variable_get("@active_storage_attached_#{name}") ||
instance_variable_set("@active_storage_attached_#{name}", ActiveStorage::Attached::One.new(name, self))
end
has_one :"#{name}_attachment", -> { where(name: name) }, class_name: "ActiveStorage::Attachment", as: :record
has_one :"#{name}_blob", through: :"#{name}_attachment", class_name: "ActiveStorage::Blob", source: :blob
if dependent == :purge_later
before_destroy { public_send(name).purge_later }
end
end
# Specifies the relation between multiple attachments and the model.
#
# class Gallery < ActiveRecord::Base
# has_many_attached :photos
# end
#
# There are no columns defined on the model side; Active Storage takes
# care of the mapping between your records and the attachments.
#
# To avoid N+1 queries, you can include the attached blobs in your query like so:
#
# Gallery.where(user: Current.user).with_attached_photos
#
# Under the covers, this relationship is implemented as a `has_many` association to a
# `ActiveStorage::Attachment` record and a `has_many-through` association to a
# `ActiveStorage::Blob` record. These associations are available as `photos_attachments`
# and `photos_blobs`. But you shouldn't need to work with these associations directly in
# most circumstances.
#
# The system has been designed to have you go through the `ActiveStorage::Attached::Many`
# proxy, which wraps the associations and provides factory methods, like `#attach`.
#
# Unless the +:dependent+ option is overridden, all the attachments will be purged
# (i.e. destroyed) via a background job whenever the record is destroyed.
def has_many_attached(name, dependent: :purge_later)
define_method(name) do
instance_variable_get("@active_storage_attached_#{name}") ||
instance_variable_set("@active_storage_attached_#{name}", ActiveStorage::Attached::Many.new(name, self))
end
has_many :"#{name}_attachments", -> { where(name: name) }, as: :record, class_name: "ActiveStorage::Attachment"
has_many :"#{name}_blobs", through: :"#{name}_attachments", class_name: "ActiveStorage::Blob", source: :blob
scope :"with_attached_#{name}", -> { includes("#{name}_attachments": :blob) }
if dependent == :purge_later
before_destroy { public_send(name).purge_later }
end
end
end
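As a minimal sketch of the two macros in use, assuming hypothetical User and Gallery models backed by the tables this commit creates:
class User < ApplicationRecord
has_one_attached :avatar # adds #avatar plus the avatar_attachment and avatar_blob associations
end
class Gallery < ApplicationRecord
has_many_attached :photos, dependent: false # opt out of the default purge-on-destroy job
end
Gallery.with_attached_photos # eager loads the attached blobs to avoid N+1 queries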

@ -0,0 +1,51 @@
# Decorated proxy object representing multiple attachments to a model.
class ActiveStorage::Attached::Many < ActiveStorage::Attached
delegate_missing_to :attachments
# Returns all the associated attachment records.
#
# All methods called on this proxy object that aren't listed here will automatically be delegated to `attachments`.
def attachments
record.public_send("#{name}_attachments")
end
# Associates one or several attachments with the current record, saving them to the database.
# Examples:
#
# document.images.attach(params[:images]) # Array of ActionDispatch::Http::UploadedFile objects
# document.images.attach(params[:signed_blob_id]) # Signed reference to blob from direct upload
# document.images.attach(io: File.open("~/racecar.jpg"), filename: "racecar.jpg", content_type: "image/jpeg")
# document.images.attach([ first_blob, second_blob ])
def attach(*attachables)
attachables.flatten.collect do |attachable|
attachments.create!(name: name, blob: create_blob_from(attachable))
end
end
# Returns true if any attachments have been made.
#
# class Gallery < ActiveRecord::Base
# has_many_attached :photos
# end
#
# Gallery.new.photos.attached? # => false
def attached?
attachments.any?
end
# Directly purges each associated attachment (i.e. destroys the blobs and
# attachments and deletes the files on the service).
def purge
if attached?
attachments.each(&:purge)
attachments.reload
end
end
# Purges each associated attachment through the queuing system.
def purge_later
if attached?
attachments.each(&:purge_later)
end
end
end
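For illustration, a rough console session against the hypothetical Gallery model above; first_blob and second_blob stand in for existing ActiveStorage::Blob records:
gallery = Gallery.create!
gallery.photos.attached? # => false
gallery.photos.attach([ first_blob, second_blob ]) # attachables are flattened, so arrays work
gallery.photos.attached? # => true
gallery.photos.count # delegated to the underlying attachments relation
gallery.photos.purge # destroys the blobs and attachments, deletes the files on the service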

@ -0,0 +1,56 @@
# Representation of a single attachment to a model.
class ActiveStorage::Attached::One < ActiveStorage::Attached
delegate_missing_to :attachment
# Returns the associated attachment record.
#
# You don't have to call this method to access the attachment's methods as
# they are all available at the model level.
def attachment
record.public_send("#{name}_attachment")
end
# Associates a given attachment with the current record, saving it to the database.
# Examples:
#
# person.avatar.attach(params[:avatar]) # ActionDispatch::Http::UploadedFile object
# person.avatar.attach(params[:signed_blob_id]) # Signed reference to blob from direct upload
# person.avatar.attach(io: File.open("~/face.jpg"), filename: "face.jpg", content_type: "image/jpeg")
# person.avatar.attach(avatar_blob) # ActiveStorage::Blob object
def attach(attachable)
write_attachment \
ActiveStorage::Attachment.create!(record: record, name: name, blob: create_blob_from(attachable))
end
# Returns true if an attachment has been made.
#
# class User < ActiveRecord::Base
# has_one_attached :avatar
# end
#
# User.new.avatar.attached? # => false
def attached?
attachment.present?
end
# Directly purges the attachment (i.e. destroys the blob and
# attachment and deletes the file on the service).
def purge
if attached?
attachment.purge
write_attachment nil
end
end
# Purges the attachment through the queuing system.
def purge_later
if attached?
attachment.purge_later
end
end
private
def write_attachment(attachment)
record.public_send("#{name}_attachment=", attachment)
end
end
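And the single-attachment counterpart, again sketched against a hypothetical User model:
user = User.create!(name: "Jason")
user.avatar.attached? # => false
user.avatar.attach io: File.open("/path/to/face.jpg"), filename: "face.jpg", content_type: "image/jpeg"
user.avatar.attached? # => true
user.avatar.purge_later # enqueues a purge job instead of deleting inline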

@ -0,0 +1,64 @@
# frozen_string_literal: true
require "rails"
require "active_storage"
module ActiveStorage
class Engine < Rails::Engine # :nodoc:
config.active_storage = ActiveSupport::OrderedOptions.new
config.eager_load_namespaces << ActiveStorage
initializer "active_storage.logger" do
require "active_storage/service"
config.after_initialize do |app|
ActiveStorage::Service.logger = app.config.active_storage.logger || Rails.logger
end
end
initializer "active_storage.attached" do
require "active_storage/attached"
ActiveSupport.on_load(:active_record) do
extend ActiveStorage::Attached::Macros
end
end
initializer "active_storage.verifier" do
config.after_initialize do |app|
if app.config.secret_key_base.present?
ActiveStorage.verifier = app.message_verifier("ActiveStorage")
end
end
end
initializer "active_storage.services" do
config.after_initialize do |app|
if config_choice = app.config.active_storage.service
config_file = Pathname.new(Rails.root.join("config/storage.yml"))
raise("Couldn't find Active Storage configuration in #{config_file}") unless config_file.exist?
require "yaml"
require "erb"
configs =
begin
YAML.load(ERB.new(config_file.read).result) || {}
rescue Psych::SyntaxError => e
raise "YAML syntax error occurred while parsing #{config_file}. " \
"Please note that YAML must be consistently indented using spaces. Tabs are not allowed. " \
"Error: #{e.message}"
end
ActiveStorage::Blob.service =
begin
ActiveStorage::Service.configure config_choice, configs
rescue => e
raise e, "Cannot load `Rails.config.active_storage.service`:\n#{e.message}", e.backtrace
end
end
end
end
end
end

@ -0,0 +1,15 @@
module ActiveStorage
# Returns the version of the currently loaded Active Storage as a <tt>Gem::Version</tt>
def self.gem_version
Gem::Version.new VERSION::STRING
end
module VERSION
MAJOR = 0
MINOR = 1
TINY = 0
PRE = "alpha"
STRING = [MAJOR, MINOR, TINY, PRE].compact.join(".")
end
end

@ -0,0 +1,48 @@
require "active_support/log_subscriber"
class ActiveStorage::LogSubscriber < ActiveSupport::LogSubscriber
def service_upload(event)
message = "Uploaded file to key: #{key_in(event)}"
message << " (checksum: #{event.payload[:checksum]})" if event.payload[:checksum]
info event, color(message, GREEN)
end
def service_download(event)
info event, color("Downloaded file from key: #{key_in(event)}", BLUE)
end
def service_delete(event)
info event, color("Deleted file from key: #{key_in(event)}", RED)
end
def service_exist(event)
debug event, color("Checked if file exist at key: #{key_in(event)} (#{event.payload[:exist] ? "yes" : "no"})", BLUE)
end
def service_url(event)
debug event, color("Generated URL for file at key: #{key_in(event)} (#{event.payload[:url]})", BLUE)
end
def logger
ActiveStorage::Service.logger
end
private
def info(event, colored_message)
super log_prefix_for_service(event) + colored_message
end
def debug(event, colored_message)
super log_prefix_for_service(event) + colored_message
end
def log_prefix_for_service(event)
color " #{event.payload[:service]} Storage (#{event.duration.round(1)}ms) ", CYAN
end
def key_in(event)
event.payload[:key]
end
end
ActiveStorage::LogSubscriber.attach_to :active_storage
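Given the message formats above, a successful upload and existence check through the Disk service would log along these lines (key, checksum, and timings are illustrative):
  Disk Storage (4.2ms) Uploaded file to key: bu5edsqspb1wjnrn43bicfhb (checksum: TIgXpaSBTFGJjW6BMlT6JA==)
  Disk Storage (0.3ms) Checked if file exists at key: bu5edsqspb1wjnrn43bicfhb (yes)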

@ -0,0 +1,27 @@
class ActiveStorageCreateTables < ActiveRecord::Migration[5.1]
def change
create_table :active_storage_blobs do |t|
t.string :key
t.string :filename
t.string :content_type
t.text :metadata
t.integer :byte_size
t.string :checksum
t.datetime :created_at
t.index [ :key ], unique: true
end
create_table :active_storage_attachments do |t|
t.string :name
t.string :record_type
t.integer :record_id
t.integer :blob_id
t.datetime :created_at
t.index :blob_id
t.index [ :record_type, :record_id, :name, :blob_id ], name: "index_active_storage_attachments_uniqueness", unique: true
end
end
end

@ -0,0 +1,114 @@
require "active_storage/log_subscriber"
# Abstract class serving as an interface for concrete services.
#
# The available services are:
#
# * +Disk+, to manage attachments saved directly on the hard drive.
# * +GCS+, to manage attachments through Google Cloud Storage.
# * +S3+, to manage attachments through Amazon S3.
# * +AzureStorage+, to manage attachments through Microsoft Azure Storage.
# * +Mirror+, to be able to use several services to manage attachments.
#
# Inside a Rails application, you can set up your services through the
# generated <tt>config/storage.yml</tt> file and reference one
# of the aforementioned constants under the +service+ key. For example:
#
# local:
# service: Disk
# root: <%= Rails.root.join("storage") %>
#
# You can check out the service's constructor to know which keys are required.
#
# Then, in your application's configuration, you can specify the service to
# use like this:
#
# config.active_storage.service = :local
#
# If you are using Active Storage outside of a Ruby on Rails application, you
# can configure the service to use like this:
#
# ActiveStorage::Blob.service = ActiveStorage::Service.configure(
# :Disk,
# root: Pathname("/foo/bar/storage")
# )
class ActiveStorage::Service
class ActiveStorage::IntegrityError < StandardError; end
extend ActiveSupport::Autoload
autoload :Configurator
class_attribute :logger
class << self
# Configure an Active Storage service by name from a set of configurations,
# typically loaded from a YAML file. The Active Storage engine uses this
# to set the global Active Storage service when the app boots.
def configure(service_name, configurations)
Configurator.build(service_name, configurations)
end
# Override in subclasses that stitch together multiple services and hence
# need to build additional services using the configurator.
#
# Passes the configurator and all of the service's config as keyword args.
#
# See MirrorService for an example.
def build(configurator:, service: nil, **service_config) #:nodoc:
new(**service_config)
end
end
# Upload the `io` to the `key` specified. If a `checksum` is provided, the service will
# ensure a match when the upload has completed or raise an `ActiveStorage::IntegrityError`.
def upload(key, io, checksum: nil)
raise NotImplementedError
end
# Return the content of the file at the `key`.
def download(key)
raise NotImplementedError
end
# Delete the file at the `key`.
def delete(key)
raise NotImplementedError
end
# Return true if a file exists at the `key`.
def exist?(key)
raise NotImplementedError
end
# Returns a signed, temporary URL for the file at the `key`. The URL will be valid for the amount
# of seconds specified in `expires_in`. You must also provide the `disposition` (`:inline` or `:attachment`),
# `filename`, and `content_type` that you wish the file to be served with on request.
def url(key, expires_in:, disposition:, filename:, content_type:)
raise NotImplementedError
end
# Returns a signed, temporary URL that a direct upload file can be PUT to on the `key`.
# The URL will be valid for the amount of seconds specified in `expires_in`.
# You must also provide the `content_type`, `content_length`, and `checksum` of the file
# that will be uploaded. All these attributes will be validated by the service upon upload.
def url_for_direct_upload(key, expires_in:, content_type:, content_length:, checksum:)
raise NotImplementedError
end
# Returns a Hash of headers for `url_for_direct_upload` requests.
def headers_for_direct_upload(key, filename:, content_type:, content_length:, checksum:)
{}
end
private
def instrument(operation, key, payload = {}, &block)
ActiveSupport::Notifications.instrument(
"service_#{operation}.active_storage",
payload.merge(key: key, service: service_name), &block)
end
def service_name
# ActiveStorage::Service::DiskService => Disk
self.class.name.split("::").third.remove("Service")
end
end
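To make the contract concrete, here is a bare-bones in-memory service that satisfies the abstract interface; an illustrative sketch only (not part of this commit), inheriting NotImplementedError for the URL-related methods:
class ActiveStorage::Service::MemoryService < ActiveStorage::Service
def initialize(**options) # accepts and ignores any configuration keys
@files = {}
end
def upload(key, io, checksum: nil)
instrument :upload, key, checksum: checksum do
@files[key] = io.read # checksum verification omitted for brevity
end
end
def download(key)
instrument :download, key do
@files.fetch(key)
end
end
def delete(key)
instrument :delete, key do
@files.delete(key)
end
end
def exist?(key)
instrument :exist, key do |payload|
payload[:exist] = @files.key?(key)
end
end
end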

@ -0,0 +1,115 @@
require "active_support/core_ext/numeric/bytes"
require "azure/storage"
require "azure/storage/core/auth/shared_access_signature"
# Wraps the Microsoft Azure Storage Blob Service as an Active Storage service.
# See `ActiveStorage::Service` for the generic API documentation that applies to all services.
class ActiveStorage::Service::AzureStorageService < ActiveStorage::Service
attr_reader :client, :path, :blobs, :container, :signer
def initialize(path:, storage_account_name:, storage_access_key:, container:)
@client = Azure::Storage::Client.create(storage_account_name: storage_account_name, storage_access_key: storage_access_key)
@signer = Azure::Storage::Core::Auth::SharedAccessSignature.new(storage_account_name, storage_access_key)
@blobs = client.blob_client
@container = container
@path = path
end
def upload(key, io, checksum: nil)
instrument :upload, key, checksum: checksum do
begin
blobs.create_block_blob(container, key, io, content_md5: checksum)
rescue Azure::Core::Http::HTTPError => e
raise ActiveStorage::IntegrityError
end
end
end
def download(key, &block)
if block_given?
instrument :streaming_download, key do
stream(key, &block)
end
else
instrument :download, key do
_, io = blobs.get_blob(container, key)
io.force_encoding(Encoding::BINARY)
end
end
end
def delete(key)
instrument :delete, key do
begin
blobs.delete_blob(container, key)
rescue Azure::Core::Http::HTTPError
false
end
end
end
def exist?(key)
instrument :exist, key do |payload|
answer = blob_for(key).present?
payload[:exist] = answer
answer
end
end
def url(key, expires_in:, disposition:, filename:, content_type:)
instrument :url, key do |payload|
base_url = url_for(key)
generated_url = signer.signed_uri(URI(base_url), false, permissions: "r",
expiry: format_expiry(expires_in), content_type: content_type, content_disposition: "#{disposition}; filename=\"#{filename}\"").to_s
payload[:url] = generated_url
generated_url
end
end
def url_for_direct_upload(key, expires_in:, content_type:, content_length:, checksum:)
instrument :url, key do |payload|
base_url = url_for(key)
generated_url = signer.signed_uri(URI(base_url), false, permissions: "rw",
expiry: format_expiry(expires_in)).to_s
payload[:url] = generated_url
generated_url
end
end
def headers_for_direct_upload(key, content_type:, checksum:, **)
{ "Content-Type" => content_type, "Content-MD5" => checksum, "x-ms-blob-type" => "BlockBlob" }
end
private
def url_for(key)
"#{path}/#{container}/#{key}"
end
def blob_for(key)
blobs.get_blob_properties(container, key)
rescue Azure::Core::Http::HTTPError
false
end
def format_expiry(expires_in)
expires_in ? Time.now.utc.advance(seconds: expires_in).iso8601 : nil
end
# Reads the object for the given key in chunks, yielding each to the block.
def stream(key, options = {}, &block)
blob = blob_for(key)
chunk_size = 5.megabytes
offset = 0
while offset < blob.properties[:content_length]
_, io = blobs.get_blob(container, key, start_range: offset, end_range: offset + chunk_size - 1)
yield io
offset += chunk_size
end
end
end

@ -0,0 +1,28 @@
class ActiveStorage::Service::Configurator #:nodoc:
attr_reader :configurations
def self.build(service_name, configurations)
new(configurations).build(service_name)
end
def initialize(configurations)
@configurations = configurations.deep_symbolize_keys
end
def build(service_name)
config = config_for(service_name.to_sym)
resolve(config.fetch(:service)).build(**config, configurator: self)
end
private
def config_for(name)
configurations.fetch name do
raise "Missing configuration for the #{name.inspect} Active Storage service. Configurations available for #{configurations.keys.inspect}"
end
end
def resolve(class_name)
require "active_storage/service/#{class_name.to_s.underscore}_service"
ActiveStorage::Service.const_get(:"#{class_name}Service")
end
end
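As a sketch of the resolution flow, a configurations hash like the one parsed from config/storage.yml (values hypothetical) becomes a service instance like so:
configs = { local: { service: "Disk", root: "/tmp/storage" } }
ActiveStorage::Service.configure(:local, configs)
# resolve("Disk") requires active_storage/service/disk_service and returns
# ActiveStorage::Service::DiskService, which is built as DiskService.new(root: "/tmp/storage")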

@ -0,0 +1,124 @@
require "fileutils"
require "pathname"
require "digest/md5"
require "active_support/core_ext/numeric/bytes"
# Wraps a local disk path as an Active Storage service. See `ActiveStorage::Service` for the generic API
# documentation that applies to all services.
class ActiveStorage::Service::DiskService < ActiveStorage::Service
attr_reader :root
def initialize(root:)
@root = root
end
def upload(key, io, checksum: nil)
instrument :upload, key, checksum: checksum do
IO.copy_stream(io, make_path_for(key))
ensure_integrity_of(key, checksum) if checksum
end
end
def download(key)
if block_given?
instrument :streaming_download, key do
File.open(path_for(key), "rb") do |file|
while data = file.read(64.kilobytes)
yield data
end
end
end
else
instrument :download, key do
File.binread path_for(key)
end
end
end
def delete(key)
instrument :delete, key do
begin
File.delete path_for(key)
rescue Errno::ENOENT
# Ignore files already deleted
end
end
end
def exist?(key)
instrument :exist, key do |payload|
answer = File.exist? path_for(key)
payload[:exist] = answer
answer
end
end
def url(key, expires_in:, disposition:, filename:, content_type:)
instrument :url, key do |payload|
verified_key_with_expiration = ActiveStorage.verifier.generate(key, expires_in: expires_in, purpose: :blob_key)
generated_url =
if defined?(Rails.application)
Rails.application.routes.url_helpers.rails_disk_service_path \
verified_key_with_expiration,
disposition: disposition, filename: filename, content_type: content_type
else
"/rails/active_storage/disk/#{verified_key_with_expiration}/#{filename}?disposition=#{disposition}&content_type=#{content_type}"
end
payload[:url] = generated_url
generated_url
end
end
def url_for_direct_upload(key, expires_in:, content_type:, content_length:, checksum:)
instrument :url, key do |payload|
verified_token_with_expiration = ActiveStorage.verifier.generate(
{
key: key,
content_type: content_type,
content_length: content_length,
checksum: checksum
},
expires_in: expires_in,
purpose: :blob_token
)
generated_url =
if defined?(Rails.application)
Rails.application.routes.url_helpers.update_rails_disk_service_path verified_token_with_expiration
else
"/rails/active_storage/disk/#{verified_token_with_expiration}"
end
payload[:url] = generated_url
generated_url
end
end
def headers_for_direct_upload(key, content_type:, **)
{ "Content-Type" => content_type }
end
private
def path_for(key)
File.join root, folder_for(key), key
end
def folder_for(key)
[ key[0..1], key[2..3] ].join("/")
end
def make_path_for(key)
path_for(key).tap { |path| FileUtils.mkdir_p File.dirname(path) }
end
def ensure_integrity_of(key, checksum)
unless Digest::MD5.file(path_for(key)).base64digest == checksum
delete key
raise ActiveStorage::IntegrityError
end
end
end
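A quick sketch of the key partitioning scheme (key hypothetical): folder_for shards files into two directory levels derived from the first four characters of the key, which keeps any single directory from growing unboundedly:
service = ActiveStorage::Service::DiskService.new(root: "/tmp/storage")
service.send(:folder_for, "bu5edsqspb1wjnrn43bicfhb") # => "bu/5e"
service.send(:path_for, "bu5edsqspb1wjnrn43bicfhb") # => "/tmp/storage/bu/5e/bu5edsqspb1wjnrn43bicfhb"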

@ -0,0 +1,79 @@
require "google/cloud/storage"
require "active_support/core_ext/object/to_query"
# Wraps Google Cloud Storage as an Active Storage service. See `ActiveStorage::Service` for the generic API
# documentation that applies to all services.
class ActiveStorage::Service::GCSService < ActiveStorage::Service
attr_reader :client, :bucket
def initialize(project:, keyfile:, bucket:)
@client = Google::Cloud::Storage.new(project: project, keyfile: keyfile)
@bucket = @client.bucket(bucket)
end
def upload(key, io, checksum: nil)
instrument :upload, key, checksum: checksum do
begin
bucket.create_file(io, key, md5: checksum)
rescue Google::Cloud::InvalidArgumentError
raise ActiveStorage::IntegrityError
end
end
end
# FIXME: Add streaming when given a block
def download(key)
instrument :download, key do
io = file_for(key).download
io.rewind
io.read
end
end
def delete(key)
instrument :delete, key do
file_for(key).try(:delete)
end
end
def exist?(key)
instrument :exist, key do |payload|
answer = file_for(key).present?
payload[:exist] = answer
answer
end
end
def url(key, expires_in:, disposition:, filename:, content_type:)
instrument :url, key do |payload|
generated_url = file_for(key).signed_url expires: expires_in, query: {
"response-content-disposition" => "#{disposition}; filename=\"#{filename}\"",
"response-content-type" => content_type
}
payload[:url] = generated_url
generated_url
end
end
def url_for_direct_upload(key, expires_in:, content_type:, content_length:, checksum:)
instrument :url, key do |payload|
generated_url = bucket.signed_url key, method: "PUT", expires: expires_in,
content_type: content_type, content_md5: checksum
payload[:url] = generated_url
generated_url
end
end
def headers_for_direct_upload(key, content_type:, checksum:, **)
{ "Content-Type" => content_type, "Content-MD5" => checksum }
end
private
def file_for(key)
bucket.file(key)
end
end

@ -0,0 +1,46 @@
require "active_support/core_ext/module/delegation"
# Wraps a set of mirror services and provides a single `ActiveStorage::Service` object that
# uploads files to all of them. A `primary` service is designated to answer calls to `download`,
# `exist?`, and `url`.
class ActiveStorage::Service::MirrorService < ActiveStorage::Service
attr_reader :primary, :mirrors
delegate :download, :exist?, :url, to: :primary
# Stitch together from named services.
def self.build(primary:, mirrors:, configurator:, **options) #:nodoc:
new \
primary: configurator.build(primary),
mirrors: mirrors.collect { |name| configurator.build name }
end
def initialize(primary:, mirrors:)
@primary, @mirrors = primary, mirrors
end
# Upload the `io` to the `key` specified to all services. If a `checksum` is provided, all services will
# ensure a match when the upload has completed or raise an `ActiveStorage::IntegrityError`.
def upload(key, io, checksum: nil)
each_service.collect do |service|
service.upload key, io.tap(&:rewind), checksum: checksum
end
end
# Delete the file at the `key` on all services.
def delete(key)
perform_across_services :delete, key
end
private
def each_service(&block)
[ primary, *mirrors ].each(&block)
end
def perform_across_services(method, *args)
# FIXME: Convert to be threaded
each_service.collect do |service|
service.public_send method, *args
end
end
end
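Expressed as the equivalent of a config/storage.yml entry (names and paths hypothetical), the configurator wires a mirror up like this:
configs = {
disk_primary: { service: "Disk", root: "/primary/storage" },
disk_backup: { service: "Disk", root: "/backup/storage" },
mirrored: { service: "Mirror", primary: :disk_primary, mirrors: [ :disk_backup ] }
}
service = ActiveStorage::Service.configure(:mirrored, configs)
service.upload("some-key", StringIO.new("hello")) # written to both disks
service.download("some-key") # answered by the primary only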

@ -0,0 +1,96 @@
require "aws-sdk"
require "active_support/core_ext/numeric/bytes"
# Wraps the Amazon Simple Storage Service (S3) as an Active Storage service.
# See `ActiveStorage::Service` for the generic API documentation that applies to all services.
class ActiveStorage::Service::S3Service < ActiveStorage::Service
attr_reader :client, :bucket, :upload_options
def initialize(access_key_id:, secret_access_key:, region:, bucket:, upload: {}, **options)
@client = Aws::S3::Resource.new(access_key_id: access_key_id, secret_access_key: secret_access_key, region: region, **options)
@bucket = @client.bucket(bucket)
@upload_options = upload
end
def upload(key, io, checksum: nil)
instrument :upload, key, checksum: checksum do
begin
object_for(key).put(upload_options.merge(body: io, content_md5: checksum))
rescue Aws::S3::Errors::BadDigest
raise ActiveStorage::IntegrityError
end
end
end
def download(key, &block)
if block_given?
instrument :streaming_download, key do
stream(key, &block)
end
else
instrument :download, key do
object_for(key).get.body.read.force_encoding(Encoding::BINARY)
end
end
end
def delete(key)
instrument :delete, key do
object_for(key).delete
end
end
def exist?(key)
instrument :exist, key do |payload|
answer = object_for(key).exists?
payload[:exist] = answer
answer
end
end
def url(key, expires_in:, disposition:, filename:, content_type:)
instrument :url, key do |payload|
generated_url = object_for(key).presigned_url :get, expires_in: expires_in,
response_content_disposition: "#{disposition}; filename=\"#{filename}\"",
response_content_type: content_type
payload[:url] = generated_url
generated_url
end
end
def url_for_direct_upload(key, expires_in:, content_type:, content_length:, checksum:)
instrument :url, key do |payload|
generated_url = object_for(key).presigned_url :put, expires_in: expires_in,
content_type: content_type, content_length: content_length, content_md5: checksum
payload[:url] = generated_url
generated_url
end
end
def headers_for_direct_upload(key, content_type:, checksum:, **)
{ "Content-Type" => content_type, "Content-MD5" => checksum }
end
private
def object_for(key)
bucket.object(key)
end
# Reads the object for the given key in chunks, yielding each to the block.
def stream(key, options = {}, &block)
object = object_for(key)
chunk_size = 5.megabytes
offset = 0
while offset < object.content_length
yield object.get(options.merge(range: "bytes=#{offset}-#{offset + chunk_size - 1}")).body.read.force_encoding(Encoding::BINARY)
offset += chunk_size
end
end
end

@ -0,0 +1,8 @@
require_relative "gem_version"
module ActiveStorage
# Returns the version of the currently loaded Active Storage as a <tt>Gem::Version</tt>
def self.version
gem_version
end
end

@ -0,0 +1,13 @@
require "fileutils"
namespace :activestorage do
desc "Copy over the migration needed to the application"
task :install do
migration_file_path = "db/migrate/#{Time.now.utc.strftime("%Y%m%d%H%M%S")}_active_storage_create_tables.rb"
FileUtils.mkdir_p Rails.root.join("db/migrate")
FileUtils.cp File.expand_path("../../active_storage/migration.rb", __FILE__), Rails.root.join(migration_file_path)
puts "Copied migration to #{migration_file_path}"
puts "Now run rails db:migrate to create the tables for Active Storage"
end
end

@ -0,0 +1,33 @@
{
"name": "activestorage",
"version": "0.1.1",
"description": "Attach cloud and local files in Rails applications",
"main": "app/assets/javascripts/activestorage.js",
"files": [
"app/assets/javascripts/*.js"
],
"homepage": "https://github.com/rails/activestorage",
"repository": {
"type": "git",
"url": "git+https://github.com/rails/activestorage.git"
},
"bugs": {
"url": "https://github.com/rails/activestorage/issues"
},
"author": "Javan Makhmali <javan@javan.us>",
"license": "MIT",
"devDependencies": {
"babel-core": "^6.25.0",
"babel-loader": "^7.1.1",
"babel-preset-env": "^1.6.0",
"eslint": "^4.3.0",
"eslint-plugin-import": "^2.7.0",
"spark-md5": "^3.0.0",
"webpack": "^3.4.0"
},
"scripts": {
"prebuild": "yarn lint",
"build": "webpack -p",
"lint": "eslint app/javascript"
}
}

@ -0,0 +1,122 @@
require "test_helper"
require "database/setup"
if SERVICE_CONFIGURATIONS[:s3] && SERVICE_CONFIGURATIONS[:s3][:access_key_id].present?
class ActiveStorage::S3DirectUploadsControllerTest < ActionDispatch::IntegrationTest
setup do
@old_service = ActiveStorage::Blob.service
ActiveStorage::Blob.service = ActiveStorage::Service.configure(:s3, SERVICE_CONFIGURATIONS)
end
teardown do
ActiveStorage::Blob.service = @old_service
end
test "creating new direct upload" do
checksum = Digest::MD5.base64digest("Hello")
post rails_direct_uploads_url, params: { blob: {
filename: "hello.txt", byte_size: 6, checksum: checksum, content_type: "text/plain" } }
@response.parsed_body.tap do |details|
assert_equal ActiveStorage::Blob.find(details["id"]), ActiveStorage::Blob.find_signed(details["signed_id"])
assert_equal "hello.txt", details["filename"]
assert_equal 6, details["byte_size"]
assert_equal checksum, details["checksum"]
assert_equal "text/plain", details["content_type"]
assert_match SERVICE_CONFIGURATIONS[:s3][:bucket], details["direct_upload"]["url"]
assert_match %r{s3\.(\S+)?amazonaws\.com}, details["direct_upload"]["url"]
assert_equal({ "Content-Type" => "text/plain", "Content-MD5" => checksum }, details["direct_upload"]["headers"])
end
end
end
else
puts "Skipping S3 Direct Upload tests because no S3 configuration was supplied"
end
if SERVICE_CONFIGURATIONS[:gcs]
class ActiveStorage::GCSDirectUploadsControllerTest < ActionDispatch::IntegrationTest
setup do
@config = SERVICE_CONFIGURATIONS[:gcs]
@old_service = ActiveStorage::Blob.service
ActiveStorage::Blob.service = ActiveStorage::Service.configure(:gcs, SERVICE_CONFIGURATIONS)
end
teardown do
ActiveStorage::Blob.service = @old_service
end
test "creating new direct upload" do
checksum = Digest::MD5.base64digest("Hello")
post rails_direct_uploads_url, params: { blob: {
filename: "hello.txt", byte_size: 6, checksum: checksum, content_type: "text/plain" } }
@response.parsed_body.tap do |details|
assert_equal ActiveStorage::Blob.find(details["id"]), ActiveStorage::Blob.find_signed(details["signed_id"])
assert_equal "hello.txt", details["filename"]
assert_equal 6, details["byte_size"]
assert_equal checksum, details["checksum"]
assert_equal "text/plain", details["content_type"]
assert_match %r{storage\.googleapis\.com/#{@config[:bucket]}}, details["direct_upload"]["url"]
assert_equal({ "Content-Type" => "text/plain", "Content-MD5" => checksum }, details["direct_upload"]["headers"])
end
end
end
else
puts "Skipping GCS Direct Upload tests because no GCS configuration was supplied"
end
if SERVICE_CONFIGURATIONS[:azure]
class ActiveStorage::AzureStorageDirectUploadsControllerTest < ActionDispatch::IntegrationTest
setup do
@config = SERVICE_CONFIGURATIONS[:azure]
@old_service = ActiveStorage::Blob.service
ActiveStorage::Blob.service = ActiveStorage::Service.configure(:azure, SERVICE_CONFIGURATIONS)
end
teardown do
ActiveStorage::Blob.service = @old_service
end
test "creating new direct upload" do
checksum = Digest::MD5.base64digest("Hello")
post rails_direct_uploads_url, params: { blob: {
filename: "hello.txt", byte_size: 6, checksum: checksum, content_type: "text/plain" } }
@response.parsed_body.tap do |details|
assert_equal ActiveStorage::Blob.find(details["id"]), ActiveStorage::Blob.find_signed(details["signed_id"])
assert_equal "hello.txt", details["filename"]
assert_equal 6, details["byte_size"]
assert_equal checksum, details["checksum"]
assert_equal "text/plain", details["content_type"]
assert_match %r{#{@config[:storage_account_name]}\.blob\.core\.windows\.net/#{@config[:container]}}, details["direct_upload"]["url"]
assert_equal({ "Content-Type" => "text/plain", "Content-MD5" => checksum, "x-ms-blob-type" => "BlockBlob" }, details["direct_upload"]["headers"])
end
end
end
else
puts "Skipping Azure Storage Direct Upload tests because no Azure Storage configuration was supplied"
end
class ActiveStorage::DiskDirectUploadsControllerTest < ActionDispatch::IntegrationTest
test "creating new direct upload" do
checksum = Digest::MD5.base64digest("Hello")
post rails_direct_uploads_url, params: { blob: {
filename: "hello.txt", byte_size: 6, checksum: checksum, content_type: "text/plain" } }
@response.parsed_body.tap do |details|
assert_equal ActiveStorage::Blob.find(details["id"]), ActiveStorage::Blob.find_signed(details["signed_id"])
assert_equal "hello.txt", details["filename"]
assert_equal 6, details["byte_size"]
assert_equal checksum, details["checksum"]
assert_equal "text/plain", details["content_type"]
assert_match %r{rails/active_storage/disk}, details["direct_upload"]["url"]
assert_equal({ "Content-Type" => "text/plain" }, details["direct_upload"]["headers"])
end
end
end

@ -0,0 +1,57 @@
require "test_helper"
require "database/setup"
class ActiveStorage::DiskControllerTest < ActionDispatch::IntegrationTest
test "showing blob inline" do
blob = create_blob
get blob.service_url
assert_equal "inline; filename=\"#{blob.filename.base}\"", @response.headers["Content-Disposition"]
assert_equal "text/plain", @response.headers["Content-Type"]
end
test "showing blob as attachment" do
blob = create_blob
get blob.service_url(disposition: :attachment)
assert_equal "attachment; filename=\"#{blob.filename.base}\"", @response.headers["Content-Disposition"]
assert_equal "text/plain", @response.headers["Content-Type"]
end
test "directly uploading blob with integrity" do
data = "Something else entirely!"
blob = create_blob_before_direct_upload byte_size: data.size, checksum: Digest::MD5.base64digest(data)
put blob.service_url_for_direct_upload, params: data, headers: { "Content-Type" => "text/plain" }
assert_response :no_content
assert_equal data, blob.download
end
test "directly uploading blob without integrity" do
data = "Something else entirely!"
blob = create_blob_before_direct_upload byte_size: data.size, checksum: Digest::MD5.base64digest("bad data")
put blob.service_url_for_direct_upload, params: data
assert_response :unprocessable_entity
assert_not blob.service.exist?(blob.key)
end
test "directly uploading blob with mismatched content type" do
data = "Something else entirely!"
blob = create_blob_before_direct_upload byte_size: data.size, checksum: Digest::MD5.base64digest(data)
put blob.service_url_for_direct_upload, params: data, headers: { "Content-Type" => "application/octet-stream" }
assert_response :unprocessable_entity
assert_not blob.service.exist?(blob.key)
end
test "directly uploading blob with mismatched content length" do
data = "Something else entirely!"
blob = create_blob_before_direct_upload byte_size: data.size - 1, checksum: Digest::MD5.base64digest(data)
put blob.service_url_for_direct_upload, params: data, headers: { "Content-Type" => "text/plain" }
assert_response :unprocessable_entity
assert_not blob.service.exist?(blob.key)
end
end

@ -0,0 +1,21 @@
require "test_helper"
require "database/setup"
class ActiveStorage::VariantsControllerTest < ActionDispatch::IntegrationTest
setup do
@blob = create_image_blob filename: "racecar.jpg"
end
test "showing variant inline" do
get rails_blob_variation_url(
filename: @blob.filename,
signed_blob_id: @blob.signed_id,
variation_key: ActiveStorage::Variation.encode(resize: "100x100"))
assert_redirected_to %r{racecar\.jpg\?.*disposition=inline}
image = read_image_variant(@blob.variant(resize: "100x100"))
assert_equal 100, image.width
assert_equal 67, image.height
end
end

@ -0,0 +1,7 @@
class ActiveStorageCreateUsers < ActiveRecord::Migration[5.1]
def change
create_table :users do |t|
t.string :name
end
end
end

@ -0,0 +1,6 @@
require "active_storage/migration"
require_relative "create_users_migration"
ActiveRecord::Base.establish_connection(adapter: "sqlite3", database: ":memory:")
ActiveStorageCreateTables.migrate(:up)
ActiveStorageCreateUsers.migrate(:up)

@ -0,0 +1,3 @@
require_relative "config/application"
Rails.application.load_tasks

@ -0,0 +1,5 @@
//= link_tree ../images
//= link_directory ../javascripts .js
//= link_directory ../stylesheets .css
//= link active_storage_manifest.js

@ -0,0 +1,13 @@
// This is a manifest file that'll be compiled into application.js, which will include all the files
// listed below.
//
// Any JavaScript/Coffee file within this directory, lib/assets/javascripts, vendor/assets/javascripts,
// or any plugin's vendor/assets/javascripts directory can be referenced here using a relative path.
//
// It's not advisable to add code directly here, but if you do, it'll appear at the bottom of the
// compiled file. JavaScript code in this file should be added after the last require_* statement.
//
// Read Sprockets README (https://github.com/rails/sprockets#sprockets-directives) for details
// about supported directives.
//
//= require_tree .

@ -0,0 +1,15 @@
/*
* This is a manifest file that'll be compiled into application.css, which will include all the files
* listed below.
*
* Any CSS and SCSS file within this directory, lib/assets/stylesheets, vendor/assets/stylesheets,
* or any plugin's vendor/assets/stylesheets directory can be referenced here using a relative path.
*
* You're free to add application-wide styles to this file and they'll appear at the bottom of the
* compiled file so the styles you add here take precedence over styles defined in any other CSS/SCSS
* files in this directory. Styles in this file should be added after the last require_* statement.
* It is generally better to create a new file per style scope.
*
*= require_tree .
*= require_self
*/

@ -0,0 +1,3 @@
class ApplicationController < ActionController::Base
protect_from_forgery with: :exception
end

@ -0,0 +1,2 @@
module ApplicationHelper
end

@ -0,0 +1,2 @@
class ApplicationJob < ActiveJob::Base
end

@ -0,0 +1,3 @@
class ApplicationRecord < ActiveRecord::Base
self.abstract_class = true
end

@ -0,0 +1,14 @@
<!DOCTYPE html>
<html>
<head>
<title>Dummy</title>
<%= csrf_meta_tags %>
<%= stylesheet_link_tag 'application', media: 'all' %>
<%= javascript_include_tag 'application' %>
</head>
<body>
<%= yield %>
</body>
</html>

@ -0,0 +1,3 @@
#!/usr/bin/env ruby
ENV["BUNDLE_GEMFILE"] ||= File.expand_path("../../Gemfile", __FILE__)
load Gem.bin_path("bundler", "bundle")

@ -0,0 +1,4 @@
#!/usr/bin/env ruby
APP_PATH = File.expand_path("../config/application", __dir__)
require_relative "../config/boot"
require "rails/commands"

@ -0,0 +1,4 @@
#!/usr/bin/env ruby
require_relative "../config/boot"
require "rake"
Rake.application.run

@ -0,0 +1,11 @@
#!/usr/bin/env ruby
VENDOR_PATH = File.expand_path("..", __dir__)
Dir.chdir(VENDOR_PATH) do
begin
exec "yarnpkg #{ARGV.join(" ")}"
rescue Errno::ENOENT
$stderr.puts "Yarn executable was not detected in the system."
$stderr.puts "Download Yarn at https://yarnpkg.com/en/docs/install"
exit 1
end
end

@ -0,0 +1,5 @@
# This file is used by Rack-based servers to start the application.
require_relative "config/environment"
run Rails.application

@ -0,0 +1,24 @@
require_relative "boot"
require "rails"
require "active_model/railtie"
require "active_job/railtie"
require "active_record/railtie"
require "action_controller/railtie"
require "action_view/railtie"
require "sprockets/railtie"
require "active_storage/engine"
#require "action_mailer/railtie"
#require "rails/test_unit/railtie"
#require "action_cable/engine"
Bundler.require(*Rails.groups)
module Dummy
class Application < Rails::Application
config.load_defaults 5.1
config.active_storage.service = :local
end
end

@ -0,0 +1,5 @@
# Set up gems listed in the Gemfile.
ENV["BUNDLE_GEMFILE"] ||= File.expand_path("../../../Gemfile", __dir__)
require "bundler/setup" if File.exist?(ENV["BUNDLE_GEMFILE"])
$LOAD_PATH.unshift File.expand_path("../../../lib", __dir__)

@ -0,0 +1,25 @@
# SQLite version 3.x
# gem install sqlite3
#
# Ensure the SQLite 3 gem is defined in your Gemfile
# gem 'sqlite3'
#
default: &default
adapter: sqlite3
pool: <%= ENV.fetch("RAILS_MAX_THREADS") { 5 } %>
timeout: 5000
development:
<<: *default
database: db/development.sqlite3
# Warning: The database defined as "test" will be erased and
# re-generated from your development database when you run "rake".
# Do not set this db to the same as development or production.
test:
<<: *default
database: db/test.sqlite3
production:
<<: *default
database: db/production.sqlite3

@ -0,0 +1,5 @@
# Load the Rails application.
require_relative "application"
# Initialize the Rails application.
Rails.application.initialize!

@ -0,0 +1,49 @@
Rails.application.configure do
# Settings specified here will take precedence over those in config/application.rb.
# In the development environment your application's code is reloaded on
# every request. This slows down response time but is perfect for development
# since you don't have to restart the web server when you make code changes.
config.cache_classes = false
# Do not eager load code on boot.
config.eager_load = false
# Show full error reports.
config.consider_all_requests_local = true
# Enable/disable caching. By default caching is disabled.
if Rails.root.join("tmp/caching-dev.txt").exist?
config.action_controller.perform_caching = true
config.cache_store = :memory_store
config.public_file_server.headers = {
"Cache-Control" => "public, max-age=#{2.days.seconds.to_i}"
}
else
config.action_controller.perform_caching = false
config.cache_store = :null_store
end
# Print deprecation notices to the Rails logger.
config.active_support.deprecation = :log
# Raise an error on page load if there are pending migrations.
config.active_record.migration_error = :page_load
# Debug mode disables concatenation and preprocessing of assets.
# This option may cause significant delays in view rendering with a large
# number of complex assets.
config.assets.debug = true
# Suppress logger output for asset requests.
config.assets.quiet = true
# Raises error for missing translations
# config.action_view.raise_on_missing_translations = true
# Use an evented file watcher to asynchronously detect changes in source code,
# routes, locales, etc. This feature depends on the listen gem.
# config.file_watcher = ActiveSupport::EventedFileUpdateChecker
end

@ -0,0 +1,82 @@
Rails.application.configure do
# Settings specified here will take precedence over those in config/application.rb.
# Code is not reloaded between requests.
config.cache_classes = true
# Eager load code on boot. This eager loads most of Rails and
# your application in memory, allowing both threaded web servers
# and those relying on copy on write to perform better.
# Rake tasks automatically ignore this option for performance.
config.eager_load = true
# Full error reports are disabled and caching is turned on.
config.consider_all_requests_local = false
config.action_controller.perform_caching = true
# Attempt to read encrypted secrets from `config/secrets.yml.enc`.
# Requires an encryption key in `ENV["RAILS_MASTER_KEY"]` or
# `config/secrets.yml.key`.
config.read_encrypted_secrets = true
# Disable serving static files from the `/public` folder by default since
# Apache or NGINX already handles this.
config.public_file_server.enabled = ENV["RAILS_SERVE_STATIC_FILES"].present?
# Compress JavaScripts and CSS.
config.assets.js_compressor = :uglifier
# config.assets.css_compressor = :sass
# Do not fallback to assets pipeline if a precompiled asset is missed.
config.assets.compile = false
# `config.assets.precompile` and `config.assets.version` have moved to config/initializers/assets.rb
# Enable serving of images, stylesheets, and JavaScripts from an asset server.
# config.action_controller.asset_host = 'http://assets.example.com'
# Specifies the header that your server uses for sending files.
# config.action_dispatch.x_sendfile_header = 'X-Sendfile' # for Apache
# config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for NGINX
# Mount Action Cable outside main process or domain
# config.action_cable.mount_path = nil
# config.action_cable.url = 'wss://example.com/cable'
# config.action_cable.allowed_request_origins = [ 'http://example.com', /http:\/\/example.*/ ]
# Force all access to the app over SSL, use Strict-Transport-Security, and use secure cookies.
# config.force_ssl = true
# Use the lowest log level to ensure availability of diagnostic information
# when problems arise.
config.log_level = :debug
# Prepend all log lines with the following tags.
config.log_tags = [ :request_id ]
# Use a different cache store in production.
# config.cache_store = :mem_cache_store
# Enable locale fallbacks for I18n (makes lookups for any locale fall back to
# the I18n.default_locale when a translation cannot be found).
config.i18n.fallbacks = true
# Send deprecation notices to registered listeners.
config.active_support.deprecation = :notify
# Use default logging formatter so that PID and timestamp are not suppressed.
config.log_formatter = ::Logger::Formatter.new
# Use a different logger for distributed setups.
# require 'syslog/logger'
# config.logger = ActiveSupport::TaggedLogging.new(Syslog::Logger.new 'app-name')
if ENV["RAILS_LOG_TO_STDOUT"].present?
logger = ActiveSupport::Logger.new(STDOUT)
logger.formatter = config.log_formatter
config.logger = ActiveSupport::TaggedLogging.new(logger)
end
# Do not dump schema after migrations.
config.active_record.dump_schema_after_migration = false
end

@ -0,0 +1,33 @@
Rails.application.configure do
# Settings specified here will take precedence over those in config/application.rb.
# The test environment is used exclusively to run your application's
# test suite. You never need to work with it otherwise. Remember that
# your test database is "scratch space" for the test suite and is wiped
# and recreated between test runs. Don't rely on the data there!
config.cache_classes = true
# Do not eager load code on boot. This avoids loading your whole application
# just for the purpose of running a single test. If you are using a tool that
# preloads Rails for running tests, you may have to set it to true.
config.eager_load = false
# Configure public file server for tests with Cache-Control for performance.
config.public_file_server.enabled = true
config.public_file_server.headers = {
"Cache-Control" => "public, max-age=#{1.hour.seconds.to_i}"
}
# Show full error reports and disable caching.
config.consider_all_requests_local = true
config.action_controller.perform_caching = false
# Raise exceptions instead of rendering exception templates.
config.action_dispatch.show_exceptions = false
# Print deprecation notices to the stderr.
config.active_support.deprecation = :stderr
# Raises error for missing translations
# config.action_view.raise_on_missing_translations = true
end

@ -0,0 +1,6 @@
# Be sure to restart your server when you modify this file.
# ApplicationController.renderer.defaults.merge!(
# http_host: 'example.org',
# https: false
# )

@ -0,0 +1,14 @@
# Be sure to restart your server when you modify this file.
# Version of your assets, change this if you want to expire all your assets.
Rails.application.config.assets.version = "1.0"
# Add additional assets to the asset load path.
# Rails.application.config.assets.paths << Emoji.images_path
# Add Yarn node_modules folder to the asset load path.
Rails.application.config.assets.paths << Rails.root.join("node_modules")
# Precompile additional assets.
# application.js, application.css, and all non-JS/CSS in the app/assets
# folder are already added.
# Rails.application.config.assets.precompile += %w( admin.js admin.css )

@ -0,0 +1,7 @@
# Be sure to restart your server when you modify this file.
# You can add backtrace silencers for libraries that you're using but don't wish to see in your backtraces.
# Rails.backtrace_cleaner.add_silencer { |line| line =~ /my_noisy_library/ }
# You can also remove all the silencers if you're trying to debug a problem that might stem from framework code.
# Rails.backtrace_cleaner.remove_silencers!

@ -0,0 +1,5 @@
# Be sure to restart your server when you modify this file.
# Specify a serializer for the signed and encrypted cookie jars.
# Valid options are :json, :marshal, and :hybrid.
Rails.application.config.action_dispatch.cookies_serializer = :json

@ -0,0 +1,4 @@
# Be sure to restart your server when you modify this file.
# Configure sensitive parameters which will be filtered from the log file.
Rails.application.config.filter_parameters += [:password]

@ -0,0 +1,16 @@
# Be sure to restart your server when you modify this file.
# Add new inflection rules using the following format. Inflections
# are locale specific, and you may define rules for as many different
# locales as you wish. All of these examples are active by default:
# ActiveSupport::Inflector.inflections(:en) do |inflect|
# inflect.plural /^(ox)$/i, '\1en'
# inflect.singular /^(ox)en/i, '\1'
# inflect.irregular 'person', 'people'
# inflect.uncountable %w( fish sheep )
# end
# These inflection rules are supported but not enabled by default:
# ActiveSupport::Inflector.inflections(:en) do |inflect|
# inflect.acronym 'RESTful'
# end

@ -0,0 +1,4 @@
# Be sure to restart your server when you modify this file.
# Add new mime types for use in respond_to blocks:
# Mime::Type.register "text/richtext", :rtf

@ -0,0 +1,14 @@
# Be sure to restart your server when you modify this file.
# This file contains settings for ActionController::ParamsWrapper which
# is enabled by default.
# Enable parameter wrapping for JSON. You can disable this by setting :format to an empty array.
ActiveSupport.on_load(:action_controller) do
wrap_parameters format: [:json]
end
# To enable root element in JSON for ActiveRecord objects.
# ActiveSupport.on_load(:active_record) do
# self.include_root_in_json = true
# end

@ -0,0 +1,2 @@
Rails.application.routes.draw do
end

@ -0,0 +1,32 @@
# Be sure to restart your server when you modify this file.
# Your secret key is used for verifying the integrity of signed cookies.
# If you change this key, all old signed cookies will become invalid!
# Make sure the secret is at least 30 characters and all random,
# no regular words or you'll be exposed to dictionary attacks.
# You can use `rails secret` to generate a secure secret key.
# Make sure the secrets in this file are kept private
# if you're sharing your code publicly.
# Shared secrets are available across all environments.
# shared:
# api_key: a1B2c3D4e5F6
# Environmental secrets are only available for that specific environment.
development:
secret_key_base: e0ef5744b10d988669be6b2660c259749779964f3dcb487fd6199743b3558e2d89f7681d6a15d16d144e28979cbdae41885f4fb4c2cf56ff92ac22df282ffb66
test:
secret_key_base: 6fb1f3a828a8dcd6ac8dc07b43be4a5265ad64379120d417252a1578fe1f790e7b85ade4f95994de1ac8fb78581690de6e3a6ac4af36a0f0139667418c750d05
# Do not keep production secrets in the unencrypted secrets file.
# Instead, either read values from the environment.
# Or, use `bin/rails secrets:setup` to configure encrypted secrets
# and move the `production:` environment over there.
production:
secret_key_base: <%= ENV["SECRET_KEY_BASE"] %>

@ -0,0 +1,6 @@
%w(
.ruby-version
.rbenv-vars
tmp/restart.txt
tmp/caching-dev.txt
).each { |path| Spring.watch(path) }

@ -0,0 +1,3 @@
local:
service: Disk
root: <%= Rails.root.join("storage") %>

Some files were not shown because too many files have changed in this diff.