Add ActiveStorage::Blob.compose
This commit is contained in:
parent
481343ed91
commit
79a5e0b759
@ -1,3 +1,7 @@
|
||||
* Add `ActiveStorage::Blob.compose` to concatenate multiple blobs.
|
||||
|
||||
*Gannon McGibbon*
|
||||
|
||||
* Setting custom metadata on blobs is now persisted to remote storage.
|
||||
|
||||
*joshuamsager*
|
||||
|
@ -39,7 +39,7 @@ class ActiveStorage::Blob < ActiveStorage::Record
|
||||
MINIMUM_TOKEN_LENGTH = 28

has_secure_token :key, length: MINIMUM_TOKEN_LENGTH
# :composed marks blobs produced by Blob.compose; such blobs carry no
# checksum, so other code skips integrity checks for them.
# (Diff residue removed: only the updated +store+ declaration is kept.)
store :metadata, accessors: [ :analyzed, :identified, :composed ], coder: ActiveRecord::Coders::JSON

class_attribute :services, default: {}
class_attribute :service, instance_accessor: false
|
||||
@ -59,6 +59,7 @@ class ActiveStorage::Blob < ActiveStorage::Record
|
||||
end
|
||||
|
||||
validates :service_name, presence: true
# Composed blobs are written server-side without a client-supplied checksum,
# so presence of :checksum is only required for non-composed blobs.
validates :checksum, presence: true, unless: :composed
|
||||
|
||||
validate do
|
||||
if service_name_changed? && service_name.present?
|
||||
@ -145,6 +146,20 @@ def scope_for_strict_loading # :nodoc:
|
||||
all
|
||||
end
|
||||
end
|
||||
|
||||
# Concatenate multiple blobs into a single "composed" blob.
#
# All +blobs+ must already be persisted; raises ActiveRecord::RecordNotSaved
# otherwise. The content type defaults to the first non-nil content type
# among the source blobs, and the byte size is the sum of the sources'.
def compose(filename:, blobs:, content_type: nil, metadata: nil)
  raise ActiveRecord::RecordNotSaved, "All blobs must be persisted." if blobs.any? { |blob| !blob.persisted? }

  content_type ||= blobs.pluck(:content_type).compact.first

  combined_blob = new(
    filename: filename,
    content_type: content_type,
    metadata: metadata,
    byte_size: blobs.sum(&:byte_size)
  )
  combined_blob.compose(*blobs.pluck(:key))
  combined_blob.save!
  combined_blob
end
|
||||
end
|
||||
|
||||
# Returns a signed ID for this blob that's suitable for reference on the client-side without fear of tampering.
|
||||
@ -255,6 +270,11 @@ def upload_without_unfurling(io) # :nodoc:
|
||||
service.upload key, io, checksum: checksum, **service_metadata
|
||||
end
|
||||
|
||||
# Flags this blob as composed and asks the service to concatenate the files
# stored at +keys+ into this blob's own key. Because the service performs the
# concatenation, no client-side checksum is available for the result.
def compose(*keys) # :nodoc:
  self.composed = true
  service.compose(*keys, key, **service_metadata)
end
|
||||
|
||||
# Downloads the file associated with this blob. If no block is given, the entire file is read into memory and returned.
|
||||
# That'll use a lot of RAM for very large files. If a block is given, then the download is streamed and yielded in chunks.
|
||||
def download(&block)
|
||||
@ -280,8 +300,14 @@ def download_chunk(range)
|
||||
#
|
||||
# Raises ActiveStorage::IntegrityError if the downloaded data does not match the blob's checksum.
|
||||
# Downloads the blob to a tempfile on disk and yields it to the block.
#
# Composed blobs have no checksum, so integrity verification is disabled
# for them via +verify: !composed+.
# (Diff residue removed: the pre-diff two-line +service.open+ call has been
# collapsed into the updated keyword-argument form.)
def open(tmpdir: nil, &block)
  service.open(
    key,
    checksum: checksum,
    verify: !composed,
    name: [ "ActiveStorage-#{id}-", filename.extension_with_delimiter ],
    tmpdir: tmpdir,
    &block
  )
end
|
||||
|
||||
def mirror_later # :nodoc:
|
||||
|
@ -10,7 +10,7 @@ def change
|
||||
t.text :metadata
|
||||
t.string :service_name, null: false
|
||||
t.bigint :byte_size, null: false
# NULL allowed: blobs created via Blob.compose have no checksum.
# (Diff residue removed: the old `null: false` checksum line is dropped.)
t.string :checksum
|
||||
|
||||
if connection.supports_datetime_with_precision?
|
||||
t.datetime :created_at, precision: 6, null: false
|
||||
|
5
activestorage/db/update_migrate/20211119233751_remove_not_null_on_active_storage_blobs_checksum.rb
Normal file
5
activestorage/db/update_migrate/20211119233751_remove_not_null_on_active_storage_blobs_checksum.rb
Normal file
@ -0,0 +1,5 @@
|
||||
# Relaxes the NOT NULL constraint on active_storage_blobs.checksum so that
# composed blobs, which carry no checksum, can be persisted.
class RemoveNotNullOnActiveStorageBlobsChecksum < ActiveRecord::Migration[6.0]
  def change
    change_column_null :active_storage_blobs, :checksum, true
  end
end
|
@ -8,10 +8,10 @@ def initialize(service)
|
||||
@service = service
|
||||
end
|
||||
|
||||
# Downloads the file at +key+ into a tempfile and yields it.
#
# +checksum+ may be nil and verification can be skipped with +verify: false+
# — used for composed blobs, which have no checksum.
# (Diff residue removed: the pre-diff signature and unconditional
# +verify_integrity_of+ call are dropped in favor of the updated ones.)
def open(key, checksum: nil, verify: true, name: "ActiveStorage-", tmpdir: nil)
  open_tempfile(name, tmpdir) do |file|
    download key, file
    verify_integrity_of(file, checksum: checksum) if verify
    yield file
  end
end
|
||||
|
@ -90,6 +90,11 @@ def open(*args, **options, &block)
|
||||
ActiveStorage::Downloader.new(self).open(*args, **options, &block)
|
||||
end
|
||||
|
||||
# Concatenate the files at +source_keys+ into a single "composed" file at
# +destination_key+. Abstract: concrete services (Disk, S3, GCS, Azure)
# provide the implementation; the return value is service-specific.
def compose(*source_keys, destination_key, filename: nil, content_type: nil, disposition: nil, custom_metadata: {})
  raise NotImplementedError
end
|
||||
|
||||
# Delete the file at the +key+.
|
||||
def delete(key)
|
||||
raise NotImplementedError
|
||||
|
@ -107,6 +107,24 @@ def headers_for_direct_upload(key, content_type:, checksum:, filename: nil, disp
|
||||
{ "Content-Type" => content_type, "Content-MD5" => checksum, "x-ms-blob-content-disposition" => content_disposition, "x-ms-blob-type" => "BlockBlob", **custom_metadata_headers(custom_metadata) }
|
||||
end
|
||||
|
||||
# Concatenates the blobs at +source_keys+ into a single append blob at
# +destination_key+ by streaming each source and appending its chunks.
def compose(*source_keys, destination_key, filename: nil, content_type: nil, disposition: nil, custom_metadata: {})
  # Content-Disposition is only set when both pieces are supplied.
  content_disposition = content_disposition_with(type: disposition, filename: filename) if disposition && filename

  # Append blobs allow incremental writes, unlike block blobs.
  client.create_append_blob(
    container,
    destination_key,
    content_type: content_type,
    content_disposition: content_disposition,
    metadata: custom_metadata,
  ).tap do |blob|
    source_keys.each do |source_key|
      stream(source_key) do |chunk|
        client.append_blob_block(container, blob.name, chunk)
      end
    end
  end
end
|
||||
|
||||
private
|
||||
def private_url(key, expires_in:, filename:, disposition:, content_type:, **)
|
||||
signer.signed_uri(
|
||||
|
@ -100,6 +100,16 @@ def path_for(key) # :nodoc:
|
||||
File.join root, folder_for(key), key
|
||||
end
|
||||
|
||||
# Concatenates the files at +source_keys+ into a single file at
# +destination_key+ on the local disk. Extra service keywords
# (filename, content_type, …) are accepted and ignored.
def compose(*source_keys, destination_key, **)
  # "wb": binary mode — plain "w" would apply newline translation on
  # Windows and corrupt binary blob data.
  File.open(make_path_for(destination_key), "wb") do |destination_file|
    source_keys.each do |source_key|
      # Given a path, IO.copy_stream opens the source in binary mode
      # and closes it itself.
      IO.copy_stream(path_for(source_key), destination_file)
    end
  end
end
|
||||
|
||||
private
|
||||
def private_url(key, expires_in:, filename:, content_type:, disposition:, **)
|
||||
generate_url(key, expires_in: expires_in, filename: filename, content_type: content_type, disposition: disposition)
|
||||
|
@ -134,6 +134,14 @@ def headers_for_direct_upload(key, checksum:, filename: nil, disposition: nil, c
|
||||
headers
|
||||
end
|
||||
|
||||
# Concatenates the objects at +source_keys+ into +destination_key+ using
# GCS's server-side compose, then updates the composed object's metadata.
def compose(*source_keys, destination_key, filename: nil, content_type: nil, disposition: nil, custom_metadata: {})
  bucket.compose(source_keys, destination_key).update do |file|
    file.content_type = content_type
    # Content-Disposition is only set when both pieces are supplied.
    file.content_disposition = content_disposition_with(type: disposition, filename: filename) if disposition && filename
    file.metadata = custom_metadata
  end
end
|
||||
|
||||
private
|
||||
def private_url(key, expires_in:, filename:, content_type:, disposition:, **)
|
||||
args = {
|
||||
|
@ -14,7 +14,7 @@ class Service::MirrorService < Service
|
||||
attr_reader :primary, :mirrors
|
||||
|
||||
# Reads (and compose) go straight to the primary service; only writes are
# mirrored. (Diff residue removed: the pre-diff continuation line without
# :compose is dropped.)
delegate :download, :download_chunk, :exist?, :url,
  :url_for_direct_upload, :headers_for_direct_upload, :path_for, :compose, to: :primary
|
||||
|
||||
# Stitch together from named services.
|
||||
def self.build(primary:, mirrors:, name:, configurator:, **options) # :nodoc:
|
||||
|
@ -95,6 +95,24 @@ def headers_for_direct_upload(key, content_type:, checksum:, filename: nil, disp
|
||||
{ "Content-Type" => content_type, "Content-MD5" => checksum, "Content-Disposition" => content_disposition, **custom_metadata_headers(custom_metadata) }
|
||||
end
|
||||
|
||||
# Concatenates the objects at +source_keys+ into +destination_key+ by
# streaming each source through a multipart upload to the destination.
def compose(*source_keys, destination_key, filename: nil, content_type: nil, disposition: nil, custom_metadata: {})
  # Content-Disposition is only set when both pieces are supplied.
  content_disposition = content_disposition_with(type: disposition, filename: filename) if disposition && filename

  object_for(destination_key).upload_stream(
    content_type: content_type,
    content_disposition: content_disposition,
    part_size: MINIMUM_UPLOAD_PART_SIZE,
    metadata: custom_metadata,
    **upload_options
  ) do |out|
    source_keys.each do |source_key|
      stream(source_key) do |chunk|
        IO.copy_stream(StringIO.new(chunk), out)
      end
    end
  end
end
|
||||
|
||||
private
|
||||
def private_url(key, expires_in:, filename:, disposition:, content_type:, **client_opts)
|
||||
object_for(key).presigned_url :get, expires_in: expires_in.to_i,
|
||||
|
@ -84,6 +84,26 @@ class ActiveStorage::BlobTest < ActiveSupport::TestCase
|
||||
assert_match(/^[a-z0-9]{28}$/, build_blob_after_unfurling.key)
|
||||
end
|
||||
|
||||
test "compose" do
|
||||
blobs = 3.times.map { create_blob(data: "123", filename: "numbers.txt", content_type: "text/plain", identify: false) }
|
||||
blob = ActiveStorage::Blob.compose(filename: "all_numbers.txt", blobs: blobs)
|
||||
|
||||
assert_equal "123123123", blob.download
|
||||
assert_equal "text/plain", blob.content_type
|
||||
assert_equal blobs.first.byte_size * blobs.count, blob.byte_size
|
||||
assert_predicate(blob, :composed)
|
||||
assert_nil blob.checksum
|
||||
end
|
||||
|
||||
test "compose with unpersisted blobs" do
|
||||
blobs = 3.times.map { create_blob(data: "123", filename: "numbers.txt", content_type: "text/plain", identify: false).dup }
|
||||
|
||||
error = assert_raises(ActiveRecord::RecordNotSaved) do
|
||||
ActiveStorage::Blob.compose(filename: "all_numbers.txt", blobs: blobs)
|
||||
end
|
||||
assert_equal "All blobs must be persisted.", error.message
|
||||
end
|
||||
|
||||
test "image?" do
|
||||
blob = create_file_blob filename: "racecar.jpg"
|
||||
assert_predicate blob, :image?
|
||||
|
@ -138,5 +138,24 @@ module ActiveStorage::Service::SharedServiceTests
|
||||
@service.delete("#{key}/a/a/b")
|
||||
@service.delete("#{key}/a/b/a")
|
||||
end
|
||||
|
||||
test "compose" do
|
||||
keys = 3.times.map { SecureRandom.base58(24) }
|
||||
data = %w(To get her)
|
||||
keys.zip(data).each do |key, data|
|
||||
@service.upload(
|
||||
key,
|
||||
StringIO.new(data),
|
||||
checksum: Digest::MD5.base64digest(data),
|
||||
disposition: :attachment,
|
||||
filename: ActiveStorage::Filename.new("test.html"),
|
||||
content_type: "text/html",
|
||||
)
|
||||
end
|
||||
destination_key = SecureRandom.base58(24)
|
||||
@service.compose(*keys, destination_key)
|
||||
|
||||
assert_equal "Together", @service.download(destination_key)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
Loading…
Reference in New Issue
Block a user