Merge branch 'main' into postgres-index-nulls-not-distinct

Gregory Jones 2023-07-18 15:05:24 -04:00 committed by GitHub
commit 4837bcaeef
151 changed files with 2771 additions and 842 deletions

@ -74,6 +74,7 @@ PATH
activerecord (7.1.0.alpha)
activemodel (= 7.1.0.alpha)
activesupport (= 7.1.0.alpha)
timeout (>= 0.4.0)
activestorage (7.1.0.alpha)
actionpack (= 7.1.0.alpha)
activejob (= 7.1.0.alpha)
@ -334,7 +335,7 @@ GEM
mysql2 (0.5.4)
net-http-persistent (4.0.1)
connection_pool (~> 2.2)
net-imap (0.3.4)
net-imap (0.3.6)
date
net-protocol
net-pop (0.1.2)
@ -511,7 +512,7 @@ GEM
terser (1.1.13)
execjs (>= 0.3.0, < 3)
thor (1.2.2)
timeout (0.3.2)
timeout (0.4.0)
tomlrb (2.0.3)
trailblazer-option (0.1.2)
turbo-rails (1.3.2)

@ -1,3 +1,12 @@
* Add a `@server` instance variable referencing the `ActionCable.server`
singleton to `ActionCable::Channel::ConnectionStub`
This lets us delegate the `pubsub` and `config` method calls
to the server. This fixes `NoMethodError` errors when testing
channel logic that calls `pubsub` (e.g. `stop_stream_for`).
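A sketch of the kind of channel test this unblocks; the channel and its `leave` action are hypothetical, and `leave` is assumed to call `stop_stream_for` internally:
```ruby
class ChatChannelTest < ActionCable::Channel::TestCase
  test "stopping a stream no longer raises NoMethodError" do
    subscribe room_id: 42
    perform :leave   # previously blew up because ConnectionStub had no pubsub
    assert_no_streams
  end
end
```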
*Julian Foo*
* Added `health_check_path` and `health_check_application` configs to
mount a given health check Rack app on a given path.
Useful when mounting Action Cable standalone.
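A configuration sketch; the path and the Rack app below are illustrative stand-ins:
```ruby
# config/environments/production.rb
config.action_cable.health_check_path = "/up"
config.action_cable.health_check_application = ->(env) { [200, { "content-type" => "text/plain" }, ["ok"]] }
```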

@ -47,9 +47,12 @@ def start_periodic_timers; end
end
class ConnectionStub
attr_reader :transmissions, :identifiers, :subscriptions, :logger, :config
attr_reader :server, :transmissions, :identifiers, :subscriptions, :logger
delegate :pubsub, :config, to: :server
def initialize(identifiers = {})
@server = ActionCable.server
@transmissions = []
identifiers.each do |identifier, val|
@ -59,7 +62,6 @@ def initialize(identifiers = {})
@subscriptions = ActionCable::Connection::Subscriptions.new(self)
@identifiers = identifiers.keys
@logger = ActiveSupport::TaggedLogging.new ActiveSupport::Logger.new(StringIO.new)
@config = ActionCable::Server::Configuration.new
end
def transmit(cable_message)

@ -88,6 +88,10 @@ class StreamsTestChannel < ActionCable::Channel::Base
def subscribed
stream_from "test_#{params[:id] || 0}"
end
def unsubscribed
stop_stream_from "test_#{params[:id] || 0}"
end
end
class StreamsTestChannelTest < ActionCable::Channel::TestCase
@ -102,6 +106,13 @@ def test_stream_with_params
assert_has_stream "test_42"
end
def test_unsubscribe_from_stream
subscribe
unsubscribe
assert_no_streams
end
end
class StreamsForTestChannel < ActionCable::Channel::Base

@ -382,6 +382,8 @@ def []=(name, options)
# Removes the cookie on the client machine by setting the value to an empty string
# and the expiration date in the past. Like <tt>[]=</tt>, you can pass in
# an options hash to delete cookies with extra data such as a <tt>:path</tt>.
#
# Returns the value of the cookie, or +nil+ if the cookie does not exist.
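#
# For example (illustrative values):
#
#   cookies.delete(:user_name) # => "Joe"
#   cookies.delete(:missing)   # => nil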
def delete(name, options = {})
return unless @cookies.has_key? name.to_s

@ -554,6 +554,17 @@ def test_delete_cookie_with_path
assert_set_cookie_header "user_name=; path=/beaten; max-age=0; expires=Thu, 01 Jan 1970 00:00:00 GMT; SameSite=Lax"
end
def test_delete_cookie_return_value
request.cookies[:user_name] = "Joe"
return_value = request.cookies.delete(:user_name)
assert_equal "Joe", return_value
end
def test_delete_unexisting_cookie_return_value
return_value = request.cookies.delete(:no_such_cookie)
assert_nil return_value
end
def test_delete_unexisting_cookie
request.cookies.clear
get :delete_cookie

@ -1,3 +1,22 @@
* Use `Rails::HTML5::SafeListSanitizer` by default in the Rails 7.1 configuration if it is
supported.
Action Text's sanitizer can be configured by setting
`config.action_text.sanitizer_vendor`. Supported values are `Rails::HTML4::Sanitizer` or
`Rails::HTML5::Sanitizer`.
The Rails 7.1 configuration will set this to `Rails::HTML5::Sanitizer` when it is supported, and
fall back to `Rails::HTML4::Sanitizer`. Previous configurations default to
`Rails::HTML4::Sanitizer`.
As a result of this change, the defaults for `ActionText::ContentHelper.allowed_tags` and
`.allowed_attributes` are applied at runtime, so the value of these attributes is now `nil`
unless set by the application. You may call `sanitizer_allowed_tags` or
`sanitizer_allowed_attributes` to inspect the tags and attributes being allowed by the
sanitizer.
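A minimal configuration sketch; the vendor shown is illustrative, and either supported constant can be assigned:
```ruby
# config/application.rb
config.action_text.sanitizer_vendor = Rails::HTML5::Sanitizer # or Rails::HTML4::Sanitizer
```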
*Mike Dalessio*
* Attachables can now override the default template used for missing attachments.
When rendering Action Text attachments where the underlying attachable model has

@ -4,9 +4,9 @@
module ActionText
module ContentHelper
mattr_accessor(:sanitizer) { Rails::Html::Sanitizer.best_supported_vendor.safe_list_sanitizer.new }
mattr_accessor(:allowed_tags) { sanitizer.class.allowed_tags + [ ActionText::Attachment.tag_name, "figure", "figcaption" ] }
mattr_accessor(:allowed_attributes) { sanitizer.class.allowed_attributes + ActionText::Attachment::ATTRIBUTES }
mattr_accessor(:sanitizer, default: Rails::HTML4::Sanitizer.safe_list_sanitizer.new)
mattr_accessor(:allowed_tags)
mattr_accessor(:allowed_attributes)
mattr_accessor(:scrubber)
def render_action_text_content(content)
@ -15,7 +15,12 @@ def render_action_text_content(content)
end
def sanitize_action_text_content(content)
sanitizer.sanitize(content.to_html, tags: allowed_tags, attributes: allowed_attributes, scrubber: scrubber).html_safe
sanitizer.sanitize(
content.to_html,
tags: sanitizer_allowed_tags,
attributes: sanitizer_allowed_attributes,
scrubber: scrubber,
).html_safe
end
def render_action_text_attachments(content)
@ -48,5 +53,13 @@ def render_action_text_attachment(attachment, locals: {}) # :nodoc:
render(**options).chomp
end
def sanitizer_allowed_tags
allowed_tags || (sanitizer.class.allowed_tags + [ ActionText::Attachment.tag_name, "figure", "figcaption" ])
end
def sanitizer_allowed_attributes
allowed_attributes || (sanitizer.class.allowed_attributes + ActionText::Attachment::ATTRIBUTES)
end
end
end

@ -82,5 +82,11 @@ def to_trix_content_attachment_partial_path
initializer "action_text.configure" do |app|
ActionText::Attachment.tag_name = app.config.action_text.attachment_tag_name
end
config.after_initialize do |app|
if klass = app.config.action_text.sanitizer_vendor
ActionText::ContentHelper.sanitizer = klass.safe_list_sanitizer.new
end
end
end
end

@ -132,6 +132,32 @@ class ActionText::ContentTest < ActiveSupport::TestCase
assert_not defined?(::ApplicationController)
end
test "does basic sanitization" do
html = "<div onclick='action()'>safe<script>unsafe</script></div>"
rendered = content_from_html(html).to_rendered_html_with_layout
assert_not_includes rendered, "<script>"
assert_not_includes rendered, "action"
end
test "does custom tag sanitization" do
old_tags = ActionText::ContentHelper.allowed_tags
old_attrs = ActionText::ContentHelper.allowed_attributes
ActionText::ContentHelper.allowed_tags = ["div"] # not 'span'
ActionText::ContentHelper.allowed_attributes = ["size"] # not 'class'
html = "<div size='large' class='high'>safe<span>unsafe</span></div>"
rendered = content_from_html(html).to_rendered_html_with_layout
assert_includes rendered, "<div"
assert_not_includes rendered, "<span"
assert_includes rendered, "large"
assert_not_includes rendered, "high"
ensure
ActionText::ContentHelper.allowed_tags = old_tags
ActionText::ContentHelper.allowed_attributes = old_attrs
end
test "renders with layout when in a new thread" do
html = "<h1>Hello world</h1>"
rendered = nil

@ -1,3 +1,22 @@
* Don't double-encode nested `field_id` and `field_name` index values
Pass `index: @options[:index]` as the default keyword argument to the `field_id` and
`field_name` view helper methods.
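A sketch mirroring the nested-attributes tests added in this change; `post` and `:comments` are placeholder names:
```ruby
form_for(post) do |form|
  form.fields_for(:comments) do |comment_form|
    comment_form.field_name(:body) # => "post[comments_attributes][0][body]"
    comment_form.field_id(:body)   # => "post_comments_attributes_0_body"
  end
end
```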
*Sean Doyle*
* Allow opting in/out of `Link preload` headers when calling `stylesheet_link_tag` or `javascript_include_tag`
```ruby
# will exclude header, even if setting is enabled:
javascript_include_tag("http://example.com/all.js", preload_links_header: false)
# will include header, even if setting is disabled:
stylesheet_link_tag("http://example.com/style.css", preload_links_header: true)
```
*Alex Ghiculescu*
* Stop generating `Link preload` headers once the accumulated header size has reached 1KB.
Some proxies have trouble handling large headers, but more importantly preload links

@ -112,6 +112,7 @@ def javascript_include_tag(*sources)
options = sources.extract_options!.stringify_keys
path_options = options.extract!("protocol", "extname", "host", "skip_pipeline").symbolize_keys
preload_links = []
use_preload_links_header = options["preload_links_header"].nil? ? preload_links_header : options.delete("preload_links_header")
nopush = options["nopush"].nil? ? true : options.delete("nopush")
crossorigin = options.delete("crossorigin")
crossorigin = "anonymous" if crossorigin == true
@ -120,7 +121,7 @@ def javascript_include_tag(*sources)
sources_tags = sources.uniq.map { |source|
href = path_to_javascript(source, path_options)
if preload_links_header && !options["defer"] && href.present? && !href.start_with?("data:")
if use_preload_links_header && !options["defer"] && href.present? && !href.start_with?("data:")
preload_link = "<#{href}>; rel=#{rel}; as=script"
preload_link += "; crossorigin=#{crossorigin}" unless crossorigin.nil?
preload_link += "; integrity=#{integrity}" unless integrity.nil?
@ -137,7 +138,7 @@ def javascript_include_tag(*sources)
content_tag("script", "", tag_options)
}.join("\n").html_safe
if preload_links_header
if use_preload_links_header
send_preload_links_header(preload_links)
end
@ -192,6 +193,7 @@ def javascript_include_tag(*sources)
def stylesheet_link_tag(*sources)
options = sources.extract_options!.stringify_keys
path_options = options.extract!("protocol", "extname", "host", "skip_pipeline").symbolize_keys
use_preload_links_header = options["preload_links_header"].nil? ? preload_links_header : options.delete("preload_links_header")
preload_links = []
crossorigin = options.delete("crossorigin")
crossorigin = "anonymous" if crossorigin == true
@ -200,7 +202,7 @@ def stylesheet_link_tag(*sources)
sources_tags = sources.uniq.map { |source|
href = path_to_stylesheet(source, path_options)
if preload_links_header && href.present? && !href.start_with?("data:")
if use_preload_links_header && href.present? && !href.start_with?("data:")
preload_link = "<#{href}>; rel=preload; as=style"
preload_link += "; crossorigin=#{crossorigin}" unless crossorigin.nil?
preload_link += "; integrity=#{integrity}" unless integrity.nil?
@ -220,7 +222,7 @@ def stylesheet_link_tag(*sources)
tag(:link, tag_options)
}.join("\n").html_safe
if preload_links_header
if use_preload_links_header
send_preload_links_header(preload_links)
end

@ -1767,7 +1767,7 @@ def id
# <tt>aria-describedby</tt> attribute referencing the <tt><span></tt>
# element, sharing a common <tt>id</tt> root (<tt>post_title</tt>, in this
# case).
def field_id(method, *suffixes, namespace: @options[:namespace], index: @index)
def field_id(method, *suffixes, namespace: @options[:namespace], index: @options[:index])
@template.field_id(@object_name, method, *suffixes, namespace: namespace, index: index)
end
@ -1787,7 +1787,7 @@ def field_id(method, *suffixes, namespace: @options[:namespace], index: @index)
# <%# => <input type="text" name="post[tag][]">
# <% end %>
#
def field_name(method, *methods, multiple: false, index: @index)
def field_name(method, *methods, multiple: false, index: @options[:index])
object_name = @options.fetch(:as) { @object_name }
@template.field_name(object_name, method, *methods, index: index, multiple: multiple)

@ -46,7 +46,7 @@ class Railtie < Rails::Engine # :nodoc:
ActionView::Helpers::ContentExfiltrationPreventionHelper.prepend_content_exfiltration_prevention = prepend_content_exfiltration_prevention
end
initializer "action_view.sanitizer_vendor" do |app|
config.after_initialize do |app|
if klass = app.config.action_view.delete(:sanitizer_vendor)
ActionView::Helpers::SanitizeHelper.sanitizer_vendor = klass
end

@ -89,15 +89,25 @@ def expanded_cache_key(key, view, template, digest_path)
# If the partial is not already cached it will also be
# written back to the underlying cache store.
def fetch_or_cache_partial(cached_partials, template, order_by:)
order_by.index_with do |cache_key|
entries_to_write = {}
keyed_partials = order_by.index_with do |cache_key|
if content = cached_partials[cache_key]
build_rendered_template(content, template)
else
yield.tap do |rendered_partial|
collection_cache.write(cache_key, rendered_partial.body)
rendered_partial = yield
if fragment = rendered_partial.body&.to_str
entries_to_write[cache_key] = fragment
end
rendered_partial
end
end
unless entries_to_write.empty?
collection_cache.write_multi(entries_to_write)
end
keyed_partials
end
end
end

@ -46,7 +46,7 @@ def render_template(view, template, layout_name = nil, locals = {}) # :nodoc:
return [super.body] unless layout_name && template.supports_streaming?
locals ||= {}
layout = layout_name && find_layout(layout_name, locals.keys, [formats.first])
layout = find_layout(layout_name, locals.keys, [formats.first])
Body.new do |buffer|
delayed_render(buffer, template, layout, view, locals)

@ -4,7 +4,7 @@
require "delegate"
module ActionView
# = Action View Template
# = Action View \Template
class Template
extend ActiveSupport::Autoload

@ -287,6 +287,7 @@ def method_missing(selector, *args)
super
end
end
ruby2_keywords(:method_missing)
def respond_to_missing?(name, include_private = false)
begin

@ -30,6 +30,12 @@ class MultifetchCacheTest < ActiveRecordTestCase
setup do
Topic.update_all(updated_at: Time.now)
@cache_store_was = ActionView::PartialRenderer.collection_cache
ActionView::PartialRenderer.collection_cache = ActiveSupport::Cache::MemoryStore.new
end
teardown do
ActionView::PartialRenderer.collection_cache = @cache_store_was
end
def test_only_preloading_for_records_that_miss_the_cache
@ -77,4 +83,19 @@ def test_preloads_all_records_if_using_cached_proc
assert_equal first_req.first, second_req.first
assert_includes second_req.last, %(WHERE "replies"."topic_id" IN (?, ?, ?))
end
class InspectableStore < ActiveSupport::Cache::MemoryStore
attr_reader :data
end
def test_fragments_are_stored_as_bare_strings
cache = ActionView::PartialRenderer.collection_cache = InspectableStore.new
Topic.update_all(title: "title")
get :cached_true
assert_not_predicate cache.data, :empty?
cache.data.each_value do |entry|
assert_equal String, entry.value.class
end
end
end

@ -0,0 +1,4 @@
<%= content_tag 'div', data: { controller: "read-more", 'read-more-more-text-value': "Read more", 'read-more-less-text-value': "Read less" } do %>
<%= content_tag('p', 'Content text', class: 'content-class', data: { 'test-name': "content" }) %>
<%= content_tag('button', 'Read more', class: 'expand-button', data: { action: 'read-more#toggle'}) %>
<% end %>

@ -642,6 +642,23 @@ def test_should_not_set_preload_links_for_data_url
end
end
def test_should_not_set_preload_links_if_opted_out_at_invocation
with_preload_links_header do
stylesheet_link_tag("http://example.com/style.css", preload_links_header: false)
javascript_include_tag("http://example.com/all.js", preload_links_header: false)
assert_nil @response.headers["Link"]
end
end
def test_should_set_preload_links_if_opted_in_at_invocation
with_preload_links_header(false) do
stylesheet_link_tag("http://example.com/style.css", preload_links_header: true)
javascript_include_tag("http://example.com/all.js", preload_links_header: true)
expected = "<http://example.com/style.css>; rel=preload; as=style; nopush,<http://example.com/all.js>; rel=preload; as=script; nopush"
assert_equal expected, @response.headers["Link"]
end
end
def test_should_generate_links_under_the_max_size
with_preload_links_header do
100.times do |i|

@ -1894,6 +1894,57 @@ def test_form_for_field_id_with_namespace_and_index
assert_dom_equal expected, @rendered
end
def test_form_for_with_nested_attributes_field_id
post, comment, tag = Post.new, Comment.new, Tag.new
comment.relevances = [tag]
post.comments = [comment]
form_for(post) do |form|
form.fields_for(:comments) do |comment_form|
concat comment_form.field_id :relevances_attributes
end
end
expected = whole_form("/posts", "new_post", "new_post") do
"post_comments_attributes_0_relevances_attributes"
end
assert_dom_equal expected, @rendered
end
def test_form_for_with_nested_attributes_field_name
post, comment, tag = Post.new, Comment.new, Tag.new
comment.relevances = [tag]
post.comments = [comment]
form_for(post) do |form|
form.fields_for(:comments) do |comment_form|
concat comment_form.field_name :relevances_attributes
end
end
expected = whole_form("/posts", "new_post", "new_post") do
"post[comments_attributes][0][relevances_attributes]"
end
assert_dom_equal expected, @rendered
end
def test_form_for_with_nested_attributes_field_name_multiple
post, comment, tag = Post.new, Comment.new, Tag.new
comment.relevances = [tag]
post.comments = [comment]
form_for(post) do |form|
form.fields_for(:comments) do |comment_form|
concat comment_form.field_name :relevances_attributes, multiple: true
end
end
expected = whole_form("/posts", "new_post", "new_post") do
"post[comments_attributes][0][relevances_attributes][]"
end
assert_dom_equal expected, @rendered
end
def test_form_for_with_collection_radio_buttons
post = Post.new
def post.active; false; end

@ -298,6 +298,11 @@ def test_content_tag_nested_in_content_tag_in_erb
assert_equal "<p>\n <b>Hello</b>\n</p>", view.render("test/builder_tag_nested_in_content_tag")
end
def test_content_tag_nested_in_content_tag_with_data_attributes_out_of_erb
assert_equal "<div data-controller=\"read-more\" data-read-more-more-text-value=\"Read more\" data-read-more-less-text-value=\"Read less\"\>\n <p class=\"content-class\" data-test-name=\"content\">Content text</p>\n <button class=\"expand-button\" data-action=\"read-more#toggle\">Read more</button>\n</div>",
view.render("test/content_tag_nested_in_content_tag_with_data_attributes_out_of_erb")
end
def test_content_tag_with_escaped_array_class
str = content_tag("p", "limelight", class: ["song", "play>"])
assert_equal "<p class=\"song play&gt;\">limelight</p>", str

@ -36,8 +36,9 @@ namespace :test do
Rake::TestTask.new(adapter => "test:env:#{adapter}") do |t|
t.description = "Run adapter tests for #{adapter}"
t.libs << "test"
t.test_files = FileList["test/cases/**/*_test.rb"].reject {
|x| x.include?("delayed_job") && adapter != "delayed_job"
t.test_files = FileList["test/cases/**/*_test.rb"].reject { |x|
(x.include?("delayed_job") && adapter != "delayed_job") ||
(x.include?("async") && adapter != "async")
}
t.verbose = true
t.warning = true
@ -46,8 +47,9 @@ namespace :test do
namespace :isolated do
task adapter => "test:env:#{adapter}" do
Dir.glob("#{__dir__}/test/cases/**/*_test.rb").reject {
|x| x.include?("delayed_job") && adapter != "delayed_job"
Dir.glob("#{__dir__}/test/cases/**/*_test.rb").reject { |x|
(x.include?("delayed_job") && adapter != "delayed_job") ||
(x.include?("async") && adapter != "async")
}.all? do |file|
sh(Gem.ruby, "-w", "-I#{__dir__}/lib", "-I#{__dir__}/test", file)
end || raise("Failures")

@ -95,7 +95,7 @@ def enqueue(job, queue_name:)
def enqueue_at(job, timestamp, queue_name:)
delay = timestamp - Time.current.to_f
if delay > 0
if !immediate && delay > 0
Concurrent::ScheduledTask.execute(delay, args: [job], executor: executor, &:perform)
else
enqueue(job, queue_name: queue_name)

@ -0,0 +1,22 @@
# frozen_string_literal: true
require "helper"
require "active_job/queue_adapters/async_adapter"
require "jobs/hello_job"
class AsyncAdapterTest < ActiveSupport::TestCase
setup do
JobBuffer.clear
ActiveJob::Base.queue_adapter.immediate = true
end
test "in immediate run, perform_later runs immediately" do
HelloJob.perform_later "Alex"
assert_match(/Alex/, JobBuffer.last_value)
end
test "in immediate run, enqueue with wait: runs immediately" do
HelloJob.set(wait_until: Date.tomorrow.noon).perform_later "Alex"
assert_match(/Alex/, JobBuffer.last_value)
end
end

@ -280,8 +280,8 @@ def test_enqueue_retry_logging_on_retry_job
def test_retry_stopped_logging
perform_enqueued_jobs do
RetryJob.perform_later "CustomCatchError", 6
assert_match(/Stopped retrying RetryJob \(Job ID: .*?\) due to a CustomCatchError.*, which reoccurred on \d+ attempts\./, @logger.messages)
end
assert_match(/Stopped retrying RetryJob \(Job ID: .*?\) due to a CustomCatchError.*, which reoccurred on \d+ attempts\./, @logger.messages)
end
def test_retry_stopped_logging_without_block

@ -515,6 +515,12 @@ def test_assert_enqueued_with_when_queue_name_is_symbol
end
end
def test_assert_no_enqueued_jobs_and_perform_now
assert_no_enqueued_jobs do
LoggingJob.perform_now(1, 2, 3, keyword: true)
end
end
def test_assert_enqueued_with_returns
job = assert_enqueued_with(job: LoggingJob) do
LoggingJob.set(wait_until: 5.minutes.from_now).perform_later(1, 2, 3, keyword: true)

@ -19,3 +19,7 @@
require "active_support/testing/autorun"
require_relative "../../tools/test_common"
def adapter_is?(*adapter_class_symbols)
adapter_class_symbols.map(&:to_s).include? ActiveJob::Base.queue_adapter_name
end

@ -1,8 +1,8 @@
# frozen_string_literal: true
class LoggingJob < ActiveJob::Base
def perform(dummy)
logger.info "Dummy, here is it: #{dummy}"
def perform(*dummy)
logger.info "Dummy, here is it: #{dummy.join(" ")}"
end
def job_id

@ -27,10 +27,6 @@ def clear_jobs
jobs_manager.clear_jobs
end
def adapter_is?(*adapter_class_symbols)
adapter_class_symbols.map(&:to_s).include? ActiveJob::Base.queue_adapter_name
end
def wait_for_jobs_to_finish_for(seconds = 60)
Timeout.timeout(seconds) do
while !job_executed do

@ -1,3 +1,8 @@
* Add a load hook for `ActiveModel::Model` (named `active_model`) to match the load hook for `ActiveRecord::Base` and
allow for overriding aspects of the `ActiveModel::Model` class.
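A sketch of the hook in use; `MyApp::ModelExtensions` is a hypothetical module:
```ruby
ActiveSupport.on_load(:active_model) do
  # self is ActiveModel::Model here
  include MyApp::ModelExtensions
end
```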
*Lewis Buckley*
* Improve password length validation in ActiveModel::SecurePassword to consider byte size for BCrypt compatibility.
The previous password length validation only considered the character count, which may not

@ -13,8 +13,7 @@ module ActiveModel
# * <tt>include ActiveModel::Dirty</tt> in your object.
# * Call <tt>define_attribute_methods</tt> passing each method you want to
# track.
# * Call <tt>[attr_name]_will_change!</tt> before each change to the tracked
# attribute.
# * Call <tt>*_will_change!</tt> before each change to the tracked attribute.
# * Call <tt>changes_applied</tt> after the changes are persisted.
# * Call <tt>clear_changes_information</tt> when you want to reset the changes
# information.
@ -109,20 +108,136 @@ module ActiveModel
# person.changes # => {"name" => ["Bill", "Bob"]}
#
# If an attribute is modified in-place then make use of
# <tt>[attribute_name]_will_change!</tt> to mark that the attribute is changing.
# {*_will_change!}[rdoc-label:method-i-2A_will_change-21] to mark that the attribute is changing.
# Otherwise \Active \Model can't track changes to in-place attributes. Note
# that Active Record can detect in-place modifications automatically. You do
# not need to call <tt>[attribute_name]_will_change!</tt> on Active Record models.
# not need to call <tt>*_will_change!</tt> on Active Record models.
#
# person.name_will_change!
# person.name_change # => ["Bill", "Bill"]
# person.name << 'y'
# person.name_change # => ["Bill", "Billy"]
#
# Methods can be invoked as +name_changed?+ or by passing an argument to the
# generic method <tt>attribute_changed?("name")</tt>.
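#
# For example, continuing the +person+ above:
#
#   person.name_changed?              # => true
#   person.attribute_changed?("name") # => true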
module Dirty
extend ActiveSupport::Concern
include ActiveModel::AttributeMethods
included do
##
# :method: *_previously_changed?
#
# :call-seq: *_previously_changed?(**options)
#
# This method is generated for each attribute.
#
# Returns true if the attribute previously had unsaved changes.
#
# person = Person.new
# person.name = 'Britanny'
# person.save
# person.name_previously_changed? # => true
# person.name_previously_changed?(from: nil, to: 'Britanny') # => true
##
# :method: *_changed?
#
# This method is generated for each attribute.
#
# Returns true if the attribute has unsaved changes.
#
# person = Person.new
# person.name = 'Andrew'
# person.name_changed? # => true
##
# :method: *_change
#
# This method is generated for each attribute.
#
# Returns the old and the new value of the attribute.
#
# person = Person.new
# person.name = 'Nick'
# person.name_change # => [nil, 'Nick']
##
# :method: *_will_change!
#
# This method is generated for each attribute.
#
# If an attribute is modified in-place then make use of
# <tt>*_will_change!</tt> to mark that the attribute is changing.
# Otherwise Active Model can't track changes to in-place attributes. Note
# that Active Record can detect in-place modifications automatically. You
# do not need to call <tt>*_will_change!</tt> on Active Record
# models.
#
# person = Person.new('Sandy')
# person.name_will_change!
# person.name_change # => ['Sandy', 'Sandy']
##
# :method: *_was
#
# This method is generated for each attribute.
#
# Returns the old value of the attribute.
#
# person = Person.new(name: 'Steph')
# person.name = 'Stephanie'
# person.name_was # => 'Steph'
##
# :method: *_previous_change
#
# This method is generated for each attribute.
#
# Returns the old and the new value of the attribute before the last save.
#
# person = Person.new
# person.name = 'Emmanuel'
# person.save
# person.name_previous_change # => [nil, 'Emmanuel']
##
# :method: *_previously_was
#
# This method is generated for each attribute.
#
# Returns the old value of the attribute before the last save.
#
# person = Person.new
# person.name = 'Sage'
# person.save
# person.name_previously_was # => nil
##
# :method: restore_*!
#
# This method is generated for each attribute.
#
# Restores the attribute to the old value.
#
# person = Person.new
# person.name = 'Amanda'
# person.restore_name!
# person.name # => nil
##
# :method: clear_*_change
#
# This method is generated for each attribute.
#
# Clears all dirty data of the attribute: current changes and previous changes.
#
# person = Person.new(name: 'Chris')
# person.name = 'Jason'
# person.name_change # => ['Chris', 'Jason']
# person.clear_name_change
# person.name_change # => nil
attribute_method_suffix "_previously_changed?", "_changed?", parameters: "**options"
attribute_method_suffix "_change", "_will_change!", "_was", parameters: false
attribute_method_suffix "_previous_change", "_previously_was", parameters: false
@ -174,22 +289,22 @@ def changed
mutations_from_database.changed_attribute_names
end
# Dispatch target for <tt>*_changed?</tt> attribute methods.
# Dispatch target for {*_changed}[rdoc-label:method-i-2A_changed-3F] attribute methods.
def attribute_changed?(attr_name, **options)
mutations_from_database.changed?(attr_name.to_s, **options)
end
# Dispatch target for <tt>*_was</tt> attribute methods.
# Dispatch target for {*_was}[rdoc-label:method-i-2A_was] attribute methods.
def attribute_was(attr_name)
mutations_from_database.original_value(attr_name.to_s)
end
# Dispatch target for <tt>*_previously_changed?</tt> attribute methods.
# Dispatch target for {*_previously_changed}[rdoc-label:method-i-2A_previously_changed-3F] attribute methods.
def attribute_previously_changed?(attr_name, **options)
mutations_before_last_save.changed?(attr_name.to_s, **options)
end
# Dispatch target for <tt>*_previously_was</tt> attribute methods.
# Dispatch target for {*_previously_was}[rdoc-label:method-i-2A_previously_was] attribute methods.
def attribute_previously_was(attr_name)
mutations_before_last_save.original_value(attr_name.to_s)
end

@ -65,4 +65,6 @@ module Model
#--
# Implemented by ActiveModel::Access#values_at.
end
ActiveSupport.run_load_hooks(:active_model, Model)
end

@ -4,10 +4,6 @@ module ActiveModel
module Type
module Helpers # :nodoc: all
module Mutable
def immutable_value(value)
value.deep_dup
end
def cast(value)
deserialize(serialize(value))
end
@ -18,6 +14,10 @@ def cast(value)
def changed_in_place?(raw_old_value, new_value)
raw_old_value != serialize(new_value)
end
def mutable? # :nodoc:
true
end
end
end
end

@ -133,8 +133,12 @@ def hash
def assert_valid_value(_)
end
def immutable_value(value) # :nodoc:
value
def serialized? # :nodoc:
false
end
def mutable? # :nodoc:
false
end
def as_json(*)

@ -76,4 +76,12 @@ def test_mixin_initializer_when_args_dont_exist
SimpleModel.new(hello: "world")
end
end
def test_load_hook_is_called
value = "not loaded"
ActiveSupport.on_load(:active_model) { value = "loaded" }
assert_equal "loaded", value
end
end

@ -179,7 +179,7 @@ def @contact.favorite_quote; "Constraints are liberating"; end
end
test "custom as_json should be honored when generating json" do
def @contact.as_json(options); { name: name, created_at: created_at }; end
def @contact.as_json(options = nil); { name: name, created_at: created_at }; end
json = @contact.to_json
assert_match %r{"name":"Konata Izumi"}, json

@ -6,6 +6,83 @@
*Gregory Jones*
* Fix incrementation of in memory counter caches when associations overlap
When two associations had a similarly named counter cache column, Active Record
could sometimes increment the wrong one.
*Jacopo Beschi*, *Jean Boussier*
* Don't show secrets for Active Record's `Cipher::Aes256Gcm#inspect`.
Before:
```ruby
ActiveRecord::Encryption::Cipher::Aes256Gcm.new(secret).inspect
"#<ActiveRecord::Encryption::Cipher::Aes256Gcm:0x0000000104888038 ... @secret=\"\\xAF\\bFh]LV}q\\nl\\xB2U\\xB3 ... >"
```
After:
```ruby
ActiveRecord::Encryption::Cipher::Aes256Gcm.new(secret).inspect
"#<ActiveRecord::Encryption::Cipher::Aes256Gcm:0x0000000104888038>"
```
*Petrik de Heus*
* Bring back the historical behavior of committing transaction on non-local return.
```ruby
Model.transaction do
model.save
return
other_model.save # not executed
end
```
Historically only raised errors would trigger a rollback, but in Ruby `2.3` the `timeout` library
started using `throw` to interrupt execution, which had the adverse effect of committing open transactions.
To solve this, in Active Record 6.1 the behavior was changed to instead roll back the transaction, as that was safer
than potentially committing an incomplete transaction.
Using `return`, `break` or `throw` inside a `transaction` block was essentially deprecated from Rails 6.1 onwards.
However with the release of `timeout 0.4.0`, `Timeout.timeout` now raises an error again, and Active Record is able
to return to its original, less surprising, behavior.
This historical behavior can now be opted into via:
```
Rails.application.config.active_record.commit_transaction_on_non_local_return = true
```
It is also the default for new applications created in Rails 7.1.
*Jean Boussier*
* Deprecate `name` argument on `#remove_connection`.
The `name` argument is deprecated on `#remove_connection` without replacement. `#remove_connection` should be called directly on the class that established the connection.
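A sketch of the migration path; `AnimalsRecord` stands in for any class that established its own connection:
```ruby
# Deprecated: passing a connection specification name
ActiveRecord::Base.remove_connection("AnimalsRecord")

# Preferred: call it on the connecting class itself
AnimalsRecord.remove_connection
```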
*Eileen M. Uchitelle*
* Fix has_one through singular building with inverse.
Allows building of records from an association with a has_one through a
singular association with inverse. For belongs_to through associations,
linking the foreign key to the primary key model isn't needed.
For has_one, we cannot build records due to the association not being mutable.
*Gannon McGibbon*
* Disable database prepared statements when query logs are enabled
Prepared Statements and Query Logs are incompatible features due to query logs making every query unique.
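A sketch of the setting that triggers this behavior, using the standard query log tags config key:
```ruby
# config/environments/production.rb
config.active_record.query_log_tags_enabled = true
# With query log tags enabled, Active Record now behaves as if
# `prepared_statements: false` were set for its connections.
```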
*zzak, Jean Boussier*
* Support decrypting data encrypted non-deterministically with a SHA1 hash digest.
This adds a new Active Record encryption option to support decrypting data encrypted
@ -22,24 +99,6 @@
*Cadu Ribeiro and Jorge Manrubia*
* Apply scope to association subqueries. (belongs_to/has_one/has_many)
Given: `has_many :welcome_posts, -> { where(title: "welcome") }`
Before:
```ruby
Author.where(welcome_posts: Post.all)
#=> SELECT (...) WHERE "authors"."id" IN (SELECT "posts"."author_id" FROM "posts")
```
After:
```ruby
Author.where(welcome_posts: Post.all)
#=> SELECT (...) WHERE "authors"."id" IN (SELECT "posts"."author_id" FROM "posts" WHERE "posts"."title" = 'welcome')
```
*Lázaro Nixon*
* Added PostgreSQL migration commands for enum rename, add value, and rename value.
`rename_enum` and `rename_enum_value` are reversible. Due to Postgres

@ -37,4 +37,5 @@
s.add_dependency "activesupport", version
s.add_dependency "activemodel", version
s.add_dependency "timeout", ">= 0.4.0"
end

@ -49,6 +49,7 @@ module ActiveRecord
autoload :Encryption
autoload :Enum
autoload :Explain
autoload :FixtureSet, "active_record/fixtures"
autoload :Inheritance
autoload :Integration
autoload :InternalMetadata
@ -173,6 +174,9 @@ module Tasks
autoload :SQLiteDatabaseTasks, "active_record/tasks/sqlite_database_tasks"
end
singleton_class.attr_accessor :disable_prepared_statements
self.disable_prepared_statements = false
# Lazily load the schema cache. This option will load the schema cache
# when a connection is established rather than on boot. If set,
# +config.active_record.use_schema_cache_dump+ will be set to false.
@ -316,6 +320,9 @@ def self.global_executor_concurrency # :nodoc:
singleton_class.attr_accessor :run_after_transaction_callbacks_in_order_defined
self.run_after_transaction_callbacks_in_order_defined = false
singleton_class.attr_accessor :commit_transaction_on_non_local_return
self.commit_transaction_on_non_local_return = false
##
# :singleton-method:
# Specify a threshold for the size of query result sets. If the number of

@ -190,9 +190,9 @@ class CompositePrimaryKeyMismatchError < ActiveRecordError # :nodoc:
def initialize(reflection = nil)
if reflection
if reflection.has_one? || reflection.collection?
super("Association #{reflection.active_record}##{reflection.name} primary key #{reflection.active_record_primary_key} doesn't match with foreign key #{reflection.foreign_key}. Please specify query_constraints.")
super("Association #{reflection.active_record}##{reflection.name} primary key #{reflection.active_record_primary_key} doesn't match with foreign key #{reflection.foreign_key}. Please specify query_constraints, or primary_key and foreign_key values.")
else
super("Association #{reflection.active_record}##{reflection.name} primary key #{reflection.association_primary_key} doesn't match with foreign key #{reflection.foreign_key}. Please specify query_constraints.")
super("Association #{reflection.active_record}##{reflection.name} primary key #{reflection.association_primary_key} doesn't match with foreign key #{reflection.foreign_key}. Please specify query_constraints, or primary_key and foreign_key values.")
end
else
super("Association primary key doesn't match with foreign key.")

@ -45,11 +45,6 @@ def self.add_right_association(name, options)
def self.retrieve_connection
left_model.retrieve_connection
end
private
def self.suppress_composite_primary_key(pk)
pk unless pk.is_a?(Array)
end
}
join_model.name = "HABTM_#{association_name.to_s.camelize}"

@ -87,7 +87,7 @@ def ids_writer(ids)
def reset
super
@target = []
@replaced_or_added_targets = Set.new
@replaced_or_added_targets = Set.new.compare_by_identity
@association_ids = nil
end
@ -333,7 +333,11 @@ def merge_target_lists(persisted, memory)
if mem_record = memory.delete(record)
((record.attribute_names & mem_record.attribute_names) - mem_record.changed_attribute_names_to_save - mem_record.class._attr_readonly).each do |name|
mem_record._write_attribute(name, record[name])
if name == "id" && mem_record.class.composite_primary_key?
mem_record.class.primary_key.zip(record[name]) { |attr, value| mem_record._write_attribute(attr, value) }
else
mem_record._write_attribute(name, record[name])
end
end
mem_record

@ -1107,6 +1107,11 @@ def inspect # :nodoc:
super
end
def pretty_print(pp) # :nodoc:
load_target if find_from_target?
super
end
delegate_methods = [
QueryMethods,
SpawnMethods,

@ -81,7 +81,9 @@ def construct_join_attributes(*records)
# to try to properly support stale-checking for nested associations.
def stale_state
if through_reflection.belongs_to?
owner[through_reflection.foreign_key] && owner[through_reflection.foreign_key].to_s
Array(through_reflection.foreign_key).map do |foreign_key_column|
owner[foreign_key_column] && owner[foreign_key_column].to_s
end
end
end
@ -112,11 +114,15 @@ def ensure_not_nested
end
def build_record(attributes)
inverse = source_reflection.inverse_of
target = through_association.target
if source_reflection.collection?
inverse = source_reflection.inverse_of
target = through_association.target
if inverse && target && !target.is_a?(Array)
attributes[inverse.foreign_key] = target.id
if inverse && target && !target.is_a?(Array)
Array(target.id).zip(Array(inverse.foreign_key)).map do |primary_key_value, foreign_key_column|
attributes[foreign_key_column] = primary_key_value
end
end
end
super

@ -5,6 +5,37 @@
module ActiveRecord
module AttributeMethods
# = Active Record Attribute Methods \Dirty
#
# Provides a way to track changes in your Active Record models. It adds all
# methods from ActiveModel::Dirty, plus database-specific methods.
#
# A newly created +Person+ object is unchanged:
#
# class Person < ActiveRecord::Base
# end
#
# person = Person.create(name: "Allison")
# person.changed? # => false
#
# Change the name:
#
# person.name = 'Alice'
# person.name_in_database # => "Allison"
# person.will_save_change_to_name? # => true
# person.name_change_to_be_saved # => ["Allison", "Alice"]
# person.changes_to_save # => {"name"=>["Allison", "Alice"]}
#
# Save the changes:
#
# person.save
# person.name_in_database # => "Alice"
# person.saved_change_to_name? # => true
# person.saved_change_to_name # => ["Allison", "Alice"]
# person.name_before_last_save # => "Allison"
#
# Similar to ActiveModel::Dirty, methods can be invoked as
# +saved_change_to_name?+ or by passing an argument to the generic method
# <tt>saved_change_to_attribute?("name")</tt>.
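#
# For example, continuing the +person+ above:
#
#   person.saved_change_to_name?              # => true
#   person.saved_change_to_attribute?("name") # => true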
module Dirty
extend ActiveSupport::Concern

@ -12,6 +12,8 @@ module ConnectionAdapters
autoload :PoolConfig
autoload :PoolManager
autoload :SchemaCache
autoload :BoundSchemaReflection, "active_record/connection_adapters/schema_cache"
autoload :SchemaReflection, "active_record/connection_adapters/schema_cache"
autoload :Deduplicable
autoload_at "active_record/connection_adapters/abstract/schema_definitions" do

@ -128,7 +128,7 @@ def each_connection_pool(role = nil, &block) # :nodoc:
end
end
def establish_connection(config, owner_name: Base, role: ActiveRecord::Base.current_role, shard: Base.current_shard)
def establish_connection(config, owner_name: Base, role: Base.current_role, shard: Base.current_shard, clobber: false)
owner_name = determine_owner_name(owner_name, config)
pool_config = resolve_pool_config(config, owner_name, role, shard)
@ -142,7 +142,7 @@ def establish_connection(config, owner_name: Base, role: ActiveRecord::Base.curr
# configuration.
existing_pool_config = pool_manager.get_pool_config(role, shard)
if existing_pool_config && existing_pool_config.db_config == db_config
if !clobber && existing_pool_config && existing_pool_config.db_config == db_config
# Update the pool_config's connection class if it differs. This is used
# for ensuring that ActiveRecord::Base and the primary_abstract_class use
# the same pool. Without this granular swapping will not work correctly.

@ -10,22 +10,6 @@
module ActiveRecord
module ConnectionAdapters
module AbstractPool # :nodoc:
def get_schema_cache(connection)
self.schema_cache ||= SchemaCache.new(connection)
schema_cache.connection = connection
schema_cache
end
def set_schema_cache(cache)
self.schema_cache = cache
end
def lazily_set_schema_cache
return unless ActiveRecord.lazily_load_schema_cache
cache = SchemaCache.load_from(db_config.lazy_schema_cache_path)
set_schema_cache(cache)
end
end
class NullPool # :nodoc:
@ -38,7 +22,9 @@ def method_missing(*)
end
NULL_CONFIG = NullConfig.new # :nodoc:
attr_accessor :schema_cache
def schema_reflection
SchemaReflection.new(nil)
end
def connection_class; end
def checkin(_); end
@ -125,7 +111,7 @@ class ConnectionPool
attr_accessor :automatic_reconnect, :checkout_timeout
attr_reader :db_config, :size, :reaper, :pool_config, :async_executor, :role, :shard
delegate :schema_cache, :schema_cache=, to: :pool_config
delegate :schema_reflection, :schema_reflection=, to: :pool_config
# Creates a new ConnectionPool object. +pool_config+ is a PoolConfig
# object which describes database connection information (e.g. adapter,
@ -173,8 +159,6 @@ def initialize(pool_config)
@async_executor = build_async_executor
lazily_set_schema_cache
@reaper = Reaper.new(self, db_config.reaping_frequency)
@reaper.run
end

@ -90,7 +90,7 @@ def invalidate!; end
class Transaction # :nodoc:
attr_reader :connection, :state, :savepoint_name, :isolation_level
attr_accessor :written, :written_indirectly
attr_accessor :written
delegate :invalidate!, :invalidated?, to: :@state
@ -463,10 +463,6 @@ def commit_transaction
dirty_current_transaction if transaction.dirty?
if current_transaction.open?
current_transaction.written_indirectly ||= transaction.written || transaction.written_indirectly
end
transaction.commit
transaction.commit_records
end
@ -498,30 +494,25 @@ def within_new_transaction(isolation: nil, joinable: true)
raise
ensure
unless error
# In 7.1 we enforce timeout >= 0.4.0 which no longer use throw, so we can
# go back to the original behavior of committing on non-local return.
# If users are using throw, we assume it's not an error case.
completed = true if ActiveRecord.commit_transaction_on_non_local_return
if Thread.current.status == "aborting"
rollback_transaction
elsif !completed && transaction.written
# This was deprecated in 6.1, and has now changed to a rollback
rollback_transaction
elsif !completed && !transaction.written_indirectly
# This was a silent commit in 6.1, but now becomes a rollback; we skipped
# the warning because (having not been written) the change generally won't
# have any effect
ActiveRecord.deprecator.warn(<<~EOW)
A transaction is being rolled back because the transaction block was
exited using `return`, `break` or `throw`.
In Rails 7.2 this transaction will be committed instead.
To opt-in to the new behavior now and suppress this warning
you can set:
Rails.application.config.active_record.commit_transaction_on_non_local_return = true
EOW
rollback_transaction
else
if !completed && transaction.written_indirectly
# This is the case that was missed in the 6.1 deprecation, so we have to
# do it now
ActiveRecord.deprecator.warn(<<~EOW)
Using `return`, `break` or `throw` to exit a transaction block is
deprecated without replacement. If the `throw` came from
`Timeout.timeout(duration)`, pass an exception class as a second
argument so it doesn't use `throw` to abort its block. This results
in the transaction being committed, but in the next release of Rails
it will rollback.
EOW
end
begin
commit_transaction
rescue ActiveRecord::ConnectionFailed

@ -41,10 +41,18 @@ class AbstractAdapter
SIMPLE_INT = /\A\d+\z/
COMMENT_REGEX = %r{(?:--.*\n)|/\*(?:[^*]|\*[^/])*\*/}
attr_accessor :pool
attr_reader :pool
attr_reader :visitor, :owner, :logger, :lock
alias :in_use? :owner
def pool=(value)
return if value.eql?(@pool)
@schema_cache = nil
@pool = value
@pool.schema_reflection.load!(self) if ActiveRecord.lazily_load_schema_cache
end
set_callback :checkin, :after, :enable_lazy_transactions!
def self.type_cast_config_to_integer(config)
@ -160,7 +168,7 @@ def initialize(config_or_deprecated_connection, deprecated_logger = nil, depreca
@statements = build_statement_pool
self.lock_thread = nil
@prepared_statements = self.class.type_cast_config_to_boolean(
@prepared_statements = !ActiveRecord.disable_prepared_statements && self.class.type_cast_config_to_boolean(
@config.fetch(:prepared_statements) { default_prepared_statements }
)
@ -327,12 +335,7 @@ def shard
end
def schema_cache
@pool.get_schema_cache(self)
end
def schema_cache=(cache)
cache.connection = self
@pool.set_schema_cache(cache)
@schema_cache ||= BoundSchemaReflection.new(@pool.schema_reflection, self)
end
# this method must only be called while holding connection pool's mutex
@ -733,12 +736,6 @@ def disconnect!
# rid of a connection that belonged to its parent.
def discard!
# This should be overridden by concrete adapters.
#
# Prevent @raw_connection's finalizer from touching the socket, or
# otherwise communicating with its server, when it is collected.
if schema_cache.connection == self
schema_cache.connection = nil
end
end
# Reset the state of this connection, directing the DBMS to clear

@ -6,7 +6,12 @@ class PoolConfig # :nodoc:
include Mutex_m
attr_reader :db_config, :role, :shard
attr_accessor :schema_cache, :connection_class
attr_writer :schema_reflection
attr_accessor :connection_class
def schema_reflection
@schema_reflection ||= SchemaReflection.new(db_config.lazy_schema_cache_path)
end
INSTANCES = ObjectSpace::WeakMap.new
private_constant :INSTANCES

@ -876,6 +876,13 @@ def foreign_key_column_for(table_name) # :nodoc:
super
end
def add_index_options(table_name, column_name, **options) # :nodoc:
if (where = options[:where]) && table_exists?(table_name) && column_exists?(table_name, where)
options[:where] = quote_column_name(where)
end
super
end
def quoted_include_columns_for_index(column_names) # :nodoc:
return quote_column_name(column_names) if column_names.is_a?(Symbol)

@ -4,9 +4,234 @@
module ActiveRecord
module ConnectionAdapters
class SchemaReflection
class << self
attr_accessor :use_schema_cache_dump
attr_accessor :check_schema_cache_dump_version
end
self.use_schema_cache_dump = true
self.check_schema_cache_dump_version = true
def initialize(cache_path, cache = nil)
@cache = cache
@cache_path = cache_path
end
def set_schema_cache(cache)
@cache = cache
end
def clear!
@cache = empty_cache
nil
end
def load!(connection)
cache(connection)
self
end
def primary_keys(connection, table_name)
cache(connection).primary_keys(connection, table_name)
end
def data_source_exists?(connection, name)
cache(connection).data_source_exists?(connection, name)
end
def add(connection, name)
cache(connection).add(connection, name)
end
def data_sources(connection, name)
cache(connection).data_sources(connection, name)
end
def columns(connection, table_name)
cache(connection).columns(connection, table_name)
end
def columns_hash(connection, table_name)
cache(connection).columns_hash(connection, table_name)
end
def columns_hash?(connection, table_name)
cache(connection).columns_hash?(connection, table_name)
end
def indexes(connection, table_name)
cache(connection).indexes(connection, table_name)
end
def database_version(connection)
cache(connection).database_version(connection)
end
def version(connection)
cache(connection).version(connection)
end
def size(connection)
cache(connection).size
end
def clear_data_source_cache!(connection, name)
return if @cache.nil? && !possible_cache_available?
cache(connection).clear_data_source_cache!(connection, name)
end
def cached?(table_name)
if @cache.nil?
# If `check_schema_cache_dump_version` is enabled we can't load
# the schema cache dump without connecting to the database.
unless self.class.check_schema_cache_dump_version
@cache = load_cache(nil)
end
end
@cache&.cached?(table_name)
end
def dump_to(connection, filename)
fresh_cache = empty_cache
fresh_cache.add_all(connection)
fresh_cache.dump_to(filename)
@cache = fresh_cache
end
private
def empty_cache
new_cache = SchemaCache.allocate
new_cache.send(:initialize)
new_cache
end
def cache(connection)
@cache ||= load_cache(connection) || empty_cache
end
def possible_cache_available?
self.class.use_schema_cache_dump &&
@cache_path &&
File.file?(@cache_path)
end
def load_cache(connection)
# Can't load if schema dumps are disabled
return unless possible_cache_available?
# Check we can find one
return unless new_cache = SchemaCache._load_from(@cache_path)
if self.class.check_schema_cache_dump_version
begin
current_version = connection.migration_context.current_version
if new_cache.version(connection) != current_version
warn "Ignoring #{@cache_path} because it has expired. The current schema version is #{current_version}, but the one in the schema cache file is #{new_cache.schema_version}."
return
end
rescue ActiveRecordError => error
warn "Failed to validate the schema cache because of #{error.class}: #{error.message}"
return
end
end
new_cache
end
end
class BoundSchemaReflection
def initialize(abstract_schema_reflection, connection)
@schema_reflection = abstract_schema_reflection
@connection = connection
end
def clear!
@schema_reflection.clear!
end
def load!
@schema_reflection.load!(@connection)
end
def cached?(table_name)
@schema_reflection.cached?(table_name)
end
def primary_keys(table_name)
@schema_reflection.primary_keys(@connection, table_name)
end
def data_source_exists?(name)
@schema_reflection.data_source_exists?(@connection, name)
end
def add(name)
@schema_reflection.add(@connection, name)
end
def data_sources(name)
@schema_reflection.data_sources(@connection, name)
end
def columns(table_name)
@schema_reflection.columns(@connection, table_name)
end
def columns_hash(table_name)
@schema_reflection.columns_hash(@connection, table_name)
end
def columns_hash?(table_name)
@schema_reflection.columns_hash?(@connection, table_name)
end
def indexes(table_name)
@schema_reflection.indexes(@connection, table_name)
end
def database_version
@schema_reflection.database_version(@connection)
end
def version
@schema_reflection.version(@connection)
end
def size
@schema_reflection.size(@connection)
end
def clear_data_source_cache!(name)
@schema_reflection.clear_data_source_cache!(@connection, name)
end
def dump_to(filename)
@schema_reflection.dump_to(@connection, filename)
end
end
# = Active Record Connection Adapters Schema Cache
class SchemaCache
def self.load_from(filename)
class << self
def new(connection)
BoundSchemaReflection.new(SchemaReflection.new(nil), connection)
end
deprecate new: "use ActiveRecord::ConnectionAdapters::SchemaReflection instead", deprecator: ActiveRecord.deprecator
def load_from(filename) # :nodoc:
BoundSchemaReflection.new(SchemaReflection.new(filename), nil)
end
deprecate load_from: "use ActiveRecord::ConnectionAdapters::SchemaReflection instead", deprecator: ActiveRecord.deprecator
end
def self._load_from(filename) # :nodoc:
return unless File.file?(filename)
read(filename) do |file|
@ -33,20 +258,17 @@ def self.read(filename, &block)
end
private_class_method :read
attr_reader :version
attr_accessor :connection
def initialize(conn)
@connection = conn
def initialize
@columns = {}
@columns_hash = {}
@primary_keys = {}
@data_sources = {}
@indexes = {}
@database_version = nil
@version = nil
end
def initialize_dup(other)
def initialize_dup(other) # :nodoc:
super
@columns = @columns.dup
@columns_hash = @columns_hash.dup
@ -55,15 +277,13 @@ def initialize_dup(other)
@indexes = @indexes.dup
end
def encode_with(coder)
reset_version!
def encode_with(coder) # :nodoc:
coder["columns"] = @columns
coder["primary_keys"] = @primary_keys
coder["data_sources"] = @data_sources
coder["indexes"] = @indexes
coder["version"] = @version
coder["database_version"] = database_version
coder["database_version"] = @database_version
end
def init_with(coder)
@ -80,39 +300,44 @@ def init_with(coder)
end
end
def primary_keys(table_name)
def cached?(table_name)
@columns.key?(table_name)
end
def primary_keys(connection, table_name)
@primary_keys.fetch(table_name) do
if data_source_exists?(table_name)
if data_source_exists?(connection, table_name)
@primary_keys[deep_deduplicate(table_name)] = deep_deduplicate(connection.primary_key(table_name))
end
end
end
# A cached lookup for table existence.
def data_source_exists?(name)
def data_source_exists?(connection, name)
return if ignored_table?(name)
prepare_data_sources if @data_sources.empty?
prepare_data_sources(connection) if @data_sources.empty?
return @data_sources[name] if @data_sources.key? name
@data_sources[deep_deduplicate(name)] = connection.data_source_exists?(name)
end
# Add internal cache for table with +table_name+.
def add(table_name)
if data_source_exists?(table_name)
primary_keys(table_name)
columns(table_name)
columns_hash(table_name)
indexes(table_name)
def add(connection, table_name)
if data_source_exists?(connection, table_name)
primary_keys(connection, table_name)
columns(connection, table_name)
columns_hash(connection, table_name)
indexes(connection, table_name)
end
end
def data_sources(name)
def data_sources(_connection, name) # :nodoc:
@data_sources[name]
end
deprecate data_sources: :data_source_exists?, deprecator: ActiveRecord.deprecator
# Get the columns for a table
def columns(table_name)
def columns(connection, table_name)
if ignored_table?(table_name)
raise ActiveRecord::StatementInvalid, "Table '#{table_name}' doesn't exist"
end
@ -124,20 +349,20 @@ def columns(table_name)
# Get the columns for a table as a hash, key is the column name
# value is the column object.
def columns_hash(table_name)
def columns_hash(connection, table_name)
@columns_hash.fetch(table_name) do
@columns_hash[deep_deduplicate(table_name)] = columns(table_name).index_by(&:name).freeze
@columns_hash[deep_deduplicate(table_name)] = columns(connection, table_name).index_by(&:name).freeze
end
end
# Checks whether the columns hash is already cached for a table.
def columns_hash?(table_name)
def columns_hash?(connection, table_name)
@columns_hash.key?(table_name)
end
def indexes(table_name)
def indexes(connection, table_name)
@indexes.fetch(table_name) do
if data_source_exists?(table_name)
if data_source_exists?(connection, table_name)
@indexes[deep_deduplicate(table_name)] = deep_deduplicate(connection.indexes(table_name))
else
[]
@ -145,19 +370,16 @@ def indexes(table_name)
end
end
def database_version # :nodoc:
def database_version(connection) # :nodoc:
@database_version ||= connection.get_database_version
end
# Clears out internal caches
def clear!
@columns.clear
@columns_hash.clear
@primary_keys.clear
@data_sources.clear
@indexes.clear
@version = nil
@database_version = nil
def version(connection)
@version ||= connection.migration_context.current_version
end
def schema_version
@version
end
def size
@ -165,7 +387,7 @@ def size
end
# Clear out internal caches for the data source +name+.
def clear_data_source_cache!(name)
def clear_data_source_cache!(_connection, name)
@columns.delete name
@columns_hash.delete name
@primary_keys.delete name
@ -173,9 +395,16 @@ def clear_data_source_cache!(name)
@indexes.delete name
end
def add_all(connection) # :nodoc:
tables_to_cache(connection).each do |table|
add(connection, table)
end
version(connection)
database_version(connection)
end
def dump_to(filename)
clear!
tables_to_cache.each { |table| add(table) }
open(filename) { |f|
if filename.include?(".dump")
f.write(Marshal.dump(self))
@ -185,13 +414,11 @@ def dump_to(filename)
}
end
def marshal_dump
reset_version!
[@version, @columns, {}, @primary_keys, @data_sources, @indexes, database_version]
def marshal_dump # :nodoc:
[@version, @columns, {}, @primary_keys, @data_sources, @indexes, @database_version]
end
def marshal_load(array)
def marshal_load(array) # :nodoc:
@version, @columns, _columns_hash, @primary_keys, @data_sources, @indexes, @database_version = array
@indexes ||= {}
@ -199,7 +426,7 @@ def marshal_load(array)
end
private
def tables_to_cache
def tables_to_cache(connection)
connection.data_sources.reject do |table|
ignored_table?(table)
end
@ -211,10 +438,6 @@ def ignored_table?(table_name)
end
end
def reset_version!
@version = connection.schema_version
end
def derive_columns_hash_and_deduplicate_values
@columns = deep_deduplicate(@columns)
@columns_hash = @columns.transform_values { |columns| columns.index_by(&:name) }
@ -236,8 +459,8 @@ def deep_deduplicate(value)
end
end
def prepare_data_sources
tables_to_cache.each do |source|
def prepare_data_sources(connection)
tables_to_cache(connection).each do |source|
@data_sources[source] = true
end
end

@ -293,6 +293,14 @@ def connected?
end
def remove_connection(name = nil)
if name
ActiveRecord.deprecator.warn(<<-MSG.squish)
The name argument for `#remove_connection` is deprecated without replacement
and will be removed in Rails 7.2. `#remove_connection` should always be called
on the connection class directly, which makes the name argument obsolete.
MSG
end
name ||= @connection_specification_name if defined?(@connection_specification_name)
# if removing a connection that has a pool, we reset the
# connection_specification_name so it will use the parent

@ -79,6 +79,10 @@ def decrypt(encrypted_message)
raise ActiveRecord::Encryption::Errors::Decryption
end
def inspect # :nodoc:
"#<#{self.class.name}:#{'%#016x' % (object_id << 1)}>"
end
private
def generate_iv(cipher, clear_text)
if @deterministic

@ -72,8 +72,8 @@ def scheme_for(key_provider: nil, key: nil, deterministic: false, downcase: fals
end
def global_previous_schemes_for(scheme)
ActiveRecord::Encryption.config.previous_schemes.collect do |previous_scheme|
scheme.merge(previous_scheme)
ActiveRecord::Encryption.config.previous_schemes.filter_map do |previous_scheme|
scheme.merge(previous_scheme) if scheme.compatible_with?(previous_scheme)
end
end

@ -36,7 +36,7 @@ def downcase?
end
def deterministic?
@deterministic
!!@deterministic
end
def fixed?
@ -65,6 +65,10 @@ def with_context(&block)
end
end
def compatible_with?(other_scheme)
deterministic? == other_scheme.deterministic?
end
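# Illustrative sketch (not part of the original patch): `compatible_with?` means a
# globally configured previous scheme is only merged into an attribute's scheme when
# both share the same deterministic nature. With the hypothetical config below, only
# the second entry would be picked up for a `deterministic: true` attribute.
#
#   ActiveRecord::Encryption.config.previous = [
#     { downcase: true, deterministic: false }, # skipped: different deterministic nature
#     { downcase: false, deterministic: true }  # kept
#   ]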
private
def validate_config!
raise Errors::Configuration, "ignore_case: can only be used with deterministic encryption" if @ignore_case && !@deterministic

@ -6,9 +6,6 @@
require "set"
require "active_support/dependencies"
require "active_support/core_ext/digest/uuid"
require "active_record/fixture_set/file"
require "active_record/fixture_set/render_context"
require "active_record/fixture_set/table_rows"
require "active_record/test_fixtures"
module ActiveRecord
@ -473,6 +470,10 @@ class FixtureClassNotFound < ActiveRecord::ActiveRecordError # :nodoc:
#
# Any fixtures labeled "_fixture" are safely ignored.
class FixtureSet
require "active_record/fixture_set/file"
require "active_record/fixture_set/render_context"
require "active_record/fixture_set/table_rows"
#--
# An instance of FixtureSet is normally stored in a single YAML file and
# possibly in a folder with the same name.
@ -575,7 +576,7 @@ def identify(label, column_type = :integer)
#
# Example:
#
# composite_identify("label", [:a, :b, :c]) => { a: hash_1, b: hash_2, c: hash_3 }
# composite_identify("label", [:a, :b, :c]) # => { a: hash_1, b: hash_2, c: hash_3 }
def composite_identify(label, key)
key
.index_with

@ -145,7 +145,7 @@ class Railtie < Rails::Railtie # :nodoc:
schema_cache_path: db_config.schema_cache_path
)
cache = ActiveRecord::ConnectionAdapters::SchemaCache.load_from(filename)
cache = ActiveRecord::ConnectionAdapters::SchemaCache._load_from(filename)
next if cache.nil?
if check_schema_cache_dump_version
@ -157,34 +157,49 @@ class Railtie < Rails::Railtie # :nodoc:
end
next if current_version.nil?
if cache.version != current_version
warn "Ignoring #{filename} because it has expired. The current schema version is #{current_version}, but the one in the schema cache file is #{cache.version}."
if cache.schema_version != current_version
warn "Ignoring #{filename} because it has expired. The current schema version is #{current_version}, but the one in the schema cache file is #{cache.schema_version}."
next
end
end
Rails.logger.info("Using schema cache file #{filename}")
connection_pool.set_schema_cache(cache)
connection_pool.schema_reflection.set_schema_cache(cache)
end
end
end
end
initializer "active_record.define_attribute_methods" do |app|
# For resiliency, it is critical that a Rails application should be
# able to boot without depending on the database (or any other service)
# being responsive.
#
# Otherwise a bad deploy adding a lot of load on the database may require the
# application to be shut down entirely so the database can recover before a fixed
# version can be deployed again.
#
# This is why this initializer tries hard not to query the database, and when it
# does, it makes sure to rescue any possible database error.
check_schema_cache_dump_version = config.active_record.check_schema_cache_dump_version
config.after_initialize do
ActiveSupport.on_load(:active_record) do
if app.config.eager_load
# In development and test we shouldn't eagerly define attribute methods because
# db:test:prepare will trigger later and might change the schema.
if app.config.eager_load && !Rails.env.local?
begin
descendants.each do |model|
# If the schema cache was loaded from a dump, we can use it without connecting
schema_cache = model.connection_pool.schema_cache
# If there's no connection yet, we avoid connecting.
schema_cache ||= model.connected? && model.connection.schema_cache
# If the schema cache doesn't have the columns
# hash for the model cached, `define_attribute_methods` would trigger a query.
if schema_cache && schema_cache.columns_hash?(model.table_name)
# If the schema cache doesn't have the columns for this model,
# we avoid calling `define_attribute_methods` as it would trigger a query.
#
# However, if we're already connected to the database, it's too late, so we might
# as well eagerly define the attributes and hope the database timeout is strict enough.
#
# Additionally if `check_schema_cache_dump_version` is enabled, we have to connect to the
# database anyway to load the schema cache dump, so we might as well do it during boot to
# save memory in pre-forking setups and avoid slowness during the first requests post deploy.
schema_reflection = model.connection_pool.schema_reflection
if check_schema_cache_dump_version || schema_reflection.cached?(model.table_name) || model.connected?
model.define_attribute_methods
end
end
@ -367,13 +382,13 @@ class Railtie < Rails::Railtie # :nodoc:
**config.active_record.encryption
auto_filtered_parameters.enable if ActiveRecord::Encryption.config.add_to_filter_parameters
end
ActiveSupport.on_load(:active_record) do
# Support extended queries for deterministic attributes and validations
if ActiveRecord::Encryption.config.extend_queries
ActiveRecord::Encryption::ExtendedDeterministicQueries.install_support
ActiveRecord::Encryption::ExtendedDeterministicUniquenessValidator.install_support
ActiveSupport.on_load(:active_record) do
# Support extended queries for deterministic attributes and validations
if ActiveRecord::Encryption.config.extend_queries
ActiveRecord::Encryption::ExtendedDeterministicQueries.install_support
ActiveRecord::Encryption::ExtendedDeterministicUniquenessValidator.install_support
end
end
end
@ -396,6 +411,7 @@ class Railtie < Rails::Railtie # :nodoc:
db_host: ->(context) { context[:connection].pool.db_config.host },
database: ->(context) { context[:connection].pool.db_config.database }
)
ActiveRecord.disable_prepared_statements = true
if app.config.active_record.query_log_tags.present?
ActiveRecord::QueryLogs.tags = app.config.active_record.query_log_tags

@ -33,7 +33,7 @@ def process_action(action, *args)
end
def cleanup_view_runtime
if logger && logger.info? && ActiveRecord::Base.connected?
if logger && logger.info?
db_rt_before_render = ActiveRecord::RuntimeRegistry.reset
self.db_runtime = (db_runtime || 0) + db_rt_before_render
runtime = super
@ -47,9 +47,8 @@ def cleanup_view_runtime
def append_info_to_payload(payload)
super
if ActiveRecord::Base.connected?
payload[:db_runtime] = (db_runtime || 0) + ActiveRecord::RuntimeRegistry.reset
end
payload[:db_runtime] = (db_runtime || 0) + ActiveRecord::RuntimeRegistry.reset
end
end
end

@ -275,8 +275,11 @@ def check_validity_of_inverse!
# Hence this method.
def inverse_which_updates_counter_cache
unless @inverse_which_updates_counter_cache_defined
@inverse_which_updates_counter_cache = klass.reflect_on_all_associations(:belongs_to).find do |inverse|
inverse.counter_cache_column == counter_cache_column
if counter_cache_column
inverse_candidates = inverse_of ? [inverse_of] : klass.reflect_on_all_associations(:belongs_to)
@inverse_which_updates_counter_cache = inverse_candidates.find do |inverse|
inverse.counter_cache_column == counter_cache_column && (inverse.polymorphic? || inverse.klass == active_record)
end
end
@inverse_which_updates_counter_cache_defined = true
end
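# Illustrative sketch (not part of the original patch): restricting the candidates to
# the declared inverse (or to belongs_to reflections whose class matches this model)
# prevents associations that merely share a counter cache column name from updating
# each other's counters, as in the CommentOverlappingCounterCache models added below:
#
#   class CommentOverlappingCounterCache < ActiveRecord::Base
#     belongs_to :user_comments_count, counter_cache: :comments_count
#     belongs_to :post_comments_count, class_name: "PostCommentsCount"
#   end
#
#   # Adding a comment through UserCommentsCount#comments only bumps
#   # user_comments_count.comments_count, not post_comments_count.comments_count.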
@ -542,7 +545,7 @@ def join_foreign_key
def check_validity!
check_validity_of_inverse!
if !polymorphic? && klass.composite_primary_key?
if !polymorphic? && (klass.composite_primary_key? || active_record.composite_primary_key?)
if (has_one? || collection?) && Array(active_record_primary_key).length != Array(foreign_key).length
raise CompositePrimaryKeyMismatchError.new(self)
elsif belongs_to? && Array(association_primary_key).length != Array(foreign_key).length
@ -805,7 +808,7 @@ def association_class
# klass option is necessary to support loading polymorphic associations
def association_primary_key(klass = nil)
if options[:query_constraints]
(klass || self.klass).query_constraints_list
(klass || self.klass).composite_query_constraints_list
elsif primary_key = options[:primary_key]
@association_primary_key ||= -primary_key.to_s
else

@ -65,8 +65,7 @@ def build(attribute, value, operator = nil)
end
def build_bind_attribute(column_name, value)
type = table.type(column_name)
Relation::QueryAttribute.new(column_name, type.immutable_value(value), type)
Relation::QueryAttribute.new(column_name, value, table.type(column_name))
end
def resolve_arel_attribute(table_name, column_name, &block)

@ -26,7 +26,6 @@ def ids
case value
when Relation
relation = value
relation = relation.merge(scope) if scope
relation = relation.select(primary_key) if select_clause?
relation = relation.where(primary_type => polymorphic_name) if polymorphic_clause?
relation
@ -49,10 +48,6 @@ def polymorphic_name
associated_table.polymorphic_name_association
end
def scope
associated_table.scope
end
def select_clause?
value.select_values.empty?
end

@ -5,6 +5,18 @@
module ActiveRecord
class Relation
class QueryAttribute < ActiveModel::Attribute # :nodoc:
def initialize(...)
super
# The query attribute value may be mutated before we actually "compile" the query.
# To avoid that, if the type uses a serializer, we eagerly compute the value for the database.
if @type.serialized?
value_for_database
elsif @type.mutable? # If the type is simply mutable, we deep_dup it.
@value_before_type_cast = @value_before_type_cast.deep_dup
end
end
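# Illustrative sketch (not part of the original patch): without the eager
# value_for_database / deep_dup above, mutating a bound value after the relation
# is built could poison the query cache, as exercised by the tests added below:
#
#   search = ObjectFixedHash.new({ a: 1 })
#   relation = JsonObj.where(payload: search)  # bind value captured here
#   search.b = 2                               # later mutation no longer leaks into the cached bind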
def type_cast(value)
value
end
@ -36,6 +48,15 @@ def unboundable?
@_unboundable
end
def ==(other)
super && value_for_database == other.value_for_database
end
alias eql? ==
def hash
[self.class, name, value_for_database, type].hash
end
private
def infinity?(value)
value.respond_to?(:infinite?) && value.infinite?

@ -2,7 +2,7 @@
module ActiveRecord
class TableMetadata # :nodoc:
delegate :join_primary_key, :join_primary_type, :join_foreign_key, :join_foreign_type, :scope, to: :reflection
delegate :join_primary_key, :join_primary_type, :join_foreign_key, :join_foreign_type, to: :reflection
def initialize(klass, arel_table, reflection = nil)
@klass = klass

@ -387,7 +387,7 @@ def reconstruct_from_schema(db_config, format = ActiveRecord.schema_format, file
check_schema_file(file) if file
with_temporary_pool(db_config) do
with_temporary_pool(db_config, clobber: true) do
if schema_up_to_date?(db_config, format, file)
truncate_tables(db_config)
else
@ -485,19 +485,19 @@ def clear_schema_cache(filename)
FileUtils.rm_f filename, verbose: false
end
def with_temporary_connection_for_each(env: ActiveRecord::Tasks::DatabaseTasks.env, name: nil, &block) # :nodoc:
def with_temporary_connection_for_each(env: ActiveRecord::Tasks::DatabaseTasks.env, name: nil, clobber: false, &block) # :nodoc:
if name
db_config = ActiveRecord::Base.configurations.configs_for(env_name: env, name: name)
with_temporary_connection(db_config, &block)
with_temporary_connection(db_config, clobber: clobber, &block)
else
ActiveRecord::Base.configurations.configs_for(env_name: env, name: name).each do |db_config|
with_temporary_connection(db_config, &block)
with_temporary_connection(db_config, clobber: clobber, &block)
end
end
end
def with_temporary_connection(db_config) # :nodoc:
with_temporary_pool(db_config) do |pool|
def with_temporary_connection(db_config, clobber: false) # :nodoc:
with_temporary_pool(db_config, clobber: clobber) do |pool|
yield pool.connection
end
end
@ -511,13 +511,13 @@ def migration_connection # :nodoc:
end
private
def with_temporary_pool(db_config)
def with_temporary_pool(db_config, clobber: false)
original_db_config = migration_class.connection_db_config
pool = migration_class.establish_connection(db_config)
pool = migration_class.connection_handler.establish_connection(db_config, clobber: clobber)
yield pool
ensure
migration_class.establish_connection(original_db_config)
migration_class.connection_handler.establish_connection(original_db_config, clobber: clobber)
end
def configs_for(**options)

@ -28,6 +28,7 @@ def drop
def purge
establish_connection(configuration_hash_without_database)
connection.recreate_database(db_config.database, creation_options)
establish_connection
end
def charset

@ -55,6 +55,10 @@ def force_equality?(value)
coder.respond_to?(:object_class) && value.is_a?(coder.object_class)
end
def serialized? # :nodoc:
true
end
private
def default_value?(value)
value == coder.load(nil)

@ -158,6 +158,23 @@ def test_not_specifying_database_name_for_cross_database_selects
end
end
unless in_memory_db?
def test_disable_prepared_statements
original_prepared_statements = ActiveRecord.disable_prepared_statements
db_config = ActiveRecord::Base.configurations.configs_for(env_name: "arunit", name: "primary")
ActiveRecord::Base.establish_connection(db_config.configuration_hash.merge(prepared_statements: true))
assert_predicate ActiveRecord::Base.connection, :prepared_statements?
ActiveRecord.disable_prepared_statements = true
ActiveRecord::Base.establish_connection(db_config.configuration_hash.merge(prepared_statements: true))
assert_not_predicate ActiveRecord::Base.connection, :prepared_statements?
ensure
ActiveRecord.disable_prepared_statements = original_prepared_statements
ActiveRecord::Base.establish_connection :arunit
end
end
def test_table_alias
def @connection.test_table_alias_length() 10; end
class << @connection

@ -187,11 +187,7 @@ def test_establishes_connection_without_database
db_config = ActiveRecord::DatabaseConfigurations::HashConfig.new("default_env", "primary", @configuration)
ActiveRecord::Base.stub(:connection, @connection) do
assert_called_with(
ActiveRecord::Base,
:establish_connection,
[adapter: "mysql2", database: nil]
) do
assert_called(ActiveRecord::Base, :establish_connection, times: 2) do
ActiveRecord::Tasks::DatabaseTasks.purge(db_config)
end
end

@ -322,6 +322,14 @@ def test_partial_index
end
end
def test_partial_index_on_column_named_like_keyword
with_example_table('id serial primary key, number integer, "primary" boolean') do
@connection.add_index "ex", "id", name: "partial", where: "primary" # "primary" is a keyword
index = @connection.indexes("ex").find { |idx| idx.name == "partial" }
assert_equal '"primary"', index.where
end
end
def test_include_index
with_example_table do
@connection.add_index "ex", %w{ id }, name: "include", include: :number

@ -187,11 +187,7 @@ def test_establishes_connection_without_database
db_config = ActiveRecord::DatabaseConfigurations::HashConfig.new("default_env", "primary", @configuration)
ActiveRecord::Base.stub(:connection, @connection) do
assert_called_with(
ActiveRecord::Base,
:establish_connection,
[adapter: "trilogy", database: nil]
) do
assert_called(ActiveRecord::Base, :establish_connection, times: 2) do
ActiveRecord::Tasks::DatabaseTasks.purge(db_config)
end
end

@ -1777,7 +1777,7 @@ def self.name; "Temp"; end
ActiveRecord.belongs_to_required_validates_foreign_key = original_value
end
test "composite primary key malformed association" do
test "composite primary key malformed association class" do
error = assert_raises(ActiveRecord::CompositePrimaryKeyMismatchError) do
book = Cpk::BrokenBook.new(title: "Some book", order: Cpk::Order.new(id: [1, 2]))
book.save!
@ -1785,9 +1785,29 @@ def self.name; "Temp"; end
assert_equal(<<~MESSAGE.squish, error.message)
Association Cpk::BrokenBook#order primary key ["shop_id", "id"]
doesn't match with foreign key order_id. Please specify query_constraints.
doesn't match with foreign key order_id. Please specify query_constraints, or primary_key and foreign_key values.
MESSAGE
end
test "composite primary key malformed association owner class" do
error = assert_raises(ActiveRecord::CompositePrimaryKeyMismatchError) do
book = Cpk::BrokenBookWithNonCpkOrder.new(title: "Some book", order: Cpk::NonCpkOrder.new(id: 1))
book.save!
end
assert_equal(<<~MESSAGE.squish, error.message)
Association Cpk::BrokenBookWithNonCpkOrder#order primary key ["id"]
doesn't match with foreign key ["shop_id", "order_id"]. Please specify query_constraints, or primary_key and foreign_key values.
MESSAGE
end
test "association with query constraints assigns id on replacement" do
book = Cpk::NonCpkBook.create!(id: 1, author_id: 2, non_cpk_order: Cpk::NonCpkOrder.new)
other_order = Cpk::NonCpkOrder.create!
book.non_cpk_order = other_order
assert_equal(other_order.id, book.order_id)
end
end
class BelongsToWithForeignKeyTest < ActiveRecord::TestCase

@ -43,6 +43,7 @@
require "models/human"
require "models/sharded"
require "models/cpk"
require "models/comment_overlapping_counter_cache"
class HasManyAssociationsTestForReorderWithJoinDependency < ActiveRecord::TestCase
fixtures :authors, :author_addresses, :posts, :comments
@ -1403,6 +1404,23 @@ def test_counter_cache_updates_in_memory_after_update_with_inverse_of_disabled
assert_equal 2, topic.reload.replies_count
end
def test_counter_cache_updates_in_memory_after_create_with_overlapping_counter_cache_columns
user = UserCommentsCount.create!
post = PostCommentsCount.create!
assert_difference "user.comments_count", +1 do
assert_no_difference "post.comments_count" do
post.comments << CommentOverlappingCounterCache.create!(user_comments_count: user)
end
end
assert_difference "user.comments_count", +1 do
assert_no_difference "post.comments_count" do
user.comments << CommentOverlappingCounterCache.create!(post_comments_count: post)
end
end
end
def test_counter_cache_updates_in_memory_after_update_with_inverse_of_enabled
category = Category.create!(name: "Counter Cache")
@ -3174,7 +3192,7 @@ def test_key_ensuring_owner_was_is_valid_when_dependent_option_is_destroy_async
end
end
test "composite primary key malformed association" do
test "composite primary key malformed association class" do
error = assert_raises(ActiveRecord::CompositePrimaryKeyMismatchError) do
order = Cpk::BrokenOrder.new(id: [1, 2], books: [Cpk::Book.new(title: "Some book")])
order.save!
@ -3182,7 +3200,19 @@ def test_key_ensuring_owner_was_is_valid_when_dependent_option_is_destroy_async
assert_equal(<<~MESSAGE.squish, error.message)
Association Cpk::BrokenOrder#books primary key ["shop_id", "id"]
doesn't match with foreign key broken_order_id. Please specify query_constraints.
doesn't match with foreign key broken_order_id. Please specify query_constraints, or primary_key and foreign_key values.
MESSAGE
end
test "composite primary key malformed association owner class" do
error = assert_raises(ActiveRecord::CompositePrimaryKeyMismatchError) do
order = Cpk::BrokenOrderWithNonCpkBooks.new(id: [1, 2], books: [Cpk::NonCpkBook.new(title: "Some book")])
order.save!
end
assert_equal(<<~MESSAGE.squish, error.message)
Association Cpk::BrokenOrderWithNonCpkBooks#books primary key ["shop_id", "id"]
doesn't match with foreign key broken_order_with_non_cpk_books_id. Please specify query_constraints, or primary_key and foreign_key values.
MESSAGE
end

@ -1639,6 +1639,25 @@ def test_loading_cpk_association_with_unpersisted_owner
assert_equal([order_agreement], book.order_agreements.to_a)
end
def test_cpk_stale_target
order = Cpk::Order.create!(shop_id: 1)
book = Cpk::BookWithOrderAgreements.create!(id: [1, 2], order: order)
Cpk::OrderAgreement.create!(order: order)
book.order_agreements.load
book.order = Cpk::Order.new
assert_predicate(book.association(:order_agreements), :stale_target?)
end
def test_cpk_association_build_through_singular
order = Cpk::OrderWithSingularBookChapters.create!(id: [1, 2])
book = order.create_book!(id: [3, 4])
chapter = order.chapters.build
assert_equal(chapter.book, book)
end
private
def make_model(name)
Class.new(ActiveRecord::Base) { define_singleton_method(:name) { name } }

@ -958,7 +958,7 @@ class SpecialContent < ActiveRecord::Base
end
end
test "composite primary key malformed association" do
test "composite primary key malformed association class" do
error = assert_raises(ActiveRecord::CompositePrimaryKeyMismatchError) do
order = Cpk::BrokenOrder.new(id: [1, 2], book: Cpk::Book.new(title: "Some book"))
order.save!
@ -966,7 +966,19 @@ class SpecialContent < ActiveRecord::Base
assert_equal(<<~MESSAGE.squish, error.message)
Association Cpk::BrokenOrder#book primary key ["shop_id", "id"]
doesn't match with foreign key broken_order_id. Please specify query_constraints.
doesn't match with foreign key broken_order_id. Please specify query_constraints, or primary_key and foreign_key values.
MESSAGE
end
test "composite primary key malformed association owner class" do
error = assert_raises(ActiveRecord::CompositePrimaryKeyMismatchError) do
order = Cpk::BrokenOrderWithNonCpkBooks.new(id: [1, 2], book: Cpk::NonCpkBook.new(title: "Some book"))
order.save!
end
assert_equal(<<~MESSAGE.squish, error.message)
Association Cpk::BrokenOrderWithNonCpkBooks#book primary key ["shop_id", "id"]
doesn't match with foreign key broken_order_with_non_cpk_books_id. Please specify query_constraints, or primary_key and foreign_key values.
MESSAGE
end
end

@ -100,6 +100,14 @@ def test_building_multiple_associations_builds_through_record
assert_predicate member_detail_with_two_associations.member, :new_record?
end
def test_building_works_with_has_one_through_belongs_to
new_member = Member.create!(name: "Joe")
new_member.create_current_membership!
new_club = new_member.build_club
assert_equal(new_member.club, new_club)
end
def test_creating_multiple_associations_creates_through_record
member_type = MemberType.create!
member = Member.create!
@ -454,4 +462,15 @@ def test_loading_cpk_association_with_unpersisted_owner
assert_equal(order_agreement, book.order_agreement)
end
def test_cpk_stale_target
order = Cpk::Order.create!(shop_id: 1)
book = Cpk::BookWithOrderAgreements.create!(id: [1, 2], order: order)
Cpk::OrderAgreement.create!(order: order)
book.order_agreement
book.order = Cpk::Order.new
assert_predicate(book.association(:order_agreement), :stale_target?)
end
end

@ -793,7 +793,7 @@ def test_with_has_many_inversing_does_not_trigger_association_callbacks_on_set_w
end
end
def test_with_hash_many_inversing_does_not_add_duplicate_associated_objects
def test_with_has_many_inversing_does_not_add_duplicate_associated_objects
with_has_many_inversing(Interest) do
human = Human.new
interest = Interest.new(human: human)
@ -802,6 +802,26 @@ def test_with_hash_many_inversing_does_not_add_duplicate_associated_objects
end
end
def test_with_has_many_inversing_does_not_add_unsaved_duplicate_records_when_collection_is_loaded
with_has_many_inversing(Interest) do
human = Human.create!
human.interests.load
interest = Interest.new(human: human)
human.interests << interest
assert_equal 1, human.interests.size
end
end
def test_with_has_many_inversing_does_not_add_saved_duplicate_records_when_collection_is_loaded
with_has_many_inversing(Interest) do
human = Human.create!
human.interests.load
interest = Interest.create!(human: human)
human.interests << interest
assert_equal 1, human.interests.size
end
end
def test_recursive_model_has_many_inversing
with_has_many_inversing do
main = Branch.create!

@ -1,5 +1,6 @@
# frozen_string_literal: true
require "pp"
require "cases/helper"
require "models/computer"
require "models/developer"
@ -78,6 +79,16 @@ def test_loading_the_association_target_should_load_most_recent_attributes_for_c
assert_equal "Deck", ship.parts[0].name
end
def test_loading_cpk_association_when_persisted_and_in_memory_differ
order = Cpk::Order.create!(id: [1, 2], status: "paid")
book = order.books.create!(id: [3, 4], title: "Book")
Cpk::Book.find(book.id).update_columns(title: "A different title")
order.books.load
assert_equal [3, 4], book.id
end
def test_include_with_order_works
assert_nothing_raised { Account.all.merge!(order: "id", includes: :firm).first }
assert_nothing_raised { Account.all.merge!(order: :id, includes: :firm).first }
@ -451,6 +462,15 @@ def test_inspect_does_not_reload_a_not_yet_loaded_target
assert_predicate andreas.audit_logs, :loaded?
end
def test_pretty_print_does_not_reload_a_not_yet_loaded_target
andreas = Developer.new(log: "new developer added")
assert_not_predicate andreas.audit_logs, :loaded?
out = StringIO.new
PP.pp(andreas.audit_logs, out)
assert_match(/message: "new developer added"/, out.string)
assert_predicate andreas.audit_logs, :loaded?
end
def test_save_on_parent_saves_children
developer = Developer.create name: "Bryan", salary: 50_000
assert_equal 1, developer.reload.audit_logs.size

@ -279,6 +279,22 @@ def test_a_class_using_custom_pool_and_switching_back_to_primary
assert_same klass2.connection, ActiveRecord::Base.connection
end
def test_remove_connection_with_name_argument_is_deprecated
klass2 = Class.new(Base) { def self.name; "klass2"; end }
assert_same klass2.connection, ActiveRecord::Base.connection
pool = klass2.establish_connection(ActiveRecord::Base.connection_pool.db_config.configuration_hash)
assert_same klass2.connection, pool.connection
assert_not_same klass2.connection, ActiveRecord::Base.connection
assert_deprecated(ActiveRecord.deprecator) do
ActiveRecord::Base.remove_connection("klass2")
end
ensure
ActiveRecord::Base.establish_connection :arunit
end
class ApplicationRecord < ActiveRecord::Base
self.abstract_class = true
end
@ -416,8 +432,7 @@ def test_forked_child_recovers_from_disconnected_parent
end
def test_retrieve_connection_pool_copies_schema_cache_from_ancestor_pool
@pool.schema_cache = @pool.connection.schema_cache
@pool.schema_cache.add("posts")
@pool.connection.schema_cache.add("posts")
rd, wr = IO.pipe
rd.binmode
@ -426,7 +441,7 @@ def test_retrieve_connection_pool_copies_schema_cache_from_ancestor_pool
pid = fork {
rd.close
pool = @handler.retrieve_connection_pool(@connection_name)
wr.write Marshal.dump pool.schema_cache.size
wr.write Marshal.dump pool.connection.schema_cache.size
wr.close
exit!
}
@ -434,7 +449,7 @@ def test_retrieve_connection_pool_copies_schema_cache_from_ancestor_pool
wr.close
Process.waitpid pid
assert_equal @pool.schema_cache.size, Marshal.load(rd.read)
assert_equal @pool.connection.schema_cache.size, Marshal.load(rd.read)
rd.close
end

@ -7,33 +7,66 @@ module ConnectionAdapters
class SchemaCacheTest < ActiveRecord::TestCase
def setup
@connection = ARUnit2Model.connection
@cache = SchemaCache.new @connection
@cache = new_bound_reflection
@database_version = @connection.get_database_version
@check_schema_cache_dump_version_was = SchemaReflection.check_schema_cache_dump_version
end
def teardown
SchemaReflection.check_schema_cache_dump_version = @check_schema_cache_dump_version_was
end
def new_bound_reflection(connection = @connection)
BoundSchemaReflection.new(SchemaReflection.new(nil), connection)
end
def load_bound_reflection(filename, connection = @connection)
BoundSchemaReflection.new(SchemaReflection.new(filename), connection).tap do |cache|
cache.load!
end
end
def test_cached?
cache = new_bound_reflection
assert_not cache.cached?("courses")
cache.columns("courses").size
assert cache.cached?("courses")
tempfile = Tempfile.new(["schema_cache-", ".yml"])
cache.dump_to(tempfile.path)
reflection = SchemaReflection.new(tempfile.path)
# `check_schema_cache_dump_version` forces us to have an active connection
# to load the cache.
assert_not reflection.cached?("courses")
# If we disable it we can load the cache
SchemaReflection.check_schema_cache_dump_version = false
assert reflection.cached?("courses")
cache = BoundSchemaReflection.new(reflection, :__unused_connection__)
assert cache.cached?("courses")
end
def test_yaml_dump_and_load
connection = ActiveRecord::Base.connection
# Create an empty cache.
cache = SchemaCache.new connection
cache = new_bound_reflection
tempfile = Tempfile.new(["schema_cache-", ".yml"])
# Dump it. It should get populated before dumping.
cache.dump_to(tempfile.path)
# Load the cache.
cache = SchemaCache.load_from(tempfile.path)
# Give it a connection. Usually the connection
# would get set on the cache when it's retrieved
# from the pool.
cache.connection = connection
cache = load_bound_reflection(tempfile.path)
assert_no_queries do
assert_equal 12, cache.columns("posts").size
assert_equal 12, cache.columns_hash("posts").size
assert cache.data_sources("posts")
assert_equal "id", cache.primary_keys("posts")
assert_equal 1, cache.indexes("posts").size
assert_equal 3, cache.columns("courses").size
assert_equal 3, cache.columns_hash("courses").size
assert cache.data_source_exists?("courses")
assert_equal "id", cache.primary_keys("courses")
assert_equal 1, cache.indexes("courses").size
assert_equal @database_version.to_s, cache.database_version.to_s
end
ensure
@ -41,7 +74,7 @@ def test_yaml_dump_and_load
end
def test_cache_path_can_be_in_directory
cache = SchemaCache.new @connection
cache = new_bound_reflection
tmp_dir = Dir.mktmpdir
filename = File.join(tmp_dir, "schema.json")
@ -54,7 +87,7 @@ def test_cache_path_can_be_in_directory
def test_yaml_dump_and_load_with_gzip
# Create an empty cache.
cache = SchemaCache.new @connection
cache = new_bound_reflection
tempfile = Tempfile.new(["schema_cache-", ".yml.gz"])
# Dump it. It should get populated before dumping.
@ -65,30 +98,22 @@ def test_yaml_dump_and_load_with_gzip
YAML.respond_to?(:unsafe_load) ? YAML.unsafe_load(gz.read) : YAML.load(gz.read)
end
# Give it a connection. Usually the connection
# would get set on the cache when it's retrieved
# from the pool.
cache.connection = @connection
assert_no_queries do
assert_equal 3, cache.columns("courses").size
assert_equal 3, cache.columns_hash("courses").size
assert cache.data_sources("courses")
assert_equal "id", cache.primary_keys("courses")
assert_equal 1, cache.indexes("courses").size
assert_equal @database_version.to_s, cache.database_version.to_s
assert_equal 3, cache.columns(@connection, "courses").size
assert_equal 3, cache.columns_hash(@connection, "courses").size
assert cache.data_source_exists?(@connection, "courses")
assert_equal "id", cache.primary_keys(@connection, "courses")
assert_equal 1, cache.indexes(@connection, "courses").size
assert_equal @database_version.to_s, cache.database_version(@connection).to_s
end
# Load the cache the usual way.
cache = SchemaCache.load_from(tempfile.path)
# Give it a connection.
cache.connection = @connection
cache = load_bound_reflection(tempfile.path)
assert_no_queries do
assert_equal 3, cache.columns("courses").size
assert_equal 3, cache.columns_hash("courses").size
assert cache.data_sources("courses")
assert cache.data_source_exists?("courses")
assert_equal "id", cache.primary_keys("courses")
assert_equal 1, cache.indexes("courses").size
assert_equal @database_version.to_s, cache.database_version.to_s
@ -98,33 +123,33 @@ def test_yaml_dump_and_load_with_gzip
end
def test_yaml_loads_5_1_dump
cache = SchemaCache.load_from(schema_dump_path)
cache.connection = ActiveRecord::Base.connection
cache = load_bound_reflection(schema_dump_path)
assert_no_queries do
assert_equal 11, cache.columns("posts").size
assert_equal 11, cache.columns_hash("posts").size
assert cache.data_sources("posts")
assert cache.data_source_exists?("posts")
assert_equal "id", cache.primary_keys("posts")
end
end
def test_yaml_loads_5_1_dump_without_indexes_still_queries_for_indexes
cache = SchemaCache.load_from(schema_dump_path)
cache.connection = ActiveRecord::Base.connection
cache = load_bound_reflection(schema_dump_path)
assert_queries :any, ignore_none: true do
assert_equal 1, cache.indexes("posts").size
assert_equal 1, cache.indexes("courses").size
end
end
def test_yaml_loads_5_1_dump_without_database_version_still_queries_for_database_version
cache = SchemaCache.load_from(schema_dump_path)
cache.connection = ActiveRecord::Base.connection
cache = load_bound_reflection(schema_dump_path)
# We can't verify queries get executed because the database version gets
# cached in both MySQL and PostgreSQL outside of the schema cache.
assert_nil cache.instance_variable_get(:@database_version)
assert_not_nil reflection = @cache.instance_variable_get(:@schema_reflection)
assert_nil reflection.instance_variable_get(:@cache)
assert_equal @database_version.to_s, cache.database_version.to_s
end
@ -179,52 +204,57 @@ def test_caches_database_version
def test_clearing
@cache.columns("courses")
@cache.columns_hash("courses")
@cache.data_sources("courses")
@cache.data_source_exists?("courses")
@cache.primary_keys("courses")
@cache.indexes("courses")
@cache.clear!
assert_equal 0, @cache.size
assert_nil @cache.instance_variable_get(:@database_version)
reflection = @cache.instance_variable_get(:@schema_reflection)
schema_cache = reflection.instance_variable_get(:@cache)
assert_nil schema_cache.instance_variable_get(:@database_version)
end
def test_marshal_dump_and_load
# Create an empty cache.
cache = SchemaCache.new @connection
cache = new_bound_reflection
# Populate it.
cache.add("courses")
# We're going to manually dump, so we also need to force
# database_version to be stored.
cache.database_version
# Create a new cache by marshal dumping / loading.
cache = Marshal.load(Marshal.dump(cache))
cache = Marshal.load(Marshal.dump(cache.instance_variable_get(:@schema_reflection).instance_variable_get(:@cache)))
assert_no_queries do
assert_equal 3, cache.columns("courses").size
assert_equal 3, cache.columns_hash("courses").size
assert cache.data_sources("courses")
assert_equal "id", cache.primary_keys("courses")
assert_equal 1, cache.indexes("courses").size
assert_equal @database_version.to_s, cache.database_version.to_s
assert_equal 3, cache.columns(@connection, "courses").size
assert_equal 3, cache.columns_hash(@connection, "courses").size
assert cache.data_source_exists?(@connection, "courses")
assert_equal "id", cache.primary_keys(@connection, "courses")
assert_equal 1, cache.indexes(@connection, "courses").size
assert_equal @database_version.to_s, cache.database_version(@connection).to_s
end
end
def test_marshal_dump_and_load_via_disk
# Create an empty cache.
cache = SchemaCache.new @connection
cache = new_bound_reflection
tempfile = Tempfile.new(["schema_cache-", ".dump"])
# Dump it. It should get populated before dumping.
cache.dump_to(tempfile.path)
# Load a new cache.
cache = SchemaCache.load_from(tempfile.path)
cache.connection = @connection
cache = load_bound_reflection(tempfile.path)
assert_no_queries do
assert_equal 3, cache.columns("courses").size
assert_equal 3, cache.columns_hash("courses").size
assert cache.data_sources("courses")
assert cache.data_source_exists?("courses")
assert_equal "id", cache.primary_keys("courses")
assert_equal 1, cache.indexes("courses").size
assert_equal @database_version.to_s, cache.database_version.to_s
@ -237,26 +267,25 @@ def test_marshal_dump_and_load_with_ignored_tables
old_ignore = ActiveRecord.schema_cache_ignored_tables
ActiveRecord.schema_cache_ignored_tables = ["professors"]
# Create an empty cache.
cache = SchemaCache.new @connection
cache = new_bound_reflection
tempfile = Tempfile.new(["schema_cache-", ".dump"])
# Dump it. It should get populated before dumping.
cache.dump_to(tempfile.path)
# Load a new cache.
cache = SchemaCache.load_from(tempfile.path)
cache.connection = @connection
cache = load_bound_reflection(tempfile.path)
# Assert a table in the cache
assert cache.data_sources("courses"), "expected courses to be in the cached data_sources"
assert cache.data_source_exists?("courses"), "expected courses to be in the cached data_sources"
assert_equal 3, cache.columns("courses").size
assert_equal 3, cache.columns_hash("courses").size
assert cache.data_sources("courses")
assert cache.data_source_exists?("courses")
assert_equal "id", cache.primary_keys("courses")
assert_equal 1, cache.indexes("courses").size
# Assert ignored table. Behavior should match non-existent table.
assert_nil cache.data_sources("professors"), "expected professors to not be in the cached data_sources"
assert_nil cache.data_source_exists?("professors"), "expected professors to not be in the cached data_sources"
assert_raises ActiveRecord::StatementInvalid do
cache.columns("professors")
end
@ -272,7 +301,7 @@ def test_marshal_dump_and_load_with_ignored_tables
def test_marshal_dump_and_load_with_gzip
# Create an empty cache.
cache = SchemaCache.new @connection
cache = new_bound_reflection
tempfile = Tempfile.new(["schema_cache-", ".dump.gz"])
# Dump it. It should get populated before dumping.
@ -280,25 +309,23 @@ def test_marshal_dump_and_load_with_gzip
# Load a new cache manually.
cache = Zlib::GzipReader.open(tempfile.path) { |gz| Marshal.load(gz.read) }
cache.connection = @connection
assert_no_queries do
assert_equal 3, cache.columns("courses").size
assert_equal 3, cache.columns_hash("courses").size
assert cache.data_sources("courses")
assert_equal "id", cache.primary_keys("courses")
assert_equal 1, cache.indexes("courses").size
assert_equal @database_version.to_s, cache.database_version.to_s
assert_equal 3, cache.columns(@connection, "courses").size
assert_equal 3, cache.columns_hash(@connection, "courses").size
assert cache.data_source_exists?(@connection, "courses")
assert_equal "id", cache.primary_keys(@connection, "courses")
assert_equal 1, cache.indexes(@connection, "courses").size
assert_equal @database_version.to_s, cache.database_version(@connection).to_s
end
# Load a new cache.
cache = SchemaCache.load_from(tempfile.path)
cache.connection = @connection
cache = load_bound_reflection(tempfile.path)
assert_no_queries do
assert_equal 3, cache.columns("courses").size
assert_equal 3, cache.columns_hash("courses").size
assert cache.data_sources("courses")
assert cache.data_source_exists?("courses")
assert_equal "id", cache.primary_keys("courses")
assert_equal 1, cache.indexes("courses").size
assert_equal @database_version.to_s, cache.database_version.to_s
@ -340,40 +367,29 @@ def test_when_lazily_load_schema_cache_is_set_cache_is_lazily_populated_when_est
ActiveRecord::Base.establish_connection(new_config)
# cache is empty
assert_empty ActiveRecord::Base.connection.schema_cache.instance_variable_get(:@primary_keys)
assert_empty ActiveRecord::Base.connection.schema_cache.instance_variable_get(:@data_sources)
assert_empty ActiveRecord::Base.connection.schema_cache.instance_variable_get(:@indexes)
# cache starts empty
assert_equal 0, ActiveRecord::Base.connection.pool.schema_reflection.instance_variable_get(:@cache).size
# calling dump_to will load data sources, but not the rest of the cache
# so we need to set the cache manually. This essentially mimics the behavior
# of the Railtie.
cache = SchemaCache.new(ActiveRecord::Base.connection)
cache.dump_to(tempfile.path)
ActiveRecord::Base.connection.schema_cache = cache
# now we access the cache, causing it to load
assert ActiveRecord::Base.connection.schema_cache.version
assert File.exist?(tempfile)
assert_not_empty ActiveRecord::Base.connection.schema_cache.instance_variable_get(:@primary_keys)
assert_not_empty ActiveRecord::Base.connection.schema_cache.instance_variable_get(:@data_sources)
assert_not_empty ActiveRecord::Base.connection.schema_cache.instance_variable_get(:@indexes)
assert ActiveRecord::Base.connection.pool.schema_reflection.instance_variable_get(:@cache)
# assert cache is empty on new connection
# assert cache is still empty on new connection (precondition for the
# following to show it is loading because of the config change)
ActiveRecord::Base.establish_connection(new_config)
assert File.exist?(tempfile)
assert_empty ActiveRecord::Base.connection.schema_cache.instance_variable_get(:@primary_keys)
assert_empty ActiveRecord::Base.connection.schema_cache.instance_variable_get(:@data_sources)
assert_empty ActiveRecord::Base.connection.schema_cache.instance_variable_get(:@indexes)
assert_equal 0, ActiveRecord::Base.connection.pool.schema_reflection.instance_variable_get(:@cache).size
# cache is lazily loaded when lazily loading is on
# cache is loaded upon connection when lazily loading is on
old_config = ActiveRecord.lazily_load_schema_cache
ActiveRecord.lazily_load_schema_cache = true
ActiveRecord::Base.establish_connection(new_config)
assert File.exist?(tempfile)
assert_not_empty ActiveRecord::Base.connection.schema_cache.instance_variable_get(:@primary_keys)
assert_not_empty ActiveRecord::Base.connection.schema_cache.instance_variable_get(:@data_sources)
assert_not_empty ActiveRecord::Base.connection.schema_cache.instance_variable_get(:@indexes)
assert ActiveRecord::Base.connection.pool.schema_reflection.instance_variable_get(:@cache)
ensure
ActiveRecord.lazily_load_schema_cache = old_config
ActiveRecord::Base.establish_connection(:arunit)

@ -558,13 +558,16 @@ def test_connection_notification_is_called_for_shard
def test_pool_sets_connection_schema_cache
connection = pool.checkout
schema_cache = SchemaCache.new connection
schema_cache.add(:posts)
pool.schema_cache = schema_cache
connection.schema_cache.add(:posts)
pool.with_connection do |conn|
assert_equal pool.schema_cache.size, conn.schema_cache.size
assert_same pool.schema_cache.columns(:posts), conn.schema_cache.columns(:posts)
# We've retrieved a second, distinct, connection from the pool
assert_not_same connection, conn
# But the new connection can already see the schema cache
# entry we added above
assert_equal connection.schema_cache.size, conn.schema_cache.size
assert_same connection.schema_cache.columns(:posts), conn.schema_cache.columns(:posts)
end
pool.checkin connection

@ -36,6 +36,11 @@ class ActiveRecord::Encryption::Aes256GcmTest < ActiveRecord::EncryptionTestCase
assert_not_equal cipher.encrypt("Some text").headers.iv, cipher.encrypt("Some other text").headers.iv
end
test "inspect_does not show secrets" do
cipher = ActiveRecord::Encryption::Cipher::Aes256Gcm.new(@key)
assert_match(/\A#<ActiveRecord::Encryption::Cipher::Aes256Gcm:0x[0-9a-f]+>\z/, cipher.inspect)
end
private
def assert_cipher_encrypts(cipher, content_to_encrypt)
encrypted_content = cipher.encrypt(content_to_encrypt)

@ -2,7 +2,6 @@
require "cases/encryption/helper"
require "models/author_encrypted"
require "models/book"
class ActiveRecord::Encryption::EncryptionSchemesTest < ActiveRecord::EncryptionTestCase
test "can decrypt encrypted_value encrypted with a different encryption scheme" do
@ -102,7 +101,7 @@ class ActiveRecord::Encryption::EncryptionSchemesTest < ActiveRecord::Encryption
test "deterministic encryption is fixed by default: it will always use the oldest scheme to encrypt data" do
ActiveRecord::Encryption.config.support_unencrypted_data = false
ActiveRecord::Encryption.config.deterministic_key = "12345"
ActiveRecord::Encryption.config.previous = [{ downcase: true }, { downcase: false }]
ActiveRecord::Encryption.config.previous = [{ downcase: true, deterministic: true }, { downcase: false, deterministic: true }]
encrypted_author_class = Class.new(Author) do
self.table_name = "authors"
@ -114,10 +113,25 @@ class ActiveRecord::Encryption::EncryptionSchemesTest < ActiveRecord::Encryption
assert_equal "stephen king", author.name
end
test "don't use global previous schemes with a different deterministic nature" do
ActiveRecord::Encryption.config.support_unencrypted_data = false
ActiveRecord::Encryption.config.deterministic_key = "12345"
ActiveRecord::Encryption.config.previous = [{ downcase: true, deterministic: false }, { downcase: false, deterministic: true }]
encrypted_author_class = Class.new(Author) do
self.table_name = "authors"
encrypts :name, deterministic: true, downcase: false
end
author = encrypted_author_class.create!(name: "STEPHEN KING")
assert_equal "STEPHEN KING", author.name
end
test "deterministic encryption will use the newest encryption scheme to encrypt data when setting it to { fixed: false }" do
ActiveRecord::Encryption.config.support_unencrypted_data = false
ActiveRecord::Encryption.config.deterministic_key = "12345"
ActiveRecord::Encryption.config.previous = [{ downcase: true }, { downcase: false }]
ActiveRecord::Encryption.config.previous = [{ downcase: true, deterministic: true }, { downcase: false, deterministic: true }]
encrypted_author_class = Class.new(Author) do
self.table_name = "authors"
@ -129,6 +143,38 @@ class ActiveRecord::Encryption::EncryptionSchemesTest < ActiveRecord::Encryption
assert_equal "STEPHEN KING", author.name
end
test "use global previous schemes when performing queries" do
ActiveRecord::Encryption.config.support_unencrypted_data = false
ActiveRecord::Encryption.config.deterministic_key = "12345"
ActiveRecord::Encryption.config.previous = [{ downcase: true, deterministic: true }, { downcase: false, deterministic: true }]
encrypted_author_class = Class.new(Author) do
self.table_name = "authors"
encrypts :name, deterministic: true, downcase: false
end
author = encrypted_author_class.create!(name: "STEPHEN KING")
assert_equal author, encrypted_author_class.find_by_name("STEPHEN KING")
assert_equal author, encrypted_author_class.find_by_name("stephen king")
end
test "don't use global previous schemes with a different deterministic nature when performing queries" do
ActiveRecord::Encryption.config.support_unencrypted_data = false
ActiveRecord::Encryption.config.deterministic_key = "12345"
ActiveRecord::Encryption.config.previous = [{ downcase: true, deterministic: false }, { downcase: false, deterministic: true }]
encrypted_author_class = Class.new(Author) do
self.table_name = "authors"
encrypts :name, deterministic: true, downcase: false
end
author = encrypted_author_class.create!(name: "STEPHEN KING")
assert_equal author, encrypted_author_class.find_by_name("STEPHEN KING")
assert_nil encrypted_author_class.find_by_name("stephen king")
end
private
class TestEncryptor
def initialize(ciphertexts_by_clear_value)
@ -179,6 +225,6 @@ def declare_class_with_global_previous_encryption_schemes(*previous_schemes)
self.table_name = "authors"
encrypts :name
end
end.tap { |klass| klass.type_for_attribute(:name) }
end
end

@ -152,6 +152,16 @@ def assert_slower_by_at_most(threshold_factor, baseline:, baseline_label: BASELI
end
end
# We eager load encrypted attribute types as they are declared, so that they pick up the
# default encryption setup for tests. Because those types are otherwise loaded lazily when
# used, this prevents side effects where some tests modify encryption config settings and
# affect others.
#
# Notice that we clear the declaration listeners when each test starts, so this will only
# affect the classes loaded before the tests start, not those declared during tests.
ActiveRecord::Encryption.on_encrypted_attribute_declared do |klass, attribute_name|
klass.type_for_attribute(attribute_name)
end
class ActiveRecord::EncryptionTestCase < ActiveRecord::TestCase
include ActiveRecord::Encryption::EncryptionHelpers, ActiveRecord::Encryption::PerformanceHelpers

@ -26,14 +26,6 @@ def assert_valid_declaration(**options)
end
end
def declare_and_use_class(**options)
encrypted_book_class = Class.new(Book) do
encrypts :name, **options
end
encrypted_book_class.create! name: "Some name"
end
def declare_encrypts_with(options)
Class.new(Book) do
encrypts :name, **options

@ -23,7 +23,7 @@ class ActiveRecord::Encryption::UniquenessValidationsTest < ActiveRecord::Encryp
end
test "uniqueness validations work when using old encryption schemes" do
ActiveRecord::Encryption.config.previous = [ { downcase: true } ]
ActiveRecord::Encryption.config.previous = [ { downcase: true, deterministic: true } ]
OldEncryptionBook = Class.new(UnencryptedBook) do
self.table_name = "encrypted_books"

@ -149,8 +149,8 @@ def test_serializable_hash_with_default_except_option_and_excluding_inheritance_
@contact = ContactSti.new(@contact.attributes)
assert_equal "ContactSti", @contact.type
def @contact.serializable_hash(options = {})
super({ except: %w(age) }.merge!(options))
def @contact.serializable_hash(options = nil)
super({ except: %w(age) }.merge!(options || {}))
end
json = @contact.to_json

@ -688,7 +688,7 @@ class JsonObj < ActiveRecord::Base
attribute :payload, :json
end
class HashWithFixedHash < Hash
class ObjectFixedHash < Struct.new(:a, :b)
# this isn't very realistic, but it is the worst case and therefore a good
# case to test
def hash
@ -709,12 +709,12 @@ def setup
end
def test_query_cache_handles_mutated_binds
JsonObj.create(payload: { a: 1 })
JsonObj.create(payload: ObjectFixedHash.new({ a: 1 }))
search = HashWithFixedHash[a: 1]
search = ObjectFixedHash.new({ a: 1 })
JsonObj.where(payload: search).first # populate the cache
search.merge!(b: 2)
search.b = 2
assert_nil JsonObj.where(payload: search).first, "cache returned a false positive"
end
@ -724,6 +724,51 @@ def teardown
end
end
class QuerySerializedParamTest < ActiveRecord::TestCase
self.use_transactional_tests = false
fixtures :topics
class YAMLObj < ActiveRecord::Base
self.table_name = "yaml_objs"
serialize :payload
end
def setup
@use_yaml_unsafe_load_was = ActiveRecord.use_yaml_unsafe_load
ActiveRecord.use_yaml_unsafe_load = true
ActiveRecord::Base.connection.create_table("yaml_objs", force: true) do |t|
t.text "payload"
end
ActiveRecord::Base.connection.enable_query_cache!
end
def teardown
ActiveRecord::Base.connection.disable_query_cache!
ActiveRecord::Base.connection.drop_table("yaml_objs", if_exists: true)
ActiveRecord.use_yaml_unsafe_load = @use_yaml_unsafe_load_was
end
def test_query_serialized_active_record
topic = Topic.first
assert_not_nil topic
obj = YAMLObj.create!(payload: { topic: topic })
# This is absolutely terrible, no-one should ever do this
assert_equal obj, YAMLObj.where(payload: { topic: topic }).first
relation = YAMLObj.where(payload: { topic: topic })
topic.title = "New Title"
assert_equal obj, relation.first
assert_nil YAMLObj.where(payload: { topic: topic }).first
end
end
class QueryCacheExpiryTest < ActiveRecord::TestCase
fixtures :tasks, :posts, :categories, :categories_posts

@ -463,12 +463,6 @@ def test_where_on_association_with_collection_polymorphic_relation
assert_equal [treasures(:diamond)], treasures
end
def test_where_on_association_with_scoped_relation
authors = Author.where(welcome_posts: Post.all)
assert_equal 1, authors.count
assert_equal authors(:david), authors.first
end
def test_where_with_strong_parameters
author = authors(:david)
params = ProtectedParams.new(name: author.name)

@ -58,87 +58,88 @@ def assert_called_for_configs(method_name, configs, &block)
}
class DatabaseTasksCheckProtectedEnvironmentsTest < ActiveRecord::TestCase
self.use_transactional_tests = false
if current_adapter?(:SQLite3Adapter) && !in_memory_db?
self.use_transactional_tests = false
def setup
recreate_metadata_tables
end
def setup
recreate_metadata_tables
end
def teardown
recreate_metadata_tables
end
def teardown
recreate_metadata_tables
end
def test_raises_an_error_when_called_with_protected_environment
protected_environments = ActiveRecord::Base.protected_environments
current_env = ActiveRecord::Base.connection.migration_context.current_environment
def test_raises_an_error_when_called_with_protected_environment
protected_environments = ActiveRecord::Base.protected_environments
current_env = ActiveRecord::Base.connection.migration_context.current_environment
ActiveRecord::Base.connection.internal_metadata[:environment] = current_env
ActiveRecord::Base.connection.internal_metadata[:environment] = current_env
assert_called_on_instance_of(
ActiveRecord::MigrationContext,
:current_version,
times: 6,
returns: 1
) do
assert_not_includes protected_environments, current_env
# Assert no error
ActiveRecord::Tasks::DatabaseTasks.check_protected_environments!("arunit")
assert_called_on_instance_of(
ActiveRecord::MigrationContext,
:current_version,
times: 6,
returns: 1
) do
assert_not_includes protected_environments, current_env
# Assert no error
ActiveRecord::Tasks::DatabaseTasks.check_protected_environments!("arunit")
ActiveRecord::Base.protected_environments = [current_env]
ActiveRecord::Base.protected_environments = [current_env]
assert_raise(ActiveRecord::ProtectedEnvironmentError) do
assert_raise(ActiveRecord::ProtectedEnvironmentError) do
ActiveRecord::Tasks::DatabaseTasks.check_protected_environments!("arunit")
end
end
ensure
ActiveRecord::Base.protected_environments = protected_environments
end
def test_raises_an_error_when_called_with_protected_environment_which_name_is_a_symbol
protected_environments = ActiveRecord::Base.protected_environments
current_env = ActiveRecord::Base.connection.migration_context.current_environment
ActiveRecord::Base.connection.internal_metadata[:environment] = current_env
assert_called_on_instance_of(
ActiveRecord::MigrationContext,
:current_version,
times: 6,
returns: 1
) do
assert_not_includes protected_environments, current_env
# Assert no error
ActiveRecord::Tasks::DatabaseTasks.check_protected_environments!("arunit")
ActiveRecord::Base.protected_environments = [current_env.to_sym]
assert_raise(ActiveRecord::ProtectedEnvironmentError) do
ActiveRecord::Tasks::DatabaseTasks.check_protected_environments!("arunit")
end
end
ensure
ActiveRecord::Base.protected_environments = protected_environments
end
def test_raises_an_error_if_no_migrations_have_been_made
connection = ActiveRecord::Base.connection
internal_metadata = connection.internal_metadata
schema_migration = connection.schema_migration
schema_migration.create_table
schema_migration.create_version("1")
assert_predicate internal_metadata, :table_exists?
internal_metadata.drop_table
assert_not_predicate internal_metadata, :table_exists?
assert_raises(ActiveRecord::NoEnvironmentInSchemaError) do
ActiveRecord::Tasks::DatabaseTasks.check_protected_environments!("arunit")
end
ensure
schema_migration.delete_version("1")
internal_metadata.create_table
end
ensure
ActiveRecord::Base.protected_environments = protected_environments
end
def test_raises_an_error_when_called_with_protected_environment_which_name_is_a_symbol
protected_environments = ActiveRecord::Base.protected_environments
current_env = ActiveRecord::Base.connection.migration_context.current_environment
ActiveRecord::Base.connection.internal_metadata[:environment] = current_env
assert_called_on_instance_of(
ActiveRecord::MigrationContext,
:current_version,
times: 6,
returns: 1
) do
assert_not_includes protected_environments, current_env
# Assert no error
ActiveRecord::Tasks::DatabaseTasks.check_protected_environments!("arunit")
ActiveRecord::Base.protected_environments = [current_env.to_sym]
assert_raise(ActiveRecord::ProtectedEnvironmentError) do
ActiveRecord::Tasks::DatabaseTasks.check_protected_environments!("arunit")
end
end
ensure
ActiveRecord::Base.protected_environments = protected_environments
end
def test_raises_an_error_if_no_migrations_have_been_made
connection = ActiveRecord::Base.connection
internal_metadata = connection.internal_metadata
schema_migration = connection.schema_migration
schema_migration.create_table
schema_migration.create_version("1")
assert_predicate internal_metadata, :table_exists?
internal_metadata.drop_table
assert_not_predicate internal_metadata, :table_exists?
assert_raises(ActiveRecord::NoEnvironmentInSchemaError) do
ActiveRecord::Tasks::DatabaseTasks.check_protected_environments!("arunit")
end
ensure
schema_migration.delete_version("1")
internal_metadata.create_table
end
private
private
def recreate_metadata_tables
schema_migration = ActiveRecord::Base.connection.schema_migration
schema_migration.drop_table
@ -148,6 +149,7 @@ def recreate_metadata_tables
internal_metadata.drop_table
internal_metadata.create_table
end
end
end
class DatabaseTasksCheckProtectedEnvironmentsMultiDatabaseTest < ActiveRecord::TestCase

@ -17,6 +17,11 @@ class TransactionTest < ActiveRecord::TestCase
def setup
@first, @second = Topic.find(1, 2).sort_by(&:id)
@commit_transaction_on_non_local_return_was = ActiveRecord.commit_transaction_on_non_local_return
end
def teardown
ActiveRecord.commit_transaction_on_non_local_return = @commit_transaction_on_non_local_return_was
end
def test_rollback_dirty_changes
@ -270,7 +275,7 @@ def test_successful_with_return_outside_inner_transaction
end
end
assert_deprecated(ActiveRecord.deprecator) do
assert_not_deprecated(ActiveRecord.deprecator) do
transaction_with_shallow_return
end
assert committed
@ -285,7 +290,7 @@ def test_successful_with_return_outside_inner_transaction
end
def test_deprecation_on_ruby_timeout_outside_inner_transaction
assert_deprecated(ActiveRecord.deprecator) do
assert_not_deprecated(ActiveRecord.deprecator) do
catch do |timeout|
Topic.transaction do
Topic.transaction(requires_new: true) do
@ -312,7 +317,9 @@ def test_rollback_with_return
end
end
transaction_with_return
assert_deprecated(ActiveRecord.deprecator) do
transaction_with_return
end
assert_not committed
assert_not_predicate Topic.find(1), :approved?
@ -325,24 +332,76 @@ def test_rollback_with_return
end
def test_rollback_on_ruby_timeout
catch do |timeout|
Topic.transaction do
@first.approved = true
@first.save!
assert_deprecated(ActiveRecord.deprecator) do
catch do |timeout|
Topic.transaction do
@first.approved = true
@first.save!
throw timeout
throw timeout
end
end
end
assert_not_predicate Topic.find(1), :approved?
end
def test_early_return_from_transaction
assert_not_deprecated(ActiveRecord.deprecator) do
@first.with_lock do
break
def test_break_from_transaction_7_1_behavior
ActiveRecord.commit_transaction_on_non_local_return = true
@first.transaction do
assert_not_predicate @first, :approved?
@first.update!(approved: true)
break if true
# dead code
assert_predicate @first, :approved?
@first.update!(approved: false)
end
assert_predicate Topic.find(1), :approved?, "First should have been approved"
assert_predicate Topic.find(2), :approved?, "Second should have been approved"
end
def test_throw_from_transaction_7_1_behavior
ActiveRecord.commit_transaction_on_non_local_return = true
catch(:not_an_error) do
@first.transaction do
assert_not_predicate @first, :approved?
@first.update!(approved: true)
throw :not_an_error
# dead code
assert_predicate @first, :approved?
@first.update!(approved: false)
end
end
assert_predicate Topic.find(1), :approved?, "First should have been approved"
assert_predicate Topic.find(2), :approved?, "Second should have been approved"
end
def _test_return_from_transaction_7_1_behavior
@first.transaction do
assert_not_predicate @first, :approved?
@first.update!(approved: true)
return if true
# dead code
assert_predicate @first, :approved?
@first.update!(approved: false)
end
end
def test_return_from_transaction_7_1_behavior
ActiveRecord.commit_transaction_on_non_local_return = true
_test_return_from_transaction_7_1_behavior
assert_predicate Topic.find(1), :approved?, "First should have been approved"
assert_predicate Topic.find(2), :approved?, "Second should have been approved"
end
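# Hedged sketch, not part of this commit: with commit_transaction_on_non_local_return
# enabled (the 7.1 behavior exercised above), leaving a transaction block early via
# break, return, or throw commits the work done so far instead of rolling it back.
# The record and attribute used here are illustrative only.
def early_break_commits_sketch
  previous = ActiveRecord.commit_transaction_on_non_local_return
  ActiveRecord.commit_transaction_on_non_local_return = true
  Topic.transaction do
    Topic.find(1).update!(approved: true)
    break # under the new behavior this commits the update above
  end
ensure
  ActiveRecord.commit_transaction_on_non_local_return = previous
end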
def test_number_of_transactions_in_commit

@ -14,6 +14,8 @@ def test_to_yaml_with_time_with_zone_should_not_raise_exception
topic = Topic.new(written_on: DateTime.now)
assert_nothing_raised { topic.to_yaml }
end
ensure
Topic.reset_column_information
end
def test_roundtrip

@ -0,0 +1,15 @@
# frozen_string_literal: true
class CommentOverlappingCounterCache < ActiveRecord::Base
belongs_to :user_comments_count, counter_cache: :comments_count
belongs_to :post_comments_count, class_name: "PostCommentsCount"
belongs_to :commentable, polymorphic: true, counter_cache: :comments_count
end
class UserCommentsCount < ActiveRecord::Base
has_many :comments, as: :commentable, class_name: "CommentOverlappingCounterCache"
end
class PostCommentsCount < ActiveRecord::Base
has_many :comments, class_name: "CommentOverlappingCounterCache"
end
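# Hedged usage sketch, not part of this commit: the point of these fixtures is that
# several associations funnel into the same comments_count column. Assuming the test
# schema defines comments_count on user_comments_counts, something like:
#
#   user = UserCommentsCount.create!
#   CommentOverlappingCounterCache.create!(commentable: user)
#   user.reload.comments_count # => 1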

@ -3,11 +3,10 @@
module Cpk
class Book < ActiveRecord::Base
self.table_name = :cpk_books
belongs_to :order, autosave: true, query_constraints: [:shop_id, :order_id]
belongs_to :author, class_name: "Cpk::Author"
has_many :chapters, query_constraints: [:author_id, :book_number]
has_many :chapters, query_constraints: [:author_id, :book_id]
end
class BestSeller < Book
@ -17,8 +16,18 @@ class BrokenBook < Book
belongs_to :order
end
class BrokenBookWithNonCpkOrder < Book
belongs_to :order, class_name: "Cpk::NonCpkOrder", query_constraints: [:shop_id, :order_id]
end
class NonCpkBook < Book
self.primary_key = :id
belongs_to :non_cpk_order, query_constraints: [:order_id]
end
class NullifiedBook < Book
has_one :chapter, query_constraints: [:author_id, :book_number], dependent: :nullify
has_one :chapter, query_constraints: [:author_id, :book_id], dependent: :nullify
end
class BookWithOrderAgreements < Book

@ -7,6 +7,6 @@ class Chapter < ActiveRecord::Base
# to be shared between different databases
self.primary_key = [:author_id, :id]
belongs_to :book, query_constraints: [:author_id, :book_number]
belongs_to :book, query_constraints: [:author_id, :book_id]
end
end
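# Hedged sketch, not part of this commit: with the composite query_constraints above,
# and assuming Cpk::Book's composite primary key is [:author_id, :id], loading a
# chapter's book is expected to match on both columns, roughly:
#
#   chapter.book
#   # SELECT cpk_books.* FROM cpk_books
#   #   WHERE cpk_books.author_id = <chapter.author_id> AND cpk_books.id = <chapter.book_id>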

@ -17,6 +17,15 @@ class BrokenOrder < Order
has_one :book
end
class BrokenOrderWithNonCpkBooks < Order
has_many :books, class_name: "Cpk::NonCpkBook"
has_one :book, class_name: "Cpk::NonCpkBook"
end
class NonCpkOrder < Order
self.primary_key = :id
end
class OrderWithPrimaryKeyAssociatedBook < Order
has_one :book, primary_key: :id, foreign_key: :order_id
end
@ -24,4 +33,8 @@ class OrderWithPrimaryKeyAssociatedBook < Order
class OrderWithNullifiedBook < Order
has_one :book, query_constraints: [:shop_id, :order_id], dependent: :nullify
end
class OrderWithSingularBookChapters < Order
has_many :chapters, through: :book
end
end
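# Hedged sketch, not part of this commit: OrderWithSingularBookChapters declares
# has_many :chapters, through: :book, where :book is assumed to be the singular
# has_one inherited from Order, so the following is expected to return the chapters
# of the order's only book:
#
#   OrderWithSingularBookChapters.first.chapters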

@ -8,7 +8,7 @@ class Membership < ActiveRecord::Base
class CurrentMembership < Membership
belongs_to :member
belongs_to :club
belongs_to :club, inverse_of: :membership
end
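# Hedged sketch, not part of this commit: assuming Club declares has_one :membership
# as the other side, inverse_of lets both in-memory objects point at each other
# without an extra query:
#
#   membership = CurrentMembership.first
#   membership.club.membership.equal?(membership) # => true, no second query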
class SuperMembership < Membership
