-
-
Save sgoedecke/4b1ab5f7c5c979af3142eb63d27638b2 to your computer and use it in GitHub Desktop.
# frozen_string_literal: true

# Capture the original command-line arguments before Minitest's autorun
# option parsing mutates ARGV.
ORIG_ARGV = ARGV.dup

require "bundler/setup"
require "active_support/core_ext/kernel/reporting"

# Force UTF-8 for both internal and external encodings; silence_warnings
# suppresses the Ruby warning emitted when reassigning these at runtime.
silence_warnings do
  Encoding.default_internal = Encoding::UTF_8
  Encoding.default_external = Encoding::UTF_8
end

require "active_support/testing/autorun"
require "active_support/testing/method_call_assertions"

ENV["NO_RELOAD"] = "1"
require "active_support"

# Surface exceptions raised in background threads instead of silently
# letting the thread die.
Thread.abort_on_exception = true

# Show backtraces for deprecated behavior for quicker cleanup.
ActiveSupport::Deprecation.debug = true

# Default to old to_time behavior but allow running tests with new behavior
ActiveSupport.to_time_preserves_timezone = ENV["PRESERVE_TIMEZONES"] == "1"

# Disable available locale checks to avoid warnings running the test suite.
I18n.enforce_available_locales = false
class ActiveSupport::TestCase
  # Parallelize the suite: thread-based where forking is unavailable
  # (Windows, JRuby), forked worker processes everywhere else.
  if !Process.respond_to?(:fork) || Gem.win_platform?
    parallelize(with: :threads)
  else
    parallelize
  end

  include ActiveSupport::Testing::MethodCallAssertions

  private
    # Skips the current run on Rubinius using Minitest::Assertions#skip
    def rubinius_skip(message = "")
      skip(message) if RUBY_ENGINE == "rbx"
    end

    # Skips the current run on JRuby using Minitest::Assertions#skip
    def jruby_skip(message = "")
      skip(message) if defined?(JRUBY_VERSION)
    end
end
require_relative "../../tools/test_common" |
# frozen_string_literal: true

class String
  # Returns the fragment of the receiver addressed by +position+.
  #
  # With an Integer, returns the single character at that offset (0-based;
  # negative offsets count back from the end), or +nil+ when the offset is
  # outside the string. With a Range, returns the substring covering those
  # offsets; an empty string results when the range starts just past the
  # last character.
  #
  #   str = "hello"
  #   str.at(0)      # => "h"
  #   str.at(1..3)   # => "ell"
  #   str.at(-2)     # => "l"
  #   str.at(-2..-1) # => "lo"
  #   str.at(5)      # => nil
  #   str.at(5..-1)  # => ""
  #
  # With a Regexp, returns the matched portion; with a String, returns that
  # string when it occurs in the receiver. Both return +nil+ on no match.
  #
  #   str = "hello"
  #   str.at(/lo/) # => "lo"
  #   str.at(/ol/) # => nil
  #   str.at("lo") # => "lo"
  #   str.at("ol") # => nil
  def at(position)
    slice(position)
  end

  # Returns the substring running from +position+ through the end of the
  # string. Negative positions count back from the end.
  #
  #   str = "hello"
  #   str.from(0)  # => "hello"
  #   str.from(3)  # => "lo"
  #   str.from(-2) # => "lo"
  #
  # Combines naturally with +to+:
  #
  #   str = "hello"
  #   str.from(0).to(-1) # => "hello"
  #   str.from(1).to(-2) # => "ell"
  def from(position)
    slice(position, length)
  end

  # Returns the substring running from the start of the string through
  # +position+ (inclusive). Negative positions count back from the end.
  #
  #   str = "hello"
  #   str.to(0)  # => "h"
  #   str.to(3)  # => "hell"
  #   str.to(-2) # => "hell"
  #
  # Combines naturally with +from+:
  #
  #   str = "hello"
  #   str.from(0).to(-1) # => "hello"
  #   str.from(1).to(-2) # => "ell"
  def to(position)
    position = size + position if position < 0
    # slice returns nil for positions far before the start; normalize to "".
    slice(0, position + 1) || +""
  end

  # Returns the first character, or with +limit+ the first +limit+
  # characters. A limit at or beyond the string length returns a copy of
  # the whole string; a negative limit raises ArgumentError.
  #
  #   str = "hello"
  #   str.first    # => "h"
  #   str.first(1) # => "h"
  #   str.first(2) # => "he"
  #   str.first(0) # => ""
  #   str.first(6) # => "hello"
  def first(limit = 1)
    slice(0, limit) || raise(ArgumentError, "negative limit")
  end

  # Returns the last character, or with +limit+ the last +limit+
  # characters. A limit at or beyond the string length returns a copy of
  # the whole string; a negative limit raises ArgumentError.
  #
  #   str = "hello"
  #   str.last    # => "o"
  #   str.last(1) # => "o"
  #   str.last(2) # => "lo"
  #   str.last(0) # => ""
  #   str.last(6) # => "hello"
  def last(limit = 1)
    start = [length - limit, 0].max
    slice(start, limit) || raise(ArgumentError, "negative limit")
  end
end
# frozen_string_literal: true | |
require_relative "../../abstract_unit" | |
require "active_support/core_ext/array" | |
# Exercises the Array access extensions (from/to, ordinal accessors,
# including/excluding/without).
class AccessTest < ActiveSupport::TestCase
  def test_from
    letters = %w[a b c d]
    assert_equal %w[a b c d], letters.from(0)
    assert_equal %w[c d], letters.from(2)
    assert_equal [], letters.from(10)
    assert_equal %w[d e], %w[a b c d e].from(-2)
    assert_equal [], %w[a b c d e].from(-10)
  end

  def test_to
    letters = %w[a b c d]
    assert_equal %w[a], letters.to(0)
    assert_equal %w[a b c], letters.to(2)
    assert_equal %w[a b c d], letters.to(10)
    assert_equal %w[a b c], letters.to(-2)
    assert_equal [], %w[a b c].to(-10)
  end

  def test_specific_accessor
    numbers = (1..42).to_a
    assert_equal numbers[1], numbers.second
    assert_equal numbers[2], numbers.third
    assert_equal numbers[3], numbers.fourth
    assert_equal numbers[4], numbers.fifth
    assert_equal numbers[41], numbers.forty_two
    assert_equal numbers[-3], numbers.third_to_last
    assert_equal numbers[-2], numbers.second_to_last
  end

  def test_including
    assert_equal [1, 2, 3, 4, 5], [1, 2, 4].including(3, 5).sort
    assert_equal [1, 2, 3, 4, 5], [1, 2, 4].including([3, 5]).sort
    # Nested arrays are treated as single elements, not splatted.
    assert_equal [[0, 1], [1, 0]], [[0, 1]].including([[1, 0]])
  end

  def test_excluding
    assert_equal [1, 2, 4], [1, 2, 3, 4, 5].excluding(3, 5)
    assert_equal [1, 2, 4], [1, 2, 3, 4, 5].excluding([3, 5])
    assert_equal [[0, 1]], [[0, 1], [1, 0]].excluding([[1, 0]])
  end

  def test_without
    assert_equal [1, 2, 4], [1, 2, 3, 4, 5].without(3, 5)
  end
end
# frozen_string_literal: true

module ActiveSupport
  # Actionable errors let you define actions to resolve an error.
  #
  # To make an error actionable, include the
  # <tt>ActiveSupport::ActionableError</tt> module and invoke the +action+
  # class macro to define the action. An action needs a name and a block to
  # execute.
  module ActionableError
    extend Concern

    # Raised when dispatching an action the error does not define.
    class NonActionable < StandardError; end

    included do
      class_attribute :_actions, default: {}
    end

    # Returns the name => block hash of actions for +error+, which may be
    # either an instance of an actionable error or an actionable error
    # class. Anything else yields an empty hash.
    def self.actions(error) # :nodoc:
      if ActionableError === error || (Class === error && error < ActionableError)
        error._actions
      else
        {}
      end
    end

    # Looks up the action +name+ on +error+ and runs its block. Raises
    # NonActionable when the error defines no such action.
    def self.dispatch(error, name) # :nodoc:
      actions(error).fetch(name).call
    rescue KeyError
      raise NonActionable, "Cannot find action \"#{name}\""
    end

    module ClassMethods
      # Defines an action that can resolve the error.
      #
      #   class PendingMigrationError < MigrationError
      #     include ActiveSupport::ActionableError
      #
      #     action "Run pending migrations" do
      #       ActiveRecord::Tasks::DatabaseTasks.migrate
      #     end
      #   end
      def action(name, &block)
        _actions[name] = block
      end
    end
  end
end
# frozen_string_literal: true | |
require_relative "abstract_unit" | |
require "active_support/actionable_error" | |
class ActionableErrorTest < ActiveSupport::TestCase
  NonActionableError = Class.new(StandardError)

  # Error with two observable toggle actions, used to verify dispatch.
  class DispatchableError < StandardError
    include ActiveSupport::ActionableError

    class_attribute :flip1, default: false
    class_attribute :flip2, default: false

    action("Flip 1") { self.flip1 = true }
    action("Flip 2") { self.flip2 = true }
  end

  test "returns all action of an actionable error" do
    expected = ["Flip 1", "Flip 2"]
    assert_equal expected, ActiveSupport::ActionableError.actions(DispatchableError).keys
    assert_equal expected, ActiveSupport::ActionableError.actions(DispatchableError.new).keys
  end

  test "returns no actions for non-actionable errors" do
    assert_empty ActiveSupport::ActionableError.actions(Exception)
    assert_empty ActiveSupport::ActionableError.actions(Exception.new)
  end

  test "dispatches actions from error and name" do
    assert_changes "DispatchableError.flip1", from: false, to: true do
      ActiveSupport::ActionableError.dispatch DispatchableError, "Flip 1"
    end
  end

  test "cannot dispatch missing actions" do
    err = assert_raises ActiveSupport::ActionableError::NonActionable do
      ActiveSupport::ActionableError.dispatch NonActionableError, "action"
    end

    assert_equal 'Cannot find action "action"', err.to_s
  end
end
# frozen_string_literal: true | |
#-- | |
# Copyright (c) 2005-2022 David Heinemeier Hansson | |
# | |
# Permission is hereby granted, free of charge, to any person obtaining | |
# a copy of this software and associated documentation files (the | |
# "Software"), to deal in the Software without restriction, including | |
# without limitation the rights to use, copy, modify, merge, publish, | |
# distribute, sublicense, and/or sell copies of the Software, and to | |
# permit persons to whom the Software is furnished to do so, subject to | |
# the following conditions: | |
# | |
# The above copyright notice and this permission notice shall be | |
# included in all copies or substantial portions of the Software. | |
# | |
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, | |
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF | |
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND | |
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE | |
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION | |
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION | |
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. | |
#++ | |
require "securerandom" | |
require "active_support/dependencies/autoload" | |
require "active_support/version" | |
require "active_support/logger" | |
require "active_support/lazy_load_hooks" | |
require "active_support/core_ext/date_and_time/compatibility" | |
# Top-level namespace and autoload registry for Active Support. Each
# autoload maps a constant to its file; constants are loaded on first
# reference unless eagerly loaded below.
module ActiveSupport
  extend ActiveSupport::Autoload

  # Lazily-loaded components.
  autoload :Concern
  autoload :CodeGenerator
  autoload :ActionableError
  autoload :ConfigurationFile
  autoload :CurrentAttributes
  autoload :Dependencies
  autoload :DescendantsTracker
  autoload :ExecutionContext
  autoload :ExecutionWrapper
  autoload :Executor
  autoload :ErrorReporter
  autoload :FileUpdateChecker
  autoload :EventedFileUpdateChecker
  autoload :ForkTracker
  autoload :LogSubscriber
  autoload :IsolatedExecutionState
  autoload :Notifications
  autoload :Reloader
  autoload :PerThreadRegistry
  autoload :SecureCompareRotator

  # Components registered for up-front loading when eager_load! is called
  # (e.g. in production), rather than on first use.
  eager_autoload do
    autoload :BacktraceCleaner
    autoload :ProxyObject
    autoload :Benchmarkable
    autoload :Cache
    autoload :Callbacks
    autoload :Configurable
    autoload :Deprecation
    autoload :Digest
    autoload :Gzip
    autoload :Inflector
    autoload :JSON
    autoload :JsonWithMarshalFallback
    autoload :KeyGenerator
    autoload :MessageEncryptor
    autoload :MessageVerifier
    autoload :Multibyte
    autoload :NumberHelper
    autoload :OptionMerger
    autoload :OrderedHash
    autoload :OrderedOptions
    autoload :StringInquirer
    autoload :EnvironmentInquirer
    autoload :TaggedLogging
    autoload :XmlMini
    autoload :ArrayInquirer
  end

  autoload :Rescuable
  autoload :SafeBuffer, "active_support/core_ext/string/output_safety"
  autoload :TestCase

  def self.eager_load!
    super

    # NumberHelper maintains its own nested autoloads, so cascade into it.
    NumberHelper.eager_load!
  end

  cattr_accessor :test_order # :nodoc:
  cattr_accessor :test_parallelization_threshold, default: 50 # :nodoc:

  singleton_class.attr_accessor :error_reporter # :nodoc:

  # Delegates the cache serialization format version to ActiveSupport::Cache.
  def self.cache_format_version
    Cache.format_version
  end

  def self.cache_format_version=(value)
    Cache.format_version = value
  end

  # Delegates the to_time timezone-preservation flag to
  # DateAndTime::Compatibility.
  def self.to_time_preserves_timezone
    DateAndTime::Compatibility.preserve_timezone
  end

  def self.to_time_preserves_timezone=(value)
    DateAndTime::Compatibility.preserve_timezone = value
  end

  # Delegates the utc_to_local offset behavior flag to
  # DateAndTime::Compatibility.
  def self.utc_to_local_returns_utc_offset_times
    DateAndTime::Compatibility.utc_to_local_returns_utc_offset_times
  end

  def self.utc_to_local_returns_utc_offset_times=(value)
    DateAndTime::Compatibility.utc_to_local_returns_utc_offset_times = value
  end
end

autoload :I18n, "active_support/i18n"
# frozen_string_literal: true | |
require "active_support/core_ext/object/acts_like" | |
class Time
  # Duck-types as a Time-like class. See Object#acts_like?, which checks
  # for exactly this predicate via acts_like?(:time).
  def acts_like_time?
    true
  end
end
# frozen_string_literal: true | |
require_relative "../../abstract_unit" | |
require "active_support/core_ext/date/acts_like" | |
require "active_support/core_ext/time/acts_like" | |
require "active_support/core_ext/date_time/acts_like" | |
require "active_support/core_ext/object/acts_like" | |
class ObjectTests < ActiveSupport::TestCase
  # Minimal stand-in that quacks like a Time without subclassing it.
  class DuckTime
    def acts_like_time?
      true
    end
  end

  def test_duck_typing
    plain = Object.new
    now = Time.now
    today = Date.today
    datetime = DateTime.new
    duck = DuckTime.new

    assert_not plain.acts_like?(:time)
    assert_not plain.acts_like?(:date)

    assert now.acts_like?(:time)
    assert_not now.acts_like?(:date)

    assert_not today.acts_like?(:time)
    assert today.acts_like?(:date)

    # DateTime acts like both.
    assert datetime.acts_like?(:time)
    assert datetime.acts_like?(:date)

    assert duck.acts_like?(:time)
    assert_not duck.acts_like?(:date)
  end
end
# frozen_string_literal: true | |
require_relative "../abstract_unit" | |
module OtherAfterTeardown
  # Flags that the after_teardown chain reached this mixin, even when an
  # earlier teardown raised. Asserted by AfterTeardownTest below.
  def after_teardown
    super

    @witness = true
  end
end
# Verifies that every after_teardown hook in the chain still runs when a
# teardown block raises, and that the raise is recorded as exactly one
# failure.
class AfterTeardownTest < ActiveSupport::TestCase
  include OtherAfterTeardown

  attr_writer :witness

  MyError = Class.new(StandardError)

  teardown do
    raise MyError, "Test raises an error, all after_teardown should still get called"
  end

  def after_teardown
    # Running the rest of the teardown chain (super) must record the
    # teardown block's raise as exactly one new failure.
    assert_changes -> { failures.count }, from: 0, to: 1 do
      super
    end

    # OtherAfterTeardown#after_teardown must have run despite the raise.
    assert_equal true, @witness
    # Clear the deliberate failure so this test reports success.
    failures.clear
  end

  def test_teardown_raise_but_all_after_teardown_method_are_called
    assert true
  end
end
# frozen_string_literal: true | |
class Module
  # Allows you to make aliases for attributes, which includes
  # getter, setter, and a predicate.
  #
  #   class Content < ActiveRecord::Base
  #     # has a title attribute
  #   end
  #
  #   class Email < Content
  #     alias_attribute :subject, :title
  #   end
  #
  #   e = Email.find(1)
  #   e.title    # => "Superstars"
  #   e.subject  # => "Superstars"
  #   e.subject? # => true
  #   e.subject = "Megastars"
  #   e.title    # => "Megastars"
  def alias_attribute(new_name, old_name)
    # The generated readers call through an explicit `self` receiver so that
    # aliases beginning with an uppercase letter are dispatched as methods
    # rather than resolved as constants. The methods are eval'd (not
    # define_method) so they stay as fast as hand-written ones.
    module_eval <<~RUBY, __FILE__, __LINE__ + 1
      def #{new_name}; self.#{old_name}; end
      def #{new_name}?; self.#{old_name}?; end
      def #{new_name}=(value); self.#{old_name} = value; end
    RUBY
  end
end
# frozen_string_literal: true | |
require "active_support" | |
require "active_support/time" | |
require "active_support/core_ext" |
# frozen_string_literal: true | |
class Module
  # Returns true when the module has no name, false otherwise.
  #
  #   module M; end
  #   M.name # => "M"
  #   M.anonymous? # => false
  #
  #   m = Module.new
  #   m.name       # => nil
  #   m.anonymous? # => true
  #
  # A module only receives a name when it is first assigned to a constant,
  # whether through the +module+/+class+ keywords or an explicit assignment:
  #
  #   m = Module.new # creates an anonymous module
  #   m.anonymous?   # => true
  #   M = m          # m gets a name here as a side-effect
  #   m.name         # => "M"
  #   m.anonymous?   # => false
  def anonymous?
    name.nil?
  end
end
# frozen_string_literal: true | |
require_relative "../../abstract_unit" | |
require "active_support/core_ext/module/anonymous" | |
class AnonymousTest < ActiveSupport::TestCase
  test "an anonymous class or module are anonymous" do
    [Module.new, Class.new].each do |mod|
      assert_predicate mod, :anonymous?
    end
  end

  test "a named class or module are not anonymous" do
    [Kernel, Object].each do |mod|
      assert_not_predicate mod, :anonymous?
    end
  end
end
# frozen_string_literal: true | |
# Intentionally empty fixture class used by autoloading tests.
# NOTE(review): the compact `Fixtures::AnotherClass` form assumes the
# Fixtures namespace is already defined when this file is loaded — confirm
# against the loader that requires it.
class Fixtures::AnotherClass
end
# frozen_string_literal: true | |
require "active_support/core_ext/array/wrap" | |
require "active_support/core_ext/array/access" | |
require "active_support/core_ext/array/conversions" | |
require "active_support/core_ext/array/deprecated_conversions" unless ENV["RAILS_DISABLE_DEPRECATED_TO_S_CONVERSION"] | |
require "active_support/core_ext/array/extract" | |
require "active_support/core_ext/array/extract_options" | |
require "active_support/core_ext/array/grouping" | |
require "active_support/core_ext/array/inquiry" |
# frozen_string_literal: true | |
module ActiveSupport
  # Wrapping an array in an +ArrayInquirer+ gives a friendlier way to check
  # its string-like contents:
  #
  #   variants = ActiveSupport::ArrayInquirer.new([:phone, :tablet])
  #
  #   variants.phone?    # => true
  #   variants.tablet?   # => true
  #   variants.desktop?  # => false
  class ArrayInquirer < Array
    # Returns true if any element of the collection equals the stringified
    # or symbolized form of any element in +candidates+. With no
    # +candidates+, behaves like Array#any? (true when non-empty, or
    # delegates to a given block).
    #
    #   variants = ActiveSupport::ArrayInquirer.new([:phone, :tablet])
    #
    #   variants.any?                      # => true
    #   variants.any?(:phone, :tablet)     # => true
    #   variants.any?('phone', 'desktop')  # => true
    #   variants.any?(:desktop, :watch)    # => false
    def any?(*candidates)
      return super if candidates.none?

      candidates.any? do |candidate|
        include?(candidate.to_sym) || include?(candidate.to_s)
      end
    end

    private
      # Any predicate-style method name (ending in "?") is answerable.
      def respond_to_missing?(name, include_private = false)
        name.end_with?("?") || super
      end

      # Treat missing "foo?" calls as membership queries for "foo".
      def method_missing(name, *args)
        if name.end_with?("?")
          any?(name[0..-2])
        else
          super
        end
      end
  end
end
# frozen_string_literal: true | |
require_relative "abstract_unit" | |
require "active_support/core_ext/array" | |
class ArrayInquirerTest < ActiveSupport::TestCase
  def setup
    # Mixed symbol/string contents on purpose: inquiry must match both forms.
    @array_inquirer = ActiveSupport::ArrayInquirer.new([:mobile, :tablet, "api"])
  end

  def test_individual
    assert_predicate @array_inquirer, :mobile?
    assert_predicate @array_inquirer, :tablet?
    assert_not_predicate @array_inquirer, :desktop?
  end

  def test_any
    assert @array_inquirer.any?(:mobile, :desktop)
    assert @array_inquirer.any?(:watch, :tablet)
    assert_not @array_inquirer.any?(:desktop, :watch)
  end

  def test_any_string_symbol_mismatch
    # A string candidate must match a symbol element and vice versa.
    assert @array_inquirer.any?("mobile")
    assert @array_inquirer.any?(:api)
  end

  def test_any_with_block
    assert @array_inquirer.any? { |v| v == :mobile }
    assert_not @array_inquirer.any? { |v| v == :desktop }
  end

  def test_respond_to
    assert_respond_to @array_inquirer, :development?
  end

  def test_inquiry
    result = [:mobile, :tablet, "api"].inquiry

    assert_instance_of ActiveSupport::ArrayInquirer, result
    assert_equal @array_inquirer, result
  end

  # Monkey-patches Array#respond_to_missing? for the duration of the test to
  # prove ArrayInquirer's respond_to_missing? still falls back to super; the
  # ensure block restores a plain delegating definition afterwards.
  def test_respond_to_fallback_to_array_respond_to
    Array.class_eval do
      def respond_to_missing?(name, include_private = false)
        (name == :foo) || super
      end
    end

    arr = ActiveSupport::ArrayInquirer.new([:x])

    assert_respond_to arr, :can_you_hear_me?
    assert_respond_to arr, :foo
    assert_not_respond_to arr, :nope
  ensure
    Array.class_eval do
      undef_method :respond_to_missing?
      def respond_to_missing?(name, include_private = false) # rubocop:disable Lint/DuplicateMethods
        super
      end
    end
  end
end
# frozen_string_literal: true | |
require "active_support/core_ext/enumerable" | |
module ActiveSupport
  module Testing
    # Rails-flavored assertions mixed into ActiveSupport::TestCase.
    module Assertions
      # Sentinel distinguishing "no from:/to: given" from an explicit nil.
      UNTRACKED = Object.new # :nodoc:

      # Asserts that an expression is not truthy. Passes if <tt>object</tt> is
      # +nil+ or +false+. "Truthy" means "considered true in a conditional"
      # like <tt>if foo</tt>.
      #
      #   assert_not nil    # => true
      #   assert_not false  # => true
      #   assert_not 'foo'  # => Expected "foo" to be nil or false
      #
      # An error message can be specified.
      #
      #   assert_not foo, 'foo should be false'
      def assert_not(object, message = nil)
        message ||= "Expected #{mu_pp(object)} to be nil or false"
        assert !object, message
      end

      # Assertion that the block should not raise an exception.
      #
      # Passes if evaluated code in the yielded block raises no exception.
      #
      #   assert_nothing_raised do
      #     perform_service(param: 'no_exception')
      #   end
      #
      # Returns the block's value. Only StandardError descendants are
      # converted; the assert(true) keeps the assertion count accurate.
      def assert_nothing_raised
        yield.tap { assert(true) }
      rescue => error
        raise Minitest::UnexpectedError.new(error)
      end

      # Test numeric difference between the return value of an expression as a
      # result of what is evaluated in the yielded block.
      #
      #   assert_difference 'Article.count' do
      #     post :create, params: { article: {...} }
      #   end
      #
      # An arbitrary expression is passed in and evaluated.
      #
      #   assert_difference 'Article.last.comments(:reload).size' do
      #     post :create, params: { comment: {...} }
      #   end
      #
      # An arbitrary positive or negative difference can be specified.
      # The default is <tt>1</tt>.
      #
      #   assert_difference 'Article.count', -1 do
      #     post :delete, params: { id: ... }
      #   end
      #
      # An array of expressions can also be passed in and evaluated.
      #
      #   assert_difference [ 'Article.count', 'Post.count' ], 2 do
      #     post :create, params: { article: {...} }
      #   end
      #
      # A hash of expressions/numeric differences can also be passed in and evaluated.
      #
      #   assert_difference ->{ Article.count } => 1, ->{ Notification.count } => 2 do
      #     post :create, params: { article: {...} }
      #   end
      #
      # A lambda or a list of lambdas can be passed in and evaluated:
      #
      #   assert_difference ->{ Article.count }, 2 do
      #     post :create, params: { article: {...} }
      #   end
      #
      #   assert_difference [->{ Article.count }, ->{ Post.count }], 2 do
      #     post :create, params: { article: {...} }
      #   end
      #
      # An error message can be specified.
      #
      #   assert_difference 'Article.count', -1, 'An Article should be destroyed' do
      #     post :delete, params: { id: ... }
      #   end
      def assert_difference(expression, *args, &block)
        # Normalize every calling convention to { expression => difference }.
        expressions =
          if expression.is_a?(Hash)
            message = args[0]
            expression
          else
            difference = args[0] || 1
            message = args[1]
            Array(expression).index_with(difference)
          end

        # String expressions are evaluated in the caller's binding so they can
        # reference the caller's locals.
        exps = expressions.keys.map { |e|
          e.respond_to?(:call) ? e : lambda { eval(e, block.binding) }
        }
        before = exps.map(&:call)

        retval = _assert_nothing_raised_or_warn("assert_difference", &block)

        expressions.zip(exps, before) do |(code, diff), exp, before_value|
          error = "#{code.inspect} didn't change by #{diff}"
          error = "#{message}.\n#{error}" if message
          assert_equal(before_value + diff, exp.call, error)
        end

        retval
      end

      # Assertion that the numeric result of evaluating an expression is not
      # changed before and after invoking the passed in block.
      #
      #   assert_no_difference 'Article.count' do
      #     post :create, params: { article: invalid_attributes }
      #   end
      #
      # A lambda can be passed in and evaluated.
      #
      #   assert_no_difference -> { Article.count } do
      #     post :create, params: { article: invalid_attributes }
      #   end
      #
      # An error message can be specified.
      #
      #   assert_no_difference 'Article.count', 'An Article should not be created' do
      #     post :create, params: { article: invalid_attributes }
      #   end
      #
      # An array of expressions can also be passed in and evaluated.
      #
      #   assert_no_difference [ 'Article.count', -> { Post.count } ] do
      #     post :create, params: { article: invalid_attributes }
      #   end
      def assert_no_difference(expression, message = nil, &block)
        assert_difference expression, 0, message, &block
      end

      # Assertion that the result of evaluating an expression is changed before
      # and after invoking the passed in block.
      #
      #   assert_changes 'Status.all_good?' do
      #     post :create, params: { status: { ok: false } }
      #   end
      #
      # You can pass the block as a string to be evaluated in the context of
      # the block. A lambda can be passed for the block as well.
      #
      #   assert_changes -> { Status.all_good? } do
      #     post :create, params: { status: { ok: false } }
      #   end
      #
      # The assertion is useful to test side effects. The passed block can be
      # anything that can be converted to string with #to_s.
      #
      #   assert_changes :@object do
      #     @object = 42
      #   end
      #
      # The keyword arguments +:from+ and +:to+ can be given to specify the
      # expected initial value and the expected value after the block was
      # executed.
      #
      #   assert_changes :@object, from: nil, to: :foo do
      #     @object = :foo
      #   end
      #
      # An error message can be specified.
      #
      #   assert_changes -> { Status.all_good? }, 'Expected the status to be bad' do
      #     post :create, params: { status: { incident: true } }
      #   end
      def assert_changes(expression, message = nil, from: UNTRACKED, to: UNTRACKED, &block)
        exp = expression.respond_to?(:call) ? expression : -> { eval(expression.to_s, block.binding) }

        before = exp.call
        retval = _assert_nothing_raised_or_warn("assert_changes", &block)

        # from:/to: are compared with === so classes, regexps, and ranges work
        # as matchers, not just literal values.
        unless from == UNTRACKED
          error = "Expected change from #{from.inspect}"
          error = "#{message}.\n#{error}" if message
          assert from === before, error
        end

        after = exp.call

        error = "#{expression.inspect} didn't change"
        error = "#{error}. It was already #{to}" if before == to
        error = "#{message}.\n#{error}" if message
        refute_equal before, after, error

        unless to == UNTRACKED
          error = "Expected change to #{to}\n"
          error = "#{message}.\n#{error}" if message
          assert to === after, error
        end

        retval
      end

      # Assertion that the result of evaluating an expression is not changed before
      # and after invoking the passed in block.
      #
      #   assert_no_changes 'Status.all_good?' do
      #     post :create, params: { status: { ok: true } }
      #   end
      #
      # Provide the optional keyword argument :from to specify the expected
      # initial value.
      #
      #   assert_no_changes -> { Status.all_good? }, from: true do
      #     post :create, params: { status: { ok: true } }
      #   end
      #
      # An error message can be specified.
      #
      #   assert_no_changes -> { Status.all_good? }, 'Expected the status to be good' do
      #     post :create, params: { status: { ok: false } }
      #   end
      def assert_no_changes(expression, message = nil, from: UNTRACKED, &block)
        exp = expression.respond_to?(:call) ? expression : -> { eval(expression.to_s, block.binding) }

        before = exp.call
        retval = _assert_nothing_raised_or_warn("assert_no_changes", &block)

        unless from == UNTRACKED
          error = "Expected initial value of #{from.inspect}"
          error = "#{message}.\n#{error}" if message
          assert from === before, error
        end

        after = exp.call

        error = "#{expression.inspect} changed"
        error = "#{message}.\n#{error}" if message

        # assert_equal warns when the expected value is nil, so branch.
        if before.nil?
          assert_nil after, error
        else
          assert_equal before, after, error
        end

        retval
      end

      private
        # Runs the block via assert_nothing_raised; if it raises, logs a hint
        # to use assert_raises closer to the raising code, then re-raises.
        def _assert_nothing_raised_or_warn(assertion, &block)
          assert_nothing_raised(&block)
        rescue Minitest::UnexpectedError => e
          if tagged_logger && tagged_logger.warn?
            warning = <<~MSG
              #{self.class} - #{name}: #{e.error.class} raised.
              If you expected this exception, use `assert_raises` as near to the code that raises as possible.
              Other block based assertions (e.g. `#{assertion}`) can be used, as long as `assert_raises` is inside their block.
            MSG
            tagged_logger.warn warning
          end

          raise
        end
    end
  end
end
# frozen_string_literal: true | |
require "fileutils" | |
class File
  # Write to a file atomically. Useful for situations where you don't
  # want other processes or threads to see half-written files.
  #
  #   File.atomic_write('important.file') do |file|
  #     file.write('hello')
  #   end
  #
  # This method needs to create a temporary file. By default it will create it
  # in the same directory as the destination file. If you don't like this
  # behavior you can provide a different directory but it must be on the
  # same physical filesystem as the file you're trying to write.
  #
  #   File.atomic_write('/data/something.important', '/data/tmp') do |file|
  #     file.write('hello')
  #   end
  #
  # Returns the value of the given block.
  def self.atomic_write(file_name, temp_dir = dirname(file_name))
    require "tempfile" unless defined?(Tempfile)

    Tempfile.open(".#{basename(file_name)}", temp_dir) do |temp_file|
      temp_file.binmode
      return_val = yield temp_file
      temp_file.close

      old_stat = if exist?(file_name)
        # Get original file permissions
        stat(file_name)
      else
        # If not possible, probe which are the default permissions in the
        # destination directory.
        probe_stat_in(dirname(file_name))
      end

      if old_stat
        # Set correct permissions on new file
        begin
          chown(old_stat.uid, old_stat.gid, temp_file.path)
          # This operation will affect filesystem ACL's
          chmod(old_stat.mode, temp_file.path)
        rescue Errno::EPERM, Errno::EACCES
          # Changing file ownership failed, moving on.
        end
      end

      # Overwrite original file with temp file: rename within the same
      # filesystem is the atomic step.
      rename(temp_file.path, file_name)
      return_val
    end
  end

  # Private utility method: creates and stats a throwaway file in +dir+ to
  # learn the default permissions for newly-created files there.
  def self.probe_stat_in(dir) # :nodoc:
    basename = [
      ".permissions_check",
      Thread.current.object_id,
      Process.pid,
      rand(1000000)
    ].join(".")

    file_name = join(dir, basename)
    FileUtils.touch(file_name)
    stat(file_name)
  rescue Errno::ENOENT
    # Directory vanished or is not writable; caller treats nil as "unknown".
    file_name = nil
  ensure
    FileUtils.rm_f(file_name) if file_name
  end
end
# frozen_string_literal: true | |
class Module
  # Declares an attribute reader backed by an internally-named instance variable.
  def attr_internal_reader(*attrs)
    attrs.each { |name| attr_internal_define(name, :reader) }
  end

  # Declares an attribute writer backed by an internally-named instance variable.
  def attr_internal_writer(*attrs)
    attrs.each { |name| attr_internal_define(name, :writer) }
  end

  # Declares an attribute reader and writer backed by an internally-named
  # instance variable.
  def attr_internal_accessor(*attrs)
    attr_internal_reader(*attrs)
    attr_internal_writer(*attrs)
  end
  alias_method :attr_internal, :attr_internal_accessor

  class << self; attr_accessor :attr_internal_naming_format end
  self.attr_internal_naming_format = "@_%s"

  private
    # Maps an attribute name to its backing ivar, e.g. :foo => "@_foo".
    def attr_internal_ivar_name(attr)
      Module.attr_internal_naming_format % attr
    end

    def attr_internal_define(attr_name, type)
      internal_name = attr_internal_ivar_name(attr_name).delete_prefix("@")
      # Use the native attr_reader/attr_writer (faster on some Ruby
      # implementations) to define the method under the internal name, then
      # alias it to the public name and drop the internal one.
      public_send("attr_#{type}", internal_name)
      if type == :writer
        alias_method "#{attr_name}=", "#{internal_name}="
        remove_method "#{internal_name}="
      else
        alias_method attr_name, internal_name
        remove_method internal_name
      end
    end
end
# frozen_string_literal: true | |
require_relative "../../abstract_unit" | |
require "active_support/core_ext/module/attr_internal" | |
class AttrInternalTest < ActiveSupport::TestCase
  def setup
    @klass = Class.new
    @object = @klass.new
  end

  # attr_internal_reader exposes a reader for @_foo but no writer.
  def test_reader
    assert_nothing_raised { @klass.attr_internal_reader :foo }

    assert_not @object.instance_variable_defined?("@_foo")
    assert_raise(NoMethodError) { @object.foo = 1 }

    @object.instance_variable_set("@_foo", 1)
    assert_nothing_raised { assert_equal 1, @object.foo }
  end

  # attr_internal_writer exposes a writer for @_foo but no reader.
  def test_writer
    assert_nothing_raised { @klass.attr_internal_writer :foo }

    assert_not @object.instance_variable_defined?("@_foo")
    assert_nothing_raised { assert_equal 1, @object.foo = 1 }

    assert_equal 1, @object.instance_variable_get("@_foo")
    assert_raise(NoMethodError) { @object.foo }
  end

  # attr_internal defines both the reader and the writer.
  def test_accessor
    assert_nothing_raised { @klass.attr_internal :foo }

    assert_not @object.instance_variable_defined?("@_foo")
    assert_nothing_raised { assert_equal 1, @object.foo = 1 }

    assert_equal 1, @object.instance_variable_get("@_foo")
    assert_nothing_raised { assert_equal 1, @object.foo }
  end

  # The backing variable name follows Module.attr_internal_naming_format.
  def test_naming_format
    assert_equal "@_%s", Module.attr_internal_naming_format
    assert_nothing_raised { Module.attr_internal_naming_format = "@abc%sdef" }
    @klass.attr_internal :foo

    assert_not @object.instance_variable_defined?("@_foo")
    assert_not @object.instance_variable_defined?("@abcfoodef")
    assert_nothing_raised { @object.foo = 1 }
    assert_not @object.instance_variable_defined?("@_foo")
    assert @object.instance_variable_defined?("@abcfoodef")
  ensure
    # Restore the global format so other tests are unaffected.
    Module.attr_internal_naming_format = "@_%s"
  end
end
# frozen_string_literal: true | |
require "active_support/core_ext/module/redefine_method" | |
class Class
  # Declare a class-level attribute whose value is inheritable by subclasses.
  # Subclasses can change their own value and it will not impact parent class.
  #
  # ==== Options
  #
  # * <tt>:instance_reader</tt> - Sets the instance reader method (defaults to true).
  # * <tt>:instance_writer</tt> - Sets the instance writer method (defaults to true).
  # * <tt>:instance_accessor</tt> - Sets both instance methods (defaults to true).
  # * <tt>:instance_predicate</tt> - Sets a predicate method (defaults to true).
  # * <tt>:default</tt> - Sets a default value for the attribute (defaults to nil).
  #
  # ==== Examples
  #
  #   class Base
  #     class_attribute :setting
  #   end
  #
  #   class Subclass < Base
  #   end
  #
  #   Base.setting = true
  #   Subclass.setting # => true
  #   Subclass.setting = false
  #   Subclass.setting # => false
  #   Base.setting # => true
  #
  # In the above case as long as Subclass does not assign a value to setting
  # by performing <tt>Subclass.setting = _something_</tt>, <tt>Subclass.setting</tt>
  # would read value assigned to parent class. Once Subclass assigns a value then
  # the value assigned by Subclass would be returned.
  #
  # This matches normal Ruby method inheritance: think of writing an attribute
  # on a subclass as overriding the reader method. However, you need to be aware
  # when using +class_attribute+ with mutable structures as +Array+ or +Hash+.
  # In such cases, you don't want to do changes in place. Instead use setters:
  #
  #   Base.setting = []
  #   Base.setting # => []
  #   Subclass.setting # => []
  #
  #   # Appending in child changes both parent and child because it is the same object:
  #   Subclass.setting << :foo
  #   Base.setting # => [:foo]
  #   Subclass.setting # => [:foo]
  #
  #   # Use setters to not propagate changes:
  #   Base.setting = []
  #   Subclass.setting += [:foo]
  #   Base.setting # => []
  #   Subclass.setting # => [:foo]
  #
  # For convenience, an instance predicate method is defined as well.
  # To skip it, pass <tt>instance_predicate: false</tt>.
  #
  #   Subclass.setting? # => false
  #
  # Instances may overwrite the class value in the same way:
  #
  #   Base.setting = true
  #   object = Base.new
  #   object.setting # => true
  #   object.setting = false
  #   object.setting # => false
  #   Base.setting # => true
  #
  # To opt out of the instance reader method, pass <tt>instance_reader: false</tt>.
  #
  #   object.setting # => NoMethodError
  #   object.setting? # => NoMethodError
  #
  # To opt out of the instance writer method, pass <tt>instance_writer: false</tt>.
  #
  #   object.setting = false # => NoMethodError
  #
  # To opt out of both instance methods, pass <tt>instance_accessor: false</tt>.
  #
  # To set a default value for the attribute, pass <tt>default:</tt>, like so:
  #
  #   class_attribute :settings, default: {}
  def class_attribute(*attrs, instance_accessor: true,
    instance_reader: instance_accessor, instance_writer: instance_accessor, instance_predicate: true, default: nil)

    # Generated method sources: +class_methods+ are compiled inside
    # <tt>class << self</tt>, +methods+ at the class-body level.
    class_methods, methods = [], []
    attrs.each do |name|
      unless name.is_a?(Symbol) || name.is_a?(String)
        raise TypeError, "#{name.inspect} is not a symbol nor a string"
      end

      # Empty class-level reader so the writer below can safely redefine it.
      class_methods << <<~RUBY # In case the method exists and is not public
        silence_redefinition_of_method def #{name}
        end
      RUBY

      # Instance reader: an instance-level @name, once written, shadows the
      # class-level value.
      methods << <<~RUBY if instance_reader
        silence_redefinition_of_method def #{name}
          defined?(@#{name}) ? @#{name} : self.class.#{name}
        end
      RUBY

      # Class-level writer: redefines the reader on the receiver's singleton
      # class, so a subclass write shadows (never mutates) the parent value.
      # When the receiver is itself a singleton class, the instance-level
      # reader is redefined too, so the attached object sees the value.
      class_methods << <<~RUBY
        silence_redefinition_of_method def #{name}=(value)
          redefine_method(:#{name}) { value } if singleton_class?
          redefine_singleton_method(:#{name}) { value }
          value
        end
      RUBY

      methods << <<~RUBY if instance_writer
        silence_redefinition_of_method(:#{name}=)
        attr_writer :#{name}
      RUBY

      if instance_predicate
        class_methods << "silence_redefinition_of_method def #{name}?; !!self.#{name}; end"
        if instance_reader
          methods << "silence_redefinition_of_method def #{name}?; !!self.#{name}; end"
        end
      end
    end

    # Compile everything in a single eval, attributed to the caller's
    # file/line for useful backtraces.
    location = caller_locations(1, 1).first
    class_eval(["class << self", *class_methods, "end", *methods].join(";").tr("\n", ";"), location.path, location.lineno)

    # Seed each attribute with the default through the generated writer.
    attrs.each { |name| public_send("#{name}=", default) }
  end
end
# frozen_string_literal: true | |
require_relative "../../abstract_unit" | |
require "active_support/core_ext/module/attribute_accessors_per_thread" | |
class ModuleAttributeAccessorPerThreadTest < ActiveSupport::TestCase
  class MyClass
    thread_mattr_accessor :foo
    thread_mattr_accessor :bar, instance_writer: false
    thread_mattr_reader :shaq, instance_reader: false
    thread_mattr_accessor :camp, instance_accessor: false
  end

  class SubMyClass < MyClass
  end

  setup do
    @class = MyClass
    @subclass = SubMyClass
    @object = @class.new
  end

  # Under the default (:thread) isolation level, a fiber created on the same
  # thread sees the same value.
  def test_is_shared_between_fibers
    @class.foo = 42

    enumerator = Enumerator.new do |yielder|
      yielder.yield @class.foo
    end

    assert_equal 42, enumerator.next
  end

  # Enumerator#next runs its block in a separate fiber, so with :fiber
  # isolation the value written here is not visible inside it.
  def test_is_not_shared_between_fibers_if_isolation_level_is_fiber
    previous_level = ActiveSupport::IsolatedExecutionState.isolation_level
    ActiveSupport::IsolatedExecutionState.isolation_level = :fiber

    @class.foo = 42

    enumerator = Enumerator.new do |yielder|
      yielder.yield @class.foo
    end

    assert_nil enumerator.next
  ensure
    ActiveSupport::IsolatedExecutionState.isolation_level = previous_level
  end

  # The +default:+ value only seeds the declaring thread's state; other
  # threads (here, the main test thread) still start at nil.
  def test_can_initialize_with_default_value
    Thread.new do
      @class.thread_mattr_accessor :baz, default: "default_value"

      assert_equal "default_value", @class.baz
    end.join

    assert_nil @class.baz
  end

  # The tests below run inside fresh threads so values written by other tests
  # on the main thread cannot leak in.
  def test_should_use_mattr_default
    Thread.new do
      assert_nil @class.foo
      assert_nil @object.foo
    end.join
  end

  def test_should_set_mattr_value
    Thread.new do
      @class.foo = :test
      assert_equal :test, @class.foo

      @class.foo = :test2
      assert_equal :test2, @class.foo
    end.join
  end

  def test_should_not_create_instance_writer
    Thread.new do
      assert_respond_to @class, :foo
      assert_respond_to @class, :foo=
      assert_respond_to @object, :bar
      assert_not_respond_to @object, :bar=
    end.join
  end

  def test_should_not_create_instance_reader
    Thread.new do
      assert_respond_to @class, :shaq
      assert_not_respond_to @object, :shaq
    end.join
  end

  def test_should_not_create_instance_accessors
    Thread.new do
      assert_respond_to @class, :camp
      assert_not_respond_to @object, :camp
      assert_not_respond_to @object, :camp=
    end.join
  end

  # Thread.pass encourages interleaving; each thread must still read back
  # exactly the value it wrote.
  def test_values_should_not_bleed_between_threads
    threads = []
    threads << Thread.new do
      @class.foo = "things"
      Thread.pass
      assert_equal "things", @class.foo
    end

    threads << Thread.new do
      @class.foo = "other things"
      Thread.pass
      assert_equal "other things", @class.foo
    end

    threads << Thread.new do
      @class.foo = "really other things"
      Thread.pass
      assert_equal "really other things", @class.foo
    end

    threads.each(&:join)
  end

  def test_should_raise_name_error_if_attribute_name_is_invalid
    exception = assert_raises NameError do
      Class.new do
        thread_cattr_reader "1nvalid"
      end
    end
    assert_match "invalid attribute name: 1nvalid", exception.message

    exception = assert_raises NameError do
      Class.new do
        thread_cattr_writer "1nvalid"
      end
    end
    assert_match "invalid attribute name: 1nvalid", exception.message

    exception = assert_raises NameError do
      Class.new do
        thread_mattr_reader "1valid_part"
      end
    end
    assert_match "invalid attribute name: 1valid_part", exception.message

    exception = assert_raises NameError do
      Class.new do
        thread_mattr_writer "2valid_part"
      end
    end
    assert_match "invalid attribute name: 2valid_part", exception.message
  end

  def test_should_return_same_value_by_class_or_instance_accessor
    @class.foo = "fries"

    assert_equal @class.foo, @object.foo
  end

  # Unlike mattr_accessor, per-thread attributes are keyed per class, so they
  # are not shared across the inheritance chain.
  def test_should_not_affect_superclass_if_subclass_set_value
    @class.foo = "super"
    assert_equal "super", @class.foo
    assert_nil @subclass.foo

    @subclass.foo = "sub"
    assert_equal "super", @class.foo
    assert_equal "sub", @subclass.foo
  end
end
# frozen_string_literal: true | |
require_relative "../../abstract_unit" | |
require "active_support/core_ext/module/attribute_accessors" | |
class ModuleAttributeAccessorTest < ActiveSupport::TestCase
  def setup
    m = @module = Module.new do
      mattr_accessor :foo
      mattr_accessor :bar, instance_writer: false
      mattr_reader :shaq, instance_reader: false
      mattr_accessor :camp, instance_accessor: false

      # Block form and the +default:+ keyword are the two ways to supply an
      # initial value.
      cattr_accessor(:defa) { "default_accessor_value" }
      cattr_reader(:defr) { "default_reader_value" }
      cattr_writer(:defw) { "default_writer_value" }
      cattr_accessor(:deff) { false }
      cattr_accessor(:quux) { :quux }

      cattr_accessor :def_accessor, default: "default_accessor_value"
      cattr_reader :def_reader, default: "default_reader_value"
      cattr_writer :def_writer, default: "default_writer_value"
      cattr_accessor :def_false, default: false
      cattr_accessor(:def_priority, default: false) { :no_priority }
    end
    @class = Class.new
    @class.instance_eval { include m }
    @object = @class.new
  end

  def test_should_use_mattr_default
    assert_nil @module.foo
    assert_nil @object.foo
  end

  def test_mattr_default_keyword_arguments
    assert_equal "default_accessor_value", @module.def_accessor
    assert_equal "default_reader_value", @module.def_reader
    assert_equal "default_writer_value", @module.class_variable_get(:@@def_writer)
  end

  # A +false+ default must actually be stored (it is non-nil).
  def test_mattr_can_default_to_false
    assert_equal false, @module.def_false
    assert_equal false, @module.deff
  end

  # When both are given, the explicit +default:+ wins over the block.
  def test_mattr_default_priority
    assert_equal false, @module.def_priority
  end

  # Class-level and instance-level writers share one class variable.
  def test_should_set_mattr_value
    @module.foo = :test
    assert_equal :test, @object.foo

    @object.foo = :test2
    assert_equal :test2, @module.foo
  end

  def test_cattr_accessor_default_value
    assert_equal :quux, @module.quux
    assert_equal :quux, @object.quux
  end

  def test_should_not_create_instance_writer
    assert_respond_to @module, :foo
    assert_respond_to @module, :foo=
    assert_respond_to @object, :bar
    assert_not_respond_to @object, :bar=
  end

  def test_should_not_create_instance_reader
    assert_respond_to @module, :shaq
    assert_not_respond_to @object, :shaq
  end

  def test_should_not_create_instance_accessors
    assert_respond_to @module, :camp
    assert_not_respond_to @object, :camp
    assert_not_respond_to @object, :camp=
  end

  # Multiline names must be rejected outright: the name is interpolated into
  # evaluated code, so per-line regexp anchors would not be safe.
  def test_should_raise_name_error_if_attribute_name_is_invalid
    exception = assert_raises NameError do
      Class.new do
        cattr_reader "1nvalid"
      end
    end
    assert_match "invalid attribute name: 1nvalid", exception.message

    exception = assert_raises NameError do
      Class.new do
        cattr_writer "1nvalid"
      end
    end
    assert_match "invalid attribute name: 1nvalid", exception.message

    exception = assert_raises NameError do
      Class.new do
        mattr_reader "valid_part\ninvalid_part"
      end
    end
    assert_match "invalid attribute name: valid_part\ninvalid_part", exception.message

    exception = assert_raises NameError do
      Class.new do
        mattr_writer "valid_part\ninvalid_part"
      end
    end
    assert_match "invalid attribute name: valid_part\ninvalid_part", exception.message
  end

  def test_should_use_default_value_if_block_passed
    assert_equal "default_accessor_value", @module.defa
    assert_equal "default_reader_value", @module.defr
    assert_equal "default_writer_value", @module.class_variable_get("@@defw")
  end

  # The default block is evaluated once at declaration time, not per read.
  def test_method_invocation_should_not_invoke_the_default_block
    count = 0

    @module.cattr_accessor(:defcount) { count += 1 }

    assert_equal 1, count

    assert_no_difference "count" do
      @module.defcount
    end
  end

  # One block evaluation per declared attribute.
  def test_declaring_multiple_attributes_at_once_invokes_the_block_multiple_times
    count = 0

    @module.cattr_accessor(:defn1, :defn2) { count += 1 }

    assert_equal 1, @module.defn1
    assert_equal 2, @module.defn2
  end

  # Declaring on a singleton class raises and must leave no stray class
  # variable behind.
  def test_declaring_attributes_on_singleton_errors
    klass = Class.new

    ex = assert_raises TypeError do
      class << klass
        mattr_accessor :my_attr
      end
    end
    assert_equal "module attributes should be defined directly on class, not singleton", ex.message

    assert_not_includes Module.class_variables, :@@my_attr
  end
end
# frozen_string_literal: true | |
# == Attribute Accessors | |
# | |
# Extends the module object with class/module and instance accessors for | |
# class/module attributes, just like the native attr* accessors for instance | |
# attributes. | |
class Module
  # Defines a class/module-level reader for each attribute in +syms+, backed
  # by the class variable <tt>@@sym</tt>, plus (by default) an instance-level
  # reader. The class variable is initialized to +nil+ unless it was already
  # defined. All generated methods are public, even when this method is
  # called under +private+ or +protected+.
  #
  #   module HairColors
  #     mattr_reader :hair_colors
  #   end
  #
  #   HairColors.hair_colors # => nil
  #   HairColors.class_variable_set("@@hair_colors", [:brown, :black])
  #   HairColors.hair_colors # => [:brown, :black]
  #
  # The attribute name must be a valid Ruby method name, otherwise NameError
  # is raised.
  #
  #   module Foo
  #     mattr_reader :"1_Badname"
  #   end
  #   # => NameError: invalid attribute name: 1_Badname
  #
  # Pass <tt>instance_reader: false</tt> or <tt>instance_accessor: false</tt>
  # to omit the instance reader, and <tt>default:</tt> (or a block) to supply
  # an initial value.
  def mattr_reader(*syms, instance_reader: true, instance_accessor: true, default: nil, location: nil)
    # Class variables belong on the class/module itself; going through a
    # singleton class would not behave as callers expect.
    raise TypeError, "module attributes should be defined directly on class, not singleton" if singleton_class?
    location ||= caller_locations(1, 1).first

    definitions = []
    syms.each do |sym|
      # The name is interpolated into evaluated code below, so reject
      # anything that is not a plain, single-line method name.
      raise NameError.new("invalid attribute name: #{sym}") unless /\A[_A-Za-z]\w*\z/.match?(sym)

      definitions << "def self.#{sym}; @@#{sym}; end"
      definitions << "def #{sym}; @@#{sym}; end" if instance_reader && instance_accessor

      # A block acts as a lazy default, but an explicit +default+ wins.
      initial_value = (block_given? && default.nil?) ? yield : default
      # Never clobber an already-defined variable with nil.
      class_variable_set("@@#{sym}", initial_value) unless initial_value.nil? && class_variable_defined?("@@#{sym}")
    end
    module_eval(definitions.join(";"), location.path, location.lineno)
  end
  alias :cattr_reader :mattr_reader

  # Defines a class/module-level writer for each attribute in +syms+, backed
  # by the class variable <tt>@@sym</tt>, plus (by default) an instance-level
  # writer. All generated methods are public, even when this method is
  # called under +private+ or +protected+.
  #
  #   module HairColors
  #     mattr_writer :hair_colors
  #   end
  #
  #   class Person
  #     include HairColors
  #   end
  #
  #   HairColors.hair_colors = [:brown, :black]
  #   Person.class_variable_get("@@hair_colors") # => [:brown, :black]
  #   Person.new.hair_colors = [:blonde, :red]
  #   HairColors.class_variable_get("@@hair_colors") # => [:blonde, :red]
  #
  # Pass <tt>instance_writer: false</tt> or <tt>instance_accessor: false</tt>
  # to omit the instance writer, and <tt>default:</tt> (or a block) to supply
  # an initial value.
  def mattr_writer(*syms, instance_writer: true, instance_accessor: true, default: nil, location: nil)
    raise TypeError, "module attributes should be defined directly on class, not singleton" if singleton_class?
    location ||= caller_locations(1, 1).first

    definitions = []
    syms.each do |sym|
      raise NameError.new("invalid attribute name: #{sym}") unless /\A[_A-Za-z]\w*\z/.match?(sym)

      definitions << "def self.#{sym}=(val); @@#{sym} = val; end"
      definitions << "def #{sym}=(val); @@#{sym} = val; end" if instance_writer && instance_accessor

      initial_value = (block_given? && default.nil?) ? yield : default
      class_variable_set("@@#{sym}", initial_value) unless initial_value.nil? && class_variable_defined?("@@#{sym}")
    end
    module_eval(definitions.join(";"), location.path, location.lineno)
  end
  alias :cattr_writer :mattr_writer

  # Defines class/module-level and (by default) instance-level reader and
  # writer, i.e. the combination of +mattr_reader+ and +mattr_writer+.
  #
  #   module HairColors
  #     mattr_accessor :hair_colors
  #   end
  #
  #   class Person
  #     include HairColors
  #   end
  #
  #   HairColors.hair_colors = [:brown, :black, :blonde, :red]
  #   HairColors.hair_colors # => [:brown, :black, :blonde, :red]
  #   Person.new.hair_colors # => [:brown, :black, :blonde, :red]
  #
  # The value is stored in a single class variable, so it is shared across
  # the whole inheritance tree: a change made through a subclass is visible
  # to the parent class and vice versa.
  #
  # Pass <tt>instance_reader: false</tt>, <tt>instance_writer: false</tt>, or
  # <tt>instance_accessor: false</tt> to omit the corresponding instance
  # methods, and <tt>default:</tt> (or a block) to supply an initial value.
  def mattr_accessor(*syms, instance_reader: true, instance_writer: true, instance_accessor: true, default: nil, &blk)
    # Capture the caller once so both generated halves report the same
    # definition site. The block is forwarded to the reader only, so a lazy
    # default is evaluated exactly once per attribute.
    location = caller_locations(1, 1).first
    mattr_reader(*syms, instance_reader: instance_reader, instance_accessor: instance_accessor, default: default, location: location, &blk)
    mattr_writer(*syms, instance_writer: instance_writer, instance_accessor: instance_accessor, default: default, location: location)
  end
  alias :cattr_accessor :mattr_accessor
end
# frozen_string_literal: true | |
# == Attribute Accessors per Thread | |
# | |
# Extends the module object with class/module and instance accessors for | |
# class/module attributes, just like the native attr* accessors for instance | |
# attributes, but does so on a per-thread basis. | |
# | |
# So the values are scoped within the Thread.current space under the class name | |
# of the module. | |
# | |
# Note that it can also be scoped per-fiber if +Rails.application.config.active_support.isolation_level+ | |
# is set to +:fiber+. | |
class Module
  # Defines a per-thread class attribute and creates class and instance reader methods.
  # The underlying per-thread class variable is set to +nil+, if it is not previously defined.
  #
  #   module Current
  #     thread_mattr_reader :user
  #   end
  #
  #   Current.user = "DHH"
  #   Current.user # => "DHH"
  #   Thread.new { Current.user }.value # => nil
  #
  # The attribute name must be a valid method name in Ruby.
  #
  #   module Foo
  #     thread_mattr_reader :"1_Badname"
  #   end
  #   # => NameError: invalid attribute name: 1_Badname
  #
  # To omit the instance reader method, pass
  # <tt>instance_reader: false</tt> or <tt>instance_accessor: false</tt>.
  #
  #   class Current
  #     thread_mattr_reader :user, instance_reader: false
  #   end
  #
  #   Current.new.user # => NoMethodError
  def thread_mattr_reader(*syms, instance_reader: true, instance_accessor: true, default: nil) # :nodoc:
    syms.each do |sym|
      # Anchor with \A..\z rather than ^..$: the name is interpolated into
      # class_eval'd code below, and per-line anchors would accept a
      # multiline string such as "valid\ninvalid", injecting arbitrary code.
      # This also matches the validation performed by mattr_reader.
      raise NameError.new("invalid attribute name: #{sym}") unless /\A[_A-Za-z]\w*\z/.match?(sym)

      # The following generated method concatenates `name` because we want it
      # to work with inheritance via polymorphism.
      class_eval(<<-EOS, __FILE__, __LINE__ + 1)
        def self.#{sym}
          @__thread_mattr_#{sym} ||= "attr_\#{name}_#{sym}"
          ::ActiveSupport::IsolatedExecutionState[@__thread_mattr_#{sym}]
        end
      EOS

      if instance_reader && instance_accessor
        class_eval(<<-EOS, __FILE__, __LINE__ + 1)
          def #{sym}
            self.class.#{sym}
          end
        EOS
      end

      # Seed only the current thread's state; other threads still start at nil.
      ::ActiveSupport::IsolatedExecutionState["attr_#{name}_#{sym}"] = default unless default.nil?
    end
  end
  alias :thread_cattr_reader :thread_mattr_reader

  # Defines a per-thread class attribute and creates a class and instance writer methods to
  # allow assignment to the attribute.
  #
  #   module Current
  #     thread_mattr_writer :user
  #   end
  #
  #   Current.user = "DHH"
  #   Thread.current[:attr_Current_user] # => "DHH"
  #
  # To omit the instance writer method, pass
  # <tt>instance_writer: false</tt> or <tt>instance_accessor: false</tt>.
  #
  #   class Current
  #     thread_mattr_writer :user, instance_writer: false
  #   end
  #
  #   Current.new.user = "DHH" # => NoMethodError
  def thread_mattr_writer(*syms, instance_writer: true, instance_accessor: true, default: nil) # :nodoc:
    syms.each do |sym|
      # See thread_mattr_reader above: whole-string anchors prevent code
      # injection through multiline attribute names.
      raise NameError.new("invalid attribute name: #{sym}") unless /\A[_A-Za-z]\w*\z/.match?(sym)

      # The following generated method concatenates `name` because we want it
      # to work with inheritance via polymorphism.
      class_eval(<<-EOS, __FILE__, __LINE__ + 1)
        def self.#{sym}=(obj)
          @__thread_mattr_#{sym} ||= "attr_\#{name}_#{sym}"
          ::ActiveSupport::IsolatedExecutionState[@__thread_mattr_#{sym}] = obj
        end
      EOS

      if instance_writer && instance_accessor
        class_eval(<<-EOS, __FILE__, __LINE__ + 1)
          def #{sym}=(obj)
            self.class.#{sym} = obj
          end
        EOS
      end

      public_send("#{sym}=", default) unless default.nil?
    end
  end
  alias :thread_cattr_writer :thread_mattr_writer

  # Defines both class and instance accessors for class attributes.
  #
  #   class Account
  #     thread_mattr_accessor :user
  #   end
  #
  #   Account.user = "DHH"
  #   Account.user # => "DHH"
  #   Account.new.user # => "DHH"
  #
  # Unlike +mattr_accessor+, values are *not* shared with subclasses or parent classes.
  # If a subclass changes the value, the parent class' value is not changed.
  # If the parent class changes the value, the value of subclasses is not changed.
  #
  #   class Customer < Account
  #   end
  #
  #   Account.user # => "DHH"
  #   Customer.user # => nil
  #   Customer.user = "Rafael"
  #   Customer.user # => "Rafael"
  #   Account.user # => "DHH"
  #
  # To omit the instance writer method, pass <tt>instance_writer: false</tt>.
  # To omit the instance reader method, pass <tt>instance_reader: false</tt>.
  #
  #   class Current
  #     thread_mattr_accessor :user, instance_writer: false, instance_reader: false
  #   end
  #
  #   Current.new.user = "DHH" # => NoMethodError
  #   Current.new.user # => NoMethodError
  #
  # Or pass <tt>instance_accessor: false</tt>, to omit both instance methods.
  #
  #   class Current
  #     thread_mattr_accessor :user, instance_accessor: false
  #   end
  #
  #   Current.new.user = "DHH" # => NoMethodError
  #   Current.new.user # => NoMethodError
  def thread_mattr_accessor(*syms, instance_reader: true, instance_writer: true, instance_accessor: true, default: nil)
    # The default is passed to the reader only, which seeds the state
    # directly; the writer would merely set the same value again.
    thread_mattr_reader(*syms, instance_reader: instance_reader, instance_accessor: instance_accessor, default: default)
    thread_mattr_writer(*syms, instance_writer: instance_writer, instance_accessor: instance_accessor)
  end
  alias :thread_cattr_accessor :thread_mattr_accessor
end
# frozen_string_literal: true | |
require_relative "../../abstract_unit" | |
require "active_support/core_ext/module/aliasing" | |
module AttributeAliasing
  # Minimal target class with one ordinary attribute and one (un-Ruby-ish)
  # capitalized attribute, used to exercise alias_attribute.
  class Content
    attr_accessor :title, :Data

    def initialize
      @title = nil
      @Data = nil
    end

    def title?
      !title.nil?
    end

    def Data?
      !self.Data.nil?
    end
  end

  # Aliases :subject/:body onto the inherited attributes, including the
  # generated predicate methods.
  class Email < Content
    alias_attribute :subject, :title
    alias_attribute :body, :Data
  end
end
class AttributeAliasingTest < ActiveSupport::TestCase
  # Reads and writes through the alias reach the original attribute, and the
  # aliased predicate tracks it in both directions.
  def test_attribute_alias
    email = AttributeAliasing::Email.new

    assert_not_predicate email, :subject?
    email.title = "Upgrade computer"
    assert_equal "Upgrade computer", email.subject
    assert_predicate email, :subject?

    email.subject = "We got a long way to go"
    assert_equal "We got a long way to go", email.title
    assert_predicate email, :title?
  end

  def test_aliasing_to_uppercase_attributes
    # Although it's very un-Ruby, some people's AR-mapped tables have
    # upper-case attributes, and when people want to alias those names
    # to more sensible ones, everything goes *foof*.
    email = AttributeAliasing::Email.new

    assert_not_predicate email, :body?
    assert_not_predicate email, :Data?

    email.body = "No, really, this is not a joke."
    assert_equal "No, really, this is not a joke.", email.Data
    assert_predicate email, :Data?

    email.Data = "Uppercased methods are the suck"
    assert_equal "Uppercased methods are the suck", email.body
    assert_predicate email, :body?
  end
end
# frozen_string_literal: true | |
require_relative "../../abstract_unit" | |
require "active_support/core_ext/class/attribute" | |
class ClassAttributeTest < ActiveSupport::TestCase
  def setup
    @klass = Class.new do
      class_attribute :setting
      class_attribute :timeout, default: 5
    end

    @sub = Class.new(@klass)
  end

  test "defaults to nil" do
    assert_nil @klass.setting
    assert_nil @sub.setting
  end

  test "custom default" do
    assert_equal 5, @klass.timeout
  end

  test "inheritable" do
    @klass.setting = 1
    assert_equal 1, @sub.setting
  end

  # A subclass write shadows the parent's value without mutating it, and the
  # shadowed value is itself inherited further down.
  test "overridable" do
    @sub.setting = 1
    assert_nil @klass.setting

    @klass.setting = 2
    assert_equal 1, @sub.setting

    assert_equal 1, Class.new(@sub).setting
  end

  test "predicate method" do
    assert_equal false, @klass.setting?
    @klass.setting = 1
    assert_equal true, @klass.setting?
  end

  test "instance reader delegates to class" do
    assert_nil @klass.new.setting

    @klass.setting = 1
    assert_equal 1, @klass.new.setting
  end

  # An instance-level write shadows the class value for that instance only.
  test "instance override" do
    object = @klass.new
    object.setting = 1
    assert_nil @klass.setting

    @klass.setting = 2
    assert_equal 1, object.setting
  end

  test "instance predicate" do
    object = @klass.new
    assert_equal false, object.setting?
    object.setting = 1
    assert_equal true, object.setting?
  end

  test "disabling instance writer" do
    object = Class.new { class_attribute :setting, instance_writer: false }.new
    assert_raise(NoMethodError) { object.setting = "boom" }
    assert_not_respond_to object, :setting=
  end

  # Disabling the reader also removes the instance predicate, which depends
  # on it.
  test "disabling instance reader" do
    object = Class.new { class_attribute :setting, instance_reader: false }.new
    assert_raise(NoMethodError) { object.setting }
    assert_not_respond_to object, :setting
    assert_raise(NoMethodError) { object.setting? }
    assert_not_respond_to object, :setting?
  end

  test "disabling both instance writer and reader" do
    object = Class.new { class_attribute :setting, instance_accessor: false }.new
    assert_raise(NoMethodError) { object.setting }
    assert_not_respond_to object, :setting
    assert_raise(NoMethodError) { object.setting? }
    assert_not_respond_to object, :setting?
    assert_raise(NoMethodError) { object.setting = "boom" }
    assert_not_respond_to object, :setting=
  end

  test "disabling instance predicate" do
    object = Class.new { class_attribute :setting, instance_predicate: false }.new
    assert_raise(NoMethodError) { object.setting? }
    assert_not_respond_to object, :setting?
  end

  # Writing through an object's singleton class makes the value visible to
  # that object's instance reader.
  test "works well with singleton classes" do
    object = @klass.new
    object.singleton_class.setting = "foo"
    assert_equal "foo", object.setting
  end

  test "works well with module singleton classes" do
    @module = Module.new do
      class << self
        class_attribute :settings, default: 42
      end
    end

    assert_equal 42, @module.settings
  end

  test "setter returns set value" do
    val = @klass.public_send(:setting=, 1)
    assert_equal 1, val
  end
end
# frozen_string_literal: true | |
require "active_support/inflector/methods" | |
module ActiveSupport
  # Autoload and eager load conveniences for your library.
  #
  # Lets you declare autoloads following Rails conventions — the file path
  # is derived from the constant name, so it rarely needs to be spelled
  # out — and lets you mark a set of constants for eager loading:
  #
  #   module MyLib
  #     extend ActiveSupport::Autoload
  #
  #     autoload :Model
  #
  #     eager_autoload do
  #       autoload :Cache
  #     end
  #   end
  #
  # Then your library can be eager loaded by simply calling:
  #
  #   MyLib.eager_load!
  module Autoload
    def self.extended(base) # :nodoc:
      # Pre-declare the bookkeeping ivars on Ruby < 3 — presumably to avoid
      # uninitialized-instance-variable warnings; on Ruby 3+ this is a no-op.
      return unless RUBY_VERSION < "3"

      base.class_eval do
        @_autoloads = nil
        @_under_path = nil
        @_at_path = nil
        @_eager_autoload = false
      end
    end

    # Declares an autoload for +const_name+. When +path+ is omitted it is
    # derived from the fully-qualified constant name, honoring any enclosing
    # autoload_under/autoload_at block.
    def autoload(const_name, path = @_at_path)
      unless path
        full_name = [name, @_under_path, const_name.to_s].compact.join("::")
        path = Inflector.underscore(full_name)
      end

      # Remember the constant for eager_load! when inside eager_autoload.
      (@_eagerloaded_constants ||= []) << const_name if @_eager_autoload

      super const_name, path
    end

    # Declares all autoloads in the block relative to the given +path+ suffix.
    def autoload_under(path)
      previous_path = @_under_path
      @_under_path = path
      yield
    ensure
      @_under_path = previous_path
    end

    # Declares all autoloads in the block to live at the exact +path+.
    def autoload_at(path)
      previous_path = @_at_path
      @_at_path = path
      yield
    ensure
      @_at_path = previous_path
    end

    # Marks every autoload declared in the block for eager loading.
    def eager_autoload
      previous_flag = @_eager_autoload
      @_eager_autoload = true
      yield
    ensure
      @_eager_autoload = previous_flag
    end

    # Forces every constant marked via eager_autoload to load immediately.
    def eager_load!
      return unless @_eagerloaded_constants

      @_eagerloaded_constants.each { |const_name| const_get(const_name) }
      @_eagerloaded_constants = nil
    end
  end
end
# frozen_string_literal: true | |
require_relative "abstract_unit" | |
class TestAutoloadModule < ActiveSupport::TestCase
  # Each test runs isolated so autoload/require state cannot leak between them.
  include ActiveSupport::Testing::Isolation

  # Top-level namespace the tests declare autoloads under.
  module ::Fixtures
    extend ActiveSupport::Autoload

    module Autoload
      extend ActiveSupport::Autoload
    end
  end

  def setup
    @some_class_path = File.expand_path("test/fixtures/autoload/some_class.rb")
    @another_class_path = File.expand_path("test/fixtures/autoload/another_class.rb")
    # Make the fixture files requirable by relative path.
    $LOAD_PATH << "test"
  end

  def teardown
    # Remove the "test" entry pushed in setup.
    $LOAD_PATH.pop
  end

  test "the autoload module works like normal autoload" do
    module ::Fixtures::Autoload
      autoload :SomeClass, "fixtures/autoload/some_class"
    end

    assert_nothing_raised { ::Fixtures::Autoload::SomeClass }
  end

  test "when specifying an :eager constant it still works like normal autoload by default" do
    module ::Fixtures::Autoload
      eager_autoload do
        autoload :SomeClass, "fixtures/autoload/some_class"
      end
    end

    # Not loaded until the constant is referenced (or eager_load! is called).
    assert_not_includes $LOADED_FEATURES, @some_class_path
    assert_nothing_raised { ::Fixtures::Autoload::SomeClass }
  end

  test "the location of autoloaded constants defaults to :name.underscore" do
    module ::Fixtures::Autoload
      autoload :SomeClass
    end

    assert_not_includes $LOADED_FEATURES, @some_class_path
    assert_nothing_raised { ::Fixtures::Autoload::SomeClass }
  end

  test "the location of :eager autoloaded constants defaults to :name.underscore" do
    module ::Fixtures::Autoload
      eager_autoload do
        autoload :SomeClass
      end
    end

    assert_not_includes $LOADED_FEATURES, @some_class_path
    # eager_load! forces the require up front.
    ::Fixtures::Autoload.eager_load!
    assert_includes $LOADED_FEATURES, @some_class_path
    assert_nothing_raised { ::Fixtures::Autoload::SomeClass }
  end

  test "a directory for a block of autoloads can be specified" do
    module ::Fixtures
      autoload_under "autoload" do
        autoload :AnotherClass
      end
    end

    assert_not_includes $LOADED_FEATURES, @another_class_path
    assert_nothing_raised { ::Fixtures::AnotherClass }
  end

  test "a path for a block of autoloads can be specified" do
    module ::Fixtures
      autoload_at "fixtures/autoload/another_class" do
        autoload :AnotherClass
      end
    end

    assert_not_includes $LOADED_FEATURES, @another_class_path
    assert_nothing_raised { ::Fixtures::AnotherClass }
  end
end
# frozen_string_literal: true | |
# Activate the minitest gem explicitly (rather than any default copy
# bundled with the Ruby installation) before requiring it.
gem "minitest"

require "minitest"

# Install minitest's at_exit hook so the suite runs when the process ends.
Minitest.autorun
# frozen_string_literal: true | |
module ActiveSupport
  # Backtraces often include many lines that are irrelevant to the context
  # under review, which makes it hard to find the signal amongst the noise.
  # A BacktraceCleaner removes that noise with two kinds of hooks:
  #
  # * Filters modify individual lines — typically stripping long path
  #   prefixes so files are shown relative to the app directory.
  # * Silencers drop lines entirely — typically everything coming from a
  #   noisy library.
  #
  #   bc = ActiveSupport::BacktraceCleaner.new
  #   bc.add_filter { |line| line.gsub(Rails.root.to_s, '') } # strip the Rails.root prefix
  #   bc.add_silencer { |line| /puma|rubygems/.match?(line) } # skip any lines from puma or rubygems
  #   bc.clean(exception.backtrace) # perform the cleanup
  #
  # To reconfigure an existing BacktraceCleaner (like the default one in
  # Rails) and show as much data as possible, call #remove_silencers!; to
  # stop lines from being modified, call #remove_filters!. Together they
  # yield a completely untouched backtrace.
  #
  # Inspired by the Quiet Backtrace gem by thoughtbot.
  class BacktraceCleaner
    def initialize
      @filters = []
      @silencers = []
      # Default setup: shorten gem paths and hide gem/stdlib frames.
      add_gem_filter
      add_gem_silencer
      add_stdlib_silencer
    end

    # Returns the backtrace after all filters and silencers have been run
    # against it. Filters run first, then silencers.
    def clean(backtrace, kind = :silent)
      filtered = filter_backtrace(backtrace)

      case kind
      when :silent then silence(filtered)
      when :noise  then noise(filtered)
      else              filtered
      end
    end
    alias :filter :clean

    # Adds a filter from the block provided. Each line in the backtrace will
    # be mapped against this filter.
    #
    #   # Will turn "/my/rails/root/app/models/person.rb" into "/app/models/person.rb"
    #   backtrace_cleaner.add_filter { |line| line.gsub(Rails.root.to_s, '') }
    def add_filter(&block)
      @filters << block
    end

    # Adds a silencer from the block provided. If the silencer returns +true+
    # for a given line, it will be excluded from the clean backtrace.
    #
    #   # Will reject all lines that include the word "puma", like "/gems/puma/server.rb" or "/app/my_puma_server/rb"
    #   backtrace_cleaner.add_silencer { |line| /puma/.match?(line) }
    def add_silencer(&block)
      @silencers << block
    end

    # Removes all silencers, but leaves in the filters. Useful if your
    # context of debugging suddenly expands as you suspect a bug in one of
    # the libraries you use.
    def remove_silencers!
      @silencers = []
    end

    # Removes all filters, but leaves in the silencers. Useful if you
    # suddenly need to see entire filepaths in the backtrace that you had
    # already filtered out.
    def remove_filters!
      @filters = []
    end

    private
      # Matches lines already rewritten by the gem filter: "name (version) path".
      FORMATTED_GEMS_PATTERN = /\A[^\/]+ \([\w.]+\) /

      # Rewrites "<gem root>/gems/foo-1.2.3/lib/x.rb" as "foo (1.2.3) lib/x.rb".
      def add_gem_filter
        escaped_paths = (Gem.path | [Gem.default_dir]).map { |root| Regexp.escape(root) }
        return if escaped_paths.empty?

        gems_regexp = %r{\A(#{escaped_paths.join('|')})/(bundler/)?gems/([^/]+)-([\w.]+)/(.*)}
        gems_result = '\3 (\4) \5'
        add_filter { |line| line.sub(gems_regexp, gems_result) }
      end

      # Hides frames the gem filter has rewritten (i.e. frames inside gems).
      def add_gem_silencer
        add_silencer { |line| FORMATTED_GEMS_PATTERN.match?(line) }
      end

      # Hides frames originating in the Ruby standard library.
      def add_stdlib_silencer
        add_silencer { |line| line.start_with?(RbConfig::CONFIG["rubylibdir"]) }
      end

      # Applies every filter to every line, in registration order.
      def filter_backtrace(backtrace)
        @filters.reduce(backtrace) do |lines, filter|
          lines.map { |line| filter.call(line) }
        end
      end

      # Drops every line any silencer matches.
      def silence(backtrace)
        @silencers.reduce(backtrace) do |lines, silencer|
          lines.reject { |line| silencer.call(line) }
        end
      end

      # Inverse of #silence: keeps only the lines a silencer matches.
      def noise(backtrace)
        backtrace.select do |line|
          @silencers.any? { |silencer| silencer.call(line) }
        end
      end
  end
end
# frozen_string_literal: true | |
class String
  # Marks String as string-like so <tt>Object#acts_like?(:string)</tt>
  # duck-type checks succeed for strings and string-like classes.
  def acts_like_string?
    true
  end
end
# frozen_string_literal: true | |
require_relative "behaviors/cache_delete_matched_behavior" | |
require_relative "behaviors/cache_increment_decrement_behavior" | |
require_relative "behaviors/cache_instrumentation_behavior" | |
require_relative "behaviors/cache_store_behavior" | |
require_relative "behaviors/cache_store_version_behavior" | |
require_relative "behaviors/cache_store_coder_behavior" | |
require_relative "behaviors/connection_pool_behavior" | |
require_relative "behaviors/encoded_key_cache_behavior" | |
require_relative "behaviors/failure_safety_behavior" | |
require_relative "behaviors/failure_raising_behavior" | |
require_relative "behaviors/local_cache_behavior" |
# frozen_string_literal: true | |
require "benchmark" | |
class << Benchmark
  # Benchmark realtime in milliseconds.
  #
  #   Benchmark.realtime { User.all }
  #   # => 8.0e-05
  #
  #   Benchmark.ms { User.all }
  #   # => 0.074
  def ms(&block)
    realtime(&block) * 1000
  end
end
# frozen_string_literal: true | |
require "active_support/core_ext/benchmark" | |
require "active_support/core_ext/hash/keys" | |
module ActiveSupport
  module Benchmarkable
    # Allows you to measure the execution time of a block in a template and
    # records the result to the log. Wrap this block around expensive operations
    # or possible bottlenecks to get a time reading for the operation. For
    # example, let's say you thought your file processing method was taking too
    # long; you could wrap it in a benchmark block.
    #
    #   <% benchmark 'Process data files' do %>
    #     <%= expensive_files_operation %>
    #   <% end %>
    #
    # That would add something like "Process data files (345.2ms)" to the log,
    # which you can then use to compare timings when optimizing your code.
    #
    # You may give an optional logger level (<tt>:debug</tt>, <tt>:info</tt>,
    # <tt>:warn</tt>, <tt>:error</tt>) as the <tt>:level</tt> option. The
    # default logger level value is <tt>:info</tt>.
    #
    #   <% benchmark 'Low-level files', level: :debug do %>
    #     <%= lowlevel_files_operation %>
    #   <% end %>
    #
    # Finally, you can pass <tt>silence: true</tt> to silence all log
    # activity (other than the timing information) from inside the block. This
    # is great for boiling down a noisy block to just a single statement that
    # produces one log line:
    #
    #   <% benchmark 'Process data files', level: :info, silence: true do %>
    #     <%= expensive_and_chatty_files_operation %>
    #   <% end %>
    #
    # Returns the block's return value. Raises ArgumentError if +options+
    # contains keys other than :level and :silence. When no logger is
    # available the block is simply yielded with no timing performed.
    def benchmark(message = "Benchmarking", options = {}, &block)
      if logger
        options.assert_valid_keys(:level, :silence)

        # Read the level into a local instead of writing it back into
        # +options+ (the old `options[:level] ||= :info`), so the caller's
        # hash is not mutated as a side effect of benchmarking.
        level = options[:level] || :info

        result = nil
        ms = Benchmark.ms { result = options[:silence] ? logger.silence(&block) : yield }
        logger.public_send(level, "%s (%.1fms)" % [ message, ms ])
        result
      else
        yield
      end
    end
  end
end
# frozen_string_literal: true | |
require_relative "abstract_unit" | |
class BenchmarkableTest < ActiveSupport::TestCase
  include ActiveSupport::Benchmarkable

  # benchmark() looks up #logger on the including object, so expose ours.
  attr_reader :buffer, :logger

  # Minimal in-memory log device; collects written lines for assertions.
  class Buffer
    include Enumerable

    def initialize; @lines = []; end
    def each(&block); @lines.each(&block); end
    def write(x); @lines << x; end
    def close; end
    def last; @lines.last; end
    def size; @lines.size; end
    def empty?; @lines.empty?; end
  end

  def setup
    @buffer = Buffer.new
    @logger = ActiveSupport::Logger.new(@buffer)
  end

  # benchmark requires a block: without one it raises LocalJumpError
  # and nothing is logged.
  def test_without_block
    assert_raise(LocalJumpError) { benchmark }
    assert_empty buffer
  end

  def test_defaults
    i_was_run = false
    benchmark { i_was_run = true }
    assert i_was_run
    assert_last_logged
  end

  def test_with_message
    i_was_run = false
    benchmark("test_run") { i_was_run = true }
    assert i_was_run
    assert_last_logged "test_run"
  end

  # silence: true suppresses log lines emitted inside the block,
  # leaving only the timing line.
  def test_with_silence
    assert_difference "buffer.count", +2 do
      benchmark("test_run") do
        logger.info "SOMETHING"
      end
    end

    assert_difference "buffer.count", +1 do
      benchmark("test_run", silence: true) do
        logger.info "NOTHING"
      end
    end
  end

  def test_within_level
    logger.level = ActiveSupport::Logger::DEBUG
    benchmark("included_debug_run", level: :debug) { }
    assert_last_logged "included_debug_run"
  end

  # A timing logged below the logger's configured level is dropped.
  def test_outside_level
    logger.level = ActiveSupport::Logger::ERROR
    benchmark("skipped_debug_run", level: :debug) { }
    assert_no_match(/skipped_debug_run/, buffer.last)
  ensure
    logger.level = ActiveSupport::Logger::DEBUG
  end

  private
    # Asserts the last log line looks like "<message> (<duration>)".
    def assert_last_logged(message = "Benchmarking")
      assert_match(/^#{message} \(.*\)$/, buffer.last)
    end
end
# frozen_string_literal: true | |
require "active_support/core_ext/big_decimal/conversions" |
# frozen_string_literal: true | |
require_relative "../abstract_unit" | |
require "active_support/core_ext/big_decimal" | |
class BigDecimalTest < ActiveSupport::TestCase
  # BigDecimal#to_s accepts an optional format string: "F" forces
  # conventional floating-point notation, a leading "+" adds an explicit
  # sign, and a digit count inserts a space every that-many fractional digits.
  def test_to_s
    bd = BigDecimal "0.01"
    assert_equal "0.01", bd.to_s
    assert_equal "+0.01", bd.to_s("+F")
    assert_equal "+0.0 1", bd.to_s("+1F")
  end
end
# frozen_string_literal: true | |
require "pathname" | |
class Pathname
  # A Pathname is blank only when its underlying string is empty:
  #
  #   Pathname.new("").blank?     # => true
  #   Pathname.new(" ").blank?    # => false
  #   Pathname.new("test").blank? # => false
  #
  # @return [true, false]
  def blank?
    to_s.length == 0
  end
end
# frozen_string_literal: true | |
require_relative "../../abstract_unit" | |
require "active_support/core_ext/pathname/blank" | |
class PathnameBlankTest < ActiveSupport::TestCase
  # Pathname#blank? is true only for the empty path; whitespace-only
  # paths are NOT blank (unlike String#blank?).
  def test_blank
    assert_predicate Pathname.new(""), :blank?
    assert_not_predicate Pathname.new("test"), :blank?
    assert_not_predicate Pathname.new(" "), :blank?
  end
end
# frozen_string_literal: true | |
require_relative "abstract_unit" | |
module ActiveSupport
  # Tests for Logger.broadcast, which mirrors logging calls made on one
  # logger out to additional loggers.
  class BroadcastLoggerTest < TestCase
    attr_reader :logger, :log1, :log2

    setup do
      @log1 = FakeLogger.new
      @log2 = FakeLogger.new
      # Extending log1 makes every call on it re-broadcast to log2.
      @log1.extend Logger.broadcast @log2
      @logger = @log1
    end

    # Generates one test per severity (DEBUG/INFO/WARN/ERROR/FATAL/UNKNOWN).
    Logger::Severity.constants.each do |level_name|
      method = level_name.downcase
      level = Logger::Severity.const_get(level_name)

      test "##{method} adds the message to all loggers" do
        logger.public_send(method, "msg")

        assert_equal [level, "msg", nil], log1.adds.first
        assert_equal [level, "msg", nil], log2.adds.first
      end
    end

    test "#close broadcasts to all loggers" do
      logger.close

      assert log1.closed, "should be closed"
      assert log2.closed, "should be closed"
    end

    test "#<< shovels the value into all loggers" do
      logger << "foo"

      assert_equal %w{ foo }, log1.chevrons
      assert_equal %w{ foo }, log2.chevrons
    end

    test "#level= assigns the level to all loggers" do
      assert_equal ::Logger::DEBUG, logger.level
      logger.level = ::Logger::FATAL

      assert_equal ::Logger::FATAL, log1.level
      assert_equal ::Logger::FATAL, log2.level
    end

    # NOTE: ::Logger::FATAL is used below merely as a convenient sentinel
    # value; progname/formatter accept arbitrary objects in FakeLogger.
    test "#progname= assigns to all the loggers" do
      assert_nil logger.progname
      logger.progname = ::Logger::FATAL

      assert_equal ::Logger::FATAL, log1.progname
      assert_equal ::Logger::FATAL, log2.progname
    end

    test "#formatter= assigns to all the loggers" do
      assert_nil logger.formatter
      logger.formatter = ::Logger::FATAL

      assert_equal ::Logger::FATAL, log1.formatter
      assert_equal ::Logger::FATAL, log2.formatter
    end

    test "#local_level= assigns the local_level to all loggers" do
      assert_equal ::Logger::DEBUG, logger.local_level
      logger.local_level = ::Logger::FATAL

      assert_equal ::Logger::FATAL, log1.local_level
      assert_equal ::Logger::FATAL, log2.local_level
    end

    test "#silence does not break custom loggers" do
      new_logger = FakeLogger.new
      custom_logger = CustomLogger.new
      custom_logger.extend(Logger.broadcast(new_logger))

      custom_logger.silence do
        custom_logger.error "from error"
        custom_logger.unknown "from unknown"
      end

      assert_equal [[::Logger::ERROR, "from error", nil], [::Logger::UNKNOWN, "from unknown", nil]], custom_logger.adds
      assert_equal [[::Logger::ERROR, "from error", nil], [::Logger::UNKNOWN, "from unknown", nil]], new_logger.adds
    end

    test "#silence silences all loggers below the default level of ERROR" do
      logger.silence do
        logger.debug "test"
      end

      assert_equal [], log1.adds
      assert_equal [], log2.adds
    end

    test "#silence does not silence at or above ERROR" do
      logger.silence do
        logger.error "from error"
        logger.unknown "from unknown"
      end

      assert_equal [[::Logger::ERROR, "from error", nil], [::Logger::UNKNOWN, "from unknown", nil]], log1.adds
      assert_equal [[::Logger::ERROR, "from error", nil], [::Logger::UNKNOWN, "from unknown", nil]], log2.adds
    end

    test "#silence allows you to override the silence level" do
      logger.silence(::Logger::FATAL) do
        logger.error "unseen"
        logger.fatal "seen"
      end

      assert_equal [[::Logger::FATAL, "seen", nil]], log1.adds
      assert_equal [[::Logger::FATAL, "seen", nil]], log2.adds
    end

    # Minimal logger double that records every interaction so tests can
    # assert on exactly what was broadcast.
    class CustomLogger
      include ActiveSupport::LoggerSilence

      attr_reader :adds, :closed, :chevrons
      attr_accessor :level, :progname, :formatter, :local_level

      def initialize
        @adds = []
        @closed = false
        @chevrons = []
        @level = ::Logger::DEBUG
        @local_level = ::Logger::DEBUG
        @progname = nil
        @formatter = nil
      end

      def debug(message, &block)
        add(::Logger::DEBUG, message, &block)
      end

      def info(message, &block)
        add(::Logger::INFO, message, &block)
      end

      def warn(message, &block)
        add(::Logger::WARN, message, &block)
      end

      def error(message, &block)
        add(::Logger::ERROR, message, &block)
      end

      def fatal(message, &block)
        add(::Logger::FATAL, message, &block)
      end

      def unknown(message, &block)
        add(::Logger::UNKNOWN, message, &block)
      end

      def <<(x)
        @chevrons << x
      end

      # Records the call only when it clears the local level, mimicking
      # how a real Logger filters by severity.
      def add(message_level, message = nil, progname = nil, &block)
        @adds << [message_level, message, progname] if message_level >= local_level
      end

      def close
        @closed = true
      end
    end

    class FakeLogger < CustomLogger
    end
  end
end
# frozen_string_literal: true | |
# Builder is a soft dependency: re-raise the LoadError with an actionable
# message when it is not available in the application's bundle.
begin
  require "builder"
rescue LoadError => e
  $stderr.puts "You don't have builder installed in your application. Please add it to your Gemfile and run bundle install"
  raise e
end
# frozen_string_literal: true | |
class Numeric
  # Successive powers of 1024, starting from one kilobyte in bytes.
  KILOBYTE = 1024
  MEGABYTE = KILOBYTE * 1024
  GIGABYTE = MEGABYTE * 1024
  TERABYTE = GIGABYTE * 1024
  PETABYTE = TERABYTE * 1024
  EXABYTE = PETABYTE * 1024

  # Enables byte calculations and declarations, like 45.bytes + 2.6.megabytes
  #
  #   2.bytes # => 2
  def bytes
    self
  end
  alias :byte :bytes

  # Returns the number of bytes in the given kilobytes.
  #
  #   2.kilobytes # => 2048
  def kilobytes
    KILOBYTE * self
  end
  alias :kilobyte :kilobytes

  # Returns the number of bytes in the given megabytes.
  #
  #   2.megabytes # => 2_097_152
  def megabytes
    MEGABYTE * self
  end
  alias :megabyte :megabytes

  # Returns the number of bytes in the given gigabytes.
  #
  #   2.gigabytes # => 2_147_483_648
  def gigabytes
    GIGABYTE * self
  end
  alias :gigabyte :gigabytes

  # Returns the number of bytes in the given terabytes.
  #
  #   2.terabytes # => 2_199_023_255_552
  def terabytes
    TERABYTE * self
  end
  alias :terabyte :terabytes

  # Returns the number of bytes in the given petabytes.
  #
  #   2.petabytes # => 2_251_799_813_685_248
  def petabytes
    PETABYTE * self
  end
  alias :petabyte :petabytes

  # Returns the number of bytes in the given exabytes.
  #
  #   2.exabytes # => 2_305_843_009_213_693_952
  def exabytes
    EXABYTE * self
  end
  alias :exabyte :exabytes
end
# frozen_string_literal: true | |
require "zlib" | |
require "active_support/core_ext/array/extract_options" | |
require "active_support/core_ext/enumerable" | |
require "active_support/core_ext/module/attribute_accessors" | |
require "active_support/core_ext/numeric/bytes" | |
require "active_support/core_ext/object/to_param" | |
require "active_support/core_ext/object/try" | |
require "active_support/core_ext/string/inflections" | |
module ActiveSupport | |
# See ActiveSupport::Cache::Store for documentation. | |
module Cache | |
# Lazily-loaded concrete store implementations.
autoload :FileStore, "active_support/cache/file_store"
autoload :MemoryStore, "active_support/cache/memory_store"
autoload :MemCacheStore, "active_support/cache/mem_cache_store"
autoload :NullStore, "active_support/cache/null_store"
autoload :RedisCacheStore, "active_support/cache/redis_cache_store"

# These options mean something to all cache implementations. Individual cache
# implementations may support additional options.
# (Includes the :expires_in aliases listed in OPTION_ALIASES below.)
UNIVERSAL_OPTIONS = [:namespace, :compress, :compress_threshold, :expires_in, :expire_in, :expired_in, :race_condition_ttl, :coder, :skip_nil]

# Entries larger than this many bytes are compressed by default.
DEFAULT_COMPRESS_LIMIT = 1.kilobyte

# Mapping of canonical option names to aliases that a store will recognize.
OPTION_ALIASES = {
  expires_in: [:expire_in, :expired_in]
}.freeze

module Strategy
  # Wraps another store with a request-local in-memory cache.
  autoload :LocalCache, "active_support/cache/strategy/local_cache"
end

# Exposed via the format_version accessor defined below.
@format_version = 6.1
class << self
  attr_accessor :format_version

  # Creates a new Store object according to the given options.
  #
  # If no arguments are passed to this method, then a new
  # ActiveSupport::Cache::MemoryStore object will be returned.
  #
  # If you pass a Symbol as the first argument, then a corresponding cache
  # store class under the ActiveSupport::Cache namespace will be created.
  # For example:
  #
  #   ActiveSupport::Cache.lookup_store(:memory_store)
  #   # => returns a new ActiveSupport::Cache::MemoryStore object
  #
  #   ActiveSupport::Cache.lookup_store(:mem_cache_store)
  #   # => returns a new ActiveSupport::Cache::MemCacheStore object
  #
  # Any additional arguments will be passed to the corresponding cache store
  # class's constructor:
  #
  #   ActiveSupport::Cache.lookup_store(:file_store, '/tmp/cache')
  #   # => same as: ActiveSupport::Cache::FileStore.new('/tmp/cache')
  #
  # If the first argument is not a Symbol, then it will simply be returned:
  #
  #   ActiveSupport::Cache.lookup_store(MyOwnCacheStore.new)
  #   # => returns MyOwnCacheStore.new
  def lookup_store(store = nil, *parameters)
    case store
    when Symbol
      options = parameters.extract_options!
      store_class = retrieve_store_class(store)

      # Keyword splat kept on a separate branch for Ruby 2.7 compatibility;
      # clean this up once Ruby 2.7 support is dropped.
      # See https://github.com/rails/rails/pull/41522#discussion_r581186602
      if options.empty?
        store_class.new(*parameters)
      else
        store_class.new(*parameters, **options)
      end
    when Array
      lookup_store(*store)
    when nil
      ActiveSupport::Cache::MemoryStore.new
    else
      store
    end
  end

  # Expands out the +key+ argument into a key that can be used for the
  # cache store. Optionally accepts a namespace, and all keys will be
  # scoped within that namespace.
  #
  # If the +key+ argument provided is an array, or responds to +to_a+, then
  # each of elements in the array will be turned into parameters/keys and
  # concatenated into a single key. For example:
  #
  #   ActiveSupport::Cache.expand_cache_key([:foo, :bar])              # => "foo/bar"
  #   ActiveSupport::Cache.expand_cache_key([:foo, :bar], "namespace") # => "namespace/foo/bar"
  #
  # The +key+ argument can also respond to +cache_key+ or +to_param+.
  def expand_cache_key(key, namespace = nil)
    expanded = namespace ? +"#{namespace}/" : +""

    prefix = ENV["RAILS_CACHE_ID"] || ENV["RAILS_APP_VERSION"]
    expanded << "#{prefix}/" if prefix

    expanded << retrieve_cache_key(key)
    expanded
  end

  private
    # Converts any supported key object into its string form, preferring
    # the richest protocol the object supports.
    def retrieve_cache_key(key)
      if key.respond_to?(:cache_key_with_version)
        key.cache_key_with_version
      elsif key.respond_to?(:cache_key)
        key.cache_key
      elsif key.is_a?(Array)
        key.map { |element| retrieve_cache_key(element) }.to_param
      elsif key.respond_to?(:to_a)
        retrieve_cache_key(key.to_a)
      else
        key.to_param
      end.to_s
    end

    # Obtains the specified cache store class, given the name of the +store+.
    # Raises an error when the store class cannot be found.
    def retrieve_store_class(store)
      # require_relative cannot be used here because the class might be
      # provided by another gem, like redis-activesupport for example.
      require "active_support/cache/#{store}"
    rescue LoadError => e
      raise "Could not find cache store adapter for #{store} (#{e})"
    else
      ActiveSupport::Cache.const_get(store.to_s.camelize)
    end
end
# An abstract cache store class. There are multiple cache store | |
# implementations, each having its own additional features. See the classes | |
# under the ActiveSupport::Cache module, e.g. | |
# ActiveSupport::Cache::MemCacheStore. MemCacheStore is currently the most | |
# popular cache store for large production websites. | |
# | |
# Some implementations may not support all methods beyond the basic cache | |
# methods of +fetch+, +write+, +read+, +exist?+, and +delete+. | |
# | |
# ActiveSupport::Cache::Store can store any serializable Ruby object. | |
# | |
# cache = ActiveSupport::Cache::MemoryStore.new | |
# | |
# cache.read('city') # => nil | |
# cache.write('city', "Duckburgh") | |
# cache.read('city') # => "Duckburgh" | |
# | |
# Keys are always translated into Strings and are case sensitive. When an | |
# object is specified as a key and has a +cache_key+ method defined, this | |
# method will be called to define the key. Otherwise, the +to_param+ | |
# method will be called. Hashes and Arrays can also be used as keys. The | |
# elements will be delimited by slashes, and the elements within a Hash | |
# will be sorted by key so they are consistent. | |
# | |
# cache.read('city') == cache.read(:city) # => true | |
# | |
# Nil values can be cached. | |
# | |
# If your cache is on a shared infrastructure, you can define a namespace | |
# for your cache entries. If a namespace is defined, it will be prefixed on | |
# to every key. The namespace can be either a static value or a Proc. If it | |
# is a Proc, it will be invoked when each key is evaluated so that you can | |
# use application logic to invalidate keys. | |
# | |
# cache.namespace = -> { @last_mod_time } # Set the namespace to a variable | |
# @last_mod_time = Time.now # Invalidate the entire cache by changing namespace | |
# | |
# Cached data larger than 1kB are compressed by default. To turn off | |
# compression, pass <tt>compress: false</tt> to the initializer or to | |
# individual +fetch+ or +write+ method calls. The 1kB compression | |
# threshold is configurable with the <tt>:compress_threshold</tt> option, | |
# specified in bytes. | |
class Store | |
# Class-level logger shared by all stores; instance_writer also allows a
# per-instance override.
cattr_accessor :logger, instance_writer: true

attr_reader :silence, :options
alias :silence? :silence
class << self
  private
    # Extracts connection-pool options (:pool_size, :pool_timeout) from
    # +options+ — deleting them in place — and returns them under the keys
    # a connection pool expects (:size, :timeout).
    def retrieve_pool_options(options)
      pool_options = {}
      pool_options[:size] = options.delete(:pool_size) if options[:pool_size]
      pool_options[:timeout] = options.delete(:pool_timeout) if options[:pool_timeout]
      pool_options
    end

    # Requires the connection_pool gem, printing an actionable message
    # before re-raising when it is missing.
    def ensure_connection_pool_added!
      require "connection_pool"
    rescue LoadError => e
      $stderr.puts "You don't have connection_pool installed in your application. Please add it to your Gemfile and run bundle install"
      raise e
    end
end
# Creates a new cache. The options will be passed to any write method calls
# except for <tt>:namespace</tt> which can be used to set the global
# namespace for the cache.
def initialize(options = nil)
  @options = options ? normalize_options(options) : {}

  # Compression defaults: enabled, for entries above DEFAULT_COMPRESS_LIMIT.
  @options[:compress] = @options.fetch(:compress, true)
  @options[:compress_threshold] = @options.fetch(:compress_threshold, DEFAULT_COMPRESS_LIMIT)

  @coder = @options.delete(:coder) { default_coder } || NullCoder
  @coder_supports_compression = @coder.respond_to?(:dump_compressed)
end
# Silences the logger. Returns +self+ so the call can be chained.
def silence!
  @silence = true
  self
end
# Silences the logger for the duration of the block, then restores the
# previous silence state (or +nil+ when it had never been set).
def mute
  previous_silence = defined?(@silence) ? @silence : nil
  @silence = true
  yield
ensure
  @silence = previous_silence
end
# Fetches data from the cache, using the given key. If there is data in | |
# the cache with the given key, then that data is returned. | |
# | |
# If there is no such data in the cache (a cache miss), then +nil+ will be | |
# returned. However, if a block has been passed, that block will be passed | |
# the key and executed in the event of a cache miss. The return value of the | |
# block will be written to the cache under the given cache key, and that | |
# return value will be returned. | |
# | |
# cache.write('today', 'Monday') | |
# cache.fetch('today') # => "Monday" | |
# | |
# cache.fetch('city') # => nil | |
# cache.fetch('city') do | |
# 'Duckburgh' | |
# end | |
# cache.fetch('city') # => "Duckburgh" | |
# | |
# You may also specify additional options via the +options+ argument. | |
# Setting <tt>force: true</tt> forces a cache "miss," meaning we treat | |
# the cache value as missing even if it's present. Passing a block is | |
# required when +force+ is true so this always results in a cache write. | |
# | |
# cache.write('today', 'Monday') | |
# cache.fetch('today', force: true) { 'Tuesday' } # => 'Tuesday' | |
# cache.fetch('today', force: true) # => ArgumentError | |
# | |
# The +:force+ option is useful when you're calling some other method to | |
# ask whether you should force a cache write. Otherwise, it's clearer to | |
# just call <tt>Cache#write</tt>. | |
# | |
# Setting <tt>skip_nil: true</tt> will not cache nil result: | |
# | |
# cache.fetch('foo') { nil } | |
# cache.fetch('bar', skip_nil: true) { nil } | |
# cache.exist?('foo') # => true | |
# cache.exist?('bar') # => false | |
# | |
# | |
# Setting <tt>compress: false</tt> disables compression of the cache entry. | |
# | |
# Setting <tt>:expires_in</tt> will set an expiration time on the cache. | |
# All caches support auto-expiring content after a specified number of | |
# seconds. This value can be specified as an option to the constructor | |
# (in which case all entries will be affected), or it can be supplied to | |
# the +fetch+ or +write+ method to affect just one entry. | |
# <tt>:expire_in</tt> and <tt>:expired_in</tt> are aliases for | |
# <tt>:expires_in</tt>. | |
# | |
# cache = ActiveSupport::Cache::MemoryStore.new(expires_in: 5.minutes) | |
# cache.write(key, value, expires_in: 1.minute) # Set a lower value for one entry | |
# | |
# Setting <tt>:expires_at</tt> will set an absolute expiration time on the cache. | |
# All caches support auto-expiring content after a specified number of | |
# seconds. This value can only be supplied to the +fetch+ or +write+ method to | |
# affect just one entry. | |
# | |
# cache = ActiveSupport::Cache::MemoryStore.new | |
# cache.write(key, value, expires_at: Time.now.at_end_of_hour) | |
# | |
# Setting <tt>:version</tt> verifies the cache stored under <tt>name</tt> | |
# is of the same version. +nil+ is returned on a version mismatch even
# if the entry is present.
# This feature is used to support recyclable cache keys. | |
# | |
# Setting <tt>:race_condition_ttl</tt> is very useful in situations where | |
# a cache entry is used very frequently and is under heavy load. If a | |
# cache expires and due to heavy load several different processes will try | |
# to read data natively and then they all will try to write to cache. To | |
# avoid that case the first process to find an expired cache entry will | |
# bump the cache expiration time by the value set in <tt>:race_condition_ttl</tt>. | |
# Yes, this process is extending the time for a stale value by another few | |
# seconds. Because of extended life of the previous cache, other processes | |
# will continue to use slightly stale data for just a bit longer. In the
# meantime that first process will go ahead and will write into cache the | |
# new value. After that all the processes will start getting the new value. | |
# The key is to keep <tt>:race_condition_ttl</tt> small. | |
# | |
# If the process regenerating the entry errors out, the entry will be | |
# regenerated after the specified number of seconds. Also note that the | |
# life of stale cache is extended only if it expired recently. Otherwise | |
# a new value is generated and <tt>:race_condition_ttl</tt> does not play | |
# any role. | |
# | |
# # Set all values to expire after one minute. | |
# cache = ActiveSupport::Cache::MemoryStore.new(expires_in: 1.minute) | |
# | |
# cache.write('foo', 'original value') | |
# val_1 = nil | |
# val_2 = nil | |
# sleep 60 | |
# | |
# Thread.new do | |
# val_1 = cache.fetch('foo', race_condition_ttl: 10.seconds) do | |
# sleep 1 | |
# 'new value 1' | |
# end | |
# end | |
# | |
# Thread.new do | |
# val_2 = cache.fetch('foo', race_condition_ttl: 10.seconds) do | |
# 'new value 2' | |
# end | |
# end | |
# | |
# cache.fetch('foo') # => "original value" | |
# sleep 10 # First thread extended the life of cache by another 10 seconds | |
# cache.fetch('foo') # => "new value 1" | |
# val_1 # => "new value 1" | |
# val_2 # => "original value" | |
# | |
# Other options will be handled by the specific cache store implementation. | |
# Internally, #fetch calls #read_entry, and calls #write_entry on a cache | |
# miss. +options+ will be passed to the #read and #write calls. | |
# | |
# For example, MemCacheStore's #write method supports the +:raw+ | |
# option, which tells the memcached server to store all values as strings. | |
# We can use this option with #fetch too: | |
# | |
# cache = ActiveSupport::Cache::MemCacheStore.new | |
# cache.fetch("foo", force: true, raw: true) do | |
# :bar | |
# end | |
# cache.fetch('foo') # => "bar" | |
def fetch(name, options = nil, &block)
  if block_given?
    options = merged_options(options)
    key = normalize_key(name, options)
    entry = nil
    instrument(:read, name, options) do |payload|
      # :force skips the read entirely so the block always runs.
      cached_entry = read_entry(key, **options, event: payload) unless options[:force]
      # May delete an expired entry, or briefly extend a stale one when
      # :race_condition_ttl applies; returns nil on a true miss.
      entry = handle_expired_entry(cached_entry, key, options)
      # A version mismatch is treated exactly like a miss.
      entry = nil if entry && entry.mismatched?(normalize_version(name, options))
      payload[:super_operation] = :fetch if payload
      payload[:hit] = !!entry if payload
    end
    if entry
      # Hit: instrument and return the cached value.
      get_entry_value(entry, name, options)
    else
      # Miss: generate the value from the block and (usually) write it.
      save_block_result_to_cache(name, options, &block)
    end
  elsif options && options[:force]
    raise ArgumentError, "Missing block: Calling `Cache#fetch` with `force: true` requires a block."
  else
    # No block: plain read semantics.
    read(name, options)
  end
end
# Reads data from the cache, using the given key. If there is data in | |
# the cache with the given key, then that data is returned. Otherwise, | |
# +nil+ is returned. | |
# | |
# Note, if data was written with the <tt>:expires_in</tt> or | |
# <tt>:version</tt> options, both of these conditions are applied before | |
# the data is returned. | |
# | |
# Options are passed to the underlying cache implementation. | |
def read(name, options = nil)
  options = merged_options(options)
  key = normalize_key(name, options)
  version = normalize_version(name, options)
  instrument(:read, name, options) do |payload|
    entry = read_entry(key, **options, event: payload)
    if entry
      if entry.expired?
        # Expired entries are pruned eagerly on read.
        delete_entry(key, **options)
        payload[:hit] = false if payload
        nil
      elsif entry.mismatched?(version)
        # Version mismatch counts as a miss, but the entry is kept
        # (it may still be valid for readers expecting its version).
        payload[:hit] = false if payload
        nil
      else
        payload[:hit] = true if payload
        entry.value
      end
    else
      payload[:hit] = false if payload
      nil
    end
  end
end
# Reads multiple values at once from the cache. Options can be passed | |
# in the last argument. | |
# | |
# Some cache implementation may optimize this method. | |
# | |
# Returns a hash mapping the names provided to the values found. | |
def read_multi(*names)
  options = names.extract_options!
  options = merged_options(options)
  instrument :read_multi, names, options do |payload|
    read_multi_entries(names, **options, event: payload).tap do |results|
      # Record which of the requested names were found, for subscribers.
      payload[:hits] = results.keys
    end
  end
end
# Cache Storage API to write multiple values at once. | |
def write_multi(hash, options = nil)
  options = merged_options(options)
  instrument :write_multi, hash, options do |payload|
    entries = hash.each_with_object({}) do |(name, value), memo|
      # Each value gets its own Entry, with the version resolved per key.
      memo[normalize_key(name, options)] = Entry.new(value, **options.merge(version: normalize_version(name, options)))
    end
    write_multi_entries entries, **options
  end
end
# Fetches data from the cache, using the given keys. If there is data in | |
# the cache with the given keys, then that data is returned. Otherwise, | |
# the supplied block is called for each key for which there was no data, | |
# and the result will be written to the cache and returned. | |
# Therefore, you need to pass a block that returns the data to be written | |
# to the cache. If you do not want to write the cache when the cache is | |
# not found, use #read_multi. | |
# | |
# Returns a hash with the data for each of the names. For example: | |
# | |
# cache.write("bim", "bam") | |
# cache.fetch_multi("bim", "unknown_key") do |key| | |
# "Fallback value for key: #{key}" | |
# end | |
# # => { "bim" => "bam", | |
# # "unknown_key" => "Fallback value for key: unknown_key" } | |
# | |
# Options are passed to the underlying cache implementation. For example: | |
# | |
# cache.fetch_multi("fizz", expires_in: 5.seconds) do |key| | |
# "buzz" | |
# end | |
# # => {"fizz"=>"buzz"} | |
# cache.read("fizz") | |
# # => "buzz" | |
# sleep(6) | |
# cache.read("fizz") | |
# # => nil | |
def fetch_multi(*names)
  raise ArgumentError, "Missing block: `Cache#fetch_multi` requires a block." unless block_given?
  options = names.extract_options!
  options = merged_options(options)
  instrument :read_multi, names, options do |payload|
    reads = read_multi_entries(names, **options)
    writes = {}
    # Preserve the order in which names were requested; misses are filled
    # from the block and queued in +writes+ for a single bulk write.
    ordered = names.index_with do |name|
      reads.fetch(name) { writes[name] = yield(name) }
    end
    payload[:hits] = reads.keys
    payload[:super_operation] = :fetch_multi
    # NOTE(review): invoked even when +writes+ is empty (all hits) — this
    # still emits a write_multi notification; confirm that is intended.
    write_multi(writes, options)
    ordered
  end
end
# Writes the value to the cache, with the key. | |
# | |
# Options are passed to the underlying cache implementation. | |
def write(name, value, options = nil)
  options = merged_options(options)
  instrument(:write, name, options) do
    # The entry is versioned from :version / the key's cache_version.
    entry = Entry.new(value, **options.merge(version: normalize_version(name, options)))
    write_entry(normalize_key(name, options), entry, **options)
  end
end
# Deletes an entry in the cache. Returns +true+ if an entry is deleted. | |
# | |
# Options are passed to the underlying cache implementation. | |
# Deletes an entry in the cache. Returns +true+ if an entry was deleted.
def delete(name, options = nil)
  merged = merged_options(options)
  instrument(:delete, name) do
    delete_entry(normalize_key(name, merged), **merged)
  end
end
# Deletes multiple entries in the cache. | |
# | |
# Options are passed to the underlying cache implementation. | |
# Deletes multiple entries in the cache. Returns the number of entries
# deleted by the underlying store.
#
# Options are passed to the underlying cache implementation.
def delete_multi(names, options = nil)
  options = merged_options(options)
  # Build normalized keys without mutating the caller's array (the
  # previous `map!` destructively rewrote the names passed in).
  keys = names.map { |name| normalize_key(name, options) }
  instrument :delete_multi, keys do
    delete_multi_entries(keys, **options)
  end
end
# Returns +true+ if the cache contains an entry for the given key. | |
# | |
# Options are passed to the underlying cache implementation. | |
def exist?(name, options = nil)
  options = merged_options(options)
  instrument(:exist?, name) do |payload|
    entry = read_entry(normalize_key(name, options), **options, event: payload)
    # Coerce to a strict boolean: present, unexpired, and version matches.
    (entry && !entry.expired? && !entry.mismatched?(normalize_version(name, options))) || false
  end
end
# Builds an Entry for +value+ using the store's merged options, without
# writing it to the cache.
def new_entry(value, options = nil) # :nodoc:
  Entry.new(value, **merged_options(options))
end
# Deletes all entries with keys matching the pattern. | |
# | |
# Options are passed to the underlying cache implementation. | |
# | |
# Some implementations may not support this method. | |
# Abstract: concrete stores override this to delete keys by pattern.
def delete_matched(matcher, options = nil)
  raise NotImplementedError, "#{self.class.name} does not support delete_matched"
end
# Increments an integer value in the cache. | |
# | |
# Options are passed to the underlying cache implementation. | |
# | |
# Some implementations may not support this method. | |
# Abstract: concrete stores override this with an atomic increment.
def increment(name, amount = 1, options = nil)
  raise NotImplementedError, "#{self.class.name} does not support increment"
end
# Decrements an integer value in the cache. | |
# | |
# Options are passed to the underlying cache implementation. | |
# | |
# Some implementations may not support this method. | |
# Abstract: concrete stores override this with an atomic decrement.
def decrement(name, amount = 1, options = nil)
  raise NotImplementedError, "#{self.class.name} does not support decrement"
end
# Cleanups the cache by removing expired entries. | |
# | |
# Options are passed to the underlying cache implementation. | |
# | |
# Some implementations may not support this method. | |
# Abstract: concrete stores override this to purge expired entries.
def cleanup(options = nil)
  raise NotImplementedError, "#{self.class.name} does not support cleanup"
end
# Clears the entire cache. Be careful with this method since it could | |
# affect other processes if shared cache is being used. | |
# | |
# The options hash is passed to the underlying cache implementation. | |
# | |
# Some implementations may not support this method. | |
# Abstract: concrete stores override this to wipe the whole cache.
def clear(options = nil)
  raise NotImplementedError, "#{self.class.name} does not support clear"
end
private | |
# Looks up the payload coder for the globally configured cache format
# version (see Coders below).
def default_coder
  Coders[Cache.format_version]
end
# Adds the namespace defined in the options to a pattern designed to | |
# match keys. Implementations that support delete_matched should call | |
# this method to translate a pattern that matches names into one that | |
# matches namespaced keys. | |
# Adds the namespace defined in the options to a pattern designed to
# match keys. Implementations that support delete_matched should call
# this method to translate a pattern that matches names into one that
# matches namespaced keys.
def key_matcher(pattern, options) # :doc:
  namespace = options[:namespace]
  prefix = namespace.is_a?(Proc) ? namespace.call : namespace
  return pattern unless prefix

  source = pattern.source
  # Re-anchor the pattern behind the "namespace:" prefix; unanchored
  # patterns may match anywhere after the prefix.
  source = source.start_with?("^") ? source[1..-1] : ".*#{source}"
  Regexp.new("^#{Regexp.escape(prefix)}:#{source}", pattern.options)
end
# Reads an entry from the cache implementation. Subclasses must implement | |
# this method. | |
# Abstract storage hook: fetch a raw Entry for +key+, or nil.
def read_entry(key, **options)
  raise NotImplementedError
end
# Writes an entry to the cache implementation. Subclasses must implement | |
# this method. | |
# Abstract storage hook: persist +entry+ under +key+.
def write_entry(key, entry, **options)
  raise NotImplementedError
end
# Dumps +entry+ through the configured coder, compressing when the coder
# supports it and the (merged) options ask for compression.
def serialize_entry(entry, **options)
  options = merged_options(options)
  if @coder_supports_compression && options[:compress]
    @coder.dump_compressed(entry, options[:compress_threshold] || DEFAULT_COMPRESS_LIMIT)
  else
    @coder.dump(entry)
  end
end
# Loads a serialized payload back through the coder; nil passes through.
def deserialize_entry(payload)
  @coder.load(payload) unless payload.nil?
end
# Reads multiple entries from the cache implementation. Subclasses MAY | |
# implement this method. | |
# Default multi-read: one read_entry per name. Expired entries are
# deleted on the way through; version mismatches are silently omitted.
def read_multi_entries(names, **options)
  names.each_with_object({}) do |name, results|
    key = normalize_key(name, options)
    entry = read_entry(key, **options)
    next unless entry
    version = normalize_version(name, options)
    if entry.expired?
      delete_entry(key, **options)
    elsif !entry.mismatched?(version)
      # Results are keyed by the original (un-normalized) name.
      results[name] = entry.value
    end
  end
end
# Writes multiple entries to the cache implementation. Subclasses MAY | |
# implement this method. | |
# Default multi-write: one write_entry call per key/entry pair.
def write_multi_entries(hash, **options)
  hash.each_pair do |key, entry|
    write_entry(key, entry, **options)
  end
end
# Deletes an entry from the cache implementation. Subclasses must | |
# implement this method. | |
# Abstract storage hook: remove +key+; truthy return means deleted.
def delete_entry(key, **options)
  raise NotImplementedError
end
# Deletes multiples entries in the cache implementation. Subclasses MAY | |
# implement this method. | |
# Default multi-delete: counts how many keys were actually removed.
def delete_multi_entries(entries, **options)
  deleted = 0
  entries.each do |key|
    deleted += 1 if delete_entry(key, **options)
  end
  deleted
end
# Merges the default options with ones specific to a method call. | |
def merged_options(call_options)
  if call_options
    call_options = normalize_options(call_options)
    if options.empty?
      call_options
    else
      # Per-call options win over the store-wide defaults.
      options.merge(call_options)
    end
  else
    # No per-call options: use the store defaults as-is.
    options
  end
end
# Normalize aliased options to their canonical form | |
# Normalize aliased options to their canonical form.
# OPTION_ALIASES is defined elsewhere in this file and presumably maps
# each canonical option name to its accepted aliases — verify there.
def normalize_options(options)
  options = options.dup
  OPTION_ALIASES.each do |canonical_name, aliases|
    alias_key = aliases.detect { |key| options.key?(key) }
    # An explicitly set canonical value wins over any alias.
    options[canonical_name] ||= options[alias_key] if alias_key
    options.except!(*aliases)
  end
  options
end
# Expands and namespaces the cache key. May be overridden by | |
# cache stores to do additional normalization. | |
# Expands the key to a string and prefixes the namespace, if any.
def normalize_key(key, options = nil)
  namespace_key expanded_key(key), options
end
# Prefix the key with a namespace string: | |
# | |
# namespace_key 'foo', namespace: 'cache' | |
# # => 'cache:foo' | |
# | |
# With a namespace block: | |
# | |
# namespace_key 'foo', namespace: -> { 'cache' } | |
# # => 'cache:foo' | |
def namespace_key(key, options = nil)
  options = merged_options(options)
  namespace = options[:namespace]
  # A callable namespace is resolved at key-build time, so it can be
  # used to invalidate the whole cache (see class docs).
  if namespace.respond_to?(:call)
    namespace = namespace.call
  end
  # Normalize the key's encoding to UTF-8 so concatenation with the
  # namespace string cannot raise Encoding::CompatibilityError.
  if key && key.encoding != Encoding::UTF_8
    key = key.dup.force_encoding(Encoding::UTF_8)
  end
  if namespace
    "#{namespace}:#{key}"
  else
    key
  end
end
# Expands key to be a consistent string value. Invokes +cache_key+ if | |
# object responds to +cache_key+. Otherwise, +to_param+ method will be | |
# called. If the key is a Hash, then keys will be sorted alphabetically. | |
def expanded_key(key)
  return key.cache_key.to_s if key.respond_to?(:cache_key)
  case key
  when Array
    if key.size > 1
      key.collect { |element| expanded_key(element) }
    else
      # Single-element arrays collapse to the element's own key.
      expanded_key(key.first)
    end
  when Hash
    # Sorted so the same hash always produces the same cache key.
    key.collect { |k, v| "#{k}=#{v}" }.sort!
  else
    key
  end.to_param
end
# Resolves the entry version: an explicit :version option wins,
# otherwise the key object's own cache_version (if any) is used.
def normalize_version(key, options = nil)
  (options && options[:version].try(:to_param)) || expanded_version(key)
end
# Recursively extracts cache_version from the key (or its elements for
# array-like keys). Returns nil when nothing is versioned.
def expanded_version(key)
  case
  when key.respond_to?(:cache_version) then key.cache_version.to_param
  # tap(&:compact!) keeps the array (compact! returns nil if unchanged).
  when key.is_a?(Array) then key.map { |element| expanded_version(element) }.tap(&:compact!).to_param
  when key.respond_to?(:to_a) then expanded_version(key.to_a)
  end
end
# Logs the operation (unless silenced) and wraps the block in an
# ActiveSupport::Notifications event named "cache_<operation>.active_support".
# The payload hash is yielded so callers can add :hit, :hits, etc.
def instrument(operation, key, options = nil)
  if logger && logger.debug? && !silence?
    logger.debug "Cache #{operation}: #{normalize_key(key, options)}#{options.blank? ? "" : " (#{options.inspect})"}"
  end
  payload = { key: key, store: self.class.name }
  payload.merge!(options) if options.is_a?(Hash)
  ActiveSupport::Notifications.instrument("cache_#{operation}.active_support", payload) { yield(payload) }
end
# Returns the entry if still live, or nil if it was expired. An expired
# entry is either deleted, or — under :race_condition_ttl — written back
# with a short extension so other processes keep serving the stale value
# while this caller regenerates it.
def handle_expired_entry(entry, key, options)
  if entry && entry.expired?
    race_ttl = options[:race_condition_ttl].to_i
    # Only recently-expired entries are extended; long-dead ones are not.
    if (race_ttl > 0) && (Time.now.to_f - entry.expires_at <= race_ttl)
      # When an entry has a positive :race_condition_ttl defined, put the stale entry back into the cache
      # for a brief period while the entry is being recalculated.
      entry.expires_at = Time.now.to_f + race_ttl
      write_entry(key, entry, expires_in: race_ttl * 2)
    else
      delete_entry(key, **options)
    end
    # Either way the caller sees a miss and regenerates the value.
    entry = nil
  end
  entry
end
# Emits a fetch_hit notification and returns the entry's value.
def get_entry_value(entry, name, options)
  instrument(:fetch_hit, name, options) { }
  entry.value
end
# Runs the fetch block (instrumented as :generate) and writes its result
# to the cache, then returns the result.
def save_block_result_to_cache(name, options)
  result = instrument(:generate, name, options) do
    yield(name)
  end
  # nil results are cached too, unless the caller passed skip_nil: true.
  write(name, result, options) unless result.nil? && options[:skip_nil]
  result
end
end | |
# Identity coder: used when no serialization should happen, i.e. entries
# are stored and returned as-is.
module NullCoder # :nodoc:
  extend self
  # Returns the entry untouched.
  def dump(entry)
    entry
  end
  # Delegates compression to the entry itself; no serialization added.
  def dump_compressed(entry, threshold)
    entry.compressed(threshold)
  end
  # The payload already is the entry.
  def load(payload)
    payload
  end
end
module Coders # :nodoc:
  # Serialized-payload markers. 6.1 entries are bare Marshal dumps, so
  # they begin with Marshal's own two-byte signature; 7.0 entries carry a
  # one-byte prefix saying whether the rest is deflated.
  MARK_61 = "\x04\b".b.freeze # The one set by Marshal.
  MARK_70_UNCOMPRESSED = "\x00".b.freeze
  MARK_70_COMPRESSED = "\x01".b.freeze

  class << self
    # Returns the coder for the configured cache format version.
    def [](version)
      case version
      when 6.1
        Rails61Coder
      when 7.0
        Rails70Coder
      else
        raise ArgumentError, "Unknown ActiveSupport::Cache.format_version #{Cache.format_version.inspect}"
      end
    end
  end

  module Loader
    extend self

    # Decodes a payload produced by either coder, dispatching on the
    # leading marker byte. Returns nil (with a logged warning) for
    # payloads that are not strings or carry an unknown prefix.
    def load(payload)
      if !payload.is_a?(String)
        # Fixed: the message previously ended with a stray double quote
        # (`returning nil."`).
        ActiveSupport::Cache::Store.logger&.warn %{Payload wasn't a string, was #{payload.class.name} - couldn't unmarshal, so returning nil.}
        return nil
      elsif payload.start_with?(MARK_70_UNCOMPRESSED)
        members = Marshal.load(payload.byteslice(1..-1))
      elsif payload.start_with?(MARK_70_COMPRESSED)
        members = Marshal.load(Zlib::Inflate.inflate(payload.byteslice(1..-1)))
      elsif payload.start_with?(MARK_61)
        # Legacy format: the whole payload is a marshalled Entry.
        return Marshal.load(payload)
      else
        ActiveSupport::Cache::Store.logger&.warn %{Invalid cache prefix: #{payload.byteslice(0).inspect}, expected "\\x00" or "\\x01"}
        return nil
      end
      Entry.unpack(members)
    end
  end

  # 6.1 format: a plain Marshal dump of the whole Entry object.
  module Rails61Coder
    include Loader
    extend self

    def dump(entry)
      Marshal.dump(entry)
    end

    def dump_compressed(entry, threshold)
      Marshal.dump(entry.compressed(threshold))
    end
  end

  # 7.0 format: marker byte + Marshal dump of Entry#pack's compact array,
  # optionally deflated when it is large enough and compression helps.
  module Rails70Coder
    include Loader
    extend self

    def dump(entry)
      MARK_70_UNCOMPRESSED + Marshal.dump(entry.pack)
    end

    def dump_compressed(entry, threshold)
      payload = Marshal.dump(entry.pack)
      if payload.bytesize >= threshold
        compressed_payload = Zlib::Deflate.deflate(payload)
        # Only use the compressed form when it actually saves space.
        if compressed_payload.bytesize < payload.bytesize
          return MARK_70_COMPRESSED + compressed_payload
        end
      end
      MARK_70_UNCOMPRESSED + payload
    end
  end
end
# This class is used to represent cache entries. Cache entries have a value, an optional | |
# expiration time, and an optional version. The expiration time is used to support the :race_condition_ttl option | |
# on the cache. The version is used to support the :version option on the cache for rejecting | |
# mismatches. | |
# | |
# Since cache entries in most instances will be serialized, the internals of this class are highly optimized | |
# using short instance variable names that are lazily defined. | |
class Entry # :nodoc:
  class << self
    # Rebuilds an Entry from the compact array produced by #pack.
    # Trailing nil members were stripped by #pack, so missing slots
    # default to nil here.
    def unpack(members)
      new(members[0], expires_at: members[1], version: members[2])
    end
  end
  attr_reader :version
  # Creates a new cache entry for the specified value. Options supported are
  # +:compressed+, +:version+, +:expires_at+ and +:expires_in+.
  def initialize(value, compressed: false, version: nil, expires_in: nil, expires_at: nil, **)
    @value = value
    @version = version
    # @created_at is pinned to the epoch, so @expires_in effectively
    # stores an absolute timestamp (see #expires_at / #expired?).
    @created_at = 0.0
    # :expires_at wins; :expires_in is converted to an absolute time.
    @expires_in = expires_at&.to_f || expires_in && (expires_in.to_f + Time.now.to_f)
    # @compressed is only defined when true so compressed? can use defined?.
    @compressed = true if compressed
  end
  # The cached value, inflating it first if it was stored compressed.
  def value
    compressed? ? uncompress(@value) : @value
  end
  # True only when both the entry and the caller carry a version and
  # they differ; unversioned entries never mismatch.
  def mismatched?(version)
    @version && version && @version != version
  end
  # Checks if the entry is expired. Entries without an expiry never expire.
  def expired?
    @expires_in && @created_at + @expires_in <= Time.now.to_f
  end
  # Absolute expiry time as a Float, or nil when the entry never expires.
  def expires_at
    @expires_in ? @created_at + @expires_in : nil
  end
  # Sets (or clears, with nil) the absolute expiry time.
  def expires_at=(value)
    if value
      @expires_in = value.to_f - @created_at
    else
      @expires_in = nil
    end
  end
  # Returns the size of the cached value. This could be less than
  # <tt>value.bytesize</tt> if the data is compressed.
  def bytesize
    case value
    when NilClass
      0
    when String
      @value.bytesize
    else
      # Memoize the serialized size; Marshal.dump is not free.
      @s ||= Marshal.dump(@value).bytesize
    end
  end
  def compressed? # :nodoc:
    defined?(@compressed)
  end
  # Returns a compressed copy of the entry when the serialized value is
  # at least +compress_threshold+ bytes and deflating actually shrinks
  # it; otherwise returns self. nil/booleans/numbers never compress.
  def compressed(compress_threshold)
    return self if compressed?
    case @value
    when nil, true, false, Numeric
      uncompressed_size = 0
    when String
      uncompressed_size = @value.bytesize
    else
      serialized = Marshal.dump(@value)
      uncompressed_size = serialized.bytesize
    end
    if uncompressed_size >= compress_threshold
      # Strings skip the dump above, so serialize lazily here.
      serialized ||= Marshal.dump(@value)
      compressed = Zlib::Deflate.deflate(serialized)
      if compressed.bytesize < uncompressed_size
        return Entry.new(compressed, compressed: true, expires_at: expires_at, version: version)
      end
    end
    self
  end
  def local?
    false
  end
  # Duplicates the value in a class. This is used by cache implementations that don't natively
  # serialize entries to protect against accidental cache modifications.
  def dup_value!
    # Immutable values (numbers, booleans, compressed blobs) need no copy.
    if @value && !compressed? && !(@value.is_a?(Numeric) || @value == true || @value == false)
      if @value.is_a?(String)
        @value = @value.dup
      else
        # Deep copy via a Marshal round-trip.
        @value = Marshal.load(Marshal.dump(@value))
      end
    end
  end
  # Packs the entry into a compact array for serialization, dropping
  # trailing nil members to keep payloads small (see Entry.unpack).
  def pack
    members = [value, expires_at, version]
    members.pop while !members.empty? && members.last.nil?
    members
  end
  private
    def uncompress(value)
      Marshal.load(Zlib::Inflate.inflate(value))
    end
end
end | |
end |
# frozen_string_literal: true | |
# Shared examples: stores supporting delete_matched must remove exactly
# the keys matching the pattern and leave the rest intact.
module CacheDeleteMatchedBehavior
  def test_delete_matched
    { "foo" => "bar", "fu" => "baz", "foo/bar" => "baz", "fu/baz" => "bar" }.each do |key, value|
      @cache.write(key, value)
    end
    @cache.delete_matched(/oo/)
    assert_not @cache.exist?("foo")
    assert @cache.exist?("fu")
    assert_not @cache.exist?("foo/bar")
    assert @cache.exist?("fu/baz")
  end
end
# frozen_string_literal: true | |
require_relative "../abstract_unit" | |
require "active_support/cache" | |
class CacheEntryTest < ActiveSupport::TestCase
  def test_expired
    # No expiry configured: the entry never expires.
    entry = ActiveSupport::Cache::Entry.new("value")
    assert_not entry.expired?, "entry not expired"
    entry = ActiveSupport::Cache::Entry.new("value", expires_in: 60)
    assert_not entry.expired?, "entry not expired"
    # Stub the clock just past the deadline to flip expired?.
    Time.stub(:now, Time.at(entry.expires_at + 1)) do
      assert entry.expired?, "entry is expired"
    end
  end
  # expires_at read from one entry should round-trip into another.
  def test_initialize_with_expires_at
    entry = ActiveSupport::Cache::Entry.new("value", expires_in: 60)
    clone = ActiveSupport::Cache::Entry.new("value", expires_at: entry.expires_at)
    assert_equal entry.expires_at, clone.expires_at
  end
end
# frozen_string_literal: true | |
# Shared examples for stores that support atomic counters. Values are
# written raw, since increment/decrement operate on raw integers.
module CacheIncrementDecrementBehavior
  def test_increment
    key = SecureRandom.uuid
    @cache.write(key, 1, raw: true)
    assert_equal 1, @cache.read(key, raw: true).to_i
    2.upto(3) do |expected|
      assert_equal expected, @cache.increment(key)
      assert_equal expected, @cache.read(key, raw: true).to_i
    end
    # Incrementing a missing key is store-specific: nil or initialize-to-1.
    missing = @cache.increment(SecureRandom.alphanumeric)
    assert(missing.nil? || missing == 1)
  end

  def test_decrement
    key = SecureRandom.uuid
    @cache.write(key, 3, raw: true)
    assert_equal 3, @cache.read(key, raw: true).to_i
    2.downto(1) do |expected|
      assert_equal expected, @cache.decrement(key)
      assert_equal expected, @cache.read(key, raw: true).to_i
    end
    # Decrementing a missing key is store-specific: nil or initialize-to -1.
    missing = @cache.decrement(SecureRandom.alphanumeric)
    assert(missing.nil? || missing == -1)
  end
end
# frozen_string_literal: true | |
# Shared examples asserting the ActiveSupport::Notifications events each
# cache operation emits (event names, payload keys, :hits, :super_operation).
module CacheInstrumentationBehavior
  # fetch_multi must funnel its writes through write_multi_entries.
  def test_fetch_multi_uses_write_multi_entries_store_provider_interface
    assert_called(@cache, :write_multi_entries) do
      @cache.fetch_multi "a", "b", "c" do |key|
        key * 2
      end
    end
  end
  def test_write_multi_instrumentation
    key_1 = SecureRandom.uuid
    key_2 = SecureRandom.uuid
    value_1 = SecureRandom.alphanumeric
    value_2 = SecureRandom.alphanumeric
    writes = { key_1 => value_1, key_2 => value_2 }
    events = with_instrumentation "write_multi" do
      @cache.write_multi(writes)
    end
    assert_equal %w[ cache_write_multi.active_support ], events.map(&:name)
    # A direct write_multi is not wrapped by a super operation.
    assert_nil events[0].payload[:super_operation]
    assert_equal({ key_1 => value_1, key_2 => value_2 }, events[0].payload[:key])
  end
  def test_instrumentation_with_fetch_multi_as_super_operation
    key_1 = SecureRandom.uuid
    @cache.write(key_1, SecureRandom.alphanumeric)
    key_2 = SecureRandom.uuid
    events = with_instrumentation "read_multi" do
      @cache.fetch_multi(key_2, key_1) { |key| key * 2 }
    end
    assert_equal %w[ cache_read_multi.active_support ], events.map(&:name)
    assert_equal :fetch_multi, events[0].payload[:super_operation]
    assert_equal [key_2, key_1], events[0].payload[:key]
    # Only the pre-written key counts as a hit.
    assert_equal [key_1], events[0].payload[:hits]
    assert_equal @cache.class.name, events[0].payload[:store]
  end
  # fetch_multi with no names still emits a (empty) read_multi event.
  def test_instrumentation_empty_fetch_multi
    events = with_instrumentation "read_multi" do
      @cache.fetch_multi() { |key| key * 2 }
    end
    assert_equal %w[ cache_read_multi.active_support ], events.map(&:name)
    assert_equal :fetch_multi, events[0].payload[:super_operation]
    assert_equal [], events[0].payload[:key]
    assert_equal [], events[0].payload[:hits]
    assert_equal @cache.class.name, events[0].payload[:store]
  end
  def test_read_multi_instrumentation
    key_1 = SecureRandom.uuid
    @cache.write(key_1, SecureRandom.alphanumeric)
    key_2 = SecureRandom.uuid
    events = with_instrumentation "read_multi" do
      @cache.read_multi(key_2, key_1)
    end
    assert_equal %w[ cache_read_multi.active_support ], events.map(&:name)
    assert_equal [key_2, key_1], events[0].payload[:key]
    assert_equal [key_1], events[0].payload[:hits]
    assert_equal @cache.class.name, events[0].payload[:store]
  end
  def test_empty_read_multi_instrumentation
    events = with_instrumentation "read_multi" do
      @cache.read_multi()
    end
    assert_equal %w[ cache_read_multi.active_support ], events.map(&:name)
    assert_equal [], events[0].payload[:key]
    assert_equal [], events[0].payload[:hits]
    assert_equal @cache.class.name, events[0].payload[:store]
  end
  private
    # Subscribes to "cache_<method>.active_support" for the duration of
    # the block and returns the captured events. Always unsubscribes.
    def with_instrumentation(method)
      event_name = "cache_#{method}.active_support"
      [].tap do |events|
        ActiveSupport::Notifications.subscribe event_name do |*args|
          events << ActiveSupport::Notifications::Event.new(*args)
        end
        yield
      end
    ensure
      ActiveSupport::Notifications.unsubscribe event_name
    end
end
# frozen_string_literal: true | |
require_relative "../abstract_unit" | |
require "active_support/cache" | |
# Tests for ActiveSupport::Cache.expand_cache_key and Entry's tolerance
# of subclasses that predefine its lazily-created instance variables.
class CacheKeyTest < ActiveSupport::TestCase
  # Entry must cope with legacy subclasses that assign @expires_in /
  # @created_at to nil before calling super.
  def test_entry_legacy_optional_ivars
    legacy = Class.new(ActiveSupport::Cache::Entry) do
      def initialize(value, **options)
        @value = value
        @expires_in = nil
        @created_at = nil
        super
      end
    end
    entry = legacy.new "foo"
    assert_equal "foo", entry.value
  end
  def test_expand_cache_key
    assert_equal "1/2/true", ActiveSupport::Cache.expand_cache_key([1, "2", true])
    assert_equal "name/1/2/true", ActiveSupport::Cache.expand_cache_key([1, "2", true], :name)
  end
  # RAILS_CACHE_ID is inserted between the optional namespace and the key.
  def test_expand_cache_key_with_rails_cache_id
    with_env("RAILS_CACHE_ID" => "c99") do
      assert_equal "c99/foo", ActiveSupport::Cache.expand_cache_key(:foo)
      assert_equal "c99/foo", ActiveSupport::Cache.expand_cache_key([:foo])
      assert_equal "c99/foo/bar", ActiveSupport::Cache.expand_cache_key([:foo, :bar])
      assert_equal "nm/c99/foo", ActiveSupport::Cache.expand_cache_key(:foo, :nm)
      assert_equal "nm/c99/foo", ActiveSupport::Cache.expand_cache_key([:foo], :nm)
      assert_equal "nm/c99/foo/bar", ActiveSupport::Cache.expand_cache_key([:foo, :bar], :nm)
    end
  end
  def test_expand_cache_key_with_rails_app_version
    with_env("RAILS_APP_VERSION" => "rails3") do
      assert_equal "rails3/foo", ActiveSupport::Cache.expand_cache_key(:foo)
    end
  end
  def test_expand_cache_key_rails_cache_id_should_win_over_rails_app_version
    with_env("RAILS_CACHE_ID" => "c99", "RAILS_APP_VERSION" => "rails3") do
      assert_equal "c99/foo", ActiveSupport::Cache.expand_cache_key(:foo)
    end
  end
  # Objects defining cache_key have it used verbatim (stringified).
  def test_expand_cache_key_respond_to_cache_key
    key = +"foo"
    def key.cache_key
      :foo_key
    end
    assert_equal "foo_key", ActiveSupport::Cache.expand_cache_key(key)
  end
  def test_expand_cache_key_array_with_something_that_responds_to_cache_key
    key = +"foo"
    def key.cache_key
      :foo_key
    end
    assert_equal "foo_key", ActiveSupport::Cache.expand_cache_key([key])
  end
  def test_expand_cache_key_of_nil
    assert_equal "", ActiveSupport::Cache.expand_cache_key(nil)
  end
  def test_expand_cache_key_of_false
    assert_equal "false", ActiveSupport::Cache.expand_cache_key(false)
  end
  def test_expand_cache_key_of_true
    assert_equal "true", ActiveSupport::Cache.expand_cache_key(true)
  end
  # Enumerables (to_a-able objects) are expanded like arrays.
  def test_expand_cache_key_of_array_like_object
    assert_equal "foo/bar/baz", ActiveSupport::Cache.expand_cache_key(%w{foo bar baz}.to_enum)
  end
  private
    # Temporarily overrides ENV entries for the block, restoring the
    # original values (including nil) afterwards.
    def with_env(kv)
      old_values = {}
      kv.each { |key, value| old_values[key], ENV[key] = ENV[key], value }
      yield
    ensure
      old_values.each { |key, value| ENV[key] = value }
    end
end
# frozen_string_literal: true | |
require "active_support/core_ext/numeric/time" | |
# Tests the base functionality that should be identical across all cache stores. | |
module CacheStoreBehavior | |
def test_should_read_and_write_strings | |
key = SecureRandom.uuid | |
assert @cache.write(key, "bar") | |
assert_equal "bar", @cache.read(key) | |
end | |
def test_should_overwrite | |
key = SecureRandom.uuid | |
@cache.write(key, "bar") | |
@cache.write(key, "baz") | |
assert_equal "baz", @cache.read(key) | |
end | |
def test_fetch_without_cache_miss | |
key = SecureRandom.uuid | |
@cache.write(key, "bar") | |
assert_not_called(@cache, :write) do | |
assert_equal "bar", @cache.fetch(key) { "baz" } | |
end | |
end | |
def test_fetch_with_cache_miss | |
key = SecureRandom.uuid | |
assert_called_with(@cache, :write, [key, "baz", @cache.options]) do | |
assert_equal "baz", @cache.fetch(key) { "baz" } | |
end | |
end | |
def test_fetch_with_cache_miss_passes_key_to_block | |
cache_miss = false | |
key = SecureRandom.alphanumeric(10) | |
assert_equal 10, @cache.fetch(key) { |key| cache_miss = true; key.length } | |
assert cache_miss | |
cache_miss = false | |
assert_equal 10, @cache.fetch(key) { |fetch_key| cache_miss = true; fetch_key.length } | |
assert_not cache_miss | |
end | |
  # force: true bypasses the read entirely and always writes.
  def test_fetch_with_forced_cache_miss
    key = SecureRandom.uuid
    @cache.write(key, "bar")
    assert_not_called(@cache, :read) do
      assert_called_with(@cache, :write, [key, "bar", @cache.options.merge(force: true)]) do
        @cache.fetch(key, force: true) { "bar" }
      end
    end
  end

  # A cached nil is a hit: the block is not evaluated and nothing is written.
  def test_fetch_with_cached_nil
    key = SecureRandom.uuid
    @cache.write(key, nil)
    assert_not_called(@cache, :write) do
      assert_nil @cache.fetch(key) { "baz" }
    end
  end
def test_fetch_cache_miss_with_skip_nil | |
key = SecureRandom.uuid | |
assert_not_called(@cache, :write) do | |
assert_nil @cache.fetch(key, skip_nil: true) { nil } | |
assert_equal false, @cache.exist?("foo") | |
end | |
end | |
  def test_fetch_with_forced_cache_miss_with_block
    key = SecureRandom.uuid
    @cache.write(key, "bar")
    assert_equal "foo_bar", @cache.fetch(key, force: true) { "foo_bar" }
  end

  # force: true without a block cannot produce a value, so fetch raises and
  # leaves the stored entry untouched.
  def test_fetch_with_forced_cache_miss_without_block
    key = SecureRandom.uuid
    @cache.write(key, "bar")
    assert_raises(ArgumentError) do
      @cache.fetch(key, force: true)
    end
    assert_equal "bar", @cache.read(key)
  end

  def test_should_read_and_write_hash
    key = SecureRandom.uuid
    assert @cache.write(key, a: "b")
    assert_equal({ a: "b" }, @cache.read(key))
  end

  def test_should_read_and_write_integer
    key = SecureRandom.uuid
    assert @cache.write(key, 1)
    assert_equal 1, @cache.read(key)
  end

  def test_should_read_and_write_nil
    key = SecureRandom.uuid
    assert @cache.write(key, nil)
    assert_nil @cache.read(key)
  end

  def test_should_read_and_write_false
    key = SecureRandom.uuid
    assert @cache.write(key, false)
    assert_equal false, @cache.read(key)
  end

  # read_multi returns only the requested keys.
  def test_read_multi
    key = SecureRandom.uuid
    @cache.write(key, "bar")
    other_key = SecureRandom.uuid
    @cache.write(other_key, "baz")
    @cache.write(SecureRandom.alphanumeric, "biz")
    assert_equal({ key => "bar", other_key => "baz" }, @cache.read_multi(key, other_key))
  end

  # Expired entries are omitted from read_multi results.
  def test_read_multi_with_expires
    time = Time.now
    key = SecureRandom.uuid
    other_key = SecureRandom.uuid
    @cache.write(key, "bar", expires_in: 10)
    @cache.write(other_key, "baz")
    @cache.write(SecureRandom.alphanumeric, "biz")
    Time.stub(:now, time + 11) do
      assert_equal({ other_key => "baz" }, @cache.read_multi(other_key, SecureRandom.alphanumeric))
    end
  end

  def test_read_multi_with_empty_keys_and_a_logger_and_no_namespace
    @cache.options[:namespace] = nil
    @cache.logger = ActiveSupport::Logger.new(nil)
    assert_equal({}, @cache.read_multi)
  end

  # fetch_multi yields each missing key to the block and writes its result.
  def test_fetch_multi
    key = SecureRandom.uuid
    other_key = SecureRandom.uuid
    third_key = SecureRandom.alphanumeric
    @cache.write(key, "bar")
    @cache.write(other_key, "biz")
    values = @cache.fetch_multi(key, other_key, third_key) { |value| value * 2 }
    assert_equal({ key => "bar", other_key => "biz", third_key => (third_key * 2) }, values)
    assert_equal((third_key * 2), @cache.read(third_key))
  end
  # expires_in: nil must be accepted and treated as "no expiry override".
  def test_fetch_multi_without_expires_in
    key = SecureRandom.uuid
    other_key = SecureRandom.uuid
    third_key = SecureRandom.alphanumeric
    @cache.write(key, "bar")
    @cache.write(other_key, "biz")
    values = @cache.fetch_multi(key, third_key, other_key, expires_in: nil) { |value| value * 2 }
    assert_equal({ key => "bar", third_key => (third_key * 2), other_key => "biz" }, values)
    assert_equal((third_key * 2), @cache.read(third_key))
  end

  # Objects responding to #cache_key can be passed directly to fetch_multi;
  # the original objects are the keys in the returned hash.
  def test_fetch_multi_with_objects
    key = SecureRandom.uuid
    other_key = SecureRandom.uuid
    cache_struct = Struct.new(:cache_key, :title)
    foo = cache_struct.new(key, "FOO!")
    bar = cache_struct.new(other_key)
    @cache.write(other_key, "BAM!")
    values = @cache.fetch_multi(foo, bar) { |object| object.title }
    assert_equal({ foo => "FOO!", bar => "BAM!" }, values)
  end

  # The returned hash preserves the argument order, not hit/miss order.
  def test_fetch_multi_returns_ordered_names
    key = SecureRandom.alphanumeric.downcase
    other_key = SecureRandom.alphanumeric.downcase
    third_key = SecureRandom.alphanumeric.downcase
    @cache.write(key, "BAM")
    # NOTE(review): the block parameter |key| shadows the outer local `key`.
    values = @cache.fetch_multi(other_key, third_key, key) { |key| key.upcase }
    assert_equal([other_key, third_key, key], values.keys)
    assert_equal([other_key.upcase, third_key.upcase, "BAM"], values.values)
  end

  def test_fetch_multi_without_block
    assert_raises(ArgumentError) do
      @cache.fetch_multi(SecureRandom.alphanumeric)
    end
  end
  # Use strings that are guaranteed to compress well, so we can easily tell if
  # the compression kicked in or not.
  SMALL_STRING = "0" * 100
  LARGE_STRING = "0" * 2.kilobytes

  SMALL_OBJECT = { data: SMALL_STRING }
  LARGE_OBJECT = { data: LARGE_STRING }

  # nil is never compressed, regardless of settings.
  def test_nil_with_default_compression_settings
    assert_uncompressed(nil)
  end

  def test_nil_with_compress_true
    assert_uncompressed(nil, compress: true)
  end

  def test_nil_with_compress_false
    assert_uncompressed(nil, compress: false)
  end

  def test_nil_with_compress_low_compress_threshold
    assert_uncompressed(nil, compress: true, compress_threshold: 20)
  end

  # Small payloads stay uncompressed under the default threshold.
  def test_small_string_with_default_compression_settings
    assert_uncompressed(SMALL_STRING)
  end

  def test_small_string_with_compress_true
    assert_uncompressed(SMALL_STRING, compress: true)
  end

  def test_small_string_with_compress_false
    assert_uncompressed(SMALL_STRING, compress: false)
  end

  # ...but compress once the threshold is lowered beneath their size.
  def test_small_string_with_low_compress_threshold
    assert_compressed(SMALL_STRING, compress: true, compress_threshold: 1)
  end

  def test_small_object_with_default_compression_settings
    assert_uncompressed(SMALL_OBJECT)
  end

  def test_small_object_with_compress_true
    assert_uncompressed(SMALL_OBJECT, compress: true)
  end

  def test_small_object_with_compress_false
    assert_uncompressed(SMALL_OBJECT, compress: false)
  end

  def test_small_object_with_low_compress_threshold
    assert_compressed(SMALL_OBJECT, compress: true, compress_threshold: 1)
  end

  def test_large_string_with_compress_true
    assert_compressed(LARGE_STRING, compress: true)
  end

  def test_large_string_with_compress_false
    assert_uncompressed(LARGE_STRING, compress: false)
  end

  def test_large_string_with_high_compress_threshold
    assert_uncompressed(LARGE_STRING, compress: true, compress_threshold: 1.megabyte)
  end

  def test_large_object_with_compress_true
    assert_compressed(LARGE_OBJECT, compress: true)
  end

  def test_large_object_with_compress_false
    assert_uncompressed(LARGE_OBJECT, compress: false)
  end

  def test_large_object_with_high_compress_threshold
    assert_uncompressed(LARGE_OBJECT, compress: true, compress_threshold: 1.megabyte)
  end

  # Values whose deflated form is not smaller must be stored uncompressed
  # even when compression is forced with a tiny threshold.
  def test_incompressible_data
    assert_uncompressed(nil, compress: true, compress_threshold: 30)
    assert_uncompressed(true, compress: true, compress_threshold: 30)
    assert_uncompressed(false, compress: true, compress_threshold: 30)
    assert_uncompressed(0, compress: true, compress_threshold: 30)
    assert_uncompressed(1.2345, compress: true, compress_threshold: 30)
    assert_uncompressed("", compress: true, compress_threshold: 30)

    incompressible = nil

    # generate an incompressible string
    loop do
      incompressible = Random.bytes(1.kilobyte)
      break if incompressible.bytesize < Zlib::Deflate.deflate(incompressible).bytesize
    end

    assert_uncompressed(incompressible, compress: true, compress_threshold: 1)
  end
  # Objects responding to #cache_key are normalized through it.
  def test_cache_key
    key = SecureRandom.uuid
    klass = Class.new do
      def initialize(key)
        @key = key
      end

      def cache_key
        @key
      end
    end
    @cache.write(klass.new(key), "bar")
    assert_equal "bar", @cache.read(key)
  end

  # Objects without #cache_key fall back to #to_param.
  def test_param_as_cache_key
    key = SecureRandom.uuid
    klass = Class.new do
      def initialize(key)
        @key = key
      end

      def to_param
        @key
      end
    end
    @cache.write(klass.new(key), "bar")
    assert_equal "bar", @cache.read(key)
  end

  # Plain read/write use the unversioned #cache_key, not
  # #cache_key_with_version.
  def test_unversioned_cache_key
    key = SecureRandom.uuid
    klass = Class.new do
      def initialize(key)
        @key = key
      end

      def cache_key
        @key
      end

      def cache_key_with_version
        "#{@key}-v1"
      end
    end
    @cache.write(klass.new(key), "bar")
    assert_equal "bar", @cache.read(key)
  end

  # Array keys are joined with "/".
  def test_array_as_cache_key
    key = SecureRandom.uuid
    @cache.write([key, "foo"], "bar")
    assert_equal "bar", @cache.read("#{key}/foo")
  end

  # Fixture defining both #cache_key and #to_param; #cache_key must win.
  InstanceTest = Struct.new(:name, :id) do
    def cache_key
      "#{name}/#{id}"
    end

    def to_param
      "hello"
    end
  end

  def test_array_with_single_instance_as_cache_key_uses_cache_key_method
    key = SecureRandom.alphanumeric
    other_key = SecureRandom.alphanumeric
    test_instance_one = InstanceTest.new(key, 1)
    test_instance_two = InstanceTest.new(other_key, 2)

    @cache.write([test_instance_one], "one")
    @cache.write([test_instance_two], "two")

    assert_equal "one", @cache.read([test_instance_one])
    assert_equal "two", @cache.read([test_instance_two])
  end

  def test_array_with_multiple_instances_as_cache_key_uses_cache_key_method
    key = SecureRandom.alphanumeric
    other_key = SecureRandom.alphanumeric
    third_key = SecureRandom.alphanumeric
    test_instance_one = InstanceTest.new(key, 1)
    test_instance_two = InstanceTest.new(other_key, 2)
    test_instance_three = InstanceTest.new(third_key, 3)

    @cache.write([test_instance_one, test_instance_three], "one")
    @cache.write([test_instance_two, test_instance_three], "two")

    assert_equal "one", @cache.read([test_instance_one, test_instance_three])
    assert_equal "two", @cache.read([test_instance_two, test_instance_three])
  end

  # A single instance and a one-element array expand to the same key.
  def test_format_of_expanded_key_for_single_instance
    key = SecureRandom.alphanumeric
    test_instance_one = InstanceTest.new(key, 1)
    expanded_key = @cache.send(:expanded_key, test_instance_one)
    assert_equal expanded_key, test_instance_one.cache_key
  end

  def test_format_of_expanded_key_for_single_instance_in_array
    key = SecureRandom.alphanumeric
    test_instance_one = InstanceTest.new(key, 1)
    expanded_key = @cache.send(:expanded_key, [test_instance_one])
    assert_equal expanded_key, test_instance_one.cache_key
  end

  # Hash keys round-trip: the same hash reads back what was written.
  def test_hash_as_cache_key
    key = SecureRandom.alphanumeric
    other_key = SecureRandom.alphanumeric
    @cache.write({ key => 1, other_key => 2 }, "bar")
    assert_equal "bar", @cache.read({ key => 1, other_key => 2 })
  end
  def test_keys_are_case_sensitive
    key = "case_sensitive_key"
    @cache.write(key, "bar")
    assert_nil @cache.read(key.upcase)
  end

  def test_exist
    key = SecureRandom.alphanumeric
    @cache.write(key, "bar")
    assert_equal true, @cache.exist?(key)
    assert_equal false, @cache.exist?(SecureRandom.uuid)
  end

  # A stored nil still counts as existing.
  def test_nil_exist
    key = SecureRandom.alphanumeric
    @cache.write(key, nil)
    assert @cache.exist?(key)
  end

  def test_delete
    key = SecureRandom.alphanumeric
    @cache.write(key, "bar")
    assert @cache.exist?(key)
    assert @cache.delete(key)
    assert_not @cache.exist?(key)
  end

  # delete_multi returns the number of entries actually deleted (the
  # never-written middle key does not count).
  def test_delete_multi
    key = SecureRandom.alphanumeric
    @cache.write(key, "bar")
    assert @cache.exist?(key)
    other_key = SecureRandom.alphanumeric
    @cache.write(other_key, "world")
    assert @cache.exist?(other_key)
    assert_equal 2, @cache.delete_multi([key, SecureRandom.uuid, other_key])
    assert_not @cache.exist?(key)
    assert_not @cache.exist?(other_key)
  end

  # Writing must not freeze the caller's object.
  def test_original_store_objects_should_not_be_immutable
    bar = +"bar"
    key = SecureRandom.alphanumeric
    @cache.write(key, bar)
    assert_nothing_raised { bar.gsub!(/.*/, "baz") }
  end

  # Entries expire individually just after their own expires_in elapses.
  def test_expires_in
    time = Time.local(2008, 4, 24)
    key = SecureRandom.alphanumeric
    other_key = SecureRandom.alphanumeric

    Time.stub(:now, time) do
      @cache.write(key, "bar", expires_in: 1.minute)
      @cache.write(other_key, "spam", expires_in: 2.minute)
      assert_equal "bar", @cache.read(key)
      assert_equal "spam", @cache.read(other_key)
    end

    Time.stub(:now, time + 30) do
      assert_equal "bar", @cache.read(key)
      assert_equal "spam", @cache.read(other_key)
    end

    Time.stub(:now, time + 1.minute + 1.second) do
      assert_nil @cache.read(key)
      assert_equal "spam", @cache.read(other_key)
    end

    Time.stub(:now, time + 2.minute + 1.second) do
      assert_nil @cache.read(key)
      assert_nil @cache.read(other_key)
    end
  end
  # expires_at takes an absolute time instead of a duration.
  def test_expires_at
    time = Time.local(2008, 4, 24)
    key = SecureRandom.alphanumeric

    Time.stub(:now, time) do
      @cache.write(key, "bar", expires_at: time + 15.seconds)
      assert_equal "bar", @cache.read(key)
    end

    Time.stub(:now, time + 10) do
      assert_equal "bar", @cache.read(key)
    end

    Time.stub(:now, time + 30) do
      assert_nil @cache.read(key)
    end
  end

  def test_expire_in_is_alias_for_expires_in
    time = Time.local(2008, 4, 24)
    key = SecureRandom.alphanumeric

    Time.stub(:now, time) do
      @cache.write(key, "bar", expire_in: 20)
      assert_equal "bar", @cache.read(key)
    end

    Time.stub(:now, time + 10) do
      assert_equal "bar", @cache.read(key)
    end

    Time.stub(:now, time + 21) do
      assert_nil @cache.read(key)
    end
  end

  def test_expired_in_is_alias_for_expires_in
    time = Time.local(2008, 4, 24)
    key = SecureRandom.alphanumeric

    Time.stub(:now, time) do
      @cache.write(key, "bar", expired_in: 20)
      assert_equal "bar", @cache.read(key)
    end

    Time.stub(:now, time + 10) do
      assert_equal "bar", @cache.read(key)
    end

    Time.stub(:now, time + 21) do
      assert_nil @cache.read(key)
    end
  end

  # Without race_condition_ttl an expired entry reads as a plain miss.
  def test_race_condition_protection_skipped_if_not_defined
    key = SecureRandom.alphanumeric
    @cache.write(key, "bar")
    time = @cache.send(:read_entry, @cache.send(:normalize_key, key, {}), **{}).expires_at

    Time.stub(:now, Time.at(time)) do
      result = @cache.fetch(key) do
        assert_nil @cache.read(key)
        "baz"
      end
      assert_equal "baz", result
    end
  end
  # Past the race_condition_ttl window the stale value is no longer served.
  def test_race_condition_protection_is_limited
    time = Time.now
    key = SecureRandom.uuid
    @cache.write(key, "bar", expires_in: 60)
    Time.stub(:now, time + 71) do
      result = @cache.fetch(key, race_condition_ttl: 10) do
        assert_nil @cache.read(key)
        "baz"
      end
      assert_equal "baz", result
    end
  end

  # If the regeneration block raises, the stale value stays readable until
  # the extended expiry runs out.
  def test_race_condition_protection_is_safe
    time = Time.now
    key = SecureRandom.uuid
    @cache.write(key, "bar", expires_in: 60)
    Time.stub(:now, time + 61) do
      begin
        @cache.fetch(key, race_condition_ttl: 10) do
          assert_equal "bar", @cache.read(key)
          raise ArgumentError.new
        end
      rescue ArgumentError
      end
      assert_equal "bar", @cache.read(key)
    end
    Time.stub(:now, time + 91) do
      assert_nil @cache.read(key)
    end
  end

  # Within the TTL other readers see the stale value while the block
  # regenerates the entry.
  def test_race_condition_protection
    time = Time.now
    key = SecureRandom.uuid
    @cache.write(key, "bar", expires_in: 60)
    Time.stub(:now, time + 61) do
      result = @cache.fetch(key, race_condition_ttl: 10) do
        assert_equal "bar", @cache.read(key)
        "baz"
      end
      assert_equal "baz", result
    end
  end

  # Keys full of punctuation/metacharacters must work for every operation.
  def test_absurd_key_characters
    absurd_key = "#/:*(<+=> )&$%@?;'\"\'`~-"
    assert @cache.write(absurd_key, "1", raw: true)
    assert_equal "1", @cache.read(absurd_key, raw: true)
    assert_equal "1", @cache.fetch(absurd_key, raw: true)
    assert @cache.delete(absurd_key)
    assert_equal "2", @cache.fetch(absurd_key, raw: true) { "2" }
    assert_equal 3, @cache.increment(absurd_key)
    assert_equal 2, @cache.decrement(absurd_key)
  end

  # Very long keys must round-trip and stay distinct from longer variants.
  def test_really_long_keys
    key = SecureRandom.alphanumeric * 2048
    assert @cache.write(key, "bar")
    assert_equal "bar", @cache.read(key)
    assert_equal "bar", @cache.fetch(key)
    assert_nil @cache.read("#{key}x")
    assert_equal({ key => "bar" }, @cache.read_multi(key))
  end
  # A fetch hit emits a single cache_read event with super_operation: :fetch
  # and hit: true in the payload.
  def test_cache_hit_instrumentation
    key = "test_key"
    @events = []
    ActiveSupport::Notifications.subscribe "cache_read.active_support" do |*args|
      @events << ActiveSupport::Notifications::Event.new(*args)
    end
    assert @cache.write(key, "1", raw: true)
    assert @cache.fetch(key, raw: true) { }
    assert_equal 1, @events.length
    assert_equal "cache_read.active_support", @events[0].name
    assert_equal :fetch, @events[0].payload[:super_operation]
    assert @events[0].payload[:hit]
  ensure
    ActiveSupport::Notifications.unsubscribe "cache_read.active_support"
  end
def test_cache_miss_instrumentation | |
@events = [] | |
ActiveSupport::Notifications.subscribe(/^cache_(.*)\.active_support$/) do |*args| | |
@events << ActiveSupport::Notifications::Event.new(*args) | |
end | |
assert_not @cache.fetch(SecureRandom.uuid) { } | |
assert_equal 3, @events.length | |
assert_equal "cache_read.active_support", @events[0].name | |
assert_equal "cache_generate.active_support", @events[1].name | |
assert_equal "cache_write.active_support", @events[2].name | |
assert_equal :fetch, @events[0].payload[:super_operation] | |
assert_not @events[0].payload[:hit] | |
ensure | |
ActiveSupport::Notifications.unsubscribe "cache_read.active_support" | |
end | |
  private
    def assert_compressed(value, **options)
      assert_compression(true, value, **options)
    end

    def assert_uncompressed(value, **options)
      assert_compression(false, value, **options)
    end

    # Writes the value twice — once with the given options and once with
    # compression explicitly disabled — then compares the serialized payload
    # sizes to decide whether compression actually happened. Uses send to
    # reach the store's private read_entry/normalize_key/serialize_entry.
    def assert_compression(should_compress, value, **options)
      actual = "actual" + SecureRandom.uuid
      uncompressed = "uncompressed" + SecureRandom.uuid

      # freeze_time so both entries get identical timestamps and sizes are
      # comparable.
      freeze_time do
        @cache.write(actual, value, options)
        @cache.write(uncompressed, value, options.merge(compress: false))
      end

      if value.nil?
        assert_nil @cache.read(actual)
        assert_nil @cache.read(uncompressed)
      else
        assert_equal value, @cache.read(actual)
        assert_equal value, @cache.read(uncompressed)
      end

      actual_entry = @cache.send(:read_entry, @cache.send(:normalize_key, actual, {}), **{})
      uncompressed_entry = @cache.send(:read_entry, @cache.send(:normalize_key, uncompressed, {}), **{})

      actual_payload = @cache.send(:serialize_entry, actual_entry, **@cache.send(:merged_options, options))
      uncompressed_payload = @cache.send(:serialize_entry, uncompressed_entry, compress: false)

      actual_size = actual_payload.bytesize
      uncompressed_size = uncompressed_payload.bytesize

      if should_compress
        assert_operator actual_size, :<, uncompressed_size, "value should be compressed"
      else
        assert_equal uncompressed_size, actual_size, "value should not be compressed"
      end
    end
end
# frozen_string_literal: true | |
# Behavior shared by stores that accept a custom :coder — verifies the coder
# receives every entry on write/write_multi and every payload on
# read/read_multi, and is bypassed entirely on a read miss.
module CacheStoreCoderBehavior
  # Marshal-backed test double that records each entry passing through
  # #dump and #load so tests can inspect exactly what the store
  # (de)serialized.
  class SpyCoder
    attr_reader :dumped_entries, :loaded_entries

    def initialize
      @dumped_entries = []
      @loaded_entries = []
    end

    # Records the entry, then serializes it with Marshal.
    def dump(entry)
      dumped_entries << entry
      Marshal.dump(entry)
    end

    # Deserializes the payload with Marshal and records the result.
    def load(payload)
      Marshal.load(payload).tap { |entry| loaded_entries << entry }
    end
  end

  def test_coder_receive_the_entry_on_write
    spy = SpyCoder.new
    @store = lookup_store(coder: spy)
    @store.write("foo", "bar")
    assert_equal 1, spy.dumped_entries.size
    entry = spy.dumped_entries.first
    assert_instance_of ActiveSupport::Cache::Entry, entry
    assert_equal "bar", entry.value
  end

  def test_coder_receive_the_entry_on_read
    spy = SpyCoder.new
    @store = lookup_store(coder: spy)
    @store.write("foo", "bar")
    @store.read("foo")
    assert_equal 1, spy.loaded_entries.size
    entry = spy.loaded_entries.first
    assert_instance_of ActiveSupport::Cache::Entry, entry
    assert_equal "bar", entry.value
  end

  def test_coder_receive_the_entry_on_read_multi
    spy = SpyCoder.new
    @store = lookup_store(coder: spy)
    @store.write_multi({ "foo" => "bar", "egg" => "spam" })
    @store.read_multi("foo", "egg")
    assert_equal 2, spy.loaded_entries.size
    first, second = spy.loaded_entries
    assert_instance_of ActiveSupport::Cache::Entry, first
    assert_equal "bar", first.value
    assert_instance_of ActiveSupport::Cache::Entry, second
    assert_equal "spam", second.value
  end

  def test_coder_receive_the_entry_on_write_multi
    spy = SpyCoder.new
    @store = lookup_store(coder: spy)
    @store.write_multi({ "foo" => "bar", "egg" => "spam" })
    assert_equal 2, spy.dumped_entries.size
    first, second = spy.dumped_entries
    assert_instance_of ActiveSupport::Cache::Entry, first
    assert_equal "bar", first.value
    assert_instance_of ActiveSupport::Cache::Entry, second
    assert_equal "spam", second.value
  end

  def test_coder_does_not_receive_the_entry_on_read_miss
    spy = SpyCoder.new
    @store = lookup_store(coder: spy)
    @store.read("foo")
    assert_equal 0, spy.loaded_entries.size
  end
end
# frozen_string_literal: true | |
require_relative "../abstract_unit" | |
require "active_support/cache" | |
# Verifies that cache operations are logged through the store's logger,
# that namespaces (string or proc) appear in the logged key, and that
# logging can be muted.
class CacheStoreLoggerTest < ActiveSupport::TestCase
  def setup
    @cache = ActiveSupport::Cache.lookup_store(:memory_store)
    @buffer = StringIO.new
    @cache.logger = ActiveSupport::Logger.new(@buffer)
  end

  # A plain fetch writes something to the log.
  def test_logging
    @cache.fetch("foo") { "bar" }
    assert_predicate @buffer.string, :present?
  end

  # A string namespace is prefixed onto the logged key.
  def test_log_with_string_namespace
    @cache.fetch("foo", namespace: "string_namespace") { "bar" }
    assert_match %r{string_namespace:foo}, @buffer.string
  end

  # A proc namespace is evaluated before being logged.
  def test_log_with_proc_namespace
    namespace_proc = Proc.new { "proc_namespace" }
    @cache.fetch("foo", namespace: namespace_proc) { "bar" }
    assert_match %r{proc_namespace:foo}, @buffer.string
  end

  # mute suppresses all logging inside its block.
  def test_mute_logging
    @cache.mute { @cache.fetch("foo") { "bar" } }
    assert_predicate @buffer.string, :blank?
  end
end
# frozen_string_literal: true | |
require_relative "../abstract_unit" | |
require "active_support/cache" | |
# Namespace handling: static strings and procs, plus delete_matched scoping.
class CacheStoreNamespaceTest < ActiveSupport::TestCase
  def test_static_namespace
    cache = ActiveSupport::Cache.lookup_store(:memory_store, namespace: "tester")
    cache.write("foo", "bar")
    assert_equal "bar", cache.read("foo")
    # NOTE(review): peeks at MemoryStore's internal @data ivar to confirm the
    # namespaced storage key; brittle if the store's representation changes.
    assert_equal "bar", cache.instance_variable_get(:@data)["tester:foo"].value
  end

  # A callable namespace is evaluated to produce the prefix.
  def test_proc_namespace
    test_val = "tester"
    proc = lambda { test_val }
    cache = ActiveSupport::Cache.lookup_store(:memory_store, namespace: proc)
    cache.write("foo", "bar")
    assert_equal "bar", cache.read("foo")
    assert_equal "bar", cache.instance_variable_get(:@data)["tester:foo"].value
  end

  # delete_matched matches against the un-namespaced key.
  def test_delete_matched_key_start
    cache = ActiveSupport::Cache.lookup_store(:memory_store, namespace: "tester")
    cache.write("foo", "bar")
    cache.write("fu", "baz")
    cache.delete_matched(/^fo/)
    assert_not cache.exist?("foo")
    assert cache.exist?("fu")
  end

  # The pattern must not accidentally match the namespace prefix ("foo").
  def test_delete_matched_key
    cache = ActiveSupport::Cache.lookup_store(:memory_store, namespace: "foo")
    cache.write("foo", "bar")
    cache.write("fu", "baz")
    cache.delete_matched(/OO/i)
    assert_not cache.exist?("foo")
    assert cache.exist?("fu")
  end
end
# frozen_string_literal: true | |
require_relative "../abstract_unit" | |
require "active_support/cache" | |
require "dalli" | |
# Tests ActiveSupport::Cache.lookup_store's resolution of symbols, extra
# arguments, option hashes, and pre-built store objects.
class CacheStoreSettingTest < ActiveSupport::TestCase
  # No arguments defaults to a MemoryStore.
  def test_memory_store_gets_created_if_no_arguments_passed_to_lookup_store_method
    store = ActiveSupport::Cache.lookup_store
    assert_kind_of(ActiveSupport::Cache::MemoryStore, store)
  end

  def test_memory_store
    store = ActiveSupport::Cache.lookup_store :memory_store
    assert_kind_of(ActiveSupport::Cache::MemoryStore, store)
  end

  def test_file_fragment_cache_store
    store = ActiveSupport::Cache.lookup_store :file_store, "/path/to/cache/directory"
    assert_kind_of(ActiveSupport::Cache::FileStore, store)
    assert_equal "/path/to/cache/directory", store.cache_path
  end

  def test_file_store_requires_a_path
    assert_raises(ArgumentError) do
      ActiveSupport::Cache.lookup_store :file_store
    end
  end

  # Server strings are forwarded to Dalli::Client.new.
  def test_mem_cache_fragment_cache_store
    assert_called_with(Dalli::Client, :new, [%w[localhost], { compress: false }]) do
      store = ActiveSupport::Cache.lookup_store :mem_cache_store, "localhost"
      assert_kind_of(ActiveSupport::Cache::MemCacheStore, store)
    end
  end

  # An already-built Dalli::Client is used as-is.
  def test_mem_cache_fragment_cache_store_with_given_mem_cache
    mem_cache = Dalli::Client.new
    assert_not_called(Dalli::Client, :new) do
      store = ActiveSupport::Cache.lookup_store :mem_cache_store, mem_cache
      assert_kind_of(ActiveSupport::Cache::MemCacheStore, store)
    end
  end

  # Arbitrary objects are rejected rather than silently wrapped.
  def test_mem_cache_fragment_cache_store_with_not_dalli_client
    assert_not_called(Dalli::Client, :new) do
      memcache = Object.new
      assert_raises(ArgumentError) do
        ActiveSupport::Cache.lookup_store :mem_cache_store, memcache
      end
    end
  end

  def test_mem_cache_fragment_cache_store_with_multiple_servers
    assert_called_with(Dalli::Client, :new, [%w[localhost 192.168.1.1], { compress: false }]) do
      store = ActiveSupport::Cache.lookup_store :mem_cache_store, "localhost", "192.168.1.1"
      assert_kind_of(ActiveSupport::Cache::MemCacheStore, store)
    end
  end

  # Store-level options (:namespace) are kept on the store; client-level
  # options (:timeout) are passed through to Dalli.
  def test_mem_cache_fragment_cache_store_with_options
    assert_called_with(Dalli::Client, :new, [%w[localhost 192.168.1.1], { timeout: 10, compress: false }]) do
      store = ActiveSupport::Cache.lookup_store :mem_cache_store, "localhost", "192.168.1.1", namespace: "foo", timeout: 10
      assert_kind_of(ActiveSupport::Cache::MemCacheStore, store)
      assert_equal "foo", store.options[:namespace]
    end
  end

  # Passing a store instance returns it unchanged.
  def test_object_assigned_fragment_cache_store
    store = ActiveSupport::Cache.lookup_store ActiveSupport::Cache::FileStore.new("/path/to/cache/directory")
    assert_kind_of(ActiveSupport::Cache::FileStore, store)
    assert_equal "/path/to/cache/directory", store.cache_path
  end

  # The [:store_name, options] single-array form is also accepted.
  def test_redis_cache_store_with_single_array_object
    cache_store = [:redis_cache_store, namespace: "foo"]
    store = ActiveSupport::Cache.lookup_store(cache_store)
    assert_kind_of ActiveSupport::Cache::RedisCacheStore, store
    assert_equal "foo", store.options[:namespace]
  end

  def test_redis_cache_store_with_ordered_options
    options = ActiveSupport::OrderedOptions.new
    options.update namespace: "foo"
    store = ActiveSupport::Cache.lookup_store :redis_cache_store, options
    assert_kind_of(ActiveSupport::Cache::RedisCacheStore, store)
    assert_equal "foo", store.options[:namespace]
  end
end
# frozen_string_literal: true | |
# Shared examples for stores supporting recyclable (versioned) cache keys:
# a mismatch between the requested :version and the stored one behaves like
# a miss.
module CacheStoreVersionBehavior
  # Fixture exposing #cache_key and #cache_version like an AR model.
  ModelWithKeyAndVersion = Struct.new(:cache_key, :cache_version)

  def test_fetch_with_right_version_should_hit
    key = SecureRandom.uuid
    value = SecureRandom.alphanumeric
    @cache.fetch(key, version: 1) { value }
    assert_equal value, @cache.read(key, version: 1)
  end

  def test_fetch_with_wrong_version_should_miss
    key = SecureRandom.uuid
    @cache.fetch(key, version: 1) { SecureRandom.alphanumeric }
    assert_nil @cache.read(key, version: 2)
  end

  def test_read_with_right_version_should_hit
    key = SecureRandom.uuid
    value = SecureRandom.alphanumeric
    @cache.write(key, value, version: 1)
    assert_equal value, @cache.read(key, version: 1)
  end

  def test_read_with_wrong_version_should_miss
    key = SecureRandom.uuid
    value = SecureRandom.alphanumeric
    @cache.write(key, value, version: 1)
    assert_nil @cache.read(key, version: 2)
  end

  def test_exist_with_right_version_should_be_true
    key = SecureRandom.uuid
    @cache.write(key, SecureRandom.alphanumeric, version: 1)
    assert @cache.exist?(key, version: 1)
  end

  def test_exist_with_wrong_version_should_be_false
    key = SecureRandom.uuid
    @cache.write(key, SecureRandom.alphanumeric, version: 1)
    assert_not @cache.exist?(key, version: 2)
  end

  # Objects exposing #cache_version have it extracted implicitly.
  def test_reading_and_writing_with_model_supporting_cache_version
    model_name = SecureRandom.alphanumeric
    m1v1 = ModelWithKeyAndVersion.new("#{model_name}/1", 1)
    m1v2 = ModelWithKeyAndVersion.new("#{model_name}/1", 2)
    value = SecureRandom.alphanumeric

    @cache.write(m1v1, value)
    assert_equal value, @cache.read(m1v1)
    assert_nil @cache.read(m1v2)
  end

  def test_reading_and_writing_with_model_supporting_cache_version_using_nested_key
    model_name = SecureRandom.alphanumeric
    m1v1 = ModelWithKeyAndVersion.new("#{model_name}/1", 1)
    m1v2 = ModelWithKeyAndVersion.new("#{model_name}/1", 2)
    value = SecureRandom.alphanumeric

    @cache.write([ "something", m1v1 ], value)
    assert_equal value, @cache.read([ "something", m1v1 ])
    assert_nil @cache.read([ "something", m1v2 ])
  end

  def test_fetching_with_model_supporting_cache_version
    model_name = SecureRandom.alphanumeric
    m1v1 = ModelWithKeyAndVersion.new("#{model_name}/1", 1)
    m1v2 = ModelWithKeyAndVersion.new("#{model_name}/1", 2)
    value = SecureRandom.alphanumeric
    other_value = SecureRandom.alphanumeric

    @cache.fetch(m1v1) { value }
    assert_equal value, @cache.fetch(m1v1) { other_value }
    assert_equal other_value, @cache.fetch(m1v2) { other_value }
  end

  def test_exist_with_model_supporting_cache_version
    model_name = SecureRandom.alphanumeric
    m1v1 = ModelWithKeyAndVersion.new("#{model_name}/1", 1)
    m1v2 = ModelWithKeyAndVersion.new("#{model_name}/1", 2)
    value = SecureRandom.alphanumeric

    @cache.write(m1v1, value)
    assert @cache.exist?(m1v1)
    assert_not @cache.fetch(m1v2)
  end

  def test_fetch_multi_with_model_supporting_cache_version
    model_name = SecureRandom.alphanumeric
    m1v1 = ModelWithKeyAndVersion.new("#{model_name}/1", 1)
    m2v1 = ModelWithKeyAndVersion.new("#{model_name}/2", 1)
    m2v2 = ModelWithKeyAndVersion.new("#{model_name}/2", 2)

    first_fetch_values = @cache.fetch_multi(m1v1, m2v1) { |m| m.cache_key }
    second_fetch_values = @cache.fetch_multi(m1v1, m2v2) { |m| m.cache_key + " 2nd" }

    assert_equal({ m1v1 => "#{model_name}/1", m2v1 => "#{model_name}/2" }, first_fetch_values)
    assert_equal({ m1v1 => "#{model_name}/1", m2v2 => "#{model_name}/2 2nd" }, second_fetch_values)
  end

  # Integer and string versions compare equal after normalization.
  def test_version_is_normalized
    key = SecureRandom.uuid
    value = SecureRandom.alphanumeric
    @cache.write(key, value, version: 1)
    assert_equal value, @cache.read(key, version: "1")
  end
end
# frozen_string_literal: true | |
require "active_support/duration" | |
require "active_support/core_ext/time/conversions" | |
require "active_support/time_with_zone" | |
require "active_support/core_ext/time/zones" | |
require "active_support/core_ext/date_and_time/calculations" | |
require "active_support/core_ext/date/calculations" | |
require "active_support/core_ext/module/remove_method" | |
class Time
  include DateAndTime::Calculations

  # Days per month for a non-leap year; index 0 is +nil+ so the array can be
  # indexed directly by month number (1..12). February is corrected for leap
  # years in +days_in_month+.
  COMMON_YEAR_DAYS_IN_MONTH = [nil, 31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31]

  class << self
    # Overriding case equality method so that it returns true for ActiveSupport::TimeWithZone instances
    def ===(other)
      super || (self == Time && other.is_a?(ActiveSupport::TimeWithZone))
    end

    # Returns the number of days in the given month.
    # If no year is specified, it will use the current year.
    def days_in_month(month, year = current.year)
      if month == 2 && ::Date.gregorian_leap?(year)
        29
      else
        COMMON_YEAR_DAYS_IN_MONTH[month]
      end
    end

    # Returns the number of days in the given year.
    # If no year is specified, it will use the current year.
    #
    # 337 is the number of days in the eleven months other than February.
    def days_in_year(year = current.year)
      days_in_month(2, year) + 337
    end

    # Returns <tt>Time.zone.now</tt> when <tt>Time.zone</tt> or <tt>config.time_zone</tt> are set, otherwise just returns <tt>Time.now</tt>.
    def current
      ::Time.zone ? ::Time.zone.now : ::Time.now
    end

    # Layers additional behavior on Time.at so that ActiveSupport::TimeWithZone and DateTime
    # instances can be used when called with a single argument
    def at_with_coercion(*args, **kwargs)
      # Multi-argument and keyword forms are passed straight through to Ruby's Time.at.
      return at_without_coercion(*args, **kwargs) if args.size != 1 || !kwargs.empty?
      # Time.at can be called with a time or numerical value
      time_or_number = args.first
      if time_or_number.is_a?(ActiveSupport::TimeWithZone)
        # to_r preserves sub-second precision; getlocal mirrors Time.at's
        # behavior of returning a time in the system's local zone.
        at_without_coercion(time_or_number.to_r).getlocal
      elsif time_or_number.is_a?(DateTime)
        at_without_coercion(time_or_number.to_f).getlocal
      else
        at_without_coercion(time_or_number)
      end
    end
    alias_method :at_without_coercion, :at
    alias_method :at, :at_with_coercion

    # Creates a +Time+ instance from an RFC 3339 string.
    #
    #   Time.rfc3339('1999-12-31T14:00:00-10:00') # => 2000-01-01 00:00:00 -1000
    #
    # If the time or offset components are missing then an +ArgumentError+ will be raised.
    #
    #   Time.rfc3339('1999-12-31') # => ArgumentError: invalid date
    def rfc3339(str)
      parts = Date._rfc3339(str)
      # Date._rfc3339 returns an empty hash when the string doesn't parse;
      # the fetch calls below raise KeyError for partially parsed strings.
      raise ArgumentError, "invalid date" if parts.empty?
      Time.new(
        parts.fetch(:year),
        parts.fetch(:mon),
        parts.fetch(:mday),
        parts.fetch(:hour),
        parts.fetch(:min),
        parts.fetch(:sec) + parts.fetch(:sec_fraction, 0),
        parts.fetch(:offset)
      )
    end
  end

  # Returns the number of seconds since 00:00:00.
  #
  #   Time.new(2012, 8, 29, 0, 0, 0).seconds_since_midnight # => 0.0
  #   Time.new(2012, 8, 29, 12, 34, 56).seconds_since_midnight # => 45296.0
  #   Time.new(2012, 8, 29, 23, 59, 59).seconds_since_midnight # => 86399.0
  def seconds_since_midnight
    to_i - change(hour: 0).to_i + (usec / 1.0e+6)
  end

  # Returns the number of seconds until 23:59:59.
  #
  #   Time.new(2012, 8, 29, 0, 0, 0).seconds_until_end_of_day # => 86399
  #   Time.new(2012, 8, 29, 12, 34, 56).seconds_until_end_of_day # => 41103
  #   Time.new(2012, 8, 29, 23, 59, 59).seconds_until_end_of_day # => 0
  def seconds_until_end_of_day
    end_of_day.to_i - to_i
  end

  # Returns the fraction of a second as a +Rational+
  #
  #   Time.new(2012, 8, 29, 0, 0, 0.5).sec_fraction # => (1/2)
  def sec_fraction
    subsec
  end

  # Backfills Time#floor for Rubies that don't ship it (added in Ruby 2.4).
  unless Time.method_defined?(:floor)
    def floor(precision = 0)
      change(nsec: 0) + subsec.floor(precision)
    end
  end

  # Restricted Ruby version due to a bug in `Time#ceil`
  # See https://bugs.ruby-lang.org/issues/17025 for more details
  if RUBY_VERSION <= "2.8"
    remove_possible_method :ceil
    def ceil(precision = 0)
      change(nsec: 0) + subsec.ceil(precision)
    end
  end

  # Returns a new Time where one or more of the elements have been changed according
  # to the +options+ parameter. The time options (<tt>:hour</tt>, <tt>:min</tt>,
  # <tt>:sec</tt>, <tt>:usec</tt>, <tt>:nsec</tt>) reset cascadingly, so if only
  # the hour is passed, then minute, sec, usec, and nsec is set to 0. If the hour
  # and minute is passed, then sec, usec, and nsec is set to 0. The +options+ parameter
  # takes a hash with any of these keys: <tt>:year</tt>, <tt>:month</tt>, <tt>:day</tt>,
  # <tt>:hour</tt>, <tt>:min</tt>, <tt>:sec</tt>, <tt>:usec</tt>, <tt>:nsec</tt>,
  # <tt>:offset</tt>. Pass either <tt>:usec</tt> or <tt>:nsec</tt>, not both.
  #
  #   Time.new(2012, 8, 29, 22, 35, 0).change(day: 1)              # => Time.new(2012, 8, 1, 22, 35, 0)
  #   Time.new(2012, 8, 29, 22, 35, 0).change(year: 1981, day: 1)  # => Time.new(1981, 8, 1, 22, 35, 0)
  #   Time.new(2012, 8, 29, 22, 35, 0).change(year: 1981, hour: 0) # => Time.new(1981, 8, 29, 0, 0, 0)
  def change(options)
    new_year = options.fetch(:year, year)
    new_month = options.fetch(:month, month)
    new_day = options.fetch(:day, day)
    new_hour = options.fetch(:hour, hour)
    # Cascading reset: supplying a coarser unit zeroes all finer ones.
    new_min = options.fetch(:min, options[:hour] ? 0 : min)
    new_sec = options.fetch(:sec, (options[:hour] || options[:min]) ? 0 : sec)
    new_offset = options.fetch(:offset, nil)
    if new_nsec = options[:nsec]
      raise ArgumentError, "Can't change both :nsec and :usec at the same time: #{options.inspect}" if options[:usec]
      new_usec = Rational(new_nsec, 1000)
    else
      new_usec = options.fetch(:usec, (options[:hour] || options[:min] || options[:sec]) ? 0 : Rational(nsec, 1000))
    end
    raise ArgumentError, "argument out of range" if new_usec >= 1000000

    # Sub-second precision is carried through as a Rational fraction of a second.
    new_sec += Rational(new_usec, 1000000)
    if new_offset
      ::Time.new(new_year, new_month, new_day, new_hour, new_min, new_sec, new_offset)
    elsif utc?
      ::Time.utc(new_year, new_month, new_day, new_hour, new_min, new_sec)
    elsif zone&.respond_to?(:utc_to_local)
      # Ruby 3.1+ zone objects (e.g. ActiveSupport::TimeZone) can be passed directly.
      ::Time.new(new_year, new_month, new_day, new_hour, new_min, new_sec, zone)
    elsif zone
      ::Time.local(new_year, new_month, new_day, new_hour, new_min, new_sec)
    else
      ::Time.new(new_year, new_month, new_day, new_hour, new_min, new_sec, utc_offset)
    end
  end

  # Uses Date to provide precise Time calculations for years, months, and days
  # according to the proleptic Gregorian calendar. The +options+ parameter
  # takes a hash with any of these keys: <tt>:years</tt>, <tt>:months</tt>,
  # <tt>:weeks</tt>, <tt>:days</tt>, <tt>:hours</tt>, <tt>:minutes</tt>,
  # <tt>:seconds</tt>.
  #
  #   Time.new(2015, 8, 1, 14, 35, 0).advance(seconds: 1) # => 2015-08-01 14:35:01 -0700
  #   Time.new(2015, 8, 1, 14, 35, 0).advance(minutes: 1) # => 2015-08-01 14:36:00 -0700
  #   Time.new(2015, 8, 1, 14, 35, 0).advance(hours: 1)   # => 2015-08-01 15:35:00 -0700
  #   Time.new(2015, 8, 1, 14, 35, 0).advance(days: 1)    # => 2015-08-02 14:35:00 -0700
  #   Time.new(2015, 8, 1, 14, 35, 0).advance(weeks: 1)   # => 2015-08-08 14:35:00 -0700
  def advance(options)
    # Fractional weeks/days are pushed down into the next smaller unit so the
    # whole-unit part can be handled by Date#advance.
    unless options[:weeks].nil?
      options[:weeks], partial_weeks = options[:weeks].divmod(1)
      options[:days] = options.fetch(:days, 0) + 7 * partial_weeks
    end
    unless options[:days].nil?
      options[:days], partial_days = options[:days].divmod(1)
      options[:hours] = options.fetch(:hours, 0) + 24 * partial_days
    end
    d = to_date.gregorian.advance(options)
    time_advanced_by_date = change(year: d.year, month: d.month, day: d.day)
    seconds_to_advance = \
      options.fetch(:seconds, 0) +
      options.fetch(:minutes, 0) * 60 +
      options.fetch(:hours, 0) * 3600
    if seconds_to_advance.zero?
      time_advanced_by_date
    else
      time_advanced_by_date.since(seconds_to_advance)
    end
  end

  # Returns a new Time representing the time a number of seconds ago, this is basically a wrapper around the Numeric extension
  def ago(seconds)
    since(-seconds)
  end

  # Returns a new Time representing the time a number of seconds since the instance time
  def since(seconds)
    self + seconds
  rescue
    # Fall back for values outside Time's representable range.
    to_datetime.since(seconds)
  end
  alias :in :since

  # Returns a new Time representing the start of the day (0:00)
  def beginning_of_day
    change(hour: 0)
  end
  alias :midnight :beginning_of_day
  alias :at_midnight :beginning_of_day
  alias :at_beginning_of_day :beginning_of_day

  # Returns a new Time representing the middle of the day (12:00)
  def middle_of_day
    change(hour: 12)
  end
  alias :midday :middle_of_day
  alias :noon :middle_of_day
  alias :at_midday :middle_of_day
  alias :at_noon :middle_of_day
  alias :at_middle_of_day :middle_of_day

  # Returns a new Time representing the end of the day, 23:59:59.999999
  def end_of_day
    change(
      hour: 23,
      min: 59,
      sec: 59,
      usec: Rational(999999999, 1000)
    )
  end
  alias :at_end_of_day :end_of_day

  # Returns a new Time representing the start of the hour (x:00)
  def beginning_of_hour
    change(min: 0)
  end
  alias :at_beginning_of_hour :beginning_of_hour

  # Returns a new Time representing the end of the hour, x:59:59.999999
  def end_of_hour
    change(
      min: 59,
      sec: 59,
      usec: Rational(999999999, 1000)
    )
  end
  alias :at_end_of_hour :end_of_hour

  # Returns a new Time representing the start of the minute (x:xx:00)
  def beginning_of_minute
    change(sec: 0)
  end
  alias :at_beginning_of_minute :beginning_of_minute

  # Returns a new Time representing the end of the minute, x:xx:59.999999
  def end_of_minute
    change(
      sec: 59,
      usec: Rational(999999999, 1000)
    )
  end
  alias :at_end_of_minute :end_of_minute

  # Extends Time#+ so adding an ActiveSupport::Duration applies calendar-aware
  # arithmetic (months, years) instead of a plain seconds offset.
  def plus_with_duration(other) # :nodoc:
    if ActiveSupport::Duration === other
      other.since(self)
    else
      plus_without_duration(other)
    end
  end
  alias_method :plus_without_duration, :+
  alias_method :+, :plus_with_duration

  # Extends Time#- so subtracting an ActiveSupport::Duration applies
  # calendar-aware arithmetic, mirroring plus_with_duration.
  def minus_with_duration(other) # :nodoc:
    if ActiveSupport::Duration === other
      other.until(self)
    else
      minus_without_duration(other)
    end
  end
  alias_method :minus_without_duration, :-
  alias_method :-, :minus_with_duration

  # Time#- can also be used to determine the number of seconds between two Time instances.
  # We're layering on additional behavior so that ActiveSupport::TimeWithZone instances
  # are coerced into values that Time#- will recognize
  def minus_with_coercion(other)
    other = other.comparable_time if other.respond_to?(:comparable_time)
    other.is_a?(DateTime) ? to_f - other.to_f : minus_without_coercion(other)
  end
  alias_method :minus_without_coercion, :-
  alias_method :-, :minus_with_coercion # rubocop:disable Lint/DuplicateMethods

  # Layers additional behavior on Time#<=> so that DateTime and ActiveSupport::TimeWithZone instances
  # can be chronologically compared with a Time
  def compare_with_coercion(other)
    # we're avoiding Time#to_datetime and Time#to_time because they're expensive
    if other.class == Time
      compare_without_coercion(other)
    elsif other.is_a?(Time)
      # Time subclasses are normalized back to a plain Time first.
      compare_without_coercion(other.to_time)
    else
      to_datetime <=> other
    end
  end
  alias_method :compare_without_coercion, :<=>
  alias_method :<=>, :compare_with_coercion

  # Layers additional behavior on Time#eql? so that ActiveSupport::TimeWithZone instances
  # can be eql? to an equivalent Time
  def eql_with_coercion(other)
    # if other is an ActiveSupport::TimeWithZone, coerce a Time instance from it so we can do eql? comparison
    other = other.comparable_time if other.respond_to?(:comparable_time)
    eql_without_coercion(other)
  end
  alias_method :eql_without_coercion, :eql?
  alias_method :eql?, :eql_with_coercion

  # Returns a new time the specified number of days ago.
  def prev_day(days = 1)
    advance(days: -days)
  end

  # Returns a new time the specified number of days in the future.
  def next_day(days = 1)
    advance(days: days)
  end

  # Returns a new time the specified number of months ago.
  def prev_month(months = 1)
    advance(months: -months)
  end

  # Returns a new time the specified number of months in the future.
  def next_month(months = 1)
    advance(months: months)
  end

  # Returns a new time the specified number of years ago.
  def prev_year(years = 1)
    advance(years: -years)
  end

  # Returns a new time the specified number of years in the future.
  def next_year(years = 1)
    advance(years: years)
  end
end
# frozen_string_literal: true | |
require_relative "abstract_unit" | |
# Fixture hierarchy exercising how ActiveSupport::Callbacks interact with
# inheritance: conditional callbacks, skip_callback in subclasses, and
# callbacks added to a superclass after subclasses already exist.
module CallbackInheritanceTestFixtures
  # Defines a :dispatch chain with conditional before/after callbacks and
  # records every invocation in +log+ for the tests to assert against.
  class GrandParent
    include ActiveSupport::Callbacks

    attr_reader :log, :action_name

    def initialize(action_name)
      @action_name, @log = action_name, []
    end

    define_callbacks :dispatch
    # before callbacks fire for "index"/"update"; after callbacks for "update"/"delete".
    set_callback :dispatch, :before, :before1, :before2, if: proc { |c| c.action_name == "index" || c.action_name == "update" }
    set_callback :dispatch, :after, :after1, :after2, if: proc { |c| c.action_name == "update" || c.action_name == "delete" }

    def before1
      @log << "before1"
    end

    def before2
      @log << "before2"
    end

    def after1
      @log << "after1"
    end

    def after2
      @log << "after2"
    end

    # Runs the :dispatch chain, logging the action name inside the block.
    # Returns self so tests can chain assertions off the dispatched instance.
    def dispatch
      run_callbacks :dispatch do
        @log << action_name
      end
      self
    end
  end

  # Skips inherited callbacks conditionally, without touching GrandParent.
  class Parent < GrandParent
    skip_callback :dispatch, :before, :before2, unless: proc { |c| c.action_name == "update" }
    skip_callback :dispatch, :after, :after2, unless: proc { |c| c.action_name == "delete" }
  end

  # Mixes an :unless proc and an :if method reference on the same skip,
  # so both must allow the skip for :before2 to be removed.
  class Child < GrandParent
    skip_callback :dispatch, :before, :before2, unless: proc { |c| c.action_name == "update" }, if: :state_open?

    def state_open?
      @state == :open
    end

    def initialize(action_name, state)
      super(action_name)
      @state = state
    end
  end

  # Has a :dispatch chain with no callbacks of its own; tests add callbacks
  # dynamically and check they reach already-defined subclasses.
  class EmptyParent
    include ActiveSupport::Callbacks

    def performed?
      @performed ||= false
    end

    define_callbacks :dispatch

    def perform!
      @performed = true
    end

    def dispatch
      run_callbacks :dispatch
      self
    end
  end

  class EmptyChild < EmptyParent
    set_callback :dispatch, :before, :do_nothing

    def do_nothing
    end
  end

  # Counts callback invocations so tests can prove a parent-defined callback
  # runs exactly once in a child class.
  class CountingParent
    include ActiveSupport::Callbacks

    attr_reader :count

    define_callbacks :dispatch

    def initialize
      @count = 0
    end

    def count!
      @count += 1
    end

    def dispatch
      run_callbacks(:dispatch)
      self
    end
  end

  class CountingChild < CountingParent
  end
end
# Exercises the conditional :if procs declared directly on GrandParent.
class BasicCallbacksTest < ActiveSupport::TestCase
  include CallbackInheritanceTestFixtures

  def setup
    @index, @update, @delete =
      %w(index update delete).map { |action| GrandParent.new(action).dispatch }
  end

  def test_basic_conditional_callback1
    assert_equal ["before1", "before2", "index"], @index.log
  end

  def test_basic_conditional_callback2
    assert_equal ["before1", "before2", "update", "after2", "after1"], @update.log
  end

  def test_basic_conditional_callback3
    assert_equal ["delete", "after2", "after1"], @delete.log
  end
end
# Verifies skip_callback declared on Parent removes inherited callbacks
# according to its own :unless conditions.
class InheritedCallbacksTest < ActiveSupport::TestCase
  include CallbackInheritanceTestFixtures

  def setup
    @index, @update, @delete =
      %w(index update delete).map { |action| Parent.new(action).dispatch }
  end

  def test_inherited_excluded
    assert_equal ["before1", "index"], @index.log
  end

  def test_inherited_not_excluded
    assert_equal ["before1", "before2", "update", "after1"], @update.log
  end

  def test_partially_excluded
    assert_equal ["delete", "after2", "after1"], @delete.log
  end
end
# Verifies a skip_callback that mixes :unless and :if conditions (see Child).
class InheritedCallbacksTest2 < ActiveSupport::TestCase
  include CallbackInheritanceTestFixtures

  def setup
    @update1, @update2 =
      [:open, :closed].map { |state| Child.new("update", state).dispatch }
  end

  def test_complex_mix_on
    assert_equal ["before1", "update", "after2", "after1"], @update1.log
  end

  def test_complex_mix_off
    assert_equal ["before1", "before2", "update", "after2", "after1"], @update2.log
  end
end
# Callbacks added to a superclass at runtime must be visible to subclasses
# that were defined earlier, and must run exactly once per dispatch.
class DynamicInheritedCallbacks < ActiveSupport::TestCase
  include CallbackInheritanceTestFixtures

  def test_callbacks_looks_to_the_superclass_before_running
    # Before the parent gains the callback, dispatch performs nothing.
    assert_not_predicate EmptyChild.new.dispatch, :performed?
    EmptyParent.set_callback :dispatch, :before, :perform!
    # The already-defined child picks up the new parent callback.
    assert_predicate EmptyChild.new.dispatch, :performed?
  end

  def test_callbacks_should_be_performed_once_in_child_class
    CountingParent.set_callback(:dispatch, :before) { count! }
    assert_equal 1, CountingChild.new.dispatch.count
  end
end
# A callback chain defined on the parent after subclasses exist should still
# run exactly once when invoked through a subclass instance.
class DynamicDefinedCallbacks < ActiveSupport::TestCase
  include CallbackInheritanceTestFixtures

  def test_callbacks_should_be_performed_once_in_child_class_after_dynamic_define
    GrandParent.define_callbacks(:foo)
    GrandParent.set_callback(:foo, :before, :before1)
    instance = Parent.new("foo")
    instance.run_callbacks(:foo)
    assert_equal ["before1"], instance.log
  end
end
# frozen_string_literal: true | |
require "active_support/concern" | |
require "active_support/descendants_tracker" | |
require "active_support/core_ext/array/extract_options" | |
require "active_support/core_ext/class/attribute" | |
require "active_support/core_ext/string/filters" | |
require "active_support/core_ext/object/blank" | |
require "thread" | |
module ActiveSupport | |
# Callbacks are code hooks that are run at key points in an object's life cycle. | |
# The typical use case is to have a base class define a set of callbacks | |
# relevant to the other functionality it supplies, so that subclasses can | |
# install callbacks that enhance or modify the base functionality without | |
# needing to override or redefine methods of the base class. | |
# | |
# Mixing in this module allows you to define the events in the object's | |
# life cycle that will support callbacks (via ClassMethods#define_callbacks), | |
# set the instance methods, procs, or callback objects to be called (via | |
# ClassMethods#set_callback), and run the installed callbacks at the | |
# appropriate times (via +run_callbacks+). | |
# | |
# By default callbacks are halted by throwing +:abort+. | |
# See ClassMethods#define_callbacks for details. | |
# | |
# Three kinds of callbacks are supported: before callbacks, run before a | |
# certain event; after callbacks, run after the event; and around callbacks, | |
# blocks that surround the event, triggering it when they yield. Callback code | |
# can be contained in instance methods, procs or lambdas, or callback objects | |
# that respond to certain predetermined methods. See ClassMethods#set_callback | |
# for details. | |
# | |
# class Record | |
# include ActiveSupport::Callbacks | |
# define_callbacks :save | |
# | |
# def save | |
# run_callbacks :save do | |
# puts "- save" | |
# end | |
# end | |
# end | |
# | |
# class PersonRecord < Record | |
# set_callback :save, :before, :saving_message | |
# def saving_message | |
# puts "saving..." | |
# end | |
# | |
# set_callback :save, :after do |object| | |
# puts "saved" | |
# end | |
# end | |
# | |
# person = PersonRecord.new | |
# person.save | |
# | |
# Output: | |
# saving... | |
# - save | |
# saved | |
module Callbacks | |
extend Concern | |
    included do
      extend ActiveSupport::DescendantsTracker
      # Per-class registry of callback chains, keyed by event name.
      # instance_writer: false prevents instances from replacing the registry.
      class_attribute :__callbacks, instance_writer: false, default: {}
    end

    # The only filter kinds a callback may be registered under.
    CALLBACK_FILTER_TYPES = [:before, :after, :around]
# Runs the callbacks for the given event. | |
# | |
# Calls the before and around callbacks in the order they were set, yields | |
# the block (if given one), and then runs the after callbacks in reverse | |
# order. | |
# | |
# If the callback chain was halted, returns +false+. Otherwise returns the | |
# result of the block, +nil+ if no callbacks have been set, or +true+ | |
# if callbacks have been set but no block is given. | |
# | |
# run_callbacks :save do | |
# save | |
# end | |
# | |
#-- | |
# | |
# As this method is used in many places, and often wraps large portions of | |
# user code, it has an additional design goal of minimizing its impact on | |
# the visible call stack. An exception from inside a :before or :after | |
# callback can be as noisy as it likes -- but when control has passed | |
# smoothly through and into the supplied block, we want as little evidence | |
# as possible that we were here. | |
    # Runs the compiled callback chain for +kind+ around the optional block.
    # See the comment block above for the full contract; the code below is
    # shaped to keep the visible call stack shallow in the common cases.
    def run_callbacks(kind)
      callbacks = __callbacks[kind.to_sym]
      if callbacks.empty?
        # No callbacks registered: just run the body, if any.
        yield if block_given?
      else
        # Environment threads (target, halted, value) through every filter.
        env = Filters::Environment.new(self, false, nil)
        next_sequence = callbacks.compile
        # Common case: no 'around' callbacks defined
        if next_sequence.final?
          next_sequence.invoke_before(env)
          # value is false when halted; otherwise the block's result (or true
          # when no block was given).
          env.value = !env.halted && (!block_given? || yield)
          next_sequence.invoke_after(env)
          env.value
        else
          # Around callbacks nest: each sequence level wraps the next via the
          # invoke_sequence proc, which the around filter calls to yield.
          invoke_sequence = Proc.new do
            skipped = nil
            while true
              current = next_sequence
              current.invoke_before(env)
              if current.final?
                env.value = !env.halted && (!block_given? || yield)
              elsif current.skip?(env)
                # Conditions say this around shouldn't run; remember it so its
                # after-filters still fire (in reverse) on the way out.
                (skipped ||= []) << current
                next_sequence = next_sequence.nested
                next
              else
                next_sequence = next_sequence.nested
                begin
                  target, block, method, *arguments = current.expand_call_template(env, invoke_sequence)
                  target.send(method, *arguments, &block)
                ensure
                  # Restore the level even if the around callback raised.
                  next_sequence = current
                end
              end
              current.invoke_after(env)
              skipped.pop.invoke_after(env) while skipped&.first
              break env.value
            end
          end
          invoke_sequence.call
        end
      end
    end
    private
      # A hook invoked every time a before callback is halted.
      # This can be overridden in ActiveSupport::Callbacks implementors in order
      # to provide better debugging/logging.
      #
      # +filter+ is the callback filter that halted the chain and +name+ is the
      # name of the callback event. The default implementation is a no-op.
      def halted_callback_hook(filter, name)
      end
module Conditionals # :nodoc: | |
class Value | |
def initialize(&block) | |
@block = block | |
end | |
def call(target, value); @block.call(value); end | |
end | |
end | |
    module Filters
      # Mutable state threaded through a callback chain:
      # target - the object running the callbacks
      # halted - true once the terminator halts the chain
      # value  - the eventual return value of run_callbacks
      Environment = Struct.new(:target, :halted, :value)

      # Builds the before-filter wrappers. Before callbacks consult the
      # chain's :terminator to decide whether the chain halts.
      class Before
        def self.build(callback_sequence, user_callback, user_conditions, chain_config, filter, name)
          halted_lambda = chain_config[:terminator]
          if user_conditions.any?
            halting_and_conditional(callback_sequence, user_callback, user_conditions, halted_lambda, filter, name)
          else
            halting(callback_sequence, user_callback, user_conditions, halted_lambda, filter, name)
          end
        end

        # Runs the callback only when every :if/:unless condition passes,
        # and halts the chain when the terminator says so.
        def self.halting_and_conditional(callback_sequence, user_callback, user_conditions, halted_lambda, filter, name)
          callback_sequence.before do |env|
            target = env.target
            value = env.value
            halted = env.halted
            if !halted && user_conditions.all? { |c| c.call(target, value) }
              # The terminator receives a lambda so it can decide to halt
              # based on the callback's result without us calling it eagerly.
              result_lambda = -> { user_callback.call target, value }
              env.halted = halted_lambda.call(target, result_lambda)
              if env.halted
                target.send :halted_callback_hook, filter, name
              end
            end
            env
          end
        end
        private_class_method :halting_and_conditional

        # Same as above but without user conditions to evaluate.
        def self.halting(callback_sequence, user_callback, halted_lambda, filter, name)
          callback_sequence.before do |env|
            target = env.target
            value = env.value
            halted = env.halted
            unless halted
              result_lambda = -> { user_callback.call target, value }
              env.halted = halted_lambda.call(target, result_lambda)
              if env.halted
                target.send :halted_callback_hook, filter, name
              end
            end
            env
          end
        end
        private_class_method :halting
      end

      # Builds the after-filter wrappers. Whether a halted chain still runs
      # its after callbacks depends on :skip_after_callbacks_if_terminated.
      class After
        def self.build(callback_sequence, user_callback, user_conditions, chain_config)
          if chain_config[:skip_after_callbacks_if_terminated]
            if user_conditions.any?
              halting_and_conditional(callback_sequence, user_callback, user_conditions)
            else
              halting(callback_sequence, user_callback)
            end
          else
            if user_conditions.any?
              conditional callback_sequence, user_callback, user_conditions
            else
              simple callback_sequence, user_callback
            end
          end
        end

        # Skipped when halted; also gated on user conditions.
        def self.halting_and_conditional(callback_sequence, user_callback, user_conditions)
          callback_sequence.after do |env|
            target = env.target
            value = env.value
            halted = env.halted
            if !halted && user_conditions.all? { |c| c.call(target, value) }
              user_callback.call target, value
            end
            env
          end
        end
        private_class_method :halting_and_conditional

        # Skipped when halted; no user conditions.
        def self.halting(callback_sequence, user_callback)
          callback_sequence.after do |env|
            unless env.halted
              user_callback.call env.target, env.value
            end
            env
          end
        end
        private_class_method :halting

        # Runs even when halted, gated only on user conditions.
        def self.conditional(callback_sequence, user_callback, user_conditions)
          callback_sequence.after do |env|
            target = env.target
            value = env.value
            if user_conditions.all? { |c| c.call(target, value) }
              user_callback.call target, value
            end
            env
          end
        end
        private_class_method :conditional

        # Runs unconditionally.
        def self.simple(callback_sequence, user_callback)
          callback_sequence.after do |env|
            user_callback.call env.target, env.value
            env
          end
        end
        private_class_method :simple
      end
    end
    # A single registered callback: the filter to invoke, its kind
    # (:before/:after/:around), and its :if/:unless conditions.
    class Callback # :nodoc:
      def self.build(chain, filter, kind, options)
        if filter.is_a?(String)
          raise ArgumentError, <<-MSG.squish
            Passing string to define a callback is not supported. See the `.set_callback`
            documentation to see supported values.
          MSG
        end
        new chain.name, filter, kind, options, chain.config
      end

      attr_accessor :kind, :name
      attr_reader :chain_config, :filter

      def initialize(name, filter, kind, options, chain_config)
        @chain_config = chain_config
        @name = name
        @kind = kind
        @filter = filter
        @if = check_conditionals(options[:if])
        @unless = check_conditionals(options[:unless])
      end

      # Used by skip_callback with conditions: returns a copy of this callback
      # whose conditions are crossed with the skip's. Note the deliberate
      # inversion below — the skip's :unless feeds this callback's :if and the
      # skip's :if feeds :unless, so the callback runs only when the skip does
      # not apply.
      def merge_conditional_options(chain, if_option:, unless_option:)
        options = {
          if: @if.dup,
          unless: @unless.dup
        }
        options[:if].concat Array(unless_option)
        options[:unless].concat Array(if_option)
        self.class.build chain, @filter, @kind, options
      end

      def matches?(_kind, _filter)
        @kind == _kind && filter == _filter
      end

      # Only symbol filters can be detected as duplicates; proc/object filters
      # are never considered equal to one another.
      def duplicates?(other)
        case @filter
        when Symbol
          matches?(other.kind, other.filter)
        else
          false
        end
      end

      # Wraps code with filter
      def apply(callback_sequence)
        user_conditions = conditions_lambdas
        user_callback = CallTemplate.build(@filter, self)
        case kind
        when :before
          Filters::Before.build(callback_sequence, user_callback.make_lambda, user_conditions, chain_config, @filter, name)
        when :after
          Filters::After.build(callback_sequence, user_callback.make_lambda, user_conditions, chain_config)
        when :around
          callback_sequence.around(user_callback, user_conditions)
        end
      end

      def current_scopes
        Array(chain_config[:scope]).map { |s| public_send(s) }
      end

      private
        EMPTY_ARRAY = [].freeze
        private_constant :EMPTY_ARRAY

        # Normalizes :if/:unless options into a frozen array, rejecting the
        # removed string-eval form with a helpful error.
        def check_conditionals(conditionals)
          return EMPTY_ARRAY if conditionals.blank?

          conditionals = Array(conditionals)
          if conditionals.any?(String)
            raise ArgumentError, <<-MSG.squish
              Passing string to be evaluated in :if and :unless conditional
              options is not supported. Pass a symbol for an instance method,
              or a lambda, proc or block, instead.
            MSG
          end
          conditionals.freeze
        end

        # :if conditions must be truthy, :unless conditions falsy, for the
        # callback to run — hence the inverted lambdas for @unless.
        def conditions_lambdas
          @if.map { |c| CallTemplate.build(c, self).make_lambda } +
            @unless.map { |c| CallTemplate.build(c, self).inverted_lambda }
        end
    end
# A future invocation of user-supplied code (either as a callback, | |
# or a condition filter). | |
module CallTemplate # :nodoc: | |
class MethodCall | |
def initialize(method) | |
@method_name = method | |
end | |
# Return the parts needed to make this call, with the given | |
# input values. | |
# | |
# Returns an array of the form: | |
# | |
# [target, block, method, *arguments] | |
# | |
# This array can be used as such: | |
# | |
# target.send(method, *arguments, &block) | |
# | |
# The actual invocation is left up to the caller to minimize | |
# call stack pollution. | |
def expand(target, value, block) | |
[target, block, @method_name] | |
end | |
def make_lambda | |
lambda do |target, value, &block| | |
target.send(@method_name, &block) | |
end | |
end | |
def inverted_lambda | |
lambda do |target, value, &block| | |
!target.send(@method_name, &block) | |
end | |
end | |
end | |
class ObjectCall | |
def initialize(target, method) | |
@override_target = target | |
@method_name = method | |
end | |
def expand(target, value, block) | |
[@override_target || target, block, @method_name, target] | |
end | |
def make_lambda | |
lambda do |target, value, &block| | |
(@override_target || target).send(@method_name, target, &block) | |
end | |
end | |
def inverted_lambda | |
lambda do |target, value, &block| | |
!(@override_target || target).send(@method_name, target, &block) | |
end | |
end | |
end | |
class InstanceExec0 | |
def initialize(block) | |
@override_block = block | |
end | |
def expand(target, value, block) | |
[target, @override_block, :instance_exec] | |
end | |
def make_lambda | |
lambda do |target, value, &block| | |
target.instance_exec(&@override_block) | |
end | |
end | |
def inverted_lambda | |
lambda do |target, value, &block| | |
!target.instance_exec(&@override_block) | |
end | |
end | |
end | |
class InstanceExec1 | |
def initialize(block) | |
@override_block = block | |
end | |
def expand(target, value, block) | |
[target, @override_block, :instance_exec, target] | |
end | |
def make_lambda | |
lambda do |target, value, &block| | |
target.instance_exec(target, &@override_block) | |
end | |
end | |
def inverted_lambda | |
lambda do |target, value, &block| | |
!target.instance_exec(target, &@override_block) | |
end | |
end | |
end | |
# CallTemplate shape for two-argument procs (around callbacks): executed
# in the target's context with the target and the downstream block as
# arguments. A block is mandatory for this shape.
class InstanceExec2
  def initialize(block)
    @override_block = block
  end

  def expand(target, value, block)
    raise ArgumentError unless block
    [target, @override_block || block, :instance_exec, target, block]
  end

  def make_lambda
    ->(target, _value, &block) do
      raise ArgumentError unless block
      target.instance_exec(target, block, &@override_block)
    end
  end

  def inverted_lambda
    ->(target, _value, &block) do
      raise ArgumentError unless block
      !target.instance_exec(target, block, &@override_block)
    end
  end
end
# CallTemplate shape for conditional values (Conditionals::Value): the
# stored callable receives the run-time target and value via #call.
class ProcCall
  def initialize(target)
    @override_target = target
  end

  def expand(target, value, block)
    receiver = @override_target || target
    [receiver, block, :call, target, value]
  end

  def make_lambda
    ->(target, value, &block) { (@override_target || target).call(target, value, &block) }
  end

  def inverted_lambda
    ->(target, value, &block) { !(@override_target || target).call(target, value, &block) }
  end
end
# Filters support:
#
#   Symbols:: a method name to call on the target.
#   Procs::   a proc executed in the target's context.
#   Objects:: an object exposing e.g. <tt>before_foo</tt> to call.
#
# Every filter is normalized to one of the CallTemplate shapes above so
# the rest of the callback machinery can treat them uniformly.
def self.build(filter, callback)
  case filter
  when Symbol
    MethodCall.new(filter)
  when Conditionals::Value
    ProcCall.new(filter)
  when ::Proc
    # Arity decides how much context the proc receives: target and the
    # downstream block (2+), the target only (1), or nothing (0 or a
    # bare splat, whose arity is negative).
    case
    when filter.arity > 1 then InstanceExec2.new(filter)
    when filter.arity > 0 then InstanceExec1.new(filter)
    else InstanceExec0.new(filter)
    end
  else
    ObjectCall.new(filter, callback.current_scopes.join("_").to_sym)
  end
end
end | |
# Execute before and after filters in a sequence instead of chaining them
# with nested lambda calls, see:
# https://github.com/rails/rails/issues/18011
class CallbackSequence # :nodoc:
  attr_reader :nested

  def initialize(nested = nil, call_template = nil, user_conditions = nil)
    @nested = nested
    @call_template = call_template
    @user_conditions = user_conditions
    @before = []
    @after = []
  end

  # Each new before hook is placed at the front of the list.
  def before(&hook)
    @before.unshift(hook)
    self
  end

  # After hooks are appended in registration order.
  def after(&hook)
    @after.push(hook)
    self
  end

  # Wraps this sequence in a new one representing an :around callback.
  def around(call_template, user_conditions)
    CallbackSequence.new(self, call_template, user_conditions)
  end

  # True when the run is already halted or any user condition fails.
  def skip?(arg)
    arg.halted || !@user_conditions.all? { |c| c.call(arg.target, arg.value) }
  end

  # The innermost sequence carries no call template.
  def final?
    !@call_template
  end

  def expand_call_template(arg, block)
    @call_template.expand(arg.target, arg.value, block)
  end

  def invoke_before(arg)
    @before.each { |hook| hook.call(arg) }
  end

  def invoke_after(arg)
    @after.each { |hook| hook.call(arg) }
  end
end
# Holds the ordered list of Callback objects for a single event and
# lazily compiles them into a CallbackSequence. Every mutation
# invalidates the memoized compilation.
class CallbackChain # :nodoc:
  include Enumerable

  attr_reader :name, :config

  def initialize(name, config)
    @name = name
    # Defaults: object filters dispatch on callback kind only, and a
    # chain halts when a filter throws :abort.
    @config = {
      scope: [:kind],
      terminator: default_terminator
    }.merge!(config)
    @chain = []
    @callbacks = nil
    @mutex = Mutex.new
  end

  def each(&block)
    @chain.each(&block)
  end

  def index(o)
    @chain.index(o)
  end

  def empty?
    @chain.empty?
  end

  def insert(index, o)
    @callbacks = nil
    @chain.insert(index, o)
  end

  def delete(o)
    @callbacks = nil
    @chain.delete(o)
  end

  def clear
    @callbacks = nil
    @chain.clear
    self
  end

  # Copies get their own backing array and mutex; the compiled cache is
  # not shared.
  def initialize_copy(other)
    @callbacks = nil
    @chain = other.chain.dup
    @mutex = Mutex.new
  end

  # Folds the chain (innermost callback first) into a CallbackSequence.
  # The mutex only guards the first compilation; the inner ||= keeps the
  # first winner, and later calls hit the memoized value without locking.
  def compile
    @callbacks || @mutex.synchronize do
      final_sequence = CallbackSequence.new
      @callbacks ||= @chain.reverse.inject(final_sequence) do |seq, callback|
        callback.apply seq
      end
    end
  end

  def append(*callbacks)
    callbacks.each { |callback| append_one(callback) }
  end

  def prepend(*callbacks)
    callbacks.each { |callback| prepend_one(callback) }
  end

  protected
    attr_reader :chain

  private
    def append_one(callback)
      @callbacks = nil
      remove_duplicates(callback)
      @chain.push(callback)
    end

    def prepend_one(callback)
      @callbacks = nil
      remove_duplicates(callback)
      @chain.unshift(callback)
    end

    # Re-registering an equivalent filter replaces the old registration.
    def remove_duplicates(callback)
      @callbacks = nil
      @chain.delete_if { |c| callback.duplicates?(c) }
    end

    # Halts when the filter's result lambda throws :abort.
    def default_terminator
      proc do |target, result_lambda|
        terminate = true
        catch(:abort) do
          result_lambda.call
          terminate = false
        end
        terminate
      end
    end
end
module ClassMethods
  # Splits a raw filter list into [type, filters, options]: the leading
  # element is taken as the callback type when it is a known filter type
  # (defaulting to :before), a trailing hash becomes the options, and a
  # given block is treated as one more filter.
  def normalize_callback_params(filters, block) # :nodoc:
    type = CALLBACK_FILTER_TYPES.include?(filters.first) ? filters.shift : :before
    options = filters.extract_options!
    filters.unshift(block) if block
    [type, filters, options.dup]
  end

  # This is used internally to append, prepend and skip callbacks to the
  # CallbackChain.
  #
  # Yields every class in the hierarchy (descendants before self, due to
  # reverse_each) together with a copy of its chain, so each class
  # mutates its own chain rather than a shared one.
  def __update_callbacks(name) # :nodoc:
    ([self] + self.descendants).reverse_each do |target|
      chain = target.get_callbacks name
      yield target, chain.dup
    end
  end

  # Install a callback for the given event.
  #
  #   set_callback :save, :before, :before_method
  #   set_callback :save, :after, :after_method, if: :condition
  #   set_callback :save, :around, ->(r, block) { stuff; result = block.call; stuff }
  #
  # The second argument indicates whether the callback is to be run +:before+,
  # +:after+, or +:around+ the event. If omitted, +:before+ is assumed. This
  # means the first example above can also be written as:
  #
  #   set_callback :save, :before_method
  #
  # The callback can be specified as a symbol naming an instance method; as a
  # proc, lambda, or block; or as an object that responds to a certain method
  # determined by the <tt>:scope</tt> argument to +define_callbacks+.
  #
  # If a proc, lambda, or block is given, its body is evaluated in the context
  # of the current object. It can also optionally accept the current object as
  # an argument.
  #
  # Before and around callbacks are called in the order that they are set;
  # after callbacks are called in the reverse order.
  #
  # Around callbacks can access the return value from the event, if it
  # wasn't halted, from the +yield+ call.
  #
  # ===== Options
  #
  # * <tt>:if</tt> - A symbol or an array of symbols, each naming an instance
  #   method or a proc; the callback will be called only when they all return
  #   a true value.
  #
  #   If a proc is given, its body is evaluated in the context of the
  #   current object. It can also optionally accept the current object as
  #   an argument.
  # * <tt>:unless</tt> - A symbol or an array of symbols, each naming an
  #   instance method or a proc; the callback will be called only when they
  #   all return a false value.
  #
  #   If a proc is given, its body is evaluated in the context of the
  #   current object. It can also optionally accept the current object as
  #   an argument.
  # * <tt>:prepend</tt> - If +true+, the callback will be prepended to the
  #   existing chain rather than appended.
  def set_callback(name, *filter_list, &block)
    type, filters, options = normalize_callback_params(filter_list, block)
    self_chain = get_callbacks name
    # Build the Callback objects once, then install the same instances
    # into this class's chain and every descendant's chain.
    mapped = filters.map do |filter|
      Callback.build(self_chain, filter, type, options)
    end
    __update_callbacks(name) do |target, chain|
      options[:prepend] ? chain.prepend(*mapped) : chain.append(*mapped)
      target.set_callbacks name, chain
    end
  end

  # Skip a previously set callback. Like +set_callback+, <tt>:if</tt> or
  # <tt>:unless</tt> options may be passed in order to control when the
  # callback is skipped.
  #
  #   class Writer < PersonRecord
  #     attr_accessor :age
  #     skip_callback :save, :before, :saving_message, if: -> { age > 18 }
  #   end
  #
  # When if option returns true, callback is skipped.
  #
  #   writer = Writer.new
  #   writer.age = 20
  #   writer.save
  #
  # Output:
  #   - save
  #   saved
  #
  # When if option returns false, callback is NOT skipped.
  #
  #   young_writer = Writer.new
  #   young_writer.age = 17
  #   young_writer.save
  #
  # Output:
  #   saving...
  #   - save
  #   saved
  #
  # An <tt>ArgumentError</tt> will be raised if the callback has not
  # already been set (unless the <tt>:raise</tt> option is set to <tt>false</tt>).
  def skip_callback(name, *filter_list, &block)
    type, filters, options = normalize_callback_params(filter_list, block)
    options[:raise] = true unless options.key?(:raise)
    __update_callbacks(name) do |target, chain|
      filters.each do |filter|
        callback = chain.find { |c| c.matches?(type, filter) }
        if !callback && options[:raise]
          raise ArgumentError, "#{type.to_s.capitalize} #{name} callback #{filter.inspect} has not been defined"
        end
        # With :if/:unless, the callback is replaced in place by a copy
        # carrying the merged conditions instead of being removed.
        if callback && (options.key?(:if) || options.key?(:unless))
          new_callback = callback.merge_conditional_options(chain, if_option: options[:if], unless_option: options[:unless])
          chain.insert(chain.index(callback), new_callback)
        end
        # No-op when callback is nil (not found and :raise was false).
        chain.delete(callback)
      end
      target.set_callbacks name, chain
    end
  end

  # Remove all set callbacks for the given event.
  def reset_callbacks(name)
    callbacks = get_callbacks name
    # Strip this class's callbacks out of every descendant's chain,
    # leaving any callbacks the descendants registered themselves.
    self.descendants.each do |target|
      chain = target.get_callbacks(name).dup
      callbacks.each { |c| chain.delete(c) }
      target.set_callbacks name, chain
    end
    set_callbacks(name, callbacks.dup.clear)
  end

  # Define sets of events in the object life cycle that support callbacks.
  #
  #   define_callbacks :validate
  #   define_callbacks :initialize, :save, :destroy
  #
  # ===== Options
  #
  # * <tt>:terminator</tt> - Determines when a before filter will halt the
  #   callback chain, preventing following before and around callbacks from
  #   being called and the event from being triggered.
  #   This should be a lambda to be executed.
  #   The current object and the result lambda of the callback will be provided
  #   to the terminator lambda.
  #
  #     define_callbacks :validate, terminator: ->(target, result_lambda) { result_lambda.call == false }
  #
  #   In this example, if any before validate callbacks returns +false+,
  #   any successive before and around callback is not executed.
  #
  #   The default terminator halts the chain when a callback throws +:abort+.
  #
  # * <tt>:skip_after_callbacks_if_terminated</tt> - Determines if after
  #   callbacks should be terminated by the <tt>:terminator</tt> option. By
  #   default after callbacks are executed no matter if callback chain was
  #   terminated or not. This option has no effect if <tt>:terminator</tt>
  #   option is set to +nil+.
  #
  # * <tt>:scope</tt> - Indicates which methods should be executed when an
  #   object is used as a callback.
  #
  #     class Audit
  #       def before(caller)
  #         puts 'Audit: before is called'
  #       end
  #
  #       def before_save(caller)
  #         puts 'Audit: before_save is called'
  #       end
  #     end
  #
  #     class Account
  #       include ActiveSupport::Callbacks
  #
  #       define_callbacks :save
  #       set_callback :save, :before, Audit.new
  #
  #       def save
  #         run_callbacks :save do
  #           puts 'save in main'
  #         end
  #       end
  #     end
  #
  #   In the above case whenever you save an account the method
  #   <tt>Audit#before</tt> will be called. On the other hand
  #
  #     define_callbacks :save, scope: [:kind, :name]
  #
  #   would trigger <tt>Audit#before_save</tt> instead. That's constructed
  #   by calling <tt>#{kind}_#{name}</tt> on the given instance. In this
  #   case "kind" is "before" and "name" is "save". In this context +:kind+
  #   and +:name+ have special meanings: +:kind+ refers to the kind of
  #   callback (before/after/around) and +:name+ refers to the method on
  #   which callbacks are being defined.
  #
  #   A declaration like
  #
  #     define_callbacks :save, scope: [:name]
  #
  #   would call <tt>Audit#save</tt>.
  #
  # ===== Notes
  #
  # +names+ passed to +define_callbacks+ must not end with
  # <tt>!</tt>, <tt>?</tt> or <tt>=</tt>.
  #
  # Calling +define_callbacks+ multiple times with the same +names+ will
  # overwrite previous callbacks registered with +set_callback+.
  def define_callbacks(*names)
    options = names.extract_options!
    names.each do |name|
      name = name.to_sym
      # Installing a fresh chain everywhere is what makes repeated
      # define_callbacks calls overwrite earlier registrations.
      ([self] + self.descendants).each do |target|
        target.set_callbacks name, CallbackChain.new(name, options)
      end
      # Generated per-event accessors avoid dynamic name construction at
      # each call site.
      module_eval <<-RUBY, __FILE__, __LINE__ + 1
        def _run_#{name}_callbacks(&block)
          run_callbacks #{name.inspect}, &block
        end
        def self._#{name}_callbacks
          get_callbacks(#{name.inspect})
        end
        def self._#{name}_callbacks=(value)
          set_callbacks(#{name.inspect}, value)
        end
        def _#{name}_callbacks
          __callbacks[#{name.inspect}]
        end
      RUBY
    end
  end

  protected
    def get_callbacks(name) # :nodoc:
      __callbacks[name.to_sym]
    end

    # Copy-on-write: give this class its own __callbacks hash (checked via
    # the non-inherited singleton method) before storing, so the
    # superclass's hash is never mutated from a subclass.
    def set_callbacks(name, callbacks) # :nodoc:
      unless singleton_class.method_defined?(:__callbacks, false)
        self.__callbacks = __callbacks.dup
      end
      self.__callbacks[name.to_sym] = callbacks
      self.__callbacks
    end
end
end | |
end |
# frozen_string_literal: true | |
require_relative "abstract_unit" | |
require "active_support/core_ext/kernel/singleton_class" | |
module CallbacksTest | |
# Base fixture: a callback-enabled model with before_save/after_save
# macros plus factory helpers that build symbol, proc, and object filters,
# all of which record their invocation in #history.
class Record
  include ActiveSupport::Callbacks

  define_callbacks :save

  def self.before_save(*filters, &blk)
    set_callback(:save, :before, *filters, &blk)
  end

  def self.after_save(*filters, &blk)
    set_callback(:save, :after, *filters, &blk)
  end

  class << self
    # Defines <callback>_method on the class and returns its name so it
    # can be registered as a symbol filter.
    def callback_symbol(callback_method)
      method_name = :"#{callback_method}_method"
      define_method(method_name) do
        history << [callback_method, :symbol]
      end
      method_name
    end

    # A proc filter; receives the model as its single argument.
    def callback_proc(callback_method)
      Proc.new { |model| model.history << [callback_method, :proc] }
    end

    # An object filter: an anonymous class instance responding to the
    # callback kind (e.g. #before), matching the default :kind scope.
    def callback_object(callback_method)
      klass = Class.new
      klass.define_method(callback_method) do |model|
        model.history << [:"#{callback_method}_save", :object]
      end
      klass.new
    end
  end

  # Lazily-initialized invocation log shared by all filter styles.
  def history
    @history ||= []
  end
end
# A class used directly as a filter object: it responds to the callback
# kinds (+before+/+after+) at the class level and records invocations on
# the model's history.
class CallbackClass
  class << self
    def before(model)
      model.history.push([:before_save, :class])
    end

    def after(model)
      model.history.push([:after_save, :class])
    end
  end
end
# Registers one callback of every supported style (symbol, proc, object,
# class, block) for both :before and :after; tests assert the exact
# resulting #history order.
class Person < Record
  attr_accessor :save_fails

  [:before_save, :after_save].each do |callback_method|
    callback_method_sym = callback_method.to_sym
    public_send(callback_method, callback_symbol(callback_method_sym))
    public_send(callback_method, callback_proc(callback_method_sym))
    # Object filters respond to the bare kind (before/after), so the
    # "_save" suffix is stripped before building them.
    public_send(callback_method, callback_object(callback_method_sym.to_s.gsub(/_save/, "")))
    public_send(callback_method, CallbackClass)
    public_send(callback_method) { |model| model.history << [callback_method_sym, :block] }
  end

  def save
    run_callbacks :save do
      # Lets CallStackTest raise from inside the innermost block.
      raise "inside save" if save_fails
    end
  end
end
# Skips inherited callbacks conditionally: a skip whose :if is truthy (or
# whose :unless is falsy) removes the callback for this subclass; the
# opposite combinations leave it in place.
class PersonSkipper < Person
  skip_callback :save, :before, :before_save_method, if: :yes
  skip_callback :save, :after, :after_save_method, unless: :yes
  skip_callback :save, :after, :after_save_method, if: :no
  skip_callback :save, :before, :before_save_method, unless: :no
  skip_callback :save, :before, CallbackClass, if: :yes

  def yes; true; end
  def no; false; end
end
# Inherits Person's full callback set untouched; SkipCallbacksTest removes
# its :before callbacks at runtime via skip_callback.
class PersonForProgrammaticSkipping < Person
end
# Controller-style fixture: a :before log gated by an :unless proc (no
# logging for :index/:show) and an unconditional :after log2.
class ParentController
  include ActiveSupport::Callbacks

  define_callbacks :dispatch
  set_callback :dispatch, :before, :log, unless: proc { |c| c.action_name == :index || c.action_name == :show }
  set_callback :dispatch, :after, :log2

  attr_reader :action_name, :logger

  def initialize(action_name)
    @action_name = action_name
    @logger = []
  end

  def log
    @logger << action_name
  end

  def log2
    @logger << action_name
  end

  # Returns self so assertions can chain off the dispatch call.
  def dispatch
    run_callbacks(:dispatch) { @logger << "Done" }
    self
  end
end
# Conditionally skips the inherited :before log (only for :update) and
# unconditionally skips the :after log2.
class Child < ParentController
  skip_callback :dispatch, :before, :log, if: proc { |c| c.action_name == :update }
  skip_callback :dispatch, :after, :log2
end
# Fixture showing that :if/:unless predicates are evaluated on each run
# rather than being captured when the chain is compiled: each predicate
# flips shared class-level state the first time it is consulted, so only
# the filters whose predicate passed on that first consultation fire.
#
# Cleanup: the previous `def initialize; super; end` override was removed —
# it was byte-for-byte equivalent to the inherited constructor.
class OneTimeCompile < Record
  @@starts_true, @@starts_false = true, false

  before_save Proc.new { |r| r.history << [:before_save, :starts_true, :if] }, if: :starts_true
  before_save Proc.new { |r| r.history << [:before_save, :starts_false, :if] }, if: :starts_false
  before_save Proc.new { |r| r.history << [:before_save, :starts_true, :unless] }, unless: :starts_true
  before_save Proc.new { |r| r.history << [:before_save, :starts_false, :unless] }, unless: :starts_false

  # True on the first check, then false forever after.
  def starts_true
    if @@starts_true
      @@starts_true = false
      return true
    end
    @@starts_true
  end

  # False on the first check, then true forever after.
  def starts_false
    unless @@starts_false
      @@starts_false = true
      return false
    end
    @@starts_false
  end

  def save
    run_callbacks :save
  end
end
class OneTimeCompileTest < ActiveSupport::TestCase
  # Each predicate is re-evaluated per filter and flips after its first
  # check, so exactly the :starts_true-:if and :starts_true-:unless
  # filters fire on the first save.
  def test_optimized_first_compile
    around = OneTimeCompile.new
    around.save
    assert_equal [
      [:before_save, :starts_true, :if],
      [:before_save, :starts_true, :unless]
    ], around.history
  end
end
# Registers two :after callbacks; the test asserts they run in reverse
# registration order.
class AfterSaveConditionalPerson < Record
  after_save proc { |person| person.history << [:after_save, :string1] }
  after_save proc { |person| person.history << [:after_save, :string2] }

  def save
    run_callbacks :save
  end
end
class AfterSaveConditionalPersonCallbackTest < ActiveSupport::TestCase
  # :after callbacks execute in reverse registration order.
  def test_after_save_runs_in_the_reverse_order
    person = AfterSaveConditionalPerson.new
    person.save
    assert_equal [
      [:after_save, :string2],
      [:after_save, :string1]
    ], person.history
  end
end
# Exercises every supported condition style — proc, symbol, and combined
# :if/:unless. Filters recording "b00m" must never fire; the test asserts
# history contains only the passing entries.
class ConditionalPerson < Record
  # proc
  before_save Proc.new { |r| r.history << [:before_save, :proc] }, if: Proc.new { |r| true }
  before_save Proc.new { |r| r.history << "b00m" }, if: Proc.new { |r| false }
  before_save Proc.new { |r| r.history << [:before_save, :proc] }, unless: Proc.new { |r| false }
  before_save Proc.new { |r| r.history << "b00m" }, unless: Proc.new { |r| true }
  # symbol
  before_save Proc.new { |r| r.history << [:before_save, :symbol] }, if: :yes
  before_save Proc.new { |r| r.history << "b00m" }, if: :no
  before_save Proc.new { |r| r.history << [:before_save, :symbol] }, unless: :no
  before_save Proc.new { |r| r.history << "b00m" }, unless: :yes
  # Combined if and unless
  before_save Proc.new { |r| r.history << [:before_save, :combined_symbol] }, if: :yes, unless: :no
  before_save Proc.new { |r| r.history << "b00m" }, if: :yes, unless: :yes

  def yes; true; end
  def other_yes; true; end
  def no; false; end
  def other_no; false; end

  def save
    run_callbacks :save
  end
end
# All inherited :save callbacks are removed; saving must record nothing.
class CleanPerson < ConditionalPerson
  reset_callbacks :save
end
# Minimal callback host: defines the :save event and nothing else.
class MySuper
  include ActiveSupport::Callbacks
  define_callbacks :save
end
# Records callback activity in #history; any log_*/wrap_*/double_* method
# is synthesized via method_missing so tests can register arbitrary
# callback names. wrap_* yields once; double_* yields twice.
class MySlate < MySuper
  attr_reader :history
  attr_accessor :save_fails

  def initialize
    @history = []
  end

  def save
    run_callbacks :save do
      raise "inside save" if save_fails
      @history << "running"
    end
  end

  def no; false; end
  def yes; true; end

  def method_missing(sym, *)
    case sym
    when /^log_(.*)/
      @history << $1
      nil
    when /^wrap_(.*)/
      @history << "wrap_#$1"
      yield
      @history << "unwrap_#$1"
      nil
    when /^double_(.*)/
      @history << "first_#$1"
      yield
      @history << "second_#$1"
      yield
      @history << "third_#$1"
    else
      super
    end
  end

  # BUG FIX: Ruby always invokes respond_to_missing? with two arguments
  # (name, include_private); the previous single-argument signature made
  # every respond_to? call on these objects raise ArgumentError. The
  # splat restores the standard contract.
  # NOTE(review): double_* is handled by method_missing but not advertised
  # here — confirm whether that omission is intentional before adding it.
  def respond_to_missing?(sym, *)
    sym.match?(/^(log|wrap)_/) || super
  end
end
# Mixes conditional :before, :after and :around callbacks. #nope and
# #w0tno record the "boom" sentinel and must never run; the test asserts
# the exact nesting order of the rest.
class AroundPerson < MySlate
  set_callback :save, :before, :nope, if: :no
  set_callback :save, :before, :nope, unless: :yes
  set_callback :save, :after, :tweedle
  set_callback :save, :before, proc { |m| m.history << "yup" }
  set_callback :save, :before, :nope, if: proc { false }
  set_callback :save, :before, :nope, unless: proc { true }
  set_callback :save, :before, :yup, if: proc { true }
  set_callback :save, :before, :yup, unless: proc { false }
  set_callback :save, :around, :tweedle_dum
  set_callback :save, :around, :w0tyes, if: :yes
  set_callback :save, :around, :w0tno, if: :no
  set_callback :save, :around, :tweedle_deedle

  def nope
    @history << "boom"
  end

  def yup
    @history << "yup"
  end

  def w0tyes
    @history << "w0tyes before"
    yield
    @history << "w0tyes after"
  end

  def w0tno
    @history << "boom"
    yield
  end

  def tweedle_dum
    @history << "tweedle dum pre"
    yield
    @history << "tweedle dum post"
  end

  def tweedle
    @history << "tweedle"
  end

  def tweedle_deedle
    @history << "tweedle deedle pre"
    yield
    @history << "tweedle deedle post"
  end
end
# Captures what an :around callback receives from +yield+ — the save
# block's return value — into #result.
class AroundPersonResult < MySuper
  attr_reader :result

  set_callback :save, :after, :tweedle_1
  set_callback :save, :around, :tweedle_dum
  set_callback :save, :after, :tweedle_2

  def tweedle_dum
    @result = yield
  end

  def tweedle_1
    :tweedle_1
  end

  def tweedle_2
    :tweedle_2
  end

  def save
    run_callbacks :save do
      :running
    end
  end
end
# A single conditional :before callback that mutates state; #save returns
# the state set by the callback.
class HyphenatedCallbacks
  include ActiveSupport::Callbacks

  define_callbacks :save
  set_callback :save, :before, :action, if: :yes

  attr_reader :stuff

  def yes
    true
  end

  def action
    @stuff = "ACTION"
  end

  def save
    run_callbacks(:save) { @stuff }
  end
end
# Registers a :record3 callback on the base via the +extended+ hook and
# supplies the #record3 implementation.
module ExtendModule
  def self.extended(base)
    base.class_eval do
      set_callback :save, :before, :record3
    end
  end

  def record3
    @recorder << 3
  end
end
# Registers a :record2 callback on the base via the +included+ hook and
# supplies the #record2 implementation.
module IncludeModule
  def self.included(base)
    base.class_eval do
      set_callback :save, :before, :record2
    end
  end

  def record2
    @recorder << 2
  end
end
# Callbacks registered directly and via an included module run in
# registration order: :record1 first, then :record2 added by the include.
class ExtendCallbacks
  include ActiveSupport::Callbacks

  define_callbacks :save
  set_callback :save, :before, :record1

  include IncludeModule

  def save
    run_callbacks :save
  end

  attr_reader :recorder

  def initialize
    @recorder = []
  end

  private
    # Private methods are valid symbol filters.
    def record1
      @recorder << 1
    end
end
class AroundCallbacksTest < ActiveSupport::TestCase
  # :before callbacks run first (in order), :around callbacks nest in
  # registration order with the save block innermost, and :after runs
  # last.
  def test_save_around
    around = AroundPerson.new
    around.save
    assert_equal [
      "yup", "yup",
      "tweedle dum pre",
      "w0tyes before",
      "tweedle deedle pre",
      "running",
      "tweedle deedle post",
      "w0tyes after",
      "tweedle dum post",
      "tweedle"
    ], around.history
  end
end
class DoubleYieldTest < ActiveSupport::TestCase
  # double_trouble yields twice (see MySlate#method_missing), so the
  # inner :around callback and the save block must run twice.
  class DoubleYieldModel < MySlate
    set_callback :save, :around, :wrap_outer
    set_callback :save, :around, :double_trouble
    set_callback :save, :around, :wrap_inner
  end

  def test_double_save
    double = DoubleYieldModel.new
    double.save
    assert_equal [
      "wrap_outer",
      "first_trouble",
      "wrap_inner",
      "running",
      "unwrap_inner",
      "second_trouble",
      "wrap_inner",
      "running",
      "unwrap_inner",
      "third_trouble",
      "unwrap_outer",
    ], double.history
  end
end
# Asserts an upper bound on the call-stack frames the callback machinery
# adds, by raising from inside #save and inspecting the backtrace.
class CallStackTest < ActiveSupport::TestCase
  def test_tidy_call_stack
    around = AroundPerson.new
    around.save_fails = true
    exception = (around.save rescue $!)
    # Make sure we have the exception we're expecting
    assert_equal "inside save", exception.message
    call_stack = exception.backtrace_locations
    # Drop the frames below this test so only the save machinery remains.
    call_stack.pop caller_locations(0).size
    # Yes, this looks like an implementation test, but it's the least
    # obtuse way of asserting that there aren't a load of entries in
    # the call stack for each callback.
    #
    # If you've renamed a method, or squeezed more lines out, go ahead
    # and update this assertion. But if you're here because a
    # refactoring added new lines, please reconsider.
    # As shown here, our current budget is one line for run_callbacks
    # itself, plus N+1 lines where N is the number of :around
    # callbacks that have been invoked, if there are any (plus
    # whatever the callbacks do themselves, of course).
    assert_equal [
      "block in save",
      "block in run_callbacks",
      "tweedle_deedle",
      "block in run_callbacks",
      "w0tyes",
      "block in run_callbacks",
      "tweedle_dum",
      "block in run_callbacks",
      "run_callbacks",
      "save"
    ], call_stack.map(&:label)
  end

  def test_short_call_stack
    person = Person.new
    person.save_fails = true
    exception = (person.save rescue $!)
    # Make sure we have the exception we're expecting
    assert_equal "inside save", exception.message
    call_stack = exception.backtrace_locations
    call_stack.pop caller_locations(0).size
    # This budget much simpler: with no :around callbacks invoked,
    # there should be just one line. run_callbacks yields directly
    # back to its caller.
    assert_equal [
      "block in save",
      "run_callbacks",
      "save"
    ], call_stack.map(&:label)
  end
end
class AroundCallbackResultTest < ActiveSupport::TestCase
  # The :around callback captures the value yielded by the save block.
  def test_save_around
    subject = AroundPersonResult.new
    subject.save
    assert_equal :running, subject.result
  end
end
class SkipCallbacksTest < ActiveSupport::TestCase
  # PersonSkipper removed the symbol :before filter and the class :before
  # filter; everything else (including all :after filters) still runs.
  def test_skip_person
    person = PersonSkipper.new
    assert_equal [], person.history
    person.save
    assert_equal [
      [:before_save, :proc],
      [:before_save, :object],
      [:before_save, :block],
      [:after_save, :block],
      [:after_save, :class],
      [:after_save, :object],
      [:after_save, :proc],
      [:after_save, :symbol]
    ], person.history
  end

  # Skipping at runtime via the generated _save_callbacks accessor removes
  # every :before filter, leaving only the :after ones.
  def test_skip_person_programmatically
    PersonForProgrammaticSkipping._save_callbacks.each do |save_callback|
      if "before" == save_callback.kind.to_s
        PersonForProgrammaticSkipping.skip_callback("save", save_callback.kind, save_callback.filter)
      end
    end
    person = PersonForProgrammaticSkipping.new
    assert_equal [], person.history
    person.save
    assert_equal [
      [:after_save, :block],
      [:after_save, :class],
      [:after_save, :object],
      [:after_save, :proc],
      [:after_save, :symbol]
    ], person.history
  end
end
class CallbacksTest < ActiveSupport::TestCase
  # :before filters run in registration order; :after filters in reverse.
  def test_save_person
    person = Person.new
    assert_equal [], person.history
    person.save
    assert_equal [
      [:before_save, :symbol],
      [:before_save, :proc],
      [:before_save, :object],
      [:before_save, :class],
      [:before_save, :block],
      [:after_save, :block],
      [:after_save, :class],
      [:after_save, :object],
      [:after_save, :proc],
      [:after_save, :symbol]
    ], person.history
  end
end
class ConditionalCallbackTest < ActiveSupport::TestCase
  # Only the filters whose conditions passed fire; no "b00m" sentinel may
  # appear in the history.
  def test_save_conditional_person
    person = ConditionalPerson.new
    person.save
    assert_equal [
      [:before_save, :proc],
      [:before_save, :proc],
      [:before_save, :symbol],
      [:before_save, :symbol],
      [:before_save, :combined_symbol],
    ], person.history
  end
end
class ResetCallbackTest < ActiveSupport::TestCase
  # reset_callbacks removed every inherited :save callback, so saving
  # records nothing.
  def test_save_conditional_person
    record = CleanPerson.new
    record.save
    assert_equal [], record.history
  end
end
# Shared fixture for the terminator tests: #second produces the halting
# signal, #history shows which filters ran, and #halted_callback_hook
# records how the chain was halted. Subclasses pick a terminator via
# define_callbacks and then call .set_save_callbacks.
class AbstractCallbackTerminator
  include ActiveSupport::Callbacks

  def self.set_save_callbacks
    set_callback :save, :before, :first
    set_callback :save, :before, :second
    set_callback :save, :around, :around_it
    set_callback :save, :before, :third
    set_callback :save, :after, :first
    set_callback :save, :around, :around_it
    set_callback :save, :after, :third
  end

  attr_reader :history, :saved, :halted, :callback_name

  def initialize
    @history = []
  end

  def around_it
    @history << "around1"
    yield
    @history << "around2"
  end

  def first
    @history << "first"
  end

  # Returns/throws the halting signal (exact mechanism varies per
  # subclass's terminator).
  def second
    @history << "second"
    :halt
  end

  def third
    @history << "third"
  end

  def save
    run_callbacks :save do
      @saved = true
    end
  end

  # Invoked by the callback machinery when a filter halts the chain.
  def halted_callback_hook(filter, name)
    @halted = filter
    @callback_name = name
  end
end
# Custom terminator: halts when a filter's result is :halt.
class CallbackTerminator < AbstractCallbackTerminator
  define_callbacks :save, terminator: ->(_, result_lambda) { result_lambda.call == :halt }
  set_save_callbacks
end
# Like CallbackTerminator, but termination also suppresses all :after
# callbacks.
class CallbackTerminatorSkippingAfterCallbacks < AbstractCallbackTerminator
  define_callbacks :save, terminator: ->(_, result_lambda) { result_lambda.call == :halt },
                          skip_after_callbacks_if_terminated: true
  set_save_callbacks
end
# Uses the default terminator: #second halts the chain by throwing :abort.
class CallbackDefaultTerminator < AbstractCallbackTerminator
  define_callbacks :save

  def second
    @history << "second"
    throw(:abort)
  end

  set_save_callbacks
end
# With the default terminator, a filter returning +false+ must NOT halt
# the chain (only throwing :abort does).
class CallbackFalseTerminator < AbstractCallbackTerminator
  define_callbacks :save

  def second
    @history << "second"
    false
  end

  set_save_callbacks
end
# Instance-level filter object: supports the :before kind under both the
# default :kind scope (#before) and the [:kind, :name] scope
# (#before_save), plus :around, recording on the caller's #record.
class CallbackObject
  def around(caller)
    caller.record.push("around before")
    yield
    caller.record.push("around after")
  end

  def before(caller)
    caller.record.push("before")
  end

  def before_save(caller)
    caller.record.push("before save")
  end
end
# Exercises an object filter registered as :before under the default
# (kind-only) scope — CallbackObject#before is invoked.
class UsingObjectBefore
  include ActiveSupport::Callbacks

  define_callbacks :save
  set_callback :save, :before, CallbackObject.new

  attr_accessor :record

  def initialize
    @record = []
  end

  def save
    run_callbacks(:save) { @record << "yielded" }
  end
end
# Exercises an object filter registered as :around — CallbackObject#around
# wraps the save block.
class UsingObjectAround
  include ActiveSupport::Callbacks

  define_callbacks :save
  set_callback :save, :around, CallbackObject.new

  attr_accessor :record

  def initialize
    @record = []
  end

  def save
    run_callbacks :save do
      @record << "yielded"
    end
  end
end
# With scope: [:kind, :name] the object filter dispatches to
# CallbackObject#before_save instead of #before; #save also returns the
# block's value.
class CustomScopeObject
  include ActiveSupport::Callbacks

  define_callbacks :save, scope: [:kind, :name]
  set_callback :save, :before, CallbackObject.new

  attr_accessor :record

  def initialize
    @record = []
  end

  def save
    run_callbacks :save do
      @record << "yielded"
      "CallbackResult"
    end
  end
end
# Base fixture whose #first/#second/#third record their number words;
# used by the duplicate-registration fixtures below.
class OneTwoThreeSave
  include ActiveSupport::Callbacks

  define_callbacks :save

  attr_accessor :record

  def initialize
    @record = []
  end

  def save
    run_callbacks(:save) { @record << "yielded" }
  end

  def first
    @record.push("one")
  end

  def second
    @record.push("two")
  end

  def third
    @record.push("three")
  end
end
# Registers :first twice across two calls; re-registering an equivalent
# filter replaces the earlier registration (see
# CallbackChain#remove_duplicates).
class DuplicatingCallbacks < OneTwoThreeSave
  set_callback :save, :before, :first, :second
  set_callback :save, :before, :first, :third
end
# Same duplicate registration of :first, but within a single
# set_callback call.
class DuplicatingCallbacksInSameCall < OneTwoThreeSave
  set_callback :save, :before, :first, :second, :first, :third
end
class UsingObjectTest < ActiveSupport::TestCase
  # Default scope dispatches to the object's #before.
  def test_before_object
    u = UsingObjectBefore.new
    u.save
    assert_equal ["before", "yielded"], u.record
  end

  # :around object filters wrap the save block.
  def test_around_object
    u = UsingObjectAround.new
    u.save
    assert_equal ["around before", "yielded", "around after"], u.record
  end

  # scope: [:kind, :name] dispatches to #before_save.
  def test_customized_object
    u = CustomScopeObject.new
    u.save
    assert_equal ["before save", "yielded"], u.record
  end

  # run_callbacks returns the block's value when the chain isn't halted.
  def test_block_result_is_returned
    u = CustomScopeObject.new
    assert_equal "CallbackResult", u.save
  end
end
class CallbackTerminatorTest < ActiveSupport::TestCase
  # Halting in :second skips the remaining :before/:around filters but,
  # by default, :after filters still run.
  def test_termination_skips_following_before_and_around_callbacks
    terminator = CallbackTerminator.new
    terminator.save
    assert_equal ["first", "second", "third", "first"], terminator.history
  end

  # halted_callback_hook receives the halting filter and the event name.
  def test_termination_invokes_hook
    terminator = CallbackTerminator.new
    terminator.save
    assert_equal :second, terminator.halted
    assert_equal :save, terminator.callback_name
  end

  def test_block_never_called_if_terminated
    obj = CallbackTerminator.new
    obj.save
    assert_not obj.saved
  end
end
class CallbackTerminatorSkippingAfterCallbacksTest < ActiveSupport::TestCase
  # When the terminator is configured to skip after callbacks, halting in a
  # before callback leaves only the callbacks run up to the halt.
  def test_termination_skips_after_callbacks
    object = CallbackTerminatorSkippingAfterCallbacks.new
    object.save
    assert_equal ["first", "second"], object.history
  end
end
# Covers the default terminator, where throwing :abort halts the chain.
class CallbackDefaultTerminatorTest < ActiveSupport::TestCase
  def test_default_termination
    object = CallbackDefaultTerminator.new
    object.save
    assert_equal ["first", "second", "third", "first"], object.history
  end

  def test_default_termination_invokes_hook
    object = CallbackDefaultTerminator.new
    object.save
    assert_equal :second, object.halted
  end

  def test_block_never_called_if_abort_is_thrown
    object = CallbackDefaultTerminator.new
    object.save
    assert_not object.saved
  end
end
class CallbackFalseTerminatorTest < ActiveSupport::TestCase
  # A callback returning +false+ must not halt the chain: nothing is
  # recorded as halted and the save block still runs.
  def test_returning_false_does_not_halt_callback
    object = CallbackFalseTerminator.new
    object.save
    assert_nil object.halted
    assert object.saved
  end
end
class HyphenatedKeyTest < ActiveSupport::TestCase
  # Exercises the HyphenatedCallbacks fixture (defined elsewhere); saving
  # must run its callbacks and populate +stuff+.
  def test_save
    object = HyphenatedCallbacks.new
    object.save
    assert_equal "ACTION", object.stuff
  end
end
# Conditionally skips Person's :before_save_method callback when the
# writer's age exceeds 21. NOTE(review): the zero-arity lambda appears to be
# evaluated in instance context (see WriterCallbacksTest, where age 18 lets
# the full chain run) — +age+ refers to this record's attribute.
class WriterSkipper < Person
  attr_accessor :age
  skip_callback :save, :before, :before_save_method, if: -> { age > 21 }
end
class WriterCallbacksTest < ActiveSupport::TestCase
  def test_skip_writer
    writer = WriterSkipper.new
    writer.age = 18
    # Nothing is recorded before save is invoked.
    assert_equal [], writer.history
    writer.save
    # age (18) does not satisfy the skip condition (age > 21), so the full
    # inherited chain runs: before callbacks in registration order, after
    # callbacks in reverse.
    assert_equal [
      [:before_save, :symbol],
      [:before_save, :proc],
      [:before_save, :object],
      [:before_save, :class],
      [:before_save, :block],
      [:after_save, :block],
      [:after_save, :class],
      [:after_save, :object],
      [:after_save, :proc],
      [:after_save, :symbol]
    ], writer.history
  end
end
class ExtendCallbacksTest < ActiveSupport::TestCase
  # Callbacks added by extending a single instance with ExtendModule must
  # run alongside the class-level ones (fixtures defined elsewhere).
  def test_save
    model = ExtendCallbacks.new.extend(ExtendModule)
    model.save
    assert_equal [1, 2, 3], model.recorder
  end
end
# Duplicate registrations of the same symbol callback are excluded; only
# the later registration's position survives.
class ExcludingDuplicatesCallbackTest < ActiveSupport::TestCase
  def test_excludes_duplicates_in_separate_calls
    object = DuplicatingCallbacks.new
    object.save
    assert_equal ["two", "one", "three", "yielded"], object.record
  end

  def test_excludes_duplicates_in_one_call
    object = DuplicatingCallbacksInSameCall.new
    object.save
    assert_equal ["two", "one", "three", "yielded"], object.record
  end
end
# Exercises proc callbacks of various arities.
class CallbackProcTest < ActiveSupport::TestCase
  # Anonymous class with +callback+ registered as a :before callback.
  def build_class(callback)
    Class.new do
      include ActiveSupport::Callbacks
      define_callbacks :foo
      set_callback :foo, :before, callback

      def run
        run_callbacks :foo
      end
    end
  end

  def test_proc_arity_0
    invocations = []
    build_class(-> { invocations << :foo }).new.run
    assert_equal [:foo], invocations
  end

  def test_proc_arity_1
    invocations = []
    instance = build_class(->(o) { invocations << o }).new
    instance.run
    # Single-argument procs receive the record itself.
    assert_equal [instance], invocations
  end

  def test_proc_arity_2
    assert_raises(ArgumentError) do
      build_class(->(x, y) { }).new.run
    end
  end

  def test_proc_negative_called_with_empty_list
    invocations = []
    build_class(->(*args) { invocations << args }).new.run
    assert_equal [[]], invocations
  end
end
# Exercises the :if conditional option on set_callback with procs of
# various arities and (tentatively) classes as conditionals.
class ConditionalTests < ActiveSupport::TestCase
  # Anonymous class whose :foo before-callback is guarded by +callback+ via
  # the :if option.
  def build_class(callback)
    Class.new {
      include ActiveSupport::Callbacks
      define_callbacks :foo
      set_callback :foo, :before, :foo, if: callback
      def foo; end
      def run; run_callbacks :foo; end
    }
  end

  # FIXME: do we really want to support classes as conditionals? There were
  # no tests for it previous to this.
  def test_class_conditional_with_scope
    z = []
    callback = Class.new {
      define_singleton_method(:foo) { |o| z << o }
    }
    klass = Class.new {
      include ActiveSupport::Callbacks
      define_callbacks :foo, scope: [:name]
      set_callback :foo, :before, :foo, if: callback
      def run; run_callbacks :foo; end
      private
        def foo; end
    }
    object = klass.new
    object.run
    # The conditional class's scoped hook received the instance exactly once.
    assert_equal [object], z
  end

  # FIXME: do we really want to support classes as conditionals? There were
  # no tests for it previous to this.
  def test_class
    z = []
    klass = build_class Class.new {
      define_singleton_method(:before) { |o| z << o }
    }
    object = klass.new
    object.run
    assert_equal [object], z
  end

  def test_proc_negative_arity # passes an empty list if *args
    z = []
    object = build_class(->(*args) { z << args }).new
    object.run
    assert_equal [], z.flatten
  end

  def test_proc_arity0
    z = []
    object = build_class(->() { z << 0 }).new
    object.run
    assert_equal [0], z
  end

  def test_proc_arity1
    z = []
    object = build_class(->(x) { z << x }).new
    object.run
    # Single-argument conditionals receive the record itself.
    assert_equal [object], z
  end

  def test_proc_arity2
    assert_raises(ArgumentError) do
      object = build_class(->(a, b) { }).new
      object.run
    end
  end
end
# Verifies reset_callbacks removes registered callbacks, both on the class
# itself and on already-defined subclasses.
class ResetCallbackTest < ActiveSupport::TestCase
  # Anonymous class whose :hello before-callback appends to +memo+.
  def build_class(memo)
    Class.new do
      include ActiveSupport::Callbacks
      define_callbacks :foo
      set_callback :foo, :before, :hello

      def run
        run_callbacks :foo
      end
    end.tap do |klass|
      klass.class_eval { define_method(:hello) { memo << :hi } }
    end
  end

  def test_reset_callbacks
    events = []
    klass = build_class(events)
    klass.new.run
    assert_equal 1, events.length

    # After the reset, running the chain fires nothing.
    klass.reset_callbacks :foo
    klass.new.run
    assert_equal 1, events.length
  end

  def test_reset_impacts_subclasses
    events = []
    klass = build_class(events)
    subclass = Class.new(klass) { set_callback :foo, :before, :world }
    subclass.class_eval { define_method(:world) { events << :world } }

    subclass.new.run
    assert_equal 2, events.length

    # Resetting the parent clears inherited callbacks; only :world remains.
    klass.reset_callbacks :foo
    subclass.new.run
    assert_equal 3, events.length
  end
end
# Exercises registering the same callback many times with different callback
# types (class, lambda, symbol) and the matching skip_callback behavior.
class CallbackTypeTest < ActiveSupport::TestCase
  # Builds a class that registers +callback+ n times (default 10) as a
  # :before callback for :foo, plus a class-level +skip+ shortcut.
  def build_class(callback, n = 10)
    Class.new {
      include ActiveSupport::Callbacks
      define_callbacks :foo
      n.times { set_callback :foo, :before, callback }
      def run; run_callbacks :foo; end
      def self.skip(*things); skip_callback :foo, :before, *things; end
    }
  end

  def test_add_class
    calls = []
    callback = Class.new {
      define_singleton_method(:before) { |o| calls << o }
    }
    build_class(callback).new.run
    # Class callbacks are not deduplicated: all 10 registrations fire.
    assert_equal 10, calls.length
  end

  def test_add_lambda
    calls = []
    build_class(->(o) { calls << o }).new.run
    # Lambda callbacks are not deduplicated either.
    assert_equal 10, calls.length
  end

  def test_add_symbol
    calls = []
    klass = build_class(:bar)
    klass.class_eval { define_method(:bar) { calls << klass } }
    klass.new.run
    # Symbol callbacks are deduplicated: only one invocation remains.
    assert_equal 1, calls.length
  end

  def test_skip_class # removes one at a time
    calls = []
    callback = Class.new {
      define_singleton_method(:before) { |o| calls << o }
    }
    klass = build_class(callback)
    9.downto(0) { |i|
      klass.skip callback
      klass.new.run
      assert_equal i, calls.length
      calls.clear
    }
  end

  def test_skip_symbol # removes all
    calls = []
    klass = build_class(:bar)
    klass.class_eval { define_method(:bar) { calls << klass } }
    klass.skip :bar
    klass.new.run
    assert_equal 0, calls.length
  end

  def test_skip_string # raises error
    calls = []
    klass = build_class(:bar)
    klass.class_eval { define_method(:bar) { calls << klass } }
    assert_raises(ArgumentError) { klass.skip "bar" }
    klass.new.run
    # The failed skip must leave the registered callback in place.
    assert_equal 1, calls.length
  end

  def test_skip_undefined_callback # raises error
    calls = []
    klass = build_class(:bar)
    klass.class_eval { define_method(:bar) { calls << klass } }
    assert_raises(ArgumentError) { klass.skip :qux }
    klass.new.run
    assert_equal 1, calls.length
  end

  def test_skip_without_raise # removes nothing
    calls = []
    klass = build_class(:bar)
    klass.class_eval { define_method(:bar) { calls << klass } }
    klass.skip :qux, raise: false
    klass.new.run
    assert_equal 1, calls.length
  end
end
# String conditionals (if:/unless:) are not supported and must raise.
class NotSupportedStringConditionalTest < ActiveSupport::TestCase
  def test_string_conditional_options
    record_class = Class.new(Record)

    assert_raises(ArgumentError) { record_class.before_save :tweedle, if: ["true"] }
    assert_raises(ArgumentError) { record_class.before_save :tweedle, if: "true" }
    assert_raises(ArgumentError) { record_class.after_save :tweedle, unless: "false" }
    assert_raises(ArgumentError) { record_class.skip_callback :save, :before, :tweedle, if: "true" }
    assert_raises(ArgumentError) { record_class.skip_callback :save, :after, :tweedle, unless: "false" }
  end
end
# Callbacks themselves may not be given as strings either.
class NotPermittedStringCallbackTest < ActiveSupport::TestCase
  def test_passing_string_callback_is_not_permitted
    record_class = Class.new(Record)
    assert_raises(ArgumentError) { record_class.before_save "tweedle" }
  end
end
end |
# frozen_string_literal: true | |
require "active_support/json" | |
require "active_support/core_ext/string/access" | |
require "active_support/core_ext/string/behavior" | |
require "active_support/core_ext/module/delegation" | |
module ActiveSupport # :nodoc: | |
module Multibyte # :nodoc: | |
# Chars enables you to work transparently with UTF-8 encoding in the Ruby | |
# String class without having extensive knowledge about the encoding. A | |
# Chars object accepts a string upon initialization and proxies String | |
# methods in an encoding safe manner. All the normal String methods are also | |
# implemented on the proxy. | |
# | |
# String methods are proxied through the Chars object, and can be accessed | |
# through the +mb_chars+ method. Methods which would normally return a | |
# String object now return a Chars object so methods can be chained. | |
# | |
# 'The Perfect String '.mb_chars.downcase.strip | |
# # => #<ActiveSupport::Multibyte::Chars:0x007fdc434ccc10 @wrapped_string="the perfect string"> | |
# | |
# Chars objects are perfectly interchangeable with String objects as long as | |
# no explicit class checks are made. If certain methods do explicitly check | |
# the class, call +to_s+ before you pass chars objects to them. | |
# | |
# bad.explicit_checking_method 'T'.mb_chars.downcase.to_s | |
# | |
# The default Chars implementation assumes that the encoding of the string | |
# is UTF-8, if you want to handle different encodings you can write your own | |
# multibyte string handler and configure it through | |
# ActiveSupport::Multibyte.proxy_class. | |
# | |
# class CharsForUTF32 | |
# def size | |
# @wrapped_string.size / 4 | |
# end | |
# | |
# def self.accepts?(string) | |
# string.length % 4 == 0 | |
# end | |
# end | |
# | |
# ActiveSupport::Multibyte.proxy_class = CharsForUTF32 | |
class Chars
  include Comparable
  attr_reader :wrapped_string
  alias to_s wrapped_string
  alias to_str wrapped_string

  delegate :<=>, :=~, :match?, :acts_like_string?, to: :wrapped_string

  # Creates a new Chars instance by wrapping _string_.
  def initialize(string)
    @wrapped_string = string
    # Frozen strings cannot be mutated; they are assumed to already be
    # UTF-8 encoded.
    @wrapped_string.force_encoding(Encoding::UTF_8) unless @wrapped_string.frozen?
  end

  # Forward all undefined methods to the wrapped string.
  def method_missing(method, *args, &block)
    result = @wrapped_string.__send__(method, *args, &block)
    if method.end_with?("!")
      # Bang methods mutate the wrapped string in place; mirror String's
      # convention of returning nil when nothing changed, otherwise return
      # the proxy itself so calls can keep chaining.
      self if result
    else
      # Re-wrap String results so subsequent calls stay on the proxy.
      result.kind_of?(String) ? chars(result) : result
    end
  end

  # Returns +true+ if _obj_ responds to the given method. Private methods
  # are included in the search only if the optional second parameter
  # evaluates to +true+.
  def respond_to_missing?(method, include_private)
    @wrapped_string.respond_to?(method, include_private)
  end

  # Works just like <tt>String#split</tt>, with the exception that the items
  # in the resulting list are Chars instances instead of String. This makes
  # chaining methods easier.
  #
  #   'Café périferôl'.mb_chars.split(/é/).map { |part| part.upcase.to_s } # => ["CAF", " P", "RIFERÔL"]
  def split(*args)
    @wrapped_string.split(*args).map { |i| self.class.new(i) }
  end

  # Works like <tt>String#slice!</tt>, but returns an instance of
  # Chars, or +nil+ if the string was not modified. The string will not be
  # modified if the range given is out of bounds
  #
  #   string = 'Welcome'
  #   string.mb_chars.slice!(3)    # => #<ActiveSupport::Multibyte::Chars:0x000000038109b8 @wrapped_string="c">
  #   string # => 'Welome'
  #   string.mb_chars.slice!(0..3) # => #<ActiveSupport::Multibyte::Chars:0x00000002eb80a0 @wrapped_string="Welo">
  #   string # => 'me'
  def slice!(*args)
    string_sliced = @wrapped_string.slice!(*args)
    if string_sliced
      chars(string_sliced)
    end
  end

  # Reverses all characters in the string.
  #
  #   'Café'.mb_chars.reverse.to_s # => 'éfaC'
  def reverse
    # Reverse by grapheme cluster rather than by codepoint so combining
    # marks stay attached to their base characters.
    chars(@wrapped_string.grapheme_clusters.reverse.join)
  end

  # Limits the byte size of the string to a number of bytes without breaking
  # characters. Usable when the storage for a string is limited for some
  # reason.
  #
  #   'こんにちは'.mb_chars.limit(7).to_s # => "こん"
  def limit(limit)
    chars(@wrapped_string.truncate_bytes(limit, omission: nil))
  end

  # Capitalizes the first letter of every word, when possible.
  #
  #   "ÉL QUE SE ENTERÓ".mb_chars.titleize.to_s # => "Él Que Se Enteró"
  #   "日本語".mb_chars.titleize.to_s           # => "日本語"
  def titleize
    chars(downcase.to_s.gsub(/\b('?\S)/u) { $1.upcase })
  end
  alias_method :titlecase, :titleize

  # Performs canonical decomposition on all the characters.
  #
  #   'é'.length                         # => 1
  #   'é'.mb_chars.decompose.to_s.length # => 2
  def decompose
    chars(Unicode.decompose(:canonical, @wrapped_string.codepoints.to_a).pack("U*"))
  end

  # Performs composition on all the characters.
  #
  #   'é'.length                       # => 1
  #   'é'.mb_chars.compose.to_s.length # => 1
  def compose
    chars(Unicode.compose(@wrapped_string.codepoints.to_a).pack("U*"))
  end

  # Returns the number of grapheme clusters in the string.
  #
  #   'क्षि'.mb_chars.length          # => 4
  #   'क्षि'.mb_chars.grapheme_length # => 2
  def grapheme_length
    @wrapped_string.grapheme_clusters.length
  end

  # Replaces all ISO-8859-1 or CP1252 characters by their UTF-8 equivalent
  # resulting in a valid UTF-8 string.
  #
  # Passing +true+ will forcibly tidy all bytes, assuming that the string's
  # encoding is entirely CP1252 or ISO-8859-1.
  def tidy_bytes(force = false)
    chars(Unicode.tidy_bytes(@wrapped_string, force))
  end

  def as_json(options = nil) # :nodoc:
    to_s.as_json(options)
  end

  # Generates the destructive counterparts (reverse!, tidy_bytes!), which
  # replace the wrapped string in place and return self for chaining.
  %w(reverse tidy_bytes).each do |method|
    define_method("#{method}!") do |*args|
      @wrapped_string = public_send(method, *args).to_s
      self
    end
  end

  private
    # Wraps +string+ in a new instance of the current (sub)class.
    def chars(string)
      self.class.new(string)
    end
end
end | |
end |
# frozen_string_literal: true | |
# Side-effecting fixture: each time this file is loaded it bumps a global
# counter and records the $VERBOSE value in effect during the load
# (presumably inspected by a loading/warnings test elsewhere — not visible
# here).
$check_warnings_load_count += 1
$checked_verbose = $VERBOSE
# frozen_string_literal: true | |
require "active_support/core_ext/class/attribute" | |
require "active_support/core_ext/class/subclasses" |
# frozen_string_literal: true | |
require_relative "../abstract_unit" | |
require "active_support/core_ext/class" | |
require "set" | |
class ClassTest < ActiveSupport::TestCase
  # Two independent inheritance branches under Parent serve as fixtures.
  class Parent; end
  class Foo < Parent; end
  class Bar < Foo; end
  class Baz < Bar; end
  class A < Parent; end
  class B < A; end
  class C < B; end

  def test_descendants
    # descendants returns every transitive subclass; ordering is not
    # guaranteed, hence the Set comparison for multi-element results.
    assert_equal [Foo, Bar, Baz, A, B, C].to_set, Parent.descendants.to_set
    assert_equal [Bar, Baz].to_set, Foo.descendants.to_set
    assert_equal [Baz], Bar.descendants
    assert_equal [], Baz.descendants
  end

  def test_subclasses
    # subclasses returns direct children only.
    assert_equal [Foo, A].to_set, Parent.subclasses.to_set
    assert_equal [Bar], Foo.subclasses
    assert_equal [Baz], Bar.subclasses
    assert_equal [], Baz.subclasses
  end

  def test_descendants_excludes_singleton_classes
    klass = Parent.new.singleton_class
    assert_not Parent.descendants.include?(klass), "descendants should not include singleton classes"
  end

  def test_subclasses_excludes_singleton_classes
    klass = Parent.new.singleton_class
    assert_not Parent.subclasses.include?(klass), "subclasses should not include singleton classes"
  end
end
# frozen_string_literal: true | |
require_relative "abstract_unit" | |
class BacktraceCleanerFilterTest < ActiveSupport::TestCase
  def setup
    @cleaner = ActiveSupport::BacktraceCleaner.new
    @cleaner.add_filter { |line| line.gsub("/my/prefix", "") }
  end

  test "backtrace should filter all lines in a backtrace, removing prefixes" do
    cleaned = @cleaner.clean(["/my/prefix/my/class.rb", "/my/prefix/my/module.rb"])
    assert_equal ["/my/class.rb", "/my/module.rb"], cleaned
  end

  test "backtrace cleaner should allow removing filters" do
    @cleaner.remove_filters!
    assert_equal "/my/prefix/my/class.rb", @cleaner.clean(["/my/prefix/my/class.rb"]).first
  end

  test "backtrace should contain unaltered lines if they don't match a filter" do
    assert_equal "/my/other_prefix/my/class.rb", @cleaner.clean([ "/my/other_prefix/my/class.rb" ]).first
  end
end
class BacktraceCleanerSilencerTest < ActiveSupport::TestCase
  def setup
    @cleaner = ActiveSupport::BacktraceCleaner.new
    @cleaner.add_silencer { |line| line.include?("mongrel") }
  end

  test "backtrace should not contain lines that match the silencer" do
    cleaned = @cleaner.clean([ "/mongrel/class.rb", "/other/class.rb", "/mongrel/stuff.rb" ])
    assert_equal [ "/other/class.rb" ], cleaned
  end

  test "backtrace cleaner should allow removing silencer" do
    @cleaner.remove_silencers!
    assert_equal ["/mongrel/stuff.rb"], @cleaner.clean(["/mongrel/stuff.rb"])
  end
end
class BacktraceCleanerMultipleSilencersTest < ActiveSupport::TestCase
  def setup
    @cleaner = ActiveSupport::BacktraceCleaner.new
    @cleaner.add_silencer { |line| line.include?("mongrel") }
    @cleaner.add_silencer { |line| line.include?("yolo") }
  end

  test "backtrace should not contain lines that match the silencers" do
    cleaned = @cleaner.clean([ "/mongrel/class.rb", "/other/class.rb", "/mongrel/stuff.rb", "/other/yolo.rb" ])
    assert_equal [ "/other/class.rb" ], cleaned
  end

  test "backtrace should only contain lines that match the silencers" do
    # The :noise kind inverts the cleaner: only silenced lines come back.
    cleaned = @cleaner.clean(
      [ "/mongrel/class.rb", "/other/class.rb", "/mongrel/stuff.rb", "/other/yolo.rb" ],
      :noise)
    assert_equal [ "/mongrel/class.rb", "/mongrel/stuff.rb", "/other/yolo.rb" ], cleaned
  end
end
class BacktraceCleanerFilterAndSilencerTest < ActiveSupport::TestCase
  def setup
    @cleaner = ActiveSupport::BacktraceCleaner.new
    @cleaner.add_filter { |line| line.gsub("/mongrel", "") }
    @cleaner.add_silencer { |line| line.include?("mongrel") }
  end

  # Filters run before silencers, so once "/mongrel" has been stripped the
  # silencer no longer matches the line.
  test "backtrace should not silence lines that has first had their silence hook filtered out" do
    assert_equal [ "/class.rb" ], @cleaner.clean([ "/mongrel/class.rb" ])
  end
end
class BacktraceCleanerDefaultFilterAndSilencerTest < ActiveSupport::TestCase
  def setup
    # No custom filters/silencers: exercises the cleaner's built-in defaults.
    @bc = ActiveSupport::BacktraceCleaner.new
  end

  test "should format installed gems correctly" do
    backtrace = [ "#{Gem.default_dir}/gems/nosuchgem-1.2.3/lib/foo.rb" ]
    # :all bypasses silencers, so the gem line survives and only the default
    # filters reformat it.
    result = @bc.clean(backtrace, :all)
    assert_equal "nosuchgem (1.2.3) lib/foo.rb", result[0]
  end

  test "should format installed gems not in Gem.default_dir correctly" do
    target_dir = Gem.path.detect { |p| p != Gem.default_dir }
    # skip this test if default_dir is the only directory on Gem.path
    if target_dir
      backtrace = [ "#{target_dir}/gems/nosuchgem-1.2.3/lib/foo.rb" ]
      result = @bc.clean(backtrace, :all)
      assert_equal "nosuchgem (1.2.3) lib/foo.rb", result[0]
    end
  end

  test "should format gems installed by bundler" do
    backtrace = [ "#{Gem.default_dir}/bundler/gems/nosuchgem-1.2.3/lib/foo.rb" ]
    result = @bc.clean(backtrace, :all)
    assert_equal "nosuchgem (1.2.3) lib/foo.rb", result[0]
  end

  test "should silence gems from the backtrace" do
    backtrace = [ "#{Gem.path[0]}/gems/nosuchgem-1.2.3/lib/foo.rb" ]
    result = @bc.clean(backtrace)
    assert_empty result
  end

  test "should silence stdlib" do
    backtrace = ["#{RbConfig::CONFIG["rubylibdir"]}/lib/foo.rb"]
    result = @bc.clean(backtrace)
    assert_empty result
  end

  test "should preserve lines that have a subpath matching a gem path" do
    # A path that merely contains a gem dir as a substring must not be
    # rewritten or silenced.
    backtrace = [Gem.default_dir, *Gem.path].map { |path| "/parent#{path}/gems/nosuchgem-1.2.3/lib/foo.rb" }
    assert_equal backtrace, @bc.clean(backtrace)
  end
end
# frozen_string_literal: true | |
require_relative "abstract_unit" | |
require "stringio" | |
require "active_support/logger" | |
class CleanLoggerTest < ActiveSupport::TestCase
  def setup
    @output = StringIO.new
    @logger = ActiveSupport::Logger.new(@output)
  end

  # The default formatter emits only the message, with no severity prefix.
  def test_format_message
    @logger.error "error"
    assert_equal "error\n", @output.string
  end

  def test_datetime_format
    @logger.formatter = Logger::Formatter.new
    @logger.formatter.datetime_format = "%Y-%m-%d"
    @logger.debug "debug"

    assert_equal "%Y-%m-%d", @logger.formatter.datetime_format
    assert_match(/D, \[\d\d\d\d-\d\d-\d\d[ ]?#\d+\] DEBUG -- : debug/, @output.string)
  end

  # Non-string payloads are rendered with #inspect.
  def test_nonstring_formatting
    payload = [1, 2, 3, 4, 5]
    @logger.debug payload
    assert_equal("#{payload.inspect}\n", @output.string)
  end
end
# frozen_string_literal: true | |
module ActiveSupport
  class CodeGenerator # :nodoc:
    # Collects method sources per namespace, compiles them once, and
    # memoizes the compiled methods on a shared cache module so identical
    # generations are reused across owners.
    class MethodSet
      # One shared cache module per namespace.
      METHOD_CACHES = Hash.new { |h, k| h[k] = Module.new }

      def initialize(namespace)
        @cache = METHOD_CACHES[namespace]
        @pending_sources = []
        @names = {}
      end

      # Registers +name+ (the method name on the owner) backed by cached
      # method +as+. The block receives the source buffer and appends the
      # method body; it is only invoked when the cache module does not yet
      # define +as+, and only the first time +name+ is registered.
      def define_cached_method(name, as: name)
        key = name.to_sym
        cached_name = as.to_sym
        return @names[key] if @names.key?(key)

        yield @pending_sources unless @cache.method_defined?(cached_name)
        @names[key] = cached_name
      end

      # Compiles any newly appended sources into the cache module, then
      # binds every registered method onto +owner+ under its public name.
      def apply(owner, path, line)
        if @pending_sources.any?
          @cache.module_eval("# frozen_string_literal: true\n" + @pending_sources.join(";"), path, line)
        end
        @names.each do |name, cached_name|
          owner.define_method(name, @cache.instance_method(cached_name))
        end
      end
    end

    class << self
      # Yields an existing generator as-is, or wraps the block in a fresh
      # generator and compiles everything it registered before returning the
      # block's result.
      def batch(owner, path, line)
        if owner.is_a?(CodeGenerator)
          yield owner
        else
          generator = new(owner, path, line)
          result = yield generator
          generator.execute
          result
        end
      end
    end

    def initialize(owner, path, line)
      @owner = owner
      @path = path
      @line = line
      @namespaces = Hash.new { |h, k| h[k] = MethodSet.new(k) }
    end

    def define_cached_method(name, namespace:, as: name, &block)
      @namespaces[namespace].define_cached_method(name, as: as, &block)
    end

    # Compiles and applies every namespace's pending methods to the owner.
    def execute
      @namespaces.each_value do |method_set|
        method_set.apply(@owner, @path, @line - 1)
      end
    end
  end
end
# frozen_string_literal: true | |
require_relative "../abstract_unit" | |
require "active_support/cache" | |
require "active_support/core_ext/numeric/time" | |
# Round-trips cache entries between the 6.1 and 7.0 coder formats in both
# directions, so mixed-version deployments can read each other's payloads.
class CacheCoderTest < ActiveSupport::TestCase
  def test_new_coder_can_read_legacy_payloads
    assert_roundtrip ActiveSupport::Cache::Coders::Rails61Coder,
      ActiveSupport::Cache::Coders::Rails70Coder
  end

  def test_legacy_coder_can_read_new_payloads
    assert_roundtrip ActiveSupport::Cache::Coders::Rails70Coder,
      ActiveSupport::Cache::Coders::Rails61Coder
  end

  private
    # Dumps a sample entry with +writer+ and asserts +reader+ restores its
    # value, version, and expiry intact.
    def assert_roundtrip(writer, reader)
      entry = ActiveSupport::Cache::Entry.new("foobar", expires_in: 1.hour, version: "v42")
      deserialized_entry = reader.load(writer.dump(entry))

      assert_equal entry.value, deserialized_entry.value
      assert_equal entry.version, deserialized_entry.version
      assert_equal entry.expires_at, deserialized_entry.expires_at
    end
end
# frozen_string_literal: true | |
module ActiveSupport
  module CompareWithRange
    # Extends the default Range#=== to support range comparisons.
    #   (1..5) === (1..5)  # => true
    #   (1..5) === (2..3)  # => true
    #   (1..5) === (1...6) # => true
    #   (1..5) === (2..6)  # => false
    #
    # The native Range#=== behavior is untouched.
    #   ('a'..'f') === ('c') # => true
    #   (5..9) === (11)      # => false
    #
    # The given range must be fully bounded, with both start and end.
    def ===(value)
      return super unless value.is_a?(::Range)

      # A backwards (empty) range can never be contained.
      backwards_op = value.exclude_end? ? :>= : :>
      return false if value.begin && value.end && value.begin.public_send(backwards_op, value.end)

      # 1...10 includes 1..9 but it does not include 1..10.
      # 1..10 includes 1...11 but it does not include 1...12.
      upper_op = exclude_end? && !value.exclude_end? ? :< : :<=
      upper_bound = !exclude_end? && value.exclude_end? ? value.max : value.last
      super(value.first) && (self.end.nil? || upper_bound.public_send(upper_op, last))
    end

    # Extends the default Range#include? to support range comparisons.
    #   (1..5).include?(1..5)  # => true
    #   (1..5).include?(2..3)  # => true
    #   (1..5).include?(1...6) # => true
    #   (1..5).include?(2..6)  # => false
    #
    # The native Range#include? behavior is untouched.
    #   ('a'..'f').include?('c') # => true
    #   (5..9).include?(11)      # => false
    #
    # The given range must be fully bounded, with both start and end.
    def include?(value)
      return super unless value.is_a?(::Range)

      # A backwards (empty) range can never be contained.
      backwards_op = value.exclude_end? ? :>= : :>
      return false if value.begin && value.end && value.begin.public_send(backwards_op, value.end)

      # 1...10 includes 1..9 but it does not include 1..10.
      # 1..10 includes 1...11 but it does not include 1...12.
      upper_op = exclude_end? && !value.exclude_end? ? :< : :<=
      upper_bound = !exclude_end? && value.exclude_end? ? value.max : value.last
      super(value.first) && (self.end.nil? || upper_bound.public_send(upper_op, last))
    end
  end
end

Range.prepend(ActiveSupport::CompareWithRange)
# frozen_string_literal: true | |
require "active_support/core_ext/date_and_time/compatibility" | |
require "active_support/core_ext/module/redefine_method" | |
class Time
  include DateAndTime::Compatibility

  silence_redefinition_of_method :to_time

  # Either returns +self+ or the time converted to the local system
  # timezone, depending on the +ActiveSupport.to_time_preserves_timezone+
  # setting (read here via +preserve_timezone+, presumably provided by the
  # included DateAndTime::Compatibility module).
  def to_time
    if preserve_timezone
      self
    else
      getlocal
    end
  end
end
# frozen_string_literal: true | |
require "active_support/core_ext/module/concerning" | |
module Kernel
  module_function

  # Shortcut for defining a toplevel concern (the constant ends up under
  # Object rather than inside a module namespace).
  #
  # See Module::Concerning for more.
  def concern(topic, &module_definition)
    Object.concern(topic, &module_definition)
  end
end
# frozen_string_literal: true | |
require_relative "../../abstract_unit" | |
require "active_support/core_ext/kernel/concern" | |
class KernelConcernTest < ActiveSupport::TestCase
  def test_may_be_defined_at_toplevel
    # Evaluate in the toplevel binding to prove Kernel#concern works as a
    # bare main-object call, defining the constant under Object.
    mod = ::TOPLEVEL_BINDING.eval "concern(:ToplevelConcern) { }"
    assert_equal mod, ::ToplevelConcern
    assert_kind_of ActiveSupport::Concern, ::ToplevelConcern
    # Declaring the concern must not mix it into Object itself.
    assert_not Object.ancestors.include?(::ToplevelConcern), mod.ancestors.inspect
  ensure
    # Avoid leaking the constant into other tests.
    Object.send :remove_const, :ToplevelConcern
  end
end
# frozen_string_literal: true | |
require "active_support/concern" | |
class Module | |
# = Bite-sized separation of concerns | |
# | |
# We often find ourselves with a medium-sized chunk of behavior that we'd | |
# like to extract, but only mix in to a single class. | |
# | |
# Extracting a plain old Ruby object to encapsulate it and collaborate or | |
# delegate to the original object is often a good choice, but when there's | |
# no additional state to encapsulate or we're making DSL-style declarations | |
# about the parent class, introducing new collaborators can obfuscate rather | |
# than simplify. | |
# | |
# The typical route is to just dump everything in a monolithic class, perhaps | |
# with a comment, as a least-bad alternative. Using modules in separate files | |
# means tedious sifting to get a big-picture view. | |
# | |
# = Dissatisfying ways to separate small concerns | |
# | |
# == Using comments: | |
# | |
# class Todo < ApplicationRecord | |
# # Other todo implementation | |
# # ... | |
# | |
# ## Event tracking | |
# has_many :events | |
# | |
# before_create :track_creation | |
# | |
# private | |
# def track_creation | |
# # ... | |
# end | |
# end | |
# | |
# == With an inline module: | |
# | |
# Noisy syntax. | |
# | |
# class Todo < ApplicationRecord | |
# # Other todo implementation | |
# # ... | |
# | |
# module EventTracking | |
# extend ActiveSupport::Concern | |
# | |
# included do | |
# has_many :events | |
# before_create :track_creation | |
# end | |
# | |
# private | |
# def track_creation | |
# # ... | |
# end | |
# end | |
# include EventTracking | |
# end | |
# | |
# == Mix-in noise exiled to its own file: | |
# | |
# Once our chunk of behavior starts pushing the scroll-to-understand-it | |
# boundary, we give in and move it to a separate file. At this size, the | |
# increased overhead can be a reasonable tradeoff even if it reduces our | |
# at-a-glance perception of how things work. | |
# | |
# class Todo < ApplicationRecord | |
# # Other todo implementation | |
# # ... | |
# | |
# include TodoEventTracking | |
# end | |
# | |
# = Introducing Module#concerning | |
# | |
# By quieting the mix-in noise, we arrive at a natural, low-ceremony way to | |
# separate bite-sized concerns. | |
# | |
# class Todo < ApplicationRecord | |
# # Other todo implementation | |
# # ... | |
# | |
# concerning :EventTracking do | |
# included do | |
# has_many :events | |
# before_create :track_creation | |
# end | |
# | |
# private | |
# def track_creation | |
# # ... | |
# end | |
# end | |
# end | |
# | |
# Todo.ancestors | |
# # => [Todo, Todo::EventTracking, ApplicationRecord, Object] | |
# | |
# This small step has some wonderful ripple effects. We can | |
# * grok the behavior of our class in one glance, | |
# * clean up monolithic junk-drawer classes by separating their concerns, and | |
# * stop leaning on protected/private for crude "this is internal stuff" modularity. | |
# | |
# === Prepending concerning | |
# | |
# <tt>concerning</tt> supports a <tt>prepend: true</tt> argument which will <tt>prepend</tt> the | |
# concern instead of using <tt>include</tt> for it. | |
module Concerning
  # Define a new concern and mix it in, via +include+ by default or
  # +prepend+ when <tt>prepend: true</tt> is given.
  def concerning(topic, prepend: false, &block)
    mixin_method = prepend ? :prepend : :include
    __send__(mixin_method, concern(topic, &block))
  end

  # A low-cruft shortcut to define a concern.
  #
  #   concern :EventTracking do
  #     ...
  #   end
  #
  # is equivalent to
  #
  #   module EventTracking
  #     extend ActiveSupport::Concern
  #
  #     ...
  #   end
  def concern(topic, &module_definition)
    mod = Module.new do
      extend ::ActiveSupport::Concern
      module_eval(&module_definition)
    end
    const_set(topic, mod)
  end
end
include Concerning | |
end |
# frozen_string_literal: true | |
require_relative "../../abstract_unit" | |
require "active_support/core_ext/module/concerning" | |
class ModuleConcerningTest < ActiveSupport::TestCase
  # +concerning+ both defines the constant and mixes it into the declaring
  # class right away, whether included (the default) or prepended.
  def test_concerning_declares_a_concern_and_includes_it_immediately
    included_host = Class.new do
      concerning(:Foo) { }
    end
    assert_includes included_host.ancestors, included_host::Foo, included_host.ancestors.inspect

    prepended_host = Class.new do
      concerning(:Foo, prepend: true) { }
    end
    assert_includes prepended_host.ancestors, prepended_host::Foo, prepended_host.ancestors.inspect
  end

  # A prepended concern sits in front of the class itself, so its methods
  # can wrap same-named methods with +super+.
  def test_concerning_can_prepend_concern
    host = Class.new do
      def hi; "self"; end

      concerning(:Foo, prepend: true) do
        def hi; "hello, #{super}"; end
      end
    end

    assert_equal "hello, self", host.new.hi
  end
end
# Exercises Module#concern / Module#concerning: constant placement, hook
# invocation, and interaction between a hand-written ClassMethods module
# and the class_methods DSL.
class ModuleConcernTest < ActiveSupport::TestCase
  def test_concern_creates_a_module_extended_with_active_support_concern
    klass = Class.new do
      concern :Baz do
        included { @foo = 1 }
        prepended { @foo = 2 }
        def should_be_public; end
      end
    end

    # Declares a concern but doesn't include it
    # (const_defined? with false checks the class itself, not ancestors).
    assert klass.const_defined?(:Baz, false)
    assert_not ModuleConcernTest.const_defined?(:Baz)
    assert_kind_of ActiveSupport::Concern, klass::Baz
    assert_not_includes klass.ancestors, klass::Baz, klass.ancestors.inspect

    # Public method visibility by default
    assert_includes klass::Baz.public_instance_methods.map(&:to_s), "should_be_public"

    # Calls included hook
    assert_equal 1, Class.new { include klass::Baz }.instance_variable_get("@foo")

    # Calls prepended hook
    assert_equal 2, Class.new { prepend klass::Baz }.instance_variable_get("@foo")
  end

  # Fixture: mixes a raw `module ClassMethods` (which the concern machinery
  # does not pick up — it is "orphaned" onto the surrounding class) with
  # const_set and class_methods blocks that should coexist without clobbering.
  class Foo
    concerning :Bar do
      module ClassMethods
        def will_be_orphaned; end
      end

      const_set :ClassMethods, Module.new {
        def hacked_on; end
      }

      # Doesn't overwrite existing ClassMethods module.
      class_methods do
        def nicer_dsl; end
      end

      # Doesn't overwrite previous class_methods definitions.
      class_methods do
        def doesnt_clobber; end
      end
    end

    # Same fixture shape again, but prepended rather than included.
    concerning :Baz, prepend: true do
      module ClassMethods
        def will_be_orphaned_also; end
      end

      const_set :ClassMethods, Module.new {
        def hacked_on_also; end
      }

      # Doesn't overwrite existing ClassMethods module.
      class_methods do
        def nicer_dsl_also; end
      end

      # Doesn't overwrite previous class_methods definitions.
      class_methods do
        def doesnt_clobber_also; end
      end
    end
  end

  def test_using_class_methods_blocks_instead_of_ClassMethods_module
    assert_not_respond_to Foo, :will_be_orphaned
    assert_respond_to Foo, :hacked_on
    assert_respond_to Foo, :nicer_dsl
    assert_respond_to Foo, :doesnt_clobber

    # Orphan in Foo::ClassMethods, not Bar::ClassMethods.
    assert Foo.const_defined?(:ClassMethods)
    assert Foo::ClassMethods.method_defined?(:will_be_orphaned)
  end

  def test_using_class_methods_blocks_instead_of_ClassMethods_module_prepend
    assert_not_respond_to Foo, :will_be_orphaned_also
    assert_respond_to Foo, :hacked_on_also
    assert_respond_to Foo, :nicer_dsl_also
    assert_respond_to Foo, :doesnt_clobber_also

    # Orphan in Foo::ClassMethods, not Bar::ClassMethods.
    assert Foo.const_defined?(:ClassMethods)
    assert Foo::ClassMethods.method_defined?(:will_be_orphaned_also)
  end
end
# frozen_string_literal: true | |
require "active_support/concern" | |
require "active_support/ordered_options" | |
module ActiveSupport
  # Configurable provides a <tt>config</tt> method to store and retrieve
  # configuration options as an OrderedOptions.
  module Configurable
    extend ActiveSupport::Concern

    # Backing store for config values. Inherits copy-on-read semantics from
    # InheritableOptions so subclasses see parent values until they write.
    class Configuration < ActiveSupport::InheritableOptions
      def compile_methods!
        self.class.compile_methods!(keys)
      end

      # Compiles reader methods so we don't have to go through method_missing.
      def self.compile_methods!(keys)
        # Only keys without an existing reader get one; repeated calls are
        # therefore idempotent. _get reads the underlying hash directly.
        keys.reject { |m| method_defined?(m) }.each do |key|
          class_eval <<-RUBY, __FILE__, __LINE__ + 1
            def #{key}; _get(#{key.inspect}); end
          RUBY
        end
      end
    end

    module ClassMethods
      # Lazily builds the configuration object. A subclass of a configurable
      # class receives an inheritable copy of its superclass's config.
      def config
        @_config ||= if respond_to?(:superclass) && superclass.respond_to?(:config)
          superclass.config.inheritable_copy
        else
          # create a new "anonymous" class that will host the compiled reader methods
          Class.new(Configuration).new
        end
      end

      # Yields the config object for block-style configuration.
      def configure
        yield config
      end

      # Allows you to add shortcut so that you don't have to refer to attribute
      # through config. Also look at the example for config to contrast.
      #
      # Defines both class and instance config accessors.
      #
      #   class User
      #     include ActiveSupport::Configurable
      #     config_accessor :allowed_access
      #   end
      #
      #   User.allowed_access # => nil
      #   User.allowed_access = false
      #   User.allowed_access # => false
      #
      #   user = User.new
      #   user.allowed_access # => false
      #   user.allowed_access = true
      #   user.allowed_access # => true
      #
      #   User.allowed_access # => false
      #
      # The attribute name must be a valid method name in Ruby.
      #
      #   class User
      #     include ActiveSupport::Configurable
      #     config_accessor :"1_Badname"
      #   end
      #   # => NameError: invalid config attribute name
      #
      # To omit the instance writer method, pass <tt>instance_writer: false</tt>.
      # To omit the instance reader method, pass <tt>instance_reader: false</tt>.
      #
      #   class User
      #     include ActiveSupport::Configurable
      #     config_accessor :allowed_access, instance_reader: false, instance_writer: false
      #   end
      #
      #   User.allowed_access = false
      #   User.allowed_access # => false
      #
      #   User.new.allowed_access = true # => NoMethodError
      #   User.new.allowed_access        # => NoMethodError
      #
      # Or pass <tt>instance_accessor: false</tt>, to omit both instance methods.
      #
      #   class User
      #     include ActiveSupport::Configurable
      #     config_accessor :allowed_access, instance_accessor: false
      #   end
      #
      #   User.allowed_access = false
      #   User.allowed_access # => false
      #
      #   User.new.allowed_access = true # => NoMethodError
      #   User.new.allowed_access        # => NoMethodError
      #
      # Also you can pass <tt>default</tt> or a block to set up the attribute with a default value.
      #
      #   class User
      #     include ActiveSupport::Configurable
      #     config_accessor :allowed_access, default: false
      #     config_accessor :hair_colors do
      #       [:brown, :black, :blonde, :red]
      #     end
      #   end
      #
      #   User.allowed_access # => false
      #   User.hair_colors # => [:brown, :black, :blonde, :red]
      def config_accessor(*names, instance_reader: true, instance_writer: true, instance_accessor: true, default: nil) # :doc:
        names.each do |name|
          raise NameError.new("invalid config attribute name") unless /\A[_A-Za-z]\w*\z/.match?(name)

          # Capture __LINE__ so backtraces through the generated methods point
          # at these template lines.
          reader, reader_line = "def #{name}; config.#{name}; end", __LINE__
          writer, writer_line = "def #{name}=(value); config.#{name} = value; end", __LINE__

          # Class-level accessors always exist; instance-level ones are opt-out.
          singleton_class.class_eval reader, __FILE__, reader_line
          singleton_class.class_eval writer, __FILE__, writer_line

          if instance_accessor
            class_eval reader, __FILE__, reader_line if instance_reader
            class_eval writer, __FILE__, writer_line if instance_writer
          end

          # Seed the value: a block wins over the :default keyword.
          send("#{name}=", block_given? ? yield : default)
        end
      end
      private :config_accessor
    end

    # Reads and writes attributes from a configuration OrderedOptions.
    #
    #   require "active_support/configurable"
    #
    #   class User
    #     include ActiveSupport::Configurable
    #   end
    #
    #   user = User.new
    #
    #   user.config.allowed_access = true
    #   user.config.level = 1
    #
    #   user.config.allowed_access # => true
    #   user.config.level          # => 1
    def config
      @_config ||= self.class.config.inheritable_copy
    end
  end
end
# frozen_string_literal: true | |
require_relative "abstract_unit" | |
require "active_support/configurable" | |
# Tests ActiveSupport::Configurable: accessor generation, inheritance of
# config between classes, instance-level copies, and method compilation.
class ConfigurableActiveSupport < ActiveSupport::TestCase
  class Parent
    include ActiveSupport::Configurable
    config_accessor :foo
    config_accessor :bar, instance_reader: false, instance_writer: false
    config_accessor :baz, instance_accessor: false
  end

  class Child < Parent
  end

  setup do
    Parent.config.clear
    Parent.config.foo = :bar
    Child.config.clear
  end

  test "adds a configuration hash" do
    assert_equal({ foo: :bar }, Parent.config)
  end

  test "adds a configuration hash to a module as well" do
    mixin = Module.new { include ActiveSupport::Configurable }
    mixin.config.foo = :bar
    assert_equal({ foo: :bar }, mixin.config)
  end

  test "configuration hash is inheritable" do
    # Child reads through to Parent until it writes its own value.
    assert_equal :bar, Child.config.foo
    assert_equal :bar, Parent.config.foo

    Child.config.foo = :baz
    assert_equal :baz, Child.config.foo
    assert_equal :bar, Parent.config.foo
  end

  test "configuration accessors are not available on instance" do
    instance = Parent.new
    assert_not_respond_to instance, :bar
    assert_not_respond_to instance, :bar=

    assert_not_respond_to instance, :baz
    assert_not_respond_to instance, :baz=
  end

  test "configuration accessors can take a default value as a block" do
    parent = Class.new do
      include ActiveSupport::Configurable
      config_accessor :hair_colors, :tshirt_colors do
        [:black, :blue, :white]
      end
    end

    assert_equal [:black, :blue, :white], parent.hair_colors
    assert_equal [:black, :blue, :white], parent.tshirt_colors
  end

  test "configuration accessors can take a default value as an option" do
    parent = Class.new do
      include ActiveSupport::Configurable
      config_accessor :foo, default: :bar
    end

    assert_equal :bar, parent.foo
  end

  test "configuration hash is available on instance" do
    # Each instance gets its own inheritable copy; writes don't leak upward.
    instance = Parent.new
    assert_equal :bar, instance.config.foo
    assert_equal :bar, Parent.config.foo

    instance.config.foo = :baz
    assert_equal :baz, instance.config.foo
    assert_equal :bar, Parent.config.foo
  end

  test "configuration is crystalizeable" do
    # compile_methods! turns method_missing lookups into real reader methods.
    parent = Class.new { include ActiveSupport::Configurable }
    child  = Class.new(parent)

    parent.config.bar = :foo
    assert_method_not_defined parent.config, :bar
    assert_method_not_defined child.config, :bar
    assert_method_not_defined child.new.config, :bar

    parent.config.compile_methods!
    assert_equal :foo, parent.config.bar
    assert_equal :foo, child.new.config.bar

    assert_method_defined parent.config, :bar
    assert_method_defined child.config, :bar
    assert_method_defined child.new.config, :bar
  end

  test "should raise name error if attribute name is invalid" do
    assert_raises NameError do
      Class.new do
        include ActiveSupport::Configurable
        config_accessor "invalid attribute name"
      end
    end

    assert_raises NameError do
      Class.new do
        include ActiveSupport::Configurable
        config_accessor "invalid\nattribute"
      end
    end

    assert_raises NameError do
      Class.new do
        include ActiveSupport::Configurable
        config_accessor "invalid\n"
      end
    end
  end

  test "the config_accessor method should not be publicly callable" do
    assert_raises NoMethodError do
      Class.new {
        include ActiveSupport::Configurable
      }.config_accessor :foo
    end
  end

  def assert_method_defined(object, method)
    methods = object.public_methods.map(&:to_s)
    assert_includes methods, method.to_s, "Expected #{methods.inspect} to include #{method.to_s.inspect}"
  end

  def assert_method_not_defined(object, method)
    methods = object.public_methods.map(&:to_s)
    assert_not_includes methods, method.to_s, "Expected #{methods.inspect} to not include #{method.to_s.inspect}"
  end
end
# frozen_string_literal: true | |
module ActiveSupport
  # Reads a YAML configuration file, evaluating any ERB, then
  # parsing the resulting YAML.
  #
  # Warns in case of YAML confusing characters, like invisible
  # non-breaking spaces.
  class ConfigurationFile # :nodoc:
    class FormatError < StandardError; end

    # Convenience entry point: build an instance and parse in one call.
    def self.parse(content_path, **options)
      new(content_path).parse(**options)
    end

    def initialize(content_path)
      @content_path = content_path.to_s
      @content = read(content_path)
    end

    # Renders the ERB template, then YAML-parses the result. An empty or
    # all-comment file parses to an empty Hash rather than nil.
    def parse(context: nil, **options)
      rendered = render(context)
      loader = YAML.respond_to?(:unsafe_load) ? :unsafe_load : :load
      YAML.public_send(loader, rendered, **options) || {}
    rescue Psych::SyntaxError => error
      raise "YAML syntax error occurred while parsing #{@content_path}. " \
        "Please note that YAML must be consistently indented using spaces. Tabs are not allowed. " \
        "Error: #{error.message}"
    end

    private
      # Reads the raw file, warning when it contains U+00A0 characters that
      # visually pass for spaces but break YAML indentation.
      def read(content_path)
        require "yaml"
        require "erb"

        File.read(content_path).tap do |raw|
          if raw.include?("\u00A0")
            warn "#{content_path} contains invisible non-breaking spaces, you may want to remove those"
          end
        end
      end

      # Evaluates the ERB; setting the filename makes error backtraces point
      # at the configuration file itself.
      def render(context)
        template = ERB.new(@content)
        template.filename = @content_path
        context ? template.result(context) : template.result
      end
  end
end
# frozen_string_literal: true | |
require_relative "abstract_unit" | |
class ConfigurationFileTest < ActiveSupport::TestCase
  test "backtrace contains yaml path" do
    assert_erb_error_points_at_file { |file| file.path }
  end

  test "backtrace contains yaml path (when Pathname given)" do
    assert_erb_error_points_at_file { |file| Pathname(file.path) }
  end

  private
    # Writes an ERB template that raises on render (undefined +foo+), parses
    # it through ConfigurationFile, and asserts the raised error's backtrace
    # points at the temp file itself. The block maps the Tempfile to whatever
    # path representation should be handed to +parse+.
    def assert_erb_error_points_at_file
      Tempfile.create do |file|
        file.write("wrong: <%= foo %>")
        file.rewind

        error = assert_raises do
          ActiveSupport::ConfigurationFile.parse(yield(file))
        end
        assert_match file.path, error.backtrace.first
      end
    end
end
# frozen_string_literal: true

# NOTE(review): looks like an autoloading test fixture — a bare constant
# used to exercise constant definition/removal behavior. Confirm against
# the suite that loads it.
Conflict = 1
# frozen_string_literal: true | |
# Shared behavior for cache-store tests exercising connection pooling.
# The including test class is expected to provide +store+ and
# +emulating_latency+ (NOTE(review): neither is defined here — confirm
# they exist in every includer).
module ConnectionPoolBehavior
  def test_connection_pool
    # Parallel assignment: the right-hand side is evaluated first, so the
    # original report_on_exception value is captured before being disabled.
    Thread.report_on_exception, original_report_on_exception = false, Thread.report_on_exception

    threads = []

    emulating_latency do
      cache = ActiveSupport::Cache.lookup_store(*store, { pool_size: 2, pool_timeout: 1 }.merge(store_options))
      cache.read("foo")

      assert_raises Timeout::Error do
        # One of the three threads will fail in 1 second because our pool size
        # is only two.
        3.times do
          threads << Thread.new do
            cache.read("latency")
          end
        end

        threads.each(&:join)
      end
    ensure
      # Kill stragglers even when the expected Timeout::Error was raised.
      threads.each(&:kill)
    end
  ensure
    Thread.report_on_exception = original_report_on_exception
  end

  def test_no_connection_pool
    threads = []

    emulating_latency do
      cache = ActiveSupport::Cache.lookup_store(*store, store_options)

      assert_nothing_raised do
        # Default connection pool size is 5, assuming 10 will make sure that
        # the connection pool isn't used at all.
        10.times do
          threads << Thread.new do
            cache.read("latency")
          end
        end

        threads.each(&:join)
      end
    ensure
      threads.each(&:kill)
    end
  end

  private
    # Overridable hook: extra options merged into the store configuration.
    def store_options; {}; end
end
# frozen_string_literal: true | |
module ActiveSupport
  class Deprecation
    # DeprecatedConstantAccessor transforms a constant into a deprecated one by
    # hooking +const_missing+.
    #
    # It takes the names of an old (deprecated) constant and of a new constant
    # (both in string form) and optionally a deprecator. The deprecator defaults
    # to +ActiveSupport::Deprecation.instance+ if none is specified.
    #
    # The deprecated constant now returns the same object as the new one rather
    # than a proxy object, so it can be used transparently in +rescue+ blocks
    # etc.
    #
    #   PLANETS = %w(mercury venus earth mars jupiter saturn uranus neptune pluto)
    #
    #   # (In a later update, the original implementation of `PLANETS` has been removed.)
    #
    #   PLANETS_POST_2006 = %w(mercury venus earth mars jupiter saturn uranus neptune)
    #   include ActiveSupport::Deprecation::DeprecatedConstantAccessor
    #   deprecate_constant 'PLANETS', 'PLANETS_POST_2006'
    #
    #   PLANETS.map { |planet| planet.capitalize }
    #   # => DEPRECATION WARNING: PLANETS is deprecated! Use PLANETS_POST_2006 instead.
    #        (Backtrace information…)
    #        ["Mercury", "Venus", "Earth", "Mars", "Jupiter", "Saturn", "Uranus", "Neptune"]
    module DeprecatedConstantAccessor
      def self.included(base)
        require "active_support/inflector/methods"

        extension = Module.new do
          # Intercepts lookups of constants removed from +base+; registered
          # deprecations warn and resolve to the replacement constant.
          def const_missing(missing_const_name)
            if class_variable_defined?(:@@_deprecated_constants)
              if (replacement = class_variable_get(:@@_deprecated_constants)[missing_const_name.to_s])
                replacement[:deprecator].warn(replacement[:message] || "#{name}::#{missing_const_name} is deprecated! Use #{replacement[:new]} instead.", caller_locations)
                return ActiveSupport::Inflector.constantize(replacement[:new].to_s)
              end
            end
            super
          end

          # Registers +const_name+ as deprecated in favor of +new_constant+.
          # The registry lives in a class variable, so it is shared down the
          # inheritance tree of +base+.
          def deprecate_constant(const_name, new_constant, message: nil, deprecator: ActiveSupport::Deprecation.instance)
            class_variable_set(:@@_deprecated_constants, {}) unless class_variable_defined?(:@@_deprecated_constants)
            class_variable_get(:@@_deprecated_constants)[const_name.to_s] = { new: new_constant, message: message, deprecator: deprecator }
          end
        end
        # Prepend on the singleton class so our const_missing runs before any
        # const_missing the class itself defines, still able to call +super+.
        base.singleton_class.prepend extension
      end
    end
  end
end
# frozen_string_literal: true | |
require "active_support/concern" | |
require "active_support/inflector" | |
module ActiveSupport
  module Testing
    # Resolves a constant from a minitest spec name.
    #
    # Given the following spec-style test:
    #
    #   describe WidgetsController, :index do
    #     describe "authenticated user" do
    #       describe "returns widgets" do
    #         it "has a controller that exists" do
    #           assert_kind_of WidgetsController, @controller
    #         end
    #       end
    #     end
    #   end
    #
    # The test will have the following name:
    #
    #   "WidgetsController::index::authenticated user::returns widgets"
    #
    # The constant WidgetsController can be resolved from the name.
    # The following code will resolve the constant:
    #
    #   controller = determine_constant_from_test_name(name) do |constant|
    #     Class === constant && constant < ::ActionController::Metal
    #   end
    module ConstantLookup
      extend ::ActiveSupport::Concern

      module ClassMethods # :nodoc:
        # Walks the "::"-joined name from longest prefix to shortest,
        # stripping a trailing "Test" from the last segment, and returns the
        # first resolved constant accepted by the block (or nil).
        def determine_constant_from_test_name(test_name)
          segments = test_name.split("::")

          until segments.empty?
            segments.last.sub!(/Test$/, "")
            candidate = segments.join("::").safe_constantize
            segments.pop
            return candidate if yield(candidate)
          end

          nil
        end
      end
    end
  end
end
# frozen_string_literal: true | |
require_relative "../abstract_unit" | |
# Fixtures for ConstantLookupTest: a base class, a subclass with both an
# instance and a class method named +index+, and a plain module.
class Foo; end
class Bar < Foo
  def index; end
  def self.index; end
end
module FooBar; end
# Tests determine_constant_from_test_name: resolution of classes and modules
# from spec-style names, nil results for unknown names, and propagation of
# load-time exceptions.
class ConstantLookupTest < ActiveSupport::TestCase
  include ActiveSupport::Testing::ConstantLookup

  # Resolves +name+ to a subclass of Foo, or nil.
  def find_foo(name)
    self.class.determine_constant_from_test_name(name) do |constant|
      Class === constant && constant < Foo
    end
  end

  # Resolves +name+ to any Module (or Class), or nil.
  def find_module(name)
    self.class.determine_constant_from_test_name(name) do |constant|
      Module === constant
    end
  end

  def test_find_bar_from_foo
    assert_equal Bar, find_foo("Bar")
    assert_equal Bar, find_foo("Bar::index")
    assert_equal Bar, find_foo("Bar::index::authenticated")
    assert_equal Bar, find_foo("BarTest")
    assert_equal Bar, find_foo("BarTest::index")
    assert_equal Bar, find_foo("BarTest::index::authenticated")
  end

  def test_find_module
    assert_equal FooBar, find_module("FooBar")
    assert_equal FooBar, find_module("FooBar::index")
    assert_equal FooBar, find_module("FooBar::index::authenticated")
    assert_equal FooBar, find_module("FooBarTest")
    assert_equal FooBar, find_module("FooBarTest::index")
    assert_equal FooBar, find_module("FooBarTest::index::authenticated")
  end

  def test_returns_nil_when_cant_find_foo
    assert_nil find_foo("DoesntExist")
    assert_nil find_foo("DoesntExistTest")
    assert_nil find_foo("DoesntExist::Nadda")
    assert_nil find_foo("DoesntExist::Nadda::Nope")
    assert_nil find_foo("DoesntExist::Nadda::Nope::NotHere")
  end

  def test_returns_nil_when_cant_find_module
    assert_nil find_module("DoesntExist")
    assert_nil find_module("DoesntExistTest")
    assert_nil find_module("DoesntExist::Nadda")
    assert_nil find_module("DoesntExist::Nadda::Nope")
    assert_nil find_module("DoesntExist::Nadda::Nope::NotHere")
  end

  # NOTE(review): "shallow" is presumably a typo for "swallow" in this name.
  # The fixture raises NoMethodError at load time; NoMethodError < NameError,
  # so assert_raises(NameError) catches the propagated exception.
  def test_does_not_shallow_ordinary_exceptions
    test_name = "RaisesNameError"
    file_name = File.expand_path("../autoloading_fixtures/raises_no_method_error.rb", __dir__)
    assert_raises(NameError) do
      Object.autoload(test_name, file_name)
      self.class.determine_constant_from_test_name(test_name)
    end
  ensure
    Object.send(:remove_const, test_name)
  end
end
# frozen_string_literal: true | |
module ActiveSupport
  module Testing
    module ConstantStubbing
      # Changes the value of a constant for the duration of a block. Example:
      #
      #   # World::List::Import::LARGE_IMPORT_THRESHOLD = 5000
      #   stub_const(World::List::Import, :LARGE_IMPORT_THRESHOLD, 1) do
      #     assert_equal 1, World::List::Import::LARGE_IMPORT_THRESHOLD
      #   end
      #
      #   assert_equal 5000, World::List::Import::LARGE_IMPORT_THRESHOLD
      #
      # Using this method rather than forcing <tt>World::List::Import::LARGE_IMPORT_THRESHOLD = 5000</tt> prevents
      # warnings from being thrown, and ensures that the old value is returned after the test has completed.
      #
      # Note: Stubbing a const will stub it across all threads. So if you have concurrent threads
      # (like separate test suites running in parallel) that all depend on the same constant, it's possible
      # divergent stubbing will trample on each other.
      def stub_const(klass, constant, new_value)
        # Removing the constant before re-setting it avoids Ruby's
        # already-initialized-constant warning.
        swap = lambda do |value|
          klass.send(:remove_const, constant)
          klass.const_set(constant, value)
        end

        original_value = klass.const_get(constant)
        swap.call(new_value)
        yield
      ensure
        # Restore the original even if the block raised.
        swap.call(original_value)
      end
    end
  end
end
# frozen_string_literal: true | |
require_relative "constantize_test_helpers" | |
# Fixture namespace for constantize tests: nested constants, a subclass
# (Fase < Case) whose lookup reaches Dice via ancestors, and a class (Gas)
# that only includes Base rather than nesting inside it.
module Ace
  module Base
    class Case
      class Dice
      end
    end
    class Fase < Case
    end
  end
  class Gas
    include Base
  end
end
# Fixture: constants mixed into Object itself, so "Case::Dice" resolves at
# the top level through the included module's namespace.
class Object
  module AddtlGlobalConstants
    class Case
      class Dice
      end
    end
  end
  include AddtlGlobalConstants
end
# Shared assertions for constantize/safe_constantize implementations. The
# caller yields each string to the implementation under test.
module ConstantizeTestCases
  include ConstantizeTestHelpers

  def run_constantize_tests_on
    assert_equal Ace::Base::Case, yield("Ace::Base::Case")
    assert_equal Ace::Base::Case, yield("::Ace::Base::Case")
    assert_equal Ace::Base::Case::Dice, yield("Ace::Base::Case::Dice")
    # Fase < Case, so Fase::Dice resolves to the same Dice via ancestors;
    # the next two assertions name the same object two ways.
    assert_equal Ace::Base::Case::Dice, yield("Ace::Base::Fase::Dice")
    assert_equal Ace::Base::Fase::Dice, yield("Ace::Base::Fase::Dice")
    assert_equal Ace::Gas::Case, yield("Ace::Gas::Case")
    assert_equal Ace::Gas::Case::Dice, yield("Ace::Gas::Case::Dice")
    assert_equal Ace::Base::Case::Dice, yield("Ace::Gas::Case::Dice")
    assert_equal Case::Dice, yield("Case::Dice")
    assert_equal AddtlGlobalConstants::Case::Dice, yield("Case::Dice")
    assert_equal Object::AddtlGlobalConstants::Case::Dice, yield("Case::Dice")
    assert_equal Case::Dice, yield("Object::Case::Dice")
    assert_equal AddtlGlobalConstants::Case::Dice, yield("Object::Case::Dice")
    assert_equal Object::AddtlGlobalConstants::Case::Dice, yield("Case::Dice")
    assert_equal ConstantizeTestCases, yield("ConstantizeTestCases")
    assert_equal ConstantizeTestCases, yield("::ConstantizeTestCases")

    # Unknown, malformed, or only-reachable-via-ancestors names must raise.
    assert_raises(NameError) { yield("UnknownClass") }
    assert_raises(NameError) { yield("UnknownClass::Ace") }
    assert_raises(NameError) { yield("UnknownClass::Ace::Base") }
    assert_raises(NameError) { yield("An invalid string") }
    assert_raises(NameError) { yield("InvalidClass\n") }
    assert_raises(NameError) { yield("Ace::ConstantizeTestCases") }
    assert_raises(NameError) { yield("Ace::Base::ConstantizeTestCases") }
    assert_raises(NameError) { yield("Ace::Gas::Base") }
    assert_raises(NameError) { yield("Ace::Gas::ConstantizeTestCases") }
    assert_raises(NameError) { yield("") }
    assert_raises(NameError) { yield("::") }
    assert_raises(NameError) { yield("Ace::gas") }

    # Exceptions raised while loading the fixture must propagate untouched.
    assert_raises(NameError) do
      with_autoloading_fixtures do
        yield("RaisesNameError")
      end
    end

    assert_raises(NoMethodError) do
      with_autoloading_fixtures do
        yield("RaisesNoMethodError")
      end
    end
  end

  def run_safe_constantize_tests_on
    assert_equal Ace::Base::Case, yield("Ace::Base::Case")
    assert_equal Ace::Base::Case, yield("::Ace::Base::Case")
    assert_equal Ace::Base::Case::Dice, yield("Ace::Base::Case::Dice")
    assert_equal Ace::Base::Fase::Dice, yield("Ace::Base::Fase::Dice")
    assert_equal Ace::Gas::Case, yield("Ace::Gas::Case")
    assert_equal Ace::Gas::Case::Dice, yield("Ace::Gas::Case::Dice")
    assert_equal Case::Dice, yield("Case::Dice")
    assert_equal Case::Dice, yield("Object::Case::Dice")
    assert_equal ConstantizeTestCases, yield("ConstantizeTestCases")
    assert_equal ConstantizeTestCases, yield("::ConstantizeTestCases")

    # safe_constantize maps resolution failures to nil instead of raising.
    assert_nil yield("")
    assert_nil yield("::")
    assert_nil yield("UnknownClass")
    assert_nil yield("UnknownClass::Ace")
    assert_nil yield("UnknownClass::Ace::Base")
    assert_nil yield("An invalid string")
    assert_nil yield("InvalidClass\n")
    assert_nil yield("blargle")
    assert_nil yield("Ace::ConstantizeTestCases")
    assert_nil yield("Ace::Base::ConstantizeTestCases")
    assert_nil yield("Ace::Gas::Base")
    assert_nil yield("Ace::Gas::ConstantizeTestCases")
    assert_nil yield("#<Class:0x7b8b718b>::Nested_1")
    assert_nil yield("Ace::gas")
    assert_nil yield("Object::ABC")
    assert_nil yield("Object::Object::Object::ABC")
    assert_nil yield("A::Object::B")
    assert_nil yield("A::Object::Object::Object::B")

    # But genuine load-time errors still propagate.
    assert_raises(LoadError) do
      with_autoloading_fixtures do
        yield("RaisesLoadError")
      end
    end

    assert_raises(NameError) do
      with_autoloading_fixtures do
        yield("RaisesNameError")
      end
    end

    assert_raises(NoMethodError) do
      with_autoloading_fixtures do
        yield("RaisesNoMethodError")
      end
    end
  end
end
# frozen_string_literal: true | |
# Registers Object-level autoloads for fixture files that raise on load, and
# cleans up constants and $LOADED_FEATURES so fixtures can be loaded again.
module ConstantizeTestHelpers
  # Absolute, symlink-resolved path to the fixture directory.
  ROOT_DIR = File.realpath("#{__dir__}/autoloading_fixtures")

  # Frozen so shared test state can't be mutated accidentally.
  AUTOLOADS = {
    "RaisesLoadError" => "#{ROOT_DIR}/raises_load_error",
    "RaisesNameError" => "#{ROOT_DIR}/raises_name_error",
    "RaisesNoMethodError" => "#{ROOT_DIR}/raises_no_method_error"
  }.freeze

  # Defines the autoloads, yields, and always tears them down afterwards.
  def with_autoloading_fixtures
    define_autoloads
    yield
  ensure
    remove_autoloads
  end

  def define_autoloads
    AUTOLOADS.each do |constant, realpath|
      Object.autoload(constant, realpath)
    end
  end

  def remove_autoloads
    AUTOLOADS.each do |constant, realpath|
      Object.send(:remove_const, constant) if Object.const_defined?(constant)
      # require records the resolved feature path with its ".rb" extension,
      # so delete both the bare path (as originally written) and the
      # extension-qualified form to guarantee the fixture can be re-required.
      $LOADED_FEATURES.delete(realpath)
      $LOADED_FEATURES.delete("#{realpath}.rb")
    end
  end
end
# frozen_string_literal: true | |
require "time" | |
require "active_support/inflector/methods" | |
require "active_support/values/time_zone" | |
class Time
  DATE_FORMATS = {
    db: "%Y-%m-%d %H:%M:%S",
    inspect: "%Y-%m-%d %H:%M:%S.%9N %z",
    number: "%Y%m%d%H%M%S",
    nsec: "%Y%m%d%H%M%S%9N",
    usec: "%Y%m%d%H%M%S%6N",
    time: "%H:%M",
    short: "%d %b %H:%M",
    long: "%B %d, %Y %H:%M",
    long_ordinal: lambda { |t|
      ordinal_day = ActiveSupport::Inflector.ordinalize(t.day)
      t.strftime("%B #{ordinal_day}, %Y %H:%M")
    },
    rfc822: lambda { |t|
      offset = t.formatted_offset(false)
      t.strftime("%a, %d %b %Y %H:%M:%S #{offset}")
    },
    iso8601: lambda { |t| t.iso8601 }
  }

  # Converts to a formatted string. See DATE_FORMATS for built-in formats.
  #
  # This method is aliased to <tt>to_formatted_s</tt>.
  #
  #   time = Time.now                    # => 2007-01-18 06:10:17 -06:00
  #
  #   time.to_fs(:time)                  # => "06:10"
  #   time.to_formatted_s(:time)         # => "06:10"
  #
  #   time.to_fs(:db)                    # => "2007-01-18 06:10:17"
  #   time.to_fs(:number)                # => "20070118061017"
  #   time.to_fs(:short)                 # => "18 Jan 06:10"
  #   time.to_fs(:long)                  # => "January 18, 2007 06:10"
  #   time.to_fs(:long_ordinal)          # => "January 18th, 2007 06:10"
  #   time.to_fs(:rfc822)                # => "Thu, 18 Jan 2007 06:10:17 -0600"
  #   time.to_fs(:iso8601)               # => "2007-01-18T06:10:17-06:00"
  #
  # == Adding your own time formats to +to_fs+
  # You can add your own formats to the Time::DATE_FORMATS hash.
  # Use the format name as the hash key and either a strftime string
  # or Proc instance that takes a time argument as the value.
  #
  #   # config/initializers/time_formats.rb
  #   Time::DATE_FORMATS[:month_and_year] = '%B %Y'
  #   Time::DATE_FORMATS[:short_ordinal] = ->(time) { time.strftime("%B #{time.day.ordinalize}") }
  def to_fs(format = :default)
    formatter = DATE_FORMATS[format]

    if formatter.nil?
      # Unknown format: fall back to the default string representation.
      # Change to `to_s` when deprecation is gone. Also deprecate `to_default_s`.
      to_default_s
    elsif formatter.respond_to?(:call)
      formatter.call(self).to_s
    else
      strftime(formatter)
    end
  end
  alias_method :to_formatted_s, :to_fs
  alias_method :to_default_s, :to_s

  # Returns a formatted string of the offset from UTC, or an alternative
  # string if the time zone is already UTC.
  #
  #   Time.local(2000).formatted_offset        # => "-06:00"
  #   Time.local(2000).formatted_offset(false) # => "-0600"
  def formatted_offset(colon = true, alternate_utc_string = nil)
    if utc? && alternate_utc_string
      alternate_utc_string
    else
      ActiveSupport::TimeZone.seconds_to_utc_offset(utc_offset, colon)
    end
  end

  # Aliased to +xmlschema+ for compatibility with +DateTime+
  alias_method :rfc3339, :xmlschema
end
# frozen_string_literal: true | |
require_relative "../../abstract_unit" | |
require "active_support/core_ext/array" | |
require "active_support/core_ext/big_decimal" | |
require "active_support/core_ext/hash" | |
require "active_support/core_ext/string" | |
# Tests Array#to_sentence: connector options, blank/non-string elements,
# option validation, and return-value characteristics (unfrozen, plain String).
class ToSentenceTest < ActiveSupport::TestCase
  def test_plain_array_to_sentence
    assert_equal "", [].to_sentence
    assert_equal "one", ["one"].to_sentence
    assert_equal "one and two", ["one", "two"].to_sentence
    assert_equal "one, two, and three", ["one", "two", "three"].to_sentence
  end

  def test_to_sentence_with_words_connector
    assert_equal "one two, and three", ["one", "two", "three"].to_sentence(words_connector: " ")
    assert_equal "one & two, and three", ["one", "two", "three"].to_sentence(words_connector: " & ")
    assert_equal "onetwo, and three", ["one", "two", "three"].to_sentence(words_connector: nil)
  end

  def test_to_sentence_with_last_word_connector
    assert_equal "one, two, and also three", ["one", "two", "three"].to_sentence(last_word_connector: ", and also ")
    assert_equal "one, twothree", ["one", "two", "three"].to_sentence(last_word_connector: nil)
    assert_equal "one, two three", ["one", "two", "three"].to_sentence(last_word_connector: " ")
    assert_equal "one, two and three", ["one", "two", "three"].to_sentence(last_word_connector: " and ")
  end

  def test_two_elements
    assert_equal "one and two", ["one", "two"].to_sentence
    assert_equal "one two", ["one", "two"].to_sentence(two_words_connector: " ")
  end

  def test_one_element
    assert_equal "one", ["one"].to_sentence
  end

  # The single-element result must be a copy, not the element itself.
  def test_one_element_not_same_object
    elements = ["one"]
    assert_not_equal elements[0].object_id, elements.to_sentence.object_id
  end

  def test_one_non_string_element
    assert_equal "1", [1].to_sentence
  end

  def test_does_not_modify_given_hash
    options = { words_connector: " " }
    assert_equal "one two, and three", ["one", "two", "three"].to_sentence(options)
    assert_equal({ words_connector: " " }, options)
  end

  # nil and "" are kept as empty slots rather than dropped.
  def test_with_blank_elements
    assert_equal ", one, , two, and three", [nil, "one", "", "two", "three"].to_sentence
  end

  def test_with_invalid_options
    exception = assert_raise ArgumentError do
      ["one", "two"].to_sentence(passing: "invalid option")
    end

    assert_equal "Unknown key: :passing. Valid keys are: :words_connector, :two_words_connector, :last_word_connector, :locale", exception.message
  end

  # SafeBuffer inputs must still produce a plain String, not a SafeBuffer.
  def test_always_returns_string
    assert_instance_of String, [ActiveSupport::SafeBuffer.new("one")].to_sentence
    assert_instance_of String, [ActiveSupport::SafeBuffer.new("one"), "two"].to_sentence
    assert_instance_of String, [ActiveSupport::SafeBuffer.new("one"), "two", "three"].to_sentence
  end

  def test_returns_no_frozen_string
    assert_not [].to_sentence.frozen?
    assert_not ["one"].to_sentence.frozen?
    assert_not ["one", "two"].to_sentence.frozen?
    assert_not ["one", "two", "three"].to_sentence.frozen?
  end
end
# Exercises the deprecated Array#to_s(format) API alongside its
# replacements, Array#to_fs and Array#to_formatted_s.
class ToSTest < ActiveSupport::TestCase
  # Minimal stand-in for a DB record: #id hands out sequential integers
  # from a class-level counter, reset before each test.
  class TestDB
    class << self
      # Class-instance variable instead of the original @@class variable,
      # which would be shared across the entire inheritance tree.
      attr_accessor :counter

      def reset
        self.counter = 0
      end
    end

    reset

    def id
      self.class.counter += 1
    end
  end

  setup do
    TestDB.reset
  end

  def test_to_s_db
    collection = [TestDB.new, TestDB.new, TestDB.new]

    # Array#to_s with a format argument is deprecated in favor of #to_fs.
    assert_deprecated do
      assert_equal "null", [].to_s(:db)
    end

    assert_deprecated do
      assert_equal "1,2,3", collection.to_s(:db)
    end
  end

  def test_to_s_not_existent
    # Unknown formats fall back to the plain Array#to_s rendering.
    assert_deprecated do
      assert_equal "[]", [].to_s(:not_existent)
    end
  end

  def test_to_fs_db
    collection = [TestDB.new, TestDB.new, TestDB.new]

    assert_equal "null", [].to_fs(:db)
    assert_equal "1,2,3", collection.to_fs(:db)
    assert_equal "null", [].to_formatted_s(:db)
    # The #to_fs call above consumed ids 1-3, so this call yields 4-6.
    assert_equal "4,5,6", collection.to_formatted_s(:db)
  end
end
# Exercises Array#to_xml (Active Support conversions): rendering arrays of
# hashes and scalars as XML, with options for root, indenting, type
# annotations, and dasherizing of keys.
class ToXmlTest < ActiveSupport::TestCase
  def test_to_xml_with_hash_elements
    xml = [
      { name: "David", age: 26, age_in_millis: 820497600000 },
      { name: "Jason", age: 31, age_in_millis: BigDecimal("1.0") }
    ].to_xml(skip_instruct: true, indent: 0)

    # String#first(n) is Active Support's prefix helper; 30 chars covers
    # exactly the two opening tags.
    assert_equal '<objects type="array"><object>', xml.first(30)
    assert_includes xml, %(<age type="integer">26</age>), xml
    assert_includes xml, %(<age-in-millis type="integer">820497600000</age-in-millis>), xml
    assert_includes xml, %(<name>David</name>), xml
    assert_includes xml, %(<age type="integer">31</age>), xml
    assert_includes xml, %(<age-in-millis type="decimal">1.0</age-in-millis>), xml
    assert_includes xml, %(<name>Jason</name>), xml
  end

  def test_to_xml_with_non_hash_elements
    xml = %w[1 2 3].to_xml(skip_instruct: true, indent: 0)
    assert_equal '<strings type="array"><string', xml.first(29)
    assert_includes xml, %(<string>2</string>), xml
  end

  def test_to_xml_with_non_hash_different_type_elements
    # Mixed element types fall back to a generic <objects>/<object> naming.
    xml = [1, 2.0, "3"].to_xml(skip_instruct: true, indent: 0)
    assert_equal '<objects type="array"><object', xml.first(29)
    assert_includes xml, %(<object type="integer">1</object>), xml
    assert_includes xml, %(<object type="float">2.0</object>), xml
    assert_includes xml, %(object>3</object>), xml
  end

  def test_to_xml_with_dedicated_name
    xml = [
      { name: "David", age: 26, age_in_millis: 820497600000 }, { name: "Jason", age: 31 }
    ].to_xml(skip_instruct: true, indent: 0, root: "people")

    # A custom root is singularized for the element name.
    assert_equal '<people type="array"><person>', xml.first(29)
  end

  def test_to_xml_with_options
    xml = [
      { name: "David", street_address: "Paulina" }, { name: "Jason", street_address: "Evergreen" }
    ].to_xml(skip_instruct: true, skip_types: true, indent: 0)

    assert_equal "<objects><object>", xml.first(17)
    assert_includes xml, %(<street-address>Paulina</street-address>)
    assert_includes xml, %(<name>David</name>)
    assert_includes xml, %(<street-address>Evergreen</street-address>)
    assert_includes xml, %(<name>Jason</name>)
  end

  def test_to_xml_with_indent_set
    xml = [
      { name: "David", street_address: "Paulina" }, { name: "Jason", street_address: "Evergreen" }
    ].to_xml(skip_instruct: true, skip_types: true, indent: 4)

    # indent: 4 nests <object> by 4 spaces and its children by 8.
    assert_equal "<objects>\n    <object>", xml.first(22)
    assert_includes xml, %(\n        <street-address>Paulina</street-address>)
    assert_includes xml, %(\n        <name>David</name>)
    assert_includes xml, %(\n        <street-address>Evergreen</street-address>)
    assert_includes xml, %(\n        <name>Jason</name>)
  end

  def test_to_xml_with_dasherize_false
    xml = [
      { name: "David", street_address: "Paulina" }, { name: "Jason", street_address: "Evergreen" }
    ].to_xml(skip_instruct: true, skip_types: true, indent: 0, dasherize: false)

    assert_equal "<objects><object>", xml.first(17)
    assert_includes xml, %(<street_address>Paulina</street_address>)
    assert_includes xml, %(<street_address>Evergreen</street_address>)
  end

  def test_to_xml_with_dasherize_true
    xml = [
      { name: "David", street_address: "Paulina" }, { name: "Jason", street_address: "Evergreen" }
    ].to_xml(skip_instruct: true, skip_types: true, indent: 0, dasherize: true)

    assert_equal "<objects><object>", xml.first(17)
    assert_includes xml, %(<street-address>Paulina</street-address>)
    assert_includes xml, %(<street-address>Evergreen</street-address>)
  end

  def test_to_xml_with_instruct
    xml = [
      { name: "David", age: 26, age_in_millis: 820497600000 },
      { name: "Jason", age: 31, age_in_millis: BigDecimal("1.0") }
    ].to_xml(skip_instruct: false, indent: 0)

    # The XML declaration must appear, and only at the very start.
    assert_match(/^<\?xml [^>]*/, xml)
    assert_equal 0, xml.rindex(/<\?xml /)
  end

  def test_to_xml_with_block
    xml = [
      { name: "David", age: 26, age_in_millis: 820497600000 },
      { name: "Jason", age: 31, age_in_millis: BigDecimal("1.0") }
    ].to_xml(skip_instruct: true, indent: 0) do |builder|
      builder.count 2
    end

    assert_includes xml, %(<count>2</count>), xml
  end

  def test_to_xml_with_empty
    # An empty array renders as a self-closing root element.
    xml = [].to_xml
    assert_match(/type="array"\/>/, xml)
  end

  def test_to_xml_dups_options
    options = { skip_instruct: true }
    [].to_xml(options)
    # :builder, etc, shouldn't be added to options
    assert_equal({ skip_instruct: true }, options)
  end
end
# frozen_string_literal: true | |
# Require every core_ext extension in this directory, in sorted order,
# except the URI extension, which is left to be required explicitly.
Dir[File.expand_path("core_ext/*.rb", __dir__)].sort.each do |file|
  require file unless file.end_with?("core_ext/uri.rb")
end
# frozen_string_literal: true | |
# Fixture class: referencing CrossSiteDependency in the class body forces
# that constant to be resolved (and thus loaded) the moment this file is
# loaded. NOTE(review): presumably used by autoloading/dependency tests to
# exercise cross-file constant resolution — confirm against the caller.
class CrossSiteDepender
  CrossSiteDependency
end
# frozen_string_literal: true | |
require "active_support/callbacks" | |
require "active_support/core_ext/enumerable" | |
require "active_support/core_ext/module/delegation" | |
module ActiveSupport
  # Abstract super class that provides a thread-isolated attributes singleton, which resets automatically
  # before and after each request. This allows you to keep all the per-request attributes easily
  # available to the whole system.
  #
  # The following full app-like example demonstrates how to use a Current class to
  # facilitate easy access to the global, per-request attributes without passing them deeply
  # around everywhere:
  #
  #   # app/models/current.rb
  #   class Current < ActiveSupport::CurrentAttributes
  #     attribute :account, :user
  #     attribute :request_id, :user_agent, :ip_address
  #
  #     resets { Time.zone = nil }
  #
  #     def user=(user)
  #       super
  #       self.account = user.account
  #       Time.zone = user.time_zone
  #     end
  #   end
  #
  #   # app/controllers/concerns/authentication.rb
  #   module Authentication
  #     extend ActiveSupport::Concern
  #
  #     included do
  #       before_action :authenticate
  #     end
  #
  #     private
  #       def authenticate
  #         if authenticated_user = User.find_by(id: cookies.encrypted[:user_id])
  #           Current.user = authenticated_user
  #         else
  #           redirect_to new_session_url
  #         end
  #       end
  #   end
  #
  #   # app/controllers/concerns/set_current_request_details.rb
  #   module SetCurrentRequestDetails
  #     extend ActiveSupport::Concern
  #
  #     included do
  #       before_action do
  #         Current.request_id = request.uuid
  #         Current.user_agent = request.user_agent
  #         Current.ip_address = request.ip
  #       end
  #     end
  #   end
  #
  #   class ApplicationController < ActionController::Base
  #     include Authentication
  #     include SetCurrentRequestDetails
  #   end
  #
  #   class MessagesController < ApplicationController
  #     def create
  #       Current.account.messages.create(message_params)
  #     end
  #   end
  #
  #   class Message < ApplicationRecord
  #     belongs_to :creator, default: -> { Current.user }
  #     after_create { |message| Event.create(record: message) }
  #   end
  #
  #   class Event < ApplicationRecord
  #     before_create do
  #       self.request_id = Current.request_id
  #       self.user_agent = Current.user_agent
  #       self.ip_address = Current.ip_address
  #     end
  #   end
  #
  # A word of caution: It's easy to overdo a global singleton like Current and tangle your model as a result.
  # Current should only be used for a few, top-level globals, like account, user, and request details.
  # The attributes stuck in Current should be used by more or less all actions on all requests. If you start
  # sticking controller-specific attributes in there, you're going to create a mess.
  class CurrentAttributes
    include ActiveSupport::Callbacks
    define_callbacks :reset

    class << self
      # Returns singleton instance for this class in this thread. If none exists, one is created.
      def instance
        current_instances[current_instances_key] ||= new
      end

      # Declares one or more attributes that will be given both class and instance accessor methods.
      def attribute(*names)
        # Generate instance-level reader/writer pairs backed by the
        # attributes hash. Each string below is one source line of the
        # generated method; CodeGenerator evals the whole batch at once.
        ActiveSupport::CodeGenerator.batch(generated_attribute_methods, __FILE__, __LINE__) do |owner|
          names.each do |name|
            owner.define_cached_method(name, namespace: :current_attributes) do |batch|
              batch <<
                "def #{name}" <<
                "attributes[:#{name}]" <<
                "end"
            end
            owner.define_cached_method("#{name}=", namespace: :current_attributes) do |batch|
              batch <<
                "def #{name}=(value)" <<
                "attributes[:#{name}] = value" <<
                "end"
            end
          end
        end

        # Generate matching class-level accessors that delegate to the
        # per-thread singleton instance.
        ActiveSupport::CodeGenerator.batch(singleton_class, __FILE__, __LINE__) do |owner|
          names.each do |name|
            owner.define_cached_method(name, namespace: :current_attributes_delegation) do |batch|
              batch <<
                "def #{name}" <<
                "instance.#{name}" <<
                "end"
            end
            owner.define_cached_method("#{name}=", namespace: :current_attributes_delegation) do |batch|
              batch <<
                "def #{name}=(value)" <<
                "instance.#{name} = value" <<
                "end"
            end
          end
        end
      end

      # Calls this block before #reset is called on the instance. Used for resetting external collaborators that depend on current values.
      def before_reset(&block)
        set_callback :reset, :before, &block
      end

      # Calls this block after #reset is called on the instance. Used for resetting external collaborators, like Time.zone.
      def resets(&block)
        set_callback :reset, :after, &block
      end
      alias_method :after_reset, :resets

      delegate :set, :reset, to: :instance

      # Resets the singleton instance of every CurrentAttributes subclass
      # seen in the current execution context.
      def reset_all # :nodoc:
        current_instances.each_value(&:reset)
      end

      # Resets and then discards all per-context singleton instances.
      def clear_all # :nodoc:
        reset_all
        current_instances.clear
      end

      private
        # Module that receives the generated attribute readers/writers.
        # Because it is included (rather than the methods being defined
        # directly on the class), subclasses can override them and call +super+.
        def generated_attribute_methods
          @generated_attribute_methods ||= Module.new.tap { |mod| include mod }
        end

        # Map of class-name key => singleton instance. Stored in
        # IsolatedExecutionState, so each thread (or fiber, depending on the
        # configured isolation level) sees its own map.
        def current_instances
          IsolatedExecutionState[:current_attributes_instances] ||= {}
        end

        # Memoized symbol key for this class in the current_instances map.
        def current_instances_key
          @current_instances_key ||= name.to_sym
        end

        # Forwards any unknown class-level call to the singleton instance.
        def method_missing(name, *args, &block)
          # Caches the method definition as a singleton method of the receiver.
          #
          # By letting #delegate handle it, we avoid an enclosure that'll capture args.
          singleton_class.delegate name, to: :instance

          send(name, *args, &block)
        end
        ruby2_keywords(:method_missing)

        def respond_to_missing?(name, _)
          super || instance.respond_to?(name)
        end
    end

    # Hash of currently-set attribute values, keyed by attribute name (Symbol).
    attr_accessor :attributes

    def initialize
      @attributes = {}
    end

    # Expose one or more attributes within a block. Old values are returned after the block concludes.
    # Example demonstrating the common use of needing to set Current attributes outside the request-cycle:
    #
    #   class Chat::PublicationJob < ApplicationJob
    #     def perform(attributes, room_number, creator)
    #       Current.set(person: creator) do
    #         Chat::Publisher.publish(attributes: attributes, room_number: room_number)
    #       end
    #     end
    #   end
    def set(set_attributes)
      old_attributes = compute_attributes(set_attributes.keys)
      assign_attributes(set_attributes)
      yield
    ensure
      # Restore the previous values even if the block raises.
      assign_attributes(old_attributes)
    end

    # Reset all attributes. Should be called before and after actions, when used as a per-request singleton.
    def reset
      run_callbacks :reset do
        self.attributes = {}
      end
    end

    private
      # Writes each value through the (possibly overridden) public writer,
      # so custom #attr= methods still run.
      def assign_attributes(new_attributes)
        new_attributes.each { |key, value| public_send("#{key}=", value) }
      end

      # Snapshot of the current values for +keys+, read via the public readers.
      def compute_attributes(keys)
        keys.index_with { |key| public_send(key) }
      end
  end
end
# frozen_string_literal: true | |
require_relative "abstract_unit" | |
require "active_support/current_attributes/test_helper" | |
# Tests for ActiveSupport::CurrentAttributes: attribute accessors, reset
# callbacks, class-level delegation, and execution-state isolation.
class CurrentAttributesTest < ActiveSupport::TestCase
  # CurrentAttributes is automatically reset in Rails app via executor hooks set in railtie
  # But not in Active Support's own test suite.
  include ActiveSupport::CurrentAttributes::TestHelper

  Person = Struct.new(:id, :name, :time_zone)

  class Current < ActiveSupport::CurrentAttributes
    attribute :world, :account, :person, :request
    delegate :time_zone, to: :person

    # Runs before reset: remember who was set, so tests can observe it.
    before_reset { Session.previous = person&.id }

    # Runs after reset: restore external collaborators to their defaults.
    resets do
      Time.zone = "UTC"
      Session.current = nil
    end

    def account=(account)
      super
      self.person = Person.new(1, "#{account}'s person")
    end

    def person=(person)
      super
      Time.zone = person&.time_zone
      Session.current = person&.id
    end

    def set_world_and_account(world:, account:)
      self.world = world
      self.account = account
    end

    def get_world_and_account(hash)
      hash[:world] = world
      hash[:account] = account
      hash
    end

    def respond_to_test; end

    # Overrides the generated reader; +super+ reaches the attribute value.
    def request
      "#{super} something"
    end

    def intro
      "#{person.name}, in #{time_zone}"
    end
  end

  class Session < ActiveSupport::CurrentAttributes
    attribute :current, :previous
  end

  # Eagerly set-up `instance`s by reference.
  [ Current.instance, Session.instance ]

  # Use library specific minitest hook to catch Time.zone before reset is called via TestHelper
  def before_setup
    @original_time_zone = Time.zone
    super
  end

  # Use library specific minitest hook to set Time.zone after reset is called via TestHelper
  def after_teardown
    super
    Time.zone = @original_time_zone
  end

  # Guards against state leaking between tests through the reset callbacks.
  setup { assert_nil Session.previous, "Expected Session to not have leaked state" }

  test "read and write attribute" do
    Current.world = "world/1"
    assert_equal "world/1", Current.world
  end

  test "read overwritten attribute method" do
    Current.request = "request/1"
    assert_equal "request/1 something", Current.request
  end

  test "set attribute via overwritten method" do
    Current.account = "account/1"
    assert_equal "account/1", Current.account
    assert_equal "account/1's person", Current.person.name
  end

  test "set auxiliary class via overwritten method" do
    Current.person = Person.new(42, "David", "Central Time (US & Canada)")
    assert_equal "Central Time (US & Canada)", Time.zone.name
    assert_equal 42, Session.current
  end

  test "resets auxiliary classes via callback" do
    Current.person = Person.new(42, "David", "Central Time (US & Canada)")
    assert_equal "Central Time (US & Canada)", Time.zone.name

    Current.reset
    assert_equal "UTC", Time.zone.name
    assert_equal 42, Session.previous
    assert_nil Session.current
  end

  test "set auxiliary class based on current attributes via before callback" do
    Current.person = Person.new(42, "David", "Central Time (US & Canada)")
    assert_nil Session.previous
    assert_equal 42, Session.current

    Current.reset
    assert_equal 42, Session.previous
    assert_nil Session.current
  end

  test "set attribute only via scope" do
    Current.world = "world/1"

    Current.set(world: "world/2") do
      assert_equal "world/2", Current.world
    end

    assert_equal "world/1", Current.world
  end

  test "set multiple attributes" do
    Current.world = "world/1"
    Current.account = "account/1"

    Current.set(world: "world/2", account: "account/2") do
      assert_equal "world/2", Current.world
      assert_equal "account/2", Current.account
    end

    assert_equal "world/1", Current.world
    assert_equal "account/1", Current.account
  end

  test "using keyword arguments" do
    Current.set_world_and_account(world: "world/1", account: "account/1")
    assert_equal "world/1", Current.world
    assert_equal "account/1", Current.account

    hash = {}
    assert_same hash, Current.get_world_and_account(hash)
    assert_equal "world/1", hash[:world]
    assert_equal "account/1", hash[:account]
  end

  setup { @testing_teardown = false }
  # Verifies attributes are still readable in teardown (reset happens later).
  teardown { assert_equal 42, Session.current if @testing_teardown }

  test "accessing attributes in teardown" do
    Session.current = 42
    @testing_teardown = true
  end

  test "delegation" do
    Current.person = Person.new(42, "David", "Central Time (US & Canada)")
    assert_equal "Central Time (US & Canada)", Current.time_zone
    assert_equal "Central Time (US & Canada)", Current.instance.time_zone
  end

  test "all methods forward to the instance" do
    Current.person = Person.new(42, "David", "Central Time (US & Canada)")
    assert_equal "David, in Central Time (US & Canada)", Current.intro
    assert_equal "David, in Central Time (US & Canada)", Current.instance.intro
  end

  test "respond_to? for methods that have not been called" do
    assert_equal true, Current.respond_to?("respond_to_test")
  end

  test "CurrentAttributes use fiber-local variables" do
    previous_level = ActiveSupport::IsolatedExecutionState.isolation_level
    ActiveSupport::IsolatedExecutionState.isolation_level = :fiber

    Session.current = 42
    enumerator = Enumerator.new do |yielder|
      yielder.yield Session.current
    end
    # Enumerator#next runs the block on a separate fiber, so with
    # fiber-level isolation the value set above is not visible there.
    assert_nil enumerator.next
  ensure
    ActiveSupport::IsolatedExecutionState.isolation_level = previous_level
  end

  test "CurrentAttributes can use thread-local variables" do
    previous_level = ActiveSupport::IsolatedExecutionState.isolation_level
    ActiveSupport::IsolatedExecutionState.isolation_level = :thread

    Session.current = 42
    enumerator = Enumerator.new do |yielder|
      yielder.yield Session.current
    end
    # With thread-level isolation the enumerator's fiber shares the same
    # thread state, so the value set above is visible.
    assert_equal 42, enumerator.next
  ensure
    ActiveSupport::IsolatedExecutionState.isolation_level = previous_level
  end
end
# frozen_string_literal: true | |
require "active_support/core_ext/date/acts_like" | |
require "active_support/core_ext/date/blank" | |
require "active_support/core_ext/date/calculations" | |
require "active_support/core_ext/date/conversions" | |
require "active_support/core_ext/date/deprecated_conversions" unless ENV["RAILS_DISABLE_DEPRECATED_TO_S_CONVERSION"] | |
require "active_support/core_ext/date/zones" |
# frozen_string_literal: true | |
require_relative "../abstract_unit" | |
module DateAndTimeBehavior | |
def test_yesterday | |
assert_equal date_time_init(2005, 2, 21, 10, 10, 10), date_time_init(2005, 2, 22, 10, 10, 10).yesterday | |
assert_equal date_time_init(2005, 2, 28, 10, 10, 10), date_time_init(2005, 3, 2, 10, 10, 10).yesterday.yesterday | |
end | |
def test_tomorrow | |
assert_equal date_time_init(2005, 2, 23, 10, 10, 10), date_time_init(2005, 2, 22, 10, 10, 10).tomorrow | |
assert_equal date_time_init(2005, 3, 2, 10, 10, 10), date_time_init(2005, 2, 28, 10, 10, 10).tomorrow.tomorrow | |
end | |
def test_days_ago | |
assert_equal date_time_init(2005, 6, 4, 10, 10, 10), date_time_init(2005, 6, 5, 10, 10, 10).days_ago(1) | |
assert_equal date_time_init(2005, 5, 31, 10, 10, 10), date_time_init(2005, 6, 5, 10, 10, 10).days_ago(5) | |
end | |
def test_days_since | |
assert_equal date_time_init(2005, 6, 6, 10, 10, 10), date_time_init(2005, 6, 5, 10, 10, 10).days_since(1) | |
assert_equal date_time_init(2005, 1, 1, 10, 10, 10), date_time_init(2004, 12, 31, 10, 10, 10).days_since(1) | |
end | |
def test_weeks_ago | |
assert_equal date_time_init(2005, 5, 29, 10, 10, 10), date_time_init(2005, 6, 5, 10, 10, 10).weeks_ago(1) | |
assert_equal date_time_init(2005, 5, 1, 10, 10, 10), date_time_init(2005, 6, 5, 10, 10, 10).weeks_ago(5) | |
assert_equal date_time_init(2005, 4, 24, 10, 10, 10), date_time_init(2005, 6, 5, 10, 10, 10).weeks_ago(6) | |
assert_equal date_time_init(2005, 2, 27, 10, 10, 10), date_time_init(2005, 6, 5, 10, 10, 10).weeks_ago(14) | |
assert_equal date_time_init(2004, 12, 25, 10, 10, 10), date_time_init(2005, 1, 1, 10, 10, 10).weeks_ago(1) | |
end | |
def test_weeks_since | |
assert_equal date_time_init(2005, 7, 14, 10, 10, 10), date_time_init(2005, 7, 7, 10, 10, 10).weeks_since(1) | |
assert_equal date_time_init(2005, 7, 14, 10, 10, 10), date_time_init(2005, 7, 7, 10, 10, 10).weeks_since(1) | |
assert_equal date_time_init(2005, 7, 4, 10, 10, 10), date_time_init(2005, 6, 27, 10, 10, 10).weeks_since(1) | |
assert_equal date_time_init(2005, 1, 4, 10, 10, 10), date_time_init(2004, 12, 28, 10, 10, 10).weeks_since(1) | |
end | |
def test_months_ago | |
assert_equal date_time_init(2005, 5, 5, 10, 10, 10), date_time_init(2005, 6, 5, 10, 10, 10).months_ago(1) | |
assert_equal date_time_init(2004, 11, 5, 10, 10, 10), date_time_init(2005, 6, 5, 10, 10, 10).months_ago(7) | |
assert_equal date_time_init(2004, 12, 5, 10, 10, 10), date_time_init(2005, 6, 5, 10, 10, 10).months_ago(6) | |
assert_equal date_time_init(2004, 6, 5, 10, 10, 10), date_time_init(2005, 6, 5, 10, 10, 10).months_ago(12) | |
assert_equal date_time_init(2003, 6, 5, 10, 10, 10), date_time_init(2005, 6, 5, 10, 10, 10).months_ago(24) | |
end | |
def test_months_since | |
assert_equal date_time_init(2005, 7, 5, 10, 10, 10), date_time_init(2005, 6, 5, 10, 10, 10).months_since(1) | |
assert_equal date_time_init(2006, 1, 5, 10, 10, 10), date_time_init(2005, 12, 5, 10, 10, 10).months_since(1) | |
assert_equal date_time_init(2005, 12, 5, 10, 10, 10), date_time_init(2005, 6, 5, 10, 10, 10).months_since(6) | |
assert_equal date_time_init(2006, 6, 5, 10, 10, 10), date_time_init(2005, 12, 5, 10, 10, 10).months_since(6) | |
assert_equal date_time_init(2006, 1, 5, 10, 10, 10), date_time_init(2005, 6, 5, 10, 10, 10).months_since(7) | |
assert_equal date_time_init(2006, 6, 5, 10, 10, 10), date_time_init(2005, 6, 5, 10, 10, 10).months_since(12) | |
assert_equal date_time_init(2007, 6, 5, 10, 10, 10), date_time_init(2005, 6, 5, 10, 10, 10).months_since(24) | |
assert_equal date_time_init(2005, 4, 30, 10, 10, 10), date_time_init(2005, 3, 31, 10, 10, 10).months_since(1) | |
assert_equal date_time_init(2005, 2, 28, 10, 10, 10), date_time_init(2005, 1, 29, 10, 10, 10).months_since(1) | |
assert_equal date_time_init(2005, 2, 28, 10, 10, 10), date_time_init(2005, 1, 30, 10, 10, 10).months_since(1) | |
assert_equal date_time_init(2005, 2, 28, 10, 10, 10), date_time_init(2005, 1, 31, 10, 10, 10).months_since(1) | |
end | |
def test_years_ago | |
assert_equal date_time_init(2004, 6, 5, 10, 10, 10), date_time_init(2005, 6, 5, 10, 10, 10).years_ago(1) | |
assert_equal date_time_init(1998, 6, 5, 10, 10, 10), date_time_init(2005, 6, 5, 10, 10, 10).years_ago(7) | |
assert_equal date_time_init(2003, 2, 28, 10, 10, 10), date_time_init(2004, 2, 29, 10, 10, 10).years_ago(1) # 1 year ago from leap day | |
end | |
def test_years_since | |
assert_equal date_time_init(2006, 6, 5, 10, 10, 10), date_time_init(2005, 6, 5, 10, 10, 10).years_since(1) | |
assert_equal date_time_init(2012, 6, 5, 10, 10, 10), date_time_init(2005, 6, 5, 10, 10, 10).years_since(7) | |
assert_equal date_time_init(2005, 2, 28, 10, 10, 10), date_time_init(2004, 2, 29, 10, 10, 10).years_since(1) # 1 year since leap day | |
assert_equal date_time_init(2182, 6, 5, 10, 10, 10), date_time_init(2005, 6, 5, 10, 10, 10).years_since(177) | |
end | |
def test_beginning_of_month | |
assert_equal date_time_init(2005, 2, 1, 0, 0, 0), date_time_init(2005, 2, 22, 10, 10, 10).beginning_of_month | |
end | |
def test_beginning_of_quarter | |
assert_equal date_time_init(2005, 1, 1, 0, 0, 0), date_time_init(2005, 2, 15, 10, 10, 10).beginning_of_quarter | |
assert_equal date_time_init(2005, 1, 1, 0, 0, 0), date_time_init(2005, 1, 1, 0, 0, 0).beginning_of_quarter | |
assert_equal date_time_init(2005, 10, 1, 0, 0, 0), date_time_init(2005, 12, 31, 10, 10, 10).beginning_of_quarter | |
assert_equal date_time_init(2005, 4, 1, 0, 0, 0), date_time_init(2005, 6, 30, 23, 59, 59).beginning_of_quarter | |
end | |
def test_end_of_quarter | |
assert_equal date_time_init(2007, 3, 31, 23, 59, 59, Rational(999999999, 1000)), date_time_init(2007, 2, 15, 10, 10, 10).end_of_quarter | |
assert_equal date_time_init(2007, 3, 31, 23, 59, 59, Rational(999999999, 1000)), date_time_init(2007, 3, 31, 0, 0, 0).end_of_quarter | |
assert_equal date_time_init(2007, 12, 31, 23, 59, 59, Rational(999999999, 1000)), date_time_init(2007, 12, 21, 10, 10, 10).end_of_quarter | |
assert_equal date_time_init(2007, 6, 30, 23, 59, 59, Rational(999999999, 1000)), date_time_init(2007, 4, 1, 0, 0, 0).end_of_quarter | |
assert_equal date_time_init(2008, 6, 30, 23, 59, 59, Rational(999999999, 1000)), date_time_init(2008, 5, 31, 0, 0, 0).end_of_quarter | |
end | |
def test_beginning_of_year | |
assert_equal date_time_init(2005, 1, 1, 0, 0, 0), date_time_init(2005, 2, 22, 10, 10, 10).beginning_of_year | |
end | |
def test_next_week | |
# M | T | W | T | F | S | S # M | T | W | T | F | S | S # | |
# | 22/2 | | | | | # 28/2 | | | | | | # monday in next week `next_week` | |
# | 22/2 | | | | | # | | | | 4/3 | | # friday in next week `next_week(:friday)` | |
# 23/10 | | | | | | # 30/10 | | | | | | # monday in next week `next_week` | |
# 23/10 | | | | | | # | | 1/11 | | | | # wednesday in next week `next_week(:wednesday)` | |
assert_equal date_time_init(2005, 2, 28, 0, 0, 0), date_time_init(2005, 2, 22, 15, 15, 10).next_week | |
assert_equal date_time_init(2005, 3, 4, 0, 0, 0), date_time_init(2005, 2, 22, 15, 15, 10).next_week(:friday) | |
assert_equal date_time_init(2006, 10, 30, 0, 0, 0), date_time_init(2006, 10, 23, 0, 0, 0).next_week | |
assert_equal date_time_init(2006, 11, 1, 0, 0, 0), date_time_init(2006, 10, 23, 0, 0, 0).next_week(:wednesday) | |
end | |
def test_next_week_with_default_beginning_of_week_set | |
with_bw_default(:tuesday) do | |
assert_equal Time.local(2012, 3, 28), Time.local(2012, 3, 21).next_week(:wednesday) | |
assert_equal Time.local(2012, 3, 31), Time.local(2012, 3, 21).next_week(:saturday) | |
assert_equal Time.local(2012, 3, 27), Time.local(2012, 3, 21).next_week(:tuesday) | |
assert_equal Time.local(2012, 4, 02), Time.local(2012, 3, 21).next_week(:monday) | |
end | |
end | |
def test_next_week_at_same_time | |
assert_equal date_time_init(2005, 2, 28, 15, 15, 10), date_time_init(2005, 2, 22, 15, 15, 10).next_week(:monday, same_time: true) | |
assert_equal date_time_init(2005, 2, 28, 15, 15, 10, 999999), date_time_init(2005, 2, 22, 15, 15, 10, 999999).next_week(:monday, same_time: true) | |
assert_equal date_time_init(2005, 2, 28, 15, 15, 10, Rational(999999999, 1000)), date_time_init(2005, 2, 22, 15, 15, 10, Rational(999999999, 1000)).next_week(:monday, same_time: true) | |
assert_equal date_time_init(2005, 3, 4, 15, 15, 10), date_time_init(2005, 2, 22, 15, 15, 10).next_week(:friday, same_time: true) | |
assert_equal date_time_init(2006, 10, 30, 0, 0, 0), date_time_init(2006, 10, 23, 0, 0, 0).next_week(:monday, same_time: true) | |
assert_equal date_time_init(2006, 11, 1, 0, 0, 0), date_time_init(2006, 10, 23, 0, 0, 0).next_week(:wednesday, same_time: true) | |
end | |
def test_next_weekday_on_wednesday | |
assert_equal date_time_init(2015, 1, 8, 0, 0, 0), date_time_init(2015, 1, 7, 0, 0, 0).next_weekday | |
assert_equal date_time_init(2015, 1, 8, 15, 15, 10), date_time_init(2015, 1, 7, 15, 15, 10).next_weekday | |
end | |
def test_next_weekday_on_friday | |
assert_equal date_time_init(2015, 1, 5, 0, 0, 0), date_time_init(2015, 1, 2, 0, 0, 0).next_weekday | |
assert_equal date_time_init(2015, 1, 5, 15, 15, 10), date_time_init(2015, 1, 2, 15, 15, 10).next_weekday | |
end | |
def test_next_weekday_on_saturday | |
assert_equal date_time_init(2015, 1, 5, 0, 0, 0), date_time_init(2015, 1, 3, 0, 0, 0).next_weekday | |
assert_equal date_time_init(2015, 1, 5, 15, 15, 10), date_time_init(2015, 1, 3, 15, 15, 10).next_weekday | |
end | |
def test_next_month_on_31st | |
assert_equal date_time_init(2005, 9, 30, 15, 15, 10), date_time_init(2005, 8, 31, 15, 15, 10).next_month | |
end | |
def test_next_quarter_on_31st | |
assert_equal date_time_init(2005, 11, 30, 15, 15, 10), date_time_init(2005, 8, 31, 15, 15, 10).next_quarter | |
end | |
def test_prev_week | |
assert_equal date_time_init(2005, 2, 21, 0, 0, 0), date_time_init(2005, 3, 1, 15, 15, 10).prev_week | |
assert_equal date_time_init(2005, 2, 22, 0, 0, 0), date_time_init(2005, 3, 1, 15, 15, 10).prev_week(:tuesday) | |
assert_equal date_time_init(2005, 2, 25, 0, 0, 0), date_time_init(2005, 3, 1, 15, 15, 10).prev_week(:friday) | |
assert_equal date_time_init(2006, 10, 30, 0, 0, 0), date_time_init(2006, 11, 6, 0, 0, 0).prev_week | |
assert_equal date_time_init(2006, 11, 15, 0, 0, 0), date_time_init(2006, 11, 23, 0, 0, 0).prev_week(:wednesday) | |
end | |
def test_prev_week_with_default_beginning_of_week | |
with_bw_default(:tuesday) do | |
assert_equal Time.local(2012, 3, 14), Time.local(2012, 3, 21).prev_week(:wednesday) | |
assert_equal Time.local(2012, 3, 17), Time.local(2012, 3, 21).prev_week(:saturday) | |
assert_equal Time.local(2012, 3, 13), Time.local(2012, 3, 21).prev_week(:tuesday) | |
assert_equal Time.local(2012, 3, 19), Time.local(2012, 3, 21).prev_week(:monday) | |
end | |
end | |
def test_prev_week_at_same_time | |
assert_equal date_time_init(2005, 2, 21, 15, 15, 10), date_time_init(2005, 3, 1, 15, 15, 10).prev_week(:monday, same_time: true) | |
assert_equal date_time_init(2005, 2, 22, 15, 15, 10), date_time_init(2005, 3, 1, 15, 15, 10).prev_week(:tuesday, same_time: true) | |
assert_equal date_time_init(2005, 2, 25, 15, 15, 10), date_time_init(2005, 3, 1, 15, 15, 10).prev_week(:friday, same_time: true) | |
assert_equal date_time_init(2006, 10, 30, 0, 0, 0), date_time_init(2006, 11, 6, 0, 0, 0).prev_week(:monday, same_time: true) | |
assert_equal date_time_init(2006, 11, 15, 0, 0, 0), date_time_init(2006, 11, 23, 0, 0, 0).prev_week(:wednesday, same_time: true) | |
end | |
def test_prev_weekday_on_wednesday | |
assert_equal date_time_init(2015, 1, 6, 0, 0, 0), date_time_init(2015, 1, 7, 0, 0, 0).prev_weekday | |
assert_equal date_time_init(2015, 1, 6, 15, 15, 10), date_time_init(2015, 1, 7, 15, 15, 10).prev_weekday | |
end | |
def test_prev_weekday_on_monday | |
assert_equal date_time_init(2015, 1, 2, 0, 0, 0), date_time_init(2015, 1, 5, 0, 0, 0).prev_weekday | |
assert_equal date_time_init(2015, 1, 2, 15, 15, 10), date_time_init(2015, 1, 5, 15, 15, 10).prev_weekday | |
end | |
# From a Sunday, prev_weekday lands on the preceding Friday.
def test_prev_weekday_on_sunday
  [[0, 0, 0], [15, 15, 10]].each do |hour, minute, second|
    assert_equal date_time_init(2015, 1, 2, hour, minute, second), date_time_init(2015, 1, 4, hour, minute, second).prev_weekday
  end
end
# prev_month from the 31st clamps to the last day of the shorter month
# (here leap-year February 29th).
def test_prev_month_on_31st
  march_31st = date_time_init(2004, 3, 31, 10, 10, 10)
  assert_equal date_time_init(2004, 2, 29, 10, 10, 10), march_31st.prev_month
end
# prev_quarter from May 31st clamps to leap-year February 29th.
def test_prev_quarter_on_31st
  may_31st = date_time_init(2004, 5, 31, 10, 10, 10)
  assert_equal date_time_init(2004, 2, 29, 10, 10, 10), may_31st.prev_quarter
end
# last_month (alias behavior of prev_month) also clamps the 31st
# to the end of the previous, shorter month.
def test_last_month_on_31st
  expected = date_time_init(2004, 2, 29, 0, 0, 0)
  assert_equal expected, date_time_init(2004, 3, 31, 0, 0, 0).last_month
end
# last_year keeps month, day and time of day while decrementing the year.
def test_last_year
  this_year = date_time_init(2005, 6, 5, 10, 0, 0)
  assert_equal date_time_init(2004, 6, 5, 10, 0, 0), this_year.last_year
end
# days_to_week_start counts backwards to the given start day.
# 2011-11-01 is a Tuesday, so with :tuesday as the week start the
# distance grows from 0 to 6 across Nov 1..7; the second loop fixes
# the distance at 3 and walks the start day through the whole week.
def test_days_to_week_start
  (0..6).each do |expected|
    assert_equal expected, date_time_init(2011, 11, 1 + expected, 0, 0, 0).days_to_week_start(:tuesday)
  end
  %i[monday tuesday wednesday thursday friday saturday sunday].each_with_index do |start_day, offset|
    assert_equal 3, date_time_init(2011, 11, 3 + offset, 0, 0, 0).days_to_week_start(start_day)
  end
end
# With Date.beginning_of_week defaulted to :friday, 2012-03-02 (a Friday)
# is distance 0 and each following day adds one, up to 6 on March 8th.
def test_days_to_week_start_with_default_set
  with_bw_default(:friday) do
    8.downto(2) do |day|
      assert_equal day - 2, Time.local(2012, 3, day, 0, 0, 0).days_to_week_start
    end
  end
end
# beginning_of_week defaults to Monday: every day of the week starting
# 2005-11-28 (Mon) through 2005-12-04 (Sun) maps back to that Monday.
def test_beginning_of_week
  assert_equal date_time_init(2005, 1, 31, 0, 0, 0), date_time_init(2005, 2, 4, 10, 10, 10).beginning_of_week
  monday = date_time_init(2005, 11, 28, 0, 0, 0)
  [[11, 28], [11, 29], [11, 30], [12, 1], [12, 2], [12, 3], [12, 4]].each do |month, day|
    assert_equal monday, date_time_init(2005, month, day, 0, 0, 0).beginning_of_week
  end
end
# end_of_week lands on Sunday 23:59:59.999999999: every day of the week
# starting 2007-08-27 (Mon) through 2007-09-02 (Sun) maps to that instant.
def test_end_of_week
  assert_equal date_time_init(2008, 1, 6, 23, 59, 59, Rational(999999999, 1000)), date_time_init(2007, 12, 31, 10, 10, 10).end_of_week
  week_end = date_time_init(2007, 9, 2, 23, 59, 59, Rational(999999999, 1000))
  [[8, 27], [8, 28], [8, 29], [8, 30], [8, 31], [9, 1], [9, 2]].each do |month, day|
    assert_equal week_end, date_time_init(2007, month, day, 0, 0, 0).end_of_week
  end
end
# end_of_month knows each month's length: March 31, February 28 (2005 is
# not a leap year), April 30 — always at 23:59:59.999999999.
def test_end_of_month
  { 3 => 31, 2 => 28, 4 => 30 }.each do |month, last_day|
    assert_equal date_time_init(2005, month, last_day, 23, 59, 59, Rational(999999999, 1000)), date_time_init(2005, month, 20, 10, 10, 10).end_of_month
  end
end
# end_of_year is the last instant of December 31st, whatever the
# starting date within the year.
def test_end_of_year
  new_years_eve = date_time_init(2007, 12, 31, 23, 59, 59, Rational(999999999, 1000))
  assert_equal new_years_eve, date_time_init(2007, 2, 22, 10, 10, 10).end_of_year
  assert_equal new_years_eve, date_time_init(2007, 12, 31, 10, 10, 10).end_of_year
end
# next_occurring always moves strictly forward from 2017-12-14 (a
# Thursday) — asking for :thursday yields the following week — and
# preserves the time of day.
def test_next_occurring
  thursday = date_time_init(2017, 12, 14, 3, 14, 15)
  { monday: 18, tuesday: 19, wednesday: 20, thursday: 21, friday: 15, saturday: 16, sunday: 17 }.each do |day_name, day|
    assert_equal date_time_init(2017, 12, day, 3, 14, 15), thursday.next_occurring(day_name)
  end
end
# prev_occurring always moves strictly backward from 2017-12-14 (a
# Thursday) — asking for :thursday yields the previous week — and
# preserves the time of day.
def test_prev_occurring
  thursday = date_time_init(2017, 12, 14, 3, 14, 15)
  { monday: 11, tuesday: 12, wednesday: 13, thursday: 7, friday: 8, saturday: 9, sunday: 10 }.each do |day_name, day|
    assert_equal date_time_init(2017, 12, day, 3, 14, 15), thursday.prev_occurring(day_name)
  end
end
# #monday means the literal previous/current Monday regardless of the
# configured beginning of week.
def test_monday_with_default_beginning_of_week_set
  with_bw_default(:saturday) do
    tuesday = date_time_init(2012, 9, 18, 0, 0, 0)
    assert_equal date_time_init(2012, 9, 17, 0, 0, 0), tuesday.monday
  end
end
# #sunday means the literal next/current Sunday at its last instant,
# regardless of the configured beginning of week.
def test_sunday_with_default_beginning_of_week_set
  with_bw_default(:wednesday) do
    wednesday = date_time_init(2012, 9, 19, 0, 0, 0)
    assert_equal date_time_init(2012, 9, 23, 23, 59, 59, Rational(999999999, 1000)), wednesday.sunday
  end
end
# 2015-01-03 is a Saturday; on_weekend? is true at any time of day.
def test_on_weekend_on_saturday
  [date_time_init(2015, 1, 3, 0, 0, 0), date_time_init(2015, 1, 3, 15, 15, 10)].each do |moment|
    assert_predicate moment, :on_weekend?
  end
end
# 2015-01-04 is a Sunday; on_weekend? is true at any time of day.
def test_on_weekend_on_sunday
  [date_time_init(2015, 1, 4, 0, 0, 0), date_time_init(2015, 1, 4, 15, 15, 10)].each do |moment|
    assert_predicate moment, :on_weekend?
  end
end
# 2015-01-05 is a Monday; on_weekend? is false at any time of day.
def test_on_weekend_on_monday
  [date_time_init(2015, 1, 5, 0, 0, 0), date_time_init(2015, 1, 5, 15, 15, 10)].each do |moment|
    assert_not_predicate moment, :on_weekend?
  end
end
# 2015-01-04 is a Sunday; on_weekday? is false at any time of day.
def test_on_weekday_on_sunday
  [date_time_init(2015, 1, 4, 0, 0, 0), date_time_init(2015, 1, 4, 15, 15, 10)].each do |moment|
    assert_not_predicate moment, :on_weekday?
  end
end
# 2015-01-05 is a Monday; on_weekday? is true at any time of day.
def test_on_weekday_on_monday
  [date_time_init(2015, 1, 5, 0, 0, 0), date_time_init(2015, 1, 5, 15, 15, 10)].each do |moment|
    assert_predicate moment, :on_weekday?
  end
end
# before? is a strict comparison: false for earlier and equal instants,
# true only for a strictly later argument.
def test_before
  noon = date_time_init(2017, 3, 6, 12, 0, 0)
  assert_equal false, noon.before?(date_time_init(2017, 3, 5, 12, 0, 0))
  assert_equal false, noon.before?(date_time_init(2017, 3, 6, 12, 0, 0))
  assert_equal true, noon.before?(date_time_init(2017, 3, 7, 12, 0, 0))
end
# after? is a strict comparison: true only for a strictly earlier
# argument, false for equal and later instants.
def test_after
  noon = date_time_init(2017, 3, 6, 12, 0, 0)
  assert_equal true, noon.after?(date_time_init(2017, 3, 5, 12, 0, 0))
  assert_equal false, noon.after?(date_time_init(2017, 3, 6, 12, 0, 0))
  assert_equal false, noon.after?(date_time_init(2017, 3, 7, 12, 0, 0))
end
# Runs the block with Date.beginning_of_week set to +bw+, restoring the
# previous value afterwards even if the block raises.
def with_bw_default(bw = :monday)
  previous = Date.beginning_of_week
  Date.beginning_of_week = bw
  yield
ensure
  Date.beginning_of_week = previous
end
end |
# frozen_string_literal: true | |
require_relative "../abstract_unit" | |
require "active_support/time" | |
require_relative "../time_zone_test_helpers" | |
# Verifies #to_time behavior for Time, DateTime, TimeWithZone and String
# receivers under both settings of ActiveSupport.to_time_preserves_timezone:
# - preserved: the result keeps the receiver's own UTC offset
# - not preserved: the result is converted to the system-local offset
# The system zone is pinned to US/Eastern (UTC-4 in late April, DST),
# while every source value carries a +01:00 offset.
class DateAndTimeCompatibilityTest < ActiveSupport::TestCase
include TimeZoneTestHelpers
def setup
# 14:11:12 UTC == 15:11:12 at +01:00 == 10:11:12 at -04:00 (US/Eastern DST)
@utc_time = Time.utc(2016, 4, 23, 14, 11, 12)
@date_time = DateTime.new(2016, 4, 23, 14, 11, 12, 0)
@utc_offset = 3600
@system_offset = -14400
@zone = ActiveSupport::TimeZone["London"]
end
# When preserving, Time#to_time returns the receiver itself (same object_id).
def test_time_to_time_preserves_timezone
with_preserve_timezone(true) do
with_env_tz "US/Eastern" do
source = Time.new(2016, 4, 23, 15, 11, 12, 3600)
time = source.to_time
assert_instance_of Time, time
assert_equal @utc_time, time.getutc
assert_equal @utc_offset, time.utc_offset
assert_equal source.object_id, time.object_id
end
end
end
# Without preservation, a new Time in the system offset is returned.
def test_time_to_time_does_not_preserve_time_zone
with_preserve_timezone(false) do
with_env_tz "US/Eastern" do
source = Time.new(2016, 4, 23, 15, 11, 12, 3600)
time = source.to_time
assert_instance_of Time, time
assert_equal @utc_time, time.getutc
assert_equal @system_offset, time.utc_offset
assert_not_equal source.object_id, time.object_id
end
end
end
# Since preserving returns the receiver itself, a frozen source yields a
# frozen result.
def test_time_to_time_frozen_preserves_timezone
with_preserve_timezone(true) do
with_env_tz "US/Eastern" do
source = Time.new(2016, 4, 23, 15, 11, 12, 3600).freeze
time = source.to_time
assert_instance_of Time, time
assert_equal @utc_time, time.getutc
assert_equal @utc_offset, time.utc_offset
assert_equal source.object_id, time.object_id
assert_predicate time, :frozen?
end
end
end
# Converting to the system offset builds a new object, so the result of a
# frozen source is not frozen.
def test_time_to_time_frozen_does_not_preserve_time_zone
with_preserve_timezone(false) do
with_env_tz "US/Eastern" do
source = Time.new(2016, 4, 23, 15, 11, 12, 3600).freeze
time = source.to_time
assert_instance_of Time, time
assert_equal @utc_time, time.getutc
assert_equal @system_offset, time.utc_offset
assert_not_equal source.object_id, time.object_id
assert_not_predicate time, :frozen?
end
end
end
# DateTime#to_time: Rational(1, 24) is the +01:00 offset in day fractions.
def test_datetime_to_time_preserves_timezone
with_preserve_timezone(true) do
with_env_tz "US/Eastern" do
source = DateTime.new(2016, 4, 23, 15, 11, 12, Rational(1, 24))
time = source.to_time
assert_instance_of Time, time
assert_equal @utc_time, time.getutc
assert_equal @utc_offset, time.utc_offset
end
end
end
def test_datetime_to_time_does_not_preserve_time_zone
with_preserve_timezone(false) do
with_env_tz "US/Eastern" do
source = DateTime.new(2016, 4, 23, 15, 11, 12, Rational(1, 24))
time = source.to_time
assert_instance_of Time, time
assert_equal @utc_time, time.getutc
assert_equal @system_offset, time.utc_offset
end
end
end
# DateTime#to_time always builds a new Time, so the result of a frozen
# DateTime is never frozen — in either preservation mode.
def test_datetime_to_time_frozen_preserves_timezone
with_preserve_timezone(true) do
with_env_tz "US/Eastern" do
source = DateTime.new(2016, 4, 23, 15, 11, 12, Rational(1, 24)).freeze
time = source.to_time
assert_instance_of Time, time
assert_equal @utc_time, time.getutc
assert_equal @utc_offset, time.utc_offset
assert_not_predicate time, :frozen?
end
end
end
def test_datetime_to_time_frozen_does_not_preserve_time_zone
with_preserve_timezone(false) do
with_env_tz "US/Eastern" do
source = DateTime.new(2016, 4, 23, 15, 11, 12, Rational(1, 24)).freeze
time = source.to_time
assert_instance_of Time, time
assert_equal @utc_time, time.getutc
assert_equal @system_offset, time.utc_offset
assert_not_predicate time, :frozen?
end
end
end
# TimeWithZone#to_time, exercised with both a Time-backed and a
# DateTime-backed instant; the result (and its getutc) must be plain Time.
def test_twz_to_time_preserves_timezone
with_preserve_timezone(true) do
with_env_tz "US/Eastern" do
source = ActiveSupport::TimeWithZone.new(@utc_time, @zone)
time = source.to_time
assert_instance_of Time, time
assert_equal @utc_time, time.getutc
assert_instance_of Time, time.getutc
assert_equal @utc_offset, time.utc_offset
source = ActiveSupport::TimeWithZone.new(@date_time, @zone)
time = source.to_time
assert_instance_of Time, time
assert_equal @date_time, time.getutc
assert_instance_of Time, time.getutc
assert_equal @utc_offset, time.utc_offset
end
end
end
def test_twz_to_time_does_not_preserve_time_zone
with_preserve_timezone(false) do
with_env_tz "US/Eastern" do
source = ActiveSupport::TimeWithZone.new(@utc_time, @zone)
time = source.to_time
assert_instance_of Time, time
assert_equal @utc_time, time.getutc
assert_instance_of Time, time.getutc
assert_equal @system_offset, time.utc_offset
source = ActiveSupport::TimeWithZone.new(@date_time, @zone)
time = source.to_time
assert_instance_of Time, time
assert_equal @date_time, time.getutc
assert_instance_of Time, time.getutc
assert_equal @system_offset, time.utc_offset
end
end
end
# TimeWithZone#to_time also always builds a new Time, so frozen sources
# never produce frozen results.
def test_twz_to_time_frozen_preserves_timezone
with_preserve_timezone(true) do
with_env_tz "US/Eastern" do
source = ActiveSupport::TimeWithZone.new(@utc_time, @zone).freeze
time = source.to_time
assert_instance_of Time, time
assert_equal @utc_time, time.getutc
assert_instance_of Time, time.getutc
assert_equal @utc_offset, time.utc_offset
assert_not_predicate time, :frozen?
source = ActiveSupport::TimeWithZone.new(@date_time, @zone).freeze
time = source.to_time
assert_instance_of Time, time
assert_equal @date_time, time.getutc
assert_instance_of Time, time.getutc
assert_equal @utc_offset, time.utc_offset
assert_not_predicate time, :frozen?
end
end
end
def test_twz_to_time_frozen_does_not_preserve_time_zone
with_preserve_timezone(false) do
with_env_tz "US/Eastern" do
source = ActiveSupport::TimeWithZone.new(@utc_time, @zone).freeze
time = source.to_time
assert_instance_of Time, time
assert_equal @utc_time, time.getutc
assert_instance_of Time, time.getutc
assert_equal @system_offset, time.utc_offset
assert_not_predicate time, :frozen?
source = ActiveSupport::TimeWithZone.new(@date_time, @zone).freeze
time = source.to_time
assert_instance_of Time, time
assert_equal @date_time, time.getutc
assert_instance_of Time, time.getutc
assert_equal @system_offset, time.utc_offset
assert_not_predicate time, :frozen?
end
end
end
# String#to_time parses the embedded offset and applies the same
# preservation rules.
def test_string_to_time_preserves_timezone
with_preserve_timezone(true) do
with_env_tz "US/Eastern" do
source = "2016-04-23T15:11:12+01:00"
time = source.to_time
assert_instance_of Time, time
assert_equal @utc_time, time.getutc
assert_equal @utc_offset, time.utc_offset
end
end
end
def test_string_to_time_does_not_preserve_time_zone
with_preserve_timezone(false) do
with_env_tz "US/Eastern" do
source = "2016-04-23T15:11:12+01:00"
time = source.to_time
assert_instance_of Time, time
assert_equal @utc_time, time.getutc
assert_equal @system_offset, time.utc_offset
end
end
end
# Parsing a string always builds a new Time, so the result is never frozen.
def test_string_to_time_frozen_preserves_timezone
with_preserve_timezone(true) do
with_env_tz "US/Eastern" do
source = "2016-04-23T15:11:12+01:00"
time = source.to_time
assert_instance_of Time, time
assert_equal @utc_time, time.getutc
assert_equal @utc_offset, time.utc_offset
assert_not_predicate time, :frozen?
end
end
end
def test_string_to_time_frozen_does_not_preserve_time_zone
with_preserve_timezone(false) do
with_env_tz "US/Eastern" do
source = "2016-04-23T15:11:12+01:00"
time = source.to_time
assert_instance_of Time, time
assert_equal @utc_time, time.getutc
assert_equal @system_offset, time.utc_offset
assert_not_predicate time, :frozen?
end
end
end
end
# frozen_string_literal: true | |
require_relative "../abstract_unit" | |
require "active_support/time" | |
require_relative "../core_ext/date_and_time_behavior" | |
require_relative "../time_zone_test_helpers" | |
class DateExtCalculationsTest < ActiveSupport::TestCase | |
def date_time_init(year, month, day, *args) | |
Date.new(year, month, day) | |
end | |
include DateAndTimeBehavior | |
include TimeZoneTestHelpers | |
def test_yesterday_in_calendar_reform | |
assert_equal Date.new(1582, 10, 4), Date.new(1582, 10, 15).yesterday | |
end | |
def test_tomorrow_in_calendar_reform | |
assert_equal Date.new(1582, 10, 15), Date.new(1582, 10, 4).tomorrow | |
end | |
def test_to_s | |
date = Date.new(2005, 2, 21) | |
assert_equal "2005-02-21", date.to_s | |
assert_deprecated do | |
assert_equal "21 Feb", date.to_s(:short) | |
end | |
assert_deprecated do | |
assert_equal "February 21, 2005", date.to_s(:long) | |
end | |
assert_deprecated do | |
assert_equal "February 21st, 2005", date.to_s(:long_ordinal) | |
end | |
assert_deprecated do | |
assert_equal "2005-02-21", date.to_s(:db) | |
end | |
assert_deprecated do | |
assert_equal "2005-02-21", date.to_s(:inspect) | |
end | |
assert_deprecated do | |
assert_equal "21 Feb 2005", date.to_s(:rfc822) | |
end | |
assert_deprecated do | |
assert_equal "2005-02-21", date.to_s(:iso8601) | |
end | |
assert_deprecated do | |
assert_equal "2005-02-21", date.to_s(:not_existent) | |
end | |
end | |
def test_to_s_with_single_digit_day | |
date = Date.new(2005, 2, 1) | |
assert_equal "2005-02-01", date.to_s | |
assert_deprecated do | |
assert_equal "01 Feb", date.to_s(:short) | |
end | |
assert_deprecated do | |
assert_equal "February 01, 2005", date.to_s(:long) | |
end | |
assert_deprecated do | |
assert_equal "February 1st, 2005", date.to_s(:long_ordinal) | |
end | |
assert_deprecated do | |
assert_equal "2005-02-01", date.to_s(:db) | |
end | |
assert_deprecated do | |
assert_equal "2005-02-01", date.to_s(:inspect) | |
end | |
assert_deprecated do | |
assert_equal "01 Feb 2005", date.to_s(:rfc822) | |
end | |
assert_deprecated do | |
assert_equal "2005-02-01", date.to_s(:iso8601) | |
end | |
assert_deprecated do | |
assert_equal "2005-02-01", date.to_s(:not_existent) | |
end | |
end | |
def test_to_fs | |
date = Date.new(2005, 2, 21) | |
assert_equal "21 Feb", date.to_fs(:short) | |
assert_equal "February 21, 2005", date.to_fs(:long) | |
assert_equal "February 21st, 2005", date.to_fs(:long_ordinal) | |
assert_equal "2005-02-21", date.to_fs(:db) | |
assert_equal "2005-02-21", date.to_fs(:inspect) | |
assert_equal "21 Feb 2005", date.to_fs(:rfc822) | |
assert_equal "2005-02-21", date.to_fs(:iso8601) | |
assert_equal "21 Feb", date.to_formatted_s(:short) | |
end | |
def test_to_fs_with_single_digit_day | |
date = Date.new(2005, 2, 1) | |
assert_equal "01 Feb", date.to_fs(:short) | |
assert_equal "February 01, 2005", date.to_fs(:long) | |
assert_equal "February 1st, 2005", date.to_fs(:long_ordinal) | |
assert_equal "2005-02-01", date.to_fs(:db) | |
assert_equal "2005-02-01", date.to_fs(:inspect) | |
assert_equal "01 Feb 2005", date.to_fs(:rfc822) | |
assert_equal "2005-02-01", date.to_fs(:iso8601) | |
end | |
def test_readable_inspect | |
assert_equal "Mon, 21 Feb 2005", Date.new(2005, 2, 21).readable_inspect | |
assert_equal Date.new(2005, 2, 21).readable_inspect, Date.new(2005, 2, 21).inspect | |
end | |
def test_to_time | |
with_env_tz "US/Eastern" do | |
assert_equal Time, Date.new(2005, 2, 21).to_time.class | |
assert_equal Time.local(2005, 2, 21), Date.new(2005, 2, 21).to_time | |
assert_equal Time.local(2005, 2, 21).utc_offset, Date.new(2005, 2, 21).to_time.utc_offset | |
end | |
silence_warnings do | |
0.upto(138) do |year| | |
[:utc, :local].each do |format| | |
assert_equal year, Date.new(year).to_time(format).year | |
end | |
end | |
end | |
assert_raise(ArgumentError) do | |
Date.new(2005, 2, 21).to_time(:tokyo) | |
end | |
end | |
def test_compare_to_time | |
assert Date.yesterday < Time.now | |
end | |
def test_to_datetime | |
assert_equal DateTime.civil(2005, 2, 21), Date.new(2005, 2, 21).to_datetime | |
assert_equal 0, Date.new(2005, 2, 21).to_datetime.offset # use UTC offset | |
assert_equal ::Date::ITALY, Date.new(2005, 2, 21).to_datetime.start # use Ruby's default start value | |
end | |
def test_to_date | |
assert_equal Date.new(2005, 2, 21), Date.new(2005, 2, 21).to_date | |
end | |
def test_change | |
assert_equal Date.new(2005, 2, 21), Date.new(2005, 2, 11).change(day: 21) | |
assert_equal Date.new(2007, 5, 11), Date.new(2005, 2, 11).change(year: 2007, month: 5) | |
assert_equal Date.new(2006, 2, 22), Date.new(2005, 2, 22).change(year: 2006) | |
assert_equal Date.new(2005, 6, 22), Date.new(2005, 2, 22).change(month: 6) | |
end | |
def test_sunday | |
assert_equal Date.new(2008, 3, 2), Date.new(2008, 3, 02).sunday | |
assert_equal Date.new(2008, 3, 2), Date.new(2008, 2, 29).sunday | |
end | |
def test_beginning_of_week_in_calendar_reform | |
assert_equal Date.new(1582, 10, 1), Date.new(1582, 10, 15).beginning_of_week # friday | |
end | |
def test_end_of_week_in_calendar_reform | |
assert_equal Date.new(1582, 10, 17), Date.new(1582, 10, 4).end_of_week # thursday | |
end | |
def test_end_of_year | |
assert_equal Date.new(2008, 12, 31).to_s, Date.new(2008, 2, 22).end_of_year.to_s | |
end | |
def test_end_of_month | |
assert_equal Date.new(2005, 3, 31), Date.new(2005, 3, 20).end_of_month | |
assert_equal Date.new(2005, 2, 28), Date.new(2005, 2, 20).end_of_month | |
assert_equal Date.new(2005, 4, 30), Date.new(2005, 4, 20).end_of_month | |
end | |
def test_last_year_in_leap_years | |
assert_equal Date.new(1999, 2, 28), Date.new(2000, 2, 29).last_year | |
end | |
def test_last_year_in_calendar_reform | |
assert_equal Date.new(1582, 10, 4), Date.new(1583, 10, 14).last_year | |
end | |
def test_advance | |
assert_equal Date.new(2006, 2, 28), Date.new(2005, 2, 28).advance(years: 1) | |
assert_equal Date.new(2005, 6, 28), Date.new(2005, 2, 28).advance(months: 4) | |
assert_equal Date.new(2005, 3, 21), Date.new(2005, 2, 28).advance(weeks: 3) | |
assert_equal Date.new(2005, 3, 5), Date.new(2005, 2, 28).advance(days: 5) | |
assert_equal Date.new(2012, 9, 28), Date.new(2005, 2, 28).advance(years: 7, months: 7) | |
assert_equal Date.new(2013, 10, 3), Date.new(2005, 2, 28).advance(years: 7, months: 19, days: 5) | |
assert_equal Date.new(2013, 10, 17), Date.new(2005, 2, 28).advance(years: 7, months: 19, weeks: 2, days: 5) | |
assert_equal Date.new(2005, 2, 28), Date.new(2004, 2, 29).advance(years: 1) # leap day plus one year | |
end | |
def test_advance_does_first_years_and_then_days | |
assert_equal Date.new(2012, 2, 29), Date.new(2011, 2, 28).advance(years: 1, days: 1) | |
# If day was done first we would jump to 2012-03-01 instead. | |
end | |
def test_advance_does_first_months_and_then_days | |
assert_equal Date.new(2010, 3, 29), Date.new(2010, 2, 28).advance(months: 1, days: 1) | |
# If day was done first we would jump to 2010-04-01 instead. | |
end | |
def test_advance_in_calendar_reform | |
assert_equal Date.new(1582, 10, 15), Date.new(1582, 10, 4).advance(days: 1) | |
assert_equal Date.new(1582, 10, 4), Date.new(1582, 10, 15).advance(days: -1) | |
5.upto(14) do |day| | |
assert_equal Date.new(1582, 10, 4), Date.new(1582, 9, day).advance(months: 1) | |
assert_equal Date.new(1582, 10, 4), Date.new(1582, 11, day).advance(months: -1) | |
assert_equal Date.new(1582, 10, 4), Date.new(1581, 10, day).advance(years: 1) | |
assert_equal Date.new(1582, 10, 4), Date.new(1583, 10, day).advance(years: -1) | |
end | |
end | |
def test_last_week | |
assert_equal Date.new(2005, 5, 9), Date.new(2005, 5, 17).last_week | |
assert_equal Date.new(2006, 12, 25), Date.new(2007, 1, 7).last_week | |
assert_equal Date.new(2010, 2, 12), Date.new(2010, 2, 19).last_week(:friday) | |
assert_equal Date.new(2010, 2, 13), Date.new(2010, 2, 19).last_week(:saturday) | |
assert_equal Date.new(2010, 2, 27), Date.new(2010, 3, 4).last_week(:saturday) | |
end | |
def test_next_week_in_calendar_reform | |
assert_equal Date.new(1582, 10, 15), Date.new(1582, 9, 30).next_week(:friday) | |
assert_equal Date.new(1582, 10, 18), Date.new(1582, 10, 4).next_week | |
end | |
def test_last_quarter_on_31st | |
assert_equal Date.new(2004, 2, 29), Date.new(2004, 5, 31).last_quarter | |
end | |
def test_yesterday_constructor | |
assert_equal Date.current - 1, Date.yesterday | |
end | |
def test_yesterday_constructor_when_zone_is_not_set | |
with_env_tz "UTC" do | |
with_tz_default do | |
assert_equal(Date.today - 1, Date.yesterday) | |
end | |
end | |
end | |
def test_yesterday_constructor_when_zone_is_set | |
with_env_tz "UTC" do | |
with_tz_default ActiveSupport::TimeZone["Eastern Time (US & Canada)"] do # UTC -5 | |
Time.stub(:now, Time.local(2000, 1, 1)) do | |
assert_equal Date.new(1999, 12, 30), Date.yesterday | |
end | |
end | |
end | |
end | |
def test_tomorrow_constructor | |
assert_equal Date.current + 1, Date.tomorrow | |
end | |
def test_tomorrow_constructor_when_zone_is_not_set | |
with_env_tz "UTC" do | |
with_tz_default do | |
assert_equal(Date.today + 1, Date.tomorrow) | |
end | |
end | |
end | |
def test_tomorrow_constructor_when_zone_is_set | |
with_env_tz "UTC" do | |
with_tz_default ActiveSupport::TimeZone["Europe/Paris"] do # UTC +1 | |
Time.stub(:now, Time.local(1999, 12, 31, 23)) do | |
assert_equal Date.new(2000, 1, 2), Date.tomorrow | |
end | |
end | |
end | |
end | |
def test_since | |
assert_equal Time.local(2005, 2, 21, 0, 0, 45), Date.new(2005, 2, 21).since(45) | |
end | |
def test_since_when_zone_is_set | |
zone = ActiveSupport::TimeZone["Eastern Time (US & Canada)"] | |
with_env_tz "UTC" do | |
with_tz_default zone do | |
assert_equal zone.local(2005, 2, 21, 0, 0, 45), Date.new(2005, 2, 21).since(45) | |
assert_equal zone, Date.new(2005, 2, 21).since(45).time_zone | |
end | |
end | |
end | |
def test_ago | |
assert_equal Time.local(2005, 2, 20, 23, 59, 15), Date.new(2005, 2, 21).ago(45) | |
end | |
def test_ago_when_zone_is_set | |
zone = ActiveSupport::TimeZone["Eastern Time (US & Canada)"] | |
with_env_tz "UTC" do | |
with_tz_default zone do | |
assert_equal zone.local(2005, 2, 20, 23, 59, 15), Date.new(2005, 2, 21).ago(45) | |
assert_equal zone, Date.new(2005, 2, 21).ago(45).time_zone | |
end | |
end | |
end | |
def test_beginning_of_day | |
assert_equal Time.local(2005, 2, 21, 0, 0, 0), Date.new(2005, 2, 21).beginning_of_day | |
end | |
def test_middle_of_day | |
assert_equal Time.local(2005, 2, 21, 12, 0, 0), Date.new(2005, 2, 21).middle_of_day | |
end | |
def test_beginning_of_day_when_zone_is_set | |
zone = ActiveSupport::TimeZone["Eastern Time (US & Canada)"] | |
with_env_tz "UTC" do | |
with_tz_default zone do | |
assert_equal zone.local(2005, 2, 21, 0, 0, 0), Date.new(2005, 2, 21).beginning_of_day | |
assert_equal zone, Date.new(2005, 2, 21).beginning_of_day.time_zone | |
end | |
end | |
end | |
def test_end_of_day | |
assert_equal Time.local(2005, 2, 21, 23, 59, 59, Rational(999999999, 1000)), Date.new(2005, 2, 21).end_of_day | |
end | |
def test_end_of_day_when_zone_is_set | |
zone = ActiveSupport::TimeZone["Eastern Time (US & Canada)"] | |
with_env_tz "UTC" do | |
with_tz_default zone do | |
assert_equal zone.local(2005, 2, 21, 23, 59, 59, Rational(999999999, 1000)), Date.new(2005, 2, 21).end_of_day | |
assert_equal zone, Date.new(2005, 2, 21).end_of_day.time_zone | |
end | |
end | |
end | |
def test_all_day | |
beginning_of_day = Time.local(2011, 6, 7, 0, 0, 0) | |
end_of_day = Time.local(2011, 6, 7, 23, 59, 59, Rational(999999999, 1000)) | |
assert_equal beginning_of_day..end_of_day, Date.new(2011, 6, 7).all_day | |
end | |
def test_all_day_when_zone_is_set | |
zone = ActiveSupport::TimeZone["Hawaii"] | |
with_env_tz "UTC" do | |
with_tz_default zone do | |
beginning_of_day = zone.local(2011, 6, 7, 0, 0, 0) | |
end_of_day = zone.local(2011, 6, 7, 23, 59, 59, Rational(999999999, 1000)) | |
assert_equal beginning_of_day..end_of_day, Date.new(2011, 6, 7).all_day | |
end | |
end | |
end | |
def test_all_week | |
assert_equal Date.new(2011, 6, 6)..Date.new(2011, 6, 12), Date.new(2011, 6, 7).all_week | |
assert_equal Date.new(2011, 6, 5)..Date.new(2011, 6, 11), Date.new(2011, 6, 7).all_week(:sunday) | |
end | |
def test_all_month | |
assert_equal Date.new(2011, 6, 1)..Date.new(2011, 6, 30), Date.new(2011, 6, 7).all_month | |
end | |
def test_all_quarter | |
assert_equal Date.new(2011, 4, 1)..Date.new(2011, 6, 30), Date.new(2011, 6, 7).all_quarter | |
end | |
def test_all_year | |
assert_equal Date.new(2011, 1, 1)..Date.new(2011, 12, 31), Date.new(2011, 6, 7).all_year | |
end | |
def test_xmlschema | |
with_env_tz "US/Eastern" do | |
assert_match(/^1980-02-28T00:00:00-05:?00$/, Date.new(1980, 2, 28).xmlschema) | |
assert_match(/^1980-06-28T00:00:00-04:?00$/, Date.new(1980, 6, 28).xmlschema) | |
# these tests are only of interest on platforms where older dates #to_time fail over to DateTime | |
if ::DateTime === Date.new(1880, 6, 28).to_time | |
assert_match(/^1880-02-28T00:00:00-05:?00$/, Date.new(1880, 2, 28).xmlschema) | |
assert_match(/^1880-06-28T00:00:00-05:?00$/, Date.new(1880, 6, 28).xmlschema) # DateTimes aren't aware of DST rules | |
end | |
end | |
end | |
def test_xmlschema_when_zone_is_set | |
with_env_tz "UTC" do | |
with_tz_default ActiveSupport::TimeZone["Eastern Time (US & Canada)"] do # UTC -5 | |
assert_match(/^1980-02-28T00:00:00-05:?00$/, Date.new(1980, 2, 28).xmlschema) | |
assert_match(/^1980-06-28T00:00:00-04:?00$/, Date.new(1980, 6, 28).xmlschema) | |
end | |
end | |
end | |
def test_past | |
Date.stub(:current, Date.new(2000, 1, 1)) do | |
assert_equal true, Date.new(1999, 12, 31).past? | |
assert_equal false, Date.new(2000, 1, 1).past? | |
assert_equal false, Date.new(2000, 1, 2).past? | |
end | |
end | |
def test_future | |
Date.stub(:current, Date.new(2000, 1, 1)) do | |
assert_equal false, Date.new(1999, 12, 31).future? | |
assert_equal false, Date.new(2000, 1, 1).future? | |
assert_equal true, Date.new(2000, 1, 2).future? | |
end | |
end | |
def test_current_returns_date_today_when_zone_not_set | |
with_env_tz "US/Central" do | |
Time.stub(:now, Time.local(1999, 12, 31, 23)) do | |
assert_equal Date.today, Date.current | |
end | |
end | |
end | |
  # When Time.zone is set it wins over the process TZ: Date.current must match
  # the zone's notion of "today" rather than Date.today.
  def test_current_returns_time_zone_today_when_zone_is_set
    Time.zone = ActiveSupport::TimeZone["Eastern Time (US & Canada)"]
    with_env_tz "US/Central" do
      assert_equal ::Time.zone.today, Date.current
    end
  ensure
    # Time.zone is global state — reset it so later tests are unaffected.
    Time.zone = nil
  end
def test_date_advance_should_not_change_passed_options_hash | |
options = { years: 3, months: 11, days: 2 } | |
Date.new(2005, 2, 28).advance(options) | |
assert_equal({ years: 3, months: 11, days: 2 }, options) | |
end | |
end | |
# Behavioral smoke tests for the Date core extensions: duck-typing flags,
# blankness, and freeze interactions with memoized methods.
class DateExtBehaviorTest < ActiveSupport::TestCase
  def test_date_acts_like_date
    assert_predicate Date.new, :acts_like_date?
  end

  def test_blank?
    assert_not_predicate Date.new, :blank?
  end

  def test_freeze_doesnt_clobber_memoized_instance_methods
    frozen_date = Date.today.freeze
    # Calling a memoizing method on a frozen Date must not raise.
    assert_nothing_raised { frozen_date.inspect }
  end

  def test_can_freeze_twice
    date = Date.today
    assert_nothing_raised { date.freeze.freeze }
  end
end
# frozen_string_literal: true | |
require "active_support/core_ext/date_time/acts_like" | |
require "active_support/core_ext/date_time/blank" | |
require "active_support/core_ext/date_time/calculations" | |
require "active_support/core_ext/date_time/compatibility" | |
require "active_support/core_ext/date_time/conversions" | |
require "active_support/core_ext/date_time/deprecated_conversions" unless ENV["RAILS_DISABLE_DEPRECATED_TO_S_CONVERSION"] |
# frozen_string_literal: true | |
require_relative "../abstract_unit" | |
require "active_support/time" | |
require_relative "../core_ext/date_and_time_behavior" | |
require_relative "../time_zone_test_helpers" | |
class DateTimeExtCalculationsTest < ActiveSupport::TestCase | |
def date_time_init(year, month, day, hour, minute, second, usec = 0) | |
DateTime.civil(year, month, day, hour, minute, second + (usec / 1000000)) | |
end | |
include DateAndTimeBehavior | |
include TimeZoneTestHelpers | |
def test_to_s | |
datetime = DateTime.new(2005, 2, 21, 14, 30, 0, 0) | |
assert_deprecated do | |
assert_equal "2005-02-21 14:30:00", datetime.to_s(:db) | |
end | |
assert_deprecated do | |
assert_equal "2005-02-21 14:30:00.000000000 +0000", datetime.to_s(:inspect) | |
end | |
assert_deprecated do | |
assert_equal "14:30", datetime.to_s(:time) | |
end | |
assert_deprecated do | |
assert_equal "21 Feb 14:30", datetime.to_s(:short) | |
end | |
assert_deprecated do | |
assert_equal "February 21, 2005 14:30", datetime.to_s(:long) | |
end | |
assert_deprecated do | |
assert_equal "Mon, 21 Feb 2005 14:30:00 +0000", datetime.to_s(:rfc822) | |
end | |
assert_deprecated do | |
assert_equal "February 21st, 2005 14:30", datetime.to_s(:long_ordinal) | |
end | |
assert_match(/^2005-02-21T14:30:00(Z|\+00:00)$/, datetime.to_s) | |
assert_deprecated do | |
assert_match(/^2005-02-21T14:30:00(Z|\+00:00)$/, datetime.to_s(:not_existent)) | |
end | |
with_env_tz "US/Central" do | |
assert_deprecated do | |
assert_equal "2009-02-05T14:30:05-06:00", DateTime.civil(2009, 2, 5, 14, 30, 5, Rational(-21600, 86400)).to_s(:iso8601) | |
end | |
assert_deprecated do | |
assert_equal "2008-06-09T04:05:01-05:00", DateTime.civil(2008, 6, 9, 4, 5, 1, Rational(-18000, 86400)).to_s(:iso8601) | |
end | |
assert_deprecated do | |
assert_equal "2009-02-05T14:30:05+00:00", DateTime.civil(2009, 2, 5, 14, 30, 5).to_s(:iso8601) | |
end | |
end | |
end | |
def test_to_fs | |
datetime = DateTime.new(2005, 2, 21, 14, 30, 0, 0) | |
assert_equal "2005-02-21 14:30:00", datetime.to_fs(:db) | |
assert_equal "2005-02-21 14:30:00.000000000 +0000", datetime.to_fs(:inspect) | |
assert_equal "14:30", datetime.to_fs(:time) | |
assert_equal "21 Feb 14:30", datetime.to_fs(:short) | |
assert_equal "February 21, 2005 14:30", datetime.to_fs(:long) | |
assert_equal "Mon, 21 Feb 2005 14:30:00 +0000", datetime.to_fs(:rfc822) | |
assert_equal "February 21st, 2005 14:30", datetime.to_fs(:long_ordinal) | |
assert_match(/^2005-02-21T14:30:00(Z|\+00:00)$/, datetime.to_fs) | |
assert_match(/^2005-02-21T14:30:00(Z|\+00:00)$/, datetime.to_fs(:not_existent)) | |
with_env_tz "US/Central" do | |
assert_equal "2009-02-05T14:30:05-06:00", DateTime.civil(2009, 2, 5, 14, 30, 5, Rational(-21600, 86400)).to_fs(:iso8601) | |
assert_equal "2008-06-09T04:05:01-05:00", DateTime.civil(2008, 6, 9, 4, 5, 1, Rational(-18000, 86400)).to_fs(:iso8601) | |
assert_equal "2009-02-05T14:30:05+00:00", DateTime.civil(2009, 2, 5, 14, 30, 5).to_fs(:iso8601) | |
end | |
assert_equal "2005-02-21 14:30:00", datetime.to_formatted_s(:db) | |
end | |
def test_readable_inspect | |
datetime = DateTime.new(2005, 2, 21, 14, 30, 0) | |
assert_equal "Mon, 21 Feb 2005 14:30:00 +0000", datetime.readable_inspect | |
assert_equal datetime.readable_inspect, datetime.inspect | |
end | |
def test_to_s_with_custom_date_format | |
Time::DATE_FORMATS[:custom] = "%Y%m%d%H%M%S" | |
assert_deprecated do | |
assert_equal "20050221143000", DateTime.new(2005, 2, 21, 14, 30, 0).to_s(:custom) | |
end | |
ensure | |
Time::DATE_FORMATS.delete(:custom) | |
end | |
def test_to_fs_with_custom_date_format | |
Time::DATE_FORMATS[:custom] = "%Y%m%d%H%M%S" | |
assert_equal "20050221143000", DateTime.new(2005, 2, 21, 14, 30, 0).to_fs(:custom) | |
ensure | |
Time::DATE_FORMATS.delete(:custom) | |
end | |
def test_localtime | |
with_env_tz "US/Eastern" do | |
assert_instance_of Time, DateTime.new(2016, 3, 11, 15, 11, 12, 0).localtime | |
assert_equal Time.local(2016, 3, 11, 10, 11, 12), DateTime.new(2016, 3, 11, 15, 11, 12, 0).localtime | |
assert_equal Time.local(2016, 3, 21, 11, 11, 12), DateTime.new(2016, 3, 21, 15, 11, 12, 0).localtime | |
assert_equal Time.local(2016, 4, 1, 11, 11, 12), DateTime.new(2016, 4, 1, 16, 11, 12, Rational(1, 24)).localtime | |
end | |
end | |
def test_getlocal | |
with_env_tz "US/Eastern" do | |
assert_instance_of Time, DateTime.new(2016, 3, 11, 15, 11, 12, 0).getlocal | |
assert_equal Time.local(2016, 3, 11, 10, 11, 12), DateTime.new(2016, 3, 11, 15, 11, 12, 0).getlocal | |
assert_equal Time.local(2016, 3, 21, 11, 11, 12), DateTime.new(2016, 3, 21, 15, 11, 12, 0).getlocal | |
assert_equal Time.local(2016, 4, 1, 11, 11, 12), DateTime.new(2016, 4, 1, 16, 11, 12, Rational(1, 24)).getlocal | |
end | |
end | |
def test_to_date | |
assert_equal Date.new(2005, 2, 21), DateTime.new(2005, 2, 21, 14, 30, 0).to_date | |
end | |
def test_to_datetime | |
assert_equal DateTime.new(2005, 2, 21, 14, 30, 0), DateTime.new(2005, 2, 21, 14, 30, 0).to_datetime | |
end | |
def test_to_time | |
with_env_tz "US/Eastern" do | |
assert_instance_of Time, DateTime.new(2005, 2, 21, 10, 11, 12, 0).to_time | |
if ActiveSupport.to_time_preserves_timezone | |
assert_equal Time.local(2005, 2, 21, 5, 11, 12).getlocal(0), DateTime.new(2005, 2, 21, 10, 11, 12, 0).to_time | |
assert_equal Time.local(2005, 2, 21, 5, 11, 12).getlocal(0).utc_offset, DateTime.new(2005, 2, 21, 10, 11, 12, 0).to_time.utc_offset | |
else | |
assert_equal Time.local(2005, 2, 21, 5, 11, 12), DateTime.new(2005, 2, 21, 10, 11, 12, 0).to_time | |
assert_equal Time.local(2005, 2, 21, 5, 11, 12).utc_offset, DateTime.new(2005, 2, 21, 10, 11, 12, 0).to_time.utc_offset | |
end | |
end | |
end | |
def test_to_time_preserves_fractional_seconds | |
assert_equal Time.utc(2005, 2, 21, 10, 11, 12, 256), DateTime.new(2005, 2, 21, 10, 11, 12 + Rational(256, 1000000), 0).to_time | |
end | |
def test_civil_from_format | |
assert_equal Time.local(2010, 5, 4, 0, 0, 0), DateTime.civil_from_format(:local, 2010, 5, 4) | |
assert_equal Time.utc(2010, 5, 4, 0, 0, 0), DateTime.civil_from_format(:utc, 2010, 5, 4) | |
end | |
def test_seconds_since_midnight | |
assert_equal 1, DateTime.civil(2005, 1, 1, 0, 0, 1).seconds_since_midnight | |
assert_equal 60, DateTime.civil(2005, 1, 1, 0, 1, 0).seconds_since_midnight | |
assert_equal 3660, DateTime.civil(2005, 1, 1, 1, 1, 0).seconds_since_midnight | |
assert_equal 86399, DateTime.civil(2005, 1, 1, 23, 59, 59).seconds_since_midnight | |
end | |
def test_seconds_until_end_of_day | |
assert_equal 0, DateTime.civil(2005, 1, 1, 23, 59, 59).seconds_until_end_of_day | |
assert_equal 1, DateTime.civil(2005, 1, 1, 23, 59, 58).seconds_until_end_of_day | |
assert_equal 60, DateTime.civil(2005, 1, 1, 23, 58, 59).seconds_until_end_of_day | |
assert_equal 3660, DateTime.civil(2005, 1, 1, 22, 58, 59).seconds_until_end_of_day | |
assert_equal 86399, DateTime.civil(2005, 1, 1, 0, 0, 0).seconds_until_end_of_day | |
end | |
def test_beginning_of_day | |
assert_equal DateTime.civil(2005, 2, 4, 0, 0, 0), DateTime.civil(2005, 2, 4, 10, 10, 10).beginning_of_day | |
end | |
def test_middle_of_day | |
assert_equal DateTime.civil(2005, 2, 4, 12, 0, 0), DateTime.civil(2005, 2, 4, 10, 10, 10).middle_of_day | |
end | |
def test_end_of_day | |
assert_equal DateTime.civil(2005, 2, 4, 23, 59, Rational(59999999999, 1000000000)), DateTime.civil(2005, 2, 4, 10, 10, 10).end_of_day | |
end | |
def test_beginning_of_hour | |
assert_equal DateTime.civil(2005, 2, 4, 19, 0, 0), DateTime.civil(2005, 2, 4, 19, 30, 10).beginning_of_hour | |
end | |
def test_end_of_hour | |
assert_equal DateTime.civil(2005, 2, 4, 19, 59, Rational(59999999999, 1000000000)), DateTime.civil(2005, 2, 4, 19, 30, 10).end_of_hour | |
end | |
def test_beginning_of_minute | |
assert_equal DateTime.civil(2005, 2, 4, 19, 30, 0), DateTime.civil(2005, 2, 4, 19, 30, 10).beginning_of_minute | |
end | |
def test_end_of_minute | |
assert_equal DateTime.civil(2005, 2, 4, 19, 30, Rational(59999999999, 1000000000)), DateTime.civil(2005, 2, 4, 19, 30, 10).end_of_minute | |
end | |
def test_end_of_month | |
assert_equal DateTime.civil(2005, 3, 31, 23, 59, Rational(59999999999, 1000000000)), DateTime.civil(2005, 3, 20, 10, 10, 10).end_of_month | |
assert_equal DateTime.civil(2005, 2, 28, 23, 59, Rational(59999999999, 1000000000)), DateTime.civil(2005, 2, 20, 10, 10, 10).end_of_month | |
assert_equal DateTime.civil(2005, 4, 30, 23, 59, Rational(59999999999, 1000000000)), DateTime.civil(2005, 4, 20, 10, 10, 10).end_of_month | |
end | |
def test_ago | |
assert_equal DateTime.civil(2005, 2, 22, 10, 10, 9), DateTime.civil(2005, 2, 22, 10, 10, 10).ago(1) | |
assert_equal DateTime.civil(2005, 2, 22, 9, 10, 10), DateTime.civil(2005, 2, 22, 10, 10, 10).ago(3600) | |
assert_equal DateTime.civil(2005, 2, 20, 10, 10, 10), DateTime.civil(2005, 2, 22, 10, 10, 10).ago(86400 * 2) | |
assert_equal DateTime.civil(2005, 2, 20, 9, 9, 45), DateTime.civil(2005, 2, 22, 10, 10, 10).ago(86400 * 2 + 3600 + 25) | |
end | |
def test_since | |
assert_equal DateTime.civil(2005, 2, 22, 10, 10, 11), DateTime.civil(2005, 2, 22, 10, 10, 10).since(1) | |
assert_equal DateTime.civil(2005, 2, 22, 11, 10, 10), DateTime.civil(2005, 2, 22, 10, 10, 10).since(3600) | |
assert_equal DateTime.civil(2005, 2, 24, 10, 10, 10), DateTime.civil(2005, 2, 22, 10, 10, 10).since(86400 * 2) | |
assert_equal DateTime.civil(2005, 2, 24, 11, 10, 35), DateTime.civil(2005, 2, 22, 10, 10, 10).since(86400 * 2 + 3600 + 25) | |
assert_not_equal DateTime.civil(2005, 2, 22, 10, 10, 11), DateTime.civil(2005, 2, 22, 10, 10, 10).since(1.333) | |
assert_not_equal DateTime.civil(2005, 2, 22, 10, 10, 12), DateTime.civil(2005, 2, 22, 10, 10, 10).since(1.667) | |
end | |
def test_change | |
assert_equal DateTime.civil(2006, 2, 22, 15, 15, 10), DateTime.civil(2005, 2, 22, 15, 15, 10).change(year: 2006) | |
assert_equal DateTime.civil(2005, 6, 22, 15, 15, 10), DateTime.civil(2005, 2, 22, 15, 15, 10).change(month: 6) | |
assert_equal DateTime.civil(2012, 9, 22, 15, 15, 10), DateTime.civil(2005, 2, 22, 15, 15, 10).change(year: 2012, month: 9) | |
assert_equal DateTime.civil(2005, 2, 22, 16), DateTime.civil(2005, 2, 22, 15, 15, 10).change(hour: 16) | |
assert_equal DateTime.civil(2005, 2, 22, 16, 45), DateTime.civil(2005, 2, 22, 15, 15, 10).change(hour: 16, min: 45) | |
assert_equal DateTime.civil(2005, 2, 22, 15, 45), DateTime.civil(2005, 2, 22, 15, 15, 10).change(min: 45) | |
# datetime with non-zero offset | |
assert_equal DateTime.civil(2005, 2, 22, 15, 15, 10, Rational(-5, 24)), DateTime.civil(2005, 2, 22, 15, 15, 10, 0).change(offset: Rational(-5, 24)) | |
# datetime with fractions of a second | |
assert_equal DateTime.civil(2005, 2, 1, 15, 15, 10.7), DateTime.civil(2005, 2, 22, 15, 15, 10.7).change(day: 1) | |
assert_equal DateTime.civil(2005, 1, 2, 11, 22, Rational(33000008, 1000000)), DateTime.civil(2005, 1, 2, 11, 22, 33).change(usec: 8) | |
assert_equal DateTime.civil(2005, 1, 2, 11, 22, Rational(33000008, 1000000)), DateTime.civil(2005, 1, 2, 11, 22, 33).change(nsec: 8000) | |
assert_raise(ArgumentError) { DateTime.civil(2005, 1, 2, 11, 22, 0).change(usec: 1, nsec: 1) } | |
assert_raise(ArgumentError) { DateTime.civil(2005, 1, 2, 11, 22, 0).change(usec: 1000000) } | |
assert_raise(ArgumentError) { DateTime.civil(2005, 1, 2, 11, 22, 0).change(nsec: 1000000000) } | |
assert_nothing_raised { DateTime.civil(2005, 1, 2, 11, 22, 0).change(usec: 999999) } | |
assert_nothing_raised { DateTime.civil(2005, 1, 2, 11, 22, 0).change(nsec: 999999999) } | |
end | |
def test_advance | |
assert_equal DateTime.civil(2006, 2, 28, 15, 15, 10), DateTime.civil(2005, 2, 28, 15, 15, 10).advance(years: 1) | |
assert_equal DateTime.civil(2005, 6, 28, 15, 15, 10), DateTime.civil(2005, 2, 28, 15, 15, 10).advance(months: 4) | |
assert_equal DateTime.civil(2005, 3, 21, 15, 15, 10), DateTime.civil(2005, 2, 28, 15, 15, 10).advance(weeks: 3) | |
assert_equal DateTime.civil(2005, 3, 5, 15, 15, 10), DateTime.civil(2005, 2, 28, 15, 15, 10).advance(days: 5) | |
assert_equal DateTime.civil(2012, 9, 28, 15, 15, 10), DateTime.civil(2005, 2, 28, 15, 15, 10).advance(years: 7, months: 7) | |
assert_equal DateTime.civil(2013, 10, 3, 15, 15, 10), DateTime.civil(2005, 2, 28, 15, 15, 10).advance(years: 7, months: 19, days: 5) | |
assert_equal DateTime.civil(2013, 10, 17, 15, 15, 10), DateTime.civil(2005, 2, 28, 15, 15, 10).advance(years: 7, months: 19, weeks: 2, days: 5) | |
assert_equal DateTime.civil(2001, 12, 27, 15, 15, 10), DateTime.civil(2005, 2, 28, 15, 15, 10).advance(years: -3, months: -2, days: -1) | |
assert_equal DateTime.civil(2005, 2, 28, 15, 15, 10), DateTime.civil(2004, 2, 29, 15, 15, 10).advance(years: 1) # leap day plus one year | |
assert_equal DateTime.civil(2005, 2, 28, 20, 15, 10), DateTime.civil(2005, 2, 28, 15, 15, 10).advance(hours: 5) | |
assert_equal DateTime.civil(2005, 2, 28, 15, 22, 10), DateTime.civil(2005, 2, 28, 15, 15, 10).advance(minutes: 7) | |
assert_equal DateTime.civil(2005, 2, 28, 15, 15, 19), DateTime.civil(2005, 2, 28, 15, 15, 10).advance(seconds: 9) | |
assert_equal DateTime.civil(2005, 2, 28, 20, 22, 19), DateTime.civil(2005, 2, 28, 15, 15, 10).advance(hours: 5, minutes: 7, seconds: 9) | |
assert_equal DateTime.civil(2005, 2, 28, 10, 8, 1), DateTime.civil(2005, 2, 28, 15, 15, 10).advance(hours: -5, minutes: -7, seconds: -9) | |
assert_equal DateTime.civil(2013, 10, 17, 20, 22, 19), DateTime.civil(2005, 2, 28, 15, 15, 10).advance(years: 7, months: 19, weeks: 2, days: 5, hours: 5, minutes: 7, seconds: 9) | |
end | |
def test_advance_partial_days | |
assert_equal DateTime.civil(2012, 9, 29, 13, 15, 10), DateTime.civil(2012, 9, 28, 1, 15, 10).advance(days: 1.5) | |
assert_equal DateTime.civil(2012, 9, 28, 13, 15, 10), DateTime.civil(2012, 9, 28, 1, 15, 10).advance(days: 0.5) | |
assert_equal DateTime.civil(2012, 10, 29, 13, 15, 10), DateTime.civil(2012, 9, 28, 1, 15, 10).advance(days: 1.5, months: 1) | |
end | |
def test_advanced_processes_first_the_date_deltas_and_then_the_time_deltas | |
# If the time deltas were processed first, the following datetimes would be advanced to 2010/04/01 instead. | |
assert_equal DateTime.civil(2010, 3, 29), DateTime.civil(2010, 2, 28, 23, 59, 59).advance(months: 1, seconds: 1) | |
assert_equal DateTime.civil(2010, 3, 29), DateTime.civil(2010, 2, 28, 23, 59).advance(months: 1, minutes: 1) | |
assert_equal DateTime.civil(2010, 3, 29), DateTime.civil(2010, 2, 28, 23).advance(months: 1, hours: 1) | |
assert_equal DateTime.civil(2010, 3, 29), DateTime.civil(2010, 2, 28, 22, 58, 59).advance(months: 1, hours: 1, minutes: 1, seconds: 1) | |
end | |
def test_last_week | |
assert_equal DateTime.civil(2005, 2, 21), DateTime.civil(2005, 3, 1, 15, 15, 10).last_week | |
assert_equal DateTime.civil(2005, 2, 22), DateTime.civil(2005, 3, 1, 15, 15, 10).last_week(:tuesday) | |
assert_equal DateTime.civil(2005, 2, 25), DateTime.civil(2005, 3, 1, 15, 15, 10).last_week(:friday) | |
assert_equal DateTime.civil(2006, 10, 30), DateTime.civil(2006, 11, 6, 0, 0, 0).last_week | |
assert_equal DateTime.civil(2006, 11, 15), DateTime.civil(2006, 11, 23, 0, 0, 0).last_week(:wednesday) | |
end | |
def test_date_time_should_have_correct_last_week_for_leap_year | |
assert_equal DateTime.civil(2016, 2, 29), DateTime.civil(2016, 3, 7).last_week | |
end | |
def test_last_quarter_on_31st | |
assert_equal DateTime.civil(2004, 2, 29), DateTime.civil(2004, 5, 31).last_quarter | |
end | |
def test_xmlschema | |
assert_match(/^1880-02-28T15:15:10\+00:?00$/, DateTime.civil(1880, 2, 28, 15, 15, 10).xmlschema) | |
assert_match(/^1980-02-28T15:15:10\+00:?00$/, DateTime.civil(1980, 2, 28, 15, 15, 10).xmlschema) | |
assert_match(/^2080-02-28T15:15:10\+00:?00$/, DateTime.civil(2080, 2, 28, 15, 15, 10).xmlschema) | |
assert_match(/^1880-02-28T15:15:10-06:?00$/, DateTime.civil(1880, 2, 28, 15, 15, 10, -0.25).xmlschema) | |
assert_match(/^1980-02-28T15:15:10-06:?00$/, DateTime.civil(1980, 2, 28, 15, 15, 10, -0.25).xmlschema) | |
assert_match(/^2080-02-28T15:15:10-06:?00$/, DateTime.civil(2080, 2, 28, 15, 15, 10, -0.25).xmlschema) | |
end | |
def test_today_with_offset | |
Date.stub(:current, Date.new(2000, 1, 1)) do | |
assert_equal false, DateTime.civil(1999, 12, 31, 23, 59, 59, Rational(-18000, 86400)).today? | |
assert_equal true, DateTime.civil(2000, 1, 1, 0, 0, 0, Rational(-18000, 86400)).today? | |
assert_equal true, DateTime.civil(2000, 1, 1, 23, 59, 59, Rational(-18000, 86400)).today? | |
assert_equal false, DateTime.civil(2000, 1, 2, 0, 0, 0, Rational(-18000, 86400)).today? | |
end | |
end | |
def test_today_without_offset | |
Date.stub(:current, Date.new(2000, 1, 1)) do | |
assert_equal false, DateTime.civil(1999, 12, 31, 23, 59, 59).today? | |
assert_equal true, DateTime.civil(2000, 1, 1, 0).today? | |
assert_equal true, DateTime.civil(2000, 1, 1, 23, 59, 59).today? | |
assert_equal false, DateTime.civil(2000, 1, 2, 0).today? | |
end | |
end | |
def test_yesterday_with_offset | |
Date.stub(:current, Date.new(2000, 1, 1)) do | |
assert_equal true, DateTime.civil(1999, 12, 31, 23, 59, 59, Rational(-18000, 86400)).yesterday? | |
assert_equal false, DateTime.civil(2000, 1, 1, 0, 0, 0, Rational(-18000, 86400)).yesterday? | |
assert_equal false, DateTime.civil(2000, 1, 1, 23, 59, 59, Rational(-18000, 86400)).yesterday? | |
assert_equal true, DateTime.civil(1999, 12, 31, 0, 0, 0, Rational(-18000, 86400)).yesterday? | |
end | |
end | |
def test_yesterday_without_offset | |
Date.stub(:current, Date.new(2000, 1, 1)) do | |
assert_equal true, DateTime.civil(1999, 12, 31, 23, 59, 59).yesterday? | |
assert_equal false, DateTime.civil(2000, 1, 1, 0).yesterday? | |
assert_equal false, DateTime.civil(2000, 1, 1, 23, 59, 59).yesterday? | |
assert_equal false, DateTime.civil(2000, 1, 2, 0).yesterday? | |
end | |
end | |
def test_prev_day_with_offset | |
Date.stub(:current, Date.new(2000, 1, 1)) do | |
assert_equal true, DateTime.civil(1999, 12, 31, 23, 59, 59, Rational(-18000, 86400)).prev_day? | |
assert_equal false, DateTime.civil(2000, 1, 1, 0, 0, 0, Rational(-18000, 86400)).prev_day? | |
assert_equal false, DateTime.civil(2000, 1, 1, 23, 59, 59, Rational(-18000, 86400)).prev_day? | |
assert_equal true, DateTime.civil(1999, 12, 31, 0, 0, 0, Rational(-18000, 86400)).prev_day? | |
end | |
end | |
def test_prev_day_without_offset | |
Date.stub(:current, Date.new(2000, 1, 1)) do | |
assert_equal true, DateTime.civil(1999, 12, 31, 23, 59, 59).prev_day? | |
assert_equal false, DateTime.civil(2000, 1, 1, 0).prev_day? | |
assert_equal false, DateTime.civil(2000, 1, 1, 23, 59, 59).prev_day? | |
assert_equal false, DateTime.civil(2000, 1, 2, 0).prev_day? | |
end | |
end | |
def test_tomorrow_with_offset | |
Date.stub(:current, Date.new(2000, 1, 1)) do | |
assert_equal false, DateTime.civil(1999, 12, 31, 23, 59, 59, Rational(-18000, 86400)).tomorrow? | |
assert_equal true, DateTime.civil(2000, 1, 2, 0, 0, 0, Rational(-18000, 86400)).tomorrow? | |
assert_equal false, DateTime.civil(2000, 1, 1, 23, 59, 59, Rational(-18000, 86400)).tomorrow? | |
assert_equal true, DateTime.civil(2000, 1, 2, 23, 59, 59, Rational(-18000, 86400)).tomorrow? | |
end | |
end | |
def test_tomorrow_without_offset | |
Date.stub(:current, Date.new(2000, 1, 1)) do | |
assert_equal false, DateTime.civil(1999, 12, 31, 23, 59, 59).tomorrow? | |
assert_equal true, DateTime.civil(2000, 1, 2, 0).tomorrow? | |
assert_equal false, DateTime.civil(2000, 1, 1, 23, 59, 59).tomorrow? | |
assert_equal false, DateTime.civil(2000, 1, 3, 0).tomorrow? | |
end | |
end | |
def test_next_day_with_offset | |
Date.stub(:current, Date.new(2000, 1, 1)) do | |
assert_equal false, DateTime.civil(1999, 12, 31, 23, 59, 59, Rational(-18000, 86400)).next_day? | |
assert_equal true, DateTime.civil(2000, 1, 2, 0, 0, 0, Rational(-18000, 86400)).next_day? | |
assert_equal false, DateTime.civil(2000, 1, 1, 23, 59, 59, Rational(-18000, 86400)).next_day? | |
assert_equal true, DateTime.civil(2000, 1, 2, 23, 59, 59, Rational(-18000, 86400)).next_day? | |
end | |
end | |
def test_next_day_without_offset | |
Date.stub(:current, Date.new(2000, 1, 1)) do | |
assert_equal false, DateTime.civil(1999, 12, 31, 23, 59, 59).next_day? | |
assert_equal true, DateTime.civil(2000, 1, 2, 0).next_day? | |
assert_equal false, DateTime.civil(2000, 1, 1, 23, 59, 59).next_day? | |
assert_equal false, DateTime.civil(2000, 1, 3, 0).next_day? | |
end | |
end | |
def test_past_with_offset | |
DateTime.stub(:current, DateTime.civil(2005, 2, 10, 15, 30, 45, Rational(-18000, 86400))) do | |
assert_equal true, DateTime.civil(2005, 2, 10, 15, 30, 44, Rational(-18000, 86400)).past? | |
assert_equal false, DateTime.civil(2005, 2, 10, 15, 30, 45, Rational(-18000, 86400)).past? | |
assert_equal false, DateTime.civil(2005, 2, 10, 15, 30, 46, Rational(-18000, 86400)).past? | |
end | |
end | |
def test_past_without_offset | |
DateTime.stub(:current, DateTime.civil(2005, 2, 10, 15, 30, 45, Rational(-18000, 86400))) do | |
assert_equal true, DateTime.civil(2005, 2, 10, 20, 30, 44).past? | |
assert_equal false, DateTime.civil(2005, 2, 10, 20, 30, 45).past? | |
assert_equal false, DateTime.civil(2005, 2, 10, 20, 30, 46).past? | |
end | |
end | |
def test_future_with_offset | |
DateTime.stub(:current, DateTime.civil(2005, 2, 10, 15, 30, 45, Rational(-18000, 86400))) do | |
assert_equal false, DateTime.civil(2005, 2, 10, 15, 30, 44, Rational(-18000, 86400)).future? | |
assert_equal false, DateTime.civil(2005, 2, 10, 15, 30, 45, Rational(-18000, 86400)).future? | |
assert_equal true, DateTime.civil(2005, 2, 10, 15, 30, 46, Rational(-18000, 86400)).future? | |
end | |
end | |
def test_future_without_offset | |
DateTime.stub(:current, DateTime.civil(2005, 2, 10, 15, 30, 45, Rational(-18000, 86400))) do | |
assert_equal false, DateTime.civil(2005, 2, 10, 20, 30, 44).future? | |
assert_equal false, DateTime.civil(2005, 2, 10, 20, 30, 45).future? | |
assert_equal true, DateTime.civil(2005, 2, 10, 20, 30, 46).future? | |
end | |
end | |
def test_current_returns_date_today_when_zone_is_not_set | |
with_env_tz "US/Eastern" do | |
Time.stub(:now, Time.local(1999, 12, 31, 23, 59, 59)) do | |
assert_equal DateTime.new(1999, 12, 31, 23, 59, 59, Rational(-18000, 86400)), DateTime.current | |
end | |
end | |
end | |
def test_current_returns_time_zone_today_when_zone_is_set | |
Time.zone = ActiveSupport::TimeZone["Eastern Time (US & Canada)"] | |
with_env_tz "US/Eastern" do | |
Time.stub(:now, Time.local(1999, 12, 31, 23, 59, 59)) do | |
assert_equal DateTime.new(1999, 12, 31, 23, 59, 59, Rational(-18000, 86400)), DateTime.current | |
end | |
end | |
ensure | |
Time.zone = nil | |
end | |
def test_current_without_time_zone | |
assert_kind_of DateTime, DateTime.current | |
end | |
def test_current_with_time_zone | |
with_env_tz "US/Eastern" do | |
assert_kind_of DateTime, DateTime.current | |
end | |
end | |
def test_acts_like_date | |
assert_predicate DateTime.new, :acts_like_date? | |
end | |
def test_acts_like_time | |
assert_predicate DateTime.new, :acts_like_time? | |
end | |
def test_blank? | |
assert_not_predicate DateTime.new, :blank? | |
end | |
def test_utc? | |
assert_equal true, DateTime.civil(2005, 2, 21, 10, 11, 12).utc? | |
assert_equal true, DateTime.civil(2005, 2, 21, 10, 11, 12, 0).utc? | |
assert_equal false, DateTime.civil(2005, 2, 21, 10, 11, 12, 0.25).utc? | |
assert_equal false, DateTime.civil(2005, 2, 21, 10, 11, 12, -0.25).utc? | |
end | |
def test_utc_offset | |
assert_equal 0, DateTime.civil(2005, 2, 21, 10, 11, 12).utc_offset | |
assert_equal 0, DateTime.civil(2005, 2, 21, 10, 11, 12, 0).utc_offset | |
assert_equal 21600, DateTime.civil(2005, 2, 21, 10, 11, 12, 0.25).utc_offset | |
assert_equal(-21600, DateTime.civil(2005, 2, 21, 10, 11, 12, -0.25).utc_offset) | |
assert_equal(-18000, DateTime.civil(2005, 2, 21, 10, 11, 12, Rational(-5, 24)).utc_offset) | |
end | |
def test_utc | |
assert_instance_of Time, DateTime.civil(2005, 2, 21, 10, 11, 12, Rational(-6, 24)).utc | |
assert_equal DateTime.civil(2005, 2, 21, 16, 11, 12, 0), DateTime.civil(2005, 2, 21, 10, 11, 12, Rational(-6, 24)).utc | |
assert_equal DateTime.civil(2005, 2, 21, 15, 11, 12, 0), DateTime.civil(2005, 2, 21, 10, 11, 12, Rational(-5, 24)).utc | |
assert_equal DateTime.civil(2005, 2, 21, 10, 11, 12, 0), DateTime.civil(2005, 2, 21, 10, 11, 12, 0).utc | |
assert_equal DateTime.civil(2005, 2, 21, 9, 11, 12, 0), DateTime.civil(2005, 2, 21, 10, 11, 12, Rational(1, 24)).utc | |
assert_equal DateTime.civil(2005, 2, 21, 9, 11, 12, 0), DateTime.civil(2005, 2, 21, 10, 11, 12, Rational(1, 24)).getutc | |
end | |
def test_formatted_offset_with_utc | |
assert_equal "+00:00", DateTime.civil(2000).formatted_offset | |
assert_equal "+0000", DateTime.civil(2000).formatted_offset(false) | |
assert_equal "UTC", DateTime.civil(2000).formatted_offset(true, "UTC") | |
end | |
def test_formatted_offset_with_local | |
dt = DateTime.civil(2005, 2, 21, 10, 11, 12, Rational(-5, 24)) | |
assert_equal "-05:00", dt.formatted_offset | |
assert_equal "-0500", dt.formatted_offset(false) | |
end | |
def test_compare_with_time | |
assert_equal 1, DateTime.civil(2000) <=> Time.utc(1999, 12, 31, 23, 59, 59) | |
assert_equal 0, DateTime.civil(2000) <=> Time.utc(2000, 1, 1, 0, 0, 0) | |
assert_equal(-1, DateTime.civil(2000) <=> Time.utc(2000, 1, 1, 0, 0, 1)) | |
end | |
def test_compare_with_datetime | |
assert_equal 1, DateTime.civil(2000) <=> DateTime.civil(1999, 12, 31, 23, 59, 59) | |
assert_equal 0, DateTime.civil(2000) <=> DateTime.civil(2000, 1, 1, 0, 0, 0) | |
assert_equal(-1, DateTime.civil(2000) <=> DateTime.civil(2000, 1, 1, 0, 0, 1)) | |
end | |
def test_compare_with_time_with_zone | |
assert_equal 1, DateTime.civil(2000) <=> ActiveSupport::TimeWithZone.new(Time.utc(1999, 12, 31, 23, 59, 59), ActiveSupport::TimeZone["UTC"]) | |
assert_equal 0, DateTime.civil(2000) <=> ActiveSupport::TimeWithZone.new(Time.utc(2000, 1, 1, 0, 0, 0), ActiveSupport::TimeZone["UTC"]) | |
assert_equal(-1, DateTime.civil(2000) <=> ActiveSupport::TimeWithZone.new(Time.utc(2000, 1, 1, 0, 0, 1), ActiveSupport::TimeZone["UTC"])) | |
end | |
def test_compare_with_string | |
assert_equal 1, DateTime.civil(2000) <=> Time.utc(1999, 12, 31, 23, 59, 59).to_s | |
assert_equal 0, DateTime.civil(2000) <=> Time.utc(2000, 1, 1, 0, 0, 0).to_s | |
assert_equal(-1, DateTime.civil(2000) <=> Time.utc(2000, 1, 1, 0, 0, 1).to_s) | |
assert_nil DateTime.civil(2000) <=> "Invalid as Time" | |
end | |
def test_compare_with_integer | |
assert_equal 1, DateTime.civil(1970, 1, 1, 12, 0, 0) <=> 2440587 | |
assert_equal 0, DateTime.civil(1970, 1, 1, 12, 0, 0) <=> 2440588 | |
assert_equal(-1, DateTime.civil(1970, 1, 1, 12, 0, 0) <=> 2440589) | |
end | |
def test_compare_with_float | |
assert_equal 1, DateTime.civil(1970) <=> 2440586.5 | |
assert_equal 0, DateTime.civil(1970) <=> 2440587.5 | |
assert_equal(-1, DateTime.civil(1970) <=> 2440588.5) | |
end | |
def test_compare_with_rational | |
assert_equal 1, DateTime.civil(1970) <=> Rational(4881173, 2) | |
assert_equal 0, DateTime.civil(1970) <=> Rational(4881175, 2) | |
assert_equal(-1, DateTime.civil(1970) <=> Rational(4881177, 2)) | |
end | |
def test_to_f | |
assert_equal 946684800.0, DateTime.civil(2000).to_f | |
assert_equal 946684800.0, DateTime.civil(1999, 12, 31, 19, 0, 0, Rational(-5, 24)).to_f | |
assert_equal 946684800.5, DateTime.civil(1999, 12, 31, 19, 0, 0.5, Rational(-5, 24)).to_f | |
end | |
def test_to_i | |
assert_equal 946684800, DateTime.civil(2000).to_i | |
assert_equal 946684800, DateTime.civil(1999, 12, 31, 19, 0, 0, Rational(-5, 24)).to_i | |
end | |
def test_usec | |
assert_equal 0, DateTime.civil(2000).usec | |
assert_equal 500000, DateTime.civil(2000, 1, 1, 0, 0, Rational(1, 2)).usec | |
end | |
def test_nsec | |
assert_equal 0, DateTime.civil(2000).nsec | |
assert_equal 500000000, DateTime.civil(2000, 1, 1, 0, 0, Rational(1, 2)).nsec | |
end | |
def test_subsec | |
assert_equal 0, DateTime.civil(2000).subsec | |
assert_equal Rational(1, 2), DateTime.civil(2000, 1, 1, 0, 0, Rational(1, 2)).subsec | |
end | |
end |
# frozen_string_literal: true | |
module ActiveSupport
  module Testing
    module Declarative
      unless defined?(Spec)
        # Helper to define a test method using a String. Under the hood, it replaces
        # spaces with underscores and defines the test method.
        #
        #   test "verify something" do
        #     ...
        #   end
        #
        # Raises if a test with the same derived name already exists; without a
        # block, defines a test that flunks, flagging the missing implementation.
        def test(name, &block)
          test_name = :"test_#{name.gsub(/\s+/, '_')}"
          raise "#{test_name} is already defined in #{self}" if method_defined?(test_name)

          if block
            define_method(test_name, &block)
          else
            define_method(test_name) do
              flunk "No implementation provided for #{name}"
            end
          end
        end
      end
    end
  end
end
# frozen_string_literal: true | |
require "active_support/core_ext/module/attribute_accessors" | |
require "active_support/core_ext/module/delegation" | |
require "json" | |
module ActiveSupport
  # Look for and parse json strings that look like ISO 8601 times.
  mattr_accessor :parse_json_times

  module JSON
    # matches YAML-formatted dates
    DATE_REGEX = /\A\d{4}-\d{2}-\d{2}\z/
    # Matches bare dates OR date + time with optional fractional seconds and an
    # optional "Z"/±HH(:MM) zone suffix. Note the first alternative means plain
    # dates match this regex too; convert_dates_from relies on DATE_REGEX being
    # tested first in its case statement to disambiguate.
    DATETIME_REGEX = /\A(?:\d{4}-\d{2}-\d{2}|\d{4}-\d{1,2}-\d{1,2}[T \t]+\d{1,2}:\d{2}:\d{2}(\.[0-9]*)?(([ \t]*)Z|[-+]\d{2}?(:\d{2})?)?)\z/
    class << self
      # Parses a JSON string (JavaScript Object Notation) into a hash.
      # See http://www.json.org for more info.
      #
      #   ActiveSupport::JSON.decode("{\"team\":\"rails\",\"players\":\"36\"}")
      #   => {"team" => "rails", "players" => "36"}
      #
      # quirks_mode allows bare scalars ("1", "\"str\"") at the top level, not
      # just objects/arrays. When parse_json_times is enabled, date-looking
      # strings anywhere in the structure are converted to Date/Time values.
      def decode(json)
        data = ::JSON.parse(json, quirks_mode: true)
        if ActiveSupport.parse_json_times
          convert_dates_from(data)
        else
          data
        end
      end
      alias_method :load, :decode

      # Returns the class of the error that will be raised when there is an
      # error in decoding JSON. Using this method means you won't directly
      # depend on the ActiveSupport's JSON implementation, in case it changes
      # in the future.
      #
      #   begin
      #     obj = ActiveSupport::JSON.decode(some_string)
      #   rescue ActiveSupport::JSON.parse_error
      #     Rails.logger.warn("Attempted to decode invalid JSON: #{some_string}")
      #   end
      def parse_error
        ::JSON::ParserError
      end

      private
        # Recursively converts date/datetime-looking strings in the decoded
        # structure. Arrays and hashes are mutated IN PLACE (map!,
        # transform_values!); strings that fail to parse are returned unchanged.
        # NOTE(review): the DATETIME_REGEX branch calls Time.zone.parse, which
        # assumes Time.zone is configured — confirm callers enable
        # parse_json_times only when a zone is set.
        def convert_dates_from(data)
          case data
          when nil
            nil
          when DATE_REGEX
            begin
              Date.parse(data)
            rescue ArgumentError
              # e.g. "1089-10-40" matches the regex but is not a real date.
              data
            end
          when DATETIME_REGEX
            begin
              Time.zone.parse(data)
            rescue ArgumentError
              data
            end
          when Array
            data.map! { |d| convert_dates_from(d) }
          when Hash
            data.transform_values! do |value|
              convert_dates_from(value)
            end
          else
            data
          end
        end
    end
  end
end
# frozen_string_literal: true | |
require_relative "../abstract_unit" | |
require "active_support/json" | |
require "active_support/time" | |
require_relative "../time_zone_test_helpers" | |
# Exercises ActiveSupport::JSON.decode across a table of JSON inputs,
# covering escaping, unicode, date/time coercion (parse_json_times), and
# non-object "fragment" inputs.
class TestJSONDecoding < ActiveSupport::TestCase
  include TimeZoneTestHelpers

  # Fixture: would be invoked by JSON's create_additions unmarshalling hook;
  # the suite asserts decoding does NOT trigger it.
  class Foo
    def self.json_create(object)
      "Foo"
    end
  end

  # Maps raw JSON input => expected decoded Ruby value. Time-valued
  # expectations assume the Eastern Time zone set in the generated tests.
  TESTS = {
    %q({"returnTo":{"\/categories":"\/"}}) => { "returnTo" => { "/categories" => "/" } },
    %q({"return\\"To\\":":{"\/categories":"\/"}}) => { "return\"To\":" => { "/categories" => "/" } },
    %q({"returnTo":{"\/categories":1}}) => { "returnTo" => { "/categories" => 1 } },
    %({"returnTo":[1,"a"]}) => { "returnTo" => [1, "a"] },
    %({"returnTo":[1,"\\"a\\",", "b"]}) => { "returnTo" => [1, "\"a\",", "b"] },
    %({"a": "'", "b": "5,000"}) => { "a" => "'", "b" => "5,000" },
    %({"a": "a's, b's and c's", "b": "5,000"}) => { "a" => "a's, b's and c's", "b" => "5,000" },
    # multibyte
    %({"matzue": "松江", "asakusa": "浅草"}) => { "matzue" => "松江", "asakusa" => "浅草" },
    %({"a": "2007-01-01"}) => { "a" => Date.new(2007, 1, 1) },
    %({"a": "2007-01-01 01:12:34 Z"}) => { "a" => Time.utc(2007, 1, 1, 1, 12, 34) },
    %(["2007-01-01 01:12:34 Z"]) => [Time.utc(2007, 1, 1, 1, 12, 34)],
    %(["2007-01-01 01:12:34 Z", "2007-01-01 01:12:35 Z"]) => [Time.utc(2007, 1, 1, 1, 12, 34), Time.utc(2007, 1, 1, 1, 12, 35)],
    # no time zone
    %({"a": "2007-01-01 01:12:34"}) => { "a" => Time.new(2007, 1, 1, 1, 12, 34, "-05:00") },
    # invalid date
    %({"a": "1089-10-40"}) => { "a" => "1089-10-40" },
    # xmlschema date notation
    %({"a": "2009-08-10T19:01:02"}) => { "a" => Time.new(2009, 8, 10, 19, 1, 2, "-04:00") },
    %({"a": "2009-08-10T19:01:02Z"}) => { "a" => Time.utc(2009, 8, 10, 19, 1, 2) },
    %({"a": "2009-08-10T19:01:02+02:00"}) => { "a" => Time.utc(2009, 8, 10, 17, 1, 2) },
    %({"a": "2009-08-10T19:01:02-05:00"}) => { "a" => Time.utc(2009, 8, 11, 00, 1, 2) },
    # needs to be *exact*
    %({"a": " 2007-01-01 01:12:34 Z "}) => { "a" => " 2007-01-01 01:12:34 Z " },
    %({"a": "2007-01-01 : it's your birthday"}) => { "a" => "2007-01-01 : it's your birthday" },
    %({"a": "Today is:\\n2020-05-21"}) => { "a" => "Today is:\n2020-05-21" },
    %({"a": "2007-01-01 01:12:34 Z\\nwas my birthday"}) => { "a" => "2007-01-01 01:12:34 Z\nwas my birthday" },
    %([]) => [],
    %({}) => {},
    %({"a":1}) => { "a" => 1 },
    %({"a": ""}) => { "a" => "" },
    %({"a":"\\""}) => { "a" => "\"" },
    %({"a": null}) => { "a" => nil },
    %({"a": true}) => { "a" => true },
    %({"a": false}) => { "a" => false },
    '{"bad":"\\\\","trailing":""}' => { "bad" => "\\", "trailing" => "" },
    %q({"a": "http:\/\/test.host\/posts\/1"}) => { "a" => "http://test.host/posts/1" },
    %q({"a": "\u003cunicode\u0020escape\u003e"}) => { "a" => "<unicode escape>" },
    '{"a": "\\\\u0020skip double backslashes"}' => { "a" => "\\u0020skip double backslashes" },
    %q({"a": "\u003cbr /\u003e"}) => { "a" => "<br />" },
    %q({"b":["\u003ci\u003e","\u003cb\u003e","\u003cu\u003e"]}) => { "b" => ["<i>", "<b>", "<u>"] },
    # test combination of dates and escaped or unicode encoded data in arrays
    %q([{"d":"1970-01-01", "s":"\u0020escape"},{"d":"1970-01-01", "s":"\u0020escape"}]) =>
      [{ "d" => Date.new(1970, 1, 1), "s" => " escape" }, { "d" => Date.new(1970, 1, 1), "s" => " escape" }],
    %q([{"d":"1970-01-01","s":"http:\/\/example.com"},{"d":"1970-01-01","s":"http:\/\/example.com"}]) =>
      [{ "d" => Date.new(1970, 1, 1), "s" => "http://example.com" },
       { "d" => Date.new(1970, 1, 1), "s" => "http://example.com" }],
    # tests escaping of "\n" char with Yaml backend
    %q({"a":"\n"}) => { "a" => "\n" },
    %q({"a":"\u000a"}) => { "a" => "\n" },
    %q({"a":"Line1\u000aLine2"}) => { "a" => "Line1\nLine2" },
    # prevent json unmarshalling
    '{"json_class":"TestJSONDecoding::Foo"}' => { "json_class" => "TestJSONDecoding::Foo" },
    # json "fragments" - these are invalid JSON, but ActionPack relies on this
    '"a string"' => "a string",
    "1.1" => 1.1,
    "1" => 1,
    "-1" => -1,
    "true" => true,
    "false" => false,
    "null" => nil
  }

  # Generates one test per table entry, decoding with time parsing enabled.
  TESTS.each_with_index do |(json, expected), index|
    fail_message = "JSON decoding failed for #{json}"

    test "json decodes #{index}" do
      with_tz_default "Eastern Time (US & Canada)" do
        with_parse_json_times(true) do
          silence_warnings do
            # assert_equal(nil, ...) is deprecated in Minitest, hence the branch.
            if expected.nil?
              assert_nil ActiveSupport::JSON.decode(json), fail_message
            else
              assert_equal expected, ActiveSupport::JSON.decode(json), fail_message
            end
          end
        end
      end
    end
  end

  test "json decodes time json with time parsing disabled" do
    with_parse_json_times(false) do
      expected = { "a" => "2007-01-01 01:12:34 Z" }
      assert_equal expected, ActiveSupport::JSON.decode(%({"a": "2007-01-01 01:12:34 Z"}))
    end
  end

  def test_failed_json_decoding
    assert_raise(ActiveSupport::JSON.parse_error) { ActiveSupport::JSON.decode(%(undefined)) }
    assert_raise(ActiveSupport::JSON.parse_error) { ActiveSupport::JSON.decode(%({a: 1})) }
    assert_raise(ActiveSupport::JSON.parse_error) { ActiveSupport::JSON.decode(%({: 1})) }
    assert_raise(ActiveSupport::JSON.parse_error) { ActiveSupport::JSON.decode(%()) }
  end

  def test_cannot_pass_unsupported_options
    assert_raise(ArgumentError) { ActiveSupport::JSON.decode("", create_additions: true) }
  end

  private
    # Temporarily toggles ActiveSupport.parse_json_times, restoring the
    # previous value even if the block raises.
    def with_parse_json_times(value)
      old_value = ActiveSupport.parse_json_times
      ActiveSupport.parse_json_times = value
      yield
    ensure
      ActiveSupport.parse_json_times = old_value
    end
end
# frozen_string_literal: true | |
require "active_support/core_ext/object/duplicable" | |
class Object
  # Returns a deep copy of object if it's duplicable. If it's
  # not duplicable, returns +self+.
  #
  #   object = Object.new
  #   dup = object.deep_dup
  #   dup.instance_variable_set(:@a, 1)
  #
  #   object.instance_variable_defined?(:@a) # => false
  #   dup.instance_variable_defined?(:@a)    # => true
  def deep_dup
    # Objects that report themselves as non-duplicable are handed back
    # untouched; everything else gets a shallow copy.
    if duplicable?
      dup
    else
      self
    end
  end
end
class Array
  # Returns a deep copy of array.
  #
  #   array = [1, [2, 3]]
  #   dup = array.deep_dup
  #   dup[1][2] = 4
  #
  #   array[1][2] # => nil
  #   dup[1][2]   # => 4
  def deep_dup
    # Each element decides for itself how to be deep-copied.
    map { |element| element.deep_dup }
  end
end
class Hash
  # Returns a deep copy of hash.
  #
  #   hash = { a: { b: 'b' } }
  #   dup = hash.deep_dup
  #   dup[:a][:c] = 'c'
  #
  #   hash[:a][:c] # => nil
  #   dup[:a][:c]  # => "c"
  def deep_dup
    copy = dup
    each_pair do |key, value|
      case key
      when ::String, ::Symbol
        # String keys are duplicated/frozen by Hash on insertion and symbols
        # are immutable, so the key itself can be shared; only copy the value.
        copy[key] = value.deep_dup
      else
        # Any other key may be mutable: replace the entry with deep copies of
        # both key and value.
        copy.delete(key)
        copy[key.deep_dup] = value.deep_dup
      end
    end
    copy
  end
end
# frozen_string_literal: true | |
require_relative "../../abstract_unit" | |
require "active_support/core_ext/object/deep_dup" | |
# Covers Object#deep_dup, Array#deep_dup, and Hash#deep_dup, verifying that
# copies are independent of the originals at every nesting level.
class DeepDupTest < ActiveSupport::TestCase
  def test_array_deep_dup
    array = [1, [2, 3]]
    dup = array.deep_dup
    # Mutating the copy's nested array must not leak into the original.
    dup[1][2] = 4
    assert_nil array[1][2]
    assert_equal 4, dup[1][2]
  end

  def test_hash_deep_dup
    hash = { a: { b: "b" } }
    dup = hash.deep_dup
    # Mutating the copy's nested hash must not leak into the original.
    dup[:a][:c] = "c"
    assert_nil hash[:a][:c]
    assert_equal "c", dup[:a][:c]
  end

  def test_array_deep_dup_with_hash_inside
    array = [1, { a: 2, b: 3 } ]
    dup = array.deep_dup
    dup[1][:c] = 4
    assert_nil array[1][:c]
    assert_equal 4, dup[1][:c]
  end

  def test_hash_deep_dup_with_array_inside
    hash = { a: [1, 2] }
    dup = hash.deep_dup
    dup[:a][2] = "c"
    assert_nil hash[:a][2]
    assert_equal "c", dup[:a][2]
  end

  def test_deep_dup_initialize
    # The copy must preserve the default value configured via Hash.new.
    zero_hash = Hash.new 0
    hash = { a: zero_hash }
    dup = hash.deep_dup
    assert_equal 0, dup[:a][44]
  end

  def test_object_deep_dup
    object = Object.new
    dup = object.deep_dup
    # Instance variables set on the copy must not appear on the original.
    dup.instance_variable_set(:@a, 1)
    assert_not object.instance_variable_defined?(:@a)
    assert dup.instance_variable_defined?(:@a)
  end

  def test_deep_dup_with_hash_class_key
    # Class keys (not duplicable in the usual sense) must survive deep_dup.
    hash = { Integer => 1 }
    dup = hash.deep_dup
    assert_equal 1, dup.keys.length
  end

  def test_deep_dup_with_mutable_frozen_key
    # A frozen-but-mutable key must be replaced by an unfrozen deep copy.
    key = { array: [] }.freeze
    hash = { key => :value }
    dup = hash.deep_dup
    dup.transform_keys { |k| k[:array] << :array_element }
    assert_not_equal hash.keys, dup.keys
  end
end
# frozen_string_literal: true | |
class Hash
  # Returns a new hash with +self+ and +other_hash+ merged recursively.
  #
  #   h1 = { a: true, b: { c: [1, 2, 3] } }
  #   h2 = { a: false, b: { x: [3, 4, 5] } }
  #
  #   h1.deep_merge(h2) # => { a: false, b: { c: [1, 2, 3], x: [3, 4, 5] } }
  #
  # Like with Hash#merge in the standard library, a block can be provided
  # to merge values:
  #
  #   h1 = { a: 100, b: 200, c: { c1: 100 } }
  #   h2 = { b: 250, c: { c1: 200 } }
  #   h1.deep_merge(h2) { |key, this_val, other_val| this_val + other_val }
  #   # => { a: 100, b: 450, c: { c1: 300 } }
  def deep_merge(other_hash, &block)
    dup.deep_merge!(other_hash, &block)
  end

  # Same as +deep_merge+, but modifies +self+.
  def deep_merge!(other_hash, &block)
    merge!(other_hash) do |key, old_value, new_value|
      if old_value.is_a?(Hash) && new_value.is_a?(Hash)
        # Two sub-hashes: recurse, carrying the conflict block along.
        old_value.deep_merge(new_value, &block)
      elsif block
        # Non-hash conflict with a caller-supplied resolver.
        yield(key, old_value, new_value)
      else
        # Default: the incoming value wins.
        new_value
      end
    end
  end
end
# frozen_string_literal: true | |
class Hash
  # Returns a new hash with all values converted by the block operation.
  # This includes the values from the root hash and from all
  # nested hashes and arrays.
  #
  #   hash = { person: { name: 'Rob', age: '28' } }
  #
  #   hash.deep_transform_values{ |value| value.to_s.upcase }
  #   # => {person: {name: "ROB", age: "28"}}
  def deep_transform_values(&block)
    _deep_transform_values_in_object(self, &block)
  end

  # Destructively converts all values by using the block operation.
  # This includes the values from the root hash and from all
  # nested hashes and arrays.
  def deep_transform_values!(&block)
    _deep_transform_values_in_object!(self, &block)
  end

  private
    # Non-destructive recursion: hashes and arrays are rebuilt, leaves are
    # passed to the block.
    def _deep_transform_values_in_object(object, &block)
      if object.is_a?(Hash)
        object.transform_values { |value| _deep_transform_values_in_object(value, &block) }
      elsif object.is_a?(Array)
        object.map { |element| _deep_transform_values_in_object(element, &block) }
      else
        yield(object)
      end
    end

    # Destructive recursion: hashes and arrays are mutated in place.
    def _deep_transform_values_in_object!(object, &block)
      if object.is_a?(Hash)
        object.transform_values! { |value| _deep_transform_values_in_object!(value, &block) }
      elsif object.is_a?(Array)
        object.map! { |element| _deep_transform_values_in_object!(element, &block) }
      else
        yield(object)
      end
    end
end
# frozen_string_literal: true | |
require "set" | |
class Module
  # Error generated by +delegate+ when a method is called on +nil+ and +allow_nil+
  # option is not used.
  class DelegationError < NoMethodError; end

  RUBY_RESERVED_KEYWORDS = %w(alias and BEGIN begin break case class def defined? do
  else elsif END end ensure false for if in module next nil not or redo rescue retry
  return self super then true undef unless until when while yield)
  DELEGATION_RESERVED_KEYWORDS = %w(_ arg args block)
  # Names that cannot be used bare as a delegation target inside the generated
  # method bodies; such targets get an explicit +self.+ receiver below.
  DELEGATION_RESERVED_METHOD_NAMES = Set.new(
    RUBY_RESERVED_KEYWORDS + DELEGATION_RESERVED_KEYWORDS
  ).freeze

  # Provides a +delegate+ class method to easily expose contained objects'
  # public methods as your own.
  #
  # ==== Options
  # * <tt>:to</tt> - Specifies the target object name as a symbol or string
  # * <tt>:prefix</tt> - Prefixes the new method with the target name or a custom prefix
  # * <tt>:allow_nil</tt> - If set to true, prevents a +Module::DelegationError+
  #   from being raised
  # * <tt>:private</tt> - If set to true, changes method visibility to private
  #
  # The macro receives one or more method names (specified as symbols or
  # strings) and the name of the target object via the <tt>:to</tt> option
  # (also a symbol or string).
  #
  # Delegation is particularly useful with Active Record associations:
  #
  #   class Greeter < ActiveRecord::Base
  #     def hello
  #       'hello'
  #     end
  #
  #     def goodbye
  #       'goodbye'
  #     end
  #   end
  #
  #   class Foo < ActiveRecord::Base
  #     belongs_to :greeter
  #     delegate :hello, to: :greeter
  #   end
  #
  #   Foo.new.hello   # => "hello"
  #   Foo.new.goodbye # => NoMethodError: undefined method `goodbye' for #<Foo:0x1af30c>
  #
  # Multiple delegates to the same target are allowed:
  #
  #   class Foo < ActiveRecord::Base
  #     belongs_to :greeter
  #     delegate :hello, :goodbye, to: :greeter
  #   end
  #
  #   Foo.new.goodbye # => "goodbye"
  #
  # Methods can be delegated to instance variables, class variables, or constants
  # by providing them as a symbols:
  #
  #   class Foo
  #     CONSTANT_ARRAY = [0,1,2,3]
  #     @@class_array  = [4,5,6,7]
  #
  #     def initialize
  #       @instance_array = [8,9,10,11]
  #     end
  #     delegate :sum, to: :CONSTANT_ARRAY
  #     delegate :min, to: :@@class_array
  #     delegate :max, to: :@instance_array
  #   end
  #
  #   Foo.new.sum # => 6
  #   Foo.new.min # => 4
  #   Foo.new.max # => 11
  #
  # It's also possible to delegate a method to the class by using +:class+:
  #
  #   class Foo
  #     def self.hello
  #       "world"
  #     end
  #
  #     delegate :hello, to: :class
  #   end
  #
  #   Foo.new.hello # => "world"
  #
  # Delegates can optionally be prefixed using the <tt>:prefix</tt> option. If the value
  # is <tt>true</tt>, the delegate methods are prefixed with the name of the object being
  # delegated to.
  #
  #   Person = Struct.new(:name, :address)
  #
  #   class Invoice < Struct.new(:client)
  #     delegate :name, :address, to: :client, prefix: true
  #   end
  #
  #   john_doe = Person.new('John Doe', 'Vimmersvej 13')
  #   invoice = Invoice.new(john_doe)
  #   invoice.client_name    # => "John Doe"
  #   invoice.client_address # => "Vimmersvej 13"
  #
  # It is also possible to supply a custom prefix.
  #
  #   class Invoice < Struct.new(:client)
  #     delegate :name, :address, to: :client, prefix: :customer
  #   end
  #
  #   invoice = Invoice.new(john_doe)
  #   invoice.customer_name    # => 'John Doe'
  #   invoice.customer_address # => 'Vimmersvej 13'
  #
  # The delegated methods are public by default.
  # Pass <tt>private: true</tt> to change that.
  #
  #   class User < ActiveRecord::Base
  #     has_one :profile
  #     delegate :first_name, to: :profile
  #     delegate :date_of_birth, to: :profile, private: true
  #
  #     def age
  #       Date.today.year - date_of_birth.year
  #     end
  #   end
  #
  #   User.new.first_name # => "Tomas"
  #   User.new.date_of_birth # => NoMethodError: private method `date_of_birth' called for #<User:0x00000008221340>
  #   User.new.age # => 2
  #
  # If the target is +nil+ and does not respond to the delegated method a
  # +Module::DelegationError+ is raised. If you wish to instead return +nil+,
  # use the <tt>:allow_nil</tt> option.
  #
  #   class User < ActiveRecord::Base
  #     has_one :profile
  #     delegate :age, to: :profile
  #   end
  #
  #   User.new.age
  #   # => Module::DelegationError: User#age delegated to profile.age, but profile is nil
  #
  # But if not having a profile yet is fine and should not be an error
  # condition:
  #
  #   class User < ActiveRecord::Base
  #     has_one :profile
  #     delegate :age, to: :profile, allow_nil: true
  #   end
  #
  #   User.new.age # nil
  #
  # Note that if the target is not +nil+ then the call is attempted regardless of the
  # <tt>:allow_nil</tt> option, and thus an exception is still raised if said object
  # does not respond to the method:
  #
  #   class Foo
  #     def initialize(bar)
  #       @bar = bar
  #     end
  #
  #     delegate :name, to: :@bar, allow_nil: true
  #   end
  #
  #   Foo.new("Bar").name # raises NoMethodError: undefined method `name'
  #
  # The target method must be public, otherwise it will raise +NoMethodError+.
  def delegate(*methods, to: nil, prefix: nil, allow_nil: nil, private: nil)
    unless to
      raise ArgumentError, "Delegation needs a target. Supply a keyword argument 'to' (e.g. delegate :hello, to: :greeter)."
    end

    # An automatic prefix only makes sense when the target is a plain method
    # name (not an ivar, cvar, constant, or :class).
    if prefix == true && /^[^a-z_]/.match?(to)
      raise ArgumentError, "Can only automatically set the delegation prefix when delegating to a method."
    end

    method_prefix = \
      if prefix
        "#{prefix == true ? to : prefix}_"
      else
        ""
      end

    # Attribute the generated methods to the caller's file and line so
    # backtraces point at the `delegate` call site.
    location = caller_locations(1, 1).first
    file, line = location.path, location.lineno

    # Reserved words can't appear bare as a receiver in the generated code.
    to = to.to_s
    to = "self.#{to}" if DELEGATION_RESERVED_METHOD_NAMES.include?(to)

    method_def = []
    method_names = []

    methods.map do |method|
      method_name = prefix ? "#{method_prefix}#{method}" : method
      method_names << method_name.to_sym

      # Attribute writer methods only accept one argument. Makes sure []=
      # methods still accept two arguments.
      definition = /[^\]]=\z/.match?(method) ? "arg" : "..."

      # The following generated method calls the target exactly once, storing
      # the returned value in a dummy variable.
      #
      # Reason is twofold: On one hand doing less calls is in general better.
      # On the other hand it could be that the target has side-effects,
      # whereas conceptually, from the user point of view, the delegator should
      # be doing one call.
      if allow_nil
        method = method.to_s

        method_def <<
          "def #{method_name}(#{definition})" <<
          "  _ = #{to}" <<
          "  if !_.nil? || nil.respond_to?(:#{method})" <<
          "    _.#{method}(#{definition})" <<
          "  end" <<
          "end"
      else
        method = method.to_s
        method_name = method_name.to_s

        method_def <<
          "def #{method_name}(#{definition})" <<
          "  _ = #{to}" <<
          "  _.#{method}(#{definition})" <<
          "rescue NoMethodError => e" <<
          "  if _.nil? && e.name == :#{method}" <<
          %(    raise DelegationError, "#{self}##{method_name} delegated to #{to}.#{method}, but #{to} is nil: \#{self.inspect}") <<
          "  else" <<
          "    raise" <<
          "  end" <<
          "end"
      end
    end
    module_eval(method_def.join(";"), file, line)
    private(*method_names) if private
    method_names
  end

  # When building decorators, a common pattern may emerge:
  #
  #   class Partition
  #     def initialize(event)
  #       @event = event
  #     end
  #
  #     def person
  #       detail.person || creator
  #     end
  #
  #     private
  #       def respond_to_missing?(name, include_private = false)
  #         @event.respond_to?(name, include_private)
  #       end
  #
  #       def method_missing(method, *args, &block)
  #         @event.send(method, *args, &block)
  #       end
  #   end
  #
  # With <tt>Module#delegate_missing_to</tt>, the above is condensed to:
  #
  #   class Partition
  #     delegate_missing_to :@event
  #
  #     def initialize(event)
  #       @event = event
  #     end
  #
  #     def person
  #       detail.person || creator
  #     end
  #   end
  #
  # The target can be anything callable within the object, e.g. instance
  # variables, methods, constants, etc.
  #
  # The delegated method must be public on the target, otherwise it will
  # raise +DelegationError+. If you wish to instead return +nil+,
  # use the <tt>:allow_nil</tt> option.
  #
  # The <tt>marshal_dump</tt> and <tt>_dump</tt> methods are exempt from
  # delegation due to possible interference when calling
  # <tt>Marshal.dump(object)</tt>, should the delegation target method
  # of <tt>object</tt> add or remove instance variables.
  def delegate_missing_to(target, allow_nil: nil)
    target = target.to_s
    # As in +delegate+, reserved words need an explicit +self.+ receiver.
    target = "self.#{target}" if DELEGATION_RESERVED_METHOD_NAMES.include?(target)

    module_eval <<-RUBY, __FILE__, __LINE__ + 1
      def respond_to_missing?(name, include_private = false)
        # It may look like an oversight, but we deliberately do not pass
        # +include_private+, because they do not get delegated.

        return false if name == :marshal_dump || name == :_dump
        #{target}.respond_to?(name) || super
      end

      def method_missing(method, *args, &block)
        if #{target}.respond_to?(method)
          #{target}.public_send(method, *args, &block)
        else
          begin
            super
          rescue NoMethodError
            if #{target}.nil?
              if #{allow_nil == true}
                nil
              else
                raise DelegationError, "\#{method} delegated to #{target}, but #{target} is nil"
              end
            else
              raise
            end
          end
        end
      end
      ruby2_keywords(:method_missing)
    RUBY
  end
end
# frozen_string_literal: true | |
require "set" | |
require "active_support/dependencies/interlock" | |
module ActiveSupport # :nodoc:
  module Dependencies # :nodoc:
    require_relative "dependencies/require_dependency"

    # Lock coordinating code loading/unloading with running application code.
    singleton_class.attr_accessor :interlock
    @interlock = Interlock.new

    # :doc:

    # Execute the supplied block without interference from any
    # concurrent loads.
    def self.run_interlock(&block)
      interlock.running(&block)
    end

    # Execute the supplied block while holding an exclusive lock,
    # preventing any other thread from being inside a #run_interlock
    # block at the same time.
    def self.load_interlock(&block)
      interlock.loading(&block)
    end

    # Execute the supplied block while holding an exclusive lock,
    # preventing any other thread from being inside a #run_interlock
    # block at the same time.
    def self.unload_interlock(&block)
      interlock.unloading(&block)
    end

    # :nodoc:

    # The array of directories from which we autoload and reload, if reloading
    # is enabled. The public interface to push directories to this collection
    # from applications or engines is config.autoload_paths.
    #
    # This collection is allowed to have intersection with autoload_once_paths.
    # Common directories are not reloaded.
    singleton_class.attr_accessor :autoload_paths
    self.autoload_paths = []

    # The array of directories from which we autoload and never reload, even if
    # reloading is enabled. The public interface to push directories to this
    # collection from applications or engines is config.autoload_once_paths.
    singleton_class.attr_accessor :autoload_once_paths
    self.autoload_once_paths = []

    # This is a private set that collects all eager load paths during bootstrap.
    # Useful for Zeitwerk integration. The public interface to push custom
    # directories to this collection from applications or engines is
    # config.eager_load_paths.
    singleton_class.attr_accessor :_eager_load_paths
    self._eager_load_paths = Set.new

    # If reloading is enabled, this private set holds autoloaded classes tracked
    # by the descendants tracker. It is populated by an on_load callback in the
    # main autoloader. Used to clear state.
    singleton_class.attr_accessor :_autoloaded_tracked_classes
    self._autoloaded_tracked_classes = Set.new

    # If reloading is enabled, this private attribute stores the main autoloader
    # of a Rails application. It is `nil` otherwise.
    #
    # The public interface for this autoloader is `Rails.autoloaders.main`.
    singleton_class.attr_accessor :autoloader

    # Private method that reloads constants autoloaded by the main autoloader.
    #
    # Rails.application.reloader.reload! is the public interface for application
    # reload. That involves more things, like deleting unloaded classes from the
    # internal state of the descendants tracker, or reloading routes.
    def self.clear
      unload_interlock do
        _autoloaded_tracked_classes.clear
        autoloader.reload
      end
    end

    # Private method used by require_dependency.
    #
    # Returns the absolute path of the first file named +relpath+ (with ".rb"
    # appended if missing) found under the autoload paths, or +nil+.
    def self.search_for_file(relpath)
      relpath += ".rb" unless relpath.end_with?(".rb")
      autoload_paths.each do |autoload_path|
        abspath = File.join(autoload_path, relpath)
        return abspath if File.file?(abspath)
      end
      nil
    end

    # Private method that helps configuring the autoloaders.
    def self.eager_load?(path)
      _eager_load_paths.member?(path)
    end
  end
end
# frozen_string_literal: true | |
require_relative "abstract_unit" | |
require "pp" | |
require "active_support/dependencies" | |
# Test fixture: counts const_missing lookups and echoes the missing
# constant's name back as the lookup result.
module ModuleWithMissing
  mattr_accessor :missing_count

  def self.const_missing(const_name)
    self.missing_count += 1
    const_name
  end
end
# Test fixture exposing a plain constant for constant-lookup tests.
module ModuleWithConstant
  InheritedConstant = "Hello"
end
# Covers require_dependency: lookup through Dependencies.autoload_paths,
# absolute paths, #to_path arguments, and the fallback to Kernel#require.
class RequireDependencyTest < ActiveSupport::TestCase
  setup do
    # Snapshot global state so teardown can restore it.
    @loaded_features_copy = $LOADED_FEATURES.dup
    ActiveSupport::Dependencies.autoload_paths.clear
    # A scratch autoload root containing a single file defining X.
    @root_dir = Dir.mktmpdir
    File.write("#{@root_dir}/x.rb", "X = :X")
    ActiveSupport::Dependencies.autoload_paths << @root_dir
  end

  teardown do
    # Undo everything the tests changed at global scope.
    $LOADED_FEATURES.replace(@loaded_features_copy)
    ActiveSupport::Dependencies.autoload_paths.clear
    FileUtils.rm_rf(@root_dir)
    Object.send(:remove_const, :X) if Object.const_defined?(:X)
  end

  test "require_dependency looks autoload paths up" do
    assert require_dependency("x")
    assert_equal :X, X
  end

  test "require_dependency looks autoload paths up (idempotent)" do
    assert require_dependency("x")
    # Second load of the same file must be a no-op, like Kernel#require.
    assert_not require_dependency("x")
  end

  test "require_dependency handles absolute paths correctly" do
    assert require_dependency("#{@root_dir}/x.rb")
    assert_equal :X, X
  end

  test "require_dependency handles absolute paths correctly (idempotent)" do
    assert require_dependency("#{@root_dir}/x.rb")
    assert_not require_dependency("#{@root_dir}/x.rb")
  end

  test "require_dependency supports arguments that respond to to_path" do
    x = Object.new
    def x.to_path; "x"; end
    assert require_dependency(x)
    assert_equal :X, X
  end

  test "require_dependency supports arguments that respond to to_path (idempotent)" do
    x = Object.new
    def x.to_path; "x"; end
    assert require_dependency(x)
    assert_not require_dependency(x)
  end

  test "require_dependency fallback to Kernel#require" do
    # y.rb lives on $LOAD_PATH but NOT on the autoload paths.
    dir = Dir.mktmpdir
    $LOAD_PATH << dir
    File.write("#{dir}/y.rb", "Y = :Y")
    assert require_dependency("y")
    assert_equal :Y, Y
  ensure
    $LOAD_PATH.pop
    Object.send(:remove_const, :Y) if Object.const_defined?(:Y)
  end

  test "require_dependency fallback to Kernel#require (idempotent)" do
    dir = Dir.mktmpdir
    $LOAD_PATH << dir
    File.write("#{dir}/y.rb", "Y = :Y")
    assert require_dependency("y")
    assert_not require_dependency("y")
  ensure
    $LOAD_PATH.pop
    Object.send(:remove_const, :Y) if Object.const_defined?(:Y)
  end

  test "require_dependency raises ArgumentError if the argument is not a String and does not respond to #to_path" do
    assert_raises(ArgumentError) { require_dependency(Object.new) }
  end

  test "require_dependency raises LoadError if the given argument is not found" do
    assert_raise(LoadError) { require_dependency("nonexistent_filename") }
  end
end
# frozen_string_literal: true | |
require "time" | |
class Time
  NOT_SET = Object.new # :nodoc:

  # Deprecated formatted +to_s+: looks +format+ up in DATE_FORMATS while
  # warning callers to migrate to Time#to_fs. A bare +to_s+ call (no
  # argument) stays silent and returns the default representation.
  def to_s(format = NOT_SET) # :nodoc:
    formatter = DATE_FORMATS[format]

    # Every call except a bare +to_s+ is deprecated in favor of +to_fs+.
    if formatter || format != NOT_SET
      ActiveSupport::Deprecation.warn(
        "Time#to_s(#{format.inspect}) is deprecated. Please use Time#to_fs(#{format.inspect}) instead."
      )
    end

    if !formatter
      to_default_s
    elsif formatter.respond_to?(:call)
      formatter.call(self).to_s
    else
      strftime(formatter)
    end
  end
end
# frozen_string_literal: true | |
require "active_support/deprecation" | |
module ActiveSupport
  module Testing
    module Deprecation
      # Asserts that a matching deprecation warning was emitted by the given deprecator during the execution of the yielded block.
      #
      #   assert_deprecated(/foo/, CustomDeprecator) do
      #     CustomDeprecator.warn "foo should no longer be used"
      #   end
      #
      # The +match+ object may be a +Regexp+, or +String+ appearing in the message.
      #
      #   assert_deprecated('foo', CustomDeprecator) do
      #     CustomDeprecator.warn "foo should no longer be used"
      #   end
      #
      # If the +match+ is omitted (or explicitly +nil+), any deprecation warning will match.
      #
      #   assert_deprecated(nil, CustomDeprecator) do
      #     CustomDeprecator.warn "foo should no longer be used"
      #   end
      #
      # If no +deprecator+ is given, defaults to ActiveSupport::Deprecation.
      #
      #   assert_deprecated do
      #     ActiveSupport::Deprecation.warn "foo should no longer be used"
      #   end
      def assert_deprecated(match = nil, deprecator = nil, &block)
        result, messages = collect_deprecations(deprecator, &block)
        assert !messages.empty?, "Expected a deprecation warning within the block but received none"

        unless match.nil?
          # A plain string matches when it appears anywhere in the message.
          match = Regexp.new(Regexp.escape(match)) unless match.is_a?(Regexp)
          assert messages.any? { |message| match.match?(message) }, "No deprecation warning matched #{match}: #{messages.join(', ')}"
        end

        result
      end

      # Asserts that no deprecation warnings are emitted by the given deprecator during the execution of the yielded block.
      #
      #   assert_not_deprecated(CustomDeprecator) do
      #     CustomDeprecator.warn "message" # fails assertion
      #   end
      #
      # If no +deprecator+ is given, defaults to ActiveSupport::Deprecation.
      #
      #   assert_not_deprecated do
      #     ActiveSupport::Deprecation.warn "message" # fails assertion
      #   end
      #
      #   assert_not_deprecated do
      #     CustomDeprecator.warn "message" # passes assertion
      #   end
      def assert_not_deprecated(deprecator = nil, &block)
        result, messages = collect_deprecations(deprecator, &block)
        assert messages.empty?, "Expected no deprecation warning within the block but received #{messages.size}: \n #{messages.join("\n ")}"
        result
      end

      # Returns an array of all the deprecation warnings emitted by the given
      # +deprecator+ during the execution of the yielded block.
      #
      #   collect_deprecations(CustomDeprecator) do
      #     CustomDeprecator.warn "message"
      #   end # => ["message"]
      #
      # If no +deprecator+ is given, defaults to ActiveSupport::Deprecation.
      #
      #   collect_deprecations do
      #     CustomDeprecator.warn "custom message"
      #     ActiveSupport::Deprecation.warn "message"
      #   end # => ["message"]
      def collect_deprecations(deprecator = nil)
        deprecator ||= ActiveSupport::Deprecation
        old_behavior = deprecator.behavior
        collected = []
        # A lenient proc (not a lambda) so extra behavior arguments are ignored.
        deprecator.behavior = proc do |message, _callstack|
          collected << message
        end
        result = yield
        [result, collected]
      ensure
        # Always restore the deprecator, even if the block raises.
        deprecator.behavior = old_behavior
      end
    end
  end
end
# frozen_string_literal: true | |
require_relative "abstract_unit" | |
require "active_support/testing/stream" | |
# Test fixture exercising ActiveSupport::Deprecation's method, constant, and
# instance-variable deprecation helpers.
class Deprecatee
  def initialize
    # Proxy that warns whenever @request is used instead of #request.
    @request = ActiveSupport::Deprecation::DeprecatedInstanceVariableProxy.new(self, :request)
    @_request = "there we go"
  end
  def request; @_request end
  def old_request; @request end

  # Warns only when called without an argument.
  def partially(foo = nil)
    ActiveSupport::Deprecation.warn("calling with foo=nil is out") if foo.nil?
  end

  def not() 2 end
  def none() 1 end
  def one(a) a end
  def multi(a, b, c) [a, b, c] end
  deprecate :none, :one, :multi

  def a; end
  def b; end
  def c; end
  def d; end
  def e; end
  # Hash form: :c is deprecated in favor of :e, :d carries a custom message.
  deprecate :a, :b, c: :e, d: "you now need to do something extra for this one"

  def f=(v); end
  deprecate :f=

  # Deliberately deprecates :g before it is defined.
  deprecate :g
  def g(h) h end

  module B
    C = 1
  end
  A = ActiveSupport::Deprecation::DeprecatedConstantProxy.new("Deprecatee::A", "Deprecatee::B::C")

  module New
    class Descendant; end
  end
  Old = ActiveSupport::Deprecation::DeprecatedConstantProxy.new("Deprecatee::Old", "Deprecatee::New")
end
# Fixture exercising DeprecatedConstantAccessor's +deprecate_constant+,
# including the case where the deprecated constant is an exception class
# used in a rescue clause.
class DeprecateeWithAccessor
  include ActiveSupport::Deprecation::DeprecatedConstantAccessor
  module B
    C = 1
  end
  deprecate_constant "A", "DeprecateeWithAccessor::B::C"
  class NewException < StandardError; end
  deprecate_constant "OldException", "DeprecateeWithAccessor::NewException"
end
# Tests for ActiveSupport::Deprecation: warning emission, behaviors,
# proxies, disallowed warnings, and the allow/silence controls.
class DeprecationTest < ActiveSupport::TestCase
  include ActiveSupport::Testing::Stream

  def setup
    # Track the last warning so individual tests can inspect it, and make
    # sure the global behavior is restored in teardown.
    @old_behavior = ActiveSupport::Deprecation.behavior
    @last_message = nil
    ActiveSupport::Deprecation.behavior = Proc.new { |message| @last_message = message }
    @dtc = Deprecatee.new
  end

  def teardown
    ActiveSupport::Deprecation.behavior = @old_behavior
  end

  def test_inline_deprecation_warning
    assert_deprecated(/foo=nil/) do
      @dtc.partially
    end
  end

  def test_undeprecated
    assert_not_deprecated do
      assert_equal 2, @dtc.not
    end
  end

  def test_deprecate_method_on_class
    assert_deprecated(/none is deprecated/) do
      assert_equal 1, @dtc.none
    end
    assert_deprecated(/one is deprecated/) do
      assert_equal 1, @dtc.one(1)
    end
    assert_deprecated(/multi is deprecated/) do
      assert_equal [1, 2, 3], @dtc.multi(1, 2, 3)
    end
  end

  def test_deprecate_method_doesnt_expand_positional_argument_hash
    hash = { k: 1 }
    assert_deprecated(/one is deprecated/) do
      assert_same hash, @dtc.one(hash)
    end
    assert_deprecated(/g is deprecated/) do
      assert_same hash, @dtc.g(hash)
    end
  end

  def test_deprecate_object
    deprecated_object = ActiveSupport::Deprecation::DeprecatedObjectProxy.new(Object.new, ":bomb:")
    assert_deprecated(/:bomb:/) { deprecated_object.to_s }
  end

  def test_nil_behavior_is_ignored
    ActiveSupport::Deprecation.behavior = nil
    assert_deprecated(/foo=nil/) { @dtc.partially }
  end

  def test_several_behaviors
    @a, @b, @c = nil, nil, nil
    ActiveSupport::Deprecation.behavior = [
      lambda { |msg, callstack, horizon, gem| @a = msg },
      lambda { |msg, callstack| @b = msg },
      lambda { |*args| @c = args },
    ]
    @dtc.partially
    assert_match(/foo=nil/, @a)
    assert_match(/foo=nil/, @b)
    # Behaviors are invoked with (message, callstack, horizon, gem_name).
    assert_equal 4, @c.size
  end

  def test_raise_behaviour
    ActiveSupport::Deprecation.behavior = :raise
    message = "Revise this deprecated stuff now!"
    callstack = caller_locations
    e = assert_raise ActiveSupport::DeprecationException do
      ActiveSupport::Deprecation.behavior.first.call(message, callstack, "horizon", "gem")
    end
    assert_equal message, e.message
    assert_equal callstack.map(&:to_s), e.backtrace.map(&:to_s)
  end

  def test_default_stderr_behavior
    ActiveSupport::Deprecation.behavior = :stderr
    behavior = ActiveSupport::Deprecation.behavior.first
    content = capture(:stderr) {
      assert_nil behavior.call("Some error!", ["call stack!"], "horizon", "gem")
    }
    assert_match(/Some error!/, content)
    assert_match(/call stack!/, content)
  end

  def test_default_stderr_behavior_with_warn_method
    ActiveSupport::Deprecation.behavior = :stderr
    content = capture(:stderr) {
      ActiveSupport::Deprecation.warn("Instance error!", ["instance call stack!"])
    }
    assert_match(/Instance error!/, content)
    assert_match(/instance call stack!/, content)
  end

  def test_default_silence_behavior
    ActiveSupport::Deprecation.behavior = :silence
    behavior = ActiveSupport::Deprecation.behavior.first
    stderr_output = capture(:stderr) {
      assert_nil behavior.call("Some error!", ["call stack!"], "horizon", "gem")
    }
    assert_empty stderr_output
  end

  def test_default_notify_behavior
    ActiveSupport::Deprecation.behavior = :notify
    behavior = ActiveSupport::Deprecation.behavior.first
    begin
      events = []
      # The notification topic is derived from the gem name, underscored.
      ActiveSupport::Notifications.subscribe("deprecation.my_gem_custom") { |*args|
        events << args.extract_options!
      }
      assert_nil behavior.call("Some error!", ["call stack!"], "horizon", "MyGem::Custom")
      assert_equal 1, events.size
      assert_equal "Some error!", events.first[:message]
      assert_equal ["call stack!"], events.first[:callstack]
      assert_equal "horizon", events.first[:deprecation_horizon]
      assert_equal "MyGem::Custom", events.first[:gem_name]
    ensure
      ActiveSupport::Notifications.unsubscribe("deprecation.my_gem_custom")
    end
  end

  def test_default_invalid_behavior
    e = assert_raises(ArgumentError) do
      ActiveSupport::Deprecation.behavior = :invalid
    end
    assert_equal ":invalid is not a valid deprecation behavior.", e.message
  end

  def test_deprecated_instance_variable_proxy
    assert_not_deprecated { @dtc.request.size }
    assert_deprecated("@request.size") { assert_equal @dtc.request.size, @dtc.old_request.size }
    assert_deprecated("@request.to_s") { assert_equal @dtc.request.to_s, @dtc.old_request.to_s }
  end

  def test_deprecated_instance_variable_proxy_shouldnt_warn_on_inspect
    assert_not_deprecated { assert_equal @dtc.request.inspect, @dtc.old_request.inspect }
  end

  def test_deprecated_constant_proxy
    assert_not_deprecated { Deprecatee::B::C }
    assert_deprecated("Deprecatee::A") { assert_equal Deprecatee::B::C, Deprecatee::A }
    # .class is answered by the proxy itself without triggering a warning.
    assert_not_deprecated { assert_equal Deprecatee::B::C.class, Deprecatee::A.class }
  end

  def test_deprecated_constant_descendant
    assert_not_deprecated { Deprecatee::New::Descendant }
    assert_deprecated("Deprecatee::Old") do
      assert_equal Deprecatee::Old::Descendant, Deprecatee::New::Descendant
    end
    assert_raises(NameError) do
      assert_deprecated("Deprecatee::Old") { Deprecatee::Old::NON_EXISTENCE }
    end
  end

  def test_deprecated_constant_accessor
    assert_not_deprecated { DeprecateeWithAccessor::B::C }
    assert_deprecated("DeprecateeWithAccessor::A") { assert_equal DeprecateeWithAccessor::B::C, DeprecateeWithAccessor::A }
  end

  def test_deprecated_constant_accessor_exception
    raise DeprecateeWithAccessor::NewException.new("Test")
  rescue DeprecateeWithAccessor::OldException => e
    assert_kind_of DeprecateeWithAccessor::NewException, e
  end

  def test_assert_deprecated_raises_when_method_not_deprecated
    assert_raises(Minitest::Assertion) { assert_deprecated { @dtc.not } }
  end

  def test_assert_not_deprecated
    assert_raises(Minitest::Assertion) { assert_not_deprecated { @dtc.partially } }
  end

  def test_assert_deprecation_without_match
    assert_deprecated do
      @dtc.partially
    end
  end

  def test_assert_deprecated_matches_any_warning
    assert_deprecated "abc" do
      ActiveSupport::Deprecation.warn "abc"
      ActiveSupport::Deprecation.warn "def"
    end
  rescue Minitest::Assertion
    flunk "assert_deprecated should match any warning in block, not just the last one"
  end

  def test_assert_not_deprecated_returns_result_of_block
    assert_equal 123, assert_not_deprecated { 123 }
  end

  def test_assert_deprecated_returns_result_of_block
    result = assert_deprecated("abc") do
      ActiveSupport::Deprecation.warn "abc"
      123
    end
    assert_equal 123, result
  end

  def test_assert_deprecated_warn_work_with_default_behavior
    ActiveSupport::Deprecation.instance_variable_set("@behavior", nil)
    assert_deprecated("abc") do
      ActiveSupport::Deprecation.warn "abc"
    end
  end

  def test_silence
    ActiveSupport::Deprecation.silence do
      assert_not_deprecated { @dtc.partially }
    end
    ActiveSupport::Deprecation.silenced = true
    assert ActiveSupport::Deprecation.silenced
    assert_not_deprecated { @dtc.partially }
    ActiveSupport::Deprecation.silenced = false
    assert_not ActiveSupport::Deprecation.silenced
  end

  def test_silence_threaded
    barrier = Concurrent::CyclicBarrier.new(2)
    th = Thread.new do
      ActiveSupport::Deprecation.silence do
        barrier.wait
        barrier.wait
        assert_not_deprecated { ActiveSupport::Deprecation.warn "abc" }
      end
      assert_deprecated("abc") { ActiveSupport::Deprecation.warn "abc" }
    end
    barrier.wait
    # Silencing in the spawned thread must not affect this thread.
    assert_deprecated("abc") { ActiveSupport::Deprecation.warn "abc" }
    ActiveSupport::Deprecation.silence do
      assert_not_deprecated { ActiveSupport::Deprecation.warn "abc" }
    end
    assert_deprecated("abc") { ActiveSupport::Deprecation.warn "abc" }
    barrier.wait
    th.join
  ensure
    # &. so a failure before the thread exists doesn't mask the real error
    # with a NoMethodError on nil.
    th&.kill
  end

  def test_deprecation_without_explanation
    assert_deprecated { @dtc.a }
    assert_deprecated { @dtc.b }
    assert_deprecated { @dtc.f = :foo }
  end

  def test_deprecation_with_alternate_method
    assert_deprecated(/use e instead/) { @dtc.c }
  end

  def test_deprecation_with_explicit_message
    assert_deprecated(/you now need to do something extra for this one/) { @dtc.d }
  end

  def test_deprecation_in_other_object
    messages = []
    klass = Class.new do
      delegate :warn, :behavior=, to: ActiveSupport::Deprecation
    end
    o = klass.new
    o.behavior = Proc.new { |message, callstack| messages << message }
    assert_difference("messages.size") do
      o.warn("warning")
    end
  end

  def test_deprecated_method_with_custom_method_warning
    deprecator = deprecator_with_messages
    class << deprecator
      private
      def deprecated_method_warning(method, message)
        "deprecator.deprecated_method_warning.#{method}"
      end
    end
    deprecatee = Class.new do
      def method
      end
      deprecate :method, deprecator: deprecator
    end
    deprecatee.new.method
    assert deprecator.messages.first.match("DEPRECATION WARNING: deprecator.deprecated_method_warning.method")
  end

  def test_deprecate_with_custom_deprecator
    # A one-member Struct doubles as a deprecator stub: the generated
    # accessor is the :deprecation_warning method assert_called_with stubs.
    custom_deprecator = Struct.new(:deprecation_warning).new
    assert_called_with(custom_deprecator, :deprecation_warning, [:method, nil]) do
      klass = Class.new do
        def method
        end
        deprecate :method, deprecator: custom_deprecator
      end
      klass.new.method
    end
  end

  def test_deprecated_constant_with_deprecator_given
    deprecator = deprecator_with_messages
    klass = Class.new
    klass.const_set(:OLD, ActiveSupport::Deprecation::DeprecatedConstantProxy.new("klass::OLD", "Object", deprecator))
    assert_difference("deprecator.messages.size") do
      klass::OLD.to_s
    end
  end

  def test_deprecated_constant_with_custom_message
    deprecator = deprecator_with_messages
    klass = Class.new
    klass.const_set(:OLD, ActiveSupport::Deprecation::DeprecatedConstantProxy.new("klass::OLD", "Object", deprecator, message: "foo"))
    klass::OLD.to_s
    assert_match "foo", deprecator.messages.last
  end

  def test_deprecated_instance_variable_with_instance_deprecator
    deprecator = deprecator_with_messages
    klass = Class.new do
      def initialize(deprecator)
        @request = ActiveSupport::Deprecation::DeprecatedInstanceVariableProxy.new(self, :request, :@request, deprecator)
        @_request = :a_request
      end
      def request; @_request end
      def old_request; @request end
    end
    assert_difference("deprecator.messages.size") { klass.new(deprecator).old_request.to_s }
  end

  def test_deprecated_instance_variable_with_given_deprecator
    deprecator = deprecator_with_messages
    klass = Class.new do
      # define_method (rather than def) so the block closes over the
      # `deprecator` local from the enclosing test.
      define_method(:initialize) do
        @request = ActiveSupport::Deprecation::DeprecatedInstanceVariableProxy.new(self, :request, :@request, deprecator)
        @_request = :a_request
      end
      def request; @_request end
      def old_request; @request end
    end
    assert_difference("deprecator.messages.size") { klass.new.old_request.to_s }
  end

  def test_delegate_deprecator_instance
    klass = Class.new do
      attr_reader :last_message
      delegate :warn, :behavior=, to: ActiveSupport::Deprecation
      def initialize
        self.behavior = [Proc.new { |message| @last_message = message }]
      end
      def deprecated_method
        warn(deprecated_method_warning(:deprecated_method, "You are calling deprecated method"))
      end
      private
      def deprecated_method_warning(method_name, message = nil)
        message || "#{method_name} is deprecated and will be removed from This Library"
      end
    end
    object = klass.new
    object.deprecated_method
    assert_match(/You are calling deprecated method/, object.last_message)
  end

  def test_default_deprecation_horizon_should_always_bigger_than_current_rails_version
    # NOTE: this is a lexicographic String comparison of version strings.
    assert_operator ActiveSupport::Deprecation.new.deprecation_horizon, :>, ActiveSupport::VERSION::STRING
  end

  def test_default_gem_name
    deprecator = ActiveSupport::Deprecation.new
    deprecator.send(:deprecated_method_warning, :deprecated_method, "You are calling deprecated method").tap do |message|
      assert_match(/is deprecated and will be removed from Rails/, message)
    end
  end

  def test_custom_gem_name
    deprecator = ActiveSupport::Deprecation.new("2.0", "Custom")
    deprecator.send(:deprecated_method_warning, :deprecated_method, "You are calling deprecated method").tap do |message|
      assert_match(/is deprecated and will be removed from Custom/, message)
    end
  end

  def test_deprecate_work_before_define_method
    assert_deprecated(/g is deprecated/) { @dtc.g(1) }
  end

  def test_config_disallows_no_deprecations_by_default
    assert_equal ActiveSupport::Deprecation.disallowed_warnings, []
  end

  def test_allows_configuration_of_disallowed_warnings
    resetting_disallowed_deprecation_config do
      config_warnings = ["unsafe_method is going away"]
      ActiveSupport::Deprecation.disallowed_warnings = config_warnings
      assert_equal ActiveSupport::Deprecation.disallowed_warnings, config_warnings
    end
  end

  def test_no_disallowed_behavior_with_no_disallowed_messages
    resetting_disallowed_deprecation_config do
      ActiveSupport::Deprecation.disallowed_behavior = :raise
      assert_nothing_raised do
        @dtc.none
        @dtc.partially
      end
    end
  end

  def test_disallowed_behavior_does_not_apply_to_allowed_messages
    resetting_disallowed_deprecation_config do
      ActiveSupport::Deprecation.disallowed_behavior = :raise
      ActiveSupport::Deprecation.disallowed_warnings = ["foo=nil"]
      assert_nothing_raised { @dtc.none }
    end
  end

  def test_disallowed_behavior_when_disallowed_message_configured_with_substring
    resetting_disallowed_deprecation_config do
      ActiveSupport::Deprecation.disallowed_behavior = :raise
      ActiveSupport::Deprecation.disallowed_warnings = ["foo=nil"]
      e = assert_raise ActiveSupport::DeprecationException do
        @dtc.partially
      end
      message = "DEPRECATION WARNING: calling with foo=nil is out"
      assert_match message, e.message
    end
  end

  def test_disallowed_behavior_when_disallowed_message_configured_with_symbol_treated_as_substring
    resetting_disallowed_deprecation_config do
      ActiveSupport::Deprecation.disallowed_behavior = :raise
      ActiveSupport::Deprecation.disallowed_warnings = [:foo]
      e = assert_raise ActiveSupport::DeprecationException do
        @dtc.partially
      end
      message = "DEPRECATION WARNING: calling with foo=nil is out"
      assert_match message, e.message
    end
  end

  def test_disallowed_behavior_when_disallowed_message_configured_with_regular_expression
    resetting_disallowed_deprecation_config do
      ActiveSupport::Deprecation.disallowed_behavior = :raise
      ActiveSupport::Deprecation.disallowed_warnings = [/none|one*/]
      e = assert_raise ActiveSupport::DeprecationException do
        @dtc.none
      end
      message = "none is deprecated"
      assert_match message, e.message
      e = assert_raise ActiveSupport::DeprecationException do
        # The deprecation fires (and raises) before the wrapped method body
        # runs, so the missing required argument never matters here.
        @dtc.one
      end
      message = "one is deprecated"
      assert_match message, e.message
    end
  end

  def test_disallowed_behavior_when_disallowed_message_configured_with_scalar_symbol_all
    resetting_disallowed_deprecation_config do
      allowed_message = nil
      disallowed_message = nil
      ActiveSupport::Deprecation.behavior = [
        lambda { |msg, callstack, horizon, gem| allowed_message = msg }
      ]
      ActiveSupport::Deprecation.disallowed_behavior = [
        lambda { |msg, callstack, horizon, gem| disallowed_message = msg }
      ]
      ActiveSupport::Deprecation.disallowed_warnings = :all
      @dtc.partially
      assert_nil allowed_message
      assert_match(/foo=nil/, disallowed_message)
      allowed_message = nil
      disallowed_message = nil
      @dtc.none
      assert_nil allowed_message
      assert_match(/none is deprecated/, disallowed_message)
    end
  end

  def test_different_behaviors_for_allowed_and_disallowed_messages
    resetting_disallowed_deprecation_config do
      @a, @b, @c, @d = nil, nil, nil, nil
      ActiveSupport::Deprecation.behavior = [
        lambda { |msg, callstack, horizon, gem| @a = msg },
        lambda { |msg, callstack| @b = msg },
      ]
      ActiveSupport::Deprecation.disallowed_behavior = [
        lambda { |msg, callstack, horizon, gem| @c = msg },
        lambda { |msg, callstack| @d = msg },
      ]
      ActiveSupport::Deprecation.disallowed_warnings = ["foo=nil"]
      @dtc.partially
      @dtc.none
      assert_match(/none is deprecated/, @a)
      assert_match(/none is deprecated/, @b)
      assert_match(/foo=nil/, @c)
      assert_match(/foo=nil/, @d)
    end
  end

  def test_allow
    resetting_disallowed_deprecation_config do
      @warnings_allowed, @warnings_disallowed = [], []
      ActiveSupport::Deprecation.behavior = [
        lambda { |msg, callstack, horizon, gem| @warnings_allowed << msg }
      ]
      ActiveSupport::Deprecation.disallowed_behavior = [
        lambda { |msg, callstack, horizon, gem| @warnings_disallowed << msg }
      ]
      ActiveSupport::Deprecation.disallowed_warnings = [
        "b is deprecated",
        "c is deprecated"
      ]
      ActiveSupport::Deprecation.allow do
        @dtc.a
        @dtc.b
        @dtc.c
      end
      assert_match(/a is deprecated/, @warnings_allowed.join("\n"))
      # Fixed: this line was a copy-pasted duplicate of the "a" assertion,
      # so "b is deprecated" (which IS in the disallowed list) was never
      # verified to have been allowed.
      assert_match(/b is deprecated/, @warnings_allowed.join("\n"))
      assert_match(/c is deprecated/, @warnings_allowed.join("\n"))
      assert_empty @warnings_disallowed
    end
  end

  def test_allow_only_matching_warnings
    resetting_disallowed_deprecation_config do
      @warnings_allowed, @warnings_disallowed = [], []
      ActiveSupport::Deprecation.behavior = [
        lambda { |msg, callstack, horizon, gem| @warnings_allowed << msg }
      ]
      ActiveSupport::Deprecation.disallowed_behavior = [
        lambda { |msg, callstack, horizon, gem| @warnings_disallowed << msg }
      ]
      ActiveSupport::Deprecation.disallowed_warnings = [
        "a is deprecated",
        "b is deprecated",
        "c is deprecated",
      ]
      ActiveSupport::Deprecation.allow ["b is", "c is"] do
        @dtc.none
        @dtc.a
        @dtc.b
        @dtc.c
      end
      assert_match(/none is deprecated/, @warnings_allowed.join("\n"))
      assert_no_match(/a is deprecated/, @warnings_allowed.join("\n"))
      assert_match(/b is deprecated/, @warnings_allowed.join("\n"))
      assert_match(/c is deprecated/, @warnings_allowed.join("\n"))
      assert_no_match(/none is deprecated/, @warnings_disallowed.join("\n"))
      assert_match(/a is deprecated/, @warnings_disallowed.join("\n"))
      assert_no_match(/b is deprecated/, @warnings_disallowed.join("\n"))
      assert_no_match(/c is deprecated/, @warnings_disallowed.join("\n"))
    end
  end

  def test_allow_with_symbol
    resetting_disallowed_deprecation_config do
      @warnings_allowed, @warnings_disallowed = [], []
      ActiveSupport::Deprecation.behavior = [
        lambda { |msg, callstack, horizon, gem| @warnings_allowed << msg }
      ]
      ActiveSupport::Deprecation.disallowed_behavior = [
        lambda { |msg, callstack, horizon, gem| @warnings_disallowed << msg }
      ]
      ActiveSupport::Deprecation.disallowed_warnings = [
        "a is deprecated",
        "b is deprecated",
        "c is deprecated",
      ]
      ActiveSupport::Deprecation.allow [:"b is", :"c is"] do
        @dtc.none
        @dtc.a
        @dtc.b
        @dtc.c
      end
      assert_match(/none is deprecated/, @warnings_allowed.join("\n"))
      assert_no_match(/a is deprecated/, @warnings_allowed.join("\n"))
      assert_match(/b is deprecated/, @warnings_allowed.join("\n"))
      assert_match(/c is deprecated/, @warnings_allowed.join("\n"))
      assert_no_match(/none is deprecated/, @warnings_disallowed.join("\n"))
      assert_match(/a is deprecated/, @warnings_disallowed.join("\n"))
      assert_no_match(/b is deprecated/, @warnings_disallowed.join("\n"))
      assert_no_match(/c is deprecated/, @warnings_disallowed.join("\n"))
    end
  end

  def test_allow_with_regexp
    resetting_disallowed_deprecation_config do
      @warnings_allowed, @warnings_disallowed = [], []
      ActiveSupport::Deprecation.behavior = [
        lambda { |msg, callstack, horizon, gem| @warnings_allowed << msg }
      ]
      ActiveSupport::Deprecation.disallowed_behavior = [
        lambda { |msg, callstack, horizon, gem| @warnings_disallowed << msg }
      ]
      ActiveSupport::Deprecation.disallowed_warnings = [
        "a is deprecated",
        "b is deprecated",
        "c is deprecated",
      ]
      ActiveSupport::Deprecation.allow [/(b|c)\sis/] do
        @dtc.none
        @dtc.a
        @dtc.b
        @dtc.c
      end
      assert_match(/none is deprecated/, @warnings_allowed.join("\n"))
      assert_no_match(/a is deprecated/, @warnings_allowed.join("\n"))
      assert_match(/b is deprecated/, @warnings_allowed.join("\n"))
      assert_match(/c is deprecated/, @warnings_allowed.join("\n"))
      assert_no_match(/none is deprecated/, @warnings_disallowed.join("\n"))
      assert_match(/a is deprecated/, @warnings_disallowed.join("\n"))
      assert_no_match(/b is deprecated/, @warnings_disallowed.join("\n"))
      assert_no_match(/c is deprecated/, @warnings_disallowed.join("\n"))
    end
  end

  def test_allow_only_has_effect_inside_provided_block
    resetting_disallowed_deprecation_config do
      @warnings_allowed, @warnings_disallowed = [], []
      ActiveSupport::Deprecation.behavior = [
        lambda { |msg, callstack, horizon, gem| @warnings_allowed << msg }
      ]
      ActiveSupport::Deprecation.disallowed_behavior = [
        lambda { |msg, callstack, horizon, gem| @warnings_disallowed << msg }
      ]
      ActiveSupport::Deprecation.disallowed_warnings = [
        "a is deprecated"
      ]
      ActiveSupport::Deprecation.allow "a is deprecated and will" do
        @dtc.a
      end
      assert_match(/a is deprecated/, @warnings_allowed.join("\n"))
      assert_no_match(/a is deprecated/, @warnings_disallowed.join("\n"))
      @warnings_allowed, @warnings_disallowed = [], []
      @dtc.a
      assert_no_match(/a is deprecated/, @warnings_allowed.join("\n"))
      assert_match(/a is deprecated/, @warnings_disallowed.join("\n"))
    end
  end

  def test_allow_only_has_effect_on_the_thread_on_which_it_was_called
    th1, th2 = nil, nil
    resetting_disallowed_deprecation_config do
      @warnings_allowed, @warnings_disallowed = [], []
      ActiveSupport::Deprecation.behavior = [
        lambda { |msg, callstack, horizon, gem| @warnings_allowed << msg }
      ]
      ActiveSupport::Deprecation.disallowed_behavior = [
        lambda { |msg, callstack, horizon, gem| @warnings_disallowed << msg }
      ]
      ActiveSupport::Deprecation.disallowed_warnings = [
        "a is deprecated"
      ]
      th1 = Thread.new do
        ActiveSupport::Deprecation.allow "a is deprecated and will" do
          # The allowance is thread-local: it must not leak into th2.
          th2 = Thread.new do
            @dtc.a
          end
          th2.join
        end
      end
      th1.join
      assert_no_match(/a is deprecated/, @warnings_allowed.join("\n"))
      assert_match(/a is deprecated/, @warnings_disallowed.join("\n"))
    end
  ensure
    # &. so an early failure (before the threads are created) doesn't raise
    # NoMethodError on nil and mask the original error.
    th1&.kill
    th2&.kill
  end

  def test_is_a_noop_based_on_if_kwarg_truthy_or_falsey
    resetting_disallowed_deprecation_config do
      @warnings_allowed, @warnings_disallowed = [], []
      ActiveSupport::Deprecation.behavior = [
        lambda { |msg, callstack, horizon, gem| @warnings_allowed << msg }
      ]
      ActiveSupport::Deprecation.disallowed_behavior = [
        lambda { |msg, callstack, horizon, gem| @warnings_disallowed << msg }
      ]
      ActiveSupport::Deprecation.disallowed_warnings = [
        "a is deprecated"
      ]
      ActiveSupport::Deprecation.allow "a is deprecated and will", if: true do
        @dtc.a
      end
      assert_match(/a is deprecated/, @warnings_allowed.join("\n"))
      assert_no_match(/a is deprecated/, @warnings_disallowed.join("\n"))
      @warnings_allowed, @warnings_disallowed = [], []
      # Any truthy object enables the allowance, not just `true`.
      ActiveSupport::Deprecation.allow "a is deprecated and will", if: Object.new do
        @dtc.a
      end
      assert_match(/a is deprecated/, @warnings_allowed.join("\n"))
      assert_no_match(/a is deprecated/, @warnings_disallowed.join("\n"))
      @warnings_allowed, @warnings_disallowed = [], []
      ActiveSupport::Deprecation.allow "a is deprecated and will", if: false do
        @dtc.a
      end
      assert_no_match(/a is deprecated/, @warnings_allowed.join("\n"))
      assert_match(/a is deprecated/, @warnings_disallowed.join("\n"))
      @warnings_allowed, @warnings_disallowed = [], []
      ActiveSupport::Deprecation.allow "a is deprecated and will", if: nil do
        @dtc.a
      end
      assert_no_match(/a is deprecated/, @warnings_allowed.join("\n"))
      assert_match(/a is deprecated/, @warnings_disallowed.join("\n"))
    end
  end

  def test_is_a_noop_based_on_if_kwarg_using_proc
    resetting_disallowed_deprecation_config do
      @warnings_allowed, @warnings_disallowed = [], []
      ActiveSupport::Deprecation.behavior = [
        lambda { |msg, callstack, horizon, gem| @warnings_allowed << msg }
      ]
      ActiveSupport::Deprecation.disallowed_behavior = [
        lambda { |msg, callstack, horizon, gem| @warnings_disallowed << msg }
      ]
      ActiveSupport::Deprecation.disallowed_warnings = [
        "a is deprecated"
      ]
      ActiveSupport::Deprecation.allow "a is deprecated and will", if: Proc.new { true } do
        @dtc.a
      end
      assert_match(/a is deprecated/, @warnings_allowed.join("\n"))
      assert_no_match(/a is deprecated/, @warnings_disallowed.join("\n"))
      @warnings_allowed, @warnings_disallowed = [], []
      ActiveSupport::Deprecation.allow "a is deprecated and will", if: Proc.new { false } do
        @dtc.a
      end
      assert_no_match(/a is deprecated/, @warnings_allowed.join("\n"))
      assert_match(/a is deprecated/, @warnings_disallowed.join("\n"))
    end
  end

  private
    # Builds an anonymous Deprecation subclass instance whose behavior
    # records every warning into a +messages+ array for later inspection.
    def deprecator_with_messages
      klass = Class.new(ActiveSupport::Deprecation)
      deprecator = klass.new
      deprecator.behavior = Proc.new { |message, callstack| deprecator.messages << message }
      def deprecator.messages
        @messages ||= []
      end
      deprecator
    end

    # Runs the block, then restores the global disallowed-warning
    # configuration even if the block raises.
    def resetting_disallowed_deprecation_config
      original_deprecations = ActiveSupport::Deprecation.disallowed_warnings
      original_behaviors = ActiveSupport::Deprecation.disallowed_behavior
      yield
    ensure
      ActiveSupport::Deprecation.disallowed_warnings = original_deprecations
      ActiveSupport::Deprecation.disallowed_behavior = original_behaviors
    end
end
# frozen_string_literal: true | |
require "weakref" | |
require "active_support/ruby_features" | |
module ActiveSupport
  # This module provides an internal implementation to track descendants
  # which is faster than iterating through ObjectSpace.
  #
  # Two implementations are selected at load time: when the Ruby engine
  # provides a native Class#subclasses (RubyFeatures::CLASS_SUBCLASSES),
  # that is used together with a weak exclusion set; otherwise a manual
  # class => DescendantsArray map is maintained via #inherited.
  module DescendantsTracker
    class << self
      # Deprecated alias for .subclasses: warns, then delegates.
      def direct_descendants(klass)
        ActiveSupport::Deprecation.warn(<<~MSG)
          ActiveSupport::DescendantsTracker.direct_descendants is deprecated and will be removed in Rails 7.1.
          Use ActiveSupport::DescendantsTracker.subclasses instead.
        MSG
        subclasses(klass)
      end
    end
    # Class-level instance variable (not @@): set to true once clearing has
    # been permanently disabled via .disable_clear!.
    @clear_disabled = false
    if RubyFeatures::CLASS_SUBCLASSES
      # Weak set of classes that were "cleared" (pending unload) and must be
      # hidden from subclasses/descendants results.
      @@excluded_descendants = if RUBY_ENGINE == "ruby"
        # On MRI `ObjectSpace::WeakMap` keys are weak references.
        # So we can simply use WeakMap as a `Set`.
        ObjectSpace::WeakMap.new
      else
        # On TruffleRuby `ObjectSpace::WeakMap` keys are strong references.
        # So we use `object_id` as a key and the actual object as a value.
        #
        # JRuby for now doesn't have Class#descendant, but when it will, it will likely
        # have the same WeakMap semantic than Truffle so we future proof this as much as possible.
        class WeakSet # :nodoc:
          def initialize
            @map = ObjectSpace::WeakMap.new
          end
          # Membership test by object identity.
          def [](object)
            @map.key?(object.object_id)
          end
          # Adds +object+ to the set; the boolean value is ignored.
          def []=(object, _present)
            @map[object.object_id] = object
          end
        end
        WeakSet.new
      end
      class << self
        # Called when classes are cached and never reloaded: no class will
        # ever be excluded again, so remove the filtering #subclasses
        # override (callers fall back to the native Class#subclasses) and
        # drop the weak set.
        def disable_clear! # :nodoc:
          unless @clear_disabled
            @clear_disabled = true
            remove_method(:subclasses)
            @@excluded_descendants = nil
          end
        end
        def subclasses(klass)
          klass.subclasses
        end
        def descendants(klass)
          klass.descendants
        end
        # Marks every class in +classes+, and all of their descendants, as
        # excluded from future results.
        def clear(classes) # :nodoc:
          raise "DescendantsTracker.clear was disabled because config.cache_classes = true" if @clear_disabled
          classes.each do |klass|
            @@excluded_descendants[klass] = true
            klass.descendants.each do |descendant|
              @@excluded_descendants[descendant] = true
            end
          end
        end
        def native? # :nodoc:
          true
        end
      end
      # Filters the engine-native subclass list through the exclusion set.
      def subclasses
        subclasses = super
        subclasses.reject! { |d| @@excluded_descendants[d] }
        subclasses
      end
      # All transitive descendants: direct subclasses plus their descendants.
      def descendants
        subclasses.concat(subclasses.flat_map(&:descendants))
      end
      # Deprecated alias for #subclasses: warns, then delegates.
      def direct_descendants
        ActiveSupport::Deprecation.warn(<<~MSG)
          ActiveSupport::DescendantsTracker#direct_descendants is deprecated and will be removed in Rails 7.1.
          Use #subclasses instead.
        MSG
        subclasses
      end
    else
      # Fallback when the engine has no native Class#subclasses: keep our
      # own class => DescendantsArray map, populated from #inherited.
      @@direct_descendants = {}
      class << self
        def disable_clear! # :nodoc:
          @clear_disabled = true
        end
        def subclasses(klass)
          descendants = @@direct_descendants[klass]
          descendants ? descendants.to_a : []
        end
        def descendants(klass)
          arr = []
          accumulate_descendants(klass, arr)
          arr
        end
        # Removes +classes+ (and references to them in other classes' lists)
        # from the tracking map.
        def clear(classes) # :nodoc:
          raise "DescendantsTracker.clear was disabled because config.cache_classes = true" if @clear_disabled
          @@direct_descendants.each do |klass, direct_descendants_of_klass|
            if classes.member?(klass)
              @@direct_descendants.delete(klass)
            else
              direct_descendants_of_klass.reject! do |direct_descendant_of_class|
                classes.member?(direct_descendant_of_class)
              end
            end
          end
        end
        def native? # :nodoc:
          false
        end
        # This is the only method that is not thread safe, but is only ever called
        # during the eager loading phase.
        def store_inherited(klass, descendant)
          (@@direct_descendants[klass] ||= DescendantsArray.new) << descendant
        end
        private
          # Depth-first collection of all transitive descendants into +acc+.
          def accumulate_descendants(klass, acc)
            if direct_descendants = @@direct_descendants[klass]
              direct_descendants.each do |direct_descendant|
                acc << direct_descendant
                accumulate_descendants(direct_descendant, acc)
              end
            end
          end
      end
      # Records each new subclass as it is defined.
      def inherited(base)
        DescendantsTracker.store_inherited(self, base)
        super
      end
      # Deprecated alias for #subclasses: warns, then delegates.
      def direct_descendants
        ActiveSupport::Deprecation.warn(<<~MSG)
          ActiveSupport::DescendantsTracker#direct_descendants is deprecated and will be removed in Rails 7.1.
          Use #subclasses instead.
        MSG
        DescendantsTracker.subclasses(self)
      end
      def subclasses
        DescendantsTracker.subclasses(self)
      end
      def descendants
        DescendantsTracker.descendants(self)
      end
      # DescendantsArray is an array that contains weak references to classes.
      class DescendantsArray # :nodoc:
        include Enumerable
        def initialize
          @refs = []
        end
        def initialize_copy(orig)
          # Shallow-copy the backing array so dup'd instances don't share it.
          @refs = @refs.dup
        end
        def <<(klass)
          @refs << WeakRef.new(klass)
        end
        # Yields each still-alive class; entries whose targets have been
        # garbage collected are pruned along the way.
        def each
          @refs.reject! do |ref|
            yield ref.__getobj__
            false
          rescue WeakRef::RefError
            true
          end
          self
        end
        def refs_size
          @refs.size
        end
        # Drops references whose targets have been garbage collected.
        def cleanup!
          @refs.delete_if { |ref| !ref.weakref_alive? }
        end
        # Removes entries for which the block returns true; dead weak
        # references are removed as well.
        def reject!
          @refs.reject! do |ref|
            yield ref.__getobj__
          rescue WeakRef::RefError
            true
          end
        end
      end
    end
  end
end
# frozen_string_literal: true | |
require_relative "abstract_unit" | |
require "set" | |
require "active_support/descendants_tracker" | |
class DescendantsTrackerTest < ActiveSupport::TestCase
  setup do
    # Snapshot the tracker's global registry (hash and each value copied)
    # so mutations made by a test can be rolled back in teardown.
    if ActiveSupport::DescendantsTracker.class_variable_defined?(:@@direct_descendants)
      @original_state = ActiveSupport::DescendantsTracker.class_variable_get(:@@direct_descendants).dup
      @original_state.each { |k, v| @original_state[k] = v.dup }
    end

    # Define the fixture hierarchy via eval so fresh constants are created
    # for every test (and removed again in teardown). The constants are
    # resolved lexically, so they land under DescendantsTrackerTest.
    eval <<~RUBY
      class Parent
        extend ActiveSupport::DescendantsTracker
      end
      class Child1 < Parent
      end
      class Child2 < Parent
      end
      class Grandchild1 < Child1
      end
      class Grandchild2 < Child1
      end
    RUBY
  end

  teardown do
    # Restore the registry snapshot taken in setup.
    if ActiveSupport::DescendantsTracker.class_variable_defined?(:@@direct_descendants)
      ActiveSupport::DescendantsTracker.class_variable_get(:@@direct_descendants).replace(@original_state)
    end

    # Remove the fixture constants defined by setup's eval.
    %i(Parent Child1 Child2 Grandchild1 Grandchild2).each do |name|
      if DescendantsTrackerTest.const_defined?(name)
        DescendantsTrackerTest.send(:remove_const, name)
      end
    end
  end

  test ".descendants" do
    assert_equal_sets [Child1, Grandchild1, Grandchild2, Child2], Parent.descendants
    assert_equal_sets [Grandchild1, Grandchild2], Child1.descendants
    assert_equal_sets [], Child2.descendants
  end

  test ".descendants with garbage collected classes" do
    # The Ruby GC (and most other GCs for that matter) are not fully precise.
    # When GC is run, the whole stack is scanned to mark any object reference
    # in registers. But some of these references might simply be leftovers from
    # previous method calls waiting to be overridden, and there's no definite
    # way to clear them. By executing this code in a distinct thread, we ensure
    # that such references are on a stack that will be entirely garbage
    # collected, effectively working around the problem.
    Thread.new do
      child_klass = Class.new(Parent)
      assert_equal_sets [Child1, Grandchild1, Grandchild2, Child2, child_klass], Parent.descendants
    end.join

    # Calling `GC.start` 4 times should trigger a full GC run
    4.times do
      GC.start
    end

    # The anonymous subclass created above should have been collected and
    # dropped from the descendants list.
    assert_equal_sets [Child1, Grandchild1, Grandchild2, Child2], Parent.descendants
  end

  test ".direct_descendants" do
    assert_deprecated do
      assert_equal_sets [Child1, Child2], Parent.direct_descendants
    end
    assert_deprecated do
      assert_equal_sets [Grandchild1, Grandchild2], Child1.direct_descendants
    end
    assert_deprecated do
      assert_equal_sets [], Child2.direct_descendants
    end
  end

  test ".subclasses" do
    # #subclasses is the non-deprecated replacement for #direct_descendants,
    # so both must agree.
    [Parent, Child1, Child2].each do |klass|
      assert_equal assert_deprecated { klass.direct_descendants }, klass.subclasses
    end
  end

  test ".clear(classes) deletes the given classes only" do
    ActiveSupport::DescendantsTracker.clear(Set[Child2, Grandchild1])

    assert_equal_sets [Child1, Grandchild2], Parent.descendants
    assert_equal_sets [Grandchild2], Child1.descendants

    assert_equal_sets [Child1], assert_deprecated { Parent.direct_descendants }
    assert_equal_sets [Grandchild2], assert_deprecated { Child1.direct_descendants }
  end

  private
    # Order-insensitive equality assertion for descendant lists.
    def assert_equal_sets(expected, actual)
      assert_equal Set.new(expected), Set.new(actual)
    end
end
# frozen_string_literal: true | |
require "openssl" | |
module ActiveSupport
  # Thin wrapper around a configurable digest implementation, used to
  # produce hex digests with a stable 32-character length.
  class Digest # :nodoc:
    # Returns the digest class in use, lazily defaulting to MD5.
    def self.hash_digest_class
      @hash_digest_class ||= OpenSSL::Digest::MD5
    end

    # Swaps in a custom digest implementation. The only requirement is
    # that it responds to +hexdigest+ at the class level.
    def self.hash_digest_class=(klass)
      unless klass.respond_to?(:hexdigest)
        raise ArgumentError, "#{klass} is expected to implement hexdigest class method"
      end

      @hash_digest_class = klass
    end

    # Hex digest of +arg+, truncated to 32 characters so the output length
    # does not depend on the configured algorithm.
    def self.hexdigest(arg)
      full_digest = hash_digest_class.hexdigest(arg)
      full_digest[0...32]
    end
  end
end
# frozen_string_literal: true | |
require_relative "abstract_unit" | |
require "openssl" | |
class DigestTest < ActiveSupport::TestCase
  # Deliberately does not respond to .hexdigest.
  class InvalidDigest; end

  def test_with_default_hash_digest_class
    expected = OpenSSL::Digest::MD5.hexdigest("hello friend")
    assert_equal expected, ActiveSupport::Digest.hexdigest("hello friend")
  end

  def test_with_custom_hash_digest_class
    previous = ActiveSupport::Digest.hash_digest_class
    ActiveSupport::Digest.hash_digest_class = OpenSSL::Digest::SHA1

    actual = ActiveSupport::Digest.hexdigest("hello friend")

    # Output stays truncated to 32 chars even for longer digests.
    assert_equal 32, actual.length
    assert_equal OpenSSL::Digest::SHA1.hexdigest("hello friend")[0...32], actual
  ensure
    ActiveSupport::Digest.hash_digest_class = previous
  end

  def test_should_raise_argument_error_if_custom_digest_is_missing_hexdigest_method
    assert_raises(ArgumentError) { ActiveSupport::Digest.hash_digest_class = InvalidDigest }
  end
end
# frozen_string_literal: true | |
module ActiveSupport
  class Deprecation
    module Disallowed
      # Sets the criteria used to identify deprecation messages which should
      # be disallowed. Accepts an array of strings, symbols, or regular
      # expressions (symbols are treated as strings), matched against the
      # text of the generated warning, or the scalar symbol +:all+ to treat
      # every deprecation as disallowed.
      #
      # Matching deprecations are handled with the configured
      # +ActiveSupport::Deprecation.disallowed_behavior+ rather than
      # +ActiveSupport::Deprecation.behavior+.
      attr_writer :disallowed_warnings

      # Returns the configured criteria used to identify deprecation messages
      # which should be treated as disallowed.
      def disallowed_warnings
        @disallowed_warnings ||= []
      end

      private
        # True when +message+ matches the disallowed criteria and has not
        # been explicitly allowed.
        def deprecation_disallowed?(message)
          disallowed = ActiveSupport::Deprecation.disallowed_warnings
          return false if explicitly_allowed?(message)
          return true if disallowed == :all

          disallowed.any? { |rule| rule_matches?(rule, message) }
        end

        # True when the current allowance list (an object responding to
        # +value+, set elsewhere) covers +message+.
        def explicitly_allowed?(message)
          allowances = @explicitly_allowed_warnings.value
          return false unless allowances
          return true if allowances == :all

          allowances = [allowances] unless allowances.kind_of?(Array)
          allowances.any? { |rule| rule_matches?(rule, message) }
        end

        # Shared matcher: strings/symbols match by substring, regexps via
        # #match?; any other rule type never matches.
        def rule_matches?(rule, message)
          case rule
          when String, Symbol
            message.include?(rule.to_s)
          when Regexp
            rule.match?(message)
          end
        end
    end
  end
end
# frozen_string_literal: true | |
#-- | |
# Most objects are cloneable, but not all. For example you can't dup methods: | |
# | |
# method(:puts).dup # => TypeError: allocator undefined for Method | |
# | |
# Classes may signal their instances are not duplicable by removing +dup+/+clone+
# or raising exceptions from them. So, to dup an arbitrary object you normally | |
# use an optimistic approach and are ready to catch an exception, say: | |
# | |
# arbitrary_object.dup rescue object | |
# | |
# Rails dups objects in a few critical spots where they are not that arbitrary. | |
# That rescue is very expensive (like 40 times slower than a predicate), and it | |
# is often triggered. | |
# | |
# That's why we hardcode the following cases and check duplicable? instead of | |
# using that rescue idiom. | |
#++ | |
class Object
  # Can you safely dup this object?
  #
  # False for method objects;
  # true otherwise. The default is optimistic: classes whose instances
  # cannot be duplicated override this to return +false+.
  def duplicable?
    true
  end
end
class Method
  # Methods are not duplicable:
  #
  #   method(:puts).duplicable? # => false
  #   method(:puts).dup         # => TypeError: allocator undefined for Method
  def duplicable?
    false
  end
end
class UnboundMethod
  # Unbound methods are not duplicable:
  #
  #   method(:puts).unbind.duplicable? # => false
  #   method(:puts).unbind.dup         # => TypeError: allocator undefined for UnboundMethod
  def duplicable?
    false
  end
end
require "singleton"

module Singleton
  # Singleton instances are not duplicable:
  #
  #   Class.new.include(Singleton).instance.dup # TypeError (can't dup instance of singleton)
  def duplicable?
    false
  end
end
# frozen_string_literal: true | |
require_relative "../../abstract_unit" | |
require "bigdecimal" | |
require "active_support/core_ext/object/duplicable" | |
require "active_support/core_ext/numeric/time" | |
class DuplicableTest < ActiveSupport::TestCase
  # Objects whose #dup raises, and a broad sample of objects that must dup.
  RAISE_DUP = [method(:puts), method(:puts).unbind, Class.new.include(Singleton).instance]
  ALLOW_DUP = ["1", "symbol_from_string".to_sym, Object.new, /foo/, [], {}, Time.now, Class.new, Module.new, BigDecimal("4.56"), nil, false, true, 1, 2.3, Complex(1), Rational(1)]

  def test_duplicable
    rubinius_skip "* Method#dup is allowed at the moment on Rubinius\n" \
      "* https://github.com/rubinius/rubinius/issues/3089"

    RAISE_DUP.each do |subject|
      assert_not subject.duplicable?, "#{ subject.inspect } should not be duplicable"
      assert_raises(TypeError, subject.class.name) { subject.dup }
    end

    ALLOW_DUP.each do |subject|
      assert subject.duplicable?, "#{ subject.class } should be duplicable"
      assert_nothing_raised { subject.dup }
    end
  end
end
# frozen_string_literal: true | |
require "active_support/core_ext/array/conversions" | |
require "active_support/core_ext/module/delegation" | |
require "active_support/core_ext/object/acts_like" | |
require "active_support/core_ext/string/filters" | |
module ActiveSupport | |
  # Provides accurate date and time measurements using Date#advance and
  # Time#advance, respectively. It mainly supports the duration-building
  # helpers defined on Numeric:
  #
  #   1.month.ago # equivalent to Time.now.advance(months: -1)
class Duration | |
class Scalar < Numeric # :nodoc:
  # The wrapped numeric value.
  attr_reader :value
  delegate :to_i, :to_f, :to_s, to: :value

  def initialize(value)
    @value = value
  end

  # Coercion protocol: wraps the other operand so that
  # <tt>numeric <op> scalar</tt> is evaluated by Scalar's own operators.
  def coerce(other)
    [Scalar.new(other), self]
  end

  def -@
    Scalar.new(-value)
  end

  # Compares by underlying value against Scalars/Durations, directly
  # against plain Numerics, and returns +nil+ (incomparable) otherwise.
  def <=>(other)
    if Scalar === other || Duration === other
      value <=> other.value
    elsif Numeric === other
      value <=> other
    else
      nil
    end
  end

  # scalar + duration folds the scalar into the duration's :seconds part
  # and returns a Duration; other operands go through #calculate.
  def +(other)
    if Duration === other
      seconds = value + other._parts.fetch(:seconds, 0)
      new_parts = other._parts.merge(seconds: seconds)
      new_value = value + other.value
      Duration.new(new_value, new_parts, other.variable?)
    else
      calculate(:+, other)
    end
  end

  # scalar - duration negates every part of the duration, then subtracts
  # the duration's seconds from the scalar value.
  def -(other)
    if Duration === other
      seconds = value - other._parts.fetch(:seconds, 0)
      new_parts = other._parts.transform_values(&:-@)
      new_parts = new_parts.merge(seconds: seconds)
      new_value = value - other.value
      Duration.new(new_value, new_parts, other.variable?)
    else
      calculate(:-, other)
    end
  end

  # scalar * duration scales each part of the duration.
  def *(other)
    if Duration === other
      new_parts = other._parts.transform_values { |other_value| value * other_value }
      new_value = value * other.value
      Duration.new(new_value, new_parts, other.variable?)
    else
      calculate(:*, other)
    end
  end

  # scalar / duration yields a plain numeric ratio, not a Duration.
  def /(other)
    if Duration === other
      value / other.value
    else
      calculate(:/, other)
    end
  end

  # scalar % duration yields a Duration built from the remainder seconds.
  def %(other)
    if Duration === other
      Duration.build(value % other.value)
    else
      calculate(:%, other)
    end
  end

  # Scalars never carry calendar-dependent (variable-length) parts.
  def variable? # :nodoc:
    false
  end

  private
    # Applies +op+ to the underlying values when +other+ is a Scalar or
    # plain Numeric; any other operand is a TypeError.
    def calculate(op, other)
      if Scalar === other
        Scalar.new(value.public_send(op, other.value))
      elsif Numeric === other
        Scalar.new(value.public_send(op, other))
      else
        raise_type_error(other)
      end
    end

    def raise_type_error(other)
      raise TypeError, "no implicit conversion of #{other.class} into #{self.class}"
    end
end
# Seconds per unit; months and years use Gregorian-calendar averages.
SECONDS_PER_MINUTE = 60
SECONDS_PER_HOUR = 3600
SECONDS_PER_DAY = 86400
SECONDS_PER_WEEK = 604800
SECONDS_PER_MONTH = 2629746 # 1/12 of a gregorian year
SECONDS_PER_YEAR = 31556952 # length of a gregorian year (365.2425 days)

PARTS_IN_SECONDS = {
  seconds: 1,
  minutes: SECONDS_PER_MINUTE,
  hours: SECONDS_PER_HOUR,
  days: SECONDS_PER_DAY,
  weeks: SECONDS_PER_WEEK,
  months: SECONDS_PER_MONTH,
  years: SECONDS_PER_YEAR
}.freeze

# Canonical ordering of parts, largest unit first.
PARTS = [:years, :months, :weeks, :days, :hours, :minutes, :seconds].freeze
# Parts whose real-world length depends on the calendar (DST, leap years…).
VARIABLE_PARTS = [:years, :months, :weeks, :days].freeze

# Total number of seconds represented by this duration.
attr_reader :value

autoload :ISO8601Parser, "active_support/duration/iso8601_parser"
autoload :ISO8601Serializer, "active_support/duration/iso8601_serializer"
class << self
  # Creates a new Duration from string formatted according to ISO 8601 Duration.
  #
  # See {ISO 8601}[https://en.wikipedia.org/wiki/ISO_8601#Durations] for more information.
  # This method allows negative parts to be present in pattern.
  # If invalid string is provided, it will raise +ActiveSupport::Duration::ISO8601Parser::ParsingError+.
  def parse(iso8601duration)
    parts = ISO8601Parser.new(iso8601duration).parse!
    new(calculate_total_seconds(parts), parts)
  end

  # Case-equality that tolerates receivers with a broken or missing
  # #is_a? (hence the NoMethodError rescue).
  def ===(other) # :nodoc:
    other.is_a?(Duration)
  rescue ::NoMethodError
    false
  end

  # Unit constructors used by the Numeric core extensions. The third
  # argument flags whether the duration is calendar-dependent (variable).
  def seconds(value) # :nodoc:
    new(value, { seconds: value }, false)
  end

  def minutes(value) # :nodoc:
    new(value * SECONDS_PER_MINUTE, { minutes: value }, false)
  end

  def hours(value) # :nodoc:
    new(value * SECONDS_PER_HOUR, { hours: value }, false)
  end

  def days(value) # :nodoc:
    new(value * SECONDS_PER_DAY, { days: value }, true)
  end

  def weeks(value) # :nodoc:
    new(value * SECONDS_PER_WEEK, { weeks: value }, true)
  end

  def months(value) # :nodoc:
    new(value * SECONDS_PER_MONTH, { months: value }, true)
  end

  def years(value) # :nodoc:
    new(value * SECONDS_PER_YEAR, { years: value }, true)
  end

  # Creates a new Duration from a seconds value that is converted
  # to the individual parts:
  #
  #   ActiveSupport::Duration.build(31556952).parts # => {:years=>1}
  #   ActiveSupport::Duration.build(2716146).parts  # => {:months=>1, :days=>1}
  #
  def build(value)
    unless value.is_a?(::Numeric)
      raise TypeError, "can't build an #{self.name} from a #{value.class.name}"
    end

    parts = {}
    remainder_sign = value <=> 0
    # Work on the absolute value, rounded to 9 decimal places (nanosecond
    # resolution) before decomposing.
    remainder = value.round(9).abs
    variable = false

    # Peel off whole units largest-first; whatever is left over becomes
    # the :seconds part. (The trailing modifier skips the decomposition
    # entirely for a zero value.)
    PARTS.each do |part|
      unless part == :seconds
        part_in_seconds = PARTS_IN_SECONDS[part]
        parts[part] = remainder.div(part_in_seconds) * remainder_sign
        remainder %= part_in_seconds

        unless parts[part].zero?
          variable ||= VARIABLE_PARTS.include?(part)
        end
      end
    end unless value == 0

    parts[:seconds] = remainder * remainder_sign

    new(value, parts, variable)
  end

  private
    # Sums a parts hash into total seconds using the per-unit conversions.
    def calculate_total_seconds(parts)
      parts.inject(0) do |total, (part, value)|
        total + value * PARTS_IN_SECONDS[part]
      end
    end
end
def initialize(value, parts, variable = nil) # :nodoc:
  @value, @parts = value, parts
  # Drop zero-valued parts (unless the duration itself is zero) and
  # freeze the hash, since it is shared with callers via _parts.
  @parts.reject! { |k, v| v.zero? } unless value == 0
  @parts.freeze
  @variable = variable

  # When not supplied, a duration is variable if any of its parts are
  # calendar-dependent (years, months, weeks, days).
  if @variable.nil?
    @variable = @parts.any? { |part, _| VARIABLE_PARTS.include?(part) }
  end
end

# Returns a copy of the parts hash that defines the duration
def parts
  @parts.dup
end

# Coercion protocol: ensures expressions like <tt>2 * 1.day</tt> are
# evaluated through Scalar so a Duration results.
def coerce(other) # :nodoc:
  case other
  when Scalar
    [other, self]
  when Duration
    [Scalar.new(other.value), self]
  else
    [Scalar.new(other), self]
  end
end

# Compares one Duration with another or a Numeric to this Duration.
# Numeric values are treated as seconds.
def <=>(other)
  if Duration === other
    value <=> other.value
  elsif Numeric === other
    value <=> other
  end
end
# Adds another Duration or a Numeric to this Duration. Numeric values
# are treated as seconds (folded into the :seconds part).
def +(other)
  if Duration === other
    parts = @parts.merge(other._parts) do |_key, value, other_value|
      value + other_value
    end
    Duration.new(value + other.value, parts, @variable || other.variable?)
  else
    seconds = @parts.fetch(:seconds, 0) + other
    Duration.new(value + other, @parts.merge(seconds: seconds), @variable)
  end
end

# Subtracts another Duration or a Numeric from this Duration. Numeric
# values are treated as seconds. Implemented as addition of the negation.
def -(other)
  self + (-other)
end

# Multiplies this Duration by a Numeric and returns a new Duration.
def *(other)
  if Scalar === other || Duration === other
    Duration.new(value * other.value, @parts.transform_values { |number| number * other.value }, @variable || other.variable?)
  elsif Numeric === other
    Duration.new(value * other, @parts.transform_values { |number| number * other }, @variable)
  else
    raise_type_error(other)
  end
end

# Divides this Duration by a Numeric and returns a new Duration.
# Dividing by another Duration returns a plain numeric ratio instead.
def /(other)
  if Scalar === other
    Duration.new(value / other.value, @parts.transform_values { |number| number / other.value }, @variable)
  elsif Duration === other
    value / other.value
  elsif Numeric === other
    Duration.new(value / other, @parts.transform_values { |number| number / other }, @variable)
  else
    raise_type_error(other)
  end
end

# Returns the modulo of this Duration by another Duration or Numeric.
# Numeric values are treated as seconds.
def %(other)
  if Duration === other || Scalar === other
    Duration.build(value % other.value)
  elsif Numeric === other
    Duration.build(value % other)
  else
    raise_type_error(other)
  end
end

def -@ # :nodoc:
  Duration.new(-value, @parts.transform_values(&:-@), @variable)
end

def +@ # :nodoc:
  self
end

# Durations masquerade as their numeric value, so e.g.
# <tt>1.day.is_a?(Integer)</tt> is true.
def is_a?(klass) # :nodoc:
  Duration == klass || value.is_a?(klass)
end
alias :kind_of? :is_a?

def instance_of?(klass) # :nodoc:
  Duration == klass || value.instance_of?(klass)
end
# Returns +true+ if +other+ is also a Duration instance with the
# same +value+, or if <tt>other == value</tt>.
def ==(other)
  if Duration === other
    other.value == value
  else
    other == value
  end
end

# Returns the amount of seconds a duration covers as a string.
# For more information check to_i method.
#
#   1.day.to_s # => "86400"
def to_s
  @value.to_s
end

# Returns the number of seconds that this Duration represents.
#
#   1.minute.to_i # => 60
#   1.hour.to_i   # => 3600
#   1.day.to_i    # => 86400
#
# Note that this conversion makes some assumptions about the
# duration of some periods, e.g. months are always 1/12 of year
# and years are 365.2425 days:
#
#   # equivalent to (1.year / 12).to_i
#   1.month.to_i  # => 2629746
#
#   # equivalent to 365.2425.days.to_i
#   1.year.to_i   # => 31556952
#
# In such cases, Ruby's core
# Date[https://ruby-doc.org/stdlib/libdoc/date/rdoc/Date.html] and
# Time[https://ruby-doc.org/stdlib/libdoc/time/rdoc/Time.html] should be used for precision
# date and time arithmetic.
def to_i
  @value.to_i
end
alias :in_seconds :to_i

# Returns the amount of minutes a duration covers as a float
#
#   1.day.in_minutes # => 1440.0
def in_minutes
  in_seconds / SECONDS_PER_MINUTE.to_f
end

# Returns the amount of hours a duration covers as a float
#
#   1.day.in_hours # => 24.0
def in_hours
  in_seconds / SECONDS_PER_HOUR.to_f
end

# Returns the amount of days a duration covers as a float
#
#   12.hours.in_days # => 0.5
def in_days
  in_seconds / SECONDS_PER_DAY.to_f
end

# Returns the amount of weeks a duration covers as a float
#
#   2.months.in_weeks # => 8.696
def in_weeks
  in_seconds / SECONDS_PER_WEEK.to_f
end

# Returns the amount of months a duration covers as a float
#
#   9.weeks.in_months # => 2.07
def in_months
  in_seconds / SECONDS_PER_MONTH.to_f
end

# Returns the amount of years a duration covers as a float
#
#   30.days.in_years # => 0.082
def in_years
  in_seconds / SECONDS_PER_YEAR.to_f
end

# Returns +true+ if +other+ is also a Duration instance whose total
# seconds value equals this one's — so <tt>1.minute.eql?(60.seconds)</tt>
# is +true+ even though the parts differ.
def eql?(other)
  Duration === other && other.value.eql?(value)
end

# Hash by total value, consistent with #eql?.
def hash
  @value.hash
end
# Calculates a new Time or Date that is as far in the future
# as this Duration represents.
def since(time = ::Time.current)
  sum(1, time)
end
alias :from_now :since
alias :after :since

# Calculates a new Time or Date that is as far in the past
# as this Duration represents.
def ago(time = ::Time.current)
  sum(-1, time)
end
alias :until :ago
alias :before :ago

# Human-readable rendering, e.g. "10 years, 2 months, and 1 day".
# Units are listed largest-first; a unit name is singularized (by
# chopping the trailing "s") when its value is exactly 1, and
# <tt>locale: false</tt> keeps the sentence connectors English
# regardless of I18n configuration.
def inspect # :nodoc:
  return "#{value} seconds" if @parts.empty?

  @parts.
    sort_by { |unit, _ | PARTS.index(unit) }.
    map { |unit, val| "#{val} #{val == 1 ? unit.to_s.chop : unit.to_s}" }.
    to_sentence(locale: false)
end

# Serializes to JSON as the total number of seconds.
def as_json(options = nil) # :nodoc:
  to_i
end

# YAML deserialization hook.
def init_with(coder) # :nodoc:
  initialize(coder["value"], coder["parts"])
end

# YAML serialization hook.
def encode_with(coder) # :nodoc:
  coder.map = { "value" => @value, "parts" => @parts }
end

# Build ISO 8601 Duration string for this duration.
# The +precision+ parameter can be used to limit seconds' precision of duration.
def iso8601(precision: nil)
  ISO8601Serializer.new(self, precision: precision).serialize
end

# Whether any part of this duration is calendar-dependent.
def variable? # :nodoc:
  @variable
end

# Internal access to the frozen parts hash (no defensive copy).
def _parts # :nodoc:
  @parts
end
private
  # Applies this duration to +time+ with the given +sign+ (+1 for #since,
  # -1 for #ago). Fixed-length parts (seconds, minutes, hours) are applied
  # as exact second offsets via #since; larger parts go through #advance
  # for calendar-aware arithmetic. Raises ArgumentError unless +time+
  # acts like a time or date.
  def sum(sign, time = ::Time.current)
    unless time.acts_like?(:time) || time.acts_like?(:date)
      raise ::ArgumentError, "expected a time or date, got #{time.inspect}"
    end

    if @parts.empty?
      time.since(sign * value)
    else
      @parts.inject(time) do |t, (type, number)|
        if type == :seconds
          t.since(sign * number)
        elsif type == :minutes
          t.since(sign * number * 60)
        elsif type == :hours
          t.since(sign * number * 3600)
        else
          t.advance(type => sign * number)
        end
      end
    end
  end

  # Durations transparently delegate unknown messages to their numeric
  # value (e.g. 1.minute.times).
  def respond_to_missing?(method, _)
    value.respond_to?(method)
  end

  def method_missing(method, *args, &block)
    value.public_send(method, *args, &block)
  end

  def raise_type_error(other)
    raise TypeError, "no implicit conversion of #{other.class} into #{self.class}"
  end
end | |
end |
# frozen_string_literal: true | |
require_relative "../abstract_unit" | |
require "active_support/inflector" | |
require "active_support/time" | |
require "active_support/json" | |
require_relative "../time_zone_test_helpers" | |
require "yaml" | |
class DurationTest < ActiveSupport::TestCase | |
include TimeZoneTestHelpers | |
def test_is_a | |
d = 1.day | |
assert d.is_a?(ActiveSupport::Duration) | |
assert_kind_of ActiveSupport::Duration, d | |
assert_kind_of Numeric, d | |
assert_kind_of Integer, d | |
assert_not d.is_a?(Hash) | |
k = Class.new | |
class << k; undef_method :== end | |
assert_not d.is_a?(k) | |
end | |
def test_instance_of | |
assert 1.minute.instance_of?(Integer) | |
assert 2.days.instance_of?(ActiveSupport::Duration) | |
assert_not 3.second.instance_of?(Numeric) | |
end | |
def test_threequals | |
assert ActiveSupport::Duration === 1.day | |
assert_not (ActiveSupport::Duration === 1.day.to_i) | |
assert_not (ActiveSupport::Duration === "foo") | |
end | |
def test_equals | |
assert 1.day == 1.day | |
assert 1.day == 1.day.to_i | |
assert 1.day.to_i == 1.day | |
assert_not (1.day == "foo") | |
end | |
def test_to_s | |
assert_equal "1", 1.second.to_s | |
end | |
def test_in_seconds | |
assert_equal 86400.0, 1.day.in_seconds | |
assert_equal 1.week.to_i, 1.week.in_seconds | |
end | |
def test_in_minutes | |
assert_in_delta 1440.0, 1.day.in_minutes | |
assert_in_delta 0.5, 30.seconds.in_minutes | |
end | |
def test_in_hours | |
assert_in_delta 24.0, 1.day.in_hours | |
assert_in_delta 336.0, 2.weeks.in_hours | |
end | |
def test_in_days | |
assert_in_delta 0.5, 12.hours.in_days | |
assert_in_delta 30.437, 1.month.in_days | |
end | |
def test_in_weeks | |
assert_in_delta 8.696, 2.months.in_weeks | |
assert_in_delta 52.178, 1.year.in_weeks | |
end | |
def test_in_months | |
assert_in_delta 2.07, 9.weeks.in_months | |
assert_in_delta 12.0, 1.year.in_months | |
end | |
def test_in_years | |
assert_in_delta 0.082, 30.days.in_years | |
assert_in_delta 1.0, 365.days.in_years | |
end | |
def test_eql | |
rubinius_skip "Rubinius' #eql? definition relies on #instance_of? " \ | |
"which behaves oddly for the sake of backward-compatibility." | |
assert 1.minute.eql?(1.minute) | |
assert 1.minute.eql?(60.seconds) | |
assert 2.days.eql?(48.hours) | |
assert_not 1.second.eql?(1) | |
assert_not 1.eql?(1.second) | |
assert 1.minute.eql?(180.seconds - 2.minutes) | |
assert_not 1.minute.eql?(60) | |
assert_not 1.minute.eql?("foo") | |
end | |
def test_inspect | |
assert_equal "0 seconds", 0.seconds.inspect | |
assert_equal "0 days", 0.days.inspect | |
assert_equal "1 month", 1.month.inspect | |
assert_equal "1 month and 1 day", (1.month + 1.day).inspect | |
assert_equal "6 months and -2 days", (6.months - 2.days).inspect | |
assert_equal "10 seconds", 10.seconds.inspect | |
assert_equal "10 years, 2 months, and 1 day", (10.years + 2.months + 1.day).inspect | |
assert_equal "10 years, 2 months, and 1 day", (10.years + 1.month + 1.day + 1.month).inspect | |
assert_equal "10 years, 2 months, and 1 day", (1.day + 10.years + 2.months).inspect | |
assert_equal "7 days", 7.days.inspect | |
assert_equal "1 week", 1.week.inspect | |
assert_equal "2 weeks", 1.fortnight.inspect | |
assert_equal "0 seconds", (10 % 5.seconds).inspect | |
assert_equal "10 minutes", (10.minutes + 0.seconds).inspect | |
assert_equal "3600 seconds", (1.day / 24).inspect | |
end | |
def test_inspect_ignores_locale | |
current_locale = I18n.default_locale | |
I18n.default_locale = :de | |
I18n.backend.store_translations(:de, support: { array: { last_word_connector: " und " } }) | |
assert_equal "10 years, 1 month, and 1 day", (10.years + 1.month + 1.day).inspect | |
ensure | |
I18n.default_locale = current_locale | |
end | |
def test_minus_with_duration_does_not_break_subtraction_of_date_from_date | |
assert_nothing_raised { Date.today - Date.today } | |
end | |
def test_unary_plus | |
assert_equal (+ 1.second), 1.second | |
assert_instance_of ActiveSupport::Duration, + 1.second | |
end | |
def test_plus | |
assert_equal 2.seconds, 1.second + 1.second | |
assert_instance_of ActiveSupport::Duration, 1.second + 1.second | |
assert_equal 2.seconds, 1.second + 1 | |
assert_instance_of ActiveSupport::Duration, 1.second + 1 | |
end | |
def test_minus | |
assert_equal 1.second, 2.seconds - 1.second | |
assert_instance_of ActiveSupport::Duration, 2.seconds - 1.second | |
assert_equal 1.second, 2.seconds - 1 | |
assert_instance_of ActiveSupport::Duration, 2.seconds - 1 | |
assert_equal 1.second, 2 - 1.second | |
assert_instance_of ActiveSupport::Duration, 2.seconds - 1 | |
end | |
def test_multiply | |
assert_equal 7.days, 1.day * 7 | |
assert_instance_of ActiveSupport::Duration, 1.day * 7 | |
assert_equal 86400, 1.day * 1.second | |
end | |
def test_divide | |
assert_equal 1.day, 7.days / 7 | |
assert_instance_of ActiveSupport::Duration, 7.days / 7 | |
assert_equal 1.hour, 1.day / 24 | |
assert_instance_of ActiveSupport::Duration, 1.day / 24 | |
assert_equal 24, 86400 / 1.hour | |
assert_kind_of Integer, 86400 / 1.hour | |
assert_equal 24, 1.day / 1.hour | |
assert_kind_of Integer, 1.day / 1.hour | |
assert_equal 1, 1.day / 1.day | |
assert_kind_of Integer, 1.day / 1.hour | |
end | |
def test_modulo | |
assert_equal 1.minute, 5.minutes % 120 | |
assert_instance_of ActiveSupport::Duration, 5.minutes % 120 | |
assert_equal 1.minute, 5.minutes % 2.minutes | |
assert_instance_of ActiveSupport::Duration, 5.minutes % 2.minutes | |
assert_equal 1.minute, 5.minutes % 120.seconds | |
assert_instance_of ActiveSupport::Duration, 5.minutes % 120.seconds | |
assert_equal 5.minutes, 5.minutes % 1.hour | |
assert_instance_of ActiveSupport::Duration, 5.minutes % 1.hour | |
assert_equal 1.day, 36.days % 604800 | |
assert_instance_of ActiveSupport::Duration, 36.days % 604800 | |
assert_equal 1.day, 36.days % 7.days | |
assert_instance_of ActiveSupport::Duration, 36.days % 7.days | |
assert_equal 800.seconds, 8000 % 1.hour | |
assert_instance_of ActiveSupport::Duration, 8000 % 1.hour | |
assert_equal 1.month, 13.months % 1.year | |
assert_instance_of ActiveSupport::Duration, 13.months % 1.year | |
end | |
def test_date_added_with_zero_days | |
assert_equal Date.civil(2017, 1, 1), Date.civil(2017, 1, 1) + 0.days | |
assert_instance_of Date, Date.civil(2017, 1, 1) + 0.days | |
end | |
def test_date_added_with_multiplied_duration | |
assert_equal Date.civil(2017, 1, 3), Date.civil(2017, 1, 1) + 1.day * 2 | |
assert_instance_of Date, Date.civil(2017, 1, 1) + 1.day * 2 | |
end | |
def test_date_added_with_multiplied_duration_larger_than_one_month | |
assert_equal Date.civil(2017, 2, 15), Date.civil(2017, 1, 1) + 1.day * 45 | |
assert_instance_of Date, Date.civil(2017, 1, 1) + 1.day * 45 | |
end | |
def test_date_added_with_divided_duration | |
assert_equal Date.civil(2017, 1, 3), Date.civil(2017, 1, 1) + 4.days / 2 | |
assert_instance_of Date, Date.civil(2017, 1, 1) + 4.days / 2 | |
end | |
def test_date_added_with_divided_duration_larger_than_one_month | |
assert_equal Date.civil(2017, 2, 15), Date.civil(2017, 1, 1) + 90.days / 2 | |
assert_instance_of Date, Date.civil(2017, 1, 1) + 90.days / 2 | |
end | |
def test_plus_with_time | |
assert_equal 1 + 1.second, 1.second + 1, "Duration + Numeric should == Numeric + Duration" | |
end | |
def test_time_plus_duration_returns_same_time_datatype | |
twz = ActiveSupport::TimeWithZone.new(nil, ActiveSupport::TimeZone["Moscow"], Time.utc(2016, 4, 28, 00, 45)) | |
now = Time.now.utc | |
%w( second minute hour day week month year ).each do |unit| | |
assert_equal((now + 1.public_send(unit)).class, Time, "Time + 1.#{unit} must be Time") | |
assert_equal((twz + 1.public_send(unit)).class, ActiveSupport::TimeWithZone, "TimeWithZone + 1.#{unit} must be TimeWithZone") | |
end | |
end | |
def test_argument_error | |
e = assert_raise ArgumentError do | |
1.second.ago("") | |
end | |
assert_equal 'expected a time or date, got ""', e.message, "ensure ArgumentError is not being raised by dependencies.rb" | |
end | |
def test_fractional_weeks | |
assert_equal((86400 * 7) * 1.5, 1.5.weeks) | |
assert_equal((86400 * 7) * 1.7, 1.7.weeks) | |
end | |
def test_fractional_days | |
assert_equal 86400 * 1.5, 1.5.days | |
assert_equal 86400 * 1.7, 1.7.days | |
end | |
def test_since_and_ago | |
t = Time.local(2000) | |
assert_equal t + 1, 1.second.since(t) | |
assert_equal t + 1, (1.minute / 60).since(t) | |
assert_equal t - 1, 1.second.ago(t) | |
assert_equal t - 1, (1.minute / 60).ago(t) | |
end | |
def test_since_and_ago_without_argument | |
now = Time.now | |
assert 1.second.since >= now + 1 | |
now = Time.now | |
assert 1.second.ago >= now - 1 | |
end | |
def test_since_and_ago_with_fractional_days | |
t = Time.local(2000) | |
# since | |
assert_equal 36.hours.since(t), 1.5.days.since(t) | |
assert_in_delta((24 * 1.7).hours.since(t), 1.7.days.since(t), 1) | |
# ago | |
assert_equal 36.hours.ago(t), 1.5.days.ago(t) | |
assert_in_delta((24 * 1.7).hours.ago(t), 1.7.days.ago(t), 1) | |
end | |
def test_since_and_ago_with_fractional_weeks | |
t = Time.local(2000) | |
# since | |
assert_equal((7 * 36).hours.since(t), 1.5.weeks.since(t)) | |
assert_in_delta((7 * 24 * 1.7).hours.since(t), 1.7.weeks.since(t), 1) | |
# ago | |
assert_equal((7 * 36).hours.ago(t), 1.5.weeks.ago(t)) | |
assert_in_delta((7 * 24 * 1.7).hours.ago(t), 1.7.weeks.ago(t), 1) | |
end | |
def test_since_and_ago_anchored_to_time_now_when_time_zone_is_not_set | |
Time.zone = nil | |
with_env_tz "US/Eastern" do | |
Time.stub(:now, Time.local(2000)) do | |
# since | |
assert_not_instance_of ActiveSupport::TimeWithZone, 5.seconds.since | |
assert_equal Time.local(2000, 1, 1, 0, 0, 5), 5.seconds.since | |
# ago | |
assert_not_instance_of ActiveSupport::TimeWithZone, 5.seconds.ago | |
assert_equal Time.local(1999, 12, 31, 23, 59, 55), 5.seconds.ago | |
end | |
end | |
end | |
def test_since_and_ago_anchored_to_time_zone_now_when_time_zone_is_set | |
Time.zone = ActiveSupport::TimeZone["Eastern Time (US & Canada)"] | |
with_env_tz "US/Eastern" do | |
Time.stub(:now, Time.local(2000)) do | |
# since | |
assert_instance_of ActiveSupport::TimeWithZone, 5.seconds.since | |
assert_equal Time.utc(2000, 1, 1, 0, 0, 5), 5.seconds.since.time | |
assert_equal "Eastern Time (US & Canada)", 5.seconds.since.time_zone.name | |
# ago | |
assert_instance_of ActiveSupport::TimeWithZone, 5.seconds.ago | |
assert_equal Time.utc(1999, 12, 31, 23, 59, 55), 5.seconds.ago.time | |
assert_equal "Eastern Time (US & Canada)", 5.seconds.ago.time_zone.name | |
end | |
end | |
ensure | |
Time.zone = nil | |
end | |
# #after and #before are the directional aliases of #since and #ago.
def test_before_and_after
  reference = Time.local(2000)
  assert_equal reference + 1, 1.second.after(reference)
  assert_equal reference - 1, 1.second.before(reference)
end
def test_before_and_after_without_argument | |
Time.stub(:now, Time.local(2000)) do | |
assert_equal Time.now - 1.second, 1.second.before | |
assert_equal Time.now + 1.second, 1.second.after | |
end | |
end | |
def test_adding_hours_across_dst_boundary | |
with_env_tz "CET" do | |
assert_equal Time.local(2009, 3, 29, 0, 0, 0) + 24.hours, Time.local(2009, 3, 30, 1, 0, 0) | |
end | |
end | |
def test_adding_day_across_dst_boundary | |
with_env_tz "CET" do | |
assert_equal Time.local(2009, 3, 29, 0, 0, 0) + 1.day, Time.local(2009, 3, 30, 0, 0, 0) | |
end | |
end | |
# A Duration delegates unknown methods (here Integer#times) to its
# underlying value, and the delegated call must accept a block.
def test_delegation_with_block_works
  iterations = 0
  assert_nothing_raised { 1.minute.times { iterations += 1 } }
  assert_equal 60, iterations
end
def test_as_json | |
assert_equal 172800, 2.days.as_json | |
end | |
def test_to_json | |
assert_equal "172800", 2.days.to_json | |
end | |
def test_case_when | |
cased = \ | |
case 1.day | |
when 1.day | |
"ok" | |
end | |
assert_equal "ok", cased | |
end | |
def test_respond_to | |
assert_respond_to 1.day, :since | |
assert_respond_to 1.day, :zero? | |
end | |
def test_hash | |
assert_equal 1.minute.hash, 60.seconds.hash | |
end | |
# Durations compare by total seconds via <=>, both against other
# durations and against plain numerics on either side.
def test_comparable
assert_equal(-1, (0.seconds <=> 1.second))
assert_equal(-1, (1.second <=> 1.minute))
assert_equal(-1, (1 <=> 1.minute))
assert_equal(0, (0.seconds <=> 0.seconds))
assert_equal(0, (0.seconds <=> 0.minutes))
assert_equal(0, (1.second <=> 1.second))
assert_equal(1, (1.second <=> 0.second))
assert_equal(1, (1.minute <=> 1.second))
assert_equal(1, (61 <=> 1.minute))
end
def test_implicit_coercion | |
assert_equal 2.days, 2 * 1.day | |
assert_instance_of ActiveSupport::Duration, 2 * 1.day | |
assert_equal Time.utc(2017, 1, 3), Time.utc(2017, 1, 1) + 2 * 1.day | |
assert_equal Date.civil(2017, 1, 3), Date.civil(2017, 1, 1) + 2 * 1.day | |
end | |
def test_scalar_coerce | |
scalar = ActiveSupport::Duration::Scalar.new(10) | |
assert_instance_of ActiveSupport::Duration::Scalar, 10 + scalar | |
assert_instance_of ActiveSupport::Duration, 10.seconds + scalar | |
end | |
def test_scalar_delegations | |
scalar = ActiveSupport::Duration::Scalar.new(10) | |
assert_kind_of Float, scalar.to_f | |
assert_kind_of Integer, scalar.to_i | |
assert_kind_of String, scalar.to_s | |
end | |
def test_scalar_unary_minus | |
scalar = ActiveSupport::Duration::Scalar.new(10) | |
assert_equal(-10, -scalar) | |
assert_instance_of ActiveSupport::Duration::Scalar, -scalar | |
end | |
def test_scalar_compare | |
scalar = ActiveSupport::Duration::Scalar.new(10) | |
assert_equal(1, scalar <=> 5) | |
assert_equal(0, scalar <=> 10) | |
assert_equal(-1, scalar <=> 15) | |
assert_nil(scalar <=> "foo") | |
end | |
def test_scalar_plus | |
scalar = ActiveSupport::Duration::Scalar.new(10) | |
assert_equal 20, 10 + scalar | |
assert_instance_of ActiveSupport::Duration::Scalar, 10 + scalar | |
assert_equal 20, scalar + 10 | |
assert_instance_of ActiveSupport::Duration::Scalar, scalar + 10 | |
assert_equal 20, 10.seconds + scalar | |
assert_instance_of ActiveSupport::Duration, 10.seconds + scalar | |
assert_equal 20, scalar + 10.seconds | |
assert_instance_of ActiveSupport::Duration, scalar + 10.seconds | |
exception = assert_raises(TypeError) do | |
scalar + "foo" | |
end | |
assert_equal "no implicit conversion of String into ActiveSupport::Duration::Scalar", exception.message | |
end | |
def test_scalar_plus_parts | |
scalar = ActiveSupport::Duration::Scalar.new(10) | |
assert_equal({ days: 1, seconds: 10 }, (scalar + 1.day).parts) | |
assert_equal({ days: -1, seconds: 10 }, (scalar + -1.day).parts) | |
end | |
def test_scalar_minus | |
scalar = ActiveSupport::Duration::Scalar.new(10) | |
assert_equal 10, 20 - scalar | |
assert_instance_of ActiveSupport::Duration::Scalar, 20 - scalar | |
assert_equal 5, scalar - 5 | |
assert_instance_of ActiveSupport::Duration::Scalar, scalar - 5 | |
assert_equal 10, 20.seconds - scalar | |
assert_instance_of ActiveSupport::Duration, 20.seconds - scalar | |
assert_equal 5, scalar - 5.seconds | |
assert_instance_of ActiveSupport::Duration, scalar - 5.seconds | |
assert_equal({ days: -1, seconds: 10 }, (scalar - 1.day).parts) | |
assert_equal({ days: 1, seconds: 10 }, (scalar - -1.day).parts) | |
exception = assert_raises(TypeError) do | |
scalar - "foo" | |
end | |
assert_equal "no implicit conversion of String into ActiveSupport::Duration::Scalar", exception.message | |
end | |
def test_scalar_minus_parts | |
scalar = ActiveSupport::Duration::Scalar.new(10) | |
assert_equal({ days: -1, seconds: 10 }, (scalar - 1.day).parts) | |
assert_equal({ days: 1, seconds: 10 }, (scalar - -1.day).parts) | |
end | |
def test_scalar_multiply | |
scalar = ActiveSupport::Duration::Scalar.new(5) | |
assert_equal 10, 2 * scalar | |
assert_instance_of ActiveSupport::Duration::Scalar, 2 * scalar | |
assert_equal 10, scalar * 2 | |
assert_instance_of ActiveSupport::Duration::Scalar, scalar * 2 | |
assert_equal 10, 2.seconds * scalar | |
assert_instance_of ActiveSupport::Duration, 2.seconds * scalar | |
assert_equal 10, scalar * 2.seconds | |
assert_instance_of ActiveSupport::Duration, scalar * 2.seconds | |
exception = assert_raises(TypeError) do | |
scalar * "foo" | |
end | |
assert_equal "no implicit conversion of String into ActiveSupport::Duration::Scalar", exception.message | |
end | |
def test_scalar_multiply_parts | |
scalar = ActiveSupport::Duration::Scalar.new(1) | |
assert_equal({ days: 2 }, (scalar * 2.days).parts) | |
assert_equal(172800, (scalar * 2.days).value) | |
assert_equal({ days: -2 }, (scalar * -2.days).parts) | |
assert_equal(-172800, (scalar * -2.days).value) | |
end | |
def test_scalar_divide | |
scalar = ActiveSupport::Duration::Scalar.new(10) | |
assert_equal 10, 100 / scalar | |
assert_instance_of ActiveSupport::Duration::Scalar, 100 / scalar | |
assert_equal 5, scalar / 2 | |
assert_instance_of ActiveSupport::Duration::Scalar, scalar / 2 | |
assert_equal 10, 100.seconds / scalar | |
assert_instance_of ActiveSupport::Duration, 100.seconds / scalar | |
assert_equal 5, scalar / 2.seconds | |
assert_kind_of Integer, scalar / 2.seconds | |
exception = assert_raises(TypeError) do | |
scalar / "foo" | |
end | |
assert_equal "no implicit conversion of String into ActiveSupport::Duration::Scalar", exception.message | |
end | |
def test_scalar_modulo | |
scalar = ActiveSupport::Duration::Scalar.new(10) | |
assert_equal 1, 31 % scalar | |
assert_instance_of ActiveSupport::Duration::Scalar, 31 % scalar | |
assert_equal 1, scalar % 3 | |
assert_instance_of ActiveSupport::Duration::Scalar, scalar % 3 | |
assert_equal 1, 31.seconds % scalar | |
assert_instance_of ActiveSupport::Duration, 31.seconds % scalar | |
assert_equal 1, scalar % 3.seconds | |
assert_instance_of ActiveSupport::Duration, scalar % 3.seconds | |
exception = assert_raises(TypeError) do | |
scalar % "foo" | |
end | |
assert_equal "no implicit conversion of String into ActiveSupport::Duration::Scalar", exception.message | |
end | |
def test_scalar_modulo_parts | |
scalar = ActiveSupport::Duration::Scalar.new(82800) | |
assert_equal({ hours: 1 }, (scalar % 2.hours).parts) | |
assert_equal(3600, (scalar % 2.hours).value) | |
end | |
def test_twelve_months_equals_one_year | |
assert_equal 12.months, 1.year | |
end | |
def test_thirty_days_does_not_equal_one_month | |
assert_not_equal 30.days, 1.month | |
end | |
def test_adding_one_month_maintains_day_of_month | |
(1..11).each do |month| | |
[1, 14, 28].each do |day| | |
assert_equal Date.civil(2016, month + 1, day), Date.civil(2016, month, day) + 1.month | |
end | |
end | |
assert_equal Date.civil(2017, 1, 1), Date.civil(2016, 12, 1) + 1.month | |
assert_equal Date.civil(2017, 1, 14), Date.civil(2016, 12, 14) + 1.month | |
assert_equal Date.civil(2017, 1, 28), Date.civil(2016, 12, 28) + 1.month | |
assert_equal Date.civil(2015, 2, 28), Date.civil(2015, 1, 31) + 1.month | |
assert_equal Date.civil(2016, 2, 29), Date.civil(2016, 1, 31) + 1.month | |
end | |
# ISO8601 string examples are taken from ISO8601 gem at https://github.com/arnau/ISO8601/blob/b93d466840/spec/iso8601/duration_spec.rb | |
# published under the conditions of MIT license at https://github.com/arnau/ISO8601/blob/b93d466840/LICENSE | |
# | |
# Copyright (c) 2012-2014 Arnau Siches | |
# | |
# MIT License | |
# | |
# Permission is hereby granted, free of charge, to any person obtaining | |
# a copy of this software and associated documentation files (the | |
# "Software"), to deal in the Software without restriction, including | |
# without limitation the rights to use, copy, modify, merge, publish, | |
# distribute, sublicense, and/or sell copies of the Software, and to | |
# permit persons to whom the Software is furnished to do so, subject to | |
# the following conditions: | |
# | |
# The above copyright notice and this permission notice shall be | |
# included in all copies or substantial portions of the Software. | |
# | |
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, | |
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF | |
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND | |
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE | |
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION | |
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION | |
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. | |
def test_iso8601_parsing_wrong_patterns_with_raise | |
invalid_patterns = ["", "P", "PT", "P1YT", "T", "PW", "P1Y1W", "~P1Y", ".P1Y", "P1.5Y0.5M", "P1.5Y1M", "P1.5MT10.5S"] | |
invalid_patterns.each do |pattern| | |
assert_raise ActiveSupport::Duration::ISO8601Parser::ParsingError, pattern.inspect do | |
ActiveSupport::Duration.parse(pattern) | |
end | |
end | |
end | |
def test_iso8601_output | |
expectations = [ | |
["P1Y", 1.year ], | |
["P1W", 1.week ], | |
["P4W", 4.week ], | |
["P1Y7D", 1.year + 1.week ], | |
["P1Y1M21D", 1.year + 1.month + 3.week ], | |
["P1Y1M", 1.year + 1.month ], | |
["P1Y1M1D", 1.year + 1.month + 1.day ], | |
["P-1Y-1D", -1.year - 1.day ], | |
["P1Y-1DT-1S", 1.year - 1.day - 1.second ], # Parts with different signs are exists in PostgreSQL interval datatype. | |
["PT1S", 1.second ], | |
["PT1.4S", (1.4).seconds ], | |
["P1Y1M1DT1H", 1.year + 1.month + 1.day + 1.hour], | |
["PT0S", 0.minutes ], | |
["PT-0.2S", (-0.2).seconds ], | |
["PT1000000S", 1_000_000.seconds ], | |
] | |
expectations.each do |expected_output, duration| | |
assert_equal expected_output, duration.iso8601, expected_output.inspect | |
end | |
end | |
def test_iso8601_output_precision | |
expectations = [ | |
[nil, "P1Y1MT8.55S", 1.year + 1.month + (8.55).seconds ], | |
[0, "P1Y1MT9S", 1.year + 1.month + (8.55).seconds ], | |
[1, "P1Y1MT8.6S", 1.year + 1.month + (8.55).seconds ], | |
[2, "P1Y1MT8.55S", 1.year + 1.month + (8.55).seconds ], | |
[3, "P1Y1MT8.550S", 1.year + 1.month + (8.55).seconds ], | |
[nil, "PT1S", 1.second ], | |
[2, "PT1.00S", 1.second ], | |
[nil, "PT1.4S", (1.4).seconds ], | |
[0, "PT1S", (1.4).seconds ], | |
[1, "PT1.4S", (1.4).seconds ], | |
[5, "PT1.40000S", (1.4).seconds ], | |
] | |
expectations.each do |precision, expected_output, duration| | |
assert_equal expected_output, duration.iso8601(precision: precision), expected_output.inspect | |
end | |
end | |
def test_iso8601_output_and_reparsing | |
patterns = %w[ | |
P1Y P0.5Y P0,5Y P1Y1M P1Y0.5M P1Y0,5M P1Y1M1D P1Y1M0.5D P1Y1M0,5D P1Y1M1DT1H P1Y1M1DT0.5H P1Y1M1DT0,5H P1W +P1Y -P1Y P-1Y | |
P1Y1M1DT1H1M P1Y1M1DT1H0.5M P1Y1M1DT1H0,5M P1Y1M1DT1H1M1S P1Y1M1DT1H1M1.0S P1Y1M1DT1H1M1,0S P-1Y-2M3DT-4H-5M-6S | |
] | |
# That could be weird, but if we parse P1Y1M0.5D and output it to ISO 8601, we'll get P1Y1MT12.0H. | |
# So we check that initially parsed and reparsed duration added to time will result in the same time. | |
time = Time.current | |
patterns.each do |pattern| | |
duration = ActiveSupport::Duration.parse(pattern) | |
assert_equal time + duration, time + ActiveSupport::Duration.parse(duration.iso8601), pattern.inspect | |
end | |
end | |
def test_iso8601_parsing_across_spring_dst_boundary | |
with_env_tz eastern_time_zone do | |
with_tz_default "Eastern Time (US & Canada)" do | |
travel_to Time.utc(2016, 3, 11) do | |
assert_equal 604800, ActiveSupport::Duration.parse("P7D").to_i | |
assert_equal 604800, ActiveSupport::Duration.parse("P1W").to_i | |
end | |
end | |
end | |
end | |
def test_iso8601_parsing_across_autumn_dst_boundary | |
with_env_tz eastern_time_zone do | |
with_tz_default "Eastern Time (US & Canada)" do | |
travel_to Time.utc(2016, 11, 4) do | |
assert_equal 604800, ActiveSupport::Duration.parse("P7D").to_i | |
assert_equal 604800, ActiveSupport::Duration.parse("P1W").to_i | |
end | |
end | |
end | |
end | |
def test_iso8601_parsing_equivalence_with_numeric_extensions_over_long_periods | |
with_env_tz eastern_time_zone do | |
with_tz_default "Eastern Time (US & Canada)" do | |
assert_equal 3.months, ActiveSupport::Duration.parse("P3M") | |
assert_equal 3.months.to_i, ActiveSupport::Duration.parse("P3M").to_i | |
assert_equal 10.months, ActiveSupport::Duration.parse("P10M") | |
assert_equal 10.months.to_i, ActiveSupport::Duration.parse("P10M").to_i | |
assert_equal 3.years, ActiveSupport::Duration.parse("P3Y") | |
assert_equal 3.years.to_i, ActiveSupport::Duration.parse("P3Y").to_i | |
assert_equal 10.years, ActiveSupport::Duration.parse("P10Y") | |
assert_equal 10.years.to_i, ActiveSupport::Duration.parse("P10Y").to_i | |
end | |
end | |
end | |
def test_adding_durations_do_not_hold_prior_states | |
time = Time.parse("Nov 29, 2016") | |
# If the implementation adds and subtracts 3 months, the | |
# resulting date would have been in February so the day will | |
# change to the 29th. | |
d1 = 3.months - 3.months | |
d2 = 2.months - 2.months | |
assert_equal time + d1, time + d2 | |
end | |
# A Duration dumped to YAML must deserialize into an object that still
# behaves like a duration (integer coercion and arithmetic).
def test_durations_survive_yaml_serialization
payload = YAML.dump(10.minutes)
# Psych 4+ restricts YAML.load to safe types; use unsafe_load there so
# the Duration object graph can be revived. Older Psych lacks
# unsafe_load, hence the respond_to? branch.
d1 = YAML.respond_to?(:unsafe_load) ? YAML.unsafe_load(payload) : YAML.load(payload)
assert_equal 600, d1.to_i
assert_equal 660, (d1 + 60).to_i
end
def test_string_build_raises_error | |
error = assert_raises(TypeError) do | |
ActiveSupport::Duration.build("9") | |
end | |
assert_equal "can't build an ActiveSupport::Duration from a String", error.message | |
end | |
def test_non_numeric_build_raises_error | |
error = assert_raises(TypeError) do | |
ActiveSupport::Duration.build(nil) | |
end | |
assert_equal "can't build an ActiveSupport::Duration from a NilClass", error.message | |
end | |
# #variable? is false for fixed-length units (seconds/minutes/hours)
# and true as soon as any calendar-dependent part (days and larger)
# appears in the duration, regardless of the order of addition.
def test_variable
assert_not 12.seconds.variable?
assert_not 12.minutes.variable?
assert_not 12.hours.variable?
assert 12.days.variable?
assert 12.weeks.variable?
assert 12.months.variable?
assert 12.years.variable?
assert_not (12.hours + 12.minutes).variable?
assert (12.hours + 1.day).variable?
assert (1.day + 12.hours).variable?
end
# Every way of expressing "minus one second" — negative literal,
# multiplied build, unary minus, negative Scalar, negative build —
# must shift a Time by exactly the same amount.
def test_duration_symmetry
time = Time.parse("Dec 7, 2021")
expected_time = Time.parse("2021-12-06 23:59:59")
assert_equal expected_time, time + -1.second
assert_equal expected_time, time + ActiveSupport::Duration.build(1) * -1
assert_equal expected_time, time + -ActiveSupport::Duration.build(1)
assert_equal expected_time, time + ActiveSupport::Duration::Scalar.new(-1)
assert_equal expected_time, time + ActiveSupport::Duration.build(-1)
end
private | |
# IANA zone names are not available on Windows, so fall back to the
# POSIX-style "EST5EDT" specifier there.
def eastern_time_zone
  Gem.win_platform? ? "EST5EDT" : "America/New_York"
end
end |
# frozen_string_literal: true | |
require "active_support/time_with_zone" | |
module ActiveSupport
  # Prepended into Range so that iterating (each/step) a range that
  # begins with a TimeWithZone raises instead of looping over an
  # ill-defined sequence of zoned times.
  module EachTimeWithZone # :nodoc:
    def each(&block)
      ensure_iteration_allowed
      super
    end

    def step(n = 1, &block)
      ensure_iteration_allowed
      super
    end

    private
      # Reject iteration when the range's first element is a TimeWithZone.
      def ensure_iteration_allowed
        return unless first.is_a?(TimeWithZone)

        raise TypeError, "can't iterate from #{first.class}"
      end
  end
end

Range.prepend(ActiveSupport::EachTimeWithZone)
# frozen_string_literal: true | |
{
  en: {
    number: {
      nth: {
        # English ordinal suffix for a number. The "teens" (any value
        # whose last two digits are 11-13) always take "th"; otherwise
        # the final digit selects "st"/"nd"/"rd"/"th". Non-integers are
        # coerced with to_i and the sign is ignored.
        ordinals: lambda do |_key, options|
          abs_number = options[:number].to_i.abs

          if (11..13).cover?(abs_number % 100)
            "th"
          else
            case abs_number % 10
            when 1 then "st"
            when 2 then "nd"
            when 3 then "rd"
            else "th"
            end
          end
        end,

        # The number with its ordinal suffix appended, e.g. 1 => "1st".
        ordinalized: lambda do |_key, options|
          number = options[:number]
          "#{number}#{ActiveSupport::Inflector.ordinal(number)}"
        end
      }
    }
  }
}
# frozen_string_literal: true | |
# https://rails.lighthouseapp.com/projects/8994/tickets/6225-memcachestore-cant-deal-with-umlauts-and-special-characters | |
# The error is caused by character encodings that can't be compared with ASCII-8BIT regular expressions and by special | |
# characters like the umlaut in UTF-8. | |
module EncodedKeyCacheBehavior
  # One generated test per known Encoding: a "foo" key forced into that
  # encoding must survive every basic cache operation.
  Encoding.list.each do |encoding|
    define_method "test_#{encoding.name.underscore}_encoded_values" do
      assert_encoded_key_round_trips((+"foo").force_encoding(encoding))
    end
  end

  # Same round-trip for a key containing a multibyte UTF-8 character.
  def test_common_utf8_values
    assert_encoded_key_round_trips((+"\xC3\xBCmlaut").force_encoding(Encoding::UTF_8))
  end

  # Writing to the cache must not mutate the key's encoding.
  def test_retains_encoding
    key = (+"\xC3\xBCmlaut").force_encoding(Encoding::UTF_8)
    assert @cache.write(key, "1", raw: true)
    assert_equal Encoding::UTF_8, key.encoding
  end

  private
    # Shared assertions (previously duplicated between the generated
    # tests and test_common_utf8_values): the key must round-trip
    # through write/read/fetch/delete, fetch-with-block, increment and
    # decrement on the raw store.
    def assert_encoded_key_round_trips(key)
      assert @cache.write(key, "1", raw: true)
      assert_equal "1", @cache.read(key, raw: true)
      assert_equal "1", @cache.fetch(key, raw: true)
      assert @cache.delete(key)
      assert_equal "2", @cache.fetch(key, raw: true) { "2" }
      assert_equal 3, @cache.increment(key)
      assert_equal 2, @cache.decrement(key)
    end
end
# frozen_string_literal: true | |
require "active_support/core_ext/object/json" | |
require "active_support/core_ext/module/delegation" | |
module ActiveSupport
# Expose the JSON configuration switches directly on the ActiveSupport
# module; reads and writes are forwarded to JSON::Encoding below.
class << self
delegate :use_standard_json_time_format, :use_standard_json_time_format=,
:time_precision, :time_precision=,
:escape_html_entities_in_json, :escape_html_entities_in_json=,
:json_encoder, :json_encoder=,
to: :'ActiveSupport::JSON::Encoding'
end
module JSON
# Dumps objects in JSON (JavaScript Object Notation).
# See http://www.json.org for more info.
#
# ActiveSupport::JSON.encode({ team: 'rails', players: '36' })
# # => "{\"team\":\"rails\",\"players\":\"36\"}"
class << self
def encode(value, options = nil)
Encoding.json_encoder.new(options).encode(value)
end
alias_method :dump, :encode
end
module Encoding # :nodoc:
class JSONGemEncoder # :nodoc:
attr_reader :options
def initialize(options = nil)
@options = options || {}
end
# Encode the given object into a JSON string.
# A copy of the options hash is passed so the receiver's #as_json can
# mutate it without affecting this encoder's stored options.
def encode(value)
stringify jsonify value.as_json(options.dup)
end
private
# Rails does more escaping than the JSON gem natively does (we
# escape \u2028 and \u2029 and optionally >, <, & to work around
# certain browser problems).
ESCAPED_CHARS = {
"\u2028" => '\u2028',
"\u2029" => '\u2029',
">" => '\u003e',
"<" => '\u003c',
"&" => '\u0026',
}
ESCAPE_REGEX_WITH_HTML_ENTITIES = /[\u2028\u2029><&]/u
ESCAPE_REGEX_WITHOUT_HTML_ENTITIES = /[\u2028\u2029]/u
# This class wraps all the strings we see and does the extra escaping
class EscapedString < String # :nodoc:
def to_json(*)
if Encoding.escape_html_entities_in_json
# gsub! returns nil when nothing was replaced, so the escaped
# string is returned explicitly on the next line.
s = super
s.gsub! ESCAPE_REGEX_WITH_HTML_ENTITIES, ESCAPED_CHARS
s
else
s = super
s.gsub! ESCAPE_REGEX_WITHOUT_HTML_ENTITIES, ESCAPED_CHARS
s
end
end
def to_s
self
end
end
# Mark these as private so we don't leak encoding-specific constructs
private_constant :ESCAPED_CHARS, :ESCAPE_REGEX_WITH_HTML_ENTITIES,
:ESCAPE_REGEX_WITHOUT_HTML_ENTITIES, :EscapedString
# Convert an object into a "JSON-ready" representation composed of
# primitives like Hash, Array, String, Numeric,
# and +true+/+false+/+nil+.
# Recursively calls #as_json to the object to recursively build a
# fully JSON-ready object.
#
# This allows developers to implement #as_json without having to
# worry about what base types of objects they are allowed to return
# or having to remember to call #as_json recursively.
#
# Note: the +options+ hash passed to +object.to_json+ is only passed
# to +object.as_json+, not any of this method's recursive +#as_json+
# calls.
def jsonify(value)
case value
when String
EscapedString.new(value)
when Numeric, NilClass, TrueClass, FalseClass
# Already a JSON primitive; as_json normalizes it (e.g. NaN handling).
value.as_json
when Hash
# Keys are jsonified too, so non-String keys become escaped strings.
result = {}
value.each do |k, v|
result[jsonify(k)] = jsonify(v)
end
result
when Array
value.map { |v| jsonify(v) }
else
# Anything else is reduced via its #as_json and re-processed.
jsonify value.as_json
end
end
# Encode a "jsonified" Ruby data structure using the JSON gem.
# quirks_mode permits top-level scalars; max_nesting: false lifts the
# gem's default nesting-depth limit for deeply nested structures.
def stringify(jsonified)
::JSON.generate(jsonified, quirks_mode: true, max_nesting: false)
end
end
class << self
# If true, use ISO 8601 format for dates and times. Otherwise, fall back
# to the Active Support legacy format.
attr_accessor :use_standard_json_time_format
# If true, encode >, <, & as escaped unicode sequences (e.g. > as \u003e)
# as a safety measure.
attr_accessor :escape_html_entities_in_json
# Sets the precision of encoded time values.
# Defaults to 3 (equivalent to millisecond precision)
attr_accessor :time_precision
# Sets the encoder used by Rails to encode Ruby objects into JSON strings
# in +Object#to_json+ and +ActiveSupport::JSON.encode+.
attr_accessor :json_encoder
end
self.use_standard_json_time_format = true
self.escape_html_entities_in_json = true
self.json_encoder = JSONGemEncoder
self.time_precision = 3
end
end
end
# frozen_string_literal: true | |
require "securerandom" | |
require_relative "../abstract_unit" | |
require "active_support/core_ext/string/inflections" | |
require "active_support/json" | |
require "active_support/time" | |
require_relative "../time_zone_test_helpers" | |
require_relative "../json/encoding_test_cases" | |
class TestJSONEncoding < ActiveSupport::TestCase | |
include TimeZoneTestHelpers | |
# Normalizes a flat JSON object string by sorting its "key":value
# members, so hash-encoding assertions don't depend on key order.
# Anything that is not a {...} object passes through untouched.
# (Naive comma split — only valid for non-nested objects.)
def sorted_json(json)
  return json unless json.start_with?("{") && json.end_with?("}")

  "{#{json[1..-2].split(",").sort.join(",")}}"
end
# Generates one test method per constant in JSONTest::EncodingTestCases.
# [0..-6] strips the trailing "Tests" suffix from the constant name, so
# e.g. StandardDateTests => test_standard_date. Constants whose name
# contains "Standard" run with ISO 8601 time formatting on and HTML
# entity escaping off; all others use the inverse configuration.
JSONTest::EncodingTestCases.constants.each do |class_tests|
define_method("test_#{class_tests[0..-6].underscore}") do
prev = ActiveSupport.use_standard_json_time_format
standard_class_tests = /Standard/.match?(class_tests)
ActiveSupport.escape_html_entities_in_json = !standard_class_tests
ActiveSupport.use_standard_json_time_format = standard_class_tests
JSONTest::EncodingTestCases.const_get(class_tests).each do |pair|
assert_equal pair.last, sorted_json(ActiveSupport::JSON.encode(pair.first))
end
ensure
ActiveSupport.escape_html_entities_in_json = false
ActiveSupport.use_standard_json_time_format = prev
end
end
def test_process_status | |
rubinius_skip "https://github.com/rubinius/rubinius/issues/3334" | |
# There doesn't seem to be a good way to get a handle on a Process::Status object without actually | |
# creating a child process, hence this to populate $? | |
system("not_a_real_program_#{SecureRandom.hex}") | |
assert_equal %({"exitstatus":#{$?.exitstatus},"pid":#{$?.pid}}), ActiveSupport::JSON.encode($?) | |
end | |
# Hashes encode with string keys regardless of the original key type
# (symbols, strings, integers); multi-key output is order-normalized
# through sorted_json before comparison.
def test_hash_encoding
assert_equal %({\"a\":\"b\"}), ActiveSupport::JSON.encode(a: :b)
assert_equal %({\"a\":1}), ActiveSupport::JSON.encode("a" => 1)
assert_equal %({\"a\":[1,2]}), ActiveSupport::JSON.encode("a" => [1, 2])
assert_equal %({"1":2}), ActiveSupport::JSON.encode(1 => 2)
assert_equal %({\"a\":\"b\",\"c\":\"d\"}), sorted_json(ActiveSupport::JSON.encode(a: :b, c: :d))
end
def test_hash_keys_encoding | |
ActiveSupport.escape_html_entities_in_json = true | |
assert_equal "{\"\\u003c\\u003e\":\"\\u003c\\u003e\"}", ActiveSupport::JSON.encode("<>" => "<>") | |
ensure | |
ActiveSupport.escape_html_entities_in_json = false | |
end | |
def test_utf8_string_encoded_properly | |
result = ActiveSupport::JSON.encode("€2.99") | |
assert_equal '"€2.99"', result | |
assert_equal(Encoding::UTF_8, result.encoding) | |
result = ActiveSupport::JSON.encode("✎☺") | |
assert_equal '"✎☺"', result | |
assert_equal(Encoding::UTF_8, result.encoding) | |
end | |
def test_non_utf8_string_transcodes | |
s = "二".encode("Shift_JIS") | |
result = ActiveSupport::JSON.encode(s) | |
assert_equal '"二"', result | |
assert_equal Encoding::UTF_8, result.encoding | |
end | |
def test_wide_utf8_chars | |
w = "𠜎" | |
result = ActiveSupport::JSON.encode(w) | |
assert_equal '"𠜎"', result | |
end | |
def test_wide_utf8_roundtrip | |
hash = { string: "𐒑" } | |
json = ActiveSupport::JSON.encode(hash) | |
decoded_hash = ActiveSupport::JSON.decode(json) | |
assert_equal "𐒑", decoded_hash["string"] | |
end | |
def test_hash_key_identifiers_are_always_quoted | |
values = { 0 => 0, 1 => 1, :_ => :_, "$" => "$", "a" => "a", :A => :A, :A0 => :A0, "A0B" => "A0B" } | |
assert_equal %w( "$" "A" "A0" "A0B" "_" "a" "0" "1" ).sort, object_keys(ActiveSupport::JSON.encode(values)) | |
end | |
def test_hash_should_allow_key_filtering_with_only | |
assert_equal %({"a":1}), ActiveSupport::JSON.encode({ "a" => 1, :b => 2, :c => 3 }, { only: "a" }) | |
end | |
def test_hash_should_allow_key_filtering_with_except | |
assert_equal %({"b":2}), ActiveSupport::JSON.encode({ "foo" => "bar", :b => 2, :c => 3 }, { except: ["foo", :c] }) | |
end | |
def test_time_to_json_includes_local_offset | |
with_standard_json_time_format(true) do | |
with_env_tz "US/Eastern" do | |
assert_equal %("2005-02-01T15:15:10.000-05:00"), ActiveSupport::JSON.encode(Time.local(2005, 2, 1, 15, 15, 10)) | |
end | |
end | |
end | |
def test_hash_with_time_to_json | |
with_standard_json_time_format(false) do | |
assert_equal '{"time":"2009/01/01 00:00:00 +0000"}', { time: Time.utc(2009) }.to_json | |
end | |
end | |
def test_nested_hash_with_float | |
assert_nothing_raised do | |
hash = { | |
"CHI" => { | |
display_name: "chicago", | |
latitude: 123.234 | |
} | |
} | |
ActiveSupport::JSON.encode(hash) | |
end | |
end | |
def test_hash_like_with_options | |
h = JSONTest::Hashlike.new | |
json = h.to_json only: [:foo] | |
assert_equal({ "foo" => "hello" }, JSON.parse(json)) | |
end | |
def test_object_to_json_with_options | |
obj = Object.new | |
obj.instance_variable_set :@foo, "hello" | |
obj.instance_variable_set :@bar, "world" | |
json = obj.to_json only: ["foo"] | |
assert_equal({ "foo" => "hello" }, JSON.parse(json)) | |
end | |
def test_struct_to_json_with_options | |
struct = Struct.new(:foo, :bar).new | |
struct.foo = "hello" | |
struct.bar = "world" | |
json = struct.to_json only: [:foo] | |
assert_equal({ "foo" => "hello" }, JSON.parse(json)) | |
end | |
def test_struct_to_json_with_options_nested | |
klass = Struct.new(:foo, :bar) | |
struct = klass.new "hello", "world" | |
parent_struct = klass.new struct, "world" | |
json = parent_struct.to_json only: [:foo] | |
assert_equal({ "foo" => { "foo" => "hello" } }, JSON.parse(json)) | |
end | |
def test_hash_should_pass_encoding_options_to_children_in_as_json | |
person = { | |
name: "John", | |
address: { | |
city: "London", | |
country: "UK" | |
} | |
} | |
json = person.as_json only: [:address, :city] | |
assert_equal({ "address" => { "city" => "London" } }, json) | |
end | |
def test_hash_should_pass_encoding_options_to_children_in_to_json | |
person = { | |
name: "John", | |
address: { | |
city: "London", | |
country: "UK" | |
} | |
} | |
json = person.to_json only: [:address, :city] | |
assert_equal(%({"address":{"city":"London"}}), json) | |
end | |
def test_array_should_pass_encoding_options_to_children_in_as_json | |
people = [ | |
{ name: "John", address: { city: "London", country: "UK" } }, | |
{ name: "Jean", address: { city: "Paris", country: "France" } } | |
] | |
json = people.as_json only: [:address, :city] | |
expected = [ | |
{ "address" => { "city" => "London" } }, | |
{ "address" => { "city" => "Paris" } } | |
] | |
assert_equal(expected, json) | |
end | |
def test_array_should_pass_encoding_options_to_children_in_to_json | |
people = [ | |
{ name: "John", address: { city: "London", country: "UK" } }, | |
{ name: "Jean", address: { city: "Paris", country: "France" } } | |
] | |
json = people.to_json only: [:address, :city] | |
assert_equal(%([{"address":{"city":"London"}},{"address":{"city":"Paris"}}]), json) | |
end | |
# An Enumerable built on BasicObject: it deliberately lacks Object's
# methods, so the JSON encoder can only rely on #each.
People = Class.new(BasicObject) do
include Enumerable
def initialize
@people = [
{ name: "John", address: { city: "London", country: "UK" } },
{ name: "Jean", address: { city: "Paris", country: "France" } }
]
end
# Yields each person when a block is given. @people.each returns the
# array itself, so the trailing blockless .each hands back an
# Enumerator (used by the `People.new.each.as_json` tests).
def each(*, &blk)
@people.each do |p|
yield p if blk
p
end.each
end
end
def test_enumerable_should_generate_json_with_as_json | |
json = People.new.as_json only: [:address, :city] | |
expected = [ | |
{ "address" => { "city" => "London" } }, | |
{ "address" => { "city" => "Paris" } } | |
] | |
assert_equal(expected, json) | |
end | |
def test_enumerable_should_generate_json_with_to_json | |
json = People.new.to_json only: [:address, :city] | |
assert_equal(%([{"address":{"city":"London"}},{"address":{"city":"Paris"}}]), json) | |
end | |
def test_enumerable_should_pass_encoding_options_to_children_in_as_json | |
json = People.new.each.as_json only: [:address, :city] | |
expected = [ | |
{ "address" => { "city" => "London" } }, | |
{ "address" => { "city" => "Paris" } } | |
] | |
assert_equal(expected, json) | |
end | |
def test_enumerable_should_pass_encoding_options_to_children_in_to_json | |
json = People.new.each.to_json only: [:address, :city] | |
assert_equal(%([{"address":{"city":"London"}},{"address":{"city":"Paris"}}]), json) | |
end | |
# Fixture whose #as_json deliberately mutates the options hash it
# receives; the surrounding tests assert that this mutation does not
# leak into the encoding of sibling values. Do not "fix" the mutation.
class CustomWithOptions
attr_accessor :foo, :bar
def as_json(options = {})
options[:only] = %w(foo bar)
super(options)
end
end
def test_hash_to_json_should_not_keep_options_around | |
f = CustomWithOptions.new | |
f.foo = "hello" | |
f.bar = "world" | |
hash = { "foo" => f, "other_hash" => { "foo" => "other_foo", "test" => "other_test" } } | |
assert_equal({ "foo" => { "foo" => "hello", "bar" => "world" }, | |
"other_hash" => { "foo" => "other_foo", "test" => "other_test" } }, ActiveSupport::JSON.decode(hash.to_json)) | |
end | |
# Mutating options inside one element's as_json must not leak into sibling
# elements of the same array.
def test_array_to_json_should_not_keep_options_around
  custom = CustomWithOptions.new
  custom.foo = "hello"
  custom.bar = "world"

  input = [custom, { "foo" => "other_foo", "test" => "other_test" }]
  expected = [{ "foo" => "hello", "bar" => "world" },
              { "foo" => "other_foo", "test" => "other_test" }]
  assert_equal(expected, ActiveSupport::JSON.decode(input.to_json))
end
# Echoes back whatever options the encoder passed in (:default when none),
# letting tests observe exactly what as_json receives.
class OptionsTest
  def as_json(options = :default)
    options
  end
end
# Hash#as_json with no options passes no options down to its values.
def test_hash_as_json_without_options
  assert_equal({ "foo" => :default }, { foo: OptionsTest.new }.as_json)
end
# Array#as_json with no options passes no options down to its elements.
def test_array_as_json_without_options
  assert_equal([:default], [OptionsTest.new].as_json)
end
# Structs encode as JSON objects keyed by their members, recursively for
# nested structs. Struct.new with a String name registers Struct::<Name>,
# so these class definitions are process-global side effects of the test.
def test_struct_encoding
Struct.new("UserNameAndEmail", :name, :email)
Struct.new("UserNameAndDate", :name, :date)
Struct.new("Custom", :name, :sub)
user_email = Struct::UserNameAndEmail.new "David", "sample@example.com"
user_birthday = Struct::UserNameAndDate.new "David", Date.new(2010, 01, 01)
custom = Struct::Custom.new "David", user_birthday
json_strings = ""
json_string_and_date = ""
json_custom = ""
assert_nothing_raised do
json_strings = user_email.to_json
json_string_and_date = user_birthday.to_json
json_custom = custom.to_json
end
assert_equal({ "name" => "David",
"sub" => {
"name" => "David",
"date" => "2010-01-01" } }, ActiveSupport::JSON.decode(json_custom))
assert_equal({ "name" => "David", "email" => "sample@example.com" },
ActiveSupport::JSON.decode(json_strings))
assert_equal({ "name" => "David", "date" => "2010-01-01" },
ActiveSupport::JSON.decode(json_string_and_date))
end
# nil, true and false serialize as their literal JSON counterparts.
def test_nil_true_and_false_represented_as_themselves
  assert_equal true, true.as_json
  assert_equal false, false.as_json
  assert_nil nil.as_json
end
# Hash subclass that records whether as_json was invoked, so tests can assert
# that the JSON gem's own generators bypass Active Support's as_json hook.
class HashWithAsJson < Hash
# nil until as_json runs; set to true by the override below.
attr_accessor :as_json_called
def initialize(*)
super
end
def as_json(options = {})
@as_json_called = true
super
end
end
# JSON.dump must use the JSON gem's generator, never Active Support's as_json.
def test_json_gem_dump_by_passing_active_support_encoder
  hash = HashWithAsJson.new
  hash[:foo] = "hello"
  hash[:bar] = "world"
  assert_equal %({"foo":"hello","bar":"world"}), JSON.dump(hash)
  assert_nil hash.as_json_called
end
# JSON.generate must use the JSON gem's generator, never as_json.
def test_json_gem_generate_by_passing_active_support_encoder
  hash = HashWithAsJson.new
  hash[:foo] = "hello"
  hash[:bar] = "world"
  assert_equal %({"foo":"hello","bar":"world"}), JSON.generate(hash)
  assert_nil hash.as_json_called
end
# JSON.pretty_generate must use the JSON gem's generator, never as_json.
# The heredoc pins the gem's exact pretty-printed output, so its body must
# not be touched.
def test_json_gem_pretty_generate_by_passing_active_support_encoder
h = HashWithAsJson.new
h[:foo] = "hello"
h[:bar] = "world"
assert_equal <<EXPECTED.chomp, JSON.pretty_generate(h)
{
"foo": "hello",
"bar": "world"
}
EXPECTED
assert_nil h.as_json_called
end
# With use_standard_json_time_format disabled, TimeWithZone encodes in the
# legacy "YYYY/MM/DD HH:MM:SS ±ZZZZ" format.
def test_twz_to_json_with_use_standard_json_time_format_config_set_to_false
  with_standard_json_time_format(false) do
    eastern = ActiveSupport::TimeZone["Eastern Time (US & Canada)"]
    twz = ActiveSupport::TimeWithZone.new(Time.utc(2000), eastern)
    assert_equal '"1999/12/31 19:00:00 -0500"', ActiveSupport::JSON.encode(twz)
  end
end
# With use_standard_json_time_format enabled, TimeWithZone encodes as ISO 8601
# with millisecond precision.
def test_twz_to_json_with_use_standard_json_time_format_config_set_to_true
  with_standard_json_time_format(true) do
    eastern = ActiveSupport::TimeZone["Eastern Time (US & Canada)"]
    twz = ActiveSupport::TimeWithZone.new(Time.utc(2000), eastern)
    assert_equal '"1999-12-31T19:00:00.000-05:00"', ActiveSupport::JSON.encode(twz)
  end
end
# A time_precision of 0 drops the fractional seconds from the ISO 8601 output.
def test_twz_to_json_with_custom_time_precision
  with_standard_json_time_format(true) do
    with_time_precision(0) do
      eastern = ActiveSupport::TimeZone["Eastern Time (US & Canada)"]
      twz = ActiveSupport::TimeWithZone.new(Time.utc(2000), eastern)
      assert_equal '"1999-12-31T19:00:00-05:00"', ActiveSupport::JSON.encode(twz)
    end
  end
end
# Time honors the configured time_precision (no fractional seconds at 0).
def test_time_to_json_with_custom_time_precision
  with_standard_json_time_format(true) do
    with_time_precision(0) do
      assert_equal '"2000-01-01T00:00:00Z"', ActiveSupport::JSON.encode(Time.utc(2000))
    end
  end
end
# DateTime honors the configured time_precision (no fractional seconds at 0).
def test_datetime_to_json_with_custom_time_precision
  with_standard_json_time_format(true) do
    with_time_precision(0) do
      assert_equal '"2000-01-01T00:00:00+00:00"', ActiveSupport::JSON.encode(DateTime.new(2000))
    end
  end
end
# A TimeWithZone wrapping a DateTime encodes the same way as one wrapping a Time.
def test_twz_to_json_when_wrapping_a_date_time
  eastern = ActiveSupport::TimeZone["Eastern Time (US & Canada)"]
  twz = ActiveSupport::TimeWithZone.new(DateTime.new(2000), eastern)
  assert_equal '"1999-12-31T19:00:00.000-05:00"', ActiveSupport::JSON.encode(twz)
end
# Exceptions encode as their message string.
def test_exception_to_json
  assert_equal '"foo"', ActiveSupport::JSON.encode(Exception.new("foo"))
end
# Fixture whose as_json yields a value JSON cannot represent (Infinity);
# the encoder is expected to turn it into null.
class InfiniteNumber
def as_json(options = nil)
{ "number" => Float::INFINITY }
end
end
# Infinity is not valid JSON and must be encoded as null.
def test_to_json_works_when_as_json_returns_infinite_number
  assert_equal '{"number":null}', InfiniteNumber.new.to_json
end
# Fixture whose as_json yields a value JSON cannot represent (NaN);
# the encoder is expected to turn it into null.
class NaNNumber
def as_json(options = nil)
{ "number" => Float::NAN }
end
end
# NaN is not valid JSON and must be encoded as null.
def test_to_json_works_when_as_json_returns_NaN_number
  assert_equal '{"number":null}', NaNNumber.new.to_json
end
# IO objects encode as the JSON form of their String representation.
def test_to_json_works_on_io_objects
  io = STDOUT
  assert_equal io.to_s.to_json, io.to_json
end
private | |
# Extracts the sorted member names (still quoted) from a JSON object string,
# so assertions can ignore key ordering.
def object_keys(json_object)
  inner = json_object[1..-2]
  inner.scan(/([^{}:,\s]+):/).flatten.sort
end
# Runs the block with ActiveSupport.use_standard_json_time_format overridden,
# restoring the previous setting afterwards (even on failure).
def with_standard_json_time_format(boolean = true)
  previous = ActiveSupport.use_standard_json_time_format
  ActiveSupport.use_standard_json_time_format = boolean
  yield
ensure
  ActiveSupport.use_standard_json_time_format = previous
end
# Runs the block with the JSON encoder's time_precision overridden, restoring
# the previous value afterwards (even on failure).
def with_time_precision(value)
  previous = ActiveSupport::JSON::Encoding.time_precision
  ActiveSupport::JSON::Encoding.time_precision = value
  yield
ensure
  ActiveSupport::JSON::Encoding.time_precision = previous
end
end |
# frozen_string_literal: true | |
require "bigdecimal" | |
require "date" | |
require "time" | |
require "pathname" | |
require "uri" | |
# Shared fixtures for the JSON encoding tests: plain objects, hash-likes,
# custom as_json implementations, and tables mapping Ruby values to their
# expected JSON representation.
module JSONTest
# Plain object with two ivars; exercises the generic object encoding path.
class Foo
def initialize(a, b)
@a, @b = a, b
end
end
# Duck-typed hash: responds only to to_hash.
class Hashlike
def to_hash
{ foo: "hello", bar: "world" }
end
end
# Returns a canned value from as_json regardless of options.
class Custom
def initialize(serialized)
@serialized = serialized
end
def as_json(options = nil)
@serialized
end
end
# Struct carrying an extra instance variable that must NOT leak into the JSON.
MyStruct = Struct.new(:name, :value) do
def initialize(*)
@unused = "unused instance variable"
super
end
end
# Each table below is a list of [input value, expected JSON string] pairs.
module EncodingTestCases
TrueTests = [[ true, %(true) ]]
FalseTests = [[ false, %(false) ]]
NilTests = [[ nil, %(null) ]]
# Infinity and NaN are not representable in JSON and encode as null.
NumericTests = [[ 1, %(1) ],
[ 2.5, %(2.5) ],
[ 0.0 / 0.0, %(null) ],
[ 1.0 / 0.0, %(null) ],
[ -1.0 / 0.0, %(null) ],
[ BigDecimal("0.0") / BigDecimal("0.0"), %(null) ],
[ BigDecimal("2.5"), %("#{BigDecimal('2.5')}") ]]
# HTML-unsafe characters (<, >, &) are escaped by default.
StringTests = [[ "this is the <string>", %("this is the \\u003cstring\\u003e")],
[ 'a "string" with quotes & an ampersand', %("a \\"string\\" with quotes \\u0026 an ampersand") ],
[ "http://test.host/posts/1", %("http://test.host/posts/1")],
[ "Control characters: \x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\u2028\u2029",
%("Control characters: \\u0000\\u0001\\u0002\\u0003\\u0004\\u0005\\u0006\\u0007\\b\\t\\n\\u000b\\f\\r\\u000e\\u000f\\u0010\\u0011\\u0012\\u0013\\u0014\\u0015\\u0016\\u0017\\u0018\\u0019\\u001a\\u001b\\u001c\\u001d\\u001e\\u001f\\u2028\\u2029") ]]
ArrayTests = [[ ["a", "b", "c"], %([\"a\",\"b\",\"c\"]) ],
[ [1, "a", :b, nil, false], %([1,\"a\",\"b\",null,false]) ]]
HashTests = [[ { foo: "bar" }, %({\"foo\":\"bar\"}) ],
[ { 1 => 1, 2 => "a", 3 => :b, 4 => nil, 5 => false }, %({\"1\":1,\"2\":\"a\",\"3\":\"b\",\"4\":null,\"5\":false}) ]]
RangeTests = [[ 1..2, %("1..2")],
[ 1...2, %("1...2")],
[ 1.5..2.5, %("1.5..2.5")]]
SymbolTests = [[ :a, %("a") ],
[ :this, %("this") ],
[ :"a b", %("a b") ]]
ModuleTests = [[ Module, %("Module") ],
[ Class, %("Class") ],
[ ActiveSupport, %("ActiveSupport") ],
[ ActiveSupport::MessageEncryptor, %("ActiveSupport::MessageEncryptor") ]]
ObjectTests = [[ Foo.new(1, 2), %({\"a\":1,\"b\":2}) ]]
HashlikeTests = [[ Hashlike.new, %({\"bar\":\"world\",\"foo\":\"hello\"}) ]]
StructTests = [[ MyStruct.new(:foo, "bar"), %({\"name\":\"foo\",\"value\":\"bar\"}) ],
[ MyStruct.new(nil, nil), %({\"name\":null,\"value\":null}) ]]
CustomTests = [[ Custom.new("custom"), '"custom"' ],
[ Custom.new(nil), "null" ],
[ Custom.new(:a), '"a"' ],
[ Custom.new([ :foo, "bar" ]), '["foo","bar"]' ],
[ Custom.new(foo: "hello", bar: "world"), '{"bar":"world","foo":"hello"}' ],
[ Custom.new(Hashlike.new), '{"bar":"world","foo":"hello"}' ],
[ Custom.new(Custom.new(Custom.new(:a))), '"a"' ]]
RegexpTests = [[ /^a/, '"(?-mix:^a)"' ], [/^\w{1,2}[a-z]+/ix, '"(?ix-m:^\\\\w{1,2}[a-z]+)"']]
URITests = [[ URI.parse("http://example.com"), %("http://example.com") ]]
PathnameTests = [[ Pathname.new("lib/index.rb"), %("lib/index.rb") ]]
IPAddrTests = [[ IPAddr.new("127.0.0.1"), %("127.0.0.1") ]]
# Legacy (non-ISO) formats, used when use_standard_json_time_format is false.
DateTests = [[ Date.new(2005, 2, 1), %("2005/02/01") ]]
TimeTests = [[ Time.utc(2005, 2, 1, 15, 15, 10), %("2005/02/01 15:15:10 +0000") ]]
DateTimeTests = [[ DateTime.civil(2005, 2, 1, 15, 15, 10), %("2005/02/01 15:15:10 +0000") ]]
# ISO 8601 formats, used when use_standard_json_time_format is true.
StandardDateTests = [[ Date.new(2005, 2, 1), %("2005-02-01") ]]
StandardTimeTests = [[ Time.utc(2005, 2, 1, 15, 15, 10), %("2005-02-01T15:15:10.000Z") ]]
StandardDateTimeTests = [[ DateTime.civil(2005, 2, 1, 15, 15, 10), %("2005-02-01T15:15:10.000+00:00") ]]
StandardStringTests = [[ "this is the <string>", %("this is the <string>")]]
end
end
# frozen_string_literal: true | |
require "yaml" | |
require "active_support/encrypted_file" | |
require "active_support/ordered_options" | |
require "active_support/core_ext/object/inclusion" | |
require "active_support/core_ext/module/delegation" | |
module ActiveSupport
# Reads and writes encrypted YAML configuration files (e.g. Rails
# credentials). Parsed values are exposed both Hash-style ([] / fetch, via
# delegation to +config+) and as methods (via InheritableOptions).
class EncryptedConfiguration < EncryptedFile
delegate :[], :fetch, to: :config
delegate_missing_to :options
def initialize(config_path:, key_path:, env_key:, raise_if_missing_key:)
super content_path: config_path, key_path: key_path,
env_key: env_key, raise_if_missing_key: raise_if_missing_key
end
# Allow a config to be started without a file present
def read
super
rescue ActiveSupport::EncryptedFile::MissingContentError
""
end
# Parses the new contents as YAML first, so invalid YAML raises before
# anything is encrypted and written to disk.
def write(contents)
deserialize(contents)
super
end
# The decrypted configuration as a Hash with deeply symbolized keys (memoized).
def config
@config ||= deserialize(read).deep_symbolize_keys
end
private
# Recursively wraps nested hashes in InheritableOptions to enable method access.
def deep_transform(hash)
return hash unless hash.is_a?(Hash)
h = ActiveSupport::InheritableOptions.new
hash.each do |k, v|
h[k] = deep_transform(v)
end
h
end
def options
@options ||= ActiveSupport::InheritableOptions.new(deep_transform(config))
end
# unsafe_load is deliberate: the content was encrypted by us and is trusted.
# Comment-only or empty files deserialize to {} (via presence).
def deserialize(config)
doc = YAML.respond_to?(:unsafe_load) ? YAML.unsafe_load(config) : YAML.load(config)
doc.presence || {}
end
end
end
# frozen_string_literal: true | |
require_relative "abstract_unit" | |
require "active_support/encrypted_configuration" | |
# Tests for ActiveSupport::EncryptedConfiguration: key lookup (ENV vs key
# file), Hash-style and method-style access, round-tripping via change, and
# error behavior for invalid YAML and bang-method access.
class EncryptedConfigurationTest < ActiveSupport::TestCase
setup do
@tmpdir = Dir.mktmpdir("config-")
@credentials_config_path = File.join(@tmpdir, "credentials.yml.enc")
@credentials_key_path = File.join(@tmpdir, "master.key")
File.write(@credentials_key_path, ActiveSupport::EncryptedConfiguration.generate_key)
@credentials = ActiveSupport::EncryptedConfiguration.new(
config_path: @credentials_config_path, key_path: @credentials_key_path,
env_key: "RAILS_MASTER_KEY", raise_if_missing_key: true
)
end
teardown do
FileUtils.rm_rf @tmpdir
end
test "reading configuration by env key" do
# Remove the key file so the ENV key is the only way to decrypt.
FileUtils.rm_rf @credentials_key_path
begin
ENV["RAILS_MASTER_KEY"] = ActiveSupport::EncryptedConfiguration.generate_key
@credentials.write({ something: { good: true, bad: false } }.to_yaml)
assert @credentials[:something][:good]
assert_not @credentials.dig(:something, :bad)
assert_nil @credentials.fetch(:nothing, nil)
ensure
ENV["RAILS_MASTER_KEY"] = nil
end
end
test "reading configuration by key file" do
@credentials.write({ something: { good: true, bad: false, nested: { foo: "bar" } } }.to_yaml)
assert @credentials.something[:good]
assert_not @credentials.something[:bad]
assert @credentials.something.good
assert_not @credentials.something.bad
assert_equal "bar", @credentials.dig(:something, :nested, :foo)
assert_equal "bar", @credentials.something.nested.foo
assert_equal [:good, :bad, :nested], @credentials.something.keys
assert_equal ({ good: true, bad: false, nested: { foo: "bar" } }), @credentials.something
end
test "reading comment-only configuration" do
@credentials.write("# comment")
assert_equal @credentials.config, {}
end
test "change configuration by key file" do
@credentials.write({ something: { good: true } }.to_yaml)
@credentials.change do |config_file|
config = YAML.load(config_file.read)
config_file.write config.merge(new: "things").to_yaml
end
assert @credentials.something[:good]
assert_equal "things", @credentials[:new]
end
test "raise error when writing an invalid format value" do
# The undefined alias *login makes this invalid YAML.
assert_raise(Psych::SyntaxError) do
@credentials.change do |config_file|
config_file.write "login: *login\n username: dummy"
end
end
end
test "raises key error when accessing config via bang method" do
assert_raise(KeyError) { @credentials.something! }
end
end
# frozen_string_literal: true | |
require "pathname" | |
require "tmpdir" | |
require "active_support/message_encryptor" | |
module ActiveSupport
# Reads and writes files encrypted with AES-128-GCM through MessageEncryptor.
# The encryption key is looked up first in ENV[env_key], then in the key file.
class EncryptedFile
class MissingContentError < RuntimeError
def initialize(content_path)
super "Missing encrypted content file in #{content_path}."
end
end
class MissingKeyError < RuntimeError
def initialize(key_path:, env_key:)
super \
"Missing encryption key to decrypt file with. " +
"Ask your team for your master key and write it to #{key_path} or put it in the ENV['#{env_key}']."
end
end
class InvalidKeyLengthError < RuntimeError
def initialize
super "Encryption key must be exactly #{EncryptedFile.expected_key_length} characters."
end
end
CIPHER = "aes-128-gcm"
# Returns a hex-encoded random key of the length required by CIPHER.
def self.generate_key
SecureRandom.hex(ActiveSupport::MessageEncryptor.key_len(CIPHER))
end
def self.expected_key_length # :nodoc:
@expected_key_length ||= generate_key.length
end
attr_reader :content_path, :key_path, :env_key, :raise_if_missing_key
def initialize(content_path:, key_path:, env_key:, raise_if_missing_key:)
# Resolve symlinks up front so writes replace the link's target rather than
# the link itself. realdirpath (instead of realpath) tolerates a dead
# symlink: the link is followed even when its target does not exist yet,
# so the first write can create the target file instead of raising ENOENT.
@content_path = Pathname.new(content_path).yield_self { |path| path.symlink? ? path.realdirpath : path }
@key_path = Pathname.new(key_path)
@env_key, @raise_if_missing_key = env_key, raise_if_missing_key
end
# The encryption key: ENV[env_key] wins over the key file; raises
# MissingKeyError (when configured to) if neither is present.
def key
read_env_key || read_key_file || handle_missing_key
end
# Returns the decrypted contents; raises MissingContentError when either the
# file or the key is absent.
def read
if !key.nil? && content_path.exist?
decrypt content_path.binread
else
raise MissingContentError, content_path
end
end
# Encrypts and writes contents via a temp file + rename so readers never see
# a partially written file.
def write(contents)
IO.binwrite "#{content_path}.tmp", encrypt(contents)
FileUtils.mv "#{content_path}.tmp", content_path
end
# Yields a temp file holding the decrypted contents for in-place editing.
def change(&block)
writing read, &block
end
private
# Round-trips contents through a temp file in Dir.tmpdir, re-encrypting only
# if the block actually changed them; the temp file is always removed.
def writing(contents)
tmp_file = "#{Process.pid}.#{content_path.basename.to_s.chomp('.enc')}"
tmp_path = Pathname.new File.join(Dir.tmpdir, tmp_file)
tmp_path.binwrite contents
yield tmp_path
updated_contents = tmp_path.binread
write(updated_contents) if updated_contents != contents
ensure
FileUtils.rm(tmp_path) if tmp_path&.exist?
end
def encrypt(contents)
check_key_length
encryptor.encrypt_and_sign contents
end
def decrypt(contents)
encryptor.decrypt_and_verify contents
end
def encryptor
@encryptor ||= ActiveSupport::MessageEncryptor.new([ key ].pack("H*"), cipher: CIPHER)
end
def read_env_key
ENV[env_key].presence
end
# Memoized with defined? so a legitimately absent key file is only checked once.
def read_key_file
return @key_file_contents if defined?(@key_file_contents)
@key_file_contents = (key_path.binread.strip if key_path.exist?)
end
def handle_missing_key
raise MissingKeyError.new(key_path: key_path, env_key: env_key) if raise_if_missing_key
end
def check_key_length
raise InvalidKeyLengthError if key&.length != self.class.expected_key_length
end
end
end
# frozen_string_literal: true | |
require_relative "abstract_unit" | |
require "active_support/encrypted_file" | |
# Tests for ActiveSupport::EncryptedFile: key lookup, round-tripping,
# key-length validation, and symlinked content paths.
class EncryptedFileTest < ActiveSupport::TestCase
setup do
@content = "One little fox jumped over the hedge"
@tmpdir = Dir.mktmpdir("encrypted-file-test-")
@content_path = File.join(@tmpdir, "content.txt.enc")
@key_path = File.join(@tmpdir, "content.txt.key")
File.write(@key_path, ActiveSupport::EncryptedFile.generate_key)
@encrypted_file = encrypted_file(@content_path)
end
teardown do
FileUtils.rm_rf @content_path
FileUtils.rm_rf @key_path
FileUtils.rm_rf @tmpdir
end
test "reading content by env key" do
# Remove the key file so ENV is the only key source.
FileUtils.rm_rf @key_path
begin
ENV["CONTENT_KEY"] = ActiveSupport::EncryptedFile.generate_key
@encrypted_file.write @content
assert_equal @content, @encrypted_file.read
ensure
ENV["CONTENT_KEY"] = nil
end
end
test "reading content by key file" do
@encrypted_file.write(@content)
assert_equal @content, @encrypted_file.read
end
test "change content by key file" do
@encrypted_file.write(@content)
@encrypted_file.change do |file|
file.write(file.read + " and went by the lake")
end
assert_equal "#{@content} and went by the lake", @encrypted_file.read
end
test "raise MissingKeyError when key is missing" do
assert_raise ActiveSupport::EncryptedFile::MissingKeyError do
encrypted_file(@content_path, key_path: "", env_key: "").read
end
end
test "raise MissingKeyError when env key is blank" do
FileUtils.rm_rf @key_path
begin
ENV["CONTENT_KEY"] = ""
raised = assert_raise ActiveSupport::EncryptedFile::MissingKeyError do
@encrypted_file.write @content
@encrypted_file.read
end
assert_match(/Missing encryption key to decrypt file/, raised.message)
ensure
ENV["CONTENT_KEY"] = nil
end
end
test "raise InvalidKeyLengthError when key is too short" do
File.write(@key_path, ActiveSupport::EncryptedFile.generate_key[0..-2])
assert_raise ActiveSupport::EncryptedFile::InvalidKeyLengthError do
@encrypted_file.write(@content)
end
end
test "raise InvalidKeyLengthError when key is too long" do
File.write(@key_path, ActiveSupport::EncryptedFile.generate_key + "0")
assert_raise ActiveSupport::EncryptedFile::InvalidKeyLengthError do
@encrypted_file.write(@content)
end
end
test "respects existing content_path symlink" do
@encrypted_file.write(@content)
symlink_path = File.join(@tmpdir, "content_symlink.txt.enc")
File.symlink(@encrypted_file.content_path, symlink_path)
encrypted_file(symlink_path).write(@content)
assert File.symlink?(symlink_path)
assert_equal @content, @encrypted_file.read
ensure
FileUtils.rm_rf symlink_path
end
test "creates new content_path symlink if it's dead" do
# NOTE(review): this relies on EncryptedFile tolerating a symlink whose
# target does not exist yet at construction time — verify against
# encrypted_file.rb's content_path resolution.
symlink_path = File.join(@tmpdir, "content_symlink.txt.enc")
File.symlink(@content_path, symlink_path)
encrypted_file(symlink_path).write(@content)
assert File.exist?(@content_path)
assert_equal @content, @encrypted_file.read
ensure
FileUtils.rm_rf symlink_path
end
private
# Builds an EncryptedFile for the given content_path.
# FIX: this helper previously ignored its content_path argument and always
# passed @content_path, which made both symlink tests above vacuous — they
# never actually exercised a symlinked path.
def encrypted_file(content_path, key_path: @key_path, env_key: "CONTENT_KEY")
ActiveSupport::EncryptedFile.new(content_path: content_path, key_path: key_path,
env_key: env_key, raise_if_missing_key: true)
end
end
# frozen_string_literal: true | |
module ActiveSupport
module EnumerableCoreExt # :nodoc:
# Prepended to Enumerable's singleton class so that
# Enumerable::SoleItemExpectedError keeps resolving even though the constant
# physically lives under ActiveSupport::EnumerableCoreExt (see the HACK note
# in core_ext/enumerable).
module Constants
private
def const_missing(name)
if name == :SoleItemExpectedError
::ActiveSupport::EnumerableCoreExt::SoleItemExpectedError
else
super
end
end
end
end
end
module Enumerable
# Error generated by +sole+ when called on an enumerable that doesn't have
# exactly one item.
class SoleItemExpectedError < StandardError; end
# HACK: For performance reasons, Enumerable shouldn't have any constants of its own.
# So we move SoleItemExpectedError into ActiveSupport::EnumerableCoreExt.
ActiveSupport::EnumerableCoreExt::SoleItemExpectedError = remove_const(:SoleItemExpectedError)
singleton_class.prepend(ActiveSupport::EnumerableCoreExt::Constants)
# Enumerable#sum was added in Ruby 2.4, but it only works with Numeric elements
# when we omit an identity.
# :stopdoc:
# We can't use Refinements here because Refinements with Module which will be prepended
# doesn't work well https://bugs.ruby-lang.org/issues/13446
alias :_original_sum_with_required_identity :sum
private :_original_sum_with_required_identity
# :startdoc:
# Calculates the minimum from the extracted elements.
#
#   payments = [Payment.new(5), Payment.new(15), Payment.new(10)]
#   payments.minimum(:price) # => 5
def minimum(key)
map(&key).min
end
# Calculates the maximum from the extracted elements.
#
#   payments = [Payment.new(5), Payment.new(15), Payment.new(10)]
#   payments.maximum(:price) # => 15
def maximum(key)
map(&key).max
end
# Calculates a sum from the elements.
#
#   payments.sum { |p| p.price * p.tax_rate }
#   payments.sum(&:price)
#
# The latter is a shortcut for:
#
#   payments.inject(0) { |sum, p| sum + p.price }
#
# It can also calculate the sum without the use of a block.
#
#   [5, 15, 10].sum # => 30
#   ['foo', 'bar'].sum('') # => "foobar"
#   [[1, 2], [3, 1, 5]].sum([]) # => [1, 2, 3, 1, 5]
#
# The default sum of an empty list is zero. You can override this default:
#
#   [].sum(Payment.new(0)) { |i| i.amount } # => Payment.new(0)
def sum(identity = nil, &block)
if identity
_original_sum_with_required_identity(identity, &block)
elsif block_given?
# Materialize the block results first, then sum those.
map(&block).sum
# we check `first(1) == []` to check if we have an
# empty Enumerable; checking `empty?` would return
# true for `[nil]`, which we want to deprecate to
# keep consistent with Ruby
elsif first.is_a?(Numeric) || first(1) == []
identity ||= 0
_original_sum_with_required_identity(identity, &block)
else
# Non-numeric elements without an explicit identity: deprecated path.
ActiveSupport::Deprecation.warn(<<-MSG.squish)
Rails 7.0 has deprecated Enumerable.sum in favor of Ruby's native implementation available since 2.4.
Sum of non-numeric elements requires an initial argument.
MSG
inject(:+) || 0
end
end
# Convert an enumerable to a hash, using the block result as the key and the
# element as the value.
#
#   people.index_by(&:login)
#   # => { "nextangle" => <Person ...>, "chade-" => <Person ...>, ...}
#
#   people.index_by { |person| "#{person.first_name} #{person.last_name}" }
#   # => { "Chade- Fowlersburg-e" => <Person ...>, "David Heinemeier Hansson" => <Person ...>, ...}
def index_by
if block_given?
result = {}
each { |elem| result[yield(elem)] = elem }
result
else
to_enum(:index_by) { size if respond_to?(:size) }
end
end
# Convert an enumerable to a hash, using the element as the key and the block
# result as the value.
#
#   post = Post.new(title: "hey there", body: "what's up?")
#
#   %i( title body ).index_with { |attr_name| post.public_send(attr_name) }
#   # => { title: "hey there", body: "what's up?" }
#
# If an argument is passed instead of a block, it will be used as the value
# for all elements:
#
#   %i( created_at updated_at ).index_with(Time.now)
#   # => { created_at: 2020-03-09 22:31:47, updated_at: 2020-03-09 22:31:47 }
def index_with(default = (no_default = true))
if block_given?
result = {}
each { |elem| result[elem] = yield(elem) }
result
elsif no_default
to_enum(:index_with) { size if respond_to?(:size) }
else
result = {}
each { |elem| result[elem] = default }
result
end
end
# Returns +true+ if the enumerable has more than 1 element. Functionally
# equivalent to <tt>enum.to_a.size > 1</tt>. Can be called with a block too,
# much like any?, so <tt>people.many? { |p| p.age > 26 }</tt> returns +true+
# if more than one person is over 26.
def many?
cnt = 0
if block_given?
# any? lets us short-circuit as soon as a second match is seen.
any? do |element|
cnt += 1 if yield element
cnt > 1
end
else
any? { (cnt += 1) > 1 }
end
end
# Returns a new array that includes the passed elements.
#
#   [ 1, 2, 3 ].including(4, 5)
#   # => [ 1, 2, 3, 4, 5 ]
#
#   ["David", "Rafael"].including %w[ Aaron Todd ]
#   # => ["David", "Rafael", "Aaron", "Todd"]
def including(*elements)
to_a.including(*elements)
end
# The negative of the <tt>Enumerable#include?</tt>. Returns +true+ if the
# collection does not include the object.
def exclude?(object)
!include?(object)
end
# Returns a copy of the enumerable excluding the specified elements.
#
#   ["David", "Rafael", "Aaron", "Todd"].excluding "Aaron", "Todd"
#   # => ["David", "Rafael"]
#
#   ["David", "Rafael", "Aaron", "Todd"].excluding %w[ Aaron Todd ]
#   # => ["David", "Rafael"]
#
#   {foo: 1, bar: 2, baz: 3}.excluding :bar
#   # => {foo: 1, baz: 3}
def excluding(*elements)
elements.flatten!(1)
reject { |element| elements.include?(element) }
end
alias :without :excluding
# Extract the given key from each element in the enumerable.
#
#   [{ name: "David" }, { name: "Rafael" }, { name: "Aaron" }].pluck(:name)
#   # => ["David", "Rafael", "Aaron"]
#
#   [{ id: 1, name: "David" }, { id: 2, name: "Rafael" }].pluck(:id, :name)
#   # => [[1, "David"], [2, "Rafael"]]
def pluck(*keys)
# Multiple keys yield arrays per element; a single key yields bare values.
if keys.many?
map { |element| keys.map { |key| element[key] } }
else
key = keys.first
map { |element| element[key] }
end
end
# Extract the given key from the first element in the enumerable.
#
#   [{ name: "David" }, { name: "Rafael" }, { name: "Aaron" }].pick(:name)
#   # => "David"
#
#   [{ id: 1, name: "David" }, { id: 2, name: "Rafael" }].pick(:id, :name)
#   # => [1, "David"]
def pick(*keys)
return if none?
if keys.many?
keys.map { |key| first[key] }
else
first[keys.first]
end
end
# Returns a new +Array+ without the blank items.
# Uses Object#blank? for determining if an item is blank.
#
#   [1, "", nil, 2, " ", [], {}, false, true].compact_blank
#   # => [1, 2, true]
#
#   Set.new([nil, "", 1, 2])
#   # => [2, 1] (or [1, 2])
#
# When called on a +Hash+, returns a new +Hash+ without the blank values.
#
#   { a: "", b: 1, c: nil, d: [], e: false, f: true }.compact_blank
#   # => { b: 1, f: true }
def compact_blank
reject(&:blank?)
end
# Returns a new +Array+ where the order has been set to that provided in the +series+, based on the +key+ of the
# objects in the original enumerable.
#
#   [ Person.find(5), Person.find(3), Person.find(1) ].in_order_of(:id, [ 1, 5, 3 ])
#   # => [ Person.find(1), Person.find(5), Person.find(3) ]
#
# If the +series+ include keys that have no corresponding element in the Enumerable, these are ignored.
# If the Enumerable has additional elements that aren't named in the +series+, these are not included in the result.
def in_order_of(key, series)
index_by(&key).values_at(*series).compact
end
# Returns the sole item in the enumerable. If there are no items, or more
# than one item, raises +Enumerable::SoleItemExpectedError+.
#
#   ["x"].sole          # => "x"
#   Set.new.sole        # => Enumerable::SoleItemExpectedError: no item found
#   { a: 1, b: 2 }.sole # => Enumerable::SoleItemExpectedError: multiple items found
def sole
case count
when 1   then return first # rubocop:disable Style/RedundantReturn
when 0   then raise ActiveSupport::EnumerableCoreExt::SoleItemExpectedError, "no item found"
when 2.. then raise ActiveSupport::EnumerableCoreExt::SoleItemExpectedError, "multiple items found"
end
end
end
class Hash
  # Hash#reject has its own definition, so this needs one too.
  def compact_blank # :nodoc:
    reject { |_key, value| value.blank? }
  end

  # Removes all blank values from the +Hash+ in place and returns self.
  # Uses Object#blank? for determining if a value is blank.
  #
  #   h = { a: "", b: 1, c: nil, d: [], e: false, f: true }
  #   h.compact_blank!
  #   # => { b: 1, f: true }
  def compact_blank!
    # delete_if (not reject!) so self is returned even when nothing changed.
    delete_if { |_key, value| value.blank? }
  end
end
class Range # :nodoc:
  # Optimize range sum to use the arithmetic-progression formula when no block
  # is given and both endpoints are Integers; otherwise defer to the generic
  # Enumerable implementation.
  def sum(identity = nil)
    return super if block_given? || !(first.is_a?(Integer) && last.is_a?(Integer))

    final = exclude_end? ? last - 1 : last
    base = identity || 0
    if final >= first
      # n terms from first..final: n * (first + final) / 2
      base + (final - first + 1) * (final + first) / 2
    else
      base
    end
  end
end
# Using Refinements here in order not to expose our internal method
# Array#sum below reaches Ruby's original implementation through this
# file-local :orig_sum alias; the refinement keeps the alias invisible
# to the rest of the program.
using Module.new {
refine Array do
alias :orig_sum :sum
end
}
class Array # :nodoc:
# Fast path: numeric arrays (or an explicit numeric init) go straight to
# Ruby's native sum via the refinement-local orig_sum alias; everything else
# falls through to the patched Enumerable#sum above.
def sum(init = nil, &block)
if init.is_a?(Numeric) || first.is_a?(Numeric)
init ||= 0
orig_sum(init, &block)
else
super
end
end
# Removes all blank elements from the +Array+ in place and returns self.
# Uses Object#blank? for determining if an item is blank.
#
#   a = [1, "", nil, 2, " ", [], {}, false, true]
#   a.compact_blank!
#   # => [1, 2, true]
def compact_blank!
# use delete_if rather than reject! because it always returns self even if nothing changed
delete_if(&:blank?)
end
end
# frozen_string_literal: true | |
require_relative "../abstract_unit" | |
require "active_support/core_ext/array" | |
require "active_support/core_ext/enumerable" | |
# Simple value objects used as fixtures throughout the enumerable tests.
Payment = Struct.new(:price)
ExpandedPayment = Struct.new(:dollars, :cents)

# A Payment that supports +, so it can be folded with Enumerable#sum.
class SummablePayment < Payment
  def +(p)
    self.class.new(price + p.price)
  end
end
class EnumerableTests < ActiveSupport::TestCase | |
# Minimal Enumerable implementation, so the core extensions are exercised
# through Enumerable itself rather than Array's optimized overrides.
class GenericEnumerable
  include Enumerable

  def initialize(values = [1, 2, 3])
    @values = values
  end

  # Delegates to the backing array; returns an Enumerator when no block given.
  def each(&blk)
    @values.each(&blk)
  end
end
# Asserts both the class and the value of +v+, since numeric results
# (Integer/Rational/Float/Complex) can compare == across types.
def assert_typed_equal(e, v, cls, msg = nil)
assert_kind_of(cls, v, msg)
assert_equal(e, v, msg)
end
# Enumerable#minimum extracts the key from each element and returns the least.
def test_minimum
  collection = GenericEnumerable.new([ Payment.new(5), Payment.new(15), Payment.new(10) ])
  assert_equal 5, collection.minimum(:price)
end
# minimum on an empty enumerable is nil, matching Enumerable#min.
def test_minimum_with_empty_enumerable
  collection = GenericEnumerable.new([])
  assert_nil collection.minimum(:price)
end
# Enumerable#maximum extracts the key from each element and returns the greatest.
def test_maximum
  collection = GenericEnumerable.new([ Payment.new(5), Payment.new(15), Payment.new(10) ])
  assert_equal 15, collection.maximum(:price)
end
def test_maximum_with_empty_enumerable | |
payments = GenericEnumerable.new([]) | |
assert_nil payments.maximum(:price) | |
end | |
def test_sums | |
enum = GenericEnumerable.new([5, 15, 10]) | |
assert_equal 30, enum.sum | |
assert_equal 60, enum.sum { |i| i * 2 } | |
enum = GenericEnumerable.new(%w(a b c)) | |
assert_equal "abc", enum.sum("") | |
assert_equal "aabbcc", enum.sum("") { |i| i * 2 } | |
assert_deprecated do | |
assert_equal "abc", enum.sum | |
end | |
assert_deprecated do | |
assert_equal "aabbcc", enum.sum { |i| i * 2 } | |
end | |
payments = GenericEnumerable.new([ Payment.new(5), Payment.new(15), Payment.new(10) ]) | |
assert_equal 30, payments.sum(&:price) | |
assert_equal 60, payments.sum { |p| p.price * 2 } | |
payments = GenericEnumerable.new([ SummablePayment.new(5), SummablePayment.new(15) ]) | |
assert_deprecated do | |
assert_equal SummablePayment.new(20), payments.sum | |
end | |
assert_equal SummablePayment.new(20), payments.sum(SummablePayment.new(0)) | |
assert_equal SummablePayment.new(20), payments.sum(SummablePayment.new(0)) { |p| p } | |
assert_deprecated do | |
assert_equal SummablePayment.new(20), payments.sum { |p| p } | |
end | |
sum = GenericEnumerable.new([3, 5.quo(1)]).sum | |
assert_typed_equal(8, sum, Rational) | |
sum = GenericEnumerable.new([3, 5.quo(1)]).sum(0.0) | |
assert_typed_equal(8.0, sum, Float) | |
sum = GenericEnumerable.new([3, 5.quo(1), 7.0]).sum | |
assert_typed_equal(15.0, sum, Float) | |
sum = GenericEnumerable.new([3, 5.quo(1), Complex(7)]).sum | |
assert_typed_equal(Complex(15), sum, Complex) | |
assert_typed_equal(15, sum.real, Rational) | |
assert_typed_equal(0, sum.imag, Integer) | |
sum = GenericEnumerable.new([3.5, 5]).sum | |
assert_typed_equal(8.5, sum, Float) | |
sum = GenericEnumerable.new([2, 8.5]).sum | |
assert_typed_equal(10.5, sum, Float) | |
sum = GenericEnumerable.new([1.quo(2), 1]).sum | |
assert_typed_equal(3.quo(2), sum, Rational) | |
sum = GenericEnumerable.new([1.quo(2), 1.quo(3)]).sum | |
assert_typed_equal(5.quo(6), sum, Rational) | |
sum = GenericEnumerable.new([2.0, 3.0 * Complex::I]).sum | |
assert_typed_equal(Complex(2.0, 3.0), sum, Complex) | |
assert_typed_equal(2.0, sum.real, Float) | |
assert_typed_equal(3.0, sum.imag, Float) | |
sum = GenericEnumerable.new([1, 2]).sum(10) { |v| v * 2 } | |
assert_typed_equal(16, sum, Integer) | |
end | |
def test_nil_sums | |
expected_raise = TypeError | |
assert_raise(expected_raise) { GenericEnumerable.new([5, 15, nil]).sum } | |
assert_deprecated do | |
assert_equal 0, [nil].sum | |
end | |
payments = GenericEnumerable.new([ Payment.new(5), Payment.new(15), Payment.new(10), Payment.new(nil) ]) | |
assert_raise(expected_raise) { payments.sum(&:price) } | |
assert_equal 60, payments.sum { |p| p.price.to_i * 2 } | |
end | |
def test_empty_sums | |
assert_equal 0, GenericEnumerable.new([]).sum | |
assert_equal [], GenericEnumerable.new([]).sum([]) | |
assert_equal 0, GenericEnumerable.new([]).sum { |i| i + 10 } | |
assert_equal [], GenericEnumerable.new([]).sum([]) { |i| i + 10 } | |
assert_equal Payment.new(0), GenericEnumerable.new([]).sum(Payment.new(0)) | |
assert_typed_equal 0.0, GenericEnumerable.new([]).sum(0.0), Float | |
end | |
def test_range_sums | |
assert_equal 20, (1..4).sum { |i| i * 2 } | |
assert_equal 10, (1..4).sum | |
assert_equal 10, (1..4.5).sum | |
assert_equal 6, (1...4).sum | |
assert_deprecated do | |
assert_equal "abc", ("a".."c").sum | |
end | |
assert_equal "abc", ("a".."c").sum("") | |
assert_equal 50_000_005_000_000, (0..10_000_000).sum | |
assert_equal 0, (10..0).sum | |
assert_equal 5, (10..0).sum(5) | |
assert_equal 10, (10..10).sum | |
assert_equal 42, (10...10).sum(42) | |
assert_typed_equal 20.0, (1..4).sum(0.0) { |i| i * 2 }, Float | |
assert_typed_equal 10.0, (1..4).sum(0.0), Float | |
assert_typed_equal 20.0, (1..4).sum(10.0), Float | |
assert_typed_equal 5.0, (10..0).sum(5.0), Float | |
end | |
def test_array_sums | |
enum = [5, 15, 10] | |
assert_equal 30, enum.sum | |
assert_equal 60, enum.sum { |i| i * 2 } | |
enum = %w(a b c) | |
assert_deprecated do | |
assert_equal "abc", enum.sum | |
end | |
assert_equal "abc", enum.sum("") | |
assert_deprecated do | |
assert_equal "aabbcc", enum.sum { |i| i * 2 } | |
end | |
assert_equal "aabbcc", enum.sum("") { |i| i * 2 } | |
payments = [ Payment.new(5), Payment.new(15), Payment.new(10) ] | |
assert_equal 30, payments.sum(&:price) | |
assert_equal 60, payments.sum { |p| p.price * 2 } | |
payments = [ SummablePayment.new(5), SummablePayment.new(15) ] | |
assert_deprecated do | |
assert_equal SummablePayment.new(20), payments.sum | |
end | |
assert_equal SummablePayment.new(20), payments.sum(SummablePayment.new(0)) | |
assert_deprecated do | |
assert_equal SummablePayment.new(20), payments.sum { |p| p } | |
end | |
assert_equal SummablePayment.new(20), payments.sum(SummablePayment.new(0)) { |p| p } | |
sum = [3, 5.quo(1)].sum | |
assert_typed_equal(8, sum, Rational) | |
sum = [3, 5.quo(1)].sum(0.0) | |
assert_typed_equal(8.0, sum, Float) | |
sum = [3, 5.quo(1), 7.0].sum | |
assert_typed_equal(15.0, sum, Float) | |
sum = [3, 5.quo(1), Complex(7)].sum | |
assert_typed_equal(Complex(15), sum, Complex) | |
assert_typed_equal(15, sum.real, Rational) | |
assert_typed_equal(0, sum.imag, Integer) | |
sum = [3.5, 5].sum | |
assert_typed_equal(8.5, sum, Float) | |
sum = [2, 8.5].sum | |
assert_typed_equal(10.5, sum, Float) | |
sum = [1.quo(2), 1].sum | |
assert_typed_equal(3.quo(2), sum, Rational) | |
sum = [1.quo(2), 1.quo(3)].sum | |
assert_typed_equal(5.quo(6), sum, Rational) | |
sum = [2.0, 3.0 * Complex::I].sum | |
assert_typed_equal(Complex(2.0, 3.0), sum, Complex) | |
assert_typed_equal(2.0, sum.real, Float) | |
assert_typed_equal(3.0, sum.imag, Float) | |
sum = [1, 2].sum(10) { |v| v * 2 } | |
assert_typed_equal(16, sum, Integer) | |
end | |
def test_index_by | |
payments = GenericEnumerable.new([ Payment.new(5), Payment.new(15), Payment.new(10) ]) | |
assert_equal({ 5 => Payment.new(5), 15 => Payment.new(15), 10 => Payment.new(10) }, | |
payments.index_by(&:price)) | |
assert_equal Enumerator, payments.index_by.class | |
assert_nil payments.index_by.size | |
assert_equal 42, (1..42).index_by.size | |
assert_equal({ 5 => Payment.new(5), 15 => Payment.new(15), 10 => Payment.new(10) }, | |
payments.index_by.each(&:price)) | |
end | |
def test_index_with | |
payments = GenericEnumerable.new([ Payment.new(5), Payment.new(15), Payment.new(10) ]) | |
assert_equal({ Payment.new(5) => 5, Payment.new(15) => 15, Payment.new(10) => 10 }, payments.index_with(&:price)) | |
assert_equal({ title: nil, body: nil }, %i( title body ).index_with(nil)) | |
assert_equal({ title: [], body: [] }, %i( title body ).index_with([])) | |
assert_equal({ title: {}, body: {} }, %i( title body ).index_with({})) | |
assert_equal Enumerator, payments.index_with.class | |
assert_nil payments.index_with.size | |
assert_equal 42, (1..42).index_with.size | |
assert_equal({ Payment.new(5) => 5, Payment.new(15) => 15, Payment.new(10) => 10 }, payments.index_with.each(&:price)) | |
end | |
def test_many | |
assert_equal false, GenericEnumerable.new([]).many? | |
assert_equal false, GenericEnumerable.new([ 1 ]).many? | |
assert_equal true, GenericEnumerable.new([ 1, 2 ]).many? | |
assert_equal false, GenericEnumerable.new([]).many? { |x| x > 1 } | |
assert_equal false, GenericEnumerable.new([ 2 ]).many? { |x| x > 1 } | |
assert_equal false, GenericEnumerable.new([ 1, 2 ]).many? { |x| x > 1 } | |
assert_equal true, GenericEnumerable.new([ 1, 2, 2 ]).many? { |x| x > 1 } | |
end | |
def test_many_iterates_only_on_what_is_needed | |
infinity = 1.0 / 0.0 | |
very_long_enum = 0..infinity | |
assert_equal true, very_long_enum.many? | |
assert_equal true, very_long_enum.many? { |x| x > 100 } | |
end | |
def test_exclude? | |
assert_equal true, GenericEnumerable.new([ 1 ]).exclude?(2) | |
assert_equal false, GenericEnumerable.new([ 1 ]).exclude?(1) | |
end | |
def test_excluding | |
assert_equal [1, 2, 4], GenericEnumerable.new((1..5).to_a).excluding(3, 5) | |
assert_equal [3, 4, 5], GenericEnumerable.new((1..5).to_a).excluding([1, 2]) | |
assert_equal [[0, 1]], GenericEnumerable.new([[0, 1], [1, 0]]).excluding([[1, 0]]) | |
assert_equal [1, 2, 4], (1..5).to_a.excluding(3, 5) | |
assert_equal [1, 2, 4], (1..5).to_set.excluding(3, 5) | |
assert_equal({ foo: 1, baz: 3 }, { foo: 1, bar: 2, baz: 3 }.excluding(:bar)) | |
end | |
def test_without | |
assert_equal [1, 2, 4], GenericEnumerable.new((1..5).to_a).without(3, 5) | |
assert_equal [3, 4, 5], GenericEnumerable.new((1..5).to_a).without([1, 2]) | |
end | |
def test_pluck | |
payments = GenericEnumerable.new([ Payment.new(5), Payment.new(15), Payment.new(10) ]) | |
assert_equal [5, 15, 10], payments.pluck(:price) | |
payments = GenericEnumerable.new([ | |
ExpandedPayment.new(5, 99), | |
ExpandedPayment.new(15, 0), | |
ExpandedPayment.new(10, 50) | |
]) | |
assert_equal [[5, 99], [15, 0], [10, 50]], payments.pluck(:dollars, :cents) | |
assert_equal [], [].pluck(:price) | |
assert_equal [], [].pluck(:dollars, :cents) | |
end | |
def test_pick | |
payments = GenericEnumerable.new([ Payment.new(5), Payment.new(15), Payment.new(10) ]) | |
assert_equal 5, payments.pick(:price) | |
payments = GenericEnumerable.new([ | |
ExpandedPayment.new(5, 99), | |
ExpandedPayment.new(15, 0), | |
ExpandedPayment.new(10, 50) | |
]) | |
assert_equal [5, 99], payments.pick(:dollars, :cents) | |
assert_nil [].pick(:price) | |
assert_nil [].pick(:dollars, :cents) | |
end | |
def test_compact_blank | |
values = GenericEnumerable.new([1, "", nil, 2, " ", [], {}, false, true]) | |
assert_equal [1, 2, true], values.compact_blank | |
end | |
def test_array_compact_blank! | |
values = [1, "", nil, 2, " ", [], {}, false, true] | |
values.compact_blank! | |
assert_equal [1, 2, true], values | |
end | |
def test_hash_compact_blank | |
values = { a: "", b: 1, c: nil, d: [], e: false, f: true } | |
assert_equal({ b: 1, f: true }, values.compact_blank) | |
end | |
def test_hash_compact_blank! | |
values = { a: "", b: 1, c: nil, d: [], e: false, f: true } | |
values.compact_blank! | |
assert_equal({ b: 1, f: true }, values) | |
end | |
def test_in_order_of | |
values = [ Payment.new(5), Payment.new(1), Payment.new(3) ] | |
assert_equal [ Payment.new(1), Payment.new(5), Payment.new(3) ], values.in_order_of(:price, [ 1, 5, 3 ]) | |
end | |
def test_in_order_of_ignores_missing_series | |
values = [ Payment.new(5), Payment.new(1), Payment.new(3) ] | |
assert_equal [ Payment.new(1), Payment.new(5), Payment.new(3) ], values.in_order_of(:price, [ 1, 2, 4, 5, 3 ]) | |
end | |
def test_in_order_of_drops_elements_not_named_in_series | |
values = [ Payment.new(5), Payment.new(1), Payment.new(3) ] | |
assert_equal [ Payment.new(1), Payment.new(5) ], values.in_order_of(:price, [ 1, 5 ]) | |
end | |
def test_sole | |
expected_raise = Enumerable::SoleItemExpectedError | |
assert_raise(expected_raise) { GenericEnumerable.new([]).sole } | |
assert_equal 1, GenericEnumerable.new([1]).sole | |
assert_raise(expected_raise) { GenericEnumerable.new([1, 2]).sole } | |
assert_raise(expected_raise) { GenericEnumerable.new([1, nil]).sole } | |
end | |
def test_doesnt_bust_constant_cache | |
skip "Only applies to MRI" unless defined?(RubyVM.stat) && RubyVM.stat(:global_constant_state) | |
object = Object.new | |
assert_no_difference -> { RubyVM.stat(:global_constant_state) } do | |
object.extend(Enumerable) | |
end | |
end | |
end |
# frozen_string_literal: true | |
require "active_support/string_inquirer" | |
module ActiveSupport
  # Wraps the environment string (e.g. Rails.env) and precomputes the
  # predicate methods for the common environments, so +development?+,
  # +test?+ and +production?+ do not have to go through StringInquirer's
  # method_missing on every call. Other environments still fall back to
  # the dynamic StringInquirer behavior.
  class EnvironmentInquirer < StringInquirer # :nodoc:
    DEFAULT_ENVIRONMENTS = %w(development test production)

    def initialize(env)
      super(env)

      DEFAULT_ENVIRONMENTS.each do |default|
        instance_variable_set(:"@#{default}", default == env)
      end
    end

    DEFAULT_ENVIRONMENTS.each do |default|
      # Plain readers are cheaper than StringInquirer#method_missing.
      class_eval "def #{default}?; @#{default}; end"
    end
  end
end
# frozen_string_literal: true | |
module ActiveSupport
  # +ActiveSupport::ErrorReporter+ is a common interface for error reporting services.
  #
  # To rescue and report any unhandled error, you can use the +handle+ method:
  #
  #   Rails.error.handle do
  #     do_something!
  #   end
  #
  # If an error is raised, it will be reported and swallowed.
  #
  # Alternatively if you want to report the error but not swallow it, you can use +record+
  #
  #   Rails.error.record do
  #     do_something!
  #   end
  #
  # Both methods can be restricted to only handle a specific exception class
  #
  #   maybe_tags = Rails.error.handle(Redis::BaseError) { redis.get("tags") }
  #
  # You can also pass some extra context information that may be used by the error subscribers:
  #
  #   Rails.error.handle(context: { section: "admin" }) do
  #     # ...
  #   end
  #
  # Additionally a +severity+ can be passed along to communicate how important the error report is.
  # +severity+ can be one of +:error+, +:warning+, or +:info+. Handled errors default to the +:warning+
  # severity, and unhandled ones to +:error+.
  #
  # Both +handle+ and +record+ pass through the return value from the block. In the case of +handle+
  # rescuing an error, a fallback can be provided. The fallback must be a callable whose result will
  # be returned when the block raises and is handled:
  #
  #   user = Rails.error.handle(fallback: -> { User.anonymous }) do
  #     User.find_by(params)
  #   end
  class ErrorReporter
    SEVERITIES = %i(error warning info)

    # Logger used when a subscriber itself raises; may be nil.
    attr_accessor :logger

    def initialize(*subscribers, logger: nil)
      @subscribers = subscribers.flatten
      @logger = logger
    end

    # Report any unhandled exception, and swallow it.
    #
    #   Rails.error.handle do
    #     1 + '1'
    #   end
    #
    def handle(error_class = StandardError, severity: :warning, context: {}, fallback: nil)
      yield
    rescue error_class => error
      report(error, handled: true, severity: severity, context: context)
      # +fallback+ is assumed to respond to +call+; a non-callable fallback
      # raises NoMethodError here (behavior pinned by the test suite).
      fallback.call if fallback
    end

    # Report any unhandled exception, then re-raise it so it still propagates
    # to the caller. Returns the block's value when nothing is raised.
    def record(error_class = StandardError, severity: :error, context: {})
      yield
    rescue error_class => error
      report(error, handled: false, severity: severity, context: context)
      raise
    end

    # Register a new error subscriber. The subscriber must respond to
    #
    #   report(Exception, handled: Boolean, context: Hash)
    #
    # The +report+ method +should+ never raise an error.
    def subscribe(subscriber)
      unless subscriber.respond_to?(:report)
        raise ArgumentError, "Error subscribers must respond to #report"
      end
      @subscribers << subscriber
    end

    # Update the execution context that is accessible to error subscribers
    #
    #   Rails.error.set_context(section: "checkout", user_id: @user.id)
    #
    # See +ActiveSupport::ExecutionContext.set+
    def set_context(...)
      ActiveSupport::ExecutionContext.set(...)
    end

    # When the block based +handle+ and +record+ methods are not suitable, you can directly use +report+
    #
    #   Rails.error.report(error)
    def report(error, handled: true, severity: handled ? :warning : :error, context: {})
      unless SEVERITIES.include?(severity)
        raise ArgumentError, "severity must be one of #{SEVERITIES.map(&:inspect).join(", ")}, got: #{severity.inspect}"
      end

      # Context passed explicitly to +report+ wins over the ambient execution context.
      full_context = ActiveSupport::ExecutionContext.to_h.merge(context)
      @subscribers.each do |subscriber|
        subscriber.report(error, handled: handled, severity: severity, context: full_context)
      rescue => subscriber_error
        # With a logger, a failing subscriber is logged so the remaining
        # subscribers still receive the report; without one, re-raise.
        if logger
          logger.fatal(
            "Error subscriber raised an error: #{subscriber_error.message} (#{subscriber_error.class})\n" +
              subscriber_error.backtrace.join("\n")
          )
        else
          raise
        end
      end

      nil
    end
  end
end
# frozen_string_literal: true | |
require_relative "abstract_unit" | |
require "active_support/execution_context/test_helper" | |
# Tests for ActiveSupport::ErrorReporter: subscriber dispatch, execution
# context merging, the #handle / #record block helpers, default severities,
# and behavior when a subscriber itself raises.
class ErrorReporterTest < ActiveSupport::TestCase
  # ExecutionContext is automatically reset in Rails app via executor hooks set in railtie
  # But not in Active Support's own test suite.
  include ActiveSupport::ExecutionContext::TestHelper

  # Records every report it receives so assertions can inspect them.
  class ErrorSubscriber
    attr_reader :events

    def initialize
      @events = []
    end

    def report(error, handled:, severity:, context:)
      @events << [error, handled, severity, context]
    end
  end

  setup do
    @reporter = ActiveSupport::ErrorReporter.new
    @subscriber = ErrorSubscriber.new
    @reporter.subscribe(@subscriber)
    @error = ArgumentError.new("Oops")
  end

  test "receives the execution context" do
    @reporter.set_context(section: "admin")
    error = ArgumentError.new("Oops")
    @reporter.report(error, handled: true)
    assert_equal [[error, true, :warning, { section: "admin" }]], @subscriber.events
  end

  test "passed context has priority over the execution context" do
    @reporter.set_context(section: "admin")
    error = ArgumentError.new("Oops")
    @reporter.report(error, handled: true, context: { section: "public" })
    assert_equal [[error, true, :warning, { section: "public" }]], @subscriber.events
  end

  test "#handle swallow and report any unhandled error" do
    error = ArgumentError.new("Oops")
    @reporter.handle do
      raise error
    end
    assert_equal [[error, true, :warning, {}]], @subscriber.events
  end

  test "#handle can be scoped to an exception class" do
    assert_raises ArgumentError do
      @reporter.handle(NameError) do
        raise ArgumentError
      end
    end
    assert_equal [], @subscriber.events
  end

  test "#handle passes through the return value" do
    result = @reporter.handle do
      2 + 2
    end
    assert_equal 4, result
  end

  test "#handle returns nil on handled raise" do
    result = @reporter.handle do
      raise StandardError
      2 + 2
    end
    assert_nil result
  end

  test "#handle returns the value of the fallback as a proc on handled raise" do
    result = @reporter.handle(fallback: -> { 2 + 2 }) do
      raise StandardError
    end
    assert_equal 4, result
  end

  test "#handle raises if the fallback is not a callable" do
    assert_raises NoMethodError do
      @reporter.handle(fallback: "four") do
        raise StandardError
      end
    end
  end

  test "#handle raises the error up if fallback is a proc that then also raises" do
    assert_raises ArgumentError do
      @reporter.handle(fallback: -> { raise ArgumentError }) do
        raise StandardError
      end
    end
  end

  test "#record report any unhandled error and re-raise them" do
    error = ArgumentError.new("Oops")
    assert_raises ArgumentError do
      @reporter.record do
        raise error
      end
    end
    assert_equal [[error, false, :error, {}]], @subscriber.events
  end

  test "#record can be scoped to an exception class" do
    assert_raises ArgumentError do
      @reporter.record(NameError) do
        raise ArgumentError
      end
    end
    assert_equal [], @subscriber.events
  end

  test "#record passes through the return value" do
    result = @reporter.record do
      2 + 2
    end
    assert_equal 4, result
  end

  test "can have multiple subscribers" do
    second_subscriber = ErrorSubscriber.new
    @reporter.subscribe(second_subscriber)

    error = ArgumentError.new("Oops")
    @reporter.report(error, handled: true)

    assert_equal 1, @subscriber.events.size
    assert_equal 1, second_subscriber.events.size
  end

  test "handled errors default to :warning severity" do
    @reporter.report(@error, handled: true)
    assert_equal :warning, @subscriber.events.dig(0, 2)
  end

  test "unhandled errors default to :error severity" do
    @reporter.report(@error, handled: false)
    assert_equal :error, @subscriber.events.dig(0, 2)
  end

  # A subscriber whose #report always raises the error it was constructed with.
  class FailingErrorSubscriber
    Error = Class.new(StandardError)

    def initialize(error)
      @error = error
    end

    def report(_error, handled:, severity:, context:)
      raise @error
    end
  end

  test "subscriber errors are re-raised if no logger is set" do
    subscriber_error = FailingErrorSubscriber::Error.new("Big Oopsie")
    @reporter.subscribe(FailingErrorSubscriber.new(subscriber_error))
    assert_raises FailingErrorSubscriber::Error do
      @reporter.report(@error, handled: true)
    end
  end

  test "subscriber errors are logged if a logger is set" do
    subscriber_error = FailingErrorSubscriber::Error.new("Big Oopsie")
    @reporter.subscribe(FailingErrorSubscriber.new(subscriber_error))
    log = StringIO.new
    @reporter.logger = ActiveSupport::Logger.new(log)
    @reporter.report(@error, handled: true)

    expected = "Error subscriber raised an error: Big Oopsie (ErrorReporterTest::FailingErrorSubscriber::Error)"
    assert_equal expected, log.string.lines.first.chomp
  end
end
# frozen_string_literal: true | |
require "set" | |
require "pathname" | |
require "concurrent/atomic/atomic_boolean" | |
require "listen" | |
require "active_support/fork_tracker" | |
module ActiveSupport
  # Allows you to "listen" to changes in a file system.
  # The evented file updater does not hit disk when checking for updates.
  # Instead, it uses platform-specific file system events to trigger a change
  # in state.
  #
  # The file checker takes an array of files to watch or a hash specifying directories
  # and file extensions to watch. It also takes a block that is called when
  # EventedFileUpdateChecker#execute is run or when EventedFileUpdateChecker#execute_if_updated
  # is run and there have been changes to the file system.
  #
  # Example:
  #
  #   checker = ActiveSupport::EventedFileUpdateChecker.new(["/tmp/foo"]) { puts "changed" }
  #   checker.updated?
  #   # => false
  #   checker.execute_if_updated
  #   # => nil
  #
  #   FileUtils.touch("/tmp/foo")
  #
  #   checker.updated?
  #   # => true
  #   checker.execute_if_updated
  #   # => "changed"
  #
  class EventedFileUpdateChecker # :nodoc: all
    def initialize(files, dirs = {}, &block)
      unless block
        raise ArgumentError, "A block is required to initialize an EventedFileUpdateChecker"
      end

      @block = block
      @core = Core.new(files, dirs)
      # Stop the listener (and drop the fork hook) when this checker is GC'ed;
      # the state lives in Core so the finalizer does not retain +self+.
      ObjectSpace.define_finalizer(self, @core.finalizer)
    end

    # Returns true when a watched path changed since the last +execute+.
    # First restarts the listener if a previously missing watched directory
    # has appeared (and flags that as an update).
    def updated?
      if @core.restart?
        @core.thread_safely(&:restart)
        @core.updated.make_true
      end

      @core.updated.true?
    end

    # Runs the block unconditionally and clears the updated flag.
    def execute
      @core.updated.make_false
      @block.call
    end

    # Runs the block only when something changed; returns true in that case.
    def execute_if_updated
      if updated?
        yield if block_given?
        execute
        true
      end
    end

    # Internal state holder, kept separate from the public checker so the
    # finalizer proc can capture it without keeping the checker alive.
    class Core
      attr_reader :updated

      def initialize(files, dirs)
        @files = files.map { |file| Pathname(file).expand_path }.to_set

        @dirs = dirs.each_with_object({}) do |(dir, exts), hash|
          # Normalize extensions to ".ext" form.
          hash[Pathname(dir).expand_path] = Array(exts).map { |ext| ext.to_s.sub(/\A\.?/, ".") }.to_set
        end

        @common_path = common_path(@dirs.keys)

        @dtw = directories_to_watch
        @missing = []

        @updated = Concurrent::AtomicBoolean.new(false)
        @mutex = Mutex.new

        start
        # Listen's background threads do not survive a fork; restart in the child.
        @after_fork = ActiveSupport::ForkTracker.after_fork { start }
      end

      def finalizer
        proc do
          stop
          ActiveSupport::ForkTracker.unregister(@after_fork)
        end
      end

      def thread_safely
        @mutex.synchronize do
          yield self
        end
      end

      def start
        normalize_dirs!
        # Only watch directories that currently exist; remember the others so
        # restart? can detect when they are later created.
        @dtw, @missing = [*@dtw, *@missing].partition(&:exist?)
        @listener = @dtw.any? ? Listen.to(*@dtw, &method(:changed)) : nil
        @listener&.start
      end

      def stop
        @listener&.stop
      end

      def restart
        stop
        start
      end

      # A restart is needed when a previously missing watched directory now exists.
      def restart?
        @missing.any?(&:exist?)
      end

      # Resolve watched directories to their real paths (follows symlinks),
      # since Listen reports events against real paths.
      def normalize_dirs!
        @dirs.transform_keys! do |dir|
          dir.exist? ? dir.realpath : dir
        end
      end

      # Listen callback: flag an update when any reported path is one we watch.
      def changed(modified, added, removed)
        unless @updated.true?
          @updated.make_true if (modified + added + removed).any? { |f| watching?(f) }
        end
      end

      def watching?(file)
        file = Pathname(file)

        if @files.member?(file)
          true
        elsif file.directory?
          false
        else
          ext = file.extname

          # Walk up from the file's directory looking for a watched directory
          # whose extension set matches (empty set means any extension).
          file.dirname.ascend do |dir|
            matching = @dirs[dir]

            if matching && (matching.empty? || matching.include?(ext))
              break true
            elsif dir == @common_path || dir.root?
              break false
            end
          end
        end
      end

      # Watch only the topmost directories: drop any directory that has an
      # ancestor already accounted for. Gem paths count as accounted for so we
      # never recursively watch installed gems.
      def directories_to_watch
        dtw = @dirs.keys | @files.map(&:dirname)
        accounted_for = dtw.to_set + Gem.path.map { |path| Pathname(path) }
        dtw.reject { |dir| dir.ascend.drop(1).any? { |parent| accounted_for.include?(parent) } }
      end

      # Deepest ancestor shared by all +paths+ (nil when there is none).
      def common_path(paths)
        paths.map { |path| path.ascend.to_a }.reduce(&:&)&.first
      end
    end
  end
end
# frozen_string_literal: true | |
require_relative "abstract_unit" | |
require "pathname" | |
require "weakref" | |
require_relative "file_update_checker_shared_tests" | |
# Runs the shared FileUpdateChecker suite against the evented (Listen-based)
# implementation, plus evented-specific cases: forked processes, garbage
# collection, symlinked directories, and directories created after boot.
class EventedFileUpdateCheckerTest < ActiveSupport::TestCase
  include FileUpdateCheckerSharedTests

  def setup
    # LISTEN=0 opts out of the listen-based tests entirely.
    skip if ENV["LISTEN"] == "0"
    require "listen"
    super
  end

  def new_checker(files = [], dirs = {}, &block)
    ActiveSupport::EventedFileUpdateChecker.new(files, dirs, &block).tap do |c|
      wait
    end
  end

  def teardown
    super
    Listen.stop
  end

  # Give the listener threads time to pick up pending file system events.
  def wait
    sleep 1
  end

  def mkdir(dirs)
    super
    wait # wait for the events to fire
  end

  def touch(files)
    super
    wait # wait for the events to fire
  end

  def rm_f(files)
    super
    wait # wait for the events to fire
  end

  test "notifies forked processes" do
    skip "Forking not available" unless Process.respond_to?(:fork)

    FileUtils.touch(tmpfiles)

    checker = new_checker(tmpfiles) { }
    assert_not_predicate checker, :updated?

    # Pipes used for flow control across fork.
    boot_reader, boot_writer = IO.pipe
    touch_reader, touch_writer = IO.pipe

    pid = fork do
      assert_not_predicate checker, :updated?

      # Fork is booted, ready for file to be touched
      # notify parent process.
      boot_writer.write("booted")

      # Wait for parent process to signal that file
      # has been touched.
      IO.select([touch_reader])

      assert_predicate checker, :updated?
    end

    assert pid

    # Wait for fork to be booted before touching files.
    IO.select([boot_reader])
    touch(tmpfiles)

    # Notify fork that files have been touched.
    touch_writer.write("touched")

    assert_predicate checker, :updated?

    Process.wait(pid)
  end

  test "can be garbage collected" do
    # Use a separate thread to isolate objects and ensure they will be garbage collected.
    checker_ref, listener_threads = Thread.new do
      threads_before_checker = Thread.list
      checker = ActiveSupport::EventedFileUpdateChecker.new([], tmpdir => ".rb") { }
      # Wait for listener thread to start processing events.
      wait
      [WeakRef.new(checker), Thread.list - threads_before_checker]
    end.value

    # Calling `GC.start` 4 times should trigger a full GC run.
    4.times do
      GC.start
    end

    assert_not checker_ref.weakref_alive?, "EventedFileUpdateChecker was not garbage collected"
    # The finalizer must also have stopped the listener's background threads.
    assert_empty Thread.list & listener_threads
  end

  test "should detect changes through symlink" do
    actual_dir = File.join(tmpdir, "actual")
    linked_dir = File.join(tmpdir, "linked")

    Dir.mkdir(actual_dir)
    FileUtils.ln_s(actual_dir, linked_dir)

    checker = new_checker([], linked_dir => ".rb") { }

    assert_not_predicate checker, :updated?

    touch(File.join(actual_dir, "a.rb"))

    assert_predicate checker, :updated?
    assert checker.execute_if_updated
  end

  test "updated should become true when nonexistent directory is added later" do
    watched_dir = File.join(tmpdir, "app")
    unwatched_dir = File.join(tmpdir, "node_modules")
    not_exist_watched_dir = File.join(tmpdir, "test")

    Dir.mkdir(watched_dir)
    Dir.mkdir(unwatched_dir)

    checker = new_checker([], watched_dir => ".rb", not_exist_watched_dir => ".rb") { }

    touch(File.join(watched_dir, "a.rb"))
    assert_predicate checker, :updated?
    assert checker.execute_if_updated

    # Creating the previously missing directory counts as an update.
    Dir.mkdir(not_exist_watched_dir)
    wait
    assert_predicate checker, :updated?
    assert checker.execute_if_updated

    touch(File.join(unwatched_dir, "a.rb"))
    assert_not_predicate checker, :updated?
    assert_not checker.execute_if_updated
  end

  test "does not stop other checkers when nonexistent directory is added later" do
    dir1 = File.join(tmpdir, "app")
    dir2 = File.join(tmpdir, "test")
    Dir.mkdir(dir2)

    checker1 = new_checker([], dir1 => ".rb") { }
    checker2 = new_checker([], dir2 => ".rb") { }

    Dir.mkdir(dir1)
    touch(File.join(dir1, "a.rb"))

    assert_predicate checker1, :updated?
    assert_not_predicate checker2, :updated?

    touch(File.join(dir2, "a.rb"))

    assert_predicate checker2, :updated?
  end
end
# frozen_string_literal: true | |
require_relative "../abstract_unit" | |
module ActiveSupport | |
module Notifications | |
class EventedTest < ActiveSupport::TestCase | |
# We expect all exception types to be handled by the notifier, so test with
# the most basic type — a direct Exception subclass (not a StandardError).
class BadListenerException < Exception; end
# Records every start/finish notification it receives, in order, so tests
# can assert on the exact sequence of events.
class Listener
  attr_reader :events

  def initialize
    @events = []
  end

  def start(name, id, payload)
    events << [:start, name, id, payload]
  end

  def finish(name, id, payload)
    events << [:finish, name, id, payload]
  end
end
# A listener that additionally implements +call+, marking it as supporting
# the timed subscription interface.
class ListenerWithTimedSupport < Listener
  def call(name, start, finish, id, payload)
    events << [:call, name, start, finish, id, payload]
  end
end
# Simulates a subscriber whose +start+ hook always blows up.
class BadStartListener < Listener
  def start(name, id, payload)
    raise BadListenerException
  end

  def finish(name, id, payload)
  end
end
# Simulates a subscriber whose +finish+ hook always blows up.
class BadFinishListener < Listener
  def start(name, id, payload)
  end

  def finish(name, id, payload)
    raise BadListenerException
  end
end
# An object subscriber receives start/finish callbacks in dispatch order,
# including properly nested instrumentation (1 wraps 2).
def test_evented_listener
  notifier = Fanout.new
  listener = Listener.new
  notifier.subscribe "hi", listener

  notifier.start "hi", 1, {}
  notifier.start "hi", 2, {}
  notifier.finish "hi", 2, {}
  notifier.finish "hi", 1, {}

  assert_equal 4, listener.events.length
  assert_equal [
    [:start, "hi", 1, {}],
    [:start, "hi", 2, {}],
    [:finish, "hi", 2, {}],
    [:finish, "hi", 1, {}],
  ], listener.events
end
# Events for names the listener is not subscribed to are not delivered.
def test_evented_listener_no_events
  notifier = Fanout.new
  listener = Listener.new
  notifier.subscribe "hi", listener

  notifier.start "world", 1, {}

  assert_equal 0, listener.events.length
end
# Subscribing with a nil pattern receives events for every name.
def test_listen_to_everything
  notifier = Fanout.new
  listener = Listener.new
  notifier.subscribe nil, listener

  notifier.start "hello", 1, {}
  notifier.start "world", 1, {}
  notifier.finish "world", 1, {}
  notifier.finish "hello", 1, {}

  assert_equal 4, listener.events.length
  assert_equal [
    [:start, "hello", 1, {}],
    [:start, "world", 1, {}],
    [:finish, "world", 1, {}],
    [:finish, "hello", 1, {}],
  ], listener.events
end
# A raising +start+ hook surfaces as InstrumentationSubscriberError (with the
# original exception as +cause+) but must not prevent well-behaved
# subscribers from receiving their callbacks.
def test_listen_start_multiple_exception_consistency
  notifier = Fanout.new
  listener = Listener.new
  notifier.subscribe nil, BadStartListener.new
  notifier.subscribe nil, BadStartListener.new
  notifier.subscribe nil, listener

  error = assert_raises InstrumentationSubscriberError do
    notifier.start "hello", 1, {}
  end
  assert_instance_of BadListenerException, error.cause

  error = assert_raises InstrumentationSubscriberError do
    notifier.start "world", 1, {}
  end
  assert_instance_of BadListenerException, error.cause

  notifier.finish "world", 1, {}
  notifier.finish "hello", 1, {}

  assert_equal 4, listener.events.length
  assert_equal [
    [:start, "hello", 1, {}],
    [:start, "world", 1, {}],
    [:finish, "world", 1, {}],
    [:finish, "hello", 1, {}],
  ], listener.events
end
# Mirror of the start-side test: failures raised during #finish are wrapped,
# the first one is chained as +cause+, and well-behaved subscribers still
# receive every finish event.
def test_listen_finish_multiple_exception_consistency
  fanout = Fanout.new
  subscriber = Listener.new
  fanout.subscribe nil, BadFinishListener.new
  fanout.subscribe nil, BadFinishListener.new
  fanout.subscribe nil, subscriber

  fanout.start "hello", 1, {}
  fanout.start "world", 1, {}

  ["world", "hello"].each do |name|
    error = assert_raises InstrumentationSubscriberError do
      fanout.finish name, 1, {}
    end
    assert_instance_of BadListenerException, error.cause
  end

  expected = [
    [:start, "hello", 1, {}],
    [:start, "world", 1, {}],
    [:finish, "world", 1, {}],
    [:finish, "hello", 1, {}],
  ]
  assert_equal 4, subscriber.events.length
  assert_equal expected, subscriber.events
end
# A listener that also responds to call (timed support) is still treated
# as an evented subscriber: it receives start/finish, not timed callbacks.
def test_evented_listener_priority
  fanout = Fanout.new
  subscriber = ListenerWithTimedSupport.new
  fanout.subscribe "hi", subscriber

  fanout.start "hi", 1, {}
  fanout.finish "hi", 1, {}

  expected = [
    [:start, "hi", 1, {}],
    [:finish, "hi", 1, {}]
  ]
  assert_equal expected, subscriber.events
end
# Subscribing with a Regexp pattern matches every event name the pattern
# covers, and events are delivered in publication order.
def test_listen_to_regexp
  notifier = Fanout.new
  listener = Listener.new
  # Escape the dot: the original /[a-z]*.world/ used an unescaped `.`,
  # which matches ANY character (e.g. "hixworld"), not just ".world".
  notifier.subscribe(/[a-z]*\.world/, listener)

  notifier.start("hi.world", 1, {})
  notifier.finish("hi.world", 2, {})
  notifier.start("hello.world", 1, {})
  notifier.finish("hello.world", 2, {})

  assert_equal [
    [:start, "hi.world", 1, {}],
    [:finish, "hi.world", 2, {}],
    [:start, "hello.world", 1, {}],
    [:finish, "hello.world", 2, {}]
  ], listener.events
end
# Unsubscribing a concrete name from a Regexp subscription adds it to the
# matcher's exclusion set: that name stops matching while other names
# covered by the pattern continue to be delivered.
def test_listen_to_regexp_with_exclusions
  notifier = Fanout.new
  listener = Listener.new
  # Escape the dot: the original /[a-z]*.world/ used an unescaped `.`,
  # which matches ANY character, not just a literal ".".
  notifier.subscribe(/[a-z]*\.world/, listener)
  notifier.unsubscribe("hi.world")

  notifier.start("hi.world", 1, {})
  notifier.finish("hi.world", 2, {})
  notifier.start("hello.world", 1, {})
  notifier.finish("hello.world", 2, {})

  assert_equal [
    [:start, "hello.world", 1, {}],
    [:finish, "hello.world", 2, {}]
  ], listener.events
end
end | |
end | |
end |
# frozen_string_literal: true | |
class Hash
  # Returns a new hash with the given keys removed; the receiver is left
  # untouched.
  #
  #   hash = { a: true, b: false, c: nil }
  #   hash.except(:c) # => { a: true, b: false }
  #   hash.except(:a, :b) # => { c: nil }
  #   hash # => { a: true, b: false, c: nil }
  #
  # Handy for dropping a few known toggles from a parameter set:
  #
  #   @person.update(params[:person].except(:admin))
  #
  # Only defined when the running Ruby does not already provide
  # Hash#except (it ships natively since Ruby 3.0).
  def except(*keys)
    slice(*(self.keys - keys))
  end unless method_defined?(:except)

  # Destructive counterpart of #except: deletes the given keys from the
  # receiver and returns it.
  #
  #   hash = { a: true, b: false, c: nil }
  #   hash.except!(:c) # => { a: true, b: false }
  #   hash # => { a: true, b: false }
  def except!(*keys)
    keys.each { |k| delete(k) }
    self
  end
end
# frozen_string_literal: true | |
class String
  # The negation of <tt>String#include?</tt>: true when the receiver does
  # not contain +string+.
  #
  #   "hello".exclude? "lo" # => false
  #   "hello".exclude? "ol" # => true
  #   "hello".exclude? ?h   # => false
  def exclude?(string)
    include?(string) ? false : true
  end
end
# frozen_string_literal: true | |
module ActiveSupport
  # A per-execution (thread/fiber-local) key-value store of metadata about
  # the current unit of work. State lives in IsolatedExecutionState, so each
  # execution context sees its own independent hash. Every mutation fires
  # the registered after_change callbacks.
  module ExecutionContext # :nodoc:
    # Callbacks invoked after every mutation of the store. Class-level
    # instance variable: shared by all executions, registered once at boot.
    @after_change_callbacks = []

    class << self
      # Registers a block to be invoked after each change to the context.
      def after_change(&block)
        @after_change_callbacks << block
      end

      # Updates the execution context. If a block is given, it resets the provided keys to their
      # previous value once the block exits.
      def set(**options)
        options.symbolize_keys!
        keys = options.keys
        store = self.store
        # Snapshot only the keys being overwritten (missing keys snapshot
        # as nil) so they can be restored after the block.
        previous_context = keys.zip(store.values_at(*keys)).to_h
        store.merge!(options)
        @after_change_callbacks.each(&:call)
        if block_given?
          begin
            yield
          ensure
            # Restore the overwritten keys; keys set by other means inside
            # the block are deliberately left in place.
            store.merge!(previous_context)
            @after_change_callbacks.each(&:call)
          end
        end
      end

      # Sets a single key (coerced to a Symbol). Never rolled back.
      def []=(key, value)
        store[key.to_sym] = value
        @after_change_callbacks.each(&:call)
      end

      # Returns a shallow copy, so callers cannot mutate the live store.
      def to_h
        store.dup
      end

      def clear
        store.clear
      end

      private
        # Lazily creates the per-execution backing hash.
        def store
          IsolatedExecutionState[:active_support_execution_context] ||= {}
        end
    end
  end
end
# frozen_string_literal: true | |
require_relative "abstract_unit" | |
require "active_support/execution_context/test_helper" | |
class ExecutionContextTest < ActiveSupport::TestCase
  # ExecutionContext is automatically reset in Rails app via executor hooks set in railtie
  # But not in Active Support's own test suite.
  include ActiveSupport::ExecutionContext::TestHelper

  test "#set restore the modified keys when the block exits" do
    assert_nil ActiveSupport::ExecutionContext.to_h[:foo]
    ActiveSupport::ExecutionContext.set(foo: "bar") do
      assert_equal "bar", ActiveSupport::ExecutionContext.to_h[:foo]
      # Nested #set shadows the key for the inner block only.
      ActiveSupport::ExecutionContext.set(foo: "plop") do
        assert_equal "plop", ActiveSupport::ExecutionContext.to_h[:foo]
      end
      assert_equal "bar", ActiveSupport::ExecutionContext.to_h[:foo]
      # Writes made via []= or blockless #set inside the block are NOT
      # rolled back when the block exits — only the block form's own keys.
      ActiveSupport::ExecutionContext[:direct_assignment] = "present"
      ActiveSupport::ExecutionContext.set(multi_assignment: "present")
    end

    assert_nil ActiveSupport::ExecutionContext.to_h[:foo]
    assert_equal "present", ActiveSupport::ExecutionContext.to_h[:direct_assignment]
    assert_equal "present", ActiveSupport::ExecutionContext.to_h[:multi_assignment]
  end

  test "#set coerce keys to symbol" do
    ActiveSupport::ExecutionContext.set("foo" => "bar") do
      assert_equal "bar", ActiveSupport::ExecutionContext.to_h[:foo]
    end
  end

  test "#[]= coerce keys to symbol" do
    ActiveSupport::ExecutionContext["symbol_key"] = "symbolized"
    assert_equal "symbolized", ActiveSupport::ExecutionContext.to_h[:symbol_key]
  end

  test "#to_h returns a copy of the context" do
    ActiveSupport::ExecutionContext[:foo] = 42
    context = ActiveSupport::ExecutionContext.to_h
    # Mutating the copy must not leak back into the live context.
    context[:foo] = 43
    assert_equal 42, ActiveSupport::ExecutionContext.to_h[:foo]
  end
end
# frozen_string_literal: true | |
require "active_support/error_reporter" | |
require "active_support/callbacks" | |
require "concurrent/hash" | |
module ActiveSupport
  # Class-level "execution" lifecycle built on ActiveSupport::Callbacks:
  # +run+ callbacks fire when an execution starts and +complete+ callbacks
  # fire when it ends. Subclasses (e.g. ActiveSupport::Executor) wrap units
  # of work such as requests and jobs. At most one execution per subclass is
  # active at a time per isolated execution state (thread/fiber).
  class ExecutionWrapper
    include ActiveSupport::Callbacks

    # Null object returned by run! when an execution is already active;
    # its complete! is a no-op so callers may invoke it unconditionally.
    Null = Object.new # :nodoc:
    def Null.complete! # :nodoc:
    end

    define_callbacks :run
    define_callbacks :complete

    def self.to_run(*args, &block)
      set_callback(:run, *args, &block)
    end

    def self.to_complete(*args, &block)
      set_callback(:complete, *args, &block)
    end

    # Adapts a hook object's +run+ side into a :run callback, stashing the
    # value hook.run returns in the instance's hook_state for later.
    RunHook = Struct.new(:hook) do # :nodoc:
      def before(target)
        hook_state = target.send(:hook_state)
        hook_state[hook] = hook.run
      end
    end

    # Counterpart of RunHook: passes the stashed state to hook.complete,
    # but only if the hook's run side actually executed.
    CompleteHook = Struct.new(:hook) do # :nodoc:
      def before(target)
        hook_state = target.send(:hook_state)
        if hook_state.key?(hook)
          hook.complete hook_state[hook]
        end
      end
      alias after before
    end

    # Register an object to be invoked during both the +run+ and
    # +complete+ steps.
    #
    # +hook.complete+ will be passed the value returned from +hook.run+,
    # and will only be invoked if +run+ has previously been called.
    # (Mostly, this means it won't be invoked if an exception occurs in
    # a preceding +to_run+ block; all ordinary +to_complete+ blocks are
    # invoked in that situation.)
    def self.register_hook(hook, outer: false)
      if outer
        # Outer hooks run before all other run callbacks and complete
        # after all other complete callbacks.
        to_run RunHook.new(hook), prepend: true
        to_complete :after, CompleteHook.new(hook)
      else
        to_run RunHook.new(hook)
        to_complete CompleteHook.new(hook)
      end
    end

    # Run this execution.
    #
    # Returns an instance, whose +complete!+ method *must* be invoked
    # after the work has been performed.
    #
    # Where possible, prefer +wrap+.
    def self.run!(reset: false)
      if reset
        # Forcibly discard (and complete) any execution already in flight.
        lost_instance = IsolatedExecutionState.delete(active_key)
        lost_instance&.complete!
      else
        # Re-entrant: an already-active execution yields the no-op Null.
        return Null if active?
      end
      new.tap do |instance|
        success = nil
        begin
          instance.run!
          success = true
        ensure
          # If a run callback raised, complete immediately so the active
          # flag is cleared and complete callbacks still fire.
          instance.complete! unless success
        end
      end
    end

    # Perform the work in the supplied block as an execution.
    def self.wrap
      return yield if active?
      instance = run!
      begin
        yield
      rescue => error
        # Report the unhandled failure to subscribers, then re-raise.
        error_reporter.report(error, handled: false)
        raise
      ensure
        instance.complete!
      end
    end

    # Runs callbacks around the block without registering the instance as
    # "active" — no re-entrancy bookkeeping.
    def self.perform # :nodoc:
      instance = new
      instance.run
      begin
        yield
      ensure
        instance.complete
      end
    end

    def self.error_reporter
      @error_reporter ||= ActiveSupport::ErrorReporter.new
    end

    # Per-subclass key under which the active instance is stored in
    # IsolatedExecutionState; object_id keeps sibling subclasses separate.
    def self.active_key # :nodoc:
      @active_key ||= :"active_execution_wrapper_#{object_id}"
    end

    def self.active? # :nodoc:
      IsolatedExecutionState.key?(active_key)
    end

    # Marks this instance active, then fires run callbacks.
    def run! # :nodoc:
      IsolatedExecutionState[self.class.active_key] = self
      run
    end

    def run # :nodoc:
      run_callbacks(:run)
    end

    # Complete this in-flight execution. This method *must* be called
    # exactly once on the result of any call to +run!+.
    #
    # Where possible, prefer +wrap+.
    def complete!
      complete
    ensure
      # Always clear the active flag, even if a complete callback raised.
      IsolatedExecutionState.delete(self.class.active_key)
    end

    def complete # :nodoc:
      run_callbacks(:complete)
    end

    private
      # Per-instance map from registered hook object to the value its +run+
      # returned; read back by CompleteHook.
      def hook_state
        @_hook_state ||= {}
      end
  end
end
# frozen_string_literal: true | |
require "active_support/execution_wrapper" | |
module ActiveSupport
  # The application-facing execution wrapper. An empty subclass of
  # ExecutionWrapper: it inherits run!/complete!/wrap and the callback
  # registration API unchanged, existing so frameworks can attach their
  # hooks to a dedicated class.
  class Executor < ExecutionWrapper
  end
end
# frozen_string_literal: true | |
require_relative "abstract_unit" | |
class ExecutorTest < ActiveSupport::TestCase
  class DummyError < RuntimeError
  end

  # Minimal error-reporter subscriber that records every report call.
  class ErrorSubscriber
    attr_reader :events

    def initialize
      @events = []
    end

    def report(error, handled:, severity:, context:)
      @events << [error, handled, severity, context]
    end
  end

  # Errors raised inside wrap are reported (handled: false, severity
  # :error) and then re-raised to the caller.
  def test_wrap_report_errors
    subscriber = ErrorSubscriber.new
    executor.error_reporter.subscribe(subscriber)
    error = DummyError.new("Oops")
    assert_raises DummyError do
      executor.wrap do
        raise error
      end
    end
    assert_equal [[error, false, :error, {}]], subscriber.events
  end

  def test_wrap_invokes_callbacks
    called = []
    executor.to_run { called << :run }
    executor.to_complete { called << :complete }

    executor.wrap do
      called << :body
    end

    assert_equal [:run, :body, :complete], called
  end

  # run and complete callbacks execute against the same instance, so an
  # ivar set in to_run is readable in to_complete.
  def test_callbacks_share_state
    result = false
    executor.to_run { @foo = true }
    executor.to_complete { result = @foo }

    executor.wrap { }

    assert result
  end

  # run!/complete! is the manual, two-step equivalent of wrap.
  def test_separated_calls_invoke_callbacks
    called = []
    executor.to_run { called << :run }
    executor.to_complete { called << :complete }

    state = executor.run!
    called << :body
    state.complete!

    assert_equal [:run, :body, :complete], called
  end

  # A raising to_run callback aborts the remaining run callbacks, but
  # complete callbacks still fire (run! completes on failure).
  def test_exceptions_unwind
    called = []
    executor.to_run { called << :run_1 }
    executor.to_run { raise DummyError }
    executor.to_run { called << :run_2 }
    executor.to_complete { called << :complete }

    assert_raises(DummyError) do
      executor.wrap { called << :body }
    end

    assert_equal [:run_1, :complete], called
  end

  # Nested wrap on the same executor class is a pass-through: callbacks
  # fire once for the outermost wrap only.
  def test_avoids_double_wrapping
    called = []
    executor.to_run { called << :run }
    executor.to_complete { called << :complete }

    executor.wrap do
      called << :early
      executor.wrap do
        called << :body
      end
      called << :late
    end

    assert_equal [:run, :early, :body, :late, :complete], called
  end

  # register_hook feeds the value returned by hook.run into hook.complete.
  def test_hooks_carry_state
    supplied_state = :none

    hook = Class.new do
      define_method(:run) do
        :some_state
      end

      define_method(:complete) do |state|
        supplied_state = state
      end
    end.new

    executor.register_hook(hook)

    executor.wrap { }

    assert_equal :some_state, supplied_state
  end

  # A nil return from hook.run still counts as "ran": complete receives nil.
  def test_nil_state_is_sufficient
    supplied_state = :none

    hook = Class.new do
      define_method(:run) do
        nil
      end

      define_method(:complete) do |state|
        supplied_state = state
      end
    end.new

    executor.register_hook(hook)

    executor.wrap { }

    assert_nil supplied_state
  end

  # If a callback registered BEFORE the hook raises, hook.run never runs,
  # so hook.complete is skipped entirely.
  def test_exception_skips_uninvoked_hook
    supplied_state = :none

    hook = Class.new do
      define_method(:run) do
        :some_state
      end

      define_method(:complete) do |state|
        supplied_state = state
      end
    end.new

    executor.to_run do
      raise DummyError
    end
    executor.register_hook(hook)

    assert_raises(DummyError) do
      executor.wrap { }
    end

    assert_equal :none, supplied_state
  end

  # If a callback registered AFTER the hook raises, hook.run already ran,
  # so hook.complete is still invoked with its state.
  def test_exception_unwinds_invoked_hook
    supplied_state = :none

    hook = Class.new do
      define_method(:run) do
        :some_state
      end

      define_method(:complete) do |state|
        supplied_state = state
      end
    end.new

    executor.register_hook(hook)
    executor.to_run do
      raise DummyError
    end

    assert_raises(DummyError) do
      executor.wrap { }
    end

    assert_equal :some_state, supplied_state
  end

  # Hooks run in registration order; outer: true hooks run first and
  # complete last, wrapping all the others.
  def test_hook_insertion_order
    invoked = []
    supplied_state = []

    hook_class = Class.new do
      attr_accessor :letter

      define_method(:initialize) do |letter|
        self.letter = letter
      end

      define_method(:run) do
        invoked << :"run_#{letter}"
        :"state_#{letter}"
      end

      define_method(:complete) do |state|
        invoked << :"complete_#{letter}"
        supplied_state << state
      end
    end

    executor.register_hook(hook_class.new(:a))
    executor.register_hook(hook_class.new(:b))
    executor.register_hook(hook_class.new(:c), outer: true)
    executor.register_hook(hook_class.new(:d))

    executor.wrap { }

    assert_equal [:run_c, :run_a, :run_b, :run_d, :complete_a, :complete_b, :complete_d, :complete_c], invoked
    assert_equal [:state_a, :state_b, :state_d, :state_c], supplied_state
  end

  # Steady-state wraps must not define new classes/methods (which would
  # bump the VM's class_serial and deoptimize method caches).
  def test_class_serial_is_unaffected
    skip if !defined?(RubyVM)
    hook = Class.new do
      define_method(:run) do
        nil
      end

      define_method(:complete) do |state|
        nil
      end
    end.new

    executor.register_hook(hook)

    # Warm-up to trigger any pending autoloads
    executor.wrap { }

    before = RubyVM.stat(:class_serial)
    executor.wrap { }
    executor.wrap { }
    executor.wrap { }
    after = RubyVM.stat(:class_serial)

    assert_equal before, after
  end

  # Distinct Executor subclasses track their own active state, so their
  # wraps nest independently.
  def test_separate_classes_can_wrap
    other_executor = Class.new(ActiveSupport::Executor)

    called = []
    executor.to_run { called << :run }
    executor.to_complete { called << :complete }
    other_executor.to_run { called << :other_run }
    other_executor.to_complete { called << :other_complete }

    executor.wrap do
      other_executor.wrap do
        called << :body
      end
    end

    assert_equal [:run, :other_run, :body, :other_complete, :complete], called
  end

  private
    # Fresh anonymous subclass per test so registered callbacks don't leak
    # between tests.
    def executor
      @executor ||= Class.new(ActiveSupport::Executor)
    end
end
# frozen_string_literal: true | |
require "pathname" | |
class Pathname
  # Returns the receiver when the named file exists, +nil+ otherwise.
  # <tt>pathname.existence</tt> is shorthand for
  #
  #   pathname.exist? ? pathname : nil
  #
  # which turns
  #
  #   content = pathname.read if pathname.exist?
  #
  # into
  #
  #   content = pathname.existence&.read
  #
  # @return [Pathname, nil]
  def existence
    exist? ? self : nil
  end
end
# frozen_string_literal: true | |
require_relative "../../abstract_unit" | |
require "active_support/core_ext/pathname/existence" | |
class PathnameExistenceTest < ActiveSupport::TestCase
  # existence returns the pathname itself for a present file and nil
  # for an absent one.
  def test_existence
    present = Pathname.new(__FILE__)
    absent = Pathname.new("not existing")

    assert_equal present, present.existence
    assert_nil absent.existence
  end
end
# frozen_string_literal: true | |
class Array
  # Destructively removes the elements for which the block returns a truthy
  # value and returns them. Without a block, returns an Enumerator instead.
  #
  #   numbers = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]
  #   odd_numbers = numbers.extract! { |number| number.odd? } # => [1, 3, 5, 7, 9]
  #   numbers # => [0, 2, 4, 6, 8]
  def extract!
    return to_enum(:extract!) { size } unless block_given?

    removed = []
    # reject! drops an element whenever the block is truthy; we record it
    # first so the removed elements can be returned in original order.
    reject! do |item|
      removed << item if yield(item)
    end
    removed
  end
end
# frozen_string_literal: true | |
class Hash
  # Hook used by Array#extract_options!. Only plain Hash instances are
  # extractable by default; a Hash subclass may override this to return
  # true and opt in to being popped off an argument list.
  def extractable_options?
    instance_of?(Hash)
  end
end

class Array
  # Pops and returns the trailing options hash from an argument list, or
  # returns an empty hash when the last element is not an extractable hash.
  #
  #   def options(*args)
  #     args.extract_options!
  #   end
  #
  #   options(1, 2) # => {}
  #   options(1, 2, a: :b) # => {:a=>:b}
  def extract_options!
    candidate = last
    if candidate.is_a?(Hash) && candidate.extractable_options?
      pop
    else
      {}
    end
  end
end
# frozen_string_literal: true | |
require_relative "../../abstract_unit" | |
require "active_support/core_ext/array" | |
require "active_support/core_ext/hash" | |
require "active_support/ordered_options" | |
class ExtractOptionsTest < ActiveSupport::TestCase
  # Plain Hash subclass: not extractable by default.
  class HashSubclass < Hash
  end

  # Subclass that opts in to extraction via the extractable_options? hook.
  class ExtractableHashSubclass < Hash
    def extractable_options?
      true
    end
  end

  def test_extract_options
    assert_equal({}, [].extract_options!)
    assert_equal({}, [1].extract_options!)
    assert_equal({ a: :b }, [{ a: :b }].extract_options!)
    assert_equal({ a: :b }, [1, { a: :b }].extract_options!)
  end

  # Hash subclasses are left in place unless they declare themselves
  # extractable.
  def test_extract_options_doesnt_extract_hash_subclasses
    hash = HashSubclass.new
    hash[:foo] = 1
    array = [hash]
    options = array.extract_options!
    assert_equal({}, options)
    assert_equal([hash], array)
  end

  def test_extract_options_extracts_extractable_subclass
    hash = ExtractableHashSubclass.new
    hash[:foo] = 1
    array = [hash]
    options = array.extract_options!
    assert_equal({ foo: 1 }, options)
    assert_equal([], array)
  end

  # HashWithIndifferentAccess declares itself extractable.
  def test_extract_options_extracts_hash_with_indifferent_access
    array = [{ foo: 1 }.with_indifferent_access]
    options = array.extract_options!
    assert_equal(1, options[:foo])
  end

  # OrderedOptions (a Hash descendant) is extractable too.
  def test_extract_options_extracts_ordered_options
    hash = ActiveSupport::OrderedOptions.new
    hash.foo = 1
    array = [hash]
    options = array.extract_options!
    assert_equal({ foo: 1 }, options)
    assert_equal([], array)
  end
end
# frozen_string_literal: true | |
require_relative "../../abstract_unit" | |
require "active_support/core_ext/array" | |
class ExtractTest < ActiveSupport::TestCase
  # extract! removes matching elements in place (same array object) and
  # returns them.
  def test_extract
    numbers = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]
    array_id = numbers.object_id

    odd_numbers = numbers.extract! { |number| number.odd? }

    assert_equal [1, 3, 5, 7, 9], odd_numbers
    assert_equal [0, 2, 4, 6, 8], numbers
    # Mutation happened in place, not via reassignment.
    assert_equal array_id, numbers.object_id
  end

  # Without a block, extract! returns a sized Enumerator that performs the
  # same in-place extraction when driven with a block.
  def test_extract_without_block
    numbers = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]
    array_id = numbers.object_id

    extract_enumerator = numbers.extract!

    assert_instance_of Enumerator, extract_enumerator
    assert_equal numbers.size, extract_enumerator.size

    odd_numbers = extract_enumerator.each { |number| number.odd? }

    assert_equal [1, 3, 5, 7, 9], odd_numbers
    assert_equal [0, 2, 4, 6, 8], numbers
    assert_equal array_id, numbers.object_id
  end

  # A block that matches nothing returns an empty array and leaves the
  # receiver (still the same object) empty.
  def test_extract_on_empty_array
    empty_array = []
    array_id = empty_array.object_id

    new_empty_array = empty_array.extract! { }

    assert_equal [], new_empty_array
    assert_equal [], empty_array
    assert_equal array_id, empty_array.object_id
  end
end
# frozen_string_literal: true | |
# Shared examples asserting that every cache operation raises
# Redis::BaseError while the store is unavailable (i.e. no silent failure
# swallowing). The including test class must provide @cache and an
# +emulating_unavailability+ helper that yields a broken cache instance.
module FailureRaisingBehavior
  def test_fetch_read_failure_raises
    key = SecureRandom.uuid
    @cache.write(key, SecureRandom.alphanumeric)

    assert_raise Redis::BaseError do
      emulating_unavailability do |cache|
        cache.fetch(key)
      end
    end
  end

  def test_fetch_with_block_read_failure_raises
    key = SecureRandom.uuid
    value = SecureRandom.alphanumeric
    @cache.write(key, value)

    assert_raise Redis::BaseError do
      emulating_unavailability do |cache|
        cache.fetch(key) { SecureRandom.alphanumeric }
      end
    end

    # The original value must survive — the fallback block result is never
    # written over it during an outage.
    assert_equal value, @cache.read(key)
  end

  def test_read_failure_raises
    key = SecureRandom.uuid
    @cache.write(key, SecureRandom.alphanumeric)

    assert_raise Redis::BaseError do
      emulating_unavailability do |cache|
        cache.read(key)
      end
    end
  end

  def test_read_multi_failure_raises
    key = SecureRandom.uuid
    other_key = SecureRandom.uuid
    @cache.write_multi(
      key => SecureRandom.alphanumeric,
      other_key => SecureRandom.alphanumeric
    )

    assert_raise Redis::BaseError do
      emulating_unavailability do |cache|
        cache.read_multi(key, other_key)
      end
    end
  end

  def test_write_failure_raises
    assert_raise Redis::BaseError do
      emulating_unavailability do |cache|
        cache.write(SecureRandom.uuid, SecureRandom.alphanumeric)
      end
    end
  end

  def test_write_multi_failure_raises
    assert_raise Redis::BaseError do
      emulating_unavailability do |cache|
        cache.write_multi(
          SecureRandom.uuid => SecureRandom.alphanumeric,
          SecureRandom.uuid => SecureRandom.alphanumeric
        )
      end
    end
  end

  def test_fetch_multi_failure_raises
    key = SecureRandom.uuid
    other_key = SecureRandom.uuid
    @cache.write_multi(
      key => SecureRandom.alphanumeric,
      other_key => SecureRandom.alphanumeric
    )

    assert_raise Redis::BaseError do
      emulating_unavailability do |cache|
        cache.fetch_multi(key, other_key) { |k| "unavailable" }
      end
    end
  end

  def test_delete_failure_raises
    key = SecureRandom.uuid
    @cache.write(key, SecureRandom.alphanumeric)

    assert_raise Redis::BaseError do
      emulating_unavailability do |cache|
        cache.delete(key)
      end
    end
  end

  def test_exist_failure_raises
    key = SecureRandom.uuid
    @cache.write(key, SecureRandom.alphanumeric)

    assert_raise Redis::BaseError do
      emulating_unavailability do |cache|
        cache.exist?(key)
      end
    end
  end

  def test_increment_failure_raises
    key = SecureRandom.uuid
    @cache.write(key, 1, raw: true)

    assert_raise Redis::BaseError do
      emulating_unavailability do |cache|
        cache.increment(key)
      end
    end
  end

  def test_decrement_failure_raises
    key = SecureRandom.uuid
    @cache.write(key, 1, raw: true)

    assert_raise Redis::BaseError do
      emulating_unavailability do |cache|
        cache.decrement(key)
      end
    end
  end

  # Renamed from test_clear_failure_returns_nil: this test asserts a raise,
  # so the old name was misleading and also mirrored (rather than
  # contrasted with) FailureSafetyBehavior's returns-nil test.
  def test_clear_failure_raises
    assert_raise Redis::BaseError do
      emulating_unavailability do |cache|
        cache.clear
      end
    end
  end
end
# frozen_string_literal: true | |
# Shared examples asserting that cache operations degrade gracefully while
# the store is unavailable: reads return nil/empty, writes return false,
# and nothing raises. The including test class must provide @cache and an
# +emulating_unavailability+ helper that yields a broken cache instance.
module FailureSafetyBehavior
  def test_fetch_read_failure_returns_nil
    key = SecureRandom.uuid
    @cache.write(key, SecureRandom.alphanumeric)

    emulating_unavailability do |cache|
      assert_nil cache.fetch(key)
    end
  end

  def test_fetch_read_failure_does_not_attempt_to_write
    key = SecureRandom.uuid
    value = SecureRandom.alphanumeric
    @cache.write(key, value)

    emulating_unavailability do |cache|
      val = cache.fetch(key) { "1" }
      ##
      # Though the `write` part of fetch fails for the same reason
      # `read` will, the block result is still executed and returned.
      assert_equal "1", val
    end

    # The pre-outage value is untouched once the store is reachable again.
    assert_equal value, @cache.read(key)
  end

  def test_read_failure_returns_nil
    key = SecureRandom.uuid
    @cache.write(key, SecureRandom.alphanumeric)

    emulating_unavailability do |cache|
      assert_nil cache.read(key)
    end
  end

  def test_read_multi_failure_returns_empty_hash
    key = SecureRandom.uuid
    other_key = SecureRandom.uuid
    @cache.write_multi(
      key => SecureRandom.alphanumeric,
      other_key => SecureRandom.alphanumeric
    )

    emulating_unavailability do |cache|
      assert_equal Hash.new, cache.read_multi(key, other_key)
    end
  end

  def test_write_failure_returns_false
    key = SecureRandom.uuid

    emulating_unavailability do |cache|
      assert_equal false, cache.write(key, SecureRandom.alphanumeric)
    end
  end

  def test_write_multi_failure_not_raises
    emulating_unavailability do |cache|
      assert_nothing_raised do
        cache.write_multi(
          SecureRandom.uuid => SecureRandom.alphanumeric,
          SecureRandom.uuid => SecureRandom.alphanumeric
        )
      end
    end
  end

  # When reads fail, fetch_multi falls back to the block for every key.
  def test_fetch_multi_failure_returns_fallback_results
    key = SecureRandom.uuid
    other_key = SecureRandom.uuid
    @cache.write_multi(
      key => SecureRandom.alphanumeric,
      other_key => SecureRandom.alphanumeric
    )

    emulating_unavailability do |cache|
      fetched = cache.fetch_multi(key, other_key) { |k| "unavailable" }
      assert_equal Hash[key => "unavailable", other_key => "unavailable"], fetched
    end
  end

  def test_delete_failure_returns_false
    key = SecureRandom.uuid
    @cache.write(key, SecureRandom.alphanumeric)

    emulating_unavailability do |cache|
      assert_equal false, cache.delete(key)
    end
  end

  def test_exist_failure_returns_false
    key = SecureRandom.uuid
    @cache.write(key, SecureRandom.alphanumeric)

    emulating_unavailability do |cache|
      assert_not cache.exist?(key)
    end
  end

  def test_increment_failure_returns_nil
    key = SecureRandom.uuid
    @cache.write(key, 1, raw: true)

    emulating_unavailability do |cache|
      assert_nil cache.increment(key)
    end
  end

  def test_decrement_failure_returns_nil
    key = SecureRandom.uuid
    @cache.write(key, 1, raw: true)

    emulating_unavailability do |cache|
      assert_nil cache.decrement(key)
    end
  end

  def test_clear_failure_returns_nil
    emulating_unavailability do |cache|
      assert_nil cache.clear
    end
  end
end
# frozen_string_literal: true | |
require "mutex_m" | |
require "concurrent/map" | |
require "set" | |
require "active_support/core_ext/object/try" | |
module ActiveSupport | |
module Notifications | |
# Raised when multiple notification subscribers fail while handling the
# same event; collects every failure so none are lost (the raiser chains
# the first one as +cause+).
class InstrumentationSubscriberError < RuntimeError
  attr_reader :exceptions

  def initialize(exceptions)
    @exceptions = exceptions
    names = exceptions.map { |exception| exception.class.name }
    super "Exception(s) occurred within instrumentation subscribers: #{names.join(', ')}"
  end
end
# This is a default queue implementation that ships with Notifications. | |
# It just pushes events to all registered log subscribers. | |
# | |
# This class is thread safe. All methods are reentrant. | |
class Fanout | |
include Mutex_m | |
def initialize | |
@string_subscribers = Hash.new { |h, k| h[k] = [] } | |
@other_subscribers = [] | |
@listeners_for = Concurrent::Map.new | |
super | |
end | |
def subscribe(pattern = nil, callable = nil, monotonic: false, &block) | |
subscriber = Subscribers.new(pattern, callable || block, monotonic) | |
synchronize do | |
case pattern | |
when String | |
@string_subscribers[pattern] << subscriber | |
@listeners_for.delete(pattern) | |
when NilClass, Regexp | |
@other_subscribers << subscriber | |
@listeners_for.clear | |
else | |
raise ArgumentError, "pattern must be specified as a String, Regexp or empty" | |
end | |
end | |
subscriber | |
end | |
def unsubscribe(subscriber_or_name) | |
synchronize do | |
case subscriber_or_name | |
when String | |
@string_subscribers[subscriber_or_name].clear | |
@listeners_for.delete(subscriber_or_name) | |
@other_subscribers.each { |sub| sub.unsubscribe!(subscriber_or_name) } | |
else | |
pattern = subscriber_or_name.try(:pattern) | |
if String === pattern | |
@string_subscribers[pattern].delete(subscriber_or_name) | |
@listeners_for.delete(pattern) | |
else | |
@other_subscribers.delete(subscriber_or_name) | |
@listeners_for.clear | |
end | |
end | |
end | |
end | |
def inspect # :nodoc: | |
total_patterns = @string_subscribers.size + @other_subscribers.size | |
"#<#{self.class} (#{total_patterns} patterns)>" | |
end | |
def start(name, id, payload) | |
iterate_guarding_exceptions(listeners_for(name)) { |s| s.start(name, id, payload) } | |
end | |
def finish(name, id, payload, listeners = listeners_for(name)) | |
iterate_guarding_exceptions(listeners) { |s| s.finish(name, id, payload) } | |
end | |
def publish(name, *args) | |
iterate_guarding_exceptions(listeners_for(name)) { |s| s.publish(name, *args) } | |
end | |
def publish_event(event) | |
iterate_guarding_exceptions(listeners_for(event.name)) { |s| s.publish_event(event) } | |
end | |
def iterate_guarding_exceptions(listeners) | |
exceptions = nil | |
listeners.each do |s| | |
yield s | |
rescue Exception => e | |
exceptions ||= [] | |
exceptions << e | |
end | |
if exceptions | |
if exceptions.size == 1 | |
raise exceptions.first | |
else | |
raise InstrumentationSubscriberError.new(exceptions), cause: exceptions.first | |
end | |
end | |
listeners | |
end | |
def listeners_for(name) | |
# this is correctly done double-checked locking (Concurrent::Map's lookups have volatile semantics) | |
@listeners_for[name] || synchronize do | |
# use synchronisation when accessing @subscribers | |
@listeners_for[name] ||= | |
@string_subscribers[name] + @other_subscribers.select { |s| s.subscribed_to?(name) } | |
end | |
end | |
def listening?(name) | |
listeners_for(name).any? | |
end | |
# This is a sync queue, so there is no waiting. | |
def wait | |
end | |
module Subscribers # :nodoc: | |
def self.new(pattern, listener, monotonic) | |
subscriber_class = monotonic ? MonotonicTimed : Timed | |
if listener.respond_to?(:start) && listener.respond_to?(:finish) | |
subscriber_class = Evented | |
else | |
# Doing this to detect a single argument block or callable | |
# like `proc { |x| }` vs `proc { |*x| }`, `proc { |**x| }`, | |
# or `proc { |x, **y| }` | |
procish = listener.respond_to?(:parameters) ? listener : listener.method(:call) | |
if procish.arity == 1 && procish.parameters.length == 1 | |
subscriber_class = EventObject | |
end | |
end | |
subscriber_class.new(pattern, listener) | |
end | |
class Matcher # :nodoc: | |
attr_reader :pattern, :exclusions | |
def self.wrap(pattern) | |
if String === pattern | |
pattern | |
elsif pattern.nil? | |
AllMessages.new | |
else | |
new(pattern) | |
end | |
end | |
def initialize(pattern) | |
@pattern = pattern | |
@exclusions = Set.new | |
end | |
def unsubscribe!(name) | |
exclusions << -name if pattern === name | |
end | |
def ===(name) | |
pattern === name && !exclusions.include?(name) | |
end | |
class AllMessages | |
def ===(name) | |
true | |
end | |
def unsubscribe!(*) | |
false | |
end | |
end | |
end | |
class Evented # :nodoc:
  attr_reader :pattern

  def initialize(pattern, delegate)
    @pattern = Matcher.wrap(pattern)
    @delegate = delegate
    # Capability checks are cached up front so the publish paths stay cheap.
    @can_publish = delegate.respond_to?(:publish)
    @can_publish_event = delegate.respond_to?(:publish_event)
  end

  # Forwards a published message only when the delegate supports it.
  def publish(name, *args)
    @delegate.publish(name, *args) if @can_publish
  end

  # Prefers the delegate's publish_event hook; otherwise unpacks the event
  # into the legacy positional publish signature.
  def publish_event(event)
    if @can_publish_event
      @delegate.publish_event(event)
    else
      publish(event.name, event.time, event.end, event.transaction_id, event.payload)
    end
  end

  def start(name, id, payload)
    @delegate.start(name, id, payload)
  end

  def finish(name, id, payload)
    @delegate.finish(name, id, payload)
  end

  def subscribed_to?(name)
    pattern === name
  end

  def unsubscribe!(name)
    pattern.unsubscribe!(name)
  end
end
class Timed < Evented # :nodoc:
  # Direct publishes go straight to the callable delegate.
  def publish(name, *args)
    @delegate.call(name, *args)
  end

  # Pushes the wall-clock start time onto an execution-local stack so
  # nested instrumentations pair up correctly.
  def start(name, id, payload)
    (IsolatedExecutionState[:_timestack] ||= []).push(Time.now)
  end

  # Pops the matching start time and invokes the delegate with the
  # (name, start, finish, id, payload) signature.
  def finish(name, id, payload)
    started = IsolatedExecutionState[:_timestack].pop
    @delegate.call(name, started, Time.now, id, payload)
  end
end
class MonotonicTimed < Evented # :nodoc:
  # Direct publishes go straight to the callable delegate.
  def publish(name, *args)
    @delegate.call(name, *args)
  end

  # Same pairing scheme as Timed, but on the monotonic clock so measured
  # durations are immune to wall-clock adjustments.
  def start(name, id, payload)
    (IsolatedExecutionState[:_timestack_monotonic] ||= []).push(Process.clock_gettime(Process::CLOCK_MONOTONIC))
  end

  def finish(name, id, payload)
    started = IsolatedExecutionState[:_timestack_monotonic].pop
    @delegate.call(name, started, Process.clock_gettime(Process::CLOCK_MONOTONIC), id, payload)
  end
end
class EventObject < Evented
  # Builds and starts an Event, stashing it on an execution-local stack
  # until the matching finish arrives.
  def start(name, id, payload)
    event = build_event(name, id, payload)
    event.start!
    (IsolatedExecutionState[:_event_stack] ||= []).push(event)
  end

  # Pops the in-flight event, finalizes it with the finish-time payload,
  # and hands the whole event object to the delegate.
  def finish(name, id, payload)
    event = IsolatedExecutionState[:_event_stack].pop
    event.payload = payload
    event.finish!
    @delegate.call event
  end

  def publish_event(event)
    @delegate.call event
  end

  private
    def build_event(name, id, payload)
      ActiveSupport::Notifications::Event.new(name, nil, nil, id, payload)
    end
end
end | |
end | |
end | |
end |
# frozen_string_literal: true | |
require "active_support/core_ext/file/atomic" |
# frozen_string_literal: true | |
require "active_support/concern" | |
module ActiveSupport
  module Testing
    # Adds simple access to sample files called file fixtures.
    # File fixtures are normal files stored in
    # <tt>ActiveSupport::TestCase.file_fixture_path</tt>.
    #
    # File fixtures are represented as +Pathname+ objects.
    # This makes it easy to extract specific information:
    #
    #   file_fixture("example.txt").read # get the file's content
    #   file_fixture("example.mp3").size # get the file size
    module FileFixtures
      extend ActiveSupport::Concern

      included do
        class_attribute :file_fixture_path, instance_writer: false
      end

      # Returns a +Pathname+ to the fixture file named +fixture_name+.
      #
      # Raises +ArgumentError+ if +fixture_name+ can't be found.
      def file_fixture(fixture_name)
        path = Pathname.new(File.join(file_fixture_path, fixture_name))

        unless path.exist?
          msg = "the directory '%s' does not contain a file named '%s'"
          raise ArgumentError, msg % [file_fixture_path, fixture_name]
        end

        path
      end
    end
  end
end
# frozen_string_literal: true | |
require_relative "../abstract_unit" | |
require "pathname" | |
class FileFixturesTest < ActiveSupport::TestCase
  self.file_fixture_path = File.expand_path("../file_fixtures", __dir__)

  # file_fixture hands back a Pathname rooted in the fixture directory.
  test "#file_fixture returns Pathname to file fixture" do
    fixture = file_fixture("sample.txt")
    assert_kind_of Pathname, fixture
    assert_match %r{.*/test/file_fixtures/sample\.txt$}, fixture.to_s
  end

  # A missing fixture raises with a message naming the directory and file.
  test "raises an exception when the fixture file does not exist" do
    error = assert_raises(ArgumentError) do
      file_fixture("nope")
    end
    assert_match(/^the directory '[^']+test\/file_fixtures' does not contain a file named 'nope'$/, error.message)
  end
end
# Same lookup behavior must hold when file_fixture_path is a Pathname
# rather than a String.
class FileFixturesPathnameDirectoryTest < ActiveSupport::TestCase
  self.file_fixture_path = Pathname.new(File.expand_path("../file_fixtures", __dir__))

  test "#file_fixture_path returns Pathname to file fixture" do
    fixture = file_fixture("sample.txt")
    assert_kind_of Pathname, fixture
    assert_match %r{.*/test/file_fixtures/sample\.txt$}, fixture.to_s
  end
end
# frozen_string_literal: true | |
require "active_support/core_ext/file/atomic" | |
require "active_support/core_ext/string/conversions" | |
require "uri/common" | |
module ActiveSupport
  module Cache
    # A cache store implementation which stores everything on the filesystem.
    class FileStore < Store
      attr_reader :cache_path

      # Hash buckets are rendered as three-hex-digit directory names.
      DIR_FORMATTER = "%03X"
      FILENAME_MAX_SIZE = 226 # max filename size on file system is 255, minus room for timestamp, pid, and random characters appended by Tempfile (used by atomic write)
      FILEPATH_MAX_SIZE = 900 # max is 1024, plus some room
      # Files preserved by #clear so a checked-in cache directory survives.
      GITKEEP_FILES = [".gitkeep", ".keep"].freeze

      def initialize(cache_path, **options)
        super(options)
        @cache_path = cache_path.to_s
      end

      # Advertise cache versioning support.
      def self.supports_cache_versioning?
        true
      end

      # Deletes all items from the cache. In this case it deletes all the entries in the specified
      # file store directory except for .keep or .gitkeep. Be careful which directory is specified in your
      # config file when using +FileStore+ because everything in that directory will be deleted.
      def clear(options = nil)
        root_dirs = (Dir.children(cache_path) - GITKEEP_FILES)
        FileUtils.rm_r(root_dirs.collect { |f| File.join(cache_path, f) })
      rescue Errno::ENOENT, Errno::ENOTEMPTY
        # Cache dir may not exist yet, or another process may be clearing it.
      end

      # Preemptively iterates through all stored keys and removes the ones which have expired.
      def cleanup(options = nil)
        options = merged_options(options)
        search_dir(cache_path) do |fname|
          entry = read_entry(fname, **options)
          delete_entry(fname, **options) if entry && entry.expired?
        end
      end

      # Increments an already existing integer value that is stored in the cache.
      # If the key is not found nothing is done.
      def increment(name, amount = 1, options = nil)
        modify_value(name, amount, options)
      end

      # Decrements an already existing integer value that is stored in the cache.
      # If the key is not found nothing is done.
      def decrement(name, amount = 1, options = nil)
        modify_value(name, -amount, options)
      end

      # Deletes every entry whose decoded key matches +matcher+.
      def delete_matched(matcher, options = nil)
        options = merged_options(options)
        instrument(:delete_matched, matcher.inspect) do
          matcher = key_matcher(matcher, options)
          search_dir(cache_path) do |path|
            key = file_path_key(path)
            delete_entry(path, **options) if key.match(matcher)
          end
        end
      end

      private
        # Reads and deserializes one entry; +key+ here is already a file path.
        def read_entry(key, **options)
          if payload = read_serialized_entry(key, **options)
            entry = deserialize_entry(payload)
            # Guard against unrelated files in the cache dir deserializing to
            # something other than a cache entry.
            entry if entry.is_a?(Cache::Entry)
          end
        end

        # Raw bytes for an entry, or nil on any read failure (logged).
        def read_serialized_entry(key, **)
          File.binread(key) if File.exist?(key)
        rescue => error
          logger.error("FileStoreError (#{error}): #{error.message}") if logger
          nil
        end

        def write_entry(key, entry, **options)
          write_serialized_entry(key, serialize_entry(entry, **options), **options)
        end

        def write_serialized_entry(key, payload, **options)
          return false if options[:unless_exist] && File.exist?(key)
          ensure_cache_path(File.dirname(key))
          # Atomic write prevents readers from seeing a partially written entry.
          File.atomic_write(key, cache_path) { |f| f.write(payload) }
          true
        end

        def delete_entry(key, **options)
          if File.exist?(key)
            begin
              File.delete(key)
              delete_empty_directories(File.dirname(key))
              true
            rescue
              # Just in case the error was caused by another process deleting the file first.
              raise if File.exist?(key)
              false
            end
          end
        end

        # Lock a file for a block so only one process can modify it at a time.
        def lock_file(file_name, &block)
          if File.exist?(file_name)
            File.open(file_name, "r+") do |f|
              f.flock File::LOCK_EX
              yield
            ensure
              f.flock File::LOCK_UN
            end
          else
            yield
          end
        end

        # Translate a key into a file path.
        def normalize_key(key, options)
          key = super
          fname = URI.encode_www_form_component(key)

          # Overly long keys are digested so the full path stays under the
          # file-system path limit.
          if fname.size > FILEPATH_MAX_SIZE
            fname = ActiveSupport::Digest.hexdigest(key)
          end

          # Two levels of bucket directories derived from a checksum of the name.
          hash = Zlib.adler32(fname)
          hash, dir_1 = hash.divmod(0x1000)
          dir_2 = hash.modulo(0x1000)

          # Make sure file name doesn't exceed file system limits.
          if fname.length < FILENAME_MAX_SIZE
            fname_paths = fname
          else
            # Split an over-long name into FILENAME_MAX_SIZE-sized path segments.
            fname_paths = []
            begin
              fname_paths << fname[0, FILENAME_MAX_SIZE]
              fname = fname[FILENAME_MAX_SIZE..-1]
            end until fname.blank?
          end

          File.join(cache_path, DIR_FORMATTER % dir_1, DIR_FORMATTER % dir_2, fname_paths)
        end

        # Translate a file path into a key.
        def file_path_key(path)
          # Drop cache_path plus the two bucket directories, keep the rest.
          fname = path[cache_path.to_s.size..-1].split(File::SEPARATOR, 4).last
          URI.decode_www_form_component(fname, Encoding::UTF_8)
        end

        # Delete empty directories in the cache.
        def delete_empty_directories(dir)
          # Never walk above (or delete) the cache root itself.
          return if File.realpath(dir) == File.realpath(cache_path)
          if Dir.children(dir).empty?
            Dir.delete(dir) rescue nil
            delete_empty_directories(File.dirname(dir))
          end
        end

        # Make sure a file path's directories exist.
        def ensure_cache_path(path)
          FileUtils.makedirs(path) unless File.exist?(path)
        end

        # Recursively yields every file (not directory) beneath +dir+.
        def search_dir(dir, &callback)
          return if !File.exist?(dir)
          Dir.each_child(dir) do |d|
            name = File.join(dir, d)
            if File.directory?(name)
              search_dir(name, &callback)
            else
              callback.call name
            end
          end
        end

        # Modifies the amount of an already existing integer value that is stored in the cache.
        # If the key is not found nothing is done.
        def modify_value(name, amount, options)
          file_name = normalize_key(name, options)

          # File lock serializes concurrent increment/decrement across processes.
          lock_file(file_name) do
            options = merged_options(options)

            if num = read(name, options)
              num = num.to_i + amount
              write(name, num, options)
              num
            end
          end
        end
    end
  end
end
# frozen_string_literal: true | |
require_relative "../../abstract_unit" | |
require "active_support/cache" | |
require_relative "../behaviors" | |
require "pathname" | |
class FileStoreTest < ActiveSupport::TestCase
  attr_reader :cache_dir

  # Builds a FileStore for tests; :cache_dir may be overridden per call.
  def lookup_store(options = {})
    cache_dir = options.delete(:cache_dir) { @cache_dir }
    ActiveSupport::Cache.lookup_store(:file_store, cache_dir, options)
  end

  def setup
    @cache_dir = Dir.mktmpdir("file-store-")
    Dir.mkdir(cache_dir) unless File.exist?(cache_dir)
    @cache = lookup_store(expires_in: 60)
    @peek = lookup_store(expires_in: 60)
    @cache_with_pathname = lookup_store(cache_dir: Pathname.new(cache_dir), expires_in: 60)

    # Captures logger output so read-failure logging can be asserted.
    @buffer = StringIO.new
    @cache.logger = ActiveSupport::Logger.new(@buffer)
  end

  def teardown
    FileUtils.rm_r(cache_dir)
  rescue Errno::ENOENT
  end

  include CacheStoreBehavior
  include CacheStoreVersionBehavior
  include CacheStoreCoderBehavior
  include CacheDeleteMatchedBehavior
  include CacheIncrementDecrementBehavior
  include CacheInstrumentationBehavior

  # .gitkeep/.keep placeholder files must survive #clear.
  def test_clear
    gitkeep = File.join(cache_dir, ".gitkeep")
    keep = File.join(cache_dir, ".keep")
    FileUtils.touch([gitkeep, keep])
    @cache.clear
    assert File.exist?(gitkeep)
    assert File.exist?(keep)
  end

  def test_clear_without_cache_dir
    FileUtils.rm_r(cache_dir)
    assert_nothing_raised { @cache.clear }
  end

  # Keys longer than FILEPATH_MAX_SIZE are digested; round-trip must still work.
  def test_long_uri_encoded_keys
    @cache.write("%" * 870, 1)
    assert_equal 1, @cache.read("%" * 870)
  end

  def test_key_transformation
    key = @cache.send(:normalize_key, "views/index?id=1", {})
    assert_equal "views/index?id=1", @cache.send(:file_path_key, key)
  end

  def test_key_transformation_with_pathname
    FileUtils.touch(File.join(cache_dir, "foo"))
    key = @cache_with_pathname.send(:normalize_key, "views/index?id=1", {})
    assert_equal "views/index?id=1", @cache_with_pathname.send(:file_path_key, key)
  end

  # Test that generated cache keys are short enough to have Tempfile stuff added to them and
  # remain valid
  def test_filename_max_size
    key = "#{'A' * ActiveSupport::Cache::FileStore::FILENAME_MAX_SIZE}"
    path = @cache.send(:normalize_key, key, {})
    basename = File.basename(path)
    dirname = File.dirname(path)
    Dir::Tmpname.create(basename, Dir.tmpdir + dirname) do |tmpname, n, opts|
      assert File.basename(tmpname + ".lock").length <= 255, "Temp filename too long: #{File.basename(tmpname + '.lock').length}"
    end
  end

  # Because file systems have a maximum filename size, filenames > max size should be split in to directories
  # If filename is 'AAAAB', where max size is 4, the returned path should be AAAA/B
  def test_key_transformation_max_filename_size
    key = "#{'A' * ActiveSupport::Cache::FileStore::FILENAME_MAX_SIZE}B"
    path = @cache.send(:normalize_key, key, {})
    assert path.split("/").all? { |dir_name| dir_name.size <= ActiveSupport::Cache::FileStore::FILENAME_MAX_SIZE }
    assert_equal "B", File.basename(path)
  end

  # If nothing has been stored in the cache, there is a chance the cache directory does not yet exist
  # Ensure delete_matched gracefully handles this case
  def test_delete_matched_when_cache_directory_does_not_exist
    assert_nothing_raised do
      ActiveSupport::Cache::FileStore.new("/test/cache/directory").delete_matched(/does_not_exist/)
    end
  end

  # delete must prune empty bucket dirs but never the cache root or its parent.
  def test_delete_does_not_delete_empty_parent_dir
    sub_cache_dir = File.join(cache_dir, "subdir/")
    sub_cache_store = ActiveSupport::Cache::FileStore.new(sub_cache_dir)
    assert_nothing_raised do
      assert sub_cache_store.write("foo", "bar")
      assert sub_cache_store.delete("foo")
    end
    assert File.exist?(cache_dir), "Parent of top level cache dir was deleted!"
    assert File.exist?(sub_cache_dir), "Top level cache dir was deleted!"
    assert_empty Dir.children(sub_cache_dir)
  end

  # Read failures are logged (not raised) by read_serialized_entry.
  def test_log_exception_when_cache_read_fails
    File.stub(:exist?, -> { raise StandardError.new("failed") }) do
      @cache.send(:read_entry, "winston", **{})
      assert_predicate @buffer.string, :present?
    end
  end

  def test_cleanup_removes_all_expired_entries
    time = Time.now
    @cache.write("foo", "bar", expires_in: 10)
    @cache.write("baz", "qux")
    @cache.write("quux", "corge", expires_in: 20)
    Time.stub(:now, time + 15) do
      @cache.cleanup
      assert_not @cache.exist?("foo")
      assert @cache.exist?("baz")
      assert @cache.exist?("quux")
      assert_equal 2, Dir.glob(File.join(cache_dir, "**")).size
    end
  end

  # Cleanup must tolerate files in the cache dir that are not cache entries.
  def test_cleanup_when_non_active_support_cache_file_exists
    cache_file_path = @cache.send(:normalize_key, "foo", nil)
    FileUtils.makedirs(File.dirname(cache_file_path))
    File.atomic_write(cache_file_path, cache_dir) { |f| Marshal.dump({ "foo": "bar" }, f) }
    assert_nothing_raised { @cache.cleanup }
    assert_equal 1, Dir.glob(File.join(cache_dir, "**")).size
  end

  # unless_exist skips the write even when the stored value is nil.
  def test_write_with_unless_exist
    assert_equal true, @cache.write(1, "aaaaaaaaaa")
    assert_equal false, @cache.write(1, "aaaaaaaaaa", unless_exist: true)
    @cache.write(1, nil)
    assert_equal false, @cache.write(1, "aaaaaaaaaa", unless_exist: true)
  end
end
# Re-runs the entire FileStore suite with the 7.0 cache serialization format,
# plus cross-format compatibility checks against the 6.1 format.
class OptimizedFileStoreTest < FileStoreTest
  def setup
    @previous_format = ActiveSupport::Cache.format_version
    ActiveSupport::Cache.format_version = 7.0
    super
  end

  # An entry written by a 6.1-format store must be readable by a 7.0 store.
  def test_forward_compatibility
    previous_format = ActiveSupport::Cache.format_version
    ActiveSupport::Cache.format_version = 6.1
    @old_store = lookup_store
    ActiveSupport::Cache.format_version = previous_format

    @old_store.write("foo", "bar")
    assert_equal "bar", @cache.read("foo")
  end

  # An entry written by a 7.0-format store must be readable by a 6.1 store.
  def test_backward_compatibility
    previous_format = ActiveSupport::Cache.format_version
    ActiveSupport::Cache.format_version = 6.1
    @old_store = lookup_store
    ActiveSupport::Cache.format_version = previous_format

    @cache.write("foo", "bar")
    assert_equal "bar", @old_store.read("foo")
  end

  def teardown
    super
    # Restore the global format so later suites are unaffected.
    ActiveSupport::Cache.format_version = @previous_format
  end
end
# frozen_string_literal: true | |
require_relative "../abstract_unit" | |
require "active_support/core_ext/file" | |
class AtomicWriteTest < ActiveSupport::TestCase
  # The destination file must only appear after the block has returned.
  def test_atomic_write_without_errors
    contents = "Atomic Text"
    File.atomic_write(file_name, Dir.pwd) do |file|
      file.write(contents)
      assert_not File.exist?(file_name)
    end
    assert File.exist?(file_name)
    assert_equal contents, File.read(file_name)
  ensure
    File.unlink(file_name) rescue nil
  end

  # A raising block must leave no file behind.
  def test_atomic_write_doesnt_write_when_block_raises
    File.atomic_write(file_name) do |file|
      file.write("testing")
      raise "something bad"
    end
  rescue
    assert_not File.exist?(file_name)
  end

  # Rewriting an existing file must keep its non-default permission bits.
  def test_atomic_write_preserves_file_permissions
    contents = "Atomic Text"
    File.open(file_name, "w", 0755) do |file|
      file.write(contents)
      assert File.exist?(file_name)
    end
    assert File.exist?(file_name)
    assert_equal 0100755 & ~File.umask, file_mode
    assert_equal contents, File.read(file_name)

    File.atomic_write(file_name, Dir.pwd) do |file|
      file.write(contents)
      assert File.exist?(file_name)
    end
    assert File.exist?(file_name)
    assert_equal 0100755 & ~File.umask, file_mode
    assert_equal contents, File.read(file_name)
  ensure
    File.unlink(file_name) rescue nil
  end

  # New files get the default mode probed via File.probe_stat_in.
  def test_atomic_write_preserves_default_file_permissions
    contents = "Atomic Text"
    File.atomic_write(file_name, Dir.pwd) do |file|
      file.write(contents)
      assert_not File.exist?(file_name)
    end
    assert File.exist?(file_name)
    assert_equal File.probe_stat_in(Dir.pwd).mode, file_mode
    assert_equal contents, File.read(file_name)
  ensure
    File.unlink(file_name) rescue nil
  end

  # With no explicit temp dir, permissions still match the target directory.
  def test_atomic_write_preserves_file_permissions_same_directory
    Dir.mktmpdir do |temp_dir|
      File.chmod 0700, temp_dir

      probed_permissions = File.probe_stat_in(temp_dir).mode.to_s(8)

      File.atomic_write(File.join(temp_dir, file_name), &:close)

      actual_permissions = File.stat(File.join(temp_dir, file_name)).mode.to_s(8)

      assert_equal actual_permissions, probed_permissions
    end
  end

  def test_atomic_write_returns_result_from_yielded_block
    block_return_value = File.atomic_write(file_name, Dir.pwd) do |file|
      "Hello world!"
    end

    assert_equal "Hello world!", block_return_value
  ensure
    File.unlink(file_name) rescue nil
  end

  def test_probe_stat_in_when_no_dir
    assert_nil File.probe_stat_in("/dir/does/not/exist")
  end

  private
    # Per-process file name so parallel test workers don't collide.
    def file_name
      "atomic-#{Process.pid}.file"
    end

    def file_mode
      File.stat(file_name).mode
    end
end
# frozen_string_literal: true | |
require "active_support/core_ext/time/calculations" | |
module ActiveSupport
  # FileUpdateChecker specifies the API used by Rails to watch files
  # and control reloading. The API depends on four methods:
  #
  # * +initialize+ which expects two parameters and one block as
  #   described below.
  #
  # * +updated?+ which returns a boolean if there were updates in
  #   the filesystem or not.
  #
  # * +execute+ which executes the given block on initialization
  #   and updates the latest watched files and timestamp.
  #
  # * +execute_if_updated+ which just executes the block if it was updated.
  #
  # After initialization, a call to +execute_if_updated+ must execute
  # the block only if there was really a change in the filesystem.
  #
  # This class is used by Rails to reload the I18n framework whenever
  # they are changed upon a new request.
  #
  #   i18n_reloader = ActiveSupport::FileUpdateChecker.new(paths) do
  #     I18n.reload!
  #   end
  #
  #   ActiveSupport::Reloader.to_prepare do
  #     i18n_reloader.execute_if_updated
  #   end
  class FileUpdateChecker
    # It accepts two parameters on initialization. The first is an array
    # of files and the second is an optional hash of directories. The hash must
    # have directories as keys and the value is an array of extensions to be
    # watched under that directory.
    #
    # This method must also receive a block that will be called once a path
    # changes. The array of files and list of directories cannot be changed
    # after FileUpdateChecker has been initialized.
    def initialize(files, dirs = {}, &block)
      unless block
        raise ArgumentError, "A block is required to initialize a FileUpdateChecker"
      end

      @files = files.freeze
      @glob = compile_glob(dirs)
      @block = block

      # @watched/@updated_at cache the snapshot observed by +updated?+ until
      # +execute+ consumes it; @last_* hold the state as of the last execute.
      @watched = nil
      @updated_at = nil

      @last_watched = watched
      @last_update_at = updated_at(@last_watched)
    end

    # Check if any of the entries were updated. If so, the watched and/or
    # updated_at values are cached until the block is executed via +execute+
    # or +execute_if_updated+.
    def updated?
      current_watched = watched
      # A different file count means something was added or removed.
      if @last_watched.size != current_watched.size
        @watched = current_watched
        true
      else
        # Same set of files: fall back to comparing the newest mtime.
        current_updated_at = updated_at(current_watched)
        if @last_update_at < current_updated_at
          @watched = current_watched
          @updated_at = current_updated_at
          true
        else
          false
        end
      end
    end

    # Executes the given block and updates the latest watched files and
    # timestamp.
    def execute
      @last_watched = watched
      @last_update_at = updated_at(@last_watched)
      @block.call
    ensure
      # Drop the cached snapshot so the next +updated?+ re-reads the filesystem.
      @watched = nil
      @updated_at = nil
    end

    # Execute the block given if updated.
    def execute_if_updated
      if updated?
        yield if block_given?
        execute
        true
      else
        false
      end
    end

    private
      # Current list of watched paths: the explicit files that exist plus any
      # glob matches. Returns the snapshot cached by +updated?+ when present.
      def watched
        @watched || begin
          all = @files.select { |f| File.exist?(f) }
          all.concat(Dir[@glob]) if @glob
          all
        end
      end

      # Newest known mtime for +paths+ (epoch when empty), unless a snapshot
      # was already cached by +updated?+.
      def updated_at(paths)
        @updated_at || max_mtime(paths) || Time.at(0)
      end

      # This method returns the maximum mtime of the files in +paths+, or +nil+
      # if the array is empty.
      #
      # Files with a mtime in the future are ignored. Such abnormal situation
      # can happen for example if the user changes the clock by hand. It is
      # healthy to consider this edge case because with mtimes in the future
      # reloading is not triggered.
      def max_mtime(paths)
        time_now = Time.now
        max_mtime = nil

        # Time comparisons are performed with #compare_without_coercion because
        # AS redefines these operators in a way that is much slower and does not
        # bring any benefit in this particular code.
        #
        # Read t1.compare_without_coercion(t2) < 0 as t1 < t2.
        paths.each do |path|
          mtime = File.mtime(path)

          next if time_now.compare_without_coercion(mtime) < 0

          if max_mtime.nil? || max_mtime.compare_without_coercion(mtime) < 0
            max_mtime = mtime
          end
        end

        max_mtime
      end

      # Builds a single Dir.glob pattern covering every watched directory and
      # extension list; returns +nil+ when no directories are watched.
      def compile_glob(hash)
        hash.freeze # Freeze so changes aren't accidentally pushed
        return if hash.empty?

        globs = hash.map do |key, value|
          "#{escape(key)}/**/*#{compile_ext(value)}"
        end
        "{#{globs.join(",")}}"
      end

      # Commas separate alternatives inside a brace glob, so escape them.
      def escape(key)
        key.gsub(",", '\,')
      end

      # ".{rb,erb}"-style suffix for a list of extensions, or +nil+ for none.
      def compile_ext(array)
        array = Array(array)
        return if array.empty?
        ".{#{array.join(",")}}"
      end
  end
end
# frozen_string_literal: true | |
require_relative "abstract_unit" | |
require_relative "file_update_checker_shared_tests" | |
class FileUpdateCheckerTest < ActiveSupport::TestCase
  include FileUpdateCheckerSharedTests

  # Factory the shared tests use to build the checker implementation under test.
  def new_checker(files = [], dirs = {}, &block)
    ActiveSupport::FileUpdateChecker.new(files, dirs, &block)
  end

  # Mtime resolution can be a full second, so pause before touching.
  def touch(files)
    sleep 1 # let's wait a bit to ensure there's a new mtime
    super
  end
end
# frozen_string_literal: true | |
class String
  # Returns a copy of the string with surrounding whitespace trimmed and
  # every interior run of consecutive whitespace collapsed to one space.
  #
  # Handles both ASCII and Unicode whitespace.
  #
  #   %{ Multi-line
  #      string }.squish                   # => "Multi-line string"
  #   " foo   bar    \n   \t   boo".squish # => "foo bar boo"
  def squish
    dup.squish!
  end

  # Performs a destructive squish. See String#squish.
  #
  #   str = " foo   bar    \n   \t   boo"
  #   str.squish!                         # => "foo bar boo"
  #   str                                 # => "foo bar boo"
  def squish!
    # Collapse every (Unicode) whitespace run first, then trim the ends.
    gsub!(/[[:space:]]+/, " ")
    strip!
    self
  end

  # Returns a new string with all occurrences of the patterns removed.
  #
  #   str = "foo bar test"
  #   str.remove(" test")                 # => "foo bar"
  #   str.remove(" test", /bar/)          # => "foo "
  #   str                                 # => "foo bar test"
  def remove(*patterns)
    dup.remove!(*patterns)
  end

  # Alters the string by removing all occurrences of the patterns.
  #
  #   str = "foo bar test"
  #   str.remove!(" test", /bar/)         # => "foo "
  #   str                                 # => "foo "
  def remove!(*patterns)
    patterns.each { |pattern| gsub!(pattern, "") }
    self
  end

  # Truncates the string to at most <tt>truncate_at</tt> characters,
  # replacing the tail with the <tt>:omission</tt> string (default "...")
  # so the total length never exceeds <tt>truncate_at</tt>:
  #
  #   'Once upon a time in a world far far away'.truncate(27)
  #   # => "Once upon a time in a wo..."
  #
  # Pass a string or regexp <tt>:separator</tt> to cut at a natural break:
  #
  #   'Once upon a time in a world far far away'.truncate(27, separator: ' ')
  #   # => "Once upon a time in a..."
  def truncate(truncate_at, options = {})
    return dup if length <= truncate_at

    omission = options[:omission] || "..."
    room = truncate_at - omission.length

    # With a separator, back up to the last occurrence that still fits.
    stop = room
    if (separator = options[:separator])
      stop = rindex(separator, room) || room
    end

    +"#{self[0, stop]}#{omission}"
  end

  # Truncates the string to at most <tt>truncate_at</tt> bytes without
  # splitting multibyte characters or breaking grapheme clusters
  # ("perceptual characters") at combining characters.
  #
  #   ("🔪" * 20).bytesize            # => 80
  #   ("🔪" * 20).truncate_bytes(20)  # => "🔪🔪🔪🔪…"
  #
  # The result ends with the <tt>:omission</tt> string (default "…") and
  # never exceeds <tt>truncate_at</tt> bytes.
  def truncate_bytes(truncate_at, omission: "…")
    omission ||= ""

    return dup if bytesize <= truncate_at

    if omission.bytesize > truncate_at
      raise ArgumentError, "Omission #{omission.inspect} is #{omission.bytesize}, larger than the truncation length of #{truncate_at} bytes"
    end
    return omission.dup if omission.bytesize == truncate_at

    # Walk grapheme clusters so no multibyte character or combining
    # sequence is ever split, stopping once the omission no longer fits.
    budget = truncate_at - omission.bytesize
    truncated = self.class.new
    each_grapheme_cluster do |grapheme|
      break if truncated.bytesize + grapheme.bytesize > budget
      truncated << grapheme
    end
    truncated << omission
  end

  # Truncates the string after <tt>words_count</tt> words, appending the
  # <tt>:omission</tt> string (default "..."):
  #
  #   'Once upon a time in a world far far away'.truncate_words(4)
  #   # => "Once upon a time..."
  #
  # Words are delimited by the string or regexp <tt>:separator</tt>
  # (default <tt>/\s+/</tt>):
  #
  #   'Once<br>upon<br>a<br>time<br>in<br>a<br>world'.truncate_words(5, separator: '<br>')
  #   # => "Once<br>upon<br>a<br>time<br>in..."
  def truncate_words(words_count, options = {})
    sep = options[:separator] || /\s+/
    sep = Regexp.escape(sep.to_s) unless Regexp === sep

    match = /\A((?>.+?#{sep}){#{words_count - 1}}.+?)#{sep}.*/m.match(self)
    if match
      match[1] + (options[:omission] || "...")
    else
      dup
    end
  end
end
# frozen_string_literal: true | |
module ActiveSupport
  module ForkTracker # :nodoc:
    # On Ruby 3.1+, Process._fork is the single choke point for every fork
    # variant, so prepending here covers Kernel#fork, Process.fork, etc.
    module ModernCoreExt
      def _fork
        pid = super
        if pid == 0
          # pid 0 means we are in the child process.
          ForkTracker.check!
        end
        pid
      end
    end

    # Fallback for Rubies without Process._fork: wrap the public fork entry
    # points individually.
    module CoreExt
      def fork(...)
        if block_given?
          super do
            # The block only runs in the child.
            ForkTracker.check!
            yield
          end
        else
          unless pid = super
            # `super` returned nil, so we are in the child process.
            ForkTracker.check!
          end
          pid
        end
      end
    end

    # Variant that keeps #fork private, matching Kernel's own visibility.
    module CoreExtPrivate
      include CoreExt
      private :fork
    end

    @pid = Process.pid
    @callbacks = []

    class << self
      # Runs the registered callbacks if the current PID differs from the
      # recorded one (i.e. we are in a freshly forked child), then records
      # the new PID so callbacks fire at most once per fork.
      def check!
        new_pid = Process.pid
        if @pid != new_pid
          @callbacks.each(&:call)
          @pid = new_pid
        end
      end

      # Installs the fork hooks appropriate for the running Ruby. No-op on
      # platforms without fork (e.g. Windows, JRuby).
      def hook!
        if Process.respond_to?(:_fork) # Ruby 3.1+
          ::Process.singleton_class.prepend(ModernCoreExt)
        elsif Process.respond_to?(:fork)
          ::Object.prepend(CoreExtPrivate) if RUBY_VERSION < "3.0"
          ::Kernel.prepend(CoreExtPrivate)
          ::Kernel.singleton_class.prepend(CoreExt)
          ::Process.singleton_class.prepend(CoreExt)
        end
      end

      # Registers +block+ to run in the child after a fork. Returns the block
      # so callers can later pass it to +unregister+.
      def after_fork(&block)
        @callbacks << block
        block
      end

      def unregister(callback)
        @callbacks.delete(callback)
      end
    end
  end
end

ActiveSupport::ForkTracker.hook!
# frozen_string_literal: true | |
require_relative "abstract_unit" | |
class ForkTrackerTest < ActiveSupport::TestCase | |
def test_object_fork | |
read, write = IO.pipe | |
called = false | |
handler = ActiveSupport::ForkTracker.after_fork do | |
called = true | |
write.write "forked" | |
end | |
assert_not respond_to?(:fork) | |
pid = fork do | |
read.close | |
write.close | |
exit! | |
end | |
write.close | |
Process.waitpid(pid) | |
assert_equal "forked", read.read | |
read.close | |
assert_not called | |
ensure | |
ActiveSupport::ForkTracker.unregister(handler) | |
end | |
def test_object_fork_without_block | |
read, write = IO.pipe | |
called = false | |
handler = ActiveSupport::ForkTracker.after_fork do | |
called = true | |
write.write "forked" | |
end | |
if pid = fork | |
write.close | |
Process.waitpid(pid) | |
assert_equal "forked", read.read | |
read.close | |
assert_not called | |
else | |
read.close | |
write.close | |
exit! | |
end | |
ensure | |
ActiveSupport::ForkTracker.unregister(handler) | |
end | |
def test_process_fork | |
read, write = IO.pipe | |
called = false | |
handler = ActiveSupport::ForkTracker.after_fork do | |
called = true | |
write.write "forked" | |
end | |
pid = Process.fork do | |
read.close | |
write.close | |
exit! | |
end | |
write.close | |
Process.waitpid(pid) | |
assert_equal "forked", read.read | |
read.close | |
assert_not called | |
ensure | |
ActiveSupport::ForkTracker.unregister(handler) | |
end | |
def test_process_fork_without_block | |
read, write = IO.pipe | |
called = false | |
handler = ActiveSupport::ForkTracker.after_fork do | |
called = true | |
write.write "forked" | |
end | |
if pid = Process.fork | |
write.close | |
Process.waitpid(pid) | |
assert_equal "forked", read.read | |
read.close | |
assert_not called | |
else | |
read.close | |
write.close | |
exit! | |
end | |
ensure | |
ActiveSupport::ForkTracker.unregister(handler) | |
end | |
def test_kernel_fork | |
read, write = IO.pipe | |
called = false | |
handler = ActiveSupport::ForkTracker.after_fork do | |
called = true | |
write.write "forked" | |
end | |
pid = Kernel.fork do | |
read.close | |
write.close | |
exit! | |
end | |
write.close | |
Process.waitpid(pid) | |
assert_equal "forked", read.read | |
read.close | |
assert_not called | |
ensure | |
ActiveSupport::ForkTracker.unregister(handler) | |
end | |
# Same as test_kernel_fork, but exercises the block-less Kernel.fork form
# (parent receives the pid, child receives nil).
def test_kernel_fork_without_block
  read, write = IO.pipe
  called = false
  handler = ActiveSupport::ForkTracker.after_fork do
    called = true
    write.write "forked"
  end
  if pid = Kernel.fork
    # Parent: close our write end so `read.read` hits EOF after the child exits.
    write.close
    Process.waitpid(pid)
    assert_equal "forked", read.read
    read.close
    # The callback must not have run in the parent process.
    assert_not called
  else
    # Child: exit immediately; the callback fired on fork.
    read.close
    write.close
    exit!
  end
ensure
  ActiveSupport::ForkTracker.unregister(handler)
end
# ForkTracker.check! should fire callbacks only when Process.pid has changed
# since the last check, and only once per change regardless of how many
# times check! is called.
def test_check
  count = 0
  handler = ActiveSupport::ForkTracker.after_fork { count += 1 }
  # Same pid: repeated checks are no-ops.
  assert_no_difference -> { count } do
    3.times { ActiveSupport::ForkTracker.check! }
  end
  # A stubbed pid simulates a fork: exactly one callback run, even across
  # three consecutive checks.
  Process.stub(:pid, Process.pid + 1) do
    assert_difference -> { count }, +1 do
      3.times { ActiveSupport::ForkTracker.check! }
    end
  end
  # Dropping the stub looks like another pid change: one more run.
  assert_difference -> { count }, +1 do
    3.times { ActiveSupport::ForkTracker.check! }
  end
ensure
  ActiveSupport::ForkTracker.unregister(handler)
end
# Even a BasicObject subclass that mixes in ::Kernel — and therefore calls
# the private Kernel#fork without publicly responding to :fork — must go
# through the tracked fork, firing callbacks in the child only.
def test_basic_object_with_kernel_fork
  read, write = IO.pipe
  called = false
  handler = ActiveSupport::ForkTracker.after_fork do
    called = true
    write.write "forked"
  end
  klass = Class.new(BasicObject) do
    include ::Kernel
    # Wrapper so the private Kernel#fork can be reached from the test.
    def fark(&block)
      fork(&block)
    end
  end
  object = klass.new
  # Sanity check: #fork is private, so the object does not respond to it.
  assert_not object.respond_to?(:fork)
  pid = object.fark do
    # Child: exit immediately; the callback fired on fork.
    read.close
    write.close
    exit!
  end
  # Parent: close our write end so `read.read` hits EOF after the child exits.
  write.close
  Process.waitpid(pid)
  assert_equal "forked", read.read
  read.close
  # The callback must not have run in the parent process.
  assert_not called
ensure
  ActiveSupport::ForkTracker.unregister(handler)
end
end if Process.respond_to?(:fork) |
# frozen_string_literal: true | |
module ActiveSupport
  module VERSION
    MAJOR = 7
    MINOR = 1
    TINY  = 0
    PRE   = "alpha"

    # `compact` drops PRE when it is nil, e.g. for final releases.
    STRING = [MAJOR, MINOR, TINY, PRE].compact.join(".")
  end

  # Returns the version of the currently loaded Active Support as a
  # <tt>Gem::Version</tt>.
  def self.gem_version
    Gem::Version.new(VERSION::STRING)
  end
end
# frozen_string_literal: true | |
class Array
  # Splits or iterates over the array in groups of size +number+. Trailing
  # slots in the final group are padded with +fill_with+, unless +fill_with+
  # is +false+, in which case the final group is simply shorter.
  #
  #   %w(1 2 3 4 5 6 7 8 9 10).in_groups_of(3) { |group| p group }
  #   ["1", "2", "3"]
  #   ["4", "5", "6"]
  #   ["7", "8", "9"]
  #   ["10", nil, nil]
  #
  #   %w(1 2 3 4 5).in_groups_of(2, ' ') { |group| p group }
  #   ["1", "2"]
  #   ["3", "4"]
  #   ["5", " "]
  #
  #   %w(1 2 3 4 5).in_groups_of(2, false) { |group| p group }
  #   ["1", "2"]
  #   ["3", "4"]
  #   ["5"]
  def in_groups_of(number, fill_with = nil, &block)
    unless number.to_i > 0
      raise ArgumentError,
        "Group size must be a positive integer, was #{number.inspect}"
    end

    collection =
      if fill_with == false
        self
      else
        # Pad out to the next multiple of +number+; the outer modulo keeps
        # us from appending a group made entirely of filler.
        missing = (number - size % number) % number
        dup.concat(Array.new(missing, fill_with))
      end

    if block
      collection.each_slice(number, &block)
    else
      collection.each_slice(number).to_a
    end
  end

  # Splits or iterates over the array in +number+ of groups, padding any
  # remaining slots with +fill_with+ unless it is +false+.
  #
  #   %w(1 2 3 4 5 6 7 8 9 10).in_groups(3) { |group| p group }
  #   ["1", "2", "3", "4"]
  #   ["5", "6", "7", nil]
  #   ["8", "9", "10", nil]
  #
  #   %w(1 2 3 4 5 6 7 8 9 10).in_groups(3, ' ') { |group| p group }
  #   ["1", "2", "3", "4"]
  #   ["5", "6", "7", " "]
  #   ["8", "9", "10", " "]
  #
  #   %w(1 2 3 4 5 6 7).in_groups(3, false) { |group| p group }
  #   ["1", "2", "3"]
  #   ["4", "5"]
  #   ["6", "7"]
  def in_groups(number, fill_with = nil, &block)
    # Each group holds either +quotient+ or +quotient + 1+ elements; the
    # first +remainder+ groups take the extra element.
    quotient = size.div(number)
    remainder = size % number

    groups = []
    offset = 0
    number.times do |index|
      length = index < remainder ? quotient + 1 : quotient
      group = slice(offset, length)
      # Short groups receive one filler element so every group ends up the
      # same size — unless padding was disabled with +fill_with+ = false.
      group << fill_with if fill_with != false && remainder > 0 && length == quotient
      groups << group
      offset += length
    end

    block ? groups.each(&block) : groups
  end

  # Divides the array into one or more subarrays based on a delimiting +value+
  # or the result of an optional block. The receiver is not mutated.
  #
  #   [1, 2, 3, 4, 5].split(3)              # => [[1, 2], [4, 5]]
  #   (1..10).to_a.split { |i| i % 3 == 0 } # => [[1, 2], [4, 5], [7, 8], [10]]
  def split(value = nil, &block)
    remaining = dup
    chunks = []

    loop do
      position = block ? remaining.index(&block) : remaining.index(value)
      break unless position
      chunks << remaining.shift(position)
      remaining.shift # discard the delimiter itself
    end

    chunks << remaining
  end
end
# frozen_string_literal: true | |
require_relative "../../abstract_unit" | |
require "active_support/core_ext/array" | |
# Covers Array#in_groups_of and Array#in_groups: block iteration vs. returned
# arrays, default/custom/disabled padding, and argument validation.
class GroupingTest < ActiveSupport::TestCase
  def test_in_groups_of_with_perfect_fit
    collected = []
    ("a".."i").to_a.in_groups_of(3) { |group| collected << group }
    expected = [%w(a b c), %w(d e f), %w(g h i)]
    assert_equal expected, collected
    assert_equal expected, ("a".."i").to_a.in_groups_of(3)
  end

  def test_in_groups_of_with_padding
    collected = []
    ("a".."g").to_a.in_groups_of(3) { |group| collected << group }
    assert_equal [%w(a b c), %w(d e f), ["g", nil, nil]], collected
  end

  def test_in_groups_of_pads_with_specified_values
    collected = []
    ("a".."g").to_a.in_groups_of(3, "foo") { |group| collected << group }
    assert_equal [%w(a b c), %w(d e f), %w(g foo foo)], collected
  end

  def test_in_groups_of_without_padding
    collected = []
    ("a".."g").to_a.in_groups_of(3, false) { |group| collected << group }
    assert_equal [%w(a b c), %w(d e f), %w(g)], collected
  end

  def test_in_groups_returned_array_size
    array = (1..7).to_a
    # Asking for more groups than elements must still yield that many groups.
    (1..array.size + 1).each do |count|
      assert_equal count, array.in_groups(count).size
    end
  end

  def test_in_groups_with_empty_array
    assert_equal [[], [], []], [].in_groups(3)
  end

  def test_in_groups_with_block
    array = (1..9).to_a
    collected = []
    array.in_groups(3) { |group| collected << group }
    # The block form must yield exactly what the non-block form returns.
    assert_equal array.in_groups(3), collected
  end

  def test_in_groups_with_perfect_fit
    assert_equal [[1, 2, 3], [4, 5, 6], [7, 8, 9]], (1..9).to_a.in_groups(3)
  end

  def test_in_groups_with_padding
    array = (1..7).to_a
    assert_equal [[1, 2, 3], [4, 5, nil], [6, 7, nil]], array.in_groups(3)
    assert_equal [[1, 2, 3], [4, 5, "foo"], [6, 7, "foo"]], array.in_groups(3, "foo")
  end

  def test_in_groups_without_padding
    assert_equal [[1, 2, 3], [4, 5], [6, 7]], (1..7).to_a.in_groups(3, false)
  end

  # NOTE(review): despite the name, this exercises in_groups_of, which
  # rejects non-positive (and nil) group sizes.
  def test_in_groups_invalid_argument
    [0, -1, nil].each do |invalid_size|
      assert_raises(ArgumentError) { [].in_groups_of(invalid_size) }
    end
  end
end
# Covers Array#split: value and block delimiters, edge/repeated delimiters,
# and the guarantee that the receiver is never mutated.
class SplitTest < ActiveSupport::TestCase
  def test_split_with_empty_array
    assert_equal [[]], [].split(0)
  end

  def test_split_with_argument
    array = [1, 2, 3, 4, 5]
    assert_equal [[1, 2], [4, 5]], array.split(3)
    assert_equal [[1, 2, 3, 4, 5]], array.split(0)
    # The receiver must be left untouched.
    assert_equal [1, 2, 3, 4, 5], array
  end

  def test_split_with_block
    array = (1..10).to_a
    assert_equal [[1, 2], [4, 5], [7, 8], [10]], (array.split { |i| i % 3 == 0 })
    assert_equal (1..10).to_a, array
  end

  def test_split_with_edge_values
    array = [1, 2, 3, 4, 5]
    # Delimiters at the boundaries produce empty leading/trailing chunks.
    assert_equal [[], [2, 3, 4, 5]], array.split(1)
    assert_equal [[1, 2, 3, 4], []], array.split(5)
    assert_equal [[], [2, 3, 4], []], (array.split { |i| i == 1 || i == 5 })
    assert_equal [1, 2, 3, 4, 5], array
  end

  def test_split_with_repeated_values
    array = [1, 2, 3, 5, 5, 3, 4, 6, 2, 1, 3]
    # Adjacent delimiters produce empty chunks between them.
    assert_equal [[1, 2], [5, 5], [4, 6, 2, 1], []], array.split(3)
    assert_equal [[1, 2, 3], [], [3, 4, 6, 2, 1, 3]], array.split(5)
    assert_equal [[1, 2], [], [], [], [4, 6, 2, 1], []], (array.split { |i| i == 3 || i == 5 })
    assert_equal [1, 2, 3, 5, 5, 3, 4, 6, 2, 1, 3], array
  end
end
# frozen_string_literal: true | |
require "zlib" | |
require "stringio" | |
module ActiveSupport
  # A convenient wrapper around the zlib standard library that allows
  # compression/decompression of strings with gzip.
  #
  #   gzip = ActiveSupport::Gzip.compress('compress me!')
  #   # => "\x1F\x8B\b\x00o\x8D\xCDO\x00\x03K\xCE\xCF-(J-.V\xC8MU\x04\x00R>n\x83\f\x00\x00\x00"
  #
  #   ActiveSupport::Gzip.decompress(gzip)
  #   # => "compress me!"
  module Gzip
    # A binary-encoded StringIO whose #close rewinds instead of closing, so
    # the buffer stays readable after GzipWriter finishes with it.
    class Stream < StringIO
      def initialize(*)
        super
        set_encoding "BINARY"
      end

      def close
        rewind
      end
    end

    # Decompresses a gzipped string.
    def self.decompress(source)
      Zlib::GzipReader.wrap(StringIO.new(source), &:read)
    end

    # Compresses a string using gzip with the given compression +level+
    # and +strategy+ (both default to zlib's defaults).
    def self.compress(source, level = Zlib::DEFAULT_COMPRESSION, strategy = Zlib::DEFAULT_STRATEGY)
      buffer = Stream.new
      writer = Zlib::GzipWriter.new(buffer, level, strategy)
      writer.write(source)
      writer.close
      buffer.string
    end
  end
end
# frozen_string_literal: true | |
require_relative "abstract_unit" | |
require "active_support/core_ext/object/blank" | |
# Exercises ActiveSupport::Gzip: round-tripping under various levels and
# strategies, binary output encoding, the recorded compression level, and
# CRC verification during decompression.
class GzipTest < ActiveSupport::TestCase
  def test_compress_should_decompress_to_the_same_value
    # Round trip with the default, explicit levels, explicit strategies,
    # and nil placeholders standing in for the defaults.
    assert_equal "Hello World", ActiveSupport::Gzip.decompress(ActiveSupport::Gzip.compress("Hello World"))
    assert_equal "Hello World", ActiveSupport::Gzip.decompress(ActiveSupport::Gzip.compress("Hello World", Zlib::NO_COMPRESSION))
    assert_equal "Hello World", ActiveSupport::Gzip.decompress(ActiveSupport::Gzip.compress("Hello World", Zlib::BEST_SPEED))
    assert_equal "Hello World", ActiveSupport::Gzip.decompress(ActiveSupport::Gzip.compress("Hello World", Zlib::BEST_COMPRESSION))
    assert_equal "Hello World", ActiveSupport::Gzip.decompress(ActiveSupport::Gzip.compress("Hello World", nil, Zlib::FILTERED))
    assert_equal "Hello World", ActiveSupport::Gzip.decompress(ActiveSupport::Gzip.compress("Hello World", nil, Zlib::HUFFMAN_ONLY))
    assert_equal "Hello World", ActiveSupport::Gzip.decompress(ActiveSupport::Gzip.compress("Hello World", nil, nil))
  end

  def test_compress_should_return_a_binary_string
    compressed = ActiveSupport::Gzip.compress("")
    assert_equal Encoding.find("binary"), compressed.encoding
    # Even an empty input produces gzip header/trailer bytes.
    assert_not compressed.blank?, "a compressed blank string should not be blank"
  end

  def test_compress_should_return_gzipped_string_by_compression_level
    source_string = "Hello World" * 100
    # GzipReader#level reports the compression level recorded in the stream.
    gzipped_by_speed = ActiveSupport::Gzip.compress(source_string, Zlib::BEST_SPEED)
    assert_equal 1, Zlib::GzipReader.new(StringIO.new(gzipped_by_speed)).level
    gzipped_by_best_compression = ActiveSupport::Gzip.compress(source_string, Zlib::BEST_COMPRESSION)
    assert_equal 9, Zlib::GzipReader.new(StringIO.new(gzipped_by_best_compression)).level
    assert_equal true, (gzipped_by_best_compression.bytesize < gzipped_by_speed.bytesize)
  end

  def test_decompress_checks_crc
    compressed = ActiveSupport::Gzip.compress("Hello World")
    # The gzip trailer is 8 bytes: 4-byte CRC32 then 4-byte length.
    # Flipping bits in the first CRC byte corrupts the checksum.
    first_crc_byte_index = compressed.bytesize - 8
    compressed.setbyte(first_crc_byte_index, compressed.getbyte(first_crc_byte_index) ^ 0xff)
    assert_raises(Zlib::GzipFile::CRCError) do
      ActiveSupport::Gzip.decompress(compressed)
    end
  end
end
# frozen_string_literal: true | |
require "active_support/core_ext/hash/conversions" | |
require "active_support/core_ext/hash/deep_merge" | |
require "active_support/core_ext/hash/deep_transform_values" | |
require "active_support/core_ext/hash/except" | |
require "active_support/core_ext/hash/indifferent_access" | |
require "active_support/core_ext/hash/keys" | |
require "active_support/core_ext/hash/reverse_merge" | |
require "active_support/core_ext/hash/slice" |
# frozen_string_literal: true | |
require_relative "../abstract_unit" | |
require "bigdecimal" | |
require "yaml" | |
require "active_support/core_ext/hash" | |
require "active_support/core_ext/string/access" | |
require "active_support/core_ext/object/conversions" | |
require "active_support/core_ext/date/conversions" | |
require "active_support/core_ext/object/deep_dup" | |
require "active_support/inflections" | |
# Exercises the Hash core extensions: deep key/value transformation,
# symbolize/stringify (shallow, deep, bang and non-bang variants),
# assert_valid_keys, deep_merge, reverse_merge/with_defaults, slice!,
# extract!, and except/except!. Fixtures built in #setup cover string,
# symbol, mixed, integer, and non-symbolizable keys, plus hashes whose
# values are arrays of hashes (to prove the deep_* methods recurse into
# arrays).
class HashExtTest < ActiveSupport::TestCase
  def setup
    @strings = { "a" => 1, "b" => 2 }
    @nested_strings = { "a" => { "b" => { "c" => 3 } } }
    @symbols = { a: 1, b: 2 }
    @nested_symbols = { a: { b: { c: 3 } } }
    @mixed = { :a => 1, "b" => 2 }
    @nested_mixed = { "a" => { b: { "c" => 3 } } }
    @integers = { 0 => 1, 1 => 2 }
    @nested_integers = { 0 => { 1 => { 2 => 3 } } }
    # Array keys have no Symbol form — used to prove keys that cannot be
    # symbolized are passed through unchanged.
    @illegal_symbols = { [] => 3 }
    @nested_illegal_symbols = { [] => { [] => 3 } }
    @upcase_strings = { "A" => 1, "B" => 2 }
    @nested_upcase_strings = { "A" => { "B" => { "C" => 3 } } }
    @string_array_of_hashes = { "a" => [ { "b" => 2 }, { "c" => 3 }, 4 ] }
    @symbol_array_of_hashes = { a: [ { b: 2 }, { c: 3 }, 4 ] }
    @mixed_array_of_hashes = { a: [ { b: 2 }, { "c" => 3 }, 4 ] }
    @upcase_array_of_hashes = { "A" => [ { "B" => 2 }, { "C" => 3 }, 4 ] }
  end

  # Smoke test: all extension methods are defined on Hash.
  def test_methods
    h = {}
    assert_respond_to h, :deep_transform_keys
    assert_respond_to h, :deep_transform_keys!
    assert_respond_to h, :deep_transform_values
    assert_respond_to h, :deep_transform_values!
    assert_respond_to h, :symbolize_keys
    assert_respond_to h, :symbolize_keys!
    assert_respond_to h, :deep_symbolize_keys
    assert_respond_to h, :deep_symbolize_keys!
    assert_respond_to h, :stringify_keys
    assert_respond_to h, :stringify_keys!
    assert_respond_to h, :deep_stringify_keys
    assert_respond_to h, :deep_stringify_keys!
    assert_respond_to h, :to_options
    assert_respond_to h, :to_options!
    assert_respond_to h, :except
    assert_respond_to h, :except!
  end

  def test_deep_transform_keys
    assert_equal @nested_upcase_strings, @nested_symbols.deep_transform_keys { |key| key.to_s.upcase }
    assert_equal @nested_upcase_strings, @nested_strings.deep_transform_keys { |key| key.to_s.upcase }
    assert_equal @nested_upcase_strings, @nested_mixed.deep_transform_keys { |key| key.to_s.upcase }
    # Recurses into hashes nested inside arrays too.
    assert_equal @upcase_array_of_hashes, @string_array_of_hashes.deep_transform_keys { |key| key.to_s.upcase }
    assert_equal @upcase_array_of_hashes, @symbol_array_of_hashes.deep_transform_keys { |key| key.to_s.upcase }
    assert_equal @upcase_array_of_hashes, @mixed_array_of_hashes.deep_transform_keys { |key| key.to_s.upcase }
  end

  def test_deep_transform_keys_not_mutates
    transformed_hash = @nested_mixed.deep_dup
    transformed_hash.deep_transform_keys { |key| key.to_s.upcase }
    assert_equal @nested_mixed, transformed_hash
  end

  def test_deep_transform_keys!
    assert_equal @nested_upcase_strings, @nested_symbols.deep_dup.deep_transform_keys! { |key| key.to_s.upcase }
    assert_equal @nested_upcase_strings, @nested_strings.deep_dup.deep_transform_keys! { |key| key.to_s.upcase }
    assert_equal @nested_upcase_strings, @nested_mixed.deep_dup.deep_transform_keys! { |key| key.to_s.upcase }
    assert_equal @upcase_array_of_hashes, @string_array_of_hashes.deep_dup.deep_transform_keys! { |key| key.to_s.upcase }
    assert_equal @upcase_array_of_hashes, @symbol_array_of_hashes.deep_dup.deep_transform_keys! { |key| key.to_s.upcase }
    assert_equal @upcase_array_of_hashes, @mixed_array_of_hashes.deep_dup.deep_transform_keys! { |key| key.to_s.upcase }
  end

  def test_deep_transform_keys_with_bang_mutates
    transformed_hash = @nested_mixed.deep_dup
    transformed_hash.deep_transform_keys! { |key| key.to_s.upcase }
    assert_equal @nested_upcase_strings, transformed_hash
    # The fixture itself must be untouched (deep_dup isolated it).
    assert_equal({ "a" => { b: { "c" => 3 } } }, @nested_mixed)
  end

  def test_deep_transform_values
    assert_equal({ "a" => "1", "b" => "2" }, @strings.deep_transform_values { |value| value.to_s })
    assert_equal({ "a" => { "b" => { "c" => "3" } } }, @nested_strings.deep_transform_values { |value| value.to_s })
    assert_equal({ "a" => [ { "b" => "2" }, { "c" => "3" }, "4" ] }, @string_array_of_hashes.deep_transform_values { |value| value.to_s })
  end

  def test_deep_transform_values_not_mutates
    transformed_hash = @nested_mixed.deep_dup
    transformed_hash.deep_transform_values { |value| value.to_s }
    assert_equal @nested_mixed, transformed_hash
  end

  def test_deep_transform_values!
    assert_equal({ "a" => "1", "b" => "2" }, @strings.deep_transform_values! { |value| value.to_s })
    assert_equal({ "a" => { "b" => { "c" => "3" } } }, @nested_strings.deep_transform_values! { |value| value.to_s })
    assert_equal({ "a" => [ { "b" => "2" }, { "c" => "3" }, "4" ] }, @string_array_of_hashes.deep_transform_values! { |value| value.to_s })
  end

  def test_deep_transform_values_with_bang_mutates
    transformed_hash = @nested_mixed.deep_dup
    transformed_hash.deep_transform_values! { |value| value.to_s }
    assert_equal({ "a" => { b: { "c" => "3" } } }, transformed_hash)
    assert_equal({ "a" => { b: { "c" => 3 } } }, @nested_mixed)
  end

  def test_symbolize_keys
    assert_equal @symbols, @symbols.symbolize_keys
    assert_equal @symbols, @strings.symbolize_keys
    assert_equal @symbols, @mixed.symbolize_keys
  end

  def test_symbolize_keys_not_mutates
    transformed_hash = @mixed.dup
    transformed_hash.symbolize_keys
    assert_equal @mixed, transformed_hash
  end

  def test_deep_symbolize_keys
    assert_equal @nested_symbols, @nested_symbols.deep_symbolize_keys
    assert_equal @nested_symbols, @nested_strings.deep_symbolize_keys
    assert_equal @nested_symbols, @nested_mixed.deep_symbolize_keys
    assert_equal @symbol_array_of_hashes, @string_array_of_hashes.deep_symbolize_keys
    assert_equal @symbol_array_of_hashes, @symbol_array_of_hashes.deep_symbolize_keys
    assert_equal @symbol_array_of_hashes, @mixed_array_of_hashes.deep_symbolize_keys
  end

  def test_deep_symbolize_keys_not_mutates
    transformed_hash = @nested_mixed.deep_dup
    transformed_hash.deep_symbolize_keys
    assert_equal @nested_mixed, transformed_hash
  end

  def test_symbolize_keys!
    assert_equal @symbols, @symbols.dup.symbolize_keys!
    assert_equal @symbols, @strings.dup.symbolize_keys!
    assert_equal @symbols, @mixed.dup.symbolize_keys!
  end

  # NOTE(review): this shallow-mutation test calls deep_symbolize_keys!
  # rather than symbolize_keys!; on the flat @mixed fixture the two behave
  # identically, so the assertions hold either way — confirm if intentional.
  def test_symbolize_keys_with_bang_mutates
    transformed_hash = @mixed.dup
    transformed_hash.deep_symbolize_keys!
    assert_equal @symbols, transformed_hash
    assert_equal({ :a => 1, "b" => 2 }, @mixed)
  end

  def test_deep_symbolize_keys!
    assert_equal @nested_symbols, @nested_symbols.deep_dup.deep_symbolize_keys!
    assert_equal @nested_symbols, @nested_strings.deep_dup.deep_symbolize_keys!
    assert_equal @nested_symbols, @nested_mixed.deep_dup.deep_symbolize_keys!
    assert_equal @symbol_array_of_hashes, @string_array_of_hashes.deep_dup.deep_symbolize_keys!
    assert_equal @symbol_array_of_hashes, @symbol_array_of_hashes.deep_dup.deep_symbolize_keys!
    assert_equal @symbol_array_of_hashes, @mixed_array_of_hashes.deep_dup.deep_symbolize_keys!
  end

  def test_deep_symbolize_keys_with_bang_mutates
    transformed_hash = @nested_mixed.deep_dup
    transformed_hash.deep_symbolize_keys!
    assert_equal @nested_symbols, transformed_hash
    assert_equal({ "a" => { b: { "c" => 3 } } }, @nested_mixed)
  end

  def test_symbolize_keys_preserves_keys_that_cant_be_symbolized
    assert_equal @illegal_symbols, @illegal_symbols.symbolize_keys
    assert_equal @illegal_symbols, @illegal_symbols.dup.symbolize_keys!
  end

  def test_deep_symbolize_keys_preserves_keys_that_cant_be_symbolized
    assert_equal @nested_illegal_symbols, @nested_illegal_symbols.deep_symbolize_keys
    assert_equal @nested_illegal_symbols, @nested_illegal_symbols.deep_dup.deep_symbolize_keys!
  end

  def test_symbolize_keys_preserves_integer_keys
    assert_equal @integers, @integers.symbolize_keys
    assert_equal @integers, @integers.dup.symbolize_keys!
  end

  def test_deep_symbolize_keys_preserves_integer_keys
    assert_equal @nested_integers, @nested_integers.deep_symbolize_keys
    assert_equal @nested_integers, @nested_integers.deep_dup.deep_symbolize_keys!
  end

  def test_stringify_keys
    assert_equal @strings, @symbols.stringify_keys
    assert_equal @strings, @strings.stringify_keys
    assert_equal @strings, @mixed.stringify_keys
  end

  def test_stringify_keys_not_mutates
    transformed_hash = @mixed.dup
    transformed_hash.stringify_keys
    assert_equal @mixed, transformed_hash
  end

  def test_deep_stringify_keys
    assert_equal @nested_strings, @nested_symbols.deep_stringify_keys
    assert_equal @nested_strings, @nested_strings.deep_stringify_keys
    assert_equal @nested_strings, @nested_mixed.deep_stringify_keys
    assert_equal @string_array_of_hashes, @string_array_of_hashes.deep_stringify_keys
    assert_equal @string_array_of_hashes, @symbol_array_of_hashes.deep_stringify_keys
    assert_equal @string_array_of_hashes, @mixed_array_of_hashes.deep_stringify_keys
  end

  def test_deep_stringify_keys_not_mutates
    transformed_hash = @nested_mixed.deep_dup
    transformed_hash.deep_stringify_keys
    assert_equal @nested_mixed, transformed_hash
  end

  def test_stringify_keys!
    assert_equal @strings, @symbols.dup.stringify_keys!
    assert_equal @strings, @strings.dup.stringify_keys!
    assert_equal @strings, @mixed.dup.stringify_keys!
  end

  def test_stringify_keys_with_bang_mutates
    transformed_hash = @mixed.dup
    transformed_hash.stringify_keys!
    assert_equal @strings, transformed_hash
    assert_equal({ :a => 1, "b" => 2 }, @mixed)
  end

  def test_deep_stringify_keys!
    assert_equal @nested_strings, @nested_symbols.deep_dup.deep_stringify_keys!
    assert_equal @nested_strings, @nested_strings.deep_dup.deep_stringify_keys!
    assert_equal @nested_strings, @nested_mixed.deep_dup.deep_stringify_keys!
    assert_equal @string_array_of_hashes, @string_array_of_hashes.deep_dup.deep_stringify_keys!
    assert_equal @string_array_of_hashes, @symbol_array_of_hashes.deep_dup.deep_stringify_keys!
    assert_equal @string_array_of_hashes, @mixed_array_of_hashes.deep_dup.deep_stringify_keys!
  end

  def test_deep_stringify_keys_with_bang_mutates
    transformed_hash = @nested_mixed.deep_dup
    transformed_hash.deep_stringify_keys!
    assert_equal @nested_strings, transformed_hash
    assert_equal({ "a" => { b: { "c" => 3 } } }, @nested_mixed)
  end

  def test_assert_valid_keys
    # Valid keys may be given as a single array or as a splat.
    assert_nothing_raised do
      { failure: "stuff", funny: "business" }.assert_valid_keys([ :failure, :funny ])
      { failure: "stuff", funny: "business" }.assert_valid_keys(:failure, :funny)
    end
    # not all valid keys are required to be present
    assert_nothing_raised do
      { failure: "stuff", funny: "business" }.assert_valid_keys([ :failure, :funny, :sunny ])
      { failure: "stuff", funny: "business" }.assert_valid_keys(:failure, :funny, :sunny)
    end
    exception = assert_raise ArgumentError do
      { failore: "stuff", funny: "business" }.assert_valid_keys([ :failure, :funny ])
    end
    assert_equal "Unknown key: :failore. Valid keys are: :failure, :funny", exception.message
    exception = assert_raise ArgumentError do
      { failore: "stuff", funny: "business" }.assert_valid_keys(:failure, :funny)
    end
    assert_equal "Unknown key: :failore. Valid keys are: :failure, :funny", exception.message
    exception = assert_raise ArgumentError do
      { failore: "stuff", funny: "business" }.assert_valid_keys([ :failure ])
    end
    assert_equal "Unknown key: :failore. Valid keys are: :failure", exception.message
    exception = assert_raise ArgumentError do
      { failore: "stuff", funny: "business" }.assert_valid_keys(:failure)
    end
    assert_equal "Unknown key: :failore. Valid keys are: :failure", exception.message
  end

  def test_deep_merge
    hash_1 = { a: "a", b: "b", c: { c1: "c1", c2: "c2", c3: { d1: "d1" } } }
    hash_2 = { a: 1, c: { c1: 2, c3: { d2: "d2" } } }
    expected = { a: 1, b: "b", c: { c1: 2, c2: "c2", c3: { d1: "d1", d2: "d2" } } }
    assert_equal expected, hash_1.deep_merge(hash_2)
    hash_1.deep_merge!(hash_2)
    assert_equal expected, hash_1
  end

  # The conflict-resolution block receives (key, old_value, new_value),
  # mirroring Hash#merge.
  def test_deep_merge_with_block
    hash_1 = { a: "a", b: "b", c: { c1: "c1", c2: "c2", c3: { d1: "d1" } } }
    hash_2 = { a: 1, c: { c1: 2, c3: { d2: "d2" } } }
    expected = { a: [:a, "a", 1], b: "b", c: { c1: [:c1, "c1", 2], c2: "c2", c3: { d1: "d1", d2: "d2" } } }
    assert_equal(expected, hash_1.deep_merge(hash_2) { |k, o, n| [k, o, n] })
    hash_1.deep_merge!(hash_2) { |k, o, n| [k, o, n] }
    assert_equal expected, hash_1
  end

  def test_deep_merge_with_falsey_values
    hash_1 = { e: false }
    hash_2 = { e: "e" }
    expected = { e: [:e, false, "e"] }
    assert_equal(expected, hash_1.deep_merge(hash_2) { |k, o, n| [k, o, n] })
    hash_1.deep_merge!(hash_2) { |k, o, n| [k, o, n] }
    assert_equal expected, hash_1
  end

  def test_reverse_merge
    defaults = { d: 0, a: "x", b: "y", c: 10 }.freeze
    options = { a: 1, b: 2 }
    expected = { d: 0, a: 1, b: 2, c: 10 }
    # Should merge defaults into options, creating a new hash.
    assert_equal expected, options.reverse_merge(defaults)
    assert_not_equal expected, options
    # Should merge! defaults into options, replacing options.
    merged = options.dup
    assert_equal expected, merged.reverse_merge!(defaults)
    assert_equal expected, merged
    # Make the order consistent with the non-overwriting reverse merge.
    assert_equal expected.keys, merged.keys
    # Should be an alias for reverse_merge!
    merged = options.dup
    assert_equal expected, merged.reverse_update(defaults)
    assert_equal expected, merged
  end

  def test_with_defaults_aliases_reverse_merge
    defaults = { a: "x", b: "y", c: 10 }.freeze
    options = { a: 1, b: 2 }
    expected = { a: 1, b: 2, c: 10 }
    # Should be an alias for reverse_merge
    assert_equal expected, options.with_defaults(defaults)
    assert_not_equal expected, options
    # Should be an alias for reverse_merge!
    merged = options.dup
    assert_equal expected, merged.with_defaults!(defaults)
    assert_equal expected, merged
  end

  def test_slice_inplace
    original = { a: "x", b: "y", c: 10 }
    expected_return = { c: 10 }
    expected_original = { a: "x", b: "y" }
    # Should return a hash containing the removed key/value pairs.
    assert_equal expected_return, original.slice!(:a, :b)
    # Should replace the hash with only the given keys.
    assert_equal expected_original, original
  end

  def test_slice_inplace_with_an_array_key
    original = { :a => "x", :b => "y", :c => 10, [:a, :b] => "an array key" }
    expected = { a: "x", b: "y" }
    # Should replace the hash with only the given keys when given an array key.
    assert_equal expected, original.slice!([:a, :b], :c)
  end

  def test_slice_bang_does_not_override_default
    hash = Hash.new(0)
    hash.update(a: 1, b: 2)
    hash.slice!(:a)
    # Missing keys still fall back to the default value after slice!.
    assert_equal 0, hash[:c]
  end

  def test_slice_bang_does_not_override_default_proc
    hash = Hash.new { |h, k| h[k] = [] }
    hash.update(a: 1, b: 2)
    hash.slice!(:a)
    # Missing keys still go through the default proc after slice!.
    assert_equal [], hash[:c]
  end

  def test_extract
    original = { a: 1, b: 2, c: 3, d: 4 }
    expected = { a: 1, b: 2 }
    remaining = { c: 3, d: 4 }
    # Unknown keys (:x) are silently ignored.
    assert_equal expected, original.extract!(:a, :b, :x)
    assert_equal remaining, original
  end

  def test_extract_nils
    original = { a: nil, b: nil }
    expected = { a: nil }
    remaining = { b: nil }
    extracted = original.extract!(:a, :x)
    # A present key with a nil value is extracted; a missing key is not.
    assert_equal expected, extracted
    assert_nil extracted[:a]
    assert_nil extracted[:x]
    assert_equal remaining, original
  end

  def test_except
    original = { a: "x", b: "y", c: 10 }
    expected = { a: "x", b: "y" }
    # Should return a new hash without the given keys.
    assert_equal expected, original.except(:c)
    assert_not_equal expected, original
    # Should replace the hash without the given keys.
    assert_equal expected, original.except!(:c)
    assert_equal expected, original
  end

  def test_except_with_more_than_one_argument
    original = { a: "x", b: "y", c: 10 }
    expected = { a: "x" }
    assert_equal expected, original.except(:b, :c)
    assert_equal expected, original.except!(:b, :c)
    assert_equal expected, original
  end

  def test_except_with_original_frozen
    original = { a: "x", b: "y" }
    original.freeze
    # The non-destructive form must work on frozen hashes; the destructive
    # form must raise.
    assert_nothing_raised { original.except(:a) }
    assert_raise(FrozenError) { original.except!(:a) }
  end

  def test_except_does_not_delete_values_in_original
    original = { a: "x", b: "y" }
    assert_not_called(original, :delete) do
      original.except(:a)
    end
  end
end
# Test double: an object that supplies its own #to_xml (built with
# Builder::XmlMarkup) instead of relying on Hash#to_xml serialization.
class IWriteMyOwnXML
  def to_xml(options = {})
    options[:indent] ||= 2
    builder = options[:builder] ||= Builder::XmlMarkup.new(indent: options[:indent])
    builder.instruct! unless options[:skip_instruct]
    builder.level_one { builder.tag!(:second_level, "content") }
  end
end
# Covers Hash#to_param: key/value joining, escaping, custom #to_param on
# keys and values, and alphabetical key ordering.
class HashExtToParamTests < ActiveSupport::TestCase
  # A String whose #to_param appends "-1", to prove to_param is called on
  # both keys and values.
  class ToParam < String
    def to_param
      "#{self}-1"
    end
  end

  def test_string_hash
    assert_equal "", {}.to_param
    assert_equal "hello=world", { hello: "world" }.to_param
    assert_equal "hello=10", { "hello" => 10 }.to_param
    assert_equal "hello=world&say_bye=true", { :hello => "world", "say_bye" => true }.to_param
  end

  def test_number_hash
    assert_equal "10=20&30=40&50=60", { 10 => 20, 30 => 40, 50 => 60 }.to_param
  end

  def test_to_param_hash
    pairs = { ToParam.new("custom") => ToParam.new("param"), ToParam.new("custom2") => ToParam.new("param2") }
    assert_equal "custom-1=param-1&custom2-1=param2-1", pairs.to_param
  end

  def test_to_param_hash_escapes_its_keys_and_values
    assert_equal "param+1=A+string+with+%2F+characters+%26+that+should+be+%3F+escaped", { "param 1" => "A string with / characters & that should be ? escaped" }.to_param
  end

  def test_to_param_orders_by_key_in_ascending_order
    assert_equal "a=2&b=1&c=0", { "b" => "1", "c" => "0", "a" => "2" }.to_param
  end
end
# Exercises both directions of the Hash <-> XML core extensions:
# Hash#to_xml serialization (key dasherizing/camelizing, type attributes,
# nil handling, nested hashes and arrays, escaping) and Hash.from_xml
# parsing (type casting, attributes, arrays, file uploads, disallowed
# types, and the entity-expansion limit).
class HashToXmlTest < ActiveSupport::TestCase
  def setup
    # Shared serialization options: fixed <person> root, no <?xml?>
    # instruction and no indentation, so assertions can match compact
    # substrings of the output.
    @xml_options = { root: :person, skip_instruct: true, indent: 0 }
  end

  def test_one_level
    xml = { name: "David", street: "Paulina" }.to_xml(@xml_options)
    assert_equal "<person>", xml.first(8)
    assert_includes xml, %(<street>Paulina</street>)
    assert_includes xml, %(<name>David</name>)
  end

  # Key-formatting options: dasherize (default separator handling) and
  # camelize, in their supported variants.
  def test_one_level_dasherize_false
    xml = { name: "David", street_name: "Paulina" }.to_xml(@xml_options.merge(dasherize: false))
    assert_equal "<person>", xml.first(8)
    assert_includes xml, %(<street_name>Paulina</street_name>)
    assert_includes xml, %(<name>David</name>)
  end

  def test_one_level_dasherize_true
    xml = { name: "David", street_name: "Paulina" }.to_xml(@xml_options.merge(dasherize: true))
    assert_equal "<person>", xml.first(8)
    assert_includes xml, %(<street-name>Paulina</street-name>)
    assert_includes xml, %(<name>David</name>)
  end

  def test_one_level_camelize_true
    xml = { name: "David", street_name: "Paulina" }.to_xml(@xml_options.merge(camelize: true))
    assert_equal "<Person>", xml.first(8)
    assert_includes xml, %(<StreetName>Paulina</StreetName>)
    assert_includes xml, %(<Name>David</Name>)
  end

  def test_one_level_camelize_lower
    xml = { name: "David", street_name: "Paulina" }.to_xml(@xml_options.merge(camelize: :lower))
    assert_equal "<person>", xml.first(8)
    assert_includes xml, %(<streetName>Paulina</streetName>)
    assert_includes xml, %(<name>David</name>)
  end

  # Non-string values get a matching type="..." attribute on their element.
  def test_one_level_with_types
    xml = { name: "David", street: "Paulina", age: 26, age_in_millis: 820497600000, moved_on: Date.new(2005, 11, 15), resident: :yes }.to_xml(@xml_options)
    assert_equal "<person>", xml.first(8)
    assert_includes xml, %(<street>Paulina</street>)
    assert_includes xml, %(<name>David</name>)
    assert_includes xml, %(<age type="integer">26</age>)
    assert_includes xml, %(<age-in-millis type="integer">820497600000</age-in-millis>)
    assert_includes xml, %(<moved-on type="date">2005-11-15</moved-on>)
    assert_includes xml, %(<resident type="symbol">yes</resident>)
  end

  def test_one_level_with_nils
    xml = { name: "David", street: "Paulina", age: nil }.to_xml(@xml_options)
    assert_equal "<person>", xml.first(8)
    assert_includes xml, %(<street>Paulina</street>)
    assert_includes xml, %(<name>David</name>)
    assert_includes xml, %(<age nil="true"/>)
  end

  # skip_types suppresses type attributes for values; note nil="true" is
  # still emitted for nil values.
  def test_one_level_with_skipping_types
    xml = { name: "David", street: "Paulina", age: nil }.to_xml(@xml_options.merge(skip_types: true))
    assert_equal "<person>", xml.first(8)
    assert_includes xml, %(<street>Paulina</street>)
    assert_includes xml, %(<name>David</name>)
    assert_includes xml, %(<age nil="true"/>)
  end

  # to_xml yields the underlying builder so callers can append extra tags.
  def test_one_level_with_yielding
    xml = { name: "David", street: "Paulina" }.to_xml(@xml_options) do |x|
      x.creator("Rails")
    end
    assert_equal "<person>", xml.first(8)
    assert_includes xml, %(<street>Paulina</street>)
    assert_includes xml, %(<name>David</name>)
    assert_includes xml, %(<creator>Rails</creator>)
  end

  def test_two_levels
    xml = { name: "David", address: { street: "Paulina" } }.to_xml(@xml_options)
    assert_equal "<person>", xml.first(8)
    assert_includes xml, %(<address><street>Paulina</street></address>)
    assert_includes xml, %(<name>David</name>)
  end

  # A nested value that defines its own #to_xml is serialized via that
  # method rather than the generic hash conversion.
  def test_two_levels_with_second_level_overriding_to_xml
    xml = { name: "David", address: { street: "Paulina" }, child: IWriteMyOwnXML.new }.to_xml(@xml_options)
    assert_equal "<person>", xml.first(8)
    assert_includes xml, %(<address><street>Paulina</street></address>)
    assert_includes xml, %(<level_one><second_level>content</second_level></level_one>)
  end

  # Arrays serialize as a type="array" container with singularized child
  # element names.
  def test_two_levels_with_array
    xml = { name: "David", addresses: [{ street: "Paulina" }, { street: "Evergreen" }] }.to_xml(@xml_options)
    assert_equal "<person>", xml.first(8)
    assert_includes xml, %(<addresses type="array"><address>)
    assert_includes xml, %(<address><street>Paulina</street></address>)
    assert_includes xml, %(<address><street>Evergreen</street></address>)
    assert_includes xml, %(<name>David</name>)
  end

  def test_three_levels_with_array
    xml = { name: "David", addresses: [{ streets: [ { name: "Paulina" }, { name: "Paulina" } ] } ] }.to_xml(@xml_options)
    assert_includes xml, %(<addresses type="array"><address><streets type="array"><street><name>)
  end

  def test_timezoned_attributes
    # TODO: Remove assertion in Rails 7.1 and add ActiveSupport::TimeWithZone to XML type mapping
    assert_deprecated("ActiveSupport::TimeWithZone.name has been deprecated") do
      xml = {
        created_at: Time.utc(1999, 2, 2),
        local_created_at: Time.utc(1999, 2, 2).in_time_zone("Eastern Time (US & Canada)")
      }.to_xml(@xml_options)
      assert_match %r{<created-at type="dateTime">1999-02-02T00:00:00Z</created-at>}, xml
      assert_match %r{<local-created-at type="dateTime">1999-02-01T19:00:00-05:00</local-created-at>}, xml
    end
  end

  # from_xml must tolerate extra attributes (paging metadata here) on the
  # array container element.
  def test_multiple_records_from_xml_with_attributes_other_than_type_ignores_them_without_exploding
    topics_xml = <<-EOT
      <topics type="array" page="1" page-count="1000" per-page="2">
      <topic>
      <title>The First Topic</title>
      <author-name>David</author-name>
      <id type="integer">1</id>
      <approved type="boolean">false</approved>
      <replies-count type="integer">0</replies-count>
      <replies-close-in type="integer">2592000000</replies-close-in>
      <written-on type="date">2003-07-16</written-on>
      <viewed-at type="datetime">2003-07-16T09:28:00+0000</viewed-at>
      <content>Have a nice day</content>
      <author-email-address>david@loudthinking.com</author-email-address>
      <parent-id nil="true"></parent-id>
      </topic>
      <topic>
      <title>The Second Topic</title>
      <author-name>Jason</author-name>
      <id type="integer">1</id>
      <approved type="boolean">false</approved>
      <replies-count type="integer">0</replies-count>
      <replies-close-in type="integer">2592000000</replies-close-in>
      <written-on type="date">2003-07-16</written-on>
      <viewed-at type="datetime">2003-07-16T09:28:00+0000</viewed-at>
      <content>Have a nice day</content>
      <author-email-address>david@loudthinking.com</author-email-address>
      <parent-id></parent-id>
      </topic>
      </topics>
    EOT
    expected_topic_hash = {
      title: "The First Topic",
      author_name: "David",
      id: 1,
      approved: false,
      replies_count: 0,
      replies_close_in: 2592000000,
      written_on: Date.new(2003, 7, 16),
      viewed_at: Time.utc(2003, 7, 16, 9, 28),
      content: "Have a nice day",
      author_email_address: "david@loudthinking.com",
      parent_id: nil
    }.stringify_keys
    assert_equal expected_topic_hash, Hash.from_xml(topics_xml)["topics"].first
  end

  # Typed elements are cast: integer, boolean (whitespace tolerated), date,
  # datetime, decimal, and float.
  def test_single_record_from_xml
    topic_xml = <<-EOT
      <topic>
      <title>The First Topic</title>
      <author-name>David</author-name>
      <id type="integer">1</id>
      <approved type="boolean"> true </approved>
      <replies-count type="integer">0</replies-count>
      <replies-close-in type="integer">2592000000</replies-close-in>
      <written-on type="date">2003-07-16</written-on>
      <viewed-at type="datetime">2003-07-16T09:28:00+0000</viewed-at>
      <author-email-address>david@loudthinking.com</author-email-address>
      <parent-id></parent-id>
      <ad-revenue type="decimal">1.5</ad-revenue>
      <optimum-viewing-angle type="float">135</optimum-viewing-angle>
      </topic>
    EOT
    expected_topic_hash = {
      title: "The First Topic",
      author_name: "David",
      id: 1,
      approved: true,
      replies_count: 0,
      replies_close_in: 2592000000,
      written_on: Date.new(2003, 7, 16),
      viewed_at: Time.utc(2003, 7, 16, 9, 28),
      author_email_address: "david@loudthinking.com",
      parent_id: nil,
      ad_revenue: BigDecimal("1.50"),
      optimum_viewing_angle: 135.0,
    }.stringify_keys
    assert_equal expected_topic_hash, Hash.from_xml(topic_xml)["topic"]
  end

  # Empty typed elements are cast to nil, not to a zero/blank value.
  def test_single_record_from_xml_with_nil_values
    topic_xml = <<-EOT
      <topic>
      <title></title>
      <id type="integer"></id>
      <approved type="boolean"></approved>
      <written-on type="date"></written-on>
      <viewed-at type="datetime"></viewed-at>
      <parent-id></parent-id>
      </topic>
    EOT
    expected_topic_hash = {
      title: nil,
      id: nil,
      approved: nil,
      written_on: nil,
      viewed_at: nil,
      parent_id: nil
    }.stringify_keys
    assert_equal expected_topic_hash, Hash.from_xml(topic_xml)["topic"]
  end

  def test_multiple_records_from_xml
    topics_xml = <<-EOT
      <topics type="array">
      <topic>
      <title>The First Topic</title>
      <author-name>David</author-name>
      <id type="integer">1</id>
      <approved type="boolean">false</approved>
      <replies-count type="integer">0</replies-count>
      <replies-close-in type="integer">2592000000</replies-close-in>
      <written-on type="date">2003-07-16</written-on>
      <viewed-at type="datetime">2003-07-16T09:28:00+0000</viewed-at>
      <content>Have a nice day</content>
      <author-email-address>david@loudthinking.com</author-email-address>
      <parent-id nil="true"></parent-id>
      </topic>
      <topic>
      <title>The Second Topic</title>
      <author-name>Jason</author-name>
      <id type="integer">1</id>
      <approved type="boolean">false</approved>
      <replies-count type="integer">0</replies-count>
      <replies-close-in type="integer">2592000000</replies-close-in>
      <written-on type="date">2003-07-16</written-on>
      <viewed-at type="datetime">2003-07-16T09:28:00+0000</viewed-at>
      <content>Have a nice day</content>
      <author-email-address>david@loudthinking.com</author-email-address>
      <parent-id></parent-id>
      </topic>
      </topics>
    EOT
    expected_topic_hash = {
      title: "The First Topic",
      author_name: "David",
      id: 1,
      approved: false,
      replies_count: 0,
      replies_close_in: 2592000000,
      written_on: Date.new(2003, 7, 16),
      viewed_at: Time.utc(2003, 7, 16, 9, 28),
      content: "Have a nice day",
      author_email_address: "david@loudthinking.com",
      parent_id: nil
    }.stringify_keys
    assert_equal expected_topic_hash, Hash.from_xml(topics_xml)["topics"].first
  end

  # XML attributes on a leaf element become string-valued hash entries.
  def test_single_record_from_xml_with_attributes_other_than_type
    topic_xml = <<-EOT
      <rsp stat="ok">
      <photos page="1" pages="1" perpage="100" total="16">
      <photo id="175756086" owner="55569174@N00" secret="0279bf37a1" server="76" title="Colored Pencil PhotoBooth Fun" ispublic="1" isfriend="0" isfamily="0"/>
      </photos>
      </rsp>
    EOT
    expected_topic_hash = {
      id: "175756086",
      owner: "55569174@N00",
      secret: "0279bf37a1",
      server: "76",
      title: "Colored Pencil PhotoBooth Fun",
      ispublic: "1",
      isfriend: "0",
      isfamily: "0",
    }.stringify_keys
    assert_equal expected_topic_hash, Hash.from_xml(topic_xml)["rsp"]["photos"]["photo"]
  end

  # All-caps tag names must survive round-tripping without case mangling.
  def test_all_caps_key_from_xml
    test_xml = <<-EOT
      <ABC3XYZ>
      <TEST>Lorem Ipsum</TEST>
      </ABC3XYZ>
    EOT
    expected_hash = {
      "ABC3XYZ" => {
        "TEST" => "Lorem Ipsum"
      }
    }
    assert_equal expected_hash, Hash.from_xml(test_xml)
  end

  def test_empty_array_from_xml
    blog_xml = <<-XML
      <blog>
      <posts type="array"></posts>
      </blog>
    XML
    expected_blog_hash = { "blog" => { "posts" => [] } }
    assert_equal expected_blog_hash, Hash.from_xml(blog_xml)
  end

  def test_empty_array_with_whitespace_from_xml
    blog_xml = <<-XML
      <blog>
      <posts type="array">
      </posts>
      </blog>
    XML
    expected_blog_hash = { "blog" => { "posts" => [] } }
    assert_equal expected_blog_hash, Hash.from_xml(blog_xml)
  end

  def test_array_with_one_entry_from_xml
    blog_xml = <<-XML
      <blog>
      <posts type="array">
      <post>a post</post>
      </posts>
      </blog>
    XML
    expected_blog_hash = { "blog" => { "posts" => ["a post"] } }
    assert_equal expected_blog_hash, Hash.from_xml(blog_xml)
  end

  def test_array_with_multiple_entries_from_xml
    blog_xml = <<-XML
      <blog>
      <posts type="array">
      <post>a post</post>
      <post>another post</post>
      </posts>
      </blog>
    XML
    expected_blog_hash = { "blog" => { "posts" => ["a post", "another post"] } }
    assert_equal expected_blog_hash, Hash.from_xml(blog_xml)
  end

  # type="file" elements decode into an uploaded-file-like object exposing
  # #original_filename and #content_type.
  def test_file_from_xml
    blog_xml = <<-XML
      <blog>
      <logo type="file" name="logo.png" content_type="image/png">
      </logo>
      </blog>
    XML
    hash = Hash.from_xml(blog_xml)
    assert hash.has_key?("blog")
    assert hash["blog"].has_key?("logo")
    file = hash["blog"]["logo"]
    assert_equal "logo.png", file.original_filename
    assert_equal "image/png", file.content_type
  end

  def test_file_from_xml_with_defaults
    blog_xml = <<-XML
      <blog>
      <logo type="file">
      </logo>
      </blog>
    XML
    file = Hash.from_xml(blog_xml)["blog"]["logo"]
    assert_equal "untitled", file.original_filename
    assert_equal "application/octet-stream", file.content_type
  end

  def test_tag_with_attrs_and_whitespace
    xml = <<-XML
      <blog name="bacon is the best">
      </blog>
    XML
    hash = Hash.from_xml(xml)
    assert_equal "bacon is the best", hash["blog"]["name"]
  end

  def test_empty_cdata_from_xml
    xml = "<data><![CDATA[]]></data>"
    assert_equal "", Hash.from_xml(xml)["data"]
  end

  # Casting for the XSD-style type names: double, decimal, boolean,
  # dateTime, string, base64Binary, and binary with an encoding attribute.
  def test_xsd_like_types_from_xml
    bacon_xml = <<-EOT
      <bacon>
      <weight type="double">0.5</weight>
      <price type="decimal">12.50</price>
      <chunky type="boolean"> 1 </chunky>
      <expires-at type="dateTime">2007-12-25T12:34:56+0000</expires-at>
      <notes type="string"></notes>
      <illustration type="base64Binary">YmFiZS5wbmc=</illustration>
      <caption type="binary" encoding="base64">VGhhdCdsbCBkbywgcGlnLg==</caption>
      </bacon>
    EOT
    expected_bacon_hash = {
      weight: 0.5,
      chunky: true,
      price: BigDecimal("12.50"),
      expires_at: Time.utc(2007, 12, 25, 12, 34, 56),
      notes: "",
      illustration: "babe.png",
      caption: "That'll do, pig."
    }.stringify_keys
    assert_equal expected_bacon_hash, Hash.from_xml(bacon_xml)["bacon"]
  end

  # An unrecognized type attribute is preserved as a plain "type" key
  # instead of triggering a cast.
  def test_type_trickles_through_when_unknown
    product_xml = <<-EOT
      <product>
      <weight type="double">0.5</weight>
      <image type="ProductImage"><filename>image.gif</filename></image>
      </product>
    EOT
    expected_product_hash = {
      weight: 0.5,
      image: { "type" => "ProductImage", "filename" => "image.gif" },
    }.stringify_keys
    assert_equal expected_product_hash, Hash.from_xml(product_xml)["product"]
  end

  # Security: callers can blocklist type attributes...
  def test_from_xml_raises_on_disallowed_type_attributes
    assert_raise ActiveSupport::XMLConverter::DisallowedType do
      Hash.from_xml '<product><name type="foo">value</name></product>', %w(foo)
    end
  end

  # ...and symbol/yaml are blocked out of the box (arbitrary object
  # deserialization vectors).
  def test_from_xml_disallows_symbol_and_yaml_types_by_default
    assert_raise ActiveSupport::XMLConverter::DisallowedType do
      Hash.from_xml '<product><name type="symbol">value</name></product>'
    end
    assert_raise ActiveSupport::XMLConverter::DisallowedType do
      Hash.from_xml '<product><name type="yaml">value</name></product>'
    end
  end

  def test_from_xml_array_one
    expected = { "numbers" => { "type" => "Array", "value" => "1" } }
    assert_equal expected, Hash.from_xml('<numbers type="Array"><value>1</value></numbers>')
  end

  def test_from_xml_array_many
    expected = { "numbers" => { "type" => "Array", "value" => [ "1", "2" ] } }
    assert_equal expected, Hash.from_xml('<numbers type="Array"><value>1</value><value>2</value></numbers>')
  end

  def test_from_trusted_xml_allows_symbol_and_yaml_types
    expected = { "product" => { "name" => :value } }
    assert_equal expected, Hash.from_trusted_xml('<product><name type="symbol">value</name></product>')
    assert_equal expected, Hash.from_trusted_xml('<product><name type="yaml">:value</name></product>')
  end

  # The XML builder seems to fail miserably when trying to tag something
  # with the same name as a Kernel method (throw, test, loop, select ...)
  def test_kernel_method_names_to_xml
    hash = { throw: { ball: "red" } }
    expected = "<person><throw><ball>red</ball></throw></person>"
    assert_nothing_raised do
      assert_equal expected, hash.to_xml(@xml_options)
    end
  end

  def test_empty_string_works_for_typecast_xml_value
    assert_nothing_raised do
      ActiveSupport::XMLConverter.new("").to_h
    end
  end

  # to_xml escapes values on the way out...
  def test_escaping_to_xml
    hash = {
      bare_string: "First & Last Name",
      pre_escaped_string: "First &amp; Last Name"
    }.stringify_keys
    expected_xml = "<person><bare-string>First &amp; Last Name</bare-string><pre-escaped-string>First &amp;amp; Last Name</pre-escaped-string></person>"
    assert_equal expected_xml, hash.to_xml(@xml_options)
  end

  # ...and from_xml unescapes on the way in, so the pair round-trips.
  def test_unescaping_from_xml
    xml_string = "<person><bare-string>First &amp; Last Name</bare-string><pre-escaped-string>First &amp;amp; Last Name</pre-escaped-string></person>"
    expected_hash = {
      bare_string: "First & Last Name",
      pre_escaped_string: "First &amp; Last Name"
    }.stringify_keys
    assert_equal expected_hash, Hash.from_xml(xml_string)["person"]
  end

  def test_roundtrip_to_xml_from_xml
    hash = {
      bare_string: "First & Last Name",
      pre_escaped_string: "First &amp; Last Name"
    }.stringify_keys
    assert_equal hash, Hash.from_xml(hash.to_xml(@xml_options))["person"]
  end

  # datetime values always come back as UTC Time instances, regardless of
  # the offset in the source document.
  def test_datetime_xml_type_with_utc_time
    alert_xml = <<-XML
      <alert>
      <alert_at type="datetime">2008-02-10T15:30:45Z</alert_at>
      </alert>
    XML
    alert_at = Hash.from_xml(alert_xml)["alert"]["alert_at"]
    assert_predicate alert_at, :utc?
    assert_equal Time.utc(2008, 2, 10, 15, 30, 45), alert_at
  end

  def test_datetime_xml_type_with_non_utc_time
    alert_xml = <<-XML
      <alert>
      <alert_at type="datetime">2008-02-10T10:30:45-05:00</alert_at>
      </alert>
    XML
    alert_at = Hash.from_xml(alert_xml)["alert"]["alert_at"]
    assert_predicate alert_at, :utc?
    assert_equal Time.utc(2008, 2, 10, 15, 30, 45), alert_at
  end

  def test_datetime_xml_type_with_far_future_date
    alert_xml = <<-XML
      <alert>
      <alert_at type="datetime">2050-02-10T15:30:45Z</alert_at>
      </alert>
    XML
    alert_at = Hash.from_xml(alert_xml)["alert"]["alert_at"]
    assert_predicate alert_at, :utc?
    assert_equal 2050, alert_at.year
    assert_equal 2, alert_at.month
    assert_equal 10, alert_at.day
    assert_equal 15, alert_at.hour
    assert_equal 30, alert_at.min
    assert_equal 45, alert_at.sec
  end

  # to_xml must not mutate the caller-supplied options hash.
  def test_to_xml_dups_options
    options = { skip_instruct: true }
    {}.to_xml(options)
    # :builder, etc, shouldn't be added to options
    assert_equal({ skip_instruct: true }, options)
  end

  # "Billion laughs" defense: each XmlMini backend raises its own error
  # class once entity expansion exceeds the limit.
  def test_expansion_count_is_limited
    expected =
      case ActiveSupport::XmlMini.backend.name
      when "ActiveSupport::XmlMini_REXML";        RuntimeError
      when "ActiveSupport::XmlMini_Nokogiri";     Nokogiri::XML::SyntaxError
      when "ActiveSupport::XmlMini_NokogiriSAX";  RuntimeError
      when "ActiveSupport::XmlMini_LibXML";       LibXML::XML::Error
      when "ActiveSupport::XmlMini_LibXMLSAX";    LibXML::XML::Error
      end
    assert_raise expected do
      attack_xml = <<-EOT
        <?xml version="1.0" encoding="UTF-8"?>
        <!DOCTYPE member [
        <!ENTITY a "&b;&b;&b;&b;&b;&b;&b;&b;&b;&b;">
        <!ENTITY b "&c;&c;&c;&c;&c;&c;&c;&c;&c;&c;">
        <!ENTITY c "&d;&d;&d;&d;&d;&d;&d;&d;&d;&d;">
        <!ENTITY d "&e;&e;&e;&e;&e;&e;&e;&e;&e;&e;">
        <!ENTITY e "&f;&f;&f;&f;&f;&f;&f;&f;&f;&f;">
        <!ENTITY f "&g;&g;&g;&g;&g;&g;&g;&g;&g;&g;">
        <!ENTITY g "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxx">
        ]>
        <member>
        &a;
        </member>
      EOT
      Hash.from_xml(attack_xml)
    end
  end
end
# frozen_string_literal: true | |
require "active_support/core_ext/hash/keys" | |
require "active_support/core_ext/hash/reverse_merge" | |
require "active_support/core_ext/hash/except" | |
require "active_support/core_ext/hash/slice" | |
module ActiveSupport
  # Implements a hash where keys <tt>:foo</tt> and <tt>"foo"</tt> are considered
  # to be the same.
  #
  #   rgb = ActiveSupport::HashWithIndifferentAccess.new
  #
  #   rgb[:black] = '#000000'
  #   rgb[:black]  # => '#000000'
  #   rgb['black'] # => '#000000'
  #
  #   rgb['white'] = '#FFFFFF'
  #   rgb[:white]  # => '#FFFFFF'
  #   rgb['white'] # => '#FFFFFF'
  #
  # Internally symbols are mapped to strings when used as keys in the entire
  # writing interface (calling <tt>[]=</tt>, <tt>merge</tt>, etc). This
  # mapping belongs to the public interface. For example, given:
  #
  #   hash = ActiveSupport::HashWithIndifferentAccess.new(a: 1)
  #
  # You are guaranteed that the key is returned as a string:
  #
  #   hash.keys # => ["a"]
  #
  # Technically other types of keys are accepted:
  #
  #   hash = ActiveSupport::HashWithIndifferentAccess.new(a: 1)
  #   hash[0] = 0
  #   hash # => {"a"=>1, 0=>0}
  #
  # but this class is intended for use cases where strings or symbols are the
  # expected keys and it is convenient to understand both as the same. For
  # example the +params+ hash in Ruby on Rails.
  #
  # Note that core extensions define <tt>Hash#with_indifferent_access</tt>:
  #
  #   rgb = { black: '#000000', white: '#FFFFFF' }.with_indifferent_access
  #
  # which may be handy.
  #
  # To access this class outside of Rails, require the core extension with:
  #
  #   require "active_support/core_ext/hash/indifferent_access"
  #
  # which will, in turn, require this file.
  class HashWithIndifferentAccess < Hash
    # Returns +true+ so that <tt>Array#extract_options!</tt> finds members of
    # this class.
    def extractable_options?
      true
    end

    # Already indifferent, so a plain copy is sufficient.
    def with_indifferent_access
      dup
    end

    # Called when this hash is nested inside another indifferent hash during
    # value conversion; no re-wrapping is needed.
    def nested_under_indifferent_access
      self
    end

    # Builds the hash from +constructor+:
    # * a hash-convertible object seeds the contents and copies over its
    #   default / default_proc,
    # * +nil+ creates an empty hash,
    # * anything else is forwarded to Hash#initialize as the default value.
    def initialize(constructor = nil)
      if constructor.respond_to?(:to_hash)
        super()
        update(constructor)

        hash = constructor.is_a?(Hash) ? constructor : constructor.to_hash
        self.default = hash.default if hash.default
        self.default_proc = hash.default_proc if hash.default_proc
      elsif constructor.nil?
        super()
      else
        super(constructor)
      end
    end

    # Mirrors Hash.[] while returning an indifferent hash (keys converted
    # through merge!).
    def self.[](*args)
      new.merge!(Hash[*args])
    end

    # Keep handles on the raw Hash writer/updater so the overridden versions
    # below can delegate without re-entering key conversion.
    alias_method :regular_writer, :[]= unless method_defined?(:regular_writer)
    alias_method :regular_update, :update unless method_defined?(:regular_update)

    # Assigns a new value to the hash:
    #
    #   hash = ActiveSupport::HashWithIndifferentAccess.new
    #   hash[:key] = 'value'
    #
    # This value can be later fetched using either +:key+ or <tt>'key'</tt>.
    def []=(key, value)
      regular_writer(convert_key(key), convert_value(value, conversion: :assignment))
    end

    alias_method :store, :[]=

    # Updates the receiver in-place, merging in the hashes passed as arguments:
    #
    #   hash_1 = ActiveSupport::HashWithIndifferentAccess.new
    #   hash_1[:key] = 'value'
    #
    #   hash_2 = ActiveSupport::HashWithIndifferentAccess.new
    #   hash_2[:key] = 'New Value!'
    #
    #   hash_1.update(hash_2) # => {"key"=>"New Value!"}
    #
    #   hash = ActiveSupport::HashWithIndifferentAccess.new
    #   hash.update({ "a" => 1 }, { "b" => 2 }) # => { "a" => 1, "b" => 2 }
    #
    # The arguments can be either an
    # <tt>ActiveSupport::HashWithIndifferentAccess</tt> or a regular +Hash+.
    # In either case the merge respects the semantics of indifferent access.
    #
    # If the argument is a regular hash with keys +:key+ and <tt>"key"</tt> only one
    # of the values end up in the receiver, but which one is unspecified.
    #
    # When given a block, the value for duplicated keys will be determined
    # by the result of invoking the block with the duplicated key, the value
    # in the receiver, and the value in +other_hash+. The rules for duplicated
    # keys follow the semantics of indifferent access:
    #
    #   hash_1[:key] = 10
    #   hash_2['key'] = 12
    #   hash_1.update(hash_2) { |key, old, new| old + new } # => {"key"=>22}
    def update(*other_hashes, &block)
      if other_hashes.size == 1
        update_with_single_argument(other_hashes.first, block)
      else
        other_hashes.each do |other_hash|
          update_with_single_argument(other_hash, block)
        end
      end
      self
    end

    alias_method :merge!, :update

    # Checks the hash for a key matching the argument passed in:
    #
    #   hash = ActiveSupport::HashWithIndifferentAccess.new
    #   hash['key'] = 'value'
    #   hash.key?(:key)  # => true
    #   hash.key?('key') # => true
    def key?(key)
      super(convert_key(key))
    end

    alias_method :include?, :key?
    alias_method :has_key?, :key?
    alias_method :member?, :key?

    # Same as <tt>Hash#[]</tt> where the key passed as argument can be
    # either a string or a symbol:
    #
    #   counters = ActiveSupport::HashWithIndifferentAccess.new
    #   counters[:foo] = 1
    #
    #   counters['foo'] # => 1
    #   counters[:foo]  # => 1
    #   counters[:zoo]  # => nil
    def [](key)
      super(convert_key(key))
    end

    # Same as <tt>Hash#assoc</tt> where the key passed as argument can be
    # either a string or a symbol:
    #
    #   counters = ActiveSupport::HashWithIndifferentAccess.new
    #   counters[:foo] = 1
    #
    #   counters.assoc('foo') # => ["foo", 1]
    #   counters.assoc(:foo)  # => ["foo", 1]
    #   counters.assoc(:zoo)  # => nil
    def assoc(key)
      super(convert_key(key))
    end

    # Same as <tt>Hash#fetch</tt> where the key passed as argument can be
    # either a string or a symbol:
    #
    #   counters = ActiveSupport::HashWithIndifferentAccess.new
    #   counters[:foo] = 1
    #
    #   counters.fetch('foo')          # => 1
    #   counters.fetch(:bar, 0)        # => 0
    #   counters.fetch(:bar) { |key| 0 } # => 0
    #   counters.fetch(:zoo)           # => KeyError: key not found: "zoo"
    def fetch(key, *extras)
      super(convert_key(key), *extras)
    end

    # Same as <tt>Hash#dig</tt> where the key passed as argument can be
    # either a string or a symbol:
    #
    #   counters = ActiveSupport::HashWithIndifferentAccess.new
    #   counters[:foo] = { bar: 1 }
    #
    #   counters.dig('foo', 'bar') # => 1
    #   counters.dig(:foo, :bar)   # => 1
    #   counters.dig(:zoo)         # => nil
    def dig(*args)
      # Only the first key needs converting: nested values are themselves
      # indifferent hashes (see convert_value) and convert their own keys.
      args[0] = convert_key(args[0]) if args.size > 0
      super(*args)
    end

    # Same as <tt>Hash#default</tt> where the key passed as argument can be
    # either a string or a symbol:
    #
    #   hash = ActiveSupport::HashWithIndifferentAccess.new(1)
    #   hash.default # => 1
    #
    #   hash = ActiveSupport::HashWithIndifferentAccess.new { |hash, key| key }
    #   hash.default # => nil
    #   hash.default('foo') # => 'foo'
    #   hash.default(:foo) # => 'foo'
    def default(*args)
      super(*args.map { |arg| convert_key(arg) })
    end

    # Returns an array of the values at the specified indices:
    #
    #   hash = ActiveSupport::HashWithIndifferentAccess.new
    #   hash[:a] = 'x'
    #   hash[:b] = 'y'
    #   hash.values_at('a', 'b') # => ["x", "y"]
    def values_at(*keys)
      super(*keys.map { |key| convert_key(key) })
    end

    # Returns an array of the values at the specified indices, but also
    # raises an exception when one of the keys can't be found.
    #
    #   hash = ActiveSupport::HashWithIndifferentAccess.new
    #   hash[:a] = 'x'
    #   hash[:b] = 'y'
    #   hash.fetch_values('a', 'b') # => ["x", "y"]
    #   hash.fetch_values('a', 'c') { |key| 'z' } # => ["x", "z"]
    #   hash.fetch_values('a', 'c') # => KeyError: key not found: "c"
    def fetch_values(*indices, &block)
      super(*indices.map { |key| convert_key(key) }, &block)
    end

    # Returns a shallow copy of the hash.
    #
    #   hash = ActiveSupport::HashWithIndifferentAccess.new({ a: { b: 'b' } })
    #   dup  = hash.dup
    #   dup[:a][:c] = 'c'
    #
    #   hash[:a][:c] # => "c"
    #   dup[:a][:c]  # => "c"
    def dup
      self.class.new(self).tap do |new_hash|
        set_defaults(new_hash)
      end
    end

    # This method has the same semantics of +update+, except it does not
    # modify the receiver but rather returns a new hash with indifferent
    # access with the result of the merge.
    def merge(*hashes, &block)
      dup.update(*hashes, &block)
    end

    # Like +merge+ but the other way around: Merges the receiver into the
    # argument and returns a new hash with indifferent access as result:
    #
    #   hash = ActiveSupport::HashWithIndifferentAccess.new
    #   hash['a'] = nil
    #   hash.reverse_merge(a: 0, b: 1) # => {"a"=>nil, "b"=>1}
    def reverse_merge(other_hash)
      super(self.class.new(other_hash))
    end

    alias_method :with_defaults, :reverse_merge

    # Same semantics as +reverse_merge+ but modifies the receiver in-place.
    def reverse_merge!(other_hash)
      super(self.class.new(other_hash))
    end

    alias_method :with_defaults!, :reverse_merge!

    # Replaces the contents of this hash with other_hash.
    #
    #   h = { "a" => 100, "b" => 200 }
    #   h.replace({ "c" => 300, "d" => 400 }) # => {"c"=>300, "d"=>400}
    def replace(other_hash)
      super(self.class.new(other_hash))
    end

    # Removes the specified key from the hash.
    def delete(key)
      super(convert_key(key))
    end

    # Returns a hash with indifferent access that includes everything except given keys.
    #   hash = { a: "x", b: "y", c: 10 }.with_indifferent_access
    #   hash.except(:a, "b") # => {c: 10}.with_indifferent_access
    #   hash # => { a: "x", b: "y", c: 10 }.with_indifferent_access
    def except(*keys)
      slice(*self.keys - keys.map { |key| convert_key(key) })
    end

    alias_method :without, :except

    # Keys are already strings, so the stringify family are no-ops (bang
    # forms) or plain copies (non-bang forms).
    def stringify_keys!; self end
    def deep_stringify_keys!; self end
    def stringify_keys; dup end
    def deep_stringify_keys; dup end
    # In-place symbolization would break the string-key invariant, so those
    # inherited methods are removed.
    undef :symbolize_keys!
    undef :deep_symbolize_keys!
    # Symbolizing returns a plain Hash: symbol keys defeat indifference.
    def symbolize_keys; to_hash.symbolize_keys! end
    alias_method :to_options, :symbolize_keys
    def deep_symbolize_keys; to_hash.deep_symbolize_keys! end
    def to_options!; self end

    # The Enumerable-style filters/transforms below return indifferent
    # hashes; each copies the receiver and delegates to the destructive
    # counterpart so defaults/default_procs are preserved by dup.
    def select(*args, &block)
      return to_enum(:select) unless block_given?
      dup.tap { |hash| hash.select!(*args, &block) }
    end

    def reject(*args, &block)
      return to_enum(:reject) unless block_given?
      dup.tap { |hash| hash.reject!(*args, &block) }
    end

    def transform_values(*args, &block)
      return to_enum(:transform_values) unless block_given?
      dup.tap { |hash| hash.transform_values!(*args, &block) }
    end

    def transform_keys(*args, &block)
      return to_enum(:transform_keys) unless block_given?
      dup.tap { |hash| hash.transform_keys!(*args, &block) }
    end

    # Re-inserts every pair under the block's replacement key. Writing
    # through self[...] keeps symbol replacement keys indifferent.
    def transform_keys!
      return enum_for(:transform_keys!) { size } unless block_given?
      keys.each do |key|
        self[yield(key)] = delete(key)
      end
      self
    end

    def slice(*keys)
      keys.map! { |key| convert_key(key) }
      self.class.new(super)
    end

    def slice!(*keys)
      keys.map! { |key| convert_key(key) }
      super
    end

    def compact
      dup.tap(&:compact!)
    end

    # Convert to a regular hash with string keys.
    def to_hash
      _new_hash = Hash.new
      set_defaults(_new_hash)

      each do |key, value|
        _new_hash[key] = convert_value(value, conversion: :to_hash)
      end
      _new_hash
    end

    private
      # Symbol keys become strings; every other key type passes through.
      # Symbol#name (Ruby 3.0+) returns a frozen, deduplicated string, so it
      # is preferred when available.
      if Symbol.method_defined?(:name)
        def convert_key(key)
          key.kind_of?(Symbol) ? key.name : key
        end
      else
        def convert_key(key)
          key.kind_of?(Symbol) ? key.to_s : key
        end
      end

      # Recursively converts hash values to indifferent hashes (or back to
      # plain hashes when +conversion+ is :to_hash) and maps arrays
      # element-wise. On :assignment the array is duplicated only when
      # frozen, so in-place mutation of an assigned array stays visible.
      def convert_value(value, conversion: nil)
        if value.is_a? Hash
          if conversion == :to_hash
            value.to_hash
          else
            value.nested_under_indifferent_access
          end
        elsif value.is_a?(Array)
          if conversion != :assignment || value.frozen?
            value = value.dup
          end
          value.map! { |e| convert_value(e, conversion: conversion) }
        else
          value
        end
      end

      # Copies this hash's default_proc (or default value) onto +target+.
      def set_defaults(target)
        if default_proc
          target.default_proc = default_proc.dup
        else
          target.default = default
        end
      end

      # One step of #update: fast path for another indifferent hash (raw
      # merge), otherwise convert each key/value, consulting the conflict
      # block for keys already present.
      def update_with_single_argument(other_hash, block)
        if other_hash.is_a? HashWithIndifferentAccess
          regular_update(other_hash, &block)
        else
          other_hash.to_hash.each_pair do |key, value|
            if block && key?(key)
              value = block.call(convert_key(key), self[key], value)
            end
            regular_writer(convert_key(key), convert_value(value))
          end
        end
      end
  end
end
# :stopdoc:
# Top-level constant aliasing ActiveSupport::HashWithIndifferentAccess so it
# can be referenced without the namespace.
HashWithIndifferentAccess = ActiveSupport::HashWithIndifferentAccess
# frozen_string_literal: true | |
require_relative "abstract_unit" | |
require "bigdecimal" | |
require "yaml" | |
require "active_support/core_ext/hash" | |
require "active_support/core_ext/string/access" | |
require "active_support/core_ext/object/conversions" | |
require "active_support/core_ext/object/deep_dup" | |
require "active_support/inflections" | |
class HashWithIndifferentAccessTest < ActiveSupport::TestCase | |
# Local shorthand so the tests can reference the class unqualified.
HashWithIndifferentAccess = ActiveSupport::HashWithIndifferentAccess
# Bare subclass of HashWithIndifferentAccess used to verify subclass behavior.
class IndifferentHash < ActiveSupport::HashWithIndifferentAccess
end
class SubclassingArray < Array | |
end | |
class SubclassingHash < Hash | |
end | |
class NonIndifferentHash < Hash | |
def nested_under_indifferent_access | |
self | |
end | |
end | |
class HashByConversion | |
def initialize(hash) | |
@hash = hash | |
end | |
def to_hash | |
@hash | |
end | |
end | |
def setup | |
@strings = { "a" => 1, "b" => 2 } | |
@nested_strings = { "a" => { "b" => { "c" => 3 } } } | |
@symbols = { a: 1, b: 2 } | |
@nested_symbols = { a: { b: { c: 3 } } } | |
@mixed = { :a => 1, "b" => 2 } | |
@nested_mixed = { "a" => { b: { "c" => 3 } } } | |
@integers = { 0 => 1, 1 => 2 } | |
@nested_integers = { 0 => { 1 => { 2 => 3 } } } | |
@illegal_symbols = { [] => 3 } | |
@nested_illegal_symbols = { [] => { [] => 3 } } | |
end | |
def test_symbolize_keys_for_hash_with_indifferent_access | |
assert_instance_of Hash, @symbols.with_indifferent_access.symbolize_keys | |
assert_equal @symbols, @symbols.with_indifferent_access.symbolize_keys | |
assert_equal @symbols, @strings.with_indifferent_access.symbolize_keys | |
assert_equal @symbols, @mixed.with_indifferent_access.symbolize_keys | |
end | |
def test_to_options_for_hash_with_indifferent_access | |
assert_instance_of Hash, @symbols.with_indifferent_access.to_options | |
assert_equal @symbols, @symbols.with_indifferent_access.to_options | |
assert_equal @symbols, @strings.with_indifferent_access.to_options | |
assert_equal @symbols, @mixed.with_indifferent_access.to_options | |
end | |
def test_deep_symbolize_keys_for_hash_with_indifferent_access | |
assert_instance_of Hash, @nested_symbols.with_indifferent_access.deep_symbolize_keys | |
assert_equal @nested_symbols, @nested_symbols.with_indifferent_access.deep_symbolize_keys | |
assert_equal @nested_symbols, @nested_strings.with_indifferent_access.deep_symbolize_keys | |
assert_equal @nested_symbols, @nested_mixed.with_indifferent_access.deep_symbolize_keys | |
end | |
def test_symbolize_keys_bang_for_hash_with_indifferent_access | |
assert_raise(NoMethodError) { @symbols.with_indifferent_access.dup.symbolize_keys! } | |
assert_raise(NoMethodError) { @strings.with_indifferent_access.dup.symbolize_keys! } | |
assert_raise(NoMethodError) { @mixed.with_indifferent_access.dup.symbolize_keys! } | |
end | |
def test_deep_symbolize_keys_bang_for_hash_with_indifferent_access | |
assert_raise(NoMethodError) { @nested_symbols.with_indifferent_access.deep_dup.deep_symbolize_keys! } | |
assert_raise(NoMethodError) { @nested_strings.with_indifferent_access.deep_dup.deep_symbolize_keys! } | |
assert_raise(NoMethodError) { @nested_mixed.with_indifferent_access.deep_dup.deep_symbolize_keys! } | |
end | |
def test_symbolize_keys_preserves_keys_that_cant_be_symbolized_for_hash_with_indifferent_access | |
assert_equal @illegal_symbols, @illegal_symbols.with_indifferent_access.symbolize_keys | |
assert_raise(NoMethodError) { @illegal_symbols.with_indifferent_access.dup.symbolize_keys! } | |
end | |
def test_deep_symbolize_keys_preserves_keys_that_cant_be_symbolized_for_hash_with_indifferent_access | |
assert_equal @nested_illegal_symbols, @nested_illegal_symbols.with_indifferent_access.deep_symbolize_keys | |
assert_raise(NoMethodError) { @nested_illegal_symbols.with_indifferent_access.deep_dup.deep_symbolize_keys! } | |
end | |
def test_symbolize_keys_preserves_integer_keys_for_hash_with_indifferent_access | |
assert_equal @integers, @integers.with_indifferent_access.symbolize_keys | |
assert_raise(NoMethodError) { @integers.with_indifferent_access.dup.symbolize_keys! } | |
end | |
def test_deep_symbolize_keys_preserves_integer_keys_for_hash_with_indifferent_access | |
assert_equal @nested_integers, @nested_integers.with_indifferent_access.deep_symbolize_keys | |
assert_raise(NoMethodError) { @nested_integers.with_indifferent_access.deep_dup.deep_symbolize_keys! } | |
end | |
def test_stringify_keys_for_hash_with_indifferent_access | |
assert_instance_of ActiveSupport::HashWithIndifferentAccess, @symbols.with_indifferent_access.stringify_keys | |
assert_equal @strings, @symbols.with_indifferent_access.stringify_keys | |
assert_equal @strings, @strings.with_indifferent_access.stringify_keys | |
assert_equal @strings, @mixed.with_indifferent_access.stringify_keys | |
end | |
def test_deep_stringify_keys_for_hash_with_indifferent_access | |
assert_instance_of ActiveSupport::HashWithIndifferentAccess, @nested_symbols.with_indifferent_access.deep_stringify_keys | |
assert_equal @nested_strings, @nested_symbols.with_indifferent_access.deep_stringify_keys | |
assert_equal @nested_strings, @nested_strings.with_indifferent_access.deep_stringify_keys | |
assert_equal @nested_strings, @nested_mixed.with_indifferent_access.deep_stringify_keys | |
end | |
def test_stringify_keys_bang_for_hash_with_indifferent_access | |
assert_instance_of ActiveSupport::HashWithIndifferentAccess, @symbols.with_indifferent_access.dup.stringify_keys! | |
assert_equal @strings, @symbols.with_indifferent_access.dup.stringify_keys! | |
assert_equal @strings, @strings.with_indifferent_access.dup.stringify_keys! | |
assert_equal @strings, @mixed.with_indifferent_access.dup.stringify_keys! | |
end | |
def test_deep_stringify_keys_bang_for_hash_with_indifferent_access | |
assert_instance_of ActiveSupport::HashWithIndifferentAccess, @nested_symbols.with_indifferent_access.dup.deep_stringify_keys! | |
assert_equal @nested_strings, @nested_symbols.with_indifferent_access.deep_dup.deep_stringify_keys! | |
assert_equal @nested_strings, @nested_strings.with_indifferent_access.deep_dup.deep_stringify_keys! | |
assert_equal @nested_strings, @nested_mixed.with_indifferent_access.deep_dup.deep_stringify_keys! | |
end | |
def test_nested_under_indifferent_access | |
foo = { "foo" => SubclassingHash.new.tap { |h| h["bar"] = "baz" } }.with_indifferent_access | |
assert_kind_of ActiveSupport::HashWithIndifferentAccess, foo["foo"] | |
foo = { "foo" => NonIndifferentHash.new.tap { |h| h["bar"] = "baz" } }.with_indifferent_access | |
assert_kind_of NonIndifferentHash, foo["foo"] | |
foo = { "foo" => IndifferentHash.new.tap { |h| h["bar"] = "baz" } }.with_indifferent_access | |
assert_kind_of IndifferentHash, foo["foo"] | |
end | |
def test_indifferent_assorted | |
@strings = @strings.with_indifferent_access | |
@symbols = @symbols.with_indifferent_access | |
@mixed = @mixed.with_indifferent_access | |
assert_equal "a", @strings.__send__(:convert_key, :a) | |
assert_equal 1, @strings.fetch("a") | |
assert_equal 1, @strings.fetch(:a.to_s) | |
assert_equal 1, @strings.fetch(:a) | |
hashes = { :@strings => @strings, :@symbols => @symbols, :@mixed => @mixed } | |
method_map = { '[]': 1, fetch: 1, values_at: [1], | |
has_key?: true, include?: true, key?: true, | |
member?: true } | |
hashes.each do |name, hash| | |
method_map.sort_by(&:to_s).each do |meth, expected| | |
assert_equal(expected, hash.__send__(meth, "a"), | |
"Calling #{name}.#{meth} 'a'") | |
assert_equal(expected, hash.__send__(meth, :a), | |
"Calling #{name}.#{meth} :a") | |
end | |
end | |
assert_equal [1, 2], @strings.values_at("a", "b") | |
assert_equal [1, 2], @strings.values_at(:a, :b) | |
assert_equal [1, 2], @symbols.values_at("a", "b") | |
assert_equal [1, 2], @symbols.values_at(:a, :b) | |
assert_equal [1, 2], @mixed.values_at("a", "b") | |
assert_equal [1, 2], @mixed.values_at(:a, :b) | |
end | |
def test_indifferent_fetch_values | |
@mixed = @mixed.with_indifferent_access | |
assert_equal [1, 2], @mixed.fetch_values("a", "b") | |
assert_equal [1, 2], @mixed.fetch_values(:a, :b) | |
assert_equal [1, 2], @mixed.fetch_values(:a, "b") | |
assert_equal [1, "c"], @mixed.fetch_values(:a, :c) { |key| key } | |
assert_raise(KeyError) { @mixed.fetch_values(:a, :c) } | |
end | |
def test_indifferent_reading | |
hash = HashWithIndifferentAccess.new | |
hash["a"] = 1 | |
hash["b"] = true | |
hash["c"] = false | |
hash["d"] = nil | |
assert_equal 1, hash[:a] | |
assert_equal true, hash[:b] | |
assert_equal false, hash[:c] | |
assert_nil hash[:d] | |
assert_nil hash[:e] | |
end | |
def test_indifferent_reading_with_nonnil_default | |
hash = HashWithIndifferentAccess.new(1) | |
hash["a"] = 1 | |
hash["b"] = true | |
hash["c"] = false | |
hash["d"] = nil | |
assert_equal 1, hash[:a] | |
assert_equal true, hash[:b] | |
assert_equal false, hash[:c] | |
assert_nil hash[:d] | |
assert_equal 1, hash[:e] | |
end | |
def test_indifferent_writing | |
hash = HashWithIndifferentAccess.new | |
hash[:a] = 1 | |
hash["b"] = 2 | |
hash[3] = 3 | |
assert_equal 1, hash["a"] | |
assert_equal 2, hash["b"] | |
assert_equal 1, hash[:a] | |
assert_equal 2, hash[:b] | |
assert_equal 3, hash[3] | |
end | |
def test_indifferent_update | |
hash = HashWithIndifferentAccess.new | |
hash[:a] = "a" | |
hash["b"] = "b" | |
updated_with_strings = hash.update(@strings) | |
updated_with_symbols = hash.update(@symbols) | |
updated_with_mixed = hash.update(@mixed) | |
assert_equal 1, updated_with_strings[:a] | |
assert_equal 1, updated_with_strings["a"] | |
assert_equal 2, updated_with_strings["b"] | |
assert_equal 1, updated_with_symbols[:a] | |
assert_equal 2, updated_with_symbols["b"] | |
assert_equal 2, updated_with_symbols[:b] | |
assert_equal 1, updated_with_mixed[:a] | |
assert_equal 2, updated_with_mixed["b"] | |
assert [updated_with_strings, updated_with_symbols, updated_with_mixed].all? { |h| h.keys.size == 2 } | |
end | |
def test_update_with_multiple_arguments | |
hash = HashWithIndifferentAccess.new | |
hash.update({ "a" => 1 }, { "b" => 2 }) | |
assert_equal 1, hash["a"] | |
assert_equal 2, hash["b"] | |
end | |
def test_update_with_to_hash_conversion | |
hash = HashWithIndifferentAccess.new | |
hash.update HashByConversion.new(a: 1) | |
assert_equal 1, hash["a"] | |
end | |
def test_indifferent_merging | |
hash = HashWithIndifferentAccess.new | |
hash[:a] = "failure" | |
hash["b"] = "failure" | |
other = { "a" => 1, :b => 2 } | |
merged = hash.merge(other) | |
assert_equal HashWithIndifferentAccess, merged.class | |
assert_equal 1, merged[:a] | |
assert_equal 2, merged["b"] | |
hash.update(other) | |
assert_equal 1, hash[:a] | |
assert_equal 2, hash["b"] | |
end | |
def test_merging_with_multiple_arguments | |
hash = HashWithIndifferentAccess.new | |
merged = hash.merge({ "a" => 1 }, { "b" => 2 }) | |
assert_equal 1, merged["a"] | |
assert_equal 2, merged["b"] | |
end | |
def test_merge_with_to_hash_conversion | |
hash = HashWithIndifferentAccess.new | |
merged = hash.merge HashByConversion.new(a: 1) | |
assert_equal 1, merged["a"] | |
end | |
def test_indifferent_replace | |
hash = HashWithIndifferentAccess.new | |
hash[:a] = 42 | |
replaced = hash.replace(b: 12) | |
assert hash.key?("b") | |
assert_not hash.key?(:a) | |
assert_equal 12, hash[:b] | |
assert_same hash, replaced | |
end | |
def test_replace_with_to_hash_conversion | |
hash = HashWithIndifferentAccess.new | |
hash[:a] = 42 | |
replaced = hash.replace(HashByConversion.new(b: 12)) | |
assert hash.key?("b") | |
assert_not hash.key?(:a) | |
assert_equal 12, hash[:b] | |
assert_same hash, replaced | |
end | |
def test_indifferent_merging_with_block | |
hash = HashWithIndifferentAccess.new | |
hash[:a] = 1 | |
hash["b"] = 3 | |
other = { "a" => 4, :b => 2, "c" => 10 } | |
merged = hash.merge(other) { |key, old, new| old > new ? old : new } | |
assert_equal HashWithIndifferentAccess, merged.class | |
assert_equal 4, merged[:a] | |
assert_equal 3, merged["b"] | |
assert_equal 10, merged[:c] | |
other_indifferent = HashWithIndifferentAccess.new("a" => 9, :b => 2) | |
merged = hash.merge(other_indifferent) { |key, old, new| old + new } | |
assert_equal HashWithIndifferentAccess, merged.class | |
assert_equal 10, merged[:a] | |
assert_equal 5, merged[:b] | |
end | |
def test_indifferent_reverse_merging | |
hash = HashWithIndifferentAccess.new key: :old_value | |
hash.reverse_merge! key: :new_value | |
assert_equal :old_value, hash[:key] | |
hash = HashWithIndifferentAccess.new("some" => "value", "other" => "value") | |
hash.reverse_merge!(some: "noclobber", another: "clobber") | |
assert_equal "value", hash[:some] | |
assert_equal "clobber", hash[:another] | |
end | |
def test_indifferent_with_defaults_aliases_reverse_merge | |
hash = HashWithIndifferentAccess.new key: :old_value | |
actual = hash.with_defaults key: :new_value | |
assert_equal :old_value, actual[:key] | |
hash = HashWithIndifferentAccess.new key: :old_value | |
hash.with_defaults! key: :new_value | |
assert_equal :old_value, hash[:key] | |
end | |
def test_indifferent_deleting | |
get_hash = proc { { a: "foo" }.with_indifferent_access } | |
hash = get_hash.call | |
assert_equal "foo", hash.delete(:a) | |
assert_nil hash.delete(:a) | |
hash = get_hash.call | |
assert_equal "foo", hash.delete("a") | |
assert_nil hash.delete("a") | |
end | |
def test_indifferent_select | |
hash = ActiveSupport::HashWithIndifferentAccess.new(@strings).select { |k, v| v == 1 } | |
assert_equal({ "a" => 1 }, hash) | |
assert_instance_of ActiveSupport::HashWithIndifferentAccess, hash | |
end | |
def test_indifferent_select_returns_enumerator | |
enum = ActiveSupport::HashWithIndifferentAccess.new(@strings).select | |
assert_instance_of Enumerator, enum | |
end | |
def test_indifferent_select_returns_a_hash_when_unchanged | |
hash = ActiveSupport::HashWithIndifferentAccess.new(@strings).select { |k, v| true } | |
assert_instance_of ActiveSupport::HashWithIndifferentAccess, hash | |
end | |
def test_indifferent_select_bang | |
indifferent_strings = ActiveSupport::HashWithIndifferentAccess.new(@strings) | |
indifferent_strings.select! { |k, v| v == 1 } | |
assert_equal({ "a" => 1 }, indifferent_strings) | |
assert_instance_of ActiveSupport::HashWithIndifferentAccess, indifferent_strings | |
end | |
def test_indifferent_reject | |
hash = ActiveSupport::HashWithIndifferentAccess.new(@strings).reject { |k, v| v != 1 } | |
assert_equal({ "a" => 1 }, hash) | |
assert_instance_of ActiveSupport::HashWithIndifferentAccess, hash | |
end | |
def test_indifferent_reject_returns_enumerator | |
enum = ActiveSupport::HashWithIndifferentAccess.new(@strings).reject | |
assert_instance_of Enumerator, enum | |
end | |
def test_indifferent_reject_bang | |
indifferent_strings = ActiveSupport::HashWithIndifferentAccess.new(@strings) | |
indifferent_strings.reject! { |k, v| v != 1 } | |
assert_equal({ "a" => 1 }, indifferent_strings) | |
assert_instance_of ActiveSupport::HashWithIndifferentAccess, indifferent_strings | |
end | |
def test_indifferent_transform_keys | |
hash = ActiveSupport::HashWithIndifferentAccess.new(@strings).transform_keys { |k| k * 2 } | |
assert_equal({ "aa" => 1, "bb" => 2 }, hash) | |
assert_instance_of ActiveSupport::HashWithIndifferentAccess, hash | |
hash = ActiveSupport::HashWithIndifferentAccess.new(@strings).transform_keys { |k| k.to_sym } | |
assert_equal(1, hash[:a]) | |
assert_equal(1, hash["a"]) | |
assert_instance_of ActiveSupport::HashWithIndifferentAccess, hash | |
end | |
def test_indifferent_deep_transform_keys | |
hash = ActiveSupport::HashWithIndifferentAccess.new(@nested_strings).deep_transform_keys { |k| k * 2 } | |
assert_equal({ "aa" => { "bb" => { "cc" => 3 } } }, hash) | |
assert_instance_of ActiveSupport::HashWithIndifferentAccess, hash | |
hash = ActiveSupport::HashWithIndifferentAccess.new(@nested_strings).deep_transform_keys { |k| k.to_sym } | |
assert_equal(3, hash[:a][:b][:c]) | |
assert_equal(3, hash["a"]["b"]["c"]) | |
assert_instance_of ActiveSupport::HashWithIndifferentAccess, hash | |
end | |
def test_indifferent_transform_keys_bang | |
indifferent_strings = ActiveSupport::HashWithIndifferentAccess.new(@strings) | |
indifferent_strings.transform_keys! { |k| k * 2 } | |
assert_equal({ "aa" => 1, "bb" => 2 }, indifferent_strings) | |
assert_instance_of ActiveSupport::HashWithIndifferentAccess, indifferent_strings | |
indifferent_strings = ActiveSupport::HashWithIndifferentAccess.new(@strings) | |
indifferent_strings.transform_keys! { |k| k.to_sym } | |
assert_equal(1, indifferent_strings[:a]) | |
assert_equal(1, indifferent_strings["a"]) | |
assert_instance_of ActiveSupport::HashWithIndifferentAccess, indifferent_strings | |
end | |
def test_indifferent_deep_transform_keys_bang | |
hash = ActiveSupport::HashWithIndifferentAccess.new(@nested_strings) | |
hash.deep_transform_keys! { |k| k * 2 } | |
assert_equal({ "aa" => { "bb" => { "cc" => 3 } } }, hash) | |
assert_instance_of ActiveSupport::HashWithIndifferentAccess, hash | |
hash = ActiveSupport::HashWithIndifferentAccess.new(@nested_strings) | |
hash.deep_transform_keys! { |k| k.to_sym } | |
assert_equal(3, hash[:a][:b][:c]) | |
assert_equal(3, hash["a"]["b"]["c"]) | |
assert_instance_of ActiveSupport::HashWithIndifferentAccess, hash | |
end | |
def test_indifferent_transform_values | |
hash = ActiveSupport::HashWithIndifferentAccess.new(@strings).transform_values { |v| v * 2 } | |
assert_equal({ "a" => 2, "b" => 4 }, hash) | |
assert_instance_of ActiveSupport::HashWithIndifferentAccess, hash | |
end | |
def test_indifferent_transform_values_bang | |
indifferent_strings = ActiveSupport::HashWithIndifferentAccess.new(@strings) | |
indifferent_strings.transform_values! { |v| v * 2 } | |
assert_equal({ "a" => 2, "b" => 4 }, indifferent_strings) | |
assert_instance_of ActiveSupport::HashWithIndifferentAccess, indifferent_strings | |
end | |
def test_indifferent_assoc | |
indifferent_strings = ActiveSupport::HashWithIndifferentAccess.new(@strings) | |
key, value = indifferent_strings.assoc(:a) | |
assert_equal("a", key) | |
assert_equal(1, value) | |
end | |
def test_indifferent_compact | |
hash_contain_nil_value = @strings.merge("z" => nil) | |
hash = ActiveSupport::HashWithIndifferentAccess.new(hash_contain_nil_value) | |
compacted_hash = hash.compact | |
assert_equal(@strings, compacted_hash) | |
assert_equal(hash_contain_nil_value, hash) | |
assert_instance_of ActiveSupport::HashWithIndifferentAccess, compacted_hash | |
empty_hash = ActiveSupport::HashWithIndifferentAccess.new | |
compacted_hash = empty_hash.compact | |
assert_equal compacted_hash, empty_hash | |
non_empty_hash = ActiveSupport::HashWithIndifferentAccess.new(foo: :bar) | |
compacted_hash = non_empty_hash.compact | |
assert_equal compacted_hash, non_empty_hash | |
end | |
def test_indifferent_to_hash | |
# Should convert to a Hash with String keys. | |
assert_equal @strings, @mixed.with_indifferent_access.to_hash | |
# Should preserve the default value. | |
mixed_with_default = @mixed.dup | |
mixed_with_default.default = "1234" | |
roundtrip = mixed_with_default.with_indifferent_access.to_hash | |
assert_equal @strings, roundtrip | |
assert_equal "1234", roundtrip.default | |
# Ensure nested hashes are not HashWithIndifferentAccess | |
new_to_hash = @nested_mixed.with_indifferent_access.to_hash | |
assert_not new_to_hash.instance_of?(HashWithIndifferentAccess) | |
assert_not new_to_hash["a"].instance_of?(HashWithIndifferentAccess) | |
assert_not new_to_hash["a"]["b"].instance_of?(HashWithIndifferentAccess) | |
end | |
def test_lookup_returns_the_same_object_that_is_stored_in_hash_indifferent_access | |
hash = HashWithIndifferentAccess.new { |h, k| h[k] = [] } | |
hash[:a] << 1 | |
assert_equal [1], hash[:a] | |
end | |
def test_with_indifferent_access_has_no_side_effects_on_existing_hash | |
hash = { content: [{ :foo => :bar, "bar" => "baz" }] } | |
hash.with_indifferent_access | |
assert_equal [:foo, "bar"], hash[:content].first.keys | |
end | |
def test_indifferent_hash_with_array_of_hashes | |
hash = { "urls" => { "url" => [ { "address" => "1" }, { "address" => "2" } ] } }.with_indifferent_access | |
assert_equal "1", hash[:urls][:url].first[:address] | |
hash = hash.to_hash | |
assert_not hash.instance_of?(HashWithIndifferentAccess) | |
assert_not hash["urls"].instance_of?(HashWithIndifferentAccess) | |
assert_not hash["urls"]["url"].first.instance_of?(HashWithIndifferentAccess) | |
end | |
def test_should_preserve_array_subclass_when_value_is_array | |
array = SubclassingArray.new | |
array << { "address" => "1" } | |
hash = { "urls" => { "url" => array } }.with_indifferent_access | |
assert_equal SubclassingArray, hash[:urls][:url].class | |
end | |
def test_should_preserve_array_class_when_hash_value_is_frozen_array | |
array = SubclassingArray.new | |
array << { "address" => "1" } | |
hash = { "urls" => { "url" => array.freeze } }.with_indifferent_access | |
assert_equal SubclassingArray, hash[:urls][:url].class | |
end | |
def test_stringify_and_symbolize_keys_on_indifferent_preserves_hash | |
h = HashWithIndifferentAccess.new | |
h[:first] = 1 | |
h = h.stringify_keys | |
assert_equal 1, h["first"] | |
h = HashWithIndifferentAccess.new | |
h["first"] = 1 | |
h = h.symbolize_keys | |
assert_equal 1, h[:first] | |
end | |
def test_deep_stringify_and_deep_symbolize_keys_on_indifferent_preserves_hash | |
h = HashWithIndifferentAccess.new | |
h[:first] = 1 | |
h = h.deep_stringify_keys | |
assert_equal 1, h["first"] | |
h = HashWithIndifferentAccess.new | |
h["first"] = 1 | |
h = h.deep_symbolize_keys | |
assert_equal 1, h[:first] | |
end | |
def test_to_options_on_indifferent_preserves_hash | |
h = HashWithIndifferentAccess.new | |
h["first"] = 1 | |
h.to_options! | |
assert_equal 1, h["first"] | |
end | |
def test_to_options_on_indifferent_preserves_works_as_hash_with_dup | |
h = HashWithIndifferentAccess.new(a: { b: "b" }) | |
dup = h.dup | |
dup[:a][:c] = "c" | |
assert_equal "c", h[:a][:c] | |
end | |
def test_indifferent_sub_hashes | |
h = { "user" => { "id" => 5 } }.with_indifferent_access | |
["user", :user].each { |user| [:id, "id"].each { |id| assert_equal 5, h[user][id], "h[#{user.inspect}][#{id.inspect}] should be 5" } } | |
h = { user: { id: 5 } }.with_indifferent_access | |
["user", :user].each { |user| [:id, "id"].each { |id| assert_equal 5, h[user][id], "h[#{user.inspect}][#{id.inspect}] should be 5" } } | |
end | |
def test_indifferent_duplication | |
# Should preserve default value | |
h = HashWithIndifferentAccess.new | |
h.default = "1234" | |
assert_equal h.default, h.dup.default | |
# Should preserve class for subclasses | |
h = IndifferentHash.new | |
assert_equal h.class, h.dup.class | |
end | |
def test_nested_dig_indifferent_access | |
data = { "this" => { "views" => 1234 } }.with_indifferent_access | |
assert_equal 1234, data.dig(:this, :views) | |
end | |
def test_argless_default_with_existing_nil_key | |
h = Hash.new(:default).merge(nil => "defined").with_indifferent_access | |
assert_equal :default, h.default | |
end | |
def test_default_with_argument | |
h = Hash.new { 5 }.merge(1 => 2).with_indifferent_access | |
assert_equal 5, h.default(1) | |
end | |
def test_default_proc | |
h = ActiveSupport::HashWithIndifferentAccess.new { |hash, key| key } | |
assert_nil h.default | |
assert_equal "foo", h.default("foo") | |
assert_equal "foo", h.default(:foo) | |
end | |
def test_double_conversion_with_nil_key | |
h = { nil => "defined" }.with_indifferent_access.with_indifferent_access | |
assert_nil h[:undefined_key] | |
end | |
def test_assorted_keys_not_stringified | |
original = { Object.new => 2, 1 => 2, [] => true } | |
indiff = original.with_indifferent_access | |
assert_not(indiff.keys.any? { |k| k.kind_of? String }, "A key was converted to a string!") | |
end | |
def test_deep_merge_on_indifferent_access | |
hash_1 = HashWithIndifferentAccess.new(a: "a", b: "b", c: { c1: "c1", c2: "c2", c3: { d1: "d1" } }) | |
hash_2 = HashWithIndifferentAccess.new(a: 1, c: { c1: 2, c3: { d2: "d2" } }) | |
hash_3 = { a: 1, c: { c1: 2, c3: { d2: "d2" } } } | |
expected = { "a" => 1, "b" => "b", "c" => { "c1" => 2, "c2" => "c2", "c3" => { "d1" => "d1", "d2" => "d2" } } } | |
assert_equal expected, hash_1.deep_merge(hash_2) | |
assert_equal expected, hash_1.deep_merge(hash_3) | |
hash_1.deep_merge!(hash_2) | |
assert_equal expected, hash_1 | |
end | |
def test_store_on_indifferent_access | |
hash = HashWithIndifferentAccess.new | |
hash.store(:test1, 1) | |
hash.store("test1", 11) | |
hash[:test2] = 2 | |
hash["test2"] = 22 | |
expected = { "test1" => 11, "test2" => 22 } | |
assert_equal expected, hash | |
end | |
def test_constructor_on_indifferent_access | |
hash = HashWithIndifferentAccess[:foo, 1] | |
assert_equal 1, hash[:foo] | |
assert_equal 1, hash["foo"] | |
hash[:foo] = 3 | |
assert_equal 3, hash[:foo] | |
assert_equal 3, hash["foo"] | |
end | |
def test_indifferent_slice | |
original = { a: "x", b: "y", c: 10 }.with_indifferent_access | |
expected = { a: "x", b: "y" }.with_indifferent_access | |
[["a", "b"], [:a, :b]].each do |keys| | |
# Should return a new hash with only the given keys. | |
assert_equal expected, original.slice(*keys), keys.inspect | |
assert_not_equal expected, original | |
end | |
end | |
def test_indifferent_slice_inplace | |
original = { a: "x", b: "y", c: 10 }.with_indifferent_access | |
expected = { c: 10 }.with_indifferent_access | |
[["a", "b"], [:a, :b]].each do |keys| | |
# Should replace the hash with only the given keys. | |
copy = original.dup | |
assert_equal expected, copy.slice!(*keys) | |
end | |
end | |
def test_indifferent_slice_access_with_symbols | |
original = { "login" => "bender", "password" => "shiny", "stuff" => "foo" } | |
original = original.with_indifferent_access | |
slice = original.slice(:login, :password) | |
assert_equal "bender", slice[:login] | |
assert_equal "bender", slice["login"] | |
end | |
def test_indifferent_without | |
original = { a: "x", b: "y", c: 10 }.with_indifferent_access | |
expected = { c: 10 }.with_indifferent_access | |
[["a", "b"], [:a, :b]].each do |keys| | |
# Should return a new hash without the given keys. | |
assert_equal expected, original.without(*keys), keys.inspect | |
assert_not_equal expected, original | |
end | |
end | |
def test_indifferent_extract | |
original = { :a => 1, "b" => 2, :c => 3, "d" => 4 }.with_indifferent_access | |
expected = { a: 1, b: 2 }.with_indifferent_access | |
remaining = { c: 3, d: 4 }.with_indifferent_access | |
[["a", "b"], [:a, :b]].each do |keys| | |
copy = original.dup | |
assert_equal expected, copy.extract!(*keys) | |
assert_equal remaining, copy | |
end | |
end | |
def test_new_with_to_hash_conversion | |
hash = HashWithIndifferentAccess.new(HashByConversion.new(a: 1)) | |
assert hash.key?("a") | |
assert_equal 1, hash[:a] | |
end | |
def test_dup_with_default_proc | |
hash = HashWithIndifferentAccess.new | |
hash.default_proc = proc { |h, v| raise "walrus" } | |
assert_nothing_raised { hash.dup } | |
end | |
def test_dup_with_default_proc_sets_proc | |
hash = HashWithIndifferentAccess.new | |
hash.default_proc = proc { |h, k| k + 1 } | |
new_hash = hash.dup | |
assert_equal 3, new_hash[2] | |
new_hash.default = 2 | |
assert_equal 2, new_hash[:non_existent] | |
end | |
def test_to_hash_with_raising_default_proc | |
hash = HashWithIndifferentAccess.new | |
hash.default_proc = proc { |h, k| raise "walrus" } | |
assert_nothing_raised { hash.to_hash } | |
end | |
def test_new_with_to_hash_conversion_copies_default | |
normal_hash = Hash.new(3) | |
normal_hash[:a] = 1 | |
hash = HashWithIndifferentAccess.new(HashByConversion.new(normal_hash)) | |
assert_equal 1, hash[:a] | |
assert_equal 3, hash[:b] | |
end | |
def test_new_with_to_hash_conversion_copies_default_proc | |
normal_hash = Hash.new { 1 + 2 } | |
normal_hash[:a] = 1 | |
hash = HashWithIndifferentAccess.new(HashByConversion.new(normal_hash)) | |
assert_equal 1, hash[:a] | |
assert_equal 3, hash[:b] | |
end | |
def test_inheriting_from_top_level_hash_with_indifferent_access_preserves_ancestors_chain | |
klass = Class.new(::HashWithIndifferentAccess) | |
assert_equal ActiveSupport::HashWithIndifferentAccess, klass.ancestors[1] | |
end | |
def test_inheriting_from_hash_with_indifferent_access_properly_dumps_ivars | |
klass = Class.new(::HashWithIndifferentAccess) do | |
def initialize(*) | |
@foo = "bar" | |
super | |
end | |
end | |
yaml_output = klass.new.to_yaml | |
# `hash-with-ivars` was introduced in 2.0.9 (https://git.io/vyUQW) | |
if Gem::Version.new(Psych::VERSION) >= Gem::Version.new("2.0.9") | |
assert_includes yaml_output, "hash-with-ivars" | |
assert_includes yaml_output, "@foo: bar" | |
else | |
assert_includes yaml_output, "hash" | |
end | |
end | |
def test_should_use_default_proc_for_unknown_key | |
hash_wia = HashWithIndifferentAccess.new { 1 + 2 } | |
assert_equal 3, hash_wia[:new_key] | |
end | |
def test_should_return_nil_if_no_key_is_supplied | |
hash_wia = HashWithIndifferentAccess.new { 1 + 2 } | |
assert_nil hash_wia.default | |
end | |
def test_should_use_default_value_for_unknown_key | |
hash_wia = HashWithIndifferentAccess.new(3) | |
assert_equal 3, hash_wia[:new_key] | |
end | |
def test_should_use_default_value_if_no_key_is_supplied | |
hash_wia = HashWithIndifferentAccess.new(3) | |
assert_equal 3, hash_wia.default | |
end | |
def test_should_nil_if_no_default_value_is_supplied | |
hash_wia = HashWithIndifferentAccess.new | |
assert_nil hash_wia.default | |
end | |
def test_should_return_dup_for_with_indifferent_access | |
hash_wia = HashWithIndifferentAccess.new | |
assert_equal hash_wia, hash_wia.with_indifferent_access | |
assert_not_same hash_wia, hash_wia.with_indifferent_access | |
end | |
def test_allows_setting_frozen_array_values_with_indifferent_access | |
value = [1, 2, 3].freeze | |
hash = HashWithIndifferentAccess.new | |
hash[:key] = value | |
assert_equal hash[:key], value | |
end | |
def test_should_copy_the_default_value_when_converting_to_hash_with_indifferent_access | |
hash = Hash.new(3) | |
hash_wia = hash.with_indifferent_access | |
assert_equal 3, hash_wia.default | |
end | |
def test_should_copy_the_default_proc_when_converting_to_hash_with_indifferent_access | |
hash = Hash.new do | |
2 + 1 | |
end | |
assert_equal 3, hash[:foo] | |
hash_wia = hash.with_indifferent_access | |
assert_equal 3, hash_wia[:foo] | |
assert_equal 3, hash_wia[:bar] | |
end | |
def test_should_copy_the_default_when_converting_non_hash_to_hash_with_indifferent_access | |
non_hash = Object.new | |
def non_hash.to_hash | |
h = { foo: :bar } | |
h.default = :baz | |
h | |
end | |
hash_wia = HashWithIndifferentAccess.new(non_hash) | |
assert_equal :bar, hash_wia[:foo] | |
assert_equal :baz, hash_wia[:missing] | |
end | |
def test_should_copy_the_default_proc_when_converting_non_hash_to_hash_with_indifferent_access | |
non_hash = Object.new | |
def non_hash.to_hash | |
h = { foo: :bar } | |
h.default_proc = ->(hash, key) { hash[key] = :baz } | |
h | |
end | |
hash_wia = HashWithIndifferentAccess.new(non_hash) | |
assert_equal :bar, hash_wia[:foo] | |
assert_equal :baz, hash_wia[:missing] | |
end | |
end |
# frozen_string_literal: true | |
module ActiveSupport
  # Wraps I18n.translate for keys ending in "_html" (or named "html"):
  # interpolation options are HTML-escaped on the way in and the resulting
  # translation is marked html_safe on the way out. Other keys translate
  # untouched.
  module HtmlSafeTranslation # :nodoc:
    extend self

    def translate(key, **options)
      return I18n.translate(key, **options) unless html_safe_translation_key?(key)

      escaped_options = html_escape_translation_options(options)
      html_safe_translation(I18n.translate(key, **escaped_options))
    end

    private
      # True for keys like :foo_html, "foo.html", or plain "html".
      def html_safe_translation_key?(key)
        /(?:_|\b)html\z/.match?(key)
      end

      # Escapes every interpolation value except reserved i18n options and a
      # numeric :count (which pluralization needs verbatim).
      def html_escape_translation_options(options)
        options.each do |name, value|
          next if i18n_option?(name) || (name == :count && value.is_a?(Numeric))

          options[name] = ERB::Util.html_escape(value.to_s)
        end
      end

      def i18n_option?(name)
        # Memoize the reserved keys as a Set for O(1) membership checks.
        (@i18n_option_names ||= I18n::RESERVED_KEYS.to_set).include?(name)
      end

      # Marks the translation (or each element of an array translation) as
      # html_safe when it supports it; other values pass through unchanged.
      def html_safe_translation(translation)
        if translation.respond_to?(:map)
          translation.map { |part| part.respond_to?(:html_safe) ? part.html_safe : part }
        else
          translation.respond_to?(:html_safe) ? translation.html_safe : translation
        end
      end
  end
end
# frozen_string_literal: true

require "active_support/core_ext/hash/deep_merge"
require "active_support/core_ext/hash/except"
require "active_support/core_ext/hash/slice"
# i18n is a hard runtime dependency here; fail loudly with install guidance
# instead of a bare LoadError.
begin
  require "i18n"
  require "i18n/backend/fallbacks"
rescue LoadError => e
  $stderr.puts "The i18n gem is not available. Please add it to your Gemfile and run bundle install"
  raise e
end
require "active_support/lazy_load_hooks"
# Let frameworks hook in once i18n is loadable.
ActiveSupport.run_load_hooks(:i18n)
# Register ActiveSupport's bundled English translations.
I18n.load_path << File.expand_path("locale/en.yml", __dir__)
I18n.load_path << File.expand_path("locale/en.rb", __dir__)
# frozen_string_literal: true

require "active_support"
require "active_support/core_ext/array/wrap"

# :enddoc:

module I18n
  # Integrates the i18n gem with a Rails application: exposes +config.i18n+,
  # applies those settings once the app boots, and reloads translations in
  # development when locale files change.
  class Railtie < Rails::Railtie
    config.i18n = ActiveSupport::OrderedOptions.new
    config.i18n.railties_load_path = []
    config.i18n.load_path = []
    config.i18n.fallbacks = ActiveSupport::OrderedOptions.new

    config.eager_load_namespaces << I18n

    # Set the i18n configuration after initialization since a lot of
    # configuration is still usually done in application initializers.
    config.after_initialize do |app|
      I18n::Railtie.initialize_i18n(app)
    end

    # Trigger i18n config before any eager loading has happened
    # so it's ready if any classes require it when eager loaded.
    config.before_eager_load do |app|
      I18n::Railtie.initialize_i18n(app)
    end

    # Guard flag: initialize_i18n may be triggered by both hooks above but
    # must only apply the configuration once.
    @i18n_inited = false

    # Setup i18n configuration.
    def self.initialize_i18n(app)
      return if @i18n_inited

      # :fallbacks is handled separately below, after all other settings.
      fallbacks = app.config.i18n.delete(:fallbacks)

      # Avoid issues with setting the default_locale by disabling available locales
      # check while configuring.
      enforce_available_locales = app.config.i18n.delete(:enforce_available_locales)
      enforce_available_locales = I18n.enforce_available_locales if enforce_available_locales.nil?
      I18n.enforce_available_locales = false

      reloadable_paths = []
      app.config.i18n.each do |setting, value|
        case setting
        when :railties_load_path
          reloadable_paths = value
          app.config.i18n.load_path.unshift(*value.flat_map(&:existent))
        when :load_path
          I18n.load_path += value
        when :raise_on_missing_translations
          forward_raise_on_missing_translations_config(app)
        else
          # Any other setting maps 1:1 onto an I18n writer (e.g. default_locale).
          I18n.public_send("#{setting}=", value)
        end
      end

      init_fallbacks(fallbacks) if fallbacks && validate_fallbacks(fallbacks)

      # Restore available locales check so it will take place from now on.
      I18n.enforce_available_locales = enforce_available_locales

      unless app.config.cache_classes
        directories = watched_dirs_with_extensions(reloadable_paths)
        # On change: drop deleted files from the load path, pick up new ones.
        reloader = app.config.file_watcher.new(I18n.load_path.dup, directories) do
          I18n.load_path.keep_if { |p| File.exist?(p) }
          I18n.load_path |= reloadable_paths.flat_map(&:existent)
        end

        app.reloaders << reloader
        app.reloader.to_run do
          reloader.execute_if_updated { require_unload_lock! }
        end
        reloader.execute
      end

      @i18n_inited = true
    end

    # Defers config.i18n.raise_on_missing_translations to Action View and
    # Action Controller, applied when each framework is loaded.
    def self.forward_raise_on_missing_translations_config(app)
      ActiveSupport.on_load(:action_view) do
        ActionView::Helpers::TranslationHelper.raise_on_missing_translations = app.config.i18n.raise_on_missing_translations
      end

      ActiveSupport.on_load(:action_controller) do
        AbstractController::Translation.raise_on_missing_translations = app.config.i18n.raise_on_missing_translations
      end
    end

    def self.include_fallbacks_module
      I18n.backend.class.include(I18n::Backend::Fallbacks)
    end

    def self.init_fallbacks(fallbacks)
      include_fallbacks_module

      # Normalize the configured value into I18n::Locale::Fallbacks.new
      # arguments: OrderedOptions may carry :defaults and/or :map; Hash/Array
      # are wrapped as-is; +true+ means "fall back to the default locale".
      args = \
        case fallbacks
        when ActiveSupport::OrderedOptions
          [*(fallbacks[:defaults] || []) << fallbacks[:map]].compact
        when Hash, Array
          Array.wrap(fallbacks)
        else # TrueClass
          [I18n.default_locale]
        end

      I18n.fallbacks = I18n::Locale::Fallbacks.new(*args)
    end

    # Returns truthy when +fallbacks+ is a usable configuration; raises for
    # unsupported types. An empty OrderedOptions is treated as "not set".
    def self.validate_fallbacks(fallbacks)
      case fallbacks
      when ActiveSupport::OrderedOptions
        !fallbacks.empty?
      when TrueClass, Array, Hash
        true
      else
        raise "Unexpected fallback type #{fallbacks.inspect}"
      end
    end

    # Maps each watched directory to the file extensions the file watcher
    # should monitor within it.
    def self.watched_dirs_with_extensions(paths)
      paths.each_with_object({}) do |path, result|
        result[path.absolute_current] = path.extensions
      end
    end
  end
end
# frozen_string_literal: true

require_relative "abstract_unit"
require "active_support/time"
require "active_support/core_ext/array/conversions"

# Verifies the default :en locale data shipped with Active Support:
# date/time localization formats, calendar names, and the connectors used by
# Array#to_sentence.
class I18nTest < ActiveSupport::TestCase
  def setup
    # Fixed sample date/time used by the localization assertions below.
    @date = Date.parse("2008-7-2")
    @time = Time.utc(2008, 7, 2, 16, 47, 1)
  end

  def test_time_zone_localization_with_default_format
    now = Time.local(2000)
    assert_equal now.strftime("%a, %d %b %Y %H:%M:%S %z"), I18n.localize(now)
  end

  def test_date_localization_should_use_default_format
    assert_equal @date.strftime("%Y-%m-%d"), I18n.localize(@date)
  end

  def test_date_localization_with_default_format
    assert_equal @date.strftime("%Y-%m-%d"), I18n.localize(@date, format: :default)
  end

  def test_date_localization_with_short_format
    assert_equal @date.strftime("%b %d"), I18n.localize(@date, format: :short)
  end

  def test_date_localization_with_long_format
    assert_equal @date.strftime("%B %d, %Y"), I18n.localize(@date, format: :long)
  end

  def test_time_localization_should_use_default_format
    assert_equal @time.strftime("%a, %d %b %Y %H:%M:%S %z"), I18n.localize(@time)
  end

  def test_time_localization_with_default_format
    assert_equal @time.strftime("%a, %d %b %Y %H:%M:%S %z"), I18n.localize(@time, format: :default)
  end

  def test_time_localization_with_short_format
    assert_equal @time.strftime("%d %b %H:%M"), I18n.localize(@time, format: :short)
  end

  def test_time_localization_with_long_format
    assert_equal @time.strftime("%B %d, %Y %H:%M"), I18n.localize(@time, format: :long)
  end

  # The locale data is expected to agree with Ruby's own Date constants.
  def test_day_names
    assert_equal Date::DAYNAMES, I18n.translate(:'date.day_names')
  end

  def test_abbr_day_names
    assert_equal Date::ABBR_DAYNAMES, I18n.translate(:'date.abbr_day_names')
  end

  def test_month_names
    assert_equal Date::MONTHNAMES, I18n.translate(:'date.month_names')
  end

  def test_abbr_month_names
    assert_equal Date::ABBR_MONTHNAMES, I18n.translate(:'date.abbr_month_names')
  end

  def test_date_order
    assert_equal %w(year month day), I18n.translate(:'date.order')
  end

  def test_time_am
    assert_equal "am", I18n.translate(:'time.am')
  end

  def test_time_pm
    assert_equal "pm", I18n.translate(:'time.pm')
  end

  def test_words_connector
    assert_equal ", ", I18n.translate(:'support.array.words_connector')
  end

  def test_two_words_connector
    assert_equal " and ", I18n.translate(:'support.array.two_words_connector')
  end

  def test_last_word_connector
    assert_equal ", and ", I18n.translate(:'support.array.last_word_connector')
  end

  def test_to_sentence
    default_two_words_connector = I18n.translate(:'support.array.two_words_connector')
    default_last_word_connector = I18n.translate(:'support.array.last_word_connector')
    assert_equal "a, b, and c", %w[a b c].to_sentence
    I18n.backend.store_translations "en", support: { array: { two_words_connector: " & " } }
    assert_equal "a & b", %w[a b].to_sentence
    I18n.backend.store_translations "en", support: { array: { last_word_connector: " and " } }
    assert_equal "a, b and c", %w[a b c].to_sentence
  ensure
    # Restore the stock connectors so other tests see the default locale data.
    I18n.backend.store_translations "en", support: { array: { two_words_connector: default_two_words_connector } }
    I18n.backend.store_translations "en", support: { array: { last_word_connector: default_last_word_connector } }
  end

  def test_to_sentence_with_empty_i18n_store
    # NOTE(review): presumably exercises fallback to the built-in defaults
    # when the locale has no stored data — confirm against to_sentence's
    # i18n lookup.
    assert_equal "a, b, and c", %w[a b c].to_sentence(locale: "empty")
  end
end
# frozen_string_literal: true

# NOTE(review): duplicated magic comment below — only the first line of the
# file is significant, so the second one is inert; likely a copy artifact.
# frozen_string_literal: true

# Deprecation shim: requiring this file only emits a removal warning.
ActiveSupport::Deprecation.warn(<<-MSG.squish)
  `active_support/core_ext/range/include_time_with_zone` is deprecated and will be removed in Rails 7.1.
MSG
# frozen_string_literal: true

class Object
  # Returns +true+ if this object is included in +another_object+. The
  # argument must be any object which responds to +#include?+. Usage:
  #
  #   characters = ["Konata", "Kagami", "Tsukasa"]
  #   "Konata".in?(characters) # => true
  #
  # Raises an +ArgumentError+ if the argument doesn't respond to
  # +#include?+.
  def in?(another_object)
    another_object.include?(self)
  rescue NoMethodError
    raise ArgumentError, "The parameter passed to #in? must respond to #include?"
  end

  # Returns the receiver if it's included in the argument, and +nil+
  # otherwise. The argument must be any object which responds to
  # +#include?+. Usage:
  #
  #   params[:bucket_type].presence_in %w( project calendar )
  #
  # Raises an +ArgumentError+ if the argument doesn't respond to
  # +#include?+.
  #
  # @return [Object]
  def presence_in(another_object)
    self if in?(another_object)
  end
end
# frozen_string_literal: true

require_relative "../../abstract_unit"
require "active_support/core_ext/object/inclusion"

# Tests Object#in? and Object#presence_in across the collection types that
# respond to #include?: Array, Hash, String, Range, Set, and Module.
class InTest < ActiveSupport::TestCase
  def test_in_array
    assert 1.in?([1, 2])
    assert_not 3.in?([1, 2])
  end

  def test_in_hash
    # Hash#include? checks keys, not values.
    h = { "a" => 100, "b" => 200 }
    assert "a".in?(h)
    assert_not "z".in?(h)
  end

  def test_in_string
    assert "lo".in?("hello")
    assert_not "ol".in?("hello")
    # ?h is the single-character literal "h".
    assert ?h.in?("hello")
  end

  def test_in_range
    assert 25.in?(1..50)
    assert_not 75.in?(1..50)
  end

  def test_in_set
    s = Set.new([1, 2])
    assert 1.in?(s)
    assert_not 3.in?(s)
  end

  # Fixture hierarchy for test_in_module: B includes A, C inherits from B,
  # D is unrelated.
  module A
  end
  class B
    include A
  end
  class C < B
  end
  class D
  end

  def test_in_module
    # Module#include? reports ancestry, excluding the module itself.
    assert A.in?(B)
    assert A.in?(C)
    assert_not A.in?(A)
    assert_not A.in?(D)
  end

  def test_no_method_catching
    # Integer has no #include?, so in? raises ArgumentError.
    assert_raise(ArgumentError) { 1.in?(1) }
  end

  def test_presence_in
    assert_equal "stuff", "stuff".presence_in(%w( lots of stuff ))
    assert_nil "stuff".presence_in(%w( lots of crap ))
    assert_raise(ArgumentError) { 1.presence_in(1) }
  end
end
# frozen_string_literal: true

class String
  # Indents the receiver in place. See +indent+ for the meaning of the
  # arguments.
  #
  # Returns the indented string, or +nil+ if there was nothing to indent.
  def indent!(amount, indent_string = nil, indent_empty_lines = false)
    # When no indent string is given, guess from the first indented line
    # found in the receiver, falling back to a single space.
    indent_string ||= self[/^[ \t]/] || " "
    pattern = indent_empty_lines ? /^/ : /^(?!$)/
    gsub!(pattern, indent_string * amount)
  end

  # Returns a copy of the receiver with every line indented +amount+ times:
  #
  #   <<EOS.indent(2)
  #   def some_method
  #     some_code
  #   end
  #   EOS
  #   # =>
  #     def some_method
  #       some_code
  #     end
  #
  # The second argument, +indent_string+, selects the string used for each
  # indent step. When it is +nil+ (the default) the method peeks at the
  # first indented line to pick one, and uses a space if none is found.
  #
  #   "  foo".indent(2)        # => "    foo"
  #   "foo\n\t\tbar".indent(2) # => "\t\tfoo\n\t\t\t\tbar"
  #   "foo".indent(2, "\t")    # => "\t\tfoo"
  #
  # Although +indent_string+ is typically one space or tab, any string is
  # accepted.
  #
  # The third argument, +indent_empty_lines+, controls whether blank lines
  # are indented as well. It defaults to +false+.
  #
  #   "foo\n\nbar".indent(2)            # => "  foo\n\n  bar"
  #   "foo\n\nbar".indent(2, nil, true) # => "  foo\n  \n  bar"
  #
  def indent(amount, indent_string = nil, indent_empty_lines = false)
    indented = dup
    indented.indent!(amount, indent_string, indent_empty_lines)
    indented
  end
end
# frozen_string_literal: true

require "active_support/hash_with_indifferent_access"

class Hash
  # Wraps the receiver in an ActiveSupport::HashWithIndifferentAccess, so
  # string and symbol keys are interchangeable:
  #
  #   { a: 1 }.with_indifferent_access['a'] # => 1
  def with_indifferent_access
    ActiveSupport::HashWithIndifferentAccess.new(self)
  end

  # Invoked on nested hashes when an enclosing hash receives
  # #with_indifferent_access; aliased to #with_indifferent_access by
  # default. Hash subclasses may override this to return +self+ when
  # conversion to an ActiveSupport::HashWithIndifferentAccess is not
  # desirable for nested values.
  #
  #   b = { b: 1 }
  #   { a: b }.with_indifferent_access['a'] # calls b.nested_under_indifferent_access
  #   # => {"b"=>1}
  alias_method :nested_under_indifferent_access, :with_indifferent_access
end
# frozen_string_literal: true

require "concurrent/map"
require "active_support/i18n"

module ActiveSupport
  module Inflector
    extend self

    # A singleton instance of this class is yielded by Inflector.inflections,
    # which can then be used to specify additional inflection rules. If passed
    # an optional locale, rules for other languages can be specified. The
    # default locale is <tt>:en</tt>. Only rules for English are provided.
    #
    #   ActiveSupport::Inflector.inflections(:en) do |inflect|
    #     inflect.plural /^(ox)$/i, '\1\2en'
    #     inflect.singular /^(ox)en/i, '\1'
    #
    #     inflect.irregular 'cactus', 'cacti'
    #
    #     inflect.uncountable 'equipment'
    #   end
    #
    # New rules are added at the top. So in the example above, the irregular
    # rule for cactus will now be the first of the pluralization and
    # singularization rules that is runs. This guarantees that your rules run
    # before any of the rules that may already have been loaded.
    class Inflections
      # One Inflections instance per locale, shared process-wide;
      # Concurrent::Map makes lazy creation thread-safe.
      @__instance__ = Concurrent::Map.new

      # An Array of uncountable words that additionally keeps a parallel list
      # of whole-word, case-insensitive regexps so uncountable?(str) can match
      # an uncountable word at the end of a longer string.
      class Uncountables < Array
        def initialize
          @regex_array = []
          super
        end

        # Removes a word from both the array and the regexp cache.
        def delete(entry)
          super entry
          @regex_array.delete(to_regex(entry))
        end

        def <<(*word)
          add(word)
        end

        # Adds words (flattened, downcased) and caches a regexp for each.
        def add(words)
          words = words.flatten.map(&:downcase)
          concat(words)
          @regex_array += words.map { |word| to_regex(word) }
          self
        end

        def uncountable?(str)
          @regex_array.any? { |regex| regex.match? str }
        end

        private
          # \Z (not \z) also matches before a trailing newline.
          def to_regex(string)
            /\b#{::Regexp.escape(string)}\Z/i
          end
      end

      # Returns (creating if needed) the shared Inflections for +locale+.
      def self.instance(locale = :en)
        @__instance__[locale] ||= new
      end

      # Returns the inflections of the first locale in the I18n fallback
      # chain that already has some; otherwise creates a fresh instance for
      # +locale+.
      def self.instance_or_fallback(locale)
        I18n.fallbacks[locale].each do |k|
          return @__instance__[k] if @__instance__.key?(k)
        end
        instance(locale)
      end

      attr_reader :plurals, :singulars, :uncountables, :humans, :acronyms

      attr_reader :acronyms_camelize_regex, :acronyms_underscore_regex # :nodoc:

      def initialize
        @plurals, @singulars, @uncountables, @humans, @acronyms = [], [], Uncountables.new, [], {}
        define_acronym_regex_patterns
      end

      # Private, for the test suite.
      def initialize_dup(orig) # :nodoc:
        %w(plurals singulars uncountables humans acronyms).each do |scope|
          instance_variable_set("@#{scope}", orig.public_send(scope).dup)
        end
        # The acronym regexes are derived state and must be rebuilt from the
        # copied @acronyms.
        define_acronym_regex_patterns
      end

      # Specifies a new acronym. An acronym must be specified as it will appear
      # in a camelized string. An underscore string that contains the acronym
      # will retain the acronym when passed to +camelize+, +humanize+, or
      # +titleize+. A camelized string that contains the acronym will maintain
      # the acronym when titleized or humanized, and will convert the acronym
      # into a non-delimited single lowercase word when passed to +underscore+.
      #
      #   acronym 'HTML'
      #   titleize 'html'     # => 'HTML'
      #   camelize 'html'     # => 'HTML'
      #   underscore 'MyHTML' # => 'my_html'
      #
      # The acronym, however, must occur as a delimited unit and not be part of
      # another word for conversions to recognize it:
      #
      #   acronym 'HTTP'
      #   camelize 'my_http_delimited' # => 'MyHTTPDelimited'
      #   camelize 'https'             # => 'Https', not 'HTTPs'
      #   underscore 'HTTPS'           # => 'http_s', not 'https'
      #
      #   acronym 'HTTPS'
      #   camelize 'https'   # => 'HTTPS'
      #   underscore 'HTTPS' # => 'https'
      #
      # Note: Acronyms that are passed to +pluralize+ will no longer be
      # recognized, since the acronym will not occur as a delimited unit in the
      # pluralized result. To work around this, you must specify the pluralized
      # form as an acronym as well:
      #
      #   acronym 'API'
      #   camelize(pluralize('api')) # => 'Apis'
      #
      #   acronym 'APIs'
      #   camelize(pluralize('api')) # => 'APIs'
      #
      # +acronym+ may be used to specify any word that contains an acronym or
      # otherwise needs to maintain a non-standard capitalization. The only
      # restriction is that the word must begin with a capital letter.
      #
      #   acronym 'RESTful'
      #   underscore 'RESTful'           # => 'restful'
      #   underscore 'RESTfulController' # => 'restful_controller'
      #   titleize 'RESTfulController'   # => 'RESTful Controller'
      #   camelize 'restful'             # => 'RESTful'
      #   camelize 'restful_controller'  # => 'RESTfulController'
      #
      #   acronym 'McDonald'
      #   underscore 'McDonald' # => 'mcdonald'
      #   camelize 'mcdonald'   # => 'McDonald'
      def acronym(word)
        @acronyms[word.downcase] = word
        define_acronym_regex_patterns
      end

      # Specifies a new pluralization rule and its replacement. The rule can
      # either be a string or a regular expression. The replacement should
      # always be a string that may include references to the matched data from
      # the rule.
      def plural(rule, replacement)
        # A word with an explicit plural rule is by definition countable.
        @uncountables.delete(rule) if rule.is_a?(String)
        @uncountables.delete(replacement)
        # Prepend so newer rules win over previously loaded ones.
        @plurals.prepend([rule, replacement])
      end

      # Specifies a new singularization rule and its replacement. The rule can
      # either be a string or a regular expression. The replacement should
      # always be a string that may include references to the matched data from
      # the rule.
      def singular(rule, replacement)
        @uncountables.delete(rule) if rule.is_a?(String)
        @uncountables.delete(replacement)
        @singulars.prepend([rule, replacement])
      end

      # Specifies a new irregular that applies to both pluralization and
      # singularization at the same time. This can only be used for strings, not
      # regular expressions. You simply pass the irregular in singular and
      # plural form.
      #
      #   irregular 'cactus', 'cacti'
      #   irregular 'person', 'people'
      def irregular(singular, plural)
        @uncountables.delete(singular)
        @uncountables.delete(plural)

        # Split each form into first letter + remainder so the rules below can
        # preserve the case of the input's first letter.
        s0 = singular[0]
        srest = singular[1..-1]
        p0 = plural[0]
        prest = plural[1..-1]

        if s0.upcase == p0.upcase
          # Same first letter in both forms: capture it so the replacement
          # keeps whatever case the input used.
          plural(/(#{s0})#{srest}$/i, '\1' + prest)
          plural(/(#{p0})#{prest}$/i, '\1' + prest)
          singular(/(#{s0})#{srest}$/i, '\1' + srest)
          singular(/(#{p0})#{prest}$/i, '\1' + srest)
        else
          # Different first letters: add explicit upper- and lowercase rules;
          # the inline (?i) makes only the remainder case-insensitive.
          plural(/#{s0.upcase}(?i)#{srest}$/, p0.upcase + prest)
          plural(/#{s0.downcase}(?i)#{srest}$/, p0.downcase + prest)
          plural(/#{p0.upcase}(?i)#{prest}$/, p0.upcase + prest)
          plural(/#{p0.downcase}(?i)#{prest}$/, p0.downcase + prest)
          singular(/#{s0.upcase}(?i)#{srest}$/, s0.upcase + srest)
          singular(/#{s0.downcase}(?i)#{srest}$/, s0.downcase + srest)
          singular(/#{p0.upcase}(?i)#{prest}$/, s0.upcase + srest)
          singular(/#{p0.downcase}(?i)#{prest}$/, s0.downcase + srest)
        end
      end

      # Specifies words that are uncountable and should not be inflected.
      #
      #   uncountable 'money'
      #   uncountable 'money', 'information'
      #   uncountable %w( money information rice )
      def uncountable(*words)
        @uncountables.add(words)
      end

      # Specifies a humanized form of a string by a regular expression rule or
      # by a string mapping. When using a regular expression based replacement,
      # the normal humanize formatting is called after the replacement. When a
      # string is used, the human form should be specified as desired (example:
      # 'The name', not 'the_name').
      #
      #   human /_cnt$/i, '\1_count'
      #   human 'legacy_col_person_name', 'Name'
      def human(rule, replacement)
        @humans.prepend([rule, replacement])
      end

      # Clears the loaded inflections within a given scope (default is
      # <tt>:all</tt>). Give the scope as a symbol of the inflection type, the
      # options are: <tt>:plurals</tt>, <tt>:singulars</tt>, <tt>:uncountables</tt>,
      # <tt>:humans</tt>, <tt>:acronyms</tt>.
      #
      #   clear :all
      #   clear :plurals
      def clear(scope = :all)
        case scope
        when :all
          clear(:acronyms)
          clear(:plurals)
          clear(:singulars)
          clear(:uncountables)
          clear(:humans)
        when :acronyms
          @acronyms = {}
          define_acronym_regex_patterns
        when :uncountables
          @uncountables = Uncountables.new
        when :plurals, :singulars, :humans
          instance_variable_set "@#{scope}", []
        end
      end

      private
        # Rebuilds the derived regexes used by camelize/underscore whenever
        # @acronyms changes.
        def define_acronym_regex_patterns
          # /(?=a)b/ can never match anything; it stands in for "no acronyms
          # configured".
          @acronym_regex = @acronyms.empty? ? /(?=a)b/ : /#{@acronyms.values.join("|")}/
          @acronyms_camelize_regex = /^(?:#{@acronym_regex}(?=\b|[A-Z_])|\w)/
          @acronyms_underscore_regex = /(?:(?<=([A-Za-z\d]))|\b)(#{@acronym_regex})(?=\b|[^a-z])/
        end
    end

    # Yields a singleton instance of Inflector::Inflections so you can specify
    # additional inflector rules. If passed an optional locale, rules for other
    # languages can be specified. If not specified, defaults to <tt>:en</tt>.
    # Only rules for English are provided.
    #
    #   ActiveSupport::Inflector.inflections(:en) do |inflect|
    #     inflect.uncountable 'rails'
    #   end
    def inflections(locale = :en)
      if block_given?
        yield Inflections.instance(locale)
      else
        Inflections.instance_or_fallback(locale)
      end
    end
  end
end
# frozen_string_literal: true | |
# in case active_support/inflector is required without the rest of active_support | |
require "active_support/inflector/inflections" | |
require "active_support/inflector/transliterate" | |
require "active_support/inflector/methods" | |
require "active_support/inflections" | |
require "active_support/core_ext/string/inflections" |
# frozen_string_literal: true | |
require_relative "abstract_unit" | |
require "active_support/inflector" | |
require_relative "inflector_test_cases" | |
require_relative "constantize_test_cases" | |
class InflectorTest < ActiveSupport::TestCase | |
include InflectorTestCases | |
include ConstantizeTestCases | |
def setup | |
# Dups the singleton before each test, restoring the original inflections later. | |
# | |
# This helper is implemented by setting @__instance__ because in some tests | |
# there are module functions that access ActiveSupport::Inflector.inflections, | |
# so we need to replace the singleton itself. | |
@original_inflections = ActiveSupport::Inflector::Inflections.instance_variable_get(:@__instance__)[:en] | |
ActiveSupport::Inflector::Inflections.instance_variable_set(:@__instance__, en: @original_inflections.dup) | |
end | |
def teardown | |
ActiveSupport::Inflector::Inflections.instance_variable_set(:@__instance__, en: @original_inflections) | |
end | |
def test_pluralize_plurals | |
assert_equal "plurals", ActiveSupport::Inflector.pluralize("plurals") | |
assert_equal "Plurals", ActiveSupport::Inflector.pluralize("Plurals") | |
end | |
def test_pluralize_empty_string | |
assert_equal "", ActiveSupport::Inflector.pluralize("") | |
end | |
def test_pluralize_with_fallback | |
I18n.stub(:default_locale, :"en-GB") do | |
assert_equal "days", ActiveSupport::Inflector.pluralize("day") | |
end | |
end | |
test "uncountability of ascii word" do | |
word = "HTTP" | |
ActiveSupport::Inflector.inflections do |inflect| | |
inflect.uncountable word | |
end | |
assert_equal word, ActiveSupport::Inflector.pluralize(word) | |
assert_equal word, ActiveSupport::Inflector.singularize(word) | |
assert_equal ActiveSupport::Inflector.pluralize(word), ActiveSupport::Inflector.singularize(word) | |
ActiveSupport::Inflector.inflections.uncountables.pop | |
end | |
test "uncountability of non-ascii word" do | |
word = "猫" | |
ActiveSupport::Inflector.inflections do |inflect| | |
inflect.uncountable word | |
end | |
assert_equal word, ActiveSupport::Inflector.pluralize(word) | |
assert_equal word, ActiveSupport::Inflector.singularize(word) | |
assert_equal ActiveSupport::Inflector.pluralize(word), ActiveSupport::Inflector.singularize(word) | |
ActiveSupport::Inflector.inflections.uncountables.pop | |
end | |
ActiveSupport::Inflector.inflections.uncountable.each do |word| | |
define_method "test_uncountability_of_#{word}" do | |
assert_equal word, ActiveSupport::Inflector.singularize(word) | |
assert_equal word, ActiveSupport::Inflector.pluralize(word) | |
assert_equal ActiveSupport::Inflector.pluralize(word), ActiveSupport::Inflector.singularize(word) | |
end | |
end | |
def test_uncountable_word_is_not_greedy | |
uncountable_word = "ors" | |
countable_word = "sponsor" | |
ActiveSupport::Inflector.inflections.uncountable << uncountable_word | |
assert_equal uncountable_word, ActiveSupport::Inflector.singularize(uncountable_word) | |
assert_equal uncountable_word, ActiveSupport::Inflector.pluralize(uncountable_word) | |
assert_equal ActiveSupport::Inflector.pluralize(uncountable_word), ActiveSupport::Inflector.singularize(uncountable_word) | |
assert_equal "sponsor", ActiveSupport::Inflector.singularize(countable_word) | |
assert_equal "sponsors", ActiveSupport::Inflector.pluralize(countable_word) | |
assert_equal "sponsor", ActiveSupport::Inflector.singularize(ActiveSupport::Inflector.pluralize(countable_word)) | |
end | |
SingularToPlural.each do |singular, plural| | |
define_method "test_pluralize_singular_#{singular}" do | |
assert_equal(plural, ActiveSupport::Inflector.pluralize(singular)) | |
assert_equal(plural.capitalize, ActiveSupport::Inflector.pluralize(singular.capitalize)) | |
end | |
end | |
SingularToPlural.each do |singular, plural| | |
define_method "test_singularize_plural_#{plural}" do | |
assert_equal(singular, ActiveSupport::Inflector.singularize(plural)) | |
assert_equal(singular.capitalize, ActiveSupport::Inflector.singularize(plural.capitalize)) | |
end | |
end | |
SingularToPlural.each do |singular, plural| | |
define_method "test_pluralize_plural_#{plural}" do | |
assert_equal(plural, ActiveSupport::Inflector.pluralize(plural)) | |
assert_equal(plural.capitalize, ActiveSupport::Inflector.pluralize(plural.capitalize)) | |
end | |
define_method "test_singularize_singular_#{singular}" do | |
assert_equal(singular, ActiveSupport::Inflector.singularize(singular)) | |
assert_equal(singular.capitalize, ActiveSupport::Inflector.singularize(singular.capitalize)) | |
end | |
end | |
def test_overwrite_previous_inflectors | |
assert_equal("series", ActiveSupport::Inflector.singularize("series")) | |
ActiveSupport::Inflector.inflections.singular "series", "serie" | |
assert_equal("serie", ActiveSupport::Inflector.singularize("series")) | |
end | |
MixtureToTitleCase.each_with_index do |(before, titleized), index| | |
define_method "test_titleize_mixture_to_title_case_#{index}" do | |
assert_equal(titleized, ActiveSupport::Inflector.titleize(before), "mixture \ | |
to TitleCase failed for #{before}") | |
end | |
end | |
MixtureToTitleCaseWithKeepIdSuffix.each_with_index do |(before, titleized), index| | |
define_method "test_titleize_with_keep_id_suffix_mixture_to_title_case_#{index}" do | |
assert_equal(titleized, ActiveSupport::Inflector.titleize(before, keep_id_suffix: true), | |
"mixture to TitleCase with keep_id_suffix failed for #{before}") | |
end | |
end | |
def test_camelize | |
CamelToUnderscore.each do |camel, underscore| | |
assert_equal(camel, ActiveSupport::Inflector.camelize(underscore)) | |
end | |
end | |
def test_camelize_with_true_upcases_the_first_letter | |
assert_equal("Capital", ActiveSupport::Inflector.camelize("Capital", true)) | |
assert_equal("Capital", ActiveSupport::Inflector.camelize("capital", true)) | |
end | |
def test_camelize_with_upper_upcases_the_first_letter | |
assert_equal("Capital", ActiveSupport::Inflector.camelize("Capital", :upper)) | |
assert_equal("Capital", ActiveSupport::Inflector.camelize("capital", :upper)) | |
end | |
def test_camelize_with_false_downcases_the_first_letter | |
assert_equal("capital", ActiveSupport::Inflector.camelize("Capital", false)) | |
assert_equal("capital", ActiveSupport::Inflector.camelize("capital", false)) | |
end | |
def test_camelize_with_nil_downcases_the_first_letter | |
assert_equal("capital", ActiveSupport::Inflector.camelize("Capital", nil)) | |
assert_equal("capital", ActiveSupport::Inflector.camelize("capital", nil)) | |
end | |
def test_camelize_with_lower_downcases_the_first_letter | |
assert_equal("capital", ActiveSupport::Inflector.camelize("Capital", :lower)) | |
assert_equal("capital", ActiveSupport::Inflector.camelize("capital", :lower)) | |
end | |
def test_camelize_with_any_other_arg_upcases_the_first_letter | |
assert_equal("Capital", ActiveSupport::Inflector.camelize("capital", :true)) | |
assert_equal("Capital", ActiveSupport::Inflector.camelize("Capital", :true)) | |
assert_equal("Capital", ActiveSupport::Inflector.camelize("capital", :false)) | |
assert_equal("Capital", ActiveSupport::Inflector.camelize("capital", :foo)) | |
assert_equal("Capital", ActiveSupport::Inflector.camelize("capital", 42)) | |
assert_equal("Capital", ActiveSupport::Inflector.camelize("capital")) | |
end | |
def test_camelize_with_underscores | |
assert_equal("CamelCase", ActiveSupport::Inflector.camelize("Camel_Case")) | |
end | |
def test_acronyms | |
ActiveSupport::Inflector.inflections do |inflect| | |
inflect.acronym("API") | |
inflect.acronym("HTML") | |
inflect.acronym("HTTP") | |
inflect.acronym("RESTful") | |
inflect.acronym("W3C") | |
inflect.acronym("PhD") | |
inflect.acronym("RoR") | |
inflect.acronym("SSL") | |
end | |
# camelize underscore humanize titleize | |
[ | |
["API", "api", "API", "API"], | |
["APIController", "api_controller", "API controller", "API Controller"], | |
["Nokogiri::HTML", "nokogiri/html", "Nokogiri/HTML", "Nokogiri/HTML"], | |
["HTTPAPI", "http_api", "HTTP API", "HTTP API"], | |
["HTTP::Get", "http/get", "HTTP/get", "HTTP/Get"], | |
["SSLError", "ssl_error", "SSL error", "SSL Error"], | |
["RESTful", "restful", "RESTful", "RESTful"], | |
["RESTfulController", "restful_controller", "RESTful controller", "RESTful Controller"], | |
["Nested::RESTful", "nested/restful", "Nested/RESTful", "Nested/RESTful"], | |
["IHeartW3C", "i_heart_w3c", "I heart W3C", "I Heart W3C"], | |
["PhDRequired", "phd_required", "PhD required", "PhD Required"], | |
["IRoRU", "i_ror_u", "I RoR u", "I RoR U"], | |
["RESTfulHTTPAPI", "restful_http_api", "RESTful HTTP API", "RESTful HTTP API"], | |
["HTTP::RESTful", "http/restful", "HTTP/RESTful", "HTTP/RESTful"], | |
["HTTP::RESTfulAPI", "http/restful_api", "HTTP/RESTful API", "HTTP/RESTful API"], | |
["APIRESTful", "api_restful", "API RESTful", "API RESTful"], | |
# misdirection | |
["Capistrano", "capistrano", "Capistrano", "Capistrano"], | |
["CapiController", "capi_controller", "Capi controller", "Capi Controller"], | |
["HttpsApis", "https_apis", "Https apis", "Https Apis"], | |
["Html5", "html5", "Html5", "Html5"], | |
["Restfully", "restfully", "Restfully", "Restfully"], | |
["RoRails", "ro_rails", "Ro rails", "Ro Rails"] | |
].each do |camel, under, human, title| | |
assert_equal(camel, ActiveSupport::Inflector.camelize(under)) | |
assert_equal(camel, ActiveSupport::Inflector.camelize(camel)) | |
assert_equal(under, ActiveSupport::Inflector.underscore(under)) | |
assert_equal(under, ActiveSupport::Inflector.underscore(camel)) | |
assert_equal(title, ActiveSupport::Inflector.titleize(under)) | |
assert_equal(title, ActiveSupport::Inflector.titleize(camel)) | |
assert_equal(human, ActiveSupport::Inflector.humanize(under)) | |
end | |
end | |
# A later, more specific acronym ("LegacyApi") takes precedence over an
# earlier overlapping one ("API") where it matches exactly; words that
# merely contain the substring are left alone.
def test_acronym_override
  ActiveSupport::Inflector.inflections do |inflect|
    inflect.acronym("API")
    inflect.acronym("LegacyApi")
  end

  assert_equal("LegacyApi", ActiveSupport::Inflector.camelize("legacyapi"))
  assert_equal("LegacyAPI", ActiveSupport::Inflector.camelize("legacy_api"))
  assert_equal("SomeLegacyApi", ActiveSupport::Inflector.camelize("some_legacyapi"))
  assert_equal("Nonlegacyapi", ActiveSupport::Inflector.camelize("nonlegacyapi"))
end

# With the lowercase-first-letter flag, a leading acronym is fully
# downcased while later acronyms keep their canonical casing.
def test_acronyms_camelize_lower
  ActiveSupport::Inflector.inflections do |inflect|
    inflect.acronym("API")
    inflect.acronym("HTML")
  end

  assert_equal("htmlAPI", ActiveSupport::Inflector.camelize("html_api", false))
  assert_equal("htmlAPI", ActiveSupport::Inflector.camelize("htmlAPI", false))
  assert_equal("htmlAPI", ActiveSupport::Inflector.camelize("HTMLAPI", false))
end

# A run of adjacent acronyms is split into individual words when
# underscoring.
def test_underscore_acronym_sequence
  ActiveSupport::Inflector.inflections do |inflect|
    inflect.acronym("API")
    inflect.acronym("JSON")
    inflect.acronym("HTML")
  end

  assert_equal("json_html_api", ActiveSupport::Inflector.underscore("JSONHTMLAPI"))
end
# underscore maps CamelCase to snake_case, including the one-way cases
# (acronym-heavy names) that camelize cannot round-trip.
def test_underscore
  CamelToUnderscore.each do |camel, underscore|
    assert_equal(underscore, ActiveSupport::Inflector.underscore(camel))
  end
  CamelToUnderscoreWithoutReverse.each do |camel, underscore|
    assert_equal(underscore, ActiveSupport::Inflector.underscore(camel))
  end
end

# camelize turns path separators ("/") back into namespace separators ("::").
def test_camelize_with_module
  CamelWithModuleToUnderscoreWithSlash.each do |camel, underscore|
    assert_equal(camel, ActiveSupport::Inflector.camelize(underscore))
  end
end

# underscore turns namespace separators ("::") into path separators ("/").
def test_underscore_with_slashes
  CamelWithModuleToUnderscoreWithSlash.each do |camel, underscore|
    assert_equal(underscore, ActiveSupport::Inflector.underscore(camel))
  end
end
# demodulize strips every leading namespace segment, leaving only the
# rightmost constant name (empty input stays empty).
def test_demodulize
  {
    "MyApplication::Billing::Account" => "Account",
    "Account" => "Account",
    "::Account" => "Account",
    "" => ""
  }.each do |qualified, expected|
    assert_equal expected, ActiveSupport::Inflector.demodulize(qualified)
  end
end

# deconstantize is the complement of demodulize: it drops the rightmost
# segment and keeps the namespace (including a leading "::" when present).
def test_deconstantize
  {
    "MyApplication::Billing::Account" => "MyApplication::Billing",
    "::MyApplication::Billing::Account" => "::MyApplication::Billing",
    "MyApplication::Billing" => "MyApplication",
    "::MyApplication::Billing" => "::MyApplication",
    "Account" => "",
    "::Account" => "",
    "" => ""
  }.each do |qualified, expected|
    assert_equal expected, ActiveSupport::Inflector.deconstantize(qualified)
  end
end
# foreign_key derives "person_id"-style keys from class names, with and
# without the underscore before "id".
def test_foreign_key
  ClassNameToForeignKeyWithUnderscore.each do |klass, foreign_key|
    assert_equal(foreign_key, ActiveSupport::Inflector.foreign_key(klass))
  end

  ClassNameToForeignKeyWithoutUnderscore.each do |klass, foreign_key|
    assert_equal(foreign_key, ActiveSupport::Inflector.foreign_key(klass, false))
  end
end

def test_tableize
  ClassNameToTableName.each do |class_name, table_name|
    assert_equal(table_name, ActiveSupport::Inflector.tableize(class_name))
  end
end

def test_parameterize
  StringToParameterized.each do |some_string, parameterized_string|
    assert_equal(parameterized_string, ActiveSupport::Inflector.parameterize(some_string))
  end
end

# Non-ASCII input is transliterated to ASCII before parameterizing.
def test_parameterize_and_normalize
  StringToParameterizedAndNormalized.each do |some_string, parameterized_string|
    assert_equal(parameterized_string, ActiveSupport::Inflector.parameterize(some_string))
  end
end

def test_parameterize_with_custom_separator
  StringToParameterizeWithUnderscore.each do |some_string, parameterized_string|
    assert_equal(parameterized_string, ActiveSupport::Inflector.parameterize(some_string, separator: "_"))
  end
end

# Multi-character separators are honored (and squeezed) as a unit.
def test_parameterize_with_multi_character_separator
  StringToParameterized.each do |some_string, parameterized_string|
    assert_equal(parameterized_string.gsub("-", "__sep__"), ActiveSupport::Inflector.parameterize(some_string, separator: "__sep__"))
  end
end

# Locale-specific transliteration rules (here "ü" => "ue") are applied
# when a locale is passed.
def test_parameterize_with_locale
  word = "Fünf autos"
  I18n.backend.store_translations(:de, i18n: { transliterate: { rule: { "ü" => "ue" } } })
  assert_equal("fuenf-autos", ActiveSupport::Inflector.parameterize(word, locale: :de))
end
# classify singularizes and camelizes a table name; any schema prefix
# (everything up to the last ".") is discarded.
def test_classify
  ClassNameToTableName.each do |class_name, table_name|
    assert_equal(class_name, ActiveSupport::Inflector.classify(table_name))
    assert_equal(class_name, ActiveSupport::Inflector.classify("table_prefix." + table_name))
  end
end

# Symbols are accepted, not just strings.
def test_classify_with_symbol
  assert_nothing_raised do
    assert_equal "FooBar", ActiveSupport::Inflector.classify(:foo_bars)
  end
end

def test_classify_with_leading_schema_name
  assert_equal "FooBar", ActiveSupport::Inflector.classify("schema.foo_bar")
end
def test_humanize
  UnderscoreToHuman.each do |underscore, human|
    assert_equal(human, ActiveSupport::Inflector.humanize(underscore))
  end
end

def test_humanize_without_capitalize
  UnderscoreToHumanWithoutCapitalize.each do |underscore, human|
    assert_equal(human, ActiveSupport::Inflector.humanize(underscore, capitalize: false))
  end
end

# keep_id_suffix: true leaves a trailing "_id" visible instead of
# stripping it.
def test_humanize_with_keep_id_suffix
  UnderscoreToHumanWithKeepIdSuffix.each do |underscore, human|
    assert_equal(human, ActiveSupport::Inflector.humanize(underscore, keep_id_suffix: true))
  end
end

# Custom humanize rules may be regexes with replacement patterns.
def test_humanize_by_rule
  ActiveSupport::Inflector.inflections do |inflect|
    inflect.human(/_cnt$/i, '\1_count')
    inflect.human(/^prefx_/i, '\1')
  end
  assert_equal("Jargon count", ActiveSupport::Inflector.humanize("jargon_cnt"))
  assert_equal("Request", ActiveSupport::Inflector.humanize("prefx_request"))
end

# String-to-string humanize rules match the whole word case-sensitively;
# a differently-cased input falls through to the default behavior.
def test_humanize_by_string
  ActiveSupport::Inflector.inflections do |inflect|
    inflect.human("col_rpted_bugs", "Reported bugs")
  end
  assert_equal("Reported bugs", ActiveSupport::Inflector.humanize("col_rpted_bugs"))
  assert_equal("Col rpted bugs", ActiveSupport::Inflector.humanize("COL_rpted_bugs"))
end
# Registered acronyms keep their canonical casing under humanize,
# regardless of the input casing or the :capitalize option.
def test_humanize_with_acronyms
  ActiveSupport::Inflector.inflections do |inflect|
    inflect.acronym "LAX"
    inflect.acronym "SFO"
  end

  ["LAX ROUNDTRIP TO SFO", "lax roundtrip to sfo", "Lax Roundtrip To Sfo"].each do |phrase|
    assert_equal("LAX roundtrip to SFO", ActiveSupport::Inflector.humanize(phrase))
    assert_equal("LAX roundtrip to SFO", ActiveSupport::Inflector.humanize(phrase, capitalize: false))
  end
end
# Shared constantize cases come from a helper mixin (run_constantize_tests_on
# is defined elsewhere in the suite); run them against the Inflector entry point.
def test_constantize
  run_constantize_tests_on do |string|
    ActiveSupport::Inflector.constantize(string)
  end
end

def test_safe_constantize
  run_safe_constantize_tests_on do |string|
    ActiveSupport::Inflector.safe_constantize(string)
  end
end

# ordinal returns just the suffix ("st", "nd", ...), so prepending the
# number must reproduce the full ordinalized string.
def test_ordinal
  OrdinalNumbers.each do |number, ordinalized|
    assert_equal(ordinalized, number + ActiveSupport::Inflector.ordinal(number))
  end
end

def test_ordinalize
  OrdinalNumbers.each do |number, ordinalized|
    assert_equal(ordinalized, ActiveSupport::Inflector.ordinalize(number))
  end
end

def test_dasherize
  UnderscoresToDashes.each do |underscored, dasherized|
    assert_equal(dasherized, ActiveSupport::Inflector.dasherize(underscored))
  end
end

# underscore(dasherize(x)) must round-trip back to x.
def test_underscore_as_reverse_of_dasherize
  UnderscoresToDashes.each_key do |underscored|
    assert_equal(underscored, ActiveSupport::Inflector.underscore(ActiveSupport::Inflector.dasherize(underscored)))
  end
end

def test_underscore_to_lower_camel
  UnderscoreToLowerCamel.each do |underscored, lower_camel|
    assert_equal(lower_camel, ActiveSupport::Inflector.camelize(underscored, false))
  end
end

def test_symbol_to_lower_camel
  SymbolToLowerCamel.each do |symbol, lower_camel|
    assert_equal(lower_camel, ActiveSupport::Inflector.camelize(symbol, false))
  end
end
# Generate one test per clearable inflection scope: clearing a single
# scope empties exactly that collection.
%w{plurals singulars uncountables humans}.each do |inflection_type|
  class_eval <<-RUBY, __FILE__, __LINE__ + 1
    def test_clear_#{inflection_type}
      ActiveSupport::Inflector.inflections.clear :#{inflection_type}
      assert ActiveSupport::Inflector.inflections.#{inflection_type}.empty?, \"#{inflection_type} inflections should be empty after clear :#{inflection_type}\"
    end
  RUBY
end

# After clear(:acronyms) the inflector must be reusable: acronyms
# registered afterwards still take effect (i.e. the cached regexes are
# rebuilt, not left stale).
def test_clear_acronyms_resets_to_reusable_state
  ActiveSupport::Inflector.inflections.clear(:acronyms)

  assert_empty ActiveSupport::Inflector.inflections.acronyms

  ActiveSupport::Inflector.inflections do |inflect|
    inflect.acronym "HTML"
  end

  assert_equal "HTML", "html".titleize
end
# Inflection rule sets are per-locale: rules registered under :es must
# not leak into the default locale, and clearing :es must leave the
# default rules intact.
def test_inflector_locality
  ActiveSupport::Inflector.inflections(:es) do |inflect|
    inflect.plural(/$/, "s")
    inflect.plural(/z$/i, "ces")

    inflect.singular(/s$/, "")
    inflect.singular(/es$/, "")

    inflect.irregular("el", "los")

    inflect.uncountable("agua")
  end

  assert_equal("hijos", "hijo".pluralize(:es))
  assert_equal("luces", "luz".pluralize(:es))
  # Same words through the default locale show the :es rules don't apply.
  assert_equal("luzs", "luz".pluralize)

  assert_equal("sociedad", "sociedades".singularize(:es))
  assert_equal("sociedade", "sociedades".singularize)

  assert_equal("los", "el".pluralize(:es))
  assert_equal("els", "el".pluralize)

  assert_equal("agua", "agua".pluralize(:es))
  assert_equal("aguas", "agua".pluralize)

  ActiveSupport::Inflector.inflections(:es) { |inflect| inflect.clear }

  assert_empty ActiveSupport::Inflector.inflections(:es).plurals
  assert_empty ActiveSupport::Inflector.inflections(:es).singulars
  assert_empty ActiveSupport::Inflector.inflections(:es).uncountables
  assert_not_empty ActiveSupport::Inflector.inflections.plurals
  assert_not_empty ActiveSupport::Inflector.inflections.singulars
  assert_not_empty ActiveSupport::Inflector.inflections.uncountables
end
# Clearing with an explicit :all scope wipes every inflection collection.
def test_clear_all
  ActiveSupport::Inflector.inflections do |inflect|
    # ensure any data is present
    seed_sample_inflections(inflect)

    inflect.clear :all

    assert_all_inflection_scopes_empty(inflect)
  end
end

# Calling +clear+ with no arguments defaults to clearing everything,
# mirroring the explicit :all form above.
def test_clear_with_default
  ActiveSupport::Inflector.inflections do |inflect|
    # ensure any data is present
    seed_sample_inflections(inflect)

    inflect.clear

    assert_all_inflection_scopes_empty(inflect)
  end
end

# Registers one rule of each inflection kind so that clearing has an
# observable effect. (Inline +private def+ keeps ambient method
# visibility unchanged for the tests defined after this block.)
private def seed_sample_inflections(inflect)
  inflect.plural(/(quiz)$/i, '\1zes')
  inflect.singular(/(database)s$/i, '\1')
  inflect.uncountable("series")
  inflect.human("col_rpted_bugs", "Reported bugs")
  inflect.acronym("HTML")
end

# Asserts that every inflection collection is empty.
private def assert_all_inflection_scopes_empty(inflect)
  assert_empty inflect.plurals
  assert_empty inflect.singulars
  assert_empty inflect.uncountables
  assert_empty inflect.humans
  assert_empty inflect.acronyms
end
# clear :all must also reset the cached camelize/underscore acronym
# regexes, not just the acronym hash, so stale patterns stop matching.
def test_clear_all_resets_camelize_and_underscore_regexes
  ActiveSupport::Inflector.inflections do |inflect|
    # ensure any data is present
    inflect.acronym("HTTP")
    assert_equal "http_s", "HTTPS".underscore
    assert_equal "Https", "https".camelize

    inflect.clear :all

    assert_empty inflect.acronyms
    assert_equal "https", "HTTPS".underscore
    assert_equal "Https", "https".camelize
  end
end
# Each irregular pair must round-trip in both directions.
Irregularities.each do |singular, plural|
  define_method("test_irregularity_between_#{singular}_and_#{plural}") do
    ActiveSupport::Inflector.inflections do |inflect|
      inflect.irregular(singular, plural)
      assert_equal singular, ActiveSupport::Inflector.singularize(plural)
      assert_equal plural, ActiveSupport::Inflector.pluralize(singular)
    end
  end
end

# Pluralizing an already-plural irregular is a no-op.
Irregularities.each do |singular, plural|
  define_method("test_pluralize_of_irregularity_#{plural}_should_be_the_same") do
    ActiveSupport::Inflector.inflections do |inflect|
      inflect.irregular(singular, plural)
      assert_equal plural, ActiveSupport::Inflector.pluralize(plural)
    end
  end
end

# Singularizing an already-singular irregular is a no-op.
Irregularities.each do |singular, plural|
  define_method("test_singularize_of_irregularity_#{singular}_should_be_the_same") do
    ActiveSupport::Inflector.inflections do |inflect|
      inflect.irregular(singular, plural)
      assert_equal singular, ActiveSupport::Inflector.singularize(singular)
    end
  end
end
# clear(:all) and clear() with no arguments behave identically: both wipe
# the rules, which are then restored so later tests keep the defaults.
# NOTE(review): the inflections block here runs once at class-definition
# time; only define_method's body runs per-test.
[ :all, [] ].each do |scope|
  ActiveSupport::Inflector.inflections do |inflect|
    define_method("test_clear_inflections_with_#{scope.kind_of?(Array) ? "no_arguments" : scope}") do
      # save all the inflections
      singulars, plurals, uncountables = inflect.singulars, inflect.plurals, inflect.uncountables

      # clear all the inflections
      inflect.clear(*scope)

      assert_equal [], inflect.singulars
      assert_equal [], inflect.plurals
      assert_equal [], inflect.uncountables

      # restore all the inflections (reverse_each preserves original rule
      # precedence, since rules are prepended as they are registered)
      singulars.reverse_each { |singular| inflect.singular(*singular) }
      plurals.reverse_each { |plural| inflect.plural(*plural) }
      inflect.uncountable(uncountables)

      assert_equal singulars, inflect.singulars
      assert_equal plurals, inflect.plurals
      assert_equal uncountables, inflect.uncountables
    end
  end
end
# Clearing a single named scope empties exactly that collection; the
# Array-backed scopes clear to [], while acronyms below clear to {}.
%i(plurals singulars uncountables humans).each do |scope|
  define_method("test_clear_inflections_with_#{scope}") do
    # clear the inflections
    ActiveSupport::Inflector.inflections do |inflect|
      inflect.clear(scope)
      assert_equal [], inflect.public_send(scope)
    end
  end
end

def test_clear_inflections_with_acronyms
  ActiveSupport::Inflector.inflections do |inflect|
    inflect.clear(:acronyms)
    assert_equal({}, inflect.acronyms)
  end
end
end |
# frozen_string_literal: true | |
# Shared fixture data for the Inflector test suite. Each constant maps
# inputs to the exact output the inflector is expected to produce; the
# values below ARE the behavioral contract, so they must not be edited
# casually.
module InflectorTestCases
  SingularToPlural = {
    "search" => "searches",
    "switch" => "switches",
    "fix" => "fixes",
    "box" => "boxes",
    "process" => "processes",
    "address" => "addresses",
    "case" => "cases",
    "stack" => "stacks",
    "wish" => "wishes",
    "fish" => "fish",
    "jeans" => "jeans",
    "funky jeans" => "funky jeans",
    "my money" => "my money",

    "category" => "categories",
    "query" => "queries",
    "ability" => "abilities",
    "agency" => "agencies",
    "movie" => "movies",

    "archive" => "archives",

    "index" => "indices",

    "wife" => "wives",
    "safe" => "saves",
    "half" => "halves",

    "move" => "moves",

    "salesperson" => "salespeople",
    "person" => "people",

    "spokesman" => "spokesmen",
    "man" => "men",
    "woman" => "women",

    "basis" => "bases",
    "diagnosis" => "diagnoses",
    "diagnosis_a" => "diagnosis_as",

    "datum" => "data",
    "medium" => "media",
    "stadium" => "stadia",
    "analysis" => "analyses",
    "my_analysis" => "my_analyses",

    "node_child" => "node_children",
    "child" => "children",

    "experience" => "experiences",
    "day" => "days",

    "comment" => "comments",
    "foobar" => "foobars",
    "newsletter" => "newsletters",

    "old_news" => "old_news",
    "news" => "news",

    "series" => "series",
    "miniseries" => "miniseries",
    "species" => "species",

    "quiz" => "quizzes",

    "perspective" => "perspectives",

    "ox" => "oxen",
    "photo" => "photos",
    "buffalo" => "buffaloes",
    "tomato" => "tomatoes",
    "dwarf" => "dwarves",
    "elf" => "elves",
    "information" => "information",
    "equipment" => "equipment",
    "bus" => "buses",
    "status" => "statuses",
    "status_code" => "status_codes",
    "mouse" => "mice",

    "louse" => "lice",
    "house" => "houses",
    "octopus" => "octopi",
    "virus" => "viri",
    "alias" => "aliases",
    "portfolio" => "portfolios",

    "vertex" => "vertices",
    "matrix" => "matrices",
    "matrix_fu" => "matrix_fus",

    "axis" => "axes",
    "taxi" => "taxis", # prevents regression
    "testis" => "testes",
    "crisis" => "crises",

    "rice" => "rice",
    "shoe" => "shoes",

    "horse" => "horses",
    "prize" => "prizes",
    "edge" => "edges",

    "database" => "databases",

    # regression tests against improper inflection regexes
    "|ice" => "|ices",
    "|ouse" => "|ouses",
    "slice" => "slices",
    "police" => "police"
  }

  CamelToUnderscore = {
    "Product" => "product",
    "SpecialGuest" => "special_guest",
    "ApplicationController" => "application_controller",
    "Area51Controller" => "area51_controller",
    "AppCDir" => "app_c_dir",
    "Accountsv2N2Test" => "accountsv2_n2_test",
  }

  UnderscoreToLowerCamel = {
    "product" => "product",
    "special_guest" => "specialGuest",
    "application_controller" => "applicationController",
    "area51_controller" => "area51Controller"
  }

  SymbolToLowerCamel = {
    product: "product",
    special_guest: "specialGuest",
    application_controller: "applicationController",
    area51_controller: "area51Controller"
  }

  # These only hold in the CamelCase -> snake_case direction; camelize
  # cannot reconstruct the original acronym casing without registered
  # acronyms.
  CamelToUnderscoreWithoutReverse = {
    "HTMLTidy" => "html_tidy",
    "HTMLTidyGenerator" => "html_tidy_generator",
    "FreeBSD" => "free_bsd",
    "HTML" => "html",
    "ForceXMLController" => "force_xml_controller",
    :product => "product",
  }

  CamelWithModuleToUnderscoreWithSlash = {
    "Admin::Product" => "admin/product",
    "Users::Commission::Department" => "users/commission/department",
    "UsersSection::CommissionDepartment" => "users_section/commission_department",
  }

  ClassNameToForeignKeyWithUnderscore = {
    "Person" => "person_id",
    "MyApplication::Billing::Account" => "account_id"
  }

  ClassNameToForeignKeyWithoutUnderscore = {
    "Person" => "personid",
    "MyApplication::Billing::Account" => "accountid"
  }

  ClassNameToTableName = {
    "PrimarySpokesman" => "primary_spokesmen",
    "NodeChild" => "node_children",
    "Calculu" => "calculus" # Singular names are not handled correctly
  }

  StringToParameterized = {
    "Donald E. Knuth" => "donald-e-knuth",
    "Random text with *(bad)* characters" => "random-text-with-bad-characters",
    "Allow_Under_Scores" => "allow_under_scores",
    "Trailing bad characters!@#" => "trailing-bad-characters",
    "!@#Leading bad characters" => "leading-bad-characters",
    "Squeeze   separators" => "squeeze-separators",
    "Test with + sign" => "test-with-sign",
    "Test with malformed utf8 \251" => "test-with-malformed-utf8"
  }

  StringToParameterizedPreserveCase = {
    "Donald E. Knuth" => "Donald-E-Knuth",
    "Random text with *(bad)* characters" => "Random-text-with-bad-characters",
    "Allow_Under_Scores" => "Allow_Under_Scores",
    "Trailing bad characters!@#" => "Trailing-bad-characters",
    "!@#Leading bad characters" => "Leading-bad-characters",
    "Squeeze   separators" => "Squeeze-separators",
    "Test with + sign" => "Test-with-sign",
    "Test with malformed utf8 \xA9" => "Test-with-malformed-utf8"
  }

  StringToParameterizeWithNoSeparator = {
    "Donald E. Knuth" => "donaldeknuth",
    "With-some-dashes" => "with-some-dashes",
    "Random text with *(bad)* characters" => "randomtextwithbadcharacters",
    "Trailing bad characters!@#" => "trailingbadcharacters",
    "!@#Leading bad characters" => "leadingbadcharacters",
    "Squeeze   separators" => "squeezeseparators",
    "Test with + sign" => "testwithsign",
    "Test with malformed utf8 \251" => "testwithmalformedutf8"
  }

  StringToParameterizePreserveCaseWithNoSeparator = {
    "Donald E. Knuth" => "DonaldEKnuth",
    "With-some-dashes" => "With-some-dashes",
    "Random text with *(bad)* characters" => "Randomtextwithbadcharacters",
    "Trailing bad characters!@#" => "Trailingbadcharacters",
    "!@#Leading bad characters" => "Leadingbadcharacters",
    "Squeeze   separators" => "Squeezeseparators",
    "Test with + sign" => "Testwithsign",
    "Test with malformed utf8 \xA9" => "Testwithmalformedutf8"
  }

  StringToParameterizeWithUnderscore = {
    "Donald E. Knuth" => "donald_e_knuth",
    "Random text with *(bad)* characters" => "random_text_with_bad_characters",
    "With-some-dashes" => "with-some-dashes",
    "Retain_underscore" => "retain_underscore",
    "Trailing bad characters!@#" => "trailing_bad_characters",
    "!@#Leading bad characters" => "leading_bad_characters",
    "Squeeze   separators" => "squeeze_separators",
    "Test with + sign" => "test_with_sign",
    "Test with malformed utf8 \251" => "test_with_malformed_utf8"
  }

  StringToParameterizePreserveCaseWithUnderscore = {
    "Donald E. Knuth" => "Donald_E_Knuth",
    "Random text with *(bad)* characters" => "Random_text_with_bad_characters",
    "With-some-dashes" => "With-some-dashes",
    "Allow_Under_Scores" => "Allow_Under_Scores",
    "Trailing bad characters!@#" => "Trailing_bad_characters",
    "!@#Leading bad characters" => "Leading_bad_characters",
    "Squeeze   separators" => "Squeeze_separators",
    "Test with + sign" => "Test_with_sign",
    "Test with malformed utf8 \xA9" => "Test_with_malformed_utf8"
  }

  # Inputs that require transliteration to ASCII before parameterizing.
  StringToParameterizedAndNormalized = {
    "Malmö" => "malmo",
    "Garçons" => "garcons",
    "Ops\331" => "opsu",
    "Ærøskøbing" => "aeroskobing",
    "Aßlar" => "asslar",
    "Japanese: 日本語" => "japanese"
  }

  UnderscoreToHuman = {
    "employee_salary" => "Employee salary",
    "employee_id" => "Employee",
    "underground" => "Underground",
    "_id" => "Id",
    "_external_id" => "External"
  }

  UnderscoreToHumanWithKeepIdSuffix = {
    "this_is_a_string_ending_with_id" => "This is a string ending with id",
    "employee_id" => "Employee id",
    "employee_id_something_else" => "Employee id something else",
    "underground" => "Underground",
    "_id" => "Id",
    "_external_id" => "External id"
  }

  UnderscoreToHumanWithoutCapitalize = {
    "employee_salary" => "employee salary",
    "employee_id" => "employee",
    "underground" => "underground"
  }

  MixtureToTitleCaseWithKeepIdSuffix = {
    "this_is_a_string_ending_with_id" => "This Is A String Ending With Id",
    "EmployeeId" => "Employee Id",
    "Author Id" => "Author Id"
  }

  MixtureToTitleCase = {
    "active_record" => "Active Record",
    "ActiveRecord" => "Active Record",
    "action web service" => "Action Web Service",
    "Action Web Service" => "Action Web Service",
    "Action web service" => "Action Web Service",
    "actionwebservice" => "Actionwebservice",
    "Actionwebservice" => "Actionwebservice",
    "david's code" => "David's Code",
    "David's code" => "David's Code",
    "david's Code" => "David's Code",
    "sgt. pepper's" => "Sgt. Pepper's",
    "i've just seen a face" => "I've Just Seen A Face",
    "maybe you'll be there" => "Maybe You'll Be There",
    "¿por qué?" => "¿Por Qué?",
    "Fred’s" => "Fred’s",
    "Fred`s" => "Fred`s",
    "this was 'fake news'" => "This Was 'Fake News'",
    "new name(s)" => "New Name(s)",
    "new (names)" => "New (Names)",
    "their (mis)deeds" => "Their (Mis)deeds",
    ActiveSupport::SafeBuffer.new("confirmation num") => "Confirmation Num"
  }

  OrdinalNumbers = {
    "-1" => "-1st",
    "-2" => "-2nd",
    "-3" => "-3rd",
    "-4" => "-4th",
    "-5" => "-5th",
    "-6" => "-6th",
    "-7" => "-7th",
    "-8" => "-8th",
    "-9" => "-9th",
    "-10" => "-10th",
    "-11" => "-11th",
    "-12" => "-12th",
    "-13" => "-13th",
    "-14" => "-14th",
    "-20" => "-20th",
    "-21" => "-21st",
    "-22" => "-22nd",
    "-23" => "-23rd",
    "-24" => "-24th",
    "-100" => "-100th",
    "-101" => "-101st",
    "-102" => "-102nd",
    "-103" => "-103rd",
    "-104" => "-104th",
    "-110" => "-110th",
    "-111" => "-111th",
    "-112" => "-112th",
    "-113" => "-113th",
    "-1000" => "-1000th",
    "-1001" => "-1001st",
    "0" => "0th",
    "1" => "1st",
    "2" => "2nd",
    "3" => "3rd",
    "4" => "4th",
    "5" => "5th",
    "6" => "6th",
    "7" => "7th",
    "8" => "8th",
    "9" => "9th",
    "10" => "10th",
    "11" => "11th",
    "12" => "12th",
    "13" => "13th",
    "14" => "14th",
    "20" => "20th",
    "21" => "21st",
    "22" => "22nd",
    "23" => "23rd",
    "24" => "24th",
    "100" => "100th",
    "101" => "101st",
    "102" => "102nd",
    "103" => "103rd",
    "104" => "104th",
    "110" => "110th",
    "111" => "111th",
    "112" => "112th",
    "113" => "113th",
    "1000" => "1000th",
    "1001" => "1001st"
  }

  UnderscoresToDashes = {
    "street" => "street",
    "street_address" => "street-address",
    "person_street_address" => "person-street-address"
  }

  Irregularities = {
    "person" => "people",
    "man" => "men",
    "child" => "children",
    "sex" => "sexes",
    "move" => "moves",
    "cow" => "kine", # Test inflections with different starting letters
    "zombie" => "zombies",
    "genus" => "genera"
  }
end
# frozen_string_literal: true | |
require "active_support/string_inquirer" | |
require "active_support/environment_inquirer" | |
class String
  # Wraps the current string in the ActiveSupport::StringInquirer class,
  # which gives you a prettier way to test for equality.
  #
  #   env = 'production'.inquiry
  #   env.production?  # => true
  #   env.development? # => false
  def inquiry
    ActiveSupport::StringInquirer.new(self)
  end
end
# frozen_string_literal: true | |
require "active_support/core_ext/module/delegation" | |
module ActiveSupport
  class Deprecation
    # Lets Deprecation's instance methods be called directly on the class
    # by generating a class-level delegator (to +instance+) for every
    # instance method as it is defined or mixed in.
    module InstanceDelegator # :nodoc:
      def self.included(base)
        base.extend(ClassMethods)
        # Prepend so the overrides below run before the generated
        # delegators and can fill in caller information first.
        base.singleton_class.prepend(OverrideDelegators)
        base.public_class_method :new
      end

      module ClassMethods # :nodoc:
        # Modules mixed in after inclusion also get class-level
        # delegators for each of their instance methods.
        def include(included_module)
          included_module.instance_methods.each { |m| method_added(m) }
          super
        end

        # Hook: every newly defined instance method gains a matching
        # class-level delegator that forwards to +instance+.
        def method_added(method_name)
          singleton_class.delegate(method_name, to: :instance)
        end
      end

      module OverrideDelegators # :nodoc:
        # Defaults the callstack to the caller's frames (skipping the
        # delegation machinery itself) before delegating.
        def warn(message = nil, callstack = nil)
          callstack ||= caller_locations(2)
          super
        end

        def deprecation_warning(deprecated_method_name, message = nil, caller_backtrace = nil)
          caller_backtrace ||= caller_locations(2)
          super
        end
      end
    end
  end
end
# frozen_string_literal: true | |
class Object
  # Returns a hash with string keys that maps instance variable names
  # without "@" to their corresponding values.
  #
  #   class C
  #     def initialize(x, y)
  #       @x, @y = x, y
  #     end
  #   end
  #
  #   C.new(0, 1).instance_values # => {"x" => 0, "y" => 1}
  def instance_values
    instance_variables.each_with_object({}) do |ivar, values|
      values[ivar.to_s.delete_prefix("@").freeze] = instance_variable_get(ivar)
    end
  end

  if Symbol.method_defined?(:name) # RUBY_VERSION >= "3.0"
    # Returns an array of instance variable names as strings including "@".
    #
    #   class C
    #     def initialize(x, y)
    #       @x, @y = x, y
    #     end
    #   end
    #
    #   C.new(0, 1).instance_variable_names # => ["@y", "@x"]
    def instance_variable_names
      # Symbol#name returns a frozen string without allocating a new one.
      instance_variables.map(&:name)
    end
  else
    # Fallback for Rubies without Symbol#name: stringify each symbol.
    def instance_variable_names
      instance_variables.map { |ivar| ivar.to_s.freeze }
    end
  end
end
# frozen_string_literal: true | |
require_relative "../../abstract_unit" | |
require "active_support/core_ext/object/instance_variables" | |
class ObjectInstanceVariableTest < ActiveSupport::TestCase
  def setup
    # @dest is unused by the visible assertions; kept as set-up parity
    # with @source.
    @source, @dest = Object.new, Object.new
    @source.instance_variable_set(:@bar, "bar")
    @source.instance_variable_set(:@baz, "baz")
  end

  # instance_variable_names returns "@"-prefixed strings (order not
  # guaranteed, hence the sort).
  def test_instance_variable_names
    assert_equal %w(@bar @baz), @source.instance_variable_names.sort
  end

  # instance_values strips the "@" and maps names to current values.
  def test_instance_values
    assert_equal({ "bar" => "bar", "baz" => "baz" }, @source.instance_values)
  end

  # instance_exec evaluates the block with the receiver as self while
  # still passing along the given arguments.
  def test_instance_exec_passes_arguments_to_block
    assert_equal %w(hello goodbye), (+"hello").instance_exec("goodbye") { |v| [self, v] }
  end

  # instance_exec also works on frozen receivers.
  def test_instance_exec_with_frozen_obj
    assert_equal %w(olleh goodbye), "hello".instance_exec("goodbye") { |v| [reverse, v] }
  end

  # Nested instance_exec calls keep each receiver's context separate.
  def test_instance_exec_nested
    assert_equal %w(goodbye olleh bar), (+"hello").instance_exec("goodbye") { |arg|
      [arg] + instance_exec("bar") { |v| [reverse, v] } }
  end
end
# frozen_string_literal: true | |
require "securerandom" | |
module ActiveSupport | |
module Notifications | |
# Instrumenters are stored in a thread local.
class Instrumenter
  attr_reader :id

  def initialize(notifier)
    # Generated once per instrumenter and used as the transaction id for
    # every event this instrumenter publishes.
    @id = unique_id
    @notifier = notifier
  end

  # Given a block, instrument it by measuring the time taken to execute
  # and publish it. Without a block, simply send a message via the
  # notifier. Notice that events get sent even if an error occurs in the
  # passed-in block.
  def instrument(name, payload = {})
    # some of the listeners might have state
    listeners_state = start name, payload
    begin
      yield payload if block_given?
    rescue Exception => e
      # Intentionally catches Exception (not just StandardError): the
      # error is recorded on the payload for subscribers, then re-raised
      # unchanged to the caller.
      payload[:exception] = [e.class.name, e.message]
      payload[:exception_object] = e
      raise e
    ensure
      # Always publish the finish event, whether or not the block raised.
      finish_with_state listeners_state, name, payload
    end
  end

  # Builds an Event carrying this instrumenter's transaction id, with no
  # start/end timestamps yet.
  def new_event(name, payload = {}) # :nodoc:
    Event.new(name, nil, nil, @id, payload)
  end

  # Send a start notification with +name+ and +payload+.
  def start(name, payload)
    @notifier.start name, @id, payload
  end

  # Send a finish notification with +name+ and +payload+.
  def finish(name, payload)
    @notifier.finish name, @id, payload
  end

  # Like +finish+, but also hands back the per-listener state captured by
  # +start+ so stateful subscribers can pair up start/finish calls.
  def finish_with_state(listeners_state, name, payload)
    @notifier.finish name, @id, payload, listeners_state
  end

  private
    def unique_id
      SecureRandom.hex(10)
    end
end
class Event | |
attr_reader :name, :time, :end, :transaction_id | |
attr_accessor :payload | |
def initialize(name, start, ending, transaction_id, payload) | |
@name = name | |
@payload = payload.dup | |
@time = start ? start.to_f * 1_000.0 : start | |
@transaction_id = transaction_id | |
@end = ending ? ending.to_f * 1_000.0 : ending | |
@cpu_time_start = 0.0 | |
@cpu_time_finish = 0.0 | |
@allocation_count_start = 0 | |
@allocation_count_finish = 0 | |
end | |
def record | |
start! | |
begin | |
yield payload if block_given? | |
rescue Exception => e | |
payload[:exception] = [e.class.name, e.message] | |
payload[:exception_object] = e | |
raise e | |
ensure | |
finish! | |
end | |
end | |
# Record information at the time this event starts | |
def start! | |
@time = now | |
@cpu_time_start = now_cpu | |
@allocation_count_start = now_allocations | |
end | |
# Record information at the time this event finishes | |
def finish! | |
@cpu_time_finish = now_cpu | |
@end = now | |
@allocation_count_finish = now_allocations | |
end | |
# Returns the CPU time (in milliseconds) passed since the call to | |
# +start!+ and the call to +finish!+ | |
def cpu_time | |
@cpu_time_finish - @cpu_time_start | |
end | |
# Returns the idle time time (in milliseconds) passed since the call to | |
# +start!+ and the call to +finish!+ | |
def idle_time | |
duration - cpu_time | |
end | |
# Returns the number of allocations made since the call to +start!+ and | |
# the call to +finish!+ | |
def allocations | |
@allocation_count_finish - @allocation_count_start | |
end | |
def children # :nodoc: | |
ActiveSupport::Deprecation.warn <<~EOM | |
ActiveSupport::Notifications::Event#children is deprecated and will | |
be removed in Rails 7.2. | |
EOM | |
[] | |
end | |
def parent_of?(event) # :nodoc: | |
ActiveSupport::Deprecation.warn <<~EOM | |
ActiveSupport::Notifications::Event#parent_of? is deprecated and will | |
be removed in Rails 7.2. | |
EOM | |
start = (time - event.time) * 1000 | |
start <= 0 && (start + duration >= event.duration) | |
end | |
# Returns the difference in milliseconds between when the execution of the | |
# event started and when it ended. | |
# | |
# ActiveSupport::Notifications.subscribe('wait') do |*args| | |
# @event = ActiveSupport::Notifications::Event.new(*args) | |
# end | |
# | |
# ActiveSupport::Notifications.instrument('wait') do | |
# sleep 1 | |
# end | |
# | |
# @event.duration # => 1000.138 | |
def duration | |
self.end - time | |
end | |
private | |
        # Current monotonic clock reading in fractional milliseconds; used so
        # measured durations cannot be skewed by wall-clock changes.
        def now
          Process.clock_gettime(Process::CLOCK_MONOTONIC, :float_millisecond)
        end
        # Define +now_cpu+ according to platform support: the probe call below
        # raises (a StandardError such as Errno::EINVAL) when the per-thread
        # CPU clock is unavailable, in which case +now_cpu+ degrades to a
        # constant 0.0 (so +cpu_time+ reports 0 and +idle_time+ == +duration+).
        begin
          Process.clock_gettime(Process::CLOCK_THREAD_CPUTIME_ID, :float_millisecond)
          def now_cpu
            Process.clock_gettime(Process::CLOCK_THREAD_CPUTIME_ID, :float_millisecond)
          end
        rescue
          def now_cpu # rubocop:disable Lint/DuplicateMethods
            0.0
          end
        end
        # Define +now_allocations+ according to GC support: CRuby exposes the
        # cumulative :total_allocated_objects counter; other implementations
        # fall back to a constant 0.
        if GC.stat.key?(:total_allocated_objects)
          def now_allocations
            GC.stat(:total_allocated_objects)
          end
        else # Likely on JRuby, TruffleRuby
          def now_allocations
            0
          end
        end
end | |
end | |
end |
# frozen_string_literal: true | |
require_relative "../abstract_unit" | |
require "active_support/notifications/instrumenter" | |
module ActiveSupport | |
module Notifications | |
    # Tests Instrumenter against a recording stub notifier so assertions can
    # inspect exactly which start/finish notifications were emitted.
    class InstrumenterTest < ActiveSupport::TestCase
      # Stub notifier that records start/finish calls instead of dispatching
      # to real subscribers.
      class TestNotifier
        attr_reader :starts, :finishes

        def initialize
          @starts = []
          @finishes = []
        end

        def start(*args); @starts << args; end
        def finish(*args); @finishes << args; end
      end

      attr_reader :instrumenter, :notifier, :payload

      def setup
        super
        @notifier = TestNotifier.new
        @instrumenter = Instrumenter.new @notifier
        @payload = { foo: Object.new }
      end

      def test_instrument
        called = false
        instrumenter.instrument("foo", payload) {
          called = true
        }

        assert called
      end

      def test_instrument_yields_the_payload_for_further_modification
        assert_equal 2, instrumenter.instrument("awesome") { |p| p[:result] = 1 + 1 }
        assert_equal 1, notifier.finishes.size
        name, _, payload = notifier.finishes.first
        assert_equal "awesome", name
        assert_equal Hash[result: 2], payload
      end

      def test_instrument_works_without_a_block
        instrumenter.instrument("no.block", payload)
        assert_equal 1, notifier.finishes.size
        assert_equal "no.block", notifier.finishes.first.first
      end

      # start/finish are one-sided: only the corresponding notifier callback
      # fires.
      def test_start
        instrumenter.start("foo", payload)
        assert_equal [["foo", instrumenter.id, payload]], notifier.starts
        assert_empty notifier.finishes
      end

      def test_finish
        instrumenter.finish("foo", payload)
        assert_equal [["foo", instrumenter.id, payload]], notifier.finishes
        assert_empty notifier.starts
      end

      def test_record
        called = false
        event = instrumenter.new_event("foo", payload)
        event.record {
          called = true
        }

        assert called
      end

      def test_record_yields_the_payload_for_further_modification
        event = instrumenter.new_event("awesome")
        event.record { |p| p[:result] = 1 + 1 }
        assert_equal 2, event.payload[:result]
        assert_equal "awesome", event.name
        assert_equal Hash[result: 2], event.payload
        assert_equal instrumenter.id, event.transaction_id
        assert_not_nil event.time
        assert_not_nil event.end
      end

      def test_record_works_without_a_block
        event = instrumenter.new_event("no.block", payload)
        event.record
        assert_equal "no.block", event.name
        assert_equal payload, event.payload
        assert_equal instrumenter.id, event.transaction_id
        assert_not_nil event.time
        assert_not_nil event.end
      end

      # Exceptions raised inside record are captured into the payload and
      # re-raised to the caller.
      def test_record_with_exception
        event = instrumenter.new_event("crash", payload)
        assert_raises RuntimeError do
          event.record { raise "Oopsies" }
        end

        assert_equal "Oopsies", event.payload[:exception_object].message
      end
    end
end | |
end |
# frozen_string_literal: true | |
require "active_support/core_ext/integer/multiple" | |
require "active_support/core_ext/integer/inflections" | |
require "active_support/core_ext/integer/time" |
# frozen_string_literal: true | |
require_relative "../abstract_unit" | |
require "active_support/core_ext/integer" | |
# Tests for the Integer core extensions: multiple_of?, ordinalize, ordinal.
class IntegerExtTest < ActiveSupport::TestCase
  PRIME = 22953686867719691230002707821868552601124472329079

  def test_multiple_of
    [-7, 0, 7, 14].each { |n| assert n.multiple_of?(7) }
    [-7, 7, 14].each { |n| assert_not n.multiple_of?(6) }

    # Zero edge cases: only zero is a multiple of zero.
    assert 0.multiple_of?(0)
    assert_not 5.multiple_of?(0)

    # A large prime is not a multiple of any small prime.
    [2, 3, 5, 7].each { |n| assert_not PRIME.multiple_of?(n) }
  end

  # Smoke test only; ordinalize is covered comprehensively by the inflector
  # test cases.
  def test_ordinalize
    assert_equal "1st", 1.ordinalize
    assert_equal "8th", 8.ordinalize
  end

  def test_ordinal
    assert_equal "st", 1.ordinal
    assert_equal "th", 8.ordinal
  end
end
# frozen_string_literal: true | |
require "active_support/concurrency/share_lock" | |
module ActiveSupport # :nodoc: | |
module Dependencies # :nodoc: | |
    # Coordinates code loading/unloading with application code execution on
    # top of a ShareLock: many threads may "run" concurrently (shared access),
    # while loading and unloading take the lock exclusively, with the listed
    # compatible purposes still allowed to proceed.
    class Interlock
      def initialize # :nodoc:
        @lock = ActiveSupport::Concurrency::ShareLock.new
      end

      # Runs +block+ with the exclusive right to load code; other loads stay
      # compatible so they are not blocked out.
      def loading(&block)
        @lock.exclusive(purpose: :load, compatible: [:load], after_compatible: [:load], &block)
      end

      # Runs +block+ with the exclusive right to unload; concurrent loads and
      # unloads remain compatible.
      def unloading(&block)
        @lock.exclusive(purpose: :unload, compatible: [:load, :unload], after_compatible: [:load, :unload], &block)
      end

      # Non-block variant: acquire the exclusive unload lock.
      def start_unloading
        @lock.start_exclusive(purpose: :unload, compatible: [:load, :unload])
      end

      # Non-block variant: release the exclusive unload lock.
      def done_unloading
        @lock.stop_exclusive(compatible: [:load, :unload])
      end

      # Marks the current thread as running application code (shared access).
      def start_running
        @lock.start_sharing
      end

      def done_running
        @lock.stop_sharing
      end

      # Block form of start_running/done_running.
      def running(&block)
        @lock.sharing(&block)
      end

      # Temporarily yields this thread's share so loads may proceed in other
      # threads while +block+ executes.
      def permit_concurrent_loads(&block)
        @lock.yield_shares(compatible: [:load], &block)
      end

      def raw_state(&block) # :nodoc:
        @lock.raw_state(&block)
      end
    end
end | |
end |
# frozen_string_literal: true | |
require "active_support/core_ext/string/filters" | |
require "active_support/inflector" | |
class Module
  # Returns the name of the module containing this one, or +nil+ for
  # top-level and anonymous modules.
  #
  #   M::N.module_parent_name # => "M"
  def module_parent_name
    # Memoized result (which may legitimately be nil) wins.
    return @parent_name if defined?(@parent_name)

    match = name&.match(/::[^:]+\z/)
    parent_name = match ? -match.pre_match : nil
    # Frozen modules cannot store the ivar, so they recompute on every call.
    @parent_name = parent_name unless frozen?
    parent_name
  end

  # Returns the module which contains this one according to its name.
  #
  #   module M
  #     module N
  #     end
  #   end
  #   X = M::N
  #
  #   M::N.module_parent # => M
  #   X.module_parent    # => M
  #
  # The parent of top-level and anonymous modules is Object.
  #
  #   M.module_parent          # => Object
  #   Module.new.module_parent # => Object
  def module_parent
    if (container = module_parent_name)
      ActiveSupport::Inflector.constantize(container)
    else
      Object
    end
  end

  # Returns all the parents of this module according to its name, ordered from
  # nested outwards. The receiver is not contained within the result.
  #
  #   module M
  #     module N
  #     end
  #   end
  #   X = M::N
  #
  #   M.module_parents    # => [Object]
  #   M::N.module_parents # => [M, Object]
  #   X.module_parents    # => [M, Object]
  def module_parents
    parents = []
    if (qualified = module_parent_name)
      segments = qualified.split("::")
      while segments.any?
        parents << ActiveSupport::Inflector.constantize(segments.join("::"))
        segments.pop
      end
    end
    parents << Object unless parents.include? Object
    parents
  end
end
# frozen_string_literal: true | |
require_relative "../../abstract_unit" | |
require "active_support/core_ext/module/introspection" | |
# Fixture namespaces for the introspection tests below; the frozen variants
# exercise module_parent_name's non-memoizing path for frozen modules.
module ParentA
  module B
    module C; end

    module FrozenC; end
    FrozenC.freeze
  end

  module FrozenB; end
  FrozenB.freeze
end
# Tests for the Module#module_parent* introspection core extensions.
class IntrospectionTest < ActiveSupport::TestCase
  def test_module_parent_name
    assert_equal "ParentA", ParentA::B.module_parent_name
    assert_equal "ParentA::B", ParentA::B::C.module_parent_name
    assert_nil ParentA.module_parent_name
  end

  # Frozen modules cannot memoize their parent name; lookup must still work.
  def test_module_parent_name_when_frozen
    assert_equal "ParentA", ParentA::FrozenB.module_parent_name
    assert_equal "ParentA::B", ParentA::B::FrozenC.module_parent_name
  end

  def test_module_parent
    assert_equal ParentA::B, ParentA::B::C.module_parent
    assert_equal ParentA, ParentA::B.module_parent
    assert_equal Object, ParentA.module_parent
  end

  def test_module_parents
    assert_equal [ParentA::B, ParentA, Object], ParentA::B::C.module_parents
    assert_equal [ParentA, Object], ParentA::B.module_parents
  end
end
# frozen_string_literal: true | |
require "strscan" | |
module ActiveSupport | |
class Duration | |
# Parses a string formatted according to ISO 8601 Duration into the hash. | |
# | |
# See {ISO 8601}[https://en.wikipedia.org/wiki/ISO_8601#Durations] for more information. | |
# | |
# This parser allows negative parts to be present in pattern. | |
    class ISO8601Parser # :nodoc:
      class ParsingError < ::ArgumentError; end

      PERIOD_OR_COMMA = /\.|,/
      PERIOD = "."
      COMMA = ","

      # Tokens for the hand-rolled state machine below. SIGN_MARKER also
      # matches the empty string, so an unsigned duration is accepted.
      SIGN_MARKER = /\A-|\+|/
      DATE_MARKER = /P/
      TIME_MARKER = /T/
      DATE_COMPONENT = /(-?\d+(?:[.,]\d+)?)(Y|M|D|W)/
      TIME_COMPONENT = /(-?\d+(?:[.,]\d+)?)(H|M|S)/

      # "M" maps to different parts before and after the T marker: months in
      # the date section, minutes in the time section.
      DATE_TO_PART = { "Y" => :years, "M" => :months, "W" => :weeks, "D" => :days }
      TIME_TO_PART = { "H" => :hours, "M" => :minutes, "S" => :seconds }

      DATE_COMPONENTS = [:years, :months, :days]
      TIME_COMPONENTS = [:hours, :minutes, :seconds]

      attr_reader :parts, :scanner
      attr_accessor :mode, :sign

      def initialize(string)
        @scanner = StringScanner.new(string)
        @parts = {}
        @mode = :start
        @sign = 1
      end

      # Runs the state machine over the input (:start -> :sign -> :date ->
      # :time) and returns the parts hash; raises ParsingError on malformed
      # or semantically invalid input.
      def parse!
        while !finished?
          case mode
          when :start
            if scan(SIGN_MARKER)
              self.sign = (scanner.matched == "-") ? -1 : 1
              self.mode = :sign
            else
              raise_parsing_error
            end
          when :sign
            if scan(DATE_MARKER)
              self.mode = :date
            else
              raise_parsing_error
            end
          when :date
            if scan(TIME_MARKER)
              self.mode = :time
            elsif scan(DATE_COMPONENT)
              parts[DATE_TO_PART[scanner[2]]] = number * sign
            else
              raise_parsing_error
            end
          when :time
            if scan(TIME_COMPONENT)
              parts[TIME_TO_PART[scanner[2]]] = number * sign
            else
              raise_parsing_error
            end
          end
        end
        validate!
        parts
      end

      private
        def finished?
          scanner.eos?
        end

        # Parses number which can be a float with either comma or period.
        def number
          PERIOD_OR_COMMA.match?(scanner[1]) ? scanner[1].tr(COMMA, PERIOD).to_f : scanner[1].to_i
        end

        def scan(pattern)
          scanner.scan(pattern)
        end

        def raise_parsing_error(reason = nil)
          raise ParsingError, "Invalid ISO 8601 duration: #{scanner.string.inspect} #{reason}".strip
        end

        # Checks for various semantic errors as stated in ISO 8601 standard.
        def validate!
          raise_parsing_error("is empty duration") if parts.empty?

          # Mixing any of Y, M, D with W is invalid.
          if parts.key?(:weeks) && (parts.keys & DATE_COMPONENTS).any?
            raise_parsing_error("mixing weeks with other date parts not allowed")
          end

          # Specifying an empty T part is invalid.
          if mode == :time && (parts.keys & TIME_COMPONENTS).empty?
            raise_parsing_error("time part marker is present but time part is empty")
          end

          # Only the smallest (last non-zero) part may carry a fraction.
          fractions = parts.values.reject(&:zero?).select { |a| (a % 1) != 0 }
          unless fractions.empty? || (fractions.size == 1 && fractions.last == @parts.values.reject(&:zero?).last)
            raise_parsing_error "(only last part can be fractional)"
          end

          true
        end
    end
end | |
end |
# frozen_string_literal: true | |
require "active_support/core_ext/object/blank" | |
module ActiveSupport | |
class Duration | |
# Serializes duration to string according to ISO 8601 Duration format. | |
class ISO8601Serializer # :nodoc: | |
DATE_COMPONENTS = %i(years months days) | |
def initialize(duration, precision: nil) | |
@duration = duration | |
@precision = precision | |
end | |
# Builds and returns output string. | |
def serialize | |
parts = normalize | |
return "PT0S" if parts.empty? | |
output = +"P" | |
output << "#{parts[:years]}Y" if parts.key?(:years) | |
output << "#{parts[:months]}M" if parts.key?(:months) | |
output << "#{parts[:days]}D" if parts.key?(:days) | |
output << "#{parts[:weeks]}W" if parts.key?(:weeks) | |
time = +"" | |
time << "#{parts[:hours]}H" if parts.key?(:hours) | |
time << "#{parts[:minutes]}M" if parts.key?(:minutes) | |
if parts.key?(:seconds) | |
time << "#{format_seconds(parts[:seconds])}S" | |
end | |
output << "T#{time}" unless time.empty? | |
output | |
end | |
private | |
# Return pair of duration's parts and whole duration sign. | |
# Parts are summarized (as they can become repetitive due to addition, etc). | |
# Zero parts are removed as not significant. | |
# If all parts are negative it will negate all of them and return minus as a sign. | |
def normalize | |
parts = @duration.parts.each_with_object(Hash.new(0)) do |(k, v), p| | |
p[k] += v unless v.zero? | |
end | |
# Convert weeks to days and remove weeks if mixed with date parts | |
if week_mixed_with_date?(parts) | |
parts[:days] += parts.delete(:weeks) * SECONDS_PER_WEEK / SECONDS_PER_DAY | |
end | |
parts | |
end | |
def week_mixed_with_date?(parts) | |
parts.key?(:weeks) && (parts.keys & DATE_COMPONENTS).any? | |
end | |
def format_seconds(seconds) | |
if @precision | |
sprintf("%0.0#{@precision}f", seconds) | |
else | |
seconds.to_s | |
end | |
end | |
end | |
end | |
end |
# frozen_string_literal: true | |
require "fiber" | |
module ActiveSupport | |
module IsolatedExecutionState # :nodoc: | |
@isolation_level = nil | |
Thread.attr_accessor :active_support_execution_state | |
Fiber.attr_accessor :active_support_execution_state | |
class << self | |
attr_reader :isolation_level, :scope | |
def isolation_level=(level) | |
return if level == @isolation_level | |
unless %i(thread fiber).include?(level) | |
raise ArgumentError, "isolation_level must be `:thread` or `:fiber`, got: `#{level.inspect}`" | |
end | |
clear if @isolation_level | |
@scope = | |
case level | |
when :thread; Thread | |
when :fiber; Fiber | |
end | |
@isolation_level = level | |
end | |
def unique_id | |
self[:__id__] ||= Object.new | |
end | |
def [](key) | |
state[key] | |
end | |
def []=(key, value) | |
state[key] = value | |
end | |
def key?(key) | |
state.key?(key) | |
end | |
def delete(key) | |
state.delete(key) | |
end | |
def clear | |
state.clear | |
end | |
def context | |
scope.current | |
end | |
def share_with(other) | |
# Action Controller streaming spawns a new thread and copy thread locals. | |
# We do the same here for backward compatibility, but this is very much a hack | |
# and streaming should be rethought. | |
context.active_support_execution_state = other.active_support_execution_state.dup | |
end | |
private | |
def state | |
context.active_support_execution_state ||= {} | |
end | |
end | |
self.isolation_level = :thread | |
end | |
end |
# frozen_string_literal: true | |
require_relative "abstract_unit" | |
# Verifies that IsolatedExecutionState isolates values per thread or per
# fiber depending on the configured isolation level.
class IsolatedExecutionStateTest < ActiveSupport::TestCase
  setup do
    ActiveSupport::IsolatedExecutionState.clear
    @original_isolation_level = ActiveSupport::IsolatedExecutionState.isolation_level
  end

  teardown do
    ActiveSupport::IsolatedExecutionState.clear
    ActiveSupport::IsolatedExecutionState.isolation_level = @original_isolation_level
  end

  test "#[] when isolation level is :fiber" do
    ActiveSupport::IsolatedExecutionState.isolation_level = :fiber
    ActiveSupport::IsolatedExecutionState[:test] = 42
    assert_equal 42, ActiveSupport::IsolatedExecutionState[:test]

    # Enumerator#next runs its block on a separate fiber, so the value must
    # not be visible there under :fiber isolation.
    enumerator = Enumerator.new do |yielder|
      yielder.yield ActiveSupport::IsolatedExecutionState[:test]
    end
    assert_nil enumerator.next

    assert_nil Thread.new { ActiveSupport::IsolatedExecutionState[:test] }.value
  end

  test "#[] when isolation level is :thread" do
    ActiveSupport::IsolatedExecutionState.isolation_level = :thread
    ActiveSupport::IsolatedExecutionState[:test] = 42
    assert_equal 42, ActiveSupport::IsolatedExecutionState[:test]

    # A fiber on the same thread shares the state under :thread isolation.
    enumerator = Enumerator.new do |yielder|
      yielder.yield ActiveSupport::IsolatedExecutionState[:test]
    end
    assert_equal 42, enumerator.next

    assert_nil Thread.new { ActiveSupport::IsolatedExecutionState[:test] }.value
  end

  test "changing the isolation level clear the old store" do
    original = ActiveSupport::IsolatedExecutionState.isolation_level
    other = ActiveSupport::IsolatedExecutionState.isolation_level == :fiber ? :thread : :fiber

    ActiveSupport::IsolatedExecutionState[:test] = 42
    # Assigning the current level is a no-op; the value survives.
    ActiveSupport::IsolatedExecutionState.isolation_level = original
    assert_equal 42, ActiveSupport::IsolatedExecutionState[:test]

    ActiveSupport::IsolatedExecutionState.isolation_level = other
    assert_nil ActiveSupport::IsolatedExecutionState[:test]

    ActiveSupport::IsolatedExecutionState.isolation_level = original
    assert_nil ActiveSupport::IsolatedExecutionState[:test]
  end
end
# frozen_string_literal: true | |
module ActiveSupport | |
module Testing | |
    # Runs each test in a separate process — fork where available, otherwise a
    # spawned subprocess — so tests that mutate global state cannot affect one
    # another. The child serializes its Minitest result with Marshal and the
    # parent deserializes it.
    module Isolation
      require "thread"

      def self.included(klass) # :nodoc:
        klass.class_eval do
          parallelize_me!
        end
      end

      # Forking is used unless disabled via NO_FORK or unsupported by the
      # platform (e.g. Windows, JRuby).
      def self.forking_env?
        !ENV["NO_FORK"] && Process.respond_to?(:fork)
      end

      def run
        serialized = run_in_isolation do
          super
        end

        # The payload comes from our own child process; Marshal.load must
        # never be fed untrusted input.
        Marshal.load(serialized)
      end

      module Forking
        # Forks, runs the test in the child, and ships the marshalled result
        # back through a binary pipe (base64-encoded via Array#pack("m")).
        def run_in_isolation(&blk)
          read, write = IO.pipe
          read.binmode
          write.binmode

          pid = fork do
            read.close
            yield
            begin
              if error?
                # Replace failure objects that cannot be marshalled (e.g.
                # exceptions holding IO references) with plain equivalents.
                failures.map! { |e|
                  begin
                    Marshal.dump e
                    e
                  rescue TypeError
                    ex = Exception.new e.message
                    ex.set_backtrace e.backtrace
                    Minitest::UnexpectedError.new ex
                  end
                }
              end
              test_result = defined?(Minitest::Result) ? Minitest::Result.from(self) : dup
              result = Marshal.dump(test_result)
            end

            write.puts [result].pack("m")
            exit!
          end

          write.close
          result = read.read
          Process.wait2(pid)
          result.unpack1("m")
        end
      end

      module Subprocess
        ORIG_ARGV = ARGV.dup unless defined?(ORIG_ARGV)

        # Complicated H4X to get this working in windows / jruby with
        # no forking.
        def run_in_isolation(&blk)
          require "tempfile"

          if ENV["ISOLATION_TEST"]
            # Child side: run the test and write the marshalled result to the
            # tempfile handed down by the parent.
            yield
            test_result = defined?(Minitest::Result) ? Minitest::Result.from(self) : dup
            File.open(ENV["ISOLATION_OUTPUT"], "w") do |file|
              file.puts [Marshal.dump(test_result)].pack("m")
            end
            exit!
          else
            # Parent side: re-invoke this same test file in a fresh Ruby,
            # targeting just this one test via minitest's -n filter.
            Tempfile.open("isolation") do |tmpfile|
              env = {
                "ISOLATION_TEST" => self.class.name,
                "ISOLATION_OUTPUT" => tmpfile.path
              }

              test_opts = "-n#{self.class.name}##{name}"

              # Reconstruct the parent's load path for the child process.
              load_path_args = []
              $-I.each do |p|
                load_path_args << "-I"
                load_path_args << File.expand_path(p)
              end

              child = IO.popen([env, Gem.ruby, *load_path_args, $0, *ORIG_ARGV, test_opts])

              begin
                Process.wait(child.pid)
              rescue Errno::ECHILD # The child process may exit before we wait
                nil
              end

              return tmpfile.read.unpack1("m")
            end
          end
        end
      end

      include forking_env? ? Forking : Subprocess
    end
end | |
end |
# frozen_string_literal: true | |
raise "JRuby is required to use the JDOM backend for XmlMini" unless RUBY_PLATFORM.include?("java") | |
require "jruby" | |
include Java | |
require "active_support/core_ext/object/blank" | |
java_import javax.xml.parsers.DocumentBuilder unless defined? DocumentBuilder | |
java_import javax.xml.parsers.DocumentBuilderFactory unless defined? DocumentBuilderFactory | |
java_import java.io.StringReader unless defined? StringReader | |
java_import org.xml.sax.InputSource unless defined? InputSource | |
java_import org.xml.sax.Attributes unless defined? Attributes | |
java_import org.w3c.dom.Node unless defined? Node | |
module ActiveSupport | |
  # XmlMini backend built on the JDK's DOM parser (JRuby only). Converts an
  # XML document into the nested Hash structure XmlMini expects.
  module XmlMini_JDOM # :nodoc:
    extend self

    CONTENT_KEY = "__content__"

    NODE_TYPE_NAMES = %w{ATTRIBUTE_NODE CDATA_SECTION_NODE COMMENT_NODE DOCUMENT_FRAGMENT_NODE
    DOCUMENT_NODE DOCUMENT_TYPE_NODE ELEMENT_NODE ENTITY_NODE ENTITY_REFERENCE_NODE NOTATION_NODE
    PROCESSING_INSTRUCTION_NODE TEXT_NODE}

    node_type_map = {}
    NODE_TYPE_NAMES.each { |type| node_type_map[Node.send(type)] = type }

    # Parse an XML Document string or IO into a simple hash using Java's jdom.
    # data::
    #   XML Document string or IO to parse
    def parse(data)
      if data.respond_to?(:read)
        data = data.read
      end

      if data.blank?
        {}
      else
        @dbf = DocumentBuilderFactory.new_instance
        # secure processing of java xml
        # (disables external DTD loading and entity expansion)
        # https://archive.is/9xcQQ
        @dbf.setFeature("http://apache.org/xml/features/nonvalidating/load-external-dtd", false)
        @dbf.setFeature("http://xml.org/sax/features/external-general-entities", false)
        @dbf.setFeature("http://xml.org/sax/features/external-parameter-entities", false)
        @dbf.setFeature(javax.xml.XMLConstants::FEATURE_SECURE_PROCESSING, true)
        xml_string_reader = StringReader.new(data)
        xml_input_source = InputSource.new(xml_string_reader)
        doc = @dbf.new_document_builder.parse(xml_input_source)
        merge_element!({ CONTENT_KEY => "" }, doc.document_element, XmlMini.depth)
      end
    end

    private
      # Convert an XML element and merge into the hash
      #
      # hash::
      #   Hash to merge the converted element into.
      # element::
      #   XML element to merge into hash
      def merge_element!(hash, element, depth)
        raise "Document too deep!" if depth == 0
        delete_empty(hash)
        merge!(hash, element.tag_name, collapse(element, depth))
      end

      # Drops a blank content key so it does not mask real content.
      def delete_empty(hash)
        hash.delete(CONTENT_KEY) if hash[CONTENT_KEY] == ""
      end

      # Actually converts an XML document element into a data structure.
      #
      # element::
      #   The document element to be collapsed.
      def collapse(element, depth)
        hash = get_attributes(element)

        child_nodes = element.child_nodes
        if child_nodes.length > 0
          (0...child_nodes.length).each do |i|
            child = child_nodes.item(i)
            # Non-text children recurse; text is handled by merge_texts!.
            merge_element!(hash, child, depth - 1) unless child.node_type == Node.TEXT_NODE
          end
          merge_texts!(hash, element) unless empty_content?(element)
          hash
        else
          merge_texts!(hash, element)
        end
      end

      # Merge all the texts of an element into the hash
      #
      # hash::
      #   Hash to add the converted element to.
      # element::
      #   XML element whose texts are to be merged into the hash
      def merge_texts!(hash, element)
        delete_empty(hash)
        text_children = texts(element)
        if text_children.join.empty?
          hash
        else
          # must use value to prevent double-escaping
          merge!(hash, CONTENT_KEY, text_children.join)
        end
      end

      # Adds a new key/value pair to an existing Hash. If the key to be added
      # already exists and the existing value associated with key is not
      # an Array, it will be wrapped in an Array. Then the new value is
      # appended to that Array.
      #
      # hash::
      #   Hash to add key/value pair to.
      # key::
      #   Key to be added.
      # value::
      #   Value to be associated with key.
      def merge!(hash, key, value)
        if hash.has_key?(key)
          if hash[key].instance_of?(Array)
            hash[key] << value
          else
            hash[key] = [hash[key], value]
          end
        elsif value.instance_of?(Array)
          hash[key] = [value]
        else
          hash[key] = value
        end
        hash
      end

      # Converts the attributes array of an XML element into a hash.
      # Returns an empty Hash if node has no attributes.
      #
      # element::
      #   XML element to extract attributes from.
      def get_attributes(element)
        attribute_hash = {}
        attributes = element.attributes
        (0...attributes.length).each do |i|
          # Seed the content key so attribute-only elements still carry it.
          attribute_hash[CONTENT_KEY] ||= ""
          attribute_hash[attributes.item(i).name] = attributes.item(i).value
        end
        attribute_hash
      end

      # Collects the text data of an element's direct text-node children.
      #
      # element::
      #   XML element to be checked.
      def texts(element)
        texts = []
        child_nodes = element.child_nodes
        (0...child_nodes.length).each do |i|
          item = child_nodes.item(i)
          if item.node_type == Node.TEXT_NODE
            texts << item.get_data
          end
        end
        texts
      end

      # Determines if a document element has text content
      #
      # element::
      #   XML element to be checked.
      def empty_content?(element)
        text = +""
        child_nodes = element.child_nodes
        (0...child_nodes.length).each do |i|
          item = child_nodes.item(i)
          if item.node_type == Node.TEXT_NODE
            text << item.get_data.strip
          end
        end
        text.strip.length == 0
      end
  end
end |
# frozen_string_literal: true | |
require_relative "xml_mini_engine_test" | |
# The JDOM backend only exists on JRuby; these tests are skipped elsewhere.
XMLMiniEngineTest.run_with_platform("java") do
  class JDOMEngineTest < XMLMiniEngineTest
    FILES_DIR = File.expand_path("../fixtures/xml", __dir__)

    # External entity expansion is disabled, so the entity resolves to
    # nothing rather than the contents of a local file.
    def test_not_allowed_to_expand_entities_to_files
      attack_xml = <<-EOT
      <!DOCTYPE member [
        <!ENTITY a SYSTEM "file://#{FILES_DIR}/jdom_include.txt">
      ]>
      <member>x&a;</member>
      EOT
      assert_equal "x", Hash.from_xml(attack_xml)["member"]
    end

    # Parameter entities referencing external files must raise rather than
    # be expanded.
    def test_not_allowed_to_expand_parameter_entities_to_files
      attack_xml = <<-EOT
      <!DOCTYPE member [
        <!ENTITY % b SYSTEM "file://#{FILES_DIR}/jdom_entities.txt">
        %b;
      ]>
      <member>x&a;</member>
      EOT
      assert_raise Java::OrgXmlSax::SAXParseException do
        assert_equal "x", Hash.from_xml(attack_xml)["member"]
      end
    end

    # External DOCTYPE loading is disabled, so the undefined entity simply
    # vanishes instead of being resolved from the DTD file.
    def test_not_allowed_to_load_external_doctypes
      attack_xml = <<-EOT
      <!DOCTYPE member SYSTEM "file://#{FILES_DIR}/jdom_doctype.dtd">
      <member>x&a;</member>
      EOT
      assert_equal "x", Hash.from_xml(attack_xml)["member"]
    end

    private
      def engine
        "JDOM"
      end

      def expansion_attack_error
        Java::OrgXmlSax::SAXParseException
      end

      def extended_engine?
        false
      end
  end
end
# frozen_string_literal: true | |
require "active_support/json/decoding" | |
require "active_support/json/encoding" |
# frozen_string_literal: true | |
require_relative "../../abstract_unit" | |
# These test cases were added to test that cherry-picking the json extensions | |
# works correctly, primarily for dependencies problems reported in #16131. They | |
# need to be executed in isolation to reproduce the scenario correctly, because | |
# other test cases might have already loaded additional dependencies. | |
class JsonCherryPickTest < ActiveSupport::TestCase
  include ActiveSupport::Testing::Isolation

  def test_time_as_json
    require_or_skip "active_support/core_ext/object/json"

    original = Time.new(2004, 7, 25)
    assert_equal original, Time.parse(original.as_json)
  end

  def test_date_as_json
    require_or_skip "active_support/core_ext/object/json"

    original = Date.new(2004, 7, 25)
    assert_equal original, Date.parse(original.as_json)
  end

  def test_datetime_as_json
    require_or_skip "active_support/core_ext/object/json"

    original = DateTime.new(2004, 7, 25)
    assert_equal original, DateTime.parse(original.as_json)
  end

  private
    # Each example must be the one to load the core_ext; skip when another
    # test already pulled it in.
    def require_or_skip(file)
      require(file) || skip("'#{file}' was already loaded")
    end
end
# frozen_string_literal: true | |
require_relative "../../abstract_unit" | |
require "json" | |
require_relative "../../json/encoding_test_cases" | |
# These test cases were added to test that we do not interfere with json gem's | |
# output when the AS encoder is loaded, primarily for problems reported in | |
# #20775. They need to be executed in isolation to reproduce the scenario | |
# correctly, because other test cases might have already loaded additional | |
# dependencies. | |
# The AS::JSON encoder requires the BigDecimal core_ext, which, unfortunately, | |
# changes the BigDecimal#to_s output, and consequently the JSON gem output. So | |
# we need to require this upfront to ensure we don't get a false failure, but | |
# ideally we should just fix the BigDecimal core_ext to not change to_s without | |
# arguments. | |
require "active_support/core_ext/big_decimal" | |
# Verifies that loading the Active Support JSON core extensions does not
# change the output of the json gem itself.
class JsonGemEncodingTest < ActiveSupport::TestCase
  include ActiveSupport::Testing::Isolation

  # Generate one test per encoding fixture: each subject must serialize
  # identically before and after the AS core_ext is loaded.
  # (Was `each_with_index` with a single block param — the index was never
  # bound or used, so plain `each` states the intent.)
  JSONTest::EncodingTestCases.constants.each do |name|
    JSONTest::EncodingTestCases.const_get(name).each_with_index do |(subject, _), i|
      test("#{name[0..-6]} #{i}") do
        assert_same_with_or_without_active_support(subject)
      end
    end
  end

  class CustomToJson
    def to_json(*)
      '"custom"'
    end
  end

  test "custom to_json" do
    assert_same_with_or_without_active_support(CustomToJson.new)
  end

  private
    def require_or_skip(file)
      require(file) || skip("'#{file}' was already loaded")
    end

    # Encodes +subject+ with the plain json gem, loads the AS extensions,
    # then asserts the gem's output (or raised error) is unchanged.
    def assert_same_with_or_without_active_support(subject)
      begin
        expected = JSON.generate(subject, quirks_mode: true)
      rescue JSON::GeneratorError => exception
        # Captured directly into +exception+; previously the code assigned it
        # but then relied on the leaked rescue variable `e` below.
      end

      require_or_skip "active_support/core_ext/object/json"

      if exception
        assert_raises_with_message JSON::GeneratorError, exception.message do
          JSON.generate(subject, quirks_mode: true)
        end
      else
        assert_equal expected, JSON.generate(subject, quirks_mode: true)
      end
    end

    def assert_raises_with_message(exception_class, message, &block)
      err = assert_raises(exception_class) { block.call }
      assert_match message, err.message
    end
end
# frozen_string_literal: true | |
module ActiveSupport | |
  # Serializer that can read (and optionally write) both Marshal and JSON
  # payloads, sniffing the format via the Marshal signature bytes. The two
  # class attributes let applications migrate from Marshal to JSON gradually.
  class JsonWithMarshalFallback
    # First two bytes of any Marshal.dump output; used to detect format.
    MARSHAL_SIGNATURE = "\x04\x08"

    # When true, .load accepts legacy Marshal payloads as a fallback.
    cattr_accessor :fallback_to_marshal_deserialization, instance_accessor: false, default: true
    # When true, .dump still produces Marshal payloads instead of JSON.
    cattr_accessor :use_marshal_serialization, instance_accessor: false, default: true

    class << self
      # Rails.logger when available; nil outside a Rails app.
      def logger
        if defined?(Rails) && Rails.respond_to?(:logger)
          Rails.logger
        else
          nil
        end
      end

      def dump(value)
        if self.use_marshal_serialization
          Marshal.dump(value)
        else
          JSON.encode(value)
        end
      end

      # NOTE(review): Marshal.load must only ever see trusted payloads —
      # deserializing attacker-controlled Marshal data can instantiate
      # arbitrary objects.
      def load(value)
        if self.fallback_to_marshal_deserialization
          if value.start_with?(MARSHAL_SIGNATURE)
            logger.warn("JsonWithMarshalFallback: Marshal load fallback occurred.") if logger
            Marshal.load(value)
          else
            JSON.decode(value)
          end
        else
          # Fallback disabled: a Marshal payload is treated as invalid input.
          raise ::JSON::ParserError if value.start_with?(MARSHAL_SIGNATURE)
          JSON.decode(value)
        end
      end
    end
  end
end |
# frozen_string_literal: true | |
require "active_support/core_ext/kernel/concern" | |
require "active_support/core_ext/kernel/reporting" | |
require "active_support/core_ext/kernel/singleton_class" |
# frozen_string_literal: true | |
require_relative "../abstract_unit" | |
require "active_support/core_ext/kernel" | |
# Tests for the Kernel reporting core extensions (silence_warnings,
# enable_warnings) and Object#class_eval.
class KernelTest < ActiveSupport::TestCase
  def test_silence_warnings
    silence_warnings { assert_nil $VERBOSE }
    assert_equal 1234, silence_warnings { 1234 }
  end

  # $VERBOSE must be restored even when the silenced block raises.
  def test_silence_warnings_verbose_invariant
    old_verbose = $VERBOSE
    silence_warnings { raise }
    flunk
  rescue
    assert_equal old_verbose, $VERBOSE
  end

  def test_enable_warnings
    enable_warnings { assert_equal true, $VERBOSE }
    assert_equal 1234, enable_warnings { 1234 }
  end

  def test_enable_warnings_verbose_invariant
    old_verbose = $VERBOSE
    enable_warnings { raise }
    flunk
  rescue
    assert_equal old_verbose, $VERBOSE
  end

  # class_eval on an object evaluates against its singleton class, so the
  # ivar set there is visible.
  def test_class_eval
    o = Object.new
    class << o; @x = 1; end
    assert_equal 1, o.class_eval { @x }
  end
end
# Tests for Kernel#suppress.
class KernelSuppressTest < ActiveSupport::TestCase
  # Exceptions outside the suppressed list must propagate.
  def test_reraise
    assert_raise(LoadError) do
      suppress(ArgumentError) { raise LoadError }
    end
  end

  # Any raised exception matching one of the suppressed classes is swallowed.
  def test_suppression
    assert_nothing_raised do
      [
        [[ArgumentError], ArgumentError],
        [[LoadError], LoadError],
        [[LoadError, ArgumentError], LoadError],
        [[LoadError, ArgumentError], ArgumentError]
      ].each do |suppressed, error_class|
        suppress(*suppressed) { raise error_class }
      end
    end
  end
end
# frozen_string_literal: true | |
require "concurrent/map" | |
require "openssl" | |
module ActiveSupport
  # KeyGenerator is a simple wrapper around OpenSSL's implementation of PBKDF2.
  # It derives any number of keys for various purposes from a given secret,
  # letting an application keep a single secure secret without reusing that
  # key in multiple incompatible contexts.
  class KeyGenerator
    # Sets the digest class used by PBKDF2. Raises ArgumentError unless the
    # argument is an OpenSSL::Digest subclass, so rotation schemes can rely
    # on a validated configuration.
    def self.hash_digest_class=(klass)
      unless klass.is_a?(Class) && klass < OpenSSL::Digest
        raise ArgumentError, "#{klass} is expected to be an OpenSSL::Digest subclass"
      end
      @hash_digest_class = klass
    end

    # Digest class used by default for key derivation (SHA1 unless configured).
    def self.hash_digest_class
      @hash_digest_class ||= OpenSSL::Digest::SHA1
    end

    def initialize(secret, options = {})
      @secret = secret
      # The default iterations are higher than required for our key derivation
      # uses, on the off chance someone uses this for password storage.
      @iterations = options[:iterations] || 2**16
      # A per-instance digest override supports building a rotation scheme
      # when switching the digest class.
      @hash_digest_class = options[:hash_digest_class] || self.class.hash_digest_class
    end

    # Returns a derived key suitable for use. The default key_size is chosen
    # to be compatible with the default settings of ActiveSupport::MessageVerifier,
    # i.e. OpenSSL::Digest::SHA1#block_length.
    def generate_key(salt, key_size = 64)
      OpenSSL::PKCS5.pbkdf2_hmac(@secret, salt, @iterations, key_size, @hash_digest_class.new)
    end
  end

  # CachingKeyGenerator wraps a KeyGenerator and memoizes derived keys, so
  # repeated calls with the same salt and key_size skip the (expensive)
  # derivation.
  class CachingKeyGenerator
    def initialize(key_generator)
      @key_generator = key_generator
      @cache_keys = Concurrent::Map.new
    end

    # Returns a derived key suitable for use, computing it only on the first
    # call for a given (salt, key_size) pair.
    def generate_key(*args)
      # "|" separates the args so ("13", 37) and ("1", 337) cannot collide.
      lookup_key = args.join("|")
      @cache_keys[lookup_key] ||= @key_generator.generate_key(*args)
    end
  end
end
# frozen_string_literal: true | |
require_relative "abstract_unit" | |
begin | |
require "openssl" | |
OpenSSL::PKCS5 | |
rescue LoadError, NameError | |
$stderr.puts "Skipping KeyGenerator test: broken OpenSSL install" | |
else | |
# Unit tests for ActiveSupport::KeyGenerator (PBKDF2 key derivation).
class KeyGeneratorTest < ActiveSupport::TestCase
  # Not an OpenSSL::Digest subclass; used to exercise the setter's validation.
  class InvalidDigest; end

  def setup
    @secret = SecureRandom.hex(64)
    # A low iteration count keeps the tests fast.
    @generator = ActiveSupport::KeyGenerator.new(@secret, iterations: 2)
  end

  test "Generating a key of the default length" do
    derived_key = @generator.generate_key("some_salt")
    assert_kind_of String, derived_key
    assert_equal 64, derived_key.length, "Should have generated a key of the default size"
  end

  test "Generating a key of an alternative length" do
    derived_key = @generator.generate_key("some_salt", 32)
    assert_kind_of String, derived_key
    assert_equal 32, derived_key.length, "Should have generated a key of the right size"
  end

  test "Expected results" do
    # For any given set of inputs, this method must continue to return
    # the same output: if it changes, any existing values relying on a
    # key would break.
    expected = "b129376f68f1ecae788d7433310249d65ceec090ecacd4c872a3a9e9ec78e055739be5cc6956345d5ae38e7e1daa66f1de587dc8da2bf9e8b965af4b3918a122"
    assert_equal expected, ActiveSupport::KeyGenerator.new("0" * 64).generate_key("some_salt").unpack1("H*")

    expected = "b129376f68f1ecae788d7433310249d65ceec090ecacd4c872a3a9e9ec78e055"
    assert_equal expected, ActiveSupport::KeyGenerator.new("0" * 64).generate_key("some_salt", 32).unpack1("H*")

    expected = "cbea7f7f47df705967dc508f4e446fd99e7797b1d70011c6899cd39bbe62907b8508337d678505a7dc8184e037f1003ba3d19fc5d829454668e91d2518692eae"
    assert_equal expected, ActiveSupport::KeyGenerator.new("0" * 64, iterations: 2).generate_key("some_salt").unpack1("H*")
  end

  test "With custom hash digest class" do
    original_hash_digest_class = ActiveSupport::KeyGenerator.hash_digest_class

    ActiveSupport::KeyGenerator.hash_digest_class = ::OpenSSL::Digest::SHA256

    expected = "c92322ad55ee691520e8e0f279b53e7a5cc9c1f8efca98295ae252b04cc6e2274c3aaf75ef53b260a6dc548f3e5fbb8af0edf10e7663cf7054c35bcc12835fc0"
    assert_equal expected, ActiveSupport::KeyGenerator.new("0" * 64).generate_key("some_salt").unpack1("H*")
  ensure
    # Restore the class-level digest so other tests are unaffected.
    ActiveSupport::KeyGenerator.hash_digest_class = original_hash_digest_class
  end

  test "Raises if given a non digest instance" do
    # Both a non-digest class and an instance must be rejected.
    assert_raises(ArgumentError) { ActiveSupport::KeyGenerator.hash_digest_class = InvalidDigest }
    assert_raises(ArgumentError) { ActiveSupport::KeyGenerator.hash_digest_class = InvalidDigest.new }
  end
end
# Unit tests for ActiveSupport::CachingKeyGenerator's memoization behavior.
class CachingKeyGeneratorTest < ActiveSupport::TestCase
  def setup
    @secret = SecureRandom.hex(64)
    @generator = ActiveSupport::KeyGenerator.new(@secret, iterations: 2)
    @caching_generator = ActiveSupport::CachingKeyGenerator.new(@generator)
  end

  test "Generating a cached key for same salt and key size" do
    derived_key = @caching_generator.generate_key("some_salt", 32)
    cached_key = @caching_generator.generate_key("some_salt", 32)

    assert_equal derived_key, cached_key
    # Identical object_id proves the second value came from the cache.
    assert_equal derived_key.object_id, cached_key.object_id
  end

  test "Does not cache key for different salt" do
    derived_key = @caching_generator.generate_key("some_salt", 32)
    different_salt_key = @caching_generator.generate_key("other_salt", 32)

    assert_not_equal derived_key, different_salt_key
  end

  test "Does not cache key for different length" do
    derived_key = @caching_generator.generate_key("some_salt", 32)
    different_length_key = @caching_generator.generate_key("some_salt", 64)

    assert_not_equal derived_key, different_length_key
  end

  test "Does not cache key for different salts and lengths that are different but are equal when concatenated" do
    # Guards against naive cache keys like "#{salt}#{size}":
    # "13" + "37" and "1" + "337" would collide without a separator.
    derived_key = @caching_generator.generate_key("13", 37)
    different_length_key = @caching_generator.generate_key("1", 337)

    assert_not_equal derived_key, different_length_key
  end
end
end |
# frozen_string_literal: true | |
class Hash
  # Returns a new hash with all keys converted to strings.
  #
  #   hash = { name: 'Rob', age: '28' }
  #
  #   hash.stringify_keys
  #   # => {"name"=>"Rob", "age"=>"28"}
  def stringify_keys
    transform_keys { |key| key.to_s }
  end

  # Destructively converts all keys to strings. Same as
  # +stringify_keys+, but modifies +self+.
  def stringify_keys!
    transform_keys! { |key| key.to_s }
  end

  # Returns a new hash with all keys converted to symbols, as long as
  # they respond to +to_sym+ (keys that cannot be symbolized are kept).
  #
  #   hash = { 'name' => 'Rob', 'age' => '28' }
  #
  #   hash.symbolize_keys
  #   # => {:name=>"Rob", :age=>"28"}
  def symbolize_keys
    transform_keys do |key|
      begin
        key.to_sym
      rescue StandardError
        key
      end
    end
  end
  alias_method :to_options, :symbolize_keys

  # Destructively converts all keys to symbols, as long as they respond
  # to +to_sym+. Same as +symbolize_keys+, but modifies +self+.
  def symbolize_keys!
    transform_keys! do |key|
      begin
        key.to_sym
      rescue StandardError
        key
      end
    end
  end
  alias_method :to_options!, :symbolize_keys!

  # Validates all keys in a hash match <tt>*valid_keys</tt>, raising
  # +ArgumentError+ on a mismatch.
  #
  # Note that keys are treated differently than HashWithIndifferentAccess,
  # meaning that string and symbol keys will not match.
  #
  #   { name: 'Rob', years: '28' }.assert_valid_keys(:name, :age) # => raises "ArgumentError: Unknown key: :years. Valid keys are: :name, :age"
  #   { name: 'Rob', age: '28' }.assert_valid_keys('name', 'age') # => raises "ArgumentError: Unknown key: :name. Valid keys are: 'name', 'age'"
  #   { name: 'Rob', age: '28' }.assert_valid_keys(:name, :age)   # => passes, raises nothing
  def assert_valid_keys(*valid_keys)
    valid_keys.flatten!
    each_key do |key|
      next if valid_keys.include?(key)
      raise ArgumentError.new("Unknown key: #{key.inspect}. Valid keys are: #{valid_keys.map(&:inspect).join(', ')}")
    end
  end

  # Returns a new hash with all keys converted by the block operation.
  # This includes the keys from the root hash and from all
  # nested hashes and arrays.
  #
  #   hash = { person: { name: 'Rob', age: '28' } }
  #
  #   hash.deep_transform_keys{ |key| key.to_s.upcase }
  #   # => {"PERSON"=>{"NAME"=>"Rob", "AGE"=>"28"}}
  def deep_transform_keys(&block)
    _deep_transform_keys_in_object(self, &block)
  end

  # Destructively converts all keys by using the block operation.
  # This includes the keys from the root hash and from all
  # nested hashes and arrays.
  def deep_transform_keys!(&block)
    _deep_transform_keys_in_object!(self, &block)
  end

  # Returns a new hash with all keys converted to strings.
  # This includes the keys from the root hash and from all
  # nested hashes and arrays.
  #
  #   hash = { person: { name: 'Rob', age: '28' } }
  #
  #   hash.deep_stringify_keys
  #   # => {"person"=>{"name"=>"Rob", "age"=>"28"}}
  def deep_stringify_keys
    deep_transform_keys { |key| key.to_s }
  end

  # Destructively converts all keys to strings.
  # This includes the keys from the root hash and from all
  # nested hashes and arrays.
  def deep_stringify_keys!
    deep_transform_keys! { |key| key.to_s }
  end

  # Returns a new hash with all keys converted to symbols, as long as
  # they respond to +to_sym+. This includes the keys from the root hash
  # and from all nested hashes and arrays.
  #
  #   hash = { 'person' => { 'name' => 'Rob', 'age' => '28' } }
  #
  #   hash.deep_symbolize_keys
  #   # => {:person=>{:name=>"Rob", :age=>"28"}}
  def deep_symbolize_keys
    deep_transform_keys { |key| key.to_sym rescue key }
  end

  # Destructively converts all keys to symbols, as long as they respond
  # to +to_sym+. This includes the keys from the root hash and from all
  # nested hashes and arrays.
  def deep_symbolize_keys!
    deep_transform_keys! { |key| key.to_sym rescue key }
  end

  private
    # Recursively builds a fresh structure, transforming every hash key via
    # the block; arrays are walked element-wise, scalars pass through.
    def _deep_transform_keys_in_object(object, &block)
      if object.is_a?(Hash)
        transformed = self.class.new
        object.each do |key, value|
          transformed[yield(key)] = _deep_transform_keys_in_object(value, &block)
        end
        transformed
      elsif object.is_a?(Array)
        object.map { |element| _deep_transform_keys_in_object(element, &block) }
      else
        object
      end
    end

    # In-place variant: each key is removed and re-inserted under its
    # transformed name, with values recursed the same way.
    def _deep_transform_keys_in_object!(object, &block)
      if object.is_a?(Hash)
        object.keys.each do |key|
          value = object.delete(key)
          object[yield(key)] = _deep_transform_keys_in_object!(value, &block)
        end
        object
      elsif object.is_a?(Array)
        object.map! { |element| _deep_transform_keys_in_object!(element, &block) }
      else
        object
      end
    end
end
# frozen_string_literal: true | |
module ActiveSupport
  # lazy_load_hooks allows Rails to lazily load a lot of components and thus
  # making the app boot faster. Because of this feature now there is no need to
  # require <tt>ActiveRecord::Base</tt> at boot time purely to apply
  # configuration. Instead a hook is registered that applies configuration once
  # <tt>ActiveRecord::Base</tt> is loaded. Here <tt>ActiveRecord::Base</tt> is
  # used as example but this feature can be applied elsewhere too.
  #
  # Here is an example where +on_load+ method is called to register a hook.
  #
  #   initializer 'active_record.initialize_timezone' do
  #     ActiveSupport.on_load(:active_record) do
  #       self.time_zone_aware_attributes = true
  #       self.default_timezone = :utc
  #     end
  #   end
  #
  # When the entirety of +ActiveRecord::Base+ has been
  # evaluated then +run_load_hooks+ is invoked. The very last line of
  # +ActiveRecord::Base+ is:
  #
  #   ActiveSupport.run_load_hooks(:active_record, ActiveRecord::Base)
  module LazyLoadHooks
    def self.extended(base) # :nodoc:
      base.class_eval do
        # name => registered [block, options] pairs
        @load_hooks = Hash.new { |h, k| h[k] = [] }
        # name => bases that have already run their load hooks
        @loaded = Hash.new { |h, k| h[k] = [] }
        # name => blocks that must never run again (run_once bookkeeping)
        @run_once = Hash.new { |h, k| h[k] = [] }
      end
    end

    # Declares a block that will be executed when a Rails component is fully
    # loaded. If the component has already loaded, the block fires immediately.
    #
    # Options:
    #
    # * <tt>:yield</tt> - Yields the object that run_load_hooks to +block+.
    # * <tt>:run_once</tt> - Given +block+ will run only once.
    def on_load(name, options = {}, &block)
      # Fire right away for every base that already ran its load hooks...
      @loaded[name].each { |base| execute_hook(name, base, options, block) }
      # ...and register for bases that load later.
      @load_hooks[name] << [block, options]
    end

    # Executes all hooks registered for +name+ against +base+, and records
    # +base+ so later registrations fire immediately.
    def run_load_hooks(name, base = Object)
      @loaded[name] << base
      @load_hooks[name].each { |hook, options| execute_hook(name, base, options, hook) }
    end

    private
      # Skips blocks already marked as run; records the block when +once+.
      def with_execution_control(name, block, once)
        return if @run_once[name].include?(block)
        @run_once[name] << block if once
        yield
      end

      # Runs one hook: yields the base when requested, otherwise evaluates
      # the block in the base's context (class_eval for modules/classes,
      # instance_eval for plain objects).
      def execute_hook(name, base, options, block)
        with_execution_control(name, block, options[:run_once]) do
          if options[:yield]
            block.call(base)
          elsif base.is_a?(Module)
            base.class_eval(&block)
          else
            base.instance_eval(&block)
          end
        end
      end
  end

  extend LazyLoadHooks
end
# frozen_string_literal: true | |
require_relative "abstract_unit" | |
require "active_support/core_ext/module/remove_method" | |
# Tests for ActiveSupport::LazyLoadHooks (on_load / run_load_hooks).
#
# NOTE: hook names live in a process-global registry, so every test uses a
# unique hook name to stay independent of the others.
class LazyLoadHooksTest < ActiveSupport::TestCase
  def test_basic_hook
    i = 0
    ActiveSupport.on_load(:basic_hook) { i += 1 }
    ActiveSupport.run_load_hooks(:basic_hook)
    assert_equal 1, i
  end

  def test_basic_hook_with_two_registrations
    i = 0
    # Hook bodies are evaluated in the context of the base passed to
    # run_load_hooks, so +incr+ resolves to FakeContext#incr.
    ActiveSupport.on_load(:basic_hook_with_two) { i += incr }
    assert_equal 0, i
    ActiveSupport.run_load_hooks(:basic_hook_with_two, FakeContext.new(2))
    assert_equal 2, i
    ActiveSupport.run_load_hooks(:basic_hook_with_two, FakeContext.new(5))
    assert_equal 7, i
  end

  def test_basic_hook_with_two_registrations_only_once
    i = 0
    block = proc { i += incr }
    ActiveSupport.on_load(:basic_hook_with_two_once, run_once: true, &block)
    ActiveSupport.on_load(:basic_hook_with_two_once) do
      i += incr
    end
    # run_once bookkeeping is per hook name: the same block registered under
    # another name still runs there.
    ActiveSupport.on_load(:different_hook, run_once: true, &block)
    ActiveSupport.run_load_hooks(:different_hook, FakeContext.new(2))
    assert_equal 2, i
    ActiveSupport.run_load_hooks(:basic_hook_with_two_once, FakeContext.new(2))
    assert_equal 6, i
    # On the second run the run_once block is skipped; only the plain block
    # fires (+5).
    ActiveSupport.run_load_hooks(:basic_hook_with_two_once, FakeContext.new(5))
    assert_equal 11, i
  end

  def test_hook_registered_after_run
    i = 0
    ActiveSupport.run_load_hooks(:registered_after)
    assert_equal 0, i
    # Registering after the component loaded fires the hook immediately.
    ActiveSupport.on_load(:registered_after) { i += 1 }
    assert_equal 1, i
  end

  def test_hook_registered_after_run_with_two_registrations
    i = 0
    ActiveSupport.run_load_hooks(:registered_after_with_two, FakeContext.new(2))
    ActiveSupport.run_load_hooks(:registered_after_with_two, FakeContext.new(5))
    assert_equal 0, i
    # The late hook fires once per previously-run context: 2 + 5.
    ActiveSupport.on_load(:registered_after_with_two) { i += incr }
    assert_equal 7, i
  end

  def test_hook_registered_after_run_with_two_registrations_only_once
    i = 0
    ActiveSupport.run_load_hooks(:registered_after_with_two_once, FakeContext.new(2))
    ActiveSupport.run_load_hooks(:registered_after_with_two_once, FakeContext.new(5))
    assert_equal 0, i
    # With run_once, only the first previously-run context triggers the block.
    ActiveSupport.on_load(:registered_after_with_two_once, run_once: true) { i += incr }
    assert_equal 2, i
  end

  def test_hook_registered_interleaved_run_with_two_registrations
    i = 0
    ActiveSupport.run_load_hooks(:registered_interleaved_with_two, FakeContext.new(2))
    assert_equal 0, i
    ActiveSupport.on_load(:registered_interleaved_with_two) { i += incr }
    assert_equal 2, i
    ActiveSupport.run_load_hooks(:registered_interleaved_with_two, FakeContext.new(5))
    assert_equal 7, i
  end

  def test_hook_registered_interleaved_run_with_two_registrations_once
    i = 0
    ActiveSupport
      .run_load_hooks(:registered_interleaved_with_two_once, FakeContext.new(2))
    assert_equal 0, i
    ActiveSupport.on_load(:registered_interleaved_with_two_once, run_once: true) do
      i += incr
    end
    assert_equal 2, i
    # The block already ran, so a later load must not re-trigger it.
    ActiveSupport
      .run_load_hooks(:registered_interleaved_with_two_once, FakeContext.new(5))
    assert_equal 2, i
  end

  def test_hook_receives_a_context
    i = 0
    ActiveSupport.on_load(:contextual) { i += incr }
    assert_equal 0, i
    ActiveSupport.run_load_hooks(:contextual, FakeContext.new(2))
    assert_equal 2, i
  end

  def test_hook_receives_a_context_afterward
    i = 0
    ActiveSupport.run_load_hooks(:contextual_after, FakeContext.new(2))
    assert_equal 0, i
    ActiveSupport.on_load(:contextual_after) { i += incr }
    assert_equal 2, i
  end

  def test_hook_with_yield_true
    i = 0
    # With yield: true the context is passed as a block argument rather than
    # becoming the eval receiver, so +incr_amt+ resolves on the test itself.
    ActiveSupport.on_load(:contextual_yield, yield: true) do |obj|
      i += obj.incr + incr_amt
    end
    assert_equal 0, i
    ActiveSupport.run_load_hooks(:contextual_yield, FakeContext.new(2))
    assert_equal 7, i
  end

  def test_hook_with_yield_true_afterward
    i = 0
    ActiveSupport.run_load_hooks(:contextual_yield_after, FakeContext.new(2))
    assert_equal 0, i
    ActiveSupport.on_load(:contextual_yield_after, yield: true) do |obj|
      i += obj.incr + incr_amt
    end
    assert_equal 7, i
  end

  def test_hook_uses_class_eval_when_base_is_a_class
    # class_eval on a Class defines an instance method on it.
    ActiveSupport.on_load(:uses_class_eval) do
      def first_wrestler
        "John Cena"
      end
    end
    ActiveSupport.run_load_hooks(:uses_class_eval, FakeContext)
    assert_equal "John Cena", FakeContext.new(0).first_wrestler
  ensure
    # Undo the method definition so other tests see a pristine FakeContext.
    FakeContext.remove_possible_method(:first_wrestler)
  end

  def test_hook_uses_class_eval_when_base_is_a_module
    mod = Module.new
    ActiveSupport.on_load(:uses_class_eval2) do
      def last_wrestler
        "Dwayne Johnson"
      end
    end
    ActiveSupport.run_load_hooks(:uses_class_eval2, mod)
    klass = Class.new do
      include mod
    end
    assert_equal "Dwayne Johnson", klass.new.last_wrestler
  end

  def test_hook_uses_instance_eval_when_base_is_an_instance
    ActiveSupport.on_load(:uses_instance_eval) do
      def second_wrestler
        "Hulk Hogan"
      end
    end
    context = FakeContext.new(1)
    ActiveSupport.run_load_hooks(:uses_instance_eval, context)
    # instance_eval defines the method on the singleton, so only +context+
    # gains it — not other instances, not the class.
    assert_raises NoMethodError do
      FakeContext.new(2).second_wrestler
    end
    assert_raises NoMethodError do
      FakeContext.second_wrestler
    end
    assert_equal "Hulk Hogan", context.second_wrestler
  end

  private
    # Extra amount added by the yield-style hooks above.
    def incr_amt
      5
    end

    # Minimal stand-in for a loaded component; exposes the increment the
    # hooks should apply.
    class FakeContext
      attr_reader :incr

      def initialize(incr)
        @incr = incr
      end
    end
end
# frozen_string_literal: true | |
# NOTE(review): looks like a load-path/autoloading test fixture — it defines a
# top-level constant and a namespaced class for require tests to observe;
# verify against the test that loads this file.
ConstFromLib = 1

module ModuleFolder
  class LibClass
  end
end
# frozen_string_literal: true | |
require "libxml" | |
require "active_support/core_ext/object/blank" | |
require "stringio" | |
module ActiveSupport
  module XmlMini_LibXML # :nodoc:
    extend self

    # Parse an XML Document string or IO into a simple hash using libxml.
    # data::
    #   XML Document string or IO to parse
    def parse(data)
      # Normalize strings (and nil) into an IO-like object.
      data = StringIO.new(data || "") unless data.respond_to?(:read)
      # Nothing to read means an empty result, not a parse error.
      return {} if data.eof?
      LibXML::XML::Parser.io(data).parse.to_hash
    end
  end
end
module LibXML # :nodoc:
  module Conversions # :nodoc:
    module Document # :nodoc:
      # A document converts via its root element.
      def to_hash
        root.to_hash
      end
    end

    module Node # :nodoc:
      # Key under which an element's text/CDATA content is collected.
      CONTENT_ROOT = "__content__"

      # Convert XML document to hash.
      #
      # hash::
      #   Hash to merge the converted element into.
      def to_hash(hash = {})
        node_hash = {}

        # Insert node hash into parent hash correctly: repeated element
        # names are promoted to an array of hashes.
        case hash[name]
        when Array then hash[name] << node_hash
        when Hash then hash[name] = [hash[name], node_hash]
        when nil then hash[name] = node_hash
        end

        # Handle child elements: recurse into elements, accumulate
        # text/CDATA under CONTENT_ROOT.
        each_child do |c|
          if c.element?
            c.to_hash(node_hash)
          elsif c.text? || c.cdata?
            node_hash[CONTENT_ROOT] ||= +""
            node_hash[CONTENT_ROOT] << c.content
          end
        end

        # Remove the content entry when it is blank and the element has
        # other entries (i.e. child elements).
        if node_hash.length > 1 && node_hash[CONTENT_ROOT].blank?
          node_hash.delete(CONTENT_ROOT)
        end

        # Handle attributes (added after the blank-content pruning above,
        # so attributes alone don't keep blank content alive).
        each_attr { |a| node_hash[a.name] = a.value }

        hash
      end
    end
  end
end

# :enddoc:

LibXML::XML::Document.include(LibXML::Conversions::Document)
LibXML::XML::Node.include(LibXML::Conversions::Node)
# frozen_string_literal: true | |
require_relative "xml_mini_engine_test" | |
XMLMiniEngineTest.run_with_gem("libxml") do
  # Engine-conformance tests for the DOM-based LibXML XmlMini backend;
  # only defined/run when the libxml gem is available.
  class LibxmlEngineTest < XMLMiniEngineTest
    def setup
      super
      LibXML::XML::Error.set_handler(&lambda { |error| }) # silence libxml, exceptions will do
    end

    private
      # Name of the XmlMini backend under test.
      def engine
        "LibXML"
      end

      # Error class the shared suite expects from the expansion-attack
      # scenario (see XMLMiniEngineTest).
      def expansion_attack_error
        LibXML::XML::Error
      end
  end
end
# frozen_string_literal: true | |
require "libxml" | |
require "active_support/core_ext/object/blank" | |
require "stringio" | |
module ActiveSupport
  module XmlMini_LibXMLSAX # :nodoc:
    extend self

    # Class that will build the hash while the XML document
    # is being parsed using SAX events.
    class HashBuilder
      include LibXML::XML::SaxParser::Callbacks

      # Key under which an element's text/CDATA content is accumulated.
      CONTENT_KEY = "__content__"
      # Bookkeeping key: hash size recorded at start-element time, used on
      # end-element to detect whether children/attributes were added.
      HASH_SIZE_KEY = "__hash_size__"

      attr_reader :hash

      # Hash for the element currently being parsed (top of the stack).
      def current_hash
        @hash_stack.last
      end

      def on_start_document
        @hash = { CONTENT_KEY => +"" }
        @hash_stack = [@hash]
      end

      def on_end_document
        @hash = @hash_stack.pop
        @hash.delete(CONTENT_KEY)
      end

      def on_start_element(name, attrs = {})
        new_hash = { CONTENT_KEY => +"" }.merge!(attrs)
        new_hash[HASH_SIZE_KEY] = new_hash.size + 1

        # Merge into the parent, promoting repeated element names to arrays.
        case current_hash[name]
        when Array then current_hash[name] << new_hash
        when Hash then current_hash[name] = [current_hash[name], new_hash]
        when nil then current_hash[name] = new_hash
        end

        @hash_stack.push(new_hash)
      end

      def on_end_element(name)
        # Drop the content entry when the element gained children (size grew
        # past the recorded baseline) and the content is blank, or when the
        # content is the empty string. Precedence is deliberate:
        # (length > size && blank?) || content == "".
        if current_hash.length > current_hash.delete(HASH_SIZE_KEY) && current_hash[CONTENT_KEY].blank? || current_hash[CONTENT_KEY] == ""
          current_hash.delete(CONTENT_KEY)
        end
        @hash_stack.pop
      end

      def on_characters(string)
        current_hash[CONTENT_KEY] << string
      end

      alias_method :on_cdata_block, :on_characters
    end

    # SAX callback class used by parse; replaceable for customization.
    attr_accessor :document_class
    self.document_class = HashBuilder

    # Parse an XML Document string or IO into a simple hash via SAX events.
    def parse(data)
      if !data.respond_to?(:read)
        data = StringIO.new(data || "")
      end

      if data.eof?
        {}
      else
        # Silence libxml's error output; failures surface as exceptions.
        LibXML::XML::Error.set_handler(&LibXML::XML::Error::QUIET_HANDLER)
        parser = LibXML::XML::SaxParser.io(data)
        document = document_class.new

        parser.callbacks = document
        parser.parse
        document.hash
      end
    end
  end
end
# frozen_string_literal: true | |
require_relative "xml_mini_engine_test" | |
XMLMiniEngineTest.run_with_gem("libxml") do
  # Engine-conformance tests for the SAX-based LibXML XmlMini backend;
  # only defined/run when the libxml gem is available.
  class LibXMLSAXEngineTest < XMLMiniEngineTest
    private
      # Name of the XmlMini backend under test.
      def engine
        "LibXMLSAX"
      end

      # Error class the shared suite expects from the expansion-attack
      # scenario (see XMLMiniEngineTest).
      def expansion_attack_error
        LibXML::XML::Error
      end
  end
end
# frozen_string_literal: true | |
class LoadError
  # Returns true if the given path name (except perhaps for the ".rb"
  # extension) is the missing file which caused the exception to be raised.
  def is_missing?(location)
    requested = location.delete_suffix(".rb")
    # +path+ may be nil (e.g. on a manually constructed LoadError), so
    # normalize through +to_s+ before comparing.
    missing = path.to_s.delete_suffix(".rb")
    requested == missing
  end
end
# frozen_string_literal: true | |
require_relative "../abstract_unit" | |
require "active_support/core_ext/load_error" | |
# Tests for LoadError behavior and the +is_missing?+ core extension.
class TestLoadError < ActiveSupport::TestCase
  def test_with_require
    assert_raise(LoadError) do
      require "no_this_file_don't_exist"
    end
  end

  def test_with_load
    assert_raise(LoadError) do
      load "nor_does_this_one"
    end
  end

  def test_path
    begin
      load "nor/this/one.rb"
    rescue LoadError => e
      # The exception records the path that failed to load.
      assert_equal "nor/this/one.rb", e.path
    end
  end

  def test_is_missing_with_nil_path
    error = LoadError.new(nil)
    # is_missing? must tolerate a nil path instead of raising.
    assert_nothing_raised { error.is_missing?("anything") }
  end
end
# frozen_string_literal: true | |
require "monitor" | |
module ActiveSupport
  module Concurrency
    # A monitor that will permit dependency loading while blocked waiting for
    # the lock.
    class LoadInterlockAwareMonitor < Monitor
      # Interrupt-timing maps for Thread.handle_interrupt below.
      EXCEPTION_NEVER = { Exception => :never }.freeze
      EXCEPTION_IMMEDIATE = { Exception => :immediate }.freeze
      private_constant :EXCEPTION_NEVER, :EXCEPTION_IMMEDIATE

      # Enters an exclusive section, but allows dependency loading while blocked
      def mon_enter
        # Fast path: take the lock without blocking when possible. Only if
        # that fails do we block — and then with concurrent loads permitted,
        # so a thread holding the load interlock can still make progress.
        mon_try_enter ||
          ActiveSupport::Dependencies.interlock.permit_concurrent_loads { super }
      end

      def synchronize(&block)
        # Interrupts are deferred while acquiring and releasing the lock so
        # the monitor can never be left locked by an ill-timed exception; the
        # user block itself runs with interrupts re-enabled.
        Thread.handle_interrupt(EXCEPTION_NEVER) do
          mon_enter

          begin
            Thread.handle_interrupt(EXCEPTION_IMMEDIATE, &block)
          ensure
            mon_exit
          end
        end
      end
    end
  end
end
# frozen_string_literal: true | |
require_relative "../abstract_unit" | |
require "concurrent/atomic/count_down_latch" | |
require "active_support/concurrency/load_interlock_aware_monitor" | |
module ActiveSupport
  module Concurrency
    class LoadInterlockAwareMonitorTest < ActiveSupport::TestCase
      def setup
        @monitor = ActiveSupport::Concurrency::LoadInterlockAwareMonitor.new
      end

      def test_entering_with_no_blocking
        assert @monitor.mon_enter
      end

      # Reproduces the deadlock-prone interleaving: one thread holds the load
      # interlock and wants the monitor; the other holds the monitor and wants
      # to load. The latches force each thread to acquire its first resource
      # before either requests the second, so the test only passes if the
      # monitor permits loading while blocked.
      def test_entering_with_blocking
        load_interlock_latch = Concurrent::CountDownLatch.new
        monitor_latch = Concurrent::CountDownLatch.new

        able_to_use_monitor = false
        able_to_load = false

        thread_with_load_interlock = Thread.new do
          ActiveSupport::Dependencies.interlock.running do
            load_interlock_latch.count_down
            monitor_latch.wait

            @monitor.synchronize do
              able_to_use_monitor = true
            end
          end
        end

        thread_with_monitor_lock = Thread.new do
          @monitor.synchronize do
            monitor_latch.count_down
            load_interlock_latch.wait

            ActiveSupport::Dependencies.interlock.loading do
              able_to_load = true
            end
          end
        end

        thread_with_load_interlock.join
        thread_with_monitor_lock.join

        assert able_to_use_monitor
        assert able_to_load
      end
    end
  end
end
# frozen_string_literal: true | |
require "active_support/core_ext/string/inflections" | |
module ActiveSupport
  module Cache
    module Strategy
      # Caches that implement LocalCache will be backed by an in-memory cache for the
      # duration of a block. Repeated calls to the cache for the same key will hit the
      # in-memory cache for faster access.
      module LocalCache
        autoload :Middleware, "active_support/cache/strategy/local_cache_middleware"

        # Class for storing and registering the local caches.
        module LocalCacheRegistry # :nodoc:
          extend self

          # Returns the local cache registered under +local_cache_key+ for
          # the current execution context, or nil when none is active.
          def cache_for(local_cache_key)
            registry = ActiveSupport::IsolatedExecutionState[:active_support_local_cache_registry] ||= {}
            registry[local_cache_key]
          end

          # Registers +value+ (a local store, or nil to deactivate) under
          # +local_cache_key+ for the current execution context.
          def set_cache_for(local_cache_key, value)
            registry = ActiveSupport::IsolatedExecutionState[:active_support_local_cache_registry] ||= {}
            registry[local_cache_key] = value
          end
        end

        # Simple memory backed cache. This cache is not thread safe and is intended only
        # for serving as a temporary memory cache for a single thread.
        class LocalStore
          def initialize
            @data = {}
          end

          # +options+ accepted for interface parity; everything is dropped.
          def clear(options = nil)
            @data.clear
          end

          def read_entry(key)
            @data[key]
          end

          # Returns only the entries present locally for +keys+.
          def read_multi_entries(keys)
            @data.slice(*keys)
          end

          def write_entry(key, entry)
            @data[key] = entry
            true
          end

          # Returns true when a key was actually removed.
          def delete_entry(key)
            !!@data.delete(key)
          end

          # Reads the entry, populating it from the block on a miss.
          def fetch_entry(key) # :nodoc:
            @data.fetch(key) { @data[key] = yield }
          end
        end

        # Use a local cache for the duration of block.
        def with_local_cache(&block)
          use_temporary_local_cache(LocalStore.new, &block)
        end

        # Middleware class can be inserted as a Rack handler to be local cache for the
        # duration of request.
        def middleware
          @middleware ||= Middleware.new(
            "ActiveSupport::Cache::Strategy::LocalCache",
            local_cache_key)
        end

        def clear(**options) # :nodoc:
          return super unless cache = local_cache
          cache.clear(options)
          super
        end

        def cleanup(**options) # :nodoc:
          # The local cache is cleared wholesale; the backing store performs
          # its own cleanup via super.
          return super unless cache = local_cache
          cache.clear
          super
        end

        def delete_matched(matcher, options = nil) # :nodoc:
          # The local store cannot match patterns, so it is cleared entirely.
          return super unless cache = local_cache
          cache.clear
          super
        end

        def increment(name, amount = 1, **options) # :nodoc:
          return super unless local_cache
          # Perform the increment against the real store, then mirror the
          # resulting value into the local cache as a raw entry.
          value = bypass_local_cache { super }
          write_cache_value(name, value, raw: true, **options)
          value
        end

        def decrement(name, amount = 1, **options) # :nodoc:
          return super unless local_cache
          value = bypass_local_cache { super }
          write_cache_value(name, value, raw: true, **options)
          value
        end

        private
          # Serves reads from the local store, falling through (and caching
          # the result) on a miss.
          def read_serialized_entry(key, raw: false, **options)
            if cache = local_cache
              hit = true
              entry = cache.fetch_entry(key) do
                hit = false
                super
              end
              # On a local hit, report the local store as the source in
              # instrumentation payloads.
              options[:event][:store] = cache.class.name if hit && options[:event]
              entry
            else
              super
            end
          end

          def read_multi_entries(keys, **options)
            return super unless local_cache

            local_entries = local_cache.read_multi_entries(keys)
            local_entries.transform_values! do |payload|
              deserialize_entry(payload).value
            end
            missed_keys = keys - local_entries.keys

            # Only the keys missing locally are fetched from the backend.
            if missed_keys.any?
              local_entries.merge!(super(missed_keys, **options))
            else
              local_entries
            end
          end

          def write_serialized_entry(key, payload, **)
            if return_value = super
              local_cache.write_entry(key, payload) if local_cache
            else
              # A failed backend write invalidates any stale local copy.
              local_cache.delete_entry(key) if local_cache
            end
            return_value
          end

          def delete_entry(key, **)
            local_cache.delete_entry(key) if local_cache
            super
          end

          # Mirrors a raw value (e.g. a counter) into the local cache, or
          # removes the entry when the value is falsy.
          def write_cache_value(name, value, **options)
            name = normalize_key(name, options)
            cache = local_cache
            if value
              cache.write_entry(name, serialize_entry(new_entry(value, **options), **options))
            else
              cache.delete_entry(name)
            end
          end

          # Registry key identifying this store instance: unique per class
          # and object_id, with "/" and "-" normalized for symbol safety.
          def local_cache_key
            @local_cache_key ||= "#{self.class.name.underscore}_local_cache_#{object_id}".gsub(/[\/-]/, "_").to_sym
          end

          def local_cache
            LocalCacheRegistry.cache_for(local_cache_key)
          end

          # Runs the block with no local cache active (reads/writes go
          # straight to the backend).
          def bypass_local_cache(&block)
            use_temporary_local_cache(nil, &block)
          end

          # Swaps in +temporary_cache+ for the duration of the block and
          # always restores the previously active cache afterwards.
          def use_temporary_local_cache(temporary_cache)
            save_cache = LocalCacheRegistry.cache_for(local_cache_key)
            begin
              LocalCacheRegistry.set_cache_for(local_cache_key, temporary_cache)
              yield
            ensure
              LocalCacheRegistry.set_cache_for(local_cache_key, save_cache)
            end
          end
      end
    end
  end
end
# frozen_string_literal: true | |
# Shared behavior tests for cache stores wrapped with the LocalCache
# strategy. Including test classes are expected to provide:
#   @cache - the store under test (local-cache enabled)
#   @peek  - presumably a second client for the same backing store, used to
#            observe or mutate remote data behind @cache's back
#            -- NOTE(review): confirm against the including test classes.
module LocalCacheBehavior
  # The instrumentation payload should name whichever store served the call:
  # the real store for the first read, the local cache for the repeat read.
  def test_instrumentation_with_local_cache
    key = SecureRandom.uuid
    events = with_instrumentation "write" do
      @cache.write(key, SecureRandom.uuid)
    end
    assert_equal @cache.class.name, events[0].payload[:store]

    @cache.with_local_cache do
      events = with_instrumentation "read" do
        @cache.read(key)
        @cache.read(key)
      end
      expected = [@cache.class.name, @cache.send(:local_cache).class.name]
      assert_equal expected, events.map { |p| p.payload[:store] }
    end
  end

  # Writes made inside the local-cache block must reach the backing store.
  def test_local_writes_are_persistent_on_the_remote_cache
    key = SecureRandom.uuid
    value = SecureRandom.alphanumeric
    retval = @cache.with_local_cache do
      @cache.write(key, value)
    end
    assert retval
    assert_equal value, @cache.read(key)
  end

  def test_clear_also_clears_local_cache
    key = SecureRandom.uuid
    @cache.with_local_cache do
      @cache.write(key, SecureRandom.alphanumeric)
      @cache.clear
      assert_nil @cache.read(key)
    end
    # Verify the clear reached the backing store too.
    assert_nil @cache.read(key)
  end

  def test_cleanup_clears_local_cache_but_not_remote_cache
    # Stores without #cleanup support raise NotImplementedError; skip them.
    begin
      @cache.cleanup
    rescue NotImplementedError
      skip
    end

    key = SecureRandom.uuid
    value = SecureRandom.alphanumeric
    other_value = SecureRandom.alphanumeric
    @cache.with_local_cache do
      @cache.write(key, value)
      assert_equal value, @cache.read(key)
      # Change the remote value without touching the local cache...
      @cache.send(:bypass_local_cache) { @cache.write(key, other_value) }
      # ...the stale local value still masks it...
      assert_equal value, @cache.read(key)
      @cache.cleanup
      # ...until cleanup drops the local copy and the remote value shows.
      assert_equal other_value, @cache.read(key)
    end
  end

  # A value deleted remotely (via @peek) is still served from the local cache.
  def test_local_cache_of_write
    key = SecureRandom.uuid
    value = SecureRandom.alphanumeric
    @cache.with_local_cache do
      @cache.write(key, value)
      @peek.delete(key)
      assert_equal value, @cache.read(key)
    end
  end

  # Mutating a read value must not corrupt the locally cached entry.
  def test_local_cache_of_read_returns_a_copy_of_the_entry
    key = SecureRandom.alphanumeric.to_sym
    value = SecureRandom.alphanumeric
    @cache.with_local_cache do
      @cache.write(key, type: value)
      local_value = @cache.read(key)
      assert_equal(value, local_value.delete(:type))
      assert_equal({ type: value }, @cache.read(key))
    end
  end

  def test_local_cache_of_read
    key = SecureRandom.uuid
    value = SecureRandom.alphanumeric
    @cache.write(key, value)
    @cache.with_local_cache do
      assert_equal value, @cache.read(key)
    end
  end

  # A cache miss is itself cached locally: a later remote write is not seen.
  def test_local_cache_of_read_nil
    key = SecureRandom.uuid
    value = SecureRandom.alphanumeric
    @cache.with_local_cache do
      assert_nil @cache.read(key)
      @cache.send(:bypass_local_cache) { @cache.write(key, value) }
      assert_nil @cache.read(key)
    end
  end

  # Exercises the local store's write_entry/fetch_entry primitives directly.
  def test_local_cache_fetch
    key = SecureRandom.uuid
    value = SecureRandom.alphanumeric
    @cache.with_local_cache do
      @cache.send(:local_cache).write_entry(key, value)
      assert_equal value, @cache.send(:local_cache).fetch_entry(key)
    end
  end

  # Writing nil caches the nil locally, masking remote writes made via @peek.
  def test_local_cache_of_write_nil
    key = SecureRandom.uuid
    value = SecureRandom.alphanumeric
    @cache.with_local_cache do
      assert @cache.write(key, nil)
      assert_nil @cache.read(key)
      @peek.write(key, value)
      assert_nil @cache.read(key)
    end
  end

  # unless_exist writes must leave the local cache consistent with remote.
  def test_local_cache_of_write_with_unless_exist
    key = SecureRandom.uuid
    value = SecureRandom.alphanumeric
    @cache.with_local_cache do
      @cache.write(key, value)
      @cache.write(key, SecureRandom.alphanumeric, unless_exist: true)
      assert_equal @peek.read(key), @cache.read(key)
    end
  end

  def test_local_cache_of_delete
    key = SecureRandom.uuid
    @cache.with_local_cache do
      @cache.write(key, SecureRandom.alphanumeric)
      @cache.delete(key)
      assert_nil @cache.read(key)
    end
  end

  def test_local_cache_of_delete_matched
    # Stores without #delete_matched support raise NotImplementedError; skip.
    begin
      @cache.delete_matched("*")
    rescue NotImplementedError
      skip
    end

    prefix = SecureRandom.alphanumeric
    key = "#{prefix}#{SecureRandom.uuid}"
    other_key = "#{prefix}#{SecureRandom.uuid}"
    third_key = SecureRandom.uuid
    value = SecureRandom.alphanumeric
    @cache.with_local_cache do
      @cache.write(key, SecureRandom.alphanumeric)
      @cache.write(other_key, SecureRandom.alphanumeric)
      @cache.write(third_key, value)
      @cache.delete_matched("#{prefix}*")
      assert_not @cache.exist?(key)
      assert_not @cache.exist?(other_key)
      # Non-matching keys survive, in the local cache as well.
      assert_equal value, @cache.read(third_key)
    end
  end

  # exist? is answered by the local cache even after a remote delete.
  def test_local_cache_of_exist
    key = SecureRandom.uuid
    @cache.with_local_cache do
      @cache.write(key, SecureRandom.alphanumeric)
      @peek.delete(key)
      assert @cache.exist?(key)
    end
  end

  # increment must operate on the remote value (2 -> 3) and refresh the
  # local copy, not increment the stale local 1.
  def test_local_cache_of_increment
    key = SecureRandom.uuid
    @cache.with_local_cache do
      @cache.write(key, 1, raw: true)
      @peek.write(key, 2, raw: true)
      @cache.increment(key)
      expected = @peek.read(key, raw: true)
      assert_equal 3, Integer(expected)
      assert_equal expected, @cache.read(key, raw: true)
    end
  end

  # Mirror of the increment test: remote 3 decrements to 2.
  def test_local_cache_of_decrement
    key = SecureRandom.uuid
    @cache.with_local_cache do
      @cache.write(key, 1, raw: true)
      @peek.write(key, 3, raw: true)
      @cache.decrement(key)
      expected = @peek.read(key, raw: true)
      assert_equal 2, Integer(expected)
      assert_equal expected, @cache.read(key, raw: true)
    end
  end

  # fetch_multi results are cached locally, surviving remote deletes.
  def test_local_cache_of_fetch_multi
    key = SecureRandom.uuid
    other_key = SecureRandom.uuid
    @cache.with_local_cache do
      @cache.fetch_multi(key, other_key) { |_key| true }
      @peek.delete(key)
      @peek.delete(other_key)
      assert_equal true, @cache.read(key)
      assert_equal true, @cache.read(other_key)
    end
  end

  def test_local_cache_of_read_multi
    key = SecureRandom.uuid
    value = SecureRandom.alphanumeric
    other_key = SecureRandom.uuid
    other_value = SecureRandom.alphanumeric
    @cache.with_local_cache do
      @cache.write(key, value, raw: true)
      @cache.write(other_key, other_value, raw: true)
      values = @cache.read_multi(key, other_key, raw: true)
      assert_equal value, @cache.read(key, raw: true)
      assert_equal other_value, @cache.read(other_key, raw: true)
      assert_equal value, values[key]
      assert_equal other_value, values[other_key]
    end
  end

  # Mutating the object after write must not leak into the cached copy.
  def test_initial_object_mutation_after_write
    key = SecureRandom.uuid
    @cache.with_local_cache do
      initial = +"bar"
      @cache.write(key, initial)
      initial << "baz"
      assert_equal "bar", @cache.read(key)
    end
  end

  # Same guarantee for values produced by a fetch block.
  def test_initial_object_mutation_after_fetch
    key = SecureRandom.uuid
    @cache.with_local_cache do
      initial = +"bar"
      @cache.fetch(key) { initial }
      initial << "baz"
      assert_equal "bar", @cache.read(key)
      assert_equal "bar", @cache.fetch(key)
    end
  end

  # The cache middleware should enable the local cache for the request.
  def test_middleware
    key = SecureRandom.uuid
    value = SecureRandom.alphanumeric
    app = lambda { |env|
      result = @cache.write(key, value)
      assert_equal value, @cache.read(key) # make sure the value was written
      assert result
      [200, {}, []]
    }
    app = @cache.middleware.new(app)
    app.call({})
  end

  # Under race_condition_ttl, the fetch block still sees the stale value
  # while recomputing, and the recomputed value wins.
  def test_local_race_condition_protection
    key = SecureRandom.uuid
    value = SecureRandom.alphanumeric
    other_value = SecureRandom.alphanumeric
    @cache.with_local_cache do
      time = Time.now
      @cache.write(key, value, expires_in: 60)
      Time.stub(:now, time + 61) do
        result = @cache.fetch(key, race_condition_ttl: 10) do
          assert_equal value, @cache.read(key)
          other_value
        end
        assert_equal other_value, result
      end
    end
  end

  # false is a legitimate cached value and must round-trip (not read as a miss).
  def test_local_cache_should_read_and_write_false
    key = SecureRandom.uuid
    @cache.with_local_cache do
      assert @cache.write(key, false)
      assert_equal false, @cache.read(key)
    end
  end

  def test_local_cache_should_deserialize_entries_on_multi_get
    keys = Array.new(5) { SecureRandom.uuid }
    values = keys.index_with(true)
    @cache.with_local_cache do
      assert @cache.write_multi(values)
      assert_equal values, @cache.read_multi(*keys)
    end
  end
end
# frozen_string_literal: true | |
require "rack/body_proxy" | |
require "rack/utils" | |
module ActiveSupport | |
module Cache | |
module Strategy | |
module LocalCache | |
#--
# This class wraps up local storage for middlewares. Only the middleware method should
# construct them.
class Middleware # :nodoc:
  attr_reader :name, :local_cache_key

  def initialize(name, local_cache_key)
    @name = name
    @local_cache_key = local_cache_key
    @app = nil
  end

  # Mimics a middleware class: `use cache.middleware` ends up calling +new+
  # on this (already-instantiated) object, which captures the inner app and
  # returns itself as the "instance".
  def new(app)
    @app = app
    self
  end

  def call(env)
    LocalCacheRegistry.set_cache_for(local_cache_key, LocalStore.new)
    response = @app.call(env)
    # Defer teardown until the response body is closed, so a streaming body
    # can keep using the local cache while it is being served.
    response[2] = ::Rack::BodyProxy.new(response[2]) do
      LocalCacheRegistry.set_cache_for(local_cache_key, nil)
    end
    # Only reached when the BodyProxy was installed without raising; from
    # here on, cleanup is the proxy's responsibility.
    cleanup_on_body_close = true
    response
  rescue Rack::Utils::InvalidParameterError
    [400, {}, []]
  ensure
    # If we never handed cleanup off to the BodyProxy (the app raised,
    # threw, or produced a 400 above), drop the local cache now.
    # NOTE: cleanup_on_body_close is nil on those paths, so the guard fires.
    LocalCacheRegistry.set_cache_for(local_cache_key, nil) unless
      cleanup_on_body_close
  end
end
end | |
end | |
end | |
end |
# frozen_string_literal: true | |
require_relative "../abstract_unit" | |
require "active_support/cache" | |
module ActiveSupport | |
module Cache | |
module Strategy | |
module LocalCache | |
class MiddlewareTest < ActiveSupport::TestCase
  # The registry entry must exist for the whole request, survive body
  # iteration (streaming), and be dropped only when the body is closed.
  def test_local_cache_cleared_on_close
    cache_id = "super awesome key"
    assert_nil LocalCacheRegistry.cache_for(cache_id)

    app = lambda do |env|
      assert LocalCacheRegistry.cache_for(cache_id), "should have a cache"
      [200, {}, []]
    end
    stack = Middleware.new("<3", cache_id).new(app)

    _, _, body = stack.call({})
    assert LocalCacheRegistry.cache_for(cache_id), "should still have a cache"
    body.each { }
    assert LocalCacheRegistry.cache_for(cache_id), "should still have a cache"
    body.close
    assert_nil LocalCacheRegistry.cache_for(cache_id)
  end

  # An InvalidParameterError is converted into a 400 response, and the
  # registry entry is still cleaned up.
  def test_local_cache_cleared_and_response_should_be_present_on_invalid_parameters_error
    cache_id = "super awesome key"
    assert_nil LocalCacheRegistry.cache_for(cache_id)

    app = lambda do |env|
      assert LocalCacheRegistry.cache_for(cache_id), "should have a cache"
      raise Rack::Utils::InvalidParameterError
    end
    response = Middleware.new("<3", cache_id).new(app).call({})

    assert response, "response should exist"
    assert_nil LocalCacheRegistry.cache_for(cache_id)
  end

  # Any other exception propagates, but cleanup still happens.
  def test_local_cache_cleared_on_exception
    cache_id = "super awesome key"
    assert_nil LocalCacheRegistry.cache_for(cache_id)

    app = lambda do |env|
      assert LocalCacheRegistry.cache_for(cache_id), "should have a cache"
      raise
    end
    stack = Middleware.new("<3", cache_id).new(app)

    assert_raises(RuntimeError) { stack.call({}) }
    assert_nil LocalCacheRegistry.cache_for(cache_id)
  end

  # A throw (e.g. Warden's auth flow) also triggers cleanup via ensure.
  def test_local_cache_cleared_on_throw
    cache_id = "super awesome key"
    assert_nil LocalCacheRegistry.cache_for(cache_id)

    app = lambda do |env|
      assert LocalCacheRegistry.cache_for(cache_id), "should have a cache"
      throw :warden
    end
    stack = Middleware.new("<3", cache_id).new(app)

    assert_throws(:warden) { stack.call({}) }
    assert_nil LocalCacheRegistry.cache_for(cache_id)
  end
end
end | |
end | |
end | |
end |
# frozen_string_literal: true | |
require "active_support/core_ext/module/attribute_accessors" | |
require "active_support/core_ext/class/attribute" | |
require "active_support/subscriber" | |
module ActiveSupport | |
# <tt>ActiveSupport::LogSubscriber</tt> is an object set to consume | |
# ActiveSupport::Notifications with the sole purpose of logging them. | |
# The log subscriber dispatches notifications to a registered object based | |
# on its given namespace. | |
# | |
# An example would be Active Record log subscriber responsible for logging | |
# queries: | |
# | |
# module ActiveRecord | |
# class LogSubscriber < ActiveSupport::LogSubscriber | |
# def sql(event) | |
# info "#{event.payload[:name]} (#{event.duration}) #{event.payload[:sql]}" | |
# end | |
# end | |
# end | |
# | |
# And it's finally registered as: | |
# | |
# ActiveRecord::LogSubscriber.attach_to :active_record | |
# | |
# Since we need to know all instance methods before attaching the log | |
# subscriber, the line above should be called after your | |
# <tt>ActiveRecord::LogSubscriber</tt> definition. | |
# | |
# A logger also needs to be set with <tt>ActiveRecord::LogSubscriber.logger=</tt>. | |
# This is assigned automatically in a Rails environment. | |
# | |
# After configured, whenever a <tt>"sql.active_record"</tt> notification is published, | |
# it will properly dispatch the event | |
# (<tt>ActiveSupport::Notifications::Event</tt>) to the sql method. | |
# | |
# Being an ActiveSupport::Notifications consumer, | |
# <tt>ActiveSupport::LogSubscriber</tt> exposes a simple interface to check if | |
# instrumented code raises an exception. It is common to log a different | |
# message in case of an error, and this can be achieved by extending | |
# the previous example: | |
# | |
# module ActiveRecord | |
# class LogSubscriber < ActiveSupport::LogSubscriber | |
# def sql(event) | |
# exception = event.payload[:exception] | |
# | |
# if exception | |
# exception_object = event.payload[:exception_object] | |
# | |
# error "[ERROR] #{event.payload[:name]}: #{exception.join(', ')} " \ | |
# "(#{exception_object.backtrace.first})" | |
# else | |
# # standard logger code | |
# end | |
# end | |
# end | |
# end | |
# | |
# Log subscriber also has some helpers to deal with logging and automatically | |
# flushes all logs when the request finishes | |
# (via <tt>action_dispatch.callback</tt> notification) in a Rails environment. | |
class LogSubscriber < Subscriber
  # Embed in a String to clear all previous ANSI sequences.
  CLEAR = "\e[0m"
  BOLD = "\e[1m"

  # Colors
  BLACK = "\e[30m"
  RED = "\e[31m"
  GREEN = "\e[32m"
  YELLOW = "\e[33m"
  BLUE = "\e[34m"
  MAGENTA = "\e[35m"
  CYAN = "\e[36m"
  WHITE = "\e[37m"

  # Whether +color+ embeds ANSI escape codes in log messages.
  mattr_accessor :colorize_logging, default: true

  class << self
    # Lazily defaults to Rails.logger when running inside a Rails app.
    def logger
      @logger ||= if defined?(Rails) && Rails.respond_to?(:logger)
        Rails.logger
      end
    end

    attr_writer :logger

    def log_subscribers
      subscribers
    end

    # Flush all log_subscribers' logger.
    def flush_all!
      logger.flush if logger.respond_to?(:flush)
    end

    private
      # Subscriber hook: only methods a subclass adds (beyond those defined
      # on LogSubscriber itself) are treated as event handler methods.
      def fetch_public_methods(subscriber, inherit_all)
        subscriber.public_methods(inherit_all) - LogSubscriber.public_instance_methods(true)
      end
  end

  def logger
    LogSubscriber.logger
  end

  # Dispatch the event to its handler method. Events are dropped when no
  # logger is configured; handler errors are logged instead of raised so a
  # broken subscriber cannot take down the instrumented code path.
  def call(event)
    super if logger
  rescue => e
    log_exception(event.name, e)
  end

  # Same guard/rescue policy as +call+ for the event-object dispatch path.
  def publish_event(event)
    super if logger
  rescue => e
    log_exception(event.name, e)
  end

  private
    # Define level shortcuts (info, debug, warn, error, fatal, unknown) that
    # forward to the logger and no-op when no logger is set.
    %w(info debug warn error fatal unknown).each do |level|
      class_eval <<-METHOD, __FILE__, __LINE__ + 1
        def #{level}(progname = nil, &block)
          logger.#{level}(progname, &block) if logger
        end
      METHOD
    end

    # Set color by using a symbol or one of the defined constants. If a third
    # option is set to +true+, it also adds bold to the string. This is based
    # on the Highline implementation and will automatically append CLEAR to the
    # end of the returned String.
    def color(text, color, bold = false) # :doc:
      return text unless colorize_logging
      color = self.class.const_get(color.upcase) if color.is_a?(Symbol)
      bold = bold ? BOLD : ""
      "#{bold}#{color}#{text}#{CLEAR}"
    end

    # Last-resort reporting for a handler that raised: logs the failure
    # (including backtrace) rather than re-raising.
    def log_exception(name, e)
      if logger
        logger.error "Could not log #{name.inspect} event. #{e.class}: #{e.message} #{e.backtrace}"
      end
    end
end
end |
# frozen_string_literal: true | |
require_relative "abstract_unit" | |
require "active_support/log_subscriber/test_helper" | |
# Test fixture: a LogSubscriber whose handlers exercise each logging helper.
class MyLogSubscriber < ActiveSupport::LogSubscriber
  # Last event received by #some_event, for assertions.
  attr_reader :event

  # Records the event and logs its name at info level.
  def some_event(event)
    @event = event
    info event.name
  end

  # Logs at three levels; +info+ deliberately uses the block form.
  def foo(event)
    debug "debug"
    info { "info" }
    warn "warn"
  end

  # Exercises the +color+ helper: plain red, then bold blue.
  def bar(event)
    info "#{color("cool", :red)}, #{color("isn't it?", :blue, true)}"
  end

  # Raises to exercise LogSubscriber's exception-logging path.
  def puke(event)
    raise "puke"
  end
end
class SyncLogSubscriberTest < ActiveSupport::TestCase | |
include ActiveSupport::LogSubscriber::TestHelper | |
def setup | |
super | |
@log_subscriber = MyLogSubscriber.new | |
end | |
def teardown | |
super | |
ActiveSupport::LogSubscriber.log_subscribers.clear | |
end | |
def instrument(*args, &block) | |
ActiveSupport::Notifications.instrument(*args, &block) | |
end | |
def test_proxies_method_to_rails_logger | |
@log_subscriber.foo(nil) | |
assert_equal %w(debug), @logger.logged(:debug) | |
assert_equal %w(info), @logger.logged(:info) | |
assert_equal %w(warn), @logger.logged(:warn) | |
end | |
def test_set_color_for_messages | |
ActiveSupport::LogSubscriber.colorize_logging = true | |
@log_subscriber.bar(nil) | |
assert_equal "\e[31mcool\e[0m, \e[1m\e[34misn't it |
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)