require 'monitor'

module ActiveSupport
  module Cache
    # A cache store implementation which stores everything into memory in the
    # same process. If you're running multiple Ruby on Rails server processes
    # (which is the case if you're using mongrel_cluster or Phusion Passenger),
    # those processes won't be able to share cache data with each other, so
    # this may not be the most appropriate cache store in that scenario.
    #
    # This cache has a bounded size specified by the :size option passed to the
    # initializer (default is 32 megabytes). When the cache exceeds the allotted
    # size, a cleanup occurs that prunes the cache down to three quarters of the
    # maximum size by removing the least recently used entries.
    #
    # MemoryStore is thread-safe.
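    #
    # A minimal usage sketch (the size limit and key names below are purely
    # illustrative, not defaults):
    #
    #   cache = ActiveSupport::Cache::MemoryStore.new(:size => 8.megabytes)
    #   cache.write("city", "Duckburgh")
    #   cache.read("city")    # => "Duckburgh"
    #   cache.read("country") # => nil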
    class MemoryStore < Store
      def initialize(options = nil)
        options ||= {}
        super(options)
        @data = {}
        @key_access = {}
        @max_size = options[:size] || 32.megabytes
        @max_prune_time = options[:max_prune_time] || 2
        @cache_size = 0
        @monitor = Monitor.new
        @pruning = false
      end

      # Deletes all items from the cache.
      def clear(options = nil)
        synchronize do
          @data.clear
          @key_access.clear
          @cache_size = 0
        end
      end

      # Removes expired entries from the cache.
      def cleanup(options = nil)
        options = merged_options(options)
        instrument(:cleanup, :size => @data.size) do
          keys = synchronize{ @data.keys }
          keys.each do |key|
            entry = @data[key]
            delete_entry(key, options) if entry && entry.expired?
          end
        end
      end

      # To ensure entries fit within the specified memory limit, prune the cache
      # by removing the least recently accessed entries.
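      #
      # An illustrative direct call (prune is normally triggered automatically
      # by write_entry once the cache grows past the configured :size; the
      # numbers here are arbitrary):
      #
      #   cache.prune(16.megabytes)     # prune until at most 16MB is used
      #   cache.prune(16.megabytes, 2)  # same, but give up after 2 seconds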
      def prune(target_size, max_time = nil)
        return if pruning?
        @pruning = true
        begin
          start_time = Time.now
          cleanup
          instrument(:prune, target_size, :from => @cache_size) do
            keys = synchronize{ @key_access.keys.sort{|a,b| @key_access[a].to_f <=> @key_access[b].to_f} }
            keys.each do |key|
              delete_entry(key, options)
              return if @cache_size <= target_size || (max_time && Time.now - start_time > max_time)
            end
          end
        ensure
          @pruning = false
        end
      end

      # Returns true if the cache is currently being pruned.
      def pruning?
        @pruning
      end

      # Increment an integer value in the cache.
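      #
      # Returns the new value, or +nil+ if the key is missing. Illustrative
      # (the key name is hypothetical):
      #
      #   cache.write("counter", 10)
      #   cache.increment("counter")    # => 11
      #   cache.increment("counter", 5) # => 16
      #   cache.increment("missing")    # => nil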
      def increment(name, amount = 1, options = nil)
        synchronize do
          options = merged_options(options)
          if num = read(name, options)
            num = num.to_i + amount
            write(name, num, options)
            num
          else
            nil
          end
        end
      end

      # Decrement an integer value in the cache.
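      #
      # Returns the new value, or +nil+ if the key is missing. Illustrative
      # (the key name is hypothetical):
      #
      #   cache.write("counter", 10)
      #   cache.decrement("counter")    # => 9
      #   cache.decrement("counter", 5) # => 4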
      def decrement(name, amount = 1, options = nil)
        synchronize do
          options = merged_options(options)
          if num = read(name, options)
            num = num.to_i - amount
            write(name, num, options)
            num
          else
            nil
          end
        end
      end

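      # Deletes all entries whose keys match +matcher+ (a Regexp or String,
      # normalized through key_matcher). An illustrative sketch with
      # hypothetical keys:
      #
      #   cache.write("rails/version", 3)
      #   cache.write("ruby/version", 1)
      #   cache.delete_matched(/^rails/) # removes only the "rails/version" entry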
      def delete_matched(matcher, options = nil)
        options = merged_options(options)
        instrument(:delete_matched, matcher.inspect) do
          matcher = key_matcher(matcher, options)
          keys = synchronize { @data.keys }
          keys.each do |key|
            delete_entry(key, options) if key.match(matcher)
          end
        end
      end

      def inspect # :nodoc:
        "<##{self.class.name} entries=#{@data.size}, size=#{@cache_size}, options=#{@options.inspect}>"
      end

      # Synchronize calls to the cache. This should be called wherever the underlying cache implementation
      # is not thread safe.
      def synchronize(&block) # :nodoc:
        @monitor.synchronize(&block)
      end

      protected
        def read_entry(key, options) # :nodoc:
          entry = @data[key]
          synchronize do
            if entry
              @key_access[key] = Time.now.to_f
            else
              @key_access.delete(key)
            end
          end
          entry
        end

        def write_entry(key, entry, options) # :nodoc:
          synchronize do
            old_entry = @data[key]
            @cache_size -= old_entry.size if old_entry
            @cache_size += entry.size
            @key_access[key] = Time.now.to_f
            @data[key] = entry
            prune(@max_size * 0.75, @max_prune_time) if @cache_size > @max_size
            true
          end
        end

        def delete_entry(key, options) # :nodoc:
          synchronize do
            @key_access.delete(key)
            entry = @data.delete(key)
            @cache_size -= entry.size if entry
            !!entry
          end
        end
    end
  end
end