class Sprockets::Cache::FileStore
Public: A file system cache store that automatically cleans up old keys.
Assign an instance to Sprockets::Base#cache.
environment.cache = Sprockets::Cache::FileStore.new("/tmp")
See Also
ActiveSupport::Cache::FileStore
Constants
- DEFAULT_MAX_SIZE
Internal: Default maximum byte size for the store.
Public Class Methods
Internal: Default logger that writes to standard error at FATAL level.
Returns a Logger.
# File lib/sprockets/cache/file_store.rb, line 26
def self.default_logger
  logger = Logger.new($stderr)
  logger.level = Logger::FATAL
  logger
end
Public: Initialize the cache store.
root     - A String path to a directory to persist cached values to.
max_size - An Integer of the maximum byte size the store will hold
           (default: DEFAULT_MAX_SIZE).
# File lib/sprockets/cache/file_store.rb, line 37
def initialize(root, max_size = DEFAULT_MAX_SIZE, logger = self.class.default_logger)
  @root     = root
  @size     = find_caches.inject(0) { |n, (_, stat)| n + stat.size }
  @max_size = max_size
  @gc_size  = max_size * 0.75
  @logger   = logger
end
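For illustration only, a store can be constructed with an explicit byte limit and logger; the directory path and limit below are placeholder values, not Sprockets defaults.

require 'sprockets'
require 'logger'

# Hypothetical setup: persist entries under ./tmp/cache, cap the store at
# roughly 50 MB of cache files, and log to stdout so gc! warnings are visible.
store = Sprockets::Cache::FileStore.new(
  "./tmp/cache",
  50 * 1024 * 1024,
  Logger.new($stdout)
)

env = Sprockets::Environment.new
env.cache = store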
Public Instance Methods
Public: Retrieve value from cache.
This API should not be used directly, but via the Cache wrapper API.
key - String cache key.
Returns Object, or nil if the value is not set.
# File lib/sprockets/cache/file_store.rb, line 52
def get(key)
  path = File.join(@root, "#{key}.cache")

  value = safe_open(path) do |f|
    begin
      EncodingUtils.unmarshaled_deflated(f.read, Zlib::MAX_WBITS)
    rescue Exception => e
      @logger.error do
        "#{self.class}[#{path}] could not be unmarshaled: " +
          "#{e.class}: #{e.message}"
      end
      nil
    end
  end

  if value
    FileUtils.touch(path)
    value
  end
end
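Although the Cache wrapper is the intended entry point, a direct call is a useful sketch of the contract; the key below is hypothetical, and because keys are used verbatim as filenames they must be filesystem-safe when calling the store directly.

# Hypothetical direct usage; real callers go through the Cache wrapper API.
store = Sprockets::Cache::FileStore.new("./tmp/cache")
store.get("asset-digest-abc123")   # => nil until a value has been set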
Public: Pretty inspect
Returns String.
# File lib/sprockets/cache/file_store.rb, line 122
def inspect
  "#<#{self.class} size=#{@size}/#{@max_size}>"
end
Public: Set a key and value in the cache.
This API should not be used directly, but via the Cache wrapper API.
key   - String cache key.
value - Object value.
Returns Object value.
# File lib/sprockets/cache/file_store.rb, line 81
def set(key, value)
  path = File.join(@root, "#{key}.cache")

  # Ensure directory exists
  FileUtils.mkdir_p File.dirname(path)

  # Check if cache exists before writing
  exists = File.exist?(path)

  # Serialize value
  marshaled = Marshal.dump(value)

  # Compress if larger than 4KB
  if marshaled.bytesize > 4 * 1024
    deflater = Zlib::Deflate.new(
      Zlib::BEST_COMPRESSION,
      Zlib::MAX_WBITS,
      Zlib::MAX_MEM_LEVEL,
      Zlib::DEFAULT_STRATEGY
    )
    deflater << marshaled
    raw = deflater.finish
  else
    raw = marshaled
  end

  # Write data
  PathUtils.atomic_write(path) do |f|
    f.write(raw)
    @size += f.size unless exists
  end

  # GC if necessary
  gc! if @size > @max_size

  value
end
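As the body above shows, values whose Marshal dump exceeds 4KB are deflate-compressed before writing, and #get reads either form back through EncodingUtils.unmarshaled_deflated. A hedged roundtrip sketch, with placeholder keys and values:

# Hypothetical roundtrip; keys and values are placeholders.
store = Sprockets::Cache::FileStore.new("./tmp/cache")
store.set("small-entry", { uri: "file.js" })  # small, stored as plain Marshal data
store.set("large-entry", "x" * 10_000)        # dump > 4KB, stored deflated
store.get("large-entry") == "x" * 10_000      # => true, decompressed on read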
Private Instance Methods
# File lib/sprockets/cache/file_store.rb, line 141
def compute_size(caches)
  caches.inject(0) { |sum, (_, stat)| sum + stat.size }
end
Internal: Get all cache files along with stats.
Returns an Array of [String filename, File::Stat] pairs sorted by mtime.
# File lib/sprockets/cache/file_store.rb, line 131
def find_caches
  Dir.glob(File.join(@root, '**/*.cache')).reduce([]) { |stats, filename|
    stat = safe_stat(filename)
    # stat may be nil if the file was removed between the Dir.glob call
    # and the stat call
    stats << [filename, stat] if stat
    stats
  }.sort_by { |_, stat| stat.mtime.to_i }
end
# File lib/sprockets/cache/file_store.rb, line 158
def gc!
  start_time = Time.now

  caches = find_caches
  size = compute_size(caches)

  delete_caches, keep_caches = caches.partition { |filename, stat|
    deleted = size > @gc_size
    size -= stat.size
    deleted
  }

  return if delete_caches.empty?

  FileUtils.remove(delete_caches.map(&:first), force: true)
  @size = compute_size(keep_caches)

  @logger.warn do
    secs = Time.now.to_f - start_time.to_f
    "#{self.class}[#{@root}] garbage collected " +
      "#{delete_caches.size} files (#{(secs * 1000).to_i}ms)"
  end
end
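Because find_caches sorts files oldest-first, the partition above walks entries from oldest to newest and marks them for deletion until the running total drops to or below @gc_size (75% of max_size). A toy walkthrough with made-up filenames and sizes:

# Toy illustration of the partition logic only; all values are made up.
# Assume max_size = 1000, so gc_size = 750, and 1200 bytes are on disk.
caches = [["a.cache", 300], ["b.cache", 200], ["c.cache", 400], ["d.cache", 300]] # oldest first
size = 1200
delete, keep = caches.partition { |_, bytes| doomed = size > 750; size -= bytes; doomed }
delete  # => [["a.cache", 300], ["b.cache", 200]]  oldest files removed first
keep    # => [["c.cache", 400], ["d.cache", 300]]  700 bytes remain, below gc_size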
# File lib/sprockets/cache/file_store.rb, line 151
def safe_open(path, &block)
  if File.exist?(path)
    File.open(path, 'rb', &block)
  end
rescue Errno::ENOENT
end
# File lib/sprockets/cache/file_store.rb, line 145
def safe_stat(fn)
  File.stat(fn)
rescue Errno::ENOENT
  nil
end