In-Memory Caches

Consider the following Rust crates for your in-memory caching needs: lru, moka, and cached.

Use a Least Recently Used (LRU) Cache

lru provides a fast Least Recently Used (LRU) cache for Rust. An LRU cache bounds its size by discarding the least recently accessed entries once it reaches its capacity.

//! This example demonstrates the basic usage of the `LruCache` from the `lru`
//! crate. It shows how to create a cache, insert key-value pairs, access
//! entries, and observe the eviction behavior of the LRU (Least Recently Used)
//! cache.

use std::num::NonZeroUsize;

use lru::LruCache;

fn main() {
    // Create an LRU cache with a capacity of 3.
    let mut cache = LruCache::new(NonZeroUsize::new(3).unwrap());

    // Insert some key-value pairs into the cache.
    cache.put(1, "one");
    cache.put(2, "two");
    cache.put(3, "three");

    // Access some entries.
    println!("a) value for key 1: {:?}", cache.get(&1)); // Should print "Some("one")"
    assert_eq!(*cache.get(&2).unwrap(), "two");
    assert_eq!(cache.get(&3), Some(&"three"));
    assert!(cache.get(&4).is_none());

    // Insert another entry, causing the least recently used entry to be
    // evicted.
    cache.put(4, "four");

    // The cache now contains keys 2, 3, and 4.
    println!("b) value for key 1: {:?}", cache.get(&1)); // Should print "None"
    println!("b) value for key 3: {:?}", cache.get(&3)); // Some("three")
    println!("b) value for key 4: {:?}", cache.get(&4)); // Some("four")

    // Insert another entry, causing another eviction.
    cache.put(5, "five");

    // The cache now contains keys 3, 4, and 5.
    println!("Value for key 2: {:?}", cache.get(&2)); // "None"
    println!("Value for key 4: {:?}", cache.get(&4)); // Some("four")"
    println!("Value for key 5: {:?}", cache.get(&5)); // Some("five")

    {
        // Returns a mutable reference to the value of the key in the cache or
        // `None`, if it is not present in the cache.
        // Moves the key to the head of the LRU list, if it exists.
        let v = cache.get_mut(&5).unwrap();
        *v = "new value";
        assert_eq!(cache.get_mut(&5), Some(&mut "new value"));

        assert_eq!(cache.get_mut(&6), None);
    }
}
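
Beyond put and get, LruCache offers methods such as peek, which reads a value without updating its recency, and pop, which removes an entry. The following minimal sketch (using the same lru crate as above) illustrates both:

use std::num::NonZeroUsize;

use lru::LruCache;

fn main() {
    let mut cache = LruCache::new(NonZeroUsize::new(2).unwrap());
    cache.put("a", 1);
    cache.put("b", 2);

    // `peek` returns the value without marking "a" as recently used...
    assert_eq!(cache.peek(&"a"), Some(&1));

    // ...so inserting a third entry still evicts "a", not "b".
    cache.put("c", 3);
    assert!(cache.get(&"a").is_none());

    // `pop` removes an entry and returns its value.
    assert_eq!(cache.pop(&"b"), Some(2));
    assert_eq!(cache.len(), 1);
}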

moka

moka is a fast, concurrent cache library inspired by the Caffeine library for Java.

//! This example demonstrates the basic usage of the `moka` crate for creating
//! a concurrent cache.
//!
//! `moka` is inspired by the 'Caffeine' library for Java.
//!
//! Remember to add `moka = "0.12.10"` (or the latest version) to your
//! `Cargo.toml`.
//!
//! `moka` also provides an asynchronous (futures aware) cache in its `future`
//! module.

use std::time::Duration;

use moka::sync::Cache;

fn main() {
    // Create a cache with a maximum of 100 entries.
    // Entries will be evicted after 10 minutes of not being accessed.
    //
    // The `sync::Cache` struct provides a synchronous cache.
    let cache = Cache::builder()
        .max_capacity(100)
        .time_to_idle(Duration::from_secs(600))
        .build();

    // Insert a key-value pair.
    cache.insert("key1", "value1");

    // Retrieve a value.
    match cache.get("key1") {
        Some(value) => println!("Retrieved value: {}", value),
        None => println!("Value not found"),
    }
    // Note: Each time `get` is called for an existing key, it returns a
    // clone of the stored value. If your values are expensive to clone,
    // wrap them in `std::sync::Arc`, as sketched below.
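
    // A minimal sketch (illustrative, not part of the `moka` docs): store
    // `Arc`-wrapped values so `get` clones only a cheap reference-counted
    // pointer rather than the underlying data.
    let blobs: Cache<&str, std::sync::Arc<Vec<u8>>> = Cache::new(10);
    blobs.insert("blob", std::sync::Arc::new(vec![0u8; 1024]));
    if let Some(blob) = blobs.get("blob") {
        println!("blob length: {} bytes", blob.len());
    }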

    // Remove an entry.
    cache.invalidate("key1");

    // To share the same cache across multiple threads, clone it.
    // This is a cheap operation.
    let cache_clone = cache.clone();
    let handle = std::thread::spawn(move || {
        cache_clone.insert("key2", "value2");
    });
    handle.join().unwrap();
}
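
As the doc comment above notes, moka also provides an asynchronous, futures-aware cache in its future module. A minimal sketch, assuming the future feature is enabled (for example, moka = { version = "0.12.10", features = ["future"] } in Cargo.toml) and a Tokio runtime:

use moka::future::Cache;

#[tokio::main]
async fn main() {
    // The future-aware cache exposes async variants of the same operations.
    let cache: Cache<String, String> = Cache::new(100);

    cache.insert("key1".to_string(), "value1".to_string()).await;
    assert_eq!(cache.get("key1").await, Some("value1".to_string()));

    cache.invalidate("key1").await;
    assert!(cache.get("key1").await.is_none());
}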

cached

cached provides generic cache implementations and simplified function memoization.

//! This example demonstrates two key uses of the `cached` crate:
//! - Simple function memoization with `#[cached]`,
//! - Advanced caching with custom configuration options.
//!
//! Remember to add `cached = "0.55.1"` (or the latest version) to your
//! `Cargo.toml`.

use cached::proc_macro::cached;

/// Cached function with default configuration. Uses the unbounded cache.
/// By default, the cache's name will be the function's name in all caps.
#[cached]
fn fibonacci(n: u64) -> u64 {
    if n <= 1 {
        return n;
    }
    fibonacci(n - 1) + fibonacci(n - 2)
}

/// Cached function with custom time-to-live and max capacity.
#[cached(
    // Max cache size.
    size = 50,
    // Set a time-to-live (TTL), in seconds, for cached entries.
    time = 60,
    // Specify whether to refresh the TTL on cache hits.
    time_refresh = true,
)]
fn fetch_data(key: usize) -> String {
    println!("Expensive data fetching simulated here.");
    format!("Data for key {}", key)
}
// You can also specify the cache name and type, the way it is created, the key
// type, execution synchronization options, handling of `Result` and `Option`
// return values (see the sketch below), etc.
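
// A minimal sketch (a hypothetical function, not from the crate's docs):
// with `result = true`, only `Ok` values are cached, while `Err` results
// are returned to the caller but never stored.
#[cached(result = true)]
fn parse_number(s: String) -> Result<i64, std::num::ParseIntError> {
    s.trim().parse::<i64>()
}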

fn main() {
    // First call computes and caches.
    println!("Fibonacci of 10: {}", fibonacci(10));

    // Subsequent calls retrieve from cache.
    println!("Cached Fibonacci of 10: {}", fibonacci(10));

    // Check the underlying cache:
    {
        use cached::Cached;
        let cache = FIBONACCI.lock().unwrap();
        println!("misses: {:?}", cache.cache_misses());
        assert_eq!(cache.cache_misses(), Some(11));
        // Make sure the lock is dropped.
    }

    // Example with customized cache.
    println!("User data: {}", fetch_data(123));
    println!("Cached user data: {}", fetch_data(123));
}

Related Topics

  • Database.
  • Database Implementations.
  • NoSQL.
  • Key-Value Stores.