🦀 Functional Rust

375: LRU Cache — Evict the Least Recently Used

Difficulty: 3 · Level: Advanced. O(1) `get` and `put` with automatic eviction of the least-recently-used entry when the cache is full.

The Problem This Solves

You're caching the results of expensive computations — database queries, rendered pages, API responses. You have limited memory, so you can't keep everything. You need an eviction policy. The most principled simple policy is LRU: when the cache is full and you need to insert something new, evict the entry that was accessed least recently. The intuition: if you haven't used something in a while, you're less likely to need it soon. The challenge is doing this in O(1) for every operation. A `HashMap` gives O(1) lookup but has no concept of access order. A doubly-linked list tracks access order (most recent at front, least recent at back) and lets you move any node to the front in O(1) — but finding a node by key is O(n). The classic solution: combine both. `HashMap<Key, Pointer to ListNode>` for O(1) lookup, doubly-linked list for O(1) order tracking. On every get/put, move the accessed node to the front of the list. On eviction, remove the tail.

The Intuition

Python's `functools.lru_cache` does this for you, and `collections.OrderedDict` exposes the primitives to build it manually. Rust has neither in the standard library — but the `lru` crate wraps this pattern behind a clean API. The tricky part in safe Rust: a doubly-linked list requires nodes with two mutable pointers (prev and next), which conflicts with Rust's aliasing rules. The standard solutions are: use `HashMap` + `VecDeque` with index-based addressing (good enough for most cases), use `unsafe` for raw pointers, or reach for the `lru` crate which handles this correctly.

How It Works in Rust

// Idiomatic implementation using HashMap + ordered index tracking
// For production use, prefer the `lru` crate
use std::collections::HashMap;

/// Teaching implementation: a `HashMap` for O(1) key lookup plus a `Vec`
/// that records access order (front = most recently used, back = LRU).
/// Move-to-front via `retain` + `insert(0, _)` is O(n) per operation —
/// fine for demonstration; use the `lru` crate for true O(1) behavior.
struct LruCache<K, V> {
    capacity: usize,    // maximum number of entries held
    map: HashMap<K, V>, // key -> value storage
    order: Vec<K>,      // front = most recently used, back = LRU
}

impl<K: Clone + Eq + std::hash::Hash, V> LruCache<K, V> {
    /// Creates an empty cache that holds at most `capacity` entries.
    ///
    /// # Panics
    /// Panics if `capacity` is zero. Without this guard, `put` would still
    /// insert (eviction pops from an empty order list and does nothing),
    /// so a zero-capacity cache would grow without bound.
    fn new(capacity: usize) -> Self {
        assert!(capacity > 0, "LruCache capacity must be non-zero");
        LruCache { capacity, map: HashMap::new(), order: Vec::new() }
    }

    /// Returns a reference to the value for `key`, marking it as the most
    /// recently used entry. Returns `None` if the key is absent.
    fn get(&mut self, key: &K) -> Option<&V> {
        if self.map.contains_key(key) {
            // Move to front of `order` (mark as recently used).
            self.order.retain(|k| k != key);
            self.order.insert(0, key.clone());
            self.map.get(key)
        } else {
            None
        }
    }

    /// Inserts or updates `key`, making it the most recently used entry.
    /// When the cache is full and `key` is new, the least recently used
    /// entry is evicted first.
    fn put(&mut self, key: K, value: V) {
        if self.map.contains_key(&key) {
            // Updating an existing key never evicts; just refresh recency.
            self.order.retain(|k| k != &key);
        } else if self.map.len() >= self.capacity {
            // Evict the LRU entry (last element of `order`).
            if let Some(lru_key) = self.order.pop() {
                self.map.remove(&lru_key);
            }
        }
        self.order.insert(0, key.clone());
        self.map.insert(key, value);
    }

    /// Number of entries currently cached.
    fn len(&self) -> usize { self.map.len() }
}

// Usage (illustrative snippet — wrap in `fn main` to compile and run)
let mut cache: LruCache<i32, &str> = LruCache::new(3);
cache.put(1, "one");
cache.put(2, "two");
cache.put(3, "three");
// Order: [3, 2, 1] (3 most recent)

cache.get(&1); // access 1 → moves to front
// Order: [1, 3, 2]

cache.put(4, "four"); // capacity full → evict 2 (LRU, at the back)
// Order: [4, 1, 3]

assert!(cache.get(&2).is_none()); // 2 was evicted

// With the `lru` crate (production-ready, O(1) everything):
// use lru::LruCache;
// use std::num::NonZeroUsize;
// let mut cache = LruCache::new(NonZeroUsize::new(3).unwrap());
// cache.put("key", "value");
// cache.get("key");

What This Unlocks

Key Differences

| Concept | OCaml | Rust |
|---|---|---|
| LRU cache | not in stdlib | custom or `lru` crate |
| O(1) lookup | `Hashtbl` | `HashMap` |
| O(1) order tracking | manual doubly-linked list | `VecDeque` (O(n) move-to-front) or `unsafe` list |
| Production-ready impl | `lru-cache` opam package | `lru` crate |
| Python equivalent | `functools.lru_cache` / `OrderedDict` | `lru` crate |
| Eviction policy | varies | LRU (least recently used) |
use std::collections::{HashMap, VecDeque};

/// LRU cache backed by a `HashMap` (O(1) lookup) and a `VecDeque` that
/// tracks recency (front = most recent, back = least recent). The
/// move-to-front step uses `retain`, so get/put are O(n) in the worst
/// case — acceptable here; the `lru` crate provides O(1) everything.
///
/// Note: trait bounds live on the impl, not the struct definition, and
/// `V` needs no bounds at all — values are moved in and borrowed out.
struct LruCache<K, V> {
    map: HashMap<K, V>,
    order: VecDeque<K>, // front = most recent, back = least recent
    capacity: usize,
}

impl<K: Clone + Eq + std::hash::Hash, V> LruCache<K, V> {
    /// Creates an empty cache with the given capacity.
    ///
    /// # Panics
    /// Panics if `capacity` is zero (a zero-capacity LRU cache is
    /// meaningless and would otherwise misbehave on `put`).
    fn new(capacity: usize) -> Self {
        assert!(capacity > 0);
        Self { map: HashMap::new(), order: VecDeque::new(), capacity }
    }

    /// Looks up `key`, promoting it to most-recently-used on a hit.
    fn get(&mut self, key: &K) -> Option<&V> {
        if self.map.contains_key(key) {
            // Move to front (most recent)
            self.order.retain(|k| k != key);
            self.order.push_front(key.clone());
            self.map.get(key)
        } else { None }
    }

    /// Inserts or updates `key` as most-recently-used, evicting the
    /// least-recently-used entry if a new key would exceed capacity.
    fn put(&mut self, key: K, val: V) {
        if self.map.contains_key(&key) {
            // Existing key: only its recency changes; no eviction.
            self.order.retain(|k| k != &key);
        } else if self.map.len() >= self.capacity {
            // Evict least recently used (back of deque)
            if let Some(lru_key) = self.order.pop_back() {
                self.map.remove(&lru_key);
            }
        }
        self.map.insert(key.clone(), val);
        self.order.push_front(key);
    }

    /// Number of entries currently cached.
    fn len(&self) -> usize { self.map.len() }

    /// Whether `key` is cached (does NOT update recency).
    fn contains(&self, key: &K) -> bool { self.map.contains_key(key) }
}

fn main() {
    // Fill the cache to capacity, touch "a", then overflow it by one.
    let mut lru: LruCache<&str, i32> = LruCache::new(3);
    lru.put("a", 1);
    lru.put("b", 2);
    lru.put("c", 3);
    println!("get a: {:?}", lru.get(&"a")); // touching "a" makes it MRU
    lru.put("d", 4); // "b" is now least recently used -> evicted
    println!("a: {:?}", lru.get(&"a"));
    println!("b: {:?}", lru.get(&"b")); // evicted -> None
    println!("c: {:?}", lru.get(&"c"));
    println!("d: {:?}", lru.get(&"d"));
}

#[cfg(test)]
mod tests {
    use super::*;

    /// Eviction targets the least recently used key, and `get` refreshes
    /// a key's recency.
    #[test]
    fn basic_lru() {
        let mut cache: LruCache<i32, i32> = LruCache::new(2);
        cache.put(1, 1);
        cache.put(2, 2);
        assert_eq!(cache.get(&1), Some(&1)); // 1 becomes MRU; 2 is now LRU
        cache.put(3, 3); // full -> evicts 2
        assert_eq!(cache.get(&2), None);
        assert_eq!(cache.get(&1), Some(&1));
        assert_eq!(cache.get(&3), Some(&3));
    }

    /// Re-inserting an existing key updates its value and refreshes its
    /// recency without triggering an eviction.
    #[test]
    fn update_existing() {
        let mut cache: LruCache<&str, i32> = LruCache::new(2);
        cache.put("a", 1);
        cache.put("b", 2);
        cache.put("a", 10); // update: "a" becomes MRU, "b" becomes LRU
        cache.put("c", 3); // full -> evicts "b"
        assert_eq!(cache.get(&"a"), Some(&10));
        assert_eq!(cache.get(&"b"), None);
    }
}
(* OCaml: LRU cache over a mutable association list (simplified;
   get/put are O(n) — a production version would pair a Hashtbl with a
   doubly-linked list for O(1) operations). *)

type ('k,'v) lru = {
  mutable data: ('k*'v) list;  (* Most-recent first; last element is the LRU *)
  capacity: int;               (* Maximum number of entries retained *)
}

(* Create an empty cache bounded to [capacity] entries. *)
let make capacity = { data = []; capacity }

(* Look up [key]; on a hit, promote the entry to the front (most recent). *)
let get cache key =
  match List.assoc_opt key cache.data with
  | Some v ->
    cache.data <- (key, v) :: List.filter (fun (k', _) -> k' <> key) cache.data;
    Some v
  | None -> None

(* Insert or update [key] as most recent; truncate to capacity, which
   drops the least recently used entry from the tail when full. *)
let put cache key value =
  let rest = List.filter (fun (k', _) -> k' <> key) cache.data in
  let updated = (key, value) :: rest in
  cache.data <-
    if List.length updated > cache.capacity
    then List.filteri (fun i _ -> i < cache.capacity) updated
    else updated

let () =
  let cache = make 3 in
  put cache "a" 1;
  put cache "b" 2;
  put cache "c" 3;
  ignore (get cache "a");  (* touching "a" promotes it to most recent *)
  put cache "d" 4;  (* capacity exceeded: "b" is least recent and is dropped *)
  let show k = Option.fold ~none:"evicted" ~some:string_of_int (get cache k) in
  Printf.printf "a: %s\n" (show "a");
  Printf.printf "b: %s\n" (show "b")