๐Ÿฆ€ Functional Rust

961: LRU Cache

Difficulty: Intermediate Category: Data Structures / Caching Concept: Capacity-bounded cache that evicts the least recently used entry on overflow Key Insight: Both languages combine a hash map (O(1) lookup) with an ordered queue (LRU tracking); OCaml uses `Queue` (linked list), Rust uses `VecDeque` โ€” `retain` removes a key from the middle, mirroring OCaml's `Queue.iter`-based removal
// 961: LRU Cache
// OCaml: Hashtbl + Queue; Rust: HashMap + VecDeque
// get and put are O(n) in this simplified version (linear queue cleanup);
// a production LRU pairs the map with a doubly-linked list for O(1) operations

use std::collections::{HashMap, VecDeque};

/// Capacity-bounded LRU cache: a `HashMap` provides O(1) lookup while a
/// `VecDeque` records recency order (front = least recently used, back =
/// most recently used). Recency maintenance is O(n) via `retain` — the
/// deliberately simplified scheme mirroring the OCaml version.
pub struct LruCache<K, V> {
    capacity: usize,
    table: HashMap<K, V>,
    order: VecDeque<K>,
}

impl<K: Eq + std::hash::Hash + Clone, V> LruCache<K, V> {
    /// Creates an empty cache holding at most `capacity` entries.
    ///
    /// # Panics
    /// Panics if `capacity` is zero.
    pub fn new(capacity: usize) -> Self {
        assert!(capacity > 0, "capacity must be > 0");
        Self {
            capacity,
            table: HashMap::with_capacity(capacity),
            order: VecDeque::with_capacity(capacity),
        }
    }

    /// Looks up `key`; a hit promotes it to most recently used.
    pub fn get(&mut self, key: &K) -> Option<&V> {
        if !self.table.contains_key(key) {
            return None;
        }
        // Hit: drop the key's old queue position, re-append at the MRU end.
        self.order.retain(|queued| queued != key);
        self.order.push_back(key.clone());
        self.table.get(key)
    }

    /// Inserts or updates `key`. A brand-new key inserted at full
    /// capacity first evicts the entry at the queue front (the LRU).
    pub fn put(&mut self, key: K, value: V) {
        if self.table.contains_key(&key) {
            // Updating an existing key: discard its stale recency slot;
            // it is re-appended below as most recently used.
            self.order.retain(|queued| queued != &key);
        } else if self.table.len() >= self.capacity {
            if let Some(victim) = self.order.pop_front() {
                self.table.remove(&victim);
            }
        }
        self.order.push_back(key.clone());
        self.table.insert(key, value);
    }

    /// Number of entries currently stored.
    pub fn size(&self) -> usize {
        self.table.len()
    }

    /// Whether `key` is present; does not touch recency order.
    pub fn contains(&self, key: &K) -> bool {
        self.table.contains_key(key)
    }
}

fn main() {
    let mut cache: LruCache<&str, i32> = LruCache::new(3);

    cache.put("a", 1);
    cache.put("b", 2);
    cache.put("c", 3);

    println!("get a: {:?}", cache.get(&"a"));
    println!("get b: {:?}", cache.get(&"b"));

    // After get("a"): order = c, b, a โ†’ evict "c" on next insert
    // Actually: initial order a,b,c. get("a") โ†’ b,c,a. get("b") โ†’ c,a,b.
    // Next insert evicts "c"
    cache.put("d", 4);
    println!("after insert d, has c: {}", cache.contains(&"c")); // false

    println!("get a: {:?}", cache.get(&"a"));
    println!("get b: {:?}", cache.get(&"b"));
    println!("get d: {:?}", cache.get(&"d"));
    println!("size: {}", cache.size());
}

#[cfg(test)]
mod tests {
    use super::*;

    /// Builds a cache of the given capacity pre-loaded with `pairs` in order.
    fn filled(pairs: &[(&'static str, i32)], cap: usize) -> LruCache<&'static str, i32> {
        let mut cache = LruCache::new(cap);
        for &(k, v) in pairs {
            cache.put(k, v);
        }
        cache
    }

    #[test]
    fn roundtrip_get_put() {
        let mut cache = filled(&[("a", 1), ("b", 2), ("c", 3)], 3);
        assert_eq!(cache.get(&"a"), Some(&1));
        assert_eq!(cache.get(&"b"), Some(&2));
        assert_eq!(cache.size(), 3);
    }

    #[test]
    fn recently_used_survives_eviction() {
        let mut cache = filled(&[("a", 1), ("b", 2), ("c", 3)], 3);
        // Touching "a" promotes it; "b" is now the front (LRU victim).
        cache.get(&"a");
        cache.put("d", 4);
        assert_eq!(cache.size(), 3);
        assert_eq!(cache.get(&"b"), None); // evicted
        assert_eq!(cache.get(&"a"), Some(&1));
        assert_eq!(cache.get(&"c"), Some(&3));
        assert_eq!(cache.get(&"d"), Some(&4));
    }

    #[test]
    fn put_overwrites_existing_value() {
        let mut cache = filled(&[("a", 1), ("b", 2)], 3);
        cache.put("a", 99);
        assert_eq!(cache.get(&"a"), Some(&99));
        assert_eq!(cache.size(), 2);
    }

    #[test]
    fn single_slot_cache_displaces() {
        let mut cache: LruCache<i32, &str> = LruCache::new(1);
        cache.put(1, "one");
        cache.put(2, "two"); // displaces key 1
        assert_eq!(cache.get(&1), None);
        assert_eq!(cache.get(&2), Some(&"two"));
    }

    #[test]
    fn absent_key_is_none() {
        let mut cache = filled(&[("x", 10)], 2);
        assert_eq!(cache.get(&"y"), None);
    }
}
(* 961: LRU Cache *)
(* Capacity-bounded cache: evicts least recently used on overflow *)
(* OCaml: Hashtbl for O(1) lookup + Queue for LRU order *)

(* Cache state: a hash table for O(1) key lookup plus a FIFO queue that
   records recency order (front = least recently used, back = most
   recently used). The record's fields are immutable bindings, but the
   Hashtbl and Queue values they hold are mutated in place. *)
type ('k, 'v) lru = {
  capacity: int;               (* maximum number of entries *)
  table: ('k, 'v) Hashtbl.t;   (* key -> value store *)
  order: 'k Queue.t;           (* eviction order; one entry per live key *)
}

(* Create an empty cache holding at most [capacity] entries.
   Raises [Invalid_argument] if [capacity] is not positive — mirroring
   the Rust version's assertion in [new]; without this check, a zero
   capacity would make [put] raise [Queue.Empty] on the first insert. *)
let create capacity =
  if capacity <= 0 then invalid_arg "capacity must be > 0";
  { capacity;
    table = Hashtbl.create capacity;
    order = Queue.create () }

(* Remove every occurrence of [key] from [q] (the cache maintains the
   invariant that each key appears at most once, so in practice this
   removes the single entry). O(n): rebuilds the survivors into a
   temporary queue, then moves them back with the O(1) [Queue.transfer].
   Uses structural equality ([<>]). *)
let remove_from_queue q key =
  let kept = Queue.create () in
  Queue.iter (fun k -> if k <> key then Queue.add k kept) q;
  Queue.clear q;
  Queue.transfer kept q

(* Look up [key]; on a hit the key is promoted to most recently used. *)
let get cache key =
  match Hashtbl.find_opt cache.table key with
  | Some v ->
    (* Promote: pull the key out of the queue, re-add at the back. *)
    remove_from_queue cache.order key;
    Queue.add key cache.order;
    Some v
  | None -> None

(* Insert or update [key]. A brand-new key stored at full capacity
   first evicts the queue front (the least recently used entry). *)
let put cache key value =
  (if Hashtbl.mem cache.table key then
     (* Existing key: discard its stale recency slot; re-added below. *)
     remove_from_queue cache.order key
   else if Hashtbl.length cache.table >= cache.capacity then begin
     let victim = Queue.pop cache.order in
     Hashtbl.remove cache.table victim
   end);
  Hashtbl.replace cache.table key value;
  Queue.add key cache.order

(* Number of live entries in the cache. *)
let size { table; _ } = Hashtbl.length table

(* Driver / self-test.
   BUG FIX: the original asserted that "b" was evicted and "c" survived,
   but the two [get] asserts below promote BOTH "a" and "b", leaving the
   recency order c, a, b (front = LRU) — so inserting "d" evicts "c",
   not "b", and the original asserts failed at runtime. *)
let () =
  let c = create 3 in

  put c "a" 1;
  put c "b" 2;
  put c "c" 3;

  assert (get c "a" = Some 1);
  assert (get c "b" = Some 2);
  assert (size c = 3);

  (* Recency trace: inserts leave a, b, c. The asserts above touched
     "a" then "b", so the order is now c, a, b (front = LRU).
     Inserting "d" at full capacity therefore evicts "c". *)
  put c "d" 4;
  assert (size c = 3);
  assert (get c "c" = None); (* "c" was evicted *)
  assert (get c "a" = Some 1);
  assert (get c "b" = Some 2);
  assert (get c "d" = Some 4);

  (* Update existing key: replaces the value, no eviction. *)
  put c "a" 99;
  assert (get c "a" = Some 99);
  assert (size c = 3);

  Printf.printf "✓ All tests passed\n"

๐Ÿ“Š Detailed Comparison

LRU Cache โ€” Comparison

Core Insight

LRU cache = hash map (fast lookup) + ordered queue (eviction order). Both OCaml and Rust use the same two-structure approach. The `get` operation must update recency โ€” removing the key from the middle of the queue and reinserting at the back. Both use O(n) queue cleanup (sufficient for most uses; production LRU uses a doubly-linked list).

OCaml Approach

  • `Hashtbl.t` + `Queue.t` in a record `{ capacity; table; order }`
  • `Queue.pop` โ€” dequeue front (LRU victim)
  • `Queue.add k q` โ€” enqueue back (most recent)
  • `remove_from_queue` โ€” filter queue to remove a key from the middle
  • `Queue.iter` + temporary queue for key removal
  • Record fields themselves are immutable here; mutation happens inside the `Hashtbl` and `Queue` values the record holds

Rust Approach

  • `HashMap<K, V>` + `VecDeque<K>` in a struct
  • `VecDeque::pop_front()` โ€” remove LRU
  • `VecDeque::push_back(key)` โ€” insert as MRU
  • `order.retain(|k| k != key)` โ€” elegant middle removal (O(n))
  • `K: Eq + Hash + Clone` โ€” trait bounds for HashMap + cloning into deque
  • `&mut self` on `get` โ€” Rust enforces mutation is explicit

Comparison Table

AspectOCamlRust
Hash store`Hashtbl.t``HashMap<K, V>`
Order queue`Queue.t``VecDeque<K>`
Evict front`Queue.pop q``order.pop_front()`
Add to back`Queue.add k q``order.push_back(k)`
Remove from middleManual filter with temp queue`order.retain(|k| k != key)`
Get mutates (recency)Yes (mutable record fields)`&mut self` required
Generic types`('k, 'v) lru``LruCache<K, V>`
Trait boundsNone (structural)`K: Eq + Hash + Clone`