main.go
package main

import (
	"container/list"
	"fmt"
	"sync"
)

// entry is the key/value pair stored in each list element.
type entry struct {
	key   string
	value string
}

// LRUCache is a fixed-capacity LRU cache safe for concurrent use.
type LRUCache struct {
	mu       sync.Mutex
	capacity int
	ll       *list.List
	items    map[string]*list.Element
}

// NewLRUCache returns an empty cache holding at most capacity entries.
func NewLRUCache(capacity int) *LRUCache {
	if capacity < 1 {
		panic("capacity must be >= 1")
	}
	return &LRUCache{
		capacity: capacity,
		ll:       list.New(),
		items:    make(map[string]*list.Element, capacity),
	}
}

// Get returns the value for key and marks it as most recently used.
func (c *LRUCache) Get(key string) (string, bool) {
	c.mu.Lock()
	defer c.mu.Unlock()

	el, ok := c.items[key]
	if !ok {
		return "", false
	}
	c.ll.MoveToFront(el)
	return el.Value.(entry).value, true
}

// Put inserts or updates key, evicting the oldest entry when full.
func (c *LRUCache) Put(key, value string) {
	c.mu.Lock()
	defer c.mu.Unlock()

	if el, ok := c.items[key]; ok {
		el.Value = entry{key: key, value: value}
		c.ll.MoveToFront(el)
		return
	}

	el := c.ll.PushFront(entry{key: key, value: value})
	c.items[key] = el

	// Evict the least recently used entry from the back of the list.
	if c.ll.Len() > c.capacity {
		back := c.ll.Back()
		if back != nil {
			c.ll.Remove(back)
			ev := back.Value.(entry)
			delete(c.items, ev.key)
		}
	}
}

// Len reports how many entries the cache currently holds.
func (c *LRUCache) Len() int {
	c.mu.Lock()
	defer c.mu.Unlock()
	return c.ll.Len()
}

func main() {
	cache := NewLRUCache(2)
	cache.Put("a", "1")
	cache.Put("b", "2")

	if v, ok := cache.Get("a"); ok {
		fmt.Println("a =", v)
	}

	cache.Put("c", "3")
	_, ok := cache.Get("b")
	fmt.Println("b present?", ok)
	fmt.Println("len =", cache.Len())
}

How It Works

A classic LRU cache that pairs a map with a doubly linked list to give O(1) Get and Put with deterministic eviction.

The linked list orders entries by recency: Get moves an entry's node to the front, while Put either updates an existing node in place or pushes a new one to the front, evicting the node at the back once capacity is exceeded. A single mutex guards all reads and writes, so the cache is safe for concurrent use.
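
To see the recency bookkeeping in action, here is a minimal test sketch. It assumes it sits next to main.go in the same package; nothing beyond the standard library is required.

lru_test.go

package main

import "testing"

func TestEvictionOrder(t *testing.T) {
	c := NewLRUCache(2)
	c.Put("a", "1")
	c.Put("b", "2")

	// Reading "a" makes "b" the least recently used entry.
	c.Get("a")

	// Inserting a third key exceeds capacity and evicts "b".
	c.Put("c", "3")

	if _, ok := c.Get("b"); ok {
		t.Fatal("expected b to be evicted")
	}
	if v, ok := c.Get("a"); !ok || v != "1" {
		t.Fatalf("expected a=1, got %q (present=%v)", v, ok)
	}
}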

Key Concepts

  • Map gives constant-time lookups while the list preserves recency order.
  • Eviction removes the least recently used entry when the cache is full.
  • Concurrency safety comes from a single mutex around all cache state (see the sketch after this list).
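
As a quick illustration of that mutex, the sketch below (which assumes the LRUCache above is in scope, with fmt and sync already imported in main.go) hits the cache from several goroutines; without the lock this pattern would trip Go's race detector (go run -race).

// concurrentDemo exercises the cache from several goroutines at once.
// This is safe only because every Get and Put takes the cache mutex.
func concurrentDemo() {
	c := NewLRUCache(64)
	var wg sync.WaitGroup

	for i := 0; i < 8; i++ {
		wg.Add(1)
		go func(id int) {
			defer wg.Done()
			for j := 0; j < 1000; j++ {
				key := fmt.Sprintf("key-%d", j%100)
				c.Put(key, fmt.Sprintf("worker-%d", id))
				c.Get(key)
			}
		}(i)
	}
	wg.Wait()
}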

When to Use This Pattern

  • Caching database query results or rendered templates.
  • Memoizing expensive function results (a sketch follows this list).
  • Building small in-memory caches for CLI tools or services.
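
For the memoization case, a thin wrapper along these lines is enough. expensiveRender is a hypothetical stand-in for whatever slow call you want to cache, and the snippet assumes the LRUCache type above.

// expensiveRender is a hypothetical placeholder for the real work.
func expensiveRender(name string) string {
	return "rendered:" + name
}

// renderCache keeps recent results; 128 entries is an arbitrary size.
var renderCache = NewLRUCache(128)

// memoizedRender returns a cached result when available and only
// calls expensiveRender on a miss.
func memoizedRender(name string) string {
	if v, ok := renderCache.Get(name); ok {
		return v // hit: skip the expensive call
	}
	v := expensiveRender(name)
	renderCache.Put(name, v)
	return v
}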

Best Practices

  • Keep capacity realistic to avoid churn under heavy load.
  • Avoid returning internal node references to callers.
  • Profile hit and miss ratios to tune size; a counting wrapper is sketched below.
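
One way to follow that last point is a small counting wrapper around the cache. The sketch below is illustrative rather than part of the cache above; it assumes the LRUCache type from main.go and needs sync/atomic imported.

// CountingCache wraps LRUCache and tallies hits and misses so the
// capacity can be tuned against real traffic.
type CountingCache struct {
	hits, misses int64 // updated atomically; kept first for 64-bit alignment
	*LRUCache
}

// Get delegates to the embedded cache and records the outcome.
func (c *CountingCache) Get(key string) (string, bool) {
	v, ok := c.LRUCache.Get(key)
	if ok {
		atomic.AddInt64(&c.hits, 1)
	} else {
		atomic.AddInt64(&c.misses, 1)
	}
	return v, ok
}

// HitRatio reports the fraction of lookups served from the cache.
func (c *CountingCache) HitRatio() float64 {
	h := atomic.LoadInt64(&c.hits)
	m := atomic.LoadInt64(&c.misses)
	if h+m == 0 {
		return 0
	}
	return float64(h) / float64(h+m)
}
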
Go Version: 1.18+
Difficulty: intermediate
Production Ready: Yes
Lines of Code: 93