⚡ Backend Intermediate ⏱️ 16 min

Caching Strategies in .NET with Redis and Memory Cache

Implement distributed and in-memory caching patterns in .NET applications for improved performance and reduced database load.

By Victor Robin

Introduction

Effective caching is one of the highest-impact performance optimizations for any distributed system. It reduces database load, improves response times, and enhances overall scalability by serving frequently accessed data from high-speed memory.

This guide covers implementing multi-tier caching strategies in .NET, combining in-memory caching for speed with Redis for distributed consistency.

What We’ll Build

  1. Hybrid Cache: A two-layer cache (L1 Memory + L2 Redis).
  2. Cache-Aside Pattern: A repository decorator that transparently caches database lookups.
  3. Stampede Protection: A mechanism to prevent thousands of requests hitting the database simultaneously when a hot key expires.

Architecture Overview

flowchart LR
    Client["📱 Client"] -->|Request| API["🚀 API"]
    
    subgraph Caching["⚡ Caching Layer"]
        API -->|1. Check| L1["🧠 L1: Memory\n(In-Process)"]
        API -->|2. Check| L2["🔴 L2: Redis\n(Distributed)"]
    end
    
    subgraph Data["💾 Data Layer"]
        L2 -->|3. Fallback| DB[(PostgreSQL)]
    end

    L1 -.->|Hit| API
    L2 -.->|Hit| API
    DB -.->|Miss & Set| L2
    L2 -.->|Propagate| L1

    classDef primary fill:#7c3aed,color:#fff
    classDef secondary fill:#06b6d4,color:#fff
    classDef db fill:#f43f5e,color:#fff
    classDef warning fill:#fbbf24,color:#000

    class Client,API primary
    class L1 secondary
    class L2,DB db

Implementation

Cache Configuration

Service Registration

// Program.cs
//
// Cache service registration: wires the two tiers used by HybridCacheService —
// L1 = in-process IMemoryCache, L2 = Redis via IDistributedCache.
builder.Services.AddMemoryCache(options =>
{
    // SizeLimit is measured in abstract "size units", not bytes or entries.
    // Entries written by HybridCacheService.SetAsync declare Size = 1, so in
    // practice this caps L1 at 1024 entries. NOTE(review): once SizeLimit is
    // set, any Set() WITHOUT an explicit Size throws InvalidOperationException.
    options.SizeLimit = 1024; // Maximum cache entries
    // How often expired entries are scanned for and evicted.
    options.ExpirationScanFrequency = TimeSpan.FromMinutes(1);
});

builder.Services.AddStackExchangeRedisCache(options =>
{
    // Redis connection string comes from configuration key "Redis".
    options.Configuration = builder.Configuration.GetConnectionString("Redis");
    // Prefix prepended to every key, isolating this app from other apps
    // sharing the same Redis instance.
    options.InstanceName = "BlueRobin:";
});

// Custom cache service
// Singleton is safe: IMemoryCache and IDistributedCache are singletons too.
builder.Services.AddSingleton<ICacheService, HybridCacheService>();

Cache Abstraction

// Application/Caching/ICacheService.cs
/// <summary>
/// Application-facing cache abstraction. Implementations may back this with
/// memory, Redis, or both tiers (see HybridCacheService), and may be stacked
/// as decorators (stampede protection, metrics).
/// </summary>
public interface ICacheService
{
    /// <summary>Returns the cached value for <paramref name="key"/>, or null on a miss.</summary>
    Task<T?> GetAsync<T>(string key, CancellationToken ct = default) where T : class;
    /// <summary>Stores <paramref name="value"/> under <paramref name="key"/>; implementation defaults apply when <paramref name="options"/> is null.</summary>
    Task SetAsync<T>(string key, T value, CacheOptions? options = null, CancellationToken ct = default) where T : class;
    /// <summary>Removes <paramref name="key"/> from every tier the implementation manages.</summary>
    Task RemoveAsync(string key, CancellationToken ct = default);
    /// <summary>Cache-aside helper: returns the cached value, or invokes <paramref name="factory"/>, caches its result, and returns it.</summary>
    Task<T> GetOrCreateAsync<T>(string key, Func<Task<T>> factory, CacheOptions? options = null, CancellationToken ct = default) where T : class;
}

/// <summary>Per-entry cache settings; null members fall back to the implementation's defaults.</summary>
public record CacheOptions
{
    /// <summary>Hard upper bound on entry lifetime, relative to when it is written.</summary>
    public TimeSpan? AbsoluteExpiration { get; init; }
    /// <summary>Entry expires if not read within this window (still capped by the absolute expiration).</summary>
    public TimeSpan? SlidingExpiration { get; init; }
    /// <summary>Which tier(s) a write targets; defaults to both L1 and L2.</summary>
    public CacheTier Tier { get; init; } = CacheTier.Both;
}

/// <summary>Selects which cache tier(s) a write targets.</summary>
public enum CacheTier
{
    /// <summary>L1 only: in-process memory cache.</summary>
    Memory,
    /// <summary>L2 only: distributed (Redis) cache.</summary>
    Distributed,
    /// <summary>Write through to both tiers.</summary>
    Both
}

Hybrid Cache Implementation

Multi-Tier Cache Service

// Infrastructure/Caching/HybridCacheService.cs
//
// Two-tier cache: L1 = in-process IMemoryCache (fastest, per-instance),
// L2 = IDistributedCache backed by Redis (shared across instances).
// Reads check L1 then L2; L2 hits are promoted into L1 with a short TTL.
public sealed class HybridCacheService : ICacheService
{
    private readonly IMemoryCache _memoryCache;
    private readonly IDistributedCache _distributedCache;
    private readonly ILogger<HybridCacheService> _logger;

    private static readonly TimeSpan DefaultAbsoluteExpiration = TimeSpan.FromMinutes(10);
    private static readonly TimeSpan DefaultSlidingExpiration = TimeSpan.FromMinutes(2);
    // L2 hits live in L1 only briefly so writes made by other instances
    // become visible quickly.
    private static readonly TimeSpan L1PromotionTtl = TimeSpan.FromMinutes(1);

    public HybridCacheService(
        IMemoryCache memoryCache,
        IDistributedCache distributedCache,
        ILogger<HybridCacheService> logger)
    {
        _memoryCache = memoryCache;
        _distributedCache = distributedCache;
        _logger = logger;
    }

    /// <summary>
    /// Returns the value for <paramref name="key"/> from L1, then L2;
    /// null on a miss. A Redis outage degrades to a miss instead of failing
    /// the request (graceful degradation).
    /// </summary>
    public async Task<T?> GetAsync<T>(string key, CancellationToken ct = default) where T : class
    {
        // Try L1 cache (memory)
        if (_memoryCache.TryGetValue(key, out T? cached))
        {
            _logger.LogDebug("Cache hit (L1): {Key}", key);
            return cached;
        }

        // Try L2 cache (Redis)
        string? distributed;
        try
        {
            distributed = await _distributedCache.GetStringAsync(key, ct);
        }
        catch (Exception ex) when (ex is not OperationCanceledException)
        {
            // Redis unreachable: log and treat as a miss so the app keeps working.
            _logger.LogWarning(ex, "Distributed cache read failed: {Key}", key);
            distributed = null;
        }

        if (distributed is not null)
        {
            _logger.LogDebug("Cache hit (L2): {Key}", key);
            var value = JsonSerializer.Deserialize<T>(distributed);

            // Populate L1 cache. Bug fix: the memory cache is registered with a
            // SizeLimit, and entries added without an explicit Size throw
            // InvalidOperationException — the promotion must declare Size.
            if (value is not null)
            {
                _memoryCache.Set(key, value, new MemoryCacheEntryOptions
                {
                    AbsoluteExpirationRelativeToNow = L1PromotionTtl,
                    Size = 1
                });
            }

            return value;
        }

        _logger.LogDebug("Cache miss: {Key}", key);
        return null;
    }

    /// <summary>Writes <paramref name="value"/> to the tiers selected by <paramref name="options"/> (both by default).</summary>
    public async Task SetAsync<T>(string key, T value, CacheOptions? options = null, CancellationToken ct = default) where T : class
    {
        options ??= new CacheOptions();

        var absoluteExpiration = options.AbsoluteExpiration ?? DefaultAbsoluteExpiration;
        var slidingExpiration = options.SlidingExpiration ?? DefaultSlidingExpiration;

        // Set L1 cache
        if (options.Tier is CacheTier.Memory or CacheTier.Both)
        {
            var memoryOptions = new MemoryCacheEntryOptions
            {
                AbsoluteExpirationRelativeToNow = absoluteExpiration,
                SlidingExpiration = slidingExpiration,
                Size = 1 // required: AddMemoryCache configures a SizeLimit
            };
            _memoryCache.Set(key, value, memoryOptions);
        }

        // Set L2 cache. A Redis outage is logged and tolerated; the entry
        // simply is not shared with other instances until Redis recovers.
        if (options.Tier is CacheTier.Distributed or CacheTier.Both)
        {
            var distributedOptions = new DistributedCacheEntryOptions
            {
                AbsoluteExpirationRelativeToNow = absoluteExpiration,
                SlidingExpiration = slidingExpiration
            };

            var json = JsonSerializer.Serialize(value);
            try
            {
                await _distributedCache.SetStringAsync(key, json, distributedOptions, ct);
            }
            catch (Exception ex) when (ex is not OperationCanceledException)
            {
                _logger.LogWarning(ex, "Distributed cache write failed: {Key}", key);
            }
        }
    }

    /// <summary>
    /// Removes the key from both tiers. Redis failures propagate deliberately:
    /// a swallowed invalidation error would leave stale data on other instances.
    /// </summary>
    public async Task RemoveAsync(string key, CancellationToken ct = default)
    {
        _memoryCache.Remove(key);
        await _distributedCache.RemoveAsync(key, ct);
    }

    /// <summary>Cache-aside helper: returns the cached value, or runs <paramref name="factory"/>, caches its result, and returns it.</summary>
    public async Task<T> GetOrCreateAsync<T>(
        string key,
        Func<Task<T>> factory,
        CacheOptions? options = null,
        CancellationToken ct = default) where T : class
    {
        var cached = await GetAsync<T>(key, ct);
        if (cached is not null)
        {
            return cached;
        }

        var value = await factory();
        await SetAsync(key, value, options, ct);
        return value;
    }
}

Cache-Aside Pattern

Repository with Caching

// Infrastructure/Repositories/CachedDocumentRepository.cs
//
// Cache-aside repository for documents: reads go through the cache, writes
// invalidate the affected keys after the database commit.
public sealed class CachedDocumentRepository : IDocumentRepository
{
    private readonly BlueRobinDbContext _context;
    private readonly ICacheService _cache;
    private readonly ILogger<CachedDocumentRepository> _logger;

    public CachedDocumentRepository(
        BlueRobinDbContext context,
        ICacheService cache,
        ILogger<CachedDocumentRepository> logger)
    {
        _context = context;
        _cache = cache;
        _logger = logger;
    }

    /// <summary>
    /// Returns the document (including its Chunks) from the cache, falling
    /// back to the database on a miss. Cached for up to 30 minutes, with a
    /// 5-minute sliding window.
    /// NOTE(review): when the id is unknown, FirstOrDefaultAsync yields null
    /// and that null flows into the cache's SetAsync — confirm the
    /// ICacheService implementation tolerates null values; as written,
    /// negative lookups are effectively uncached, so repeated requests for a
    /// missing id hit the database every time.
    /// </summary>
    public async Task<Document?> GetByIdAsync(DocumentId id, CancellationToken ct = default)
    {
        var cacheKey = CacheKeys.Document(id);
        
        return await _cache.GetOrCreateAsync(
            cacheKey,
            async () =>
            {
                _logger.LogDebug("Loading document from database: {DocumentId}", id);
                return await _context.Documents
                    .Include(d => d.Chunks)
                    .FirstOrDefaultAsync(d => d.Id == id, ct);
            },
            new CacheOptions
            {
                AbsoluteExpiration = TimeSpan.FromMinutes(30),
                SlidingExpiration = TimeSpan.FromMinutes(5)
            },
            ct);
    }

    /// <summary>
    /// Persists the update, then invalidates the document's cache entry and
    /// the owner's document-list entry so subsequent reads see fresh data.
    /// </summary>
    public async Task UpdateAsync(Document document, CancellationToken ct = default)
    {
        _context.Documents.Update(document);
        await _context.SaveChangesAsync(ct);
        
        // Invalidate cache only AFTER a successful commit, so a failed save
        // never evicts still-valid entries.
        await _cache.RemoveAsync(CacheKeys.Document(document.Id), ct);
        await _cache.RemoveAsync(CacheKeys.UserDocuments(document.OwnerId), ct);
    }
}

Cache Key Generator

// Application/Caching/CacheKeys.cs
/// <summary>
/// Single source of truth for cache key strings, so every component derives
/// keys the same way and collisions between features are impossible.
/// Layout: "bluerobin:&lt;area&gt;:&lt;identifier&gt;[:qualifier]".
/// </summary>
public static class CacheKeys
{
    private const string Prefix = "bluerobin";

    /// <summary>Key for a single document entity.</summary>
    public static string Document(DocumentId id)
        => $"{Prefix}:documents:{id.Value}";

    /// <summary>Key for the list of documents owned by a user.</summary>
    public static string UserDocuments(BlueRobinId userId)
        => $"{Prefix}:users:{userId.Value}:documents";

    /// <summary>Key for one page of search results; the query text is hashed.</summary>
    public static string SearchResults(string query, int page)
        => $"{Prefix}:search:{ComputeHash(query)}:page:{page}";

    /// <summary>Key for a user's profile data.</summary>
    public static string UserProfile(BlueRobinId userId)
        => $"{Prefix}:users:{userId.Value}:profile";

    // Short, stable digest (first 64 bits of SHA-256, lowercase hex) keeps
    // arbitrary query text out of Redis key names.
    private static string ComputeHash(string input)
    {
        var digest = SHA256.HashData(Encoding.UTF8.GetBytes(input));
        var hex = Convert.ToHexString(digest);
        return hex.Substring(0, 16).ToLowerInvariant();
    }
}

Output Caching

Response Caching

// Program.cs
// HTTP response (output) caching: a tier in front of the application cache
// that stores whole responses rather than data objects.
builder.Services.AddOutputCache(options =>
{
    // Base policy applied to endpoints that opt into output caching.
    options.AddBasePolicy(builder => builder.Cache());
    
    // Document responses: cached 5 minutes; the "documents" tag lets
    // DocumentUpdatedHandler evict them all in one call.
    options.AddPolicy("Documents", builder => builder
        .Expire(TimeSpan.FromMinutes(5))
        .Tag("documents"));
    
    // Search responses: short TTL, and the entry varies by the query-string
    // parameters that change the result set.
    options.AddPolicy("Search", builder => builder
        .Expire(TimeSpan.FromMinutes(1))
        .SetVaryByQuery("q", "page", "limit")
        .Tag("search"));
});

Endpoint with Output Cache

// Api/Endpoints/Documents/GetDocumentEndpoint.cs
//
// GET /api/documents/{id} — returns one document; successful responses are
// cached by the "Documents" output-cache policy.
public sealed class GetDocumentEndpoint : Endpoint<GetDocumentRequest, DocumentResponse>
{
    // Bug fix: the original referenced _repository without declaring it,
    // so the class could not compile. Injected via constructor.
    private readonly IDocumentRepository _repository;

    public GetDocumentEndpoint(IDocumentRepository repository)
    {
        _repository = repository;
    }

    public override void Configure()
    {
        Get("/api/documents/{id}");
        Options(x => x.WithCachePolicy("Documents"));
    }

    public override async Task HandleAsync(GetDocumentRequest req, CancellationToken ct)
    {
        var document = await _repository.GetByIdAsync(req.Id, ct);

        // Bug fix: the original dereferenced a possibly-null document,
        // throwing NullReferenceException for unknown ids; return 404 instead.
        if (document is null)
        {
            await SendNotFoundAsync(ct);
            return;
        }

        // Response is automatically cached by the output-cache policy.
        await SendAsync(document.ToResponse(), cancellation: ct);
    }
}

Cache Invalidation

Event-Driven Invalidation

// Application/EventHandlers/DocumentUpdatedHandler.cs
//
// Event-driven cache invalidation: whenever a document changes, drop its
// application-cache entries and evict the tagged HTTP responses.
public sealed class DocumentUpdatedHandler : INotificationHandler<DocumentUpdatedEvent>
{
    private readonly ICacheService _cache;
    private readonly IOutputCacheStore _outputCache;
    private readonly ILogger<DocumentUpdatedHandler> _logger;

    public DocumentUpdatedHandler(
        ICacheService cache,
        IOutputCacheStore outputCache,
        ILogger<DocumentUpdatedHandler> logger)
    {
        _cache = cache;
        _outputCache = outputCache;
        _logger = logger;
    }

    /// <summary>Invalidates all cached state touched by the updated document.</summary>
    public async Task Handle(DocumentUpdatedEvent notification, CancellationToken ct)
    {
        _logger.LogInformation("Invalidating cache for document: {DocumentId}", notification.DocumentId);

        // Application-cache keys to drop: the document itself first, then the
        // owner's document-list key.
        var staleKeys = new[]
        {
            CacheKeys.Document(notification.DocumentId),
            CacheKeys.UserDocuments(notification.OwnerId),
        };

        foreach (var staleKey in staleKeys)
        {
            await _cache.RemoveAsync(staleKey, ct);
        }

        // Evict every cached HTTP response carrying the "documents" tag
        // (see the "Documents" output-cache policy) in a single call.
        await _outputCache.EvictByTagAsync("documents", ct);
    }
}

Stampede Protection

// Infrastructure/Caching/StampedeProtectedCache.cs
//
// Decorator that prevents cache stampedes: when a hot key is missing, only
// one caller per key runs the expensive factory; concurrent callers queue on
// a per-key semaphore and then reuse the freshly cached result.
public sealed class StampedeProtectedCache : ICacheService
{
    private readonly ICacheService _inner;
    private readonly ConcurrentDictionary<string, SemaphoreSlim> _locks = new();

    // Bug fix: the original never initialized _inner and left the other
    // ICacheService members unimplemented, so the type could not compile.
    public StampedeProtectedCache(ICacheService inner)
    {
        _inner = inner;
    }

    // Plain reads, writes, and removals need no stampede protection — delegate.
    public Task<T?> GetAsync<T>(string key, CancellationToken ct = default) where T : class
        => _inner.GetAsync<T>(key, ct);

    public Task SetAsync<T>(string key, T value, CacheOptions? options = null, CancellationToken ct = default) where T : class
        => _inner.SetAsync(key, value, options, ct);

    public Task RemoveAsync(string key, CancellationToken ct = default)
        => _inner.RemoveAsync(key, ct);

    /// <summary>
    /// Returns the cached value, or runs <paramref name="factory"/> exactly
    /// once per key across concurrent callers and caches the result.
    /// </summary>
    public async Task<T> GetOrCreateAsync<T>(
        string key,
        Func<Task<T>> factory,
        CacheOptions? options = null,
        CancellationToken ct = default) where T : class
    {
        // Fast path: cache hit, no locking needed.
        var cached = await _inner.GetAsync<T>(key, ct);
        if (cached is not null)
        {
            return cached;
        }

        // Slow path: serialize factory execution per key.
        var lockObj = _locks.GetOrAdd(key, _ => new SemaphoreSlim(1, 1));

        await lockObj.WaitAsync(ct);
        try
        {
            // Double-check after acquiring the lock: another caller may have
            // populated the cache while we were waiting.
            cached = await _inner.GetAsync<T>(key, ct);
            if (cached is not null)
            {
                return cached;
            }

            // Generate and publish the value.
            var value = await factory();
            await _inner.SetAsync(key, value, options, ct);
            return value;
        }
        finally
        {
            lockObj.Release();

            // Best-effort cleanup so the dictionary does not grow unbounded.
            // NOTE(review): there is a benign race here — a caller can briefly
            // grab a semaphore that was just removed, allowing an occasional
            // extra factory run, but never a correctness problem.
            if (lockObj.CurrentCount == 1)
            {
                _locks.TryRemove(key, out _);
            }
        }
    }
}

Monitoring Cache Performance

Cache Metrics

// Infrastructure/Caching/MeteredCacheService.cs
//
// Decorator that records hit/miss counters and read latency for the wrapped
// cache via System.Diagnostics.Metrics. Stack it around HybridCacheService
// (or StampedeProtectedCache).
public sealed class MeteredCacheService : ICacheService
{
    private readonly ICacheService _inner;
    private readonly IMetrics _metrics;

    // Kept in a field so the Meter (and its instruments) live as long as the
    // service instance.
    private readonly Meter _meter;
    private readonly Counter<long> _cacheHits;
    private readonly Counter<long> _cacheMisses;
    private readonly Histogram<double> _cacheLatency;

    public MeteredCacheService(ICacheService inner, IMetrics metrics)
    {
        _inner = inner;
        // NOTE(review): _metrics is retained for signature compatibility but
        // the instruments below use System.Diagnostics.Metrics directly.
        _metrics = metrics;

        _meter = new Meter("BlueRobin.Cache");
        _cacheHits = _meter.CreateCounter<long>("cache_hits");
        _cacheMisses = _meter.CreateCounter<long>("cache_misses");
        _cacheLatency = _meter.CreateHistogram<double>("cache_latency_ms");
    }

    /// <summary>Delegated read that records latency and hit/miss counts.</summary>
    public async Task<T?> GetAsync<T>(string key, CancellationToken ct = default) where T : class
    {
        var sw = Stopwatch.StartNew();

        var result = await _inner.GetAsync<T>(key, ct);

        sw.Stop();
        _cacheLatency.Record(sw.Elapsed.TotalMilliseconds, new KeyValuePair<string, object?>("operation", "get"));

        if (result is not null)
        {
            _cacheHits.Add(1, new KeyValuePair<string, object?>("tier", "any"));
        }
        else
        {
            _cacheMisses.Add(1);
        }

        return result;
    }

    // Bug fix: the original declared ICacheService but implemented only
    // GetAsync, so the type could not compile. The remaining members are
    // delegated (unmetered, matching the original's instrumentation scope).
    public Task SetAsync<T>(string key, T value, CacheOptions? options = null, CancellationToken ct = default) where T : class
        => _inner.SetAsync(key, value, options, ct);

    public Task RemoveAsync(string key, CancellationToken ct = default)
        => _inner.RemoveAsync(key, ct);

    public Task<T> GetOrCreateAsync<T>(string key, Func<Task<T>> factory, CacheOptions? options = null, CancellationToken ct = default) where T : class
        => _inner.GetOrCreateAsync(key, factory, options, ct);
}

Conclusion

Cache Patterns Summary:

| Pattern | Use Case | Invalidation |
| --- | --- | --- |
| Cache-Aside | General purpose, read-heavy | On write/update |
| Output Cache | HTTP responses, public data | By tag or timeout |
| Hybrid (L1/L2) | High throughput, distributed | Both tiers |
| Stampede Protected | High concurrency | Same as inner |

Best Practices

| Practice | Benefit |
| --- | --- |
| Short TTLs initially | Prevents stale data issues |
| Consistent key generation | Avoids cache collisions |
| Graceful degradation | App works if cache fails |
| Metric instrumentation | Visibility into hit rates |
| Tag-based invalidation | Efficient group invalidation |

Effective caching dramatically improves application performance while reducing infrastructure costs through reduced database and API calls.

Further reading: "Caching in .NET" — Microsoft Learn documentation.