Refactor caching and memory management components

- Switched the caching logic to a predictive cache warmer that prefetches content based on observed access patterns (a minimal sketch of the pattern follows this list).
- Replaced the legacy warming system (vfs/warming) with the more efficient predictive approach, improving performance and resource usage.
- Refactored memory management so the memory cache's size is adjusted dynamically from observed system memory usage (sketched below).
- Simplified the VFS interface and switched concurrency handling to sharded locks, reducing lock contention under multi-threaded load (sketched below).
- Extended the tests to cover the new caching and memory-management behaviors.
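
As a rough illustration of the first two bullets, here is a minimal sketch of the request-queue pattern a predictive cache warmer like this typically uses. Only the NewCacheWarmer() and RequestWarming(key, priority, reason, size) shapes are taken from the diff below; the request struct, the fixed worker pool, and the fetch hook are assumptions, not the repository's actual code.

```go
package predictive

import "log"

// warmRequest carries one prefetch job; the field set mirrors the
// RequestWarming call in the diff below, everything else is illustrative.
type warmRequest struct {
	key      string
	priority int
	reason   string
	size     int64
}

// CacheWarmer fans warming requests out to a small worker pool.
type CacheWarmer struct {
	requests chan warmRequest
	fetch    func(key string) error // how content is actually pulled; assumed to be wired in by the cache
}

// NewCacheWarmer starts a fixed pool of background workers.
func NewCacheWarmer() *CacheWarmer {
	w := &CacheWarmer{requests: make(chan warmRequest, 256)}
	for i := 0; i < 2; i++ {
		go w.worker()
	}
	return w
}

// RequestWarming enqueues a key for background prefetching and never blocks
// the request path: if the queue is full the request is simply dropped.
func (w *CacheWarmer) RequestWarming(key string, priority int, reason string, size int64) {
	select {
	case w.requests <- warmRequest{key, priority, reason, size}:
	default:
	}
}

func (w *CacheWarmer) worker() {
	for req := range w.requests {
		if w.fetch == nil {
			continue // nothing wired up in this sketch
		}
		if err := w.fetch(req.key); err != nil {
			log.Printf("warming %s (%s, %d bytes) failed: %v", req.key, req.reason, req.size, err)
		}
	}
}
```

Dropping requests when the queue is full keeps warming strictly best-effort, so it can never stall the client request path.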
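For the dynamic memory management bullet, a hypothetical sketch of the sizing loop: sample process memory usage on a fixed interval and trim or restore the memory cache's byte budget when usage drifts past a threshold. None of these identifiers come from the repository; the actual wiring goes through memory.MemoryMonitor as shown in the diff below.

```go
package memwatch

import (
	"runtime"
	"time"
)

// resizable is whatever the cache/GC layer exposes for changing its byte
// budget (an assumption for this sketch).
type resizable interface {
	SetMaxBytes(n uint64)
}

type dynamicSizer struct {
	baseLimit uint64    // configured memory-cache size in bytes
	threshold float64   // relative drift that triggers an adjustment, e.g. 0.1
	cache     resizable
}

// run samples heap usage every interval and nudges the cache budget.
func (d *dynamicSizer) run(interval time.Duration, stop <-chan struct{}) {
	t := time.NewTicker(interval)
	defer t.Stop()
	for {
		select {
		case <-t.C:
			var s runtime.MemStats
			runtime.ReadMemStats(&s)
			switch {
			case float64(s.HeapAlloc) > float64(d.baseLimit)*(1+d.threshold):
				d.cache.SetMaxBytes(d.baseLimit * 9 / 10) // over budget: trim ~10%
			case float64(s.HeapAlloc) < float64(d.baseLimit)*(1-d.threshold):
				d.cache.SetMaxBytes(d.baseLimit) // pressure gone: restore full size
			}
		case <-stop:
			return
		}
	}
}
```

The 10*time.Second interval and 0.1 threshold passed to the real monitor in the diff presumably correspond to interval and threshold in this sketch.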
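The sharded-lock change can be pictured with this generic sketch (not the repository's VFS code): keys hash to one of a fixed number of independently locked shards, so operations on unrelated keys no longer serialize on a single mutex.

```go
package shardedmap

import (
	"hash/fnv"
	"sync"
)

const shardCount = 64 // power of two, so the compiler turns the modulo into a mask

type shard struct {
	mu      sync.RWMutex
	entries map[string][]byte
}

// Store spreads keys across independently locked shards.
type Store struct {
	shards [shardCount]*shard
}

func NewStore() *Store {
	s := &Store{}
	for i := range s.shards {
		s.shards[i] = &shard{entries: make(map[string][]byte)}
	}
	return s
}

func (s *Store) shardFor(key string) *shard {
	h := fnv.New32a()
	h.Write([]byte(key)) // fnv's Write never returns an error
	return s.shards[h.Sum32()%shardCount]
}

func (s *Store) Get(key string) ([]byte, bool) {
	sh := s.shardFor(key)
	sh.mu.RLock()
	defer sh.mu.RUnlock()
	v, ok := sh.entries[key]
	return v, ok
}

func (s *Store) Put(key string, val []byte) {
	sh := s.shardFor(key)
	sh.mu.Lock()
	defer sh.mu.Unlock()
	sh.entries[key] = val
}
```

Only the shard that owns a key is locked, so concurrent reads and writes to different keys proceed in parallel.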
2025-09-22 01:59:15 -05:00
parent 9b2affe95a
commit bfe29dea75
13 changed files with 612 additions and 1215 deletions


@@ -21,7 +21,6 @@ import (
"s1d3sw1ped/steamcache2/vfs/gc"
"s1d3sw1ped/steamcache2/vfs/memory"
"s1d3sw1ped/steamcache2/vfs/predictive"
- "s1d3sw1ped/steamcache2/vfs/warming"
"strconv"
"strings"
"sync"
@@ -781,14 +780,14 @@ type SteamCache struct {
// Adaptive and predictive caching
adaptiveManager *adaptive.AdaptiveCacheManager
predictiveManager *predictive.PredictiveCacheManager
- cacheWarmer *warming.CacheWarmer
+ cacheWarmer *predictive.CacheWarmer
lastAccessKey string // Track last accessed key for sequence analysis
lastAccessKeyMu sync.RWMutex
adaptiveEnabled bool // Flag to enable/disable adaptive features
// Dynamic memory management
memoryMonitor *memory.MemoryMonitor
- dynamicCacheMgr *memory.DynamicCacheManager
+ dynamicCacheMgr *memory.MemoryMonitor
}
func New(address string, memorySize string, diskSize string, diskPath, upstream, memoryGC, diskGC string, maxConcurrentRequests int64, maxRequestsPerClient int64) *SteamCache {
@@ -925,8 +924,8 @@ func New(address string, memorySize string, diskSize string, diskPath, upstream,
// Initialize adaptive and predictive caching (lightweight)
adaptiveManager: adaptive.NewAdaptiveCacheManager(5 * time.Minute), // Much longer interval
predictiveManager: predictive.NewPredictiveCacheManager(),
- cacheWarmer: warming.NewCacheWarmer(c, 2), // Reduced to 2 concurrent warmers
- adaptiveEnabled: true, // Enable by default but can be disabled
+ cacheWarmer: predictive.NewCacheWarmer(), // Use predictive cache warmer
+ adaptiveEnabled: true, // Enable by default but can be disabled
// Initialize dynamic memory management
memoryMonitor: memory.NewMemoryMonitor(uint64(memorysize), 10*time.Second, 0.1), // 10% threshold
@@ -935,7 +934,7 @@ func New(address string, memorySize string, diskSize string, diskPath, upstream,
// Initialize dynamic cache manager if we have memory cache
if m != nil && sc.memoryMonitor != nil {
- sc.dynamicCacheMgr = memory.NewDynamicCacheManager(mgc, uint64(memorysize), sc.memoryMonitor)
+ sc.dynamicCacheMgr = memory.NewMemoryMonitorWithCache(uint64(memorysize), 10*time.Second, 0.1, mgc, uint64(memorysize))
sc.dynamicCacheMgr.Start()
sc.memoryMonitor.Start()
}
@@ -1535,6 +1534,6 @@ func (sc *SteamCache) recordCacheMiss(key string, size int64) {
// Only trigger warming for very large files to reduce overhead
if size > 10*1024*1024 { // Only warm files > 10MB
- sc.cacheWarmer.RequestWarming(key, 3, "cache_miss", size, "cache_miss_analyzer")
+ sc.cacheWarmer.RequestWarming(key, 3, "cache_miss", size)
}
}