Refactor caching and memory management components
All checks were successful
Release Tag / release (push) Successful in 9s
All checks were successful
Release Tag / release (push) Successful in 9s
- Updated the caching logic to utilize a predictive cache warmer, enhancing content prefetching based on access patterns.
- Replaced the legacy warming system with a more efficient predictive approach, allowing for better performance and resource management.
- Refactored memory management to integrate dynamic cache size adjustments based on system memory usage, improving overall efficiency.
- Simplified the VFS interface and improved concurrency handling with sharded locks for better performance in multi-threaded environments.
- Enhanced tests to validate the new caching and memory management behaviors, ensuring reliability and performance improvements.
This commit is contained in:
66
vfs/lru/lru.go
Normal file
66
vfs/lru/lru.go
Normal file
@@ -0,0 +1,66 @@
|
||||
package lru
|
||||
|
||||
import (
|
||||
"container/list"
|
||||
"s1d3sw1ped/steamcache2/vfs/types"
|
||||
)
|
||||
|
||||
// LRUList represents a least recently used list for cache eviction.
// The front of the internal list is the most recently used entry and the
// back is the least recently used (eviction candidate).
// The zero value is not usable; construct instances with NewLRUList.
// There is no internal locking, so callers must synchronize concurrent access.
type LRUList[T any] struct {
	list *list.List               // recency-ordered entries; front = most recently used
	elem map[string]*list.Element // key -> element, for O(1) lookup into list
}
|
||||
|
||||
// NewLRUList creates a new LRU list
|
||||
func NewLRUList[T any]() *LRUList[T] {
|
||||
return &LRUList[T]{
|
||||
list: list.New(),
|
||||
elem: make(map[string]*list.Element),
|
||||
}
|
||||
}
|
||||
|
||||
// Add adds an item to the front of the LRU list
|
||||
func (l *LRUList[T]) Add(key string, item T) {
|
||||
elem := l.list.PushFront(item)
|
||||
l.elem[key] = elem
|
||||
}
|
||||
|
||||
// MoveToFront moves an item to the front of the LRU list
|
||||
func (l *LRUList[T]) MoveToFront(key string, timeUpdater *types.BatchedTimeUpdate) {
|
||||
if elem, exists := l.elem[key]; exists {
|
||||
l.list.MoveToFront(elem)
|
||||
// Update the FileInfo in the element with new access time
|
||||
if fi, ok := any(elem.Value).(interface {
|
||||
UpdateAccessBatched(*types.BatchedTimeUpdate)
|
||||
}); ok {
|
||||
fi.UpdateAccessBatched(timeUpdater)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Remove removes an item from the LRU list
|
||||
func (l *LRUList[T]) Remove(key string) (T, bool) {
|
||||
if elem, exists := l.elem[key]; exists {
|
||||
delete(l.elem, key)
|
||||
if item, ok := l.list.Remove(elem).(T); ok {
|
||||
return item, true
|
||||
}
|
||||
}
|
||||
var zero T
|
||||
return zero, false
|
||||
}
|
||||
|
||||
// Len returns the number of items in the LRU list
|
||||
func (l *LRUList[T]) Len() int {
|
||||
return l.list.Len()
|
||||
}
|
||||
|
||||
// Back returns the least recently used item (at the back of the list)
|
||||
func (l *LRUList[T]) Back() *list.Element {
|
||||
return l.list.Back()
|
||||
}
|
||||
|
||||
// Front returns the most recently used item (at the front of the list)
|
||||
func (l *LRUList[T]) Front() *list.Element {
|
||||
return l.list.Front()
|
||||
}
|
||||
Reference in New Issue
Block a user