Android LruCache
Library
Overview
Android LruCache is an in-memory cache built into the Android platform that evicts entries using the LRU (Least Recently Used) algorithm.
Details
Android LruCache is a class designed for efficient in-memory caching in Android application development. It implements the LRU (Least Recently Used) algorithm and automatically evicts the least recently used entries once the cache reaches its size limit. It was introduced in API level 12 (Android 3.1), and a compatible version is available in the AndroidX Collection library for earlier versions. It is primarily used for Bitmap caching and API response caching, providing fast data access while keeping memory usage under control. The implementation is thread-safe, so it can be used from multiple threads, and custom size calculation and eviction handling can be added by overriding sizeOf() and entryRemoved(). Combined with proper, lifecycle-aware memory management, it improves application performance and helps avoid OutOfMemoryError.
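As a minimal sketch of the core API (the cache name, key type, and 100-entry capacity below are illustrative assumptions), a cache sized by entry count only needs a key type, a value type, and a maximum size:
import android.util.LruCache

// Minimal sketch: a cache that holds at most 100 entries
// (the default sizeOf() counts every entry as 1).
val titleCache = LruCache<Int, String>(100)

fun cacheTitleExample() {
    titleCache.put(1, "Home")               // insert
    val title: String? = titleCache.get(1)  // "Home"; also marks the entry as recently used
    titleCache.remove(1)                    // explicit removal
}
Because the default sizeOf() returns 1 for every entry, maxSize here means "number of entries"; the examples below override sizeOf() to budget in kilobytes or bytes instead.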
Pros and Cons
Pros
- Platform Standard: Part of Android standard library with no additional dependencies
- Automatic Memory Management: Efficient memory usage through LRU algorithm
- Thread-Safe: Safe operations in multi-threaded environments
- Customizable: Customizable size calculation and deletion policies
- Lightweight: High performance with minimal overhead
- Compatibility: Available on older API levels through the AndroidX Collection version (see the sketch after this list)
- Garbage Collection Efficiency: More efficient memory management than weak references
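The following sketch, assuming the androidx.collection:collection-ktx artifact is on the classpath, shows the AndroidX variant mentioned under Compatibility; its lruCache builder takes the same sizing and eviction hooks as the platform class (the cache name and KB budget are assumptions):
import android.graphics.Bitmap
import androidx.collection.lruCache

// Sketch using the AndroidX variant (androidx.collection:collection-ktx),
// which works on all API levels. The 8 MB budget, expressed in KB, is an
// assumed value for illustration.
val bitmapCache = lruCache<String, Bitmap>(
    maxSize = 8 * 1024,                               // budget in KB
    sizeOf = { _, bitmap -> bitmap.byteCount / 1024 } // per-entry size in KB
)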
Cons
- Android Only: Cannot be used on platforms other than Android
- Memory Only: Data is lost when app terminates as it's not persisted
- Size Limitations: Cache size limited by memory constraints
- Configuration Complexity: Choosing a proper size requires analyzing the app's memory budget (see the sizing sketch after this list)
- No Null Support: Cannot use null for keys and values
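One common sizing heuristic (the 1/8 fraction and the function name below are assumptions, not a fixed rule) is to derive the cache budget from the per-app memory class reported by ActivityManager:
import android.app.ActivityManager
import android.content.Context
import android.graphics.Bitmap
import android.util.LruCache

// Sketch: derive the cache budget from the per-app memory class.
// The 1/8 fraction is a heuristic assumption, not a fixed rule.
fun createBitmapCache(context: Context): LruCache<String, Bitmap> {
    val activityManager =
        context.getSystemService(Context.ACTIVITY_SERVICE) as ActivityManager
    val memoryClassKb = activityManager.memoryClass * 1024  // memoryClass is in MB
    val cacheSizeKb = memoryClassKb / 8
    return object : LruCache<String, Bitmap>(cacheSizeKb) {
        override fun sizeOf(key: String, bitmap: Bitmap): Int = bitmap.byteCount / 1024
    }
}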
Key Links
- LruCache Android API
- Android Memory Management Guide
- AndroidX Collection
- Android Performance Tips
- DiskLruCache
Code Examples
Basic LruCache Usage
// Java
import android.graphics.Bitmap;
import android.util.LruCache;

public class ImageCache {
    private LruCache<String, Bitmap> memoryCache;

    public ImageCache() {
        // Use 1/8 of available memory (in KB) for the cache
        final int maxMemory = (int) (Runtime.getRuntime().maxMemory() / 1024);
        final int cacheSize = maxMemory / 8;
        memoryCache = new LruCache<String, Bitmap>(cacheSize) {
            @Override
            protected int sizeOf(String key, Bitmap bitmap) {
                // Return bitmap size in KB so entries and maxSize use the same unit
                return bitmap.getByteCount() / 1024;
            }
        };
    }

    public void addBitmapToMemoryCache(String key, Bitmap bitmap) {
        if (getBitmapFromMemCache(key) == null) {
            memoryCache.put(key, bitmap);
        }
    }

    public Bitmap getBitmapFromMemCache(String key) {
        return memoryCache.get(key);
    }
}
Kotlin Usage Example
import android.util.LruCache
import android.graphics.Bitmap

class ImageCacheKotlin {
    private val memoryCache: LruCache<String, Bitmap>

    init {
        // Use 1/8 of available memory for cache
        val maxMemory = (Runtime.getRuntime().maxMemory() / 1024).toInt()
        val cacheSize = maxMemory / 8
        memoryCache = object : LruCache<String, Bitmap>(cacheSize) {
            override fun sizeOf(key: String, bitmap: Bitmap): Int {
                // Return bitmap size in KB
                return bitmap.byteCount / 1024
            }

            override fun entryRemoved(
                evicted: Boolean,
                key: String,
                oldValue: Bitmap,
                newValue: Bitmap?
            ) {
                // Process when entry is removed
                if (evicted) {
                    println("Cache entry removed: $key")
                }
            }
        }
    }

    fun addBitmapToMemoryCache(key: String, bitmap: Bitmap) {
        if (getBitmapFromMemCache(key) == null) {
            memoryCache.put(key, bitmap)
        }
    }

    fun getBitmapFromMemCache(key: String): Bitmap? {
        return memoryCache.get(key)
    }

    fun clearCache() {
        memoryCache.evictAll()
    }

    fun getCacheInfo(): String {
        return "Size: ${memoryCache.size()}, " +
            "MaxSize: ${memoryCache.maxSize()}, " +
            "HitCount: ${memoryCache.hitCount()}, " +
            "MissCount: ${memoryCache.missCount()}"
    }
}
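A brief usage sketch of ImageCacheKotlin (the file-path key and helper name are assumptions), showing the check-cache-then-decode flow:
import android.graphics.Bitmap
import android.graphics.BitmapFactory

// Hypothetical usage: check the cache before decoding from disk, then store
// the decoded bitmap for later lookups. The path-as-key choice is an assumption.
val imageCache = ImageCacheKotlin()

fun loadThumbnail(path: String): Bitmap? {
    val cached = imageCache.getBitmapFromMemCache(path)
    if (cached != null) return cached

    val decoded: Bitmap? = BitmapFactory.decodeFile(path)
    decoded?.let { imageCache.addBitmapToMemoryCache(path, it) }
    return decoded
}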
API Response Caching
import android.util.LruCache

data class ApiResponse(
    val data: String,
    val timestamp: Long
) {
    fun isExpired(timeoutMs: Long = 300_000): Boolean {
        return System.currentTimeMillis() - timestamp > timeoutMs
    }
}

class ApiCache {
    // The budget is measured in characters because sizeOf() counts key and
    // payload lengths; this is roughly 256 KB of cached text.
    private val cache = object : LruCache<String, ApiResponse>(256 * 1024) {
        override fun sizeOf(key: String, value: ApiResponse): Int {
            return key.length + value.data.length
        }
    }

    fun get(key: String): ApiResponse? {
        val response = cache.get(key)
        return if (response?.isExpired() == true) {
            // Drop stale entries lazily on read
            cache.remove(key)
            null
        } else {
            response
        }
    }

    fun put(key: String, data: String) {
        val response = ApiResponse(data, System.currentTimeMillis())
        cache.put(key, response)
    }

    fun invalidate(key: String) {
        cache.remove(key)
    }
}
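A read-through usage sketch on top of ApiCache; UserRepository and fetchFromNetwork() are hypothetical placeholders for illustration:
import kotlinx.coroutines.Dispatchers
import kotlinx.coroutines.withContext

// Read-through sketch: serve from the cache when fresh, otherwise refetch.
// fetchFromNetwork() is a hypothetical stand-in for a real HTTP client call.
class UserRepository(private val apiCache: ApiCache) {
    suspend fun getUserJson(userId: String): String {
        apiCache.get("user/$userId")?.let { return it.data }
        val fresh = withContext(Dispatchers.IO) { fetchFromNetwork(userId) }
        apiCache.put("user/$userId", fresh)
        return fresh
    }

    private suspend fun fetchFromNetwork(userId: String): String {
        // Placeholder; a real implementation would call an HTTP client here
        return """{"id":"$userId"}"""
    }
}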
Image Loading and Cache Integration
import android.graphics.Bitmap
import android.graphics.BitmapFactory
import android.util.LruCache
import kotlinx.coroutines.*
import java.net.URL

class ImageLoader(private val scope: CoroutineScope) {
    private val imageCache = object : LruCache<String, Bitmap>(
        // 20 MB cache budget, measured in bytes to match sizeOf() below
        20 * 1024 * 1024
    ) {
        override fun sizeOf(key: String, bitmap: Bitmap): Int {
            return bitmap.byteCount
        }

        override fun entryRemoved(
            evicted: Boolean,
            key: String,
            oldValue: Bitmap,
            newValue: Bitmap?
        ) {
            // Recycle evicted bitmaps; only safe if they are no longer displayed
            if (evicted && !oldValue.isRecycled) {
                oldValue.recycle()
            }
        }
    }

    suspend fun loadImage(url: String): Bitmap? {
        // Try to get from cache
        imageCache.get(url)?.let { return it }
        // Fetch from network
        return withContext(Dispatchers.IO) {
            try {
                val bitmap = downloadBitmap(url)
                bitmap?.let { imageCache.put(url, it) }
                bitmap
            } catch (e: Exception) {
                null
            }
        }
    }

    private fun downloadBitmap(url: String): Bitmap? {
        // Actual image download; must run off the main thread
        return BitmapFactory.decodeStream(URL(url).openConnection().inputStream)
    }

    fun preloadImages(urls: List<String>) {
        scope.launch {
            urls.forEach { url ->
                if (imageCache.get(url) == null) {
                    loadImage(url)
                }
            }
        }
    }

    fun clearMemoryCache() {
        imageCache.evictAll()
    }

    fun getMemoryUsage(): String {
        val currentSize = imageCache.size()
        val maxSize = imageCache.maxSize()
        val usagePercent = (currentSize.toFloat() / maxSize * 100).toInt()
        return "Memory cache: ${currentSize / 1024 / 1024}MB / ${maxSize / 1024 / 1024}MB ($usagePercent%)"
    }
}
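A wiring sketch for ImageLoader (the ViewModel name is an assumption; viewModelScope comes from androidx.lifecycle:lifecycle-viewmodel-ktx), so preloads are cancelled together with the screen that requested them:
import androidx.lifecycle.ViewModel
import androidx.lifecycle.viewModelScope

// Sketch: own the loader with a lifecycle-aware scope so preload coroutines
// are cancelled when the ViewModel is cleared. Class name is an assumption.
class GalleryViewModel : ViewModel() {
    private val imageLoader = ImageLoader(viewModelScope)

    fun preload(urls: List<String>) {
        imageLoader.preloadImages(urls)
    }

    suspend fun image(url: String) = imageLoader.loadImage(url)
}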
Custom Object Caching
import android.graphics.Bitmap
import android.util.LruCache

data class UserProfile(
    val id: String,
    val name: String,
    val avatar: Bitmap?,
    val lastUpdated: Long
) {
    fun getEstimatedSize(): Int {
        // Rough byte estimate: 2 bytes per character plus the avatar's byte count
        return id.length * 2 + name.length * 2 + (avatar?.byteCount ?: 0)
    }
}

class UserProfileCache {
    private val cache = object : LruCache<String, UserProfile>(
        // Roughly 4 MB budget, measured in bytes to match getEstimatedSize()
        4 * 1024 * 1024
    ) {
        override fun sizeOf(key: String, value: UserProfile): Int {
            return value.getEstimatedSize()
        }

        override fun entryRemoved(
            evicted: Boolean,
            key: String,
            oldValue: UserProfile,
            newValue: UserProfile?
        ) {
            if (evicted) {
                // Recycle the avatar bitmap; only safe if it is no longer displayed
                oldValue.avatar?.let { bitmap ->
                    if (!bitmap.isRecycled) {
                        bitmap.recycle()
                    }
                }
            }
        }
    }

    fun getProfile(userId: String): UserProfile? {
        return cache.get(userId)
    }

    fun cacheProfile(profile: UserProfile) {
        cache.put(profile.id, profile)
    }

    fun updateProfile(userId: String, updater: (UserProfile) -> UserProfile) {
        cache.get(userId)?.let { currentProfile ->
            val updatedProfile = updater(currentProfile)
            cache.put(userId, updatedProfile)
        }
    }

    fun removeProfile(userId: String) {
        cache.remove(userId)
    }

    fun getStatistics(): Map<String, Any> {
        val requests = cache.hitCount() + cache.missCount()
        return mapOf(
            "currentSize" to cache.size(),
            "maxSize" to cache.maxSize(),
            "hitCount" to cache.hitCount(),
            "missCount" to cache.missCount(),
            // Guard against division by zero when nothing has been requested yet
            "hitRate" to (if (requests == 0) 0f else cache.hitCount().toFloat() / requests * 100)
        )
    }
}
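A short usage sketch of updateProfile (the user ID and new name are assumptions), updating a cached profile's display name without touching its avatar:
// Hypothetical usage: rename a cached user in place via the updater lambda.
val profileCache = UserProfileCache()

fun renameUser(userId: String, newName: String) {
    profileCache.updateProfile(userId) { current ->
        current.copy(name = newName, lastUpdated = System.currentTimeMillis())
    }
}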
Dynamic Size Adjustment During Memory Pressure
import android.content.ComponentCallbacks2
import android.util.LruCache

class AdaptiveLruCache<K, V>(maxSize: Int) : LruCache<K, V>(maxSize) {
    private val baseCacheSize: Int = maxSize
    private var currentCacheSize: Int = maxSize

    fun onTrimMemory(level: Int) {
        when (level) {
            ComponentCallbacks2.TRIM_MEMORY_RUNNING_MODERATE -> {
                // Reduce the cache budget to 75%
                adjustCacheSize(0.75f)
            }
            ComponentCallbacks2.TRIM_MEMORY_RUNNING_LOW -> {
                // Reduce the cache budget to 50%
                adjustCacheSize(0.5f)
            }
            ComponentCallbacks2.TRIM_MEMORY_RUNNING_CRITICAL -> {
                // Memory is critically low: clear the cache entirely
                evictAll()
            }
            ComponentCallbacks2.TRIM_MEMORY_UI_HIDDEN -> {
                // Reduce to 60% when the UI is hidden
                adjustCacheSize(0.6f)
            }
        }
    }

    private fun adjustCacheSize(factor: Float) {
        val newSize = (baseCacheSize * factor).toInt().coerceAtLeast(1)
        if (newSize != currentCacheSize) {
            currentCacheSize = newSize
            // resize() (API 21+, or androidx.collection.LruCache on older levels)
            // trims the cache and evicts the eldest entries as needed.
            resize(newSize)
        }
    }
}
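A wiring sketch (the Application subclass name and 200-entry budget are assumptions): Application already receives ComponentCallbacks2 callbacks, so forwarding onTrimMemory to the adaptive cache lets it react to memory pressure:
import android.app.Application
import android.graphics.Bitmap

// Sketch: forward framework memory-pressure callbacks to the adaptive cache.
// The class name and the 200-entry budget are assumptions for illustration.
class CachingApplication : Application() {
    val thumbnailCache = AdaptiveLruCache<String, Bitmap>(200)

    override fun onTrimMemory(level: Int) {
        super.onTrimMemory(level)
        thumbnailCache.onTrimMemory(level)
    }
}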