Boost Cache
Library
Boost Cache
Overview
Boost Cache refers to the caching functionality provided as part of the Boost libraries. It is designed with an emphasis on robustness and standardized APIs, providing reliable caching solutions for enterprise-level C++ development.
Details
Boost Cache is a caching component provided within the C++ Boost ecosystem. It is primarily implemented as the OpenCL program cache (program_cache) in the Boost.Compute library, used to mitigate the overhead of OpenCL's runtime kernel-compilation model. Program objects are persistently stored and retrieved based on user-defined cache keys and build options, with compilation executed only on first use. It is also possible to implement MRU (Most Recently Used) and LFU (Least Frequently Used) caches using the Boost.MultiIndex library. The consistent design within the Boost ecosystem enables easy integration with other Boost libraries, and the library has a stable adoption record in enterprise development.
Advantages and Disadvantages
Advantages
- Boost Ecosystem Integration: Consistency and compatibility with other Boost libraries
- Standardized API: Stable interface based on Boost design principles
- Robustness Focus: Reliability assurance at enterprise level
- Persistence Support: Persistent storage functionality in program cache
- Rich Documentation: Comprehensive documentation as Boost library
- Proven Track Record: Long-term usage history in Boost community
- OpenCL Optimization: Efficient program caching for GPU computing
Disadvantages
- Limited Functionality: Limited features as general-purpose cache library
- Boost Dependency: Requires dependency on entire Boost library
- Performance: May underperform compared to specialized cache libraries
- Complexity: Over-engineered for simple use cases
- Size: Increased binary size from introducing entire Boost library
Key Links
- Boost Official Website
- Boost.Compute program_cache
- Boost Software License
- Boost Documentation
- Boost.MultiIndex
- OpenCL Optimization Guide
Code Examples
Boost.Compute Program Cache Basic Usage
#include <boost/compute.hpp>
#include <boost/compute/utility/program_cache.hpp>

namespace compute = boost::compute;

// Demonstrates Boost.Compute's program cache: a kernel is compiled once
// per cache key and the compiled program is reused on later lookups.
int main() {
    // Get OpenCL device and context
    compute::device device = compute::system::default_device();
    compute::context context(device);
    compute::command_queue queue(context, device);

    // program_cache is not constructible from a context; the library
    // provides a process-wide, per-context cache via get_global_cache(),
    // which returns a boost::shared_ptr<program_cache>.
    boost::shared_ptr<compute::program_cache> cache =
        compute::program_cache::get_global_cache(context);

    // OpenCL kernel source code
    const char source[] = R"(
__kernel void vector_add(__global const float* a,
__global const float* b,
__global float* result)
{
int i = get_global_id(0);
result[i] = a[i] + b[i];
}
)";

    // get_or_build(key, options, source, context): compiles the program
    // on first use, returns the cached program afterwards.  The second
    // argument is the compiler-options string (empty: default options).
    compute::program program = cache->get_or_build(
        "vector_add_program",   // Cache key
        std::string(),          // Build options (none)
        std::string(source),    // Source code
        context
    );

    // Create kernel from the (possibly cached) program
    compute::kernel kernel(program, "vector_add");
    return 0;
}
LRU Cache using Boost.MultiIndex
#include <boost/multi_index_container.hpp>
#include <boost/multi_index/ordered_index.hpp>
#include <boost/multi_index/sequenced_index.hpp>
#include <boost/multi_index/member.hpp>
#include <iostream>
#include <string>
using namespace boost::multi_index;
// Cache entry structure
// One key/value pair held by the LRU cache.
struct CacheEntry {
    std::string key;
    int value;

    CacheEntry(const std::string& cache_key, int cached_value)
        : key(cache_key), value(cached_value) {}
};
// MultiIndex container definition
// Container providing two views over the same entries:
//   index 0 (sequenced):      least- to most-recently-used order,
//                             used for LRU eviction
//   index 1 (ordered_unique): O(log n) lookup by key
// C++11 `using` alias preferred over `typedef` for readability.
using LRUCache = multi_index_container<
    CacheEntry,
    indexed_by<
        // Sequential index for access order
        sequenced<>,
        // Ordered index for key search
        ordered_unique<
            member<CacheEntry, std::string, &CacheEntry::key>
        >
    >
>;
// Fixed-capacity LRU cache built on Boost.MultiIndex.
// The sequenced index (0) keeps entries in least- to most-recently-used
// order; the ordered index (1) provides O(log n) lookup by key.
class SimpleLRUCache {
private:
    LRUCache cache_;
    std::size_t max_size_;
public:
    // `explicit` avoids accidental implicit conversion from an integer.
    explicit SimpleLRUCache(std::size_t max_size) : max_size_(max_size) {}

    // Look up `key`; on a hit, copy its value into `value`, promote the
    // entry to most-recently-used and return true.  Returns false on miss.
    bool get(const std::string& key, int& value) {
        auto& key_index = cache_.get<1>();
        auto it = key_index.find(key);
        if (it == key_index.end()) {
            return false; // Not found
        }
        value = it->value;
        // Splice the entry to the back of the sequenced index (MRU slot).
        cache_.relocate(cache_.end(), cache_.project<0>(it));
        return true;
    }

    // Insert or overwrite `key`.  Updating an existing key promotes it to
    // most-recently-used; inserting into a full cache evicts the LRU entry.
    void put(const std::string& key, int value) {
        if (max_size_ == 0) {
            return; // Zero capacity: guard against pop_front() on empty
        }
        auto& key_index = cache_.get<1>();
        auto it = key_index.find(key);
        if (it != key_index.end()) {
            // modify() is required (not direct assignment): elements are
            // const when accessed through multi_index iterators.
            key_index.modify(it, [value](CacheEntry& entry) {
                entry.value = value;
            });
            cache_.relocate(cache_.end(), cache_.project<0>(it));
        } else {
            if (cache_.size() >= max_size_) {
                // Front of the sequenced index is the least recently used.
                cache_.pop_front();
            }
            // Construct in place instead of copying a temporary.
            cache_.emplace_back(key, value);
        }
    }

    // Current number of cached entries.
    std::size_t size() const { return cache_.size(); }
};
int main() {
    SimpleLRUCache cache(3); // capacity of three entries

    cache.put("key1", 100);
    cache.put("key2", 200);
    cache.put("key3", 300);

    int fetched = 0;
    if (cache.get("key1", fetched)) {
        std::cout << "key1: " << fetched << std::endl;
    }

    // key1 was just touched, so key2 is now the oldest entry; inserting
    // key4 pushes the cache over capacity and evicts key2.
    cache.put("key4", 400);
    return 0;
}
Custom Cache Policy
#include <boost/multi_index_container.hpp>
#include <boost/multi_index/ordered_index.hpp>
#include <boost/multi_index/member.hpp>
#include <chrono>
#include <string>
using namespace boost::multi_index;
// Cache entry with timestamp
// A cached key/value pair stamped with its insertion time.
struct TimedCacheEntry {
    std::string key;
    std::string value;
    std::chrono::steady_clock::time_point timestamp;

    TimedCacheEntry(const std::string& entry_key, const std::string& entry_value)
        : key(entry_key),
          value(entry_value),
          timestamp(std::chrono::steady_clock::now()) {}
};
// TTL-enabled cache
// Two views over the same timed entries:
//   index 0 (ordered_unique):     O(log n) lookup by key
//   index 1 (ordered_non_unique): entries sorted by insertion time, so
//                                 expired entries form an erasable prefix
// C++11 `using` alias preferred over `typedef` for readability.
using TTLCache = multi_index_container<
    TimedCacheEntry,
    indexed_by<
        // For key search
        ordered_unique<
            member<TimedCacheEntry, std::string, &TimedCacheEntry::key>
        >,
        // For timestamp search
        ordered_non_unique<
            member<TimedCacheEntry, std::chrono::steady_clock::time_point,
                   &TimedCacheEntry::timestamp>
        >
    >
>;
// Cache whose entries expire a fixed time-to-live after insertion.
// Expired entries are lazily purged on every put()/get() call.
class TimeBasedCache {
private:
    TTLCache cache_;
    std::chrono::seconds ttl_;
public:
    // `explicit` avoids accidental implicit construction from a duration.
    explicit TimeBasedCache(std::chrono::seconds ttl) : ttl_(ttl) {}

    // Insert or replace `key`, refreshing its timestamp.
    void put(const std::string& key, const std::string& value) {
        cleanup_expired();
        auto& key_index = cache_.get<0>();
        auto it = key_index.find(key);
        if (it != key_index.end()) {
            // Erase rather than modify so the reinserted entry gets a
            // fresh timestamp from the TimedCacheEntry constructor.
            key_index.erase(it);
        }
        // Construct the entry in place (avoids a temporary copy).
        cache_.emplace(key, value);
    }

    // Look up `key`; copies its value into `value` and returns true when
    // present and not expired, false otherwise.
    bool get(const std::string& key, std::string& value) {
        cleanup_expired();
        auto& key_index = cache_.get<0>();
        auto it = key_index.find(key);
        if (it == key_index.end()) {
            return false;
        }
        value = it->value;
        return true;
    }
private:
    // Drop every entry whose timestamp is at or before (now - ttl_).
    // The timestamp index is ordered, so expired entries are a prefix.
    void cleanup_expired() {
        auto now = std::chrono::steady_clock::now();
        auto& time_index = cache_.get<1>();
        auto expired_end = time_index.upper_bound(now - ttl_);
        time_index.erase(time_index.begin(), expired_end);
    }
};
Cache using Boost.Bimap
#include <boost/bimap.hpp>
#include <boost/bimap/list_of.hpp>
#include <boost/bimap/unordered_set_of.hpp>
#include <string>
#include <iostream>
namespace bm = boost::bimaps;
// LRU cache built on Boost.Bimap: the left (unordered_set_of) view gives
// O(1) average lookup by key, while the right (list_of) view preserves
// insertion order, so the least-recently-used relation sits at the front.
class BiMapCache {
private:
    // C++11 `using` alias preferred over `typedef` for readability.
    using CacheBiMap = boost::bimap<
        bm::unordered_set_of<std::string>, // Key
        bm::list_of<std::string>           // Value (LRU order)
    >;
    CacheBiMap cache_;
    std::size_t max_size_;
public:
    // `explicit` avoids accidental implicit conversion from an integer.
    explicit BiMapCache(std::size_t max_size) : max_size_(max_size) {}

    // Insert or overwrite `key`.  Reinsertion moves the relation to the
    // MRU end of the list view; inserting into a full cache evicts the
    // least recently used relation.
    void put(const std::string& key, const std::string& value) {
        if (max_size_ == 0) {
            return; // Zero capacity: guard against erasing from empty view
        }
        auto it = cache_.left.find(key);
        if (it != cache_.left.end()) {
            // Existing key: remove so the reinsert below appends it to
            // the MRU end of the right-hand list view.
            cache_.left.erase(it);
        } else if (cache_.size() >= max_size_) {
            // Front of the list view is the least recently used relation.
            cache_.right.erase(cache_.right.begin());
        }
        cache_.left.insert({key, value});
    }

    // Look up `key`; copies its value into `value`, promotes the entry to
    // most-recently-used and returns true.  Returns false on miss.
    bool get(const std::string& key, std::string& value) {
        auto it = cache_.left.find(key);
        if (it == cache_.left.end()) {
            return false;
        }
        value = it->second;
        // Promote to MRU by erase + reinsert.  `value` already holds a
        // copy, so no extra temporary is needed after the erase.
        cache_.left.erase(it);
        cache_.left.insert({key, value});
        return true;
    }

    // Current number of cached entries.
    std::size_t size() const { return cache_.size(); }
};
int main() {
    BiMapCache cache(3);

    cache.put("name", "John");
    cache.put("age", "30");
    cache.put("city", "Tokyo");

    std::string fetched;
    if (cache.get("name", fetched)) {
        std::cout << "Name: " << fetched << std::endl;
    }
    return 0;
}