// dmsc/c/cache.rs
1//! Copyright © 2025-2026 Wenze Wei. All Rights Reserved.
2//!
3//! This file is part of DMSC.
4//! The DMSC project belongs to the Dunimd Team.
5//!
6//! Licensed under the Apache License, Version 2.0 (the "License");
7//! You may not use this file except in compliance with the License.
8//! You may obtain a copy of the License at
9//!
10//! http://www.apache.org/licenses/LICENSE-2.0
11//!
12//! Unless required by applicable law or agreed to in writing, software
13//! distributed under the License is distributed on an "AS IS" BASIS,
14//! WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15//! See the License for the specific language governing permissions and
16//! limitations under the License.
17
18//! # Cache Module C API
19//!
20//! This module provides C language bindings for DMSC's caching subsystem. The cache module
21//! delivers high-performance in-memory data caching capabilities for accelerating application
22//! performance, reducing database load, and improving system throughput. This C API enables
23//! C/C++ applications to leverage DMSC's sophisticated caching infrastructure including memory
24//! caching, distributed caching support, and intelligent cache eviction policies.
25//!
26//! ## Module Architecture
27//!
28//! The caching module comprises three primary components:
29//!
30//! - **DMSCCacheConfig**: Configuration container for cache system parameters. Controls cache
31//! size limits, eviction policies, expiration timeouts, and connection settings for
32//! distributed cache backends. The configuration object is essential for initializing
33//! cache managers with appropriate resource limits and behavior characteristics.
34//!
35//! - **DMSCCacheManager**: Central cache management interface providing unified operations
36//! across different cache backends. Handles cache lifecycle, backend selection, and
37//! provides high-level cache operations including get, set, delete, and invalidation.
38//! The cache manager supports automatic serialization of complex types and provides
39//! consistent API regardless of underlying storage implementation.
40//!
41//! - **DMSCMemoryCache**: In-memory cache implementation using concurrent data structures.
42//! Provides thread-safe caching with O(1) average-case operations for read and write.
43//! The memory cache implements sophisticated eviction policies to manage memory usage
44//! and prevent unbounded growth. Ideal for single-instance deployments or as a
45//! local cache tier in multi-level caching architectures.
46//!
47//! ## Cache Strategies
48//!
49//! The caching system implements multiple strategies optimized for different use cases:
50//!
51//! - **LRU (Least Recently Used)**: Evicts least recently accessed items when capacity
52//! is reached. Optimal for workloads with temporal locality where recently accessed
53//! items are likely to be accessed again. Memory-efficient implementation using
54//! linked hash map for O(1) access and eviction.
55//!
56//! - **LFU (Least Frequently Used)**: Evicts items with lowest access frequency.
57//! Suitable for workloads where access frequency correlates with importance.
58//! Maintains frequency counters for eviction decisions. More computationally
59//! expensive than LRU but provides better hit rates for certain access patterns.
60//!
61//! - **TTL-Based Expiration**: Automatic expiration based on time-to-live values.
62//! Each cache entry has associated expiration timestamp. Entries are lazily
63//! removed during access or via background cleanup tasks. Ensures data freshness
64//! for time-sensitive cached content.
65//!
66//! - **Write-Through/Write-Behind**: Cache synchronization strategies for persistent
67//! backends. Write-through updates cache and backend simultaneously. Write-behind
68//! queues writes for batch processing improving write throughput.
69//!
70//! ## Memory Management
71//!
72//! All C API objects use opaque pointers with manual memory management responsibilities:
73//!
74//! - Objects must be allocated using constructor functions
75//! - Destructor functions must be called to release memory
76//! - Null pointer checks required before all operations
77//! - Double-free prevention is caller's responsibility
78//!
79//! ## Thread Safety
80//!
81//! All underlying implementations provide thread-safe concurrent access:
82//!
83//! - Memory cache uses fine-grained locking or lock-free data structures
84//! - Operations achieve high throughput under concurrent load
85//! - C API itself requires external synchronization for multi-threaded access
86//!
87//! ## Performance Characteristics
88//!
89//! Cache operations have the following performance profiles:
90//!
91//! - Cache hit (memory): O(1) average, O(n) worst case for hash collisions
92//! - Cache miss: O(1) plus backend fetch latency
93//! - Cache write: O(1) amortized
94//! - Eviction: O(1) for LRU, O(log n) for LFU
95//!
96//! ## Integration with Distributed Systems
97//!
98//! The cache module supports integration with distributed cache backends:
99//!
100//! - Redis cluster support for horizontal scaling
101//! - Memcached protocol compatibility
102//! - Consistent hashing for distribution
103//! - Automatic failover and replication
104//!
105//! ## Usage Example
106//!
107//! ```c
108//! // Create cache configuration
109//! DMSCCacheConfig* config = dmsc_cache_config_new();
110//! dmsc_cache_config_set_max_size(config, 10000);
111//! dmsc_cache_config_set_ttl(config, 3600);
112//!
113//! // Create memory cache instance
114//! DMSCMemoryCache* cache = dmsc_memory_cache_new();
115//!
116//! // Store cached value
117//! const char* key = "user:12345";
118//! const char* value = "{\"name\":\"John\",\"age\":30}";
119//! dmsc_memory_cache_set(cache, key, value, strlen(value));
120//!
121//! // Retrieve cached value
122//! size_t value_len;
123//! char* cached = dmsc_memory_cache_get(cache, key, &value_len);
124//! if (cached != NULL) {
125//! // Process cached data
126//! free(cached);
127//! }
128//!
129//! // Cleanup
130//! dmsc_memory_cache_free(cache);
131//! dmsc_cache_config_free(config);
132//! ```
133//!
134//! ## Dependencies
135//!
136//! This module depends on the following DMSC components:
137//!
138//! - `crate::cache`: Rust cache implementation
139//! - `crate::prelude`: Common types and traits
140//!
141//! ## Feature Flags
142//!
143//! The cache module is enabled by default with the "cache" feature flag.
144//! Disable this feature to reduce binary size when caching is not required.
145
146use crate::cache::{DMSCCacheConfig, DMSCCacheManager, DMSCMemoryCache};
147
// Opaque C-side wrapper types for the Rust cache objects. Each `c_wrapper!`
// invocation appears to generate a struct holding the wrapped Rust value plus
// an associated `new` constructor (used below) — see the macro definition to
// confirm the exact expansion.
c_wrapper!(CDMSCCacheConfig, DMSCCacheConfig);

c_wrapper!(CDMSCCacheManager, DMSCCacheManager);

c_wrapper!(CDMSCMemoryCache, DMSCMemoryCache);

// Generates `dmsc_cache_config_new`: allocates a `CDMSCCacheConfig` wrapping a
// default-initialized `DMSCCacheConfig` and returns it as an owned raw pointer
// for C callers. Must be paired with `dmsc_cache_config_free`.
c_constructor!(dmsc_cache_config_new, CDMSCCacheConfig, DMSCCacheConfig, DMSCCacheConfig::default());

// Generates `dmsc_cache_config_free`: releases a config previously returned by
// `dmsc_cache_config_new`. NOTE(review): NULL-pointer and double-free handling
// depend on the `c_destructor!` expansion — confirm against the macro.
c_destructor!(dmsc_cache_config_free, CDMSCCacheConfig);
157
158/// Creates a new DMSCMemoryCache instance.
159///
160/// Initializes an empty in-memory cache with default configuration. The cache
161/// starts empty and grows as entries are added. Memory usage is managed automatically
162/// through eviction policies.
163///
164/// # Returns
165///
166/// Pointer to newly allocated DMSCMemoryCache on success, or NULL if memory
167/// allocation fails. The returned pointer must be freed using dmsc_memory_cache_free().
168///
169/// # Initial State
170///
171/// A newly created memory cache:
172///
173/// - Contains zero entries
174/// - Has no memory usage
175/// - Uses default LRU eviction
176/// - No maximum capacity enforcement until configured
177///
178/// # Usage Pattern
179///
180/// ```c
181/// DMSCMemoryCache* cache = dmsc_memory_cache_new();
182/// if (cache == NULL) {
183/// // Handle allocation failure
184/// return ERROR_MEMORY_ALLOCATION;
185/// }
186///
187/// // Configure capacity if needed
188/// dmsc_memory_cache_set_max_size(cache, 100000);
189///
190/// // Use cache operations
191/// dmsc_memory_cache_set(cache, "key", "value", 5);
192/// char* value = dmsc_memory_cache_get(cache, "key", NULL);
193///
194/// // Cleanup
195/// dmsc_memory_cache_free(cache);
196/// ```
197///
198/// # Performance Considerations
199///
200/// For optimal performance:
201///
202/// - Configure capacity before heavy usage
203/// - Batch similar operations together
204/// - Use appropriate serialization format
205/// - Monitor cache hit rate for tuning
206#[no_mangle]
207pub extern "C" fn dmsc_memory_cache_new() -> *mut CDMSCMemoryCache {
208 let cache = DMSCMemoryCache::new();
209 Box::into_raw(Box::new(CDMSCMemoryCache::new(cache)))
210}
211
// Generates `dmsc_memory_cache_free`: releases a cache previously returned by
// `dmsc_memory_cache_new`, reclaiming the Box leaked by that constructor.
// NOTE(review): NULL-pointer and double-free semantics depend on the
// `c_destructor!` expansion — confirm against the macro definition.
c_destructor!(dmsc_memory_cache_free, CDMSCMemoryCache);