// dmsc/cache/core.rs

1//! Copyright © 2025-2026 Wenze Wei. All Rights Reserved.
2//!
3//! This file is part of DMSC.
4//! The DMSC project belongs to the Dunimd Team.
5//!
6//! Licensed under the Apache License, Version 2.0 (the "License");
7//! You may not use this file except in compliance with the License.
8//! You may obtain a copy of the License at
9//!
10//!     http://www.apache.org/licenses/LICENSE-2.0
11//!
12//! Unless required by applicable law or agreed to in writing, software
13//! distributed under the License is distributed on an "AS IS" BASIS,
14//! WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15//! See the License for the specific language governing permissions and
16//! limitations under the License.
17
18//! # Core Module
19//!
20//! This module provides the core abstractions and data structures for the DMSC caching system.
21//! It defines the foundational traits, event types, statistics, and value wrappers that all
22//! cache backend implementations rely upon.
23//!
24//! ## Key Components
25//!
26//! - **[`DMSCCache`](DMSCCache)**: Core trait defining the cache interface with async operations
27//! - **[`DMSCCacheEvent`](DMSCCacheEvent)**: Event types for cache monitoring and consistency
28//! - **[`DMSCCacheStats`](DMSCCacheStats)**: Statistics tracking for cache performance monitoring
29//! - **[`DMSCCachedValue`](DMSCCachedValue)**: Wrapper for cached values with TTL and LRU support
30//!
31//! ## Design Principles
32//!
33//! 1. **Trait-based Architecture**: All backends implement the DMSCCache trait for consistency
34//! 2. **Async-first**: Full async/await support for non-blocking cache operations
35//! 3. **Thread Safety**: All implementations are Send + Sync for concurrent access
36//! 4. **Extensibility**: Easy to add new cache backends by implementing the trait
37//! 5. **Monitoring**: Built-in event system for cache activity tracking
38//! 6. **Statistics**: Comprehensive metrics for cache performance analysis
39//!
40//! ## Usage Example
41//!
42//! ```rust
43//! use dmsc::cache::{DMSCCache, DMSCCacheEvent, DMSCCacheStats, DMSCCachedValue};
44//! use dmsc::cache::backends::DMSCMemoryCache;
45//!
46//! async fn example() -> dmsc::core::DMSCResult<()> {
47//!     // Create a memory cache backend
48//!     let cache = DMSCMemoryCache::new();
49//!
50//!     // Set a value with 1-hour TTL
51//!     cache.set("user:123", "{\"name\": \"Alice\"}", Some(3600)).await?;
52//!
53//!     // Retrieve the value
54//!     let value = cache.get("user:123").await?;
55//!     println!("Retrieved: {:?}", value);
56//!
57//!     // Check if key exists
58//!     let exists = cache.exists("user:123").await;
59//!
60//!     // Get cache statistics
61//!     let stats: DMSCCacheStats = cache.stats().await;
62//!
63//!     // Clean up expired entries
64//!     let cleaned = cache.cleanup_expired().await?;
65//!
66//!     Ok(())
67//! }
68//! ```
69
70use crate::core::{DMSCResult, DMSCError};
71use std::time::Duration;
72use serde::{Serialize, Deserialize};
73
74#[cfg(feature = "pyo3")]
75use pyo3::prelude::*;
76
77/// Cache trait for DMSC cache implementations.
78///
79/// This trait defines the core interface for all cache backends in DMSC.
80/// Implementations must provide thread-safe, asynchronous cache operations
81/// with support for TTL-based expiration and comprehensive statistics tracking.
82///
83/// ## Implementations
84///
85/// DMSC provides several built-in implementations:
86/// - **[`DMSCMemoryCache`](super::backends::DMSCMemoryCache)**: In-memory cache using DashMap
87/// - **[`DMSCRedisCache`](super::backends::DMSCRedisCache)**: Distributed cache using Redis
88/// - **[`DMSCHybridCache`](super::backends::DMSCHybridCache)**: Multi-layer cache combining memory and Redis
89///
90/// ## Thread Safety
91///
92/// All implementations must be `Send + Sync` to ensure safe concurrent access
93/// from multiple async tasks or threads. The trait uses interior mutability
94/// patterns internally.
95///
96/// ## Async Operations
97///
98/// All operations are asynchronous and use async/await syntax. This enables
99/// non-blocking cache operations suitable for high-throughput applications.
100///
101/// ## Key Operations
102///
103/// 1. **Basic Operations**: `get`, `set`, `delete`, `exists`
104/// 2. **Batch Operations**: `get_multi`, `set_multi`, `delete_multi`
105/// 3. **Maintenance**: `clear`, `cleanup_expired`, `stats`
106/// 4. **Pattern Matching**: `keys`, `delete_by_pattern`
107///
108/// ## Example
109///
110/// ```rust
111/// use dmsc::cache::DMSCCache;
112/// use dmsc::cache::backends::DMSCMemoryCache;
113///
114/// async fn example() -> dmsc::core::DMSCResult<()> {
115///     let cache = DMSCMemoryCache::new();
116///
117///     // Store a value with 1-hour TTL
118///     cache.set("user:123", "Alice", Some(3600)).await?;
119///
120///     // Retrieve the value
121///     let value = cache.get("user:123").await?;
122///     assert_eq!(value, Some("Alice".to_string()));
123///
124///     // Check if key exists
125///     assert!(cache.exists("user:123").await);
126///
127///     // Get cache statistics
128///     let stats = cache.stats().await;
129///     println!("Hits: {}, Misses: {}", stats.hits, stats.misses);
130///
131///     // Delete the value
132///     cache.delete("user:123").await?;
133///
134///     Ok(())
135/// }
136/// ```
137#[async_trait::async_trait]
138pub trait DMSCCache: Send + Sync {
139    /// Retrieves a value from the cache by key.
140    ///
141    /// This method looks up the specified key in the cache. If the key exists
142    /// and the value is not expired, it returns the value as a string. Expired
143    /// entries are automatically removed during the lookup.
144    ///
145    /// ## Expiration Handling
146    ///
147    /// If the cached value has an associated TTL (Time-To-Live) and the current
148    /// time has passed the expiration timestamp, the entry is treated as missing
149    /// and removed from the cache.
150    ///
151    /// ## Statistics
152    ///
153    /// This operation updates cache statistics:
154    /// - Increments `hits` counter on successful retrieval
155    /// - Increments `misses` counter when key is not found or expired
156    ///
157    /// # Parameters
158    ///
159    /// * `key` - The cache key to look up (typically a string identifier)
160    ///
161    /// # Returns
162    ///
163    /// A `DMSCResult<Option<String>>` containing:
164    /// - `Ok(Some(value))` if the key exists and is not expired
165    /// - `Ok(None)` if the key doesn't exist or has expired
166    /// - `Err(DMSCError)` if an error occurred during the operation
167    ///
168    /// # Examples
169    ///
170    /// ```rust
171    /// use dmsc::cache::backends::DMSCMemoryCache;
172    ///
173    /// async fn example() -> dmsc::core::DMSCResult<()> {
174    ///     let cache = DMSCMemoryCache::new();
175    ///
176    ///     // Key doesn't exist
177    ///     let result = cache.get("missing").await?;
178    ///     assert_eq!(result, None);
179    ///
180    ///     // Store a value
181    ///     cache.set("key", "value", None).await?;
182    ///
183    ///     // Key exists
184    ///     let result = cache.get("key").await?;
185    ///     assert_eq!(result, Some("value".to_string()));
186    ///
187    ///     Ok(())
188    /// }
189    /// ```
190    async fn get(&self, key: &str) -> DMSCResult<Option<String>>;
191
192    /// Stores a value in the cache with an optional TTL.
193    ///
194    /// This method inserts or updates a cache entry with the specified key and value.
195    /// The entry will automatically expire after the specified TTL duration if provided.
196    ///
197    /// ## Overwrite Behavior
198    ///
199    /// If a value already exists for the given key, it will be overwritten with the
200    /// new value. The expiration time will be reset based on the new TTL.
201    ///
202    /// ## TTL Handling
203    ///
204    /// - `Some(seconds)`: The entry will expire after the specified number of seconds
205    /// - `None`: The entry will never expire automatically
206    ///
207    /// ## Storage Format
208    ///
209    /// The value is stored as a string. For complex types, serialize them to a string
210    /// format (e.g., JSON) before caching.
211    ///
212    /// # Parameters
213    ///
214    /// * `key` - The cache key to store the value under
215    /// * `value` - The string value to cache
216    /// * `ttl_seconds` - Optional time-to-live in seconds (None for persistent storage)
217    ///
218    /// # Returns
219    ///
220    /// A `DMSCResult<()>` indicating success or failure
221    ///
222    /// # Examples
223    ///
224    /// ```rust
225    /// use dmsc::cache::backends::DMSCMemoryCache;
226    ///
227    /// async fn example() -> dmsc::core::DMSCResult<()> {
228    ///     let cache = DMSCMemoryCache::new();
229    ///
230    ///     // Store a value without expiration
231    ///     cache.set("persistent", "data", None).await?;
232    ///
233    ///     // Store a value with 1-hour expiration
234    ///     cache.set("temp", "data", Some(3600)).await?;
235    ///
236    ///     Ok(())
237    /// }
238    /// ```
239    async fn set(&self, key: &str, value: &str, ttl_seconds: Option<u64>) -> DMSCResult<()>;
240
241    /// Removes a value from the cache by key.
242    ///
243    /// This method deletes the specified key from the cache. If the key doesn't
244    /// exist, the operation still succeeds but returns false.
245    ///
246    /// ## Behavior
247    ///
248    /// - The entry is completely removed from the cache
249    /// - If the key doesn't exist, no error is raised
250    /// - No statistics are updated for delete operations
251    ///
252    /// # Parameters
253    ///
254    /// * `key` - The cache key to delete
255    ///
256    /// # Returns
257    ///
258    /// A `DMSCResult<bool>` containing:
259    /// - `Ok(true)` if the key was found and deleted
260    /// - `Ok(false)` if the key didn't exist
261    /// - `Err(DMSCError)` if an error occurred during the operation
262    ///
263    /// # Examples
264    ///
265    /// ```rust
266    /// use dmsc::cache::backends::DMSCMemoryCache;
267    ///
268    /// async fn example() -> dmsc::core::DMSCResult<()> {
269    ///     let cache = DMSCMemoryCache::new();
270    ///
271    ///     // Delete non-existent key
272    ///     let deleted = cache.delete("missing").await?;
273    ///     assert!(!deleted);
274    ///
275    ///     // Store and delete
276    ///     cache.set("key", "value", None).await?;
277    ///     let deleted = cache.delete("key").await?;
278    ///     assert!(deleted);
279    ///
280    ///     Ok(())
281    /// }
282    /// ```
283    async fn delete(&self, key: &str) -> DMSCResult<bool>;
284
285    /// Removes all entries from the cache.
286    ///
287    /// This method clears all cached values regardless of their expiration status.
288    /// The operation is typically O(n) where n is the number of cached entries.
289    ///
290    /// ## Behavior
291    ///
292    /// - All entries are immediately removed
293    /// - Statistics are reset to their default values
294    /// - This operation cannot be undone
295    ///
296    /// # Returns
297    ///
298    /// A `DMSCResult<()>` indicating success or failure
299    ///
300    /// # Examples
301    ///
302    /// ```rust
303    /// use dmsc::cache::backends::DMSCMemoryCache;
304    ///
305    /// async fn example() -> dmsc::core::DMSCResult<()> {
306    ///     let cache = DMSCMemoryCache::new();
307    ///
308    ///     // Add some entries
309    ///     cache.set("a", "1", None).await?;
310    ///     cache.set("b", "2", None).await?;
311    ///     cache.set("c", "3", None).await?;
312    ///
313    ///     // Clear all entries
314    ///     cache.clear().await?;
315    ///
316    ///     // Verify cache is empty
317    ///     assert!(!cache.exists("a").await);
318    ///
319    ///     Ok(())
320    /// }
321    /// ```
322    async fn clear(&self) -> DMSCResult<()>;
323
324    /// Returns current cache statistics.
325    ///
326    /// This method retrieves performance metrics and usage statistics from the cache.
327    /// The statistics provide insights into cache effectiveness and resource usage.
328    ///
329    /// ## Statistics Collected
330    ///
331    /// - `hits`: Number of successful cache lookups
332    /// - `misses`: Number of cache lookups that returned None
333    /// - `entries`: Current number of entries in the cache
334    /// - `memory_usage_bytes`: Estimated memory consumption
335    /// - `avg_hit_rate`: Ratio of hits to total lookups
336    /// - `eviction_count`: Number of entries evicted due to size limits
337    ///
338    /// ## Thread Safety
339    ///
340    /// The returned statistics are a snapshot taken at call time. Other threads
341    /// may modify the cache immediately after, making the statistics slightly stale.
342    ///
343    /// # Returns
344    ///
345    /// A `DMSCCacheStats` struct containing all cache metrics
346    ///
347    /// # Examples
348    ///
349    /// ```rust
350    /// use dmsc::cache::backends::DMSCMemoryCache;
351    ///
352    /// async fn example() {
353    ///     let cache = DMSCMemoryCache::new();
354    ///
355    ///     // Perform some cache operations
356    ///     let _ = cache.get("missing").await;
357    ///     cache.set("key", "value", None).await.unwrap();
358    ///     let _ = cache.get("key").await;
359    ///
360    ///     // Get statistics
361    ///     let stats = cache.stats().await;
362    ///     println!("Hits: {}, Misses: {}", stats.hits, stats.misses);
363    ///     println!("Hit rate: {:.1}%", stats.avg_hit_rate * 100.0);
364    /// }
365    /// ```
366    async fn stats(&self) -> DMSCCacheStats;
367
368    /// Removes all expired entries from the cache.
369    ///
370    /// This method scans the cache and removes entries that have exceeded their
371    /// TTL (Time-To-Live). This is useful for reclaiming memory used by expired entries.
372    ///
373    /// ## Performance
374    ///
375    /// The performance characteristics depend on the implementation:
376    /// - In-memory caches: Typically O(n) where n is total entries
377    /// - Distributed caches: May involve network round-trips for each entry
378    ///
379    /// ## Automatic Cleanup
380    ///
381    /// Many implementations automatically remove expired entries during normal
382    /// operations (e.g., during `get()` calls). This explicit cleanup is useful
383    /// for periodic maintenance or when entries have no recent access.
384    ///
385    /// # Returns
386    ///
387    /// A `DMSCResult<usize>` containing the number of expired entries removed
388    ///
389    /// # Examples
390    ///
391    /// ```rust
392    /// use dmsc::cache::backends::DMSCMemoryCache;
393    ///
394    /// async fn example() -> dmsc::core::DMSCResult<()> {
395    ///     let cache = DMSCMemoryCache::new();
396    ///
397    ///     // Add entries with short TTL
398    ///     cache.set("short-lived", "data", Some(1)).await?;
399    ///
400    ///     // Wait for expiration
401    ///     tokio::time::sleep(tokio::time::Duration::from_secs(2)).await;
402    ///
403    ///     // Cleanup expired entries
404    ///     let cleaned = cache.cleanup_expired().await?;
405    ///     println!("Cleaned {} expired entries", cleaned);
406    ///
407    ///     Ok(())
408    /// }
409    /// ```
410    async fn cleanup_expired(&self) -> DMSCResult<usize>;
411
412    /// Checks if a key exists in the cache and is not expired.
413    ///
414    /// This method provides a lightweight way to check key existence without
415    /// retrieving the value. Expired entries are automatically removed.
416    ///
417    /// ## Expiration Check
418    ///
419    /// If the key exists but the value is expired, the entry is removed and
420    /// the method returns false.
421    ///
422    /// ## Performance
423    ///
424    /// This operation is typically faster than `get()` because it doesn't
425    /// need to deserialize or return the cached value.
426    ///
427    /// # Parameters
428    ///
429    /// * `key` - The cache key to check
430    ///
431    /// # Returns
432    ///
433    /// A boolean indicating whether the key exists and is not expired
434    ///
435    /// # Examples
436    ///
437    /// ```rust
438    /// use dmsc::cache::backends::DMSCMemoryCache;
439    ///
440    /// async fn example() -> dmsc::core::DMSCResult<()> {
441    ///     let cache = DMSCMemoryCache::new();
442    ///
443    ///     assert!(!cache.exists("missing").await);
444    ///
445    ///     cache.set("key", "value", None).await?;
446    ///     assert!(cache.exists("key").await);
447    ///
448    ///     Ok(())
449    /// }
450    /// ```
451    async fn exists(&self, key: &str) -> bool;
452
453    /// Retrieves all cache keys.
454    ///
455    /// This method returns a list of all keys currently stored in the cache,
456    /// including expired ones. Use `cleanup_expired()` to remove expired entries first.
457    ///
458    /// ## Order
459    ///
460    /// The order of returned keys is implementation-defined. Do not rely on
461    /// any particular ordering.
462    ///
463    /// ## Performance
464    ///
465    /// This operation may be expensive for large caches as it typically requires
466    /// iterating through all entries.
467    ///
468    /// # Returns
469    ///
470    /// A `DMSCResult<Vec<String>>` containing all cache keys
471    ///
472    /// # Examples
473    ///
474    /// ```rust
475    /// use dmsc::cache::backends::DMSCMemoryCache;
476    ///
477    /// async fn example() -> dmsc::core::DMSCResult<()> {
478    ///     let cache = DMSCMemoryCache::new();
479    ///
480    ///     cache.set("a", "1", None).await?;
481    ///     cache.set("b", "2", None).await?;
482    ///     cache.set("c", "3", None).await?;
483    ///
484    ///     let keys = cache.keys().await?;
485    ///     assert_eq!(keys.len(), 3);
486    ///
487    ///     Ok(())
488    /// }
489    /// ```
490    async fn keys(&self) -> DMSCResult<Vec<String>>;
491
492    /// Retrieves multiple values from the cache in a single operation.
493    ///
494    /// This method is a convenience wrapper that fetches multiple keys efficiently.
495    /// The results are returned in the same order as the input keys.
496    ///
497    /// ## Partial Results
498    ///
499    /// If some keys exist and others don't, the result vector will contain
500    /// `Some(value)` for existing keys and `None` for missing keys.
501    ///
502    /// # Parameters
503    ///
504    /// * `keys` - A slice of cache keys to retrieve
505    ///
506    /// # Returns
507    ///
508    /// A `DMSCResult<Vec<Option<String>>>` containing values in key order
509    ///
510    /// # Examples
511    ///
512    /// ```rust
513    /// use dmsc::cache::backends::DMSCMemoryCache;
514    ///
515    /// async fn example() -> dmsc::core::DMSCResult<()> {
516    ///     let cache = DMSCMemoryCache::new();
517    ///
518    ///     cache.set("a", "1", None).await?;
519    ///     cache.set("b", "2", None).await?;
520    ///
521    ///     let results = cache.get_multi(&["a", "b", "c"]).await?;
522    ///     assert_eq!(results, vec![
523    ///         Some("1".to_string()),
524    ///         Some("2".to_string()),
525    ///         None
526    ///     ]);
527    ///
528    ///     Ok(())
529    /// }
530    /// ```
531    async fn get_multi(&self, keys: &[&str]) -> DMSCResult<Vec<Option<String>>> {
532        let mut results = Vec::with_capacity(keys.len());
533        for &key in keys {
534            results.push(self.get(key).await?);
535        }
536        Ok(results)
537    }
538
539    /// Stores multiple key-value pairs in the cache.
540    ///
541    /// This method is a convenience wrapper for setting multiple entries efficiently.
542    /// All entries use the same TTL if provided.
543    ///
544    /// # Parameters
545    ///
546    /// * `items` - A slice of (key, value) tuples to store
547    /// * `ttl_seconds` - Optional TTL for all entries
548    ///
549    /// # Returns
550    ///
551    /// A `DMSCResult<()>` indicating success or failure
552    ///
553    /// # Examples
554    ///
555    /// ```rust
556    /// use dmsc::cache::backends::DMSCMemoryCache;
557    ///
558    /// async fn example() -> dmsc::core::DMSCResult<()> {
559    ///     let cache = DMSCMemoryCache::new();
560    ///
561    ///     let items = vec![
562    ///         ("a", "1"),
563    ///         ("b", "2"),
564    ///         ("c", "3"),
565    ///     ];
566    ///
567    ///     cache.set_multi(&items, Some(3600)).await?;
568    ///
569    ///     Ok(())
570    /// }
571    /// ```
572    async fn set_multi(&self, items: &[(&str, &str)], ttl_seconds: Option<u64>) -> DMSCResult<()> {
573        for &(key, value) in items {
574            self.set(key, value, ttl_seconds).await?;
575        }
576        Ok(())
577    }
578
579    /// Removes multiple keys from the cache.
580    ///
581    /// This method is a convenience wrapper for deleting multiple entries efficiently.
582    ///
583    /// ## Atomicity
584    ///
585    /// This operation is not atomic - each delete is performed independently.
586    /// Partial failures may result in some keys being deleted while others remain.
587    ///
588    /// # Parameters
589    ///
590    /// * `keys` - A slice of cache keys to delete
591    ///
592    /// # Returns
593    ///
594    /// A `DMSCResult<usize>` containing the number of keys deleted
595    ///
596    /// # Examples
597    ///
598    /// ```rust
599    /// use dmsc::cache::backends::DMSCMemoryCache;
600    ///
601    /// async fn example() -> dmsc::core::DMSCResult<()> {
602    ///     let cache = DMSCMemoryCache::new();
603    ///
604    ///     cache.set("a", "1", None).await?;
605    ///     cache.set("b", "2", None).await?;
606    ///     cache.set("c", "3", None).await?;
607    ///
608    ///     let count = cache.delete_multi(&["a", "b"]).await?;
609    ///     assert_eq!(count, 2);
610    ///
611    ///     Ok(())
612    /// }
613    /// ```
614    async fn delete_multi(&self, keys: &[&str]) -> DMSCResult<usize> {
615        let mut count = 0;
616        for &key in keys {
617            if self.delete(key).await? {
618                count += 1;
619            }
620        }
621        Ok(count)
622    }
623
624    /// Checks if multiple keys exist in the cache.
625    ///
626    /// This method is a convenience wrapper for checking multiple keys efficiently.
627    ///
628    /// # Parameters
629    ///
630    /// * `keys` - A slice of cache keys to check
631    ///
632    /// # Returns
633    ///
634    /// A `DMSCResult<Vec<bool>>` indicating existence of each key
635    ///
636    /// # Examples
637    ///
638    /// ```rust
639    /// use dmsc::cache::backends::DMSCMemoryCache;
640    ///
641    /// async fn example() -> dmsc::core::DMSCResult<()> {
642    ///     let cache = DMSCMemoryCache::new();
643    ///
644    ///     cache.set("a", "1", None).await?;
645    ///
646    ///     let results = cache.exists_multi(&["a", "b"]).await?;
647    ///     assert_eq!(results, vec![true, false]);
648    ///
649    ///     Ok(())
650    /// }
651    /// ```
652    async fn exists_multi(&self, keys: &[&str]) -> DMSCResult<Vec<bool>> {
653        let mut results = Vec::with_capacity(keys.len());
654        for &key in keys {
655            results.push(self.exists(key).await);
656        }
657        Ok(results)
658    }
659
660    /// Removes all keys matching a regex pattern.
661    ///
662    /// This method is useful for bulk invalidation of related cache entries.
663    /// For example, invalidating all user-related cache entries when a user updates their profile.
664    ///
665    /// ## Pattern Format
666    ///
667    /// The pattern is a regular expression. Common patterns include:
668    /// - `user:*` - Matches all keys starting with "user:"
669    /// - `*:session` - Matches all keys ending with ":session"
670    /// - `.*` - Matches all keys
671    ///
672    /// ## Performance
673    ///
674    /// This operation requires fetching all keys and filtering by regex.
675    /// For large caches, consider using key prefixes for better performance.
676    ///
677    /// # Parameters
678    ///
679    /// * `pattern` - A regular expression pattern to match keys against
680    ///
681    /// # Returns
682    ///
683    /// A `DMSCResult<usize>` containing the number of keys deleted
684    ///
685    /// # Examples
686    ///
687    /// ```rust
688    /// use dmsc::cache::backends::DMSCMemoryCache;
689    ///
690    /// async fn example() -> dmsc::core::DMSCResult<()> {
691    ///     let cache = DMSCMemoryCache::new();
692    ///
693    ///     cache.set("user:123:profile", "data", None).await?;
694    ///     cache.set("user:123:settings", "data", None).await?;
695    ///     cache.set("product:456", "data", None).await?;
696    ///
697    ///     let count = cache.delete_by_pattern("user:.*").await?;
698    ///     assert_eq!(count, 2);
699    ///
700    ///     Ok(())
701    /// }
702    /// ```
703    async fn delete_by_pattern(&self, pattern: &str) -> DMSCResult<usize> {
704        let keys = self.keys().await?;
705        let regex = regex::Regex::new(pattern)
706            .map_err(|e| DMSCError::Other(format!("Invalid pattern: {}", e)))?;
707        let mut count = 0;
708        for key in keys {
709            if regex.is_match(&key) {
710                if self.delete(&key).await? {
711                    count += 1;
712                }
713            }
714        }
715        Ok(count)
716    }
717}
718
/// Cache event types for monitoring and consistency
///
/// Emitted by backends so observers can track cache activity (hits, misses,
/// mutations, maintenance) for monitoring and cross-layer consistency.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[cfg_attr(feature = "pyo3", pyo3::prelude::pyclass)]
pub enum DMSCCacheEvent {
    /// A lookup found a live (unexpired) entry for `key`.
    Hit { key: String },
    /// A lookup found no entry, or only an expired one, for `key`.
    Miss { key: String },
    /// `key` was evicted (e.g. by a capacity/LRU policy).
    Eviction { key: String },
    /// `key` was stored, optionally with a TTL in seconds.
    Set { key: String, ttl_seconds: Option<u64> },
    /// `key` was explicitly deleted.
    Delete { key: String },
    /// The entire cache was cleared.
    // NOTE(review): zero-field tuple variant; a plain unit variant (`Clear`)
    // would be idiomatic, but changing it would break existing `Clear()`
    // patterns and serde output — left as-is.
    Clear(),
    /// An expiry sweep removed `cleaned_count` entries.
    Cleanup { cleaned_count: usize },
    /// All keys matching `pattern` were invalidated.
    InvalidatePattern { pattern: String },
    /// `key` was invalidated.
    Invalidate { key: String },
}
742
/// Cache statistics
///
/// Snapshot of cache performance counters returned by [`DMSCCache::stats`].
#[derive(Debug, Clone, Copy, Serialize, Deserialize)]
#[cfg_attr(feature = "pyo3", pyo3::prelude::pyclass(get_all, set_all))]
pub struct DMSCCacheStats {
    /// Number of successful cache lookups.
    pub hits: u64,
    /// Number of lookups that found no (unexpired) entry.
    pub misses: u64,
    /// Current number of entries stored in the cache.
    pub entries: usize,
    /// Estimated memory consumption in bytes.
    pub memory_usage_bytes: usize,
    /// Ratio of hits to total lookups (0.0..=1.0).
    pub avg_hit_rate: f64,
    // NOTE(review): appears to duplicate `hits` — confirm which field backends populate.
    pub hit_count: u64,
    // NOTE(review): appears to duplicate `misses` — confirm which field backends populate.
    pub miss_count: u64,
    /// Number of entries evicted due to size limits.
    pub eviction_count: u64,
}
756
757impl Default for DMSCCacheStats {
758    fn default() -> Self {
759        Self {
760            hits: 0,
761            misses: 0,
762            entries: 0,
763            memory_usage_bytes: 0,
764            avg_hit_rate: 0.0,
765            hit_count: 0,
766            miss_count: 0,
767            eviction_count: 0,
768        }
769    }
770}
771
// Python bindings for DMSCCacheStats (only compiled with the `pyo3` feature).
#[cfg(feature = "pyo3")]
#[pymethods]
impl DMSCCacheStats {
    /// Python constructor: returns zeroed statistics (same as Rust `Default`).
    #[new]
    fn py_new() -> Self {
        Self::default()
    }
    
    /// Python static helper mirroring Rust's `Default::default()`.
    #[staticmethod]
    fn default_stats() -> Self {
        Self::default()
    }
}
785
/// Cached value wrapper with TTL and LRU support.
///
/// This struct encapsulates a cached value along with metadata for cache management:
/// - **value**: The actual cached data as a string
/// - **expires_at**: Optional TTL-based expiration timestamp (UNIX epoch seconds)
/// - **last_accessed**: Optional last access timestamp for LRU eviction policies
///
/// # Examples
///
/// ```
/// use dmsc::cache::DMSCCachedValue;
///
/// let mut cached = DMSCCachedValue::new("test_data".to_string(), Some(3600));
/// assert!(!cached.is_expired());
/// cached.touch();
/// assert!(!cached.is_stale(300));
/// ```
#[derive(Debug, Clone, Serialize, Deserialize)]
#[cfg_attr(feature = "pyo3", pyo3::prelude::pyclass(get_all, set_all))]
pub struct DMSCCachedValue {
    /// The cached value as a string
    pub value: String,
    /// Optional expiration timestamp (UNIX epoch seconds)
    /// If None, the value never expires based on TTL
    pub expires_at: Option<u64>,
    /// Optional last access timestamp (UNIX epoch seconds)
    /// Used for LRU-based cache eviction policies
    pub last_accessed: Option<u64>,
}
815
816impl DMSCCachedValue {
817    /// Creates a new cached value with optional TTL.
818    /// 
819    /// # Parameters
820    /// 
821    /// - `value`: The string value to cache
822    /// - `ttl_seconds`: Optional time-to-live in seconds
823    ///   - If Some(seconds), the value will expire after the specified duration
824    ///   - If None, the value never expires based on TTL
825    /// 
826    /// # Behavior
827    /// 
828    /// - Initializes `last_accessed` to current timestamp for LRU tracking
829    /// - Calculates `expires_at` as current_time + ttl_seconds if TTL is provided
830    /// 
831    /// # Examples
832    /// 
833    /// ```
834    /// use dmsc::cache::DMSCCachedValue;
835    /// 
836    /// // Create a value that expires in 1 hour
837    /// let cached = DMSCCachedValue::new("data".to_string(), Some(3600));
838    /// 
839    /// // Create a value that never expires
840    /// let persistent = DMSCCachedValue::new("persistent".to_string(), None);
841    /// ```
842    pub fn new(value: String, ttl_seconds: Option<u64>) -> Self {
843        let now = std::time::SystemTime::now()
844            .duration_since(std::time::UNIX_EPOCH)
845            .unwrap_or(Duration::from_secs(0))
846            .as_secs();
847        
848        let expires_at = ttl_seconds.map(|ttl| {
849            now + ttl
850        });
851        
852        Self { 
853            value, 
854            expires_at,
855            last_accessed: Some(now),
856        }
857    }
858    
859    pub fn deserialize<T: serde::de::DeserializeOwned>(&self) -> crate::core::DMSCResult<T> {
860        serde_json::from_str(&self.value)
861            .map_err(|e| crate::core::DMSCError::Other(format!("Deserialization error: {e}")))
862    }
863    
864    pub fn is_expired(&self) -> bool {
865        if let Some(expires_at) = self.expires_at {
866            let now = std::time::SystemTime::now()
867                .duration_since(std::time::UNIX_EPOCH)
868                .unwrap_or(Duration::from_secs(0))
869                .as_secs();
870            now >= expires_at
871        } else {
872            false
873        }
874    }
875    
876    /// Updates the last access timestamp to current time.
877    /// 
878    /// This method should be called each time the cached value is accessed
879    /// to support LRU (Least Recently Used) cache eviction policies.
880    /// 
881    /// # Behavior
882    /// 
883    /// - Sets `last_accessed` to the current UNIX timestamp
884    /// - Does not modify `expires_at` or `value`
885    /// 
886    /// # Use Cases
887    /// 
888    /// - LRU cache implementations tracking access order
889    /// - Cache warming strategies based on access patterns
890    /// - Usage analytics and cache performance monitoring
891    pub fn touch(&mut self) {
892        let now = std::time::SystemTime::now()
893            .duration_since(std::time::UNIX_EPOCH)
894            .unwrap_or(Duration::from_secs(0))
895            .as_secs();
896        
897        self.last_accessed = Some(now);
898    }
899    
900    /// Checks if the cached value is stale based on idle time.
901    /// 
902    /// A value is considered stale if it has not been accessed for longer
903    /// than the specified maximum idle time. This is useful for LRU eviction.
904    /// 
905    /// # Parameters
906    /// 
907    /// - `max_idle_secs`: Maximum idle time in seconds before considering stale
908    /// 
909    /// # Returns
910    /// 
911    /// - `true` if the value is stale (not accessed within max_idle_secs)
912    /// - `false` if the value is still fresh or has no access timestamp
913    /// 
914    /// # Examples
915    /// 
916    /// ```
917    /// use dmsc::cache::DMSCCachedValue;
918    /// 
919    /// let mut cached = DMSCCachedValue::new("data".to_string(), None);
920    /// 
921    /// // Immediately after creation, not stale
922    /// assert!(!cached.is_stale(300));
923    /// 
924    /// cached.touch();
925    /// assert!(!cached.is_stale(300));
926    /// ```
927    pub fn is_stale(&self, max_idle_secs: u64) -> bool {
928        if let Some(last_accessed) = self.last_accessed {
929            let now = std::time::SystemTime::now()
930                .duration_since(std::time::UNIX_EPOCH)
931                .unwrap_or(Duration::from_secs(0))
932                .as_secs();
933            now - last_accessed > max_idle_secs
934        } else {
935            false
936        }
937    }
938}
939
// Python bindings for DMSCCachedValue (only compiled with the `pyo3` feature).
#[cfg(feature = "pyo3")]
#[pymethods]
impl DMSCCachedValue {
    /// Python constructor: wraps `DMSCCachedValue::new`.
    #[new]
    fn py_new(value: String, ttl_seconds: Option<u64>) -> Self {
        Self::new(value, ttl_seconds)
    }
    
    /// Python static factory: an empty, never-expiring value.
    #[staticmethod]
    fn default() -> Self {
        Self::new(String::new(), None)
    }
    
    /// Python binding for `DMSCCachedValue::is_expired`.
    #[pyo3(name = "is_expired")]
    fn is_expired_impl(&self) -> bool {
        self.is_expired()
    }
    
    /// Python binding for `DMSCCachedValue::touch`.
    #[pyo3(name = "touch")]
    fn touch_impl(&mut self) {
        self.touch()
    }
}