ccproxy.utils.caching

Caching utilities for CCProxy.

This module provides caching decorators and utilities that improve performance by caching frequently accessed data, such as detection results and authentication status.

TTLCache

TTLCache(maxsize=128, ttl=300.0)

Thread-safe TTL (Time To Live) cache with LRU eviction.

Parameters:

Name     Type   Description                          Default
maxsize  int    Maximum number of entries to cache   128
ttl      float  Time to live for entries in seconds  300.0
Source code in ccproxy/utils/caching.py
def __init__(self, maxsize: int = 128, ttl: float = 300.0):
    """Initialize TTL cache.

    Args:
        maxsize: Maximum number of entries to cache
        ttl: Time to live for entries in seconds
    """
    self.maxsize = maxsize
    self.ttl = ttl
    self._cache: dict[Hashable, tuple[Any, float]] = {}
    self._access_order: dict[Hashable, int] = {}
    self._access_counter = 0
    self._lock = threading.RLock()
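
A minimal usage sketch, assuming TTLCache is imported from this module; keys and values are illustrative:

from ccproxy.utils.caching import TTLCache

# Cache up to 2 entries, each valid for 5 seconds
cache = TTLCache(maxsize=2, ttl=5.0)
cache.set("model", "claude")
print(cache.get("model"))    # "claude" while the entry is fresh
print(cache.get("missing"))  # None for unknown or expired keys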

get

get(key)

Get value from cache.

Source code in ccproxy/utils/caching.py
def get(self, key: Hashable) -> Any | None:
    """Get value from cache."""
    with self._lock:
        if key not in self._cache:
            return None

        value, expiry_time = self._cache[key]

        # Check if expired
        if time.time() > expiry_time:
            self._cache.pop(key, None)
            self._access_order.pop(key, None)
            return None

        # Update access order
        self._access_counter += 1
        self._access_order[key] = self._access_counter

        return value

set

set(key, value)

Set value in cache.

Source code in ccproxy/utils/caching.py
def set(self, key: Hashable, value: Any) -> None:
    """Set value in cache."""
    with self._lock:
        now = time.time()
        expiry_time = now + self.ttl

        # Add/update entry
        self._cache[key] = (value, expiry_time)
        self._access_counter += 1
        self._access_order[key] = self._access_counter

        # Evict expired entries first
        self._evict_expired()

        # Evict oldest entries if over maxsize
        while len(self._cache) > self.maxsize:
            self._evict_oldest()
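
Eviction can be illustrated with a small sketch; this assumes _evict_oldest() removes the entry with the lowest access counter, as the access tracking above implies:

cache = TTLCache(maxsize=2, ttl=300.0)
cache.set("a", 1)
cache.set("b", 2)
cache.get("a")         # touch "a" so "b" becomes the least recently used entry
cache.set("c", 3)      # over maxsize, so the least recently used entry is dropped
print(cache.get("b"))  # expected: None
print(cache.get("a"))  # expected: 1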

delete

delete(key)

Delete entry from cache.

Source code in ccproxy/utils/caching.py
def delete(self, key: Hashable) -> bool:
    """Delete entry from cache."""
    with self._lock:
        if key in self._cache:
            del self._cache[key]
            self._access_order.pop(key, None)
            return True
        return False

clear

clear()

Clear all cache entries.

Source code in ccproxy/utils/caching.py
def clear(self) -> None:
    """Clear all cache entries."""
    with self._lock:
        self._cache.clear()
        self._access_order.clear()
        self._access_counter = 0

stats

stats()

Get cache statistics.

Source code in ccproxy/utils/caching.py
def stats(self) -> dict[str, Any]:
    """Get cache statistics."""
    with self._lock:
        return {
            "size": len(self._cache),
            "maxsize": self.maxsize,
            "ttl": self.ttl,
        }
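
The returned dictionary reflects the constructor arguments plus the current number of live entries, for example:

cache = TTLCache(maxsize=64, ttl=120.0)
cache.set("key", "value")
print(cache.stats())  # {'size': 1, 'maxsize': 64, 'ttl': 120.0}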

AuthStatusCache

AuthStatusCache(ttl=60.0)

Specialized cache for auth status checks with a shorter TTL.

Parameters:

Name  Type   Description                              Default
ttl   float  Time to live for auth status in seconds  60.0
Source code in ccproxy/utils/caching.py
def __init__(self, ttl: float = 60.0):  # 1 minute TTL for auth status
    """Initialize auth status cache.

    Args:
        ttl: Time to live for auth status in seconds
    """
    self._cache = TTLCache(maxsize=32, ttl=ttl)

get_auth_status

get_auth_status(provider)

Get cached auth status for provider.

Source code in ccproxy/utils/caching.py
def get_auth_status(self, provider: str) -> bool | None:
    """Get cached auth status for provider."""
    return self._cache.get(f"auth_status:{provider}")

set_auth_status

set_auth_status(provider, is_authenticated)

Cache auth status for provider.

Source code in ccproxy/utils/caching.py
def set_auth_status(self, provider: str, is_authenticated: bool) -> None:
    """Cache auth status for provider."""
    self._cache.set(f"auth_status:{provider}", is_authenticated)

invalidate_auth_status

invalidate_auth_status(provider)

Invalidate auth status for provider.

Source code in ccproxy/utils/caching.py
def invalidate_auth_status(self, provider: str) -> None:
    """Invalidate auth status for provider."""
    self._cache.delete(f"auth_status:{provider}")

clear

clear()

Clear all auth status cache.

Source code in ccproxy/utils/caching.py
def clear(self) -> None:
    """Clear all auth status cache."""
    self._cache.clear()
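
A short usage sketch; the provider name is illustrative:

from ccproxy.utils.caching import AuthStatusCache

auth_cache = AuthStatusCache(ttl=60.0)
print(auth_cache.get_auth_status("anthropic"))  # None: nothing cached yet
auth_cache.set_auth_status("anthropic", True)
print(auth_cache.get_auth_status("anthropic"))  # True until the TTL expires
auth_cache.invalidate_auth_status("anthropic")  # force a fresh check next time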

ttl_cache

ttl_cache(maxsize=128, ttl=300.0)

TTL cache decorator for functions.

Parameters:

Name     Type   Description                                 Default
maxsize  int    Maximum number of entries to cache          128
ttl      float  Time to live for cached results in seconds  300.0
Source code in ccproxy/utils/caching.py
def ttl_cache(maxsize: int = 128, ttl: float = 300.0) -> Callable[[F], F]:
    """TTL cache decorator for functions.

    Args:
        maxsize: Maximum number of entries to cache
        ttl: Time to live for cached results in seconds
    """

    def decorator(func: F) -> F:
        cache = TTLCache(maxsize=maxsize, ttl=ttl)

        @functools.wraps(func)
        def wrapper(*args: Any, **kwargs: Any) -> Any:
            # Create cache key from function args/kwargs
            key = _make_cache_key(func.__name__, args, kwargs)

            # Try to get from cache first
            cached_result = cache.get(key)
            if cached_result is not None:
                _trace(
                    "cache_hit",
                    function=func.__name__,
                    key_hash=hash(key) if isinstance(key, tuple) else key,
                )
                return cached_result

            # Call function and cache result
            result = func(*args, **kwargs)
            cache.set(key, result)

            _trace(
                "cache_miss_and_set",
                function=func.__name__,
                key_hash=hash(key) if isinstance(key, tuple) else key,
                cache_size=len(cache._cache),
            )

            return result

        # Add cache management methods
        wrapper.cache_info = cache.stats  # type: ignore
        wrapper.cache_clear = cache.clear  # type: ignore

        return wrapper  # type: ignore

    return decorator
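
A usage sketch for the decorator; the decorated function is hypothetical and stands in for any expensive synchronous call:

from ccproxy.utils.caching import ttl_cache

@ttl_cache(maxsize=16, ttl=30.0)
def detect_provider(url: str) -> str:
    # hypothetical expensive lookup, shown only to illustrate the decorator
    return "anthropic" if "anthropic" in url else "unknown"

detect_provider("https://api.anthropic.com")  # computed and cached
detect_provider("https://api.anthropic.com")  # served from the cache
print(detect_provider.cache_info())           # {'size': 1, 'maxsize': 16, 'ttl': 30.0}
detect_provider.cache_clear()

Note that a None return value is never cached: get() reports a miss by returning None, so functions that legitimately return None are re-executed on every call.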

async_ttl_cache

async_ttl_cache(maxsize=128, ttl=300.0)

TTL cache decorator for async functions.

Parameters:

Name     Type   Description                                 Default
maxsize  int    Maximum number of entries to cache          128
ttl      float  Time to live for cached results in seconds  300.0
Source code in ccproxy/utils/caching.py
def async_ttl_cache(
    maxsize: int = 128, ttl: float = 300.0
) -> Callable[[Callable[..., Any]], Callable[..., Any]]:
    """TTL cache decorator for async functions.

    Args:
        maxsize: Maximum number of entries to cache
        ttl: Time to live for cached results in seconds
    """

    def decorator(func: Callable[..., Any]) -> Callable[..., Any]:
        cache = TTLCache(maxsize=maxsize, ttl=ttl)

        @functools.wraps(func)
        async def wrapper(*args: Any, **kwargs: Any) -> Any:
            # Create cache key from function args/kwargs
            key = _make_cache_key(func.__name__, args, kwargs)

            # Try to get from cache first
            cached_result = cache.get(key)
            if cached_result is not None:
                _trace(
                    "async_cache_hit",
                    function=func.__name__,
                    key_hash=hash(key) if isinstance(key, tuple) else key,
                )
                return cached_result

            # Call async function and cache result
            result = await func(*args, **kwargs)
            cache.set(key, result)

            _trace(
                "async_cache_miss_and_set",
                function=func.__name__,
                key_hash=hash(key) if isinstance(key, tuple) else key,
                cache_size=len(cache._cache),
            )

            return result

        # Add cache management methods
        wrapper.cache_info = cache.stats  # type: ignore
        wrapper.cache_clear = cache.clear  # type: ignore

        return wrapper

    return decorator
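
The async variant works the same way; a sketch with a hypothetical coroutine:

import asyncio

from ccproxy.utils.caching import async_ttl_cache

@async_ttl_cache(maxsize=16, ttl=30.0)
async def fetch_auth_status(provider: str) -> bool:
    # hypothetical async check, shown only to illustrate the decorator
    await asyncio.sleep(0.1)
    return True

async def main() -> None:
    await fetch_auth_status("anthropic")  # awaited once and cached
    await fetch_auth_status("anthropic")  # returned from the cache without re-running the body
    print(fetch_auth_status.cache_info())

asyncio.run(main())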

cache_detection_result

cache_detection_result(key, result)

Cache a detection result.

Source code in ccproxy/utils/caching.py
def cache_detection_result(key: str, result: Any) -> None:
    """Cache a detection result."""
    _detection_cache.set(f"detection:{key}", result)

get_cached_detection_result

get_cached_detection_result(key)

Get cached detection result.

Source code in ccproxy/utils/caching.py
def get_cached_detection_result(key: str) -> Any | None:
    """Get cached detection result."""
    return _detection_cache.get(f"detection:{key}")
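
The two detection helpers share a module-level cache; a typical check-then-populate sketch (key and payload are illustrative):

from ccproxy.utils.caching import (
    cache_detection_result,
    get_cached_detection_result,
)

result = get_cached_detection_result("cli_version")
if result is None:
    result = {"version": "1.2.3"}  # hypothetical detection work
    cache_detection_result("cli_version", result)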

cache_plugin_config

cache_plugin_config(plugin_name, config)

Cache plugin configuration.

Source code in ccproxy/utils/caching.py
def cache_plugin_config(plugin_name: str, config: Any) -> None:
    """Cache plugin configuration."""
    _config_cache.set(f"plugin_config:{plugin_name}", config)

get_cached_plugin_config

get_cached_plugin_config(plugin_name)

Get cached plugin configuration.

Source code in ccproxy/utils/caching.py
def get_cached_plugin_config(plugin_name: str) -> Any | None:
    """Get cached plugin configuration."""
    return _config_cache.get(f"plugin_config:{plugin_name}")
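
The plugin-config helpers follow the same pattern, keyed by plugin name (names and values illustrative):

from ccproxy.utils.caching import cache_plugin_config, get_cached_plugin_config

config = get_cached_plugin_config("my_plugin")
if config is None:
    config = {"enabled": True}  # hypothetical plugin configuration
    cache_plugin_config("my_plugin", config)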

clear_all_caches

clear_all_caches()

Clear all global caches.

Source code in ccproxy/utils/caching.py
def clear_all_caches() -> None:
    """Clear all global caches."""
    _detection_cache.clear()
    _auth_cache.clear()
    _config_cache.clear()
    logger.info("all_caches_cleared", category="cache")

get_cache_stats

get_cache_stats()

Get statistics for all caches.

Source code in ccproxy/utils/caching.py
def get_cache_stats() -> dict[str, Any]:
    """Get statistics for all caches."""
    return {
        "detection_cache": _detection_cache.stats(),
        "auth_cache": _auth_cache._cache.stats(),
        "config_cache": _config_cache.stats(),
    }
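
A sketch of using the module-level helpers together:

from ccproxy.utils.caching import clear_all_caches, get_cache_stats

stats = get_cache_stats()
print(stats["detection_cache"])  # {'size': ..., 'maxsize': ..., 'ttl': ...}
clear_all_caches()               # reset the detection, auth, and config caches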