ccproxy.services.interfaces

Service interfaces for explicit dependency injection.

This module defines protocol interfaces for the core services that adapters need, enabling explicit dependency injection and removing the service locator pattern.

IRequestHandler

Bases: Protocol

Protocol for request handling functionality.

Note: The dispatch_request method has been removed in favor of using plugin adapters' handle_request() method directly.

IRequestTracer

Bases: Protocol

Request tracing interface.

trace_request async

trace_request(request_id, method, url, headers, body=None)

Trace an outgoing request.

Parameters:

Name Type Description Default
request_id str

Unique request identifier

required
method str

HTTP method

required
url str

Target URL

required
headers dict[str, str]

Request headers

required
body bytes | None

Request body if available

None
Source code in ccproxy/services/interfaces.py
async def trace_request(
    self,
    request_id: str,
    method: str,
    url: str,
    headers: dict[str, str],
    body: bytes | None = None,
) -> None:
    """Trace an outgoing request.

    Args:
        request_id: Unique request identifier
        method: HTTP method
        url: Target URL
        headers: Request headers
        body: Request body if available
    """
    ...

trace_response async

trace_response(request_id, status, headers, body=None)

Trace an incoming response.

Parameters:

Name Type Description Default
request_id str

Unique request identifier

required
status int

HTTP status code

required
headers dict[str, str]

Response headers

required
body bytes | None

Response body if available

None
Source code in ccproxy/services/interfaces.py
async def trace_response(
    self,
    request_id: str,
    status: int,
    headers: dict[str, str],
    body: bytes | None = None,
) -> None:
    """Trace an incoming response.

    Args:
        request_id: Unique request identifier
        status: HTTP status code
        headers: Response headers
        body: Response body if available
    """
    ...

should_trace

should_trace()

Check if tracing is enabled.

Returns:

Type Description
bool

True if tracing should be performed

Source code in ccproxy/services/interfaces.py
def should_trace(self) -> bool:
    """Report whether tracing is currently enabled.

    Returns:
        True when requests and responses should be traced
    """
    ...

IMetricsCollector

Bases: Protocol

Metrics collection interface.

track_request

track_request(method, path, provider=None)

Track an incoming request.

Parameters:

Name Type Description Default
method str

HTTP method

required
path str

Request path

required
provider str | None

Optional provider identifier

None
Source code in ccproxy/services/interfaces.py
def track_request(
    self, method: str, path: str, provider: str | None = None
) -> None:
    """Track an incoming request.

    Args:
        method: HTTP method
        path: Request path
        provider: Optional provider identifier
    """
    ...

track_response

track_response(status, duration, provider=None)

Track a response.

Parameters:

Name Type Description Default
status int

HTTP status code

required
duration float

Response time in seconds

required
provider str | None

Optional provider identifier

None
Source code in ccproxy/services/interfaces.py
def track_response(
    self, status: int, duration: float, provider: str | None = None
) -> None:
    """Track a response.

    Args:
        status: HTTP status code
        duration: Response time in seconds
        provider: Optional provider identifier
    """
    ...

track_error

track_error(error_type, provider=None)

Track an error occurrence.

Parameters:

Name Type Description Default
error_type str

Type of error

required
provider str | None

Optional provider identifier

None
Source code in ccproxy/services/interfaces.py
def track_error(self, error_type: str, provider: str | None = None) -> None:
    """Track an error occurrence.

    Args:
        error_type: Type of error
        provider: Optional provider identifier
    """
    ...

track_tokens

track_tokens(
    input_tokens, output_tokens, provider=None, model=None
)

Track token usage.

Parameters:

Name Type Description Default
input_tokens int

Number of input tokens

required
output_tokens int

Number of output tokens

required
provider str | None

Optional provider identifier

None
model str | None

Optional model identifier

None
Source code in ccproxy/services/interfaces.py
def track_tokens(
    self,
    input_tokens: int,
    output_tokens: int,
    provider: str | None = None,
    model: str | None = None,
) -> None:
    """Track token usage.

    Args:
        input_tokens: Number of input tokens
        output_tokens: Number of output tokens
        provider: Optional provider identifier
        model: Optional model identifier
    """
    ...

StreamingMetrics

Bases: Protocol

Streaming response handler interface.

handle_stream async

handle_stream(response, request_context=None)

Handle a streaming response.

Parameters:

Name Type Description Default
response Response

HTTP response object

required
request_context RequestContext | None

Optional request context

None

Yields:

Type Description
AsyncIterator[bytes]

Response chunks

Source code in ccproxy/services/interfaces.py
async def handle_stream(
    self,
    response: httpx.Response,
    request_context: "RequestContext | None" = None,
) -> AsyncIterator[bytes]:
    """Consume a streaming HTTP response and yield its body chunks.

    Args:
        response: Upstream HTTP response being streamed
        request_context: Context of the originating request, if any

    Yields:
        Raw chunks of the response body
    """
    ...

create_streaming_response

create_streaming_response(stream, headers=None)

Create a streaming response.

Parameters:

Name Type Description Default
stream AsyncIterator[bytes]

Async iterator of response chunks

required
headers dict[str, str] | None

Optional response headers

None

Returns:

Type Description
Response

Streaming response object

Source code in ccproxy/services/interfaces.py
def create_streaming_response(
    self,
    stream: AsyncIterator[bytes],
    headers: dict[str, str] | None = None,
) -> Response:
    """Wrap an async chunk iterator in a streaming response object.

    Args:
        stream: Async iterator producing response chunks
        headers: Headers to attach to the response, if any

    Returns:
        A response object that streams the given chunks
    """
    ...

handle_streaming_request async

handle_streaming_request(
    method,
    url,
    headers,
    body,
    handler_config,
    request_context,
    client_config=None,
    client=None,
)

Handle a streaming request.

Parameters:

Name Type Description Default
method str

HTTP method

required
url str

Target URL

required
headers dict[str, str]

Request headers

required
body bytes

Request body

required
handler_config Any

Handler configuration

required
request_context Any

Request context

required
client_config dict[str, Any] | None

Optional client configuration

None
client AsyncClient | None

Optional HTTP client

None

Returns:

Type Description
Any

Deferred streaming response

Source code in ccproxy/services/interfaces.py
async def handle_streaming_request(
    self,
    method: str,
    url: str,
    headers: dict[str, str],
    body: bytes,
    handler_config: Any,
    request_context: Any,
    client_config: dict[str, Any] | None = None,
    client: httpx.AsyncClient | None = None,
) -> Any:
    """Issue an upstream request whose response will be streamed.

    Args:
        method: HTTP verb for the upstream request
        url: Upstream target URL
        headers: Headers to send upstream
        body: Request payload
        handler_config: Configuration object for the streaming handler
        request_context: Context of the originating request
        client_config: Extra HTTP-client configuration, if any
        client: Pre-built HTTP client to reuse, if any

    Returns:
        A deferred streaming response object
    """
    ...

NullRequestTracer

Null implementation of request tracer (no-op).

trace_request async

trace_request(request_id, method, url, headers, body=None)

No-op request tracing.

Source code in ccproxy/services/interfaces.py
async def trace_request(
    self,
    request_id: str,
    method: str,
    url: str,
    headers: dict[str, str],
    body: bytes | None = None,
) -> None:
    """No-op request tracing."""
    pass

trace_response async

trace_response(request_id, status, headers, body=None)

No-op response tracing.

Source code in ccproxy/services/interfaces.py
async def trace_response(
    self,
    request_id: str,
    status: int,
    headers: dict[str, str],
    body: bytes | None = None,
) -> None:
    """No-op response tracing."""
    pass

should_trace

should_trace()

Always return False for null tracer.

Source code in ccproxy/services/interfaces.py
def should_trace(self) -> bool:
    """Tracing is permanently disabled for the null tracer."""
    return False

NullMetricsCollector

Null implementation of metrics collector (no-op).

track_request

track_request(method, path, provider=None)

No-op request tracking.

Source code in ccproxy/services/interfaces.py
def track_request(
    self, method: str, path: str, provider: str | None = None
) -> None:
    """No-op request tracking."""
    pass

track_response

track_response(status, duration, provider=None)

No-op response tracking.

Source code in ccproxy/services/interfaces.py
def track_response(
    self, status: int, duration: float, provider: str | None = None
) -> None:
    """No-op response tracking."""
    pass

track_error

track_error(error_type, provider=None)

No-op error tracking.

Source code in ccproxy/services/interfaces.py
def track_error(self, error_type: str, provider: str | None = None) -> None:
    """No-op error tracking."""
    pass

track_tokens

track_tokens(
    input_tokens, output_tokens, provider=None, model=None
)

No-op token tracking.

Source code in ccproxy/services/interfaces.py
def track_tokens(
    self,
    input_tokens: int,
    output_tokens: int,
    provider: str | None = None,
    model: str | None = None,
) -> None:
    """No-op token tracking."""
    pass

NullStreamingHandler

Null implementation of streaming handler.

handle_stream async

handle_stream(response, request_context=None)

Return empty stream.

Source code in ccproxy/services/interfaces.py
async def handle_stream(
    self,
    response: httpx.Response,
    request_context: "RequestContext | None" = None,
) -> AsyncIterator[bytes]:
    """Yield nothing: the null handler produces an empty stream."""
    return
    # Unreachable `yield` keeps this an async *generator* (not a plain
    # coroutine), matching the protocol's AsyncIterator contract.
    yield b""

create_streaming_response

create_streaming_response(stream, headers=None)

Create empty response.

Source code in ccproxy/services/interfaces.py
def create_streaming_response(
    self,
    stream: AsyncIterator[bytes],
    headers: dict[str, str] | None = None,
) -> Response:
    """Build an empty, non-streaming response; *stream* is ignored."""
    # Imported lazily so the null handler carries no hard starlette
    # dependency at import time.
    from starlette.responses import Response

    response_headers = headers if headers else {}
    return Response(content=b"", headers=response_headers)

handle_streaming_request async

handle_streaming_request(
    method,
    url,
    headers,
    body,
    handler_config,
    request_context,
    client_config=None,
    client=None,
)

Null implementation - returns a simple error response.

Source code in ccproxy/services/interfaces.py
async def handle_streaming_request(
    self,
    method: str,
    url: str,
    headers: dict[str, str],
    body: bytes,
    handler_config: Any,
    request_context: Any,
    client_config: dict[str, Any] | None = None,
    client: httpx.AsyncClient | None = None,
) -> Any:
    """Answer with a 503 JSON error: the null handler cannot stream."""
    # No real streaming backend is wired in, so return a plain
    # (non-streaming) response instead of attempting to stream.
    from starlette.responses import JSONResponse

    error_payload = {"error": "Streaming handler not available"}
    return JSONResponse(
        content=error_payload,
        status_code=503,  # Service Unavailable
        headers={"X-Error": "NullStreamingHandler"},
    )