Skip to content
This repository was archived by the owner on Jun 5, 2025. It is now read-only.

Commit 9b66cf8

Browse files
committed
Add a forgotten file
1 parent 94308e5 commit 9b66cf8

File tree

2 files changed

+59
-1
lines changed

2 files changed

+59
-1
lines changed
Lines changed: 58 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,58 @@
1+
from typing import Any, AsyncIterable, AsyncIterator, Dict, Iterable, Iterator, Union
2+
3+
from litellm import ChatCompletionRequest, ModelResponse
4+
5+
from codegate.providers.normalizer import ModelInputNormalizer, ModelOutputNormalizer
6+
7+
8+
class LLamaCppInputNormalizer(ModelInputNormalizer):
    """Normalizes raw request dicts into liteLLM ``ChatCompletionRequest`` objects."""

    def normalize(self, data: Dict) -> ChatCompletionRequest:
        """
        Normalize the input data into a ``ChatCompletionRequest``.

        Args:
            data: Raw request parameters as a plain dict.

        Raises:
            ValueError: If ``data`` does not form a valid completion request.
        """
        try:
            return ChatCompletionRequest(**data)
        except Exception as e:
            # Chain the original exception so the root cause of the invalid
            # parameters is preserved in the traceback.
            raise ValueError(f"Invalid completion parameters: {str(e)}") from e

    def denormalize(self, data: ChatCompletionRequest) -> Dict:
        """
        Denormalize the input data.

        Pass-through: the request is returned as-is (presumably
        ``ChatCompletionRequest`` is already dict-like — the return
        annotation suggests so; confirm against liteLLM).
        """
        return data
24+
25+
class LLamaCppOutputNormalizer(ModelOutputNormalizer):
    """Output normalizer for llama.cpp replies.

    liteLLM already produces replies in the normalized ``ModelResponse``
    format, so every method in this class is an identity pass-through.
    """

    def normalize_streaming(
        self,
        model_reply: Union[AsyncIterable[Any], Iterable[Any]],
    ) -> Union[AsyncIterator[ModelResponse], Iterator[ModelResponse]]:
        """Return the reply stream unchanged — it is already normalized."""
        stream = model_reply
        return stream

    def normalize(self, model_reply: Any) -> ModelResponse:
        """Return the reply unchanged — it is already a ``ModelResponse``."""
        reply = model_reply
        return reply

    def denormalize(self, normalized_reply: ModelResponse) -> Any:
        """Return the completion result unchanged; the client accepts the
        normalized format directly."""
        result = normalized_reply
        return result

    def denormalize_streaming(
        self,
        normalized_reply: Union[AsyncIterable[ModelResponse], Iterable[ModelResponse]],
    ) -> Union[AsyncIterator[Any], Iterator[Any]]:
        """Return the completion stream unchanged; the client accepts the
        normalized format directly."""
        stream = normalized_reply
        return stream

src/codegate/providers/llamacpp/provider.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -3,8 +3,8 @@
33
from fastapi import Request
44

55
from codegate.providers.base import BaseProvider
6-
from codegate.providers.llamacpp.completion_handler import LlamaCppCompletionHandler
76
from codegate.providers.llamacpp.adapter import LlamaCppAdapter
7+
from codegate.providers.llamacpp.completion_handler import LlamaCppCompletionHandler
88

99

1010
class LlamaCppProvider(BaseProvider):

0 commit comments

Comments
 (0)