
Add system prompt only in case bad packages or secrets are found #456

Merged
merged 1 commit into from
Dec 28, 2024
2 changes: 2 additions & 0 deletions src/codegate/pipeline/base.py
@@ -78,6 +78,8 @@ class PipelineContext:
     input_request: Optional[Prompt] = field(default_factory=lambda: None)
     output_responses: List[Output] = field(default_factory=list)
     shortcut_response: bool = False
+    bad_packages_found: bool = False
+    secrets_found: bool = False
 
     def add_code_snippet(self, snippet: CodeSnippet):
         self.code_snippets.append(snippet)
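
The two new fields are plain booleans on the shared pipeline context, so any step that runs earlier can set them and any step that runs later can branch on them. A minimal sketch of that pattern (the field names come from this PR; everything else is illustrative only):

```python
from dataclasses import dataclass


@dataclass
class PipelineContext:
    # Only the fields added in this PR are shown here.
    bad_packages_found: bool = False
    secrets_found: bool = False


# A detection step flips the flag it owns...
ctx = PipelineContext()
ctx.bad_packages_found = True

# ...and a later step only acts when something was flagged.
if ctx.secrets_found or ctx.bad_packages_found:
    print("inject the codegate system prompt")
```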
27 changes: 16 additions & 11 deletions src/codegate/pipeline/codegate_context_retriever/codegate.py
@@ -80,20 +80,25 @@ async def process(
         # Generate context string using the searched objects
         logger.info(f"Adding {len(searched_objects)} packages to the context")
 
-        if len(searched_objects) > 0:
+        # Nothing to do if no bad packages are found
+        if len(searched_objects) == 0:
+            return PipelineResult(request=request, context=context)
+        else:
+            # Add context for bad packages
             context_str = self.generate_context_str(searched_objects, context)
+            context.bad_packages_found = True
 
-            last_user_idx = self.get_last_user_message_idx(request)
+        last_user_idx = self.get_last_user_message_idx(request)
 
-            # Make a copy of the request
-            new_request = request.copy()
+        # Make a copy of the request
+        new_request = request.copy()
 
-            # Add the context to the last user message
-            # Format: "Context: {context_str} \n Query: {last user message content}"
-            message = new_request["messages"][last_user_idx]
-            context_msg = f'Context: {context_str} \n\n Query: {message["content"]}'
-            message["content"] = context_msg
+        # Add the context to the last user message
+        # Format: "Context: {context_str} \n Query: {last user message content}"
+        message = new_request["messages"][last_user_idx]
+        context_msg = f'Context: {context_str} \n\n Query: {message["content"]}'
+        message["content"] = context_msg
 
-            logger.debug("Final context message", context_message=context_msg)
+        logger.debug("Final context message", context_message=context_msg)
 
-            return PipelineResult(request=new_request, context=context)
+        return PipelineResult(request=new_request, context=context)
2 changes: 1 addition & 1 deletion src/codegate/pipeline/factory.py
@@ -29,8 +29,8 @@ def create_input_pipeline(self) -> SequentialPipelineProcessor:
             CodegateSecrets(),
             CodegateVersion(),
             CodeSnippetExtractor(),
-            SystemPrompt(Config.get_config().prompts.default_chat),
             CodegateContextRetriever(),
+            SystemPrompt(Config.get_config().prompts.default_chat),
         ]
         return SequentialPipelineProcessor(input_steps, self.secrets_manager, is_fim=False)
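
Moving SystemPrompt after CodegateContextRetriever matters because the input steps run in order over the same context: the retriever (and the earlier secrets step) must get a chance to set bad_packages_found / secrets_found before SystemPrompt reads them. A rough sketch of that sequential flow, assuming each step exposes an async process(request, context) returning an object with request and context attributes (as PipelineResult is constructed in this diff); the loop itself is illustrative, not the project's actual processor:

```python
async def run_input_steps(steps, request, context):
    # Later steps observe whatever flags earlier steps wrote to the context.
    for step in steps:
        result = await step.process(request, context)
        request, context = result.request, result.context
    return request, context
```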
1 change: 1 addition & 0 deletions src/codegate/pipeline/secrets/secrets.py
@@ -288,6 +288,7 @@ async def process(
             if i > last_assistant_idx:
                 total_redacted += redacted_count
 
+        context.secrets_found = total_redacted > 0
         logger.info(f"Total secrets redacted since last assistant message: {total_redacted}")
 
         # Store the count in context metadata
5 changes: 5 additions & 0 deletions src/codegate/pipeline/system_prompt/codegate.py
@@ -32,6 +32,11 @@ async def process(
         Add system prompt if not present, otherwise prepend codegate system prompt
         to the existing system prompt
         """
+
+        # Nothing to do if no secrets or bad_packages are found
+        if not (context.secrets_found or context.bad_packages_found):
+            return PipelineResult(request=request, context=context)
+
         new_request = request.copy()
 
         if "messages" not in new_request:
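
Put together, the system prompt step now becomes a no-op unless an earlier step flagged a finding. A hedged sketch of the behavior described in the docstring (prepend to an existing system message, otherwise insert one); self.codegate_system_prompt and the message handling are stand-ins, not the file's exact implementation:

```python
async def process(self, request, context):
    # Nothing to do if no secrets or bad packages were found upstream.
    if not (context.secrets_found or context.bad_packages_found):
        return PipelineResult(request=request, context=context)

    new_request = request.copy()
    messages = new_request.setdefault("messages", [])
    system_messages = [m for m in messages if m.get("role") == "system"]

    if system_messages:
        # Prepend the codegate system prompt to the existing system prompt.
        existing = system_messages[0]["content"]
        system_messages[0]["content"] = f"{self.codegate_system_prompt}\n\n{existing}"
    else:
        # No system message yet: insert one at the front.
        messages.insert(0, {"role": "system", "content": self.codegate_system_prompt})

    return PipelineResult(request=new_request, context=context)
```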