Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
91 changes: 74 additions & 17 deletions crates/zeroclaw-providers/src/compatible.rs
Original file line number Diff line number Diff line change
Expand Up @@ -313,32 +313,45 @@ impl OpenAiCompatibleProvider {
self
}

/// Collect all `system` role messages, concatenate their content,
/// and prepend to the first `user` message. Drop all system messages.
/// Used for providers (e.g. MiniMax) that reject `role: system`.
/// Collect all `system` role messages and keep them in a provider-safe
/// shape. Strict OpenAI-compatible endpoints accept a leading system
/// message but reject system messages later in the history.
fn flatten_system_messages(messages: &[ChatMessage], merge: bool) -> Vec<ChatMessage> {
if !merge {
let mut saw_system = false;
let mut system_content = String::new();
let mut result: Vec<ChatMessage> = Vec::with_capacity(messages.len());

for message in messages {
if message.role == "system" {
saw_system = true;
if !message.content.is_empty() {
if !system_content.is_empty() {
system_content.push_str("\n\n");
}
system_content.push_str(&message.content);
}
} else {
result.push(message.clone());
}
}

if !saw_system {
return messages.to_vec();
}
let system_content: String = messages
.iter()
.filter(|m| m.role == "system")
.map(|m| m.content.as_str())
.collect::<Vec<_>>()
.join("\n\n");

if system_content.is_empty() {
return messages.to_vec();
return result;
}

let mut result: Vec<ChatMessage> = messages
.iter()
.filter(|m| m.role != "system")
.cloned()
.collect();
if !merge {
result.insert(0, ChatMessage::system(system_content));
return result;
}

if let Some(first_user) = result.iter_mut().find(|m| m.role == "user") {
first_user.content = format!("{system_content}\n\n{}", first_user.content);
if !system_content.is_empty() {
first_user.content = format!("{system_content}\n\n{}", first_user.content);
}
} else {
// No user message found: insert a synthetic user message with system content
result.insert(0, ChatMessage::user(&system_content));
Expand Down Expand Up @@ -3853,6 +3866,50 @@ mod tests {
assert!(!flattened.iter().any(|m| m.role == "system"));
}

#[test]
fn flatten_system_messages_keeps_system_only_at_start_without_user_merge() {
    // A history with system messages scattered through the middle: with
    // merge=false they should collapse into a single leading system message
    // while every non-system turn keeps its relative order.
    let history = vec![
        ChatMessage::system("System A"),
        ChatMessage::user("User turn"),
        ChatMessage::assistant("Assistant turn"),
        ChatMessage::system("System B"),
        ChatMessage::user("Follow-up"),
    ];

    let result = OpenAiCompatibleProvider::flatten_system_messages(&history, false);

    let roles: Vec<&str> = result.iter().map(|m| m.role.as_str()).collect();
    assert_eq!(roles, vec!["system", "user", "assistant", "user"]);

    let system_count = result.iter().filter(|m| m.role == "system").count();
    assert_eq!(system_count, 1);

    // Both system bodies must survive inside the merged leading message.
    let merged = &result[0].content;
    assert!(merged.contains("System A"));
    assert!(merged.contains("System B"));
}

#[test]
fn flatten_system_messages_drops_empty_system_messages() {
    // System messages whose content is empty should vanish entirely rather
    // than leaving an empty leading system message behind.
    let history = vec![
        ChatMessage::system(""),
        ChatMessage::user("User turn"),
        ChatMessage::system(""),
    ];

    let result = OpenAiCompatibleProvider::flatten_system_messages(&history, false);

    assert_eq!(result.len(), 1);
    let remaining = &result[0];
    assert_eq!(remaining.role, "user");
    assert_eq!(remaining.content, "User turn");
}

#[test]
fn flatten_system_messages_inserts_synthetic_user_when_no_user_exists() {
let messages = vec![
Expand Down
48 changes: 48 additions & 0 deletions crates/zeroclaw-runtime/src/agent/history.rs
Original file line number Diff line number Diff line change
Expand Up @@ -143,6 +143,49 @@ pub fn estimate_history_tokens(history: &[ChatMessage]) -> usize {
.sum()
}

/// Collapse every `system` message in `history` into a single system
/// message placed at the front, preserving the relative order of all
/// non-system messages.
///
/// Multiple system bodies are joined with a blank line (`"\n\n"`).
/// System messages with empty content are dropped outright, so a history
/// containing only empty system messages ends up with no system message
/// at all.
pub fn normalize_system_messages(history: &mut Vec<ChatMessage>) {
    let mut system_content = String::new();
    let mut non_system = Vec::with_capacity(history.len());

    for message in history.drain(..) {
        if message.role == "system" {
            if !message.content.is_empty() {
                if !system_content.is_empty() {
                    system_content.push_str("\n\n");
                }
                system_content.push_str(&message.content);
            }
        } else {
            non_system.push(message);
        }
    }

    // A non-empty accumulator implies at least one system message was seen,
    // so no separate `saw_system` flag is needed.
    if !system_content.is_empty() {
        history.push(ChatMessage::system(system_content));
    }
    history.extend(non_system);
}

/// Append `content` to the conversation's system message, creating one at
/// the front of `history` if none exists, then normalize so at most one
/// system message remains and it sits first.
pub fn append_or_merge_system_message(history: &mut Vec<ChatMessage>, content: impl Into<String>) {
    let content = content.into();

    if content.is_empty() {
        // Nothing to add; still normalize any pre-existing system messages.
        normalize_system_messages(history);
        return;
    }

    match history.iter_mut().find(|message| message.role == "system") {
        Some(existing) => {
            // Join onto the first system message with a blank-line separator.
            if !existing.content.is_empty() {
                existing.content.push_str("\n\n");
            }
            existing.content.push_str(&content);
        }
        None => history.insert(0, ChatMessage::system(content)),
    }

    normalize_system_messages(history);
}

/// Trim conversation history to prevent unbounded growth.
/// Preserves the system prompt (first message if role=system) and the most recent messages.
pub fn trim_history(history: &mut Vec<ChatMessage>, max_history: usize) {
Expand All @@ -162,6 +205,7 @@ pub fn trim_history(history: &mut Vec<ChatMessage>, max_history: usize) {
let to_remove = non_system_count - max_history;
history.drain(start..start + to_remove);
remove_orphaned_tool_messages(history);
normalize_system_messages(history);
}

#[derive(Debug, Clone, Serialize, Deserialize)]
Expand Down Expand Up @@ -194,6 +238,10 @@ pub fn load_interactive_session_history(
} else if state.history.first().map(|msg| msg.role.as_str()) != Some("system") {
state.history.insert(0, ChatMessage::system(system_prompt));
}
normalize_system_messages(&mut state.history);
if state.history.first().map(|msg| msg.role.as_str()) != Some("system") {
state.history.insert(0, ChatMessage::system(system_prompt));
}

// Self-heal persisted sessions that were written with orphaned
// tool_result messages (e.g. a crash mid-compaction, or a trim that
Expand Down
Loading
Loading