Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
# Changelog - [0.6.0] - yyyy-MM-dd

### New Features
1. Add new max-flow path algorithm tool.

### Bug Fixes

Expand Down
114 changes: 114 additions & 0 deletions mcp_server/src/mcp_server_neo4j_gds/path_algorithm_handlers.py
Original file line number Diff line number Diff line change
Expand Up @@ -1165,3 +1165,117 @@ def execute(self, arguments: Dict[str, Any]) -> Any:
nodeLabels=arguments.get("nodeLabels"),
relTypes=arguments.get("relTypes"),
)


class MaxFlowHandler(AlgorithmHandler):
    """Handler for the Neo4j GDS max-flow algorithm.

    Resolves user-supplied source/target node names to internal node ids,
    projects a graph from the supplied parameters, and streams the per-
    relationship flow assignment that maximizes total transport from the
    sources to the targets.
    """

    def _resolve_nodes(self, node_names: list, node_identifier_property: str):
        """Resolve a list of node names to internal node ids.

        Matching is case-insensitive substring (`CONTAINS`) on the given
        identifier property. When several nodes match one name, the first
        row returned is used.

        Returns a tuple ``(node_ids, matched_names, unmatched_names)``.
        """
        # NOTE(review): Cypher cannot parameterize property keys, so the
        # property name is interpolated into the query text. It is expected
        # to come from the tool schema (e.g. 'name', 'title'), not raw
        # free text — confirm upstream validation if exposed more widely.
        query = f"""
        MATCH (n)
        WHERE toLower(n.{node_identifier_property}) CONTAINS toLower($name)
        RETURN id(n) as node_id, n.{node_identifier_property} as node_name
        """

        node_ids = []
        matched_names = []
        unmatched_names = []

        for name in node_names:
            df = self.gds.run_cypher(query, params={"name": name})
            if df.empty:
                unmatched_names.append(name)
            else:
                # Keep only the first match, mirroring the single-node
                # lookup behavior used by the other path handlers.
                node_ids.append(int(df["node_id"].iloc[0]))
                matched_names.append(df["node_name"].iloc[0])

        return node_ids, matched_names, unmatched_names

    def max_flow(
        self,
        source_nodes: list,
        target_nodes: list,
        node_identifier_property: str,
        **kwargs,
    ):
        """Run max flow from *source_nodes* to *target_nodes*.

        Parameters
        ----------
        source_nodes : list of node-name strings flow originates from.
        target_nodes : list of node-name strings flow is sent to.
        node_identifier_property : node property used to match names.
        **kwargs : projection options (nodeLabels, relTypes) plus algorithm
            parameters such as capacityProperty, forwarded to GDS.

        Returns a dict with ``found`` and either an error ``message`` or a
        ``flows`` list of per-relationship flow records.
        """
        # Resolve source node names to ids; fail fast on any miss.
        source_node_ids, _, unmatched_sources = self._resolve_nodes(
            source_nodes, node_identifier_property
        )
        if unmatched_sources:
            return {
                "found": False,
                "message": f"The following source nodes were not found: {', '.join(unmatched_sources)}",
            }
        if not source_node_ids:
            return {"found": False, "message": "No source nodes found"}

        # Resolve target node names to ids; same fail-fast contract.
        target_node_ids, _, unmatched_targets = self._resolve_nodes(
            target_nodes, node_identifier_property
        )
        if unmatched_targets:
            return {
                "found": False,
                "message": f"The following target nodes were not found: {', '.join(unmatched_targets)}",
            }
        if not target_node_ids:
            return {"found": False, "message": "No target nodes found"}

        with projected_graph_from_params(self.gds, **kwargs) as G:
            # Projection-only keys are stripped before calling the algorithm.
            params = clean_params(kwargs, ["nodeLabels", "relTypes"])
            logger.info("Max Flow parameters: %s", params)

            max_flow_data = self.gds.maxFlow.stream(
                G, sourceNodes=source_node_ids, targetNodes=target_node_ids, **params
            )

            # Attach human-readable endpoints via a single batched lookup
            # instead of one query per row.
            max_flow_data["sourceNodeName"] = self.gds.util.asNodes(
                max_flow_data["source"].tolist()
            )
            max_flow_data["targetNodeName"] = self.gds.util.asNodes(
                max_flow_data["target"].tolist()
            )

            # One record per relationship carrying flow.
            flows = max_flow_data[
                [
                    "source",
                    "target",
                    "sourceNodeName",
                    "targetNodeName",
                    "flow",
                ]
            ].to_dict("records")

            return {
                "found": True,
                "flows": flows,
            }

    def execute(self, arguments: Dict[str, Any]) -> Any:
        """Adapt the MCP tool-call argument dict to :meth:`max_flow`."""
        return self.max_flow(
            arguments.get("sourceNodes"),
            arguments.get("targetNodes"),
            arguments.get("nodeIdentifierProperty"),
            capacityProperty=arguments.get("capacityProperty"),
            nodeLabels=arguments.get("nodeLabels"),
            relTypes=arguments.get("relTypes"),
        )
44 changes: 44 additions & 0 deletions mcp_server/src/mcp_server_neo4j_gds/path_algorithm_specs.py
Original file line number Diff line number Diff line change
Expand Up @@ -609,4 +609,48 @@
"required": [],
},
),
types.Tool(
name="max_flow",
description="Given source nodes, target nodes and relationships with capacity constraints, the max-flow algorithm assigns a flow to each relationship to achieve maximal transport from source to target. "
"The flow is a scalar property for each relationship and must satisfy 1) Flow into a node equals flow out of a node (preservation). 2) Flow is restricted by the capacity of a relationship",
inputSchema={
"type": "object",
"properties": {
"sourceNodes": {
"type": "array",
"items": {"type": "string"},
"description": "List of source node names from which flow originates.",
},
"targetNodes": {
"type": "array",
"items": {"type": "string"},
"description": "List of target node names to which flow is sent.",
},
"nodeIdentifierProperty": {
"type": "string",
"description": "Property name to use for identifying nodes (e.g., 'name', 'Name', 'title'). Use get_node_properties_keys to find available properties.",
},
"capacityProperty": {
"type": "string",
"description": "Name of the relationship property that specifies the maximum flow capacity for each edge.",
},
"nodeLabels": {
"type": "array",
"items": {"type": "string"},
"description": "The node labels used to project and run max flow on. Nodes with different node labels will be ignored. Do not specify to run for all nodes",
},
"relTypes": {
"type": "array",
"items": {"type": "string"},
"description": "The relationships types used to project and run max flow on. Relationship types of different type will be ignored. Do not specify to run for all relationship types",
},
},
"required": [
"sourceNodes",
"targetNodes",
"nodeIdentifierProperty",
"capacityProperty",
],
},
),
]
2 changes: 2 additions & 0 deletions mcp_server/src/mcp_server_neo4j_gds/registry.py
Original file line number Diff line number Diff line change
Expand Up @@ -51,6 +51,7 @@
DepthFirstSearchHandler,
BellmanFordSingleSourceShortestPathHandler,
LongestPathHandler,
MaxFlowHandler,
)


Expand Down Expand Up @@ -103,6 +104,7 @@ class AlgorithmRegistry:
"depth_first_search": DepthFirstSearchHandler,
"bellman_ford_single_source_shortest_path": BellmanFordSingleSourceShortestPathHandler,
"longest_path": LongestPathHandler,
"max_flow": MaxFlowHandler,
}

@classmethod
Expand Down
2 changes: 1 addition & 1 deletion mcp_server/tests/docker-compose.yml
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
services:
neo4j:
image: neo4j:2025.05.0
image: neo4j:2025.11.2
ports:
- "7474"
- "7687"
Expand Down
1 change: 1 addition & 0 deletions mcp_server/tests/test_basic_tools.py
Original file line number Diff line number Diff line change
Expand Up @@ -72,6 +72,7 @@ async def test_list_tools(mcp_client):
"depth_first_search",
"bellman_ford_single_source_shortest_path",
"longest_path",
"max_flow",
# similarity
"node_similarity",
"k_nearest_neighbors",
Expand Down
24 changes: 24 additions & 0 deletions mcp_server/tests/test_path_algorithms.py
Original file line number Diff line number Diff line change
Expand Up @@ -625,3 +625,27 @@ async def test_longest_path(mcp_client):
filtered_paths = result_filtered_data["paths"]
assert len(filtered_paths) == 1
assert result_filtered_data["paths"][0]["costs"] == [0.0, 3.0, 6.0, 10.0, 13.0]


@pytest.mark.asyncio
async def test_max_flow(mcp_client):
    """Streaming max flow from one source to four targets on the 'time'
    capacity property should yield exactly 7 flow records."""
    arguments = {
        "sourceNodes": ["Baker Street"],
        "targetNodes": [
            "Bond Street",
            "Euston Square",
            "Paddington",
            "Wembley Park",
        ],
        "nodeIdentifierProperty": "name",
        "capacityProperty": "time",
    }

    result = await mcp_client.call_tool("max_flow", arguments)

    assert len(result) == 1
    payload = json.loads(result[0]["text"])
    assert len(payload.get("flows")) == 7