Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion cognee/api/v1/cognify/cognify.py
Original file line number Diff line number Diff line change
Expand Up @@ -245,7 +245,7 @@ class ScientificPaper(DataPoint):
)


async def get_default_tasks( # TODO: Find out a better way to do this (Boris's comment)
async def get_default_tasks( # TODO(#2349): Find out a better way to do this
user: User = None,
graph_model: BaseModel = KnowledgeGraph,
chunker=TextChunker,
Expand Down
4 changes: 2 additions & 2 deletions cognee/context_global_variables.py
Original file line number Diff line number Diff line change
Expand Up @@ -138,8 +138,8 @@ async def set_database_global_context_variables(dataset: Union[str, UUID], user_
)

# Set vector and graph database configuration based on dataset database information
# TODO: Add better handling of vector and graph config across Cognee.
# LRU_CACHE takes into account order of inputs, if order of inputs is changed it will be registered as a new DB adapter
# TODO(#2357): Add better handling of vector and graph config across Cognee.
# LRU_CACHE takes into account order of inputs, if order of inputs is changed it will be registered as a new DB adapter
vector_config = {
"vector_db_provider": dataset_database.vector_database_provider,
"vector_db_url": dataset_database.vector_database_url,
Expand Down
6 changes: 3 additions & 3 deletions cognee/infrastructure/databases/cache/cache_db_interface.py
Original file line number Diff line number Diff line change
Expand Up @@ -53,7 +53,7 @@ async def add_qa(
context: str,
answer: str,
):
"""Backward-compatibility: delegates to create_qa_entry with generated qa_id. :TODO: delete when retrievers are updated"""
"""Backward-compatibility: delegates to create_qa_entry with generated qa_id."""
return await self.create_qa_entry(
user_id,
session_id,
Expand Down Expand Up @@ -82,7 +82,7 @@ async def create_qa_entry(
pass

async def get_latest_qa(self, user_id: str, session_id: str, last_n: int = 5):
"""Backward-compat: delegates to get_latest_qa_entries. :TODO: delete when retrievers are updated"""
"""Backward-compat: delegates to get_latest_qa_entries."""
return await self.get_latest_qa_entries(user_id, session_id, last_n)

@abstractmethod
Expand All @@ -93,7 +93,7 @@ async def get_latest_qa_entries(self, user_id: str, session_id: str, last_n: int
pass

async def get_all_qas(self, user_id: str, session_id: str):
"""Backward-compat: delegates to get_all_qa_entries. :TODO: delete when retrievers are updated"""
"""Backward-compat: delegates to get_all_qa_entries."""
return await self.get_all_qa_entries(user_id, session_id)

@abstractmethod
Expand Down
1 change: 0 additions & 1 deletion cognee/infrastructure/databases/exceptions/exceptions.py
Original file line number Diff line number Diff line change
Expand Up @@ -46,7 +46,6 @@ def __init__(
self.message = message
self.name = name
self.status_code = status_code
# super().__init__(message, name, status_code) :TODO: This is not an error anymore with the dynamic exception handling therefore we shouldn't log error


class EntityAlreadyExistsError(CogneeValidationError):
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -75,15 +75,15 @@ async def create_dataset(cls, dataset_id: Optional[UUID], user: Optional[User])
"Content-Type": "application/json",
}

# TODO: Maybe we can allow **kwargs parameter forwarding for cases like these
# TODO(#2354): Maybe we can allow **kwargs parameter forwarding for cases like these
# To allow different configurations between datasets
payload = {
"version": "5",
"region": "europe-west1",
"memory": "1GB",
"name": graph_db_name[
0:29
], # TODO: Find better name to name Neo4j instance within 30 character limit
], # TODO(#2354): Find better name to name Neo4j instance within 30 character limit
"type": "professional-db",
"tenant_id": tenant_id,
"cloud_provider": "gcp",
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -236,7 +236,7 @@ async def search(
query_vector: Optional[List[float]] = None,
limit: Optional[int] = None,
with_vector: bool = False,
include_payload: bool = False, # TODO: Add support for this parameter
include_payload: bool = False, # TODO(#2352): Add support for this parameter
):
"""
Perform a search in the specified collection using either a text query or a vector
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -361,8 +361,8 @@ async def search(
limit: Optional[int] = 15,
with_vector: bool = False,
normalized: bool = True,
include_payload: bool = False, # TODO: Add support for this parameter when set to False
node_name: Optional[List[str]] = None, # TODO: Add support/functionality for this parameter
include_payload: bool = False, # TODO(#2353): Add support for this parameter when set to False
node_name: Optional[List[str]] = None, # TODO(#2353): Add support/functionality for this parameter
):
"""
Search for items in a collection using either a text or a vector query.
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -87,8 +87,8 @@ def __init__(
fallback_endpoint=fallback_endpoint,
)
self.instructor_mode = instructor_mode if instructor_mode else self.default_instructor_mode
# TODO: With gpt5 series models OpenAI expects JSON_SCHEMA as a mode for structured outputs.
# Make sure all new gpt models will work with this mode as well.
# TODO(#2355): With gpt5 series models OpenAI expects JSON_SCHEMA as a mode for structured outputs.
# Make sure all new gpt models will work with this mode as well.
if "gpt-5" in model:
self.aclient = instructor.from_litellm(
litellm.acompletion, mode=instructor.Mode(self.instructor_mode)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -285,7 +285,7 @@ async def test_rag_completion_context_multiple_chunks(setup_test_environment_wit
@pytest.mark.asyncio
async def test_rag_completion_context_complex(setup_test_environment_with_chunks_complex):
"""Integration test: verify CompletionRetriever can retrieve context (complex)."""
# TODO: top_k doesn't affect the output, it should be fixed.
# TODO(#2351): top_k doesn't affect the output, it should be fixed.
retriever = CompletionRetriever(top_k=20)
query = "Christina"

Expand Down
2 changes: 1 addition & 1 deletion cognee/tests/test_cognee_server_start.py
Original file line number Diff line number Diff line change
Expand Up @@ -188,7 +188,7 @@ def test_server_is_running(self):
len(ontology_nodes), 0, "No ontology nodes found - ontology was not integrated"
)

# TODO: Add test to verify cognify pipeline is complete before testing search
# TODO(#2350): Add test to verify cognify pipeline is complete before testing search

# Search request
url = "http://127.0.0.1:8000/api/v1/search"
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -83,7 +83,7 @@ async def test_extract_times(regex_extractor):
@pytest.mark.asyncio
async def test_extract_money(regex_extractor):
"""Test extraction of monetary amounts."""
# TODO: Lazar to fix regex for test, it's failing currently
# TODO(#2356): Lazar to fix regex for test, it's failing currently
pass
# text = "The product costs $1,299.99 or €1.045,00 depending on your region."
# entities = await regex_extractor.extract_entities(text)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -87,7 +87,6 @@ async def test_prune(adapter):
assert await adapter.get_all_qa_entries("u1", "s1") == []


# Backward-compatibility tests (add_qa, get_latest_qa, get_all_qas):TODO: Can be deleted after session manager integration into retrievers


@pytest.mark.asyncio
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -119,7 +119,6 @@ async def test_prune(adapter):
assert await adapter.get_all_qa_entries("u1", "s1") == []


# Backward-compatibility tests (add_qa, get_latest_qa, get_all_qas) :TODO: Can be deleted after session manager integration into retrievers
@pytest.mark.asyncio
async def test_add_qa_backward_compat(adapter):
"""Legacy add_qa stores entry with auto-generated qa_id."""
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -247,7 +247,7 @@ class ScientificPaper(DataPoint):
)


async def get_default_tasks( # TODO: Find out a better way to do this (Boris's comment)
async def get_default_tasks( # TODO(#2349): Find out a better way to do this
user: User = None,
graph_model: BaseModel = KnowledgeGraph,
chunker=TextChunker,
Expand Down