 
 
 class StorageEngine:
-    def get_client(self, data_path):
+    __storage_engine = None
+
+    def __new__(cls, *args, **kwargs):
+        if cls.__storage_engine is None:
+            cls.__storage_engine = super().__new__(cls)
+        return cls.__storage_engine
+
+    # This function is needed only for the unit testing for the
+    # mocks to work.
+    @classmethod
+    def recreate_instance(cls, *args, **kwargs):
+        cls.__storage_engine = None
+        return cls(*args, **kwargs)
+
+    def __init__(self, data_path="./weaviate_data"):
+        if hasattr(self, "initialized"):
+            return
+
+        self.initialized = True
+        self.data_path = data_path
+        self.inference_engine = LlamaCppInferenceEngine()
+        self.model_path = (
+            f"{Config.get_config().model_base_path}/{Config.get_config().embedding_model}"
+        )
+        self.schema_config = schema_config
+
+        # setup schema for weaviate
+        self.weaviate_client = self.get_client(self.data_path)
+        if self.weaviate_client is not None:
+            try:
+                self.weaviate_client.connect()
+                self.setup_schema(self.weaviate_client)
+            except Exception as e:
+                logger.error(f"Failed to connect or setup schema: {str(e)}")
+
+    def __del__(self):
         try:
-            # Get current config
-            config = Config.get_config()
+            self.weaviate_client.close()
+        except Exception as e:
+            logger.error(f"Failed to close client: {str(e)}")
 
+    def get_client(self, data_path):
+        try:
             # Configure Weaviate logging
             additional_env_vars = {
                 # Basic logging configuration
-                "LOG_FORMAT": config.log_format.value.lower(),
-                "LOG_LEVEL": config.log_level.value.lower(),
+                "LOG_FORMAT": Config.get_config().log_format.value.lower(),
+                "LOG_LEVEL": Config.get_config().log_level.value.lower(),
                 # Disable colored output
                 "LOG_FORCE_COLOR": "false",
                 # Configure JSON format
                 "LOG_JSON_FIELDS": "timestamp, level,message",
                 # Configure text format
-                "LOG_METHOD": config.log_format.value.lower(),
+                "LOG_METHOD": Config.get_config().log_format.value.lower(),
                 "LOG_LEVEL_IN_UPPER": "false",  # Keep level lowercase like codegate format
                 # Disable additional fields
                 "LOG_GIT_HASH": "false",
@@ -60,28 +98,6 @@ def get_client(self, data_path):
             logger.error(f"Error during client creation: {str(e)}")
             return None
 
-    def __init__(self, data_path="./weaviate_data"):
-        self.data_path = data_path
-        self.inference_engine = LlamaCppInferenceEngine()
-        self.model_path = "./models/all-minilm-L6-v2-q5_k_m.gguf"
-        self.schema_config = schema_config
-
-        # setup schema for weaviate
-        weaviate_client = self.get_client(self.data_path)
-        if weaviate_client is not None:
-            try:
-                weaviate_client.connect()
-                self.setup_schema(weaviate_client)
-            except Exception as e:
-                logger.error(f"Failed to connect or setup schema: {str(e)}")
-            finally:
-                try:
-                    weaviate_client.close()
-                except Exception as e:
-                    logger.error(f"Failed to close client: {str(e)}")
-        else:
-            logger.error("Could not find client, skipping schema setup.")
-
     def setup_schema(self, client):
         for class_config in self.schema_config:
             if not client.collections.exists(class_config["name"]):
@@ -135,14 +151,8 @@ async def search(self, query: str, limit=5, distance=0.3, packages=None) -> list
         query_vector = await self.inference_engine.embed(self.model_path, [query])
 
         # Perform the vector search
-        weaviate_client = self.get_client(self.data_path)
-        if weaviate_client is None:
-            logger.error("Could not find client, not returning results.")
-            return []
-
         try:
-            weaviate_client.connect()
-            collection = weaviate_client.collections.get("Package")
+            collection = self.weaviate_client.collections.get("Package")
             if packages:
                 response = collection.query.near_vector(
                     query_vector[0],
@@ -159,16 +169,10 @@ async def search(self, query: str, limit=5, distance=0.3, packages=None) -> list
                     return_metadata=MetadataQuery(distance=True),
                 )
 
-            weaviate_client.close()
             if not response:
                 return []
             return response.objects
 
         except Exception as e:
             logger.error(f"Error during search: {str(e)}")
             return []
-        finally:
-            try:
-                weaviate_client.close()
-            except Exception as e:
-                logger.error(f"Failed to close client: {str(e)}")
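
For readers new to the pattern, here is a minimal self-contained sketch of what the diff introduces: a __new__-based singleton with an "initialized" guard and a recreate_instance() reset hook for tests. The Singleton class and its value attribute are hypothetical stand-ins for illustration only; the real StorageEngine additionally wires up the Weaviate client and embedding model, which are omitted here.

# Illustrative only: standalone version of the __new__-based singleton
# plus the recreate_instance() reset used so unit-test mocks take effect.

class Singleton:
    __instance = None

    def __new__(cls, *args, **kwargs):
        # Hand back the one shared instance, creating it on first use.
        if cls.__instance is None:
            cls.__instance = super().__new__(cls)
        return cls.__instance

    @classmethod
    def recreate_instance(cls, *args, **kwargs):
        # Drop the cached instance so the next call runs __init__ again.
        cls.__instance = None
        return cls(*args, **kwargs)

    def __init__(self, value=0):
        # Skip re-initialization on repeat calls, mirroring the
        # "initialized" guard in StorageEngine.__init__.
        if hasattr(self, "initialized"):
            return
        self.initialized = True
        self.value = value

a = Singleton(1)
b = Singleton(2)                    # same object; __init__ returns early, value stays 1
assert a is b and b.value == 1
c = Singleton.recreate_instance(3)  # fresh object, fresh __init__
assert c is not a and c.value == 3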