1
1
import asyncio
2
2
import json
3
3
import os
4
+ import sys
4
5
import cognee
5
6
from cognee .shared .logging_utils import get_logger , get_log_file_location
6
7
import importlib .util
7
- from contextlib import redirect_stderr , redirect_stdout
8
+ from contextlib import redirect_stdout
8
9
9
10
# from PIL import Image as PILImage
10
11
import mcp .types as types
@@ -90,102 +91,112 @@ async def list_tools() -> list[types.Tool]:
90
91
@mcp.call_tool()
async def call_tools(name: str, arguments: dict) -> list[types.TextContent]:
    """Dispatch an MCP tool call to the matching cognee operation.

    Args:
        name: Tool name — one of "cognify", "codify", "search", "prune".
        arguments: Tool-specific arguments supplied by the MCP client.

    Returns:
        A single-element list of TextContent with the textual result.
        Errors are reported as text instead of being raised, so the MCP
        session stays alive.
    """
    try:
        # NOTE: MCP uses stdout to communicate, we must redirect all output
        # going to stdout (like the print function) to stderr.
        with redirect_stdout(sys.stderr):
            log_file = get_log_file_location()

            if name == "cognify":
                # Long-running job: launch in the background so the MCP call
                # returns before the client-side timeout fires.
                asyncio.create_task(
                    cognify(
                        text=arguments["text"],
                        graph_model_file=arguments.get("graph_model_file"),
                        graph_model_name=arguments.get("graph_model_name"),
                    )
                )

                text = (
                    f"Background process launched due to MCP timeout limitations.\n"
                    f"Average completion time is around 4 minutes.\n"
                    f"For current cognify status you can check the log file at: {log_file}"
                )

                return [
                    types.TextContent(
                        type="text",
                        text=text,
                    )
                ]
            elif name == "codify":
                # Same background-launch pattern as cognify (was a bare `if`,
                # made `elif` for a consistent dispatch chain).
                asyncio.create_task(codify(arguments.get("repo_path")))

                text = (
                    f"Background process launched due to MCP timeout limitations.\n"
                    f"Average completion time is around 4 minutes.\n"
                    f"For current codify status you can check the log file at: {log_file}"
                )

                return [
                    types.TextContent(
                        type="text",
                        text=text,
                    )
                ]
            elif name == "search":
                search_results = await search(arguments["search_query"], arguments["search_type"])

                return [types.TextContent(type="text", text=search_results)]
            elif name == "prune":
                await prune()

                return [types.TextContent(type="text", text="Pruned")]
            else:
                # Previously an unrecognized tool fell through and returned
                # None (an invalid MCP response); report it via the shared
                # error path instead.
                raise ValueError(f"Unknown tool: {name}")
    except Exception as e:
        logger.error(f"Error calling tool '{name}': {str(e)}")
        return [types.TextContent(type="text", text=f"Error calling tool '{name}': {str(e)}")]
146
146
147
147
148
148
async def cognify(text: str, graph_model_file: str = None, graph_model_name: str = None) -> str:
    """Build knowledge graph from the input text"""
    # NOTE: MCP uses stdout to communicate, we must redirect all output
    # going to stdout (like the print function) to stderr.
    # As cognify is an async background job the output had to be redirected again.
    with redirect_stdout(sys.stderr):
        logger.info("Cognify process starting.")

        # Use a custom pydantic model only when both file and class name
        # were supplied; otherwise fall back to the default KnowledgeGraph.
        graph_model = (
            load_class(graph_model_file, graph_model_name)
            if graph_model_file and graph_model_name
            else KnowledgeGraph
        )

        await cognee.add(text)

        try:
            await cognee.cognify(graph_model=graph_model)
            logger.info("Cognify process finished.")
        except Exception as e:
            logger.error("Cognify process failed.")
            raise ValueError(f"Failed to cognify: {str(e)}")
164
168
165
169
166
170
async def codify(repo_path: str):
    """Run the code graph pipeline over *repo_path* and log each step.

    Intended to run as a fire-and-forget background task; success/failure
    is only reported through the logger (check the log file for status).
    """
    # NOTE: MCP uses stdout to communicate, we must redirect all output
    # going to stdout (like the print function) to stderr.
    # As codify is an async background job the output had to be redirected again.
    with redirect_stdout(sys.stderr):
        logger.info("Codify process starting.")
        results = []
        async for result in run_code_graph_pipeline(repo_path, False):
            results.append(result)
            logger.info(result)
        # The run counts as successful only if every pipeline step yielded
        # a truthy result (an empty run is also treated as success by all()).
        if all(results):
            # Fixed typo in the log message: "succesfully" -> "successfully".
            logger.info("Codify process finished successfully.")
        else:
            logger.info("Codify process failed.")
176
184
177
185
178
186
async def search(search_query: str, search_type: str) -> str:
    """Search the knowledge graph"""
    # NOTE: MCP uses stdout to communicate, we must redirect all output
    # going to stdout (like the print function) to stderr.
    with redirect_stdout(sys.stderr):
        # Normalize once; used both to pick the SearchType and to choose
        # the result serialization below.
        normalized_type = search_type.upper()

        search_results = await cognee.search(
            query_type=SearchType[normalized_type], query_text=search_query
        )

        if normalized_type == "CODE":
            # Code results are structured; serialize with the project encoder.
            return json.dumps(search_results, cls=JSONEncoder)

        # Graph results are rendered as a human-readable edge listing.
        return retrieved_edges_to_string(search_results)
189
200
190
201
191
202
async def prune ():
@@ -198,7 +209,7 @@ async def main():
198
209
try :
199
210
from mcp .server .stdio import stdio_server
200
211
201
- logger .info ("Starting Cognee MCP server..." )
212
+ logger .info ("Cognee MCP server started ..." )
202
213
203
214
async with stdio_server () as (read_stream , write_stream ):
204
215
await mcp .run (
@@ -215,7 +226,8 @@ async def main():
215
226
raise_exceptions = True ,
216
227
)
217
228
218
- logger .info ("Cognee MCP server started." )
229
+ logger .info ("Cognee MCP server closed." )
230
+
219
231
except Exception as e :
220
232
logger .error (f"Server failed to start: { str (e )} " , exc_info = True )
221
233
raise
0 commit comments