Merge pull request #209 from biggraph/darabos-mcp-fixes
Files changed:
- examples/uploads/lynx_chatbot_scenario_selector.yaml +1 -4
- lynxkite-app/web/src/index.css +1 -0
- lynxkite-bio/pyproject.toml +1 -0
- lynxkite-bio/src/lynxkite_bio/k8s.py +9 -8
- lynxkite-bio/src/lynxkite_bio/llm.py +1 -1
- lynxkite-bio/src/lynxkite_bio/nims.py +8 -5
- lynxkite-core/src/lynxkite/core/ops.py +2 -1
- lynxkite-core/src/lynxkite/core/workspace.py +2 -2
- lynxkite-graph-analytics/src/lynxkite_graph_analytics/core.py +2 -2
- lynxkite-graph-analytics/src/lynxkite_graph_analytics/networkx_ops.py +1 -1
- lynxkite-lynxscribe/src/lynxkite_lynxscribe/lynxscribe_ops.py +23 -7
examples/uploads/lynx_chatbot_scenario_selector.yaml
CHANGED
@@ -296,7 +296,4 @@
     link_answer: *link
     min_similarity_score: -1
 - name: malicious
-  mode:
-  fixed_answer: >
-    I am sorry, but I feel you want me use in a wrong way. If I feel it wrong, please try to
-    rephrase your question.
+  mode: llm_only
lynxkite-app/web/src/index.css
CHANGED
@@ -325,6 +325,7 @@ body {
 .table-viewer {
   td {
     padding: 5px 10px;
+    vertical-align: top;
   }
 
   .image-in-table {
lynxkite-bio/pyproject.toml
CHANGED
@@ -7,6 +7,7 @@ requires-python = ">=3.11"
 dependencies = [
     "fsspec>=2025.2.0",
     "joblib>=1.4.2",
+    "kubernetes>=32.0.1",
     "lynxkite-core",
     "lynxkite-graph-analytics",
     "pandas>=2.2.3",
lynxkite-bio/src/lynxkite_bio/k8s.py
CHANGED
@@ -19,7 +19,7 @@ Use `k8s.needs()` to declare a Kubernetes dependency for an operation. For examp
     port=8000,
     args=["--model", "google/gemma-3-1b-it"],
     health_probe="/health",
-
+    forward_env=["HUGGING_FACE_HUB_TOKEN"],
     storage_path="/root/.cache/huggingface",
     storage_size="10Gi",
 )
@@ -39,8 +39,6 @@ import httpx
 from kubernetes import client, config
 from kubernetes.client.rest import ApiException
 
-config.load_kube_config()
-
 
 def _run(
     *,
@@ -51,6 +49,7 @@ def _run(
     storage_size,
     storage_path,
     health_probe,
+    forward_env,
     **kwargs,
 ):
     print(f"Starting {name} in namespace {namespace}...")
@@ -74,6 +73,10 @@
             ),
         )
     )
+    # Forward local environment variables to the container.
+    kwargs.setdefault("env", []).extend(
+        [{"name": name, "value": os.environ[name]} for name in forward_env]
+    )
     container = client.V1Container(
         name=name,
         image=image,
@@ -194,17 +197,13 @@ def _pvc_exists(name: str, namespace: str = "default") -> bool:
         raise
 
 
-def env_vars(*names: str):
-    """A convenient way to pass local environment variables to the microservice."""
-    return [{"name": name, "value": os.environ[name]} for name in names]
-
-
 def needs(
     name: str,
     image: str,
     port: int,
     args: list = None,
     env: list = None,
+    forward_env: list = None,
     health_probe: str = None,
     storage_size: str = None,
     storage_path: str = "/data",
@@ -217,12 +216,14 @@
     def decorator(func):
         @functools.wraps(func)
         def wrapper(*func_args, **func_kwargs):
+            config.load_kube_config()
             _using(
                 name=name,
                 image=image,
                 port=port,
                 args=args or [],
                 env=env or [],
+                forward_env=forward_env or [],
                 health_probe=health_probe,
                 storage_size=storage_size,
                 storage_path=storage_path,
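For context, a minimal sketch of what the new forward_env option amounts to (the helper name below is made up for illustration; the PR inlines this logic into _run()): the listed local environment variable names are copied, with their current values, into the container's env list.

import os

def build_env_entries(forward_env: list[str]) -> list[dict]:
    # Same shape as the entries _run() appends to kwargs["env"]:
    # one {"name": ..., "value": ...} dict per forwarded variable.
    return [{"name": name, "value": os.environ[name]} for name in forward_env]

os.environ.setdefault("HUGGING_FACE_HUB_TOKEN", "hf_example_token")  # placeholder value
print(build_env_entries(["HUGGING_FACE_HUB_TOKEN"]))
# [{'name': 'HUGGING_FACE_HUB_TOKEN', 'value': 'hf_example_token'}]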
lynxkite-bio/src/lynxkite_bio/llm.py
CHANGED
@@ -22,7 +22,7 @@ op = ops.op_registration(ENV)
     port=8000,
     args=["--model", "google/gemma-3-1b-it"],
     health_probe="/health",
-
+    forward_env=["HUGGING_FACE_HUB_TOKEN"],
     storage_path="/root/.cache/huggingface",
     storage_size="10Gi",
 )
lynxkite-bio/src/lynxkite_bio/nims.py
CHANGED
@@ -3,7 +3,9 @@
 from lynxkite_graph_analytics import Bundle
 from lynxkite.core import ops
 import httpx
+import io
 import pandas as pd
+import rdkit
 import os
 
 from . import k8s
@@ -122,14 +124,15 @@ def view_molecule(
     row_index: int = 0,
 ):
     molecule_data = bundle.dfs[molecule_table][molecule_column].iloc[row_index]
+    if isinstance(molecule_data, rdkit.Chem.Mol):
+        sio = io.StringIO()
+        with rdkit.Chem.SDWriter(sio) as w:
+            w.write(molecule_data)
+        molecule_data = sio.getvalue()
 
     return {
         "data": molecule_data,
-        "format": "pdb"
-        if molecule_data.startswith("ATOM")
-        else "sdf"
-        if molecule_data.startswith("CTfile")
-        else "smiles",
+        "format": "pdb" if molecule_data.startswith("ATOM") else "sdf",
     }
 
 
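A small standalone sketch of the RDKit conversion added above, assuming only that the column can hold rdkit.Chem.Mol objects (ethanol from SMILES is used as a stand-in molecule): the Mol is serialized to an SDF string through an in-memory buffer, which is what view_molecule now hands to the viewer.

import io
from rdkit import Chem

mol = Chem.MolFromSmiles("CCO")  # stand-in rdkit.Chem.Mol
sio = io.StringIO()
with Chem.SDWriter(sio) as w:  # SDWriter accepts a file-like object
    w.write(mol)
sdf_text = sio.getvalue()
print(sdf_text.startswith("ATOM"))  # False, so view_molecule would report "sdf"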
lynxkite-core/src/lynxkite/core/ops.py
CHANGED
@@ -254,8 +254,8 @@ def op(
         _view = "image"
         func = matplotlib_to_image(func)
     if slow:
-        func = mem.cache(func)
         func = make_async(func)
+        func = mem.cache(func)
     # Positional arguments are inputs.
     inputs = [
         Input(name=name, type=param.annotation)
@@ -456,6 +456,7 @@ def parse_doc(func):
         return doc
     if doc is None:
         return None
+    griffe.logger.setLevel("ERROR")
     ds = griffe.Docstring(doc, parent=_get_griffe_function(func))
     if "----" in doc:
         ds = ds.parse("numpy")
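The slow branch now applies mem.cache after make_async, so the cache becomes the outermost wrapper. A generic illustration of why wrapping order matters (the trace helper below is hypothetical, not LynxKite code): whichever wrapper is applied last sees the call first.

def trace(tag):
    def deco(func):
        def wrapped(*args, **kwargs):
            print(f"enter {tag}")
            return func(*args, **kwargs)
        return wrapped
    return deco

def compute():
    return 42

# Old order: cache applied first, async applied second -> the async wrapper is outermost.
old = trace("make_async")(trace("mem.cache")(compute))
# New order: async applied first, cache applied second -> the cache wrapper is outermost.
new = trace("mem.cache")(trace("make_async")(compute))

old()  # prints "enter make_async", then "enter mem.cache"
new()  # prints "enter mem.cache", then "enter make_async"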
lynxkite-core/src/lynxkite/core/workspace.py
CHANGED
@@ -52,7 +52,7 @@ class WorkspaceNode(BaseConfig):
         """Notifies the frontend that work has started on this node."""
         self.data.error = None
         self.data.status = NodeStatus.active
-        if hasattr(self, "_crdt"):
+        if hasattr(self, "_crdt") and "data" in self._crdt:
             with self._crdt.doc.transaction():
                 self._crdt["data"]["error"] = None
                 self._crdt["data"]["status"] = NodeStatus.active
@@ -63,7 +63,7 @@ class WorkspaceNode(BaseConfig):
         self.data.input_metadata = result.input_metadata
         self.data.error = result.error
         self.data.status = NodeStatus.done
-        if hasattr(self, "_crdt"):
+        if hasattr(self, "_crdt") and "data" in self._crdt:
             with self._crdt.doc.transaction():
                 try:
                     self._crdt["data"]["status"] = NodeStatus.done
lynxkite-graph-analytics/src/lynxkite_graph_analytics/core.py
CHANGED
@@ -227,13 +227,13 @@ async def _execute_node(
             if p.type == nx.Graph:
                 if isinstance(x, Bundle):
                     x = x.to_nx()
-                assert isinstance(x, nx.Graph), "Input must be a graph."
+                assert isinstance(x, nx.Graph), f"Input must be a graph. Got: {x}"
             elif p.type == Bundle:
                 if isinstance(x, nx.Graph):
                     x = Bundle.from_nx(x)
                 elif isinstance(x, pd.DataFrame):
                     x = Bundle.from_df(x)
-                assert isinstance(x, Bundle), "Input must be a graph or dataframe."
+                assert isinstance(x, Bundle), f"Input must be a graph or dataframe. Got: {x}"
             inputs.append(x)
         except Exception as e:
             if not os.environ.get("LYNXKITE_SUPPRESS_OP_ERRORS"):
lynxkite-graph-analytics/src/lynxkite_graph_analytics/networkx_ops.py
CHANGED
@@ -156,7 +156,7 @@ def wrapped(name: str, func):
         for k, v in kwargs.items():
             if v == "None":
                 kwargs[k] = None
-        res = await ops.
+        res = await ops.make_async(func)(*args, **kwargs)
         # Figure out what the returned value is.
         if isinstance(res, nx.Graph):
             return res
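The fix routes the call through ops.make_async(func) so both plain and coroutine NetworkX wrappers can be awaited uniformly. A hypothetical sketch of what such a helper typically looks like (an assumption for illustration, not the actual lynxkite.core.ops.make_async):

import asyncio
import functools
import inspect

def make_async(func):
    """Return an awaitable version of func; coroutine functions pass through unchanged."""
    if inspect.iscoroutinefunction(func):
        return func

    @functools.wraps(func)
    async def wrapper(*args, **kwargs):
        # Run the synchronous function without blocking the event loop.
        return await asyncio.to_thread(func, *args, **kwargs)

    return wrapper

async def main():
    print(await make_async(sum)([1, 2, 3]))  # 6

asyncio.run(main())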
lynxkite-lynxscribe/src/lynxkite_lynxscribe/lynxscribe_ops.py
CHANGED
@@ -864,11 +864,15 @@ async def get_chat_api(ws: str):
     from lynxkite.core import workspace
 
     cwd = pathlib.Path()
-    path = cwd / ws
+    path = cwd / (ws + ".lynxkite.json")
     assert path.is_relative_to(cwd), f"Path '{path}' is invalid"
     assert path.exists(), f"Workspace {path} does not exist"
     ws = workspace.Workspace.load(path)
-
+    # Remove any test nodes.
+    ws.nodes = [op for op in ws.nodes if op.data.title != "Test Chat API"]
+    ws.normalize()
+    executor = ops.EXECUTORS[ENV]
+    contexts = await executor(ws)
     nodes = [op for op in ws.nodes if op.data.title == "LynxScribe RAG Graph Chatbot Backend"]
     [node] = nodes
     context = contexts[node.id]
@@ -879,9 +883,18 @@ async def stream_chat_api_response(request):
     chat_api = await get_chat_api(request["model"])
     request = ChatCompletionPrompt(**request)
     async for chunk in await chat_api.answer(request, stream=True):
+        chunk.sources = []
         yield chunk.model_dump_json()
 
 
+async def get_chat_api_response(request):
+    chat_api = await get_chat_api(request["model"])
+    request = ChatCompletionPrompt(**request)
+    response = await chat_api.answer(request, stream=False)
+    response.sources = []
+    return response.model_dump_json()
+
+
 async def api_service_post(request):
     """
     Serves a chat endpoint that matches LynxScribe's interface.
@@ -899,9 +912,12 @@ async def api_service_post(request):
     path = "/".join(request.url.path.split("/")[4:])
     request = await request.json()
     if path == "chat/completions":
-
+        if request["stream"]:
+            from sse_starlette.sse import EventSourceResponse
 
-
+            return EventSourceResponse(stream_chat_api_response(request))
+        else:
+            return await get_chat_api_response(request)
     return {"error": "Not found"}
 
 
@@ -912,7 +928,7 @@ async def api_service_get(request):
         "object": "list",
         "data": [
             {
-                "id": ws,
+                "id": ws.removesuffix(".lynxkite.json"),
                 "object": "model",
                 "created": 0,
                 "owned_by": "lynxkite",
@@ -924,7 +940,7 @@ async def api_service_get(request):
     return {"error": "Not found"}
 
 
-def get_lynxscribe_workspaces():
+def get_lynxscribe_workspaces() -> list[str]:
     from lynxkite.core import workspace
 
     workspaces = []
@@ -933,7 +949,7 @@ def get_lynxscribe_workspaces():
         try:
            ws = workspace.Workspace.load(p)
            if ws.env == ENV:
-                workspaces.append(p)
+                workspaces.append(str(p))
         except Exception:
             pass  # Ignore files that are not valid workspaces.
     workspaces.sort()
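A self-contained sketch of the stream/non-stream branch this diff introduces, using the same sse_starlette class; the FastAPI app, route, and request fields below are illustrative stand-ins, not the LynxScribe service: when the client sets "stream", an async generator is wrapped in an EventSourceResponse, otherwise a single JSON payload is returned.

from fastapi import FastAPI, Request
from sse_starlette.sse import EventSourceResponse

app = FastAPI()

async def stream_words(text: str):
    # Stand-in for stream_chat_api_response(): yield one SSE event per chunk.
    for word in text.split():
        yield {"data": word}

@app.post("/v1/chat/completions")
async def chat_completions(request: Request):
    body = await request.json()
    text = "hello from the sketch"
    if body.get("stream"):
        return EventSourceResponse(stream_words(text))
    return {"choices": [{"message": {"role": "assistant", "content": text}}]}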
|