chart data loading
This commit is contained in:
@@ -1,18 +1,27 @@
|
||||
# Multi-stage build for DexOrder user container
|
||||
FROM python:3.11-slim as builder
|
||||
FROM python:3.11-slim AS builder
|
||||
|
||||
WORKDIR /build
|
||||
|
||||
# Install build dependencies
|
||||
# Install build dependencies including protobuf compiler
|
||||
RUN apt-get update && apt-get install -y --no-install-recommends \
|
||||
gcc \
|
||||
g++ \
|
||||
protobuf-compiler \
|
||||
&& rm -rf /var/lib/apt/lists/*
|
||||
|
||||
# Copy dependency specifications
|
||||
COPY setup.py .
|
||||
COPY dexorder/ dexorder/
|
||||
|
||||
# Copy protobuf definitions (copied by bin/build from canonical /protobuf/)
|
||||
COPY protobuf/ protobuf/
|
||||
|
||||
# Compile protobufs to Python
|
||||
RUN mkdir -p dexorder/generated && \
|
||||
protoc --python_out=dexorder/generated --proto_path=protobuf protobuf/*.proto && \
|
||||
touch dexorder/generated/__init__.py
|
||||
|
||||
# Install dependencies to a target directory
|
||||
RUN pip install --no-cache-dir --target=/build/deps .
|
||||
|
||||
@@ -38,6 +47,9 @@ COPY --from=builder /build/deps /usr/local/lib/python3.11/site-packages/
|
||||
COPY dexorder/ /app/dexorder/
|
||||
COPY main.py /app/
|
||||
|
||||
# Copy generated protobuf code from builder
|
||||
COPY --from=builder /build/dexorder/generated/ /app/dexorder/generated/
|
||||
|
||||
# Create directories for config, secrets, and data
|
||||
RUN mkdir -p /app/config /app/secrets /app/data && \
|
||||
chown -R dexorder:dexorder /app
|
||||
|
||||
@@ -1,6 +1,11 @@
|
||||
# Example configuration file for DexOrder user container
|
||||
# Mount this at /app/config/config.yaml in k8s
|
||||
|
||||
# Data directory for persistent storage (workspace, strategies, etc.)
|
||||
# Defaults to ./data relative to working directory if not set
|
||||
# In k8s this is mounted as a PVC at /app/data
|
||||
data_dir: "/app/data"
|
||||
|
||||
# User-specific settings
|
||||
user:
|
||||
timezone: "UTC"
|
||||
|
||||
40
client-py/dexorder/api/ChartingAPI.py
Normal file
40
client-py/dexorder/api/ChartingAPI.py
Normal file
@@ -0,0 +1,40 @@
|
||||
import logging
|
||||
from matplotlib import pyplot as plt
|
||||
import pandas as pd
|
||||
from abc import abstractmethod, ABC
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class ChartingAPI(ABC):
    """
    User-facing pyplot charts. Start a Figure with plot_ohlc() or gca(), continue plotting indicators and other
    time-series using plot_indicator(), add any ad-hoc axes you need, then call show() to send an image to the user.
    """

    @abstractmethod
    def plot_ohlc(self, ohlc: pd.DataFrame, axes: plt.Axes = None, **plot_args) -> plt.Figure:
        """
        Plots a standard OHLC candlestick chart in the user's preferred style. Use this to overlay any price-series data
        or to have a chart for reference above a time-series indicator or other value.

        :param ohlc: OHLC candle data to plot.
        :param axes: Existing axes to draw into; if None the implementation supplies the axes.
        :param plot_args: Extra keyword arguments forwarded to the underlying plotting call.
        :return: The Figure containing the candlestick chart.
        """

    @abstractmethod
    def plot_indicator(self, indicator: pd.DataFrame, domain: tuple[float, float] = None, axes: plt.Axes = None,
                       **plot_args) -> None:
        """
        Plots an indicator in the user's standard style. If axes is None then new axes will be created at the bottom
        of the current figure.

        :param indicator: Time-series indicator values to plot.
        :param domain: The minimum and maximum possible values of the indicator. If None, the domain will be
            inferred from the data.
        :param axes: Existing axes to draw into; None creates new axes at the bottom of the current figure.
        :param plot_args: Extra keyword arguments forwarded to the underlying plotting call.
        """

    @abstractmethod
    def gca(self) -> plt.Figure:
        """
        Returns a generic pyplot gca() pre-configured with the user's preferred styling. Calling show() will
        send the chart image to the user.

        Use this only if it doesn't make sense to have a candlestick chart shown anywhere in the figure. Otherwise
        for most indicators, price series, and other time-series values, it's better to start with plot_ohlc() to
        at least give the user a chart for reference, even if the primary data you want to show has separate axes.

        NOTE(review): annotated as returning plt.Figure, but the name and docstring
        ("pyplot gca()") suggest plt.Axes — confirm against implementations.
        """
||||
3
client-py/dexorder/api/__init__.py
Normal file
3
client-py/dexorder/api/__init__.py
Normal file
@@ -0,0 +1,3 @@
|
||||
import logging

# Module-level logger shared by the dexorder.api package.
log = logging.getLogger(__name__)
|
||||
384
client-py/dexorder/api/workspace_tools.py
Normal file
384
client-py/dexorder/api/workspace_tools.py
Normal file
@@ -0,0 +1,384 @@
|
||||
"""
|
||||
Workspace Tools for MCP Server
|
||||
|
||||
Provides read/write/patch tools for workspace stores that are persisted
|
||||
in the user container. These stores sync with the gateway and web client.
|
||||
|
||||
Storage location: {DATA_DIR}/workspace/{store_name}.json
|
||||
|
||||
Available tools:
|
||||
- workspace_read(store_name) -> dict
|
||||
- workspace_write(store_name, data) -> None
|
||||
- workspace_patch(store_name, patch) -> dict
|
||||
|
||||
Future: Path-based triggers for container-side reactions to state changes.
|
||||
"""
|
||||
|
||||
import json
|
||||
import logging
|
||||
from pathlib import Path
|
||||
from typing import Any, Callable, Optional
|
||||
|
||||
import jsonpatch
|
||||
|
||||
log = logging.getLogger(__name__)

# Default workspace storage directory (relative to working dir for local dev)
# In production, this is overridden by passing workspace_dir from Config
DEFAULT_WORKSPACE_DIR = Path("data/workspace")


class WorkspaceStore:
    """
    Manages persistent workspace stores on the filesystem.

    Stores are JSON files at: {workspace_dir}/{store_name}.json

    Files are written atomically (temp file + rename) so a crash mid-write
    cannot leave a store truncated, and always as UTF-8.
    """

    def __init__(self, workspace_dir: Path = DEFAULT_WORKSPACE_DIR):
        """
        :param workspace_dir: Directory holding the per-store JSON files;
            created (with parents) if it does not exist.
        """
        self.workspace_dir = workspace_dir
        self.workspace_dir.mkdir(parents=True, exist_ok=True)

        # Path triggers (for future use)
        # Map of "store_name/json/pointer/path" -> list of callbacks
        self._triggers: dict[str, list[Callable[[Any, Any], None]]] = {}

    def _store_path(self, store_name: str) -> Path:
        """Get the filesystem path for a store."""
        # Sanitize store name to prevent directory traversal.
        safe_name = store_name.replace("/", "_").replace("\\", "_").replace("..", "_")
        return self.workspace_dir / f"{safe_name}.json"

    def _read_json(self, path: Path) -> Any:
        """Load a JSON file as UTF-8; raises on missing or invalid content."""
        with open(path, "r", encoding="utf-8") as f:
            return json.load(f)

    def _write_json(self, path: Path, data: Any) -> None:
        """Atomically serialize data to path: write a temp file, then rename over."""
        tmp = path.parent / (path.name + ".tmp")
        with open(tmp, "w", encoding="utf-8") as f:
            json.dump(data, f, indent=2)
        tmp.replace(path)  # atomic on POSIX; replaces any existing file

    def read(self, store_name: str) -> dict[str, Any]:
        """
        Read a workspace store from disk.

        Returns:
            dict with:
            - exists: bool - whether the store exists
            - data: Any - the store data (if exists)
            - error: str - error message (if any)
        """
        path = self._store_path(store_name)

        if not path.exists():
            log.debug(f"Store '{store_name}' does not exist at {path}")
            return {"exists": False}

        try:
            data = self._read_json(path)
            log.debug(f"Read store '{store_name}' from {path}")
            return {"exists": True, "data": data}
        except json.JSONDecodeError as e:
            log.error(f"Failed to parse store '{store_name}': {e}")
            return {"exists": False, "error": f"Invalid JSON: {e}"}
        except Exception as e:
            log.error(f"Failed to read store '{store_name}': {e}")
            return {"exists": False, "error": str(e)}

    def write(self, store_name: str, data: Any) -> dict[str, Any]:
        """
        Write a workspace store to disk.

        Returns:
            dict with:
            - success: bool
            - error: str - error message (if any)
        """
        path = self._store_path(store_name)

        try:
            # Read old state so triggers can report old -> new.
            old_state = None
            if path.exists():
                try:
                    old_state = self._read_json(path)
                except Exception:
                    # Corrupt/unreadable previous state: treat as no old state.
                    pass

            self._write_json(path, data)
            log.debug(f"Wrote store '{store_name}' to {path}")

            # Fire triggers only if the state actually changed.
            if old_state != data:
                self._fire_triggers(store_name, old_state, data)

            return {"success": True}
        except Exception as e:
            log.error(f"Failed to write store '{store_name}': {e}")
            return {"success": False, "error": str(e)}

    def patch(self, store_name: str, patch: list[dict]) -> dict[str, Any]:
        """
        Apply a JSON patch (RFC 6902) to a store.

        Args:
            store_name: Name of the store
            patch: List of JSON patch operations

        Returns:
            dict with:
            - success: bool
            - data: Any - the new state (if success)
            - error: str - error message (if any)
        """
        path = self._store_path(store_name)

        try:
            # Read current state (or start from an empty object if absent).
            old_state = {}
            if path.exists():
                old_state = self._read_json(path)

            # Apply the RFC 6902 patch.
            new_state = jsonpatch.apply_patch(old_state, patch)

            self._write_json(path, new_state)
            log.debug(f"Patched store '{store_name}' with {len(patch)} operations")

            self._fire_triggers(store_name, old_state, new_state)

            return {"success": True, "data": new_state}
        except jsonpatch.JsonPatchConflict as e:
            log.error(f"Patch conflict for store '{store_name}': {e}")
            return {"success": False, "error": f"Patch conflict: {e}"}
        except Exception as e:
            log.error(f"Failed to patch store '{store_name}': {e}")
            return {"success": False, "error": str(e)}

    def delete(self, store_name: str) -> dict[str, Any]:
        """
        Delete a workspace store.

        Deleting a store that does not exist is treated as success.

        Returns:
            dict with:
            - success: bool
            - error: str - error message (if any)
        """
        path = self._store_path(store_name)

        try:
            if path.exists():
                path.unlink()
                log.debug(f"Deleted store '{store_name}'")
            return {"success": True}
        except Exception as e:
            log.error(f"Failed to delete store '{store_name}': {e}")
            return {"success": False, "error": str(e)}

    def list_stores(self) -> list[str]:
        """List all existing store names."""
        return [path.stem for path in self.workspace_dir.glob("*.json")]

    # =========================================================================
    # Triggers (for future use)
    # =========================================================================

    def on_change(
        self,
        store_name: str,
        path: str,
        callback: Callable[[Any, Any], None]
    ) -> Callable[[], None]:
        """
        Register a trigger for when a path changes.

        Args:
            store_name: Name of the store to watch
            path: JSON pointer path (e.g., "/drawings" or "/*" for any change)
            callback: Function called with (old_value, new_value)

        Returns:
            Unsubscribe function
        """
        key = f"{store_name}{path}"
        self._triggers.setdefault(key, []).append(callback)

        log.debug(f"Registered trigger for {key}")

        def unsubscribe():
            # Idempotent: a second call (or an already-removed callback) is a no-op.
            if key in self._triggers:
                try:
                    self._triggers[key].remove(callback)
                    if not self._triggers[key]:
                        del self._triggers[key]
                except ValueError:
                    pass

        return unsubscribe

    def _fire_triggers(self, store_name: str, old_state: Any, new_state: Any) -> None:
        """Fire triggers for changes between old and new state."""
        if not self._triggers:
            return

        # Store-level wildcard triggers receive the whole old/new state.
        wildcard_key = f"{store_name}/*"
        for callback in self._triggers.get(wildcard_key, []):
            try:
                callback(old_state, new_state)
            except Exception as e:
                log.error(f"Error in trigger callback for {wildcard_key}: {e}")

        # Path-specific triggers: diff the states and match changed paths.
        if old_state is not None and new_state is not None:
            try:
                patch = jsonpatch.make_patch(old_state, new_state)
                for op in patch.patch:
                    op_path = op.get("path", "")
                    trigger_key = f"{store_name}{op_path}"
                    if trigger_key in self._triggers:
                        old_value = self._get_value_at_path(old_state, op_path)
                        new_value = self._get_value_at_path(new_state, op_path)
                        for callback in self._triggers[trigger_key]:
                            try:
                                callback(old_value, new_value)
                            except Exception as e:
                                log.error(f"Error in trigger callback for {trigger_key}: {e}")
            except Exception as e:
                log.error(f"Error computing patch for triggers: {e}")

    def _get_value_at_path(self, obj: Any, path: str) -> Any:
        """Get the value at a JSON pointer (RFC 6901) path, or None if unresolvable."""
        if not path or path == "/":
            return obj

        current = obj
        for token in path.split("/")[1:]:  # skip empty first segment
            # Unescape per RFC 6901: "~1" -> "/", then "~0" -> "~" (this order).
            part = token.replace("~1", "/").replace("~0", "~")
            if current is None:
                return None
            if isinstance(current, dict):
                current = current.get(part)
            elif isinstance(current, list):
                try:
                    current = current[int(part)]
                except (ValueError, IndexError):
                    return None
            else:
                return None

        return current
|
||||
|
||||
|
||||
# Global workspace store instance (created lazily by get_workspace_store)
_workspace_store: Optional[WorkspaceStore] = None


def get_workspace_store(workspace_dir: Optional[Path] = None) -> WorkspaceStore:
    """
    Get or create the global workspace store.

    The first call fixes the storage directory; subsequent calls return the
    same instance. If a later call requests a *different* directory, the
    request is ignored and a warning is logged so misconfiguration (e.g.
    registering tools before the server configured the store) is visible
    instead of silent.

    :param workspace_dir: Directory for store files; defaults to DEFAULT_WORKSPACE_DIR.
    :return: The process-wide WorkspaceStore singleton.
    """
    global _workspace_store
    if _workspace_store is None:
        _workspace_store = WorkspaceStore(workspace_dir or DEFAULT_WORKSPACE_DIR)
    elif workspace_dir is not None and workspace_dir != _workspace_store.workspace_dir:
        log.warning(
            "get_workspace_store called with %s but store already uses %s; keeping existing",
            workspace_dir,
            _workspace_store.workspace_dir,
        )
    return _workspace_store
|
||||
|
||||
|
||||
def register_workspace_tools(server):
    """
    Register workspace tools on an MCP server.

    Registers two async handlers on *server*: a call_tool handler that
    dispatches workspace_read / workspace_write / workspace_patch to the
    global WorkspaceStore, and a list_tools handler advertising those
    tools' JSON schemas.

    Args:
        server: MCP Server instance
    """
    # NOTE(review): this binds the global store at registration time, using its
    # default directory unless get_workspace_store(workspace_dir) was called
    # earlier — confirm the intended initialization order against server startup.
    store = get_workspace_store()

    @server.call_tool()
    async def handle_tool_call(name: str, arguments: dict) -> Any:
        """Handle workspace tool calls."""
        # Dispatch by tool name; each store method returns a result dict
        # (success/exists/data/error) rather than raising on I/O problems.
        if name == "workspace_read":
            return store.read(arguments.get("store_name", ""))
        elif name == "workspace_write":
            return store.write(
                arguments.get("store_name", ""),
                arguments.get("data")
            )
        elif name == "workspace_patch":
            return store.patch(
                arguments.get("store_name", ""),
                arguments.get("patch", [])
            )
        else:
            raise ValueError(f"Unknown tool: {name}")

    @server.list_tools()
    async def list_tools():
        """List available workspace tools."""
        # Tool descriptors follow the MCP convention: name, description, and a
        # JSON-Schema "inputSchema" describing the arguments.
        return [
            {
                "name": "workspace_read",
                "description": "Read a workspace store from persistent storage",
                "inputSchema": {
                    "type": "object",
                    "properties": {
                        "store_name": {
                            "type": "string",
                            "description": "Name of the store (e.g., 'chartStore', 'userPreferences')"
                        }
                    },
                    "required": ["store_name"]
                }
            },
            {
                "name": "workspace_write",
                "description": "Write a workspace store to persistent storage",
                "inputSchema": {
                    "type": "object",
                    "properties": {
                        "store_name": {
                            "type": "string",
                            "description": "Name of the store"
                        },
                        "data": {
                            "description": "Data to write"
                        }
                    },
                    "required": ["store_name", "data"]
                }
            },
            {
                "name": "workspace_patch",
                "description": "Apply JSON patch operations to a workspace store",
                "inputSchema": {
                    "type": "object",
                    "properties": {
                        "store_name": {
                            "type": "string",
                            "description": "Name of the store"
                        },
                        "patch": {
                            "type": "array",
                            "description": "JSON Patch operations (RFC 6902)",
                            "items": {
                                "type": "object",
                                "properties": {
                                    "op": {"type": "string", "enum": ["add", "remove", "replace", "move", "copy", "test"]},
                                    "path": {"type": "string"},
                                    "value": {}
                                },
                                "required": ["op", "path"]
                            }
                        }
                    },
                    "required": ["store_name", "patch"]
                }
            }
        ]

    log.info("Registered workspace tools")
|
||||
@@ -23,6 +23,23 @@ from mcp.server.stdio import stdio_server
|
||||
|
||||
from dexorder import EventPublisher, start_lifecycle_manager, get_lifecycle_manager
|
||||
from dexorder.events import EventType, UserEvent, DeliverySpec
|
||||
from dexorder.api.workspace_tools import get_workspace_store
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# Global Data Directory
|
||||
# =============================================================================
|
||||
|
||||
# Default data directory (relative to working directory for local dev)
|
||||
DEFAULT_DATA_DIR = Path("data")
|
||||
|
||||
# Global data directory - set after config is loaded
|
||||
DATA_DIR: Path = DEFAULT_DATA_DIR
|
||||
|
||||
|
||||
def get_data_dir() -> Path:
    """
    Get the global data directory.

    Returns the module-level DATA_DIR, which starts as DEFAULT_DATA_DIR
    ("data", relative to the working directory) and is reassigned when
    Config.load() resolves the configured directory.
    """
    return DATA_DIR
|
||||
|
||||
|
||||
# =============================================================================
|
||||
@@ -60,8 +77,13 @@ class Config:
|
||||
self.config_data: dict = {}
|
||||
self.secrets_data: dict = {}
|
||||
|
||||
# Data directory (set after config load)
|
||||
self.data_dir: Path = DEFAULT_DATA_DIR
|
||||
|
||||
def load(self) -> None:
|
||||
"""Load configuration and secrets from YAML files"""
|
||||
global DATA_DIR
|
||||
|
||||
# Load config.yaml if exists
|
||||
if self.config_path.exists():
|
||||
with open(self.config_path) as f:
|
||||
@@ -78,16 +100,40 @@ class Config:
|
||||
else:
|
||||
logging.warning(f"Secrets file not found: {self.secrets_path}")
|
||||
|
||||
# Set data directory from config or environment
|
||||
# Priority: env var > config file > default
|
||||
data_dir_str = os.getenv("DATA_DIR") or self.config_data.get("data_dir")
|
||||
if data_dir_str:
|
||||
self.data_dir = Path(data_dir_str)
|
||||
else:
|
||||
self.data_dir = DEFAULT_DATA_DIR
|
||||
|
||||
# Update global DATA_DIR
|
||||
DATA_DIR = self.data_dir
|
||||
|
||||
# Ensure data directory exists
|
||||
self.data_dir.mkdir(parents=True, exist_ok=True)
|
||||
logging.info(f"Data directory: {self.data_dir}")
|
||||
|
||||
    @property
    def workspace_dir(self) -> Path:
        """Workspace directory under DATA_DIR: computed as self.data_dir / "workspace"."""
        return self.data_dir / "workspace"
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# MCP Server Setup
|
||||
# =============================================================================
|
||||
|
||||
def create_mcp_server(config: Config, event_publisher: EventPublisher) -> Server:
|
||||
"""Create MCP server with minimal hello world resource"""
|
||||
"""Create MCP server with resources and workspace tools"""
|
||||
|
||||
server = Server(config.mcp_server_name)
|
||||
|
||||
# Initialize workspace store
|
||||
workspace_store = get_workspace_store(config.workspace_dir)
|
||||
logging.info(f"Workspace store initialized at {config.workspace_dir}")
|
||||
|
||||
@server.list_resources()
|
||||
async def list_resources():
|
||||
"""List available resources"""
|
||||
@@ -122,7 +168,89 @@ def create_mcp_server(config: Config, event_publisher: EventPublisher) -> Server
|
||||
else:
|
||||
raise ValueError(f"Unknown resource: {uri}")
|
||||
|
||||
logging.info(f"MCP server '{config.mcp_server_name}' created")
|
||||
@server.list_tools()
|
||||
async def list_tools():
|
||||
"""List available tools including workspace tools"""
|
||||
return [
|
||||
{
|
||||
"name": "workspace_read",
|
||||
"description": "Read a workspace store from persistent storage",
|
||||
"inputSchema": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"store_name": {
|
||||
"type": "string",
|
||||
"description": "Name of the store (e.g., 'chartStore', 'userPreferences')"
|
||||
}
|
||||
},
|
||||
"required": ["store_name"]
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "workspace_write",
|
||||
"description": "Write a workspace store to persistent storage",
|
||||
"inputSchema": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"store_name": {
|
||||
"type": "string",
|
||||
"description": "Name of the store"
|
||||
},
|
||||
"data": {
|
||||
"description": "Data to write"
|
||||
}
|
||||
},
|
||||
"required": ["store_name", "data"]
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "workspace_patch",
|
||||
"description": "Apply JSON patch operations to a workspace store",
|
||||
"inputSchema": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"store_name": {
|
||||
"type": "string",
|
||||
"description": "Name of the store"
|
||||
},
|
||||
"patch": {
|
||||
"type": "array",
|
||||
"description": "JSON Patch operations (RFC 6902)",
|
||||
"items": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"op": {"type": "string", "enum": ["add", "remove", "replace", "move", "copy", "test"]},
|
||||
"path": {"type": "string"},
|
||||
"value": {}
|
||||
},
|
||||
"required": ["op", "path"]
|
||||
}
|
||||
}
|
||||
},
|
||||
"required": ["store_name", "patch"]
|
||||
}
|
||||
}
|
||||
]
|
||||
|
||||
@server.call_tool()
|
||||
async def handle_tool_call(name: str, arguments: dict):
|
||||
"""Handle tool calls including workspace tools"""
|
||||
if name == "workspace_read":
|
||||
return workspace_store.read(arguments.get("store_name", ""))
|
||||
elif name == "workspace_write":
|
||||
return workspace_store.write(
|
||||
arguments.get("store_name", ""),
|
||||
arguments.get("data")
|
||||
)
|
||||
elif name == "workspace_patch":
|
||||
return workspace_store.patch(
|
||||
arguments.get("store_name", ""),
|
||||
arguments.get("patch", [])
|
||||
)
|
||||
else:
|
||||
raise ValueError(f"Unknown tool: {name}")
|
||||
|
||||
logging.info(f"MCP server '{config.mcp_server_name}' created with workspace tools")
|
||||
return server
|
||||
|
||||
|
||||
|
||||
@@ -15,6 +15,7 @@ setup(
|
||||
"pyyaml>=6.0",
|
||||
"aiofiles>=23.0.0",
|
||||
"mcp>=0.9.0",
|
||||
"jsonpatch>=1.33",
|
||||
],
|
||||
extras_require={
|
||||
"dev": [
|
||||
|
||||
Reference in New Issue
Block a user