bugfixes
@@ -6,10 +6,12 @@ All internal timestamps use nanoseconds since epoch (UTC).
 """
 
 import logging
+import re
 from typing import Union
-from datetime import datetime
+from datetime import datetime, timezone
 import pandas as pd
-from dateutil import parser as dateparser
+from dateutil import parser as _dateutil_parser
+from dateutil.relativedelta import relativedelta
 
 log = logging.getLogger(__name__)
 
@@ -18,6 +20,20 @@ TimestampInput = Union[int, float, str, datetime, pd.Timestamp]
 
 NANOS_PER_SECOND = 1_000_000_000
 
+_RELATIVE_RE = re.compile(
+    r'^(\d+)\s+(second|minute|hour|day|week|month|year)s?\s+ago$',
+    re.IGNORECASE,
+)
+
+
+def _parse_relative_date(s: str) -> datetime | None:
+    """Parse relative date strings like '30 days ago', '2 weeks ago'."""
+    m = _RELATIVE_RE.match(s.strip())
+    if not m:
+        return None
+    n, unit = int(m.group(1)), m.group(2).lower()
+    return datetime.now(timezone.utc) - relativedelta(**{f'{unit}s': n})
+
 
 def to_nanoseconds(timestamp: TimestampInput) -> int:
     """
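The pluralized-keyword construction above (`relativedelta(**{f'{unit}s': n})`) relies on `relativedelta` accepting `seconds`, `minutes`, `hours`, `days`, `weeks`, `months`, and `years` keyword arguments. A minimal, self-contained illustration (not part of this commit; the loop values are made up):

```python
# Standalone illustration of the relativedelta keyword trick used by
# _parse_relative_date above; the (n, unit) pairs are demo values only.
from datetime import datetime, timezone

from dateutil.relativedelta import relativedelta

now = datetime.now(timezone.utc)
for n, unit in [(30, "day"), (2, "week"), (6, "month")]:
    delta = relativedelta(**{f"{unit}s": n})  # e.g. relativedelta(days=30)
    print(f"{n} {unit}s ago ->", now - delta)
```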
@@ -31,6 +47,7 @@ def to_nanoseconds(timestamp: TimestampInput) -> int:
         timestamp: Can be:
             - Unix timestamp (int/float) - assumed to be in seconds
             - ISO date string (str) - parsed using dateutil
+            - Relative date string (str) - e.g. "30 days ago", "2 weeks ago"
             - datetime object
             - pandas Timestamp
 
@@ -48,7 +65,9 @@ def to_nanoseconds(timestamp: TimestampInput) -> int:
     if isinstance(timestamp, (int, float)):
         return int(timestamp * NANOS_PER_SECOND)
     elif isinstance(timestamp, str):
-        dt = dateparser.parse(timestamp)
+        dt = _parse_relative_date(timestamp)
+        if dt is None:
+            dt = _dateutil_parser.parse(timestamp)
         if dt is None:
             raise ValueError(f"Could not parse date string: {timestamp}")
         return int(dt.timestamp() * NANOS_PER_SECOND)
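With the branch above, string inputs are first checked against the relative-date pattern and only then handed to dateutil, which is what the previous single `dateparser.parse` call could not do. Hypothetical usage, assuming the function is imported from its module (the actual module name is not shown in this diff):

```python
# Hypothetical calls; "timeutils" is a placeholder for the real module name.
from timeutils import to_nanoseconds

to_nanoseconds(1704067200)                   # seconds -> 1704067200000000000
to_nanoseconds("2024-01-01T00:00:00+00:00")  # ISO string via dateutil -> 1704067200000000000
to_nanoseconds("30 days ago")                # relative string, resolved against now (UTC)
to_nanoseconds("not a date")                 # dateutil raises a parse error (a ValueError subclass)
```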
@@ -155,6 +155,21 @@ def _remove_indicator_instances(workspace_store, pandas_ta_name: str) -> None:
         logging.warning(f"Failed to remove indicator instances for {pandas_ta_name}", exc_info=True)
 
 
+def _workspace_sync_content(workspace_store, category: str) -> "TextContent | None":
+    """
+    Return a TextContent item carrying the current {category}_types workspace state so the
+    gateway can sync it to connected web clients without a separate workspace_patch call.
+    The gateway detects items of the form {"_workspace_sync": {"store": ..., "data": ...}}.
+    """
+    store = _type_store_name(category)
+    result = workspace_store.read(store)
+    if not result.get('exists'):
+        return None
+    import json as _json
+    payload = _json.dumps({"_workspace_sync": {"store": store, "data": result.get("data")}})
+    return TextContent(type="text", text=payload)
+
+
 def _populate_types_from_disk(workspace_store, category_manager, category: str) -> None:
     """Scan existing category items and add any missing entries to the {category}_types store."""
     store = _type_store_name(category)
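For context, gateway-side detection of these items could look roughly like the sketch below. This is an assumption about the consuming side, not code from this commit; the helper name and the sample store/data values are illustrative, and only the `{"_workspace_sync": {"store": ..., "data": ...}}` shape comes from the docstring above.

```python
import json

def extract_workspace_sync(text_items):
    """Collect (store, data) pairs from tool output texts that carry a _workspace_sync payload."""
    updates = []
    for text in text_items:
        try:
            obj = json.loads(text)
        except (TypeError, ValueError):
            continue  # ordinary, non-JSON tool output
        if isinstance(obj, dict) and "_workspace_sync" in obj:
            sync = obj["_workspace_sync"]
            updates.append((sync.get("store"), sync.get("data")))
    return updates

payload = json.dumps({"_workspace_sync": {"store": "indicator_types", "data": {"my_sma": {"revision": 3}}}})
print(extract_workspace_sync([payload, "plain tool output"]))
# -> [('indicator_types', {'my_sma': {'revision': 3}})]
```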
@@ -921,6 +936,9 @@ def create_mcp_server(config: Config, event_publisher: EventPublisher) -> Server
             if result.get("success"):
                 _upsert_type(workspace_store, category_manager, arguments.get("category", ""), arguments.get("name", ""))
                 await cleanup_extra_packages_async(get_data_dir(), _get_env_yml())
+                sync = _workspace_sync_content(workspace_store, arguments.get("category", ""))
+                if sync:
+                    content.append(sync)
             return content
         elif name == "python_edit":
             result = await category_manager.edit(
@@ -952,6 +970,9 @@ def create_mcp_server(config: Config, event_publisher: EventPublisher) -> Server
             if result.get("success"):
                 _upsert_type(workspace_store, category_manager, arguments.get("category", ""), arguments.get("name", ""))
                 await cleanup_extra_packages_async(get_data_dir(), _get_env_yml())
+                sync = _workspace_sync_content(workspace_store, arguments.get("category", ""))
+                if sync:
+                    content.append(sync)
             return content
         elif name == "python_read":
             return category_manager.read(
@@ -987,6 +1008,11 @@ def create_mcp_server(config: Config, event_publisher: EventPublisher) -> Server
                 meta_parts.append(f"validation errors: {result['validation'].get('errors', [])}")
             if result.get("success"):
                 _upsert_type(workspace_store, category_manager, arguments.get("category", ""), arguments.get("name", ""))
+                sync = _workspace_sync_content(workspace_store, arguments.get("category", ""))
+                content_out = [TextContent(type="text", text="\n".join(meta_parts))]
+                if sync:
+                    content_out.append(sync)
+                return content_out
             return [TextContent(type="text", text="\n".join(meta_parts))]
         elif name == "python_delete":
             result = await category_manager.delete(
@@ -1002,7 +1028,12 @@ def create_mcp_server(config: Config, event_publisher: EventPublisher) -> Server
             for k in ("category", "name", "revision", "packages_removed", "error"):
                 if result.get(k):
                     parts.append(f"{k}: {result[k]}")
-            return [TextContent(type="text", text="\n".join(parts))]
+            content_out = [TextContent(type="text", text="\n".join(parts))]
+            if result.get("success"):
+                sync = _workspace_sync_content(workspace_store, arguments.get("category", ""))
+                if sync:
+                    content_out.append(sync)
+            return content_out
         elif name == "conda_sync":
             return await sync_packages_async(
                 data_dir=get_data_dir(),
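The four handler changes above follow one pattern: build the normal text response, and on success append the `_workspace_sync` item when the store is readable. A rough, self-contained sketch of that pattern (TextContent is stubbed here; the server uses the real MCP type, and the sample values are made up):

```python
import json
from dataclasses import dataclass

@dataclass
class TextContent:  # stand-in for the MCP TextContent used in the server
    type: str
    text: str

def build_response(parts, success, sync_item):
    # Mirrors the python_delete branch: always include the joined summary, then
    # append the workspace sync item only on success and only if one was produced.
    content_out = [TextContent(type="text", text="\n".join(parts))]
    if success and sync_item is not None:
        content_out.append(sync_item)
    return content_out

sync = TextContent(type="text", text=json.dumps({"_workspace_sync": {"store": "indicator_types", "data": {}}}))
for item in build_response(["category: indicator", "name: my_sma"], True, sync):
    print(item.text)
```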