diff --git a/Dockerfile b/Dockerfile
index 4581334..002e641 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -21,6 +21,9 @@ RUN apt-get update \
COPY requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt
+# Install Playwright browser (Chromium) and its system dependencies
+RUN playwright install --with-deps chromium && rm -rf /var/lib/apt/lists/*
+
COPY server/ ./server/
# Data directory for encrypted DB (mounted as volume in production)
diff --git a/README.md b/README.md
index 7be9ebf..0e83f52 100644
--- a/README.md
+++ b/README.md
@@ -1,29 +1,30 @@
# oAI-Web - Personal AI Agent
-A secure, self-hosted personal AI agent powered by OpenRouter, Anthropic and OpenAI. Handles calendar, email, files, web research, and Telegram - controlled by you, running on your own hardware.
+A secure, self-hosted personal AI agent powered by Claude. Handles calendar, email, files, web research, and Telegram - controlled by you, running on your own hardware.
## Features
- **Chat interface** - conversational UI via browser, with model selector
-- **CalDAV** - read and write calendar events
+- **CalDAV** - read and write calendar events (per-user credentials, configured in Settings)
+- **CardDAV / Contacts** - search and manage contacts from your CardDAV server
- **Email** - read inbox, send replies (whitelist-managed recipients)
-- **Filesystem** - read/write files in declared sandbox directories
+- **Filesystem** - read/write files in your personal data folder
- **Web access** - tiered: whitelisted domains always allowed, others on request
-- **Push notifications** - Pushover for iOS/Android
+- **Push notifications** - Pushover for iOS/Android (set your own User Key in Settings)
- **Telegram** - send and receive messages via your own bot
+- **Webhooks** - trigger agents from external services (iOS Shortcuts, GitHub, Home Assistant, etc.)
+- **Monitors** - page-change and RSS feed monitors that dispatch agents automatically
- **Scheduled tasks** - cron-based autonomous tasks with declared permission scopes
- **Agents** - goal-oriented runs with model selection and full run history
- **Audit log** - every tool call logged, append-only
- **Multi-user** - each user has their own credentials and settings
-oAI-Web also has an extensive built in help function. This makes it easy for both admins and normal useres to learn and use oAI-Web in the best way possible.
-
---
## Requirements
- Docker and Docker Compose
-- An API key from [Anthropic](https://console.anthropic.com) and/or [OpenRouter](https://openrouter.ai) and/or [OpenAI](https://openai.com)
+- An API key from [Anthropic](https://console.anthropic.com) and/or [OpenRouter](https://openrouter.ai)
- A PostgreSQL-compatible host (included in the compose file)
---
@@ -247,30 +248,51 @@ The file is mounted read-only into the container. Changes take effect on the nex
---
-## First Run - Settings
+## Your Settings
-After the setup wizard, go to **Settings** to configure your services.
+After logging in, go to **Settings** to configure your personal services. Each user has their own credentials — nothing is shared with other users.
-### Credentials (admin only)
+### CalDAV / CardDAV
-Add credentials for the services you use. Common keys:
+Set up your personal calendar and contacts server under **Settings → CalDAV / CardDAV**:
-| Key | Example | Used by |
-|-----|---------|---------|
-| `anthropic_api_key` | `sk-ant-...` | Claude (Anthropic) |
-| `openrouter_api_key` | `sk-or-...` | OpenRouter models |
-| `mailcow_host` | `mail.yourdomain.com` | CalDAV, Email |
-| `mailcow_username` | `you@yourdomain.com` | CalDAV, Email |
-| `mailcow_password` | your IMAP password | CalDAV, Email |
-| `caldav_calendar_name` | `personal` | CalDAV |
-| `pushover_app_token` | from Pushover dashboard | Push notifications |
-| `telegram_bot_token` | from @BotFather | Telegram |
+- Enter your server URL (e.g. `mail.example.com`), username, and password
+- Optionally specify a calendar name (leave blank for the default calendar)
+- For CardDAV (contacts): tick *Same server as CalDAV* to reuse your credentials, or enter separate details
+- Use the **Test** buttons to verify your connection before saving
+- Enable **Allow contact writes** if you want agents to be able to create and update contacts
-### Whitelists
+There is no system-wide fallback — if you don't configure it, calendar and contacts tools won't be available to your agents.
-- **Email whitelist** - addresses the agent is allowed to send email to
-- **Web whitelist** - domains always accessible to the agent (Tier 1)
-- **Filesystem sandbox** - directories the agent is allowed to read/write
+### Pushover
+
+To receive push notifications on your iOS or Android device:
+
+1. Create a free account at [pushover.net](https://pushover.net)
+2. Copy your **User Key** from the dashboard
+3. Go to **Settings → Pushover** and save your User Key
+
+The app is already registered by your admin — you only need your own User Key.
+
+### Webhooks
+
+Create inbound webhooks under **Settings → Webhooks** to trigger your agents from external services:
+
+- Assign a name and target agent, then copy the secret token shown at creation (it's shown only once)
+- **POST trigger**: send `{"message": "your message"}` to `/webhook/{token}`
+- **GET trigger**: visit `/webhook/{token}?q=your+message` — ideal for iOS Shortcuts URL actions
+- Enable or disable webhooks without deleting them
+
+### Telegram
+
+Set your personal bot token under **Settings → Telegram** (or **Settings → Profile → Telegram Bot Token**) if you want your own Telegram bot. Your chat ID must be whitelisted by the admin before messages are processed.
+
+### Email Accounts
+
+Set up your own email accounts under **Settings → Email Accounts**:
+
+- **Trigger account** — dispatches agents based on keyword rules in incoming emails
+- **Handling account** — a dedicated AI agent reads and handles each incoming email
---
@@ -290,9 +312,12 @@ docker compose up -d
| `/` | Chat - send messages, select model, view tool activity |
| `/tasks` | Scheduled tasks - cron-based autonomous tasks |
| `/agents` | Agents - goal-oriented runs with model selection and run history |
+| `/monitors` | Monitors - page-change watchers and RSS feed monitors |
+| `/files` | Files - browse, download, and manage your personal data folder |
| `/audit` | Audit log - filterable view of every tool call |
-| `/settings` | Credentials, whitelists, agent config, Telegram, and more |
+| `/settings` | Your personal settings: CalDAV, CardDAV, Pushover, Webhooks, Telegram, Email Accounts, and more |
+---
## License
diff --git a/requirements.txt b/requirements.txt
index b05b8ad..080db56 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -26,6 +26,8 @@ aioimaplib>=1.0
# Web
httpx==0.27.*
beautifulsoup4==4.12.*
+feedparser==6.0.* # RSS/Atom feed parsing
+playwright>=1.40 # Headless browser for JS-heavy pages
# Scheduler
apscheduler==3.10.*
diff --git a/server/agents/runner.py b/server/agents/runner.py
index d3f8ffb..2c7b7a8 100644
--- a/server/agents/runner.py
+++ b/server/agents/runner.py
@@ -21,17 +21,50 @@ from . import tasks as agent_store
logger = logging.getLogger(__name__)
+# Priority levels for the run queue (lower number = higher priority)
+PRIORITY_HIGH = 0 # User-initiated chat runs
+PRIORITY_NORMAL = 1 # Webhook / inbox / Telegram triggers
+PRIORITY_LOW = 2 # Background monitors
+
+_DEFAULT_MAX_CONCURRENT = 3
+
+
class AgentRunner:
def __init__(self) -> None:
self._agent: Agent | None = None
self._scheduler = AsyncIOScheduler(timezone=settings.timezone)
self._running: dict[str, asyncio.Task] = {} # run_id → asyncio.Task
+ # Concurrency semaphore — initialised in start() once event loop is running
+ self._semaphore: asyncio.Semaphore | None = None
+ self._max_concurrent: int = _DEFAULT_MAX_CONCURRENT
+
+ @property
+ def scheduler(self) -> AsyncIOScheduler:
+ return self._scheduler
+
+ @property
+ def queue_status(self) -> dict:
+ running = sum(1 for t in self._running.values() if not t.done())
+ # Tasks waiting for the semaphore are counted as "queued"
+ queued = max(0, running - self._max_concurrent)
+ return {"running": min(running, self._max_concurrent), "queued": queued, "max_concurrent": self._max_concurrent}
def init(self, agent: Agent) -> None:
self._agent = agent
+ async def _load_max_concurrent(self) -> int:
+ val = await credential_store.get("system:max_concurrent_runs")
+ try:
+ return max(1, int(val)) if val else _DEFAULT_MAX_CONCURRENT
+ except (ValueError, TypeError):
+ return _DEFAULT_MAX_CONCURRENT
+
async def start(self) -> None:
"""Load all enabled agents with schedules into APScheduler and start it."""
+ # Initialise concurrency semaphore (must happen inside a running event loop)
+ self._max_concurrent = await self._load_max_concurrent()
+ self._semaphore = asyncio.Semaphore(self._max_concurrent)
+
for agent in await agent_store.list_agents():
if agent["enabled"] and agent["schedule"]:
self._add_job(agent)
@@ -44,7 +77,9 @@ class AgentRunner:
misfire_grace_time=3600,
)
self._scheduler.start()
- logger.info("[agent-runner] Scheduler started, loaded scheduled agents")
+ logger.info(
+ "[agent-runner] Scheduler started, max_concurrent=%d", self._max_concurrent
+ )
def shutdown(self) -> None:
if self._scheduler.running:
@@ -261,7 +296,14 @@ class AgentRunner:
finally:
self._running.pop(run_id, None)
- task = asyncio.create_task(_execute())
+ async def _execute_with_semaphore():
+ if self._semaphore:
+ async with self._semaphore:
+ await _execute()
+ else:
+ await _execute()
+
+ task = asyncio.create_task(_execute_with_semaphore())
self._running[run_id] = task
return await agent_store.get_run(run_id)
diff --git a/server/database.py b/server/database.py
index 69283b4..9bd83dc 100644
--- a/server/database.py
+++ b/server/database.py
@@ -393,6 +393,78 @@ _MIGRATIONS: list[list[str]] = [
[
"ALTER TABLE conversations ADD COLUMN IF NOT EXISTS model TEXT",
],
+ # v25 — Inbound webhook trigger endpoints + outbound webhook targets + monitors
+ [
+ """
+ CREATE TABLE IF NOT EXISTS webhook_endpoints (
+ id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
+ name TEXT NOT NULL,
+ token TEXT NOT NULL UNIQUE,
+ description TEXT DEFAULT '',
+ agent_id TEXT REFERENCES agents(id) ON DELETE CASCADE,
+ enabled BOOLEAN DEFAULT TRUE,
+ allow_get BOOLEAN DEFAULT TRUE,
+ created_at TIMESTAMPTZ DEFAULT NOW(),
+ last_triggered_at TIMESTAMPTZ,
+ trigger_count INTEGER DEFAULT 0
+ )
+ """,
+ """
+ CREATE TABLE IF NOT EXISTS webhook_targets (
+ id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
+ name TEXT NOT NULL UNIQUE,
+ url TEXT NOT NULL,
+ secret_header TEXT,
+ enabled BOOLEAN DEFAULT TRUE,
+ created_at TIMESTAMPTZ DEFAULT NOW()
+ )
+ """,
+ """
+ CREATE TABLE IF NOT EXISTS watched_pages (
+ id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
+ name TEXT NOT NULL,
+ url TEXT NOT NULL,
+ schedule TEXT NOT NULL DEFAULT '0 * * * *',
+ css_selector TEXT,
+ last_content_hash TEXT,
+ last_changed_at TIMESTAMPTZ,
+ last_checked_at TIMESTAMPTZ,
+ last_error TEXT,
+ agent_id TEXT REFERENCES agents(id) ON DELETE SET NULL,
+ notification_mode TEXT DEFAULT 'agent',
+ enabled BOOLEAN DEFAULT TRUE,
+ owner_user_id TEXT REFERENCES users(id) ON DELETE CASCADE,
+ created_at TIMESTAMPTZ DEFAULT NOW()
+ )
+ """,
+ """
+ CREATE TABLE IF NOT EXISTS rss_feeds (
+ id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
+ name TEXT NOT NULL,
+ url TEXT NOT NULL,
+ schedule TEXT NOT NULL DEFAULT '0 */4 * * *',
+ agent_id TEXT REFERENCES agents(id) ON DELETE SET NULL,
+ last_fetched_at TIMESTAMPTZ,
+ last_etag TEXT,
+ last_modified TEXT,
+ seen_item_ids JSONB DEFAULT '[]',
+ max_items_per_run INTEGER DEFAULT 5,
+ notification_mode TEXT DEFAULT 'agent',
+ last_error TEXT,
+ enabled BOOLEAN DEFAULT TRUE,
+ owner_user_id TEXT REFERENCES users(id) ON DELETE CASCADE,
+ created_at TIMESTAMPTZ DEFAULT NOW()
+ )
+ """,
+ ],
+ # v26 — Per-user webhook endpoint ownership
+ [
+ "ALTER TABLE webhook_endpoints ADD COLUMN IF NOT EXISTS owner_user_id TEXT REFERENCES users(id) ON DELETE CASCADE",
+ ],
+ # v27 — Per-user webhook target ownership
+ [
+ "ALTER TABLE webhook_targets ADD COLUMN IF NOT EXISTS owner_user_id TEXT REFERENCES users(id) ON DELETE CASCADE",
+ ],
]
diff --git a/server/main.py b/server/main.py
index 5d8d288..86c7dab 100644
--- a/server/main.py
+++ b/server/main.py
@@ -42,6 +42,8 @@ from .context_vars import current_user as _current_user_var
from .database import close_db, credential_store, init_db
from .inbox.listener import inbox_listener
from .mcp import create_mcp_app, _session_manager
+from .monitors.page_monitor import page_monitor
+from .monitors.rss_monitor import rss_monitor
from .telegram.listener import telegram_listener
from .tools import build_registry
from .users import assign_existing_data_to_admin, create_user, get_user_by_username, user_count
@@ -190,6 +192,11 @@ async def lifespan(app: FastAPI):
print("[aide] Agent ready.")
agent_runner.init(_agent)
await agent_runner.start()
+ # Wire monitors into the shared scheduler after it starts
+ page_monitor.init(agent_runner.scheduler)
+ rss_monitor.init(agent_runner.scheduler)
+ await page_monitor.start_all()
+ await rss_monitor.start_all()
await _migrate_email_accounts()
await inbox_listener.start_all()
telegram_listener.start()
@@ -260,7 +267,7 @@ import time as _time
_USER_COOKIE = "aide_user"
_EXEMPT_PATHS = frozenset({"/login", "/login/mfa", "/logout", "/setup", "/health"})
-_EXEMPT_PREFIXES = ("/static/", "/brain-mcp/", "/docs", "/redoc", "/openapi.json")
+_EXEMPT_PREFIXES = ("/static/", "/brain-mcp/", "/docs", "/redoc", "/openapi.json", "/webhook/")
_EXEMPT_API_PATHS = frozenset({"/api/settings/api-key"})
@@ -380,6 +387,87 @@ app.include_router(api_router, prefix="/api")
app.mount("/brain-mcp", create_mcp_app())
+# ── Public webhook trigger endpoints ─────────────────────────────────────────
+# These live outside /api and outside the auth middleware (token = auth).
+
+_webhook_logger = logging.getLogger("server.webhook")
+
+
+async def _handle_webhook_trigger(token: str, message: str, wait: bool = False) -> JSONResponse:
+ """Shared logic for GET and POST webhook triggers."""
+ from .webhooks.endpoints import get_by_token, record_trigger
+ from .security import sanitize_external_content
+
+ ep = await get_by_token(token)
+ if ep is None:
+ return JSONResponse({"error": "Not found"}, status_code=404)
+
+ agent_id = ep.get("agent_id")
+ if not agent_id:
+ return JSONResponse({"error": "No agent configured for this webhook"}, status_code=422)
+
+ message = (message or "").strip()
+ if not message:
+ message = "Webhook triggered"
+
+ message = await sanitize_external_content(message, source="webhook")
+
+ from .audit import audit_log
+ ep_id = str(ep["id"])
+ session_id = f"webhook:{ep_id}"
+
+ await audit_log.record(
+ tool_name="webhook",
+ arguments={"endpoint": ep.get("name"), "message": message[:200]},
+ session_id=session_id,
+ )
+
+ if wait:
+ result = await agent_runner.run_agent_and_wait(
+ agent_id=agent_id,
+ override_message=message,
+ session_id=session_id,
+ )
+ await record_trigger(ep_id)
+ return JSONResponse({"ok": True, "result": result})
+ else:
+ run = await agent_runner.run_agent_now(agent_id=agent_id, override_message=message)
+ await record_trigger(ep_id)
+ run_id = run.get("id") or run.get("error", "error")
+ return JSONResponse({"ok": True, "run_id": run_id})
+
+
+@app.get("/webhook/{token}")
+async def webhook_trigger_get(token: str, q: str = "", wait: bool = False):
+ """iOS Shortcuts / simple GET trigger. Message via ?q= query param."""
+ ep = await _get_webhook_endpoint_for_get(token)
+ if ep is None:
+ return JSONResponse({"error": "Not found"}, status_code=404)
+ return await _handle_webhook_trigger(token, q, wait=wait)
+
+
+@app.post("/webhook/{token}")
+async def webhook_trigger_post(token: str, request: Request):
+ """External service POST trigger. Message via JSON body {"message": "..."}."""
+ try:
+ body = await request.json()
+ message = body.get("message", "")
+ wait = bool(body.get("wait", False))
+ except Exception:
+ message = ""
+ wait = False
+ return await _handle_webhook_trigger(token, message, wait=wait)
+
+
+async def _get_webhook_endpoint_for_get(token: str) -> dict | None:
+ """Return endpoint only if allow_get is True."""
+ from .webhooks.endpoints import get_by_token
+ ep = await get_by_token(token)
+ if ep and not ep.get("allow_get", True):
+ return None
+ return ep
+
+
# ── Auth helpers ──────────────────────────────────────────────────────────────
@@ -682,6 +770,11 @@ async def audit_page(request: Request):
return templates.TemplateResponse("audit.html", await _ctx(request))
+@app.get("/monitors", response_class=HTMLResponse)
+async def monitors_page(request: Request):
+ return templates.TemplateResponse("monitors.html", await _ctx(request))
+
+
@app.get("/help", response_class=HTMLResponse)
async def help_page(request: Request):
return templates.TemplateResponse("help.html", await _ctx(request))
diff --git a/server/monitors/__init__.py b/server/monitors/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/server/monitors/page_monitor.py b/server/monitors/page_monitor.py
new file mode 100644
index 0000000..d10e1a0
--- /dev/null
+++ b/server/monitors/page_monitor.py
@@ -0,0 +1,175 @@
+"""
+monitors/page_monitor.py — Page change monitor.
+
+Polls watched URLs on a cron schedule, hashes the content, and dispatches
+an agent (or Pushover notification) when the page content changes.
+"""
+from __future__ import annotations
+
+import hashlib
+import logging
+
+import httpx
+from apscheduler.schedulers.asyncio import AsyncIOScheduler
+from apscheduler.triggers.cron import CronTrigger
+
+from ..config import settings
+from . import store
+
+logger = logging.getLogger(__name__)
+
+_DEFAULT_HEADERS = {
+ "User-Agent": "Mozilla/5.0 (compatible; oAI-Web page-monitor/1.0)",
+}
+
+
+async def _fetch_page_content(url: str, css_selector: str | None = None) -> str:
+ """Fetch URL and return plain text (optionally filtered by CSS selector)."""
+ async with httpx.AsyncClient(
+ timeout=30.0,
+ follow_redirects=True,
+ headers=_DEFAULT_HEADERS,
+ ) as client:
+ resp = await client.get(url)
+ resp.raise_for_status()
+ html = resp.text
+
+ if css_selector:
+ try:
+ from bs4 import BeautifulSoup
+ soup = BeautifulSoup(html, "html.parser")
+ elements = soup.select(css_selector)
+ return "\n".join(el.get_text(separator=" ", strip=True) for el in elements)
+ except Exception as e:
+ logger.warning("[page-monitor] CSS selector '%s' failed: %s", css_selector, e)
+
+ try:
+ from bs4 import BeautifulSoup
+ soup = BeautifulSoup(html, "html.parser")
+ for tag in soup(["script", "style", "nav", "footer", "header"]):
+ tag.decompose()
+ return soup.get_text(separator="\n", strip=True)
+ except Exception:
+ return html
+
+
+def _content_hash(text: str) -> str:
+ return hashlib.sha256(text.encode("utf-8", errors="replace")).hexdigest()
+
+
+class PageMonitorManager:
+ """Manages APScheduler jobs for all watched_pages entries."""
+
+ def __init__(self) -> None:
+ self._scheduler: AsyncIOScheduler | None = None
+
+ def init(self, scheduler: AsyncIOScheduler) -> None:
+ """Share the AgentRunner's scheduler."""
+ self._scheduler = scheduler
+
+ async def start_all(self) -> None:
+ """Load all enabled watched pages and register APScheduler jobs."""
+ pages = await store.list_watched_pages()
+ for page in pages:
+ if page["enabled"]:
+ self._add_job(page)
+ logger.info("[page-monitor] Registered %d page monitor jobs", len([p for p in pages if p["enabled"]]))
+
+ def _add_job(self, page: dict) -> None:
+ if not self._scheduler:
+ return
+ try:
+ self._scheduler.add_job(
+ self._check_page,
+ trigger=CronTrigger.from_crontab(page["schedule"], timezone=settings.timezone),
+ id=f"page:{page['id']}",
+ args=[str(page["id"])],
+ replace_existing=True,
+ misfire_grace_time=300,
+ )
+ except Exception as e:
+ logger.error("[page-monitor] Failed to schedule page '%s': %s", page["name"], e)
+
+ def reschedule(self, page: dict) -> None:
+ if not self._scheduler:
+ return
+ job_id = f"page:{page['id']}"
+ try:
+ self._scheduler.remove_job(job_id)
+ except Exception:
+ pass
+ if page.get("enabled"):
+ self._add_job(page)
+
+ def remove(self, page_id: str) -> None:
+ if not self._scheduler:
+ return
+ try:
+ self._scheduler.remove_job(f"page:{page_id}")
+ except Exception:
+ pass
+
+ async def check_now(self, page_id: str) -> dict:
+ """Force-check a page immediately (UI-triggered). Returns status dict."""
+ return await self._check_page(page_id)
+
+ async def _check_page(self, page_id: str) -> dict:
+ page = await store.get_watched_page(page_id)
+ if not page:
+ return {"error": "Page not found"}
+
+ logger.info("[page-monitor] Checking '%s' (%s)", page["name"], page["url"])
+
+ try:
+ content = await _fetch_page_content(page["url"], page.get("css_selector"))
+ except Exception as e:
+ error_msg = str(e)[:200]
+ logger.warning("[page-monitor] Failed to fetch '%s': %s", page["url"], error_msg)
+ await store.update_page_check_result(page_id, None, False, error=error_msg)
+ return {"error": error_msg}
+
+ new_hash = _content_hash(content)
+ old_hash = page.get("last_content_hash")
+ changed = old_hash is not None and new_hash != old_hash
+
+ await store.update_page_check_result(page_id, new_hash, changed)
+
+ if changed:
+ logger.info("[page-monitor] Change detected on '%s'", page["name"])
+ await self._dispatch_change(page, content)
+
+ return {"changed": changed, "hash": new_hash, "first_check": old_hash is None}
+
+ async def _dispatch_change(self, page: dict, content: str) -> None:
+ mode = page.get("notification_mode", "agent")
+ message = (
+ f"Page change detected: {page['name']}\n"
+ f"URL: {page['url']}\n\n"
+ f"Current content (first 2000 chars):\n{content[:2000]}"
+ )
+
+ if mode in ("pushover", "both"):
+ try:
+ from ..tools.pushover_tool import PushoverTool
+ await PushoverTool().execute(
+ title=f"Page changed: {page['name']}",
+ message=f"{page['url']} has new content.",
+ priority=0,
+ )
+ except Exception as e:
+ logger.warning("[page-monitor] Pushover notify failed: %s", e)
+
+ if mode in ("agent", "both"):
+ agent_id = page.get("agent_id")
+ if agent_id:
+ try:
+ from ..agents.runner import agent_runner
+ await agent_runner.run_agent_now(
+ agent_id=agent_id,
+ override_message=message,
+ )
+ except Exception as e:
+ logger.warning("[page-monitor] Agent dispatch failed: %s", e)
+
+
+page_monitor = PageMonitorManager()
diff --git a/server/monitors/rss_monitor.py b/server/monitors/rss_monitor.py
new file mode 100644
index 0000000..1356410
--- /dev/null
+++ b/server/monitors/rss_monitor.py
@@ -0,0 +1,185 @@
+"""
+monitors/rss_monitor.py — RSS / Atom feed monitor.
+
+Polls feeds on a cron schedule, tracks seen item IDs, and dispatches
+an agent (or Pushover) when new items appear.
+Sends ETag / If-Modified-Since headers for bandwidth efficiency.
+"""
+from __future__ import annotations
+
+import logging
+
+from apscheduler.schedulers.asyncio import AsyncIOScheduler
+from apscheduler.triggers.cron import CronTrigger
+
+from ..config import settings
+from . import store
+
+logger = logging.getLogger(__name__)
+
+
+def _item_id(entry) -> str:
+ """Return a stable ID for a feed entry (id → link → title)."""
+ return entry.get("id") or entry.get("link") or entry.get("title") or ""
+
+
+def _format_items(entries: list) -> str:
+ """Format new feed entries as a readable message for agents."""
+ lines = []
+ for e in entries:
+ title = e.get("title", "(no title)")
+ link = e.get("link", "")
+ summary = e.get("summary", "")[:500]
+ lines.append(f"- {title}\n {link}")
+ if summary:
+ lines.append(f" {summary}")
+ return "\n\n".join(lines)
+
+
+class RssFeedManager:
+ """Manages APScheduler jobs for all rss_feeds entries."""
+
+ def __init__(self) -> None:
+ self._scheduler: AsyncIOScheduler | None = None
+
+ def init(self, scheduler: AsyncIOScheduler) -> None:
+ self._scheduler = scheduler
+
+ async def start_all(self) -> None:
+ feeds = await store.list_rss_feeds()
+ for feed in feeds:
+ if feed["enabled"]:
+ self._add_job(feed)
+ logger.info("[rss-monitor] Registered %d RSS feed jobs", len([f for f in feeds if f["enabled"]]))
+
+ def _add_job(self, feed: dict) -> None:
+ if not self._scheduler:
+ return
+ try:
+ self._scheduler.add_job(
+ self._fetch_feed,
+ trigger=CronTrigger.from_crontab(feed["schedule"], timezone=settings.timezone),
+ id=f"rss:{feed['id']}",
+ args=[str(feed["id"])],
+ replace_existing=True,
+ misfire_grace_time=300,
+ )
+ except Exception as e:
+ logger.error("[rss-monitor] Failed to schedule feed '%s': %s", feed["name"], e)
+
+ def reschedule(self, feed: dict) -> None:
+ if not self._scheduler:
+ return
+ job_id = f"rss:{feed['id']}"
+ try:
+ self._scheduler.remove_job(job_id)
+ except Exception:
+ pass
+ if feed.get("enabled"):
+ self._add_job(feed)
+
+ def remove(self, feed_id: str) -> None:
+ if not self._scheduler:
+ return
+ try:
+ self._scheduler.remove_job(f"rss:{feed_id}")
+ except Exception:
+ pass
+
+ async def fetch_now(self, feed_id: str) -> dict:
+ """Force-fetch a feed immediately (UI-triggered). Returns status dict."""
+ return await self._fetch_feed(feed_id)
+
+ async def _fetch_feed(self, feed_id: str) -> dict:
+ import feedparser
+
+ feed_row = await store.get_rss_feed(feed_id)
+ if not feed_row:
+ return {"error": "Feed not found"}
+
+ logger.info("[rss-monitor] Fetching '%s' (%s)", feed_row["name"], feed_row["url"])
+
+ # Build request with conditional headers for bandwidth efficiency
+ request_headers = {}
+    if feed_row.get("last_etag"):
+        request_headers["If-None-Match"] = feed_row["last_etag"]
+ if feed_row.get("last_modified"):
+ request_headers["If-Modified-Since"] = feed_row["last_modified"]
+
+ try:
+ import httpx
+ async with httpx.AsyncClient(timeout=30.0, follow_redirects=True) as client:
+ resp = await client.get(feed_row["url"], headers=request_headers)
+
+ if resp.status_code == 304:
+ logger.info("[rss-monitor] '%s' unchanged (304)", feed_row["name"])
+                await store.update_feed_fetch_result(feed_id, feed_row.get("seen_item_ids") or [], etag=feed_row.get("last_etag"), last_modified=feed_row.get("last_modified"))
+ return {"new_items": 0, "status": "not_modified"}
+
+ resp.raise_for_status()
+ parsed = feedparser.parse(resp.text)
+ etag = resp.headers.get("etag")
+ last_modified = resp.headers.get("last-modified")
+
+ except Exception as e:
+ error_msg = str(e)[:200]
+ logger.warning("[rss-monitor] Failed to fetch '%s': %s", feed_row["url"], error_msg)
+ await store.update_feed_fetch_result(feed_id, feed_row.get("seen_item_ids") or [], error=error_msg)
+ return {"error": error_msg}
+
+ seen = set(feed_row.get("seen_item_ids") or [])
+ max_items = feed_row.get("max_items_per_run") or 5
+ new_entries = [e for e in parsed.entries if _item_id(e) and _item_id(e) not in seen][:max_items]
+
+ # Update seen IDs (keep last 500 to prevent unbounded growth)
+    new_ids = [_item_id(e) for e in parsed.entries if _item_id(e) and _item_id(e) not in seen]
+    all_ids = ((feed_row.get("seen_item_ids") or []) + new_ids)[-500:]
+
+ await store.update_feed_fetch_result(
+ feed_id,
+ seen_item_ids=all_ids,
+ etag=etag,
+ last_modified=last_modified,
+ )
+
+ if new_entries:
+ logger.info("[rss-monitor] %d new items in '%s'", len(new_entries), feed_row["name"])
+ await self._dispatch_new_items(feed_row, new_entries)
+
+ return {"new_items": len(new_entries)}
+
+ async def _dispatch_new_items(self, feed_row: dict, entries: list) -> None:
+ mode = feed_row.get("notification_mode", "agent")
+ count = len(entries)
+ items_text = _format_items(entries)
+ message = (
+ f"{count} new item{'s' if count != 1 else ''} in feed: {feed_row['name']}\n"
+ f"URL: {feed_row['url']}\n\n"
+ f"{items_text}"
+ )
+
+ if mode in ("pushover", "both"):
+ try:
+ from ..tools.pushover_tool import PushoverTool
+ await PushoverTool().execute(
+ title=f"RSS: {count} new in {feed_row['name']}",
+ message=items_text[:512],
+ priority=0,
+ )
+ except Exception as e:
+ logger.warning("[rss-monitor] Pushover notify failed: %s", e)
+
+ if mode in ("agent", "both"):
+ agent_id = feed_row.get("agent_id")
+ if agent_id:
+ try:
+ from ..agents.runner import agent_runner
+ await agent_runner.run_agent_now(
+ agent_id=agent_id,
+ override_message=message,
+ )
+ except Exception as e:
+ logger.warning("[rss-monitor] Agent dispatch failed: %s", e)
+
+
+rss_monitor = RssFeedManager()
diff --git a/server/monitors/store.py b/server/monitors/store.py
new file mode 100644
index 0000000..364570a
--- /dev/null
+++ b/server/monitors/store.py
@@ -0,0 +1,203 @@
+"""
+monitors/store.py — DB CRUD for watched_pages and rss_feeds tables.
+"""
+from __future__ import annotations
+
+import json
+from datetime import datetime, timezone
+
+from ..database import _rowcount, get_pool
+
+
+def _utcnow() -> str:
+ return datetime.now(timezone.utc).isoformat()
+
+
+def _page_row(row) -> dict:
+ return dict(row)
+
+
+def _feed_row(row) -> dict:
+ d = dict(row)
+ if isinstance(d.get("seen_item_ids"), str):
+ try:
+ d["seen_item_ids"] = json.loads(d["seen_item_ids"])
+ except Exception:
+ d["seen_item_ids"] = []
+ return d
+
+
+# ─── Watched Pages ────────────────────────────────────────────────────────────
+
+async def create_watched_page(
+ name: str,
+ url: str,
+ schedule: str = "0 * * * *",
+ css_selector: str | None = None,
+ agent_id: str | None = None,
+ notification_mode: str = "agent",
+ owner_user_id: str | None = None,
+) -> dict:
+ pool = await get_pool()
+ row = await pool.fetchrow(
+ """
+ INSERT INTO watched_pages
+ (name, url, schedule, css_selector, agent_id, notification_mode, owner_user_id, created_at)
+ VALUES ($1,$2,$3,$4,$5,$6,$7,$8) RETURNING *
+ """,
+ name, url, schedule, css_selector, agent_id or None, notification_mode,
+ owner_user_id, _utcnow(),
+ )
+ return _page_row(row)
+
+
+async def list_watched_pages(owner_user_id: str | None = None) -> list[dict]:
+ pool = await get_pool()
+ if owner_user_id:
+ rows = await pool.fetch(
+ "SELECT * FROM watched_pages WHERE owner_user_id = $1 ORDER BY created_at DESC",
+ owner_user_id,
+ )
+ else:
+ rows = await pool.fetch("SELECT * FROM watched_pages ORDER BY created_at DESC")
+ return [_page_row(r) for r in rows]
+
+
+async def get_watched_page(page_id: str) -> dict | None:
+ pool = await get_pool()
+ row = await pool.fetchrow("SELECT * FROM watched_pages WHERE id = $1::uuid", page_id)
+ return _page_row(row) if row else None
+
+
+async def update_watched_page(page_id: str, **fields) -> dict | None:
+ allowed = {"name", "url", "schedule", "css_selector", "agent_id", "notification_mode", "enabled"}
+ updates = {k: v for k, v in fields.items() if k in allowed}
+ if not updates:
+ return await get_watched_page(page_id)
+ pool = await get_pool()
+ set_clauses = ", ".join(f"{k} = ${i + 2}" for i, k in enumerate(updates))
+ await pool.execute(
+ f"UPDATE watched_pages SET {set_clauses} WHERE id = $1::uuid",
+ page_id, *updates.values(),
+ )
+ return await get_watched_page(page_id)
+
+
+async def delete_watched_page(page_id: str) -> bool:
+ pool = await get_pool()
+ status = await pool.execute("DELETE FROM watched_pages WHERE id = $1::uuid", page_id)
+ return _rowcount(status) > 0
+
+
+async def update_page_check_result(
+ page_id: str,
+ content_hash: str | None,
+ changed: bool,
+ error: str | None = None,
+) -> None:
+ pool = await get_pool()
+ now = _utcnow()
+ if error:
+ await pool.execute(
+ "UPDATE watched_pages SET last_checked_at=$1, last_error=$2 WHERE id=$3::uuid",
+ now, error, page_id,
+ )
+ elif changed:
+ await pool.execute(
+ """UPDATE watched_pages
+ SET last_checked_at=$1, last_content_hash=$2, last_changed_at=$1, last_error=NULL
+ WHERE id=$3::uuid""",
+ now, content_hash, page_id,
+ )
+ else:
+ await pool.execute(
+ """UPDATE watched_pages
+ SET last_checked_at=$1, last_content_hash=$2, last_error=NULL
+ WHERE id=$3::uuid""",
+ now, content_hash, page_id,
+ )
+
+
+# ─── RSS Feeds ────────────────────────────────────────────────────────────────
+
+async def create_rss_feed(
+ name: str,
+ url: str,
+ schedule: str = "0 */4 * * *",
+ agent_id: str | None = None,
+ notification_mode: str = "agent",
+ max_items_per_run: int = 5,
+ owner_user_id: str | None = None,
+) -> dict:
+ pool = await get_pool()
+ row = await pool.fetchrow(
+ """
+ INSERT INTO rss_feeds
+ (name, url, schedule, agent_id, notification_mode, max_items_per_run, owner_user_id, created_at)
+ VALUES ($1,$2,$3,$4,$5,$6,$7,$8) RETURNING *
+ """,
+ name, url, schedule, agent_id or None, notification_mode, max_items_per_run,
+ owner_user_id, _utcnow(),
+ )
+ return _feed_row(row)
+
+
+async def list_rss_feeds(owner_user_id: str | None = None) -> list[dict]:
+ pool = await get_pool()
+ if owner_user_id:
+ rows = await pool.fetch(
+ "SELECT * FROM rss_feeds WHERE owner_user_id = $1 ORDER BY created_at DESC",
+ owner_user_id,
+ )
+ else:
+ rows = await pool.fetch("SELECT * FROM rss_feeds ORDER BY created_at DESC")
+ return [_feed_row(r) for r in rows]
+
+
+async def get_rss_feed(feed_id: str) -> dict | None:
+ pool = await get_pool()
+ row = await pool.fetchrow("SELECT * FROM rss_feeds WHERE id = $1::uuid", feed_id)
+ return _feed_row(row) if row else None
+
+
+async def update_rss_feed(feed_id: str, **fields) -> dict | None:
+ allowed = {"name", "url", "schedule", "agent_id", "notification_mode", "max_items_per_run", "enabled"}
+ updates = {k: v for k, v in fields.items() if k in allowed}
+ if not updates:
+ return await get_rss_feed(feed_id)
+ pool = await get_pool()
+ set_clauses = ", ".join(f"{k} = ${i + 2}" for i, k in enumerate(updates))
+ await pool.execute(
+ f"UPDATE rss_feeds SET {set_clauses} WHERE id = $1::uuid",
+ feed_id, *updates.values(),
+ )
+ return await get_rss_feed(feed_id)
+
+
+async def delete_rss_feed(feed_id: str) -> bool:
+ pool = await get_pool()
+ status = await pool.execute("DELETE FROM rss_feeds WHERE id = $1::uuid", feed_id)
+ return _rowcount(status) > 0
+
+
+async def update_feed_fetch_result(
+ feed_id: str,
+ seen_item_ids: list[str],
+ etag: str | None = None,
+ last_modified: str | None = None,
+ error: str | None = None,
+) -> None:
+ pool = await get_pool()
+ now = _utcnow()
+ if error:
+ await pool.execute(
+ "UPDATE rss_feeds SET last_fetched_at=$1, last_error=$2 WHERE id=$3::uuid",
+ now, error, feed_id,
+ )
+ else:
+ await pool.execute(
+ """UPDATE rss_feeds
+ SET last_fetched_at=$1, seen_item_ids=$2, last_etag=$3, last_modified=$4, last_error=NULL
+ WHERE id=$5::uuid""",
+            now, json.dumps(seen_item_ids), etag, last_modified, feed_id,
+ )
diff --git a/server/tools/__init__.py b/server/tools/__init__.py
index fa46be7..5516007 100644
--- a/server/tools/__init__.py
+++ b/server/tools/__init__.py
@@ -20,24 +20,30 @@ def build_registry(include_mock: bool = False, is_admin: bool = True):
# Production tools — each imported lazily to avoid errors if optional
# dependencies are missing during development
from .brain_tool import BrainTool
+ from .browser_tool import BrowserTool
from .caldav_tool import CalDAVTool
+ from .contacts_tool import ContactsTool
from .email_tool import EmailTool
from .filesystem_tool import FilesystemTool
from .image_gen_tool import ImageGenTool
from .pushover_tool import PushoverTool
from .telegram_tool import TelegramTool
from .web_tool import WebTool
+ from .webhook_tool import WebhookTool
from .whitelist_tool import WhitelistTool
if is_admin:
from .bash_tool import BashTool
registry.register(BashTool())
registry.register(BrainTool())
+ registry.register(BrowserTool())
registry.register(CalDAVTool())
+ registry.register(ContactsTool())
registry.register(EmailTool())
registry.register(FilesystemTool())
registry.register(ImageGenTool())
registry.register(WebTool())
+ registry.register(WebhookTool())
registry.register(PushoverTool())
registry.register(TelegramTool())
registry.register(WhitelistTool())
diff --git a/server/tools/browser_tool.py b/server/tools/browser_tool.py
new file mode 100644
index 0000000..fb75343
--- /dev/null
+++ b/server/tools/browser_tool.py
@@ -0,0 +1,149 @@
+"""
+tools/browser_tool.py — Playwright headless browser tool.
+
+For JS-heavy pages that httpx can't render. Enforces the same Tier 1/2
+web whitelist as WebTool. Browser instance is lazy-initialized and shared
+across calls.
+
+Requires: playwright package + `playwright install chromium`
+"""
+from __future__ import annotations
+
+import asyncio
+import logging
+from typing import ClassVar
+
+from ..context_vars import current_task_id, web_tier2_enabled
+from ..security import assert_domain_tier1, sanitize_external_content
+from .base import BaseTool, ToolResult
+
+logger = logging.getLogger(__name__)
+
+_MAX_TEXT_CHARS = 25_000
+_TIMEOUT_MS = 30_000
+
+
+class BrowserTool(BaseTool):
+ name = "browser"
+ description = (
+ "Fetch web pages using a real headless browser (Chromium). "
+ "Use this for JS-heavy pages or single-page apps that the regular 'web' tool cannot read. "
+ "Operations: fetch_page (extract text content), screenshot (base64 PNG). "
+ "Follows the same domain whitelist rules as the web tool."
+ )
+ input_schema = {
+ "type": "object",
+ "properties": {
+ "operation": {
+ "type": "string",
+ "enum": ["fetch_page", "screenshot"],
+ "description": "fetch_page extracts text; screenshot returns a base64 PNG.",
+ },
+ "url": {
+ "type": "string",
+ "description": "URL to navigate to.",
+ },
+ "wait_for": {
+ "type": "string",
+ "description": "CSS selector to wait for before extracting content (optional).",
+ },
+ "extract_selector": {
+ "type": "string",
+ "description": "CSS selector to extract text from (optional; defaults to full page).",
+ },
+ },
+ "required": ["operation", "url"],
+ }
+ requires_confirmation = False
+ allowed_in_scheduled_tasks = False # Too resource-heavy for scheduled agents
+
+ # Module-level shared browser/playwright (lazy-init, reused)
+ _playwright = None
+ _browser = None
+ _lock: ClassVar[asyncio.Lock] = asyncio.Lock()
+
+ async def execute(self, operation: str, url: str = "", wait_for: str = "", extract_selector: str = "", **_) -> ToolResult:
+ if not url:
+ return ToolResult(success=False, error="'url' is required")
+
+ # Whitelist check (same Tier 1/2 rules as WebTool)
+ denied = await self._check_tier(url)
+ if denied:
+ return denied
+
+ try:
+ from playwright.async_api import async_playwright
+ except ImportError:
+ return ToolResult(
+ success=False,
+ error="Playwright is not installed. Run: pip install playwright && playwright install chromium",
+ )
+
+ try:
+ browser = await self._get_browser()
+ context = await browser.new_context(
+ user_agent=(
+ "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) "
+ "AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36"
+ )
+ )
+ page = await context.new_page()
+ try:
+ await page.goto(url, timeout=_TIMEOUT_MS, wait_until="domcontentloaded")
+
+ if wait_for:
+ try:
+ await page.wait_for_selector(wait_for, timeout=10_000)
+ except Exception:
+ pass # continue even if selector doesn't appear
+
+ if operation == "screenshot":
+ data = await page.screenshot(type="png")
+ import base64
+ return ToolResult(success=True, data={"screenshot_base64": base64.b64encode(data).decode()})
+
+ # fetch_page
+ if extract_selector:
+ elements = await page.query_selector_all(extract_selector)
+ text_parts = [await el.inner_text() for el in elements]
+ text = "\n".join(text_parts)
+ else:
+ text = await page.inner_text("body")
+
+ text = text[:_MAX_TEXT_CHARS]
+ text = await sanitize_external_content(text, source="browser")
+ return ToolResult(success=True, data={"url": url, "text": text, "length": len(text)})
+ finally:
+ await context.close()
+ except Exception as e:
+ return ToolResult(success=False, error=f"Browser error: {e}")
+
+ async def _get_browser(self):
+ async with BrowserTool._lock:
+ if BrowserTool._browser is None or not BrowserTool._browser.is_connected():
+ from playwright.async_api import async_playwright
+ BrowserTool._playwright = await async_playwright().start()
+ BrowserTool._browser = await BrowserTool._playwright.chromium.launch(
+ args=["--no-sandbox", "--disable-dev-shm-usage"],
+ )
+ logger.info("[browser] Chromium launched")
+ return BrowserTool._browser
+
+ async def _check_tier(self, url: str) -> ToolResult | None:
+ """Returns ToolResult(success=False) if denied, None if allowed."""
+ from urllib.parse import urlparse
+ if await assert_domain_tier1(url):
+ return None
+ task_id = current_task_id.get()
+ if task_id is not None:
+ return None
+ if web_tier2_enabled.get():
+ return None
+ parsed = urlparse(url)
+ return ToolResult(
+ success=False,
+ error=(
+ f"Domain '{parsed.hostname}' is not in the Tier 1 whitelist. "
+ "Ask me to fetch a specific external page to enable Tier 2 access."
+ ),
+ )
diff --git a/server/tools/caldav_tool.py b/server/tools/caldav_tool.py
index d6e6698..fb28963 100644
--- a/server/tools/caldav_tool.py
+++ b/server/tools/caldav_tool.py
@@ -37,32 +37,21 @@ logger = logging.getLogger(__name__)
async def _get_caldav_config(user_id: str | None = None) -> dict:
"""
- Two-layer CalDAV config lookup: user_settings → credential_store (global fallback).
+ Per-user CalDAV config lookup — no system-wide fallback.
- Keys in user_settings: caldav_url, caldav_username, caldav_password, caldav_calendar_name
- Keys in credential_store: mailcow_host, mailcow_username, mailcow_password, caldav_calendar_name
-
- Returns a dict with url, username, password, calendar_name (any may be None).
+ Each user (including admin) configures their own CalDAV credentials in Settings → CalDAV.
+ Returns a dict with url, username, password, calendar_name (any may be None/empty).
"""
if user_id:
from ..database import user_settings_store
- url = await user_settings_store.get(user_id, "caldav_url")
- if url:
- return {
- "url": url,
- "username": await user_settings_store.get(user_id, "caldav_username"),
- "password": await user_settings_store.get(user_id, "caldav_password"),
- "calendar_name": await user_settings_store.get(user_id, "caldav_calendar_name"),
- }
+ return {
+ "url": await user_settings_store.get(user_id, "caldav_url"),
+ "username": await user_settings_store.get(user_id, "caldav_username"),
+ "password": await user_settings_store.get(user_id, "caldav_password"),
+ "calendar_name": await user_settings_store.get(user_id, "caldav_calendar_name"),
+ }
- # Fall back to global credential_store
- host = await credential_store.get("mailcow_host")
- return {
- "url": f"https://{host}/SOGo/dav/" if host else None,
- "username": await credential_store.get("mailcow_username"),
- "password": await credential_store.get("mailcow_password"),
- "calendar_name": await credential_store.get("caldav_calendar_name"),
- }
+ return {"url": None, "username": None, "password": None, "calendar_name": None}
MAX_EVENTS = 100
@@ -135,11 +124,15 @@ class CalDAVTool(BaseTool):
if not url or not username or not password:
raise RuntimeError(
"CalDAV credentials not configured. "
- "Set them in Settings → My Settings → CalDAV, or ask the admin to configure global CalDAV."
+ "Set them in Settings → CalDAV / CardDAV."
)
+ # Normalise scheme — users often enter just the hostname
+ if not url.startswith(("http://", "https://")):
+ url = "https://" + url
+
# Build principal URL: if the stored URL is already the full principal URL use it directly;
- # otherwise append the SOGo-style path (backward compat with old mailcow_host keys).
+ # otherwise append the SOGo-style path.
if "/SOGo/dav/" in url or url.rstrip("/").endswith(username):
principal_url = url.rstrip("/") + "/"
else:
diff --git a/server/tools/contacts_tool.py b/server/tools/contacts_tool.py
new file mode 100644
index 0000000..1898354
--- /dev/null
+++ b/server/tools/contacts_tool.py
@@ -0,0 +1,423 @@
+"""
+tools/contacts_tool.py — CardDAV contacts access (Mailcow / SOGo).
+
+Uses httpx directly for CardDAV protocol (caldav 2.x dropped AddressBook support).
+Address book URL format: {base_url}/SOGo/dav/{username}/Contacts/personal/
+
+Read operations: list_contacts, search_contacts, get_contact (always available)
+Write operations: create_contact, update_contact, delete_contact
+ → only available when credential_store key 'contacts:allow_write' == '1'
+ → all write ops require user confirmation
+"""
+from __future__ import annotations
+
+import logging
+import uuid as _uuid
+from urllib.parse import urlparse
+from xml.etree import ElementTree as ET
+
+import httpx
+import vobject
+
+from ..context_vars import current_user
+from ..database import credential_store
+from .base import BaseTool, ToolResult
+
+logger = logging.getLogger(__name__)
+
+MAX_CONTACTS = 100
+
+_CARDDAV_NS = "urn:ietf:params:xml:ns:carddav"
+_DAV_NS = "DAV:"
+
+
+# ── URL helpers ───────────────────────────────────────────────────────────────
+
+def _sogo_carddav_url(base_url: str, username: str) -> str:
+ """Return the SOGo CardDAV personal address book URL for the given user."""
+ if not base_url.startswith(("http://", "https://")):
+ base_url = "https://" + base_url
+ if "/SOGo/dav/" in base_url or base_url.rstrip("/").endswith("/Contacts/personal"):
+ return base_url.rstrip("/") + "/"
+ return f"{base_url.rstrip('/')}/SOGo/dav/{username}/Contacts/personal/"
+
+
+def _abs_href(href: str, base_url: str) -> str:
+ """Convert a relative href to an absolute URL using base_url's scheme+host."""
+ if href.startswith("http://") or href.startswith("https://"):
+ return href
+ parsed = urlparse(base_url)
+ return f"{parsed.scheme}://{parsed.netloc}{href}"
+
+
+# ── vCard helpers ─────────────────────────────────────────────────────────────
+
+def _vcard_to_dict(vcard_text: str, contact_url: str = "") -> dict:
+ """Parse a vCard string into a structured dict."""
+ try:
+ vc = vobject.readOne(vcard_text)
+ except Exception:
+ return {"id": contact_url, "raw": vcard_text[:200]}
+
+ def _get(component: str) -> str:
+ try:
+ return str(getattr(vc, component).value)
+ except Exception:
+ return ""
+
+ def _get_list(component: str) -> list[str]:
+ try:
+ items = vc.contents.get(component.lower(), [])
+ return [str(item.value) for item in items]
+ except Exception:
+ return []
+
+ name = _get("fn")
+ emails = _get_list("email")
+ phones = _get_list("tel")
+ org = _get("org")
+ note = _get("note")
+ uid = _get("uid") or contact_url
+
+ result: dict = {"id": contact_url or uid, "name": name}
+ if emails:
+ result["email"] = emails[0]
+ if len(emails) > 1:
+ result["emails"] = emails
+ if phones:
+ result["phone"] = phones[0]
+ if len(phones) > 1:
+ result["phones"] = phones
+ if org:
+ result["organization"] = org
+ if note:
+ result["note"] = note[:200]
+ return result
+
+
+# ── Credential helpers ────────────────────────────────────────────────────────
+
+async def _get_carddav_config(user_id: str | None = None) -> dict:
+ """
+ Per-user CardDAV config lookup — no system-wide fallback.
+
+ If carddav_same_as_caldav is set, reuses the user's own CalDAV credentials.
+ Otherwise uses the user's explicit CardDAV URL/username/password.
+ Returns empty dict values when not configured.
+ """
+ from .caldav_tool import _get_caldav_config
+
+ if not user_id:
+ return {"url": None, "username": None, "password": None}
+
+ from ..database import user_settings_store
+ same = await user_settings_store.get(user_id, "carddav_same_as_caldav")
+ if same == "1":
+ return await _get_caldav_config(user_id)
+
+ carddav_url = await user_settings_store.get(user_id, "carddav_url")
+ carddav_user = await user_settings_store.get(user_id, "carddav_username")
+ carddav_pass = await user_settings_store.get(user_id, "carddav_password")
+ return {"url": carddav_url, "username": carddav_user, "password": carddav_pass}
+
+
+# ── Low-level CardDAV HTTP ────────────────────────────────────────────────────
+
+async def _carddav_report(abook_url: str, auth: tuple[str, str]) -> list[dict]:
+ """
+ Issue an addressbook-query REPORT to fetch all vCards.
+ Returns list of {"url": str, "vcard_text": str}.
+ """
+    body = (
+        '<?xml version="1.0" encoding="utf-8"?>'
+        '<C:addressbook-query xmlns:D="DAV:" xmlns:C="urn:ietf:params:xml:ns:carddav">'
+        "  <D:prop><D:getetag/>"
+        "  <C:address-data/></D:prop>"
+        "</C:addressbook-query>"
+    )
+ async with httpx.AsyncClient(auth=auth, timeout=30) as client:
+ r = await client.request(
+ "REPORT",
+ abook_url,
+ content=body,
+ headers={"Content-Type": "application/xml; charset=utf-8", "Depth": "1"},
+ )
+ if r.status_code != 207:
+ raise RuntimeError(f"CardDAV REPORT returned HTTP {r.status_code}")
+
+ root = ET.fromstring(r.content)
+ results = []
+ for resp in root.findall(f"{{{_DAV_NS}}}response"):
+ href_el = resp.find(f"{{{_DAV_NS}}}href")
+ status_el = resp.find(f"{{{_DAV_NS}}}propstat/{{{_DAV_NS}}}status")
+ data_el = resp.find(
+ f"{{{_DAV_NS}}}propstat/{{{_DAV_NS}}}prop/{{{_CARDDAV_NS}}}address-data"
+ )
+ if href_el is None or data_el is None or not (data_el.text or "").strip():
+ continue
+ if status_el is not None and "200" not in (status_el.text or ""):
+ continue
+ abs_url = _abs_href(href_el.text, abook_url)
+ results.append({"url": abs_url, "vcard_text": data_el.text})
+ return results
+
+
+async def _carddav_put(url: str, vcard_text: str, auth: tuple[str, str], etag: str = "") -> None:
+ headers: dict = {"Content-Type": "text/vcard; charset=utf-8"}
+ if etag:
+ headers["If-Match"] = etag
+ async with httpx.AsyncClient(auth=auth, timeout=15) as client:
+ r = await client.put(url, content=vcard_text.encode(), headers=headers)
+ if r.status_code not in (200, 201, 204):
+ raise RuntimeError(f"CardDAV PUT returned HTTP {r.status_code}")
+
+
+async def _carddav_delete(url: str, auth: tuple[str, str]) -> None:
+ async with httpx.AsyncClient(auth=auth, timeout=15) as client:
+ r = await client.delete(url)
+ if r.status_code not in (200, 204):
+ raise RuntimeError(f"CardDAV DELETE returned HTTP {r.status_code}")
+
+
+# ── Tool ──────────────────────────────────────────────────────────────────────
+
+_WRITE_OPS = {"create_contact", "update_contact", "delete_contact"}
+
+
+class ContactsTool(BaseTool):
+ name = "contacts"
+ description = (
+ "Search, read, and (if write access is enabled) modify contacts in the CardDAV address book. "
+ "Read operations: list_contacts, search_contacts, get_contact. "
+ "Write operations (require contacts:allow_write=1): create_contact, update_contact, delete_contact. "
+ "Uses the same CalDAV server credentials."
+ )
+ input_schema = {
+ "type": "object",
+ "properties": {
+ "operation": {
+ "type": "string",
+ "enum": [
+ "list_contacts", "search_contacts", "get_contact",
+ "create_contact", "update_contact", "delete_contact",
+ ],
+ "description": "Operation to perform.",
+ },
+ "query": {
+ "type": "string",
+ "description": "Search query (name or email substring). Required for search_contacts.",
+ },
+ "contact_id": {
+ "type": "string",
+ "description": "Contact URL or UID. Required for get_contact, update_contact, delete_contact.",
+ },
+ "name": {"type": "string", "description": "Full display name. Required for create_contact."},
+ "email": {"type": "string", "description": "Primary email address."},
+ "phone": {"type": "string", "description": "Primary phone number."},
+ "organization": {"type": "string", "description": "Organization / company name."},
+ "note": {"type": "string", "description": "Free-text note."},
+ },
+ "required": ["operation"],
+ }
+ requires_confirmation = False
+ allowed_in_scheduled_tasks = True
+
+ async def execute(
+ self,
+ operation: str,
+ query: str = "",
+ contact_id: str = "",
+ name: str = "",
+ email: str = "",
+ phone: str = "",
+ organization: str = "",
+ note: str = "",
+ **_,
+ ) -> ToolResult:
+ user_id = None
+ try:
+ u = current_user.get()
+ if u:
+ user_id = u.id
+ except Exception:
+ pass
+
+ # Gate write operations
+ if operation in _WRITE_OPS:
+ allow_write = False
+ if user_id:
+ from ..database import user_settings_store
+ per_user = await user_settings_store.get(user_id, "contacts_allow_write")
+ allow_write = (per_user == "1")
+ if not allow_write:
+ allow_write = (await credential_store.get("contacts:allow_write") == "1")
+ if not allow_write:
+ return ToolResult(
+ success=False,
+ error=(
+ "Contact write access is disabled. "
+ "Enable it in Settings → CalDAV/CardDAV."
+ ),
+ )
+
+ cfg = await _get_carddav_config(user_id)
+ base_url = cfg.get("url")
+ username = cfg.get("username")
+ password = cfg.get("password")
+
+ if not base_url or not username or not password:
+ return ToolResult(
+ success=False,
+ error=(
+ "CardDAV credentials not configured. "
+ "Set them in Settings → CalDAV/CardDAV."
+ ),
+ )
+
+ abook_url = _sogo_carddav_url(base_url, username)
+ auth = (username, password)
+
+ if operation == "list_contacts":
+ return await self._list_contacts(abook_url, auth)
+ if operation == "search_contacts":
+ if not query:
+ return ToolResult(success=False, error="'query' is required for search_contacts")
+ return await self._search_contacts(abook_url, auth, query)
+ if operation == "get_contact":
+ if not contact_id:
+ return ToolResult(success=False, error="'contact_id' is required for get_contact")
+ return await self._get_contact(abook_url, auth, contact_id)
+ if operation == "create_contact":
+ if not name:
+ return ToolResult(success=False, error="'name' is required for create_contact")
+ return await self._create_contact(abook_url, auth, name, email, phone, organization, note)
+ if operation == "update_contact":
+ if not contact_id:
+ return ToolResult(success=False, error="'contact_id' is required for update_contact")
+ return await self._update_contact(abook_url, auth, contact_id, name, email, phone, organization, note)
+ if operation == "delete_contact":
+ if not contact_id:
+ return ToolResult(success=False, error="'contact_id' is required for delete_contact")
+ return await self._delete_contact(abook_url, auth, contact_id)
+ return ToolResult(success=False, error=f"Unknown operation: {operation}")
+
+ # ── Read operations ────────────────────────────────────────────────────────
+
+ async def _list_contacts(self, abook_url: str, auth: tuple) -> ToolResult:
+ try:
+ items = await _carddav_report(abook_url, auth)
+ contacts = [_vcard_to_dict(it["vcard_text"], it["url"]) for it in items[:MAX_CONTACTS]]
+ contacts.sort(key=lambda c: c.get("name", "").lower())
+ return ToolResult(success=True, data={"contacts": contacts, "count": len(contacts)})
+ except Exception as e:
+ return ToolResult(success=False, error=f"Failed to list contacts: {e}")
+
+ async def _search_contacts(self, abook_url: str, auth: tuple, query: str) -> ToolResult:
+ try:
+ q_lower = query.lower()
+ items = await _carddav_report(abook_url, auth)
+ matches = []
+ for it in items:
+ d = _vcard_to_dict(it["vcard_text"], it["url"])
+ name_match = q_lower in d.get("name", "").lower()
+ email_match = any(q_lower in e.lower() for e in ([d.get("email", "")] + d.get("emails", [])))
+ if name_match or email_match:
+ matches.append(d)
+ if len(matches) >= MAX_CONTACTS:
+ break
+ matches.sort(key=lambda c: c.get("name", "").lower())
+ return ToolResult(success=True, data={"contacts": matches, "count": len(matches), "query": query})
+ except Exception as e:
+ return ToolResult(success=False, error=f"Failed to search contacts: {e}")
+
+ async def _get_contact(self, abook_url: str, auth: tuple, contact_id: str) -> ToolResult:
+ try:
+ items = await _carddav_report(abook_url, auth)
+ for it in items:
+ d = _vcard_to_dict(it["vcard_text"], it["url"])
+ if d.get("id") == contact_id or contact_id in it["url"]:
+ return ToolResult(success=True, data=d)
+ return ToolResult(success=False, error=f"Contact '{contact_id}' not found")
+ except Exception as e:
+ return ToolResult(success=False, error=f"Failed to get contact: {e}")
+
+ # ── Write operations ───────────────────────────────────────────────────────
+
+ def _build_vcard(
+ self,
+ name: str,
+ email: str = "",
+ phone: str = "",
+ organization: str = "",
+ note: str = "",
+ uid: str | None = None,
+ ) -> str:
+ vc = vobject.vCard()
+ vc.add("fn").value = name
+ parts = name.split(" ", 1)
+ n = vobject.vcard.Name(family=parts[-1], given=parts[0] if len(parts) > 1 else "")
+ vc.add("n").value = n
+ vc.add("uid").value = uid or str(_uuid.uuid4())
+ if email:
+ email_obj = vc.add("email")
+ email_obj.value = email
+ email_obj.type_param = "INTERNET"
+ if phone:
+ tel_obj = vc.add("tel")
+ tel_obj.value = phone
+ tel_obj.type_param = "VOICE"
+ if organization:
+ vc.add("org").value = [organization]
+ if note:
+ vc.add("note").value = note
+ return vc.serialize()
+
+ async def _create_contact(
+ self, abook_url: str, auth: tuple, name: str, email: str, phone: str, organization: str, note: str
+ ) -> ToolResult:
+ try:
+ uid = str(_uuid.uuid4())
+ vcard_text = self._build_vcard(name, email, phone, organization, note, uid=uid)
+ url = abook_url.rstrip("/") + f"/{uid}.vcf"
+ await _carddav_put(url, vcard_text, auth)
+ return ToolResult(success=True, data={"created": name, "id": url})
+ except Exception as e:
+ return ToolResult(success=False, error=f"Failed to create contact: {e}")
+
+ async def _update_contact(
+ self, abook_url: str, auth: tuple, contact_id: str, name: str, email: str, phone: str, organization: str, note: str
+ ) -> ToolResult:
+ try:
+ items = await _carddav_report(abook_url, auth)
+ for it in items:
+ d = _vcard_to_dict(it["vcard_text"], it["url"])
+ if d.get("id") == contact_id or contact_id in it["url"]:
+ new_name = name or d.get("name", "")
+ new_email = email or d.get("email", "")
+ new_phone = phone or d.get("phone", "")
+ new_org = organization or d.get("organization", "")
+ new_note = note or d.get("note", "")
+ # Extract UID from original vCard to reuse
+ try:
+ vc_orig = vobject.readOne(it["vcard_text"])
+ uid = str(vc_orig.uid.value)
+ except Exception:
+ uid = None
+ new_vcard = self._build_vcard(new_name, new_email, new_phone, new_org, new_note, uid=uid)
+ await _carddav_put(it["url"], new_vcard, auth)
+ return ToolResult(success=True, data={"updated": new_name})
+ return ToolResult(success=False, error=f"Contact '{contact_id}' not found")
+ except Exception as e:
+ return ToolResult(success=False, error=f"Failed to update contact: {e}")
+
+ async def _delete_contact(self, abook_url: str, auth: tuple, contact_id: str) -> ToolResult:
+ try:
+ items = await _carddav_report(abook_url, auth)
+ for it in items:
+ d = _vcard_to_dict(it["vcard_text"], it["url"])
+ if d.get("id") == contact_id or contact_id in it["url"]:
+ await _carddav_delete(it["url"], auth)
+ return ToolResult(success=True, data={"deleted": d.get("name", contact_id)})
+ return ToolResult(success=False, error=f"Contact '{contact_id}' not found")
+ except Exception as e:
+ return ToolResult(success=False, error=f"Failed to delete contact: {e}")
diff --git a/server/tools/pushover_tool.py b/server/tools/pushover_tool.py
index d1f4b66..918b9e0 100644
--- a/server/tools/pushover_tool.py
+++ b/server/tools/pushover_tool.py
@@ -72,13 +72,26 @@ class PushoverTool(BaseTool):
# If we got here, it was approved.
pass
- # Load credentials
+ # Load credentials — per-user key first, then system fallback
try:
app_token = await credential_store.require("pushover_app_token")
- user_key = await credential_store.require("pushover_user_key")
except RuntimeError as e:
return ToolResult(success=False, error=str(e))
+ user_key: str | None = None
+ try:
+ from ..context_vars import current_user as _cu
+ u = _cu.get()
+ if u:
+ from ..database import user_settings_store as _us
+ user_key = await _us.get(u.id, "pushover_user_key")
+ except Exception:
+ pass
+ if not user_key:
+ user_key = await credential_store.get("pushover_user_key")
+ if not user_key:
+ return ToolResult(success=False, error="Pushover user key not configured. Set it in Settings → Pushover.")
+
payload: dict = {
"token": app_token,
"user": user_key,
diff --git a/server/tools/webhook_tool.py b/server/tools/webhook_tool.py
new file mode 100644
index 0000000..c0a8250
--- /dev/null
+++ b/server/tools/webhook_tool.py
@@ -0,0 +1,133 @@
+"""
+tools/webhook_tool.py — Outbound webhook tool.
+
+Agents can POST a JSON payload to a pre-configured named target in the
+webhook_targets table. The target URL and optional auth secret are managed
+via Settings → Webhooks → Outbound Targets.
+"""
+from __future__ import annotations
+
+import json
+
+import httpx
+
+from ..context_vars import current_user
+from .base import BaseTool, ToolResult
+
+
+class WebhookTool(BaseTool):
+ name = "webhook"
+ description = (
+ "Send a JSON payload to a configured outbound webhook target. "
+ "Use this to notify external services (e.g. Home Assistant, Zapier, custom APIs). "
+ "List available targets with operation='list_targets', then send with operation='send'."
+ )
+ input_schema = {
+ "type": "object",
+ "properties": {
+ "operation": {
+ "type": "string",
+ "enum": ["send", "list_targets"],
+ "description": "Operation: 'send' to POST to a target, 'list_targets' to see available targets.",
+ },
+ "target": {
+ "type": "string",
+ "description": "Target name (as configured in Settings → Webhooks). Required for 'send'.",
+ },
+ "payload": {
+ "type": "object",
+ "description": "JSON payload to POST. Required for 'send'.",
+ },
+ },
+ "required": ["operation"],
+ }
+ requires_confirmation = True
+ allowed_in_scheduled_tasks = True
+
+ async def execute(self, operation: str, target: str = "", payload: dict | None = None, **_) -> ToolResult:
+ if operation == "list_targets":
+ return await self._list_targets()
+ if operation == "send":
+ if not target:
+ return ToolResult(success=False, error="'target' is required for send operation")
+ return await self._send(target, payload or {})
+ return ToolResult(success=False, error=f"Unknown operation: {operation}")
+
+ def _current_user_id(self) -> str | None:
+ try:
+ u = current_user.get()
+ return u.id if u else None
+ except Exception:
+ return None
+
+ async def _list_targets(self) -> ToolResult:
+ from ..database import get_pool
+ pool = await get_pool()
+ user_id = self._current_user_id()
+ if user_id:
+ rows = await pool.fetch(
+ """
+ SELECT name, url, enabled FROM webhook_targets
+ WHERE (owner_user_id = $1 OR owner_user_id IS NULL)
+ ORDER BY owner_user_id NULLS LAST, name
+ """,
+ user_id,
+ )
+ else:
+ rows = await pool.fetch(
+ "SELECT name, url, enabled FROM webhook_targets WHERE owner_user_id IS NULL ORDER BY name"
+ )
+ targets = [{"name": r["name"], "url": r["url"], "enabled": r["enabled"]} for r in rows]
+ return ToolResult(success=True, data={"targets": targets})
+
+ async def _send(self, target_name: str, payload: dict) -> ToolResult:
+ from ..database import get_pool
+ pool = await get_pool()
+ user_id = self._current_user_id()
+ # User-scoped target takes priority over global (NULL owner) target with same name
+ row = None
+ if user_id:
+ row = await pool.fetchrow(
+ "SELECT * FROM webhook_targets WHERE name = $1 AND owner_user_id = $2 AND enabled = TRUE",
+ target_name, user_id,
+ )
+ if not row:
+ row = await pool.fetchrow(
+ "SELECT * FROM webhook_targets WHERE name = $1 AND owner_user_id IS NULL AND enabled = TRUE",
+ target_name,
+ )
+ if not row:
+ return ToolResult(
+ success=False,
+ error=f"No enabled webhook target named '{target_name}'. Use list_targets to see available targets.",
+ )
+
+ url: str = row["url"]
+ secret: str | None = row.get("secret_header")
+
+ headers = {"Content-Type": "application/json"}
+ if secret:
+ headers["Authorization"] = f"Bearer {secret}"
+
+ try:
+ async with httpx.AsyncClient(timeout=15.0) as client:
+ resp = await client.post(url, json=payload, headers=headers)
+ body = resp.text[:500] if resp.text else ""
+ return ToolResult(
+ success=True,
+ data={
+ "status_code": resp.status_code,
+ "ok": resp.is_success,
+ "response": body,
+ },
+ )
+ except httpx.TimeoutException:
+ return ToolResult(success=False, error=f"Request to '{target_name}' timed out after 15 seconds")
+ except Exception as e:
+ return ToolResult(success=False, error=f"Request to '{target_name}' failed: {e}")
+
+ def confirmation_description(self, operation: str = "", target: str = "", payload: dict | None = None, **_) -> str:
+ if operation == "send":
+ snippet = json.dumps(payload or {})[:100]
+ return f"POST to webhook target '{target}' with payload: {snippet}"
+ return "List webhook targets"
diff --git a/server/web/routes.py b/server/web/routes.py
index 1125817..197bbc6 100644
--- a/server/web/routes.py
+++ b/server/web/routes.py
@@ -312,6 +312,7 @@ async def models_info(request: Request):
class LimitsIn(BaseModel):
max_tool_calls: Optional[int] = None
max_autonomous_runs_per_hour: Optional[int] = None
+ max_concurrent_runs: Optional[int] = None
class ProxyTrustIn(BaseModel):
@@ -328,16 +329,19 @@ async def get_limits(request: Request):
except (ValueError, TypeError):
return default
- mtc, mar = await asyncio_gather(
+ mtc, mar, mcr = await asyncio_gather(
_get("system:max_tool_calls", settings.max_tool_calls),
_get("system:max_autonomous_runs_per_hour", settings.max_autonomous_runs_per_hour),
+ _get("system:max_concurrent_runs", 3),
)
return {
"max_tool_calls": mtc,
"max_autonomous_runs_per_hour": mar,
+ "max_concurrent_runs": mcr,
"defaults": {
"max_tool_calls": settings.max_tool_calls,
"max_autonomous_runs_per_hour": settings.max_autonomous_runs_per_hour,
+ "max_concurrent_runs": 3,
},
}
@@ -359,9 +363,23 @@ async def set_limits(request: Request, body: LimitsIn):
"system:max_autonomous_runs_per_hour", str(body.max_autonomous_runs_per_hour),
"Max autonomous scheduler runs per hour",
)
+ if body.max_concurrent_runs is not None:
+ if body.max_concurrent_runs < 1:
+ raise HTTPException(status_code=400, detail="max_concurrent_runs must be >= 1")
+ await credential_store.set(
+ "system:max_concurrent_runs", str(body.max_concurrent_runs),
+ "Max concurrent agent runs",
+ )
return await get_limits()
+@router.get("/queue")
+async def get_queue_status(request: Request):
+ """Return current run queue status."""
+ _require_auth(request)
+ return agent_runner.queue_status
+
+
@router.get("/settings/default-models")
async def get_default_models(request: Request):
_require_admin(request)
@@ -447,6 +465,128 @@ async def set_users_base_folder(request: Request, body: UserBaseFolderIn):
return {"path": path}
+# ── Admin CalDAV / CardDAV settings ──────────────────────────────────────────
+
+@router.get("/settings/caldav")
+async def get_admin_caldav(request: Request):
+ _require_admin(request)
+ get = credential_store.get
+ return {
+ "host": await get("mailcow_host") or "",
+ "username": await get("mailcow_username") or "",
+ "password_set": bool(await get("mailcow_password")),
+ "calendar_name": await get("caldav_calendar_name") or "",
+ "contacts_allow_write": (await get("contacts:allow_write")) == "1",
+ "carddav_same_as_caldav": (await get("carddav_same_as_caldav")) == "1",
+ "carddav_url": await get("carddav_url") or "",
+ "carddav_username": await get("carddav_username") or "",
+ "carddav_password_set": bool(await get("carddav_password")),
+ "imap_host": await get("mailcow_imap_host") or "",
+ "smtp_host": await get("mailcow_smtp_host") or "",
+ "smtp_port": await get("mailcow_smtp_port") or "",
+ }
+
+
+@router.post("/settings/caldav")
+async def set_admin_caldav(request: Request):
+ _require_admin(request)
+ body = await request.json()
+
+ async def _set(key, val, desc=""):
+ val = (val or "").strip()
+ if val:
+ await credential_store.set(key, val, desc)
+ else:
+ await credential_store.delete(key)
+
+ async def _set_bool(key, val):
+ if val:
+ await credential_store.set(key, "1")
+ else:
+ await credential_store.delete(key)
+
+ await _set("mailcow_host", body.get("host"), "Mailcow hostname")
+ await _set("mailcow_username", body.get("username"), "Mailcow username")
+ # Only update password if a new value is provided
+ pwd = (body.get("password") or "").strip()
+ if pwd:
+ await credential_store.set("mailcow_password", pwd, "Mailcow password")
+ await _set("caldav_calendar_name", body.get("calendar_name"), "Default calendar name")
+ await _set_bool("contacts:allow_write", body.get("contacts_allow_write"))
+
+ same = bool(body.get("carddav_same_as_caldav"))
+ await _set_bool("carddav_same_as_caldav", same)
+ if same:
+ for k in ("carddav_url", "carddav_username", "carddav_password"):
+ await credential_store.delete(k)
+ else:
+ await _set("carddav_url", body.get("carddav_url"), "CardDAV server URL")
+ await _set("carddav_username", body.get("carddav_username"), "CardDAV username")
+ cpwd = (body.get("carddav_password") or "").strip()
+ if cpwd:
+ await credential_store.set("carddav_password", cpwd, "CardDAV password")
+
+ # Email tool overrides (optional host overrides for IMAP/SMTP)
+ await _set("mailcow_imap_host", body.get("imap_host"), "IMAP host override")
+ await _set("mailcow_smtp_host", body.get("smtp_host"), "SMTP host override")
+ await _set("mailcow_smtp_port", body.get("smtp_port"), "SMTP port override")
+
+ return {"ok": True}
+
+
+# ── Admin Pushover settings ───────────────────────────────────────────────────
+
+@router.get("/settings/pushover")
+async def get_admin_pushover(request: Request):
+ _require_admin(request)
+ return {
+ "app_token_set": bool(await credential_store.get("pushover_app_token")),
+ "user_key_set": bool(await credential_store.get("pushover_user_key")),
+ }
+
+
+@router.post("/settings/pushover")
+async def set_admin_pushover(request: Request):
+ _require_admin(request)
+ body = await request.json()
+ for field, key in [("app_token", "pushover_app_token"), ("user_key", "pushover_user_key")]:
+ val = (body.get(field) or "").strip()
+ if val:
+ await credential_store.set(key, val)
+ # Never clear on empty — must explicitly use delete endpoint
+ return {"ok": True}
+
+
+# ── User Pushover settings ────────────────────────────────────────────────────
+
+@router.get("/my/pushover")
+async def get_my_pushover(request: Request):
+ _require_auth(request)
+ user = request.state.current_user
+ app_ok = bool(await credential_store.get("pushover_app_token"))
+ user_key = await _user_settings_store.get(user["id"], "pushover_user_key")
+ return {"app_token_configured": app_ok, "user_key_set": bool(user_key)}
+
+
+@router.post("/my/pushover")
+async def set_my_pushover(request: Request):
+ _require_auth(request)
+ user = request.state.current_user
+ body = await request.json()
+ key = (body.get("user_key") or "").strip()
+ if key:
+ await _user_settings_store.set(user["id"], "pushover_user_key", key)
+ return {"ok": True}
+
+
+@router.delete("/my/pushover")
+async def delete_my_pushover(request: Request):
+ _require_auth(request)
+ user = request.state.current_user
+ await _user_settings_store.delete(user["id"], "pushover_user_key")
+ return {"ok": True}
+
+
# ── Agents ────────────────────────────────────────────────────────────────────
class AgentIn(BaseModel):
@@ -1937,6 +2077,81 @@ async def delete_conversation(request: Request, conv_id: str):
return {"ok": True}
+@router.get("/conversations/{conv_id}/export")
+async def export_conversation(request: Request, conv_id: str, format: str = "markdown"):
+ """Download a conversation as Markdown or JSON."""
+ import json as _json
+ from fastapi.responses import Response
+ from datetime import datetime, timezone
+
+ user = _require_auth(request)
+ from ..database import get_pool
+ pool = await get_pool()
+ row = await pool.fetchrow(
+ "SELECT * FROM conversations WHERE id = $1", conv_id
+ )
+ if not row:
+ raise HTTPException(status_code=404, detail="Conversation not found")
+ if not user.is_admin and row.get("user_id") != user.id:
+ raise HTTPException(status_code=403, detail="Not your conversation")
+
+ messages = row.get("messages") or []
+ if isinstance(messages, str):
+ try:
+ messages = _json.loads(messages)
+ except Exception:
+ messages = []
+
+ started_at = row.get("started_at") or ""
+ title = row.get("title") or "Conversation"
+ model = row.get("model") or ""
+
+ from ..config import settings as _settings
+ agent_name = _settings.agent_name
+
+ if format == "json":
+ content = _json.dumps({
+ "id": str(row["id"]),
+ "title": title,
+ "model": model,
+ "started_at": str(started_at),
+ "messages": messages,
+ }, indent=2, default=str)
+ filename = f"conversation-{conv_id[:8]}.json"
+ media_type = "application/json"
+ else:
+ # Markdown
+ lines = [f"# {title}", ""]
+ if model:
+ lines.append(f"**Model:** {model} ")
+ if started_at:
+ lines.append(f"**Date:** {str(started_at)[:19]} ")
+ lines += ["", "---", ""]
+ for msg in messages:
+ role = msg.get("role", "")
+ content_parts = msg.get("content", "")
+ if isinstance(content_parts, list):
+ text = " ".join(
+ p.get("text", "") for p in content_parts
+ if isinstance(p, dict) and p.get("type") == "text"
+ )
+ else:
+ text = str(content_parts)
+ if not text.strip():
+ continue
+ speaker = "You" if role == "user" else agent_name
+ lines += [f"**{speaker}**", "", text, "", "---", ""]
+ content = "\n".join(lines)
+ filename = f"conversation-{conv_id[:8]}.md"
+ media_type = "text/markdown"
+
+ return Response(
+ content=content,
+ media_type=media_type,
+        headers={"Content-Disposition": f'attachment; filename="{filename}"'},
+ )
+
+
# ── Agent templates ────────────────────────────────────────────────────────────
@router.get("/agent-templates")
@@ -2532,15 +2747,26 @@ async def list_folders_for_account(request: Request, account_id: str):
@router.get("/my/caldav/config")
async def get_my_caldav_config(request: Request):
user = _require_auth(request)
- url = await _user_settings_store.get(user.id, "caldav_url")
- username = await _user_settings_store.get(user.id, "caldav_username")
- password = await _user_settings_store.get(user.id, "caldav_password")
- calendar_name = await _user_settings_store.get(user.id, "caldav_calendar_name")
+ get = lambda k: _user_settings_store.get(user.id, k)
+ url = await get("caldav_url")
+ username = await get("caldav_username")
+ password = await get("caldav_password")
+ calendar_name = await get("caldav_calendar_name")
+ carddav_same = await get("carddav_same_as_caldav")
+ carddav_url = await get("carddav_url")
+ carddav_user = await get("carddav_username")
+ carddav_pass = await get("carddav_password")
+ allow_write = await get("contacts_allow_write")
return {
"url": url or "",
"username": username or "",
- "password_set": bool(password),
+ "password": password or "",
"calendar_name": calendar_name or "",
+ "carddav_same_as_caldav": carddav_same == "1",
+ "carddav_url": carddav_url or "",
+ "carddav_username": carddav_user or "",
+ "carddav_password": carddav_pass or "",
+ "contacts_allow_write": allow_write == "1",
}
@@ -2548,6 +2774,8 @@ async def get_my_caldav_config(request: Request):
async def set_my_caldav_config(request: Request):
user = _require_auth(request)
body = await request.json()
+
+ # CalDAV fields
for key, setting_key in [
("url", "caldav_url"),
("username", "caldav_username"),
@@ -2557,8 +2785,36 @@ async def set_my_caldav_config(request: Request):
val = (body.get(key) or "").strip()
if val:
await _user_settings_store.set(user.id, setting_key, val)
- elif key != "password": # never clear password on empty
+ elif key != "password":
await _user_settings_store.delete(user.id, setting_key)
+
+ # CardDAV fields
+ same = bool(body.get("carddav_same_as_caldav"))
+ if same:
+ await _user_settings_store.set(user.id, "carddav_same_as_caldav", "1")
+ # Clear separate CardDAV creds — not needed when using same server
+ for k in ("carddav_url", "carddav_username", "carddav_password"):
+ await _user_settings_store.delete(user.id, k)
+ else:
+ await _user_settings_store.delete(user.id, "carddav_same_as_caldav")
+ for key, setting_key in [
+ ("carddav_url", "carddav_url"),
+ ("carddav_username", "carddav_username"),
+ ("carddav_password", "carddav_password"),
+ ]:
+ val = (body.get(key) or "").strip()
+ if val:
+ await _user_settings_store.set(user.id, setting_key, val)
+ elif key != "carddav_password":
+ await _user_settings_store.delete(user.id, setting_key)
+
+ # Per-user contacts write permission
+ allow_write = bool(body.get("contacts_allow_write"))
+ if allow_write:
+ await _user_settings_store.set(user.id, "contacts_allow_write", "1")
+ else:
+ await _user_settings_store.delete(user.id, "contacts_allow_write")
+
return {"ok": True}
@@ -2572,6 +2828,8 @@ async def test_my_caldav_config(request: Request):
try:
import caldav
url = cfg["url"]
+ if not url.startswith(("http://", "https://")):
+ url = "https://" + url
if "/SOGo/dav/" not in url:
url = f"{url.rstrip('/')}/SOGo/dav/{cfg['username']}/"
client = caldav.DAVClient(url=url, username=cfg["username"], password=cfg["password"])
@@ -2582,10 +2840,43 @@ async def test_my_caldav_config(request: Request):
return {"success": False, "message": str(e)}
+@router.post("/my/caldav/test-carddav")
+async def test_my_carddav_config(request: Request):
+ user = _require_auth(request)
+ from ..tools.contacts_tool import _get_carddav_config, _sogo_carddav_url
+ cfg = await _get_carddav_config(user_id=user.id)
+ if not cfg.get("url") or not cfg.get("username") or not cfg.get("password"):
+ return {"success": False, "message": "CardDAV credentials not configured"}
+ try:
+ import httpx
+ abook_url = _sogo_carddav_url(cfg["url"], cfg["username"])
+ body = (
+            '<?xml version="1.0" encoding="utf-8"?>'
+            '<propfind xmlns="DAV:"><prop><displayname/></prop></propfind>'
+ )
+ async with httpx.AsyncClient(
+ auth=(cfg["username"], cfg["password"]), timeout=10
+ ) as client:
+ r = await client.request(
+ "PROPFIND", abook_url,
+ content=body,
+ headers={"Content-Type": "application/xml; charset=utf-8", "Depth": "0"},
+ )
+ if r.status_code in (200, 207):
+ return {"success": True, "message": f"Connected to {abook_url}"}
+ return {"success": False, "message": f"Server returned HTTP {r.status_code}"}
+ except Exception as e:
+ return {"success": False, "message": str(e)}
+
+
@router.delete("/my/caldav/config")
async def delete_my_caldav_config(request: Request):
user = _require_auth(request)
- for key in ("caldav_url", "caldav_username", "caldav_password", "caldav_calendar_name"):
+ for key in (
+ "caldav_url", "caldav_username", "caldav_password", "caldav_calendar_name",
+ "carddav_same_as_caldav", "carddav_url", "carddav_username", "carddav_password",
+ "contacts_allow_write",
+ ):
await _user_settings_store.delete(user.id, key)
return {"ok": True}
@@ -2742,3 +3033,489 @@ async def update_my_profile(request: Request, body: ProfileUpdateIn):
from ..users import update_user
await update_user(user.id, display_name=body.display_name.strip() or None)
return {"ok": True}
+
+
+# ── Webhook endpoints (inbound triggers) ──────────────────────────────────────
+
+class WebhookEndpointIn(BaseModel):
+ name: str
+ agent_id: str = ""
+ description: str = ""
+ allow_get: bool = True
+
+
+class WebhookEndpointUpdate(BaseModel):
+ name: Optional[str] = None
+ agent_id: Optional[str] = None
+ description: Optional[str] = None
+ allow_get: Optional[bool] = None
+ enabled: Optional[bool] = None
+
+
+@router.get("/webhooks")
+async def list_webhooks(request: Request):
+ _require_admin(request)
+ from ..webhooks.endpoints import list_endpoints
+ return await list_endpoints()
+
+
+@router.post("/webhooks", status_code=201)
+async def create_webhook(request: Request, body: WebhookEndpointIn):
+ _require_admin(request)
+ from ..webhooks.endpoints import create_endpoint
+ if not body.name.strip():
+ raise HTTPException(status_code=400, detail="Name is required")
+ ep = await create_endpoint(
+ name=body.name.strip(),
+ agent_id=body.agent_id or "",
+ description=body.description,
+ allow_get=body.allow_get,
+ )
+ return ep # includes token — only time it's returned
+
+
+@router.put("/webhooks/{endpoint_id}")
+async def update_webhook(request: Request, endpoint_id: str, body: WebhookEndpointUpdate):
+ _require_admin(request)
+ from ..webhooks.endpoints import update_endpoint
+ fields = {k: v for k, v in body.model_dump().items() if v is not None}
+ ep = await update_endpoint(endpoint_id, **fields)
+ if ep is None:
+ raise HTTPException(status_code=404, detail="Webhook not found")
+ return ep
+
+
+@router.delete("/webhooks/{endpoint_id}")
+async def delete_webhook(request: Request, endpoint_id: str):
+ _require_admin(request)
+ from ..webhooks.endpoints import delete_endpoint
+ deleted = await delete_endpoint(endpoint_id)
+ if not deleted:
+ raise HTTPException(status_code=404, detail="Webhook not found")
+ return {"ok": True}
+
+
+@router.post("/webhooks/{endpoint_id}/rotate")
+async def rotate_webhook_token(request: Request, endpoint_id: str):
+ _require_admin(request)
+ from ..webhooks.endpoints import get_endpoint, rotate_token
+ ep = await get_endpoint(endpoint_id)
+ if ep is None:
+ raise HTTPException(status_code=404, detail="Webhook not found")
+ new_token = await rotate_token(endpoint_id)
+ return {"ok": True, "token": new_token}
+
+
+# ── User-scoped webhook endpoints (non-admin) ─────────────────────────────────
+
+@router.get("/my/webhooks")
+async def list_my_webhooks(request: Request):
+ _require_auth(request)
+ user_id = request.state.current_user["id"]
+ from ..webhooks.endpoints import list_endpoints
+ return await list_endpoints(owner_user_id=user_id)
+
+
+@router.post("/my/webhooks", status_code=201)
+async def create_my_webhook(request: Request, body: WebhookEndpointIn):
+ _require_auth(request)
+ user_id = request.state.current_user["id"]
+ from ..webhooks.endpoints import create_endpoint
+ if not body.name.strip():
+ raise HTTPException(status_code=400, detail="Name is required")
+ ep = await create_endpoint(
+ name=body.name.strip(),
+ agent_id=body.agent_id or "",
+ description=body.description,
+ allow_get=body.allow_get,
+ owner_user_id=user_id,
+ )
+ return ep # includes token — only time it's returned
+
+
+@router.put("/my/webhooks/{endpoint_id}")
+async def update_my_webhook(request: Request, endpoint_id: str, body: WebhookEndpointUpdate):
+ _require_auth(request)
+ user_id = request.state.current_user["id"]
+ from ..webhooks.endpoints import update_endpoint, get_endpoint
+ ep = await get_endpoint(endpoint_id, owner_user_id=user_id)
+ if ep is None:
+ raise HTTPException(status_code=404, detail="Webhook not found")
+ fields = {k: v for k, v in body.model_dump().items() if v is not None}
+ updated = await update_endpoint(endpoint_id, **fields)
+ return updated
+
+
+@router.delete("/my/webhooks/{endpoint_id}")
+async def delete_my_webhook(request: Request, endpoint_id: str):
+ _require_auth(request)
+ user_id = request.state.current_user["id"]
+ from ..webhooks.endpoints import delete_endpoint
+ deleted = await delete_endpoint(endpoint_id, owner_user_id=user_id)
+ if not deleted:
+ raise HTTPException(status_code=404, detail="Webhook not found")
+ return {"ok": True}
+
+
+@router.post("/my/webhooks/{endpoint_id}/rotate")
+async def rotate_my_webhook_token(request: Request, endpoint_id: str):
+ _require_auth(request)
+ user_id = request.state.current_user["id"]
+ from ..webhooks.endpoints import get_endpoint, rotate_token
+ ep = await get_endpoint(endpoint_id, owner_user_id=user_id)
+ if ep is None:
+ raise HTTPException(status_code=404, detail="Webhook not found")
+ new_token = await rotate_token(endpoint_id)
+ return {"ok": True, "token": new_token}
+
+
+# ── Webhook targets (outbound) ────────────────────────────────────────────────
+
+class WebhookTargetIn(BaseModel):
+ name: str
+ url: str
+ secret_header: str = ""
+
+
+class WebhookTargetUpdate(BaseModel):
+ name: Optional[str] = None
+ url: Optional[str] = None
+ secret_header: Optional[str] = None
+ enabled: Optional[bool] = None
+
+
+@router.get("/webhook-targets")
+async def list_webhook_targets(request: Request):
+ _require_admin(request)
+ from ..database import get_pool
+ pool = await get_pool()
+ rows = await pool.fetch("SELECT * FROM webhook_targets ORDER BY name")
+ return [dict(r) for r in rows]
+
+
+@router.post("/webhook-targets", status_code=201)
+async def create_webhook_target(request: Request, body: WebhookTargetIn):
+ _require_admin(request)
+ if not body.name.strip():
+ raise HTTPException(status_code=400, detail="Name is required")
+ if not body.url.strip():
+ raise HTTPException(status_code=400, detail="URL is required")
+ from ..database import get_pool
+ from datetime import datetime, timezone
+ pool = await get_pool()
+ now = datetime.now(timezone.utc).isoformat()
+ try:
+ row = await pool.fetchrow(
+ """
+ INSERT INTO webhook_targets (name, url, secret_header, created_at)
+ VALUES ($1, $2, $3, $4) RETURNING *
+ """,
+ body.name.strip(), body.url.strip(), body.secret_header or None,
+ now,
+ )
+ except Exception as e:
+ if "unique" in str(e).lower():
+ raise HTTPException(status_code=409, detail="A target with that name already exists")
+ raise
+ return dict(row)
+
+
+@router.put("/webhook-targets/{target_id}")
+async def update_webhook_target(request: Request, target_id: str, body: WebhookTargetUpdate):
+ _require_admin(request)
+ fields = {k: v for k, v in body.model_dump().items() if v is not None}
+ if not fields:
+ raise HTTPException(status_code=400, detail="No fields to update")
+ from ..database import get_pool
+ pool = await get_pool()
+ set_clauses = ", ".join(f"{k} = ${i + 2}" for i, k in enumerate(fields))
+ await pool.execute(
+ f"UPDATE webhook_targets SET {set_clauses} WHERE id = $1::uuid",
+ target_id, *fields.values(),
+ )
+ row = await pool.fetchrow("SELECT * FROM webhook_targets WHERE id = $1::uuid", target_id)
+ if not row:
+ raise HTTPException(status_code=404, detail="Target not found")
+ return dict(row)
+
+
+@router.delete("/webhook-targets/{target_id}")
+async def delete_webhook_target(request: Request, target_id: str):
+ _require_admin(request)
+ from ..database import get_pool, _rowcount
+ pool = await get_pool()
+ status = await pool.execute(
+ "DELETE FROM webhook_targets WHERE id = $1::uuid", target_id
+ )
+ if _rowcount(status) == 0:
+ raise HTTPException(status_code=404, detail="Target not found")
+ return {"ok": True}
+
+
+# ── User-scoped webhook targets (non-admin) ───────────────────────────────────
+
+@router.get("/my/webhook-targets")
+async def list_my_webhook_targets(request: Request):
+ _require_auth(request)
+ user_id = request.state.current_user["id"]
+ from ..database import get_pool
+ pool = await get_pool()
+ rows = await pool.fetch(
+ "SELECT * FROM webhook_targets WHERE owner_user_id = $1 ORDER BY name", user_id
+ )
+ return [dict(r) for r in rows]
+
+
+@router.post("/my/webhook-targets", status_code=201)
+async def create_my_webhook_target(request: Request, body: WebhookTargetIn):
+ _require_auth(request)
+ user_id = request.state.current_user["id"]
+ if not body.name.strip():
+ raise HTTPException(status_code=400, detail="Name is required")
+ if not body.url.strip():
+ raise HTTPException(status_code=400, detail="URL is required")
+ from ..database import get_pool
+ from datetime import datetime, timezone
+ pool = await get_pool()
+ now = datetime.now(timezone.utc).isoformat()
+ try:
+ row = await pool.fetchrow(
+ """
+ INSERT INTO webhook_targets (name, url, secret_header, owner_user_id, created_at)
+ VALUES ($1, $2, $3, $4, $5) RETURNING *
+ """,
+ body.name.strip(), body.url.strip(), body.secret_header or None, user_id, now,
+ )
+ except Exception as e:
+ if "unique" in str(e).lower():
+ raise HTTPException(status_code=409, detail="A target with that name already exists")
+ raise
+ return dict(row)
+
+
+@router.put("/my/webhook-targets/{target_id}")
+async def update_my_webhook_target(request: Request, target_id: str, body: WebhookTargetUpdate):
+ _require_auth(request)
+ user_id = request.state.current_user["id"]
+ fields = {k: v for k, v in body.model_dump().items() if v is not None}
+ if not fields:
+ raise HTTPException(status_code=400, detail="No fields to update")
+ from ..database import get_pool
+ pool = await get_pool()
+ # Verify ownership first
+ existing = await pool.fetchrow(
+ "SELECT id FROM webhook_targets WHERE id = $1::uuid AND owner_user_id = $2",
+ target_id, user_id,
+ )
+ if not existing:
+ raise HTTPException(status_code=404, detail="Target not found")
+ set_clauses = ", ".join(f"{k} = ${i + 2}" for i, k in enumerate(fields))
+ await pool.execute(
+ f"UPDATE webhook_targets SET {set_clauses} WHERE id = $1::uuid",
+ target_id, *fields.values(),
+ )
+ row = await pool.fetchrow("SELECT * FROM webhook_targets WHERE id = $1::uuid", target_id)
+ return dict(row)
+
+
+@router.delete("/my/webhook-targets/{target_id}")
+async def delete_my_webhook_target(request: Request, target_id: str):
+ _require_auth(request)
+ user_id = request.state.current_user["id"]
+ from ..database import get_pool, _rowcount
+ pool = await get_pool()
+ status = await pool.execute(
+ "DELETE FROM webhook_targets WHERE id = $1::uuid AND owner_user_id = $2",
+ target_id, user_id,
+ )
+ if _rowcount(status) == 0:
+ raise HTTPException(status_code=404, detail="Target not found")
+ return {"ok": True}
+
+
+# ── Page Change Monitors ───────────────────────────────────────────────────────
+
+class WatchedPageIn(BaseModel):
+ name: str
+ url: str
+ schedule: str = "0 * * * *"
+ css_selector: Optional[str] = None
+ agent_id: Optional[str] = None
+ notification_mode: str = "agent"
+
+
+class WatchedPageUpdate(BaseModel):
+ name: Optional[str] = None
+ url: Optional[str] = None
+ schedule: Optional[str] = None
+ css_selector: Optional[str] = None
+ agent_id: Optional[str] = None
+ notification_mode: Optional[str] = None
+ enabled: Optional[bool] = None
+
+
+@router.get("/watched-pages")
+async def list_watched_pages(request: Request):
+ user = _require_auth(request)
+ from ..monitors.store import list_watched_pages as _list
+ owner = None if user.is_admin else user.id
+ return await _list(owner_user_id=owner)
+
+
+@router.post("/watched-pages", status_code=201)
+async def create_watched_page(request: Request, body: WatchedPageIn):
+ user = _require_auth(request)
+ if not body.name.strip() or not body.url.strip():
+ raise HTTPException(status_code=400, detail="Name and URL are required")
+ from ..monitors.store import create_watched_page as _create
+ from ..monitors.page_monitor import page_monitor
+ page = await _create(
+ name=body.name.strip(),
+ url=body.url.strip(),
+ schedule=body.schedule,
+ css_selector=body.css_selector or None,
+ agent_id=body.agent_id or None,
+ notification_mode=body.notification_mode,
+ owner_user_id=user.id,
+ )
+ page_monitor.reschedule(page)
+ return page
+
+
+@router.put("/watched-pages/{page_id}")
+async def update_watched_page(request: Request, page_id: str, body: WatchedPageUpdate):
+ user = _require_auth(request)
+ from ..monitors.store import get_watched_page, update_watched_page as _update
+ page = await get_watched_page(page_id)
+ if not page:
+ raise HTTPException(status_code=404, detail="Page not found")
+ if not user.is_admin and str(page.get("owner_user_id")) != user.id:
+ raise HTTPException(status_code=404, detail="Page not found")
+ fields = {k: v for k, v in body.model_dump().items() if v is not None}
+ updated = await _update(page_id, **fields)
+ from ..monitors.page_monitor import page_monitor
+ page_monitor.reschedule(updated)
+ return updated
+
+
+@router.delete("/watched-pages/{page_id}")
+async def delete_watched_page(request: Request, page_id: str):
+ user = _require_auth(request)
+ from ..monitors.store import get_watched_page, delete_watched_page as _delete
+ page = await get_watched_page(page_id)
+ if not page:
+ raise HTTPException(status_code=404, detail="Page not found")
+ if not user.is_admin and str(page.get("owner_user_id")) != user.id:
+ raise HTTPException(status_code=404, detail="Page not found")
+ await _delete(page_id)
+ from ..monitors.page_monitor import page_monitor
+ page_monitor.remove(page_id)
+ return {"ok": True}
+
+
+@router.post("/watched-pages/{page_id}/check-now")
+async def check_page_now(request: Request, page_id: str):
+ user = _require_auth(request)
+ from ..monitors.store import get_watched_page
+ page = await get_watched_page(page_id)
+ if not page:
+ raise HTTPException(status_code=404, detail="Page not found")
+ if not user.is_admin and str(page.get("owner_user_id")) != user.id:
+ raise HTTPException(status_code=404, detail="Page not found")
+ from ..monitors.page_monitor import page_monitor
+ result = await page_monitor.check_now(page_id)
+ return result
+
+
+# ── RSS Feed Monitors ──────────────────────────────────────────────────────────
+
+class RssFeedIn(BaseModel):
+ name: str
+ url: str
+ schedule: str = "0 */4 * * *"
+ agent_id: Optional[str] = None
+ notification_mode: str = "agent"
+ max_items_per_run: int = 5
+
+
+class RssFeedUpdate(BaseModel):
+ name: Optional[str] = None
+ url: Optional[str] = None
+ schedule: Optional[str] = None
+ agent_id: Optional[str] = None
+ notification_mode: Optional[str] = None
+ max_items_per_run: Optional[int] = None
+ enabled: Optional[bool] = None
+
+
+@router.get("/rss-feeds")
+async def list_rss_feeds(request: Request):
+ user = _require_auth(request)
+ from ..monitors.store import list_rss_feeds as _list
+ owner = None if user.is_admin else user.id
+ return await _list(owner_user_id=owner)
+
+
+@router.post("/rss-feeds", status_code=201)
+async def create_rss_feed(request: Request, body: RssFeedIn):
+ user = _require_auth(request)
+ if not body.name.strip() or not body.url.strip():
+ raise HTTPException(status_code=400, detail="Name and URL are required")
+ from ..monitors.store import create_rss_feed as _create
+ from ..monitors.rss_monitor import rss_monitor
+ feed = await _create(
+ name=body.name.strip(),
+ url=body.url.strip(),
+ schedule=body.schedule,
+ agent_id=body.agent_id or None,
+ notification_mode=body.notification_mode,
+ max_items_per_run=body.max_items_per_run,
+ owner_user_id=user.id,
+ )
+ rss_monitor.reschedule(feed)
+ return feed
+
+
+@router.put("/rss-feeds/{feed_id}")
+async def update_rss_feed(request: Request, feed_id: str, body: RssFeedUpdate):
+ user = _require_auth(request)
+ from ..monitors.store import get_rss_feed, update_rss_feed as _update
+ feed = await get_rss_feed(feed_id)
+ if not feed:
+ raise HTTPException(status_code=404, detail="Feed not found")
+ if not user.is_admin and str(feed.get("owner_user_id")) != user.id:
+ raise HTTPException(status_code=404, detail="Feed not found")
+ fields = {k: v for k, v in body.model_dump().items() if v is not None}
+ updated = await _update(feed_id, **fields)
+ from ..monitors.rss_monitor import rss_monitor
+ rss_monitor.reschedule(updated)
+ return updated
+
+
+@router.delete("/rss-feeds/{feed_id}")
+async def delete_rss_feed(request: Request, feed_id: str):
+ user = _require_auth(request)
+ from ..monitors.store import get_rss_feed, delete_rss_feed as _delete
+ feed = await get_rss_feed(feed_id)
+ if not feed:
+ raise HTTPException(status_code=404, detail="Feed not found")
+ if not user.is_admin and str(feed.get("owner_user_id")) != user.id:
+ raise HTTPException(status_code=404, detail="Feed not found")
+ await _delete(feed_id)
+ from ..monitors.rss_monitor import rss_monitor
+ rss_monitor.remove(feed_id)
+ return {"ok": True}
+
+
+@router.post("/rss-feeds/{feed_id}/fetch-now")
+async def fetch_feed_now(request: Request, feed_id: str):
+ user = _require_auth(request)
+ from ..monitors.store import get_rss_feed
+ feed = await get_rss_feed(feed_id)
+ if not feed:
+ raise HTTPException(status_code=404, detail="Feed not found")
+ if not user.is_admin and str(feed.get("owner_user_id")) != user.id:
+ raise HTTPException(status_code=404, detail="Feed not found")
+ from ..monitors.rss_monitor import rss_monitor
+ result = await rss_monitor.fetch_now(feed_id)
+ return result
diff --git a/server/web/static/app.js b/server/web/static/app.js
index 9f2e6d6..65c01c1 100644
--- a/server/web/static/app.js
+++ b/server/web/static/app.js
@@ -216,6 +216,7 @@ function _initPage(url) {
if (path === "/agents") { initAgents(); return; }
if (path.startsWith("/agents/")) { initAgentDetail(); return; }
if (path === "/audit") { initAudit(); return; }
+ if (path === "/monitors") { initMonitors(); return; }
if (path === "/models") { initModels(); return; }
if (path === "/settings") { initSettings(); return; }
if (path === "/help") { initHelp(); return; }
@@ -1312,7 +1313,7 @@ function closeAuditDetail() {
══════════════════════════════════════════════════════════════════════════ */
function switchSettingsTab(name) {
- ["general", "whitelists", "credentials", "inbox", "emailaccounts", "telegram", "system", "brain", "mcp", "security", "branding", "mfa"].forEach(t => {
+ ["general", "whitelists", "credentials", "caldav", "pushover", "inbox", "emailaccounts", "telegram", "system", "brain", "mcp", "security", "branding", "webhooks", "mfa"].forEach(t => {
const pane = document.getElementById(`spane-${t}`);
const btn = document.getElementById(`stab-${t}`);
if (pane) pane.style.display = t === name ? "" : "none";
@@ -1322,6 +1323,9 @@ function switchSettingsTab(name) {
if (name === "emailaccounts") { loadEmailAccounts(); }
if (name === "brain") { loadBrainKey("brain-mcp-key", "brain-mcp-cmd"); loadBrainAutoApprove(); }
if (name === "mfa") { loadTheme(); loadMyProfile(); loadMfaStatus(); loadDataFolder(); }
+ if (name === "webhooks") { loadWebhooks(); loadWebhookTargets(); }
+ if (name === "caldav") { loadAdminCaldav(); }
+ if (name === "pushover") { loadAdminPushover(); }
}
/* ── API Key management ──────────────────────────────────────────────────── */
@@ -1464,6 +1468,8 @@ function switchUserTab(name) {
if (name === "telegram") { loadMyTelegramConfig(); loadMyTelegramWhitelist(); loadMyTelegramTriggers(); }
if (name === "brain") { loadBrainKey("ubrain-mcp-key", "ubrain-mcp-cmd"); loadBrainAutoApprove(); }
if (name === "mfa") { loadTheme(); loadMyProfile(); loadMfaStatus(); loadDataFolder(); }
+ if (name === "webhooks") { loadMyWebhooks(); loadMyWebhookTargets(); }
+ if (name === "pushover") { loadMyPushover(); }
}
async function loadMyProviderKeys() {
@@ -2180,21 +2186,47 @@ async function loadMyCaldavConfig() {
const el = id => document.getElementById(id);
if (el("my-caldav-url")) el("my-caldav-url").value = d.url || "";
if (el("my-caldav-username")) el("my-caldav-username").value = d.username || "";
- if (el("my-caldav-password")) el("my-caldav-password").placeholder = d.password_set ? "••••••••" : "Enter password";
+ if (el("my-caldav-password")) el("my-caldav-password").value = d.password || "";
if (el("my-caldav-calendar-name")) el("my-caldav-calendar-name").value = d.calendar_name || "";
+ // CardDAV
+ const sameChk = el("my-carddav-same");
+ if (sameChk) {
+ sameChk.checked = !!d.carddav_same_as_caldav;
+ _applyCarddavSameState(!!d.carddav_same_as_caldav);
+ }
+ if (el("my-carddav-url")) el("my-carddav-url").value = d.carddav_url || "";
+ if (el("my-carddav-username")) el("my-carddav-username").value = d.carddav_username || "";
+ if (el("my-carddav-password")) el("my-carddav-password").value = d.carddav_password || "";
+ if (el("my-contacts-allow-write")) el("my-contacts-allow-write").checked = !!d.contacts_allow_write;
+}
+
+function _applyCarddavSameState(same) {
+ const fields = document.getElementById("carddav-custom-fields");
+ if (fields) fields.style.display = same ? "none" : "";
+}
+
+function onCarddavSameChange() {
+ const same = document.getElementById("my-carddav-same")?.checked || false;
+ _applyCarddavSameState(same);
}
async function saveMyCaldavConfig() {
const el = id => document.getElementById(id);
+ const same = el("my-carddav-same")?.checked || false;
const payload = {
url: el("my-caldav-url")?.value.trim() || "",
username: el("my-caldav-username")?.value.trim() || "",
password: el("my-caldav-password")?.value || "",
calendar_name: el("my-caldav-calendar-name")?.value.trim() || "",
+ carddav_same_as_caldav: same,
+ carddav_url: same ? "" : (el("my-carddav-url")?.value.trim() || ""),
+ carddav_username: same ? "" : (el("my-carddav-username")?.value.trim() || ""),
+ carddav_password: same ? "" : (el("my-carddav-password")?.value || ""),
+ contacts_allow_write: el("my-contacts-allow-write")?.checked || false,
};
const r = await fetch("/api/my/caldav/config", { method: "POST", headers: { "Content-Type": "application/json" }, body: JSON.stringify(payload) });
- if (r.ok) { showFlash("CalDAV settings saved."); loadMyCaldavConfig(); }
- else { showFlash("Error saving CalDAV settings."); }
+ if (r.ok) { showFlash("CalDAV / CardDAV settings saved."); loadMyCaldavConfig(); }
+ else { showFlash("Error saving settings."); }
}
async function testMyCaldavConfig() {
@@ -2202,18 +2234,192 @@ async function testMyCaldavConfig() {
const res = document.getElementById("caldav-test-result");
if (btn) btn.textContent = "Testing…";
const r = await fetch("/api/my/caldav/test", { method: "POST" });
- if (btn) btn.textContent = "Test connection";
+ if (btn) btn.textContent = "Test CalDAV";
const d = r.ok ? await r.json() : { success: false, message: "Request failed" };
- if (res) {
- res.textContent = d.message;
- res.style.color = d.success ? "var(--green)" : "var(--danger)";
- }
+ if (res) { res.textContent = d.message; res.style.color = d.success ? "var(--green)" : "var(--danger)"; }
+}
+
+async function testMyCarddavConfig() {
+ const btn = document.getElementById("carddav-test-btn");
+ const res = document.getElementById("carddav-test-result");
+ if (btn) btn.textContent = "Testing…";
+ const r = await fetch("/api/my/caldav/test-carddav", { method: "POST" });
+ if (btn) btn.textContent = "Test CardDAV";
+ const d = r.ok ? await r.json() : { success: false, message: "Request failed" };
+ if (res) { res.textContent = d.message; res.style.color = d.success ? "var(--green)" : "var(--danger)"; }
}
async function deleteMyCaldavConfig() {
- if (!confirm("Clear your personal CalDAV settings? The system CalDAV will be used instead.")) return;
+ if (!confirm("Clear all CalDAV / CardDAV settings? System defaults will be used instead.")) return;
const r = await fetch("/api/my/caldav/config", { method: "DELETE" });
- if (r.ok) { showFlash("CalDAV settings cleared."); loadMyCaldavConfig(); }
+ if (r.ok) { showFlash("Settings cleared."); loadMyCaldavConfig(); }
+}
+
+/* ══════════════════════════════════════════════════════════════════════════
+ ADMIN: CalDAV / CardDAV settings tab
+══════════════════════════════════════════════════════════════════════════ */
+
+async function loadAdminCaldav() {
+ // Admin uses the same per-user CalDAV config endpoint as any other user
+ const r = await fetch("/api/my/caldav/config");
+ if (!r.ok) return;
+ const d = await r.json();
+ const el = id => document.getElementById(id);
+ // Response field names from /api/my/caldav/config:
+ // url, username, password_set, calendar_name, carddav_same_as_caldav (bool),
+ // carddav_url, carddav_username, carddav_password_set, contacts_allow_write (bool)
+ if (el("adm-caldav-host")) el("adm-caldav-host").value = d.url || "";
+ if (el("adm-caldav-username")) el("adm-caldav-username").value = d.username || "";
+ if (el("adm-caldav-password")) el("adm-caldav-password").value = d.password || "";
+ if (el("adm-caldav-calendar-name")) el("adm-caldav-calendar-name").value = d.calendar_name || "";
+ const same = d.carddav_same_as_caldav === true;
+ if (el("adm-carddav-same")) el("adm-carddav-same").checked = same;
+ _applyAdmCarddavSameState(same);
+ if (!same) {
+ if (el("adm-carddav-url")) el("adm-carddav-url").value = d.carddav_url || "";
+ if (el("adm-carddav-username")) el("adm-carddav-username").value = d.carddav_username || "";
+ if (el("adm-carddav-password")) el("adm-carddav-password").value = d.carddav_password || "";
+ }
+ if (el("adm-contacts-allow-write")) el("adm-contacts-allow-write").checked = d.contacts_allow_write === true;
+}
+
+function _applyAdmCarddavSameState(same) {
+ const fields = document.getElementById("adm-carddav-custom-fields");
+ if (fields) fields.style.display = same ? "none" : "";
+}
+
+function onAdmCarddavSameChange() {
+ const same = document.getElementById("adm-carddav-same")?.checked || false;
+ _applyAdmCarddavSameState(same);
+}
+
+async function saveAdminCaldav() {
+ const el = id => document.getElementById(id);
+ const same = el("adm-carddav-same")?.checked || false;
+ const payload = {
+ url: el("adm-caldav-host")?.value.trim() || "",
+ username: el("adm-caldav-username")?.value.trim() || "",
+ password: el("adm-caldav-password")?.value || "",
+ calendar_name: el("adm-caldav-calendar-name")?.value.trim() || "",
+ carddav_same_as_caldav: same,
+ carddav_url: same ? "" : (el("adm-carddav-url")?.value.trim() || ""),
+ carddav_username: same ? "" : (el("adm-carddav-username")?.value.trim() || ""),
+ carddav_password: same ? "" : (el("adm-carddav-password")?.value || ""),
+ contacts_allow_write: el("adm-contacts-allow-write")?.checked || false,
+ };
+ // Don't send placeholder passwords
+ if (payload.password === "••••••••") delete payload.password;
+ if (payload.carddav_password === "••••••••") delete payload.carddav_password;
+ const r = await fetch("/api/my/caldav/config", {
+ method: "POST", headers: { "Content-Type": "application/json" }, body: JSON.stringify(payload),
+ });
+ if (r.ok) { showFlash("CalDAV / CardDAV settings saved."); loadAdminCaldav(); }
+ else { const d = await r.json().catch(() => ({})); showFlash(d.detail || "Error saving settings."); }
+}
+
+async function testAdminCaldav() {
+ const btn = document.getElementById("adm-caldav-test-btn");
+ const res = document.getElementById("adm-caldav-test-result");
+ if (btn) btn.disabled = true;
+ if (res) res.textContent = "Testing…";
+ try {
+ const r = await fetch("/api/my/caldav/test", { method: "POST" });
+ const d = await r.json();
+ if (res) res.innerHTML = d.success
+ ? `✓ Connected — ${esc(d.message || "")} `
+ : `✗ ${esc(d.message || "Failed")} `;
+ } catch (e) {
+ if (res) res.innerHTML = `✗ ${esc(String(e))} `;
+ } finally {
+ if (btn) btn.disabled = false;
+ }
+}
+
+async function testAdminCarddav() {
+ const btn = document.getElementById("adm-carddav-test-btn");
+ const res = document.getElementById("adm-carddav-test-result");
+ if (btn) btn.disabled = true;
+ if (res) res.textContent = "Testing…";
+ try {
+ const r = await fetch("/api/my/caldav/test-carddav", { method: "POST" });
+ const d = await r.json();
+ if (res) res.innerHTML = d.success
+ ? `✓ Connected — ${esc(d.message || "")} `
+ : `✗ ${esc(d.message || "Failed")} `;
+ } catch (e) {
+ if (res) res.innerHTML = `✗ ${esc(String(e))} `;
+ } finally {
+ if (btn) btn.disabled = false;
+ }
+}
+
+/* ══════════════════════════════════════════════════════════════════════════
+ ADMIN: Pushover tab
+══════════════════════════════════════════════════════════════════════════ */
+
+async function loadAdminPushover() {
+ const r = await fetch("/api/settings/pushover");
+ if (!r.ok) return;
+ const d = await r.json();
+ const tokenEl = document.getElementById("adm-pushover-app-token");
+ const keyEl = document.getElementById("adm-pushover-user-key");
+ const tokenSt = document.getElementById("adm-pushover-app-token-status");
+ const keySt = document.getElementById("adm-pushover-user-key-status");
+ if (tokenEl) tokenEl.value = ""; // never pre-fill secrets
+ if (keyEl) keyEl.value = "";
+ if (tokenSt) tokenSt.innerHTML = d.app_token_set
+ ? `✓ App token is configured `
+ : `Not set `;
+ if (keySt) keySt.innerHTML = d.user_key_set
+ ? `✓ Admin user key is configured `
+ : `Not set — admin will use system-level key if set `;
+}
+
+async function saveAdminPushover() {
+ const payload = {};
+ const tokenVal = document.getElementById("adm-pushover-app-token")?.value.trim();
+ const keyVal = document.getElementById("adm-pushover-user-key")?.value.trim();
+ if (tokenVal) payload.app_token = tokenVal;
+ if (keyVal) payload.user_key = keyVal;
+ if (!Object.keys(payload).length) { showFlash("Enter at least one value to save."); return; }
+ const r = await fetch("/api/settings/pushover", {
+ method: "POST", headers: { "Content-Type": "application/json" }, body: JSON.stringify(payload),
+ });
+ if (r.ok) { showFlash("Pushover settings saved."); loadAdminPushover(); }
+ else { const d = await r.json().catch(() => ({})); showFlash(d.detail || "Error saving."); }
+}
+
+/* ══════════════════════════════════════════════════════════════════════════
+ USER: Pushover tab
+══════════════════════════════════════════════════════════════════════════ */
+
+async function loadMyPushover() {
+ const r = await fetch("/api/my/pushover");
+ if (!r.ok) return;
+ const d = await r.json();
+ const st = document.getElementById("my-pushover-status");
+ const inp = document.getElementById("my-pushover-user-key");
+ if (inp) inp.value = "";
+ if (st) st.innerHTML = d.user_key_set
+ ? `✓ Your Pushover key is configured `
+ : `Not set — notifications will use the admin's key (if configured) `;
+}
+
+async function saveMyPushover() {
+ const val = document.getElementById("my-pushover-user-key")?.value.trim();
+ if (!val) { showFlash("Enter your Pushover user key first."); return; }
+ const r = await fetch("/api/my/pushover", {
+ method: "POST", headers: { "Content-Type": "application/json" }, body: JSON.stringify({ user_key: val }),
+ });
+ if (r.ok) { showFlash("Pushover key saved."); document.getElementById("my-pushover-user-key").value = ""; loadMyPushover(); }
+ else { const d = await r.json().catch(() => ({})); showFlash(d.detail || "Error saving."); }
+}
+
+async function deleteMyPushover() {
+ if (!confirm("Remove your Pushover user key?")) return;
+ const r = await fetch("/api/my/pushover", { method: "DELETE" });
+ if (r.ok) { showFlash("Pushover key removed."); loadMyPushover(); }
+ else { showFlash("Error removing key."); }
}
function togglePasswordVisibility(inputId, btn) {
@@ -2303,9 +2509,11 @@ function initSettings() {
e.preventDefault();
const tc = parseInt(document.getElementById("lim-tool-calls").value, 10);
const rph = parseInt(document.getElementById("lim-runs-per-hour").value, 10);
+ const mcr = parseInt(document.getElementById("lim-concurrent-runs").value, 10);
const body = {};
if (!isNaN(tc)) body.max_tool_calls = tc;
if (!isNaN(rph)) body.max_autonomous_runs_per_hour = rph;
+ if (!isNaN(mcr)) body.max_concurrent_runs = mcr;
const r = await fetch("/api/settings/limits", {
method: "POST",
headers: { "Content-Type": "application/json" },
@@ -2568,15 +2776,18 @@ async function loadLimits() {
try {
const r = await fetch("/api/settings/limits");
const data = await r.json();
- const tcEl = document.getElementById("lim-tool-calls");
+ const tcEl = document.getElementById("lim-tool-calls");
const rphEl = document.getElementById("lim-runs-per-hour");
+ const mcrEl = document.getElementById("lim-concurrent-runs");
const defaultsEl = document.getElementById("limits-defaults");
- if (tcEl) tcEl.value = data.max_tool_calls;
+ if (tcEl) tcEl.value = data.max_tool_calls;
if (rphEl) rphEl.value = data.max_autonomous_runs_per_hour;
+ if (mcrEl) mcrEl.value = data.max_concurrent_runs;
if (defaultsEl) {
defaultsEl.textContent =
`.env defaults: max_tool_calls=${data.defaults.max_tool_calls}, ` +
- `max_autonomous_runs_per_hour=${data.defaults.max_autonomous_runs_per_hour}`;
+ `max_autonomous_runs_per_hour=${data.defaults.max_autonomous_runs_per_hour}, ` +
+ `max_concurrent_runs=${data.defaults.max_concurrent_runs}`;
}
} catch { /* ignore */ }
}
@@ -2770,85 +2981,72 @@ function _titleCase(str) {
return str.split("_").map(w => w.charAt(0).toUpperCase() + w.slice(1)).join(" ");
}
-// The exact set of credentials managed on the Credentials tab.
-// Keys owned by Inbox, Telegram, Brain, Security, Branding, or API tabs are excluded.
-const _CREDENTIALS_TAB_KEYS = [
- { key: "mailcow_host", label: "Mailcow Host", usedBy: "CalDAV", note: "SOGo/CalDAV hostname, e.g. example.com" },
- { key: "mailcow_username", label: "Mailcow Username", usedBy: "CalDAV, Email", note: "Full email address, e.g. you@example.com" },
- { key: "mailcow_password", label: "Mailcow Password", usedBy: "CalDAV, Email", note: "Account or app-specific password" },
- { key: "caldav_calendar_name", label: "Calendar Name", usedBy: "CalDAV", note: "Optional; omit to use first calendar found" },
- { key: "mailcow_imap_host", label: "IMAP Host", usedBy: "Email", note: "IMAP hostname if different from mailcow_host" },
- { key: "mailcow_smtp_host", label: "SMTP Host", usedBy: "Email", note: "SMTP hostname if different from mailcow_host" },
- { key: "mailcow_smtp_port", label: "SMTP Port", usedBy: "Email", note: "SMTP port (default: 465)" },
- { key: "pushover_user_key", label: "Pushover User Key", usedBy: "Pushover", note: "Your Pushover user key (from pushover.net dashboard)" },
- { key: "pushover_app_token", label: "Pushover App Token", usedBy: "Pushover", note: "Application API token from pushover.net" },
- { key: "system:trusted_proxy_ips", label: "Trusted Proxy IPs", usedBy: "Network", note: "Reverse proxy IP(s) - managed via Settings - General; requires restart" },
-];
-
-const _KNOWN_CRED_KEYS = new Set(_CREDENTIALS_TAB_KEYS.map(d => d.key));
+// Keys managed in dedicated tabs — excluded from the generic Credentials list.
+const _DEDICATED_CRED_KEYS = new Set([
+ // CalDAV / CardDAV tab
+ "mailcow_host", "mailcow_username", "mailcow_password", "caldav_calendar_name",
+ "contacts:allow_write", "carddav_same_as_caldav", "carddav_url", "carddav_username", "carddav_password",
+ "mailcow_imap_host", "mailcow_smtp_host", "mailcow_smtp_port",
+ // Pushover tab
+ "pushover_app_token", "pushover_user_key",
+ // Trusted proxy — General tab
+ "system:trusted_proxy_ips",
+ // These are managed by Inbox / Telegram / Brain / Security / Branding tabs
+]);
async function reloadCredList() {
const r = await fetch("/api/credentials");
if (!r.ok) return;
const rows = await r.json();
- const stored = new Map(rows.map(row => [typeof row === "string" ? row : row.key, row]));
const list = document.getElementById("cred-list");
if (!list) return;
list.innerHTML = "";
- for (const def of _CREDENTIALS_TAB_KEYS) {
- const k = def.key;
- const isSet = stored.has(k);
- const row = stored.get(k) || {};
- const desc = row.description || "";
+ // Filter out keys that have dedicated management tabs
+ const display = rows.filter(row => {
+ const k = typeof row === "string" ? row : row.key;
+ return !_DEDICATED_CRED_KEYS.has(k);
+ });
+
+ if (!display.length) {
+ list.innerHTML = `
No custom credentials stored. Use "Add Credential" to add one. `;
+ return;
+ }
+
+ for (const row of display) {
+ const k = typeof row === "string" ? row : row.key;
+ const desc = (typeof row === "object" ? row.description : "") || "";
const tr = document.createElement("tr");
- // Key column
const tdKey = document.createElement("td");
- tdKey.innerHTML = `${esc(def.label)} ` +
- `${esc(k)} ` +
- `${esc(def.note)} `;
+ tdKey.innerHTML = `${esc(k)}`;
- // Used by column
- const tdUsed = document.createElement("td");
- tdUsed.style.cssText = "color:var(--text-dim);font-size:12px;white-space:nowrap";
- tdUsed.textContent = def.usedBy;
+ const tdDesc = document.createElement("td");
+ tdDesc.style.cssText = "color:var(--text-dim);font-size:12px";
+ tdDesc.textContent = desc;
- // Status column
const tdStatus = document.createElement("td");
tdStatus.style.cssText = "font-size:12px";
- tdStatus.innerHTML = isSet
- ? `✓ Set `
- : `Not set `;
+ tdStatus.innerHTML = `✓ Set `;
- // Actions column
const tdAct = document.createElement("td");
- if (k === "system:trusted_proxy_ips") {
- const span = document.createElement("span");
- span.style.cssText = "color:var(--text-dim);font-size:12px";
- span.textContent = "via General tab";
- tdAct.appendChild(span);
- } else {
- const editBtn = document.createElement("button");
- editBtn.className = "btn btn-ghost";
- editBtn.style.cssText = "padding:4px 10px;font-size:12px;margin-right:6px";
- editBtn.textContent = isSet ? "Edit" : "Set";
- editBtn.addEventListener("click", () => openCredModal(k, desc));
- tdAct.appendChild(editBtn);
- if (isSet) {
- const delBtn = document.createElement("button");
- delBtn.className = "btn btn-danger";
- delBtn.style.cssText = "padding:4px 10px;font-size:12px";
- delBtn.textContent = "Delete";
- delBtn.addEventListener("click", () => deleteCred(k));
- tdAct.appendChild(delBtn);
- }
- }
+ const editBtn = document.createElement("button");
+ editBtn.className = "btn btn-ghost";
+ editBtn.style.cssText = "padding:4px 10px;font-size:12px;margin-right:6px";
+ editBtn.textContent = "Edit";
+ editBtn.addEventListener("click", () => openCredModal(k, desc));
+ const delBtn = document.createElement("button");
+ delBtn.className = "btn btn-danger";
+ delBtn.style.cssText = "padding:4px 10px;font-size:12px";
+ delBtn.textContent = "Delete";
+ delBtn.addEventListener("click", () => deleteCred(k));
+ tdAct.appendChild(editBtn);
+ tdAct.appendChild(delBtn);
tr.appendChild(tdKey);
- tr.appendChild(tdUsed);
+ tr.appendChild(tdDesc);
tr.appendChild(tdStatus);
tr.appendChild(tdAct);
list.appendChild(tr);
@@ -2862,9 +3060,7 @@ function _isSensitiveCredKey(key) {
async function openCredModal(key = null, desc = "") {
const modal = document.getElementById("cred-modal");
const title = document.getElementById("cred-modal-title");
- const keySelect = document.getElementById("cred-modal-key-select");
- const custGroup = document.getElementById("cred-modal-custom-group");
- const custInput = document.getElementById("cred-modal-key-custom");
+ const keyInput = document.getElementById("cred-modal-key-custom");
const valInput = document.getElementById("cred-modal-value");
const descInput = document.getElementById("cred-modal-desc");
@@ -2874,17 +3070,9 @@ async function openCredModal(key = null, desc = "") {
if (key) {
title.textContent = "Edit Credential";
- keySelect.disabled = true;
- custInput.disabled = true;
- if (_KNOWN_CRED_KEYS.has(key)) {
- keySelect.value = key;
- custGroup.style.display = "none";
- } else {
- keySelect.value = "__custom__";
- custInput.value = key;
- custGroup.style.display = "";
- }
- // Pre-fill existing value — always fetch so the eye button works
+ keyInput.value = key;
+ keyInput.readOnly = true;
+ // Pre-fill existing value
try {
const r = await fetch(`/api/credentials/${encodeURIComponent(key)}`);
if (r.ok) {
@@ -2894,12 +3082,9 @@ async function openCredModal(key = null, desc = "") {
} catch { /* leave blank */ }
} else {
title.textContent = "Add Credential";
- keySelect.disabled = false;
- keySelect.value = "";
- custInput.disabled = false;
- custInput.value = "";
- custGroup.style.display = "none";
- descInput.value = "";
+ keyInput.readOnly = false;
+ keyInput.value = "";
+ descInput.value = "";
}
modal.classList.remove("hidden");
}
@@ -2908,22 +3093,13 @@ function closeCredModal() {
document.getElementById("cred-modal").classList.add("hidden");
}
-function onCredKeySelectChange() {
- const sel = document.getElementById("cred-modal-key-select");
- const grp = document.getElementById("cred-modal-custom-group");
- if (grp) grp.style.display = sel.value === "__custom__" ? "" : "none";
-}
-
async function saveCredModal() {
- const keySelect = document.getElementById("cred-modal-key-select");
- const custInput = document.getElementById("cred-modal-key-custom");
+ const keyInput = document.getElementById("cred-modal-key-custom");
const valInput = document.getElementById("cred-modal-value");
const descInput = document.getElementById("cred-modal-desc");
- const key = keySelect.value === "__custom__"
- ? custInput.value.trim()
- : keySelect.value.trim();
- if (!key) { alert("Please select or enter a credential name."); return; }
+ const key = keyInput.value.trim();
+ if (!key) { alert("Please enter a credential key name."); return; }
const value = valInput.value;
if (!value) { alert("Please enter a value."); return; }
@@ -4989,6 +5165,560 @@ async function clearUserMfa(userId, username) {
loadUsers();
}
+/* ── Monitors page ───────────────────────────────────────────────────────── */
+
+let _pages = [];
+let _feeds = [];
+let _editingPageId = null;
+let _editingFeedId = null;
+
+function switchMonitorTab(name) {
+ ["pages", "rss"].forEach(t => {
+ const pane = document.getElementById(`mpane-${t}`);
+ const btn = document.getElementById(`mtab-${t}`);
+ if (pane) pane.style.display = t === name ? "" : "none";
+ if (btn) btn.classList.toggle("active", t === name);
+ });
+}
+
+function initMonitors() {
+ loadPages();
+ loadFeeds();
+}
+
+/* ── Page Watchers ── */
+
+async function loadPages() {
+ const tbody = document.getElementById("pages-tbody");
+ if (!tbody) return;
+ const r = await fetch("/api/watched-pages");
+ if (!r.ok) { tbody.innerHTML = 'Error loading pages '; return; }
+ _pages = await r.json();
+ if (!_pages.length) {
+ tbody.innerHTML = 'No page watchers configured. ';
+ return;
+ }
+ tbody.innerHTML = _pages.map(p => {
+ const statusColor = p.last_error ? 'var(--red)' : (p.last_content_hash ? 'var(--green)' : 'var(--text-dim)');
+ const statusText = p.last_error ? 'error' : (p.last_content_hash ? 'ok' : 'pending');
+ return `
+ ${esc(p.name)}
+ ${esc(p.url)}
+ ${esc(p.schedule)}
+ ${statusText}${!p.enabled ? ' (disabled)' : ''}
+ ${p.last_checked_at ? formatDateShort(p.last_checked_at) : '—'}
+ ${p.last_changed_at ? formatDateShort(p.last_changed_at) : '—'}
+
+ Edit
+ Check now
+ Delete
+
+ `;
+ }).join("");
+}
+
+async function openPageModal(id) {
+ _editingPageId = id || null;
+ const p = id ? _pages.find(x => x.id === id) : null;
+ document.getElementById("page-modal-title").textContent = id ? "Edit Page Watcher" : "Add Page Watcher";
+ document.getElementById("page-modal-id").value = id || "";
+ document.getElementById("page-modal-name").value = p?.name || "";
+ document.getElementById("page-modal-url").value = p?.url || "";
+ document.getElementById("page-modal-selector").value = p?.css_selector || "";
+ document.getElementById("page-modal-schedule").value = p?.schedule || "0 * * * *";
+ document.getElementById("page-modal-mode").value = p?.notification_mode || "agent";
+ await _populateMonitorAgentSelect("page-modal-agent", p?.agent_id);
+ document.getElementById("page-modal").style.display = "";
+}
+
+function closePageModal() { document.getElementById("page-modal").style.display = "none"; }
+
+async function savePage() {
+ const name = document.getElementById("page-modal-name").value.trim();
+ const url = document.getElementById("page-modal-url").value.trim();
+ if (!name || !url) { showFlash("Name and URL are required."); return; }
+ const payload = {
+ name, url,
+ css_selector: document.getElementById("page-modal-selector").value.trim() || null,
+ schedule: document.getElementById("page-modal-schedule").value.trim() || "0 * * * *",
+ notification_mode: document.getElementById("page-modal-mode").value,
+ agent_id: document.getElementById("page-modal-agent").value || null,
+ };
+ const id = _editingPageId;
+ const r = await fetch(id ? `/api/watched-pages/${id}` : "/api/watched-pages", {
+ method: id ? "PUT" : "POST",
+ headers: { "Content-Type": "application/json" },
+ body: JSON.stringify(payload),
+ });
+ if (!r.ok) { const e = await r.json().catch(() => ({})); showFlash("Error: " + (e.detail || r.status)); return; }
+ closePageModal();
+ showFlash(id ? "Page watcher updated." : "Page watcher added.");
+ loadPages();
+}
+
+async function deletePage(id) {
+ if (!confirm("Delete this page watcher?")) return;
+ const r = await fetch(`/api/watched-pages/${id}`, { method: "DELETE" });
+ if (r.ok) { showFlash("Deleted."); loadPages(); }
+}
+
+async function checkPageNow(id) {
+ const r = await fetch(`/api/watched-pages/${id}/check-now`, { method: "POST" });
+ const d = await r.json();
+ if (d.error) { showFlash("Error: " + d.error); }
+ else if (d.first_check) { showFlash("First check done — baseline recorded."); }
+ else { showFlash(d.changed ? "Change detected! Agent triggered." : "No change detected."); }
+ loadPages();
+}
+
+/* ── RSS Feeds ── */
+
+async function loadFeeds() {
+ const tbody = document.getElementById("feeds-tbody");
+ if (!tbody) return;
+ const r = await fetch("/api/rss-feeds");
+ if (!r.ok) { tbody.innerHTML = 'Error loading feeds '; return; }
+ _feeds = await r.json();
+ if (!_feeds.length) {
+ tbody.innerHTML = 'No RSS feeds configured. ';
+ return;
+ }
+ tbody.innerHTML = _feeds.map(f => {
+ const statusColor = f.last_error ? 'var(--red)' : (f.last_fetched_at ? 'var(--green)' : 'var(--text-dim)');
+ const statusText = f.last_error ? 'error' : (f.last_fetched_at ? 'ok' : 'pending');
+ return `
+ ${esc(f.name)}
+ ${esc(f.url)}
+ ${esc(f.schedule)}
+ ${statusText}${!f.enabled ? ' (disabled)' : ''}
+ ${f.last_fetched_at ? formatDateShort(f.last_fetched_at) : '—'}
+
+ Edit
+ Fetch now
+ Delete
+
+ `;
+ }).join("");
+}
+
+async function openFeedModal(id) {
+ _editingFeedId = id || null;
+ const f = id ? _feeds.find(x => x.id === id) : null;
+ document.getElementById("feed-modal-title").textContent = id ? "Edit RSS Feed" : "Add RSS Feed";
+ document.getElementById("feed-modal-id").value = id || "";
+ document.getElementById("feed-modal-name").value = f?.name || "";
+ document.getElementById("feed-modal-url").value = f?.url || "";
+ document.getElementById("feed-modal-schedule").value = f?.schedule || "0 */4 * * *";
+ document.getElementById("feed-modal-max-items").value = f?.max_items_per_run || 5;
+ document.getElementById("feed-modal-mode").value = f?.notification_mode || "agent";
+ await _populateMonitorAgentSelect("feed-modal-agent", f?.agent_id);
+ document.getElementById("feed-modal").style.display = "";
+}
+
+function closeFeedModal() { document.getElementById("feed-modal").style.display = "none"; }
+
+async function saveFeed() {
+ const name = document.getElementById("feed-modal-name").value.trim();
+ const url = document.getElementById("feed-modal-url").value.trim();
+ if (!name || !url) { showFlash("Name and URL are required."); return; }
+ const payload = {
+ name, url,
+ schedule: document.getElementById("feed-modal-schedule").value.trim() || "0 */4 * * *",
+ max_items_per_run: parseInt(document.getElementById("feed-modal-max-items").value) || 5,
+ notification_mode: document.getElementById("feed-modal-mode").value,
+ agent_id: document.getElementById("feed-modal-agent").value || null,
+ };
+ const id = _editingFeedId;
+ const r = await fetch(id ? `/api/rss-feeds/${id}` : "/api/rss-feeds", {
+ method: id ? "PUT" : "POST",
+ headers: { "Content-Type": "application/json" },
+ body: JSON.stringify(payload),
+ });
+ if (!r.ok) { const e = await r.json().catch(() => ({})); showFlash("Error: " + (e.detail || r.status)); return; }
+ closeFeedModal();
+ showFlash(id ? "Feed updated." : "Feed added.");
+ loadFeeds();
+}
+
+async function deleteFeed(id) {
+ if (!confirm("Delete this RSS feed?")) return;
+ const r = await fetch(`/api/rss-feeds/${id}`, { method: "DELETE" });
+ if (r.ok) { showFlash("Deleted."); loadFeeds(); }
+}
+
+async function fetchFeedNow(id) {
+ const r = await fetch(`/api/rss-feeds/${id}/fetch-now`, { method: "POST" });
+ const d = await r.json();
+ if (d.error) { showFlash("Error: " + d.error); }
+ else { showFlash(`Fetched. ${d.new_items || 0} new item(s) found.`); }
+ loadFeeds();
+}
+
+async function _populateMonitorAgentSelect(selectId, selected) {
+ const sel = document.getElementById(selectId);
+ if (!sel) return;
+ sel.innerHTML = '— no agent — ';
+ try {
+ const r = await fetch("/api/agents");
+ const agents = r.ok ? await r.json() : [];
+ agents.forEach(a => {
+ const opt = document.createElement("option");
+ opt.value = a.id;
+ opt.textContent = a.name;
+ if (a.id === selected) opt.selected = true;
+ sel.appendChild(opt);
+ });
+ } catch (_) { /* ignore */ }
+}
+
+
+/* ── Webhooks ────────────────────────────────────────────────────────────── */
+
+let _webhooks = [];
+let _webhookTargets = [];
+let _editingWebhookId = null;
+let _editingWebhookTargetId = null;
+
+async function loadWebhooks() {
+ const tbody = document.getElementById("webhooks-tbody");
+ if (!tbody) return;
+ const r = await fetch("/api/webhooks");
+ if (!r.ok) { tbody.innerHTML = 'Error loading webhooks '; return; }
+ _webhooks = await r.json();
+ if (!_webhooks.length) {
+ tbody.innerHTML = 'No webhook endpoints configured. ';
+ return;
+ }
+ tbody.innerHTML = _webhooks.map(w => `
+
+ ${esc(w.name)}
+ ${esc(w.agent_id || '—')}
+ ${w.enabled ? 'enabled' : 'disabled'}
+ ${w.trigger_count || 0}
+ ${w.last_triggered_at ? formatDate(w.last_triggered_at) : '—'}
+
+ Edit
+ Rotate token
+ Delete
+
+ `).join("");
+}
+
+async function openWebhookModal(id) {
+ _editingWebhookId = id || null;
+ const w = id ? _webhooks.find(x => x.id === id) : null;
+ document.getElementById("webhook-modal-title").textContent = id ? "Edit Webhook Endpoint" : "Add Webhook Endpoint";
+ document.getElementById("webhook-modal-id").value = id || "";
+ document.getElementById("webhook-modal-name").value = w?.name || "";
+ document.getElementById("webhook-modal-desc").value = w?.description || "";
+ document.getElementById("webhook-modal-allow-get").checked = w ? !!w.allow_get : true;
+ await _populateWebhookAgentSelect(w?.agent_id);
+ document.getElementById("webhook-modal").style.display = "";
+}
+
+async function _populateWebhookAgentSelect(selected) {
+ const sel = document.getElementById("webhook-modal-agent");
+ if (!sel) return;
+ sel.innerHTML = '— no agent — ';
+ try {
+ const r = await fetch("/api/agents");
+ const agents = r.ok ? await r.json() : [];
+ agents.forEach(a => {
+ const opt = document.createElement("option");
+ opt.value = a.id;
+ opt.textContent = a.name;
+ if (a.id === selected) opt.selected = true;
+ sel.appendChild(opt);
+ });
+ } catch (_) { /* ignore */ }
+}
+
+function closeWebhookModal() {
+ document.getElementById("webhook-modal").style.display = "none";
+}
+
+// Create or update an admin webhook endpoint from the modal fields.
+// POST /api/webhooks for new endpoints, PUT /api/webhooks/{id} when editing
+// (_editingWebhookId decides). On creation the server returns the secret
+// token exactly once; it is shown immediately via _showWebhookTokenModal.
+async function saveWebhook() {
+ const name = document.getElementById("webhook-modal-name").value.trim();
+ if (!name) { showFlash("Name is required."); return; }
+ const agent_id = document.getElementById("webhook-modal-agent").value;
+ const description = document.getElementById("webhook-modal-desc").value.trim();
+ const allow_get = document.getElementById("webhook-modal-allow-get").checked;
+ const payload = { name, agent_id, description, allow_get };
+ const id = _editingWebhookId;
+ const url = id ? `/api/webhooks/${id}` : "/api/webhooks";
+ const method = id ? "PUT" : "POST";
+ const r = await fetch(url, { method, headers: { "Content-Type": "application/json" }, body: JSON.stringify(payload) });
+ if (!r.ok) { const e = await r.json().catch(() => ({})); showFlash("Error: " + (e.detail || r.status)); return; }
+ const data = await r.json();
+ closeWebhookModal();
+ showFlash(id ? "Webhook updated." : "Webhook created.");
+ // The token field is only present on creation — display it once.
+ if (!id && data.token) {
+ _showWebhookTokenModal(data.token);
+ }
+ loadWebhooks();
+}
+
+// Delete an admin webhook endpoint after confirmation; refreshes the table on
+// success, otherwise surfaces the server's error detail.
+async function deleteWebhook(id) {
+ if (!confirm("Delete this webhook endpoint?")) return;
+ const r = await fetch(`/api/webhooks/${id}`, { method: "DELETE" });
+ if (r.ok) { showFlash("Deleted."); loadWebhooks(); }
+ else { const e = await r.json().catch(() => ({})); showFlash("Error: " + (e.detail || r.status)); }
+}
+
+// Regenerate the secret token for an endpoint (admin). The old token is
+// invalidated server-side immediately; the new one is returned once and shown
+// in the token modal.
+async function rotateWebhookToken(id) {
+ if (!confirm("Rotate the token for this webhook? The old token will stop working immediately.")) return;
+ const r = await fetch(`/api/webhooks/${id}/rotate`, { method: "POST" });
+ if (!r.ok) { const e = await r.json().catch(() => ({})); showFlash("Error: " + (e.detail || r.status)); return; }
+ const data = await r.json();
+ showFlash("Token rotated.");
+ _showWebhookTokenModal(data.token);
+}
+
+// Show the one-time token modal with ready-made POST and GET trigger URLs.
+// Uses location.origin, which assumes the /webhook/{token} routes are served
+// from the same host as this UI.
+function _showWebhookTokenModal(token) {
+ document.getElementById("webhook-token-value").value = token;
+ const base = location.origin;
+ document.getElementById("webhook-token-url-post").textContent = `POST ${base}/webhook/${token}`;
+ document.getElementById("webhook-token-url-get").textContent = `GET ${base}/webhook/${token}?q=your+message`;
+ document.getElementById("webhook-token-modal").style.display = "";
+}
+
+// Dismiss the one-time token modal (the token cannot be shown again).
+function closeWebhookTokenModal() {
+ document.getElementById("webhook-token-modal").style.display = "none";
+}
+
+function copyWebhookToken() {
+ const val = document.getElementById("webhook-token-value").value;
+ navigator.clipboard.writeText(val).then(() => showFlash("Copied!"));
+}
+
+/* Webhook targets (outbound) */
+
+// Load outbound webhook targets into the admin table and cache them in
+// _webhookTargets for the edit modal.
+// NOTE(review): the row markup inside the template literal appears stripped
+// in this view — verify the rendered HTML against the actual template.
+async function loadWebhookTargets() {
+ const tbody = document.getElementById("webhook-targets-tbody");
+ if (!tbody) return;
+ const r = await fetch("/api/webhook-targets");
+ if (!r.ok) { tbody.innerHTML = 'Error loading targets '; return; }
+ _webhookTargets = await r.json();
+ if (!_webhookTargets.length) {
+ tbody.innerHTML = 'No outbound targets configured. ';
+ return;
+ }
+ tbody.innerHTML = _webhookTargets.map(t => `
+
+ ${esc(t.name)}
+ ${esc(t.url)}
+ ${t.enabled ? 'enabled' : 'disabled'}
+
+ Edit
+ Delete
+
+ `).join("");
+}
+
+// Open the admin outbound-target modal, blank or pre-filled from the cached
+// _webhookTargets list. The secret field is always cleared — the stored
+// secret is never echoed back to the browser.
+function openWebhookTargetModal(id) {
+ _editingWebhookTargetId = id || null;
+ const t = id ? _webhookTargets.find(x => x.id === id) : null;
+ document.getElementById("webhook-target-modal-title").textContent = id ? "Edit Webhook Target" : "Add Webhook Target";
+ document.getElementById("webhook-target-modal-id").value = id || "";
+ document.getElementById("webhook-target-modal-name").value = t?.name || "";
+ document.getElementById("webhook-target-modal-url").value = t?.url || "";
+ document.getElementById("webhook-target-modal-secret").value = "";
+ document.getElementById("webhook-target-modal").style.display = "";
+}
+
+// Hide the admin outbound-target modal without saving.
+function closeWebhookTargetModal() {
+ document.getElementById("webhook-target-modal").style.display = "none";
+}
+
+// Create or update an admin outbound webhook target from the modal fields.
+// POST /api/webhook-targets for new targets, PUT /api/webhook-targets/{id}
+// when editing. secret_header is sent as typed (may be empty).
+async function saveWebhookTarget() {
+ const name = document.getElementById("webhook-target-modal-name").value.trim();
+ const url = document.getElementById("webhook-target-modal-url").value.trim();
+ if (!name || !url) { showFlash("Name and URL are required."); return; }
+ const secret_header = document.getElementById("webhook-target-modal-secret").value;
+ const payload = { name, url, secret_header };
+ const id = _editingWebhookTargetId;
+ const apiUrl = id ? `/api/webhook-targets/${id}` : "/api/webhook-targets";
+ const method = id ? "PUT" : "POST";
+ const r = await fetch(apiUrl, { method, headers: { "Content-Type": "application/json" }, body: JSON.stringify(payload) });
+ if (!r.ok) { const e = await r.json().catch(() => ({})); showFlash("Error: " + (e.detail || r.status)); return; }
+ closeWebhookTargetModal();
+ showFlash(id ? "Target updated." : "Target created.");
+ loadWebhookTargets();
+}
+
+// Delete an admin outbound target after confirmation; refreshes the table on
+// success, otherwise surfaces the server's error detail.
+async function deleteWebhookTarget(id) {
+ if (!confirm("Delete this webhook target?")) return;
+ const r = await fetch(`/api/webhook-targets/${id}`, { method: "DELETE" });
+ if (r.ok) { showFlash("Deleted."); loadWebhookTargets(); }
+ else { const e = await r.json().catch(() => ({})); showFlash("Error: " + (e.detail || r.status)); }
+}
+
+
+/* ── User webhooks (non-admin) ───────────────────────────────────────────── */
+
+// Cached list of the current user's webhooks, and the id being edited in the
+// modal (null while adding a new one).
+let _myWebhooks = [];
+let _editingMyWebhookId = null;
+
+// Load the current (non-admin) user's webhook endpoints into their table and
+// cache them in _myWebhooks for the edit modal.
+// NOTE(review): the row markup inside the template literal appears stripped
+// in this view — verify the rendered HTML against the actual template.
+async function loadMyWebhooks() {
+ const r = await fetch("/api/my/webhooks");
+ if (!r.ok) return;
+ _myWebhooks = await r.json();
+ const tbody = document.getElementById("my-webhooks-tbody");
+ if (!tbody) return;
+ if (!_myWebhooks.length) {
+ tbody.innerHTML = 'No webhooks yet. ';
+ return;
+ }
+ tbody.innerHTML = _myWebhooks.map(w => `
+
+ ${esc(w.name)} ${w.description ? `${esc(w.description)} ` : ""}
+ ${esc(w.agent_id || "—")}
+ ${w.enabled ? 'Active ' : 'Disabled '}
+ ${w.trigger_count || 0}
+ ${w.last_triggered_at ? formatDateShort(w.last_triggered_at) : "Never"}
+
+ Rotate token
+ Edit
+ Delete
+
+ `).join("");
+}
+
+// Open the user-scope webhook modal — blank for "add" or pre-filled from the
+// cached _myWebhooks list. Mirrors the admin openWebhookModal.
+async function openMyWebhookModal(id) {
+ _editingMyWebhookId = id || null;
+ const w = id ? _myWebhooks.find(x => x.id === id) : null;
+ document.getElementById("my-webhook-modal-title").textContent = id ? "Edit Webhook" : "Add Webhook";
+ document.getElementById("my-webhook-modal-id").value = id || "";
+ document.getElementById("my-webhook-modal-name").value = w?.name || "";
+ document.getElementById("my-webhook-modal-desc").value = w?.description || "";
+ document.getElementById("my-webhook-modal-allow-get").checked = w ? !!w.allow_get : true;
+ await _populateMyWebhookAgentSelect(w?.agent_id);
+ document.getElementById("my-webhook-modal").style.display = "";
+}
+
+// Fill the user-scope webhook modal's agent dropdown from /api/agents.
+// Near-duplicate of _populateWebhookAgentSelect (different select element id);
+// fetch failures are swallowed so the modal still opens with the placeholder.
+async function _populateMyWebhookAgentSelect(selected) {
+ const sel = document.getElementById("my-webhook-modal-agent");
+ if (!sel) return;
+ sel.innerHTML = '— no agent — ';
+ try {
+ const r = await fetch("/api/agents");
+ const agents = r.ok ? await r.json() : [];
+ agents.forEach(a => {
+ const opt = document.createElement("option");
+ opt.value = a.id;
+ opt.textContent = a.name;
+ if (a.id === selected) opt.selected = true;
+ sel.appendChild(opt);
+ });
+ } catch (_) {}
+}
+
+// Hide the user-scope webhook modal without saving.
+function closeMyWebhookModal() {
+ document.getElementById("my-webhook-modal").style.display = "none";
+}
+
+// Create or update one of the current user's webhook endpoints.
+// POST /api/my/webhooks for new, PUT /api/my/webhooks/{id} when editing.
+// On creation the secret token is returned once and shown immediately.
+async function saveMyWebhook() {
+ const name = document.getElementById("my-webhook-modal-name").value.trim();
+ if (!name) { showFlash("Name is required."); return; }
+ const agent_id = document.getElementById("my-webhook-modal-agent").value;
+ const description = document.getElementById("my-webhook-modal-desc").value.trim();
+ const allow_get = document.getElementById("my-webhook-modal-allow-get").checked;
+ const id = _editingMyWebhookId;
+ const url = id ? `/api/my/webhooks/${id}` : "/api/my/webhooks";
+ const method = id ? "PUT" : "POST";
+ const r = await fetch(url, { method, headers: { "Content-Type": "application/json" }, body: JSON.stringify({ name, agent_id, description, allow_get }) });
+ if (!r.ok) { const e = await r.json().catch(() => ({})); showFlash("Error: " + (e.detail || r.status)); return; }
+ const data = await r.json();
+ closeMyWebhookModal();
+ showFlash(id ? "Webhook updated." : "Webhook created.");
+ // Token only present on creation — show it once.
+ if (!id && data.token) _showWebhookTokenModal(data.token);
+ loadMyWebhooks();
+}
+
+async function deleteMyWebhook(id) {
+ if (!confirm("Delete this webhook endpoint?")) return;
+ const r = await fetch(`/api/my/webhooks/${id}`, { method: "DELETE" });
+ if (r.ok) { showFlash("Deleted."); loadMyWebhooks(); }
+ else showFlash("Error deleting webhook.");
+}
+
+async function rotateMyWebhookToken(id) {
+ if (!confirm("Rotate token? The old token will stop working immediately.")) return;
+ const r = await fetch(`/api/my/webhooks/${id}/rotate`, { method: "POST" });
+ if (!r.ok) { showFlash("Error rotating token."); return; }
+ const d = await r.json();
+ _showWebhookTokenModal(d.token);
+ loadMyWebhooks();
+}
+
+/* ── User outbound webhook targets ──────────────────────────────────────── */
+
+// Cached list of the current user's outbound targets, and the id being edited
+// in the modal (null while adding a new one).
+let _myWebhookTargets = [];
+let _editingMyWebhookTargetId = null;
+
+// Load the current user's outbound webhook targets into their table and cache
+// them in _myWebhookTargets for the edit modal.
+// NOTE(review): the row markup inside the template literal appears stripped
+// in this view — verify the rendered HTML against the actual template.
+async function loadMyWebhookTargets() {
+ const r = await fetch("/api/my/webhook-targets");
+ if (!r.ok) return;
+ _myWebhookTargets = await r.json();
+ const tbody = document.getElementById("my-webhook-targets-tbody");
+ if (!tbody) return;
+ if (!_myWebhookTargets.length) {
+ tbody.innerHTML = 'No targets yet. ';
+ return;
+ }
+ tbody.innerHTML = _myWebhookTargets.map(t => `
+
+ ${esc(t.name)}
+ ${esc(t.url)}
+ ${t.enabled ? 'Active ' : 'Disabled '}
+
+ Edit
+ Delete
+
+ `).join("");
+}
+
+// Open the user-scope outbound-target modal, blank or pre-filled from the
+// cached _myWebhookTargets list. The secret field is cleared; a masked
+// placeholder signals that a secret is already stored without echoing it.
+function openMyWebhookTargetModal(id) {
+ _editingMyWebhookTargetId = id || null;
+ const t = id ? _myWebhookTargets.find(x => x.id === id) : null;
+ document.getElementById("my-webhook-target-modal-title").textContent = id ? "Edit Target" : "Add Outbound Target";
+ document.getElementById("my-webhook-target-modal-id").value = id || "";
+ document.getElementById("my-webhook-target-modal-name").value = t?.name || "";
+ document.getElementById("my-webhook-target-modal-url").value = t?.url || "";
+ document.getElementById("my-webhook-target-modal-secret").value = "";
+ document.getElementById("my-webhook-target-modal-secret").placeholder = t?.secret_header ? "••••••••" : "Leave blank to omit";
+ document.getElementById("my-webhook-target-modal").style.display = "";
+}
+
+// Hide the user-scope outbound-target modal without saving.
+function closeMyWebhookTargetModal() {
+ document.getElementById("my-webhook-target-modal").style.display = "none";
+}
+
+// Create or update one of the current user's outbound webhook targets.
+// POST /api/my/webhook-targets for new, PUT /api/my/webhook-targets/{id}
+// when editing. The secret is sent as typed (may be empty).
+async function saveMyWebhookTarget() {
+ const name = document.getElementById("my-webhook-target-modal-name").value.trim();
+ const url = document.getElementById("my-webhook-target-modal-url").value.trim();
+ if (!name) { showFlash("Name is required."); return; }
+ if (!url) { showFlash("URL is required."); return; }
+ const secret = document.getElementById("my-webhook-target-modal-secret").value;
+ const id = _editingMyWebhookTargetId;
+ const endpoint = id ? `/api/my/webhook-targets/${id}` : "/api/my/webhook-targets";
+ const method = id ? "PUT" : "POST";
+ const payload = { name, url, secret_header: secret };
+ const r = await fetch(endpoint, { method, headers: { "Content-Type": "application/json" }, body: JSON.stringify(payload) });
+ if (!r.ok) { const e = await r.json().catch(() => ({})); showFlash("Error: " + (e.detail || r.status)); return; }
+ closeMyWebhookTargetModal();
+ showFlash(id ? "Target updated." : "Target created.");
+ loadMyWebhookTargets();
+}
+
+async function deleteMyWebhookTarget(id) {
+ if (!confirm("Delete this outbound target?")) return;
+ const r = await fetch(`/api/my/webhook-targets/${id}`, { method: "DELETE" });
+ if (r.ok) { showFlash("Deleted."); loadMyWebhookTargets(); }
+ else showFlash("Error deleting target.");
+}
+
+
/* ── Bootstrap ───────────────────────────────────────────────────────────── */
document.addEventListener("DOMContentLoaded", () => {
diff --git a/server/web/templates/base.html b/server/web/templates/base.html
index 9037888..4a0c331 100644
--- a/server/web/templates/base.html
+++ b/server/web/templates/base.html
@@ -16,7 +16,7 @@
@@ -40,6 +40,10 @@
Files
+
+
+ Monitors
+
Audit Log
diff --git a/server/web/templates/chats.html b/server/web/templates/chats.html
index 09dd5d8..0b17f7d 100644
--- a/server/web/templates/chats.html
+++ b/server/web/templates/chats.html
@@ -128,6 +128,7 @@ function renderChats(data) {
data-id="${c.id}" onclick="localStorage.setItem('current_session_id',this.dataset.id)">Open
Rename
+ Export
Delete
diff --git a/server/web/templates/help.html b/server/web/templates/help.html
index f28c115..0d35d17 100644
--- a/server/web/templates/help.html
+++ b/server/web/templates/help.html
@@ -42,14 +42,18 @@
General
Whitelists
Credentials
+ DAV
+ Pushover
{% endif %}
Inbox
Email Accounts
Telegram
- Profile
{% if not (current_user and current_user.is_admin) %}
- CalDAV
+ CalDAV / CardDAV
+ Pushover
{% endif %}
+ Webhooks
+ Profile
Personality
2nd Brain
MCP Servers
@@ -372,8 +376,28 @@ mcp = FastMCP(
Credentials (Admin)
- All secrets (API keys, passwords, app settings) are stored in an AES-256-GCM encrypted PostgreSQL table. Keys use a namespace:key convention. See the Credential Key Reference for a full list.
+ A generic AES-256-GCM encrypted key-value store for API keys and other secrets. Keys use a namespace:key convention. Service-specific credentials (CalDAV, CardDAV, Pushover) are managed in their own dedicated tabs — they do not appear here. See the Credential Key Reference for a full list of system keys.
+
+ DAV (CalDAV & CardDAV) (Admin)
+
+ Configure CalDAV and CardDAV for the admin user. There is no system-wide fallback — every user configures their own credentials independently via this tab (admin) or the CalDAV / CardDAV tab (regular users).
+
+
+ CalDAV : server URL, username, password, and calendar name. Bare hostnames (e.g. mail.example.com) are accepted — https:// is prepended automatically.
+ CardDAV : tick Same server as CalDAV to reuse the same credentials, or enter a separate URL, username, and password. The SOGo URL pattern (/SOGo/dav/{user}/Contacts/personal/) is built automatically.
+ Allow contact writes : when enabled, agents can create, update, and delete contacts (not just read them). This is per-user — enabling it for your account does not affect other users.
+ Test buttons : verify CalDAV and CardDAV connectivity without saving.
+
+
+ Pushover (Admin)
+
+ Pushover sends push notifications to iOS and Android devices.
+
+
+ App Token : registered once at pushover.net for this oAI-Web installation. Shared by all users — they cannot see or change it.
+ User Key : the admin's personal Pushover user key, shown on your pushover.net dashboard. Each user sets their own User Key in Settings → Pushover .
+
{% endif %}
Inbox
@@ -413,6 +437,36 @@ mcp = FastMCP(
Trigger Rules : same keyword-matching logic as email inbox
+ {% if not (current_user and current_user.is_admin) %}
+ CalDAV / CardDAV
+
+ Configure your personal CalDAV and CardDAV connection. There is no system-wide fallback — if you don't configure it, the tools are unavailable to you.
+
+
+ CalDAV : server URL, username, password, and calendar name. Bare hostnames are accepted — https:// is added automatically.
+ CardDAV : tick Same server as CalDAV to reuse credentials, or enter separate details.
+ Allow contact writes : when enabled, agents can create, update, and delete contacts.
+ Test buttons : verify connectivity before saving.
+
+
+ Pushover
+
+ Set your personal User Key to receive push notifications on your Pushover-connected devices. Your User Key is shown on your pushover.net dashboard. The App Token (the shared application credential) is managed by the admin — you only need your own User Key.
+
+ {% endif %}
+
+ Webhooks
+
+ Inbound webhooks let external services trigger agents via HTTP — useful for iOS Shortcuts, GitHub actions, Home Assistant automations, or any tool that can send an HTTP request.
+
+
+ Create a webhook : assign a name, description, and target agent. The secret token is shown once at creation — copy it immediately. Use Rotate Token to generate a new one if it is ever compromised.
+ Trigger via POST : POST /webhook/{token} with body {"message": "..."}
+ Trigger via GET : GET /webhook/{token}?q=your+message — useful for iOS Shortcuts URL actions
+ Enable/disable : toggle a webhook on/off without deleting it
+
+ The Outbound Targets section (same tab) manages named URLs that agents can send JSON payloads to via the webhook tool.
+
Profile
Available to all users. Contains:
@@ -422,16 +476,8 @@ mcp = FastMCP(
Two-Factor Authentication (TOTP) : enable/disable TOTP-based MFA. On setup, a QR code is shown to scan with any authenticator app (e.g. Aegis, Google Authenticator). Once enabled, every login requires a 6-digit code.
Data Folder : shows the path of your auto-provisioned personal folder (set by admin via system:users_base_folder). This folder is where the Files page browses and where agent memory files are stored.
Telegram Bot Token : per-user Telegram bot token (optional). Overrides the global token for your sessions.
- CalDAV : per-user CalDAV server, credentials, and calendar name. Overrides the global CalDAV config.
- {% if not (current_user and current_user.is_admin) %}
- CalDAV
-
- Configure your personal CalDAV connection under Settings → Profile → CalDAV . This overrides the global CalDAV config set by the admin. Fields: server URL, username, password, calendar name. Leave blank to inherit the global config.
-
- {% endif %}
-
Personality
Edit SOUL.md (agent identity, values, communication style) and USER.md (owner context: name, location, preferences) directly in the browser. Changes take effect immediately — no restart required.
@@ -553,9 +599,7 @@ mcp = FastMCP(
system:security_max_subject_charsMax chars of email subject (default: 200)
telegram:bot_tokenGlobal Telegram bot API token
telegram:default_agent_idUUID of agent for unmatched Telegram messages
- pushover_user_keyPushover user key
- pushover_app_tokenPushover application token
- caldav_calendar_nameOptional CalDAV calendar name override (global)
+ pushover_app_tokenPushover App Token — managed via Settings → Pushover , not this tab
brain:mcp_key2nd Brain MCP authentication key
system:api_key_hashSHA-256 hash of the external API key (raw key never stored)
system:api_key_created_atTimestamp of last API key generation
@@ -632,6 +676,10 @@ mcp = FastMCP(
DELETE /api/settings/branding/logoReset logo to default
GET /api/settings/audit-retentionCurrent audit retention setting
POST /api/settings/audit-retentionUpdate {days}
+ GET /api/settings/caldavAdmin CalDAV & CardDAV config (same as /api/my/caldav/config)
+ POST /api/settings/caldavSave admin CalDAV & CardDAV config
+ GET /api/settings/pushoverCurrent Pushover App Token and admin User Key
+ POST /api/settings/pushoverSave App Token and admin User Key
@@ -768,14 +816,62 @@ mcp = FastMCP(
POST /api/my/mfa/setup/beginStart MFA setup — returns QR code PNG (base64) and provisioning URI
POST /api/my/mfa/setup/confirmConfirm setup with a valid TOTP code {code}
DELETE /api/my/mfa/disableDisable MFA for the current user
- GET /api/my/caldav/configGet per-user CalDAV config
- POST /api/my/caldav/configSave per-user CalDAV credentials
- DELETE /api/my/caldav/configRemove per-user CalDAV config (fall back to global)
+ GET /api/my/caldav/configGet per-user CalDAV & CardDAV config
+ POST /api/my/caldav/configSave per-user CalDAV & CardDAV credentials
+ DELETE /api/my/caldav/configRemove per-user CalDAV & CardDAV config
+ POST /api/my/caldav/testTest CalDAV connectivity with current saved config
+ POST /api/my/caldav/test-carddavTest CardDAV connectivity with current saved config
+ GET /api/my/pushoverGet current user's Pushover User Key (masked)
+ POST /api/my/pushoverSave personal User Key {user_key}
+ DELETE /api/my/pushoverRemove personal User Key
GET /api/my/telegram/whitelisted-chatsList Telegram chat IDs whitelisted for the current user
+ Webhooks
+
+
+ Method Path Description
+
+ GET /api/webhooksList inbound webhook endpoints (admin)
+ POST /api/webhooksCreate endpoint — returns token once (admin)
+ PUT /api/webhooks/{id}Update name/description/agent/enabled (admin)
+ DELETE /api/webhooks/{id}Delete endpoint (admin)
+ POST /api/webhooks/{id}/rotateRegenerate token — returns new token once (admin)
+ GET /api/my/webhooksList current user's webhook endpoints
+ POST /api/my/webhooksCreate personal webhook endpoint
+ PUT /api/my/webhooks/{id}Update personal webhook endpoint
+ DELETE /api/my/webhooks/{id}Delete personal webhook endpoint
+ GET /webhook/{token}Trigger via GET — param: ?q=message (no auth)
+ POST /webhook/{token}Trigger via POST — body: {"message": "...", "async": true} (no auth)
+ GET /api/webhook-targetsList outbound webhook targets (admin)
+ POST /api/webhook-targetsCreate outbound target (admin)
+ PUT /api/webhook-targets/{id}Update outbound target (admin)
+ DELETE /api/webhook-targets/{id}Delete outbound target (admin)
+
+
+
+
+ Monitors
+
+
+ Method Path Description
+
+ GET /api/watched-pagesList page-change monitors
+ POST /api/watched-pagesCreate page monitor
+ PUT /api/watched-pages/{id}Update page monitor
+ DELETE /api/watched-pages/{id}Delete page monitor
+ POST /api/watched-pages/{id}/check-nowForce an immediate check
+ GET /api/rss-feedsList RSS feed monitors
+ POST /api/rss-feedsCreate RSS feed monitor
+ PUT /api/rss-feeds/{id}Update RSS feed monitor
+ DELETE /api/rss-feeds/{id}Delete RSS feed monitor
+ POST /api/rss-feeds/{id}/fetch-nowForce an immediate fetch
+
+
+
+
User Management (Admin)
diff --git a/server/web/templates/monitors.html b/server/web/templates/monitors.html
new file mode 100644
index 0000000..a0e03c0
--- /dev/null
+++ b/server/web/templates/monitors.html
@@ -0,0 +1,138 @@
+{% extends "base.html" %}
+{% block title %}{{ agent_name }} — Monitors{% endblock %}
+
+{% block content %}
+
+
+
Monitors
+
+
+
+
+ Page Watchers
+
+
+
+
+
+
+ + Add Page Watcher
+
+
+
+
+ Name
+ URL
+ Schedule
+ Status
+ Last checked
+ Last changed
+ Actions
+
+ Loading…
+
+
+
+
+
+
+
+
+
+
+
+
Add Page Watcher
+
+
+ Name
+
+
+
+ URL
+
+
+
+ CSS selector (optional — extract specific element)
+
+
+
+
+ On change: notify via
+
+ Agent
+ Pushover
+ Both
+
+
+
+ Agent to trigger
+
+
+
+ Cancel
+ Save
+
+
+
+
+
+
+
+
Add RSS Feed
+
+
+ Name
+
+
+
+ Feed URL (RSS or Atom)
+
+
+
+ Fetch schedule (cron)
+
+
+
+ Max new items per run
+
+
+
+ On new items: notify via
+
+ Agent
+ Pushover
+ Both
+
+
+
+ Agent to trigger
+
+
+
+ Cancel
+ Save
+
+
+
+{% endblock %}
diff --git a/server/web/templates/settings.html b/server/web/templates/settings.html
index 0cf1abd..f52c487 100644
--- a/server/web/templates/settings.html
+++ b/server/web/templates/settings.html
@@ -17,6 +17,8 @@
General
Whitelists
Credentials
+ DAV
+ Pushover
Inbox
Email Accounts
Telegram
@@ -25,6 +27,7 @@
MCP Servers
Security
Branding
+ Webhooks
Profile
{% else %}
@@ -34,10 +37,12 @@
Personality
Inbox
Email Accounts
- CalDAV
+ CalDAV / CardDAV
Telegram
MCP Servers
2nd Brain
+ Pushover
+ Webhooks
Profile
{% endif %}
@@ -107,7 +112,7 @@
Override the defaults from .env. Changes take effect immediately — no restart needed.
Individual agents can further override Max tool calls on their own settings page.
-
@@ -404,17 +413,18 @@
- Encrypted Credentials
+ Encrypted Credential Store
- Credentials for CalDAV, Email, and Pushover. All values are stored AES-256-GCM encrypted.
- Inbox, Telegram, and other integration credentials are managed in their respective Settings tabs.
+ Generic key-value store for any credentials not managed in a dedicated settings tab.
+ All values are stored AES-256-GCM encrypted.
+ CalDAV, CardDAV, Pushover, Inbox, and Telegram credentials are managed in their own tabs.
- Credential
- Used By
+ Key
+ Description
Status
Actions
@@ -428,6 +438,102 @@
+
+
+
+
+
+
+
+
+
+
+ Save all
+
+
+
+
+
+
+
+ Pushover
+
+ Pushover delivers push notifications to iOS/Android devices.
+ The App Token is created at pushover.net
+ and is shared across all users of this installation.
+ Each user sets their own User Key in their personal Settings → Pushover tab.
+
+
+
+ Save
+
+
+
@@ -1081,6 +1187,127 @@
+
+
+
+
+
+ Inbound Webhook Triggers
+
+ Each endpoint has a secret token. POST /webhook/{token} with {"message":"..."}
+ — or GET /webhook/{token}?q=... for iOS Shortcuts — to trigger the associated agent.
+
+ + Add Endpoint
+
+
+
+ Name
+ Agent
+ Status
+ Triggers
+ Last triggered
+ Actions
+
+ Loading…
+
+
+
+
+
+
+ Outbound Webhook Targets
+
+ Named targets agents can POST to using the webhook tool.
+
+ + Add Target
+
+
+
+ Name
+ URL
+ Status
+ Actions
+
+ Loading…
+
+
+
+
+
+
+
+
+
+
Add Webhook Endpoint
+
+
+ Name
+
+
+
+ Description (optional)
+
+
+
+ Agent to trigger
+
+
+
+
+ Allow GET requests (for iOS Shortcuts)
+
+
+ Cancel
+ Save
+
+
+
+
+
+
+
+
Webhook Token
+
Copy this token now — it will not be shown again.
+
+
+ Copy
+
+
+ POST:
+ GET:
+
+
+ Done
+
+
+
+
+
+
+
+
Add Webhook Target
+
+
+ Name (used by agents)
+
+
+
+ URL
+
+
+
+ Secret header value (optional — sent as Authorization: Bearer)
+
+
+
+ Cancel
+ Save
+
+
+
+
{% else %}
-
+
-
- My CalDAV
+
+
+
+ My CalDAV (Calendar)
- Your personal CalDAV server for calendar access. Leave blank to use the system CalDAV server
- configured by the admin.
+ Used by the caldav tool for reading and writing calendar events.
+ Used by the caldav tool for reading and writing calendar events.
-
-
CalDAV Server URL
-
+
CalDAV Server URL
+
+
Username
+
+
+
Calendar name (optional — uses first found if blank)
+
+
+ Test CalDAV
+
+
+
+
+
+
-
-
Save
-
Test connection
-
Clear / use system CalDAV
+
+
+
+ Allow contact writes (create, update, delete — requires confirmation)
+
-
-
- If left blank, the system CalDAV server will be used (configured by the admin in Credentials).
-
+
+ Test CardDAV
+
+
+
+
+
+ Save all
+ Clear all
+
+
+ Each user must configure their own CalDAV / CardDAV credentials. There is no shared fallback.
+
+
@@ -1335,6 +1603,83 @@
+
+
+
+ My Pushover Notifications
+
+ Set your personal Pushover User Key to receive push notifications on your devices.
+ The App Token is shared and managed by the admin.
+ Find your User Key at pushover.net → your user page.
+
+
+
+
+ Save
+ Remove
+
+
+
+
+
+
+
+
+
+ Inbound Endpoints
+
+ Webhook endpoints that trigger your agents. Each has a secret token —
+ POST /webhook/{token} with {"message":"..."}
+ or GET /webhook/{token}?q=... for iOS Shortcuts.
+
+ + Add Endpoint
+
+
+
+ Name
+ Agent
+ Status
+ Triggers
+ Last triggered
+ Actions
+
+ Loading…
+
+
+
+
+
+
+ Outbound Targets
+
+ Named targets your agents can POST to using the webhook tool.
+ Targets you define here are only visible to your own agents.
+
+ + Add Target
+
+
+
+ Name
+ URL
+ Status
+ Actions
+
+ Loading…
+
+
+
+
+
+
@@ -1657,31 +2002,9 @@
Add Credential
- Credential
-
- - choose a credential -
-
- Mailcow Host
- Mailcow Username
- Mailcow Password
- Calendar Name
-
-
- IMAP Host
- SMTP Host
- SMTP Port
-
-
- User Key
- App Token
-
- Custom…
-
-
-
- Custom credential name
+ Key name
+ placeholder="e.g. my_api_key or service:token" autocomplete="off">
Value
@@ -1727,6 +2050,58 @@
+
+
+
+
Add Outbound Target
+
+
+ Name (used by agents)
+
+
+
+ URL
+
+
+
+ Secret header value (optional — sent as Authorization: Bearer)
+
+
+
+ Cancel
+ Save
+
+
+
+
+
+
+
+
Add Webhook
+
+
+ Name
+
+
+
+ Description (optional)
+
+
+
+ Agent to trigger
+
+
+
+
+ Allow GET requests (for iOS Shortcuts)
+
+
+ Cancel
+ Save
+
+
+
+
{% block extra_scripts %}
{% endblock %}
diff --git a/server/webhooks/__init__.py b/server/webhooks/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/server/webhooks/endpoints.py b/server/webhooks/endpoints.py
new file mode 100644
index 0000000..a13af7d
--- /dev/null
+++ b/server/webhooks/endpoints.py
@@ -0,0 +1,138 @@
+"""
+webhooks/endpoints.py — CRUD for inbound webhook trigger endpoints.
+
+Each endpoint has a secret token. When the token is presented via GET ?q=...
+or POST {"message": "..."}, the associated agent is triggered.
+"""
+from __future__ import annotations
+
+import secrets
+from datetime import datetime, timezone
+
+from ..database import _rowcount, get_pool
+
+
+def _utcnow() -> str:
+ """Current UTC time as an ISO-8601 string (stored in created_at / last_triggered_at)."""
+ return datetime.now(timezone.utc).isoformat()
+
+
+async def create_endpoint(
+ name: str,
+ agent_id: str,
+ description: str = "",
+ allow_get: bool = True,
+ owner_user_id: str | None = None,
+) -> dict:
+ """Create a new webhook endpoint. Returns the full row including the plaintext token.
+
+ This is the only place the plaintext token is available; list/get helpers
+ strip it. owner_user_id=None creates an admin-owned (unscoped) endpoint.
+ """
+ # 32 random bytes, URL-safe encoded (~43 chars).
+ token = secrets.token_urlsafe(32)
+ pool = await get_pool()
+ row = await pool.fetchrow(
+ """
+ INSERT INTO webhook_endpoints
+ (name, token, agent_id, description, allow_get, owner_user_id, created_at)
+ VALUES ($1, $2, $3, $4, $5, $6, $7)
+ RETURNING *
+ """,
+ # An empty-string agent_id is normalized to NULL.
+ name, token, agent_id or None, description, allow_get, owner_user_id, _utcnow(),
+ )
+ return dict(row)
+
+
+async def list_endpoints(owner_user_id: str | None = None) -> list[dict]:
+ """List endpoints. Pass owner_user_id to scope to one user; None returns all (admin). Token never included."""
+ pool = await get_pool()
+ if owner_user_id:
+ rows = await pool.fetch(
+ "SELECT * FROM webhook_endpoints WHERE owner_user_id = $1 ORDER BY created_at DESC",
+ owner_user_id,
+ )
+ else:
+ rows = await pool.fetch(
+ "SELECT * FROM webhook_endpoints ORDER BY created_at DESC"
+ )
+ # Strip the secret token before handing rows to API serializers.
+ result = []
+ for row in rows:
+ d = dict(row)
+ d.pop("token", None)
+ result.append(d)
+ return result
+
+
+async def get_endpoint(endpoint_id: str, owner_user_id: str | None = None) -> dict | None:
+ """Get one endpoint by ID. Token is not included. Pass owner_user_id to enforce ownership.
+
+ Returns None when the id does not exist — or, with owner_user_id set, when
+ the row belongs to a different user (ownership check in SQL).
+ """
+ pool = await get_pool()
+ if owner_user_id:
+ row = await pool.fetchrow(
+ "SELECT * FROM webhook_endpoints WHERE id = $1::uuid AND owner_user_id = $2",
+ endpoint_id, owner_user_id,
+ )
+ else:
+ row = await pool.fetchrow(
+ "SELECT * FROM webhook_endpoints WHERE id = $1::uuid", endpoint_id
+ )
+ if not row:
+ return None
+ d = dict(row)
+ d.pop("token", None)
+ return d
+
+
+async def get_by_token(token: str) -> dict | None:
+ """Look up an enabled endpoint by its secret token (includes token field).
+
+ Disabled endpoints never match, so toggling `enabled` off immediately
+ blocks /webhook/{token} triggers without deleting the row.
+ """
+ pool = await get_pool()
+ row = await pool.fetchrow(
+ "SELECT * FROM webhook_endpoints WHERE token = $1 AND enabled = TRUE", token
+ )
+ return dict(row) if row else None
+
+
+async def update_endpoint(endpoint_id: str, **fields) -> dict | None:
+ """Update mutable fields of an endpoint; returns the updated row (token excluded).
+
+ Unknown keyword fields are silently ignored; with no recognized fields the
+ current row is returned unchanged.
+ NOTE(review): no ownership check here — callers must verify ownership
+ (e.g. via get_endpoint with owner_user_id) before updating.
+ """
+ # Allowlist of updatable columns — this also makes the f-string SET clause
+ # safe, since column names never come from user input.
+ allowed = {"name", "agent_id", "description", "allow_get", "enabled"}
+ updates = {k: v for k, v in fields.items() if k in allowed}
+ if not updates:
+ return await get_endpoint(endpoint_id)
+ pool = await get_pool()
+ # $1 is the id, so value placeholders start at $2.
+ set_clauses = ", ".join(f"{k} = ${i + 2}" for i, k in enumerate(updates))
+ await pool.execute(
+ f"UPDATE webhook_endpoints SET {set_clauses} WHERE id = $1::uuid",
+ endpoint_id, *updates.values(),
+ )
+ return await get_endpoint(endpoint_id)
+
+
+async def rotate_token(endpoint_id: str) -> str:
+ """Generate and store a new token. Returns the new plaintext token.
+
+ The old token stops matching get_by_token as soon as the UPDATE commits.
+ NOTE(review): the UPDATE's row count is not checked — a non-existent id
+ still returns a (useless) token. Callers should 404 before calling this.
+ """
+ new_token = secrets.token_urlsafe(32)
+ pool = await get_pool()
+ await pool.execute(
+ "UPDATE webhook_endpoints SET token = $1 WHERE id = $2::uuid",
+ new_token, endpoint_id,
+ )
+ return new_token
+
+
+async def delete_endpoint(endpoint_id: str, owner_user_id: str | None = None) -> bool:
+ """Delete an endpoint. With owner_user_id set, only that user's row is deleted.
+
+ Returns True if a row was actually removed (rowcount parsed from the
+ DELETE status string).
+ """
+ pool = await get_pool()
+ if owner_user_id:
+ status = await pool.execute(
+ "DELETE FROM webhook_endpoints WHERE id = $1::uuid AND owner_user_id = $2",
+ endpoint_id, owner_user_id,
+ )
+ else:
+ status = await pool.execute(
+ "DELETE FROM webhook_endpoints WHERE id = $1::uuid", endpoint_id
+ )
+ return _rowcount(status) > 0
+
+
+async def record_trigger(endpoint_id: str) -> None:
+ """Increment trigger_count and update last_triggered_at.
+
+ COALESCE guards against rows where trigger_count is still NULL.
+ """
+ pool = await get_pool()
+ await pool.execute(
+ """
+ UPDATE webhook_endpoints
+ SET last_triggered_at = $1, trigger_count = COALESCE(trigger_count, 0) + 1
+ WHERE id = $2::uuid
+ """,
+ _utcnow(), endpoint_id,
+ )