Settings: add dedicated DAV/Pushover tabs, fix CalDAV/CardDAV bugs

- Add admin DAV tab (rename from CalDAV/CardDAV) and Pushover tab
  - Add per-user Pushover tab (User Key only; App Token stays admin-managed)
  - Remove system-wide CalDAV/CardDAV fallback — per-user config only
  - Rewrite contacts_tool.py using httpx directly (caldav 2.x dropped AddressBook)
  - Fix CardDAV REPORT/PROPFIND using SOGo URL pattern
  - Fix CalDAV/CardDAV test endpoints (POST method, URL scheme normalization)
  - Fix Show Password button — API now returns actual credential values
  - Convert Credentials tab to generic key-value store; dedicated keys
    (CalDAV, Pushover, trusted_proxy) excluded via _DEDICATED_CRED_KEYS
This commit is contained in:
2026-04-10 12:06:23 +02:00
parent a9ca08f13d
commit 7b0a9ccc2b
25 changed files with 4011 additions and 235 deletions

View File

View File

@@ -0,0 +1,175 @@
"""
monitors/page_monitor.py — Page change monitor.
Polls watched URLs on a cron schedule, hashes the content, and dispatches
an agent (or Pushover notification) when the page content changes.
"""
from __future__ import annotations
import hashlib
import logging
import httpx
from apscheduler.schedulers.asyncio import AsyncIOScheduler
from apscheduler.triggers.cron import CronTrigger
from ..config import settings
from . import store
logger = logging.getLogger(__name__)
_DEFAULT_HEADERS = {
"User-Agent": "Mozilla/5.0 (compatible; oAI-Web page-monitor/1.0)",
}
async def _fetch_page_content(url: str, css_selector: str | None = None) -> str:
    """Download *url* and reduce it to plain text.

    When *css_selector* is given, only the text of matching elements is
    returned (newline-separated); if the selector raises, we log a warning
    and fall through to whole-document extraction.  Whole-document mode
    strips common boilerplate tags before flattening to text.  Raw HTML is
    returned as a last resort when BeautifulSoup is unavailable or parsing
    fails entirely.
    """
    async with httpx.AsyncClient(
        timeout=30.0,
        follow_redirects=True,
        headers=_DEFAULT_HEADERS,
    ) as client:
        response = await client.get(url)
        response.raise_for_status()
        html = response.text

    if css_selector:
        try:
            from bs4 import BeautifulSoup

            matches = BeautifulSoup(html, "html.parser").select(css_selector)
            return "\n".join(m.get_text(separator=" ", strip=True) for m in matches)
        except Exception as e:
            logger.warning("[page-monitor] CSS selector '%s' failed: %s", css_selector, e)

    try:
        from bs4 import BeautifulSoup

        document = BeautifulSoup(html, "html.parser")
        # Drop boilerplate chrome so hashing keys off actual content.
        for noise in document(["script", "style", "nav", "footer", "header"]):
            noise.decompose()
        return document.get_text(separator="\n", strip=True)
    except Exception:
        # bs4 missing or parser crash — hand back the raw markup.
        return html
def _content_hash(text: str) -> str:
return hashlib.sha256(text.encode("utf-8", errors="replace")).hexdigest()
class PageMonitorManager:
    """Owns the APScheduler jobs behind every ``watched_pages`` row.

    The manager never creates its own scheduler: :meth:`init` is handed the
    AgentRunner's ``AsyncIOScheduler`` and all jobs are registered on it
    under ids of the form ``page:<uuid>``.
    """

    def __init__(self) -> None:
        self._scheduler: AsyncIOScheduler | None = None

    def init(self, scheduler: AsyncIOScheduler) -> None:
        """Share the AgentRunner's scheduler."""
        self._scheduler = scheduler

    async def start_all(self) -> None:
        """Load all enabled watched pages and register APScheduler jobs."""
        registered = 0
        for row in await store.list_watched_pages():
            if not row["enabled"]:
                continue
            self._add_job(row)
            registered += 1
        logger.info("[page-monitor] Registered %d page monitor jobs", registered)

    def _add_job(self, page: dict) -> None:
        # No-op until init() has supplied a scheduler.
        scheduler = self._scheduler
        if scheduler is None:
            return
        try:
            trigger = CronTrigger.from_crontab(page["schedule"], timezone=settings.timezone)
            scheduler.add_job(
                self._check_page,
                trigger=trigger,
                id=f"page:{page['id']}",
                args=[str(page["id"])],
                replace_existing=True,
                misfire_grace_time=300,
            )
        except Exception as e:
            logger.error("[page-monitor] Failed to schedule page '%s': %s", page["name"], e)

    def reschedule(self, page: dict) -> None:
        """Drop the page's existing job (if any) and re-add it when enabled."""
        if self._scheduler is None:
            return
        try:
            self._scheduler.remove_job(f"page:{page['id']}")
        except Exception:
            pass  # job may not exist yet — that's fine
        if page.get("enabled"):
            self._add_job(page)

    def remove(self, page_id: str) -> None:
        """Unschedule a deleted page; unknown ids are silently ignored."""
        if self._scheduler is None:
            return
        try:
            self._scheduler.remove_job(f"page:{page_id}")
        except Exception:
            pass

    async def check_now(self, page_id: str) -> dict:
        """Force-check a page immediately (UI-triggered). Returns status dict."""
        return await self._check_page(page_id)

    async def _check_page(self, page_id: str) -> dict:
        """Fetch the page, compare its hash to the stored one, and dispatch on change."""
        row = await store.get_watched_page(page_id)
        if row is None:
            return {"error": "Page not found"}
        logger.info("[page-monitor] Checking '%s' (%s)", row["name"], row["url"])
        try:
            text = await _fetch_page_content(row["url"], row.get("css_selector"))
        except Exception as e:
            reason = str(e)[:200]
            logger.warning("[page-monitor] Failed to fetch '%s': %s", row["url"], reason)
            await store.update_page_check_result(page_id, None, False, error=reason)
            return {"error": reason}
        previous = row.get("last_content_hash")
        current = _content_hash(text)
        # First-ever check (no stored hash) is never reported as a change.
        changed = previous is not None and current != previous
        await store.update_page_check_result(page_id, current, changed)
        if changed:
            logger.info("[page-monitor] Change detected on '%s'", row["name"])
            await self._dispatch_change(row, text)
        return {"changed": changed, "hash": current, "first_check": previous is None}

    async def _dispatch_change(self, page: dict, content: str) -> None:
        """Notify via Pushover and/or run the linked agent, per notification_mode."""
        mode = page.get("notification_mode", "agent")
        body = (
            f"Page change detected: {page['name']}\n"
            f"URL: {page['url']}\n\n"
            f"Current content (first 2000 chars):\n{content[:2000]}"
        )
        if mode in ("pushover", "both"):
            try:
                from ..tools.pushover_tool import PushoverTool

                await PushoverTool().execute(
                    title=f"Page changed: {page['name']}",
                    message=f"{page['url']} has new content.",
                    priority=0,
                )
            except Exception as e:
                logger.warning("[page-monitor] Pushover notify failed: %s", e)
        if mode in ("agent", "both") and page.get("agent_id"):
            try:
                from ..agents.runner import agent_runner

                await agent_runner.run_agent_now(
                    agent_id=page["agent_id"],
                    override_message=body,
                )
            except Exception as e:
                logger.warning("[page-monitor] Agent dispatch failed: %s", e)


page_monitor = PageMonitorManager()

View File

@@ -0,0 +1,185 @@
"""
monitors/rss_monitor.py — RSS / Atom feed monitor.
Polls feeds on a cron schedule, tracks seen item IDs, and dispatches
an agent (or Pushover) when new items appear.
Sends ETag / If-Modified-Since headers for bandwidth efficiency.
"""
from __future__ import annotations
import logging
from apscheduler.schedulers.asyncio import AsyncIOScheduler
from apscheduler.triggers.cron import CronTrigger
from ..config import settings
from . import store
logger = logging.getLogger(__name__)
def _item_id(entry) -> str:
"""Return a stable ID for a feed entry (id → link → title)."""
return entry.get("id") or entry.get("link") or entry.get("title") or ""
def _format_items(entries: list) -> str:
"""Format new feed entries as a readable message for agents."""
lines = []
for e in entries:
title = e.get("title", "(no title)")
link = e.get("link", "")
summary = e.get("summary", "")[:500]
lines.append(f"- {title}\n {link}")
if summary:
lines.append(f" {summary}")
return "\n\n".join(lines)
class RssFeedManager:
    """Manages APScheduler jobs for all rss_feeds entries.

    Jobs are registered on the AgentRunner's shared scheduler under ids of
    the form ``rss:<uuid>``.  Fetches are conditional (If-None-Match /
    If-Modified-Since) so unchanged feeds cost a single 304 round-trip.
    """

    # Cap on remembered item IDs per feed, to prevent unbounded growth.
    _MAX_SEEN_IDS = 500

    def __init__(self) -> None:
        self._scheduler: AsyncIOScheduler | None = None

    def init(self, scheduler: AsyncIOScheduler) -> None:
        """Share the AgentRunner's scheduler."""
        self._scheduler = scheduler

    async def start_all(self) -> None:
        """Load all enabled feeds and register their APScheduler jobs."""
        feeds = await store.list_rss_feeds()
        enabled = [f for f in feeds if f["enabled"]]
        for feed in enabled:
            self._add_job(feed)
        logger.info("[rss-monitor] Registered %d RSS feed jobs", len(enabled))

    def _add_job(self, feed: dict) -> None:
        # Register (or replace) the cron job for one feed row.
        if not self._scheduler:
            return
        try:
            self._scheduler.add_job(
                self._fetch_feed,
                trigger=CronTrigger.from_crontab(feed["schedule"], timezone=settings.timezone),
                id=f"rss:{feed['id']}",
                args=[str(feed["id"])],
                replace_existing=True,
                misfire_grace_time=300,
            )
        except Exception as e:
            logger.error("[rss-monitor] Failed to schedule feed '%s': %s", feed["name"], e)

    def reschedule(self, feed: dict) -> None:
        """Drop the feed's existing job (if any) and re-add it when enabled."""
        if not self._scheduler:
            return
        try:
            self._scheduler.remove_job(f"rss:{feed['id']}")
        except Exception:
            pass  # job may not exist yet — that's fine
        if feed.get("enabled"):
            self._add_job(feed)

    def remove(self, feed_id: str) -> None:
        """Unschedule a deleted feed; unknown ids are silently ignored."""
        if not self._scheduler:
            return
        try:
            self._scheduler.remove_job(f"rss:{feed_id}")
        except Exception:
            pass

    async def fetch_now(self, feed_id: str) -> dict:
        """Force-fetch a feed immediately (UI-triggered). Returns status dict."""
        return await self._fetch_feed(feed_id)

    async def _fetch_feed(self, feed_id: str) -> dict:
        """Fetch one feed, record newly seen items, and dispatch notifications.

        Returns ``{"new_items": n}`` on success, ``{"error": msg}`` on
        failure, or ``{"new_items": 0, "status": "not_modified"}`` on 304.
        """
        import feedparser
        feed_row = await store.get_rss_feed(feed_id)
        if not feed_row:
            return {"error": "Feed not found"}
        logger.info("[rss-monitor] Fetching '%s' (%s)", feed_row["name"], feed_row["url"])
        # Conditional request headers for bandwidth efficiency.  The
        # validator the server sent as ETag must go back in If-None-Match —
        # an "ETag" *request* header is ignored and would never yield a 304.
        request_headers = {}
        if feed_row.get("last_etag"):
            request_headers["If-None-Match"] = feed_row["last_etag"]
        if feed_row.get("last_modified"):
            request_headers["If-Modified-Since"] = feed_row["last_modified"]
        try:
            import httpx
            async with httpx.AsyncClient(timeout=30.0, follow_redirects=True) as client:
                resp = await client.get(feed_row["url"], headers=request_headers)
                if resp.status_code == 304:
                    logger.info("[rss-monitor] '%s' unchanged (304)", feed_row["name"])
                    # Pass the cached validators back in — omitting them
                    # would NULL the columns and disable conditional
                    # requests from here on.
                    await store.update_feed_fetch_result(
                        feed_id,
                        feed_row.get("seen_item_ids") or [],
                        etag=feed_row.get("last_etag"),
                        last_modified=feed_row.get("last_modified"),
                    )
                    return {"new_items": 0, "status": "not_modified"}
                resp.raise_for_status()
                parsed = feedparser.parse(resp.text)
                etag = resp.headers.get("etag")
                last_modified = resp.headers.get("last-modified")
        except Exception as e:
            error_msg = str(e)[:200]
            logger.warning("[rss-monitor] Failed to fetch '%s': %s", feed_row["url"], error_msg)
            await store.update_feed_fetch_result(feed_id, feed_row.get("seen_item_ids") or [], error=error_msg)
            return {"error": error_msg}
        previous_ids = feed_row.get("seen_item_ids") or []
        seen = set(previous_ids)
        max_items = feed_row.get("max_items_per_run") or 5
        new_entries = [e for e in parsed.entries if _item_id(e) and _item_id(e) not in seen][:max_items]
        # Update seen IDs, preserving insertion order so the truncation below
        # evicts the *oldest* IDs first (round-tripping through a bare set
        # would evict arbitrary ones and could re-notify old items).
        all_ids = list(previous_ids)
        for entry in parsed.entries:
            iid = _item_id(entry)
            if iid and iid not in seen:
                seen.add(iid)
                all_ids.append(iid)
        all_ids = all_ids[-self._MAX_SEEN_IDS:]
        await store.update_feed_fetch_result(
            feed_id,
            seen_item_ids=all_ids,
            etag=etag,
            last_modified=last_modified,
        )
        if new_entries:
            logger.info("[rss-monitor] %d new items in '%s'", len(new_entries), feed_row["name"])
            await self._dispatch_new_items(feed_row, new_entries)
        return {"new_items": len(new_entries)}

    async def _dispatch_new_items(self, feed_row: dict, entries: list) -> None:
        """Notify via Pushover and/or run the linked agent, per notification_mode."""
        mode = feed_row.get("notification_mode", "agent")
        count = len(entries)
        items_text = _format_items(entries)
        message = (
            f"{count} new item{'s' if count != 1 else ''} in feed: {feed_row['name']}\n"
            f"URL: {feed_row['url']}\n\n"
            f"{items_text}"
        )
        if mode in ("pushover", "both"):
            try:
                from ..tools.pushover_tool import PushoverTool
                await PushoverTool().execute(
                    title=f"RSS: {count} new in {feed_row['name']}",
                    message=items_text[:512],  # Pushover payloads are length-limited
                    priority=0,
                )
            except Exception as e:
                logger.warning("[rss-monitor] Pushover notify failed: %s", e)
        if mode in ("agent", "both"):
            agent_id = feed_row.get("agent_id")
            if agent_id:
                try:
                    from ..agents.runner import agent_runner
                    await agent_runner.run_agent_now(
                        agent_id=agent_id,
                        override_message=message,
                    )
                except Exception as e:
                    logger.warning("[rss-monitor] Agent dispatch failed: %s", e)


rss_monitor = RssFeedManager()

203
server/monitors/store.py Normal file
View File

@@ -0,0 +1,203 @@
"""
monitors/store.py — DB CRUD for watched_pages and rss_feeds tables.
"""
from __future__ import annotations
import json
from datetime import datetime, timezone
from ..database import _rowcount, get_pool
def _utcnow() -> str:
return datetime.now(timezone.utc).isoformat()
def _page_row(row) -> dict:
    # Convert a DB row mapping to a plain dict.  Unlike _feed_row, no
    # column on watched_pages needs JSON decoding.
    return dict(row)
def _feed_row(row) -> dict:
d = dict(row)
if isinstance(d.get("seen_item_ids"), str):
try:
d["seen_item_ids"] = json.loads(d["seen_item_ids"])
except Exception:
d["seen_item_ids"] = []
return d
# ─── Watched Pages ────────────────────────────────────────────────────────────
async def create_watched_page(
    name: str,
    url: str,
    schedule: str = "0 * * * *",
    css_selector: str | None = None,
    agent_id: str | None = None,
    notification_mode: str = "agent",
    owner_user_id: str | None = None,
) -> dict:
    """Insert a watched_pages row and return it as a dict.

    *schedule* is a 5-field cron expression (default: hourly).
    *notification_mode* is one of "agent", "pushover", or "both"
    (the monitor checks membership against those values).
    """
    pool = await get_pool()
    row = await pool.fetchrow(
        """
        INSERT INTO watched_pages
        (name, url, schedule, css_selector, agent_id, notification_mode, owner_user_id, created_at)
        VALUES ($1,$2,$3,$4,$5,$6,$7,$8) RETURNING *
        """,
        # `agent_id or None` coerces an empty string from the UI to NULL.
        name, url, schedule, css_selector, agent_id or None, notification_mode,
        owner_user_id, _utcnow(),
    )
    return _page_row(row)
async def list_watched_pages(owner_user_id: str | None = None) -> list[dict]:
    """Return all watched pages, newest first, optionally scoped to one owner."""
    if owner_user_id:
        query = "SELECT * FROM watched_pages WHERE owner_user_id = $1 ORDER BY created_at DESC"
        args = (owner_user_id,)
    else:
        query = "SELECT * FROM watched_pages ORDER BY created_at DESC"
        args = ()
    pool = await get_pool()
    rows = await pool.fetch(query, *args)
    return [_page_row(r) for r in rows]
async def get_watched_page(page_id: str) -> dict | None:
    """Fetch a single watched page by UUID; returns None when not found."""
    pool = await get_pool()
    row = await pool.fetchrow("SELECT * FROM watched_pages WHERE id = $1::uuid", page_id)
    return _page_row(row) if row else None
async def update_watched_page(page_id: str, **fields) -> dict | None:
    """Patch allow-listed columns of a watched page and return the fresh row.

    Unknown keys in *fields* are silently dropped; with nothing left to
    change the current row is returned unmodified.
    """
    editable = ("name", "url", "schedule", "css_selector", "agent_id", "notification_mode", "enabled")
    updates = {key: fields[key] for key in fields if key in editable}
    if not updates:
        return await get_watched_page(page_id)
    # Column names come from the allow-list above, so interpolating them
    # into the statement is safe; values still go through placeholders.
    assignments = []
    values = []
    for position, (column, value) in enumerate(updates.items(), start=2):
        assignments.append(f"{column} = ${position}")
        values.append(value)
    pool = await get_pool()
    await pool.execute(
        f"UPDATE watched_pages SET {', '.join(assignments)} WHERE id = $1::uuid",
        page_id, *values,
    )
    return await get_watched_page(page_id)
async def delete_watched_page(page_id: str) -> bool:
    """Delete a watched page; True when a row was actually removed."""
    pool = await get_pool()
    status = await pool.execute("DELETE FROM watched_pages WHERE id = $1::uuid", page_id)
    return _rowcount(status) > 0
async def update_page_check_result(
    page_id: str,
    content_hash: str | None,
    changed: bool,
    error: str | None = None,
) -> None:
    """Record the outcome of one page check.

    - On *error*: stamp last_checked_at and store the error text, leaving
      the previously stored content hash untouched.
    - On *changed*: store the new hash, bump last_changed_at (same
      timestamp as last_checked_at), and clear any previous error.
    - Otherwise: store the hash and clear any previous error.
    """
    pool = await get_pool()
    now = _utcnow()
    if error:
        await pool.execute(
            "UPDATE watched_pages SET last_checked_at=$1, last_error=$2 WHERE id=$3::uuid",
            now, error, page_id,
        )
    elif changed:
        await pool.execute(
            """UPDATE watched_pages
               SET last_checked_at=$1, last_content_hash=$2, last_changed_at=$1, last_error=NULL
               WHERE id=$3::uuid""",
            now, content_hash, page_id,
        )
    else:
        await pool.execute(
            """UPDATE watched_pages
               SET last_checked_at=$1, last_content_hash=$2, last_error=NULL
               WHERE id=$3::uuid""",
            now, content_hash, page_id,
        )
# ─── RSS Feeds ────────────────────────────────────────────────────────────────
async def create_rss_feed(
    name: str,
    url: str,
    schedule: str = "0 */4 * * *",
    agent_id: str | None = None,
    notification_mode: str = "agent",
    max_items_per_run: int = 5,
    owner_user_id: str | None = None,
) -> dict:
    """Insert an rss_feeds row and return it as a dict.

    *schedule* is a 5-field cron expression (default: every 4 hours).
    *max_items_per_run* caps how many new entries one fetch will dispatch.
    *notification_mode* is one of "agent", "pushover", or "both".
    """
    pool = await get_pool()
    row = await pool.fetchrow(
        """
        INSERT INTO rss_feeds
        (name, url, schedule, agent_id, notification_mode, max_items_per_run, owner_user_id, created_at)
        VALUES ($1,$2,$3,$4,$5,$6,$7,$8) RETURNING *
        """,
        # `agent_id or None` coerces an empty string from the UI to NULL.
        name, url, schedule, agent_id or None, notification_mode, max_items_per_run,
        owner_user_id, _utcnow(),
    )
    return _feed_row(row)
async def list_rss_feeds(owner_user_id: str | None = None) -> list[dict]:
    """Return all RSS feeds, newest first, optionally scoped to one owner."""
    if owner_user_id:
        query = "SELECT * FROM rss_feeds WHERE owner_user_id = $1 ORDER BY created_at DESC"
        args = (owner_user_id,)
    else:
        query = "SELECT * FROM rss_feeds ORDER BY created_at DESC"
        args = ()
    pool = await get_pool()
    rows = await pool.fetch(query, *args)
    return [_feed_row(r) for r in rows]
async def get_rss_feed(feed_id: str) -> dict | None:
    """Fetch a single RSS feed by UUID; returns None when not found."""
    pool = await get_pool()
    row = await pool.fetchrow("SELECT * FROM rss_feeds WHERE id = $1::uuid", feed_id)
    return _feed_row(row) if row else None
async def update_rss_feed(feed_id: str, **fields) -> dict | None:
    """Patch allow-listed columns of an RSS feed and return the fresh row.

    Unknown keys in *fields* are silently dropped; with nothing left to
    change the current row is returned unmodified.
    """
    editable = ("name", "url", "schedule", "agent_id", "notification_mode", "max_items_per_run", "enabled")
    updates = {key: fields[key] for key in fields if key in editable}
    if not updates:
        return await get_rss_feed(feed_id)
    # Column names come from the allow-list above, so interpolating them
    # into the statement is safe; values still go through placeholders.
    assignments = []
    values = []
    for position, (column, value) in enumerate(updates.items(), start=2):
        assignments.append(f"{column} = ${position}")
        values.append(value)
    pool = await get_pool()
    await pool.execute(
        f"UPDATE rss_feeds SET {', '.join(assignments)} WHERE id = $1::uuid",
        feed_id, *values,
    )
    return await get_rss_feed(feed_id)
async def delete_rss_feed(feed_id: str) -> bool:
    """Delete an RSS feed; True when a row was actually removed."""
    pool = await get_pool()
    status = await pool.execute("DELETE FROM rss_feeds WHERE id = $1::uuid", feed_id)
    return _rowcount(status) > 0
async def update_feed_fetch_result(
    feed_id: str,
    seen_item_ids: list[str],
    etag: str | None = None,
    last_modified: str | None = None,
    error: str | None = None,
) -> None:
    """Record the outcome of one feed fetch.

    On *error*, only the timestamp and error text are written; the seen-ID
    list and cached validators stay untouched.  On success, seen_item_ids,
    last_etag and last_modified are ALL overwritten — passing etag /
    last_modified as None clears the cached validators, so callers that
    want to keep them must pass the previous values back in.

    NOTE(review): *seen_item_ids* is handed to the driver as a Python
    list while _feed_row() decodes the column from JSON text — presumably
    a JSON codec is registered on the pool; verify against database setup.
    """
    pool = await get_pool()
    now = _utcnow()
    if error:
        await pool.execute(
            "UPDATE rss_feeds SET last_fetched_at=$1, last_error=$2 WHERE id=$3::uuid",
            now, error, feed_id,
        )
    else:
        await pool.execute(
            """UPDATE rss_feeds
               SET last_fetched_at=$1, seen_item_ids=$2, last_etag=$3, last_modified=$4, last_error=NULL
               WHERE id=$5::uuid""",
            now, seen_item_ids, etag, last_modified, feed_id,
        )