Initial commit

This commit is contained in:
2026-04-08 12:43:24 +02:00
commit be674c2f93
148 changed files with 25007 additions and 0 deletions

0
server/inbox/__init__.py Normal file
View File

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

246
server/inbox/accounts.py Normal file
View File

@@ -0,0 +1,246 @@
"""
inbox/accounts.py — CRUD for email_accounts table.
Passwords are encrypted with AES-256-GCM (same scheme as credential_store).
"""
from __future__ import annotations
import json
import uuid
from datetime import datetime, timezone
from typing import Any
from ..database import _encrypt, _decrypt, get_pool, _rowcount
def _now() -> str:
return datetime.now(timezone.utc).isoformat()
# ── Read ──────────────────────────────────────────────────────────────────────
async def list_accounts(user_id: str | None = None) -> list[dict]:
    """
    List email accounts with decrypted passwords.
    - user_id=None: all accounts (admin view)
    - user_id="<uuid>": accounts for this user only
    """
    # Shared SELECT so the two branches cannot drift apart.
    base = (
        "SELECT ea.*, a.name AS agent_name, a.model AS agent_model, a.prompt AS agent_prompt FROM email_accounts ea"
        " LEFT JOIN agents a ON a.id = ea.agent_id"
    )
    pool = await get_pool()
    if user_id is None:
        rows = await pool.fetch(base + " ORDER BY ea.created_at")
    else:
        rows = await pool.fetch(
            base + " WHERE ea.user_id = $1 ORDER BY ea.created_at",
            user_id,
        )
    return [_decrypt_row(dict(r)) for r in rows]
async def list_accounts_enabled() -> list[dict]:
    """Return every enabled account (used by the listener manager on startup)."""
    pool = await get_pool()
    query = (
        "SELECT ea.*, a.name AS agent_name, a.model AS agent_model, a.prompt AS agent_prompt FROM email_accounts ea"
        " LEFT JOIN agents a ON a.id = ea.agent_id"
        " WHERE ea.enabled = TRUE ORDER BY ea.created_at"
    )
    records = await pool.fetch(query)
    decrypted = []
    for record in records:
        decrypted.append(_decrypt_row(dict(record)))
    return decrypted
async def get_account(account_id: str) -> dict | None:
    """Fetch one account row (with joined agent info), or None when absent."""
    pool = await get_pool()
    record = await pool.fetchrow(
        "SELECT ea.*, a.name AS agent_name, a.model AS agent_model, a.prompt AS agent_prompt FROM email_accounts ea"
        " LEFT JOIN agents a ON a.id = ea.agent_id"
        " WHERE ea.id = $1",
        account_id,
    )
    return None if record is None else _decrypt_row(dict(record))
# ── Write ─────────────────────────────────────────────────────────────────────
async def create_account(
    label: str,
    account_type: str,
    imap_host: str,
    imap_port: int,
    imap_username: str,
    imap_password: str,
    smtp_host: str | None = None,
    smtp_port: int | None = None,
    smtp_username: str | None = None,
    smtp_password: str | None = None,
    agent_id: str | None = None,
    user_id: str | None = None,
    initial_load_limit: int = 200,
    monitored_folders: list[str] | None = None,
    extra_tools: list[str] | None = None,
    telegram_chat_id: str | None = None,
    telegram_keyword: str | None = None,
    enabled: bool = True,
) -> dict:
    """Insert a new email_accounts row and return it re-read via get_account().

    Passwords are stored encrypted (_encrypt); list fields are JSON-encoded.
    telegram_keyword is lowercased/stripped, and '' collapses to NULL.
    account_type is 'trigger' or 'handling' (see listener.py docstring) —
    the value is stored as given, not validated here.
    """
    now = _now()
    account_id = str(uuid.uuid4())
    # Defaults: monitor INBOX only, no extra notification tools.
    folders_json = json.dumps(monitored_folders or ["INBOX"])
    extra_tools_json = json.dumps(extra_tools or [])
    pool = await get_pool()
    await pool.execute(
        """
        INSERT INTO email_accounts (
            id, user_id, label, account_type,
            imap_host, imap_port, imap_username, imap_password,
            smtp_host, smtp_port, smtp_username, smtp_password,
            agent_id, enabled, initial_load_done, initial_load_limit,
            monitored_folders, extra_tools, telegram_chat_id, telegram_keyword,
            paused, created_at, updated_at
        ) VALUES ($1,$2,$3,$4,$5,$6,$7,$8,$9,$10,$11,$12,$13,$14,$15,$16,$17,$18,$19,$20,$21,$22,$23)
        """,
        # Positional args must stay in exactly the column order above.
        account_id, user_id, label, account_type,
        imap_host, int(imap_port), imap_username, _encrypt(imap_password),
        smtp_host, int(smtp_port) if smtp_port else None,
        smtp_username, _encrypt(smtp_password) if smtp_password else None,
        agent_id, enabled, False, int(initial_load_limit),
        folders_json, extra_tools_json, telegram_chat_id or None,
        (telegram_keyword or "").lower().strip() or None,
        False, now, now,
    )
    # NOTE(review): annotated -> dict; get_account() can in principle return
    # None, but the row was just inserted so a miss is not expected here.
    return await get_account(account_id)
async def update_account(account_id: str, **fields) -> bool:
    """Update columns on an email_accounts row.

    - imap_password / smtp_password: encrypted when non-empty; an empty value
      is dropped so a blank form field never clears a stored secret.
    - monitored_folders / extra_tools: JSON-encoded when given as lists.
    - telegram_keyword: normalised like create_account() — lowercased,
      stripped, and '' collapses to None (stored as NULL).
    - imap_port / smtp_port: coerced to int when not None.

    NOTE: field NAMES are interpolated into the SQL; callers must only pass
    trusted column names (values themselves are parameterised).

    Returns True when a row was updated.
    """
    fields["updated_at"] = _now()
    if "imap_password" in fields:
        if fields["imap_password"]:
            fields["imap_password"] = _encrypt(fields["imap_password"])
        else:
            del fields["imap_password"]  # don't clear on empty string
    if "smtp_password" in fields:
        if fields["smtp_password"]:
            fields["smtp_password"] = _encrypt(fields["smtp_password"])
        else:
            del fields["smtp_password"]
    if "monitored_folders" in fields and isinstance(fields["monitored_folders"], list):
        fields["monitored_folders"] = json.dumps(fields["monitored_folders"])
    if "extra_tools" in fields and isinstance(fields["extra_tools"], list):
        fields["extra_tools"] = json.dumps(fields["extra_tools"])
    if "telegram_keyword" in fields:
        # Same normalisation as create_account() so keyword lookups stay
        # case-insensitive and '' is stored as NULL rather than ''.
        fields["telegram_keyword"] = (fields["telegram_keyword"] or "").lower().strip() or None
    if "imap_port" in fields and fields["imap_port"] is not None:
        fields["imap_port"] = int(fields["imap_port"])
    if "smtp_port" in fields and fields["smtp_port"] is not None:
        fields["smtp_port"] = int(fields["smtp_port"])
    # Build "col = $n" pairs; $<last> is the id in the WHERE clause.
    set_parts = []
    values: list[Any] = []
    for i, (k, v) in enumerate(fields.items(), start=1):
        set_parts.append(f"{k} = ${i}")
        values.append(v)
    id_param = len(fields) + 1
    values.append(account_id)
    pool = await get_pool()
    status = await pool.execute(
        f"UPDATE email_accounts SET {', '.join(set_parts)} WHERE id = ${id_param}",
        *values,
    )
    return _rowcount(status) > 0
async def delete_account(account_id: str) -> bool:
    """Remove the account row; True when something was actually deleted."""
    pool = await get_pool()
    result = await pool.execute("DELETE FROM email_accounts WHERE id = $1", account_id)
    affected = _rowcount(result)
    return affected > 0
async def pause_account(account_id: str) -> bool:
    """Set paused=TRUE on the account. Always returns True."""
    pool = await get_pool()
    stamp = _now()
    await pool.execute(
        "UPDATE email_accounts SET paused = TRUE, updated_at = $1 WHERE id = $2",
        stamp,
        account_id,
    )
    return True
async def resume_account(account_id: str) -> bool:
    """Clear the paused flag on the account. Always returns True."""
    pool = await get_pool()
    stamp = _now()
    await pool.execute(
        "UPDATE email_accounts SET paused = FALSE, updated_at = $1 WHERE id = $2",
        stamp,
        account_id,
    )
    return True
async def toggle_account(account_id: str) -> bool:
    """Flip the enabled flag in SQL (NOT enabled). Always returns True."""
    pool = await get_pool()
    stamp = _now()
    await pool.execute(
        "UPDATE email_accounts SET enabled = NOT enabled, updated_at = $1 WHERE id = $2",
        stamp,
        account_id,
    )
    return True
async def mark_initial_load_done(account_id: str) -> None:
    """Record that the one-time initial mailbox index has completed."""
    pool = await get_pool()
    stamp = _now()
    await pool.execute(
        "UPDATE email_accounts SET initial_load_done = TRUE, updated_at = $1 WHERE id = $2",
        stamp,
        account_id,
    )
# ── Helpers ───────────────────────────────────────────────────────────────────
def _decrypt_row(row: dict) -> dict:
"""Decrypt password fields in-place. Safe to call on any email_accounts row."""
if row.get("imap_password"):
try:
row["imap_password"] = _decrypt(row["imap_password"])
except Exception:
row["imap_password"] = ""
if row.get("smtp_password"):
try:
row["smtp_password"] = _decrypt(row["smtp_password"])
except Exception:
row["smtp_password"] = None
if row.get("monitored_folders") and isinstance(row["monitored_folders"], str):
try:
row["monitored_folders"] = json.loads(row["monitored_folders"])
except Exception:
row["monitored_folders"] = ["INBOX"]
if isinstance(row.get("extra_tools"), str):
try:
row["extra_tools"] = json.loads(row["extra_tools"])
except Exception:
row["extra_tools"] = []
elif row.get("extra_tools") is None:
row["extra_tools"] = []
# Convert UUID to str for JSON serialisation
if row.get("id") and not isinstance(row["id"], str):
row["id"] = str(row["id"])
return row
def mask_account(account: dict) -> dict:
    """Return a copy safe for API responses: passwords become presence booleans."""
    has_imap = bool(account.get("imap_password"))
    has_smtp = bool(account.get("smtp_password"))
    return {**account, "imap_password": has_imap, "smtp_password": has_smtp}

642
server/inbox/listener.py Normal file
View File

@@ -0,0 +1,642 @@
"""
inbox/listener.py — Multi-account IMAP listener (async).
EmailAccountListener: one instance per email_accounts row.
- account_type='trigger': IMAP IDLE on INBOX, keyword → agent dispatch
- account_type='handling': poll monitored folders every 60s, run handling agent
InboxListenerManager: pool of listeners keyed by account_id (UUID str).
Backward-compatible shims: .status / .reconnect() / .stop() act on the
global trigger account (user_id IS NULL, account_type='trigger').
"""
from __future__ import annotations
import asyncio
import email as email_lib
import logging
import re
import smtplib
import ssl
from datetime import datetime, timezone
from email.mime.text import MIMEText
import aioimaplib
from ..database import credential_store, email_whitelist_store
from .accounts import list_accounts_enabled, mark_initial_load_done
from .triggers import get_enabled_triggers
logger = logging.getLogger(__name__)
_IDLE_TIMEOUT = 28 * 60 # 28 min — IMAP servers drop IDLE at ~30 min
_POLL_INTERVAL = 60 # seconds between polls for handling accounts
_MAX_BACKOFF = 60
# ── Per-account listener ───────────────────────────────────────────────────────
class EmailAccountListener:
    """Manages IMAP connection and dispatch for one email_accounts row.

    Two operating modes, chosen by the row's account_type:
      - 'trigger':  IMAP IDLE on INBOX; a matching trigger word dispatches an
        agent and the result is mailed back to the sender via SMTP.
      - 'handling': poll the monitored folders every _POLL_INTERVAL seconds
        and hand each unseen message to the assigned handling agent.
    """

    def __init__(self, account: dict) -> None:
        # `account` is a decrypted email_accounts row (see accounts._decrypt_row).
        self._account = account
        self._account_id = str(account["id"])
        self._type = account.get("account_type", "handling")
        self._task: asyncio.Task | None = None  # background run-loop task
        self._status = "idle"  # idle | connected | initial_load | error | stopped
        self._error: str | None = None  # last error message, if any
        self._last_seen: datetime | None = None  # time of last successful check
        self._dispatched: set[str] = set()  # folder:num pairs dispatched this session

    # ── Lifecycle ─────────────────────────────────────────────────────────────
    def start(self) -> None:
        """Start the background loop unless it is already running."""
        if self._task is None or self._task.done():
            label = self._account.get("label", self._account_id[:8])
            name = f"inbox-{self._type}-{label}"
            self._task = asyncio.create_task(self._run_loop(), name=name)

    def stop(self) -> None:
        """Cancel the background loop (if running) and mark the listener stopped."""
        if self._task and not self._task.done():
            self._task.cancel()
        self._status = "stopped"

    def reconnect(self) -> None:
        """Stop, reset status, and restart the background loop."""
        self.stop()
        self._status = "idle"
        self.start()

    @property
    def status_dict(self) -> dict:
        """JSON-serialisable status snapshot (consumed by admin/status routes)."""
        return {
            "account_id": self._account_id,
            "label": self._account.get("label", ""),
            "account_type": self._type,
            "user_id": self._account.get("user_id"),
            "status": self._status,
            "error": self._error,
            "last_seen": self._last_seen.isoformat() if self._last_seen else None,
        }

    def update_account(self, account: dict) -> None:
        """Refresh account data (e.g. after settings change)."""
        self._account = account

    # ── Main loop ─────────────────────────────────────────────────────────────
    async def _run_loop(self) -> None:
        """Supervisor: run the mode-specific loop, retrying with capped backoff."""
        backoff = 5
        while True:
            try:
                if self._type == "trigger":
                    await self._trigger_loop()
                else:
                    await self._handling_loop()
                backoff = 5  # loop returned cleanly — reset backoff
            except asyncio.CancelledError:
                self._status = "stopped"
                break
            except Exception as e:
                self._status = "error"
                self._error = str(e)
                logger.warning(
                    "[inbox] %s account %s error: %s — retry in %ds",
                    self._type, self._account.get("label"), e, backoff
                )
                await asyncio.sleep(backoff)
                backoff = min(backoff * 2, _MAX_BACKOFF)  # exponential, capped

    # ── Trigger account (IMAP IDLE on INBOX) ──────────────────────────────────
    async def _trigger_loop(self) -> None:
        """Connect, drain pre-existing UNSEEN mail, then IDLE for new arrivals."""
        host = self._account["imap_host"]
        port = int(self._account.get("imap_port") or 993)
        username = self._account["imap_username"]
        password = self._account["imap_password"]
        client = aioimaplib.IMAP4_SSL(host=host, port=port, timeout=30)
        await client.wait_hello_from_server()
        res = await client.login(username, password)
        if res.result != "OK":
            raise RuntimeError(f"IMAP login failed: {res.result}")
        res = await client.select("INBOX")
        if res.result != "OK":
            raise RuntimeError("IMAP SELECT INBOX failed")
        self._status = "connected"
        self._error = None
        logger.info("[inbox] trigger '%s' connected as %s", self._account.get("label"), username)
        # Process any unseen messages already in inbox
        res = await client.search("UNSEEN")
        if res.result == "OK" and res.lines and res.lines[0].strip():
            for num in res.lines[0].split():
                await self._process_trigger(client, num.decode() if isinstance(num, bytes) else str(num))
            await client.expunge()
        while True:
            # IDLE until the server pushes an update (or _IDLE_TIMEOUT fires),
            # then end IDLE before issuing any further IMAP commands.
            idle_task = await client.idle_start(timeout=_IDLE_TIMEOUT)
            await client.wait_server_push()
            client.idle_done()
            await asyncio.wait_for(idle_task, timeout=5)
            self._last_seen = datetime.now(timezone.utc)
            res = await client.search("UNSEEN")
            if res.result == "OK" and res.lines and res.lines[0].strip():
                for num in res.lines[0].split():
                    await self._process_trigger(client, num.decode() if isinstance(num, bytes) else str(num))
                await client.expunge()

    async def _process_trigger(self, client: aioimaplib.IMAP4_SSL, num: str) -> None:
        """Fetch one message, match a trigger word, run the agent, reply via SMTP."""
        res = await client.fetch(num, "(RFC822)")
        if res.result != "OK" or len(res.lines) < 2:
            return
        raw = res.lines[1]
        msg = email_lib.message_from_bytes(raw)
        from_addr = email_lib.utils.parseaddr(msg.get("From", ""))[1].lower().strip()
        subject = msg.get("Subject", "(no subject)")
        body = _extract_body(msg)
        from ..security import sanitize_external_content
        body = await sanitize_external_content(body, source="inbox_email")
        logger.info("[inbox] trigger '%s': message from %s%s",
                    self._account.get("label"), from_addr, subject)
        # Trigger mail is consume-once: flag deleted now; caller expunges.
        await client.store(num, "+FLAGS", "\\Deleted")
        # Load whitelist and check trigger word first so non-whitelisted emails
        # without a trigger are silently dropped (no reply that reveals the system).
        account_id = self._account_id  # NOTE(review): currently unused below
        user_id = self._account.get("user_id")
        allowed = {e["email"].lower() for e in await email_whitelist_store.list()}
        is_whitelisted = from_addr in allowed
        # Trigger matching — scoped to this account
        triggers = await get_enabled_triggers(user_id=user_id or "GLOBAL")
        body_lower = body.lower()
        # A trigger matches when every token of its trigger_word occurs in the body.
        matched = next(
            (t for t in triggers
             if all(tok in body_lower for tok in t["trigger_word"].lower().split())),
            None,
        )
        if matched is None:
            if is_whitelisted:
                # Trusted sender — let them know no trigger was found
                logger.info("[inbox] trigger '%s': no match for %s", self._account.get("label"), from_addr)
                await self._send_smtp_reply(
                    from_addr, f"Re: {subject}",
                    "I received your email but could not find a valid trigger word in the message body."
                )
            else:
                # Unknown sender with no trigger — silently drop, reveal nothing
                logger.info("[inbox] %s not whitelisted and no trigger — silently dropping", from_addr)
            return
        if not is_whitelisted:
            logger.info("[inbox] %s not whitelisted but trigger matched — running agent (reply blocked by output validation)", from_addr)
        logger.info("[inbox] trigger '%s': matched '%s' — running agent %s",
                    self._account.get("label"), matched["trigger_word"], matched["agent_id"])
        # Session keyed per sender (and per owner for user-scoped accounts).
        session_id = (
            f"inbox:{from_addr}" if not user_id
            else f"inbox:{user_id}:{from_addr}"
        )
        agent_input = (
            f"You received an email.\n"
            f"From: {from_addr}\n"
            f"Subject: {subject}\n\n"
            f"{body}\n\n"
            f"Please process this request. "
            f"Your response will be sent as an email reply to {from_addr}."
        )
        try:
            from ..agents.runner import agent_runner
            result_text = await agent_runner.run_agent_and_wait(
                matched["agent_id"],
                override_message=agent_input,
                session_id=session_id,
            )
        except Exception as e:
            logger.error("[inbox] trigger agent run failed: %s", e)
            result_text = f"Sorry, an error occurred while processing your request: {e}"
        await self._send_smtp_reply(from_addr, f"Re: {subject}", result_text)

    async def _send_smtp_reply(self, to: str, subject: str, body: str) -> None:
        """Best-effort SMTP reply; SMTP settings fall back to the IMAP ones."""
        try:
            from_addr = self._account["imap_username"]
            smtp_host = self._account.get("smtp_host") or self._account["imap_host"]
            smtp_port = int(self._account.get("smtp_port") or 465)
            smtp_user = self._account.get("smtp_username") or from_addr
            smtp_pass = self._account.get("smtp_password") or self._account["imap_password"]
            mime = MIMEText(body, "plain", "utf-8")
            mime["From"] = from_addr
            mime["To"] = to
            mime["Subject"] = subject
            ctx = ssl.create_default_context()
            loop = asyncio.get_event_loop()
            # Blocking smtplib send is pushed to the default thread executor.
            await loop.run_in_executor(
                None,
                lambda: _smtp_send(smtp_host, smtp_port, smtp_user, smtp_pass, ctx, from_addr, to, mime),
            )
        except Exception as e:
            logger.error("[inbox] SMTP reply failed to %s: %s", to, e)

    # ── Handling account (poll monitored folders) ─────────────────────────────
    async def _handling_loop(self) -> None:
        """Poll each monitored folder for UNSEEN mail and dispatch per message."""
        host = self._account["imap_host"]
        port = int(self._account.get("imap_port") or 993)
        username = self._account["imap_username"]
        password = self._account["imap_password"]
        monitored = self._account.get("monitored_folders") or ["INBOX"]
        if isinstance(monitored, str):
            import json
            monitored = json.loads(monitored)
        # Initial load to 2nd Brain (first connect only)
        if not self._account.get("initial_load_done"):
            self._status = "initial_load"
            await self._run_initial_load(host, port, username, password, monitored)
        self._status = "connected"
        self._error = None
        logger.info("[inbox] handling '%s' ready, polling %s",
                    self._account.get("label"), monitored)
        # Track last-seen message counts per folder
        # NOTE(review): seen_counts is never written/read below — per-message
        # dedup is done via self._dispatched instead; confirm before removing.
        seen_counts: dict[str, int] = {}
        while True:
            # Reload account state each cycle so pause/resume takes effect without restart
            from .accounts import get_account as _get_account
            fresh = await _get_account(self._account["id"])
            if fresh:
                self._account = fresh
                # Pick up any credential/config changes (e.g. password update)
                host = fresh["imap_host"]
                port = int(fresh.get("imap_port") or 993)
                username = fresh["imap_username"]
                password = fresh["imap_password"]
                monitored = fresh.get("monitored_folders") or ["INBOX"]
                if isinstance(monitored, str):
                    import json as _json
                    monitored = _json.loads(monitored)
            if self._account.get("paused"):
                logger.debug("[inbox] handling '%s' is paused — skipping poll", self._account.get("label"))
                await asyncio.sleep(_POLL_INTERVAL)
                continue
            # Fresh connection per poll cycle; always logged out in `finally`.
            client = aioimaplib.IMAP4_SSL(host=host, port=port, timeout=30)
            try:
                await client.wait_hello_from_server()
                res = await client.login(username, password)
                if res.result != "OK":
                    raise RuntimeError(f"IMAP login failed: {res.result}")
                for folder in monitored:
                    res = await client.select(folder)
                    if res.result != "OK":
                        logger.warning("[inbox] handling: cannot select %r — skipping", folder)
                        continue
                    res = await client.search("UNSEEN")
                    if res.result != "OK" or not res.lines or not res.lines[0].strip():
                        continue
                    for num in res.lines[0].split():
                        num_s = num.decode() if isinstance(num, bytes) else str(num)
                        key = f"{folder}:{num_s}"
                        # Dedup within this process lifetime only.
                        if key not in self._dispatched:
                            self._dispatched.add(key)
                            await self._process_handling(client, num_s, folder)
                self._last_seen = datetime.now(timezone.utc)
            except asyncio.CancelledError:
                raise
            except Exception as e:
                self._status = "error"
                self._error = str(e)
                logger.warning("[inbox] handling '%s' poll error: %s", self._account.get("label"), e)
            finally:
                try:
                    await client.logout()
                except Exception:
                    pass
            await asyncio.sleep(_POLL_INTERVAL)

    async def _run_initial_load(
        self, host: str, port: int, username: str, password: str, folders: list[str]
    ) -> None:
        """Ingest email metadata into 2nd Brain. Best-effort — failure is non-fatal."""
        # Skip entirely (but still mark done) when the Brain DB is unavailable.
        try:
            from ..brain.database import get_pool as _brain_pool
            if _brain_pool() is None:
                logger.info("[inbox] handling '%s': no Brain DB — skipping initial load",
                            self._account.get("label"))
                await mark_initial_load_done(self._account_id)
                return
        except Exception:
            logger.info("[inbox] handling '%s': Brain not available — skipping initial load",
                        self._account.get("label"))
            await mark_initial_load_done(self._account_id)
            return
        limit = int(self._account.get("initial_load_limit") or 200)
        owner_user_id = self._account.get("user_id")
        total_ingested = 0
        try:
            client = aioimaplib.IMAP4_SSL(host=host, port=port, timeout=30)
            await client.wait_hello_from_server()
            res = await client.login(username, password)
            if res.result != "OK":
                raise RuntimeError(f"Login failed: {res.result}")
            for folder in folders:
                # readonly select: indexing must not alter message flags.
                res = await client.select(folder, readonly=True)
                if res.result != "OK":
                    continue
                res = await client.search("ALL")
                if res.result != "OK" or not res.lines or not res.lines[0].strip():
                    continue
                nums = res.lines[0].split()
                nums = nums[-limit:]  # most recent N
                batch_lines = [f"Initial email index for folder: {folder}\n"]
                for num in nums:
                    num_s = num.decode() if isinstance(num, bytes) else str(num)
                    # Headers only (BODY.PEEK keeps \Seen untouched).
                    res2 = await client.fetch(
                        num_s,
                        "(FLAGS BODY.PEEK[HEADER.FIELDS (FROM TO SUBJECT DATE)])"
                    )
                    if res2.result != "OK" or len(res2.lines) < 2:
                        continue
                    msg = email_lib.message_from_bytes(res2.lines[1])
                    flags_str = (res2.lines[0].decode() if isinstance(res2.lines[0], bytes)
                                 else str(res2.lines[0]))
                    is_unread = "\\Seen" not in flags_str
                    batch_lines.append(
                        f"uid={num_s} from={msg.get('From','')} "
                        f"subject={msg.get('Subject','')} date={msg.get('Date','')} "
                        f"unread={is_unread}"
                    )
                    total_ingested += 1
                # Ingest this folder's batch as one Brain entry
                if len(batch_lines) > 1:
                    content = "\n".join(batch_lines)
                    try:
                        from ..brain.ingest import ingest_thought
                        await ingest_thought(content=content, user_id=owner_user_id)
                    except Exception as e:
                        logger.warning("[inbox] Brain ingest failed for %r: %s", folder, e)
            await client.logout()
        except Exception as e:
            logger.warning("[inbox] handling '%s' initial load error: %s",
                           self._account.get("label"), e)
        # Marked done even on error so a flaky mailbox can't loop the load forever.
        await mark_initial_load_done(self._account_id)
        logger.info("[inbox] handling '%s': initial load done — %d emails indexed",
                    self._account.get("label"), total_ingested)

    async def _process_handling(
        self, client: aioimaplib.IMAP4_SSL, num: str, folder: str
    ) -> None:
        """Fetch one email and dispatch to the handling agent."""
        # Use BODY.PEEK[] to avoid auto-marking as \Seen
        res = await client.fetch(num, "(FLAGS BODY.PEEK[])")
        if res.result != "OK" or len(res.lines) < 2:
            return
        raw = res.lines[1]
        msg = email_lib.message_from_bytes(raw)
        from_addr = email_lib.utils.parseaddr(msg.get("From", ""))[1].lower().strip()
        subject = msg.get("Subject", "(no subject)")
        date = msg.get("Date", "")
        body = _extract_body(msg)[:3000]  # cap body passed to the agent
        # Do NOT mark as \Seen — the agent decides what flags to set
        agent_id = self._account.get("agent_id")
        if not agent_id:
            logger.warning("[inbox] handling '%s': no agent assigned — skipping",
                           self._account.get("label"))
            return
        email_summary = (
            f"New email received:\n"
            f"From: {from_addr}\n"
            f"Subject: {subject}\n"
            f"Date: {date}\n"
            f"Folder: {folder}\n"
            f"UID: {num}\n\n"
            f"{body}"
        )
        logger.info("[inbox] handling '%s': dispatching to agent %s (from=%s)",
                    self._account.get("label"), agent_id, from_addr)
        try:
            from ..agents.runner import agent_runner
            from ..tools.email_handling_tool import EmailHandlingTool
            extra_tools = [EmailHandlingTool(account=self._account)]
            # Optionally include notification tools the user enabled for this account
            enabled_extras = self._account.get("extra_tools") or []
            if "telegram" in enabled_extras:
                from ..tools.telegram_tool import BoundTelegramTool
                chat_id = self._account.get("telegram_chat_id") or ""
                keyword = self._account.get("telegram_keyword") or ""
                if chat_id:
                    extra_tools.append(BoundTelegramTool(chat_id=chat_id, reply_keyword=keyword or None))
            if "pushover" in enabled_extras:
                from ..tools.pushover_tool import PushoverTool
                extra_tools.append(PushoverTool())
            # BoundFilesystemTool: scoped to user's provisioned folder
            user_id = self._account.get("user_id")
            data_folder = None
            if user_id:
                from ..users import get_user_folder
                data_folder = await get_user_folder(str(user_id))
                if data_folder:
                    from ..tools.bound_filesystem_tool import BoundFilesystemTool
                    import os as _os
                    _os.makedirs(data_folder, exist_ok=True)
                    extra_tools.append(BoundFilesystemTool(base_path=data_folder))
            # Build context message with memory/reasoning file paths
            imap_user = self._account.get("imap_username", "account")
            memory_hint = ""
            if data_folder:
                import os as _os2
                mem_path = _os2.path.join(data_folder, f"memory_{imap_user}.md")
                log_path = _os2.path.join(data_folder, f"reasoning_{imap_user}.md")
                memory_hint = (
                    f"\n\nFilesystem context:\n"
                    f"- Memory file: {mem_path}\n"
                    f"- Reasoning log: {log_path}\n"
                    f"Read the memory file before acting. "
                    f"Append a reasoning entry to the reasoning log for each email you act on. "
                    f"If either file doesn't exist yet, create it with an appropriate template."
                )
            await agent_runner.run_agent_and_wait(
                agent_id,
                override_message=email_summary + memory_hint,
                extra_tools=extra_tools,
                force_only_extra_tools=True,
            )
        except Exception as e:
            logger.error("[inbox] handling agent dispatch failed: %s", e)
# ── Manager ───────────────────────────────────────────────────────────────────
class InboxListenerManager:
"""
Pool of EmailAccountListener instances keyed by account_id (UUID str).
Backward-compatible shims:
.status — status of the global trigger account
.reconnect() — reconnect the global trigger account
.stop() — stop the global trigger account
"""
def __init__(self) -> None:
self._listeners: dict[str, EmailAccountListener] = {}
async def start_all(self) -> None:
"""Load all enabled email_accounts from DB and start listeners."""
accounts = await list_accounts_enabled()
for account in accounts:
account_id = str(account["id"])
if account_id not in self._listeners:
listener = EmailAccountListener(account)
self._listeners[account_id] = listener
self._listeners[account_id].start()
logger.info("[inbox] started %d account listener(s)", len(accounts))
def start(self) -> None:
"""Backward compat — schedules start_all() as a coroutine."""
asyncio.create_task(self.start_all())
def stop(self) -> None:
"""Stop global trigger account listener (backward compat)."""
for listener in self._listeners.values():
if (listener._account.get("account_type") == "trigger"
and listener._account.get("user_id") is None):
listener.stop()
return
def stop_all(self) -> None:
for listener in self._listeners.values():
listener.stop()
self._listeners.clear()
def reconnect(self) -> None:
"""Reconnect global trigger account (backward compat)."""
for listener in self._listeners.values():
if (listener._account.get("account_type") == "trigger"
and listener._account.get("user_id") is None):
listener.reconnect()
return
def start_account(self, account_id: str, account: dict) -> None:
"""Start or restart a specific account listener."""
account_id = str(account_id)
if account_id in self._listeners:
self._listeners[account_id].stop()
listener = EmailAccountListener(account)
self._listeners[account_id] = listener
listener.start()
def stop_account(self, account_id: str) -> None:
account_id = str(account_id)
if account_id in self._listeners:
self._listeners[account_id].stop()
del self._listeners[account_id]
def restart_account(self, account_id: str, account: dict) -> None:
self.start_account(account_id, account)
def start_for_user(self, user_id: str) -> None:
"""Backward compat — reconnect all listeners for this user."""
asyncio.create_task(self._restart_user(user_id))
async def _restart_user(self, user_id: str) -> None:
from .accounts import list_accounts
accounts = await list_accounts(user_id=user_id)
for account in accounts:
if account.get("enabled"):
self.start_account(str(account["id"]), account)
def stop_for_user(self, user_id: str) -> None:
to_stop = [
aid for aid, lst in self._listeners.items()
if lst._account.get("user_id") == user_id
]
for aid in to_stop:
self._listeners[aid].stop()
del self._listeners[aid]
def reconnect_for_user(self, user_id: str) -> None:
self.start_for_user(user_id)
@property
def status(self) -> dict:
"""Global trigger account status (backward compat for admin routes)."""
for listener in self._listeners.values():
if (listener._account.get("account_type") == "trigger"
and listener._account.get("user_id") is None):
d = listener.status_dict
return {
"configured": True,
"connected": d["status"] == "connected",
"error": d["error"],
"user_id": None,
}
return {"configured": False, "connected": False, "error": None, "user_id": None}
def all_statuses(self) -> list[dict]:
return [lst.status_dict for lst in self._listeners.values()]
# Module-level singleton (backward-compatible name kept)
inbox_listener = InboxListenerManager()
# ── Private helpers ───────────────────────────────────────────────────────────
def _smtp_send(host: str, port: int, user: str, password: str,
               ctx: ssl.SSLContext, from_addr: str, to: str, mime: MIMEText) -> None:
    """Blocking SMTP-over-SSL send; runs inside a thread executor.

    Uses implicit TLS (SMTPS, typically port 465), authenticates, and sends
    `mime` from `from_addr` to the single recipient `to`.
    """
    with smtplib.SMTP_SSL(host, port, context=ctx) as server:
        server.login(user, password)
        server.sendmail(from_addr, [to], mime.as_string())
def _extract_body(msg: email_lib.message.Message) -> str:
if msg.is_multipart():
for part in msg.walk():
if part.get_content_type() == "text/plain":
payload = part.get_payload(decode=True)
return payload.decode("utf-8", errors="replace") if payload else ""
for part in msg.walk():
if part.get_content_type() == "text/html":
payload = part.get_payload(decode=True)
html = payload.decode("utf-8", errors="replace") if payload else ""
return re.sub(r"<[^>]+>", "", html).strip()
else:
payload = msg.get_payload(decode=True)
return payload.decode("utf-8", errors="replace") if payload else ""
return ""

View File

@@ -0,0 +1,146 @@
"""
inbox/telegram_handler.py — Route Telegram /keyword messages to email handling agents.
Called by the global Telegram listener before normal trigger matching.
Returns True if the message was handled (consumed), False to fall through.
"""
from __future__ import annotations
import logging
logger = logging.getLogger(__name__)
# Built-in commands handled directly without agent dispatch
_BUILTIN = {"pause", "resume", "status"}
async def handle_keyword_message(
    chat_id: str,
    user_id: str | None,
    keyword: str,
    message: str,
) -> bool:
    """
    Route a Telegram `/keyword ...` message to its bound email account.

    Returns True if a matching email account was found and the message was
    handled (built-in command executed or agent dispatched); False lets the
    caller fall through to normal trigger matching.
    `message` is the text AFTER the /keyword prefix (stripped).

    Fix: get_account was imported three separate times under three names
    (get_account/_get_account/_get) — collapsed to the single import below.
    """
    from ..database import get_pool
    from .accounts import get_account, pause_account, resume_account
    pool = await get_pool()
    # Find email account matching keyword + chat_id (security: must match bound chat)
    row = await pool.fetchrow(
        "SELECT * FROM email_accounts WHERE telegram_keyword = $1 AND telegram_chat_id = $2",
        keyword.lower(), str(chat_id),
    )
    if row is None:
        return False
    account_id = str(row["id"])
    # Re-read via get_account() so passwords/JSON columns come back decoded.
    account = await get_account(account_id)
    if account is None:
        return False
    label = account.get("label", keyword)
    # ── Built-in commands ────────────────────────────────────────────────────
    cmd = message.strip().lower().split()[0] if message.strip() else ""
    if cmd == "pause":
        await pause_account(account_id)
        # Imported lazily to avoid a circular import with the listener module.
        from ..inbox.listener import inbox_listener
        inbox_listener.stop_account(account_id)
        await _send_reply(chat_id, account, f"⏸ *{label}* listener paused. Send `/{keyword} resume` to restart.")
        logger.info("[telegram-handler] paused account %s (%s)", account_id, label)
        return True
    if cmd == "resume":
        await resume_account(account_id)
        from ..inbox.listener import inbox_listener
        updated = await get_account(account_id)
        if updated:
            inbox_listener.start_account(account_id, updated)
        await _send_reply(chat_id, account, f"▶ *{label}* listener resumed.")
        logger.info("[telegram-handler] resumed account %s (%s)", account_id, label)
        return True
    if cmd == "status":
        enabled = account.get("enabled", False)
        paused = account.get("paused", False)
        state = "paused" if paused else ("enabled" if enabled else "disabled")
        reply = (
            f"📊 *{label}* status\n"
            f"State: {state}\n"
            f"IMAP: {account.get('imap_username', '?')}\n"
            f"Keyword: /{keyword}"
        )
        await _send_reply(chat_id, account, reply)
        return True
    # ── Agent dispatch ───────────────────────────────────────────────────────
    agent_id = str(account.get("agent_id") or "")
    if not agent_id:
        await _send_reply(chat_id, account, f"⚠️ No agent configured for *{label}*.")
        return True
    # Build extra tools (same as email processing dispatch)
    from ..tools.email_handling_tool import EmailHandlingTool
    from ..tools.telegram_tool import BoundTelegramTool
    extra_tools = [EmailHandlingTool(account=account)]
    tg_chat_id = account.get("telegram_chat_id") or ""
    tg_keyword = account.get("telegram_keyword") or ""
    if tg_chat_id:
        extra_tools.append(BoundTelegramTool(chat_id=tg_chat_id, reply_keyword=tg_keyword))
    # Add BoundFilesystemTool scoped to user's provisioned folder
    if user_id:
        from ..users import get_user_folder
        data_folder = await get_user_folder(str(user_id))
        if data_folder:
            from ..tools.bound_filesystem_tool import BoundFilesystemTool
            extra_tools.append(BoundFilesystemTool(base_path=data_folder))
    from ..agents.runner import agent_runner
    task_message = (
        f"The user sent you a message via Telegram:\n\n{message}\n\n"
        f"Respond via Telegram (/{keyword}). "
        f"Read your memory file first if you need context."
    )
    try:
        await agent_runner.run_agent_and_wait(
            agent_id,
            override_message=task_message,
            extra_tools=extra_tools,
            force_only_extra_tools=True,
        )
    except Exception as e:
        logger.error("[telegram-handler] agent dispatch failed for %s: %s", label, e)
        await _send_reply(chat_id, account, f"⚠️ Error dispatching to *{label}* agent: {e}")
    return True
async def _send_reply(chat_id: str, account: dict, text: str) -> None:
    """
    Deliver a Markdown-formatted Telegram message on behalf of *account*.

    Token resolution order:
      1. global credential store key "telegram:bot_token"
      2. the owning user's "telegram_bot_token" setting (when the account
         carries a user_id)

    Best-effort: silently no-ops when no token is available, and send
    failures are logged as warnings rather than raised.
    """
    import httpx
    from ..database import credential_store, user_settings_store

    token = await credential_store.get("telegram:bot_token")
    if not token:
        owner = account.get("user_id")
        if owner:
            token = await user_settings_store.get(str(owner), "telegram_bot_token")
    if not token:
        return

    payload = {"chat_id": chat_id, "text": text, "parse_mode": "Markdown"}
    try:
        async with httpx.AsyncClient(timeout=10) as http:
            await http.post(
                f"https://api.telegram.org/bot{token}/sendMessage",
                json=payload,
            )
    except Exception as e:
        logger.warning("[telegram-handler] reply send failed: %s", e)

125
server/inbox/triggers.py Normal file
View File

@@ -0,0 +1,125 @@
"""
inbox/triggers.py — CRUD for email_triggers table (async).
"""
from __future__ import annotations
import uuid
from datetime import datetime, timezone
from typing import Any
from ..database import _rowcount, get_pool
def _now() -> str:
return datetime.now(timezone.utc).isoformat()
async def list_triggers(user_id: str | None = "GLOBAL") -> list[dict]:
    """
    List email triggers, each joined with its agent's name.

    Scope semantics:
      - user_id="GLOBAL" (default): global triggers only (user_id IS NULL)
      - user_id=None: every trigger (admin view)
      - user_id="<uuid>": triggers owned by that user only
    """
    # Shared SELECT prefix; only the WHERE clause varies by scope.
    base = (
        "SELECT t.*, a.name AS agent_name "
        "FROM email_triggers t LEFT JOIN agents a ON a.id = t.agent_id "
    )
    pool = await get_pool()
    if user_id == "GLOBAL":
        rows = await pool.fetch(base + "WHERE t.user_id IS NULL ORDER BY t.created_at")
    elif user_id is None:
        rows = await pool.fetch(base + "ORDER BY t.created_at")
    else:
        rows = await pool.fetch(
            base + "WHERE t.user_id = $1 ORDER BY t.created_at", user_id
        )
    return [dict(row) for row in rows]
async def create_trigger(
    trigger_word: str,
    agent_id: str,
    description: str = "",
    enabled: bool = True,
    user_id: str | None = None,
) -> dict:
    """
    Insert a new email trigger row and return it as a dict.

    user_id=None creates a global trigger (visible to the "GLOBAL" scope
    of list_triggers). created_at/updated_at share the same timestamp.
    """
    record = {
        "id": str(uuid.uuid4()),
        "trigger_word": trigger_word,
        "agent_id": agent_id,
        "description": description,
        "enabled": enabled,
        "user_id": user_id,
        "created_at": _now(),
        "updated_at": _now(),
    }
    # Keep both timestamps identical on creation.
    record["updated_at"] = record["created_at"]
    pool = await get_pool()
    await pool.execute(
        """
        INSERT INTO email_triggers
            (id, trigger_word, agent_id, description, enabled, user_id, created_at, updated_at)
        VALUES ($1, $2, $3, $4, $5, $6, $7, $8)
        """,
        record["id"], record["trigger_word"], record["agent_id"],
        record["description"], record["enabled"], record["user_id"],
        record["created_at"], record["updated_at"],
    )
    return record
async def update_trigger(id: str, **fields) -> bool:
    """
    Patch arbitrary columns of a trigger row; always refreshes updated_at.

    Returns True when a row matched the id.
    NOTE(review): field *names* are interpolated into the SQL text (values
    are parameterized) — callers must only pass trusted, known column keys.
    """
    fields["updated_at"] = _now()
    columns = list(fields)
    assignments = ", ".join(
        f"{column} = ${position}" for position, column in enumerate(columns, start=1)
    )
    params: list[Any] = [fields[column] for column in columns]
    params.append(id)
    pool = await get_pool()
    status = await pool.execute(
        f"UPDATE email_triggers SET {assignments} WHERE id = ${len(params)}",
        *params,
    )
    return _rowcount(status) > 0
async def delete_trigger(id: str) -> bool:
    """Remove a trigger row. Returns True when a row was actually deleted."""
    pool = await get_pool()
    result = await pool.execute("DELETE FROM email_triggers WHERE id = $1", id)
    deleted = _rowcount(result)
    return deleted > 0
async def toggle_trigger(id: str) -> bool:
    """
    Flip a trigger's enabled flag in place and refresh updated_at.

    Returns True when a row matched the id, False otherwise.

    Fix: previously this always returned True even when no row matched,
    inconsistent with update_trigger/delete_trigger which report the
    affected-row count via _rowcount.
    """
    pool = await get_pool()
    status = await pool.execute(
        "UPDATE email_triggers SET enabled = NOT enabled, updated_at = $1 WHERE id = $2",
        _now(), id,
    )
    return _rowcount(status) > 0
async def get_enabled_triggers(user_id: str | None = "GLOBAL") -> list[dict]:
    """
    Return enabled triggers scoped by user_id.

    Same scope semantics as list_triggers:
      "GLOBAL" -> user_id IS NULL; None -> all; "<uuid>" -> that user only.
    """
    query = "SELECT * FROM email_triggers WHERE enabled = TRUE"
    params: tuple = ()
    if user_id == "GLOBAL":
        query += " AND user_id IS NULL"
    elif user_id is not None:
        query += " AND user_id = $1"
        params = (user_id,)
    # user_id is None -> no extra filter (admin view of all enabled triggers)
    pool = await get_pool()
    rows = await pool.fetch(query, *params)
    return [dict(row) for row in rows]