From 25d05e0e86c664ed6a9e680dd78e5140b4a28b04 Mon Sep 17 00:00:00 2001 From: chelsea Date: Thu, 12 Feb 2026 22:11:52 -0600 Subject: [PATCH] first commit --- .env | 1 + DOCUMENTATION.md | 2342 +++++++++++++++++ Dockerfile | 10 + HOOKS.md | 44 + README.md | 251 ++ ai/__pycache__/parser.cpython-312.pyc | Bin 0 -> 6051 bytes ai/ai_config.json | 15 + ai/parser.py | 151 ++ api/__pycache__/main.cpython-312.pyc | Bin 0 -> 7190 bytes api/main.py | 139 + .../__pycache__/example.cpython-312.pyc | Bin 0 -> 3585 bytes api/routes/example.py | 56 + api/routes/medications.py | 160 ++ api/routes/routines.py | 204 ++ bot/__pycache__/bot.cpython-312.pyc | Bin 0 -> 12741 bytes .../command_registry.cpython-312.pyc | Bin 0 -> 1319 bytes bot/bot.py | 272 ++ bot/command_registry.py | 35 + .../__pycache__/example.cpython-312.pyc | Bin 0 -> 2710 bytes bot/commands/example.py | 63 + bot/commands/medications.py | 30 + bot/commands/routines.py | 30 + core/__pycache__/auth.cpython-312.pyc | Bin 0 -> 2693 bytes .../__pycache__/notifications.cpython-312.pyc | Bin 0 -> 3151 bytes core/__pycache__/postgres.cpython-312.pyc | Bin 0 -> 13677 bytes core/__pycache__/users.cpython-312.pyc | Bin 0 -> 4072 bytes core/auth.py | 58 + core/notifications.py | 74 + core/postgres.py | 264 ++ core/users.py | 96 + diagrams/README.md | 12 + diagrams/flow.mmd | 15 + diagrams/system.mmd | 61 + docker-compose.yml | 45 + requirements.txt | 7 + scheduler/__pycache__/daemon.cpython-312.pyc | Bin 0 -> 1476 bytes scheduler/daemon.py | 57 + 37 files changed, 4492 insertions(+) create mode 100644 .env create mode 100644 DOCUMENTATION.md create mode 100644 Dockerfile create mode 100644 HOOKS.md create mode 100644 README.md create mode 100644 ai/__pycache__/parser.cpython-312.pyc create mode 100644 ai/ai_config.json create mode 100644 ai/parser.py create mode 100644 api/__pycache__/main.cpython-312.pyc create mode 100644 api/main.py create mode 100644 api/routes/__pycache__/example.cpython-312.pyc create mode 100644 
api/routes/example.py create mode 100644 api/routes/medications.py create mode 100644 api/routes/routines.py create mode 100644 bot/__pycache__/bot.cpython-312.pyc create mode 100644 bot/__pycache__/command_registry.cpython-312.pyc create mode 100644 bot/bot.py create mode 100644 bot/command_registry.py create mode 100644 bot/commands/__pycache__/example.cpython-312.pyc create mode 100644 bot/commands/example.py create mode 100644 bot/commands/medications.py create mode 100644 bot/commands/routines.py create mode 100644 core/__pycache__/auth.cpython-312.pyc create mode 100644 core/__pycache__/notifications.cpython-312.pyc create mode 100644 core/__pycache__/postgres.cpython-312.pyc create mode 100644 core/__pycache__/users.cpython-312.pyc create mode 100644 core/auth.py create mode 100644 core/notifications.py create mode 100644 core/postgres.py create mode 100644 core/users.py create mode 100644 diagrams/README.md create mode 100644 diagrams/flow.mmd create mode 100644 diagrams/system.mmd create mode 100644 docker-compose.yml create mode 100644 requirements.txt create mode 100644 scheduler/__pycache__/daemon.cpython-312.pyc create mode 100644 scheduler/daemon.py diff --git a/.env b/.env new file mode 100644 index 0000000..99139f8 --- /dev/null +++ b/.env @@ -0,0 +1 @@ +DB_PASS=y8Khu7pJQZq6ywFDIJiqpx4zYmclHGHw diff --git a/DOCUMENTATION.md b/DOCUMENTATION.md new file mode 100644 index 0000000..ded32cc --- /dev/null +++ b/DOCUMENTATION.md @@ -0,0 +1,2342 @@ +====== LLM Bot Framework - Complete Technical Documentation ====== + +===== Introduction ===== + +The LLM Bot Framework is a template for building Discord bots powered by Large Language Models (LLMs) with natural language command parsing. It provides a complete architecture for creating domain-specific bots that can understand and execute user commands expressed in natural language. 
+ +The framework demonstrates a **modular, layered architecture** that separates concerns cleanly: + + * **AI Layer** - Natural language parsing via LLM + * **Bot Layer** - Discord client with session management + * **API Layer** - RESTful endpoints with JWT authentication + * **Core Layer** - Database, authentication, notifications + * **Scheduler Layer** - Background task processing + +===== Architecture Overview ===== + +==== The Big Picture ==== + +At its core, the framework implements a **request-response flow** that transforms natural language into structured actions: + + +flowchart TD + A["User Message (Discord DM)"] --> B["bot/bot.py"] + B --> C["ai/parser.py (LLM)"] + C --> D["Structured JSON"] + D --> E["command_registry"] + E --> F["Domain Handler"] + F --> G["API Request (HTTP)"] + G --> H["api/main.py (Flask)"] + H --> I["core/ modules"] + I --> J[("PostgreSQL")] + J -.-> I + I -.-> H + H -.-> G + G -.-> F + F -.-> B + B -.-> A + + +==== Global Architecture ==== + + +flowchart TB + subgraph external ["External Services"] + DISCORD["Discord API"] + OPENROUTER["OpenRouter API"] + end + + subgraph docker ["Docker Compose"] + subgraph bot_svc ["bot service"] + BOT["bot/bot.py"] + REG["command_registry.py"] + CMDS["commands/example.py"] + end + + subgraph app_svc ["app service (port 8080)"] + FLASK["api/main.py (Flask)"] + ROUTES["api/routes/example.py"] + end + + subgraph core_svc ["core/"] + AUTH["auth.py"] + USERS["users.py"] + NOTIF["notifications.py"] + PG["postgres.py"] + end + + subgraph ai_svc ["ai/"] + PARSER["parser.py"] + CONFIG["ai_config.json"] + end + + subgraph sched_svc ["scheduler service"] + DAEMON["daemon.py"] + end + + DB[("PostgreSQL")] + end + + DISCORD <--> BOT + BOT --> PARSER + PARSER --> OPENROUTER + PARSER --> CONFIG + BOT --> REG + REG --> CMDS + CMDS --> PARSER + BOT -- "HTTP" --> FLASK + FLASK --> ROUTES + FLASK --> AUTH + FLASK --> USERS + ROUTES --> AUTH + ROUTES --> PG + AUTH --> USERS + AUTH --> PG + USERS --> PG + NOTIF 
--> PG + PG --> DB + DAEMON --> PG + + +==== Docker Service Orchestration ==== + + +flowchart LR + DB[("db\nPostgreSQL:16\nport 5432")] -- "healthcheck:\npg_isready" --> DB + DB -- "service_healthy" --> APP["app\nFlask API\nport 8080:5000"] + DB -- "service_healthy" --> SCHED["scheduler\ndaemon.py"] + APP -- "service_started" --> BOT["bot\nbot.bot"] + ENV[".env\n(DB_PASS)"] -.-> DB + CENV["config/.env\n(all vars)"] -.-> APP + CENV -.-> BOT + CENV -.-> SCHED + + +===== Core Layer ===== + +==== core/postgres.py - Generic PostgreSQL CRUD ==== + +This module provides a **database abstraction layer** that eliminates the need to write raw SQL for common operations. It uses parameterized queries throughout to prevent SQL injection. + +=== Configuration === + +Connection settings are pulled from environment variables: + +^ Variable ^ Default ^ Description ^ +| DB_HOST | localhost | PostgreSQL server hostname | +| DB_PORT | 5432 | PostgreSQL server port | +| DB_NAME | app | Database name | +| DB_USER | app | Database user | +| DB_PASS | (empty) | Database password | + +=== Internal Functions === + +**_get_config()** - Returns a dictionary of connection parameters + + +def _get_config(): + return { + "host": os.environ.get("DB_HOST", "localhost"), + "port": int(os.environ.get("DB_PORT", 5432)), + "dbname": os.environ.get("DB_NAME", "app"), + "user": os.environ.get("DB_USER", "app"), + "password": os.environ.get("DB_PASS", ""), + } + + +This function centralizes database configuration, making it easy to test with different databases or override settings. + +**_safe_id(name)** - Validates and escapes SQL identifiers + + +def _safe_id(name): + if not re.match(r"^[a-zA-Z_][a-zA-Z0-9_]*$", name): + raise ValueError(f"Invalid SQL identifier: {name}") + return f'"{name}"' + + +Critical for security. 
This function: + * Validates that the identifier contains only alphanumeric characters and underscores + * Must start with a letter or underscore + * Wraps the identifier in double quotes for PostgreSQL + +This prevents SQL injection through table or column names. Without this, a malicious input like ''users; DROP TABLE users;--'' could execute destructive commands. + +**_build_where(where, prefix="")** - Constructs WHERE clauses from dictionaries + + +def _build_where(where, prefix=""): + clauses = [] + params = {} + for i, (col, val) in enumerate(where.items()): + param_name = f"{prefix}{col}_{i}" + safe_col = _safe_id(col) + # ... handles various conditions + + +This function transforms Python dictionaries into SQL WHERE clauses: + +**Simple equality:** + +{"username": "john"} +# → WHERE "username" = %(username_0)s + + +**Comparison operators:** + +{"age": (">", 18)} +# → WHERE "age" > %(age_0)s + + +**Supported operators:** =, !=, <, >, <=, >=, LIKE, ILIKE, IN, IS, IS NOT + +**IN clause:** + +{"status": ("IN", ["active", "pending"])} +# → WHERE "status" IN (%(status_0_0)s, %(status_0_1)s) + + +**NULL checks:** + +{"deleted_at": None} +# → WHERE "deleted_at" IS NULL + +{"deleted_at": ("IS NOT", None)} +# → WHERE "deleted_at" IS NOT NULL + + +The ''prefix'' parameter prevents parameter name collisions when the same column appears in multiple parts of a query (e.g., in both SET and WHERE clauses). 
+ +=== Connection Management === + + +flowchart TD + CRUD["CRUD function\n(insert/select/update/delete)"] --> GC["get_cursor(dict_cursor)"] + GC --> GCONN["get_connection()"] + GCONN --> CONNECT["psycopg2.connect(**_get_config())"] + CONNECT --> YIELD_CONN["yield conn"] + YIELD_CONN --> CURSOR["conn.cursor(RealDictCursor)"] + CURSOR --> YIELD_CUR["yield cursor"] + YIELD_CUR --> EXEC["cur.execute(query, params)"] + EXEC --> SUCCESS{"Success?"} + SUCCESS -- "Yes" --> COMMIT["conn.commit()"] + SUCCESS -- "Exception" --> ROLLBACK["conn.rollback()"] + COMMIT --> CLOSE["conn.close()"] + ROLLBACK --> CLOSE + + +**get_connection()** - Context manager for database connections + + +@contextmanager +def get_connection(): + conn = psycopg2.connect(**_get_config()) + try: + yield conn + conn.commit() + except Exception: + conn.rollback() + raise + finally: + conn.close() + + +This context manager ensures: + * **Automatic connection opening** when entering the context + * **Automatic commit** on successful completion + * **Automatic rollback** on any exception + * **Guaranteed cleanup** - connection always closes + +**get_cursor(dict_cursor=True)** - Context manager for cursors + + +@contextmanager +def get_cursor(dict_cursor=True): + with get_connection() as conn: + factory = psycopg2.extras.RealDictCursor if dict_cursor else None + cur = conn.cursor(cursor_factory=factory) + try: + yield cur + finally: + cur.close() + + +By default, returns a ''RealDictCursor'' which returns rows as dictionaries instead of tuples, making results easier to work with: + + +# With dict cursor: +{"id": "abc123", "username": "john"} + +# Without dict cursor: +("abc123", "john") + + +=== CRUD Operations === + +**insert(table, data)** - Insert a single row + + +def insert(table, data): + columns = list(data.keys()) + placeholders = [f"%({col})s" for col in columns] + safe_cols = [_safe_id(c) for c in columns] + + query = f""" + INSERT INTO {_safe_id(table)} + ({", ".join(safe_cols)}) + VALUES ({", 
".join(placeholders)}) + RETURNING * + """ + with get_cursor() as cur: + cur.execute(query, data) + return dict(cur.fetchone()) if cur.rowcount else None + + +Example usage: + +user = postgres.insert("users", { + "id": str(uuid.uuid4()), + "username": "john", + "password_hashed": hashed_pw +}) +# Returns the inserted row with all fields + + +The ''RETURNING *'' clause returns the complete inserted row, including any auto-generated fields like ''created_at''. + +**select(table, where=None, order_by=None, limit=None, offset=None)** - Query rows + + +def select(table, where=None, order_by=None, limit=None, offset=None): + query = f"SELECT * FROM {_safe_id(table)}" + params = {} + + if where: + clauses, params = _build_where(where) + query += f" WHERE {clauses}" + if order_by: + if isinstance(order_by, list): + order_by = ", ".join(order_by) + query += f" ORDER BY {order_by}" + if limit is not None: + query += f" LIMIT {int(limit)}" + if offset is not None: + query += f" OFFSET {int(offset)}" + + with get_cursor() as cur: + cur.execute(query, params) + return [dict(row) for row in cur.fetchall()] + + +Examples: + +# Get all users +all_users = postgres.select("users") + +# Get users with filtering +active_users = postgres.select("users", + where={"status": "active"}, + order_by="created_at DESC", + limit=10 +) + +# Complex filtering +adults = postgres.select("users", + where={"age": (">=", 18), "status": "active"} +) + + +**select_one(table, where)** - Query a single row + + +def select_one(table, where): + results = select(table, where=where, limit=1) + return results[0] if results else None + + +Convenience method that returns ''None'' if no row found, instead of an empty list: + +user = postgres.select_one("users", {"username": "john"}) +if user: + print(user["id"]) + + +**update(table, data, where)** - Update rows + + +def update(table, data, where): + set_columns = list(data.keys()) + set_clause = ", ".join(f"{_safe_id(col)} = %(set_{col})s" for col in set_columns) 
+ params = {f"set_{col}": val for col, val in data.items()} + + where_clause, where_params = _build_where(where, prefix="where_") + params.update(where_params) + + query = f""" + UPDATE {_safe_id(table)} + SET {set_clause} + WHERE {where_clause} + RETURNING * + """ + with get_cursor() as cur: + cur.execute(query, params) + return [dict(row) for row in cur.fetchall()] + + +The ''prefix'' parameter prevents parameter name collisions. Example: + +updated = postgres.update( + "users", + {"status": "inactive"}, + {"id": user_uuid} +) + + +Returns all updated rows (useful when updating multiple rows). + +**delete(table, where)** - Delete rows + + +def delete(table, where): + where_clause, params = _build_where(where) + query = f""" + DELETE FROM {_safe_id(table)} + WHERE {where_clause} + RETURNING * + """ + with get_cursor() as cur: + cur.execute(query, params) + return [dict(row) for row in cur.fetchall()] + + +Returns deleted rows for confirmation/auditing: + +deleted = postgres.delete("users", {"id": user_uuid}) +print(f"Deleted {len(deleted)} user(s)") + + +=== Utility Functions === + +**count(table, where=None)** - Count rows + + +def count(table, where=None): + query = f"SELECT COUNT(*) as count FROM {_safe_id(table)}" + params = {} + if where: + clauses, params = _build_where(where) + query += f" WHERE {clauses}" + with get_cursor() as cur: + cur.execute(query, params) + return cur.fetchone()["count"] + + +**exists(table, where)** - Check if rows exist + + +def exists(table, where): + return count(table, where) > 0 + + +**upsert(table, data, conflict_columns)** - Insert or update + + +def upsert(table, data, conflict_columns): + columns = list(data.keys()) + placeholders = [f"%({col})s" for col in columns] + safe_cols = [_safe_id(c) for c in columns] + conflict_cols = [_safe_id(c) for c in conflict_columns] + + update_cols = [c for c in columns if c not in conflict_columns] + update_clause = ", ".join( + f"{_safe_id(c)} = EXCLUDED.{_safe_id(c)}" for c in 
update_cols + ) + + query = f""" + INSERT INTO {_safe_id(table)} + ({", ".join(safe_cols)}) + VALUES ({", ".join(placeholders)}) + ON CONFLICT ({", ".join(conflict_cols)}) + DO UPDATE SET {update_clause} + RETURNING * + """ + + +Example: Create or update user settings + +settings = postgres.upsert( + "notifications", + { + "user_uuid": user_uuid, + "discord_enabled": True, + "discord_webhook": "https://..." + }, + conflict_columns=["user_uuid"] +) + + +**insert_many(table, rows)** - Bulk insert + + +def insert_many(table, rows): + if not rows: + return 0 + columns = list(rows[0].keys()) + safe_cols = [_safe_id(c) for c in columns] + query = f""" + INSERT INTO {_safe_id(table)} + ({", ".join(safe_cols)}) + VALUES %s + """ + template = f"({', '.join(f'%({col})s' for col in columns)})" + with get_cursor() as cur: + psycopg2.extras.execute_values( + cur, query, rows, template=template, page_size=100 + ) + return cur.rowcount + + +Uses ''execute_values'' for efficient bulk inserts (up to 100 rows per batch): + +rows = [ + {"id": str(uuid.uuid4()), "name": "Task 1"}, + {"id": str(uuid.uuid4()), "name": "Task 2"}, + {"id": str(uuid.uuid4()), "name": "Task 3"}, +] +count = postgres.insert_many("tasks", rows) + + +**execute(query, params=None)** - Execute raw SQL + + +def execute(query, params=None): + with get_cursor() as cur: + cur.execute(query, params or {}) + if cur.description: + return [dict(row) for row in cur.fetchall()] + return cur.rowcount + + +For complex queries that don't fit the CRUD pattern: + +results = postgres.execute(""" + SELECT u.username, COUNT(t.id) as task_count + FROM users u + LEFT JOIN tasks t ON t.user_uuid = u.id + GROUP BY u.username + HAVING COUNT(t.id) > %(min_count)s +""", {"min_count": 5}) + + +**table_exists(table)** - Check if table exists + +**get_columns(table)** - Get table schema information + +==== core/auth.py - JWT Authentication ==== + +This module handles **user authentication** using JWT (JSON Web Tokens) with bcrypt password
hashing. + +=== Token Management === + + +flowchart LR + subgraph getLoginToken + A["username, password"] --> B["users.getUserUUID()"] + B --> C["getUserpasswordHash()"] + C --> D["bcrypt.checkpw()"] + D -- "match" --> E["jwt.encode(payload)"] + D -- "no match" --> F["return False"] + E --> G["return JWT token"] + end + + subgraph verifyLoginToken + H["token, username/userUUID"] --> I{"username\nprovided?"} + I -- "Yes" --> J["users.getUserUUID()"] + J --> K["jwt.decode()"] + I -- "No" --> K + K -- "valid" --> L{"sub == userUUID?"} + L -- "Yes" --> M["return True"] + L -- "No" --> N["return False"] + K -- "expired/invalid" --> N + end + + +**getLoginToken(username, password)** - Generate JWT on successful login + + +def getLoginToken(username, password): + userUUID = users.getUserUUID(username) + if userUUID: + formatted_pass = password.encode("utf-8") + users_hashed_pw = getUserpasswordHash(userUUID) + if bcrypt.checkpw(formatted_pass, users_hashed_pw): + payload = { + "sub": userUUID, + "name": users.getUserFirstName(userUUID), + "exp": datetime.datetime.utcnow() + datetime.timedelta(hours=1), + } + return jwt.encode(payload, os.getenv("JWT_SECRET"), algorithm="HS256") + return False + + +The JWT payload contains: + * **sub** (subject) - User's UUID, used for identification + * **name** - User's display name + * **exp** (expiration) - Token expires in 1 hour + +**verifyLoginToken(login_token, username=False, userUUID=False)** - Validate JWT + + +def verifyLoginToken(login_token, username=False, userUUID=False): + if username: + userUUID = users.getUserUUID(username) + + if userUUID: + try: + decoded_token = jwt.decode( + login_token, os.getenv("JWT_SECRET"), algorithms=["HS256"] + ) + if decoded_token.get("sub") == str(userUUID): + return True + return False + except (ExpiredSignatureError, InvalidTokenError): + return False + return False + + +Validates that: + 1. The token is properly signed + 2. The token hasn't expired + 3. 
The token belongs to the claimed user + +=== Password Management === + +**getUserpasswordHash(userUUID)** - Retrieve stored password hash + + +def getUserpasswordHash(userUUID): + user = postgres.select_one("users", {"id": userUUID}) + if user: + pw_hash = user.get("password_hashed") + if isinstance(pw_hash, memoryview): + return bytes(pw_hash) + return pw_hash + return None + + +Handles the case where PostgreSQL returns ''BYTEA'' as a ''memoryview'' object. + + +flowchart LR + A["unregisterUser(userUUID, password)"] --> B["getUserpasswordHash()"] + B --> C["postgres.select_one('users')"] + C --> D{"hash found?"} + D -- "No" --> E["return False"] + D -- "Yes" --> F["bcrypt.checkpw()"] + F -- "match" --> G["users.deleteUser()"] + F -- "no match" --> E + G --> H["postgres.delete('users')"] + + +**unregisterUser(userUUID, password)** - Delete user account with password confirmation + + +def unregisterUser(userUUID, password): + pw_hash = getUserpasswordHash(userUUID) + if not pw_hash: + return False + if bcrypt.checkpw(password.encode("utf-8"), pw_hash): + return users.deleteUser(userUUID) + return False + + +Requires password re-entry to prevent unauthorized account deletion. + +==== core/users.py - User Management ==== + +This module provides **CRUD operations for users** with validation and security considerations. 
+ +=== Query Functions === + +**getUserUUID(username)** - Get UUID from username + +def getUserUUID(username): + userRecord = postgres.select_one("users", {"username": username}) + if userRecord: + return userRecord["id"] + return False + + +**getUserFirstName(userUUID)** - Get user's display name + +def getUserFirstName(userUUID): + userRecord = postgres.select_one("users", {"id": userUUID}) + if userRecord: + return userRecord.get("username") + return None + + +**isUsernameAvailable(username)** - Check username uniqueness + +def isUsernameAvailable(username): + return not postgres.exists("users", {"username": username}) + + +**doesUserUUIDExist(userUUID)** - Verify UUID exists + +def doesUserUUIDExist(userUUID): + return postgres.exists("users", {"id": userUUID}) + + +=== Mutation Functions === + + +flowchart TD + REG["registerUser(username, password)"] --> AVAIL["isUsernameAvailable()"] + AVAIL --> EXISTS["postgres.exists('users')"] + EXISTS -- "taken" --> RET_F["return False"] + EXISTS -- "available" --> HASH["bcrypt.hashpw(password, gensalt())"] + HASH --> CREATE["createUser(user_data)"] + CREATE --> VALIDATE["validateUser()"] + VALIDATE -- "invalid" --> RAISE["raise ValueError"] + VALIDATE -- "valid" --> INSERT["postgres.insert('users')"] + INSERT --> RET_T["return True"] + + UPD["updateUser(userUUID, data)"] --> LOOKUP["postgres.select_one('users')"] + LOOKUP -- "not found" --> RET_F2["return False"] + LOOKUP -- "found" --> FILTER["filter blocked fields\n(id, password_hashed, created_at)"] + FILTER --> UPD_DB["postgres.update('users')"] + + DEL["deleteUser(userUUID)"] --> LOOKUP2["postgres.select_one('users')"] + LOOKUP2 -- "not found" --> RET_F3["return False"] + LOOKUP2 -- "found" --> DEL_DB["postgres.delete('users')"] + + +**registerUser(username, password, data=None)** - Create new user + +def registerUser(username, password, data=None): + if isUsernameAvailable(username): + hashed_pass = bcrypt.hashpw(password.encode("utf-8"), bcrypt.gensalt()) + 
user_data = { + "id": str(uuid.uuid4()), + "username": username, + "password_hashed": hashed_pass, + } + if data: + user_data.update(data) + createUser(user_data) + return True + return False + + +Uses ''bcrypt.gensalt()'' to generate a unique salt for each password. + +**updateUser(userUUID, data_dict)** - Update user fields + +def updateUser(userUUID, data_dict): + user = postgres.select_one("users", {"id": userUUID}) + if not user: + return False + blocked = {"id", "password_hashed", "created_at"} + allowed = set(user.keys()) - blocked + updates = {k: v for k, v in data_dict.items() if k in allowed} + if not updates: + return False + postgres.update("users", updates, {"id": userUUID}) + return True + + +**Blocked fields** prevent modification of: + * ''id'' - Primary key should never change + * ''password_hashed'' - Use ''changePassword()'' instead + * ''created_at'' - Audit field should be immutable + +**changePassword(userUUID, new_password)** - Securely update password + +def changePassword(userUUID, new_password): + user = postgres.select_one("users", {"id": userUUID}) + if not user: + return False + hashed = bcrypt.hashpw(new_password.encode("utf-8"), bcrypt.gensalt()) + postgres.update("users", {"password_hashed": hashed}, {"id": userUUID}) + return True + + +**deleteUser(userUUID)** - Remove user record + +def deleteUser(userUUID): + user = postgres.select_one("users", {"id": userUUID}) + if not user: + return False + postgres.delete("users", {"id": userUUID}) + return True + + +=== Internal Functions === + +**createUser(data_dict)** - Internal user creation with validation + +def createUser(data_dict): + user_schema = { + "id": None, + "username": None, + "password_hashed": None, + "created_at": None, + } + for key in user_schema: + if key in data_dict: + user_schema[key] = data_dict[key] + + is_valid, errors = validateUser(user_schema) + if not is_valid: + raise ValueError(f"Invalid user data: {', '.join(errors)}") + + postgres.insert("users", 
user_schema) + + +**validateUser(user)** - Ensure required fields present + +def validateUser(user): + required = ["id", "username", "password_hashed"] + missing = [f for f in required if f not in user or user[f] is None] + if missing: + return False, missing + return True, [] + + +==== core/notifications.py - Multi-Channel Notifications ==== + +This module provides **notification routing** to multiple channels (Discord webhooks, ntfy). + + +flowchart TD + SEND["_sendToEnabledChannels(settings, message)"] + SEND --> CHK_D{"discord_enabled\nand webhook set?"} + CHK_D -- "Yes" --> DISC["discord.send(webhook_url, message)"] + CHK_D -- "No" --> CHK_N + DISC --> CHK_N{"ntfy_enabled\nand topic set?"} + CHK_N -- "Yes" --> NTFY["ntfy.send(topic, message)"] + CHK_N -- "No" --> RESULT + NTFY --> RESULT["return True if any succeeded"] + + DISC -- "POST webhook_url\n{content: message}" --> DISCORD_API["Discord Webhook\n(expects 204)"] + NTFY -- "POST ntfy.sh/topic\nmessage body" --> NTFY_API["ntfy.sh\n(expects 200)"] + + GET["getNotificationSettings(userUUID)"] --> DB_SEL["postgres.select_one('notifications')"] + SET["setNotificationSettings(userUUID, data)"] --> DB_CHK{"existing\nrecord?"} + DB_CHK -- "Yes" --> DB_UPD["postgres.update('notifications')"] + DB_CHK -- "No" --> DB_INS["postgres.insert('notifications')"] + + +=== Notification Channels === + +**discord.send(webhook_url, message)** - Send via Discord webhook + +class discord: + @staticmethod + def send(webhook_url, message): + try: + response = requests.post(webhook_url, json={"content": message}) + return response.status_code == 204 + except: + return False + + +Discord webhooks return ''204 No Content'' on success. 
+ +**ntfy.send(topic, message)** - Send via ntfy.sh + +class ntfy: + @staticmethod + def send(topic, message): + try: + response = requests.post( + f"https://ntfy.sh/{topic}", data=message.encode("utf-8") + ) + return response.status_code == 200 + except: + return False + + +ntfy.sh is a free push notification service. Users subscribe to topics. + +=== Settings Management === + +**getNotificationSettings(userUUID)** - Retrieve user notification config + +def getNotificationSettings(userUUID): + settings = postgres.select_one("notifications", {"user_uuid": userUUID}) + if not settings: + return False + return settings + + +**setNotificationSettings(userUUID, data_dict)** - Update notification config + +def setNotificationSettings(userUUID, data_dict): + existing = postgres.select_one("notifications", {"user_uuid": userUUID}) + allowed = [ + "discord_webhook", + "discord_enabled", + "ntfy_topic", + "ntfy_enabled", + ] + updates = {k: v for k, v in data_dict.items() if k in allowed} + if not updates: + return False + if existing: + postgres.update("notifications", updates, {"user_uuid": userUUID}) + else: + updates["id"] = str(uuid.uuid4()) + updates["user_uuid"] = userUUID + postgres.insert("notifications", updates) + return True + + +Implements an **upsert pattern** - updates if exists, inserts if not. + +**_sendToEnabledChannels(notif_settings, message)** - Route to all enabled channels + +def _sendToEnabledChannels(notif_settings, message): + sent = False + + if notif_settings.get("discord_enabled") and notif_settings.get("discord_webhook"): + if discord.send(notif_settings["discord_webhook"], message): + sent = True + + if notif_settings.get("ntfy_enabled") and notif_settings.get("ntfy_topic"): + if ntfy.send(notif_settings["ntfy_topic"], message): + sent = True + + return sent + + +Returns ''True'' if **any** channel succeeded, allowing partial failures. 
+ +===== AI Layer ===== + +==== ai/parser.py - LLM-Powered JSON Parser ==== + +This is the **heart of the natural language interface**. It transforms user messages into structured JSON using an LLM, with automatic retry and validation. + +=== Configuration Loading === + + +CONFIG_PATH = os.environ.get( + "AI_CONFIG_PATH", os.path.join(os.path.dirname(__file__), "ai_config.json") +) + +with open(CONFIG_PATH, "r") as f: + AI_CONFIG = json.load(f) + + +Configuration is loaded once at module import time. + +=== OpenAI Client Initialization === + + +client = OpenAI( + api_key=os.getenv("OPENROUTER_API_KEY"), + base_url=os.getenv("OPENROUTER_BASE_URL", "https://openrouter.ai/api/v1"), +) + + +Uses OpenRouter for **model flexibility** - can use any OpenAI-compatible model including: + * OpenAI models (GPT-4, GPT-3.5) + * Anthropic models (Claude) + * Open-source models (Llama, Qwen, Mistral) + +=== Internal Functions === + +**_extract_json_from_text(text)** - Extract JSON from reasoning model output + + +def _extract_json_from_text(text): + match = re.search(r"```json\s*(\{.*?\})\s*```", text, re.DOTALL) + if match: + return match.group(1) + match = re.search(r"(\{[^{}]*\})", text, re.DOTALL) + if match: + return match.group(1) + return None + + +Some reasoning models (like Qwen with thinking) output reasoning before the JSON. This extracts: + 1. JSON inside markdown code blocks: ''%%```json {...} ```%%'' + 2. 
First non-nested JSON object in text: ''{...}'' (the fallback regex ''(\{[^{}]*\})'' does not match objects containing nested braces) + + +flowchart LR + CALL["_call_llm(system, user)"] --> API["OpenAI client\nchat.completions.create()"] + API --> MSG["response.choices[0].message"] + MSG --> CHK_C{"msg.content\nnon-empty?"} + CHK_C -- "Yes" --> RET_TEXT["return content"] + CHK_C -- "No" --> CHK_R{"msg.reasoning\nexists?"} + CHK_R -- "Yes" --> EXTRACT["_extract_json_from_text()"] + CHK_R -- "No" --> RET_NONE["return None"] + EXTRACT --> RET_JSON["return extracted JSON"] + API -- "Exception" --> LOG["print error"] --> RET_NONE + + +**_call_llm(system_prompt, user_prompt)** - Execute LLM request + + +def _call_llm(system_prompt, user_prompt): + try: + response = client.chat.completions.create( + model=AI_CONFIG["model"], + max_tokens=AI_CONFIG.get("max_tokens", 8192), + timeout=AI_CONFIG["validation"]["timeout_seconds"], + messages=[ + {"role": "system", "content": system_prompt}, + {"role": "user", "content": user_prompt}, + ], + ) + msg = response.choices[0].message + text = msg.content.strip() if msg.content else "" + if text: + return text + reasoning = getattr(msg, "reasoning", None) + if reasoning: + extracted = _extract_json_from_text(reasoning) + if extracted: + return extracted + return None + except Exception as e: + print(f"LLM error: {type(e).__name__}: {e}", flush=True) + return None + + +Handles both: + * **Standard responses** - JSON in ''message.content'' + * **Reasoning responses** - JSON extracted from ''message.reasoning'' + +=== Main Parsing Function === + + +flowchart TD + START["parse(user_input, interaction_type)"] --> RETRY_CHK{"retry_count\n>= max_retries?"} + RETRY_CHK -- "Yes" --> ERR_MAX["return {error: 'Failed after N retries'}"] + RETRY_CHK -- "No" --> PROMPT["Build prompt from\nai_config.json template"] + PROMPT --> HISTORY["Inject last 3 conversation turns"] + HISTORY --> LLM["_call_llm(system, user_prompt)"] + LLM --> LLM_CHK{"Response\nreceived?"} + LLM_CHK -- "No" --> ERR_UNAVAIL["return {error: 'AI unavailable'}"] + LLM_CHK -- 
"Yes" --> JSON_PARSE["json.loads(response)"] + JSON_PARSE -- "JSONDecodeError" --> RETRY_JSON["parse(..., retry+1,\nerrors=['not valid JSON'])"] + JSON_PARSE -- "Success" --> VALIDATE{"Custom validator\nregistered?"} + VALIDATE -- "No" --> RETURN["return parsed JSON"] + VALIDATE -- "Yes" --> RUN_VAL["validator(parsed)"] + RUN_VAL --> VAL_CHK{"Validation\nerrors?"} + VAL_CHK -- "No" --> RETURN + VAL_CHK -- "Yes" --> RETRY_VAL["parse(..., retry+1,\nerrors=validation_errors)"] + + +**parse(user_input, interaction_type, retry_count=0, errors=None, history=None)** + + +def parse(user_input, interaction_type, retry_count=0, errors=None, history=None): + if retry_count >= AI_CONFIG["validation"]["max_retries"]: + return { + "error": f"Failed to parse after {retry_count} retries", + "user_input": user_input, + } + + +**Parameters:** + * ''user_input'' - Raw user message + * ''interaction_type'' - Key in config prompts (e.g., ''command_parser'') + * ''retry_count'' - Internal counter for automatic retry + * ''errors'' - Previous validation errors (for retry context) + * ''history'' - List of (message, parsed_result) tuples + +**Retry Logic:** + +try: + parsed = json.loads(response_text) +except json.JSONDecodeError: + return parse( + user_input, + interaction_type, + retry_count + 1, + ["Response was not valid JSON"], + history=history, + ) + + +When JSON parsing fails, the function **recursively calls itself** with incremented retry count, passing the error to give the LLM context to fix its output. + +**Validation Integration:** + +validator = AI_CONFIG["validation"].get("validators", {}).get(interaction_type) +if validator: + validation_errors = validator(parsed) + if validation_errors: + return parse( + user_input, + interaction_type, + retry_count + 1, + validation_errors, + history=history, + ) + + +Custom validators are called after successful JSON parsing. If validation fails, the parser retries with the validation errors as context. 
+ +**Conversation History:** + +history_context = "No previous context" +if history and len(history) > 0: + history_lines = [] + for i, (msg, result) in enumerate(history[-3:]): + history_lines.append(f"{i + 1}. User: {msg}") + if isinstance(result, dict) and not result.get("error"): + history_lines.append(f" Parsed: {json.dumps(result)}") + else: + history_lines.append(f" Parsed: {result}") + history_context = "\n".join(history_lines) + + +The last 3 conversation turns are included for context, enabling: + * Pronoun resolution ("Add **it** to my list") + * Follow-up commands ("Change **the second one**") + * Clarification handling + +=== Validator Registration === + +**register_validator(interaction_type, validator_fn)** + +def register_validator(interaction_type, validator_fn): + if "validators" not in AI_CONFIG["validation"]: + AI_CONFIG["validation"]["validators"] = {} + AI_CONFIG["validation"]["validators"][interaction_type] = validator_fn + + +Domain modules register their validators: + +def validate_example_json(data): + errors = [] + if "action" not in data: + errors.append("Missing required field: action") + return errors + +ai_parser.register_validator("example", validate_example_json) + + +==== ai/ai_config.json - AI Configuration ==== + + +{ + "model": "qwen/qwen3-next-80b-a3b-thinking:nitro", + "max_tokens": 8192, + "prompts": { + "command_parser": { + "system": "...", + "user_template": "..." 
+ } + }, + "validation": { + "max_retries": 3, + "timeout_seconds": 15, + "validators": {} + } +} + + +**Configuration Fields:** + +^ Field ^ Purpose ^ +| model | OpenRouter model identifier | +| max_tokens | Maximum response length | +| prompts | Prompt templates by interaction type | +| validation.max_retries | Retry attempts on failure | +| validation.timeout_seconds | LLM request timeout | +| validation.validators | Runtime-registered validators | + +**Prompt Structure:** + +Each prompt has: + * ''system'' - System prompt defining the AI's role + * ''user_template'' - Template with ''{user_input}'' and ''{history_context}'' placeholders + +===== Bot Layer ===== + +==== bot/bot.py - Discord Client ==== + +This is the **Discord bot client** that manages user sessions and routes commands. + +=== Global State === + + +user_sessions = {} # discord_id → {token, user_uuid, username} +login_state = {} # discord_id → {step, username} +message_history = {} # discord_id → [(msg, parsed), ...] +user_cache = {} # discord_id → {hashed_password, user_uuid, username} +CACHE_FILE = "/app/user_cache.pkl" + + +**user_sessions** - Active authenticated sessions + +**login_state** - Tracks multi-step login flow + +**message_history** - Last 5 messages per user for context + +**user_cache** - Persisted credentials for auto-login + + +flowchart TD + MSG["on_message(message)"] --> SELF{"Own\nmessage?"} + SELF -- "Yes" --> IGNORE["ignore"] + SELF -- "No" --> DM{"Is DM\nchannel?"} + DM -- "No" --> IGNORE + DM -- "Yes" --> LOGIN_CHK{"In\nlogin_state?"} + LOGIN_CHK -- "Yes" --> HANDLE_LOGIN["handleLoginStep()"] + LOGIN_CHK -- "No" --> SESSION_CHK{"Has\nsession?"} + SESSION_CHK -- "No" --> START_LOGIN["Start login flow\n(ask for username)"] + SESSION_CHK -- "Yes" --> ROUTE["routeCommand()"] + + ROUTE --> HELP_CHK{"'help' in\nmessage?"} + HELP_CHK -- "Yes" --> HELP["sendHelpMessage()"] + HELP_CHK -- "No" --> PARSE["ai_parser.parse()"] + PARSE --> CLARIFY{"needs_\nclarification?"} + CLARIFY 
-- "Yes" --> ASK["Ask user to clarify"] + CLARIFY -- "No" --> ERROR_CHK{"error in\nparsed?"} + ERROR_CHK -- "Yes" --> SHOW_ERR["Show error"] + ERROR_CHK -- "No" --> HANDLER["get_handler(interaction_type)"] + HANDLER -- "found" --> EXEC["handler(message, session, parsed)"] + HANDLER -- "not found" --> UNKNOWN["'Unknown command type'"] + + +=== Discord Client Setup === + + +intents = discord.Intents.default() +intents.message_content = True + +client = discord.Client(intents=intents) + + +''message_content'' intent is required to read message text. + +=== Utility Functions === + +**decodeJwtPayload(token)** - Decode JWT without verification + +def decodeJwtPayload(token): + payload = token.split(".")[1] + payload += "=" * (4 - len(payload) % 4) + return json.loads(base64.urlsafe_b64decode(payload)) + + +Used to extract ''user_uuid'' from tokens. Note: This **does not verify** the signature; verification happens server-side. + +**apiRequest(method, endpoint, token=None, data=None)** - Make HTTP requests to API + +def apiRequest(method, endpoint, token=None, data=None): + url = f"{API_URL}{endpoint}" + headers = {"Content-Type": "application/json"} + if token: + headers["Authorization"] = f"Bearer {token}" + try: + resp = getattr(requests, method)(url, headers=headers, json=data, timeout=10) + try: + return resp.json(), resp.status_code + except ValueError: + return {}, resp.status_code + except requests.RequestException: + return {"error": "API unavailable"}, 503 + + +Returns a tuple of (response_data, status_code) for easy handling. 
+ +=== Cache Management === + +**loadCache()** - Load persisted user credentials + +def loadCache(): + try: + if os.path.exists(CACHE_FILE): + with open(CACHE_FILE, "rb") as f: + global user_cache + user_cache = pickle.load(f) + print(f"Loaded cache for {len(user_cache)} users") + except Exception as e: + print(f"Error loading cache: {e}") + + +**saveCache()** - Persist user credentials + +def saveCache(): + try: + with open(CACHE_FILE, "wb") as f: + pickle.dump(user_cache, f) + except Exception as e: + print(f"Error saving cache: {e}") + + +**Why cache credentials?** + * Users don't need to re-login every session + * Passwords are hashed locally for verification + * New tokens are fetched automatically + +**hashPassword() / verifyPassword()** - Local password handling + +def hashPassword(password): + return bcrypt.hashpw(password.encode("utf-8"), bcrypt.gensalt()).decode("utf-8") + +def verifyPassword(password, hashed): + return bcrypt.checkpw(password.encode("utf-8"), hashed.encode("utf-8")) + + +=== Authentication Flow === + + +flowchart TD + START["negotiateToken(discord_id, username, password)"] --> CACHE{"Cached user\nfound?"} + CACHE -- "Yes" --> VERIFY["verifyPassword(password, cached.hashed_password)"] + VERIFY -- "Match" --> API_CACHED["POST /api/login"] + VERIFY -- "Mismatch" --> API_FRESH["POST /api/login"] + CACHE -- "No" --> API_FRESH + + API_CACHED -- "200 + token" --> DECODE_C["decodeJwtPayload(token)"] + DECODE_C --> UPDATE_C["Update cache\n(keep existing hash)"] + UPDATE_C --> RET_OK["return (token, user_uuid)"] + + API_FRESH -- "200 + token" --> DECODE_F["decodeJwtPayload(token)"] + DECODE_F --> UPDATE_F["Cache new credentials\n(hashPassword(password))"] + UPDATE_F --> RET_OK + + API_CACHED -- "failure" --> RET_FAIL["return (None, None)"] + API_FRESH -- "failure" --> RET_FAIL + + +**negotiateToken(discord_id, username, password)** - Get token with caching + +def negotiateToken(discord_id, username, password): + cached = getCachedUser(discord_id) 
+ if ( + cached + and cached.get("username") == username + and verifyPassword(password, cached.get("hashed_password")) + ): + result, status = apiRequest( + "post", "/api/login", data={"username": username, "password": password} + ) + if status == 200 and "token" in result: + token = result["token"] + payload = decodeJwtPayload(token) + user_uuid = payload["sub"] + setCachedUser( + discord_id, + { + "hashed_password": cached["hashed_password"], + "user_uuid": user_uuid, + "username": username, + }, + ) + return token, user_uuid + return None, None + + result, status = apiRequest( + "post", "/api/login", data={"username": username, "password": password} + ) + if status == 200 and "token" in result: + token = result["token"] + payload = decodeJwtPayload(token) + user_uuid = payload["sub"] + setCachedUser( + discord_id, + { + "hashed_password": hashPassword(password), + "user_uuid": user_uuid, + "username": username, + }, + ) + return token, user_uuid + return None, None + + +**Flow:** + 1. Check if user has cached credentials + 2. If cached password matches, fetch new token + 3. If no cache or mismatch, authenticate normally + 4. Cache credentials for future sessions + +**handleAuthFailure(message)** - Handle expired sessions + +async def handleAuthFailure(message): + discord_id = message.author.id + user_sessions.pop(discord_id, None) + await message.channel.send( + "Your session has expired. Send any message to log in again." 
+ ) + + +=== Login Flow === + +**handleLoginStep(message)** - Multi-step login process + +async def handleLoginStep(message): + discord_id = message.author.id + state = login_state[discord_id] + + if state["step"] == "username": + state["username"] = message.content.strip() + state["step"] = "password" + await message.channel.send("Password?") + + elif state["step"] == "password": + username = state["username"] + password = message.content.strip() + del login_state[discord_id] + + token, user_uuid = negotiateToken(discord_id, username, password) + + if token and user_uuid: + user_sessions[discord_id] = { + "token": token, + "user_uuid": user_uuid, + "username": username, + } + registered = ", ".join(list_registered()) or "none" + await message.channel.send( + f"Welcome back **{username}**!\n\n" + f"Registered modules: {registered}\n\n" + f"Send 'help' for available commands." + ) + else: + await message.channel.send( + "Invalid credentials. Send any message to try again." + ) + + +**State Machine:** + + +stateDiagram-v2 + [*] --> AskUsername: First message (no session) + AskUsername --> AskPassword: User sends username + AskPassword --> Authenticated: negotiateToken() succeeds + AskPassword --> [*]: Invalid credentials\n(send any message to retry) + Authenticated --> routeCommand: Subsequent messages + + +=== Command Routing === + +**routeCommand(message)** - Parse and route commands + +async def routeCommand(message): + discord_id = message.author.id + session = user_sessions[discord_id] + user_input = message.content.lower() + + if "help" in user_input or "what can i say" in user_input: + await sendHelpMessage(message) + return + + async with message.channel.typing(): + history = message_history.get(discord_id, []) + parsed = ai_parser.parse(message.content, "command_parser", history=history) + + if discord_id not in message_history: + message_history[discord_id] = [] + message_history[discord_id].append((message.content, parsed)) + message_history[discord_id] = 
message_history[discord_id][-5:] + + if "needs_clarification" in parsed: + await message.channel.send( + f"I'm not quite sure what you mean. {parsed['needs_clarification']}" + ) + return + + if "error" in parsed: + await message.channel.send( + f"I had trouble understanding that: {parsed['error']}" + ) + return + + interaction_type = parsed.get("interaction_type") + handler = get_handler(interaction_type) + + if handler: + await handler(message, session, parsed) + else: + registered = ", ".join(list_registered()) or "none" + await message.channel.send( + f"Unknown command type '{interaction_type}'. Registered modules: {registered}" + ) + + +**Flow:** + 1. Check for help request + 2. Show typing indicator while LLM processes + 3. Parse with AI, including conversation history + 4. Update message history (keep last 5) + 5. Handle clarification requests + 6. Handle parsing errors + 7. Route to appropriate handler + 8. Handle unknown interaction types + +=== Discord Event Handlers === + +**on_ready()** - Bot startup + +@client.event +async def on_ready(): + print(f"Bot logged in as {client.user}") + loadCache() + backgroundLoop.start() + + +**on_message(message)** - Message handler + +@client.event +async def on_message(message): + if message.author == client.user: + return + if not isinstance(message.channel, discord.DMChannel): + return + + discord_id = message.author.id + + if discord_id in login_state: + await handleLoginStep(message) + return + + if discord_id not in user_sessions: + login_state[discord_id] = {"step": "username"} + await message.channel.send("Welcome! 
Send your username to log in.") + return + + await routeCommand(message) + + +**Filters:** + * Ignore own messages + * Only respond to DMs (not server channels) + +=== Background Tasks === + +**backgroundLoop()** - Scheduled task execution + +@tasks.loop(seconds=60) +async def backgroundLoop(): + """Override this in your domain module or extend as needed.""" + pass + +@backgroundLoop.before_loop +async def beforeBackgroundLoop(): + await client.wait_until_ready() + + +Runs every 60 seconds. Override for domain-specific polling. + +==== bot/command_registry.py - Module Registration ==== + + +flowchart TD + subgraph "Module Registration (at import time)" + EX["commands/example.py"] -- "register_module('example', handle_example)" --> REG["COMMAND_MODULES dict"] + EX -- "register_validator('example', validate_fn)" --> VAL["AI_CONFIG validators dict"] + HABIT["commands/habits.py"] -- "register_module('habit', handle_habit)" --> REG + HABIT -- "register_validator('habit', validate_fn)" --> VAL + end + + subgraph "Runtime Dispatch" + PARSED["parsed JSON\n{interaction_type: 'example'}"] --> GET["get_handler('example')"] + GET --> REG + REG --> HANDLER["handle_example(message, session, parsed)"] + end + + +This module provides a **simple registry pattern** for command handlers. + + +COMMAND_MODULES = {} + +def register_module(interaction_type, handler): + COMMAND_MODULES[interaction_type] = handler + +def get_handler(interaction_type): + return COMMAND_MODULES.get(interaction_type) + +def list_registered(): + return list(COMMAND_MODULES.keys()) + + +**Why a registry?** + * Decouples command handling from bot logic + * Domain modules self-register + * Easy to add/remove modules without touching core code + +==== bot/commands/example.py - Example Command Module ==== + +This demonstrates the **pattern for creating domain modules**. 
+ + +from bot.command_registry import register_module +import ai.parser as ai_parser + + +async def handle_example(message, session, parsed): + action = parsed.get("action", "unknown") + token = session["token"] + user_uuid = session["user_uuid"] + + if action == "check": + await message.channel.send( + f"Checking example items for {session['username']}..." + ) + elif action == "add": + item_name = parsed.get("item_name", "unnamed") + await message.channel.send(f"Adding example item: **{item_name}**") + else: + await message.channel.send(f"Unknown example action: {action}") + + +def validate_example_json(data): + errors = [] + + if not isinstance(data, dict): + return ["Response must be a JSON object"] + + if "error" in data: + return [] + + if "action" not in data: + errors.append("Missing required field: action") + + action = data.get("action") + + if action == "add" and "item_name" not in data: + errors.append("Missing required field for add: item_name") + + return errors + + +register_module("example", handle_example) +ai_parser.register_validator("example", validate_example_json) + + +**Pattern:** + 1. Define async handler function + 2. Define validation function + 3. Register both at module load time + +===== API Layer ===== + +==== api/main.py - Flask Application ==== + +This is the **REST API server** providing endpoints for authentication, user management, and domain operations. + +=== Route Registration === + + +ROUTE_MODULES = [] + +def register_routes(module): + """Register a routes module. 
Module should have a register(app) function.""" + ROUTE_MODULES.append(module) + + +Modules are registered before app startup: + +if __name__ == "__main__": + for module in ROUTE_MODULES: + if hasattr(module, "register"): + module.register(app) + app.run(host="0.0.0.0", port=5000) + + + +flowchart LR + subgraph "API Auth Pattern (all protected routes)" + REQ["Incoming Request"] --> HDR{"Authorization\nheader?"} + HDR -- "Missing/invalid" --> R401A["401 missing token"] + HDR -- "Bearer token" --> DECODE["_get_user_uuid(token)\nor extract from URL"] + DECODE --> VERIFY["auth.verifyLoginToken\n(token, userUUID)"] + VERIFY -- "True" --> HANDLER["Route handler logic"] + VERIFY -- "False" --> R401B["401 unauthorized"] + end + + +=== Authentication Endpoints === + +**POST /api/register** - Create new user + +@app.route("/api/register", methods=["POST"]) +def api_register(): + data = flask.request.get_json() + username = data.get("username") + password = data.get("password") + if not username or not password: + return flask.jsonify({"error": "username and password required"}), 400 + result = users.registerUser(username, password, data) + if result: + return flask.jsonify({"success": True}), 201 + else: + return flask.jsonify({"error": "username taken"}), 409 + + +**Response Codes:** + * ''201'' - User created successfully + * ''400'' - Missing required fields + * ''409'' - Username already exists + +**POST /api/login** - Authenticate user + +@app.route("/api/login", methods=["POST"]) +def api_login(): + data = flask.request.get_json() + username = data.get("username") + password = data.get("password") + if not username or not password: + return flask.jsonify({"error": "username and password required"}), 400 + token = auth.getLoginToken(username, password) + if token: + return flask.jsonify({"token": token}), 200 + else: + return flask.jsonify({"error": "invalid credentials"}), 401 + + +**Response Codes:** + * ''200'' - Returns JWT token + * ''400'' - Missing required 
fields + * ''401'' - Invalid credentials + +=== User Endpoints === + +**GET /api/getUserUUID/<username>** - Get user's UUID + +@app.route("/api/getUserUUID/<username>", methods=["GET"]) +def api_getUserUUID(username): + header = flask.request.headers.get("Authorization", "") + if not header.startswith("Bearer "): + return flask.jsonify({"error": "missing token"}), 401 + token = header[7:] + if auth.verifyLoginToken(token, username): + return flask.jsonify(users.getUserUUID(username)), 200 + else: + return flask.jsonify({"error": "unauthorized"}), 401 + + +**GET /api/user/<userUUID>** - Get user details + +@app.route("/api/user/<userUUID>", methods=["GET"]) +def api_getUser(userUUID): + # ... auth check ... + user = postgres.select_one("users", {"id": userUUID}) + if user: + user.pop("password_hashed", None) # Never return password hash + return flask.jsonify(user), 200 + else: + return flask.jsonify({"error": "user not found"}), 404 + + +**Security note:** ''password_hashed'' is explicitly removed before returning. + +**PUT /api/user/<userUUID>** - Update user + +@app.route("/api/user/<userUUID>", methods=["PUT"]) +def api_updateUser(userUUID): + # ... auth check ... + data = flask.request.get_json() + result = users.updateUser(userUUID, data) + if result: + return flask.jsonify({"success": True}), 200 + else: + return flask.jsonify({"error": "no valid fields to update"}), 400 + + +**DELETE /api/user/<userUUID>** - Delete user + +@app.route("/api/user/<userUUID>", methods=["DELETE"]) +def api_deleteUser(userUUID): + # ... auth check ... + data = flask.request.get_json() + password = data.get("password") + if not password: + return flask.jsonify( + {"error": "password required for account deletion"} + ), 400 + result = auth.unregisterUser(userUUID, password) + if result: + return flask.jsonify({"success": True}), 200 + else: + return flask.jsonify({"error": "invalid password"}), 401 + + +**Security:** Requires password confirmation for deletion. 
+ +=== Health Check === + +**GET /health** - Service health + +@app.route("/health", methods=["GET"]) +def health_check(): + return flask.jsonify({"status": "ok"}), 200 + + +Used by Docker health checks and load balancers. + +==== api/routes/example.py - Example Route Module ==== + + +def _get_user_uuid(token): + """Decode JWT to extract user UUID. Returns None on failure.""" + try: + payload = jwt.decode(token, os.getenv("JWT_SECRET"), algorithms=["HS256"]) + return payload.get("sub") + except (jwt.ExpiredSignatureError, jwt.InvalidTokenError): + return None + + +def register(app): + @app.route("/api/example", methods=["GET"]) + def api_listExamples(): + header = flask.request.headers.get("Authorization", "") + if not header.startswith("Bearer "): + return flask.jsonify({"error": "missing token"}), 401 + token = header[7:] + + user_uuid = _get_user_uuid(token) + if not user_uuid or not auth.verifyLoginToken(token, userUUID=user_uuid): + return flask.jsonify({"error": "unauthorized"}), 401 + + items = postgres.select("examples") + return flask.jsonify(items), 200 + + @app.route("/api/example", methods=["POST"]) + def api_addExample(): + # ... similar pattern ... + data = flask.request.get_json() + item = postgres.insert("examples", data) + return flask.jsonify(item), 201 + + +**Authentication Pattern:** + 1. Extract Bearer token from Authorization header + 2. Decode token to extract user UUID + 3. Verify token belongs to that user via ''verifyLoginToken'' + 4. Return ''401 Unauthorized'' if invalid + 5. Proceed with request if valid + +===== Scheduler Layer ===== + +==== scheduler/daemon.py - Background Task Daemon ==== + + +flowchart TD + START["daemon_loop()"] --> POLL["poll_callback()"] + POLL --> SLEEP["time.sleep(POLL_INTERVAL)"] + SLEEP --> POLL + POLL -- "Exception" --> LOG["logger.error()"] + LOG --> SLEEP + + +This module provides a **simple polling loop** for background tasks. 
+ + +POLL_INTERVAL = int(os.environ.get("POLL_INTERVAL", 60)) + +def poll_callback(): + """ + Override this function with your domain logic. + Called every POLL_INTERVAL seconds. + """ + pass + +def daemon_loop(): + logger.info("Scheduler daemon starting") + while True: + try: + poll_callback() + except Exception as e: + logger.error(f"Poll callback error: {e}") + time.sleep(POLL_INTERVAL) + + +**Usage:** +Override ''poll_callback()'' in your implementation: + +# scheduler/daemon.py (customized) +def poll_callback(): + # Check for due tasks + tasks = postgres.select("tasks", where={"due_at": ("<=", datetime.now())}) + for task in tasks: + settings = notifications.getNotificationSettings(task["user_uuid"]) + notifications._sendToEnabledChannels( + settings, + f"Task due: {task['name']}" + ) + + +**Error Handling:** +Exceptions are caught and logged, allowing the daemon to continue running. + +===== Configuration ===== + +==== config/schema.sql - Database Schema ==== + + +erDiagram + users { + UUID id PK + VARCHAR username UK "NOT NULL" + BYTEA password_hashed "NOT NULL" + TIMESTAMP created_at "DEFAULT NOW()" + } + + notifications { + UUID id PK + UUID user_uuid FK,UK "REFERENCES users(id)" + VARCHAR discord_webhook + BOOLEAN discord_enabled "DEFAULT FALSE" + VARCHAR ntfy_topic + BOOLEAN ntfy_enabled "DEFAULT FALSE" + TIMESTAMP last_message_sent + VARCHAR current_notification_status "DEFAULT 'inactive'" + TIMESTAMP created_at "DEFAULT NOW()" + TIMESTAMP updated_at "DEFAULT NOW()" + } + + users ||--o| notifications : "ON DELETE CASCADE" + + + +-- Users table (minimal) +CREATE TABLE IF NOT EXISTS users ( + id UUID PRIMARY KEY, + username VARCHAR(255) UNIQUE NOT NULL, + password_hashed BYTEA NOT NULL, + created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP +); + +-- Notifications table +CREATE TABLE IF NOT EXISTS notifications ( + id UUID PRIMARY KEY, + user_uuid UUID REFERENCES users(id) ON DELETE CASCADE UNIQUE, + discord_webhook VARCHAR(500), + discord_enabled BOOLEAN 
DEFAULT FALSE, + ntfy_topic VARCHAR(255), + ntfy_enabled BOOLEAN DEFAULT FALSE, + last_message_sent TIMESTAMP, + current_notification_status VARCHAR(50) DEFAULT 'inactive', + created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, + updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP +); + + +**Design Notes:** + * ''UUID'' primary keys for security (not auto-increment) + * ''ON DELETE CASCADE'' automatically removes related records + * ''BYTEA'' for password hashes (binary data) + +==== config/.env.example - Environment Variables ==== + + +# Discord Bot +DISCORD_BOT_TOKEN=your_discord_bot_token_here + +# API +API_URL=http://app:5000 + +# Database +DB_HOST=db +DB_PORT=5432 +DB_NAME=app +DB_USER=app +DB_PASS=your_db_password_here + +# JWT +JWT_SECRET=your_jwt_secret_here + +# AI / OpenRouter +OPENROUTER_API_KEY=your_openrouter_api_key_here +OPENROUTER_BASE_URL=https://openrouter.ai/api/v1 +AI_CONFIG_PATH=/app/ai/ai_config.json + + +===== Docker Deployment ===== + +==== Dockerfile ==== + + +FROM python:3.11-slim + +WORKDIR /app + +COPY requirements.txt . +RUN pip install --no-cache-dir -r requirements.txt + +COPY . . + +CMD ["python", "-m", "api.main"] + + +**Base Image:** ''python:3.11-slim'' for minimal footprint + +**Default Command:** Runs the Flask API via ''python -m api.main'' (ensures ''/app'' is on sys.path for correct module resolution) + +==== docker-compose.yml ==== + + +services: + db: + image: postgres:16 + environment: + POSTGRES_DB: app + POSTGRES_USER: app + POSTGRES_PASSWORD: ${DB_PASS} + ports: + - "5432:5432" + volumes: + - pgdata:/var/lib/postgresql/data + - ./config/schema.sql:/docker-entrypoint-initdb.d/schema.sql + healthcheck: + test: ["CMD-SHELL", "pg_isready -U app"] + interval: 5s + timeout: 5s + retries: 5 + + app: + build: . + ports: + - "8080:5000" + env_file: config/.env + depends_on: + db: + condition: service_healthy + + scheduler: + build: . 
+ command: ["python", "-m", "scheduler.daemon"] + env_file: config/.env + depends_on: + db: + condition: service_healthy + + bot: + build: . + command: ["python", "-m", "bot.bot"] + env_file: config/.env + depends_on: + app: + condition: service_started + +volumes: + pgdata: + + +**Services:** + +^ Service ^ Purpose ^ Dependencies ^ +| db | PostgreSQL database | None | +| app | Flask API (port 8080) | db (healthy) | +| scheduler | Background tasks | db (healthy) | +| bot | Discord client | app | + +**Health Checks:** + * ''db'' uses PostgreSQL's built-in ''pg_isready'' + * Other services wait for their dependencies + +**Environment Files:** + * ''config/.env'' - Loaded by app, bot, and scheduler services + * Root ''.env'' - Provides ''DB_PASS'' for docker-compose variable substitution (''${DB_PASS}'' in the db service) + +**Volume Mounts:** + * ''pgdata'' - Persistent database storage + * ''schema.sql'' - Auto-runs on first startup + +===== Data Flow Examples ===== + +==== Example 1: User Login ==== + + +sequenceDiagram + actor User + participant Bot as bot.py + participant API as api/main.py + participant Auth as core/auth.py + participant DB as PostgreSQL + + User->>Bot: DM "username123" + Bot->>Bot: login_state[id] = {step: "username"} + Bot->>User: "Password?" + User->>Bot: DM "mypassword" + Bot->>API: POST /api/login {username, password} + API->>Auth: getLoginToken("username123", "mypassword") + Auth->>DB: SELECT * FROM users WHERE username='username123' + DB-->>Auth: user record (with hashed pw) + Auth->>Auth: bcrypt.checkpw(password, hash) + Auth-->>API: JWT token + API-->>Bot: {token: "eyJ..."} 200 + Bot->>Bot: Store session {token, user_uuid, username} + Bot->>User: "Welcome back username123!" 
+ + +==== Example 2: Natural Language Command ==== + + +sequenceDiagram + actor User + participant Bot as bot.py + participant Parser as ai/parser.py + participant LLM as OpenRouter + participant Registry as command_registry + participant Handler as Domain Handler + participant API as api/main.py + participant DB as PostgreSQL + + User->>Bot: DM "add a task to buy groceries" + Bot->>Bot: Show typing indicator + Bot->>Parser: parse("add a task...", "command_parser", history) + Parser->>LLM: chat.completions.create(system + user prompt) + LLM-->>Parser: {"interaction_type":"task","action":"add","task_name":"buy groceries"} + Parser->>Parser: json.loads() + validate + Parser-->>Bot: parsed JSON + Bot->>Bot: Update message_history (keep last 5) + Bot->>Registry: get_handler("task") + Registry-->>Bot: handle_task function + Bot->>Handler: handle_task(message, session, parsed) + Handler->>API: POST /api/tasks {name: "buy groceries"} + API->>DB: INSERT INTO tasks... + DB-->>API: inserted row + API-->>Handler: {id, name, ...} 201 + Handler->>User: "Added task: **buy groceries**" + + +==== Example 3: API Request with Authentication ==== + + +sequenceDiagram + actor Client as External Client + participant Flask as api/main.py + participant Route as routes/example.py + participant Auth as core/auth.py + participant PG as core/postgres.py + participant DB as PostgreSQL + + Client->>Flask: POST /api/tasks
Authorization: Bearer <token> + Flask->>Route: Route handler + Route->>Route: _get_user_uuid(token)
jwt.decode() → sub claim + Route->>Auth: verifyLoginToken(token, userUUID) + Auth->>Auth: jwt.decode() + check sub == userUUID + Auth-->>Route: True + Route->>PG: insert("tasks", data) + PG->>DB: INSERT INTO tasks... RETURNING * + DB-->>PG: inserted row + PG-->>Route: {id, name, ...} + Route-->>Client: 201 {id, name, ...} +
+ +===== Security Considerations ===== + +==== SQL Injection Prevention ==== + +All database queries use parameterized queries: + +# Safe - parameterized +cur.execute("SELECT * FROM users WHERE id = %(id)s", {"id": user_id}) + +# Unsafe - string interpolation (NOT used) +cur.execute(f"SELECT * FROM users WHERE id = '{user_id}'") + + +The ''_safe_id()'' function validates SQL identifiers. + +==== Password Storage ==== + +Passwords are: + * Hashed with bcrypt (adaptive hash function) + * Salted automatically by bcrypt + * Never stored in plain text + * Never returned in API responses + +==== JWT Security ==== + + * Tokens expire after 1 hour + * Signed with secret key (''JWT_SECRET'') + * Contain minimal data (UUID, name) + * Verified on every request + +==== Rate Limiting ==== + +Not implemented in this template. Consider adding: + * Rate limiting on API endpoints + * Login attempt throttling + * Command cooldown per user + +===== Extending the Framework ===== + +==== Adding a New Domain Module ==== + +**Step 1: Create database table** + +Edit ''config/schema.sql'': + +CREATE TABLE IF NOT EXISTS habits ( + id UUID PRIMARY KEY, + user_uuid UUID REFERENCES users(id) ON DELETE CASCADE, + name VARCHAR(255) NOT NULL, + streak INT DEFAULT 0, + last_completed DATE, + created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP +); + + +**Step 2: Create API routes** + +Create ''api/routes/habits.py'': + +import os +import flask +import jwt +import core.auth as auth +import core.postgres as postgres +import uuid + + +def _get_user_uuid(token): + """Decode JWT to extract user UUID. 
Returns None on failure.""" + try: + payload = jwt.decode(token, os.getenv("JWT_SECRET"), algorithms=["HS256"]) + return payload.get("sub") + except (jwt.ExpiredSignatureError, jwt.InvalidTokenError): + return None + + +def register(app): + @app.route("/api/habits", methods=["GET"]) + def api_listHabits(): + header = flask.request.headers.get("Authorization", "") + if not header.startswith("Bearer "): + return flask.jsonify({"error": "missing token"}), 401 + token = header[7:] + + user_uuid = _get_user_uuid(token) + if not user_uuid or not auth.verifyLoginToken(token, userUUID=user_uuid): + return flask.jsonify({"error": "unauthorized"}), 401 + + items = postgres.select("habits", where={"user_uuid": user_uuid}) + return flask.jsonify(items), 200 + + @app.route("/api/habits", methods=["POST"]) + def api_addHabit(): + # ... similar pattern ... + pass + + +Register in ''api/main.py'': + +import api.routes.habits as habits_routes +register_routes(habits_routes) + + +**Step 3: Create bot commands** + +Create ''bot/commands/habits.py'': + +from bot.command_registry import register_module +import ai.parser as ai_parser +from bot.bot import apiRequest + +async def handle_habit(message, session, parsed): + action = parsed.get("action") + token = session["token"] + + if action == "list": + result, status = apiRequest("get", "/api/habits", token) + if status == 200: + lines = [f"- {h['name']} (streak: {h['streak']})" for h in result] + await message.channel.send("Your habits:\n" + "\n".join(lines)) + + elif action == "add": + name = parsed.get("habit_name") + result, status = apiRequest("post", "/api/habits", token, {"name": name}) + if status == 201: + await message.channel.send(f"Created habit: **{name}**") + +def validate_habit_json(data): + errors = [] + if "action" not in data: + errors.append("Missing required field: action") + return errors + +register_module("habit", handle_habit) +ai_parser.register_validator("habit", validate_habit_json) + + +**Step 4: Add AI 
prompts** + +Edit ''ai/ai_config.json'': + +{ + "prompts": { + "command_parser": { ... }, + "habit_parser": { + "system": "You parse habit tracking commands...", + "user_template": "Parse: \"{user_input}\". Return JSON with action (list/add/complete) and habit_name." + } + } +} + + +===== Troubleshooting ===== + +==== Common Issues ==== + +**Bot not responding to DMs** + * Verify ''message_content'' intent is enabled in Discord Developer Portal + * Check bot has permission to read messages + +**API returning 401 Unauthorized** + * Verify token is valid and not expired + * Check Authorization header format: ''Bearer '' + +**AI parser failing** + * Verify OpenRouter API key is valid + * Check model is available + * Review prompts in ''ai_config.json'' + +**Database connection errors** + * Verify PostgreSQL is running + * Check environment variables match Docker config + +==== Debugging Tips ==== + +**Enable verbose logging:** + +import logging +logging.basicConfig(level=logging.DEBUG) + + +**Test database connection:** + +from core import postgres +print(postgres.select("users")) + + +**Test AI parser:** + +import ai.parser as ai_parser +result = ai_parser.parse("add task buy milk", "command_parser") +print(result) + + +===== Conclusion ===== + +The LLM Bot Framework provides a solid foundation for building AI-powered Discord bots. 
Its modular architecture allows developers to: + + * Add new domains without modifying core code + * Use any OpenAI-compatible LLM + * Deploy easily with Docker + * Scale with PostgreSQL + +Key design decisions: + * **Separation of concerns** - Bot, API, and core logic are independent + * **Configuration-driven** - AI behavior is customizable via JSON + * **Security-first** - Parameterized queries, hashed passwords, JWT auth + * **Developer experience** - Clear patterns for extending the framework + +The framework demonstrates how LLMs can bridge the gap between natural language and structured database operations, enabling conversational interfaces that feel intuitive to users. diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 0000000..444d8b2 --- /dev/null +++ b/Dockerfile @@ -0,0 +1,10 @@ +FROM python:3.11-slim + +WORKDIR /app + +COPY requirements.txt . +RUN pip install --no-cache-dir -r requirements.txt + +COPY . . + +CMD ["python", "-m", "api.main"] diff --git a/HOOKS.md b/HOOKS.md new file mode 100644 index 0000000..5b656f0 --- /dev/null +++ b/HOOKS.md @@ -0,0 +1,44 @@ +# Where the hooks are + +## API route registration — `api/main.py` + +Lines 10-11: imported `api.routes.routines` and `api.routes.medications` +Line 16: added both to `ROUTE_MODULES` list so they auto-register on startup + +## Bot command registration — `bot/bot.py` + +Lines 23-24: imported `bot.commands.routines` and `bot.commands.medications` +These imports trigger `register_module()` and `register_validator()` at load time, +which makes the bot's AI parser route "routine" and "medication" interaction types +to the right handlers. 
+ +## Bot command handlers — `bot/commands/routines.py`, `bot/commands/medications.py` + +Each file: +- Defines an async handler (`handle_routine`, `handle_medication`) +- Defines a JSON validator for the AI parser +- Calls `register_module()` to hook into the command registry +- Calls `ai_parser.register_validator()` to hook into parse validation + +## Scheduler — `scheduler/daemon.py` + +`poll_callback()` now calls three check functions on every tick: +- `check_medication_reminders()` — sends notifications for doses due now +- `check_routine_reminders()` — sends notifications for scheduled routines +- `check_refills()` — warns when medication supply is running low + +All three use `core.notifications._sendToEnabledChannels()` to deliver. + +## AI config — `ai/ai_config.json` + +Updated the `command_parser` system prompt to list the two interaction types +(`routine`, `medication`) and the fields to extract for each. This is what +tells the LLM how to parse natural language into the right action structure. + +## What's NOT hooked yet (needs implementation) + +- `config/schema.sql` — needs tables for routines, routine_steps, + routine_sessions, routine_schedules, medications, med_logs +- The actual body of every API route (all prototyped as `pass`) +- The actual body of both bot command handlers +- The three scheduler check functions diff --git a/README.md b/README.md new file mode 100644 index 0000000..a5656db --- /dev/null +++ b/README.md @@ -0,0 +1,251 @@ +# LLM Bot Framework + +A template for building Discord bots powered by LLMs with natural language command parsing. 
+ +## Features + +- **AI-Powered Parsing**: Uses LLMs to parse natural language into structured JSON with automatic retry and validation +- **Module Registry**: Easily register domain-specific command handlers +- **Flask API**: REST API with JWT authentication +- **PostgreSQL**: Generic CRUD layer for any table +- **Discord Bot**: Session management, login flow, background tasks +- **Notifications**: Discord webhook + ntfy support out of the box +- **Docker Ready**: Full docker-compose setup + +## Quick Start + +```bash +# Copy environment config +cp config/.env.example config/.env + +# Edit with your values +nano config/.env + +# Start everything +docker-compose up +``` + +## Project Structure + +``` +llm-bot-framework/ +├── api/ +│ ├── main.py # Flask app with auth routes +│ └── routes/ +│ └── example.py # Example route module +├── bot/ +│ ├── bot.py # Discord client +│ ├── command_registry.py # Module registration +│ └── commands/ +│ └── example.py # Example command module +├── core/ +│ ├── postgres.py # Generic PostgreSQL CRUD +│ ├── auth.py # JWT + bcrypt +│ ├── users.py # User management +│ └── notifications.py # Multi-channel notifications +├── ai/ +│ ├── parser.py # LLM JSON parser +│ └── ai_config.json # Model + prompts config +├── scheduler/ +│ └── daemon.py # Background polling +├── config/ +│ ├── schema.sql # Database schema +│ └── .env.example # Environment template +├── docker-compose.yml +├── Dockerfile +└── requirements.txt +``` + +## Creating a Domain Module + +### 1. Add Database Schema + +Edit `config/schema.sql`: + +```sql +CREATE TABLE IF NOT EXISTS tasks ( + id UUID PRIMARY KEY, + user_uuid UUID REFERENCES users(id) ON DELETE CASCADE, + name VARCHAR(255) NOT NULL, + completed BOOLEAN DEFAULT FALSE, + created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP +); +``` + +### 2. 
Create API Routes + +Create `api/routes/tasks.py`: + +```python +import flask +import sys, os +sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))) + +import core.auth as auth +import core.postgres as postgres +import uuid + +def register(app): + @app.route('/api/tasks', methods=['GET']) + def api_listTasks(): + header = flask.request.headers.get('Authorization', '') + if not header.startswith('Bearer '): + return flask.jsonify({'error': 'missing token'}), 401 + token = header[7:] + + # Get user UUID from token + from core.auth import decodeJwtPayload + import json, base64 + payload = token.split('.')[1] + payload += '=' * (4 - len(payload) % 4) + decoded = json.loads(base64.urlsafe_b64decode(payload)) + user_uuid = decoded['sub'] + + tasks = postgres.select("tasks", {"user_uuid": user_uuid}) + return flask.jsonify(tasks), 200 + + @app.route('/api/tasks', methods=['POST']) + def api_addTask(): + header = flask.request.headers.get('Authorization', '') + if not header.startswith('Bearer '): + return flask.jsonify({'error': 'missing token'}), 401 + token = header[7:] + + data = flask.request.get_json() + task = postgres.insert("tasks", { + 'id': str(uuid.uuid4()), + 'user_uuid': data['user_uuid'], + 'name': data['name'], + }) + return flask.jsonify(task), 201 +``` + +Register it in `api/main.py`: + +```python +import api.routes.tasks as tasks_routes +register_routes(tasks_routes) +``` + +### 3. 
Create Bot Commands + +Create `bot/commands/tasks.py`: + +```python +import sys, os +sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))) + +from bot.command_registry import register_module +import ai.parser as ai_parser + +async def handle_task(message, session, parsed): + action = parsed.get('action') + token = session['token'] + user_uuid = session['user_uuid'] + + # Make API calls using the bot's apiRequest helper + from bot.bot import apiRequest + + if action == 'list': + result, status = apiRequest('get', f'/api/tasks', token) + if status == 200: + lines = [f"- {t['name']}" for t in result] + await message.channel.send("Your tasks:\n" + "\n".join(lines)) + else: + await message.channel.send("Failed to fetch tasks.") + + elif action == 'add': + task_name = parsed.get('task_name') + result, status = apiRequest('post', '/api/tasks', token, { + 'user_uuid': user_uuid, + 'name': task_name + }) + if status == 201: + await message.channel.send(f"Added task: **{task_name}**") + else: + await message.channel.send("Failed to add task.") + +def validate_task_json(data): + errors = [] + if 'action' not in data: + errors.append('Missing required field: action') + if data.get('action') == 'add' and 'task_name' not in data: + errors.append('Missing required field for add: task_name') + return errors + +register_module('task', handle_task) +ai_parser.register_validator('task', validate_task_json) +``` + +### 4. Add AI Prompts + +Edit `ai/ai_config.json`: + +```json +{ + "prompts": { + "command_parser": { + "system": "You are a helpful assistant...", + "user_template": "..." + }, + "task_parser": { + "system": "You parse task commands...", + "user_template": "Parse: \"{user_input}\"\n\nReturn JSON with action (list/add/complete) and task_name." 
+ } + } +} +``` + +## AI Parser Usage + +```python +import ai.parser as ai_parser + +# Basic usage +parsed = ai_parser.parse(user_input, 'command_parser') + +# With conversation history +history = [("previous message", {"action": "add", "item": "test"})] +parsed = ai_parser.parse(user_input, 'command_parser', history=history) + +# Register custom validator +ai_parser.register_validator('task', validate_task_json) +``` + +## Notification Channels + +```python +import core.notifications as notif + +# Set user notification settings +notif.setNotificationSettings(user_uuid, { + 'discord_webhook': 'https://discord.com/api/webhooks/...', + 'discord_enabled': True, + 'ntfy_topic': 'my-alerts', + 'ntfy_enabled': True +}) + +# Send notification +settings = notif.getNotificationSettings(user_uuid) +notif._sendToEnabledChannels(settings, "Task due: Buy groceries") +``` + +## Environment Variables + +| Variable | Description | +|----------|-------------| +| `DISCORD_BOT_TOKEN` | Discord bot token | +| `API_URL` | API URL (default: `http://app:5000`) | +| `DB_HOST` | PostgreSQL host | +| `DB_PORT` | PostgreSQL port | +| `DB_NAME` | Database name | +| `DB_USER` | Database user | +| `DB_PASS` | Database password | +| `JWT_SECRET` | JWT signing secret | +| `OPENROUTER_API_KEY` | OpenRouter API key | +| `OPENROUTER_BASE_URL` | OpenRouter base URL | +| `AI_CONFIG_PATH` | Path to ai_config.json | + +## License + +MIT diff --git a/ai/__pycache__/parser.cpython-312.pyc b/ai/__pycache__/parser.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..98b6305dbe5db68776b8dc9d90bcfb1761093f72 GIT binary patch literal 6051 zcmb_gT}&KTmcG^hUHw<4n}4ti+lgrm*kEUHu;UEII3dPhfwPhsXFKJt0;=i1xm69O zU1ZQI4{gn8jg^sLH;$H!Rth6qT1HxFywXa$J08u;zI3M((bTY#rOiv;u<>dluU6S} zZ*?_o57EACLEpMR=bn4-x#xW6oced4k45mTPyV%hB#6*|l7{)X%9Y)}We{3HBAP@Z zBU<9jBtzepNeg{jC#~?d#%&4vq@6*MCFV6*?7d@wy}DTwZ9vPJt-W^)cJk`;1C+I| 
z(jg>y9>9LdBibKWjGFnL^jeVQlf0tC?0ExC`k{}B9W*GbOUZ9?n&duR2Ym+si=VLS(6unS}#DM7gn3=2<{m`mgupB`>NMYtJ z47!dO`1{3$Th{Ano(XfpNjNZ@7O*1W?(_oR#gB|!=}M>OB`k^jdlO@$yfMhn%jyh| zB^595LQ>@Cgt#mUs+>x)>|iQ6B~N#WSe}!T{G2TCf*gs`&hD6!N_O)TnRGgZRi&Tp z;s=ro{8(B_4h(liQ;9V2xEYuDfvdxOJLw)zWmE}w^6^wuhPah8f*qoJ02y3`YjXMlTJ&6S+EY z?cJ>Z<@4(d)@#PD4vmhFy?7TpQ8$gH%WyKV}K6v^Otw5+pKgX~aR;kWw@6e~!rjK#|OOsUF+ z$Y@Bdbi{0xYNhgk@PcQN(HML{V`5IDU0s51msgqizt*$sL}hO%9ql)6cXzyf zV=)YM=>3+QG3$lipZw+R#Sc1QU^HygEm+bWiX`CZjPAHNc5Pr}M7K}Fy`|xP2UVT#;HWFeU9B3`bYvnt`>D%*>Y09-&L%-d_nJe1K(1wf_M1hqX% z(MD#9c3L_#Bs%X~M3=^p_q64q`$1WC7cJVpE<|L>%$WbBrD$W(s#*7QB)Dn+{{0-x zgEHGBBYGBXnhjet8<;}H!Dv>|EBY#?WY_FCpw^pv(d?%AEIKp?%ymql1>X^*HkcGg z5k9%-RGZDV=vRqq&_}B{jQ!lur$`KF&QH;2Hj#UHz?3U-1EO!y`H^$MXT0Z`c~o`@ zHQ6(R0+`eO5KW*ToB)ADVsBVd(twB(HL7mVbb`$~n-FeA)YPn$RAj^>q_cb8LJz2N zLIPNeC=y_;sDSk>D5{ho=7LjkNq15RPO7>s15PPy!*y^_OvI%-%X4*CLQ)iAT2e4k zOdaX&iYbH43F8#*=d+f6{u&^)eJY+&X2P8Ah{k1!Fpq-&&^^Qh#U%>sFa%6g zNq1M!T(?e3Dr{&bB}XMica~ShEo4!DRx2G|94Zwhf zw4}Qtk))82B9X95_ZbI{q`}zJum^HXv*9u&s#_DvG$s)aZX>I?%Qzv4IwL8>r1+wAJ8l%} z-9@etn{(~dpg{em|6sv?aP_AJf7_-%T=0i;*3#?YFJ^x=n^OojZ3g&4fd49RcmwBb zTb@8ESf9JR9co@3Tx(tzi=k6F_m(HL-Ed;VU2N#j4Q~Y+O7(o6<4b|yciW!OGh~4i zxO~f=`xn;^eEQC2OJ|{_GvC~`KEA=MD|!E^N2kAb{dhYN{O=t*kiJkzMD|w~4h|yp zyS7#+A0zkRo7Ttf*9T8qAD^~EJxUy<`4DkZdnnr*E6$h-@L{{`PA(QGN`rspkZh%27x@hTMWHo1up!zG1<`TVG=z~-EG&k(R zYVu%^TES$ERV%@`i5&P%jg9SdI zD3@$VnLxBIjmo$p{%zb6vC?j*-{l=>_ z&ae4#s|9JWLS?;x7Qn$OKY}zyvlx1z1vJ~kh6iO$oIqqp;EyWaGuE_s0<#7_svfjw z&o5Q+6YHm$YK~FMbu@u09;ZpT4*>^D;Or_%M0f&-5OEuky_(_yXfsjpIc7$ZQAi#D zDBsP}Sp#?)G7A$ADV);T0R6;A3G)WWGJ01Ct77^H0qK7Jva~>QiI<~y-oR;vZ272O~xlcPw0CNV6kId@@P3pC2X}LsEzncL_NY?Uitamii~9pTCNwIXRV4 zs`C_MjHV!Ta%Iwttf(ox(9e%Rg~YP$^swM5I*l|8=8SrZg~Yh$9ix`()RvLRVgvUQgPISv;B*F)vN3sZ1jKI@|A z1q$%zcFIO)_ijXYmW7#ZdOtavOwGTPS@Fg#WP_u?)Raqk_tepU`#;}*|NST?5S4Xx z^Y4S6^z*o%G(D#Fi2eKvhShy^QHU$jaq#J33qDJZ5GYHKWTzy|&K_df$_;=Q2Xd94 z5kx}(a6Q>3_G%nbOOjgLqKPj}fTsmH*^P-AWa|codDzDsLA>%3|L~~ymf0_N`lu1d6C>)J;T97OFOL@&(sOy5z 
zrS9C9&T~)dyVhsdKX}xhuN^Ek9erxIa_kPWy4mL}sy(pBFkgG|r8zq+s_$GH+j9DUd1`t7lXGirUj~nq8k%yxZ9cqyY~%38 zk@e0Z-}emJ-ES4VExCa^i_6yKzbJVdH@$qp%dZWrGoKHw^{uyWoZ5IT-*)CJ@0;6! z#?{yV{^(bMV;hd&vcF*;^?h}6u*9`)az_i?(Y4$Ec>1^fzv+K8RXjD6@4b|N?VYc= zcc1JHZ~SPZJ^$L-ueozO9$0n9i<%GpUHEUpYgdX*y}7`XnnP=WLe0s|n!Z9!Un$tS z{<9q`3ZB_TmY`$l@>3Ub`Ae=~$$w(Qv3@JxH=g%SJhfWfY|iuCMI@A1f46Y(jg8yI z&>$5~sj=r@n+k!EylZ5~=C-rj2WnTE?lrAmDIVy^Id($G6osbk(*c=-g0tVEw8Oj6`g+%|Q85K1|{q z^4q-%1$@M0s^UBv56&^zD_2K@A(_{e^qEv*0=e2IcB1oqzA+;at36YhE2G*uwda z{x5B3aR}yxtrVa~N5l2HGZGO~(MSZp3j?|(rC<_A>JIWjOOP=UsqU1Lb23gP4Xm@l zSHKzk9vu*6@R|vlF-?Joi9~cy)u%PxM!4wAlx`!aMvtvKAO!<{_y{ndX&A*l10Klf zevRlJsR&-U&OwrziA(2k42B@^K+0n%cB~A;JhiwO+inOk-ajHY{Qn1PeB$Ts#_q)K zChsImw$Sp;mFauaEAe~rwcaoKKJWYD?a$xdI8!`wE`NTsSU0w7wFE3n9xx~en>)U& z6z$DBh+$e+e_G-imu)NVd+yb?PrOURe}dIopE?lBmAtjN3&y9Njv}b4uP!%!_xhdd z`TEX$_ejxq<+= AI_CONFIG["validation"]["max_retries"]: + return { + "error": f"Failed to parse after {retry_count} retries", + "user_input": user_input, + } + + prompt_config = AI_CONFIG["prompts"].get(interaction_type) + if not prompt_config: + return { + "error": f"Unknown interaction type: {interaction_type}", + "user_input": user_input, + } + + history_context = "No previous context" + if history and len(history) > 0: + history_lines = [] + for i, (msg, result) in enumerate(history[-3:]): + history_lines.append(f"{i + 1}. User: {msg}") + if isinstance(result, dict) and not result.get("error"): + history_lines.append(f" Parsed: {json.dumps(result)}") + else: + history_lines.append(f" Parsed: {result}") + history_context = "\n".join(history_lines) + + user_prompt = prompt_config["user_template"].format( + user_input=user_input, history_context=history_context + ) + + if errors: + user_prompt += ( + f"\n\nPrevious attempt had errors: {errors}\nPlease fix and try again." 
+ ) + + response_text = _call_llm(prompt_config["system"], user_prompt) + if not response_text: + return {"error": "AI service unavailable", "user_input": user_input} + + try: + parsed = json.loads(response_text) + except json.JSONDecodeError: + return parse( + user_input, + interaction_type, + retry_count + 1, + ["Response was not valid JSON"], + history=history, + ) + + if "error" in parsed: + return parsed + + validator = AI_CONFIG["validation"].get("validators", {}).get(interaction_type) + if validator: + validation_errors = validator(parsed) + if validation_errors: + return parse( + user_input, + interaction_type, + retry_count + 1, + validation_errors, + history=history, + ) + + return parsed + + +def register_validator(interaction_type, validator_fn): + """Register a custom validation function for an interaction type.""" + if "validators" not in AI_CONFIG["validation"]: + AI_CONFIG["validation"]["validators"] = {} + AI_CONFIG["validation"]["validators"][interaction_type] = validator_fn diff --git a/api/__pycache__/main.cpython-312.pyc b/api/__pycache__/main.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..36f63ddf3b3cbe74c6d4626ccf48308a4e225c45 GIT binary patch literal 7190 zcmd^ET}&HS7M>Z8fAGvOHYNmUXvRqkIBB3w(uA}?wuwndHz832Rm*N=S zOMkg>j{bV)_;H@K`@D0$abKnFp9_o!ScYRXE)8SX7cKY|Yd;$orkHV2^?b~XOPY}G zsF!3Il~=t}?AN}pz!EH~|6}GWyUC2pJLm-JbQ)Ybp&fLBs!%hJ=+X)Apd;1kG`e(} zcGQt;<_WoUB0K7YYjm1iI?X%iMCxp7cIoWVg6f``ZiXQzG%?P+2kSoG0$*#d)}ppJ zE3LDiN?J?$mC9nfel)@Pek}9pex3mfj=8!kO?9u@`gNN#8q3_^Zg8>oh4+JV$yBCi z{)TcyIX9csXOuUtTu?qp8PiHK2NlWY3{6*(8C99fs<~NBA=*?*H^_}(FrG!-st9rV zG@_}>wNz3urs0u2fTPtDe1ry#OjIC8t@A*Q*Jck?cNl{{#43$X>q?&4dfgjrj9WPO z7PUwtO46ClHnm5&WSgu{XLGZvGM&7pp-e@*J2^ieQzmnn2_u!w^o+zfOM2__*yvE= z(&hNr#i6T~4B-F2m+qFnP`5R5<7e;&0mV z6|RR4KNp!illHqhDovGj7Icx_L4vDFaCw$+c}{OLsvj_oFI-ntkIJX2)oDYH(=;Q6nadSb?0bkxX(3URLiRok|j@`ce*GAoL<1t`?(ar{LyWg(DLiKi3v^DM<4weY+q=s>KMrxEtC2J 
zWMY2HJBj?V{HUwxh8397j6_<`W-Jf1EI&3=lQ%3cs_T~Mz+((a#nxz83i_JMvXmKwP3Xut5&Ei9z%#F0lF{ZXC1}i zVmx^c1*$+M`{A|@#SMsJ3I*~4^g?K4^gB@f+z=K+>*Dik;`1xli>)Q`=+e-AzvuvX zs*p1~j+Eq{RgWnjGXuw50PZ(C21@eaH_&s|44kzA{2T=^yetxSZ0th3x8HFDgO=y~ z&}dD}tGe~c-M|`2P*=8h{jbt{WfEQ?QU03ZI1;51=5C(1s7ss9?A>LXAfsn1D#*hzstxB zmj*csy{F*PSD*rZNKLoiyZPR_bYM+7ur773Nu7lc?{X#S^`&zgLU>)+wdObLRVu$h4z8Qh)}fBn^x;_HAzQ1bhy z17B@WgLwuBSfLS(3bDK=jjpz?sDjEkaxC+7y5*}5Ko#qv#`Z*#Tuz{IR~oWQ!=_$n zgmi(;1YJ3aVMT+@A#5Sb$YH3yWbIT8fnk$_OR1LZWL=5rq&}^wl#6zfrDU>(GMUX~ z)YRvI81fPf5)-y~Nsi#qG$7&DVvCHV7j7Zj%j(7y9~?7Fs;wwk}NJKENR(=?BpvxO5Y$Uyz8!ckcO5JdpRU%Lmrv1BH{t(bYclrT&sU zu*}~V(peSpAODV}*cfi2IKvDzSpVTtBu5C0$dI_=t+ZnyWUhA~)`N~!L>JVwNMmd1RkuH+eD zrNeVR&)4jIwqqsu1{cSKU9nPhX-K=$Xmn}VJeGIEV{#l`kKi77)Fx|NCac)#<77&k zRdoojN^TxDG#bq`|4rW7X(kPxL3QLW1_uE{T!x&Ov92y;}3Aj}P_XGHzxw%5=y7IrDg}MUn z=l=+j+Q>@uvrU>O8E* zT*f&hp}xbnt?xiX33VVoXi*1JIcA|AgxaW~K8ZvB6kIx-v;P-7$VO!EoxacdR^BW{ zx|Rd?<-HZ}u@P=H+s~Mxvzs2Sx$y_NsqE#NBV~q@BLDO;P2hH3SPQ>Ucn1>saNn-o z&VU&mEJaR#6EP!kQ;a{|?JW3vVKXypOmBavgOLLK4y>A8(%S@9{O5)XZypR?F@D7I z!8Xgt=@yrrpSnI zL&dYE&};I&bO0-mNZ?6tB4Gs_#4Pwx0#+B?A@y{8GKo#smPfs>7rJ}}Jk(&KoXA5SJ>I1ijg6CS$g3A5#; z$IbMJj~Pf>OO-s^` zK8}r+Lk#Err0t`&<K$sXrq8C&I-9ivaJ", methods=["GET"]) +def api_getUserUUID(username): + header = flask.request.headers.get("Authorization", "") + if not header.startswith("Bearer "): + return flask.jsonify({"error": "missing token"}), 401 + token = header[7:] + if auth.verifyLoginToken(token, username): + return flask.jsonify(users.getUserUUID(username)), 200 + else: + return flask.jsonify({"error": "unauthorized"}), 401 + + +@app.route("/api/user/", methods=["GET"]) +def api_getUser(userUUID): + header = flask.request.headers.get("Authorization", "") + if not header.startswith("Bearer "): + return flask.jsonify({"error": "missing token"}), 401 + token = header[7:] + if auth.verifyLoginToken(token, userUUID=userUUID): + user = postgres.select_one("users", {"id": userUUID}) + if user: + 
user.pop("password_hashed", None) + return flask.jsonify(user), 200 + else: + return flask.jsonify({"error": "user not found"}), 404 + else: + return flask.jsonify({"error": "unauthorized"}), 401 + + +@app.route("/api/user/", methods=["PUT"]) +def api_updateUser(userUUID): + header = flask.request.headers.get("Authorization", "") + if not header.startswith("Bearer "): + return flask.jsonify({"error": "missing token"}), 401 + token = header[7:] + if auth.verifyLoginToken(token, userUUID=userUUID): + data = flask.request.get_json() + result = users.updateUser(userUUID, data) + if result: + return flask.jsonify({"success": True}), 200 + else: + return flask.jsonify({"error": "no valid fields to update"}), 400 + else: + return flask.jsonify({"error": "unauthorized"}), 401 + + +@app.route("/api/user/", methods=["DELETE"]) +def api_deleteUser(userUUID): + header = flask.request.headers.get("Authorization", "") + if not header.startswith("Bearer "): + return flask.jsonify({"error": "missing token"}), 401 + token = header[7:] + if auth.verifyLoginToken(token, userUUID=userUUID): + data = flask.request.get_json() + password = data.get("password") + if not password: + return flask.jsonify( + {"error": "password required for account deletion"} + ), 400 + result = auth.unregisterUser(userUUID, password) + if result: + return flask.jsonify({"success": True}), 200 + else: + return flask.jsonify({"error": "invalid password"}), 401 + else: + return flask.jsonify({"error": "unauthorized"}), 401 + + +# ── Health Check ─────────────────────────────────────────────────── + + +@app.route("/health", methods=["GET"]) +def health_check(): + return flask.jsonify({"status": "ok"}), 200 + + +if __name__ == "__main__": + for module in ROUTE_MODULES: + if hasattr(module, "register"): + module.register(app) + app.run(host="0.0.0.0", port=5000) diff --git a/api/routes/__pycache__/example.cpython-312.pyc b/api/routes/__pycache__/example.cpython-312.pyc new file mode 100644 index 
0000000000000000000000000000000000000000..fbc5597959cfcc0539ad55da13f93333ef63be52 GIT binary patch literal 3585 zcmeHJO>7&-6`tMY@@GYP^=Dgl>tJPBmZ;REtyGm8%VumxZCoTuU`Z*Y3$f;oq@|Wz zW@aggiqzOeQOlQFx|F_^1`Jq%b4X4x&_fd-2M4`SpbI$@1qcvdbhD#Np;PBAxumS9 zKR_=%bb$Tl@4eZ1Z{GK2{@U26AYT6T$3K|!0ffGTj#u0n-q}7XAXGpGnm~qNh?X!R zxV1PTy0tVRVKEi-)arGrq2BgdhJdAgWg9(-di`HZs`a(J77jk()bij|>> zkZYZiZkTAOj)l{W6OFtr{SHkWFnk=o(48nJT@*3!0-16k$!L={(eieVXoj8E%}i8LMq#?DVc@i#p$^dp4C?t6Qd_JEomcUWsaN=`%3H2lQ(?#adFgEUL|# zx|X%6GexlXV`U3$A5KTF^4$gOSBtx}fJA*ZwpPg=6IfA21 z;W~+tq03i?Mw$G^NMHZYA_7x%Ysx14aMF}XGcu|2-1q zwy)IIXA-Y>C@}T6gK_ki&Wl~r=O+Y?F9f5H&%42B@kRkHqEsyy`9QS*U)|8Eh9@xe zL3NIY*+0{fU(oUI(h>84NN}OkBjF1Sbf$5gh0%qu>&&Vbne@w{Q63V3G^Mug=wnf6jtnnhxpPau7ZuVqoCvXYtgy#Y#W1!_uW7?!{qIH?L zwaQO#3r^xgfdzpqkTX~~mw`MC>tKTgtMNd$9&y1)1Cu8qZ?gcw@8vLcSYR6K24^mT zw8a$a=)|Fr;8`F=?Tk5@ciG2$uqbPq&?Au@rRen&GBXb6ZPPRs;#S9@yKKK<;~Xi!ui$n#qPCu zspDL^b?{-b)OxWLzWAi+(8_x&WJOt)pSB$@_Ltit%b~B-_Ith!t$$tX{}_K}{!M$Q zu79D_@oKsC+{2Ml>uaU(Yfqcn3+k#`RLV^+FUy;1+wG-WOLxY~YG-kzr1q48J==V% zi22@mdt`KfCVugMCW4E2+3$Ac+2DsvtcZigep#!=)0$X45laqSCT>X`@>=!UP&hli zOy>JHJg&VFYCI-~`BJ1E;0VA`fFA+;7~mMdO8^=GMDo^`93o-T2?Hkpx&VNYuSWijrRF`ppc_08T_EKLHV-9ytXQK6raEpG!S>9s%>J)H?vQ(hZ}^ zyajE4;?*T$T%Z1#!~X{DDn#2*yVljNvf91u+f)zV{@~UJcNW%uT2@a(ze2^)vU1Oao@TPi0T@hCL3InSHh4^Z`*mr;6?!f){-T23SpALL7@M-*$_-AL!ue@3Ae5>3# c{20Cc9a6qQ?cX5nd%s_3+irPD=yA934~Wqd$^ZZW literal 0 HcmV?d00001 diff --git a/api/routes/example.py b/api/routes/example.py new file mode 100644 index 0000000..1f1ff2b --- /dev/null +++ b/api/routes/example.py @@ -0,0 +1,56 @@ +""" +Example route module - Copy this pattern for your domain. + +This module demonstrates: +1. Registering routes with Flask app +2. Using auth validation +3. Making database calls via postgres module +""" + +import os +import flask +import jwt +import core.auth as auth +import core.postgres as postgres + + +def _get_user_uuid(token): + """Decode JWT to extract user UUID. 
Returns None on failure.""" + try: + payload = jwt.decode(token, os.getenv("JWT_SECRET"), algorithms=["HS256"]) + return payload.get("sub") + except (jwt.ExpiredSignatureError, jwt.InvalidTokenError): + return None + + +def register(app): + """Register routes with the Flask app.""" + + @app.route("/api/example", methods=["GET"]) + def api_listExamples(): + header = flask.request.headers.get("Authorization", "") + if not header.startswith("Bearer "): + return flask.jsonify({"error": "missing token"}), 401 + token = header[7:] + + user_uuid = _get_user_uuid(token) + if not user_uuid or not auth.verifyLoginToken(token, userUUID=user_uuid): + return flask.jsonify({"error": "unauthorized"}), 401 + + items = postgres.select("examples") + return flask.jsonify(items), 200 + + @app.route("/api/example", methods=["POST"]) + def api_addExample(): + header = flask.request.headers.get("Authorization", "") + if not header.startswith("Bearer "): + return flask.jsonify({"error": "missing token"}), 401 + token = header[7:] + + user_uuid = _get_user_uuid(token) + if not user_uuid or not auth.verifyLoginToken(token, userUUID=user_uuid): + return flask.jsonify({"error": "unauthorized"}), 401 + + data = flask.request.get_json() + item = postgres.insert("examples", data) + return flask.jsonify(item), 201 diff --git a/api/routes/medications.py b/api/routes/medications.py new file mode 100644 index 0000000..5e98858 --- /dev/null +++ b/api/routes/medications.py @@ -0,0 +1,160 @@ +""" +Medications API - medication scheduling, logging, and adherence tracking +""" + +import os +import flask +import jwt +import core.auth as auth +import core.postgres as postgres + + +def _get_user_uuid(token): + try: + payload = jwt.decode(token, os.getenv("JWT_SECRET"), algorithms=["HS256"]) + return payload.get("sub") + except (jwt.ExpiredSignatureError, jwt.InvalidTokenError): + return None + + +def _auth(request): + """Extract and verify token. 
Returns user_uuid or None.""" + header = request.headers.get("Authorization", "") + if not header.startswith("Bearer "): + return None + token = header[7:] + user_uuid = _get_user_uuid(token) + if not user_uuid or not auth.verifyLoginToken(token, userUUID=user_uuid): + return None + return user_uuid + + +def register(app): + + # ── Medications CRUD ────────────────────────────────────────── + + @app.route("/api/medications", methods=["GET"]) + def api_listMedications(): + """List all medications for the logged-in user.""" + user_uuid = _auth(flask.request) + if not user_uuid: + return flask.jsonify({"error": "unauthorized"}), 401 + pass + + @app.route("/api/medications", methods=["POST"]) + def api_addMedication(): + """Add a medication. Body: {name, dosage, unit, frequency, times: ["08:00","20:00"], notes?}""" + user_uuid = _auth(flask.request) + if not user_uuid: + return flask.jsonify({"error": "unauthorized"}), 401 + data = flask.request.get_json() + pass + + @app.route("/api/medications/", methods=["GET"]) + def api_getMedication(med_id): + """Get a single medication with its schedule.""" + user_uuid = _auth(flask.request) + if not user_uuid: + return flask.jsonify({"error": "unauthorized"}), 401 + pass + + @app.route("/api/medications/", methods=["PUT"]) + def api_updateMedication(med_id): + """Update medication details. 
Body: {name?, dosage?, unit?, frequency?, times?, notes?, active?}""" + user_uuid = _auth(flask.request) + if not user_uuid: + return flask.jsonify({"error": "unauthorized"}), 401 + data = flask.request.get_json() + pass + + @app.route("/api/medications/", methods=["DELETE"]) + def api_deleteMedication(med_id): + """Delete a medication and its logs.""" + user_uuid = _auth(flask.request) + if not user_uuid: + return flask.jsonify({"error": "unauthorized"}), 401 + pass + + # ── Medication Logging (take / skip / snooze) ───────────────── + + @app.route("/api/medications//take", methods=["POST"]) + def api_takeMedication(med_id): + """Log that a dose was taken. Body: {scheduled_time?, notes?}""" + user_uuid = _auth(flask.request) + if not user_uuid: + return flask.jsonify({"error": "unauthorized"}), 401 + data = flask.request.get_json() or {} + pass + + @app.route("/api/medications//skip", methods=["POST"]) + def api_skipMedication(med_id): + """Log a skipped dose. Body: {scheduled_time?, reason?}""" + user_uuid = _auth(flask.request) + if not user_uuid: + return flask.jsonify({"error": "unauthorized"}), 401 + data = flask.request.get_json() or {} + pass + + @app.route("/api/medications//snooze", methods=["POST"]) + def api_snoozeMedication(med_id): + """Snooze a reminder. Body: {minutes: 15}""" + user_uuid = _auth(flask.request) + if not user_uuid: + return flask.jsonify({"error": "unauthorized"}), 401 + data = flask.request.get_json() + pass + + # ── Medication Log / History ────────────────────────────────── + + @app.route("/api/medications//log", methods=["GET"]) + def api_getMedLog(med_id): + """Get dose log for a medication. 
Query: ?days=30""" + user_uuid = _auth(flask.request) + if not user_uuid: + return flask.jsonify({"error": "unauthorized"}), 401 + pass + + @app.route("/api/medications/today", methods=["GET"]) + def api_todaysMeds(): + """Get today's medication schedule with taken/pending status.""" + user_uuid = _auth(flask.request) + if not user_uuid: + return flask.jsonify({"error": "unauthorized"}), 401 + pass + + # ── Adherence Stats ─────────────────────────────────────────── + + @app.route("/api/medications/adherence", methods=["GET"]) + def api_adherenceStats(): + """Get adherence stats across all meds. Query: ?days=30""" + user_uuid = _auth(flask.request) + if not user_uuid: + return flask.jsonify({"error": "unauthorized"}), 401 + pass + + @app.route("/api/medications//adherence", methods=["GET"]) + def api_medAdherence(med_id): + """Get adherence stats for a single medication. Query: ?days=30""" + user_uuid = _auth(flask.request) + if not user_uuid: + return flask.jsonify({"error": "unauthorized"}), 401 + pass + + # ── Refills ─────────────────────────────────────────────────── + + @app.route("/api/medications//refill", methods=["PUT"]) + def api_setRefill(med_id): + """Set refill info. Body: {quantity_remaining, refill_date?, pharmacy_notes?}""" + user_uuid = _auth(flask.request) + if not user_uuid: + return flask.jsonify({"error": "unauthorized"}), 401 + data = flask.request.get_json() + pass + + @app.route("/api/medications/refills-due", methods=["GET"]) + def api_refillsDue(): + """Get medications that need refills soon. Query: ?days_ahead=7""" + user_uuid = _auth(flask.request) + if not user_uuid: + return flask.jsonify({"error": "unauthorized"}), 401 + pass diff --git a/api/routes/routines.py b/api/routes/routines.py new file mode 100644 index 0000000..6a955d6 --- /dev/null +++ b/api/routes/routines.py @@ -0,0 +1,204 @@ +""" +Routines API - Brilli-style routine management + +Routines have ordered steps. Users start sessions to walk through them. 
+""" + +import os +import flask +import jwt +import core.auth as auth +import core.postgres as postgres + + +def _get_user_uuid(token): + try: + payload = jwt.decode(token, os.getenv("JWT_SECRET"), algorithms=["HS256"]) + return payload.get("sub") + except (jwt.ExpiredSignatureError, jwt.InvalidTokenError): + return None + + +def _auth(request): + """Extract and verify token. Returns user_uuid or None.""" + header = request.headers.get("Authorization", "") + if not header.startswith("Bearer "): + return None + token = header[7:] + user_uuid = _get_user_uuid(token) + if not user_uuid or not auth.verifyLoginToken(token, userUUID=user_uuid): + return None + return user_uuid + + +def register(app): + + # ── Routines CRUD ───────────────────────────────────────────── + + @app.route("/api/routines", methods=["GET"]) + def api_listRoutines(): + """List all routines for the logged-in user.""" + user_uuid = _auth(flask.request) + if not user_uuid: + return flask.jsonify({"error": "unauthorized"}), 401 + pass + + @app.route("/api/routines", methods=["POST"]) + def api_createRoutine(): + """Create a new routine. Body: {name, description?, icon?}""" + user_uuid = _auth(flask.request) + if not user_uuid: + return flask.jsonify({"error": "unauthorized"}), 401 + data = flask.request.get_json() + pass + + @app.route("/api/routines/", methods=["GET"]) + def api_getRoutine(routine_id): + """Get a routine with its steps.""" + user_uuid = _auth(flask.request) + if not user_uuid: + return flask.jsonify({"error": "unauthorized"}), 401 + pass + + @app.route("/api/routines/", methods=["PUT"]) + def api_updateRoutine(routine_id): + """Update routine details. 
Body: {name?, description?, icon?}""" + user_uuid = _auth(flask.request) + if not user_uuid: + return flask.jsonify({"error": "unauthorized"}), 401 + data = flask.request.get_json() + pass + + @app.route("/api/routines/", methods=["DELETE"]) + def api_deleteRoutine(routine_id): + """Delete a routine and all its steps/sessions.""" + user_uuid = _auth(flask.request) + if not user_uuid: + return flask.jsonify({"error": "unauthorized"}), 401 + pass + + # ── Steps CRUD ──────────────────────────────────────────────── + + @app.route("/api/routines//steps", methods=["GET"]) + def api_listSteps(routine_id): + """List steps for a routine, ordered by position.""" + user_uuid = _auth(flask.request) + if not user_uuid: + return flask.jsonify({"error": "unauthorized"}), 401 + pass + + @app.route("/api/routines//steps", methods=["POST"]) + def api_addStep(routine_id): + """Add a step to a routine. Body: {name, duration_minutes?, position?}""" + user_uuid = _auth(flask.request) + if not user_uuid: + return flask.jsonify({"error": "unauthorized"}), 401 + data = flask.request.get_json() + pass + + @app.route("/api/routines//steps/", methods=["PUT"]) + def api_updateStep(routine_id, step_id): + """Update a step. Body: {name?, duration_minutes?, position?}""" + user_uuid = _auth(flask.request) + if not user_uuid: + return flask.jsonify({"error": "unauthorized"}), 401 + data = flask.request.get_json() + pass + + @app.route("/api/routines//steps/", methods=["DELETE"]) + def api_deleteStep(routine_id, step_id): + """Delete a step from a routine.""" + user_uuid = _auth(flask.request) + if not user_uuid: + return flask.jsonify({"error": "unauthorized"}), 401 + pass + + @app.route("/api/routines//steps/reorder", methods=["PUT"]) + def api_reorderSteps(routine_id): + """Reorder steps. 
Body: {step_ids: [ordered list of step UUIDs]}""" + user_uuid = _auth(flask.request) + if not user_uuid: + return flask.jsonify({"error": "unauthorized"}), 401 + data = flask.request.get_json() + pass + + # ── Routine Sessions (active run-through) ───────────────────── + + @app.route("/api/routines//start", methods=["POST"]) + def api_startRoutine(routine_id): + """Start a routine session. Returns the session with first step.""" + user_uuid = _auth(flask.request) + if not user_uuid: + return flask.jsonify({"error": "unauthorized"}), 401 + pass + + @app.route("/api/sessions/active", methods=["GET"]) + def api_getActiveSession(): + """Get the user's currently active routine session, if any.""" + user_uuid = _auth(flask.request) + if not user_uuid: + return flask.jsonify({"error": "unauthorized"}), 401 + pass + + @app.route("/api/sessions//complete-step", methods=["POST"]) + def api_completeStep(session_id): + """Mark current step done, advance to next. Body: {step_id}""" + user_uuid = _auth(flask.request) + if not user_uuid: + return flask.jsonify({"error": "unauthorized"}), 401 + data = flask.request.get_json() + pass + + @app.route("/api/sessions//skip-step", methods=["POST"]) + def api_skipStep(session_id): + """Skip current step, advance to next. Body: {step_id}""" + user_uuid = _auth(flask.request) + if not user_uuid: + return flask.jsonify({"error": "unauthorized"}), 401 + data = flask.request.get_json() + pass + + @app.route("/api/sessions//cancel", methods=["POST"]) + def api_cancelSession(session_id): + """Cancel an active routine session.""" + user_uuid = _auth(flask.request) + if not user_uuid: + return flask.jsonify({"error": "unauthorized"}), 401 + pass + + # ── Routine History / Stats ─────────────────────────────────── + + @app.route("/api/routines//history", methods=["GET"]) + def api_routineHistory(routine_id): + """Get past sessions for a routine. 
Query: ?days=7""" + user_uuid = _auth(flask.request) + if not user_uuid: + return flask.jsonify({"error": "unauthorized"}), 401 + pass + + # ── Routine Scheduling ──────────────────────────────────────── + + @app.route("/api/routines//schedule", methods=["PUT"]) + def api_setRoutineSchedule(routine_id): + """Set when this routine should run. Body: {days: ["mon","tue",...], time: "08:00", remind: true}""" + user_uuid = _auth(flask.request) + if not user_uuid: + return flask.jsonify({"error": "unauthorized"}), 401 + data = flask.request.get_json() + pass + + @app.route("/api/routines//schedule", methods=["GET"]) + def api_getRoutineSchedule(routine_id): + """Get the schedule for a routine.""" + user_uuid = _auth(flask.request) + if not user_uuid: + return flask.jsonify({"error": "unauthorized"}), 401 + pass + + @app.route("/api/routines//schedule", methods=["DELETE"]) + def api_deleteRoutineSchedule(routine_id): + """Remove the schedule from a routine.""" + user_uuid = _auth(flask.request) + if not user_uuid: + return flask.jsonify({"error": "unauthorized"}), 401 + pass diff --git a/bot/__pycache__/bot.cpython-312.pyc b/bot/__pycache__/bot.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..edb334f12548f3bc75e218bdc80e41dd48b8c94c GIT binary patch literal 12741 zcmbt4Yj9K7nfE1Ky)0RNVjJ5R8(Wqze(ROoSDwVnWQb5Z8yyB&Mr>VhMCwHc{DTdo=gEpU(T9^S#gcm%Ka+1!<-KyS{g;DC$4(kCsRY{COa-6g5MMR6ixs zA`_tdX?U{%W`yl$>2w~~&yg}izk$4s{YLWU`+4#<^_$4s+;1jtOTR@j_)D~*^tZoE z$2Cl(B`#;(3ay6z{*!A~>V0XL3p&ZTp)kM+3y@F=r72uO5sRRe=$ul z6wIno>rH=)2CcvT(mzDTk-RM|szarv6Ha_CKTdsbT&-8m7PB z&7l2)XqJS54O$6|P5bw1|Ma&cyl2VIuvWCbLiKMHt3}%@bbp{gHC-t#3OkDR0m7V97KTxM^Rh_Lc zd)7a!wb$Qb`Fj=G?8Wt~T5OlL`vZDC4HGL@)oGV@h*hT>C`vAXIVf5k{VkPhebdx9 zGtRh$=zm#Gh9XVjabdHt*QX4IWKn<&VKCs6f)QcN7a0~5Nl|>EpfKVMdWWPD%=ZRG zVK6iTIietkMkBu9kj1iJ@87>ojS zj(xFTGG5OMeL^I3S_&#qspH`0aA-`DC4IoKS5{y^;f&8K$kLEciOAznv(r0xdI%;1 zQ;K+%(?TE=3Ja$~vJegh0yx1>aUpu$v}(X|#m%VJAt^F23?C7YWYrOXo(9q#;&`fa 
z@4;hTN1xj}(0R0Ppzr9ReZ4B*@$A8YW@XqFDZ#cYjdrM18v=Gr|{LFzt z@8Gc16h0m3gRyvDFaj&43}%>-Y=#jah$rg7Jb)Q0M)_Blk}4F4C~dPu2Ib2H9RmR- zauIAvQcSPGMWN3q2*0QSo;<7_0W6*TINoC^V%9SJn#j~L;H~$p&EXj3HuOHCn?A*v zKB9Mh%E8-hR1Hcv;ESkiKnkjyUkL?OLm=c871emst4M9zROhH1P`sz4fs<|9L}@T2 zO0e(-vcD=H_KuUJ3<$H>fw638FcC^n@YC$R?4EF_;@O zh#ZPFGe}Q;nv)LHffHAX|r?CkuuY^PZSdNYGw;6;;<7R{%awXNNj z-l(lBL@0c7-*{Mx<^vZ5e1qNy2#jVVhbZ4Ed1Xl!)Vz+-$Z$yZMM>eOIAJ%d^23r> zlw?KDhyN-c@r_6zPgH{>%ON@H0DcojgWfYAbV69Kw48R&wB%-(mw1>&3`UkZ{OKLDU368^Qv(~f?0{GNeYVL5HPD; zgne_OH{w-Upj~-A`44g}tl+RhI>~4Ts1|S7m+4eS(rkuD`7wY=YB{fH>frS7bbs6; zOm?LlmGSD0Mm!JCUr{}mOp(P=-#0y*Eww8y>j{@7Bf9=rzWU~RqG)a^#Z@4xPs*6BhEAif|ZFBYIwx1 zVOq^MD3N`W)&2qPlb<#w=uP<5QmvFik1;fLf}-I!u5%Mo4@tl|>p%$WqjG7bzWWl^JmI!1{W6Oslc=M_;Gk{UY&w^e0A zipqt(kzv&+odu1isFtpdt^@l9_8&aFPvt^k5Y)!7Z}43skEe|<#d|%&DgjLXC^-d)px_ATmmpjAvJGWw&q=GztDbZ_l4b8J&V@5 zWoPkg{<)1wr)z~`^SbEy*1MIp*S>z`>+{YxzBO%G&M%IybH7*np8wtExO?Y~`glpl zVt(gmJl0LOSYat!{b!7x@%e8}l&u))degEgFP>jF=bbwix4V~2o`lKsIg<9KN6I#s z)A_9(yQrUT>1bsaTqZylS`8gL`GqzX@tp+jGIv(79~$kQW$cG#2Ef-4cvd0&EA%!4 z0zQ%CqRY)>Gy)gm8B4YYnbiOiQEH5N#1>}0ekO;3vV z%xZGtClBkS1ofLi?GEYw_KlL$vp%h!HRH?J!FqJ&V6;RD{gOC zGPx5bH`!xPT6~?)?_j9!RkX1`Gy(E=hCTd0uz>tCf(6FhQOhn^?HyI@LX`pVHTD?p z6cJzF1q9T6c5ibDFR<3KI}6n_Nw?Nkwa$t(trnGa6EuP1&)Q*n9lft()H_UX)G!)3 zwRwlz05;X2JRXjy#$m5A93BG>4-zE@nlPw%15l8$uxQn!8QKbdjF4to8@78kV}Z;2 zq2R0V5aACB%j&$e>%y+tb=Rt|RL_?t%UXZZ_QCG=cPF=YC9QjxtcMcTL$|t<*1kB` zC$EEcgIU6dKUoIx!=FEO31jq=msnf8W0Z^@qnnk#0xy*ZSu}V$JSKNyCL`~|OtMyz zxQ0IMXC&EoYCO9VJy7vDJW3sa^h$u(nr*n&e5HBbl`P+Keg6kX-am5FmE6{ov>u9c zhtdm>r4wj|^h0Q<5s=j;qaSl0OS)o$9j1F>uEGo%i4FL~)w`DmrG%-( z%_gI@ zotdDF+zfaxyi_w$8)zr3S=&5i%`BgTs^5esGs{$E{VP3F(F9k%%;o(>W*Jv6P}Y+8 z5iIU%I?|VHf{n5Ac#O@pusZ9+XyOS=x*HT@{y-Z>)5#iL$vqLA2TAC1#poC#8m=;6 zt!L)?4WRuA4y=EJ$dhqeL{p4AL&=RXPBamB&8S;1F{3uZ1Rvu?GbuOewo8lxUw}Ay zF%vj>maD8b6B_LJmuTAz8=(C;+J=4*ot!8fc zwykN|RzAC9?)kU--|COIbR_FK7j0d4%W9$C^x^9VZZ#xa$9~H)<(3QfDdV(p#YjQH zvA^6k`@+02QQ4F#DxYn?d~|Bxa!LK%XtHGURCj7!^_<~~ed^${tt^#ae#f;r-n1v- 
z`buhpCnYr8<2h%+Zw-8&eI=hNDw}o33v1)H+LWX0ZgF+WWS_QOH{Bdg)ED>NAnJ1NBGCg+LF=bdTsF~ZIEO1ZpsiKP6eV2Qu%*&?Yl&$1$ zZS!?gvi2$9>D2m-FlL?|#xz@C%>3eMU))h0H&v&s#b1<_9IV^R@I3etfJ0zFlNbo# zkEdMv7c3oYPf{*-85k(*W&`_Bx!hoX)@{&wS{-kB84U722S$ILnPIE@kP#+vN$g_Uua&k zR3$7`Dbv0OEMql(MllBCeFTq`0_f;`hqa@aUdW?63b_Tl5%4tz6iQ37p~D~-|C9}l z(L^>fAa4mKGDN!Whi%MZOyx43G~ER~FsRiqHY3uAK_3=)J(m1gvso_?IZa;YB8BYS zSTeW2#gWLY*)KoK2~H>G7`=;%IJ8-S&S7KR8hvA!Fvbk>3o)a79HK`?j2~TPBN#X# zDMr3hH2SmNtyTiM9c3Fy+WKf;c$QYcj}>{)On?0Pm|`sW$>taj(DG*XL?GvC{qen8 zwK?VLXPg2ST!5z$9t&Yb&^MeCk;1xskD4``ZV%`V)FVWmlM0WRMnUC*p`a9XydVW2 zd@TtlA!aOiJkc_bXM@G^90?|qSo26o91TDyTZq!aeV~K77-ZtT>o3t%J_Te{IdbeBw@7 zRaPUp?H7;CdJ?v}6$@2fb*=15*$?(4YqurKw%;z>bE9OcCsj~3(|oyksh~blP@gK; zFxN0w4o+#KE9GjP3jnsYZG~l=?GIS4(Eb?(D()VF6&nTf(i`TZ)Wix5RsgW#rYva7BOy4_9oE`$%br^*rCvuy+d$akagT+`^{L68Q5GP48{uK4NMSucP<2a39t4 zh&Ry)TM(`hxVY^^;Qk{ZX@RTzof?Zzz>#N+mN)2zy_hxRaynS-E8rIF)qPahcT7QA z4JYMf3h>?pHY=zRzL7L{pgQR$J@8uXF*8StG!eapL}HK=Y6 z0GA(0>+Ys!0--@*~OoS?`Fr1f*Tz-w*KU7qOI?D3K}No9Zou0 z3PXm3j1jARd@`0|5{SU^X0d1p)#}8DTs$DnJx`gWEv( zTV)4_WjPFe>w*Bp8{v5wa#a(y8W>TA6kH%+>kLOf_CazCN^iiUphreNVc97p(I`kc z-HETAe6M!?)3-uVKDv{Em!}J17xN;%@?~c7z#7|O_H5G9 zu*5ao<{C7kp`c?uy--AVlyeKkM!?r>X4Yu1!Ln(a>4%v;p8hW@Fl#h?X(M5R1rA}) zPznbzGkD8ItDUkZsUzTn9LFX8toqi!LF2}4)XPAmYwb#1dFN8vF?uL#$%3JfRpGg= z73AhiFn+lAvPJ~BtF%7i8Co_EQTqS1`LOyw8a27DHsl^FW;m6VF7%pyu^-VOQsrxM ziE##≀Jz%sjq6qVXjI?26|Ptags86e15Uxe-Ui1Gy*dAUMZgwr?}^O`QAxtM8a0 z*M5rGiwP6PNTo(0Yl8!FAw$K$6oX)eK{Nhx$xqShuhx3d-;oU(u2G$~0vR*UHevb_ zYnc}_g50;qOz^!=G>O}(Y5L+{5f}i;{_hyICc6oo{W^SYYw_bgdkjMcyZJhkpu<2B!TpcVs=FG2Ac#Kt`SjCQ5g2Z z?E$z%0B4PG?uZAI5fIH75|6-4b_!}fp6AQnK|DSjh`^Djd>Wd{0c>A#Ja{@78VhP> zp@8LrD_ZJm5^~v=Zl`Yl5t~j9LN|DT4i_GXuqN_eH9|iy1=W%90|zqSOoAV(+3U+J z2b|vEvVembhEpCfZ7`xrO6v|{DwNB5I-2kP@zHmV-grK~Y44(G-*Wx->;6Q2$IY6XwekA>7agSLXAT@V zJ^V@mWvZMrCiwa_#%;>pa=j$J<4B^lH{NnIZhIDtM27{8M2L|PV5H2BJG_0mKEbd1 zV#^!+`_&LFirYGKwkxfiiC&JzYqljzx5v4nWqVn?+>^A!O^09e_FvhHr-x?8=8d{} 
zU0gPE;_`{BFD0v6lEtmJi=V#!{FLRfQ8&ixw#PTN#oLbD^56PK+}5v;3}!7%dkcQvgi-fOR#z$9<3BXT{L+S77!!Y2>2uAZ-6e&cjO=7=!F8hyP8`ltVO(y z?yfN|w6los?4-M0?9ZOI!k>?fbax&15#NaTR=WEc?xSs+0sr|1x_dYG^O_dGe^Edm zptxTYb{OH$$7S>ZhWogj;59Vn)Dh@1c0a{`yqWH9v3=acBfi~)rMt287DXU~a1dsg zO~^aQ0uLquJZ@!+w0`pE$C{6&)j9(7AzC{mX_6W8vS)y~;!%3NaY~CkIXgj&fulGC zhr$?AQUte=oWVh0lib)+ISeR~16kFM-fj9u)M2;_g}$U$j>uHWE!B$7HyJ zF)qIb1;2qu*+h_slhj?#9Jf_oJ$3E$mD97|j9WG>a*YtTf923JSFpsbOK|Ic$(3iU z-e-2NweGTO_d0kHPqOZwyow~m0!QTf`hot+t{Y4=lk^KPx$H76oUTAm&|Yh_>g~6$ z`jbKN#+7al5$iA;TG5Xqa%pF1hrtJuz|LyM>!ZF^eWh)!++X6ydrprfF?%5)*Ju$m zcSw5jtU$j$3r__+WvT^7qSvcq74h0bN&Dd?)8V-3Fm!#_0s-=* zrFx00PH@#LPLPZbA4hzH7t1YdZ3Fy6H{E66KJ*kK zzKibSxes?60sonSM#v-l6pdrFZWOpN7ZjB>QVG;*S}LIflIE z;d|yWnaOIiJCL1S1h90xm=VDluDxR+94zooti)9^+g6oA;uQSNUpWBR*Ozb zaPL#nFNC7_ldnNr3w{QkU6aqaYsp`-Y)Du(BrUZ|TC-Z0j{ zLHfV|uHC?Z+z&jw@ zf{_W|qlii$xIKj!9t+F+5%eH9ir_f}Bw9I)7~y)1KM?y6?)RO6ASqEfGV(WGMS?v- zdQ+|G?HeFDxNEovD1~4_Sa~$~m>740R$2rF|J-ub7{c<}79Ot&Y-Tqel+k4*H zbA8JP?eDjLu;=|fH+Lkr9!}OCNf!1lQb+%j+68^yDXN_AoH=;;V7#g|S+pfiIa8Lx zshUgf3-0NIaWJ(kt&{wn{DMh{d*)ACQw2qnj#Qpw(vr%nnY7%|&R#N9ILW8Vt0wtH zs`QSzbgJ}H)rG1#e$m`8X}oV_>8BppY;^1WDigiweu;th+|Q@!d z%XiX3CLges28yvw7Qb44u6(+4kuJT*Ftizq*R3E1;*QB*qAL<~#XW|mH)6^96~tCd f6la@?-e${EoaNP?b3Ji~@Jnt3T;bx%iShBjIAgih literal 0 HcmV?d00001 diff --git a/bot/__pycache__/command_registry.cpython-312.pyc b/bot/__pycache__/command_registry.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..38f703f47e4464ea3d3068bec170b6585735cc9d GIT binary patch literal 1319 zcmah}&2G~`5Z-m%A30QxN&yL>u!zvaQtAeAt4gJcR$Q7?qT&*SEZ5$oZv7MOx)P~+ z$e~BX38@!e0QCuY1}-T*RLT_|pftChm|Z(gDIiwb_3X~hH}lQR`s>V$hG2YYe(*l! 
z5c)1Qqk&UlSOno1xu}Ue)Wj}6LQTb0V9mKXSk~zTj zsYmXa756qD3~YuYPmLm6?S!1lrg-l2}y@*J`Qh(2Xp+PGe;MHvla z;)O9~wjDY`@*B}ckq!EQQ326PD9dvv;j>(oVE^VcZEb#wt zu(6%slP30g>gr87IViu)q7}Cu6Vo(PbOPv}lT_D_+E6&qC6S4%N%bkS0@)3fG#LqJ z5m}OlhJgqAi_Lm{wXtT^H`iWotZy00pl~z4gQ-zlh?)~}bq~5}@h|;1&~JEVQ(z!k zp6jsm&_xj$wWH1cTDR2wcSDnK6v($#w6Yu zwLbK%``_j=6efTNxi*ar%v6xMz$9!^9OvQKN~yCvoIDU0=3wceANl*|`QmB5*nhZq zmap~I8k+`%12Y=}Pv8&53jjJ9)i`@QyA#>%OgKrU-{{S6z#}KN?`OpYe%A^`l4oh; z2C6T92a=Zx4}yP2GO~n{E|lpnL!^imz}X|1T)bRCOaG{!7fPpv(ph2tEMM-cWoh0p zCI(ZM None + + Example: + async def handle_med(message, session, parsed): + action = parsed['action'] + # ... handle medication logic ... + + register_module('med', handle_med) + """ + COMMAND_MODULES[interaction_type] = handler + + +def get_handler(interaction_type): + """Get the registered handler for an interaction type.""" + return COMMAND_MODULES.get(interaction_type) + + +def list_registered(): + """List all registered interaction types.""" + return list(COMMAND_MODULES.keys()) diff --git a/bot/commands/__pycache__/example.cpython-312.pyc b/bot/commands/__pycache__/example.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..9d8f642c8b526a878b5eb9107208938d2d4300f4 GIT binary patch literal 2710 zcmbtWU2GIp6ux(6cju?w-BMaX`SA)UY{3O;WTi@*R;5Du3y6XGuo?DF+i7=q);qIc zmu+0ss2C&5i*14=@Fa$q;FEpOM0nJg?AjWdF(#X+iM|#3AmPb#XJ(fQh#GG)=bpXy z+;h)4_k8EQzDB6p<5g_u+d9jVA(N4DGtUS$Gn$c&tLn>q+do0|jG47;rrEmHrEZOrgZi*x!9gQC zOf)hA%F{X{Ck%T8)ZV#}xngWw?TnMd78mBby}RIAX4Ma*@|JC8NRFAAoNcL3#mQc6 zjLY^O*hNxWI&G=Hb65ApuoLw(>MZG5mJ$~QAqf0i@GTS|_yY(-8Wn|6{~;F1xrtQ| z)`VDuhje=M6S+XTdTk>LMp?b4<@}B6&!wWIk_DkIiZseWD@lW*zM! z2YB{Tvis2fePjsI?oLL!qE5&&&yn)kv8;I_>jYixj?&QX1XCk=YOJv0c|P%E_5CA; zt!FG3PL9mQxdjC=9*^&HM2%8M;Q~p%%GL9k7MAr=dbf@)($P^6;9Gv!Goo>WXIvMF ziHj#ecdnC*=0BW2U{? 
zuNKy3-=%nN+=+M)Px?!&9S$w!r@sU88@g3O^Q|b-c4qMO;FNqJHWS`d+Bw&}wA^29 zURio^u4UO&Q?+GXX;)1OmzRAnEuV|DpD|CH*CT7Hku`IX*roMXI;tHzDzWbARHeCZ zK@_6uA4pKt8=TCCklIpy`u#`e+8&wQeRlWM$xFLt+P1x?{6$UW@#m(GP7i#|Dr%Jgt!?1Zo4r!s zv-qll`<@P5Rl9+|CgZ+D;F|IT=QrcN?SX4sLYz-vPPcQKT3l)ZpoSNxg)f0jqG2?F z?Z$o)*^Q!7hq)zEJd6(^DiqOi#Drl4ya+sION#iu1t)OX*N7362>y_F4caI|6GBn& zOIkY=st0Lp^e)f}_Qd2u?<-o`piun1x?M*Oz`CCG^_=0I!OlZJNke-e<`B^tGno}H ztUjn)Ip`re$w0X$$8@53`11hm1YJQlK?|$*8h~oR6w_bJ8w`*-Waw$Yt(W!OsAXo^D%jfypJf0kp;|av+WAXUdzpUx!fexHb1hpc(XADEYfXxj z*PVJ}F1qABr6xtoc2!z2Cr8eNPlqcHZJU)7weXTNM@}CpkIaN0E$!q;X@B#Lsb??I z>Wa>WVDGHlS8G~Q+PeUkAxH8bAlBkY$s{#X$)qD%k7o1u36 r627^m6}R5lgm6n)nd+WJ9Z*8>s*-XYt*oMzQ%5TscKm?4UGx3~0M@8$ literal 0 HcmV?d00001 diff --git a/bot/commands/example.py b/bot/commands/example.py new file mode 100644 index 0000000..b3cad7b --- /dev/null +++ b/bot/commands/example.py @@ -0,0 +1,63 @@ +""" +Example command module - Copy this pattern for your domain. + +This module demonstrates: +1. Registering a handler with the command registry +2. Using the AI parser with custom prompts +3. Making API calls +""" + +from bot.command_registry import register_module +import ai.parser as ai_parser + + +async def handle_example(message, session, parsed): + """ + Handler for 'example' interaction type. + + Args: + message: Discord message object + session: {token, user_uuid, username} + parsed: Parsed JSON from AI parser + """ + action = parsed.get("action", "unknown") + token = session["token"] + user_uuid = session["user_uuid"] + + if action == "check": + await message.channel.send( + f"Checking example items for {session['username']}..." + ) + elif action == "add": + item_name = parsed.get("item_name", "unnamed") + await message.channel.send(f"Adding example item: **{item_name}**") + else: + await message.channel.send(f"Unknown example action: {action}") + + +def validate_example_json(data): + """Validate parsed JSON for example commands. 
Return list of errors.""" + errors = [] + + if not isinstance(data, dict): + return ["Response must be a JSON object"] + + if "error" in data: + return [] + + if "action" not in data: + errors.append("Missing required field: action") + + action = data.get("action") + + if action == "add" and "item_name" not in data: + errors.append("Missing required field for add: item_name") + + return errors + + +# Register the module +register_module("example", handle_example) + +# Register the validator +ai_parser.register_validator("example", validate_example_json) diff --git a/bot/commands/medications.py b/bot/commands/medications.py new file mode 100644 index 0000000..98e45aa --- /dev/null +++ b/bot/commands/medications.py @@ -0,0 +1,30 @@ +""" +Medications command handler - bot-side hooks for medication management +""" + +from bot.command_registry import register_module +import ai.parser as ai_parser + + +async def handle_medication(message, session, parsed): + action = parsed.get("action", "unknown") + token = session["token"] + user_uuid = session["user_uuid"] + + # TODO: wire up API calls per action + pass + + +def validate_medication_json(data): + errors = [] + if not isinstance(data, dict): + return ["Response must be a JSON object"] + if "error" in data: + return [] + if "action" not in data: + errors.append("Missing required field: action") + return errors + + +register_module("medication", handle_medication) +ai_parser.register_validator("medication", validate_medication_json) diff --git a/bot/commands/routines.py b/bot/commands/routines.py new file mode 100644 index 0000000..4ef294f --- /dev/null +++ b/bot/commands/routines.py @@ -0,0 +1,30 @@ +""" +Routines command handler - bot-side hooks for routine management +""" + +from bot.command_registry import register_module +import ai.parser as ai_parser + + +async def handle_routine(message, session, parsed): + action = parsed.get("action", "unknown") + token = session["token"] + user_uuid = session["user_uuid"] + + # 
TODO: wire up API calls per action + pass + + +def validate_routine_json(data): + errors = [] + if not isinstance(data, dict): + return ["Response must be a JSON object"] + if "error" in data: + return [] + if "action" not in data: + errors.append("Missing required field: action") + return errors + + +register_module("routine", handle_routine) +ai_parser.register_validator("routine", validate_routine_json) diff --git a/core/__pycache__/auth.cpython-312.pyc b/core/__pycache__/auth.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..8b9ce878ff32f7bc8a0ef37f3209b511e5fadc0c GIT binary patch literal 2693 zcmai0&u@CUJrtLI?rc#6WC8Q|h1+2|>aS(V!+oExts?yK(H;UTbz7 z$J9YVLTY+xL9Iwokw~RV4GISi{R7~J)Jt4PikTR-RZrX~;ea^x&D!e_ROplZ=Iy+< zZ)WFx?;HQ-ayb#mwOilDpW6`nn?0Dq*Nn{xFcy%46fTKUT$tmkHlGs0Ld~{>Er#D3 zw!+9KZ7DG%oC znK)LJ(ReH!Az7>rVyt1>bT&O5Nye3n+LW3$rlLY^pKO3{Kyal&ds?JQq9l2WUdkz@5Z&;F$^ zOP;{I{V!2|&f6Vrm0g{8&)q&(Zfw6Za`)2hOG|}PN8iKlhixwfzQaBL8MH=Y$GW_y zDDPQnEy>;YKU$tyIsUsGDtp_@9v`?HnqIxMqNWemx)cOsOdi=cnUpj#@A0iK_br+`^(P3JFr=JEjhboe1UD{ogb}#0ZTb5^5dqO3tf9}*1 zQ7Sj@Sm?Rgv)X>B)Z9BSR;1P&u{&4RqyqqlMep|ms|{UcdB?oFEXkXY3PsEsGhB7%@|eK|O*xlfkTZm|-4*9Chj& z^1BQqV}^EWQ`3y(3^Y;O>sa>%ioU?|Ovx9jcstj5Yz!&Ei zhKDdaRwy5%7<`DSUZxH*Wssl~>J1m-7*JM?zwi;}>;ko&y4=|`R%3CUz|9KLxE~_= zSk!S)P&9YfQcuZ!aPIUIsktn-R7BU;SFc^YF}a}K)RtnU9f1epniw)_+gX%5m+U3k zziRiJXY1#OhWs*hva2v)0I9k`moZ^-GG;tk{SXFG>W1q*pw2|q42f%Led{jAK^ABr zn6+p102t1UecwYyJq<9vwtt$^lx$KRzysiCt*8e;!Ij5xPtne&XwL?6Z6N0#=;Q{H cH;`k~=H{AS>=n7j7cC83)8=j)*JR}T7fF6e4gdfE literal 0 HcmV?d00001 diff --git a/core/__pycache__/notifications.cpython-312.pyc b/core/__pycache__/notifications.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..75ee534a80aa2b7408b442f371b77e686d6ba836 GIT binary patch literal 3151 zcmb7GT}&L;6~6Poe=yjKgNcznb}Hd@&2BeIh5wYMK(S+!ts3k~A!%33%moH^XYjpu zOhCI5id+e#sJMz$h|;RGsuZWr1IbIRN|ku5)IP8>5_rNxq(1qj!jLMhAF7@^`@<4J zf37rV&fI(MIp^N 
zsPNK|6wJv(a`0A$6id2`hEz)is#yxqkch4#OTCLM?SWX)ITQ}&LNFJu&qb!iL}X?- z;<(HnvCSN_U56$or;RS-buZ8CE^{pBI5=-K3K+!onC*;4B7@%Kq)QmKjH(>HWc1n8 zbctn5;o&jYoj7keY-Bp}ggcQjtG)yt@v}uqt|-**fQXiSsAZ+D_b|(9UE%~&YxWMYtR84t97G9i=Ji4B zSVjR;nj6IibB$a+Z(t`koNu_@r11u39&xB~lX%#$M~oaZ@;FDC;X2r$o@ruiVJq41 z>sBQW*{Y7ciE3bO`Y|4v`g$bzV(z4ECS+e4#mv{LI((UebH2fFVmU<0zoM zR`og%W0c3#FpOt!qzVp8nJJHALR01#&QqLAS(r|MkrbVFOfTE3d>cdhT%disZ3(W47j=C3R@uXSE1AHVppdp+8_8E>20+qINj z*An-gjpVE4&eVkRA0C_fAQXrmj}wn(krm#a%D?YpS`gQhLPEbUXH#1-&cTH zM6A9xUPmb3QyDyyi^#5{vLqA*Dojg70nj05{HA?Z;fML9WaCm6`0V$|5fI#$-8>h9(+|M8|L@a_qmZY+5lV3W zgL3HsDCeUOEyxRsDcNBAM-czF;dCXrq!i_%bO%x4y(n8Qj({jR%A#V1W7B9giJWh2(P6Cnh7HOEd9P{TQ?ISa*WSB2gFxw zh6~hJy-6#_urEEVxUfqtN)8{_Yf2}_<(i_b^ zAIM9>QhMbF4?915d+zqV)AMYtxo7rjxw+?Y^e6!Fv8Hc@S{C$qeM!IHwsLtjwD#g_ z>!Iu05fp3rQbX+>zv%nb^$)IpIPmCrVotj!&o^zJ>I5cQ4s|r_To0u;qxwI0<4Ern zDDTxPu^vPHqXNW)ullH^@h(r5MTF6i01aEj(4Oz28U%)9Xt5IEz}c4{_3AJjYz{Xf zqNccRi6aLvA=ttvVhtD2v&3;8_pc30nt)p#gliVin`l~oM|u-Y2?-_RYo_Zk?68F3 z%j48_p8N&2_CrAr5q#Ujlu=*imBf!ymSY~xny!UmIa=))S&!riud;jtorFIHTr2iL z*a^R@nOL=HBzdL205h~3$Sm5H<#78}+o{FU&q~X0Jq)jQuXW#8ZyT7qwi!RM5$`O= zJMX_#jwjb*$$xB%Lb&~L{P;5(g1zr{oo{EanzAGX1XuwrA1YrY- z`lyBGe~CFS5&L3ZuZi!%$|Cx2F^7nw5795hgKOa4U(qMhK1>``&E$QUP^y?vGczq? 
zj7`!@sTA++NjjFAQ9L%%b%A4qmw+dK0$2N?%p~DWQb2qaI~*9~TkrwsP=a5E02ky% zm^uIw;fiv=Lk}0Y3WVc9t>a|(j{@Q3yYMuITdRi~xXK1DmWcXVHfy;i;HruLg(Pd; zwNg$`aee$Ye3{t=9Q=9&4U_QV762Hfz<&nlWgy#%cuYV literal 0 HcmV?d00001 diff --git a/core/__pycache__/postgres.cpython-312.pyc b/core/__pycache__/postgres.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..afa74986482bfc0d63d453546a1bc6b2938731b2 GIT binary patch literal 13677 zcmcIrYj7Lab>1i5FM@ATrl1E!LYAoalMfprDTx+E*&>zLvK0uzf)Xf@pm#w_WWWyN zcqXJRM_^=!m{>~T*qzXk+cL>as?N02Q1*|^blL$;Mus=+)J;7}{i9Qg^o%Sv?ev_x z4}cUTyH2~(9^Acq_U`W8bH4kXb1(niZnsddg-?Aqd}lR9{VN`58>0H3^XNhFHxtA0uRz86bpuz=u_s9K`_2V-B24o zWr5=o!SoWX*H{HJ=-Y%+!2fW-q_^FjZS;~kxwvGC;a6H1}ocdUifoa|^hp`7w{ zbXY#co@w{evIClog)YT<0=f z)r}do>aWlaD{xvnH{e-t`wFe_f^ z8m}FzO_fiy%(!>VExWX8#=)1KbOxY1v*Y?y)HRox`hf%oO*-0&S}bi|yRmcYeL%Rh76B=DsDfMDoGgI=+XIJ#&UI83oZ?L^Kqnzd)tew+0HMynaJ z#+h#L+m&0~2)$*BTS1zybDB1tW4>{{^=9fcya9Q8+mJnOkF%o=($_Y47q%XnzFfdj zm;V9Y#qt6rmObQ-R(yZKmV3v`pMIgfo!rlHUH>LD6*uEk;Jp*{WlP*JT=I}soLR@B zQGeY-bY1Gba{450$hp_!93>Rw`%-b;dE;ys15WF1mudf}=mm{vR!hvEtSL+09AkG9BsEB-9e^-FZa65Y6DXzlQ{AMYG2IoT`q_aTgi1W&XtBnDzp@N2P} zG`HEa42KL*xfAxqZ6IE!k5V+CQJgvrraD9m)F?wf890i-T<5<}kJB`D*~w7@?4Pq2 zX)m|LRMm*6`7rfCW#`T9DR=6LRQ<&BlLeFPj{gHRvq?~8#L(NirRl*V$w%xJWFzpZ=*5sAGkxd9UF2r?aN=$#N(dLgo^rOw6@a$ocCmOI)YhVY z3NRdcG~;j$AI{jF!-wX|*QIv3w=Yr~_^MI+2t9gWq$yFDDXUCeO0miQ zlrI%Xb)<{EBbK?sie%H(h7r>z*5X9)+U{BF+P8xf>)$#*x%NjrnN^RB9vwNDFlGu0 z$IHgbuC5w6xWL#-9V3V5S5Yo^vUJ+%nJe+6*YCf%?q*SXZR1SIV`9Xp)xC1RN!dlH9|-O1-B3MRHstWQ_eeo(aa4h!c$E+|ZwR!1MrBoEE~+O_xNockyz*vTg*>u-wbH3w&1O_{3A2}5FV#^;VZQK5FPF4TJsfo6mOuC|O)>fZcyJ6%|;=qipa=w*@j-GF$DbTGS ztxp=6U~PWUo+uO5GCb0{ zCczy>g&HGJLp96*k5b|bLh@uoFxt}-j>+bhOTkbd1|70Vj7B171HlV2AB;q$5Rw_Z zCYQ`ORI;cBD&o`d*vDX(0w4$iePS*A&{{E#|2JQ|@J0{(3^&g+M!q=XaKCKHm~2<9 zqt;jL^A^f%zhW7+ylVUWGc#o?UZ5D>fv4@G_RG$U$v)3R#edG5biF@F2+=QA`VP=< z7Ws_SFAAVEWvuY6W2e@!zA9>}%I(|BPVHr({yr2ZRsryRpH*nzd^aF^oDx^XaBv}8 zH)7_Bts5Xp~|>@h`Y4 
zYcJa=iN46t)HCqPc&%bbs&$ZnuwRs-qQ5H;gyC<%YariJdWA{2-; zhl8;uq6n_-wZp}vh0o`0l-;G&E@i9#AmK>2gBUN-{O*DXB9YfM1h^Vo9S7^kz=3pzIFy%n2So~7}ik%y{l_tD-6 zeJ#kW^os_>uS+-V}`= zLi9_aU_a0&K;4VMXn!y0p*zW@t`G#3(cX~P3=}GG7AO=a1Y!Zi??``7uOuGE>ur6J zKrnPZ8WBPwkfWe%)+muA^UwE(!~yXP=t=m!$h)CX9MJ$2EhNT7%#DOl=QaokM;-3O zuJQe2`>)o`IMx8?*}U_1;jUCi;%u@o$tO>IuPt4;YZyglu6ZIl7>DuK1B;=&Cp^Lq(z`xog(s$y9Ipar{T|%dte$c-vUp z)%IEE`kO^}VT@?JgZlRt3aO&q_heV?T^AgZHkKqL+&{E8@!U_{O&0DJv%ks7zUSnd z%8WrhO8_c3wxjPZP7&zhW+&Ck8bG(y@>0^lIA7X*P<{v$@wgESI2y`Ld&|77{RRDE-PUy;N+BsSr(K6&{ z<#@H$R!rv>n!mwlh<=T%dBqmz0XtMg1P=31uLYzt=U|vQ8*pa>VFkm$cR}-ker4FW@rVS(fM@Huy z#2)A0=4P!`nbL~!fw6&9*-YuCkt3*k0J(U&pk~@sGs0(#wzRQ~G;wWbvj2?($<674 z>SlF}zPfu!!szf8SJh-=Z+YEKM#HT-2FmXld6YK3 zso1z|97g{oK8un0vT>M-!+gTK%nj8;al{q*I3uu_-s}KQMRI?iqSHbMj9*V2K!G;x zrAgm=*_=;ngxr09BqmANpbn!JNYA@iOevS`Y}OMTwKCs)^@k9ovTwe^l-n z*_+rtdLYehN(ms8s71#QxJ1XFPvD`Jy~~&56u==?56lZ&^45fhJ)P{Z_=8Tf}K%NDd}1)Q?Rt@)Tk+!8lx&jL7Q?JivLx^Sqq0&i@~ z!6{ozPwvHS8m8(v1)2}QDHfTS&OmGr!7NKVfu?0q3R*NW)Wt>+dZr1UWGHt$(nlNU z6R>oU^mR_nJe5M`i6I=H9W;}a4Iq%8(46*nXaaNZFe2WAum}k)UZn-%9w-l%_xAzW z$d_mugVV&kl|0j`lfPVl?R-jlqbKQ~Xq^mBp1iqkwxVHr)k!7O^kT&$5I*AFWdq6V z_`$Q9@D?tM26`u-Z1#1wcn0ly4Z`t~Q$JKNe5qWoSjn-xRt2liL&FMSAHWjkaAbq> zD&(cJ5H4Osg$p3OcE!bev4TLj6(3h+Su(gc9FQaw=_~OF>LHHHHsytsliHDzrc%)U z461$;goMMY_PXSkP%v>66;ND3rGEzjpy4h_#1NpbesjjXe)y=$lb}a`RWZBjsVs*EVl-EnrqHgmMp&NOglZm$=2F4_R^6yD3v=i&We#<0OUvV zaI&ue@r$LD1E?7!w00BVSD}6byzvz8YN{~YDywSR$lhvjH?1|i=VYM#-dY~zM!sq5 za=_O?^1tEgliH8@+f!mzWU|1gajNdWN{7z zEJer#*yV(u%%?q+@RML22tUq|DTJT-U)dJ(JP)~TT7=aSllg@VYky;7+R529=@6JE;j-W&JiQs{eg@WIRyr%c_#+?1AC0^lQB%OMJx2idY&AvLpv(JXTxq&-;Q1p4?CTKkZHfEsAy z1zMsno8ks&*9y$hjBGL=2gOxBadRHdYJo^Md*2)V^0HlP)}nxm+P*LsX3NKDwfkt#^98vuPG_Pp zl&JZ!Wu*s#hBYTYc0W%=N8btiVmk<(rvd;!n5X9CQ*!w0bynrAYk;@XN^W}42E4W9 z>87@m%`MHf?c%rL9^hWi}nST~wEP>b0knf!Ci+7u5|P z1MzWX)opiEvS6Yhz2o2q-h(%7q zJ-Mjv-f!JacUNVTjA&s$v^Q5mlCgO`H)VD=*Kkv7&COf5sYf}G?^Wz?-od`VgKw@| zJ^%qkD}etRd<|Gu01!El%S0fJfhmixFs%S_2do0h!lj-VRAC)Bi;%-omT_cQgqIJP 
z<2h+Vfd>W+%k1)N3Pyl%7&KPG6+-NF2$Aywr!htrj>79w`M@!6BGL${NIGs1%l)TeTs1BWn>d9nSj{^n42*7!CFzj6dr`hh@3#%1A5u13^HXX zJ_~iSL!H#{!y1kLkR*0vgC;_hWDZlFA^|%c;uISBr`X&X5EA->!XUM|3Tw_Aif_T` zU!&d(h+*m@D})p-0%1vdZ0~seSpC(;8OJ)9y2w94aeqv5Kf*8`eqM=AV1n9RsLY5! zUrl~}c6Gx{VME&8cr%c8K0bUzrKB9cI$1bpa$RwbIzKdRoHlLD*qlK1xYVVaJLg%- zT>Tlvm@V@L%2kL&^{yGNxI=-sx8S4-w%&C>aUPGCqyl)ax7`Q#b5oCPJJ`tHYUG=Y z%MnA5flUN4vcsz8Yd}FD#?6Nqaj>^bv;@;+;hafj&_x?vv5DKD9`Chc)9wNUPn*D0 zkOpX;vOz4Pk}f^Q18(OY3W{N}sv(>2 zQLK^630=s4OS~SiW8x-!C%`)i9JKEm=9a*@Ls={@6fYXtO6Q`Fr~#5u7!Lb)1A?`-MSbpKDRN}qp(Swi@8kBA?Z;D^zq^;;2Z^Kv9CN%O= zRQrnApSgW5!xYCramvM`EarWcM#z-liVrW{2`lYk6aNg4%4DP8FGPcWzvzZz5nEAK z9XYk7YPIU}BMSsT_%|z&@L3Vxn5z$h;kyiBr<+SFmmj1-h_^ zkq==IN=6G^Q^d|9YjR00hVp)ASw2NOOyBinjcQGXI8h z{)Q^~C#q_}V5AKTMJ!#qX!FwE`KM`$aou6ebnzk!rFksPmqBR(E-zE-ptMkAryCbN zZhH5^CWd}=!Qz6_Ml-#8vCKqQFO<;q`UO)l-MFyLNv~eqz|*T2TvobfakZ7+O&<7v E0M7j__y7O^ literal 0 HcmV?d00001 diff --git a/core/__pycache__/users.cpython-312.pyc b/core/__pycache__/users.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..4b07d03efa2fc36aa7cbbeeeecd3d7e1f378330d GIT binary patch literal 4072 zcmbVPT}&L;6`ngkyUWh9%L2w2Fy1<-Wt@7quIv~`7Q`5Xe=2Ai+(aZUTJIh3uJ;Gu zJD99H*qh2#p|T2=t9qlR5^f)Y<%c?ba->K}p8K+7X;~(;s?@yn&4#|jPd#V$hxHaY zb+2^K+_`^q?z!Lj&Y3^g*SiHMH-Gc1Xcx5pi7(|MmKs|>fyP~d3Y5f!vEq{?#4(W+ z+tQds#hb#IOeK&Cl|eeF0@6u6)NzxHxu_G~>Zl8(o7RC;sT-t+svzsrM63T7erz~G zO5enX0#$;#)eqvXU{uCpWzC?!TqvQUDMZPG(%3992~$i$r9Cj^4IwRS%CMzObDeR^ zg^y%7p<7~<4r-$1no4nFlIh%Xb3Lv{jPX=b*F=UYSZB#l{IZdn3h{I@G96D%^Q%32hDzJ!(j?}@whM$M z!P(HfXeax8b&ZQMpAM?PHoy~99$zl@5rexG&mgvmE-nQEqSz5C***?GY#*rzy(u zE)iQNu7;!W@Wr^!nqeF(ZwCP<^6g%@F@GaFv)l&f>F&+@`f_StPVRe-`rn6@FRFf( z^7idC!XBWq4>hS$*-ulttu$WHDLfc!fhnyx4FxEyHZAnc_hpYRjV|}(x(?^nBRTm< zQCoD-azSI*4(o#LtF<+SGG^F}QCSf~;M5er9HOVDP*PrBwHg6dQy!GwDr1v8)vljHXJjyxp6YY}$FMOdAsi zd+tlyUSj#C!khye(D6%Qeo3dJgPPk0isiT%Vd*Kua^S$!41g^eNm1Q$PU=Y>jvJN) zH)P4v(@_e12J*1wn4Y3xL$}-!ro$5vs3}GAa+SYh$uK2ssm0HYW4Bd@emRc)9PciG 
zrHEq>(R6YR$@ZfCA_N+R)Hd;FFCT=H|PJcLZZ{*=nzU%lW6Q3qO zPUd@u^DQHD>Q{}seogPjZpZG#^NlYRS_6fB2c9_PUGBNMCvL&hQ1I+p^K`6uIHc+uDu0c6EIp4UDZU77`D;ntwGoS6b?jF@JZ!Kw=pEG@ z)$Gi5!;&xSX>KV|Lr-wFALeK-nRlAceUHvotN|8&ZXTe-tNp$A)tNeyXVUX4y}3n-69bW zBQlEQ;*DGiCnxnEmKcy7gV7$Sd>q6#AY-GxY2m{Bh3xUQ_TWl;aCs)*e)N-;Pdh*E z%pW_GuRog^tXz7oGxV?}x3}lf;m=xguby3%&pkgyh{4xi_?{untnpwxYfY(5PQiaM z9o^2H21lPsn;aC9Py-WC4A!C)-P&xY7HyN+u4|M5tZtuZWIk=-q zDpdO!>}hBInCV0Q|ACBo^?+~z^qK`qH3gg2To5ir%(G2mim1>nzg(5Co0afw6Cwsx ziSOMGpAn;yBE)t`0w|Dl*(Jcx0vKA8w!%n?LG+EKqRCP&7)`=z1L7^_ ztCos;jdS=VNQ5m3xYcq+`MA9iL~>lrYho>X3D()-3^e#pL1ct~cpD4Nt%de|g_gD_ zbxM;XbMCQ6XlUOQL?t*U6;$t{_J!JB*w?+>^#1j^b6M#NPsd{g2LJ66TDz9c<#&H? zMQ+ZVo|{-#n>M_yIq((ZkLmmAM=kl#Yq{Ww+@Ait_him>@~a9c-u{cMJtqe({z*&% z41Q0FFjx>H;v~j8$f!ziAly*~!+Ydby(!)nZ#6))lVDt8GQ=@yLVQD@(xiY-JBo?3 zBE&HGlRyX!j^2lMkv?2ZzcL+V7-=RfXClgZG&%V!nXq289*Vc2@&OPT!SWr>oG!HO z|EqUs@%5$fvT-l*(0Fv>qnVFx{PIf9JCqp$vF`Ljh*+GQyPkujL-j2-&h=zQzTR{S zzSG2xW%J%{IV*R|8X4BSEC5?u_4w}=EdMPQ$ z;$_=958*4)v`vanUB?qCIvv-0*$z@AQ}}A4L)*Ut4krx_9y-WQdmZe literal 0 HcmV?d00001 diff --git a/core/auth.py b/core/auth.py new file mode 100644 index 0000000..3f40bae --- /dev/null +++ b/core/auth.py @@ -0,0 +1,58 @@ +import core.users as users +import core.postgres as postgres +import bcrypt +import jwt +from jwt.exceptions import ExpiredSignatureError, InvalidTokenError +import datetime +import os + + +def verifyLoginToken(login_token, username=False, userUUID=False): + if username: + userUUID = users.getUserUUID(username) + + if userUUID: + try: + decoded_token = jwt.decode( + login_token, os.getenv("JWT_SECRET"), algorithms=["HS256"] + ) + if decoded_token.get("sub") == str(userUUID): + return True + return False + except (ExpiredSignatureError, InvalidTokenError): + return False + return False + + +def getUserpasswordHash(userUUID): + user = postgres.select_one("users", {"id": userUUID}) + if user: + pw_hash = user.get("password_hashed") + if isinstance(pw_hash, memoryview): + return bytes(pw_hash) + 
return pw_hash + return None + + +def getLoginToken(username, password): + userUUID = users.getUserUUID(username) + if userUUID: + formatted_pass = password.encode("utf-8") + users_hashed_pw = getUserpasswordHash(userUUID) + if bcrypt.checkpw(formatted_pass, users_hashed_pw): + payload = { + "sub": userUUID, + "name": users.getUserFirstName(userUUID), + "exp": datetime.datetime.utcnow() + datetime.timedelta(hours=1), + } + return jwt.encode(payload, os.getenv("JWT_SECRET"), algorithm="HS256") + return False + + +def unregisterUser(userUUID, password): + pw_hash = getUserpasswordHash(userUUID) + if not pw_hash: + return False + if bcrypt.checkpw(password.encode("utf-8"), pw_hash): + return users.deleteUser(userUUID) + return False diff --git a/core/notifications.py b/core/notifications.py new file mode 100644 index 0000000..91d0ede --- /dev/null +++ b/core/notifications.py @@ -0,0 +1,74 @@ +""" +notifications.py - Multi-channel notification routing + +Supported channels: Discord webhook, ntfy +""" + +import core.postgres as postgres +import uuid +import requests +import time + + +def _sendToEnabledChannels(notif_settings, message): + """Send message to all enabled channels. 
Returns True if at least one succeeded.""" + sent = False + + if notif_settings.get("discord_enabled") and notif_settings.get("discord_webhook"): + if discord.send(notif_settings["discord_webhook"], message): + sent = True + + if notif_settings.get("ntfy_enabled") and notif_settings.get("ntfy_topic"): + if ntfy.send(notif_settings["ntfy_topic"], message): + sent = True + + return sent + + +def getNotificationSettings(userUUID): + settings = postgres.select_one("notifications", {"user_uuid": userUUID}) + if not settings: + return False + return settings + + +def setNotificationSettings(userUUID, data_dict): + existing = postgres.select_one("notifications", {"user_uuid": userUUID}) + allowed = [ + "discord_webhook", + "discord_enabled", + "ntfy_topic", + "ntfy_enabled", + ] + updates = {k: v for k, v in data_dict.items() if k in allowed} + if not updates: + return False + if existing: + postgres.update("notifications", updates, {"user_uuid": userUUID}) + else: + updates["id"] = str(uuid.uuid4()) + updates["user_uuid"] = userUUID + postgres.insert("notifications", updates) + return True + + +class discord: + @staticmethod + def send(webhook_url, message): + try: + response = requests.post(webhook_url, json={"content": message}) + return response.status_code == 204 + except: + return False + + +class ntfy: + @staticmethod + def send(topic, message): + try: + response = requests.post( + f"https://ntfy.sh/{topic}", data=message.encode("utf-8") + ) + return response.status_code == 200 + except: + return False diff --git a/core/postgres.py b/core/postgres.py new file mode 100644 index 0000000..0f8be50 --- /dev/null +++ b/core/postgres.py @@ -0,0 +1,264 @@ +""" +postgres.py - Generic PostgreSQL CRUD module + +Requires: pip install psycopg2-binary + +Connection config from environment: + DB_HOST, DB_PORT, DB_NAME, DB_USER, DB_PASS +""" + +import os +import re +import psycopg2 +import psycopg2.extras +from contextlib import contextmanager + + +def _get_config(): + return { + 
"host": os.environ.get("DB_HOST", "localhost"), + "port": int(os.environ.get("DB_PORT", 5432)), + "dbname": os.environ.get("DB_NAME", "app"), + "user": os.environ.get("DB_USER", "app"), + "password": os.environ.get("DB_PASS", ""), + } + + +def _safe_id(name): + if not re.match(r"^[a-zA-Z_][a-zA-Z0-9_]*$", name): + raise ValueError(f"Invalid SQL identifier: {name}") + return f'"{name}"' + + +def _build_where(where, prefix=""): + clauses = [] + params = {} + for i, (col, val) in enumerate(where.items()): + param_name = f"{prefix}{col}_{i}" + safe_col = _safe_id(col) + + if isinstance(val, tuple) and len(val) == 2: + op, operand = val + op = op.upper() + allowed = { + "=", + "!=", + "<", + ">", + "<=", + ">=", + "LIKE", + "ILIKE", + "IN", + "IS", + "IS NOT", + } + if op not in allowed: + raise ValueError(f"Unsupported operator: {op}") + if op == "IN": + ph = ", ".join(f"%({param_name}_{j})s" for j in range(len(operand))) + clauses.append(f"{safe_col} IN ({ph})") + for j, item in enumerate(operand): + params[f"{param_name}_{j}"] = item + elif op in ("IS", "IS NOT"): + clauses.append(f"{safe_col} {op} NULL") + else: + clauses.append(f"{safe_col} {op} %({param_name})s") + params[param_name] = operand + elif val is None: + clauses.append(f"{safe_col} IS NULL") + else: + clauses.append(f"{safe_col} = %({param_name})s") + params[param_name] = val + + return " AND ".join(clauses), params + + +@contextmanager +def get_connection(): + conn = psycopg2.connect(**_get_config()) + try: + yield conn + conn.commit() + except Exception: + conn.rollback() + raise + finally: + conn.close() + + +@contextmanager +def get_cursor(dict_cursor=True): + with get_connection() as conn: + factory = psycopg2.extras.RealDictCursor if dict_cursor else None + cur = conn.cursor(cursor_factory=factory) + try: + yield cur + finally: + cur.close() + + +def insert(table, data): + columns = list(data.keys()) + placeholders = [f"%({col})s" for col in columns] + safe_cols = [_safe_id(c) for c in columns] + 
+ query = f""" + INSERT INTO {_safe_id(table)} + ({", ".join(safe_cols)}) + VALUES ({", ".join(placeholders)}) + RETURNING * + """ + with get_cursor() as cur: + cur.execute(query, data) + return dict(cur.fetchone()) if cur.rowcount else None + + +def select(table, where=None, order_by=None, limit=None, offset=None): + query = f"SELECT * FROM {_safe_id(table)}" + params = {} + + if where: + clauses, params = _build_where(where) + query += f" WHERE {clauses}" + if order_by: + if isinstance(order_by, list): + order_by = ", ".join(order_by) + query += f" ORDER BY {order_by}" + if limit is not None: + query += f" LIMIT {int(limit)}" + if offset is not None: + query += f" OFFSET {int(offset)}" + + with get_cursor() as cur: + cur.execute(query, params) + return [dict(row) for row in cur.fetchall()] + + +def select_one(table, where): + results = select(table, where=where, limit=1) + return results[0] if results else None + + +def update(table, data, where): + set_columns = list(data.keys()) + set_clause = ", ".join(f"{_safe_id(col)} = %(set_{col})s" for col in set_columns) + params = {f"set_{col}": val for col, val in data.items()} + + where_clause, where_params = _build_where(where, prefix="where_") + params.update(where_params) + + query = f""" + UPDATE {_safe_id(table)} + SET {set_clause} + WHERE {where_clause} + RETURNING * + """ + with get_cursor() as cur: + cur.execute(query, params) + return [dict(row) for row in cur.fetchall()] + + +def delete(table, where): + where_clause, params = _build_where(where) + query = f""" + DELETE FROM {_safe_id(table)} + WHERE {where_clause} + RETURNING * + """ + with get_cursor() as cur: + cur.execute(query, params) + return [dict(row) for row in cur.fetchall()] + + +def count(table, where=None): + query = f"SELECT COUNT(*) as count FROM {_safe_id(table)}" + params = {} + if where: + clauses, params = _build_where(where) + query += f" WHERE {clauses}" + with get_cursor() as cur: + cur.execute(query, params) + return 
cur.fetchone()["count"] + + +def exists(table, where): + return count(table, where) > 0 + + +def upsert(table, data, conflict_columns): + columns = list(data.keys()) + placeholders = [f"%({col})s" for col in columns] + safe_cols = [_safe_id(c) for c in columns] + conflict_cols = [_safe_id(c) for c in conflict_columns] + + update_cols = [c for c in columns if c not in conflict_columns] + update_clause = ", ".join( + f"{_safe_id(c)} = EXCLUDED.{_safe_id(c)}" for c in update_cols + ) + + query = f""" + INSERT INTO {_safe_id(table)} + ({", ".join(safe_cols)}) + VALUES ({", ".join(placeholders)}) + ON CONFLICT ({", ".join(conflict_cols)}) + DO UPDATE SET {update_clause} + RETURNING * + """ + with get_cursor() as cur: + cur.execute(query, data) + return dict(cur.fetchone()) if cur.rowcount else None + + +def insert_many(table, rows): + if not rows: + return 0 + columns = list(rows[0].keys()) + safe_cols = [_safe_id(c) for c in columns] + query = f""" + INSERT INTO {_safe_id(table)} + ({", ".join(safe_cols)}) + VALUES %s + """ + template = f"({', '.join(f'%({col})s' for col in columns)})" + with get_cursor() as cur: + psycopg2.extras.execute_values( + cur, query, rows, template=template, page_size=100 + ) + return cur.rowcount + + +def execute(query, params=None): + with get_cursor() as cur: + cur.execute(query, params or {}) + if cur.description: + return [dict(row) for row in cur.fetchall()] + return cur.rowcount + + +def table_exists(table): + with get_cursor() as cur: + cur.execute( + """ + SELECT EXISTS ( + SELECT FROM information_schema.tables + WHERE table_schema = 'public' AND table_name = %(table)s + ) + """, + {"table": table}, + ) + return cur.fetchone()["exists"] + + +def get_columns(table): + with get_cursor() as cur: + cur.execute( + """ + SELECT column_name, data_type, is_nullable, column_default + FROM information_schema.columns + WHERE table_schema = 'public' AND table_name = %(table)s + ORDER BY ordinal_position + """, + {"table": table}, + ) + return 
import uuid
import core.postgres as postgres
import bcrypt


def getUserUUID(username):
    """Return the user's id for *username*, or False when no such user.

    NOTE(review): returns False (not None) on a miss, unlike the other
    lookups in this module — kept as-is for backward compatibility with
    callers that rely on truthiness or `is False`.
    """
    record = postgres.select_one("users", {"username": username})
    if record:
        return record["id"]
    return False


def getUserFirstName(userUUID):
    """Return a display name for *userUUID*, or None when the user is missing.

    NOTE(review): despite the name, this returns the ``username`` column —
    no first-name column is referenced anywhere in this module. Presumably
    the username doubles as the display name; confirm against the schema.
    """
    record = postgres.select_one("users", {"id": userUUID})
    if record:
        return record.get("username")
    return None


def isUsernameAvailable(username):
    """Return True when no user row holds *username*."""
    return not postgres.exists("users", {"username": username})


def doesUserUUIDExist(userUUID):
    """Return True when a user row with id *userUUID* exists."""
    return postgres.exists("users", {"id": userUUID})


def registerUser(username, password, data=None):
    """Create a new user; return True on success, False if the name is taken.

    The password is hashed with bcrypt (hashpw returns bytes; the bytes are
    stored directly in ``password_hashed``). Extra profile fields in *data*
    are merged into the record before insertion.
    """
    if isUsernameAvailable(username):
        hashed_pass = bcrypt.hashpw(password.encode("utf-8"), bcrypt.gensalt())
        user_data = {
            "id": str(uuid.uuid4()),
            "username": username,
            "password_hashed": hashed_pass,
        }
        if data:
            user_data.update(data)
        createUser(user_data)
        return True
    return False


def updateUser(userUUID, data_dict):
    """Apply *data_dict* to the user's mutable columns.

    Keys not present on the existing record, plus the protected columns
    (id, password_hashed, created_at), are silently ignored. Returns True
    when at least one permitted field was submitted, else False.
    """
    user = postgres.select_one("users", {"id": userUUID})
    if not user:
        return False
    blocked = {"id", "password_hashed", "created_at"}
    allowed = set(user.keys()) - blocked
    updates = {k: v for k, v in data_dict.items() if k in allowed}
    if not updates:
        return False
    postgres.update("users", updates, {"id": userUUID})
    return True


def changePassword(userUUID, new_password):
    """Re-hash and store *new_password*; return True if a user row was updated.

    Uses the UPDATE's own result for the existence check instead of a
    separate racy pre-SELECT.
    """
    hashed = bcrypt.hashpw(new_password.encode("utf-8"), bcrypt.gensalt())
    updated = postgres.update("users", {"password_hashed": hashed}, {"id": userUUID})
    return bool(updated)


def deleteUser(userUUID):
    """Delete the user row; return True if a row was actually deleted.

    Relies on DELETE ... RETURNING instead of a separate racy pre-SELECT.
    """
    deleted = postgres.delete("users", {"id": userUUID})
    return bool(deleted)


def createUser(data_dict):
    """Validate *data_dict* and insert it as a user row.

    Raises:
        ValueError: when a required field (id, username, password_hashed)
            is absent or None.

    Fixes over the previous version:
      * extra keys (e.g. profile fields passed via registerUser(data=...))
        are preserved instead of being silently discarded;
      * ``created_at`` is omitted when not supplied, so an explicit NULL no
        longer overrides a database-side DEFAULT (assumes the column has
        one — TODO confirm against the schema).
    """
    record = dict(data_dict)  # copy: preserve all caller-supplied columns

    is_valid, errors = validateUser(record)
    if not is_valid:
        raise ValueError(f"Invalid user data: {', '.join(errors)}")

    if record.get("created_at") is None:
        record.pop("created_at", None)

    postgres.insert("users", record)


def validateUser(user):
    """Check that the required user fields are present and non-None.

    Returns:
        tuple[bool, list[str]]: (True, []) when valid, otherwise
        (False, list-of-missing-field-names).
    """
    required = ["id", "username", "password_hashed"]
    missing = [f for f in required if f not in user or user[f] is None]
    if missing:
        return False, missing
    return True, []
diff --git a/diagrams/flow.mmd b/diagrams/flow.mmd new file mode 100644 index 0000000..acbdd47 --- /dev/null +++ b/diagrams/flow.mmd @@ -0,0 +1,15 @@ +sequenceDiagram + participant U as User + participant B as Bot + participant L as LLM + participant A as API + participant D as DB + + U->>B: DM "add task buy groceries" + B->>L: parse message + L-->>B: {type: "task", action: "add", name: "buy groceries"} + B->>A: POST /api/tasks + A->>D: INSERT + D-->>A: {id, name, created_at} + A-->>B: 201 Created + B-->>U: "Added task: buy groceries" diff --git a/diagrams/system.mmd b/diagrams/system.mmd new file mode 100644 index 0000000..245124a --- /dev/null +++ b/diagrams/system.mmd @@ -0,0 +1,61 @@ +flowchart TB + subgraph External + USER([User]) + DISCORD([Discord API]) + LLM([OpenRouter]) + NTFY([ntfy.sh]) + end + + subgraph Bot["bot/"] + CLIENT[bot.py] + REGISTRY[command_registry.py] + COMMANDS[commands/] + end + + subgraph API["api/"] + FLASK[main.py] + ROUTES[routes/] + end + + subgraph Scheduler["scheduler/"] + DAEMON[daemon.py] + end + + subgraph Core["core/"] + AUTH[auth.py] + USERS[users.py] + PG[postgres.py] + NOTIF[notifications.py] + end + + subgraph AI["ai/"] + PARSER[parser.py] + CONFIG[ai_config.json] + end + + subgraph DB["db service"] + POSTGRES[(PostgreSQL)] + end + + USER <-->|"DM"| DISCORD + DISCORD <-->|"events"| CLIENT + CLIENT -->|"parse"| PARSER + PARSER -->|"completion"| LLM + LLM -->|"JSON"| PARSER + PARSER -->|"structured data"| CLIENT + CLIENT -->|"get_handler"| REGISTRY + REGISTRY -->|"handler"| COMMANDS + COMMANDS -->|"logic"| CLIENT + CLIENT -->|"HTTP"| FLASK + FLASK --> ROUTES + FLASK --> AUTH + FLASK --> USERS + FLASK --> PG + AUTH --> USERS + USERS --> PG + PG --> POSTGRES + DAEMON --> PG + DAEMON --> NOTIF + NOTIF -->|"webhook"| DISCORD + NOTIF -->|"push"| NTFY + PARSER --> CONFIG diff --git a/docker-compose.yml b/docker-compose.yml new file mode 100644 index 0000000..c2552c7 --- /dev/null +++ b/docker-compose.yml @@ -0,0 +1,45 @@ +services: 
+ db: + image: postgres:16 + environment: + POSTGRES_DB: app + POSTGRES_USER: app + POSTGRES_PASSWORD: ${DB_PASS} + ports: + - "5432:5432" + volumes: + - pgdata:/var/lib/postgresql/data + - ./config/schema.sql:/docker-entrypoint-initdb.d/schema.sql + healthcheck: + test: ["CMD-SHELL", "pg_isready -U app"] + interval: 5s + timeout: 5s + retries: 5 + + app: + build: . + ports: + - "8080:5000" + env_file: config/.env + depends_on: + db: + condition: service_healthy + + scheduler: + build: . + command: ["python", "-m", "scheduler.daemon"] + env_file: config/.env + depends_on: + db: + condition: service_healthy + + bot: + build: . + command: ["python", "-m", "bot.bot"] + env_file: config/.env + depends_on: + app: + condition: service_started + +volumes: + pgdata: diff --git a/requirements.txt b/requirements.txt new file mode 100644 index 0000000..0761643 --- /dev/null +++ b/requirements.txt @@ -0,0 +1,7 @@ +flask>=3.0.0 +psycopg2-binary>=2.9.0 +bcrypt>=4.1.0 +PyJWT>=2.8.0 +discord.py>=2.3.0 +openai>=1.0.0 +requests>=2.31.0 diff --git a/scheduler/__pycache__/daemon.cpython-312.pyc b/scheduler/__pycache__/daemon.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..e44803e6a002246d33a990fa972d67867aade3e0 GIT binary patch literal 1476 zcmZuw&2Jk;6o0e3{@AshxHwgDIIx7G)FO#Nq7b3*kyJ%$Yn%uWqP{H7?!?(-y=%>^ zlRBa(NT9Y-M5%;G5JGT8;gCOoOV3;qk}74}1BeT~nS=uh7v8M1F(JIw&U^djea&xw z^J6BX0m_XpKDF;j0KbWkNaBaa{_kk4f&~?@h$T6sLP9N7q)^KhSpq80C8NZyMsHZZ z>k-!X@;d2w2!xsG15@S?K=K4-X&7C>vvlJ_~j%N{aR3mN0KZZ)iO5oZ< z6IOEm8~_uQd)7OlK%gbHq=LHSYgU!kJ$Izh(uehlsySa{UemSohUYl8ThkrSYv?l` z)44fIt)@dQ-LLX_u4$7Clrh_)VW?qN9cLQT&lU7V+n?22UX$sTSFhUcFmF)Pp0Q0V zRY%D%M@U zXcn8CGG0U|98Rl6i}HEjYZQ6QHJgstAjBSc~cjaBe*?Z}+){zohlQG$Sqk1w)A>C(VY zMZUTJA*SCG?RG1zJo7a8(RG2dzy_}Wdk!_q=xR&)0KS5+rEZO@c)_m2hp_T`M=z2^ zxF%nPMN*K*yBLO}3vR+6Aaims_vOuw2B&z^3 zP%mCHX+!WKh=qZH;@fo^#JEFgg9$Dn1qtQie(FpN9;7-A4Z(VrLr)%G{vIyN@KDii zT>0qAX7?EIRry$k)7y7dKJs=|FvD>9v zrOhXv+c>o$Z{}avO`qRNpZ~ipf&?+(P!4w8l;u8(09 
zdV_>vSe|JZf#h+P#grf+PK=lYsp%@W%?qA8W7h)ZQu&R^K&?@~)ByyGxJX08VB-G< zvhDgog1QSfLtJEx3xNm`3lAJ}J|td<$U$