Fix yesterday's schedule blocking today's
This commit is contained in:
BIN
api/routes/__pycache__/adaptive_meds.cpython-312.pyc
Normal file
BIN
api/routes/__pycache__/adaptive_meds.cpython-312.pyc
Normal file
Binary file not shown.
BIN
api/routes/__pycache__/medications.cpython-312.pyc
Normal file
BIN
api/routes/__pycache__/medications.cpython-312.pyc
Normal file
Binary file not shown.
BIN
api/routes/__pycache__/routines.cpython-312.pyc
Normal file
BIN
api/routes/__pycache__/routines.cpython-312.pyc
Normal file
Binary file not shown.
4657
bot/data/dbt_knowledge.text.json
Normal file
4657
bot/data/dbt_knowledge.text.json
Normal file
File diff suppressed because it is too large
Load Diff
@@ -468,6 +468,8 @@ def create_daily_schedule(user_uuid: str, med_id: str, base_times: List[str], re
|
||||
# Check recent med logs to skip doses already taken/skipped.
|
||||
# Handles cross-midnight: if adaptive offset shifts 23:00 → 00:42 today,
|
||||
# but the user already took the 23:00 dose last night, don't schedule it.
|
||||
# Yesterday's logs only suppress if the scheduled_time is late-night
|
||||
# (21:00+), since only those could plausibly cross midnight with an offset.
|
||||
user_tz = tz_for_user(user_uuid)
|
||||
yesterday = today - timedelta(days=1)
|
||||
recent_logs = postgres.select("med_logs", {"medication_id": med_id, "user_uuid": user_uuid})
|
||||
@@ -481,11 +483,15 @@ def create_daily_schedule(user_uuid: str, med_id: str, base_times: List[str], re
|
||||
if created_at.tzinfo is None:
|
||||
created_at = created_at.replace(tzinfo=timezone.utc)
|
||||
log_date = created_at.astimezone(user_tz).date()
|
||||
if log_date not in (today, yesterday):
|
||||
continue
|
||||
if log_date == today:
|
||||
log_sched = _normalize_time(log.get("scheduled_time"))
|
||||
if log_sched:
|
||||
taken_base_times.add(log_sched)
|
||||
elif log_date == yesterday:
|
||||
# Only suppress cross-midnight doses (late-night times like 21:00+)
|
||||
log_sched = _normalize_time(log.get("scheduled_time"))
|
||||
if log_sched and log_sched >= "21:00":
|
||||
taken_base_times.add(log_sched)
|
||||
|
||||
# Create schedule records for each time
|
||||
for base_time, (adjusted_time, offset) in zip(base_times, adjusted_times):
|
||||
|
||||
56
regenerate_embeddings.py
Normal file
56
regenerate_embeddings.py
Normal file
@@ -0,0 +1,56 @@
|
||||
#!/usr/bin/env python3
"""Regenerate DBT embeddings with qwen/qwen3-embedding-8b model (384 dimensions)

Reads text chunks from bot/data/dbt_knowledge.text.json, requests one
embedding per chunk from the OpenRouter-hosted OpenAI-compatible endpoint,
and writes the results to bot/data/dbt_knowledge.embeddings.json.

NOTE(review): the "(384 dimensions)" in the title is not verified here —
the final print reports the dimension actually returned; confirm it
matches whatever vector index consumes this file.
"""

import json
import os  # NOTE(review): unused in this script; kept to avoid breaking anything that relies on it
import time

from openai import OpenAI

# Load config (expects an "openrouter_api_key" entry).
with open("config.json", "r", encoding="utf-8") as f:
    config = json.load(f)

# OpenRouter exposes an OpenAI-compatible API, so the stock client works
# once pointed at the OpenRouter base URL.
client = OpenAI(
    base_url="https://openrouter.ai/api/v1",
    api_key=config["openrouter_api_key"],
)

# Load the source text chunks: a list of {"id", "source", "text"} records.
with open("bot/data/dbt_knowledge.text.json", "r", encoding="utf-8") as f:
    text_data = json.load(f)

print(f"Regenerating embeddings for {len(text_data)} chunks...")

# Generate embeddings, best-effort: a failed chunk is logged and skipped
# so one bad item doesn't abort the whole regeneration run.
embeddings_data = []
for i, item in enumerate(text_data):
    try:
        response = client.embeddings.create(
            model="qwen/qwen3-embedding-8b",
            input=item["text"],
        )
        embedding = response.data[0].embedding

        embeddings_data.append({
            "id": item["id"],
            "source": item["source"],
            "text": item["text"],
            "embedding": embedding,
        })

        if (i + 1) % 10 == 0:
            print(f"Processed {i + 1}/{len(text_data)} chunks...")

        # Small delay to avoid rate limits
        time.sleep(0.1)

    except Exception as e:
        print(f"Error processing item {i}: {e}")
        continue

# Save new embeddings
with open("bot/data/dbt_knowledge.embeddings.json", "w", encoding="utf-8") as f:
    json.dump(embeddings_data, f)

# Guard the summary: embeddings_data[0] would raise IndexError if every
# chunk failed above (e.g. bad API key), masking the real errors.
if embeddings_data:
    print(f"\nDone! Generated {len(embeddings_data)} embeddings with {len(embeddings_data[0]['embedding'])} dimensions")
else:
    print("\nDone, but no embeddings were generated - see errors above.")
|
||||
Reference in New Issue
Block a user