Store completed tasks across runs to retain completed tasks older than 90 days

This commit is contained in:
2025-10-18 12:11:46 -04:00
parent 406f8cef0b
commit 1b352d2586
4 changed files with 98 additions and 15 deletions

View File

@ -10,6 +10,13 @@ from jinja2 import Environment, FileSystemLoader, select_autoescape
ATTACHMENTS_DIR = "attachments"
TODOIST_API_TOKEN: str | None = None
COMPLETED_HISTORY_FILE = "Todoist-Completed-History.json"
def json_serial(obj):
    """Fallback serializer for json.dump: render datetimes as ISO-8601,
    and stringify any other non-JSON-native value via str()."""
    return obj.isoformat() if isinstance(obj, datetime) else str(obj)
def usage():
@ -39,6 +46,70 @@ def ensure_attachments_dir():
os.makedirs(ATTACHMENTS_DIR)
def load_completed_history():
    """Load the persisted completed-task history from COMPLETED_HISTORY_FILE.

    Returns a dict mapping project-id strings to lists of completed-task
    dicts. A missing, unreadable, or corrupt file yields an empty dict so
    the export can still run (a warning is printed for read failures).
    Also accepts a legacy on-disk format where the file held a flat list
    of task dicts, regrouping them by their "project_id" field.
    """
    if not os.path.exists(COMPLETED_HISTORY_FILE):
        return {}
    try:
        with open(COMPLETED_HISTORY_FILE, "r", encoding="utf-8") as handle:
            data = json.load(handle)
    except (OSError, json.JSONDecodeError) as exc:
        # Corrupt or unreadable history is non-fatal: start fresh.
        print(f"Warning: failed to load completed history ({exc}). Starting fresh.")
        return {}
    if isinstance(data, dict):
        # Current format: {project_id: [task, ...]}; keys are normalized to
        # strings and entries whose value is not a list are dropped.
        return {str(key): value for key, value in data.items() if isinstance(value, list)}
    if isinstance(data, list):
        # Legacy format: flat list of task dicts; group by project_id,
        # skipping items without one.
        history = defaultdict(list)
        for item in data:
            if isinstance(item, dict):
                project_id = str(item.get("project_id", ""))
                if project_id:
                    history[project_id].append(item)
        return dict(history)
    # Any other JSON top-level type is treated as no history.
    return {}
def save_completed_history(history):
    """Persist *history* as pretty-printed JSON to COMPLETED_HISTORY_FILE.

    Write failures only produce a warning (never raise), so a broken
    history file cannot abort the backup run.
    """
    try:
        with open(COMPLETED_HISTORY_FILE, "w", encoding="utf-8") as outfile:
            json.dump(history, outfile, ensure_ascii=False, indent=2, default=json_serial)
    except OSError as error:
        print(f"Warning: failed to write completed history ({error}).")
def merge_completed_lists(history_tasks, new_tasks):
    """Merge freshly fetched completed tasks with the stored history.

    Tasks are deduplicated on (task id, completion timestamp), with the
    freshly fetched entry winning when both lists contain the same task.
    The merged list is returned sorted newest-first by completion time,
    falling back to the last-updated time.
    """
    def identity(task):
        # Dedup key: task id plus its completion (or update) timestamp.
        return (str(task.get('id', '')),
                task.get('completed_at') or task.get('updated_at') or "")

    # dict preserves insertion order, so new tasks take precedence over
    # historical duplicates, exactly like a first-wins scan would.
    unique = {}
    for task in list(new_tasks) + list(history_tasks):
        key = identity(task)
        if key not in unique:
            unique[key] = task

    def recency(task):
        return (task.get('completed_at') or "", task.get('updated_at') or "")

    return sorted(unique.values(), key=recency, reverse=True)
def _file_looks_like_html(path):
try:
with open(path, 'rb') as handle:
@ -336,6 +407,8 @@ def main():
comments_by_task = fetch_comments_by_task(
api, comment_project_ids, sorted(task_ids_for_comments)
)
completed_history = load_completed_history()
updated_history = {}
data = []
for project in projects:
project_dict = project.__dict__.copy()
@ -366,15 +439,18 @@ def main():
"content": parent_name,
}
project_dict['completed_tasks'] = processed_completed
historical = completed_history.get(project_id, [])
merged_completed = merge_completed_lists(historical, processed_completed)
project_dict['completed_tasks'] = merged_completed
updated_history[project_id] = merged_completed
data.append(project_dict)
for project_id, tasks in completed_history.items():
if project_id not in updated_history:
updated_history[project_id] = tasks
save_completed_history(updated_history)
# Write JSON
today = datetime.now().strftime("%Y-%m-%d")
json_filename = f"Todoist-Actual-Backup-{today}.json"
def json_serial(obj):
if isinstance(obj, datetime):
return obj.isoformat()
return str(obj)
with open(json_filename, "w", encoding="utf-8") as f:
json.dump(data, f, ensure_ascii=False, indent=2, default=json_serial)
print(f"Exported data to {json_filename}")