Move output to output folder
This commit is contained in:
@@ -6,7 +6,7 @@ Todoist is a SaaS task manager. Todoist provides backups of current tasks, but d
|
|||||||
- Exports all active and completed tasks from all projects (active and archived)
|
- Exports all active and completed tasks from all projects (active and archived)
|
||||||
- Nests tasks under their respective projects, including all available fields
|
- Nests tasks under their respective projects, including all available fields
|
||||||
- Includes comments for each task
|
- Includes comments for each task
|
||||||
- Downloads attachments and references them in the JSON and HTML output
|
- Downloads attachments to `output/attachments/` and references them in the JSON and HTML output
|
||||||
- JSON and HTML files are named with the current date when the script is run
|
- JSON and HTML files are named with the current date when the script is run
|
||||||
- Maintains `Todoist-Completed-History.json` so completed tasks older than Todoist's 90-day API window stay in future exports
|
- Maintains `Todoist-Completed-History.json` so completed tasks older than Todoist's 90-day API window stay in future exports
|
||||||
|
|
||||||
@@ -26,8 +26,8 @@ Todoist is a SaaS task manager. Todoist provides backups of current tasks, but d
|
|||||||
```bash
|
```bash
|
||||||
python export_todoist.py export
|
python export_todoist.py export
|
||||||
```
|
```
|
||||||
This will create `Todoist-Actual-Backup-YYYY-MM-DD.json` and `Todoist-Actual-Backup-YYYY-MM-DD.html` in the current directory, and it will update `Todoist-Completed-History.json` with every completed task encountered.
|
This will create `output/Todoist-Actual-Backup-YYYY-MM-DD.json` and `output/Todoist-Actual-Backup-YYYY-MM-DD.html`, and it will update `output/attachments/` with any downloaded files while leaving `Todoist-Completed-History.json` in the project root.
|
||||||
Keep `Todoist-Completed-History.json` somewhere safe (e.g., in source control or a backup location); it is the only way the exporter can retain completions older than Todoist's 90-day API retention window.
|
Keep `Todoist-Completed-History.json` somewhere safe (e.g., in source control or a backup location); it is the only way the exporter can retain completions older than Todoist's 90-day API retention window.
|
||||||
4. To see usage instructions, run the script with no arguments or any argument other than `export`.
|
4. To see usage instructions, run the script with no arguments or any argument other than `export`.
|
||||||
|
|
||||||
## Requirements
|
## Requirements
|
||||||
|
@ -3,13 +3,16 @@ import sys
|
|||||||
import json
|
import json
|
||||||
import time
|
import time
|
||||||
import getpass
|
import getpass
|
||||||
|
import shutil
|
||||||
from collections import defaultdict
|
from collections import defaultdict
|
||||||
import requests
|
import requests
|
||||||
from datetime import datetime, timedelta
|
from datetime import datetime, timedelta
|
||||||
from todoist_api_python.api import TodoistAPI
|
from todoist_api_python.api import TodoistAPI
|
||||||
from jinja2 import Environment, FileSystemLoader, select_autoescape
|
from jinja2 import Environment, FileSystemLoader, select_autoescape
|
||||||
|
|
||||||
ATTACHMENTS_DIR = "attachments"
|
OUTPUT_DIR = "output"
|
||||||
|
ATTACHMENTS_DIR = os.path.join(OUTPUT_DIR, "attachments")
|
||||||
|
LEGACY_ATTACHMENTS_DIR = "attachments"
|
||||||
TODOIST_API_TOKEN: str | None = None
|
TODOIST_API_TOKEN: str | None = None
|
||||||
COMPLETED_HISTORY_FILE = "Todoist-Completed-History.json"
|
COMPLETED_HISTORY_FILE = "Todoist-Completed-History.json"
|
||||||
|
|
||||||
@ -49,9 +52,29 @@ def get_api_key():
|
|||||||
return key
|
return key
|
||||||
|
|
||||||
|
|
||||||
|
def ensure_output_dir():
    """Create the output directory (``OUTPUT_DIR``) if it does not exist.

    Safe to call repeatedly; existing directories are left untouched.
    """
    # exist_ok=True already handles the "directory present" case, so the
    # separate os.path.exists() pre-check was redundant and introduced a
    # small TOCTOU race between the check and the makedirs call.
    os.makedirs(OUTPUT_DIR, exist_ok=True)
||||||
|
|
||||||
|
|
||||||
def ensure_attachments_dir():
    """Ensure the attachments directory exists under the output directory.

    If a legacy top-level attachments folder is present (from versions of
    the script that wrote next to the source file), its contents are
    migrated into the new location on a best-effort basis: any failure
    only prints a warning and the export continues.
    """
    # The parent output dir must exist before anything can be moved into it.
    ensure_output_dir()
    if os.path.isdir(LEGACY_ATTACHMENTS_DIR) and LEGACY_ATTACHMENTS_DIR != ATTACHMENTS_DIR:
        try:
            if not os.path.exists(ATTACHMENTS_DIR):
                # New location absent: relocate the whole legacy folder at once.
                shutil.move(LEGACY_ATTACHMENTS_DIR, ATTACHMENTS_DIR)
            else:
                # New location already exists: move entries one by one, then
                # remove the (now empty) legacy folder. A name collision in
                # the target will raise and be caught below.
                for name in os.listdir(LEGACY_ATTACHMENTS_DIR):
                    shutil.move(
                        os.path.join(LEGACY_ATTACHMENTS_DIR, name),
                        os.path.join(ATTACHMENTS_DIR, name),
                    )
                os.rmdir(LEGACY_ATTACHMENTS_DIR)
            print(f"Moved legacy attachments into {ATTACHMENTS_DIR}")
        except (OSError, shutil.Error) as exc:  # pylint: disable=broad-except
            # Migration is best-effort; do not abort the export over it.
            print(f"Warning: failed to migrate legacy attachments: {exc}")
    # Create the target dir when migration did not (or could not) create it.
    if not os.path.exists(ATTACHMENTS_DIR):
        os.makedirs(ATTACHMENTS_DIR, exist_ok=True)
|
||||||
|
|
||||||
|
|
||||||
def load_completed_history():
|
def load_completed_history():
|
||||||
@ -88,13 +111,23 @@ def save_completed_history(history):
|
|||||||
print(f"Warning: failed to write completed history ({exc}).")
|
print(f"Warning: failed to write completed history ({exc}).")
|
||||||
|
|
||||||
|
|
||||||
|
def normalize_timestamp(value):
    """Return *value* normalized to a plain string for comparison/sorting.

    Falsy inputs (None, "", 0) collapse to the empty string, datetime
    instances become their ISO-8601 representation, and any other value
    is passed through str().
    """
    if not value:
        return ""
    return value.isoformat() if isinstance(value, datetime) else str(value)
|
||||||
|
|
||||||
|
|
||||||
def merge_completed_lists(history_tasks, new_tasks):
|
def merge_completed_lists(history_tasks, new_tasks):
|
||||||
merged = []
|
merged = []
|
||||||
seen = set()
|
seen = set()
|
||||||
|
|
||||||
def make_key(task):
|
def make_key(task):
|
||||||
task_id = str(task.get('id', ''))
|
task_id = str(task.get('id', ''))
|
||||||
completed_at = task.get('completed_at') or task.get('updated_at') or ""
|
completed_at = normalize_timestamp(task.get('completed_at'))
|
||||||
|
if not completed_at:
|
||||||
|
completed_at = normalize_timestamp(task.get('updated_at'))
|
||||||
return (task_id, completed_at)
|
return (task_id, completed_at)
|
||||||
|
|
||||||
def add_task(task):
|
def add_task(task):
|
||||||
@ -110,8 +143,8 @@ def merge_completed_lists(history_tasks, new_tasks):
|
|||||||
add_task(item)
|
add_task(item)
|
||||||
|
|
||||||
def sort_key(task):
|
def sort_key(task):
|
||||||
completed_at = task.get('completed_at') or ""
|
completed_at = normalize_timestamp(task.get('completed_at'))
|
||||||
updated_at = task.get('updated_at') or ""
|
updated_at = normalize_timestamp(task.get('updated_at'))
|
||||||
return (completed_at, updated_at)
|
return (completed_at, updated_at)
|
||||||
|
|
||||||
merged.sort(key=sort_key, reverse=True)
|
merged.sort(key=sort_key, reverse=True)
|
||||||
@ -307,7 +340,7 @@ def process_task(task, comments_lookup):
|
|||||||
filename = att_dict.get('file_name') or os.path.basename(att_dict['file_url'])
|
filename = att_dict.get('file_name') or os.path.basename(att_dict['file_url'])
|
||||||
local_path = download_attachment(att_dict['file_url'], filename)
|
local_path = download_attachment(att_dict['file_url'], filename)
|
||||||
if local_path:
|
if local_path:
|
||||||
att_dict['local_file'] = os.path.relpath(local_path)
|
att_dict['local_file'] = os.path.relpath(local_path, OUTPUT_DIR)
|
||||||
attachments.append(att_dict)
|
attachments.append(att_dict)
|
||||||
if attachments:
|
if attachments:
|
||||||
task_dict['attachments'] = attachments
|
task_dict['attachments'] = attachments
|
||||||
@ -325,7 +358,7 @@ def process_task(task, comments_lookup):
|
|||||||
filename = attachment_dict.get("file_name") or os.path.basename(file_url)
|
filename = attachment_dict.get("file_name") or os.path.basename(file_url)
|
||||||
local_path = download_attachment(file_url, filename)
|
local_path = download_attachment(file_url, filename)
|
||||||
if local_path:
|
if local_path:
|
||||||
attachment_dict['local_file'] = os.path.relpath(local_path)
|
attachment_dict['local_file'] = os.path.relpath(local_path, OUTPUT_DIR)
|
||||||
comment_dict['attachment'] = attachment_dict
|
comment_dict['attachment'] = attachment_dict
|
||||||
serialized_comments.append(comment_dict)
|
serialized_comments.append(comment_dict)
|
||||||
task_dict['comments'] = serialized_comments
|
task_dict['comments'] = serialized_comments
|
||||||
@ -459,9 +492,10 @@ def main():
|
|||||||
# Write JSON
|
# Write JSON
|
||||||
today = datetime.now().strftime("%Y-%m-%d")
|
today = datetime.now().strftime("%Y-%m-%d")
|
||||||
json_filename = f"Todoist-Actual-Backup-{today}.json"
|
json_filename = f"Todoist-Actual-Backup-{today}.json"
|
||||||
with open(json_filename, "w", encoding="utf-8") as f:
|
json_output_path = os.path.join(OUTPUT_DIR, json_filename)
|
||||||
|
with open(json_output_path, "w", encoding="utf-8") as f:
|
||||||
json.dump(data, f, ensure_ascii=False, indent=2, default=json_serial)
|
json.dump(data, f, ensure_ascii=False, indent=2, default=json_serial)
|
||||||
print(f"Exported data to {json_filename}")
|
print(f"Exported data to {json_output_path}")
|
||||||
# Write HTML
|
# Write HTML
|
||||||
env = Environment(
|
env = Environment(
|
||||||
loader=FileSystemLoader(os.path.dirname(__file__)),
|
loader=FileSystemLoader(os.path.dirname(__file__)),
|
||||||
@ -475,9 +509,10 @@ def main():
|
|||||||
env.filters['markdown'] = lambda text: text or ""
|
env.filters['markdown'] = lambda text: text or ""
|
||||||
template = env.get_template("todoist_backup_template.html")
|
template = env.get_template("todoist_backup_template.html")
|
||||||
html_filename = f"Todoist-Actual-Backup-{today}.html"
|
html_filename = f"Todoist-Actual-Backup-{today}.html"
|
||||||
with open(html_filename, "w", encoding="utf-8") as f:
|
html_output_path = os.path.join(OUTPUT_DIR, html_filename)
|
||||||
|
with open(html_output_path, "w", encoding="utf-8") as f:
|
||||||
f.write(template.render(projects=data, date=today))
|
f.write(template.render(projects=data, date=today))
|
||||||
print(f"Generated HTML backup at {html_filename}")
|
print(f"Generated HTML backup at {html_output_path}")
|
||||||
|
|
||||||
if __name__ == "__main__":
|
if __name__ == "__main__":
|
||||||
main()
|
main()
|
||||||
|
Reference in New Issue
Block a user