#!/usr/bin/env python3
|
|
import os
|
|
import sys
|
|
import time
|
|
import unicodedata
|
|
from collections import defaultdict
|
|
from typing import Any, Dict, List, Optional, Set, Tuple
|
|
from urllib.parse import quote
|
|
|
|
import requests
|
|
|
|
|
|
def load_env(path: str) -> None:
    """Populate os.environ from a dotenv-style file without overriding existing vars.

    Lines that are blank, comments (#), or lack an '=' are ignored. Values may
    be wrapped in single or double quotes, which are stripped.
    """
    if not os.path.exists(path):
        return
    with open(path, "r", encoding="utf-8") as handle:
        for raw_line in handle:
            entry = raw_line.strip()
            # Skip blanks, comments, and anything that is not KEY=VALUE.
            if not entry or entry.startswith("#") or "=" not in entry:
                continue
            name, _, value = entry.partition("=")
            name = name.strip()
            value = value.strip().strip('"').strip("'")
            # Real environment always wins over the file.
            if name and name not in os.environ:
                os.environ[name] = value
|
|
|
|
|
|
def env(name: str, default: Optional[str] = None, required: bool = False) -> str:
    """Read an environment variable, aborting when a required one is unset or empty.

    Returns "" (never None) for optional variables that are missing.
    """
    result = os.environ.get(name, default)
    # None and "" are both treated as missing for required vars.
    if required and not result:
        raise SystemExit(f"Missing required env var: {name}")
    return result or ""
|
|
|
|
|
|
def env_bool(name: str, default: bool = False) -> bool:
    """Interpret an env var as a boolean flag; truthy spellings: 1/true/yes/y/on."""
    fallback = "true" if default else "false"
    token = os.environ.get(name, fallback).strip().lower()
    return token in {"1", "true", "yes", "y", "on"}
|
|
|
|
|
|
def env_int(name: str, default: int) -> int:
    """Read an integer env var, falling back to *default* on a missing or non-numeric value.

    Only ValueError is caught: int() on a str can raise nothing else here, and
    the original blanket `except Exception` would have masked unrelated bugs.
    """
    raw = os.environ.get(name, str(default)).strip()
    try:
        return int(raw)
    except ValueError:
        return default
|
|
|
|
|
|
def is_debug() -> bool:
    """True when the script was launched with a --debug flag."""
    return sys.argv.count("--debug") > 0
|
|
|
|
|
|
def dprint(msg: str) -> None:
    """Print *msg* with a [debug] prefix, but only when debug mode is on."""
    if not is_debug():
        return
    print(f"[debug] {msg}")
|
|
|
|
|
|
def notion_headers() -> Dict[str, str]:
    """Standard headers for every Notion API request (auth, version, JSON)."""
    headers = {"Authorization": f"Bearer {NOTION_TOKEN}"}
    headers["Notion-Version"] = NOTION_VERSION
    headers["Content-Type"] = "application/json"
    return headers
|
|
|
|
|
|
def redmine_headers() -> Dict[str, str]:
    """Standard headers for every Redmine API request (API key, JSON)."""
    headers = {"X-Redmine-API-Key": REDMINE_API_KEY}
    headers["Content-Type"] = "application/json"
    return headers
|
|
|
|
|
|
def wiki_slug(title: str) -> str:
    """Turn a page title into a Redmine-friendly wiki slug.

    Accents are decomposed and dropped (NFKD), whitespace becomes "_",
    and only alphanumerics plus "_" / "-" survive. A title that slugs to
    nothing falls back to the raw title.
    """
    decomposed = unicodedata.normalize("NFKD", title)
    stripped = "".join(ch for ch in decomposed if not unicodedata.combining(ch))
    kept: List[str] = []
    for ch in stripped:
        if ch.isspace():
            kept.append("_")
        elif ch.isalnum() or ch in "_-":
            kept.append(ch)
    slug = "".join(kept).strip("_")
    return slug if slug else title
|
|
|
|
|
|
def wiki_title_for_url(title: str) -> str:
    """Slugify *title* and percent-encode it for use in a Redmine wiki URL."""
    slug = wiki_slug(title)
    return quote(slug, safe="")
|
|
|
|
|
|
def resolve_project_identifier(value: str) -> str:
    """Map a project identifier, display name, or numeric id to the canonical identifier.

    NOTE(review): only the first 200 projects are fetched; a Redmine instance
    with more projects would need pagination — confirm against the target server.
    """
    listing_url = f"{REDMINE_URL}/projects.json"
    response = requests.get(listing_url, headers=redmine_headers(), params={"limit": 200}, timeout=60)
    response.raise_for_status()
    needle = value.lower()
    for project in response.json().get("projects", []):
        matched = (
            project.get("identifier", "").lower() == needle
            or project.get("name", "").lower() == needle
            or str(project.get("id")) == str(value)
        )
        if matched:
            return project.get("identifier")
    raise SystemExit(f"Redmine project not found: {value}")
|
|
|
|
|
|
def redmine_wiki_get(project: str, title: str) -> Dict[str, Any]:
    """Fetch a wiki page, trying the raw title first and the slugified one second.

    Returns {} when the page does not exist (404) or the body is not JSON.
    """
    for name in (title, wiki_slug(title)):
        page_url = f"{REDMINE_URL}/projects/{project}/wiki/{wiki_title_for_url(name)}.json"
        response = requests.get(page_url, headers=redmine_headers(), timeout=60)
        if response.status_code == 404:
            continue
        response.raise_for_status()
        try:
            return response.json().get("wiki_page", {})
        except Exception:
            # Non-JSON 2xx body: treat as "no usable page".
            return {}
    return {}
|
|
|
|
|
|
def redmine_wiki_index(project: str) -> List[Dict[str, Any]]:
    """Return every wiki page descriptor for *project* ([] when none exist)."""
    index_url = f"{REDMINE_URL}/projects/{project}/wiki/index.json"
    response = requests.get(index_url, headers=redmine_headers(), timeout=60)
    response.raise_for_status()
    pages = response.json().get("wiki_pages", [])
    # Normalize a JSON null to an empty list.
    return pages or []
|
|
|
|
|
|
def redmine_wiki_delete(project: str, title: str) -> None:
    """Delete a wiki page, trying the raw title then its slug; missing pages are ignored."""
    for name in (title, wiki_slug(title)):
        page_url = f"{REDMINE_URL}/projects/{project}/wiki/{wiki_title_for_url(name)}.json"
        response = requests.delete(page_url, headers=redmine_headers(), timeout=60)
        if response.status_code == 404:
            # Try the next spelling of the title.
            continue
        if response.status_code not in (200, 204):
            response.raise_for_status()
        return
|
|
|
|
|
|
def reset_redmine_wiki(project: str, keep_titles: Set[str]) -> None:
    """Delete every wiki page in *project* except those in *keep_titles*.

    Pages are deleted deepest-first so children go before their parents,
    avoiding Redmine re-parenting surprises mid-cleanup.
    """
    pages = redmine_wiki_index(project)
    parent_of: Dict[str, str] = {}
    titles: Set[str] = set()
    for p in pages:
        title = (p.get("title") or "").strip()
        if not title:
            continue
        titles.add(title)
        parent = ((p.get("parent") or {}).get("title") or "").strip()
        if parent:
            parent_of[title] = parent

    def depth(title: str) -> int:
        # Walk up the parent chain; `seen` guards against cycles in the index data.
        d = 0
        seen = set()
        cur = title
        while cur in parent_of and cur not in seen:
            seen.add(cur)
            cur = parent_of[cur]
            d += 1
        return d

    # Deepest pages first: children are removed before their parents.
    ordered = sorted(titles, key=lambda t: depth(t), reverse=True)
    deleted = 0
    for t in ordered:
        if t in keep_titles:
            continue
        redmine_wiki_delete(project, t)
        deleted += 1
    print(f"Reset wiki: deleted {deleted} pages (kept {len(keep_titles)})")
|
|
|
|
|
|
def redmine_wiki_upsert(project: str, title: str, text: str, parent_title: Optional[str]) -> None:
    """Create or update a Redmine wiki page, tolerating title/slug mismatches.

    Strategy:
      1. If the page exists (looked up by raw title or slug), PUT to its
         canonical title — skipped entirely when neither text nor parent changed.
      2. Otherwise PUT against the raw title, then the slugified title
         (Redmine's wiki PUT creates the page when it does not exist).
      3. Last resort: PUT to the project-level wiki.json with an explicit
         "title" in the payload.
    At each step, a 422 while a parent is set is retried once without the
    parent assignment (a common Redmine validation failure), keeping content.

    Raises:
        requests.HTTPError: on any other >= 400 response.
    """
    def do_put(url: str, payload: Dict[str, Any]) -> requests.Response:
        # Every write goes through the wiki_page envelope Redmine expects.
        resp = requests.put(url, headers=redmine_headers(), json={"wiki_page": payload}, timeout=60)
        return resp

    payload: Dict[str, Any] = {"text": text}
    if parent_title:
        payload["parent_title"] = parent_title

    existing = redmine_wiki_get(project, title)
    if existing:
        current_text = (existing.get("text") or "").strip()
        current_parent = ((existing.get("parent") or {}).get("title") or "")
        desired_parent = parent_title or ""
        # No-op when both the body and the parent already match.
        if current_text == text.strip() and current_parent == desired_parent:
            return
        # Prefer the title Redmine reports — it may differ from ours by slugging.
        use_title = existing.get("title") or title
        url = f"{REDMINE_URL}/projects/{project}/wiki/{wiki_title_for_url(use_title)}.json"
        resp = do_put(url, payload)
        if resp.status_code == 422 and parent_title:
            # Common Redmine validation fallback: keep content, drop parent assignment.
            payload_wo_parent = {"text": text}
            resp = do_put(url, payload_wo_parent)
        if resp.status_code >= 400:
            raise requests.HTTPError(
                f"Redmine upsert failed ({resp.status_code}) for '{title}' update: {resp.text}",
                response=resp,
            )
        return

    # Page not found via GET: try creating under both spellings of the title.
    for candidate in [title, wiki_slug(title)]:
        url = f"{REDMINE_URL}/projects/{project}/wiki/{wiki_title_for_url(candidate)}.json"
        resp = do_put(url, payload)
        if resp.status_code == 404:
            continue
        if resp.status_code == 422 and parent_title:
            # Same parent-assignment fallback as above.
            payload_wo_parent = {"text": text}
            resp = do_put(url, payload_wo_parent)
        if resp.status_code >= 400:
            raise requests.HTTPError(
                f"Redmine upsert failed ({resp.status_code}) for '{title}' create: {resp.text}",
                response=resp,
            )
        return

    # Both per-title endpoints 404'd: fall back to the project-level endpoint
    # with the title carried in the payload instead of the URL.
    payload2 = dict(payload)
    payload2["title"] = title
    url = f"{REDMINE_URL}/projects/{project}/wiki.json"
    resp = do_put(url, payload2)
    if resp.status_code == 422 and parent_title:
        payload2 = {"title": title, "text": text}
        resp = do_put(url, payload2)
    if resp.status_code >= 400:
        raise requests.HTTPError(
            f"Redmine upsert failed ({resp.status_code}) for '{title}' fallback create: {resp.text}",
            response=resp,
        )
|
|
|
|
|
|
def notion_query_database(database_id: str) -> List[Dict[str, Any]]:
    """Fetch all rows of a Notion database, following cursor pagination.

    Rate-limit (429) responses are retried with the server-suggested
    Retry-After delay, but only a bounded number of times — the original
    retried forever, so a persistently throttled API could spin this loop
    indefinitely. A repeated or empty cursor also stops pagination, guarding
    against a buggy cursor chain.

    Raises:
        requests.HTTPError: on a non-429 error status, or when 429 retries
        are exhausted.
    """
    url = f"https://api.notion.com/v1/databases/{database_id}/query"
    out: List[Dict[str, Any]] = []
    payload: Dict[str, Any] = {"page_size": 100}
    seen_cursors: Set[str] = set()
    retries_left = 10
    while True:
        dprint(f"Notion query database: {database_id}")
        resp = requests.post(url, headers=notion_headers(), json=payload, timeout=60)
        if resp.status_code == 429:
            if retries_left <= 0:
                resp.raise_for_status()  # surface the 429 instead of looping forever
            retries_left -= 1
            # Honor Retry-After when present; fall back to the old 1s pause.
            try:
                delay = float(resp.headers.get("Retry-After", "1"))
            except ValueError:
                delay = 1.0
            time.sleep(max(0.1, min(delay, 30.0)))
            continue
        resp.raise_for_status()
        data = resp.json()
        out.extend(data.get("results", []))
        if not data.get("has_more"):
            break
        next_cursor = data.get("next_cursor")
        if not next_cursor or next_cursor in seen_cursors:
            dprint("Stopping database pagination: repeated or empty cursor")
            break
        seen_cursors.add(next_cursor)
        payload["start_cursor"] = next_cursor
    return out
|
|
|
|
|
|
def notion_get_page(page_id: str) -> Dict[str, Any]:
    """Retrieve a single Notion page object by id."""
    page_url = f"https://api.notion.com/v1/pages/{page_id}"
    response = requests.get(page_url, headers=notion_headers(), timeout=60)
    response.raise_for_status()
    return response.json()
|
|
|
|
|
|
def notion_get_page_title(page: Dict[str, Any], title_prop: str) -> str:
    """Extract the plain-text title of a Notion page.

    Prefers the property named *title_prop*; otherwise scans all properties
    for the first one of type "title" (covers page objects outside databases).

    Fix: the first property access previously lacked the `or {}` None-guard
    that the fallback path already had, so `{"properties": None}` crashed
    with AttributeError instead of returning "".
    """
    props = page.get("properties", {}) or {}

    def join_title(prop: Dict[str, Any]) -> str:
        return "".join(t.get("plain_text", "") for t in prop.get("title", []))

    named = props.get(title_prop, {})
    if named.get("type") == "title":
        return join_title(named)
    # Fallback: first title-typed property, whatever its name.
    for candidate in props.values():
        if candidate.get("type") == "title":
            return join_title(candidate)
    return ""
|
|
|
|
|
|
def notion_get_tags(page: Dict[str, Any], tags_prop: str) -> List[str]:
    """Return the non-empty multi-select tag names from *tags_prop*, or []."""
    prop = page.get("properties", {}).get(tags_prop, {})
    if prop.get("type") != "multi_select":
        return []
    names = (
        option.get("name", "").strip()
        for option in prop.get("multi_select", [])
        if option.get("name")
    )
    return list(names)
|
|
|
|
|
|
def notion_list_blocks(block_id: str) -> List[Dict[str, Any]]:
    """Fetch the direct children of a Notion block, following cursor pagination.

    Rate-limit (429) responses are retried with the server-suggested
    Retry-After delay, a bounded number of times — the original retried
    forever, so a persistently throttled API could spin this loop
    indefinitely. A repeated or empty cursor also stops pagination.

    Raises:
        requests.HTTPError: on a non-429 error status, or when 429 retries
        are exhausted.
    """
    url = f"https://api.notion.com/v1/blocks/{block_id}/children"
    out: List[Dict[str, Any]] = []
    params: Dict[str, Any] = {"page_size": 100}
    seen_cursors: Set[str] = set()
    retries_left = 10
    while True:
        dprint(f"Notion blocks: {block_id}")
        resp = requests.get(url, headers=notion_headers(), params=params, timeout=60)
        if resp.status_code == 429:
            if retries_left <= 0:
                resp.raise_for_status()  # surface the 429 instead of looping forever
            retries_left -= 1
            # Honor Retry-After when present; fall back to the old 1s pause.
            try:
                delay = float(resp.headers.get("Retry-After", "1"))
            except ValueError:
                delay = 1.0
            time.sleep(max(0.1, min(delay, 30.0)))
            continue
        resp.raise_for_status()
        data = resp.json()
        out.extend(data.get("results", []))
        if not data.get("has_more"):
            break
        next_cursor = data.get("next_cursor")
        if not next_cursor or next_cursor in seen_cursors:
            dprint(f"Stopping blocks pagination for {block_id}: repeated or empty cursor")
            break
        seen_cursors.add(next_cursor)
        params["start_cursor"] = next_cursor
    return out
|
|
|
|
|
|
def notion_fetch_block_tree(block_id: str, visited: Optional[Set[str]] = None, budget: int = 30000) -> List[Dict[str, Any]]:
    """Recursively fetch a block's children, attaching them under "children".

    *visited* prevents re-walking a block id; *budget* caps the number of
    recursive descents (decremented by one per nested fetch) so pathological
    trees are truncated instead of fetched forever.
    """
    visited = visited if visited is not None else set()
    if budget <= 0:
        dprint("Block tree budget reached; truncating")
        return []
    if block_id in visited:
        dprint(f"Skip already visited block: {block_id}")
        return []
    visited.add(block_id)

    children = notion_list_blocks(block_id)
    for block in children:
        child_id = block.get("id")
        if block.get("has_children") and child_id:
            block["children"] = notion_fetch_block_tree(child_id, visited, budget - 1)
    return children
|
|
|
|
|
|
def rich_text_to_textile(rich: List[Dict[str, Any]]) -> str:
    """Render Notion rich-text runs as Textile inline markup.

    Wrapping order matters and mirrors the annotation precedence: code first,
    then bold, italic, strikethrough; a link wraps the fully-formatted text.
    Empty runs are dropped.
    """
    rendered: List[str] = []
    wrappers = (
        ("code", "@{}@"),
        ("bold", "*{}*"),
        ("italic", "_{}_"),
        ("strikethrough", "-{}-"),
    )
    for run in rich:
        text = run.get("plain_text", "")
        if not text:
            continue
        flags = run.get("annotations", {}) or {}
        for key, template in wrappers:
            if flags.get(key):
                text = template.format(text)
        link = run.get("href")
        if link:
            text = f"\"{text}\":{link}"
        rendered.append(text)
    return "".join(rendered)
|
|
|
|
|
|
def notion_file_url(obj: Dict[str, Any]) -> str:
    """Return the URL of a Notion file object ("external" or "file" type), else ""."""
    if not obj:
        return ""
    kind = obj.get("type")
    if kind in ("external", "file"):
        # The URL lives under a key named after the type.
        return (obj.get(kind) or {}).get("url", "")
    return ""
|
|
|
|
|
|
def notion_blocks_to_textile(blocks: List[Dict[str, Any]], depth: int = 0) -> List[str]:
    """Flatten a (possibly nested) Notion block list into Textile source lines.

    *depth* controls list-marker nesting ("*"/"#" repetition). Children are
    rendered after their parent, one level deeper. Blocks rendering to only
    whitespace are dropped.
    """
    lines: List[str] = []
    for b in blocks:
        btype = b.get("type")
        # Notion stores a block's payload under a key named after its type.
        data = b.get(btype, {}) if btype else {}
        line = ""

        if btype == "paragraph":
            line = rich_text_to_textile(data.get("rich_text", []))
        elif btype == "heading_1":
            line = f"h1. {rich_text_to_textile(data.get('rich_text', []))}"
        elif btype == "heading_2":
            line = f"h2. {rich_text_to_textile(data.get('rich_text', []))}"
        elif btype == "heading_3":
            line = f"h3. {rich_text_to_textile(data.get('rich_text', []))}"
        elif btype == "bulleted_list_item":
            line = f"{'*' * max(1, depth + 1)} {rich_text_to_textile(data.get('rich_text', []))}"
        elif btype == "numbered_list_item":
            line = f"{'#' * max(1, depth + 1)} {rich_text_to_textile(data.get('rich_text', []))}"
        elif btype == "to_do":
            # Checkboxes become literal [x]/[ ] markers on a bullet line.
            tick = "[x]" if data.get("checked") else "[ ]"
            line = f"{'*' * max(1, depth + 1)} {tick} {rich_text_to_textile(data.get('rich_text', []))}"
        elif btype == "quote":
            line = f"bq. {rich_text_to_textile(data.get('rich_text', []))}"
        elif btype == "divider":
            line = "----"
        elif btype == "code":
            code = rich_text_to_textile(data.get("rich_text", []))
            line = f"<pre>{code}</pre>"
        elif btype == "image":
            url = notion_file_url(data)
            caption = rich_text_to_textile(data.get("caption", []))
            if url:
                line = f"!{url}!"
                if caption:
                    # Caption goes on its own line directly below the image.
                    line = f"{line}\n{caption}"
        elif btype in {"file", "pdf", "video", "audio"}:
            # Attachments become plain links, labeled by caption or block type.
            url = notion_file_url(data)
            caption = rich_text_to_textile(data.get("caption", [])) or btype
            if url:
                line = f'\"{caption}\":{url}'
        elif btype == "bookmark":
            url = data.get("url", "")
            if url:
                line = f'\"{url}\":{url}'
        elif btype == "embed":
            url = data.get("url", "")
            if url:
                line = f'\"{url}\":{url}'
        else:
            # Unknown block types: salvage whatever rich_text they carry.
            if "rich_text" in data:
                line = rich_text_to_textile(data.get("rich_text", []))

        if line.strip():
            lines.append(line)

        # Render children (attached by notion_fetch_block_tree) one level deeper.
        children = b.get("children", [])
        if children:
            lines.extend(notion_blocks_to_textile(children, depth + 1))

    return lines
|
|
|
|
|
|
def pick_category(tags: List[str]) -> str:
    """Pick one category from *tags*: alphabetically first (case-insensitive), or the default."""
    candidates = [tag.strip() for tag in tags if tag.strip()]
    if not candidates:
        return DEFAULT_CATEGORY
    # min() with a casefold key is equivalent to sorted(...)[0], ties included.
    return min(candidates, key=lambda s: s.casefold())
|
|
|
|
|
|
def build_home_by_categories(project: str, by_cat: Dict[str, List[str]]) -> None:
    """Render and upsert the wiki Home page as a category → page-link outline."""
    lines: List[str] = [f"h1. {HOME_TITLE}", "", "h2. Categories", ""]
    for cat in sorted(by_cat, key=lambda s: s.casefold()):
        lines.append(f"h3. {cat}")
        # De-duplicate titles within a category before listing them.
        for title in sorted(set(by_cat[cat]), key=lambda s: s.casefold()):
            lines.append(f'* "{title}":{REDMINE_URL}/projects/{project}/wiki/{wiki_title_for_url(title)}')
        lines.append("")
    body = "\n".join(lines).strip() + "\n"
    redmine_wiki_upsert(project, HOME_TITLE, body, None)
|
|
|
|
|
|
def build_home_by_tree(project: str, parent_map: Dict[str, str], nodes: Set[str]) -> None:
    """Render and upsert the Home page as a nested navigation tree of wiki links."""
    children: Dict[str, List[str]] = defaultdict(list)
    for node in sorted(nodes, key=lambda s: s.casefold()):
        # Nodes without a recorded parent hang directly off Home.
        children[parent_map.get(node, HOME_TITLE)].append(node)

    lines: List[str] = [f"h1. {HOME_TITLE}", "", "h2. Navigation", ""]

    def walk(parent: str, level: int) -> None:
        # Depth-first emission; bullet depth mirrors tree depth.
        for child in sorted(children.get(parent, []), key=lambda s: s.casefold()):
            bullet = "*" * max(1, level)
            lines.append(f"{bullet} \"{child}\":{REDMINE_URL}/projects/{project}/wiki/{wiki_title_for_url(child)}")
            walk(child, level + 1)

    walk(HOME_TITLE, 1)
    redmine_wiki_upsert(project, HOME_TITLE, "\n".join(lines).strip() + "\n", None)
|
|
|
|
|
|
def sync_database_mode(project: str) -> Tuple[int, int, int]:
    """Sync each row of the configured Notion database into the Redmine wiki.

    Every page is filed under a category page (derived from its tags) that is
    itself parented under Home. A single failed page is skipped, not fatal.

    Returns:
        (synced, skipped, category_count)
    """
    pages = notion_query_database(NOTION_WIKI_DATABASE_ID)
    print(f"Notion pages found: {len(pages)}")

    by_category: Dict[str, List[str]] = defaultdict(list)
    synced = 0
    skipped = 0

    total = len(pages)
    for idx, p in enumerate(pages, start=1):
        page_id = p.get("id")
        title = notion_get_page_title(p, NOTION_WIKI_TITLE_PROP).strip()
        print(f"[{idx}/{total}] {title or '(sans titre)'}")

        # Untitled or id-less rows cannot be mapped to a wiki page.
        if not title or not page_id:
            skipped += 1
            continue

        tags = notion_get_tags(p, NOTION_WIKI_TAGS_PROP)
        category = pick_category(tags)

        # Ensure the category page exists before using it as a parent.
        redmine_wiki_upsert(project, category, f"h2. {category}\n", HOME_TITLE)

        started = time.time()
        blocks = notion_fetch_block_tree(page_id, budget=NOTION_MAX_BLOCK_NODES_PER_PAGE)
        body_lines = notion_blocks_to_textile(blocks)
        if not body_lines:
            body_lines = ["(Contenu vide)"]
        body = "\n".join(body_lines).strip() + "\n"

        try:
            redmine_wiki_upsert(project, title, body, category)
            by_category[category].append(title)
            synced += 1
            print(f" -> content ok ({len(body_lines)} lines, {int(time.time() - started)}s)")
        except requests.HTTPError as e:
            # Best-effort: one failed page must not abort the whole sync.
            skipped += 1
            print(f" -> skip redmine error: {e}")

    build_home_by_categories(project, by_category)
    return synced, skipped, len(by_category)
|
|
|
|
|
|
def sync_page_tree_mode(project: str) -> Tuple[int, int, int]:
    """Mirror a Notion page subtree (rooted at NOTION_WIKI_ROOT_PAGE_ID) into Redmine.

    The Notion parent/child structure is preserved through Redmine parent
    titles, and a navigation tree is written to the Home page at the end.

    Returns:
        (synced, skipped, 0) — tree mode has no category count.
    """
    if not NOTION_WIKI_ROOT_PAGE_ID:
        raise SystemExit("NOTION_WIKI_ROOT_PAGE_ID is required when NOTION_WIKI_SOURCE=page_tree")

    parent_map: Dict[str, str] = {}
    nodes: Set[str] = set()
    synced = 0
    skipped = 0

    def crawl(page_id: str, redmine_parent: str, depth: int, visited_pages: Set[str]) -> None:
        nonlocal synced, skipped
        # Guard against revisiting a page id (cycles / duplicated links).
        if page_id in visited_pages:
            return
        visited_pages.add(page_id)

        page_obj = notion_get_page(page_id)
        title = notion_get_page_title(page_obj, NOTION_WIKI_TITLE_PROP).strip()
        if not title:
            skipped += 1
            return

        print(f"[{synced + skipped + 1}] {' ' * depth}{title}")

        blocks = notion_list_blocks(page_id)

        # Split content blocks vs child pages to preserve hierarchy.
        content_blocks: List[Dict[str, Any]] = []
        child_page_ids: List[str] = []
        for b in blocks:
            if b.get("type") == "child_page" and b.get("id"):
                child_page_ids.append(b["id"])
            else:
                content_blocks.append(b)

        # Fetch recursive content only for non-child-page blocks.
        tree: List[Dict[str, Any]] = []
        for b in content_blocks:
            if b.get("has_children") and b.get("id"):
                b = dict(b)  # shallow copy before attaching "children"
                b["children"] = notion_fetch_block_tree(b["id"], budget=NOTION_MAX_BLOCK_NODES_PER_PAGE)
            tree.append(b)

        body_lines = notion_blocks_to_textile(tree)
        if not body_lines:
            body_lines = ["(Contenu vide)"]
        body = "\n".join(body_lines).strip() + "\n"

        try:
            redmine_wiki_upsert(project, title, body, redmine_parent)
            parent_map[title] = redmine_parent
            nodes.add(title)
            synced += 1
        except requests.HTTPError as e:
            # Skip the failed page AND its subtree: children would have no valid parent.
            skipped += 1
            print(f" -> skip redmine error: {e}")
            return

        for child_id in child_page_ids:
            crawl(child_id, title, depth + 1, visited_pages)

    # Create the root page under Home, then crawl the whole subtree.
    root_obj = notion_get_page(NOTION_WIKI_ROOT_PAGE_ID)
    root_title = notion_get_page_title(root_obj, NOTION_WIKI_TITLE_PROP).strip() or HOME_TITLE
    redmine_wiki_upsert(project, root_title, f"h1. {root_title}\n", HOME_TITLE)
    parent_map[root_title] = HOME_TITLE
    nodes.add(root_title)

    crawl(NOTION_WIKI_ROOT_PAGE_ID, HOME_TITLE, 0, set())
    build_home_by_tree(project, parent_map, nodes)
    return synced, skipped, 0
|
|
|
|
|
|
def main() -> None:
    """Entry point: resolve the project, optionally reset the wiki, then sync."""
    project = resolve_project_identifier(REDMINE_WIKI_PROJECT)

    # Make sure Home exists before anything references it as a parent.
    redmine_wiki_upsert(project, HOME_TITLE, f"h1. {HOME_TITLE}\n", None)

    if REDMINE_WIKI_RESET_BEFORE_IMPORT:
        reset_redmine_wiki(project, {HOME_TITLE})
        # Re-assert the Home page after the reset pass.
        redmine_wiki_upsert(project, HOME_TITLE, f"h1. {HOME_TITLE}\n", None)

    sync = sync_page_tree_mode if NOTION_WIKI_SOURCE == "page_tree" else sync_database_mode
    synced, skipped, categories = sync(project)

    print(f"Synced: {synced}, Skipped: {skipped}, Categories: {categories}")
    print(f"Home: {REDMINE_URL}/projects/{project}/wiki/")
|
|
|
|
|
|
if __name__ == "__main__":
    # Load a .env file sitting next to this script; real env vars take precedence.
    load_env(os.path.join(os.path.dirname(__file__), ".env"))

    # --- Notion settings ---
    NOTION_TOKEN = env("NOTION_TOKEN", required=True)
    NOTION_VERSION = env("NOTION_VERSION", default="2022-06-28")
    NOTION_WIKI_DATABASE_ID = env("NOTION_WIKI_DATABASE_ID", default="")
    NOTION_WIKI_ROOT_PAGE_ID = env("NOTION_WIKI_ROOT_PAGE_ID", default="")
    NOTION_WIKI_TITLE_PROP = env("NOTION_WIKI_TITLE_PROP", default="Page")
    NOTION_WIKI_TAGS_PROP = env("NOTION_WIKI_TAGS_PROP", default="Tags")

    # --- Redmine settings ---
    REDMINE_URL = env("REDMINE_URL", required=True).rstrip("/")
    REDMINE_API_KEY = env("REDMINE_API_KEY", required=True)
    REDMINE_WIKI_PROJECT = env("REDMINE_WIKI_PROJECT", default="Wiki")
    HOME_TITLE = env("REDMINE_WIKI_HOME_TITLE", default="Wiki")
    DEFAULT_CATEGORY = env("REDMINE_WIKI_DEFAULT_CATEGORY", default="General")

    # --- Sync behavior ---
    NOTION_WIKI_SOURCE = env("NOTION_WIKI_SOURCE", default="database").strip().lower()
    REDMINE_WIKI_RESET_BEFORE_IMPORT = env_bool("REDMINE_WIKI_RESET_BEFORE_IMPORT", default=False)
    NOTION_MAX_BLOCK_NODES_PER_PAGE = env_int("NOTION_MAX_BLOCK_NODES_PER_PAGE", 15000)

    # Fail fast on an invalid or incomplete configuration.
    if NOTION_WIKI_SOURCE not in {"database", "page_tree"}:
        raise SystemExit("NOTION_WIKI_SOURCE must be 'database' or 'page_tree'")
    if NOTION_WIKI_SOURCE == "database" and not NOTION_WIKI_DATABASE_ID:
        raise SystemExit("NOTION_WIKI_DATABASE_ID is required when NOTION_WIKI_SOURCE=database")

    main()
|