|
2 | 2 |
|
3 | 3 | import json |
4 | 4 | import argparse |
| 5 | +import sys |
| 6 | +import logging |
5 | 7 | from pathlib import Path |
| 8 | +from datetime import datetime |
| 9 | + |
6 | 10 | from .ignore import IgnoreChecker |
7 | 11 | from .parser import Parser |
8 | 12 | from .markers import Marker |
9 | 13 | from .generator import SiteGenerator |
10 | | -from datetime import datetime |
11 | 14 |
|
# Configure the root logger once at import time. Acceptable for a CLI entry
# module, but NOTE(review): this would override a host application's logging
# config if this module were ever imported as a library — confirm intent.
logging.basicConfig(level=logging.INFO, format="%(levelname)s: %(message)s")
logger = logging.getLogger("tinydocs")
12 | 17 |
|
def parse_arguments():
    """Build and evaluate the TinyDocs command-line interface.

    Returns:
        argparse.Namespace: the parsed command-line options.
    """
    arg_parser = argparse.ArgumentParser(
        description="TinyDocs: Minimalist documentation generator."
    )
    # (flags, add_argument keyword options) for every CLI switch, in help order.
    option_table = [
        (("-F", "--files"), dict(type=str, help="Comma separated files")),
        (("-D", "--dirs"), dict(type=str, help="Comma separated directories")),
        (("-I", "--ignore"), dict(type=str, help="Comma separated ignore files")),
        (("--list-files",), dict(action="store_true", help="List included files and exit")),
        (("--comment-style",), dict(type=str, default="#", help="Comment style (default: #)")),
        (("--markers",), dict(type=str, default="tiny.markers.json", help="Markers definition JSON")),
        (("-o", "--output"), dict(type=str, default="docs", help="Output directory")),
        (("--generate",), dict(action="store_true", help="Generate static HTML site")),
        (("--name",), dict(type=str, help="Project name")),
    ]
    for flags, options in option_table:
        arg_parser.add_argument(*flags, **options)

    return arg_parser.parse_args()
26 | 31 |
|
27 | | - |
def _collect_files(args):
    """Resolve --dirs/--files into a deduplicated list of existing paths.

    Directory contents are filtered through IgnoreChecker; explicitly listed
    files bypass the ignore rules (matches prior behavior — confirm intended).
    """
    ignore_files = args.ignore.split(",") if args.ignore else []
    ignore_checker = IgnoreChecker([".git/", "__pycache__/", ".venv/", "node_modules/"])
    try:
        ignore_checker.load_ignore_files(ignore_files)
    except Exception as e:
        # Best-effort: a bad ignore file should not abort the run.
        logger.warning("Could not load some ignore files: %s", e)

    files = []
    if args.dirs:
        dir_paths = [d.strip() for d in args.dirs.split(",") if d.strip()]
        files.extend(ignore_checker.filter(dir_paths))
    if args.files:
        files.extend(f.strip() for f in args.files.split(",") if f.strip())

    # De-duplicate preserving first-seen order, then drop non-existent paths.
    files = list(dict.fromkeys(files))
    return [f for f in files if Path(f).exists()]


def _load_markers(markers_arg):
    """Load and parse the marker definition file, exiting the process on failure."""
    marker_path = Path(markers_arg)
    if not marker_path.exists():
        logger.error("Marker file not found: %s", markers_arg)
        sys.exit(1)

    try:
        return Marker.parse(marker_path.read_text(encoding="utf-8"))
    except json.JSONDecodeError:
        logger.error("Malformed JSON in marker file: %s", markers_arg)
        sys.exit(1)
    except Exception as e:
        logger.error("Failed to parse markers: %s", e)
        sys.exit(1)


def _parse_docs(files, markers, comment_style):
    """Parse every file against the marker set; a failing file is logged and skipped."""
    docs_entries = []
    total = len(files)
    for i, file_path in enumerate(files, start=1):
        try:
            logger.info("[%d/%d] Parsing %s...", i, total, file_path)
            parser = Parser(Path(file_path), prefix="@", comment=comment_style)
            for doc in parser.parse(markers):
                docs_entries.append({"file": str(file_path), "docs": doc})
        except Exception as e:
            # Keep going: one unparseable file should not abort the whole run.
            logger.error("Failed to parse %s: %s", file_path, e)
    return docs_entries


def _write_output(payload, output_dir):
    """Write payload as pretty-printed JSON under output_dir.

    Returns the Path of the written JSON file; exits the process on failure.
    """
    out_dir = Path(output_dir)
    try:
        out_dir.mkdir(parents=True, exist_ok=True)
        json_path = out_dir / "tiny.docs.json"
        with open(json_path, "w", encoding="utf-8") as f:
            json.dump(payload, f, indent=4, ensure_ascii=False)
        logger.info("Documentation data saved to %s", json_path)
        return json_path
    except Exception as e:
        logger.error("Failed to write output JSON: %s", e)
        sys.exit(1)


def main():
    """CLI entry point: collect files, parse markers, emit JSON, optionally build a site.

    Exits with status 1 on any unrecoverable error (no input files, missing or
    malformed marker file, unwritable output directory, site-generation failure).
    """
    args = parse_arguments()

    # 1-2. Initialize ignore rules and collect/validate input files.
    files = _collect_files(args)
    if not files:
        logger.error("No valid files found to document. Check your --dirs or --files arguments.")
        sys.exit(1)

    if args.list_files:
        for f in files:
            print(f)
        return

    # 3-4. Load marker definitions and parse documentation entries.
    markers = _load_markers(args.markers)
    docs_entries = _parse_docs(files, markers, args.comment_style)

    # 5-6. Assemble and write the JSON payload.
    output_payload = {
        "name": args.name or Path.cwd().name,
        "timestamp": datetime.now().strftime("%Y-%m-%d %H:%M:%S"),
        "docs": docs_entries,
    }
    json_path = _write_output(output_payload, args.output)

    # 7. Optional static-site generation from the freshly written JSON.
    if args.generate:
        try:
            gen = SiteGenerator(json_path=str(json_path), output_dir=str(json_path.parent))
            gen.generate()
            logger.info("Static site generated in %s/", json_path.parent)
        except Exception as e:
            logger.error("Site generation failed: %s", e)
            sys.exit(1)
77 | 128 |
|
# Script entry point: run the CLI only when executed directly, not on import.
if __name__ == "__main__":
    main()
0 commit comments