-
Notifications
You must be signed in to change notification settings - Fork 2
Expand file tree
/
Copy patharanea.py
More file actions
133 lines (118 loc) · 5.01 KB
/
aranea.py
File metadata and controls
133 lines (118 loc) · 5.01 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
import argparse
from requests import ConnectionError
from mixins.base import Base
from mixins.colour import Colour
from plugins.analysis import Analysis
from plugins.crawler import Crawler
from utils import strings
class Aranea(Base, Colour, Analysis, Crawler):
def __init__(self, url, threads, headers, strict, mainonly=False, continuous=False, output=None, auto=False, html_output=None, no_log=''):
super().__init__(url, threads, headers, strict, mainonly, continuous, output, auto, html_output, no_log)
@staticmethod
def parse_args():
parser = argparse.ArgumentParser()
url_group = parser.add_mutually_exclusive_group(required=True)
url_group.add_argument(
'-u', '--url', help="Target URL")
url_group.add_argument(
'-ul', '--urllist', help="Path to file containing list of URLs (one per line)")
parser.add_argument(
'-m', '--mode',
help="Available Modes: crawl, analysis",
required=True)
parser.add_argument(
'-o', '--output',
help='Output file to save analysis results (analysis mode only).')
parser.add_argument(
'--html',
help='Generate interactive HTML report (analysis mode only).')
parser.add_argument(
'-t', '--threads',
help="Default configuration: 10 threads", default=10)
parser.add_argument(
'--headers',
help='Should be a string as in the example: "Authorization:Bearer ey..,Cookie:role=admin;"',
default='')
parser.add_argument(
'-s', '--strict',
help="For analysis mode: the URL will be parsed even if it does not have a JS extension.",
action='store_true')
parser.add_argument(
'--mainonly',
help="For analysis mode: only the main.js file will be parsed.",
action='store_true')
parser.add_argument(
'-c', '--continuous',
help="For analysis mode: recursively parse found JS files.",
action='store_true')
parser.add_argument(
'--auto',
help='For analysis mode with --continuous: automatically parse all files without prompting.',
action='store_true')
parser.add_argument(
'--no',
help='Exclude categories from output (e.g., --no assets,js,images,modules,json,ts)',
default='')
return parser.parse_args()
@staticmethod
def run_on_url(url, mode, threads, headers, strict, mainonly, continuous, output, auto, html_output, no_log):
"""Run the specified mode on a single URL"""
try:
if mode in ('analysis', 'a'):
Aranea(url, threads, headers, strict, mainonly, continuous, output, auto, html_output, no_log).analyze()
elif mode in ('crawl', 'c'):
Aranea(url, threads, headers, strict, mainonly, continuous, output, auto, html_output, no_log).crawl()
else:
print(
f'{Aranea.RED} The mode "{mode}" does not exist!{Aranea.WHITE}')
except ConnectionError:
print(f'{Aranea.RED} Connection Error: Please check the URL address and try again - {url}{Aranea.WHITE}')
except Exception as e:
print(f'{Aranea.RED}Error processing {url}: {e}{Aranea.WHITE}')
if __name__ == '__main__':
args = Aranea.parse_args()
threads = int(args.threads)
headers = args.headers.strip()
mode = args.mode.strip()
strict = args.strict
mainonly = args.mainonly
continuous = args.continuous
output = args.output
auto = args.auto
html_output = args.html
no_log = args.no
# Collect URLs from either single URL or URL list file
urls = []
if args.url:
urls = [args.url.strip()]
elif args.urllist:
try:
with open(args.urllist, 'r') as f:
urls = [line.strip() for line in f if line.strip() and not line.strip().startswith('#')]
except FileNotFoundError:
print(f'{Aranea.RED}Error: URL list file not found: {args.urllist}{Aranea.WHITE}')
exit(1)
except Exception as e:
print(f'{Aranea.RED}Error reading URL list file: {e}{Aranea.WHITE}')
exit(1)
if not urls:
print(f'{Aranea.RED}Error: No URLs found to process{Aranea.WHITE}')
exit(1)
# Display banner
print(strings.SOLID)
print(strings.INTRO)
print(strings.SOLID)
# Process each URL
total_urls = len(urls)
for idx, url in enumerate(urls, 1):
if total_urls > 1:
print(f'\n{Aranea.CYAN}{"=" * 60}{Aranea.WHITE}')
print(f'{Aranea.CYAN}Processing URL {idx} of {total_urls}{Aranea.WHITE}')
print(f'{Aranea.CYAN}{"=" * 60}{Aranea.WHITE}\n')
banner = f'''
URL :: {url}
Mode :: {mode}
Threads :: {threads}
'''
print(banner)
Aranea.run_on_url(url, mode, threads, headers, strict, mainonly, continuous, output, auto, html_output, no_log)