-
Notifications
You must be signed in to change notification settings - Fork 3
/
BigBrowser.py
executable file
·159 lines (135 loc) · 4.91 KB
/
BigBrowser.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
#!/usr/bin/env python2
import sys
import os
import threading
import argparse
import shutil
import subprocess
from bs4 import BeautifulSoup
# Shared counters for the screenshot worker threads.
PROGRESS = 0  # number of screenshots attempted so far (updated in take_screenshots)
TOTAL = 0  # total number of URLs to screenshot (set once in main)
def extract_nmap_xml(filename):
    """Parse an nmap XML report and return the list of web-application URLs.

    For every open port whose service name contains "http", builds a
    "http://host:port" or "https://host:port" URL.  The https scheme is
    chosen when the service is reported with an SSL tunnel or the service
    name itself contains "https".

    :param filename: path to the nmap XML output file
    :return: list of URL strings
    """
    # `with` guarantees the XML file is closed (the original leaked the handle).
    with open(filename, 'r') as xml_file:
        soup = BeautifulSoup(xml_file, 'lxml')
    urls = []
    for host in soup.find_all('host'):
        # Default to the raw address; prefer the first resolved hostname.
        hostname = host.address["addr"]
        hnames = host.find_all('hostname')
        if hnames:
            hostname = hnames[0]["name"]
            # Strip the trailing dot of a fully-qualified DNS name.
            # endswith() is also safe on an empty string, unlike indexing.
            if hostname.endswith("."):
                hostname = hostname[:-1]
        if not host.ports:
            continue
        for port in host.ports.find_all('port'):
            if port.state["state"] != "open":
                continue
            service = port.find("service")
            if service and "http" in service["name"]:
                # Parenthesized for clarity: an SSL tunnel or an "https"
                # service name both mean the scheme is https.
                if (service.has_attr('tunnel') and service["tunnel"] == "ssl") \
                        or "https" in service["name"]:
                    url = "https://"
                else:
                    url = "http://"
                url += hostname + ":" + port["portid"]
                urls.append(url)
                print("Adding " + url)
    return urls
def take_screenshots(url_set, nb_threads):
    """Screenshot every URL in url_set with phantomjs into pics/.

    Worker function run by several threads; each thread receives its own
    slice of the URL list.  Updates the global PROGRESS counter for display.

    NOTE(review): `PROGRESS += 1` is not atomic across threads, so the
    displayed counter can occasionally skip or repeat a value -- cosmetic
    only, no lock added to keep behavior identical.

    :param url_set: iterable of "scheme://host:port" URLs to screenshot
    :param nb_threads: total worker count (kept for interface compatibility;
                       not used inside the worker itself)
    """
    global PROGRESS
    for url in url_set:
        try:
            # URLs look like "http(s)://host:port" -- recover host and port.
            hostname = url.split("://")[1].split(":")[0]
            port = url.split("://")[1].split(":")[1]
            sc_file = 'pics/' + hostname + "-" + port + ".png"
            PROGRESS += 1
            print("[" + str(PROGRESS) + "/" + str(TOTAL) + "] Downloading: " + url + " > " + sc_file)
            # Silence phantomjs output; the context manager guarantees the
            # devnull handle is closed even if subprocess.call raises
            # (the original leaked it on the exception path).
            with open(os.devnull, 'w') as devnull:
                subprocess.call(['phantomjs', '--ssl-protocol=any',
                                 '--ignore-ssl-errors=true', '../sc.js',
                                 url, sc_file],
                                stdout=devnull, stderr=devnull)
        except Exception as exc:
            # Best-effort: one failed screenshot must not kill the worker.
            print("Screenshot exception : " + str(exc))
def generate_report(urls, nb_threads=5, report_name="report.html"):
    """Write the HTML gallery report, then screenshot every URL.

    Creates pics/ in the current directory, writes report_name with a
    4-column table of screenshot thumbnails (each linking to its URL), and
    finally screenshots all URLs using nb_threads worker threads.  Expects
    the current working directory to be the dedicated report folder
    (main() chdirs there before calling).

    :param urls: list of "scheme://host:port" URL strings
    :param nb_threads: number of screenshot worker threads
    :param report_name: file name of the generated HTML report
    """
    os.makedirs("pics/")
    # `with` guarantees the report is flushed/closed before workers start.
    with open(report_name, "w") as html_file:
        html_file.write('''
<html>
<head>
</head>
<body style="background: black">
<table>
''')
        col = 0
        for url in urls:
            hostname = url.split("://")[1].split(":")[0]
            port = url.split("://")[1].split(":")[1]
            sc_file = 'pics/' + hostname + "-" + port + ".png"
            if col == 0:
                html_file.write('<tr>')
            html_file.write('<td style="text-align:center"><div style="overflow:hidden"><a target="_blank" href="'
                            + url + '"><img style="height:60%;width:80%;background:white;" src="' + sc_file +
                            '"/></a><strong><a target="_blank" href="' + url + '" style="color: white">' + url + '</a></strong></div></td>')
            if col == 3:
                html_file.write('</tr>')
            col = (col + 1) % 4
        # Close a partially filled last row (the original left it unclosed).
        if col != 0:
            html_file.write('</tr>')
        html_file.write('''
</table>
</body>
</html>
''')
    # Split the URL list across the workers.  `//` keeps the division
    # integral on both Python 2 and 3, and max(1, ...) avoids giving every
    # worker but the last an empty slice when len(urls) < nb_threads.
    thread_load = max(1, len(urls) // nb_threads)
    threads = []
    for i in range(nb_threads):
        if i == nb_threads - 1:
            chunk = urls[i * thread_load:]  # last worker takes the remainder
        else:
            chunk = urls[i * thread_load:(i + 1) * thread_load]
        threads.append(threading.Thread(target=take_screenshots,
                                        args=(chunk, nb_threads)))
    for thread in threads:
        thread.start()
    for thread in threads:
        thread.join()
    print("[*] Report generated: file://" + os.path.join(os.getcwd(), report_name))
def main():
    """CLI entry point: parse arguments, validate the nmap XML, build the report."""
    parser = argparse.ArgumentParser(description="Generates an HTML report with screenshots of all Web applications from an XML nmap scan.")
    parser.add_argument("file", help="Nmap XML output")
    parser.add_argument("-t", "--threads", help="Number of threads")
    parser.add_argument("-o", "--output", help="Name of the generated report")
    args = parser.parse_args()

    # Open nmap file and extract Web applications URLs
    if not os.path.exists(args.file):
        print("File not found: " + args.file)
        # A missing input file is an error: exit nonzero (the original
        # exited 0 here) and use sys.exit, which is always available.
        sys.exit(1)
    # Cheap sanity check before handing the file to BeautifulSoup: the
    # for/else only triggers the error when no line matched.
    with open(args.file, "r") as f:
        for line in f:
            if "<!DOCTYPE nmaprun>" in line:
                break
        else:
            print("Not a valid nmap XML")
            sys.exit(1)
    urls = extract_nmap_xml(args.file)
    print("Web applications: ")
    print("=" * 50)
    for url in urls:
        print(url)
    print("=" * 50)
    global TOTAL
    TOTAL = len(urls)

    # Generate the report in its own folder.
    report_name = "bigbrowser_report"
    if args.output:
        report_name = args.output
    if os.path.exists(report_name):
        # raw_input is the Python 2 prompt/read; input() would eval here.
        if raw_input("Folder exists (" + report_name + ") overwrite ?(y/n)") == "y":
            shutil.rmtree(report_name)
        else:
            sys.exit(1)
    os.makedirs(report_name)
    os.chdir(report_name)
    nb_threads = int(args.threads) if args.threads else 4
    generate_report(urls, nb_threads, report_name=report_name + ".html")


if __name__ == "__main__":
    main()