1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
|
description = [[
It looks for places where attacker-controlled information in the DOM may be used
to affect JavaScript execution in certain ways. The attack is explained here:
http://www.webappsec.org/projects/articles/071105.shtml
]]

---
-- @usage nmap -p80 --script http-dombased-xss.nse <target>
--
-- DOM-based XSS vulnerabilities occur in client-side JavaScript, and this
-- script tries to detect them by matching a set of known-dangerous patterns.
-- Please note that the script may generate some false positives; don't take
-- everything in the output as a vulnerability without reviewing it first.
--
-- Most of the patterns used to determine the vulnerable code have been taken
-- from this page: https://code.google.com/p/domxsswiki/wiki/LocationSources
--
-- @args http-dombased-xss.singlepages The pages to test. For example,
-- {/index.php, /profile.php}. Default: nil (crawler mode on)
--
-- @output
-- PORT STATE SERVICE REASON
-- 80/tcp open http syn-ack
-- | http-dombased-xss:
-- | Spidering limited to: maxdepth=3; maxpagecount=20; withinhost=some-very-random-page.com
-- | Found the following indications of potential DOM based XSS:
-- |
-- | Source: document.write("<OPTION value=1>"+document.location.href.substring(document.location.href.indexOf("default=")
-- | Pages: http://some-very-random-page.com:80/, http://some-very-random-page.com/foo.html
-- |
-- | Source: document.write(document.URL.substring(pos,document.URL.length)
-- |_ Pages: http://some-very-random-page.com/foo.html
--
-- @see http-stored-xss.nse
-- @see http-phpself-xss.nse
-- @see http-xssed.nse
-- @see http-unsafe-output-escaping.nse
---

-- Standard NSE script metadata fields.
categories = {"intrusive", "exploit", "vuln"}
author = "George Chatzisofroniou"
license = "Same as Nmap--See https://nmap.org/book/man-legal.html"
local http = require "http"
local shortport = require "shortport"
local stdnse = require "stdnse"
local table = require "table"
local string = require "string"
local httpspider = require "httpspider"
-- Lua patterns matching JavaScript "sink" calls whose arguments can reach the
-- DOM or be executed as code. The outer capture is the whole call expression;
-- the lazy `.-` inner capture grabs the argument text up to the first `)`.
-- NOTE(review): the lazy match stops at the first closing parenthesis, so
-- nested calls are captured only up to that point — acceptable here, since the
-- captured text is only scanned for source substrings, not parsed.
JS_FUNC_PATTERNS = {
  '(document%.write%s*%((.-)%))',
  '(document%.writeln%s*%((.-)%))',
  '(document%.execCommand%s*%((.-)%))',
  '(document%.open%s*%((.-)%))',
  '(window%.open%s*%((.-)%))',
  '(eval%s*%((.-)%))',
  '(window%.execScript%s*%((.-)%))',
}
-- Attacker-influenced "source" properties. A sink call (see JS_FUNC_PATTERNS)
-- whose argument text contains one of these is reported. These are unanchored
-- Lua patterns, so 'location' also matches e.g. window.location and
-- document.location, and 'document%.URL' also matches document.URLUnencoded.
JS_CALLS_PATTERNS = {
  'document%.URL',
  'document%.documentURI',
  'document%.URLUnencoded',
  'document%.baseURI',
  'document%.referrer',
  'location',
}
portrule = shortport.port_or_service( {80, 443}, {"http", "https"}, "tcp", "open")
--- Main script action: fetch pages (via the crawler, or the user-supplied
-- singlepages list), scan each response body for JavaScript sink calls whose
-- arguments contain an attacker-controlled source, and report the findings
-- grouped by the offending source snippet.
-- @param host Host table as provided by Nmap.
-- @param port Port table as provided by Nmap.
-- @return Formatted script output, or nil if the crawler could not be created.
action = function(host, port)

  local singlepages = stdnse.get_script_args("http-dombased-xss.singlepages")

  -- Maps a suspicious JS call snippet to the list of pages it was seen on.
  local domxss = {}

  local crawler = httpspider.Crawler:new( host, port, '/', { scriptname = SCRIPT_NAME, withinhost = 1 } )

  if (not(crawler)) then
    return
  end

  crawler:set_timeout(10000)

  local index, k, target, response, path
  while (true) do

    if singlepages then
      -- Explicit page list supplied by the user: walk it with next().
      k, target = next(singlepages, index)
      if (k == nil) then
        break
      end
      response = http.get(host, port, target)
      path = target
    else
      local status, r = crawler:crawl()
      -- if the crawler fails it can be due to a number of different reasons
      -- most of them are "legitimate" and should not be reason to abort
      if (not(status)) then
        if (r.err) then
          return stdnse.format_output(false, r.reason)
        else
          break
        end
      end
      response = r.response
      path = tostring(r.url)
    end

    -- Guard on the response as well as its body: the crawler can hand back a
    -- failed fetch, and a page without a body has nothing to scan.
    if response and response.body then
      for _, fp in ipairs(JS_FUNC_PATTERNS) do
        for i in string.gmatch(response.body, fp) do
          for _, cp in ipairs(JS_CALLS_PATTERNS) do
            if string.find(i, cp) then
              if not domxss[i] then
                domxss[i] = {path}
              else
                table.insert(domxss[i], ", " .. path)
              end
            end
          end
        end
      end
    end

    -- BUGFIX: advance the singlepages cursor unconditionally. Previously this
    -- ran only when the response had a body, so a bodiless response in
    -- singlepages mode re-fetched the same page in an infinite loop.
    if (index) then
      index = index + 1
    else
      index = 1
    end
  end

  -- No findings at all.
  if next(domxss) == nil then
    return "Couldn't find any DOM based XSS."
  end

  -- Each domxss value already carries ", " separators after the first entry,
  -- so a plain concat yields "page1, page2, ...".
  local results = {}
  for source, pages in pairs(domxss) do
    table.insert(results, { "\nSource: " .. source, "Pages: " .. table.concat(pages) })
  end

  table.insert(results, 1, "Found the following indications of potential DOM based XSS: ")
  results.name = crawler:getLimitations()

  return stdnse.format_output(true, results)
end
|