summaryrefslogtreecommitdiffstats
path: root/scripts/http-referer-checker.nse
blob: 048fdd6b29b3e41ed388c52eef031cd152fc7457 (plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
-- User-visible summary shown by --script-help; the long-string content is
-- runtime output and is therefore left verbatim.
description = [[
Informs about cross-domain include of scripts. Websites that include
external javascript scripts are delegating part of their security to
third-party entities.
]]

---
-- @usage nmap -p80 --script http-referer-checker.nse <host>
--
-- This script informs about cross-domain inclusion of scripts by
-- finding src attributes that point to a different domain.
--
-- @output
-- PORT   STATE SERVICE REASON
-- 80/tcp open  http    syn-ack
-- | http-referer-checker:
-- | Spidering limited to: maxdepth=3; maxpagecount=20;
-- |   http://css3-mediaqueries-js.googlecode.com/svn/trunk/css3-mediaqueries.js
-- |_  http://ajax.googleapis.com/ajax/libs/jquery/1/jquery.min.js?ver=3.4.2
--
---

-- NSE script metadata: non-intrusive discovery script, safe to run by default
-- categories such as -sC will not include it unless "default" is added.
categories = {"discovery", "safe"}
author = "George Chatzisofroniou"
license = "Same as Nmap--See https://nmap.org/book/man-legal.html"

local shortport = require "shortport"
local stdnse = require "stdnse"
local table = require "table"
local httpspider = require "httpspider"

portrule = shortport.port_or_service( {80, 443}, {"http", "https"}, "tcp", "open")

--- Crawls the target web server and reports javascript resources that are
-- hosted on a different host than the one being scanned.
-- @param host The host table as provided by the NSE engine.
-- @param port The port table as provided by the NSE engine.
-- @return A formatted list of cross-domain script URLs, a plain message when
--         none were found, or an error string if the crawler failed.
action = function(host, port)

  -- withinhost/withindomain are disabled (0) on purpose: we must follow
  -- links to foreign hosts to discover cross-domain script includes.
  local crawler = httpspider.Crawler:new(host, port, '/', { scriptname = SCRIPT_NAME,
    maxpagecount = 30,
    maxdepth = -1,
    withinhost = 0,
    withindomain = 0
  })

  -- Guard immediately after construction. The original code only checked
  -- for a nil crawler *after* dereferencing it (setting options and the
  -- timeout), so the check could never fire before a nil-index error.
  if not crawler then
    return
  end

  -- Scrape only pages on the target host, and skip js/css resources
  -- themselves -- we only need the pages that reference them.
  crawler.options.doscraping = function(url)
    if crawler:iswithinhost(url)
      and not crawler:isresource(url, "js")
      and not crawler:isresource(url, "css") then
      return true
    end
  end

  crawler:set_timeout(10000)

  -- Set keyed by URL string so repeated includes are reported only once.
  local scripts = {}

  while true do

    local status, r = crawler:crawl()
    if not status then
      if r.err then
        -- Hard crawler error: surface the reason to the user.
        return stdnse.format_output(false, r.reason)
      else
        -- Crawl limits reached or no more pages; stop collecting.
        break
      end
    end

    -- A javascript resource served from outside the scanned host is a
    -- cross-domain include worth reporting.
    if crawler:isresource(r.url, "js") and not crawler:iswithinhost(r.url) then
      scripts[tostring(r.url)] = true
    end

  end

  if next(scripts) == nil then
    return "Couldn't find any cross-domain scripts."
  end

  -- Flatten the set into a list for output formatting.
  local results = {}
  for s, _ in pairs(scripts) do
    table.insert(results, s)
  end

  -- Prepend the crawler limits (maxdepth/maxpagecount) as the list header.
  results.name = crawler:getLimitations()

  return stdnse.format_output(true, results)

end