Class: Arachni::Checks::DirectoryListing

Inherits:
Arachni::Check::Base
Defined in:
components/checks/passive/directory_listing.rb

Overview

Tries to force directory listings.

Can't take credit for this one, it's Michal Zalewski's (lcamtuf's) method from Skipfish.

Author:

Tasos "Zapotek" Laskos

Constant Summary

DIFF_THRESHOLD = 0.75

The compared pages must be at least 75% different.

Constants included from Arachni::Check::Auditor

Arachni::Check::Auditor::DOM_ELEMENTS_WITH_INPUTS, Arachni::Check::Auditor::ELEMENTS_WITH_INPUTS, Arachni::Check::Auditor::FILE_SIGNATURES, Arachni::Check::Auditor::FILE_SIGNATURES_PER_PLATFORM, Arachni::Check::Auditor::Format, Arachni::Check::Auditor::SOURCE_CODE_SIGNATURES_PER_PLATFORM

Constants included from Arachni

BANNER, Arachni::Cookie, Form, Header, JSON, Link, LinkTemplate, NestedCookie, Severity, UIForm, UIInput, VERSION, WEBSITE, WIKI, XML

Instance Attribute Summary

Attributes included from Arachni::Check::Auditor

#framework, #page

Class Method Summary

Instance Method Summary

Methods inherited from Arachni::Check::Base

#browser_cluster, #clean_up, elements, exempt_platforms, has_exempt_platforms?, has_platforms?, #initialize, platforms, #plugins, prefer, #preferred, preferred, #prepare, #session, supports_platforms?

Methods included from Arachni::Check::Auditor

#audit, #audit_differential, #audit_signature, #audit_timeout, #audited, #audited?, #buffered_audit, #each_candidate_dom_element, #each_candidate_element, has_timeout_candidates?, #http, #initialize, #log, #log_issue, #log_remote_file, #log_remote_file_if_exists, #match_and_log, #max_issues, #preferred, reset, #skip?, timeout_audit_run, #trace_taint, #with_browser, #with_browser_cluster

Methods inherited from Arachni::Component::Base

author, description, fullname, #shortname, shortname, shortname=, version

Methods included from Arachni::Component::Output

#depersonalize_output, #depersonalize_output?, #intercept_print_message

Methods included from UI::Output

#caller_location, #debug?, #debug_level, #debug_level_1?, #debug_level_2?, #debug_level_3?, #debug_level_4?, #debug_off, #debug_on, #disable_only_positives, #error_buffer, #error_log_fd, #error_logfile, #has_error_log?, #included, #log_error, #mute, #muted?, #only_positives, #only_positives?, #print_bad, #print_debug, #print_debug_backtrace, #print_debug_exception, #print_debug_level_1, #print_debug_level_2, #print_debug_level_3, #print_debug_level_4, #print_error, #print_error_backtrace, #print_exception, #print_info, #print_line, #print_ok, #print_status, #print_verbose, #reroute_to_file, #reroute_to_file?, reset_output_options, #set_error_logfile, #unmute, #verbose?, #verbose_off, #verbose_on

Methods included from Arachni::Component::Utilities

#read_file

Methods included from Utilities

#available_port, available_port_mutex, #bytes_to_kilobytes, #bytes_to_megabytes, #caller_name, #caller_path, #cookie_decode, #cookie_encode, #cookies_from_file, #cookies_from_parser, #cookies_from_response, #exception_jail, #exclude_path?, #follow_protocol?, #form_decode, #form_encode, #forms_from_parser, #forms_from_response, #full_and_absolute_url?, #generate_token, #get_path, #hms_to_seconds, #html_decode, #html_encode, #include_path?, #links_from_parser, #links_from_response, #normalize_url, #page_from_response, #page_from_url, #parse_set_cookie, #path_in_domain?, #path_too_deep?, #port_available?, #rand_port, #random_seed, #redundant_path?, #regexp_array_match, #remove_constants, #request_parse_body, #seconds_to_hms, #skip_page?, #skip_path?, #skip_resource?, #skip_response?, #to_absolute, #uri_decode, #uri_encode, #uri_parse, #uri_parse_query, #uri_parser, #uri_rewrite

Methods included from Arachni

URI, collect_young_objects, #get_long_win32_filename, jruby?, null_device, profile?, windows?

Constructor Details

This class inherits a constructor from Arachni::Check::Base

Class Method Details

.dirs ⇒ Object



# File 'components/checks/passive/directory_listing.rb', line 19

def self.dirs
    @dirs ||= [ "\\.#{random_seed}\\", "\\.\\", ".#{random_seed}/", "./" ]
end
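
The probe suffixes come in backslash and forward-slash variants, each in a
version carrying the scan's random seed (via Utilities#random_seed) and a plain
"dot" version. As a rough illustration, assuming a hypothetical random seed of
'abc123' (the real value differs per scan), the array would expand to:

# Hypothetical expansion, assuming random_seed == 'abc123':
Arachni::Checks::DirectoryListing.dirs
# => [ "\\.abc123\\", "\\.\\", ".abc123/", "./" ]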

.info ⇒ Object



# File 'components/checks/passive/directory_listing.rb', line 77

def self.info
    {
        name:             'Directory listing',
        description:      %q{Tries to force directory listings.},
        elements:         [ Element::Server ],
        author:           'Tasos "Zapotek" Laskos <[email protected]>',
        version:          '0.1.7',
        exempt_platforms: Arachni::Platform::Manager::FRAMEWORKS,

        issue:       {
            name:        %q{Directory listing},
            description: %q{
Web servers permitting directory listing are typically used for sharing files.

Directory listing allows the client to view a simple list of all the files and
folders hosted on the web server. The client is then able to traverse each
directory and download the files.

Cyber-criminals will utilise the presence of directory listing to discover
sensitive files, download protected content, or even just learn how the web
application is structured.

Arachni discovered that the affected page permits directory listing.
},
            references: {
                'WASC' => 'http://projects.webappsec.org/w/page/13246922/Directory%20Indexing'
            },
            tags:        %w(path directory listing index),
            cwe:         548,
            severity:    Severity::LOW,
            remedy_guidance: %q{
Unless the web server is being utilised to share static and non-sensitive files,
enabling directory listing is considered a poor security practice.

Directory listing can typically be disabled with a simple configuration change
on the server; the exact steps will differ depending on the type of server
being used (IIS, Apache, etc.).
If directory listing is required, and permitted, then steps should be taken to
ensure that the risk of such a configuration is reduced.

These can include:

1. Requiring authentication to access affected pages.
2. Adding the affected path to the `robots.txt` file to prevent the directory
   contents being searchable via search engines.
3. Ensuring that sensitive files are not stored within the web or document root.
4. Removing any files that are not required for the application to function.
}
        }
    }
end
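
As a quick orientation, the metadata above can be read directly from the class,
e.g. in an interactive session with the Arachni framework and its checks already
loaded (loading mechanics assumed, keys as shown in the source):

info = Arachni::Checks::DirectoryListing.info
info[:name]              # 'Directory listing'
info[:issue][:cwe]       # 548
info[:issue][:severity]  # Severity::LOW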

Instance Method Details

#check_and_log(path) ⇒ Object



# File 'components/checks/passive/directory_listing.rb', line 48

def check_and_log( path )
    audited( path )

    # @harvested holds the responses in the order the requests were queued
    # by #run: [0] the original page URL, [1..4] the probe paths built from
    # .dirs and [5] the bare directory path.
    #
    # A 403 Forbidden means we successfully built a path which would force
    # a directory listing *but* the web server denied access, so bail out.
    @harvested.each { |res| return if !res.ok? || res.code == 403 }

    if !File.basename( @harvested[0].url, '?*' ).empty? &&
        same_page?( @harvested[0], @harvested[5] )
        return
    end

    if same_page?( @harvested[1], @harvested[0] )  ||
        same_page?( @harvested[1], @harvested[2] ) ||
        same_page?( @harvested[3], @harvested[0] ) ||
        same_page?( @harvested[3], @harvested[4] ) ||
        @harvested[5].code != 200 || @harvested[5].body.empty?
        return
    end

    log vector: Element::Server.new( @harvested[5].url ), response: @harvested[5]
end

#done_harvesting? ⇒ Boolean

Returns:

  • (Boolean)


# File 'components/checks/passive/directory_listing.rb', line 42

def done_harvesting?
    return false if @harvested.size != 6
    @harvested.each { |res| return false if !res }
    true
end

#run ⇒ Object



# File 'components/checks/passive/directory_listing.rb', line 23

def run
    return if page.code != 200
    path = get_path( page.url )

    parsed_path = uri_parse( path ).path
    return if parsed_path == '/' || audited?( parsed_path )

    @harvested = []

    dirs = [ page.url ] | self.class.dirs.map { |dir| path + dir } | [ path ]
    dirs.each_with_index do |url, i|
        http.get( url ) do |res|
            next if !res
            @harvested[i] = res
            check_and_log( path ) if done_harvesting?
        end
    end
end
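
Since | is an array union that preserves order, @harvested ends up indexed as:
[0] the original page URL, [1] through [4] the probe paths built from .dirs, and
[5] the bare directory path, which is the layout #check_and_log relies on. A
rough sketch of the queued requests, assuming a page at
http://example.com/blog/post.php and a hypothetical random seed of 'abc123'
(so get_path returns http://example.com/blog/):

# Hypothetical request order; the real seed and URLs depend on the scan:
#
#   @harvested[0] <- http://example.com/blog/post.php
#   @harvested[1] <- http://example.com/blog/\.abc123\
#   @harvested[2] <- http://example.com/blog/\.\
#   @harvested[3] <- http://example.com/blog/.abc123/
#   @harvested[4] <- http://example.com/blog/./
#   @harvested[5] <- http://example.com/blog/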

#same_page?(res1, res2) ⇒ Boolean

Returns:

  • (Boolean)


# File 'components/checks/passive/directory_listing.rb', line 73

def same_page?( res1, res2 )
    res1.code == res2.code && res1.body.diff_ratio( res2.body ) <= DIFF_THRESHOLD
end
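
Note the direction of the comparison: two responses count as the same page when
their status codes match and their bodies differ by no more than DIFF_THRESHOLD
(75%), and #check_and_log bails out whenever two probe responses look like the
same page. A minimal sketch, assuming Arachni's String#diff_ratio returns 0.0
for identical bodies and values approaching 1.0 as they diverge:

# Illustrative only; the exact diff_ratio values are an assumption.
'foo bar'.diff_ratio( 'foo bar' )  # 0.0 -> same_page? (codes equal) => true
'foo bar'.diff_ratio( 'baz qux' )  # close to 1.0 -> same_page? => false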