diff --git a/lib/wpscan/modules/web_site.rb b/lib/wpscan/modules/web_site.rb
index 3b53d484..83411c39 100644
--- a/lib/wpscan/modules/web_site.rb
+++ b/lib/wpscan/modules/web_site.rb
@@ -126,4 +126,16 @@ module WebSite
     homepage_body = Browser.instance.get(@uri.to_s).body
     homepage_body[%r{}, 1]
   end
+
+  # Checks if a robots.txt file exists
+  def has_robots?
+    Browser.instance.get(robots_url).code == 200
+  end
+
+  # Gets a robots.txt URL
+  def robots_url
+    robots = @uri.clone
+    robots.path = '/robots.txt'
+    robots.to_s
+  end
 end
diff --git a/spec/lib/wpscan/modules/web_site_spec.rb b/spec/lib/wpscan/modules/web_site_spec.rb
index 492207a3..0171fd6c 100644
--- a/spec/lib/wpscan/modules/web_site_spec.rb
+++ b/spec/lib/wpscan/modules/web_site_spec.rb
@@ -178,4 +178,22 @@ shared_examples_for 'WebSite' do
       web_site.rss_url.should === 'http://lamp-wp/wordpress-3.5/?feed=rss2'
     end
   end
-end
+
+  describe '#robots_url' do
+    it 'should return the correct url' do
+      web_site.robots_url.should === 'http://example.localhost/robots.txt'
+    end
+  end
+
+  describe '#has_robots?' do
+    it 'should return true' do
+      stub_request(:get, web_site.robots_url).to_return(status: 200)
+      web_site.has_robots?.should be_true
+    end
+
+    it 'should return false' do
+      stub_request(:get, web_site.robots_url).to_return(status: 404)
+      web_site.has_robots?.should be_false
+    end
+  end
+end
diff --git a/wpscan.rb b/wpscan.rb
index a8fcbcc8..aa7f1c9b 100755
--- a/wpscan.rb
+++ b/wpscan.rb
@@ -151,6 +151,10 @@ begin
       puts
     end
 
+  if wp_target.has_robots?
+    puts green('[+]') + " robots.txt available under '#{wp_target.robots_url}'"
+  end
+
   if wp_target.has_readme?
     puts red('[!]') + " The WordPress '#{wp_target.readme_url}' file exists"
  end