@@ -1,10 +1,12 @@
 # encoding: UTF-8
 
 require 'web_site/robots_txt'
+require 'web_site/humans_txt'
 require 'web_site/interesting_headers'
 
 class WebSite
   include WebSite::RobotsTxt
+  include WebSite::HumansTxt
   include WebSite::InterestingHeaders
 
   attr_reader :uri
39  lib/wpscan/web_site/humans_txt.rb  Normal file
@@ -0,0 +1,39 @@
+# encoding: UTF-8
+
+class WebSite
+  module HumansTxt
+
+    # Checks if a humans.txt file exists
+    # @return [ Boolean ]
+    def has_humans?
+      Browser.get(humans_url).code == 200
+    end
+
+    # Gets a humans.txt URL
+    # @return [ String ]
+    def humans_url
+      @uri.clone.merge('humans.txt').to_s
+    end
+
+    # Parse humans.txt
+    # @return [ Array ] URLs generated from humans.txt
+    def parse_humans_txt
+      return unless has_humans?
+
+      return_object = []
+      response = Browser.get(humans_url.to_s)
+      entries = response.body.split(/\n/)
+      if entries
+        entries.flatten!
+        entries.uniq!
+
+        entries.each do |d|
+          temp = d.strip
+          return_object << temp.to_s
+        end
+      end
+      return_object
+    end
+
+  end
+end
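The new module gives WebSite three helpers: has_humans? (a GET that treats a 200 as presence), humans_url (resolved against the target URI), and parse_humans_txt (one stripped, de-duplicated string per line of the file). A minimal usage sketch, assuming the usual wpscan environment (Browser configured, WebSite and the mixin loaded) and a purely illustrative target URL:

  # Illustrative only, not part of the diff; assumes Browser is already configured.
  site = WebSite.new('http://example.localhost/')

  if site.has_humans?
    puts "humans.txt found at #{site.humans_url}"

    # parse_humans_txt returns one stripped entry per line of humans.txt
    site.parse_humans_txt.each do |entry|
      puts "  #{entry}"
    end
  end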
108  spec/shared_examples/web_site/humans_txt.rb  Normal file
@@ -0,0 +1,108 @@
+# encoding: UTF-8
+
+shared_examples 'WebSite::HumansTxt' do
+  let(:known_dirs) { WebSite::HumansTxt.known_dirs }
+
+  describe '#humans_url' do
+    it 'returns the correct url' do
+      expect(web_site.humans_url).to eql 'http://example.localhost/humans.txt'
+    end
+  end
+
+  describe '#has_humans?' do
+    it 'returns true' do
+      stub_request(:get, web_site.humans_url).to_return(status: 200)
+      expect(web_site.has_humans?).to be_truthy
+    end
+
+    it 'returns false' do
+      stub_request(:get, web_site.humans_url).to_return(status: 404)
+      expect(web_site.has_humans?).to be_falsey
+    end
+  end
+
+  describe '#parse_humans_txt' do
+
+    context 'installed in root' do
+      after :each do
+        stub_request_to_fixture(url: web_site.humans_url, fixture: @fixture)
+        humans = web_site.parse_humans_txt
+        expect(humans).to match_array @expected
+      end
+
+      it 'returns an empty Array (empty humans.txt)' do
+        @fixture  = fixtures_dir + '/humans_txt/empty_humans.txt'
+        @expected = []
+      end
+
+      it 'returns an empty Array (invalid humans.txt)' do
+        @fixture  = fixtures_dir + '/humans_txt/invalid_humans.txt'
+        @expected = []
+      end
+
+      it 'returns some urls and some strings' do
+        @fixture  = fixtures_dir + '/humans_txt/invalid_humans_2.txt'
+        @expected = %w(
+          /ÖÜ()=?
+          http://10.0.0.0/wp-includes/
+          http://example.localhost/asdf/
+          wooooza
+        )
+      end
+
+      it 'returns an Array of urls (valid humans.txt)' do
+        @fixture  = fixtures_dir + '/humans_txt/humans.txt'
+        @expected = %w(
+          http://example.localhost/wordpress/admin/
+          http://example.localhost/wordpress/wp-admin/
+          http://example.localhost/wordpress/secret/
+          http://example.localhost/Wordpress/wp-admin/
+          http://example.localhost/wp-admin/tralling-space/
+          http://example.localhost/asdf/
+        )
+      end
+
+      it 'removes duplicate entries from humans.txt test 1' do
+        @fixture  = fixtures_dir + '/humans_txt/humans_duplicate_1.txt'
+        @expected = %w(
+          http://example.localhost/wordpress/
+          http://example.localhost/wordpress/admin/
+          http://example.localhost/wordpress/wp-admin/
+          http://example.localhost/wordpress/secret/
+          http://example.localhost/Wordpress/wp-admin/
+          http://example.localhost/wp-admin/tralling-space/
+          http://example.localhost/asdf/
+        )
+      end
+
+      it 'removes duplicate entries from humans.txt test 2' do
+        @fixture  = fixtures_dir + '/humans_txt/humans_duplicate_2.txt'
+        @expected = nil
+      end
+    end
+
+    context 'installed in sub directory' do
+      it 'returns an Array of urls (valid humans.txt, WP installed in subdir)' do
+        web_site_sub = WebSite.new('http://example.localhost/wordpress/')
+        fixture  = fixtures_dir + '/humans_txt/humans.txt'
+        expected = %w(
+          http://example.localhost/wordpress/admin/
+          http://example.localhost/wordpress/secret/
+          http://example.localhost/Wordpress/wp-admin/
+          http://example.localhost/wp-admin/tralling-space/
+          http://example.localhost/asdf/
+        )
+        stub_request_to_fixture(url: web_site_sub.humans_url, fixture: fixture)
+        humans = web_site_sub.parse_humans_txt
+        expect(humans).to match_array expected
+      end
+    end
+  end
+
+  describe '#known_dirs' do
+    it 'does not contain duplicates' do
+      expect(known_dirs.flatten.uniq.length).to eq known_dirs.length
+    end
+  end
+
+end
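The shared example group expects a web_site subject plus the suite's stub_request_to_fixture and fixtures_dir helpers to be in scope. A sketch of how it would be pulled into a concrete spec is below; the surrounding describe block and the let binding are assumptions, not part of this diff:

  # Hypothetical host spec, assuming WebMock and the wpscan spec helpers are loaded.
  describe WebSite do
    let(:web_site) { WebSite.new('http://example.localhost/') }

    it_behaves_like 'WebSite::HumansTxt'
  end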
@@ -211,6 +211,14 @@ def main
       end
     end
 
+    if wp_target.has_humans?
+      puts info("humans.txt available under: #{wp_target.humans_url}")
+
+      wp_target.parse_humans_txt.each do |dir|
+        puts info("Interesting entry from humans.txt: #{dir}")
+      end
+    end
+
     if wp_target.has_full_path_disclosure?
       puts warning("Full Path Disclosure (FPD) in '#{wp_target.full_path_disclosure_url}': #{wp_target.full_path_disclosure_data}")
     end