diff --git a/lib/wpscan/web_site/robots_txt.rb b/lib/wpscan/web_site/robots_txt.rb
index dfcf22e4..2e928152 100644
--- a/lib/wpscan/web_site/robots_txt.rb
+++ b/lib/wpscan/web_site/robots_txt.rb
@@ -28,6 +28,7 @@ class WebSite
     if entries
       entries.flatten!
       entries.compact.sort!
+      entries.uniq!
       wordpress_path = @uri.path
       RobotsTxt.known_dirs.each do |d|
         entries.delete(d)
diff --git a/spec/samples/wpscan/web_site/robots_txt/robots_duplicate_1.txt b/spec/samples/wpscan/web_site/robots_txt/robots_duplicate_1.txt
new file mode 100644
index 00000000..fe56eaf4
--- /dev/null
+++ b/spec/samples/wpscan/web_site/robots_txt/robots_duplicate_1.txt
@@ -0,0 +1,17 @@
+User-agent: *
+Disallow: /wp-admin/
+Disallow: /wp-admin/
+Disallow: /wp-admin/
+Disallow: /wp-admin/
+Disallow: /wp-includes/
+Disallow: /wordpress/admin/
+Disallow: /wordpress/wp-admin/
+Disallow: /wordpress/secret/
+Disallow: /wordpress/secret/
+Disallow: /wordpress/
+Disallow: /wordpress/secret/
+Disallow: /Wordpress/wp-admin/
+Disallow: /wp-admin/tralling-space/
+Allow: /asdf/
+
+Sitemap: http://10.0.0.0/sitemap.xml.gz
diff --git a/spec/samples/wpscan/web_site/robots_txt/robots_duplicate_2.txt b/spec/samples/wpscan/web_site/robots_txt/robots_duplicate_2.txt
new file mode 100644
index 00000000..91f19bfc
--- /dev/null
+++ b/spec/samples/wpscan/web_site/robots_txt/robots_duplicate_2.txt
@@ -0,0 +1,9 @@
+User-agent: *
+Disallow: /wp-admin/
+Disallow: /wp-admin/
+Disallow: /wp-admin/
+Disallow: /wp-admin/
+Disallow: /wp-admin/
+Disallow: /wp-admin/
+
+Sitemap: http://10.0.0.0/sitemap.xml.gz
diff --git a/spec/shared_examples/web_site/robots_txt.rb b/spec/shared_examples/web_site/robots_txt.rb
index 6dfce44e..252762ab 100644
--- a/spec/shared_examples/web_site/robots_txt.rb
+++ b/spec/shared_examples/web_site/robots_txt.rb
@@ -61,6 +61,24 @@ shared_examples 'WebSite::RobotsTxt' do
           http://example.localhost/asdf/
         )
       end
+
+      it 'removes duplicate entries from robots.txt test 1' do
+        @fixture = fixtures_dir + '/robots_txt/robots_duplicate_1.txt'
+        @expected = %w(
+          http://example.localhost/wordpress/
+          http://example.localhost/wordpress/admin/
+          http://example.localhost/wordpress/wp-admin/
+          http://example.localhost/wordpress/secret/
+          http://example.localhost/Wordpress/wp-admin/
+          http://example.localhost/wp-admin/tralling-space/
+          http://example.localhost/asdf/
+        )
+      end
+
+      it 'removes duplicate entries from robots.txt test 2' do
+        @fixture = fixtures_dir + '/robots_txt/robots_duplicate_2.txt'
+        @expected = nil
+      end
     end
 
     context 'installed in sub directory' do