@return [ Array ] The known WordPress directories, which are excluded from parse_robots_txt results
# File lib/wpscan/web_site/robots_txt.rb, line 53
def self.known_dirs
  %w{
    /
    /wp-admin/
    /wp-includes/
    /wp-content/
  }
end
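Since %w{ ... } builds an array of whitespace-separated strings, the method returns an Array as documented. A quick standalone check of that literal:

  dirs = %w{
    /
    /wp-admin/
    /wp-includes/
    /wp-content/
  }
  dirs # => ["/", "/wp-admin/", "/wp-includes/", "/wp-content/"]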
Checks if a robots.txt file exists

@return [ Boolean ]
# File lib/wpscan/web_site/robots_txt.rb, line 7
def has_robots?
  Browser.get(robots_url).code == 200
end
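A hedged, standalone sketch of the same check using only Ruby's standard library. The example.com URL is a placeholder, and WPScan's Browser class and robots_url helper (which presumably points at /robots.txt on the scanned host) are replaced by plain Net::HTTP here:

  require 'net/http'
  require 'uri'

  # Hypothetical target; substitute the site under test.
  site = URI.parse('http://example.com/')

  # Point at /robots.txt on the same host.
  robots = site.clone
  robots.path = '/robots.txt'

  # Equivalent of has_robots?: the file "exists" if the server answers 200.
  response = Net::HTTP.get_response(robots)
  has_robots = response.code == '200'

  puts "robots.txt present: #{has_robots}"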
Parse robots.txt

@return [ Array ] URLs generated from robots.txt
# File lib/wpscan/web_site/robots_txt.rb, line 22
def parse_robots_txt
  return unless has_robots?

  return_object = []
  response = Browser.get(robots_url.to_s)
  body = response.body

  # Get all allow and disallow urls
  entries = body.scan(/^(?:dis)?allow:\s*(.*)$/)
  if entries
    entries.flatten!
    entries.compact.sort!

    wordpress_path = @uri.path

    RobotsTxt.known_dirs.each do |d|
      entries.delete(d)

      # also delete when wordpress is installed in subdir
      dir_with_subdir = "#{wordpress_path}/#{d}".gsub(/\/+/, '/')
      entries.delete(dir_with_subdir)
    end

    entries.each do |d|
      temp = @uri.clone
      temp.path = d
      return_object << temp.to_s
    end
  end
  return_object
end
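To make the flow concrete, here is a self-contained sketch that applies the same extraction and filtering to an in-memory robots.txt. The sample body and the example.com base URL are invented for illustration; the real method fetches the body with Browser.get and takes the base from @uri. Note the /i flag is added in this sketch so capitalized 'Disallow:' lines match:

  require 'uri'

  # Sample input standing in for the fetched robots.txt body (illustrative only).
  body = <<~ROBOTS
    User-agent: *
    Disallow: /wp-admin/
    Disallow: /secret-dir/
    Allow: /wp-admin/admin-ajax.php
  ROBOTS

  base_uri       = URI.parse('http://example.com/')   # stand-in for @uri
  known_dirs     = ['/', '/wp-admin/', '/wp-includes/', '/wp-content/']
  wordpress_path = base_uri.path

  # Capture the path of every allow/disallow line, as parse_robots_txt does.
  entries = body.scan(/^(?:dis)?allow:\s*(.*)$/i).flatten.compact.sort

  # Drop well-known WordPress directories, including subdir-install variants.
  known_dirs.each do |d|
    entries.delete(d)
    entries.delete("#{wordpress_path}/#{d}".gsub(/\/+/, '/'))
  end

  # Turn the remaining paths into absolute URLs on the scanned host.
  urls = entries.map do |d|
    temp = base_uri.clone
    temp.path = d
    temp.to_s
  end

  puts urls
  # => http://example.com/secret-dir/
  #    http://example.com/wp-admin/admin-ajax.php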