WebSite::RobotsTxt

Protected Class Methods

known_dirs()

@return [ Array ] Default WordPress directories excluded from parse_robots_txt results

# File lib/wpscan/web_site/robots_txt.rb, line 53
def self.known_dirs
  %w{
    /
    /wp-admin/
    /wp-includes/
    /wp-content/
  }
end
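
The %w literal expands to an Array of plain path strings, equivalent to:

['/', '/wp-admin/', '/wp-includes/', '/wp-content/']

parse_robots_txt deletes these entries (and their subdirectory-prefixed variants) from the parsed robots.txt results, so only non-default paths are reported.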

Public Instance Methods

has_robots?()

Checks if a robots.txt file exists

@return [ Boolean ]

# File lib/wpscan/web_site/robots_txt.rb, line 7
def has_robots?
  Browser.get(robots_url).code == 200
end
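
A minimal usage sketch, assuming WPScan's environment (notably its Browser class) is loaded and that the including class sets @uri to the target's parsed URI. Target below is a hypothetical stand-in for that class:

require 'uri'

# Hypothetical wrapper: any class that mixes in the module and sets @uri
# gets has_robots?, robots_url and parse_robots_txt for that target.
class Target
  include WebSite::RobotsTxt

  def initialize(url)
    @uri = URI.parse(url) # read by robots_url and parse_robots_txt
  end
end

target = Target.new('http://example.com/')
puts 'robots.txt found' if target.has_robots? # GET request via Browser, true on HTTP 200
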
parse_robots_txt()

Parse robots.txt

@return [ Array ] URLs generated from robots.txt

# File lib/wpscan/web_site/robots_txt.rb, line 22
def parse_robots_txt
  return unless has_robots?

  return_object = []
  response = Browser.get(robots_url.to_s)
  body = response.body
  # Get all allow and disallow urls
  entries = body.scan(/^(?:dis)?allow:\s*(.*)$/i)
  if entries
    entries.flatten!
    entries.compact.sort!
    wordpress_path = @uri.path
    RobotsTxt.known_dirs.each do |d|
      entries.delete(d)
      # also delete when wordpress is installed in subdir
      dir_with_subdir = "#{wordpress_path}/#{d}".gsub(/\/+/, '/')
      entries.delete(dir_with_subdir)
    end

    entries.each do |d|
      temp = @uri.clone
      temp.path = d
      return_object << temp.to_s
    end
  end
  return_object
end
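
For illustration, suppose the target above serves the following robots.txt (hypothetical content):

User-agent: *
Disallow: /wp-admin/
Disallow: /secret-area/
Allow: /backups/

The /wp-admin/ entry matches one of RobotsTxt.known_dirs and is dropped; the remaining allow/disallow paths are rebuilt into absolute URLs on the target host:

target.parse_robots_txt
# => ["http://example.com/secret-area/", "http://example.com/backups/"]
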
robots_url()

Gets the robots.txt URL

@return [ String ]

# File lib/wpscan/web_site/robots_txt.rb, line 13
def robots_url
  temp = @uri.clone
  temp.path = '/robots.txt'
  temp.to_s
end
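
A short sketch of the path handling, using Ruby's standard URI class for illustration (the actual @uri object is whatever the including class assigned):

require 'uri'

uri  = URI.parse('http://example.com/blog/')
temp = uri.clone
temp.path = '/robots.txt'
temp.to_s # => "http://example.com/robots.txt"

Because the path is replaced rather than appended, the returned URL always points at the web root's robots.txt, even when WordPress is installed in a subdirectory such as /blog/.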
