@@ -1,21 +1,21 @@
-git/
-bundle/
-.idea/
-.yardoc/
-cache/
-coverage/
-spec/
-dev/
 .*
-**/*.md
+bin/
+dev/
+spec/
 *.md
 Dockerfile
 
+## TEMP
+.idea/
+.yardoc/
+bundle/
+cache/
+coverage/
+git/
+**/*.md
 **/*.orig
 *.orig
 CREDITS
 data.zip
 DISCLAIMER.txt
 example.conf.json
-bin/
-log.txt
.gitignore (vendored, 31 changes)
@@ -1,16 +1,21 @@
+# WPScan (If not using ~/.wpscan/)
+cache/
+data/
+log.txt
+output.txt
+
+# WPScan (Deployment)
+debug.log
+rspec_results.html
+wordlist.txt
+
+# OS/IDE Rubbish
+coverage/
+.yardoc/
+.idea/
+*.sublime-*
+.*.swp
 .ash_history
-cache
-coverage
 .bundle
 .DS_Store
 .DS_Store?
-*.sublime-*
-.idea
-.*.swp
-log.txt
-.yardoc
-debug.log
-wordlist.txt
-rspec_results.html
-data/
-vendor/
@@ -24,7 +24,7 @@ before_install:
 - "gem regenerate_binstubs"
 - "bundle --version"
 before_script:
-- "unzip -o $TRAVIS_BUILD_DIR/data.zip -d $TRAVIS_BUILD_DIR"
+- "unzip -o $TRAVIS_BUILD_DIR/data.zip -d $HOME/.wpscan/"
 script:
 - "bundle exec rspec"
 notifications:
Dockerfile (20 changes)
@@ -1,29 +1,37 @@
 FROM ruby:2.5-alpine
-MAINTAINER WPScan Team <team@wpscan.org>
+LABEL maintainer="WPScan Team <team@wpscan.org>"
 
 ARG BUNDLER_ARGS="--jobs=8 --without test"
 
+# Add a new user
 RUN adduser -h /wpscan -g WPScan -D wpscan
 
+# Setup gems
 RUN echo "gem: --no-ri --no-rdoc" > /etc/gemrc
 
 COPY Gemfile /wpscan
 COPY Gemfile.lock /wpscan
 
-# runtime dependencies
+# Runtime dependencies
 RUN apk add --no-cache libcurl procps && \
 # build dependencies
 apk add --no-cache --virtual build-deps alpine-sdk ruby-dev libffi-dev zlib-dev && \
 bundle install --system --gemfile=/wpscan/Gemfile $BUNDLER_ARGS && \
 apk del --no-cache build-deps
 
+# Copy over data & set permissions
 COPY . /wpscan
 RUN chown -R wpscan:wpscan /wpscan
 
-USER wpscan
-
-RUN /wpscan/wpscan.rb --update --verbose --no-color
-
+# Switch directory
 WORKDIR /wpscan
 
+# Switch users
+USER wpscan
+
+# Update WPScan
+RUN /wpscan/wpscan.rb --update --verbose --no-color
+
+# Run WPScan
 ENTRYPOINT ["/wpscan/wpscan.rb"]
 CMD ["--help"]
Gemfile (9 changes)
@@ -1,11 +1,12 @@
 source 'https://rubygems.org'
 
-gem 'typhoeus', '>=1.1.2'
-gem 'nokogiri', '>=1.7.0.1'
 gem 'addressable', '>=2.5.0'
-gem 'yajl-ruby', '>=1.3.0' # Better JSON parser regarding memory usage
-gem 'terminal-table', '>=1.6.0'
+gem 'nokogiri', '>=1.7.0.1'
 gem 'ruby-progressbar', '>=1.8.1'
+gem 'rubyzip', '>=1.2.1'
+gem 'terminal-table', '>=1.6.0'
+gem 'typhoeus', '>=1.1.2'
+gem 'yajl-ruby', '>=1.3.0' # Better JSON parser regarding memory usage
 
 group :test do
 gem 'webmock', '>=2.3.2'
@@ -33,6 +33,7 @@ GEM
 rspec-support (~> 3.7.0)
 rspec-support (3.7.1)
 ruby-progressbar (1.9.0)
+rubyzip (1.2.1)
 safe_yaml (1.0.4)
 simplecov (0.16.1)
 docile (~> 1.1)
@@ -59,6 +60,7 @@ DEPENDENCIES
 rspec (>= 3.5.0)
 rspec-its (>= 1.2.0)
 ruby-progressbar (>= 1.8.1)
+rubyzip (>= 1.2.1)
 simplecov (>= 0.13.0)
 terminal-table (>= 1.6.0)
 typhoeus (>= 1.1.2)
data/.gitignore (vendored, 2 changes)
@@ -1,2 +0,0 @@
-*
-!.gitignore
@@ -26,6 +26,10 @@ class CacheFileStore
 unless Dir.exist?(@storage_path)
 FileUtils.mkdir_p(@storage_path)
 end
+
+unless Pathname.new(@storage_path).writable?
+fail "#{@storage_path} is not writable"
+end
 end
 
 def clean
@@ -95,7 +95,7 @@ class WpItems < Array
 code = tag.text.to_s
 next if code.empty?
 
-if ! code.valid_encoding?
+if !code.valid_encoding?
 code = code.encode('UTF-16be', :invalid => :replace, :replace => '?').encode('UTF-8')
 end
 
@@ -9,7 +9,7 @@ class WpUsers < WpItems
 # @return [ void ]
 def output(options = {})
 rows = []
-headings = ['Id', 'Login', 'Name']
+headings = ['ID', 'Login', 'Name']
 headings << 'Password' if options[:show_password]
 
 remove_junk_from_display_names
@@ -1,32 +1,30 @@
 # encoding: UTF-8
 
-LIB_DIR = File.expand_path(File.join(__dir__, '..'))
-ROOT_DIR = File.expand_path(File.join(LIB_DIR, '..')) # expand_path is used to get "wpscan/" instead of "wpscan/lib/../"
-DATA_DIR = File.join(ROOT_DIR, 'data')
-CONF_DIR = File.join(ROOT_DIR, 'conf')
-CACHE_DIR = File.join(ROOT_DIR, 'cache')
-WPSCAN_LIB_DIR = File.join(LIB_DIR, 'wpscan')
-UPDATER_LIB_DIR = File.join(LIB_DIR, 'updater')
-COMMON_LIB_DIR = File.join(LIB_DIR, 'common')
-MODELS_LIB_DIR = File.join(COMMON_LIB_DIR, 'models')
-COLLECTIONS_LIB_DIR = File.join(COMMON_LIB_DIR, 'collections')
+# Location directories
+LIB_DIR = File.expand_path(File.join(__dir__, '..')) # wpscan/lib/
+ROOT_DIR = File.expand_path(File.join(LIB_DIR, '..')) # wpscan/ - expand_path is used to get "wpscan/" instead of "wpscan/lib/../"
+USER_DIR = File.expand_path(Dir.home) # ~/
 
-DEFAULT_LOG_FILE = File.join(ROOT_DIR, 'log.txt')
+# Core WPScan directories
+CACHE_DIR = File.join(USER_DIR, '.wpscan/cache') # ~/.wpscan/cache/
+DATA_DIR = File.join(USER_DIR, '.wpscan/data') # ~/.wpscan/data/
+CONF_DIR = File.join(USER_DIR, '.wpscan/conf') # ~/.wpscan/conf/ - Not used ATM (only ref via ./spec/ for travis)
+COMMON_LIB_DIR = File.join(LIB_DIR, 'common') # wpscan/lib/common/
+WPSCAN_LIB_DIR = File.join(LIB_DIR, 'wpscan') # wpscan/lib/wpscan/
+MODELS_LIB_DIR = File.join(COMMON_LIB_DIR, 'models') # wpscan/lib/common/models/
 
-# Plugins directories
-COMMON_PLUGINS_DIR = File.join(COMMON_LIB_DIR, 'plugins')
-WPSCAN_PLUGINS_DIR = File.join(WPSCAN_LIB_DIR, 'plugins') # Not used ATM
+# Core WPScan files
+DEFAULT_LOG_FILE = File.join(USER_DIR, '.wpscan/log.txt') # ~/.wpscan/log.txt
+DATA_FILE = File.join(ROOT_DIR, 'data.zip') # wpscan/data.zip
 
-# Data files
-WORDPRESSES_FILE = File.join(DATA_DIR, 'wordpresses.json')
-PLUGINS_FILE = File.join(DATA_DIR, 'plugins.json')
-THEMES_FILE = File.join(DATA_DIR, 'themes.json')
-WP_VERSIONS_FILE = File.join(DATA_DIR, 'wp_versions.xml')
-LOCAL_FILES_FILE = File.join(DATA_DIR, 'local_vulnerable_files.xml')
-WP_VERSIONS_XSD = File.join(DATA_DIR, 'wp_versions.xsd')
-LOCAL_FILES_XSD = File.join(DATA_DIR, 'local_vulnerable_files.xsd')
-USER_AGENTS_FILE = File.join(DATA_DIR, 'user-agents.txt')
-LAST_UPDATE_FILE = File.join(DATA_DIR, '.last_update')
+# WPScan Data files (data.zip)
+LAST_UPDATE_FILE = File.join(DATA_DIR, '.last_update') # ~/.wpscan/data/.last_update
+PLUGINS_FILE = File.join(DATA_DIR, 'plugins.json') # ~/.wpscan/data/plugins.json
+THEMES_FILE = File.join(DATA_DIR, 'themes.json') # ~/.wpscan/data/themes.json
+TIMTHUMBS_FILE = File.join(DATA_DIR, 'timthumbs.txt') # ~/.wpscan/data/timthumbs.txt
+USER_AGENTS_FILE = File.join(DATA_DIR, 'user-agents.txt') # ~/.wpscan/data/user-agents.txt
+WORDPRESSES_FILE = File.join(DATA_DIR, 'wordpresses.json') # ~/.wpscan/data/wordpresses.json
+WP_VERSIONS_FILE = File.join(DATA_DIR, 'wp_versions.xml') # ~/.wpscan/data/wp_versions.xml
 
 MIN_RUBY_VERSION = '2.1.9'
 
@@ -50,6 +48,7 @@ def windows?
 end
 
 require 'environment'
+require 'zip'
 
 def escape_glob(s)
 s.gsub(/[\\\{\}\[\]\*\?]/) { |x| '\\' + x }
@@ -78,13 +77,39 @@ def add_trailing_slash(url)
 url =~ /\/$/ ? url : "#{url}/"
 end
 
-def missing_db_file?
+def missing_db_files?
 DbUpdater::FILES.each do |db_file|
 return true unless File.exist?(File.join(DATA_DIR, db_file))
 end
 false
 end
 
+# Find data.zip?
+def has_db_zip?
+return File.exist?(DATA_FILE)
+end
+
+# Extract data.zip
+def extract_db_zip
+# Create data folder
+FileUtils.mkdir_p(DATA_DIR)
+
+Zip::File.open(DATA_FILE) do |zip_file|
+zip_file.each do |f|
+# Feedback to the user
+#puts "[+] Extracting: #{File.basename(f.name)}"
+f_path = File.join(DATA_DIR, File.basename(f.name))
+
+# Delete if already there
+#puts "[+] Deleting: #{File.basename(f.name)}" if File.exist?(f_path)
+FileUtils.rm(f_path) if File.exist?(f_path)
+
+# Extract
+zip_file.extract(f, f_path)
+end
+end
+end
+
 def last_update
 date = nil
 if File.exists?(LAST_UPDATE_FILE)
@@ -94,6 +119,7 @@ def last_update
 date
 end
 
+# Was it 5 days ago?
 def update_required?
 date = last_update
 day_seconds = 24 * 60 * 60
@@ -166,6 +192,11 @@ def banner
 puts
 end
 
+# Space out sections
+def spacer
+puts " - - - - -"
+end
+
 def xml(file)
 Nokogiri::XML(File.open(file)) do |config|
 config.noblanks
@@ -253,14 +284,26 @@ end
 # @return [ String ] A random user-agent from data/user-agents.txt
 def get_random_user_agent
 user_agents = []
 
+# If we can't access the file, die
+raise('[ERROR] Missing user-agent data. Please re-run with just --update.') unless File.exist?(USER_AGENTS_FILE)
+
+# Read in file
 f = File.open(USER_AGENTS_FILE, 'r')
 
+# Read every line in the file
 f.each_line do |line|
-# ignore comments
+# Remove any End of Line issues, and leading/trailing spaces
+line = line.strip.chomp
+# Ignore empty files and comments
 next if line.empty? or line =~ /^\s*(#|\/\/)/
+# Add to array
 user_agents << line.strip
 end
+# Close file handler
 f.close
-# return ransom user-agent
+
+# Return random user-agent
 user_agents.sample
 end
 
@@ -274,3 +317,21 @@ end
 def url_encode(str)
 CGI.escape(str).gsub("+", "%20")
 end
+
+# Check valid JSON?
+def valid_json?(json)
+JSON.parse(json)
+return true
+rescue JSON::ParserError => e
+return false
+end
+
+# Get the HTTP response code
+def get_http_status(url)
+Browser.get(url.to_s).code
+end
+
+# Check to see if we need a "s"
+def grammar_s(size)
+size.to_i >= 2 ? "s" : ""
+end
@@ -13,8 +13,13 @@ class DbUpdater
 def initialize(repo_directory)
 @repo_directory = repo_directory
 
-fail "#{repo_directory} is not writable" unless \
-Pathname.new(repo_directory).writable?
+unless Dir.exist?(@repo_directory)
+FileUtils.mkdir_p(@repo_directory)
+end
+
+unless Pathname.new(@repo_directory).writable?
+fail "#{@repo_directory} is not writable"
+end
 end
 
 # @return [ Hash ] The params for Typhoeus::Request
@@ -83,7 +88,7 @@ class DbUpdater
 def update(verbose = false)
 FILES.each do |filename|
 begin
-puts "[+] Checking #{filename}" if verbose
+puts "[+] Checking: #{filename}" if verbose
 db_checksum = remote_file_checksum(filename)
 
 # Checking if the file needs to be updated
@@ -95,7 +100,7 @@ class DbUpdater
 puts ' [i] Needs to be updated' if verbose
 create_backup(filename)
 puts ' [i] Backup Created' if verbose
-puts ' [i] Downloading new file' if verbose
+puts " [i] Downloading new file: #{remote_file_url(filename)}" if verbose
 dl_checksum = download(filename)
 puts " [i] Downloaded File Checksum: #{dl_checksum}" if verbose
 puts " [i] Database File Checksum : #{db_checksum}" if verbose
@@ -1,11 +1,17 @@
 # encoding: UTF-8
 
-require 'web_site/robots_txt'
+require 'web_site/humans_txt'
 require 'web_site/interesting_headers'
+require 'web_site/robots_txt'
+require 'web_site/security_txt'
+require 'web_site/sitemap'
 
 class WebSite
-include WebSite::RobotsTxt
+include WebSite::HumansTxt
 include WebSite::InterestingHeaders
+include WebSite::RobotsTxt
+include WebSite::SecurityTxt
+include WebSite::Sitemap
 
 attr_reader :uri
 
@@ -121,13 +127,6 @@ class WebSite
 @error_404_hash
 end
 
-# Will try to find the rss url in the homepage
-# Only the first one found is returned
-def rss_url
-homepage_body = Browser.get(@uri.to_s).body
-homepage_body[%r{<link .* type="application/rss\+xml" .* href="([^"]+)" />}, 1]
-end
-
 # Only the first 700 bytes are checked to avoid the download
 # of the whole file which can be very huge (like 2 Go)
 #
lib/wpscan/web_site/humans_txt.rb (new file, 13 lines)
@@ -0,0 +1,13 @@
+# encoding: UTF-8
+
+class WebSite
+module HumansTxt
+
+# Gets the humans.txt URL
+# @return [ String ]
+def humans_url
+@uri.clone.merge('humans.txt').to_s
+end
+
+end
+end
@@ -18,49 +18,53 @@ class WebSite
 # Parse robots.txt
 # @return [ Array ] URLs generated from robots.txt
 def parse_robots_txt
-return unless has_robots?
-
 return_object = []
 
+# Make request
 response = Browser.get(robots_url.to_s)
 body = response.body
 
 # Get all allow and disallow urls
 entries = body.scan(/^(?:dis)?allow:\s*(.*)$/i)
 
+# Did we get something?
 if entries
-entries.flatten!
-entries.compact.sort!
-entries.uniq!
+# Remove any rubbish
+entries = clean_uri(entries)
+
+# Sort
+entries.sort!
+
+# Wordpress URL
 wordpress_path = @uri.path
 
+# Each "boring" value as defined below, remove
 RobotsTxt.known_dirs.each do |d|
 entries.delete(d)
-# also delete when wordpress is installed in subdir
+# Also delete when wordpress is installed in subdir
 dir_with_subdir = "#{wordpress_path}/#{d}".gsub(/\/+/, '/')
 entries.delete(dir_with_subdir)
 end
 
-entries.each do |d|
-begin
-temp = @uri.clone
-temp.path = d.strip
-rescue URI::Error
-temp = d.strip
-end
-return_object << temp.to_s
-end
+# Convert to full URIs
+return_object = full_uri(entries)
 end
-return_object
+return return_object
 end
 
 protected
 
+# Useful ~ "function do_robots()" -> https://github.com/WordPress/WordPress/blob/master/wp-includes/functions.php
+#
 # @return [ Array ]
 def self.known_dirs
 %w{
 /
 /wp-admin/
+/wp-admin/admin-ajax.php
 /wp-includes/
 /wp-content/
 }
 end
 
 end
 end
lib/wpscan/web_site/security_txt.rb (new file, 13 lines)
@@ -0,0 +1,13 @@
+# encoding: UTF-8
+
+class WebSite
+module SecurityTxt
+
+# Gets the security.txt URL
+# @return [ String ]
+def security_url
+@uri.clone.merge('.well-known/security.txt').to_s
+end
+
+end
+end
lib/wpscan/web_site/sitemap.rb (new file, 53 lines)
@@ -0,0 +1,53 @@
+# encoding: UTF-8
+
+class WebSite
+module Sitemap
+
+# Checks if a sitemap.txt file exists
+# @return [ Boolean ]
+def has_sitemap?
+# Make the request
+response = Browser.get(sitemap_url)
+
+# Make sure its HTTP 200
+return false unless response.code == 200
+
+# Is there a sitemap value?
+result = response.body.scan(/^sitemap\s*:\s*(.*)$/i)
+return true if result[0]
+return false
+end
+
+# Get the robots.txt URL
+# @return [ String ]
+def sitemap_url
+@uri.clone.merge('robots.txt').to_s
+end
+
+# Parse robots.txt
+# @return [ Array ] URLs generated from robots.txt
+def parse_sitemap
+return_object = []
+
+# Make request
+response = Browser.get(sitemap_url.to_s)
+
+# Get all allow and disallow urls
+entries = response.body.scan(/^sitemap\s*:\s*(.*)$/i)
+
+# Did we get something?
+if entries
+# Remove any rubbish
+entries = clean_uri(entries)
+
+# Sort
+entries.sort!
+
+# Convert to full URIs
+return_object = full_uri(entries)
+end
+return return_object
+end
+
+end
+end
@@ -1,22 +1,26 @@
 # encoding: UTF-8
 
 require 'web_site'
-require 'wp_target/wp_readme'
-require 'wp_target/wp_registrable'
+require 'wp_target/wp_api'
 require 'wp_target/wp_config_backup'
-require 'wp_target/wp_must_use_plugins'
-require 'wp_target/wp_login_protection'
 require 'wp_target/wp_custom_directories'
 require 'wp_target/wp_full_path_disclosure'
+require 'wp_target/wp_login_protection'
+require 'wp_target/wp_must_use_plugins'
+require 'wp_target/wp_readme'
+require 'wp_target/wp_registrable'
+require 'wp_target/wp_rss'
 
 class WpTarget < WebSite
-include WpTarget::WpReadme
-include WpTarget::WpRegistrable
+include WpTarget::WpAPI
 include WpTarget::WpConfigBackup
-include WpTarget::WpMustUsePlugins
-include WpTarget::WpLoginProtection
 include WpTarget::WpCustomDirectories
 include WpTarget::WpFullPathDisclosure
+include WpTarget::WpLoginProtection
+include WpTarget::WpMustUsePlugins
+include WpTarget::WpReadme
+include WpTarget::WpRegistrable
+include WpTarget::WpRSS
 
 attr_reader :verbose
 
lib/wpscan/wp_target/wp_api.rb (new file, 86 lines)
@@ -0,0 +1,86 @@
+# encoding: UTF-8
+
+class WpTarget < WebSite
+module WpAPI
+
+# Checks to see if the REST API is enabled
+#
+# This by default in a WordPress installation since 4.5+
+# @return [ Boolean ]
+def has_api?(url)
+# Make the request
+response = Browser.get(url)
+
+# Able to view the output?
+if valid_json?(response.body)
+# Read in JSON
+data = JSON.parse(response.body)
+
+# If there is nothing there, return false
+if data.empty?
+return false
+# WAF/API disabled response
+elsif data.include?('message') and data['message'] =~ /Only authenticated users can access the REST API/
+return false
+# Success!
+elsif response.code == 200
+return true
+end
+end
+
+# Something went wrong
+return false
+end
+
+# @return [ String ] The API/JSON URL
+def json_url
+@uri.merge('/wp-json/').to_s
+end
+
+# @return [ String ] The API/JSON URL to show users
+def json_users_url
+@uri.merge('/wp-json/wp/v2/users').to_s
+end
+
+# @return [ String ] The API/JSON URL to show users
+def json_get_users(url)
+# Variables
+users = []
+
+# Make the request
+response = Browser.get(url)
+
+# If not HTTP 200, return false
+return false unless response.code == 200
+
+# Able to view the output?
+return false unless valid_json?(response.body)
+
+# Read in JSON
+data = JSON.parse(response.body)
+
+# If there is nothing there, return false
+return false if data.empty?
+
+# Add to array
+data.each do |child|
+row = [ child['id'], child['name'], child['link'] ]
+users << row
+end
+
+# Sort and uniq
+users = users.sort.uniq
+
+if users and users.size >= 1
+# Feedback
+grammar = grammar_s(users.size)
+puts warning("#{users.size} user#{grammar} exposed via API: #{json_users_url}")
+
+# Print results
+table = Terminal::Table.new(headings: ['ID', 'Name', 'URL'],
+rows: users)
+puts table
+end
+end
+end
+end
lib/wpscan/wp_target/wp_rss.rb (new file, 68 lines)
@@ -0,0 +1,68 @@
+# encoding: UTF-8
+
+class WpTarget < WebSite
+module WpRSS
+
+# Checks to see if there is an rss feed
+# Will try to find the rss url in the homepage
+# Only the first one found is returned
+#
+# This file comes by default in a WordPress installation
+#
+# @return [ Boolean ]
+def rss_url
+homepage_body = Browser.get(@uri.to_s).body
+# Format: <link rel="alternate" type="application/rss+xml" title=".*" href=".*" />
+homepage_body[%r{<link\s*.*\s*type=['|"]application\/rss\+xml['|"]\s*.*\stitle=".*" href=['|"]([^"]+)['|"]\s*\/?>}i, 1]
+end
+
+
+# Gets all the authors from the RSS feed
+#
+# @return [ string ]
+def rss_authors(url)
+# Variables
+users = []
+
+# Make the request
+response = Browser.get(url, followlocation: true)
+
+# Valid repose to view? HTTP 200?
+return false unless response.code == 200
+
+# Get output
+data = response.body
+
+# If there is nothing there, return false
+return false if data.empty?
+
+# Read in RSS/XML
+xml = Nokogiri::XML(data)
+
+begin
+# Look for <dc:creator> item
+xml.xpath('//item/dc:creator').each do |node|
+#Format: <dc:creator><![CDATA[.*]]></dc:creator>
+users << [%r{.*}i.match(node).to_s]
+end
+rescue
+puts critical("Missing Author field. Maybe non-standard WordPress RSS feed?")
+return false
+end
+
+# Sort and uniq
+users = users.sort_by { |user| user.to_s.downcase }.uniq
+
+if users and users.size >= 1
+# Feedback
+grammar = grammar_s(users.size)
+puts warning("Detected #{users.size} user#{grammar} from RSS feed:")
+
+# Print results
+table = Terminal::Table.new(headings: ['Name'],
+rows: users)
+puts table
+end
+end
+end
+end
@@ -49,7 +49,7 @@ def usage
 puts '-Use custom plugins directory ...'
 puts "ruby #{script_name} -u www.example.com --wp-plugins-dir wp-content/custom-plugins"
 puts
-puts '-Update the DB ...'
+puts '-Update the Database ...'
 puts "ruby #{script_name} --update"
 puts
 puts '-Debug output ...'
@@ -120,6 +120,58 @@ def help
 puts
 end
 
+
+def clean_uri(entries)
+# Extract elements
+entries.flatten!
+# Remove any leading/trailing spaces
+entries.collect{|x| x.strip || x }
+# End Of Line issues
+entries.collect{|x| x.chomp! || x }
+# Remove nil's
+entries.compact
+# Unique values only
+entries.uniq!
+
+return entries
+end
+
+# Return the full URL
+def full_uri(entries)
+return_object = []
+# Each value now, try and make it a full URL
+entries.each do |d|
+begin
+temp = @uri.clone
+temp.path = d.strip
+rescue URI::Error
+temp = d.strip
+end
+return_object << temp.to_s
+end
+
+return return_object
+end
+
+# Parse humans.txt
+# @return [ Array ] URLs generated from humans.txt
+def parse_txt(url)
+return_object = []
+response = Browser.get(url.to_s)
+body = response.body
+
+# Get all non-comments
+entries = body.split(/\n/)
+
+# Did we get something?
+if entries
+# Remove any rubbish
+entries = clean_uri(entries)
+end
+return return_object
+end
+
+
 # Hook to check if the target if down during the scan
 # And have the number of requests performed to display at the end of the scan
 # The target is considered down after 30 requests with status = 0
@@ -138,3 +190,4 @@ Typhoeus.on_complete do |response|
 
 sleep(Browser.instance.throttle)
 end
+
@@ -207,18 +207,6 @@ describe 'WebSite' do
 end
 end
 
-describe '#rss_url' do
-it 'returns nil if the url is not found' do
-stub_request(:get, web_site.url).to_return(body: 'No RSS link in this body !')
-expect(web_site.rss_url).to be_nil
-end
-
-it "returns 'http://lamp-wp/wordpress-3.5/?feed=rss2'" do
-stub_request_to_fixture(url: web_site.url, fixture: fixtures_dir + '/rss_url/wordpress-3.5.htm')
-expect(web_site.rss_url).to be === 'http://lamp-wp/wordpress-3.5/?feed=rss2'
-end
-end
-
 describe '::has_log?' do
 let(:log_url) { web_site.uri.merge('log.txt').to_s }
 let(:pattern) { %r{PHP Fatal error} }
spec/shared_examples/web_site/humans_txt.rb (new file, 108 lines)
@@ -0,0 +1,108 @@
+# encoding: UTF-8
+
+shared_examples 'WebSite::HumansTxt' do
+let(:known_dirs) { WebSite::HumansTxt.known_dirs }
+
+describe '#humans_url' do
+it 'returns the correct url' do
+expect(web_site.humans_url).to eql 'http://example.localhost/humans.txt'
+end
+end
+
+describe '#has_humans?' do
+it 'returns true' do
+stub_request(:get, web_site.humans_url).to_return(status: 200)
+expect(web_site.has_humans?).to be_truthy
+end
+
+it 'returns false' do
+stub_request(:get, web_site.humans_url).to_return(status: 404)
+expect(web_site.has_humans?).to be_falsey
+end
+end
+
+describe '#parse_humans_txt' do
+
+context 'installed in root' do
+after :each do
+stub_request_to_fixture(url: web_site.humans_url, fixture: @fixture)
+humans = web_site.parse_humans_txt
+expect(humans).to match_array @expected
+end
+
+it 'returns an empty Array (empty humans.txt)' do
+@fixture = fixtures_dir + '/humans_txt/empty_humans.txt'
+@expected = []
+end
+
+it 'returns an empty Array (invalid humans.txt)' do
+@fixture = fixtures_dir + '/humans_txt/invalid_humans.txt'
+@expected = []
+end
+
+it 'returns some urls and some strings' do
+@fixture = fixtures_dir + '/humans_txt/invalid_humans_2.txt'
+@expected = %w(
+/ÖÜ()=?
+http://10.0.0.0/wp-includes/
+http://example.localhost/asdf/
+wooooza
+)
+end
+
+it 'returns an Array of urls (valid humans.txt)' do
+@fixture = fixtures_dir + '/humans_txt/humans.txt'
+@expected = %w(
+http://example.localhost/wordpress/admin/
+http://example.localhost/wordpress/wp-admin/
+http://example.localhost/wordpress/secret/
+http://example.localhost/Wordpress/wp-admin/
+http://example.localhost/wp-admin/tralling-space/
+http://example.localhost/asdf/
+)
+end
+
+it 'removes duplicate entries from humans.txt test 1' do
+@fixture = fixtures_dir + '/humans_txt/humans_duplicate_1.txt'
+@expected = %w(
+http://example.localhost/wordpress/
+http://example.localhost/wordpress/admin/
+http://example.localhost/wordpress/wp-admin/
+http://example.localhost/wordpress/secret/
+http://example.localhost/Wordpress/wp-admin/
+http://example.localhost/wp-admin/tralling-space/
+http://example.localhost/asdf/
+)
+end
+
+it 'removes duplicate entries from humans.txt test 2' do
+@fixture = fixtures_dir + '/humans_txt/humans_duplicate_2.txt'
+@expected = nil
+end
+end
+
+context 'installed in sub directory' do
+it 'returns an Array of urls (valid humans.txt, WP installed in subdir)' do
+web_site_sub = WebSite.new('http://example.localhost/wordpress/')
+fixture = fixtures_dir + '/humans_txt/humans.txt'
+expected = %w(
+http://example.localhost/wordpress/admin/
+http://example.localhost/wordpress/secret/
+http://example.localhost/Wordpress/wp-admin/
+http://example.localhost/wp-admin/tralling-space/
+http://example.localhost/asdf/
+)
+stub_request_to_fixture(url: web_site_sub.humans_url, fixture: fixture)
+humans = web_site_sub.parse_humans_txt
+expect(humans).to match_array expected
+end
+end
+end
+
+describe '#known_dirs' do
+it 'does not contain duplicates' do
+expect(known_dirs.flatten.uniq.length).to eq known_dirs.length
+end
+end
+
+end
spec/shared_examples/web_site/security_txt.rb (new file, 108 lines)
@@ -0,0 +1,108 @@
+# encoding: UTF-8
+
+shared_examples 'WebSite::SecurityTxt' do
+let(:known_dirs) { WebSite::SecurityTxt.known_dirs }
+
+describe '#security_url' do
+it 'returns the correct url' do
+expect(web_site.security_url).to eql 'http://example.localhost/security.txt'
+end
+end
+
+describe '#has_security?' do
+it 'returns true' do
+stub_request(:get, web_site.security_url).to_return(status: 200)
+expect(web_site.has_security?).to be_truthy
+end
+
+it 'returns false' do
+stub_request(:get, web_site.security_url).to_return(status: 404)
+expect(web_site.has_security?).to be_falsey
+end
+end
+
+describe '#parse_security_txt' do
+
+context 'installed in root' do
+after :each do
+stub_request_to_fixture(url: web_site.security_url, fixture: @fixture)
+security = web_site.parse_security_txt
+expect(security).to match_array @expected
+end
+
+it 'returns an empty Array (empty security.txt)' do
+@fixture = fixtures_dir + '/security_txt/empty_security.txt'
+@expected = []
+end
+
+it 'returns an empty Array (invalid security.txt)' do
+@fixture = fixtures_dir + '/security_txt/invalid_security.txt'
+@expected = []
+end
+
+it 'returns some urls and some strings' do
+@fixture = fixtures_dir + '/security_txt/invalid_security_2.txt'
+@expected = %w(
+/ÖÜ()=?
+http://10.0.0.0/wp-includes/
+http://example.localhost/asdf/
+wooooza
+)
+end
+
+it 'returns an Array of urls (valid security.txt)' do
+@fixture = fixtures_dir + '/security_txt/security.txt'
+@expected = %w(
+http://example.localhost/wordpress/admin/
+http://example.localhost/wordpress/wp-admin/
+http://example.localhost/wordpress/secret/
+http://example.localhost/Wordpress/wp-admin/
+http://example.localhost/wp-admin/tralling-space/
+http://example.localhost/asdf/
+)
+end
+
+it 'removes duplicate entries from security.txt test 1' do
+@fixture = fixtures_dir + '/security_txt/security_duplicate_1.txt'
+@expected = %w(
+http://example.localhost/wordpress/
+http://example.localhost/wordpress/admin/
+http://example.localhost/wordpress/wp-admin/
+http://example.localhost/wordpress/secret/
+http://example.localhost/Wordpress/wp-admin/
+http://example.localhost/wp-admin/tralling-space/
+http://example.localhost/asdf/
+)
+end
+
+it 'removes duplicate entries from security.txt test 2' do
+@fixture = fixtures_dir + '/security_txt/security_duplicate_2.txt'
+@expected = nil
+end
+end
+
+context 'installed in sub directory' do
+it 'returns an Array of urls (valid security.txt, WP installed in subdir)' do
+web_site_sub = WebSite.new('http://example.localhost/wordpress/')
+fixture = fixtures_dir + '/security_txt/security.txt'
+expected = %w(
+http://example.localhost/wordpress/admin/
+http://example.localhost/wordpress/secret/
+http://example.localhost/Wordpress/wp-admin/
+http://example.localhost/wp-admin/tralling-space/
+http://example.localhost/asdf/
+)
+stub_request_to_fixture(url: web_site_sub.security_url, fixture: fixture)
+security = web_site_sub.parse_security_txt
+expect(security).to match_array expected
+end
+end
+end
+
+describe '#known_dirs' do
+it 'does not contain duplicates' do
+expect(known_dirs.flatten.uniq.length).to eq known_dirs.length
+end
+end
+
+end
wpscan.rb (241 changes)
@@ -10,6 +10,7 @@ require File.join(__dir__, 'lib', 'wpscan', 'wpscan_helper')
 def main
 begin
 wpscan_options = WpscanOptions.load_from_arguments
+date = last_update
 
 $log = wpscan_options.log
 
@@ -27,7 +28,7 @@
 # check if file exists and has a size greater zero
 if File.exist?($log) && File.size?($log)
 puts notice("The supplied log file #{$log} already exists. If you continue the new output will be appended.")
-print '[?] Do you want to continue? [Y]es [N]o, default: [N]'
+print '[?] Do you want to continue? [Y]es [N]o, default: [N] >'
 if Readline.readline !~ /^y/i
 # unset logging so puts will try to log to the file
 $log = nil
@@ -54,6 +55,8 @@
 unless wpscan_options.has_options?
 # first parameter only url?
 if ARGV.length == 1
+puts
+puts notice("Please use '-u #{ARGV[0]}' next time")
 wpscan_options.url = ARGV[0]
 else
 usage()
@@ -72,8 +75,7 @@
 
 if wpscan_options.version
 puts "Current version: #{WPSCAN_VERSION}"
-date = last_update
-puts "Last DB update: #{date.strftime('%Y-%m-%d')}" unless date.nil?
+puts "Last database update: #{date.strftime('%Y-%m-%d')}" unless date.nil?
 exit(0)
 end
 
@@ -83,28 +85,44 @@
 wpscan_options.to_h.merge(max_threads: wpscan_options.threads)
 )
 
-# check if db file needs upgrade and we are not running in batch mode
-# also no need to check if the user supplied the --update switch
-if update_required? && !wpscan_options.batch && !wpscan_options.update
-puts notice('It seems like you have not updated the database for some time.')
-print '[?] Do you want to update now? [Y]es [N]o [A]bort, default: [N]'
-if (input = Readline.readline) =~ /^y/i
+# Check if database needs upgrade (if its older than 5 days) and we are not running in --batch mode
+# Also no need to check if the user supplied the --update switch
+if update_required? and not wpscan_options.batch and not wpscan_options.update
+# Banner
+puts
+puts notice('It seems like you have not updated the database for some time')
+puts notice("Last database update: #{date.strftime('%Y-%m-%d')}") unless date.nil?
+
+# User prompt
+print '[?] Do you want to update now? [Y]es [N]o [A]bort update, default: [N] > '
+if (input = Readline.readline) =~ /^a/i
+puts 'Update aborted'
+elsif input =~ /^y/i
 wpscan_options.update = true
-elsif input =~ /^a/i
-puts 'Scan aborted'
-exit(1)
-else
-if missing_db_file?
-puts critical('You can not run a scan without any databases. Extract the data.zip file.')
+end
+
+# Is there a database to go on with?
+if missing_db_files? and not wpscan_options.update
+# Check for data.zip
+if has_db_zip?
+puts notice('Extracting the Database ...')
+# Extract data.zip
+extract_db_zip
+puts notice('Extraction completed')
+# Missing, so can't go on!
+else
+puts critical('You can not run a scan without any databases')
 exit(1)
 end
 end
 end
 
+# Should we update?
 if wpscan_options.update
 puts notice('Updating the Database ...')
 DbUpdater.new(DATA_DIR).update(wpscan_options.verbose)
-puts notice('Update completed.')
+puts notice('Update completed')
 
 # Exit program if only option --update is used
 exit(0) unless wpscan_options.url
 end
@@ -120,12 +138,18 @@
 end
 
 if wp_target.ssl_error?
-raise "The target site returned an SSL/TLS error. You can try again using the --disable-tls-checks option.\nError: #{wp_target.get_root_path_return_code}\nSee here for a detailed explanation of the error: http://www.rubydoc.info/github/typhoeus/ethon/Ethon/Easy:return_code"
+raise "The target site returned an SSL/TLS error. You can try again using --disable-tls-checks\nError: #{wp_target.get_root_path_return_code}\nSee here for a detailed explanation of the error: http://www.rubydoc.info/github/typhoeus/ethon/Ethon/Easy:return_code"
 end
 
 # Remote website up?
 unless wp_target.online?
-raise "The WordPress URL supplied '#{wp_target.uri}' seems to be down. Maybe the site is blocking wpscan so you can try the --random-agent parameter."
+if wpscan_options.user_agent
+puts info("User-Agent: #{wpscan_options.user_agent}")
+raise "The WordPress URL supplied '#{wp_target.uri}' seems to be down. Maybe the site is blocking the user-agent?"
+else
+raise "The WordPress URL supplied '#{wp_target.uri}' seems to be down. Maybe the site is blocking the wpscan user-agent, so you can try --random-agent"
+end
+
 end
 
 if wpscan_options.proxy
@@ -145,7 +169,7 @@
 puts "Following redirection #{redirection}"
 else
 puts notice("The remote host tried to redirect to: #{redirection}")
-print '[?] Do you want follow the redirection ? [Y]es [N]o [A]bort, default: [N]'
+print '[?] Do you want follow the redirection ? [Y]es [N]o [A]bort, default: [N] >'
 end
 if wpscan_options.follow_redirection || !wpscan_options.batch
 if wpscan_options.follow_redirection || (input = Readline.readline) =~ /^y/i
@@ -174,7 +198,7 @@
 # Remote website is wordpress?
 unless wpscan_options.force
 unless wp_target.wordpress?
-raise 'The remote website is up, but does not seem to be running WordPress.'
+raise 'The remote website is up, but does not seem to be running WordPress. If you are sure, use --force'
 end
 end
 
@@ -196,35 +220,8 @@
 start_memory = get_memory_usage unless windows?
 puts info("URL: #{wp_target.url}")
 puts info("Started: #{start_time.asctime}")
-puts
+puts info("User-Agent: #{wpscan_options.user_agent}") if wpscan_options.verbose and wpscan_options.user_agent
+spacer()
-if wp_target.has_robots?
-puts info("robots.txt available under: '#{wp_target.robots_url}'")
-
-wp_target.parse_robots_txt.each do |dir|
-puts info("Interesting entry from robots.txt: #{dir}")
-end
-end
-
-if wp_target.has_full_path_disclosure?
-puts warning("Full Path Disclosure (FPD) in '#{wp_target.full_path_disclosure_url}': #{wp_target.full_path_disclosure_data}")
-end
-
-if wp_target.has_debug_log?
-puts critical("Debug log file found: #{wp_target.debug_log_url}")
-end
-
-wp_target.config_backup.each do |file_url|
-puts critical("A wp-config.php backup file has been found in: '#{file_url}'")
-end
-
-if wp_target.search_replace_db_2_exists?
-puts critical("searchreplacedb2.php has been found in: '#{wp_target.search_replace_db_2_url}'")
-end
-
-if wp_target.emergency_exists?
-puts critical("emergency.php has been found in: '#{wp_target.emergency_url}'")
-end
-
 wp_target.interesting_headers.each do |header|
 output = info('Interesting header: ')
@@ -237,29 +234,126 @@
 puts output + "#{header[0]}: #{header[1]}"
 end
 end
+spacer()
+
+if wp_target.has_robots?
+code = get_http_status(wp_target.robots_url)
+puts info("robots.txt available under: #{wp_target.robots_url} [HTTP #{code}]")
+
+wp_target.parse_robots_txt.each do |dir|
+code = get_http_status(dir)
+puts info("Interesting entry from robots.txt: #{dir} [HTTP #{code}]")
+end
+spacer()
+end
+
+if wp_target.has_sitemap?
+code = get_http_status(wp_target.sitemap_url)
+puts info("Sitemap found: #{wp_target.sitemap_url} [HTTP #{code}]")
+
+wp_target.parse_sitemap.each do |dir|
+code = get_http_status(dir)
+puts info("Sitemap entry: #{dir} [HTTP #{code}]")
+end
+spacer()
+end
+
+code = get_http_status(wp_target.humans_url)
+if code == 200
+puts info("humans.txt available under: #{wp_target.humans_url} [HTTP #{code}]")
+
+parse_txt(wp_target.humans_url).each do |dir|
+puts info("Entry from humans.txt: #{dir}")
+end
+spacer()
+end
+
+code = get_http_status(wp_target.security_url)
+if code == 200
+puts info("security.txt available under: #{wp_target.security_url} [HTTP #{code}]")
+
+parse_txt(wp_target.security_url).each do |dir|
+puts info("Entry from security.txt: #{dir}")
+end
+spacer()
+end
+
+if wp_target.has_debug_log?
+puts critical("Debug log file found: #{wp_target.debug_log_url}")
+spacer()
+end
+
+wp_target.config_backup.each do |file_url|
+puts critical("A wp-config.php backup file has been found in: #{file_url}")
+spacer()
+end
+
+if wp_target.search_replace_db_2_exists?
+puts critical("searchreplacedb2.php has been found in: #{wp_target.search_replace_db_2_url}")
+spacer()
+end
+
+if wp_target.emergency_exists?
+puts critical("emergency.php has been found in: #{wp_target.emergency_url}")
+spacer()
+end
+
 if wp_target.multisite?
 puts info('This site seems to be a multisite (http://codex.wordpress.org/Glossary#Multisite)')
+spacer()
 end
 
 if wp_target.has_must_use_plugins?
 puts info("This site has 'Must Use Plugins' (http://codex.wordpress.org/Must_Use_Plugins)")
-end
-
-if wp_target.registration_enabled?
-puts warning("Registration is enabled: #{wp_target.registration_url}")
+spacer()
 end
 
 if wp_target.has_xml_rpc?
-puts info("XML-RPC Interface available under: #{wp_target.xml_rpc_url}")
+code = get_http_status(wp_target.xml_rpc_url)
+puts info("XML-RPC Interface available under: #{wp_target.xml_rpc_url} [HTTP #{code}]")
+spacer()
+end
+
+# Test to see if MAIN API URL gives anything back
+if wp_target.has_api?(wp_target.json_url)
+code = get_http_status(wp_target.json_url)
+puts info("API exposed: #{wp_target.json_url} [HTTP #{code}]")
+
+# Test to see if USER API URL gives anything back
+if wp_target.has_api?(wp_target.json_users_url)
+# Print users from JSON
+wp_target.json_get_users(wp_target.json_users_url)
+end
+spacer()
+end
+
+# Get RSS
+rss = wp_target.rss_url
+if rss
+code = get_http_status(rss)
+
+# Feedback
+puts info("Found an RSS Feed: #{rss} [HTTP #{code}]")
+
+# Print users from RSS feed
+wp_target.rss_authors(rss)
+
+spacer()
+end
+
+if wp_target.has_full_path_disclosure?
+puts warning("Full Path Disclosure (FPD) in '#{wp_target.full_path_disclosure_url}': #{wp_target.full_path_disclosure_data}")
+spacer()
 end
 
 if wp_target.upload_directory_listing_enabled?
 puts warning("Upload directory has directory listing enabled: #{wp_target.upload_dir_url}")
+spacer()
 end
 
 if wp_target.include_directory_listing_enabled?
 puts warning("Includes directory has directory listing enabled: #{wp_target.includes_dir_url}")
+spacer()
 end
 
 enum_options = {
@@ -267,6 +361,7 @@ def main
|
|||||||
exclude_content: wpscan_options.exclude_content_based
|
exclude_content: wpscan_options.exclude_content_based
|
||||||
}
|
}
|
||||||
|
|
||||||
|
puts info('Enumerating WordPress version ...')
|
||||||
if (wp_version = wp_target.version(WP_VERSIONS_FILE))
|
if (wp_version = wp_target.version(WP_VERSIONS_FILE))
|
||||||
if wp_target.has_readme? && VersionCompare::lesser?(wp_version.identifier, '4.7')
|
if wp_target.has_readme? && VersionCompare::lesser?(wp_version.identifier, '4.7')
|
||||||
puts warning("The WordPress '#{wp_target.readme_url}' file exists exposing a version number")
|
puts warning("The WordPress '#{wp_target.readme_url}' file exists exposing a version number")
|
||||||
@@ -277,6 +372,7 @@ def main
|
|||||||
puts
|
puts
|
||||||
puts notice('WordPress version can not be detected')
|
puts notice('WordPress version can not be detected')
|
||||||
end
|
end
|
||||||
|
spacer()
|
||||||
|
|
||||||
if wp_theme = wp_target.theme
|
if wp_theme = wp_target.theme
|
||||||
puts
|
puts
|
||||||
@@ -295,7 +391,7 @@ def main
|
|||||||
parent.output(wpscan_options.verbose)
|
parent.output(wpscan_options.verbose)
|
||||||
wp_theme = parent
|
wp_theme = parent
|
||||||
end
|
end
|
||||||
|
spacer()
|
||||||
end
|
end
|
||||||
|
|
||||||
if wpscan_options.enumerate_plugins == nil and wpscan_options.enumerate_only_vulnerable_plugins == nil
|
if wpscan_options.enumerate_plugins == nil and wpscan_options.enumerate_only_vulnerable_plugins == nil
|
||||||
@@ -304,15 +400,13 @@ def main
|
|||||||
|
|
||||||
wp_plugins = WpPlugins.passive_detection(wp_target)
|
wp_plugins = WpPlugins.passive_detection(wp_target)
|
||||||
if !wp_plugins.empty?
|
if !wp_plugins.empty?
|
||||||
if wp_plugins.size == 1
|
grammar = grammar_s(wp_plugins.size)
|
||||||
puts " | #{wp_plugins.size} plugin found:"
|
puts " | #{wp_plugins.size} plugin#{grammar} found:"
|
||||||
else
|
|
||||||
puts " | #{wp_plugins.size} plugins found:"
|
|
||||||
end
|
|
||||||
wp_plugins.output(wpscan_options.verbose)
|
wp_plugins.output(wpscan_options.verbose)
|
||||||
else
|
else
|
||||||
puts info('No plugins found')
|
puts info('No plugins found passively')
|
||||||
end
|
end
|
||||||
|
spacer()
|
||||||
end
|
end
|
||||||
|
|
||||||
# Enumerate the installed plugins
|
# Enumerate the installed plugins
|
||||||
@@ -343,12 +437,14 @@ def main
|
|||||||
|
|
||||||
puts
|
puts
|
||||||
if !wp_plugins.empty?
|
if !wp_plugins.empty?
|
||||||
puts info("We found #{wp_plugins.size} plugins:")
|
grammar = grammar_s(wp_plugins.size)
|
||||||
|
puts info("We found #{wp_plugins.size} plugin#{grammar}:")
|
||||||
|
|
||||||
wp_plugins.output(wpscan_options.verbose)
|
wp_plugins.output(wpscan_options.verbose)
|
||||||
else
|
else
|
||||||
puts info('No plugins found')
|
puts info('No plugins found')
|
||||||
end
|
end
|
||||||
|
spacer()
|
||||||
end
|
end
|
||||||
|
|
||||||
# Enumerate installed themes
|
# Enumerate installed themes
|
||||||
@@ -378,12 +474,14 @@ def main
|
|||||||
)
|
)
|
||||||
puts
|
puts
|
||||||
if !wp_themes.empty?
|
if !wp_themes.empty?
|
||||||
puts info("We found #{wp_themes.size} themes:")
|
grammar = grammar_s(wp_themes.size)
|
||||||
|
puts info("We found #{wp_themes.size} theme#{grammar}:")
|
||||||
|
|
||||||
wp_themes.output(wpscan_options.verbose)
|
wp_themes.output(wpscan_options.verbose)
|
||||||
else
|
else
|
||||||
puts info('No themes found')
|
puts info('No themes found')
|
||||||
end
|
end
|
||||||
|
spacer()
|
||||||
end
|
end
|
||||||
|
|
||||||
if wpscan_options.enumerate_timthumbs
|
if wpscan_options.enumerate_timthumbs
|
||||||
@@ -393,18 +491,20 @@ def main
|
|||||||
|
|
||||||
wp_timthumbs = WpTimthumbs.aggressive_detection(wp_target,
|
wp_timthumbs = WpTimthumbs.aggressive_detection(wp_target,
|
||||||
enum_options.merge(
|
enum_options.merge(
|
||||||
file: DATA_DIR + '/timthumbs.txt',
|
file: TIMTHUMBS_FILE,
|
||||||
theme_name: wp_theme ? wp_theme.name : nil
|
theme_name: wp_theme ? wp_theme.name : nil
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
puts
|
puts
|
||||||
if !wp_timthumbs.empty?
|
if !wp_timthumbs.empty?
|
||||||
puts info("We found #{wp_timthumbs.size} timthumb file/s:")
|
grammar = grammar_s(wp_timthumbs.size)
|
||||||
|
puts info("We found #{wp_timthumbs.size} timthumb file#{grammar}:")
|
||||||
|
|
||||||
wp_timthumbs.output(wpscan_options.verbose)
|
wp_timthumbs.output(wpscan_options.verbose)
|
||||||
else
|
else
|
||||||
puts info('No timthumb files found')
|
puts info('No timthumb files found')
|
||||||
end
|
end
|
||||||
|
spacer()
|
||||||
end
|
end
|
||||||
|
|
||||||
# If we haven't been supplied a username/usernames list, enumerate them...
|
# If we haven't been supplied a username/usernames list, enumerate them...
|
||||||
@@ -432,7 +532,8 @@ def main
|
|||||||
exit(1)
|
exit(1)
|
||||||
end
|
end
|
||||||
else
|
else
|
||||||
puts info("Identified the following #{wp_users.size} user/s:")
|
grammar = grammar_s(wp_users.size)
|
||||||
|
puts info("We identified the following #{wp_users.size} user#{grammar}:")
|
||||||
wp_users.output(margin_left: ' ' * 4)
|
wp_users.output(margin_left: ' ' * 4)
|
||||||
if wp_users[0].login == "admin"
|
if wp_users[0].login == "admin"
|
||||||
puts warning("Default first WordPress username 'admin' is still used")
|
puts warning("Default first WordPress username 'admin' is still used")
|
||||||
@@ -442,10 +543,12 @@ def main
|
|||||||
else
|
else
|
||||||
wp_users = WpUsers.new
|
wp_users = WpUsers.new
|
||||||
|
|
||||||
|
# Username file?
|
||||||
if wpscan_options.usernames
|
if wpscan_options.usernames
|
||||||
File.open(wpscan_options.usernames).each do |username|
|
File.open(wpscan_options.usernames).each do |username|
|
||||||
wp_users << WpUser.new(wp_target.uri, login: username.chomp)
|
wp_users << WpUser.new(wp_target.uri, login: username.chomp)
|
||||||
end
|
end
|
||||||
|
# Single username?
|
||||||
else
|
else
|
||||||
wp_users << WpUser.new(wp_target.uri, login: wpscan_options.username)
|
wp_users << WpUser.new(wp_target.uri, login: wpscan_options.username)
|
||||||
end
|
end
|
||||||
@@ -455,7 +558,6 @@ def main
|
|||||||
bruteforce = true
|
bruteforce = true
|
||||||
if wpscan_options.wordlist
|
if wpscan_options.wordlist
|
||||||
if wp_target.has_login_protection?
|
if wp_target.has_login_protection?
|
||||||
|
|
||||||
protection_plugin = wp_target.login_protection_plugin()
|
protection_plugin = wp_target.login_protection_plugin()
|
||||||
|
|
||||||
puts
|
puts
|
||||||
@@ -481,6 +583,7 @@ def main
|
|||||||
else
|
else
|
||||||
puts critical('Brute forcing aborted')
|
puts critical('Brute forcing aborted')
|
||||||
end
|
end
|
||||||
|
spacer()
|
||||||
end
|
end
|
||||||
|
|
||||||
stop_time = Time.now
|
stop_time = Time.now
|
||||||
@@ -489,9 +592,9 @@ def main
|
|||||||
|
|
||||||
puts
|
puts
|
||||||
puts info("Finished: #{stop_time.asctime}")
|
puts info("Finished: #{stop_time.asctime}")
|
||||||
puts info("Requests Done: #{@total_requests_done}")
|
|
||||||
puts info("Memory used: #{used_memory.bytes_to_human}") unless windows?
|
|
||||||
puts info("Elapsed time: #{Time.at(elapsed).utc.strftime('%H:%M:%S')}")
|
puts info("Elapsed time: #{Time.at(elapsed).utc.strftime('%H:%M:%S')}")
|
||||||
|
puts info("Requests made: #{@total_requests_done}")
|
||||||
|
puts info("Memory used: #{used_memory.bytes_to_human}") unless windows?
|
||||||
|
|
||||||
# do nothing on interrupt
|
# do nothing on interrupt
|
||||||
rescue Interrupt
|
rescue Interrupt
|
||||||
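The pluralised messages introduced above (for plugins, themes, timthumb files and users) all rely on a grammar_s helper that is defined outside these hunks. A minimal sketch of what such a helper could look like, assuming it only needs to return an "s" suffix for counts other than one:

# Hypothetical sketch of the grammar_s pluralisation helper used above;
# the real implementation lives elsewhere in the codebase and may differ.
def grammar_s(count)
  count == 1 ? '' : 's'
end

# e.g. "We found #{size} plugin#{grammar_s(size)}:" prints
# "We found 1 plugin:" or "We found 3 plugins:"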