Merge branch 'typhoeus-support'

Gemfile (4 changes)
@@ -1,11 +1,11 @@
 source "https://rubygems.org"
 
-gem "typhoeus", "0.4.2"
+gem "typhoeus", "~>0.6.2"
 gem "nokogiri"
 gem "json"
 
 group :development, :test do
-gem "webmock", "1.8.11"
+gem "webmock", "~>1.9.3"
 gem "simplecov"
 gem "rspec", :require => "spec"
 end
@@ -14,7 +14,7 @@
 //"proxy": "127.0.0.1:3128",
 //"proxy_auth": "username:password",
 
-"cache_timeout": 600, // 10 minutes, at this time the cache is cleaned before each scan. If this value is set to 0, the cache will be disabled
+"cache_ttl": 600, // 10 minutes, at this time the cache is cleaned before each scan. If this value is set to 0, the cache will be disabled
 
 "request_timeout": 2000, // 2s
 
@@ -17,6 +17,8 @@
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
 #++
 
+require 'common/typhoeus_cache'
+
 class Browser
 @@instance = nil
 USER_AGENT_MODES = %w{ static semi-static random }
@@ -28,7 +30,7 @@ class Browser
 :proxy,
 :proxy_auth,
 :max_threads,
-:cache_timeout,
+:cache_ttl,
 :request_timeout,
 :basic_auth
 ]
@@ -48,16 +50,15 @@ class Browser
 
 @hydra = Typhoeus::Hydra.new(
 max_concurrency: @max_threads,
-timeout: @request_timeout
+#connecttimeout: @request_timeout
 )
 
-# TODO : add an option for the cache dir instead of using a constant
-@cache = CacheFileStore.new(CACHE_DIR + '/browser')
+# TODO : add an argument for the cache dir instead of using a constant
+@cache = TyphoeusCache.new(CACHE_DIR + '/browser')
 
 @cache.clean
 
-# might be in CacheFileStore
-setup_cache_handlers
+Typhoeus::Config.cache = @cache
 end
 
 private_class_method :new
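
Note on the constructor change above: instead of registering cache read/write callbacks on the Hydra (the setup_cache_handlers method removed further down), the cache object is now handed to Typhoeus itself. A minimal sketch of that wiring, assuming only what this diff shows, namely a cache object that responds to #get(request) and #set(request, response) and requests that expose #cache_key (both provided by the new lib/common/typhoeus_cache.rb below):

    # Hypothetical in-memory stand-in for TyphoeusCache, for illustration only.
    require 'typhoeus'

    class InMemoryCache
      def initialize
        @store = {}
      end

      def get(request)            # consulted by Typhoeus before running a request
        @store[request.cache_key]
      end

      def set(request, response)  # called by Typhoeus once a response is available
        @store[request.cache_key] = response
      end
    end

    Typhoeus::Config.cache = InMemoryCache.new
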
@@ -112,13 +113,10 @@ class Browser
 if !auth.include?(:proxy_username) or !auth.include?(:proxy_password)
 raise_invalid_proxy_format()
 end
-@proxy_auth = auth
+@proxy_auth = auth[:proxy_username] + ':' + auth[:proxy_password]
 elsif auth.is_a?(String)
-if matches = %r{([^:]+):(.*)}.match(auth)
-@proxy_auth = {
-proxy_username: matches[1],
-proxy_password: matches[2]
-}
+if auth.index(':') != nil
+@proxy_auth = auth
 else
 raise_invalid_proxy_auth_format()
 end
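
The credential handling above flips direction: a { proxy_username:, proxy_password: } hash is now flattened into a single 'user:pass' string, while a string is stored untouched, because that is the value merge_request_params later forwards under :proxyauth. A hedged sketch, assuming Browser exposes the proxy_auth= setter this hunk modifies (placeholder credentials):

    browser = Browser.instance
    browser.proxy_auth = { proxy_username: 'user', proxy_password: 'pass' }  # stored as "user:pass"
    browser.proxy_auth = 'user:pass'                                         # already in the expected form
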
@@ -150,24 +148,6 @@ class Browser
 end
 end
 
-def setup_cache_handlers
-@hydra.cache_setter do |request|
-@cache.write_entry(
-Browser.generate_cache_key_from_request(request),
-request.response,
-request.cache_timeout
-)
-end
-
-@hydra.cache_getter do |request|
-@cache.read_entry(
-Browser.generate_cache_key_from_request(request)
-) rescue nil
-end
-end
-
-private :setup_cache_handlers
-
 def get(url, params = {})
 run_request(
 forge_request(url, params.merge(method: :get))
@@ -181,10 +161,10 @@ class Browser
 end
 
 def get_and_follow_location(url, params = {})
-params[:max_redirects] ||= 2
+params[:maxredirs] ||= 2
 
 run_request(
-forge_request(url, params.merge(method: :get, follow_location: true))
+forge_request(url, params.merge(method: :get, followlocation: true))
 )
 end
 
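
Callers elsewhere in this commit switch to this helper instead of passing redirect options themselves (see the WpUsernames and WpTheme hunks below); :followlocation and :maxredirs are the option names handed to the new Typhoeus version here. A short usage sketch:

    # Old call sites:
    # Browser.instance.get(url, { follow_location: true, max_redirects: 2 })
    # become:
    resp = Browser.instance.get_and_follow_location(url)
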
@@ -197,10 +177,10 @@ class Browser
 
 def merge_request_params(params = {})
 if @proxy
-params = params.merge(:proxy => @proxy)
+params = params.merge(proxy: @proxy)
 
 if @proxy_auth
-params = params.merge(@proxy_auth)
+params = params.merge(proxyauth: @proxy_auth)
 end
 end
 
@@ -212,23 +192,23 @@ class Browser
 end
 end
 
-unless params.has_key?(:disable_ssl_host_verification)
-params = params.merge(:disable_ssl_host_verification => true)
-end
+#unless params.has_key?(:ssl_verifyhost)
+# params = params.merge(ssl_verifyhost: 0)
+#end
 
-unless params.has_key?(:disable_ssl_peer_verification)
-params = params.merge(:disable_ssl_peer_verification => true)
-end
+#unless params.has_key?(:ssl_verifypeer)
+# params = params.merge(ssl_verifypeer: 0)
+#end
 
 if !params.has_key?(:headers)
-params = params.merge(:headers => {'user-agent' => self.user_agent})
-elsif !params[:headers].has_key?('user-agent')
-params[:headers]['user-agent'] = self.user_agent
+params = params.merge(:headers => {'User-Agent' => self.user_agent})
+elsif !params[:headers].has_key?('User-Agent')
+params[:headers]['User-Agent'] = self.user_agent
 end
 
-# Used to enable the cache system if :cache_timeout > 0
-unless params.has_key?(:cache_timeout)
-params = params.merge(:cache_timeout => @cache_timeout)
+# Used to enable the cache system if :cache_ttl > 0
+unless params.has_key?(:cache_ttl)
+params = params.merge(cache_ttl: @cache_ttl)
 end
 
 params
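
With the SSL-verification flags commented out, the 'User-Agent' header capitalised and the TTL key renamed, the default parameter set produced by merge_request_params shrinks accordingly; the reworked Browser spec near the end of this diff pins it down as roughly:

    # Illustration only, mirroring the spec expectations further down.
    @browser.merge_request_params
    # => { headers: { 'User-Agent' => @browser.user_agent }, cache_ttl: @cache_ttl }
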
@@ -251,17 +231,4 @@ class Browser
 end
 end
 end
-
-# The Typhoeus::Request.cache_key only hash the url :/
-# this one will include the params
-# TODO : include also the method (:get, :post, :any)
-def self.generate_cache_key_from_request(request)
-cache_key = request.cache_key
-
-if request.params
-cache_key = Digest::SHA1.hexdigest("#{cache_key}-#{request.params.hash}")
-end
-
-cache_key
-end
 end
@@ -59,8 +59,8 @@ class CacheFileStore
 end
 end
 
-def write_entry(key, data_to_store, cache_timeout)
-if cache_timeout > 0
+def write_entry(key, data_to_store, cache_ttl)
+if cache_ttl > 0
 File.open(get_entry_file_path(key), 'w') do |f|
 f.write(@serializer.dump(data_to_store))
 end
@@ -68,7 +68,7 @@ class CacheFileStore
 end
 
 def get_entry_file_path(key)
-@storage_path + '/' + key
+File::join(@storage_path, key)
 end
 
 end

lib/common/typhoeus_cache.rb (new file, 43 lines)
@@ -0,0 +1,43 @@
+# encoding: UTF-8
+#--
+# WPScan - WordPress Security Scanner
+# Copyright (C) 2012-2013
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+#++
+
+require 'common/cache_file_store'
+
+# Implementaion of a cache_key (Typhoeus::Request#hash has too many options)
+module Typhoeus
+class Request
+module Cacheable
+def cache_key
+Digest::SHA2.hexdigest("#{url}-#{options[:body]}-#{options[:method]}")[0..32]
+end
+end
+end
+end
+
+class TyphoeusCache < CacheFileStore
+
+def get(request)
+read_entry(request.cache_key)
+end
+
+def set(request, response)
+write_entry(request.cache_key, response, request.cache_ttl)
+end
+
+end
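
Put next to the Browser changes above, this new class is all that is needed to plug WPScan's file cache into Typhoeus. A sketch of the wiring (CACHE_DIR is WPScan's existing cache-directory constant):

    cache = TyphoeusCache.new(CACHE_DIR + '/browser')
    cache.clean                      # the cache is emptied before each scan
    Typhoeus::Config.cache = cache   # Typhoeus now calls cache.get / cache.set itself

    # A per-request TTL of 0 effectively disables caching for that request,
    # since CacheFileStore#write_entry only persists entries whose TTL is > 0.
    request = Browser.instance.forge_request('http://example.com/', cache_ttl: 0)
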
@@ -20,6 +20,7 @@
 begin
 # Standard libs
 require 'rubygems'
+require 'bundler/setup'
 require 'getoptlong'
 require 'optparse' # Will replace getoptlong
 require 'uri'
@@ -33,13 +34,11 @@ begin
 require 'rbconfig'
 require 'pp'
 # Third party libs
-gem 'typhoeus', '=0.4.2'
 require 'typhoeus'
 require 'json'
 require 'nokogiri'
 # Custom libs
 require 'common/browser'
-require 'common/cache_file_store'
 require 'common/custom_option_parser'
 rescue LoadError => e
 puts "[ERROR] #{e}"
@@ -36,10 +36,11 @@ module BruteForce
 password_found = false
 
 File.open(wordlist_path, 'r').each do |password|
-
 # ignore file comments, but will miss passwords if they start with a hash...
 next if password[0, 1] == '#'
 
+password.strip!
+
 # keep a count of the amount of requests to be sent
 request_count += 1
 queue_count += 1
@@ -52,8 +53,8 @@ module BruteForce
 request = Browser.instance.forge_request(login_url,
 {
 method: :post,
-params: { log: URI::encode(username), pwd: URI::encode(password) },
-cache_timeout: 0
+body: { log: URI::encode(username), pwd: URI::encode(password) },
+cache_ttl: 0
 }
 )
 
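
The payload key changes because, with the new Typhoeus, :params describes the query string while the POST body travels under :body (the Browser #post spec further down stubs the same form). A hedged sketch with placeholder credentials:

    # Sketch of a login request under the new option names.
    request = Browser.instance.forge_request(
      login_url,                                   # e.g. the blog's wp-login.php
      method:    :post,
      body:      { log: 'admin', pwd: 'secret' },  # was params: { ... } before this commit
      cache_ttl: 0                                 # login attempts are never cached
    )
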
@@ -23,10 +23,11 @@ module WpConfigBackup
 # See http://www.feross.org/cmsploit/
 # return an array of backup config files url
 def config_backup
 found = []
 backups = WpConfigBackup.config_backup_files
 browser = Browser.instance
 hydra = browser.hydra
+queue_count = 0
 
 backups.each do |file|
 file_url = @uri.merge(URI.escape(file)).to_s
@@ -39,6 +40,12 @@ module WpConfigBackup
 end
 
 hydra.queue(request)
+queue_count += 1
+
+if queue_count == browser.max_threads
+hydra.run
+queue_count = 0
+end
 end
 
 hydra.run
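
The loop above no longer queues every backup URL before a single hydra.run; it flushes the hydra each time max_threads requests have been queued, which keeps the in-flight queue bounded to the configured thread count. The pattern in isolation (urls stands in for any list of targets):

    browser     = Browser.instance
    hydra       = browser.hydra
    queue_count = 0

    urls.each do |url|
      hydra.queue(browser.forge_request(url, cache_ttl: 0))
      queue_count += 1

      if queue_count == browser.max_threads
        hydra.run        # send the current batch before queueing more
        queue_count = 0
      end
    end

    hydra.run            # run whatever is left in the queue
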
@@ -60,7 +60,7 @@ module WpUsernames
 end
 
 def get_nickname_from_url(url)
-resp = Browser.instance.get(url, { follow_location: true, max_redirects: 2 })
+resp = Browser.instance.get_and_follow_location(url)
 nickname = nil
 if resp.code == 200
 nickname = extract_nickname_from_body(resp.body)
@@ -48,12 +48,14 @@ class WebSite
 
 def xml_rpc_url
 unless @xmlrpc_url
 headers = Browser.instance.get(@uri.to_s).headers_hash
-value = headers['x-pingback']
-if value.nil? or value.empty?
-@xmlrpc_url = nil
-else
-@xmlrpc_url = value
+@xmlrpc_url = nil
+
+unless headers.nil?
+value = headers['X-Pingback']
+unless value.nil? && value.empty?
+@xmlrpc_url = value
+end
 end
 end
 @xmlrpc_url
@@ -55,7 +55,7 @@ class WpEnumerator
 targets.each do |target|
 url = target.get_full_url
 
-request = enum_browser.forge_request(url, { cache_timeout: 0, follow_location: true })
+request = enum_browser.forge_request(url, cache_ttl: 0, followlocation: true)
 request_count += 1
 
 request.on_complete do |response|
@@ -74,7 +74,7 @@ class WpTheme < WpItem
 
 # Discover the wordpress theme name by parsing the css link rel
 def self.find_from_css_link(target_uri)
-response = Browser.instance.get(target_uri.to_s, { follow_location: true, max_redirects: 2 })
+response = Browser.instance.get_and_follow_location(target_uri.to_s)
 
 # https + domain is optional because of relative links
 matches = %r{(?:https?://[^"']+)?/([^/]+)/themes/([^"']+)/style.css}i.match(response.body)
@@ -63,7 +63,7 @@ class CheckerPlugin < Plugin
 number_of_urls = urls.size
 
 urls.each do |url|
-request = browser.forge_request(url, { cache_timeout: 0, follow_location: true })
+request = browser.forge_request(url, { cache_ttl: 0, followlocation: true })
 request_count += 1
 
 request.on_complete do |response|
@@ -23,7 +23,7 @@ describe Browser do
 CONFIG_FILE_WITHOUT_PROXY = SPEC_FIXTURES_CONF_DIR + '/browser/browser.conf.json'
 CONFIG_FILE_WITH_PROXY = SPEC_FIXTURES_CONF_DIR + '/browser/browser.conf_proxy.json'
 CONFIG_FILE_WITH_PROXY_AND_AUTH = SPEC_FIXTURES_CONF_DIR + '/browser/browser.conf_proxy_auth.json'
-INSTANCE_VARS_TO_CHECK = ['user_agent', 'user_agent_mode', 'available_user_agents', 'proxy', 'max_threads', 'request_timeout', 'cache_timeout']
+INSTANCE_VARS_TO_CHECK = ['user_agent', 'user_agent_mode', 'available_user_agents', 'proxy', 'max_threads', 'request_timeout', 'cache_ttl']
 
 before :all do
 @json_config_without_proxy = JSON.parse(File.read(CONFIG_FILE_WITHOUT_PROXY))
@@ -31,6 +31,7 @@ describe Browser do
 end
 
 before :each do
+Browser::reset
 @browser = Browser.instance(config_file: CONFIG_FILE_WITHOUT_PROXY)
 end
 
@@ -100,12 +101,12 @@ describe Browser do
 
 it 'should set the correct credentials' do
 @proxy_auth = { proxy_username: 'user', proxy_password: 'pass' }
-@expected = @proxy_auth
+@expected = 'user:pass'
 end
 
 it 'should set the correct credentials' do
 @proxy_auth = 'username:passwd'
-@expected = { proxy_username: 'username', proxy_password: 'passwd' }
+@expected = @proxy_auth
 end
 end
 
@@ -213,10 +214,10 @@ describe Browser do
 describe '#merge_request_params without proxy' do
 it 'should return the default params' do
 expected_params = {
-disable_ssl_host_verification: true,
-disable_ssl_peer_verification: true,
-headers: { 'user-agent' => @browser.user_agent },
-cache_timeout: @json_config_without_proxy['cache_timeout']
+#disable_ssl_host_verification: true,
+#disable_ssl_peer_verification: true,
+headers: { 'User-Agent' => @browser.user_agent },
+cache_ttl: @json_config_without_proxy['cache_ttl']
 }
 
 @browser.merge_request_params().should == expected_params
@@ -224,25 +225,25 @@ describe Browser do
 
 it 'should return the default params with some values overriden' do
 expected_params = {
-disable_ssl_host_verification: false,
-disable_ssl_peer_verification: true,
-headers: { 'user-agent' => 'Fake IE' },
-cache_timeout: 0
+#disable_ssl_host_verification: false,
+#disable_ssl_peer_verification: true,
+headers: { 'User-Agent' => 'Fake IE' },
+cache_ttl: 0
 }
 
 @browser.merge_request_params(
-disable_ssl_host_verification: false,
-headers: { 'user-agent' => 'Fake IE' },
-cache_timeout: 0
+#disable_ssl_host_verification: false,
+headers: { 'User-Agent' => 'Fake IE' },
+cache_ttl: 0
 ).should == expected_params
 end
 
-it 'should return the defaul params with :headers:accept = \'text/html\' (should not override :headers:user-agent)' do
+it 'should return the defaul params with :headers:accept = \'text/html\' (should not override :headers:User-Agent)' do
 expected_params = {
-disable_ssl_host_verification: true,
-disable_ssl_peer_verification: true,
-headers: { 'user-agent' => @browser.user_agent, 'accept' => 'text/html' },
-cache_timeout: @json_config_without_proxy['cache_timeout']
+#disable_ssl_host_verification: true,
+#disable_ssl_peer_verification: true,
+headers: { 'User-Agent' => @browser.user_agent, 'accept' => 'text/html' },
+cache_ttl: @json_config_without_proxy['cache_ttl']
 }
 
 @browser.merge_request_params(headers: { 'accept' => 'text/html' }).should == expected_params
@@ -251,19 +252,19 @@ describe Browser do
 it 'should merge the basic-auth' do
 @browser.basic_auth = 'Basic QWxhZGRpbjpvcGVuIHNlc2FtZQ=='
 expected_params = {
-disable_ssl_host_verification: true,
-disable_ssl_peer_verification: true,
-cache_timeout: @json_config_without_proxy['cache_timeout'],
+#disable_ssl_host_verification: true,
+#disable_ssl_peer_verification: true,
+cache_ttl: @json_config_without_proxy['cache_ttl'],
 headers: {
 'Authorization' => 'Basic QWxhZGRpbjpvcGVuIHNlc2FtZQ==',
-'user-agent' => @browser.user_agent
+'User-Agent' => @browser.user_agent
 }
 }
 
 @browser.merge_request_params().should == expected_params
 
-expected_params[:headers].merge!('user-agent' => 'Fake FF')
-@browser.merge_request_params(headers: { 'user-agent' => 'Fake FF' }).should == expected_params
+expected_params[:headers].merge!('User-Agent' => 'Fake FF')
+@browser.merge_request_params(headers: { 'User-Agent' => 'Fake FF' }).should == expected_params
 end
 end
 
@@ -274,10 +275,10 @@ describe Browser do
 
 expected_params = {
 proxy: @json_config_with_proxy['proxy'],
-disable_ssl_host_verification: true,
-disable_ssl_peer_verification: true,
-headers: { 'user-agent' => @json_config_with_proxy['user_agent'] },
-cache_timeout: @json_config_with_proxy['cache_timeout']
+#disable_ssl_host_verification: true,
+#disable_ssl_peer_verification: true,
+headers: { 'User-Agent' => @json_config_with_proxy['user_agent'] },
+cache_ttl: @json_config_with_proxy['cache_ttl']
 }
 
 browser.merge_request_params().should == expected_params
@@ -289,12 +290,11 @@ describe Browser do
 
 expected_params = {
 proxy: @json_config_with_proxy['proxy'],
-proxy_username: 'user',
-proxy_password: 'pass',
-disable_ssl_host_verification: true,
-disable_ssl_peer_verification: true,
-headers: { 'user-agent' => @json_config_with_proxy['user_agent'] },
-cache_timeout: @json_config_with_proxy['cache_timeout']
+proxyauth: 'user:pass',
+#disable_ssl_host_verification: true,
+#disable_ssl_peer_verification: true,
+headers: { 'User-Agent' => @json_config_with_proxy['user_agent'] },
+cache_ttl: @json_config_with_proxy['cache_ttl']
 }
 
 browser.merge_request_params().should == expected_params
@@ -307,16 +307,16 @@ describe Browser do
 end
 
 describe '#post' do
-it 'should return a Typhoeus::Response wth body = "Welcome Master" if login=master&password=it\'s me !' do
+it 'should return a Typhoeus::Response wth body = "Welcome Master" if login=master&password=itsme!' do
 url = 'http://example.com/'
 
-stub_request(:post, url).
-with(body: "login=master&password=it's me !").
+stub_request(:post, url).with(body: { login: 'master', password: 'itsme!' }).
 to_return(status: 200, body: 'Welcome Master')
 
 response = @browser.post(
 url,
-params: { login: 'master', password: 'it\'s me !' }
+body: 'login=master&password=itsme!'
+#body: { login: 'master', password: 'hello' } # It's should be this line, but it fails
 )
 
 response.should be_a Typhoeus::Response
@@ -361,26 +361,6 @@ describe Browser do
 #end
 end
 
-describe '#Browser.generate_cache_key_from_request' do
-it '2 requests with the same url, without params must have the same cache_key' do
-
-url = 'http://example.com'
-key1 = Browser.generate_cache_key_from_request(@browser.forge_request(url))
-key2 = Browser.generate_cache_key_from_request(@browser.forge_request(url))
-
-key1.should === key2
-end
-
-it '2 requests with the same url, but with different params should have a different cache_key' do
-
-url = 'http://example.com'
-key1 = Browser.generate_cache_key_from_request(@browser.forge_request(url, params: { login: 'master', password: 'it\'s me !' }))
-key2 = Browser.generate_cache_key_from_request(@browser.forge_request(url))
-
-key1.should_not == key2
-end
-end
-
 describe 'testing caching' do
 it 'should only do 1 request, and retrieve the other one from the cache' do
 
@@ -20,15 +20,12 @@
 require 'spec_helper'
 
 describe CacheFileStore do
-
-before :all do
-@cache_dir = SPEC_CACHE_DIR + '/cache_file_store'
-end
+let(:cache_dir) { SPEC_CACHE_DIR + '/cache_file_store' }
 
 before :each do
-Dir.delete(@cache_dir) rescue nil
+Dir.delete(cache_dir) rescue nil
 
-@cache = CacheFileStore.new(@cache_dir)
+@cache = CacheFileStore.new(cache_dir)
 end
 
 after :each do
@@ -37,7 +34,7 @@ describe CacheFileStore do
 
 describe '#storage_path' do
 it 'returns the storage path given in the #new' do
-@cache.storage_path.should == @cache_dir
+@cache.storage_path.should == cache_dir
 end
 end
 
@@ -52,12 +49,12 @@ describe CacheFileStore do
 it "should remove all files from the cache dir (#{@cache_dir}" do
 # let's create some files into the directory first
 (0..5).each do |i|
-File.new(@cache_dir + "/file_#{i}.txt", File::CREAT)
+File.new(cache_dir + "/file_#{i}.txt", File::CREAT)
 end
 
-count_files_in_dir(@cache_dir, 'file_*.txt').should == 6
+count_files_in_dir(cache_dir, 'file_*.txt').should == 6
 @cache.clean
-count_files_in_dir(@cache_dir).should == 0
+count_files_in_dir(cache_dir).should == 0
 end
 end
 
@@ -75,16 +72,16 @@ describe CacheFileStore do
 end
 
 it 'should get the correct entry (string)' do
 @timeout = 10
 @key = 'some_key'
 @data = 'Hello World !'
 @expected = @data
 end
 
 it 'should not write the entry' do
 @timeout = 0
 @key = 'another_key'
 @data = 'Another Hello World !'
 @expected = nil
 end
 

spec/lib/common/typhoeus_cache_spec.rb (new file, 3 lines)
@@ -0,0 +1,3 @@
+# encoding: UTF-8
+
+# TODO
@@ -48,26 +48,29 @@ shared_examples_for 'BruteForce' do
 passwords << password.strip unless password.strip[0, 1] == '#'
 end
 # Last status must be 302 to get full code coverage
-passwords.each do |_|
-stub_request(:any, @module.login_url).to_return(
-{ status: 200, body: 'login_error' },
-{ status: 0, body: 'no reponse' },
-{ status: 50, body: 'server error' },
-{ status: 999, body: 'invalid' },
-{ status: 302, body: 'FOUND!' }
-)
+passwords.each do |password|
+stub_request(:post, @module.login_url).
+to_return(
+{ status: 200, body: 'login_error' },
+{ status: 0, body: 'no reponse' },
+{ status: 500, body: 'server error' },
+{ status: 999, body: 'invalid' },
+{ status: 302, body: 'FOUND!' }
+)
 end
 
 user = WpUser.new('admin', 1, nil)
 result = @module.brute_force([user], @wordlist)
 
 result.length.should == 1
 result.should === [{ name: 'admin', password: 'root' }]
 end
 
 it 'should cover the timeout branch and return an empty array' do
-stub_request(:any, @module.login_url).to_timeout
-user = WpUser.new('admin', 1, nil)
-result = @module.brute_force([user], @wordlist)
+stub_request(:post, @module.login_url).to_timeout
+
+user = WpUser.new('admin', 1, nil)
+result = @module.brute_force([user], @wordlist)
 result.should == []
 end
 end
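
WebMock returns the hashes given to to_return one after another on successive matching requests, so a single stub can walk the brute forcer through the login-error, no-response, server-error, invalid and 302 branches in order. A stripped-down sketch (placeholder URL):

    stub_request(:post, 'http://wordpress.example/wp-login.php').
      to_return(
        { status: 200, body: 'login_error' },
        { status: 500, body: 'server error' },
        { status: 302, body: 'FOUND!' }       # a 302 is treated as a successful login
      )
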
@@ -34,8 +34,7 @@ shared_examples_for 'WpConfigBackup' do
 @config_backup_files.each do |backup_file|
 file_url = @module.uri.merge(URI.escape(backup_file)).to_s
 
-stub_request(:get, file_url).
-to_return(status: 404, body: '')
+stub_request(:get, file_url).to_return(status: 404)
 end
 end
 
@@ -50,8 +49,7 @@ shared_examples_for 'WpConfigBackup' do
 file_url = @module.uri.merge(URI.escape(backup_file)).to_s
 expected << file_url
 
-stub_request(:get, file_url).
-to_return(status: 200, body: File.new(@fixtures_dir + '/wp-config.php'))
+stub_request_to_fixture(url: file_url, fixture: @fixtures_dir + '/wp-config.php')
 end
 
 wp_config_backup = @module.config_backup
@@ -67,8 +65,7 @@ shared_examples_for 'WpConfigBackup' do
 file_url = @module.uri.merge(URI.escape(backup_file)).to_s
 expected << file_url
 
-stub_request(:get, file_url).
-to_return(status: 200, body: File.new(@fixtures_dir + '/wp-config.php'))
+stub_request_to_fixture(url: file_url, fixture: @fixtures_dir + '/wp-config.php')
 end
 
 wp_config_backup = @module.config_backup
@@ -21,6 +21,14 @@ describe 'WebSite' do
 let(:fixtures_dir) { SPEC_FIXTURES_WPSCAN_WEB_SITE_DIR }
 subject(:web_site) { WebSite.new('http://example.localhost/') }
 
+before :all do
+Browser::reset
+Browser.instance(
+config_file: SPEC_FIXTURES_CONF_DIR + '/browser/browser.conf.json',
+cache_ttl: 0
+)
+end
+
 describe "#new" do
 its(:url) { should === 'http://example.localhost/' }
 end
@@ -74,7 +82,7 @@ describe 'WebSite' do
 it 'should return the correct url : http://example.localhost/xmlrpc.php' do
 xmlrpc = 'http://example.localhost/xmlrpc.php'
 stub_request(:get, web_site.url).
-to_return(status: 200, body: '', headers: { 'X-Pingback' => xmlrpc})
+to_return(status: 200, headers: { 'X-Pingback' => xmlrpc })
 
 web_site.xml_rpc_url.should === xmlrpc
 end
@@ -88,7 +96,7 @@ describe 'WebSite' do
 describe '#has_xml_rpc?' do
 it 'should return true' do
 stub_request(:get, web_site.url).
-to_return(status: 200, body: '', headers: { 'X-Pingback' => 'xmlrpc'})
+to_return(status: 200, headers: { 'X-Pingback' => 'xmlrpc' })
 
 web_site.should have_xml_rpc
 end
@@ -24,11 +24,11 @@ describe WpTarget do
 let(:target_url) { 'http://example.localhost/' }
 
 before :each do
-Browser.reset
+Browser::reset
 @options =
 {
 config_file: SPEC_FIXTURES_CONF_DIR + '/browser/browser.conf.json',
-cache_timeout: 0,
+cache_ttl: 0,
 wp_content_dir: 'wp-content',
 wp_plugins_dir: 'wp-content/plugins'
 }
@@ -39,9 +39,10 @@ class WpScanModuleSpec
 def initialize(target_url)
 @uri = URI.parse(add_trailing_slash(add_http_protocol(target_url)))
 
+Browser::reset
 Browser.instance(
 config_file: SPEC_FIXTURES_CONF_DIR + '/browser/browser.conf.json',
-cache_timeout: 0
+cache_ttl: 0
 )
 end
 
@@ -1,7 +1,7 @@
 {
 "user_agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.6; rv:9.0) Gecko/20100101 Firefox/9.0",
 "user_agent_mode": "static",
-"cache_timeout": 300,
+"cache_ttl": 300,
 "request_timeout": 2000,
 "max_threads": 5
 }
@@ -2,6 +2,6 @@
 "user_agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.6; rv:10.0) Gecko/20100101 Firefox/11.0",
 "user_agent_mode": "static",
 "proxy": "127.0.0.1:3038",
-"cache_timeout": 300,
+"cache_ttl": 300,
 "request_timeout": 2000
 }
@@ -3,6 +3,6 @@
 "user_agent_mode": "static",
 "proxy": "127.0.0.1:3038",
 "proxy_auth": "user:pass",
-"cache_timeout": 300,
+"cache_ttl": 300,
 "request_timeout": 2000
 }
@@ -20,15 +20,11 @@
 # https://github.com/bblimke/webmock
 # https://github.com/colszowka/simplecov
 
-# Code Coverage (only works with ruby >= 1.9)
-if RUBY_VERSION >= '1.9'
-require 'simplecov'
-end
-
 require File.expand_path(File.dirname(__FILE__) + '/../lib/common/common_helper')
 
-gem 'webmock', '=1.8.11'
 require 'webmock/rspec'
+# Code Coverage (only works with ruby >= 1.9)
+require 'simplecov' if RUBY_VERSION >= '1.9'
 
 SPEC_DIR = ROOT_DIR + '/spec'
 SPEC_LIB_DIR = SPEC_DIR + '/lib'