Merge branch 'typhoeus-support'

This commit is contained in:
erwanlr
2013-03-04 11:29:11 +01:00
26 changed files with 197 additions and 192 deletions

1
.rspec
View File

@@ -1 +1,2 @@
--color
--fail-fast

View File

@@ -1,11 +1,11 @@
source "https://rubygems.org"
gem "typhoeus", "0.4.2"
gem "typhoeus", "~>0.6.2"
gem "nokogiri"
gem "json"
group :development, :test do
gem "webmock", "1.8.11"
gem "webmock", "~>1.9.3"
gem "simplecov"
gem "rspec", :require => "spec"
end

View File

@@ -14,7 +14,7 @@
//"proxy": "127.0.0.1:3128",
//"proxy_auth": "username:password",
"cache_timeout": 600, // 10 minutes, at this time the cache is cleaned before each scan. If this value is set to 0, the cache will be disabled
"cache_ttl": 600, // 10 minutes, at this time the cache is cleaned before each scan. If this value is set to 0, the cache will be disabled
"request_timeout": 2000, // 2s

View File

@@ -17,6 +17,8 @@
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#++
require 'common/typhoeus_cache'
class Browser
@@instance = nil
USER_AGENT_MODES = %w{ static semi-static random }
@@ -28,7 +30,7 @@ class Browser
:proxy,
:proxy_auth,
:max_threads,
:cache_timeout,
:cache_ttl,
:request_timeout,
:basic_auth
]
@@ -48,16 +50,15 @@ class Browser
@hydra = Typhoeus::Hydra.new(
max_concurrency: @max_threads,
timeout: @request_timeout
#connecttimeout: @request_timeout
)
# TODO : add an option for the cache dir instead of using a constant
@cache = CacheFileStore.new(CACHE_DIR + '/browser')
# TODO : add an argument for the cache dir instead of using a constant
@cache = TyphoeusCache.new(CACHE_DIR + '/browser')
@cache.clean
# might be in CacheFileStore
setup_cache_handlers
Typhoeus::Config.cache = @cache
end
private_class_method :new
@@ -112,13 +113,10 @@ class Browser
if !auth.include?(:proxy_username) or !auth.include?(:proxy_password)
raise_invalid_proxy_format()
end
@proxy_auth = auth
@proxy_auth = auth[:proxy_username] + ':' + auth[:proxy_password]
elsif auth.is_a?(String)
if matches = %r{([^:]+):(.*)}.match(auth)
@proxy_auth = {
proxy_username: matches[1],
proxy_password: matches[2]
}
if auth.index(':') != nil
@proxy_auth = auth
else
raise_invalid_proxy_auth_format()
end
@@ -150,24 +148,6 @@ class Browser
end
end
def setup_cache_handlers
@hydra.cache_setter do |request|
@cache.write_entry(
Browser.generate_cache_key_from_request(request),
request.response,
request.cache_timeout
)
end
@hydra.cache_getter do |request|
@cache.read_entry(
Browser.generate_cache_key_from_request(request)
) rescue nil
end
end
private :setup_cache_handlers
def get(url, params = {})
run_request(
forge_request(url, params.merge(method: :get))
@@ -181,10 +161,10 @@ class Browser
end
def get_and_follow_location(url, params = {})
params[:max_redirects] ||= 2
params[:maxredirs] ||= 2
run_request(
forge_request(url, params.merge(method: :get, follow_location: true))
forge_request(url, params.merge(method: :get, followlocation: true))
)
end
@@ -197,10 +177,10 @@ class Browser
def merge_request_params(params = {})
if @proxy
params = params.merge(:proxy => @proxy)
params = params.merge(proxy: @proxy)
if @proxy_auth
params = params.merge(@proxy_auth)
params = params.merge(proxyauth: @proxy_auth)
end
end
@@ -212,23 +192,23 @@ class Browser
end
end
unless params.has_key?(:disable_ssl_host_verification)
params = params.merge(:disable_ssl_host_verification => true)
end
#unless params.has_key?(:ssl_verifyhost)
# params = params.merge(ssl_verifyhost: 0)
#end
unless params.has_key?(:disable_ssl_peer_verification)
params = params.merge(:disable_ssl_peer_verification => true)
end
#unless params.has_key?(:ssl_verifypeer)
# params = params.merge(ssl_verifypeer: 0)
#end
if !params.has_key?(:headers)
params = params.merge(:headers => {'user-agent' => self.user_agent})
elsif !params[:headers].has_key?('user-agent')
params[:headers]['user-agent'] = self.user_agent
params = params.merge(:headers => {'User-Agent' => self.user_agent})
elsif !params[:headers].has_key?('User-Agent')
params[:headers]['User-Agent'] = self.user_agent
end
# Used to enable the cache system if :cache_timeout > 0
unless params.has_key?(:cache_timeout)
params = params.merge(:cache_timeout => @cache_timeout)
# Used to enable the cache system if :cache_ttl > 0
unless params.has_key?(:cache_ttl)
params = params.merge(cache_ttl: @cache_ttl)
end
params
@@ -251,17 +231,4 @@ class Browser
end
end
end
# The Typhoeus::Request.cache_key only hash the url :/
# this one will include the params
# TODO : include also the method (:get, :post, :any)
def self.generate_cache_key_from_request(request)
cache_key = request.cache_key
if request.params
cache_key = Digest::SHA1.hexdigest("#{cache_key}-#{request.params.hash}")
end
cache_key
end
end

View File

@@ -59,8 +59,8 @@ class CacheFileStore
end
end
def write_entry(key, data_to_store, cache_timeout)
if cache_timeout > 0
def write_entry(key, data_to_store, cache_ttl)
if cache_ttl > 0
File.open(get_entry_file_path(key), 'w') do |f|
f.write(@serializer.dump(data_to_store))
end
@@ -68,7 +68,7 @@ class CacheFileStore
end
def get_entry_file_path(key)
@storage_path + '/' + key
File::join(@storage_path, key)
end
end

View File

@@ -0,0 +1,43 @@
# encoding: UTF-8
#--
# WPScan - WordPress Security Scanner
# Copyright (C) 2012-2013
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#++
require 'common/cache_file_store'
# Implementation of a cache_key (Typhoeus::Request#hash has too many options)
module Typhoeus
class Request
module Cacheable
def cache_key
Digest::SHA2.hexdigest("#{url}-#{options[:body]}-#{options[:method]}")[0..32]
end
end
end
end
class TyphoeusCache < CacheFileStore
def get(request)
read_entry(request.cache_key)
end
def set(request, response)
write_entry(request.cache_key, response, request.cache_ttl)
end
end

View File

@@ -20,6 +20,7 @@
begin
# Standard libs
require 'rubygems'
require 'bundler/setup'
require 'getoptlong'
require 'optparse' # Will replace getoptlong
require 'uri'
@@ -33,13 +34,11 @@ begin
require 'rbconfig'
require 'pp'
# Third party libs
gem 'typhoeus', '=0.4.2'
require 'typhoeus'
require 'json'
require 'nokogiri'
# Custom libs
require 'common/browser'
require 'common/cache_file_store'
require 'common/custom_option_parser'
rescue LoadError => e
puts "[ERROR] #{e}"

View File

@@ -36,10 +36,11 @@ module BruteForce
password_found = false
File.open(wordlist_path, 'r').each do |password|
# ignore file comments, but will miss passwords if they start with a hash...
next if password[0, 1] == '#'
password.strip!
# keep a count of the amount of requests to be sent
request_count += 1
queue_count += 1
@@ -52,8 +53,8 @@ module BruteForce
request = Browser.instance.forge_request(login_url,
{
method: :post,
params: { log: URI::encode(username), pwd: URI::encode(password) },
cache_timeout: 0
body: { log: URI::encode(username), pwd: URI::encode(password) },
cache_ttl: 0
}
)

View File

@@ -27,6 +27,7 @@ module WpConfigBackup
backups = WpConfigBackup.config_backup_files
browser = Browser.instance
hydra = browser.hydra
queue_count = 0
backups.each do |file|
file_url = @uri.merge(URI.escape(file)).to_s
@@ -39,6 +40,12 @@ module WpConfigBackup
end
hydra.queue(request)
queue_count += 1
if queue_count == browser.max_threads
hydra.run
queue_count = 0
end
end
hydra.run

View File

@@ -60,7 +60,7 @@ module WpUsernames
end
def get_nickname_from_url(url)
resp = Browser.instance.get(url, { follow_location: true, max_redirects: 2 })
resp = Browser.instance.get_and_follow_location(url)
nickname = nil
if resp.code == 200
nickname = extract_nickname_from_body(resp.body)

View File

@@ -49,13 +49,15 @@ class WebSite
def xml_rpc_url
unless @xmlrpc_url
headers = Browser.instance.get(@uri.to_s).headers_hash
value = headers['x-pingback']
if value.nil? or value.empty?
@xmlrpc_url = nil
else
unless headers.nil?
value = headers['X-Pingback']
unless value.nil? && value.empty?
@xmlrpc_url = value
end
end
end
@xmlrpc_url
end

View File

@@ -55,7 +55,7 @@ class WpEnumerator
targets.each do |target|
url = target.get_full_url
request = enum_browser.forge_request(url, { cache_timeout: 0, follow_location: true })
request = enum_browser.forge_request(url, cache_ttl: 0, followlocation: true)
request_count += 1
request.on_complete do |response|

View File

@@ -74,7 +74,7 @@ class WpTheme < WpItem
# Discover the wordpress theme name by parsing the css link rel
def self.find_from_css_link(target_uri)
response = Browser.instance.get(target_uri.to_s, { follow_location: true, max_redirects: 2 })
response = Browser.instance.get_and_follow_location(target_uri.to_s)
# https + domain is optional because of relative links
matches = %r{(?:https?://[^"']+)?/([^/]+)/themes/([^"']+)/style.css}i.match(response.body)

View File

@@ -63,7 +63,7 @@ class CheckerPlugin < Plugin
number_of_urls = urls.size
urls.each do |url|
request = browser.forge_request(url, { cache_timeout: 0, follow_location: true })
request = browser.forge_request(url, { cache_ttl: 0, followlocation: true })
request_count += 1
request.on_complete do |response|

View File

@@ -23,7 +23,7 @@ describe Browser do
CONFIG_FILE_WITHOUT_PROXY = SPEC_FIXTURES_CONF_DIR + '/browser/browser.conf.json'
CONFIG_FILE_WITH_PROXY = SPEC_FIXTURES_CONF_DIR + '/browser/browser.conf_proxy.json'
CONFIG_FILE_WITH_PROXY_AND_AUTH = SPEC_FIXTURES_CONF_DIR + '/browser/browser.conf_proxy_auth.json'
INSTANCE_VARS_TO_CHECK = ['user_agent', 'user_agent_mode', 'available_user_agents', 'proxy', 'max_threads', 'request_timeout', 'cache_timeout']
INSTANCE_VARS_TO_CHECK = ['user_agent', 'user_agent_mode', 'available_user_agents', 'proxy', 'max_threads', 'request_timeout', 'cache_ttl']
before :all do
@json_config_without_proxy = JSON.parse(File.read(CONFIG_FILE_WITHOUT_PROXY))
@@ -31,6 +31,7 @@ describe Browser do
end
before :each do
Browser::reset
@browser = Browser.instance(config_file: CONFIG_FILE_WITHOUT_PROXY)
end
@@ -100,12 +101,12 @@ describe Browser do
it 'should set the correct credentials' do
@proxy_auth = { proxy_username: 'user', proxy_password: 'pass' }
@expected = @proxy_auth
@expected = 'user:pass'
end
it 'should set the correct credentials' do
@proxy_auth = 'username:passwd'
@expected = { proxy_username: 'username', proxy_password: 'passwd' }
@expected = @proxy_auth
end
end
@@ -213,10 +214,10 @@ describe Browser do
describe '#merge_request_params without proxy' do
it 'should return the default params' do
expected_params = {
disable_ssl_host_verification: true,
disable_ssl_peer_verification: true,
headers: { 'user-agent' => @browser.user_agent },
cache_timeout: @json_config_without_proxy['cache_timeout']
#disable_ssl_host_verification: true,
#disable_ssl_peer_verification: true,
headers: { 'User-Agent' => @browser.user_agent },
cache_ttl: @json_config_without_proxy['cache_ttl']
}
@browser.merge_request_params().should == expected_params
@@ -224,25 +225,25 @@ describe Browser do
it 'should return the default params with some values overriden' do
expected_params = {
disable_ssl_host_verification: false,
disable_ssl_peer_verification: true,
headers: { 'user-agent' => 'Fake IE' },
cache_timeout: 0
#disable_ssl_host_verification: false,
#disable_ssl_peer_verification: true,
headers: { 'User-Agent' => 'Fake IE' },
cache_ttl: 0
}
@browser.merge_request_params(
disable_ssl_host_verification: false,
headers: { 'user-agent' => 'Fake IE' },
cache_timeout: 0
#disable_ssl_host_verification: false,
headers: { 'User-Agent' => 'Fake IE' },
cache_ttl: 0
).should == expected_params
end
it 'should return the defaul params with :headers:accept = \'text/html\' (should not override :headers:user-agent)' do
it 'should return the defaul params with :headers:accept = \'text/html\' (should not override :headers:User-Agent)' do
expected_params = {
disable_ssl_host_verification: true,
disable_ssl_peer_verification: true,
headers: { 'user-agent' => @browser.user_agent, 'accept' => 'text/html' },
cache_timeout: @json_config_without_proxy['cache_timeout']
#disable_ssl_host_verification: true,
#disable_ssl_peer_verification: true,
headers: { 'User-Agent' => @browser.user_agent, 'accept' => 'text/html' },
cache_ttl: @json_config_without_proxy['cache_ttl']
}
@browser.merge_request_params(headers: { 'accept' => 'text/html' }).should == expected_params
@@ -251,19 +252,19 @@ describe Browser do
it 'should merge the basic-auth' do
@browser.basic_auth = 'Basic QWxhZGRpbjpvcGVuIHNlc2FtZQ=='
expected_params = {
disable_ssl_host_verification: true,
disable_ssl_peer_verification: true,
cache_timeout: @json_config_without_proxy['cache_timeout'],
#disable_ssl_host_verification: true,
#disable_ssl_peer_verification: true,
cache_ttl: @json_config_without_proxy['cache_ttl'],
headers: {
'Authorization' => 'Basic QWxhZGRpbjpvcGVuIHNlc2FtZQ==',
'user-agent' => @browser.user_agent
'User-Agent' => @browser.user_agent
}
}
@browser.merge_request_params().should == expected_params
expected_params[:headers].merge!('user-agent' => 'Fake FF')
@browser.merge_request_params(headers: { 'user-agent' => 'Fake FF' }).should == expected_params
expected_params[:headers].merge!('User-Agent' => 'Fake FF')
@browser.merge_request_params(headers: { 'User-Agent' => 'Fake FF' }).should == expected_params
end
end
@@ -274,10 +275,10 @@ describe Browser do
expected_params = {
proxy: @json_config_with_proxy['proxy'],
disable_ssl_host_verification: true,
disable_ssl_peer_verification: true,
headers: { 'user-agent' => @json_config_with_proxy['user_agent'] },
cache_timeout: @json_config_with_proxy['cache_timeout']
#disable_ssl_host_verification: true,
#disable_ssl_peer_verification: true,
headers: { 'User-Agent' => @json_config_with_proxy['user_agent'] },
cache_ttl: @json_config_with_proxy['cache_ttl']
}
browser.merge_request_params().should == expected_params
@@ -289,12 +290,11 @@ describe Browser do
expected_params = {
proxy: @json_config_with_proxy['proxy'],
proxy_username: 'user',
proxy_password: 'pass',
disable_ssl_host_verification: true,
disable_ssl_peer_verification: true,
headers: { 'user-agent' => @json_config_with_proxy['user_agent'] },
cache_timeout: @json_config_with_proxy['cache_timeout']
proxyauth: 'user:pass',
#disable_ssl_host_verification: true,
#disable_ssl_peer_verification: true,
headers: { 'User-Agent' => @json_config_with_proxy['user_agent'] },
cache_ttl: @json_config_with_proxy['cache_ttl']
}
browser.merge_request_params().should == expected_params
@@ -307,16 +307,16 @@ describe Browser do
end
describe '#post' do
it 'should return a Typhoeus::Response wth body = "Welcome Master" if login=master&password=it\'s me !' do
it 'should return a Typhoeus::Response wth body = "Welcome Master" if login=master&password=itsme!' do
url = 'http://example.com/'
stub_request(:post, url).
with(body: "login=master&password=it's me !").
stub_request(:post, url).with(body: { login: 'master', password: 'itsme!' }).
to_return(status: 200, body: 'Welcome Master')
response = @browser.post(
url,
params: { login: 'master', password: 'it\'s me !' }
body: 'login=master&password=itsme!'
#body: { login: 'master', password: 'hello' } # It's should be this line, but it fails
)
response.should be_a Typhoeus::Response
@@ -361,26 +361,6 @@ describe Browser do
#end
end
describe '#Browser.generate_cache_key_from_request' do
it '2 requests with the same url, without params must have the same cache_key' do
url = 'http://example.com'
key1 = Browser.generate_cache_key_from_request(@browser.forge_request(url))
key2 = Browser.generate_cache_key_from_request(@browser.forge_request(url))
key1.should === key2
end
it '2 requests with the same url, but with different params should have a different cache_key' do
url = 'http://example.com'
key1 = Browser.generate_cache_key_from_request(@browser.forge_request(url, params: { login: 'master', password: 'it\'s me !' }))
key2 = Browser.generate_cache_key_from_request(@browser.forge_request(url))
key1.should_not == key2
end
end
describe 'testing caching' do
it 'should only do 1 request, and retrieve the other one from the cache' do

View File

@@ -20,15 +20,12 @@
require 'spec_helper'
describe CacheFileStore do
before :all do
@cache_dir = SPEC_CACHE_DIR + '/cache_file_store'
end
let(:cache_dir) { SPEC_CACHE_DIR + '/cache_file_store' }
before :each do
Dir.delete(@cache_dir) rescue nil
Dir.delete(cache_dir) rescue nil
@cache = CacheFileStore.new(@cache_dir)
@cache = CacheFileStore.new(cache_dir)
end
after :each do
@@ -37,7 +34,7 @@ describe CacheFileStore do
describe '#storage_path' do
it 'returns the storage path given in the #new' do
@cache.storage_path.should == @cache_dir
@cache.storage_path.should == cache_dir
end
end
@@ -52,12 +49,12 @@ describe CacheFileStore do
it "should remove all files from the cache dir (#{@cache_dir}" do
# let's create some files into the directory first
(0..5).each do |i|
File.new(@cache_dir + "/file_#{i}.txt", File::CREAT)
File.new(cache_dir + "/file_#{i}.txt", File::CREAT)
end
count_files_in_dir(@cache_dir, 'file_*.txt').should == 6
count_files_in_dir(cache_dir, 'file_*.txt').should == 6
@cache.clean
count_files_in_dir(@cache_dir).should == 0
count_files_in_dir(cache_dir).should == 0
end
end

View File

@@ -0,0 +1,3 @@
# encoding: UTF-8
# TODO

View File

@@ -48,11 +48,12 @@ shared_examples_for 'BruteForce' do
passwords << password.strip unless password.strip[0, 1] == '#'
end
# Last status must be 302 to get full code coverage
passwords.each do |_|
stub_request(:any, @module.login_url).to_return(
passwords.each do |password|
stub_request(:post, @module.login_url).
to_return(
{ status: 200, body: 'login_error' },
{ status: 0, body: 'no reponse' },
{ status: 50, body: 'server error' },
{ status: 500, body: 'server error' },
{ status: 999, body: 'invalid' },
{ status: 302, body: 'FOUND!' }
)
@@ -60,12 +61,14 @@ shared_examples_for 'BruteForce' do
user = WpUser.new('admin', 1, nil)
result = @module.brute_force([user], @wordlist)
result.length.should == 1
result.should === [{ name: 'admin', password: 'root' }]
end
it 'should cover the timeout branch and return an empty array' do
stub_request(:any, @module.login_url).to_timeout
stub_request(:post, @module.login_url).to_timeout
user = WpUser.new('admin', 1, nil)
result = @module.brute_force([user], @wordlist)
result.should == []

View File

@@ -34,8 +34,7 @@ shared_examples_for 'WpConfigBackup' do
@config_backup_files.each do |backup_file|
file_url = @module.uri.merge(URI.escape(backup_file)).to_s
stub_request(:get, file_url).
to_return(status: 404, body: '')
stub_request(:get, file_url).to_return(status: 404)
end
end
@@ -50,8 +49,7 @@ shared_examples_for 'WpConfigBackup' do
file_url = @module.uri.merge(URI.escape(backup_file)).to_s
expected << file_url
stub_request(:get, file_url).
to_return(status: 200, body: File.new(@fixtures_dir + '/wp-config.php'))
stub_request_to_fixture(url: file_url, fixture: @fixtures_dir + '/wp-config.php')
end
wp_config_backup = @module.config_backup
@@ -67,8 +65,7 @@ shared_examples_for 'WpConfigBackup' do
file_url = @module.uri.merge(URI.escape(backup_file)).to_s
expected << file_url
stub_request(:get, file_url).
to_return(status: 200, body: File.new(@fixtures_dir + '/wp-config.php'))
stub_request_to_fixture(url: file_url, fixture: @fixtures_dir + '/wp-config.php')
end
wp_config_backup = @module.config_backup

View File

@@ -21,6 +21,14 @@ describe 'WebSite' do
let(:fixtures_dir) { SPEC_FIXTURES_WPSCAN_WEB_SITE_DIR }
subject(:web_site) { WebSite.new('http://example.localhost/') }
before :all do
Browser::reset
Browser.instance(
config_file: SPEC_FIXTURES_CONF_DIR + '/browser/browser.conf.json',
cache_ttl: 0
)
end
describe "#new" do
its(:url) { should === 'http://example.localhost/' }
end
@@ -74,7 +82,7 @@ describe 'WebSite' do
it 'should return the correct url : http://example.localhost/xmlrpc.php' do
xmlrpc = 'http://example.localhost/xmlrpc.php'
stub_request(:get, web_site.url).
to_return(status: 200, body: '', headers: { 'X-Pingback' => xmlrpc})
to_return(status: 200, headers: { 'X-Pingback' => xmlrpc })
web_site.xml_rpc_url.should === xmlrpc
end
@@ -88,7 +96,7 @@ describe 'WebSite' do
describe '#has_xml_rpc?' do
it 'should return true' do
stub_request(:get, web_site.url).
to_return(status: 200, body: '', headers: { 'X-Pingback' => 'xmlrpc'})
to_return(status: 200, headers: { 'X-Pingback' => 'xmlrpc' })
web_site.should have_xml_rpc
end

View File

@@ -24,11 +24,11 @@ describe WpTarget do
let(:target_url) { 'http://example.localhost/' }
before :each do
Browser.reset
Browser::reset
@options =
{
config_file: SPEC_FIXTURES_CONF_DIR + '/browser/browser.conf.json',
cache_timeout: 0,
cache_ttl: 0,
wp_content_dir: 'wp-content',
wp_plugins_dir: 'wp-content/plugins'
}

View File

@@ -39,9 +39,10 @@ class WpScanModuleSpec
def initialize(target_url)
@uri = URI.parse(add_trailing_slash(add_http_protocol(target_url)))
Browser::reset
Browser.instance(
config_file: SPEC_FIXTURES_CONF_DIR + '/browser/browser.conf.json',
cache_timeout: 0
cache_ttl: 0
)
end

View File

@@ -1,7 +1,7 @@
{
"user_agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.6; rv:9.0) Gecko/20100101 Firefox/9.0",
"user_agent_mode": "static",
"cache_timeout": 300,
"cache_ttl": 300,
"request_timeout": 2000,
"max_threads": 5
}

View File

@@ -2,6 +2,6 @@
"user_agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.6; rv:10.0) Gecko/20100101 Firefox/11.0",
"user_agent_mode": "static",
"proxy": "127.0.0.1:3038",
"cache_timeout": 300,
"cache_ttl": 300,
"request_timeout": 2000
}

View File

@@ -3,6 +3,6 @@
"user_agent_mode": "static",
"proxy": "127.0.0.1:3038",
"proxy_auth": "user:pass",
"cache_timeout": 300,
"cache_ttl": 300,
"request_timeout": 2000
}

View File

@@ -20,15 +20,11 @@
# https://github.com/bblimke/webmock
# https://github.com/colszowka/simplecov
# Code Coverage (only works with ruby >= 1.9)
if RUBY_VERSION >= '1.9'
require 'simplecov'
end
require File.expand_path(File.dirname(__FILE__) + '/../lib/common/common_helper')
gem 'webmock', '=1.8.11'
require 'webmock/rspec'
# Code Coverage (only works with ruby >= 1.9)
require 'simplecov' if RUBY_VERSION >= '1.9'
SPEC_DIR = ROOT_DIR + '/spec'
SPEC_LIB_DIR = SPEC_DIR + '/lib'