Tried to make code climate happy

g0tmi1k
2018-05-14 17:56:49 +01:00
parent b9fa1e3587
commit f90a64ce81
10 changed files with 113 additions and 108 deletions

View File

@@ -95,7 +95,7 @@ class WpItems < Array
       code = tag.text.to_s
       next if code.empty?
-      if ! code.valid_encoding?
+      if !code.valid_encoding?
         code = code.encode('UTF-16be', :invalid => :replace, :replace => '?').encode('UTF-8')
       end

View File

@@ -294,18 +294,25 @@ end
 def get_random_user_agent
   user_agents = []
-  unless File.exist?(USER_AGENTS_FILE)
-    raise('[ERROR] Missing user-agent data. Please re-run with --update.')
-  end
+  # If we can't access the file, die
+  raise('[ERROR] Missing user-agent data. Please re-run with just --update.') unless File.exist?(USER_AGENTS_FILE)
+  # Read in file
   f = File.open(USER_AGENTS_FILE, 'r')
+  # Read every line in the file
   f.each_line do |line|
-    # ignore comments
+    # Remove any End of Line issues, and leading/trailing spaces
+    line = line.strip.chomp
+    # Ignore empty files and comments
     next if line.empty? or line =~ /^\s*(#|\/\/)/
+    # Add to array
     user_agents << line.strip
   end
+  # Close file handler
   f.close
-  # return ransom user-agent
+  # Return random user-agent
   user_agents.sample
 end
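Note: from the parsing above, USER_AGENTS_FILE is expected to hold one user-agent string per line; blank lines and lines starting with "#" or "//" are skipped. A hypothetical file (values made up for illustration, not taken from this commit) would look like:

    # Desktop browsers
    Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:60.0) Gecko/20100101 Firefox/60.0
    // Mobile browsers
    Mozilla/5.0 (iPhone; CPU iPhone OS 11_0 like Mac OS X) AppleWebKit/604.1.38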
@@ -331,4 +338,9 @@ end
 # Get the HTTP response code
 def get_http_status(url)
   Browser.get(url.to_s).code
 end
+
+# Check to see if we need a "s"
+def grammar_s(size)
+  size.to_i >= 1 ? "s" : ""
+end
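As a quick usage sketch (values made up, not part of the commit), grammar_s just decides whether a feedback line gets a trailing "s":

    users_found = 3
    puts "Found #{users_found} user#{grammar_s(users_found)}"   # => "Found 3 users"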

View File

@@ -3,12 +3,6 @@
 class WebSite
   module HumansTxt
-    # Checks if a humans.txt file exists
-    # @return [ Boolean ]
-    def has_humans?
-      Browser.get(humans_url).code == 200
-    end
-
     # Gets a humans.txt URL
     # @return [ String ]
     def humans_url
@@ -22,18 +16,15 @@ class WebSite
       response = Browser.get(humans_url.to_s)
       body = response.body
       # Get all non-comments
       entries = body.split(/\n/)
       # Did we get something?
       if entries
-        entries.flatten!
-        entries.uniq!
-        entries.each do |d|
-          temp = d.strip
-          return_object << temp.to_s
-        end
+        # Remove any rubbish
+        entries = clean_uri(entries)
       end
-      return_object
+      return return_object
     end
end

View File

@@ -29,16 +29,12 @@ class WebSite
       # Did we get something?
       if entries
-        # Extract elements
-        entries.flatten!
-        # Remove any leading/trailing spaces
-        entries.collect{|x| x.strip || x }
-        # End Of Line issues
-        entries.collect{|x| x.chomp! || x }
-        # Remove nil's and sort
-        entries.compact.sort!
-        # Unique values only
-        entries.uniq!
+        # Remove any rubbish
+        entries = clean_uri(entries)
+        # Sort
+        entries.sort!
         # Wordpress URL
         wordpress_path = @uri.path
@@ -50,19 +46,10 @@ class WebSite
           entries.delete(dir_with_subdir)
         end
-        # Each value now, try and make it a full URL
-        entries.each do |d|
-          begin
-            temp = @uri.clone
-            temp.path = d.strip
-          rescue URI::Error
-            temp = d.strip
-          end
-          return_object << temp.to_s
-        end
+        # Convert to full URIs
+        return_object = full_uri(entries)
       end
-      return_object
+      return return_object
     end
 
     protected

View File

@@ -3,12 +3,6 @@
 class WebSite
   module SecurityTxt
-    # Checks if a security.txt file exists
-    # @return [ Boolean ]
-    def has_security?
-      Browser.get(security_url).code == 200
-    end
-
     # Gets a security.txt URL
     # @return [ String ]
     def security_url
@@ -25,16 +19,12 @@ class WebSite
       # Get all non-comments
       entries = body.split(/\n/)
       # Did we get something?
       if entries
-        entries.flatten!
-        entries.uniq!
-        entries.each do |d|
-          temp = d.strip
-          return_object << temp.to_s
-        end
+        # Remove any rubbish
+        entries = clean_uri(entries)
       end
-      return_object
+      return return_object
     end
   end

View File

@@ -31,37 +31,22 @@ class WebSite
       # Make request
       response = Browser.get(sitemap_url.to_s)
-      body = response.body
-      # Get all allow and disallow urls
-      entries = body.scan(/^sitemap\s*:\s*(.*)$/i)
+      entries = response.body.scan(/^sitemap\s*:\s*(.*)$/i)
       # Did we get something?
       if entries
-        # Extract elements
-        entries.flatten!
-        # Remove any leading/trailing spaces
-        entries.collect{|x| x.strip || x }
-        # End Of Line issues
-        entries.collect{|x| x.chomp! || x }
-        # Remove nil's and sort
-        entries.compact.sort!
-        # Unique values only
-        entries.uniq!
+        # Remove any rubbish
+        entries = clean_uri(entries)
-        # Each value now, try and make it a full URL
-        entries.each do |d|
-          begin
-            temp = @uri.clone
-            temp.path = d.strip
-          rescue URI::Error
-            temp = d.strip
-          end
-          return_object << temp.to_s
-        end
+        # Sort
+        entries.sort!
+        # Convert to full URIs
+        return_object = full_uri(entries)
       end
-      return_object
+      return return_object
     end
   end

View File

@@ -17,13 +17,15 @@ class WpTarget < WebSite
       data = JSON.parse(response.body)
       # If there is nothing there, return false
-      return false if data.empty?
+      if data.empty?
+        return false
       # WAF/API disabled response
-      return false if data.include?('message') and data['message'] =~ /Only authenticated users can access the REST API/
+      elsif data.include?('message') and data['message'] =~ /Only authenticated users can access the REST API/
+        return false
       # Success!
-      return true if response.code == 200
+      elsif response.code == 200
+        return true
+      end
     end
     # Something went wrong
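For reference, the WAF/API-disabled branch fires when the parsed JSON has a "message" key whose value mentions that only authenticated users can access the REST API; a minimal hypothetical body that would trigger it (any other fields omitted) is:

    { "message": "Only authenticated users can access the REST API." }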
@@ -70,6 +72,10 @@ class WpTarget < WebSite
       # Sort and uniq
       users = users.sort.uniq
+      # Feedback
+      grammar = grammar_s(users.size)
+      puts warning("#{users.size} user#{grammar} exposed via API: #{json_users_url}")
       # Print results
       table = Terminal::Table.new(headings: ['ID', 'Name', 'URL'],
                                   rows: users)

View File

@@ -43,12 +43,13 @@ class WpTarget < WebSite
       end
       if users
-        # Feedback
-        puts warning("Detected users from RSS feed:")
         # Sort and uniq
         users = users.sort_by { |user| user.to_s.downcase }.uniq
+        # Feedback
+        grammar = grammar_s(users.size)
+        puts warning("Detected #{users.size} user#{grammar} from RSS feed:")
         # Print results
         table = Terminal::Table.new(headings: ['Name'],
                                     rows: users)

View File

@@ -120,6 +120,39 @@ def help
   puts
 end
 
+def clean_uri(entries)
+  # Extract elements
+  entries.flatten!
+  # Remove any leading/trailing spaces
+  entries.collect{|x| x.strip || x }
+  # End Of Line issues
+  entries.collect{|x| x.chomp! || x }
+  # Remove nil's
+  entries.compact
+  # Unique values only
+  entries.uniq!
+  return entries
+end
+
+# Return the full URL
+def full_uri(entries)
+  return_object = []
+  # Each value now, try and make it a full URL
+  entries.each do |d|
+    begin
+      temp = @uri.clone
+      temp.path = d.strip
+    rescue URI::Error
+      temp = d.strip
+    end
+    return_object << temp.to_s
+  end
+  return return_object
+end
+
 # Hook to check if the target if down during the scan
 # And have the number of requests performed to display at the end of the scan
 # The target is considered down after 30 requests with status = 0
@@ -138,3 +171,4 @@ Typhoeus.on_complete do |response|
sleep(Browser.instance.throttle)
end
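As a rough usage sketch of the two helpers added above (the target URL and entries are made up, and it assumes clean_uri/full_uri are loaded along with a populated @uri, as in the scanner itself):

    @uri = URI.parse('http://example.com/')
    entries = ['/wp-admin/', '/wp-login.php', '/wp-admin/']
    entries = clean_uri(entries)          # flatten and de-duplicate the raw values
    return_object = full_uri(entries)     # expand each path against @uri
    # => ["http://example.com/wp-admin/", "http://example.com/wp-login.php"]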