-
Notifications
You must be signed in to change notification settings - Fork 3
Expand file tree
/
Copy pathcrawler_sqlmap.py
More file actions
103 lines (90 loc) · 3.54 KB
/
crawler_sqlmap.py
File metadata and controls
103 lines (90 loc) · 3.54 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
#! /usr/bin/env python
# -*- coding: utf-8 -*-
# __author__ = 'CwT'
import logging
import time
import subprocess
import sys
import os
import json
import traceback
from crawler.crawler import Crawler
from crawler.setting import Setting
from crawler.util.utils import execute
# Log to stdout; swap in the commented line below to log to a file instead.
logging.basicConfig(stream=sys.stdout, level=logging.INFO)
# logging.basicConfig(filename="logger.txt", level=logging.INFO)
logger = logging.getLogger(__name__)
# Directory containing this script; the bundled sqlmap checkout is expected beneath it.
BASE_DIR = os.path.dirname(__file__)
# Offset added to the base port (8775) while hunting for a free port; advances globally.
HINT = 0
# Highest legal TCP port number; the port scan aborts past this.
MAX_PORT_NUMBER = 65535
def check_port(port):  # is used
    """Return the (truthy) netstat output line(s) mentioning *port*, falsy if absent.

    A truthy result means the port shows up among local addresses, i.e. it
    is presumably in use — confirm against ``execute``'s exact semantics.
    """
    command = "netstat -at | awk '{print $4}' | grep %d" % port
    return execute(command)
def get_available_port():
    """Scan upward from port 8775 and return the first port not listed by netstat.

    Advances the module-global ``HINT`` on every probe so repeated calls do
    not re-test ports already handed out.

    :raises Exception: when the scan runs past MAX_PORT_NUMBER.
    """
    global HINT
    candidate = 8775 + HINT
    HINT += 1
    while candidate <= MAX_PORT_NUMBER:
        if not check_port(candidate):
            return candidate
        candidate = 8775 + HINT
        HINT += 1
    raise Exception("no available port")
def start_sqlmap(addr="127.0.0.1"):
    """Launch the sqlmap REST API server on a free port and wait until it listens.

    :param addr: interface address sqlmapapi binds to (default localhost)
    :return: 3-tuple (Popen handle for the server, addr, port)
    """
    port = get_available_port()
    sqlmap = subprocess.Popen(["python2", "sqlmapapi.py", "-s", "-p", str(port),
                               "-H", addr], cwd=os.path.join(BASE_DIR, "sqlmap"))
    # BUG FIX: the original looped `while check_port(port)`, which exits
    # immediately — get_available_port() just guaranteed the port is NOT in the
    # netstat listing. The intent (per the comment) is to wait until the port
    # DOES appear, i.e. until sqlmapapi has bound it, so the condition is inverted.
    while not check_port(port):
        logger.debug("wait 5 seconds for sqlmap initialization")
        time.sleep(5)  # wait 5 seconds for sqlmap initialization
    return sqlmap, addr, port
def crawler_sqlmap(entry_url, depth=-1, level=1, threads=2, timeout=30, checkhost=True):
    """Entry point: crawl *entry_url* and drive a sqlmap scan over the pages found.

    :param entry_url: start URL of the site to scan
    :param depth: crawl depth; -1 means no depth limit (default -1)
    :param level: sqlmap test level 1-5 (default 1); higher levels use more
        payloads — more accurate results but longer runtime
    :param threads: number of sqlmap scan threads (default 2)
    :param timeout: sqlmap scan timeout in seconds (default 30)
    :param checkhost: restrict crawled links to the same host as the entry URL
    :return: 4-tuple (ret, url, simple, content)
        ret: True on success, False on failure
        url: the scanned target address
        simple: condensed report (dict) extracted from the full content
        content: complete sqlmap report (dict)
        On failure the traceback text is stored in the ``simple`` slot.
    """
    settings = Setting(handle=False)
    settings.depth = depth
    settings.nocheckhost = not checkhost
    settings.level = level
    settings.threads = threads
    settings.timeout = timeout
    sqlmap, crawler = None, None
    try:
        sqlmap, ip, port = start_sqlmap()
        # The crawler must be created AFTER sqlmap starts, so it picks up the
        # port sqlmap actually bound.
        crawler = Crawler(BASE_DIR, ip, port, entry_url, setting=settings)
        crawler.run()
        cont, simple = crawler.raw_report()
        return True, entry_url, simple, cont
    except Exception:
        # Was a bare `except:` — that also swallowed SystemExit/KeyboardInterrupt.
        # Hoist format_exc() so the logged text and the returned text are identical.
        err = traceback.format_exc()
        logger.error(err)
        return False, entry_url, err, {}
    finally:
        if crawler:
            crawler.close()
        if sqlmap:
            sqlmap.terminate()
if __name__ == '__main__':
    # Manual smoke test against sqlmap's public demo target.
    # ret, url, simp, cont = crawler_sqlmap("http://testphp.vulnweb.com/")
    # print(json.dumps(simp))
    sqlmap, crawler = None, None
    try:
        sqlmap, ip, port = start_sqlmap()
        crawler = Crawler(BASE_DIR, ip, port, "http://testphp.vulnweb.com/")
        crawler.run()
        crawler.report()
    finally:
        # BUG FIX: eliminator.display() ran BEFORE the `if crawler:` guard, so a
        # failure in start_sqlmap() raised AttributeError on crawler=None inside
        # the finally block, masking the original exception.
        if crawler:
            crawler.eliminator.display()
            crawler.close()
        if sqlmap:
            sqlmap.terminate()
        # BUG FIX: was `while True: sys.exit(1)` — sys.exit raises SystemExit on
        # the first iteration, so the loop was dead weight.
        sys.exit(1)