联动套娃挖洞bp+awvs+xray

burp 配置

第一层代理配置

awvs的流量会经过这里

image-20230706170206267

第二层代理配置

流量会经过burp后转发给xray

image-20230706170720687

xray 配置

./xray_darwin_arm64 webscan --listen 127.0.0.1:7777 --html-output ./out/vul.html       

image-20230706170937042

AWVS配置

启动容器

我这里使用docker起的一个Awvs

docker pull secfa/docker-awvs

docker run -it -d -p 13443:3443 --cap-add LINUX_IMMUTABLE secfa/docker-awvs

Then visit https://YOUR_IP:13443/ # 这里访问要加https才可以访问

image-20230706163316215

生成apikey

image-20230706163928380

脚本批量导入


#coding=utf-8
# Batch-import targets into an AWVS scanner via its REST API, routing
# crawl traffic through an upstream proxy (Burp -> xray chain).
import requests
import json

import urllib3

# AWVS uses a self-signed certificate; silence the verify=False warnings.
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)


apikey = '1986ad8c0a5b3df4d7...' # AWVS API key (generated in the AWVS profile page)

# X-Auth carries the API key on every request.
headers = {'Content-Type': 'application/json', "X-Auth": apikey}


def addTask(url, target):
    """Register *target* with the AWVS instance at *url*.

    Args:
        url: base URL of the AWVS instance, e.g. ``https://127.0.0.1:13443``.
        target: address of the site to add.

    Returns:
        The ``target_id`` string assigned by AWVS, or ``None`` when the
        request failed.
    """
    try:
        endpoint = ''.join((url, '/api/v1/targets/add'))
        data = {"targets": [{"address": target, "description": ""}], "groups": []}
        r = requests.post(endpoint, headers=headers, data=json.dumps(data), timeout=30, verify=False)
        result = json.loads(r.content.decode())
        return result['targets'][0]['target_id']
    except Exception as e:
        # Bug fix: the original returned the Exception object itself, which
        # is truthy, so scan()'s ``if target_id:`` treated a failed add as
        # success. Report the error and return None instead.
        print(e)
        return None


def scan(url, target, Crawl, user_agent, profile_id, proxy_address, proxy_port):
    """Add *target* to AWVS, configure it, and start a scan.

    Args:
        url: base URL of the AWVS instance.
        target: address of the site to scan.
        Crawl: whether the target's crawler proxy should be enabled.
        user_agent: User-Agent header the scanner will use.
        profile_id: AWVS scan-profile UUID.
        proxy_address / proxy_port: upstream proxy the crawler sends traffic to.

    Returns:
        The target_id from the scan-creation response on success,
        otherwise ``None``.
    """
    scanUrl = ''.join((url, '/api/v1/scans'))
    target_id = addTask(url, target)

    # Guard clause: registration failed, nothing to scan.
    if not target_id:
        return None

    data = {"target_id": target_id, "profile_id": profile_id, "incremental": False,
            "schedule": {"disable": False, "start_date": None, "time_sensitive": False}}
    try:
        # Apply proxy / UA settings before the scan is kicked off.
        configuration(url, target_id, proxy_address, proxy_port, Crawl, user_agent)
        response = requests.post(scanUrl, data=json.dumps(data), headers=headers, timeout=30, verify=False)
        result = json.loads(response.content)
        return result['target_id']
    except Exception as e:
        print(e)
        return None


def configuration(url, target_id, proxy_address, proxy_port, Crawl, user_agent):
    """PATCH the per-target AWVS configuration.

    Enables the HTTP proxy (when *Crawl* is true) so crawl traffic flows
    through the Burp/xray chain, and sets the scanner's User-Agent.

    Args:
        url: base URL of the AWVS instance.
        target_id: id of a previously registered target.
        proxy_address / proxy_port: upstream proxy endpoint.
        Crawl: boolean flag used as the proxy "enabled" switch.
        user_agent: User-Agent string for the scanner.
    """
    configuration_url = ''.join((url, '/api/v1/targets/{0}/configuration'.format(target_id)))
    data = {"scan_speed": "fast", "login": {"kind": "none"}, "ssh_credentials": {"kind": "none"}, "sensor": False,
            "user_agent": user_agent, "case_sensitive": "auto", "limit_crawler_scope": True, "excluded_paths": [],
            "authentication": {"enabled": False},
            "proxy": {"enabled": Crawl, "protocol": "http", "address": proxy_address, "port": proxy_port},
            "technologies": [], "custom_headers": [], "custom_cookies": [], "debug": False,
            "client_certificate_password": "", "issue_tracker_id": "", "excluded_hours_id": ""}
    # Response body is not used; the unused local binding was dropped.
    requests.patch(url=configuration_url, data=json.dumps(data), headers=headers, timeout=30, verify=False)


def main():
    """Read targets from a file and submit each one to AWVS for scanning."""
    Crawl = True
    proxy_address = '127.0.0.1'  # when AWVS runs in docker, use the host machine's IP
    proxy_port = '8080'
    awvs_url = 'https://127.0.0.1:13443'  # AWVS base URL

    # One target URL per line.
    with open(r'C:\x\x\url.txt', 'r', encoding='utf-8') as f:
        targets = f.readlines()

    profile_id = "11111111-1111-1111-1111-111111111111"
    # Default User-Agent the scanner presents.
    user_agent = "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.21 (KHTML, like Gecko) Chrome/41.0.2228.0 Safari/537.21"
    if Crawl:
        # Crawl-only profile when we just want traffic pushed through the proxy.
        profile_id = "11111111-1111-1111-1111-111111111117"

    for target in targets:
        target = target.strip()
        if not target:
            # Robustness: skip blank lines instead of registering an empty target.
            continue
        if scan(awvs_url, target, Crawl, user_agent, profile_id, proxy_address, int(proxy_port)):
            print("{0} 添加成功".format(target))


if __name__ == '__main__':
    main()

添加url

等待扫描

image-20230706165826438

开启捡洞模式

image-20230707103924039

image-20230707104013987