-
Notifications
You must be signed in to change notification settings - Fork 572
/
ShuiZe.py
1654 lines (1323 loc) · 70.6 KB
/
ShuiZe.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
750
751
752
753
754
755
756
757
758
759
760
761
762
763
764
765
766
767
768
769
770
771
772
773
774
775
776
777
778
779
780
781
782
783
784
785
786
787
788
789
790
791
792
793
794
795
796
797
798
799
800
801
802
803
804
805
806
807
808
809
810
811
812
813
814
815
816
817
818
819
820
821
822
823
824
825
826
827
828
829
830
831
832
833
834
835
836
837
838
839
840
841
842
843
844
845
846
847
848
849
850
851
852
853
854
855
856
857
858
859
860
861
862
863
864
865
866
867
868
869
870
871
872
873
874
875
876
877
878
879
880
881
882
883
884
885
886
887
888
889
890
891
892
893
894
895
896
897
898
899
900
901
902
903
904
905
906
907
908
909
910
911
912
913
914
915
916
917
918
919
920
921
922
923
924
925
926
927
928
929
930
931
932
933
934
935
936
937
938
939
940
941
942
943
944
945
946
947
948
949
950
951
952
953
954
955
956
957
958
959
960
961
962
963
964
965
966
967
968
969
970
971
972
973
974
975
976
977
978
979
980
981
982
983
984
985
986
987
988
989
990
991
992
993
994
995
996
997
998
999
1000
# 调用各类插件获取子域名信息
# -*- coding:utf-8 -*-
import sys
import os
# from gevent import monkey
# monkey.patch_all()
import urllib3
import openpyxl
from Plugins.infoGather.subdomain.subdomainInterface.subdomainInterface import run_subdomainInterface
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
from uuid import uuid4
import dns.resolver
import re
from threading import Thread
from IPy import IP
from collections import Counter
from queue import Queue
from urllib.parse import urlparse
from termcolor import cprint
from optparse import OptionParser
import os
import platform
from Plugins.saveToExcel import saveToExcel
from uuid import uuid4
import socket
import socks
import configparser
from tqdm import *
from colorama import Fore
import requests
## 调用lijiejie的子域名收集脚本
# def lijiejieSubdomain():
# Subdomains_ips = {} # 字典,key为子域名,value为子域名的A记录IP值
# Subdomains_ips[domain] = [] # 主域名
# ips_L = []
# subdomains = ''
# lijiejie_folder = './Plugins/infoGather/subdomain/lijiejie'
# cprint('-' * 50 + 'Load lijiejie Subdomain ...' + '-' * 50, 'green') # 启动lijiejie脚本
# from Plugins.infoGather.subdomain.lijiejie.subDomainsBrute import lijiejieRun
# # print('cd {} && python3 ./subDomainsBrute.py {}'.format(lijiejie_folder, domain))
# # p1 = Popen('cd {} && python3 ./subDomainsBrute.py {}'.format(lijiejie_folder, domain), shell=True, stdin=PIPE, stdout=PIPE)
# # print(p1.stdout.read().decode('gb2312'))
# lijiejieRun(domain)
# lijiejie_domain_file = '{}/{}.txt'.format(lijiejie_folder, domain)
# with open(lijiejie_domain_file, 'rt') as f:
# for each_line in f.readlines():
# each_line_split = each_line.split('\t')
# subdomain = each_line_split[0].strip() # 子域名
# ips = each_line_split[1].strip('\n') # 子域名的dns解析A记录IP
# # print(subdomain, ips)
# for ip in ips.split(','):
# ips_L.append(ip.strip())
# # print(subdomain, ips_L)
# Subdomains_ips[subdomain] = ips_L
# ips_L = []
#
# os.remove(lijiejie_domain_file) # 删除临时文件
#
# lijiejie_tmp = lijiejie_folder + '/tmp' # 删除tmp目录
# if os.path.isdir(lijiejie_tmp):
# shutil.rmtree(lijiejie_tmp, True)
#
# return Subdomains_ips
# 进度条
# Check whether a string is a dotted-quad IPv4 address.
def isIP(str):
    """Return True when *str* is a valid IPv4 address (each octet 0-255).

    Note: the parameter name shadows the builtin ``str``; it is kept
    unchanged for backward compatibility with existing callers.
    """
    # Raw string literal: ``\d`` in a plain string is an invalid escape and
    # raises a SyntaxWarning on Python 3.12+.
    p = re.compile(r'^((25[0-5]|2[0-4]\d|[01]?\d\d?)\.){3}(25[0-5]|2[0-4]\d|[01]?\d\d?)$')
    # re.match returns a Match object or None; bool() normalizes to True/False.
    return bool(p.match(str))
# Mine the dumped GitHub code-search results for sensitive keywords.
def get_GitSensitiveInfo(github_txt, raw_url_emails):
    """Scan *github_txt* (a GitHub code-search dump) for configured keywords.

    github_txt: path to the dump; each result URL is on a line marked with
        the '[------------------] ' prefix, followed by that file's content.
    raw_url_emails: dict mapping each result URL to the emails found in it.

    Returns rows of [keyword, line-no, matched line, emails] (plus an address
    header row and a separator row per hit) for the 'Github敏感信息' sheet.
    """
    cf = configparser.ConfigParser()
    cf.read("./iniFile/config.ini")
    secs = cf.sections()  # NOTE(review): unused; kept as-is
    # WARNING: eval() of a config value — safe only while config.ini is trusted.
    github_keywords = eval(cf.get('github keywords', 'github_keywords'))
    line_urls = {}  # 1-based line number (as str) -> result URL found on that line
    gitSensitiveInfo = []
    with open(github_txt, 'rt', encoding="utf-8", errors='ignore') as f:
        content = f.readlines()
        for line, each in enumerate(content):
            if '[------------------] ' in each:
                line_urls[str(line + 1)] = each.split('[------------------] ')[1]
    lines = list(line_urls.keys())
    # print(line_urls)
    def get_githubAddr(line):
        # Return the marker line number of the result block that contains *line*,
        # or None when it falls before the first marker.
        for i, num in enumerate(lines):
            # print(line)
            if i < len(lines) - 1:
                # print(line, int(num), int(lines[i + 1]))
                if int(num) <= line <= int(lines[i + 1]):
                    return int(num)
            elif line > int(lines[-1]):
                return int(lines[-1])
    for keyword in github_keywords:
        for line, each in enumerate(content):
            if line < len(content) - 1:  # need one line of trailing context below
                if keyword in each:
                    # print(line)
                    githubAddr = get_githubAddr(line)
                    # print(githubAddr)
                    if githubAddr:
                        raw_url = content[int(githubAddr) - 1].replace('[------------------]', '').strip()
                        try:
                            emails = str(raw_url_emails[raw_url])
                            print('github address: [line:{}] {}'.format(githubAddr, raw_url))
                            print('[emails] : {}'.format(emails))
                            # show the hit with one line of context before and after
                            print('[{}] [line:{}] {}'.format(keyword, line, content[line - 1].strip()))
                            print('[{}] [line:{}] {}'.format(keyword, line + 1, content[line].strip()))
                            print('[{}] [line:{}] {}'.format(keyword, line + 2, content[line + 1].strip()))
                            gitSensitiveInfo.append(['gitAddress', githubAddr, raw_url, emails])
                            gitSensitiveInfo.append([keyword, line, content[line - 1].strip(), emails])
                            gitSensitiveInfo.append([keyword, line + 1, content[line].strip(), emails])
                            gitSensitiveInfo.append([keyword, line + 2, content[line + 1].strip(), emails])
                            gitSensitiveInfo.append(['-' * 50, '-' * 50, '-' * 50, '-' * 50])
                        except Exception as e:
                            # raw_url not present in raw_url_emails — skip this hit
                            pass
    return gitSensitiveInfo
# Report how many subdomains a just-finished collector contributed, then
# return the deduplicated union of the old and new subdomain lists.
def printGetNewSubdomains(old_subdomains, new_subdomains):
    """Merge two subdomain lists; print the genuinely new entries when the
    old list is non-empty. Returns the deduplicated union as a list."""
    if old_subdomains:
        fresh = list(set(new_subdomains) - set(old_subdomains))
        print('[new :{}] {}'.format(len(fresh), fresh))
    return list(set(new_subdomains + old_subdomains))
# Wrapper around the subdomain3 brute-force plugin.
def subdomains3():
    """Brute-force subdomains of the global ``domain`` via the subdomain3
    plugin; returns its subdomain->IPs mapping."""
    cprint('-' * 50 + 'Load subdomains3 ...' + '-' * 50, 'green')
    from Plugins.infoGather.subdomain.subdomain3.brutedns import run_subdomains
    return run_subdomains(domain)
# DNS zone-transfer (AXFR) vulnerability check.
def dnsZoneTransfer():
    # Placeholder: the zone-transfer check is not implemented yet.
    pass
# Collect SOCKS proxies via the fofa-based proxy plugin.
def getSocksProxy():
    """Gather SOCKS proxies from fofa; results are currently not persisted."""
    cprint('-' * 50 + 'Load getSocksProxy ...' + '-' * 50, 'green')
    from Plugins.infoGather.SocksProxy.getSocksProxy import run_getSocksProxy
    run_getSocksProxy()
    # Excel export of the proxy list is intentionally disabled:
    # socksProxysSheet = saveToExcel(excelSavePath, excel, '代理')
    # socksProxysSheet.saveSocksProxys(socksProxysDict)
# ICP-filing reverse lookup: find sibling top-level domains registered
# under the same record as ``domain``.
def beian2NewDomain():
    """Query ICP filing info, extend the global ``newDomains`` list with the
    discovered domains, persist the rows, and return the company name."""
    cprint('-' * 50 + 'Load beian2NewDomain ...' + '-' * 50, 'green')
    from Plugins.infoGather.subdomain.beian2NewDomain.beian2domain import run_beian2domain
    beianNewDomains, companyName = run_beian2domain(domain)
    for record in beianNewDomains:
        newDomains.append(record[2])
    sheet = saveToExcel(excelSavePath, excel, '备案反查顶级域名')
    sheet.saveBeianNewDomains(beianNewDomains)
    return companyName
# Pull corporate-structure intelligence from aiqicha.baidu.com.
def Aiqicha(companyName):
    """Look up ICP/investment/holding/branch info for *companyName* and save
    it to the report. Does nothing when *companyName* is falsy."""
    cprint('-' * 50 + 'Load Aiqicha ...' + '-' * 50, 'green')
    if not companyName:
        return
    cprint("查询【{}】公司架构".format(companyName), 'red')
    from Plugins.infoGather.subdomain.Aiqicha.Aiqicha import run_aiqicha
    selfIcpinfo_infos, invest_infos, holds_infos, branch_infos = run_aiqicha(companyName)
    sheet = saveToExcel(excelSavePath, excel, '爱企查')
    sheet.saveAiqicha(selfIcpinfo_infos, invest_infos, holds_infos, branch_infos)
# Detect wildcard ("pan") DNS resolution on the target domain.
def checkPanAnalysis():
    """Return True when a random, surely-nonexistent label under ``domain``
    still resolves (i.e. the zone has a wildcard record)."""
    cprint('-' * 50 + 'check Pan-Analysis ...' + '-' * 50, 'green')
    panDomain = 'sadfsadnxzjlkcxjvlkasdfasdf.{}'.format(domain)
    try:
        answer = dns.resolver.query(panDomain, 'A').response.answer
        dns_A_ips = [item for rrset in answer for item in rrset.items]
        print(dns_A_ips)
        cprint('[泛解析] {} -> {}'.format(panDomain, dns_A_ips), 'red')
        return True
    except Exception as e:
        # NXDOMAIN (or any resolver error) means no wildcard
        cprint('[不是泛解析] :{}'.format(e.args), 'red')
        return False
# Run the bundled ksubdomain brute-force tool.
def callKsubdomain():
    """Return the subdomain list produced by the ksubdomain plugin."""
    cprint('-' * 50 + 'Load ksubdomain ...' + '-' * 50, 'green')
    from Plugins.infoGather.subdomain.ksubdomain.ksubdomain import run_ksubdomain
    return run_ksubdomain(domain)
# Run theHarvester: subdomains, IPs and e-mail addresses from OSINT sources.
def theHarvester():
    """Collect hosts and emails via theHarvester, verify the emails, persist
    both to the report, and return the deduplicated subdomain list."""
    cprint('-' * 50 + 'Load theHarvest ...' + '-' * 50, 'green')
    from Plugins.infoGather.subdomain.theHarvester.theHarvester import run_theHarvester
    theHarvesterIp, emails, hosts = run_theHarvester(domain)
    print(hosts)
    harvested = []
    for host in set(hosts):
        # entries containing '/' or spaces are not plain host names — skip
        if '/' in host or ' ' in host:
            continue
        parts = host.strip().split(':')
        # accept 'host' or 'host:ip'; anything else is ignored
        if len(parts) in (1, 2):
            harvested.append(parts[0])
    # verify which harvested mailboxes actually exist
    cprint('-' * 50 + 'Load verifyEmails ...' + '-' * 50, 'green')
    from Plugins.infoGather.subdomain.verifyEmails.VerifyEmails import run_verifyEmails
    aliveEmails = run_verifyEmails(emails)
    theHarvesterIpSheet = saveToExcel(excelSavePath, excel, 'theHarvester—IP')
    theHarvesterIpSheet.saveTheHarvesterIp(theHarvesterIp)
    emailsSheet = saveToExcel(excelSavePath, excel, '邮箱')
    emailsSheet.saveEmails(emails, aliveEmails)
    return list(set(harvested))
# Query misc passive-DNS APIs (virustotal, ce.baidu.com, threatcrowd, url.fht.im).
def othersApiSubdomain():
    """Return the subdomain list aggregated from the misc API plugins."""
    cprint('-' * 50 + 'Load VirusTotal threatcrowd url.fht.im ...' + '-' * 50, 'green')
    from Plugins.infoGather.subdomain.othersApiSubdomains.othersApiSubdomains import othersApiRun
    return othersApiRun(domain)
def interfaceSubdomain():
    """Return subdomains gathered through the generic subdomain-interface plugin."""
    # NOTE(review): banner text is copy-pasted from othersApiSubdomain — confirm intended.
    cprint('-' * 50 + 'Load VirusTotal threatcrowd url.fht.im ...' + '-' * 50, 'green')
    # from Plugins.infoGather.subdomain.othersApiSubdomains.othersApiSubdomains import othersApiRun
    return run_subdomainInterface(domain)
# Search GitHub via its API for subdomains and leaked sensitive data.
def githubApiSubdomain():
    """Collect subdomains from GitHub code search, mine the dumped results
    for sensitive keywords, persist findings, and return the subdomains."""
    cprint('-' * 50 + 'Load Github Api Subdomain ...' + '-' * 50, 'green')
    from Plugins.infoGather.subdomain.githubSubdomains.githubSubdomains import githubApiRun
    githubApiSubdomains, raw_url_emails = githubApiRun(domain, save_fold_path)
    githubSheet = saveToExcel(excelSavePath, excel, 'Github敏感信息')
    github_txt = r'{}/{}_github.txt'.format(save_fold_path, domain)
    # only mine keywords when the search actually produced a dump file
    if os.path.exists(github_txt):
        githubSheet.saveGithub(get_GitSensitiveInfo(github_txt, raw_url_emails))
    return githubApiSubdomains
# Run the embedded Sublist3r collector.
def Sublist3r():
    """Return subdomains found by Sublist3r for the global ``domain``."""
    print('[+] Load Sublist3r Subdomain ...')
    from Plugins.infoGather.subdomain.Sublist3r.sublist3r import sublist3rRun
    return sublist3rRun(domain)
# Search-engine spidering (Baidu + Bing) for subdomains.
def SpiderSubdomain():
    """Crawl Baidu and Bing result pages for ``domain``, persist the links,
    and return the union of subdomains from both engines."""
    cprint('-' * 50 + 'Load Spider ...' + '-' * 50, 'green')
    spiderSheet = saveToExcel(excelSavePath, excel, '爬虫')

    def BaiduSubdomain():
        # Baidu result-page spider
        cprint('Load BaiduSpider ...', 'green')
        from Plugins.infoGather.subdomain.Spider.Baidu.baidu import BaiduSpider
        subs, links = BaiduSpider().run_subdomain(domain)
        spiderSheet.saveSpider('百度', links)
        return subs

    def BingSubdomain():
        # Bing result-page spider
        cprint('Load BingSpider ...', 'green')
        from Plugins.infoGather.subdomain.Spider.Bing.bing import BingSpider
        subs, links = BingSpider().run_subdomain(domain)
        spiderSheet.saveSpider('必应', links)
        return subs

    return list(set(BaiduSubdomain() + BingSubdomain()))
# Harvest DNS names from the TLS certificates of discovered https hosts.
def crawlCerts(subdomains):
    """Scrape certificate DNS entries for *subdomains*; extends the global
    ``newDomains`` list and returns the certificate-derived subdomains."""
    cprint('-' * 50 + 'Load crawlCerts ...' + '-' * 50, 'green')
    from Plugins.infoGather.subdomain.Certs.crawlCerts import crawlCerts
    foundSubs, trusted, extraDomains = crawlCerts(domain, subdomains).run()
    newDomains.extend(extraDomains)
    sheet = saveToExcel(excelSavePath, excel, '证书')
    sheet.saveCert(trusted)
    return foundSubs
# Follow "friend links" on target pages to find related subdomains.
def FriendChinsSubdomain(temp_subdomains):
    """Return subdomains discovered by crawling friendship links."""
    cprint('-' * 50 + 'Load FriendChins ...' + '-' * 50, 'green')
    from Plugins.infoGather.subdomain.FriendChins.crawlFriendChins import FriendChins
    return FriendChins(domain, temp_subdomains).run()
# Consolidate subdomains, flag CDN usage, and resolve A records.
def checkCDN_queryA_subdomains(Subdomains_ips, subdomains):
    """Filter *subdomains* down to real children of ``domain``, detect CDN
    usage, resolve A records, persist the table, and return
    (Subdomains_ips, CDNSubdomainsDict, notCDNSubdomains)."""
    cprint('-' * 50 + 'check subdomains CDN and query ip ...' + '-' * 50, 'green')
    suffix = '.{}'.format(domain)
    subdomains = list({s for s in subdomains if suffix in s})
    print('Check CDN [{}] subdomains'.format(len(subdomains)))
    from Plugins.infoGather.subdomain.CDN import checkCDN
    notCDNSubdomains, CDNSubdomainsDict = checkCDN.run_checkCDN(subdomains)
    print('Query the A record of [{}] subdomains'.format(len(subdomains)))
    from Plugins.infoGather.subdomain.queryA import queryA
    Subdomains_ips = queryA.run_queryA(Subdomains_ips, subdomains)
    sheet = saveToExcel(excelSavePath, excel, '子域名A记录')
    sheet.saveQueryA(Subdomains_ips, CDNSubdomainsDict)
    return Subdomains_ips, CDNSubdomainsDict, notCDNSubdomains
# Host-header collision testing against hidden virtual hosts.
def hostCollide(Subdomains_ips):
    """Run host-collision checks, persist the results, and return the IP
    list gathered from censys during the process."""
    cprint('-' * 50 + 'run_hostCollide ...' + '-' * 50, 'green')
    from Plugins.infoGather.subdomain.hostCollide import hostCollide
    results, censysIPS = hostCollide.run_hostCollide(domain, Subdomains_ips)
    sheet = saveToExcel(excelSavePath, excel, 'HOST碰撞')
    sheet.saveHostCollide(results)
    return censysIPS
# Collect live parameterized URLs and admin/backend URLs across subdomains.
def run_ParamLinks():
    """Spider for parameterized links and backend paths, persist both, and
    return the param links (emptied when over 1000 to cap later scans)."""
    cprint('-' * 50 + 'run_ParamLinks ...' + '-' * 50, 'green')
    from Plugins.infoGather.ParamSpider.paramSpider import getParamLinks
    paramLinks, htLinks = getParamLinks(domain)
    sheet = saveToExcel(excelSavePath, excel, '动态链接和后台地址')
    sheet.saveparamHtLinks(paramLinks, htLinks)
    # too many dynamic links would make the param-vuln stage explode — drop them
    return [] if len(paramLinks) > 1000 else paramLinks
# Aggregate resolved IPs into /24 ("C-class") subnets worth scanning.
def get_CIP(Subdomains_ips, CDNSubdomainsDict, censysIPS):
    """Return the C-class subnets ('x.y.z.0') containing more than ``c_nums``
    target IPs (threshold from ./iniFile/config.ini), skipping CDN-backed
    hosts and private addresses. Also sets the global ``ip_count`` Counter.
    """
    cprint('-' * 50 + 'get_CIP ...' + '-' * 50, 'green')

    def is_internal_ip(ip_subnet):
        # Private/loopback filter: 10/8, 127/8, 172.16/12, 192.168/16.
        ip_subnet_list = ip_subnet.split('.')
        if ip_subnet_list[0] == '10' or ip_subnet_list[0] == '127':
            return True
        elif ip_subnet_list[0] == '172' and 15 < int(ip_subnet_list[1]) < 32:
            return True
        elif ip_subnet_list[0] == '192' and ip_subnet_list[1] == '168':
            return True
        else:
            return False

    ips = []
    CIP_List = []
    CIP_List_all = []
    for subdomain in Subdomains_ips:
        if CDNSubdomainsDict[subdomain] == 'NOT':  # only count IPs of non-CDN hosts
            for ip in Subdomains_ips[subdomain]:
                if not is_internal_ip(ip):
                    ips.append(ip)
    ips.extend(censysIPS)
    for ip in list(set(ips)):
        c_subnet = str(IP(ip).make_net('255.255.255.0')).rsplit('.', 1)[0] + '.0'
        CIP_List_all.append(c_subnet)
    global ip_count
    ip_count = Counter(CIP_List_all)
    cprint(ip_count, 'red')
    # configparser is already imported at module level; the former
    # function-local re-import was redundant and has been removed.
    cf = configparser.ConfigParser()
    cf.read("./iniFile/config.ini")
    c_nums = cf.get('C nums', 'c_nums')
    # keep only subnets dense enough to be worth a /24 scan
    for ip in ip_count:
        if ip_count[ip] > int(c_nums):
            CIP_List.append(ip)
    return CIP_List
    # return list(set(CIP_List))
# Query web-space search engines (fofa / shodan / quake / qianxin) for assets
# belonging to the root domain, the C-class subnets and an optional web title.
def run_webSpace(domain, SubdomainAndNotCDNIPs, CIP_List, fofaTitle):
    """Fan out asset queries to the four search engines and persist all hits.

    domain: root domain to query (falsy skips the domain queries).
    SubdomainAndNotCDNIPs: subdomains / non-CDN IPs, fofa-queried one-by-one.
    CIP_List: C-class subnets ('x.y.z.0') queried as /24 ranges.
    fofaTitle: optional web title to search for.

    Returns (webSpace_web_host_port, webSpace_service_host_port): hosts that
    expose web services, and the deduplicated non-web service entries.
    """
    cprint('-' * 50 + 'run_webSpace ...' + '-' * 50, 'green')
    from Plugins.infoGather.WebspaceSearchEngine import fofaApi, shodanApi, quakeApi, qianxinApi
    webSpaceSheet = saveToExcel(excelSavePath, excel, '网络空间搜索引擎')
    serviceSheet = saveToExcel(excelSavePath, excel, '服务')
    webSpace_web_host_port = []  # hosts exposing web services
    webSpace_service_host_port = []  # non-web services

    # fofa: root domain, then each /24, then the optional title search.
    def run_fofa():
        if domain:
            query_str = 'domain="{}"'.format(domain)
            fofa_Results, fofa_web_host_port, fofa_service_host_port = fofaApi.query_domain(query_str)
            if fofa_Results:
                webSpaceSheet.saveWebSpace('fofa', fofa_Results, query_str)  # persist raw engine results
                # save_webSpace(fofa_Results, 'fofa', query_str)
                webSpace_web_host_port.extend(fofa_web_host_port)
                webSpace_service_host_port.extend(fofa_service_host_port)
        # query each C-class subnet
        if CIP_List:
            for c_subnet in CIP_List:
                query_str = 'ip="{}/24"'.format(c_subnet)
                fofa_Results, fofa_web_host_port, fofa_service_host_port = fofaApi.query_ip(query_str)
                if fofa_Results:
                    webSpaceSheet.saveWebSpace('fofa', fofa_Results, query_str)
                    webSpace_web_host_port.extend(fofa_web_host_port)
                    webSpace_service_host_port.extend(fofa_service_host_port)
        if fofaTitle:
            query_str = 'title="{}" && country="CN"'.format(fofaTitle)
            fofa_Results, fofa_web_host_port, fofa_service_host_port = fofaApi.query_domain(query_str)
            if fofa_Results:
                webSpaceSheet.saveWebSpace('fofa', fofa_Results, query_str)
                # save_webSpace(fofa_Results, 'fofa', query_str)
                webSpace_web_host_port.extend(fofa_web_host_port)
                webSpace_service_host_port.extend(fofa_service_host_port)

    # shodan: root domain and each /24 (no title search).
    def run_shodan():
        if domain:
            query_str = 'hostname:"{}"'.format(domain)
            shodan_Results, shodan_web_host_port, shodan_service_host_port = shodanApi.query_domain(query_str)
            if shodan_Results:
                webSpaceSheet.saveWebSpace('shodan', shodan_Results, query_str)
                webSpace_web_host_port.extend(shodan_web_host_port)
                webSpace_service_host_port.extend(shodan_service_host_port)
        if CIP_List:
            for c_subnet in CIP_List:
                query_str = 'net:"{}/24"'.format(c_subnet)
                shodan_Results, shodan_web_host_port, shodan_service_host_port = shodanApi.query_ip(query_str)
                if shodan_Results:
                    webSpaceSheet.saveWebSpace('shodan', shodan_Results, query_str)
                    webSpace_web_host_port.extend(shodan_web_host_port)
                    webSpace_service_host_port.extend(shodan_service_host_port)

    # quake: root domain, each /24 and the optional title search.
    def run_quake():
        if domain:
            query_str = 'domain:"{}" AND country:"China"'.format(domain)
            quake_Results, quake_web_host_port, quake_service_host_port = quakeApi.query_domain(query_str)
            if quake_Results:
                webSpaceSheet.saveWebSpace('quake', quake_Results, query_str)
                webSpace_web_host_port.extend(quake_web_host_port)
                webSpace_service_host_port.extend(quake_service_host_port)
        if CIP_List:
            for c_subnet in CIP_List:
                query_str = 'ip:"{}/24"'.format(c_subnet)
                quake_Results, quake_web_host_port, quake_service_host_port = quakeApi.query_ip(query_str)
                if quake_Results:
                    webSpaceSheet.saveWebSpace('quake', quake_Results, query_str)
                    webSpace_web_host_port.extend(quake_web_host_port)
                    webSpace_service_host_port.extend(quake_service_host_port)
        if fofaTitle:
            query_str = 'title:"{}" AND country:"China"'.format(fofaTitle)
            # NOTE(review): a title query routed through query_ip (the fofa
            # branch uses query_domain) — confirm which endpoint is intended.
            quake_Results, quake_web_host_port, quake_service_host_port = quakeApi.query_ip(query_str)
            if quake_Results:
                webSpaceSheet.saveWebSpace('quake', quake_Results, query_str)
                webSpace_web_host_port.extend(quake_web_host_port)
                webSpace_service_host_port.extend(quake_service_host_port)

    # qianxin: root domain, each /24 and the optional title search.
    def run_qianxin():
        if domain:
            query_str = '(domain.suffix="{}")&&(country=="中国")'.format(domain)
            qianxin_Results, qianxin_web_host_port, qianxin_service_host_port = qianxinApi.query_domain(query_str)
            if qianxin_Results:
                webSpaceSheet.saveWebSpace('qianxin', qianxin_Results, query_str)
                webSpace_web_host_port.extend(qianxin_web_host_port)
                webSpace_service_host_port.extend(qianxin_service_host_port)
        if CIP_List:
            for c_subnet in CIP_List:
                query_str = 'ip="{}/24"'.format(c_subnet)
                qianxin_Results, qianxin_web_host_port, qianxin_service_host_port = qianxinApi.query_ip(query_str)
                if qianxin_Results:
                    webSpaceSheet.saveWebSpace('qianxin', qianxin_Results, query_str)
                    webSpace_web_host_port.extend(qianxin_web_host_port)
                    webSpace_service_host_port.extend(qianxin_service_host_port)
        if fofaTitle:
            query_str = '(title="{}")&&(country=="中国")'.format(fofaTitle)
            # NOTE(review): query_ip used for a title query here too — confirm.
            qianxin_Results, qianxin_web_host_port, qianxin_service_host_port = qianxinApi.query_ip(query_str)
            if qianxin_Results:
                webSpaceSheet.saveWebSpace('qianxin', qianxin_Results, query_str)
                webSpace_web_host_port.extend(qianxin_web_host_port)
                webSpace_service_host_port.extend(qianxin_service_host_port)

    # Worker: fofa-query every queued subdomain / non-CDN IP individually.
    def run_fofaOne(subdomainAndIP_Q):
        while not subdomainAndIP_Q.empty():
            subdomainOrIp = subdomainAndIP_Q.get()
            if isIP(subdomainOrIp):
                query_str = 'ip="{}"'.format(subdomainOrIp)
            else:
                query_str = 'domain="{}"'.format(subdomainOrIp)
            fofa_Results, fofa_web_host_port, fofa_service_host_port = fofaApi.query_ip(query_str)
            if fofa_Results:
                webSpaceSheet.saveWebSpace('fofa', fofa_Results, query_str)
                # save_webSpace(fofa_Results, 'fofa', query_str)
                webSpace_web_host_port.extend(fofa_web_host_port)
                webSpace_service_host_port.extend(fofa_service_host_port)

    run_fofa()
    run_shodan()
    run_quake()
    run_qianxin()
    # fofa over every subdomain-resolved, non-CDN IP, with 5 worker threads
    if SubdomainAndNotCDNIPs:
        subdomainAndIP_Q = Queue(-1)
        for subdomainAndIP in SubdomainAndNotCDNIPs:
            subdomainAndIP_Q.put(subdomainAndIP)
        threads = []
        for t_id in range(5):
            t = Thread(target=run_fofaOne, args=(subdomainAndIP_Q, ))
            threads.append(t)
            t.start()
        for t in threads:
            t.join()
    # order-preserving de-duplication of the service list
    serviceResult = []
    for _ in webSpace_service_host_port:
        if _ not in serviceResult:
            serviceResult.append(_)
    webSpace_service_host_port = serviceResult
    # persist the non-web services
    serviceSheet.saveService(webSpace_service_host_port)
    return webSpace_web_host_port, webSpace_service_host_port
# Split fofa title-search hits into related domains and /24 subnet counts.
def collation_fofaDomainIP(webSpace_web_host_port):
    """Classify each hit as a bare IP or a domain; rebuild the global
    ``ip_count`` Counter from the /24 subnets and extend ``newDomains``."""
    raw_ips = []
    subnet_list = []
    related_domains = []
    for entry in webSpace_web_host_port:
        parsed = urlparse(entry)
        # scheme-less entries put the host in .path instead of .netloc
        host = (parsed.netloc if parsed.scheme else parsed.path).split(':')[0]
        if isIP(host):
            raw_ips.append(host)
        else:
            related_domains.append(host)
    for ip in set(raw_ips):
        c_net = str(IP(ip).make_net('255.255.255.0')).rsplit('.', 1)[0] + '.0'
        subnet_list.append(c_net)
    global ip_count
    ip_count = Counter(subnet_list)
    newDomains.extend(related_domains)
# Reverse-resolve IPs to domains; related root domains feed ``newDomains``.
def get_ip2domain():
    """Run reverse IP lookup over the target queue and return the
    deduplicated subdomain hits (filtered to ``domain`` when one is set)."""
    cprint('-' * 50 + 'ip to domain ...' + '-' * 50, 'green')
    from Plugins.infoGather.subdomain.ip2domain import getIp2Domain
    # ip2domain_dict: {ip: [domains hosted on that ip]}
    ip2domain_dict, _newDomains = getIp2Domain.run_ip2domain(domain, allTargets_Queue)
    newDomains.extend(_newDomains)
    # keep only hits matching the target domain; keep everything when no domain given
    found = {
        sub
        for subs in ip2domain_dict.values()
        for sub in subs
        if not domain or domain in sub
    }
    sheet = saveToExcel(excelSavePath, excel, 'ip反查域名')
    sheet.saveIp2Domain(ip2domain_dict)
    return list(found)
# Geolocate the web-service IPs.
def get_ip_address(web_ip_list):
    """Return {ip: location} for every IP in *web_ip_list*."""
    cprint('-' * 50 + 'get ip address ...' + '-' * 50, 'green')
    from Plugins.infoGather.subdomain.ipAddress import getIpAddress
    return getIpAddress.run_getIpAddress(web_ip_list)
# Normalize every discovered host into a full http(s)://host[:port] URL.
def collation_web_host(Subdomains_ips, webSpace_web_host_port, ip2domainSubdomains):
    """Merge subdomains, reverse-lookup hits and engine hits into one URL list.

    Returns (web_host_port, web_ip_list): the normalized, deduplicated web
    URLs and the bare IPs seen among them.
    """
    cprint('-' * 50 + 'collation_web_host ...' + '-' * 50, 'green')
    normalized = []   # final URLs
    candidates = []   # raw host[:port] entries before normalization
    web_ip_list = []  # bare IPs that expose web services
    for subdomain in set(list(Subdomains_ips.keys()) + ip2domainSubdomains):
        # reverse-lookup results may already carry a port (e.g. 1.2.3.4:999)
        if ':' in subdomain:
            candidates.append(subdomain)
        else:
            candidates.append('{}:80'.format(subdomain))
    candidates.extend(webSpace_web_host_port)
    candidates = list(set(candidates))
    # give every entry an explicit scheme: 443 -> https, anything else -> http
    for host_port in candidates:
        if not urlparse(host_port).scheme:
            if ':' in host_port:
                try:
                    host, port = host_port.split(':')
                    if isIP(host):
                        web_ip_list.append(host)
                    if port == '443':
                        host_port = 'https://{}'.format(host)
                    elif port == '80':
                        host_port = 'http://{}'.format(host)
                    else:
                        host_port = 'http://{}'.format(host_port)
                except Exception:
                    # multi-colon entries are appended unchanged
                    pass
            else:
                host_port = 'http://{}'.format(host_port)
        normalized.append(host_port)
    return list(set(normalized)), list(set(web_ip_list))
# Intranet port scan plus hostname/extra-IP harvesting via OXID.
def scan_IntranetPorts():
    """Port-scan the queued intranet targets, resolve hostnames/extra IPs
    via OXID, persist both, and return (web_host_port, alive_host_List)."""
    from Plugins.infoGather.Intranet.scanPort import scanPort
    from Plugins.infoGather.Intranet import getMoreIp
    tqdm.write(Fore.BLACK + '-' * 50 + 'scan_IntranetPorts ...' + '-' * 50)
    web_host_port, service_host_port, alive_host_List = scanPort.run_ScanPort(allTargets_Queue, proxy)
    tqdm.write(Fore.BLACK + '-' * 50 + 'get_IntranetHostName and IP ...' + '-' * 50)
    # OXID resolution can reveal hostnames and additional NIC addresses
    alive_hostname_ips = getMoreIp.run_getMoreIp(alive_host_List)
    saveToExcel(excelSavePath, excel, '内网服务').saveService(service_host_port)
    saveToExcel(excelSavePath, excel, '内网主机名和IP').saveHostNameAndIps(alive_hostname_ips)
    return web_host_port, alive_host_List
# Probe candidate URLs for liveness and fetch page titles.
def run_getWebTitle(web_host_port, ip_address_dict):
    """Return (web_Titles, alive_Web): full title rows, and the URLs whose
    status column is not the 65535 dead-marker."""
    tqdm.write(Fore.BLACK + '-' * 50 + 'run_getWebTitle ...' + '-' * 50)
    from Plugins.infoGather.webInfo import getWebTitle
    # be gentle on intranets; go wide on the internet
    threadNum = 10 if isIntranet == 1 else 300
    # title probing never goes through the kuaidaili proxy
    requests_proxies = None
    web_Titles = getWebTitle.run_getWebTitle(web_host_port, ip_address_dict, requests_proxies, threadNum)
    # rows with 65535 in column 1 are treated as not alive by this filter
    alive_Web = [row[0] for row in web_Titles if row[1] != 65535]
    sheet = saveToExcel(excelSavePath, excel, '存活网站标题')
    sheet.saveWebTitle(web_Titles)
    return web_Titles, alive_Web
# Web vulnerability detection: ObserverWard fingerprints, bundled plugins, Nuclei.
def detect_webVul(alive_Web):
    """Run all web vulnerability checks against *alive_Web* (live URLs).

    Returns webVul_list: rows like [['shiro', 'http://127.0.0.1'], ...],
    filled in turn by ObserverWard, the Plugins/Vul/Web/ scripts and Nuclei.
    """
    # Run the bundled vulnerability plugins under Plugins/Vul/Web/.
    def runSelfVul():
        vul_path = os.getcwd() + '/Plugins/Vul/Web/'
        sys.path.append(vul_path)  # make plugin modules importable by name
        # skip .pyc files, __init__.py and other dunder files
        vulList = filter(lambda x: (True, False)[x[-3:] == 'pyc' or x[-5:] == '__.py' or x[:2] == '__'],
                         os.listdir(vul_path))
        # plugins that are still allowed to run against intranet targets
        intPassVul = ['Jboss.py', 'phpstudy.py', 'weblogic.py', 'cms.py', 'yongyou.py', 'easyConnect.py', 'shiro.py']
        for vulName in vulList:
            tqdm.write(Fore.BLACK + '-' * 50 + 'detect ' + vulName[:-3] + '-' * 50)
            md = __import__(vulName[:-3])  # import plugin by file name (sans .py)
            try:
                if hasattr(md, 'Detect'):
                    detect = getattr(md, 'Detect')  # plugin worker class (start()ed like a Thread below)
                    alive_Web_queue = Queue(-1)  # queue of live web targets to test
                    for _ in alive_Web:
                        alive_Web_queue.put(_)
                    threads = []
                    if isIntranet == 1:
                        threadNum = 30  # gentler threading for intranet scans
                        if vulName in intPassVul:
                            pass
                        else:
                            tqdm.write(Fore.BLACK + '内网不跑{}漏洞'.format(vulName))
                            continue  # plugin not suitable for intranet targets
                    else:
                        threadNum = 100  # external scans can go wide
                    # throttle to the configured limit when using the kuaidaili proxy
                    if kuaidaili_thread_num:
                        threadNum = int(kuaidaili_thread_num)
                    pbar = tqdm(total=alive_Web_queue.qsize(), desc="检测Web漏洞", ncols=150)
                    for num in range(1, threadNum + 1):
                        # workers share the target queue, progress bar and result list
                        t = detect(alive_Web_queue, pbar, webVul_list, requests_proxies)
                        threads.append(t)
                        t.start()
                    for t in threads:
                        t.join()
                    pbar.close()
            except Exception as e:
                tqdm.write(Fore.BLACK + r'[-] Load Vul [{}] Error: {}'.format(vulName, e.args))
                continue

    # Fingerprint the targets with ObserverWard.
    def runObserverWard():
        cprint('-' * 50 + 'Load ObserverWard ...' + '-' * 50, 'green')
        from Plugins.Vul.ObserverWard.ObserverWardApi import run_observerWard
        observerWardVul_list = run_observerWard(alive_Web)
        webVul_list.extend(observerWardVul_list)

    # Scan the targets with Nuclei templates.
    def runNucleiVul():
        cprint('-' * 50 + 'Load Nuclei ...' + '-' * 50, 'green')
        from Plugins.Vul.Nuclei.NucleiApi import run_nuclei
        nucleiVul_list = run_nuclei(alive_Web)
        webVul_list.extend(nucleiVul_list)

    tqdm.write(Fore.BLACK + '-' * 50 + 'detect Web vul' + '-' * 50)
    webVul_list = []  # shared result accumulator for all three stages
    runObserverWard()
    runSelfVul()
    runNucleiVul()
    # print(webVul_list)
    return webVul_list
# Parameter-based vulnerability detection (e.g. SQL injection) on dynamic links.
def detect_paramVul(param_Links):
    """Run every plugin under Plugins/Vul/Param/ against *param_Links*.

    Returns the combined findings from all plugins, e.g.
    [['SQL', 'http://127.0.0.1/a.php?id=1'], ...].
    """
    tqdm.write(Fore.BLACK + '-' * 50 + 'detect param vul' + '-' * 50)
    paramVul_list = []  # accumulated findings across all plugins
    vul_path = os.getcwd() + '/Plugins/Vul/Param/'
    sys.path.append(vul_path)  # make plugin modules importable by name
    # skip .pyc files, __init__.py and other dunder files
    vulList = filter(lambda x: (True, False)[x[-3:] == 'pyc' or x[-5:] == '__.py' or x[:2] == '__'],
                     os.listdir(vul_path))
    for vulName in vulList:
        tqdm.write(Fore.BLACK + '-' * 50 + 'detect ' + vulName[:-3] + '-' * 50)
        md = __import__(vulName[:-3])  # import plugin by file name (sans .py)
        try:
            # BUG FIX: the results used to be reassigned each iteration, so
            # only the last plugin's findings survived; accumulate instead.
            paramVul_list.extend(md.detect(param_Links))
        except Exception as e:
            tqdm.write(Fore.BLACK + r'[-] Load Vul [{}] Error: {}'.format(vulName, e.args))
            continue
    return paramVul_list
# Unauthorized-access and weak-credential checks for non-web services.
def detect_unauthWeakVul(service_host_port):
    """Check each (protocol, ip, port) entry for unauthorized access or weak
    credentials using the plugins under Plugins/Vul/Service/.

    Weak-password checks only run when the global ``weak`` flag is set.
    Returns the accumulated findings list.
    """
    tqdm.write(Fore.BLACK + '-' * 50 + 'detect unauth vul' + '-' * 50)
    tqdm.write(Fore.BLACK + 'service_host_port : {}'.format(service_host_port))
    # (A previously-built Queue of services was never consumed — dead code removed.)
    # map: protocol -> plugin module name under Plugins/Vul/Service/
    serviceVulName = {'redis': 'unAuthRedis', 'elastic': 'unAuthElastic', 'mongodb': 'unAuthMongodb', 'ldaps': 'unAuthLdaps',
                      'zookeeper': 'unAuthZookeeper', 'ftp': 'weakFTP', 'ssh': 'weakSSH', 'mssql': 'weakMSSQL',
                      'mysql': 'weakMYSQL', 'rdp': 'weakRDP'}
    # weak-password dictionary file per protocol
    weakTxtDict = {'ftp': 'dic_password_ftp.txt', 'ssh': 'dic_password_ssh.txt', 'mssql': 'dic_password_sqlserver.txt',
                   'mysql': 'dic_password_mysql.txt', 'rdp': 'dic_password_rdp.txt'}
    serviceWeakPwds = {}
    # preload every password dictionary once
    for protocol in weakTxtDict.keys():
        weakPwdTxt = './iniFile/PwdTxt/{}'.format(weakTxtDict[protocol])
        with open(weakPwdTxt, 'rt') as f:
            serviceWeakPwds[protocol] = f.readlines()
    unauthWeakVul_list = []  # findings, e.g. [['redis', ip, port], ...]
    unauthVul_path = os.getcwd() + '/Plugins/Vul/Service/'
    sys.path.append(unauthVul_path)  # make plugin modules importable by name

    def detect_unauthWeak(protocol, ip, port):
        # Run the matching plugin for a single service entry.
        if protocol in serviceVulName.keys():
            vulName = serviceVulName[protocol]
            if not weak and protocol in weakTxtDict.keys():
                return  # weak-password checks disabled by the ``weak`` flag
            if protocol in serviceWeakPwds.keys():
                weakPwdsList = serviceWeakPwds[protocol]  # candidate passwords
            else:
                weakPwdsList = []
            tqdm.write(Fore.BLACK + 'test [{}] : {} {}'.format(vulName, ip, port))
            try:
                md = __import__(vulName)  # import plugin by module name
                if hasattr(md, 'Detect'):
                    detect = getattr(md, 'Detect')
                    detect(ip, port, unauthWeakVul_list).run_detect(weakPwdsList)
            except Exception as e:
                tqdm.write(Fore.BLACK + r'[-] Load Vul [{}] Error: {}'.format(vulName, e.args))

    pbar = tqdm(total=len(service_host_port), desc="检测未授权漏洞", ncols=150)
    for _ in service_host_port:
        protocol, ip, port = _
        detect_unauthWeak(protocol, ip, port)
        pbar.update(1)
    pbar.close()
    return unauthWeakVul_list
# Windows-host vulnerability detection (e.g. MS17-010, CVE-2020-0796).
def detect_winVul(alive_host_List):
    """Run every plugin under Plugins/Vul/Win/ against the alive hosts.

    alive_host_List: hosts that answered the port scan.
    Returns findings like [['CVE-2020-0796', '127.0.0.1'], ...].
    """
    cprint('-' * 50 + 'detect Windows vul' + '-' * 50, 'green')
    winVul_list = []  # shared accumulator filled by the plugin worker threads
    vul_path = os.getcwd() + '/Plugins/Vul/Win/'
    sys.path.append(vul_path)  # make plugin modules importable by name
    # skip .pyc files, __init__.py and other dunder files
    vulList = filter(lambda x: (True, False)[x[-3:] == 'pyc' or x[-5:] == '__.py' or x[:2] == '__'],
                     os.listdir(vul_path))
    for vulName in vulList:
        cprint('-' * 50 + 'detect ' + vulName[:-3] + '-' * 50, 'green')
        md = __import__(vulName[:-3])  # import plugin by file name (sans .py)
        try:
            if hasattr(md, 'Detect'):
                detect = getattr(md, 'Detect')  # plugin worker class (start()ed like a Thread below)
                alive_host_queue = Queue(-1)  # queue of hosts to test
                for _ in alive_host_List:
                    alive_host_queue.put(_)
                threads = []
                if isIntranet == 1:
                    threadNum = 5  # keep intranet scans gentle
                else:
                    threadNum = 200  # external scans can go wide
                for num in range(1, threadNum + 1):
                    # workers share the host queue and the result list
                    t = detect(alive_host_queue, winVul_list, proxy)
                    threads.append(t)
                    t.start()
                for t in threads:
                    t.join()
        except Exception as e:
            print(r'[-] Load Vul [{}] Error: {}'.format(vulName, e.args))
            continue
    return winVul_list
# Print every confirmed vulnerability and archive the list to excel.
def printSave_Vul(Vul_list):
    """Log each (name, url, evidence) finding and persist the list; prints a
    'none found' banner when the list is empty."""
    if not Vul_list:
        tqdm.write(Fore.BLACK + '-' * 50 + 'Non-existent vulnerabilities' + '-' * 50)
        return
    tqdm.write(Fore.BLACK + '-' * 50 + 'Vulnerabilities exist ' + '-' * 50)
    for Vul_Name, Vul_url, Vul_exist in Vul_list:
        tqdm.write(Fore.BLACK + '[{}] {} {}'.format(Vul_Name, Vul_url, Vul_exist))
    sheet = saveToExcel(excelSavePath, excel, '漏洞')
    sheet.saveVul(Vul_list)
# 15. Persist related intel: newly found root domains and C-subnet counts.
def saveRelatedInfo(newDomains, ip_count):
    """Write *newDomains* and *ip_count* to the '相关域名和C段' sheet."""
    sheet = saveToExcel(excelSavePath, excel, '相关域名和C段')
    sheet.saveNewDomainAndCSubnet(newDomains, ip_count)