from urllib import parse
import threading
import argparse
import requests
import logging
import queue

parser = argparse.ArgumentParser()
parser.add_argument('url', help='Target URL')
parser.add_argument('-t', help='Number of worker threads', type=int, default=2)
parser.add_argument('-a', help='Show all messages (log every request at INFO level)',
                    action='store_const', const=logging.INFO, default=logging.WARNING)
args = parser.parse_args()

logging.basicConfig(level=args.a, format='%(asctime)s - %(levelname)s - %(message)s')


class WeakFileScan(object):
    def __init__(self, url, threads):
        self.url = url
        self.threads = threads
        self.queue = queue.Queue()
        # X-Forwarded-For: 127.0.0.1 is a common (naive) IP-whitelist bypass trick.
        self.headers = {
            'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_1) '
                          'AppleWebKit/537.36 (KHTML, like Gecko) '
                          'Chrome/63.0.3239.84 Safari/537.36',
            'X-Forwarded-For': '127.0.0.1',
        }

    def run(self):
        # Queue every candidate URL first, then scan with a pool of threads.
        for item in self._dict():
            self.queue.put(item)
        workers = [threading.Thread(target=self._request) for _ in range(self.threads)]
        for t in workers:
            t.start()
        # Join only after all workers have started; joining inside the start
        # loop would run the threads one at a time and defeat the -t option.
        for t in workers:
            t.join()

    def _request(self):
        while True:
            try:
                # get_nowait avoids the empty()/get() race between workers.
                url = self.queue.get_nowait()
            except queue.Empty:
                break
            logging.info('GET {0}'.format(url))
            try:
                resp = requests.head(url, headers=self.headers, timeout=10)
            except requests.RequestException:
                # Skip unreachable hosts instead of killing the worker thread.
                continue
            if resp.status_code == 200:
                logging.warning('FOUND {0}'.format(url))

    def _parse(self, url):
        # Host part only, e.g. 'www.example.com' from 'http://www.example.com/'.
        return parse.urlparse(url).netloc

    def _join(self, *args):
        return parse.urljoin(*args)

    def _dict(self):
        # Combine common backup names and hostname variants with archive
        # extensions. The hostname splitting assumes a dotted host such as
        # www.example.com; a bare host like 'localhost' would raise IndexError.
        host = self._parse(self.url)
        a = ('bak', 'backup', 'www', 'web', 'wwwroot', 'beifen', 'ftp',
             'website', 'back', 'backupdata', 'temp', 'htdocs', 'database',
             'data', 'user', 'admin', 'test', 'conf', 'config', 'db', 'sql',
             'install', 'w', 'bf', 'aaa', '0', '1', '2', '3', '4', '5', '6',
             '7', '8', '9')
        b = (host, host.replace('.', ''), host.split('.', 1)[1],
             host.split('.', 1)[0], host.split('.')[1], host.split('.')[-1])
        c = ('.rar', '.zip', '.tar', '.tar.gz', '.tar.bz2', '.tar.xz', '.gz',
             '.bz2', '.xz', '.tgz', '.7z', '.z')
        for x in a + b:
            for y in c:
                yield self._join(self.url, x + y)
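
# Illustration only (the target URL below is an assumption, not from the
# source): for self.url = 'http://www.example.com/', _dict() yields
# candidates such as
#   http://www.example.com/bak.rar
#   http://www.example.com/wwwexamplecom.zip
#   http://www.example.com/example.com.tar.gz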


scanner = WeakFileScan(args.url, args.t)
scanner.run()
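
# Example invocation (a sketch; the filename weakfilescan.py is an assumption,
# not given in the source):
#   python weakfilescan.py http://www.example.com/ -t 4 -a
# Note the trailing slash on the target: urljoin replaces the last path
# segment, so 'http://example.com/app' would be probed as /bak.rar rather
# than /app/bak.rar.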