55from requests .exceptions import HTTPError
66import requests
77from requests import get
8-
8+ import re
99from .link_io import LinkIO
1010
1111
@@ -32,10 +32,10 @@ def execute_all(link, *, display_status=False):
3232 page ,response = LinkIO .read (link , response = True , show_msg = display_status )
3333 response = get (link , verify = False ).text
3434 soup = BeautifulSoup (page , 'html.parser' )
35- validation_functions = [get_robots_txt , get_dot_git , get_dot_svn , get_dot_git , get_intel ]
35+ validation_functions = [get_robots_txt , get_dot_git , get_dot_svn , get_dot_git , get_intel , get_bitcoin_address ]
3636 for validate_func in validation_functions :
3737 try :
38- validate_func (link ,response )
38+ validate_func (link , response )
3939 except (ConnectionError , HTTPError ):
4040 cprint ('Error' , 'red' )
4141
@@ -68,15 +68,15 @@ def get_robots_txt(target,response):
6868 cprint ("Robots.txt found" ,'blue' )
6969 print (robots )
7070
71+
def get_intel(link, response):
    """Scan the raw response body for intel and print what was found.

    "Intel" here is two things the regex below looks for:
    group 1 - S3-style ``*.amazonaws.com`` hostnames, group 2 - email
    addresses.

    Parameters:
        link: the URL being scanned (unused here; kept for the common
            validator signature used by execute_all).
        response: the page body as a string.

    Returns:
        The set of matched strings (empty set when nothing matched).
    """
    intel = set()
    matches = re.findall(
        r'''([\w\.-]+s[\w\.-]+\.amazonaws\.com)|([\w\.-]+@[\w\.-]+\.[\.\w]+)''',
        response,
    )
    print("Intel\n --------\n \n ")
    for match in matches:
        # findall() with two alternation groups yields (group1, group2)
        # tuples where the non-matching side is "". Keep only the
        # non-empty strings so the set holds clean values, not tuples.
        for item in match:
            if item:
                intel.add(item)
    # The previous version built the set and discarded it; show the results.
    for item in intel:
        print(item)
    return intel
8080
8181def get_dot_git (target ,response ):
8282 cprint ("[*]Checking for .git folder" , 'yellow' )
@@ -91,6 +91,13 @@ def get_dot_git(target,response):
9191 cprint ("NO .git folder found" , 'blue' )
9292
9393
def get_bitcoin_address(target, response):
    """Find candidate Bitcoin (P2PKH/P2SH) addresses embedded in the page.

    The Base58 pattern matches strings starting with 1 or 3, 26-35 chars
    total, excluding the ambiguous characters 0, O, I and l. Word
    boundaries replace the original ``^``/``$`` anchors: without
    re.MULTILINE those anchors bound the whole string, so the old pattern
    could only match when the entire response was a single bare address
    and never found one embedded in HTML.

    Parameters:
        target: the URL being scanned (unused here; kept for the common
            validator signature used by execute_all).
        response: the page body as a string.

    Returns:
        The list of matched address strings (also printed).
    """
    bitcoins = re.findall(r'\b[13][a-km-zA-HJ-NP-Z1-9]{25,34}\b', response)
    print("BTC FOUND: ", len(bitcoins))
    for bitcoin in bitcoins:
        print("BTC: ", bitcoin)
    return bitcoins
99+
100+
94101def get_dot_svn (target ,response ):
95102 cprint ("[*]Checking for .svn folder" , 'yellow' )
96103 url = target
@@ -121,13 +128,10 @@ def get_dot_htaccess(target,response):
121128
122129
def display_webpage_description(soup):
    """Dump every <meta> tag found in the parsed page, one per line."""
    cprint("[*]Checking for meta tag", 'yellow')
    for tag in soup.find_all('meta'):
        print("Meta : ", tag)
131135
132136
133137def writer (datasets , dataset_names , output_dir ):
0 commit comments