limited — WriteUP

Kyrie-Lrving 2024-10-08 16:53:26 99 0


from scapy.all import *
from scapy.layers.http import *
from bs4 import BeautifulSoup
from collections import defaultdict
from ptrlib import chinese_remainder_theorem
import urllib.parse
import re

def get_requests_url():
    """Return the request Path of every HTTP request sent to 10.128.0.2 in packet.pcap."""
    captured = sniff(offline="./packet.pcap", session=TCPSession)
    paths = []
    for pkt in captured:
        # Only IP traffic addressed to the target web server is relevant.
        if not pkt.haslayer("IP") or pkt[IP].dst != "10.128.0.2":
            continue
        if pkt.haslayer("HTTPRequest"):
            paths.append(pkt.getlayer("HTTPRequest").fields["Path"])
    return paths

def get_response_data():
    """Return the raw payload of every HTTP response coming from 10.128.0.2 in packet.pcap."""
    captured = sniff(offline="./packet.pcap", session=TCPSession)
    bodies = []
    for pkt in captured:
        # Keep only IP packets originating from the target web server.
        if not pkt.haslayer("IP") or pkt[IP].src != "10.128.0.2":
            continue
        # A response with an attached Raw layer carries the HTML body we need.
        if pkt.haslayer("HTTPResponse") and pkt.haslayer("Raw"):
            bodies.append(bytes(pkt.getlayer("Raw")))
    return bodies

# Pair each injected request with its response; the [5:-4] slice trims
# unrelated traffic at the start/end of the capture (presumably setup and
# teardown requests — verify against the pcap).
urls = get_requests_url()[5:-4]
print("[+] urls:", len(urls))
datas = get_response_data()[5:-4]
print("[+] datas:", len(datas))

# For each flag position, collect (residue, modulus) pairs recovered from the
# blind-SQLi queries; CRT later reconstructs the character value.
d = defaultdict(list)
p = re.compile(r"\(SELECT unicode\(substr\(secret, (\d+), 1\)\) FROM account WHERE name=\"admin\"\) % (\d+)")
for url, data in zip(urls, datas):
    decoded = urllib.parse.unquote_plus(url.decode())
    # Number of result rows in the HTML table encodes the query's remainder;
    # the -3 offset discards the header rows (assumed fixed — confirm in pcap).
    hits = len(BeautifulSoup(data, "html.parser").find_all("th")) - 3
    a, b = p.findall(decoded)[0]
    print(f"[+] {a}: x % {b} = {hits}")
    d[a].append([int(hits), int(b)])

# Rebuild each flag byte from its residue system via the CRT.
flag = b""
for i in range(1, 50):
    flag += bytes([chinese_remainder_theorem(d[str(i)])[0]])
print(flag)

分类:MISC
image
作者:Kyrie-Lrving

41

提交

0

收入

相关WriteUP

问题反馈