from scapy.all import *
from scapy.layers.http import *
from bs4 import BeautifulSoup
from collections import defaultdict
from ptrlib import chinese_remainder_theorem
import urllib.parse
import re

def get_requests_url():
    # Extract the Path of every HTTP request sent to the target server (10.128.0.2).
    urls = []
    packets = sniff(offline="./packet.pcap", session=TCPSession)
    for packet in packets:
        if not packet.haslayer("IP"):
            continue
        if packet[IP].dst != "10.128.0.2":
            continue
        if packet.haslayer("HTTPRequest"):
            request = packet.getlayer("HTTPRequest").fields
            urls.append(request["Path"])
    return urls

def get_response_data():
    # Extract the raw body of every HTTP response coming back from the target server.
    datas = []
    packets = sniff(offline="./packet.pcap", session=TCPSession)
    for packet in packets:
        if not packet.haslayer("IP"):
            continue
        if packet[IP].src != "10.128.0.2":
            continue
        if packet.haslayer("HTTPResponse") and packet.haslayer("Raw"):
            raw = bytes(packet.getlayer("Raw"))
            datas.append(raw)
    return datas

# Drop the requests/responses that are not part of the injection (first 5, last 4).
urls = get_requests_url()[5:-4]
print("[+] urls:", len(urls))
datas = get_response_data()[5:-4]
print("[+] datas:", len(datas))

# Each injected query leaks one character of the secret as a remainder:
# (SELECT unicode(substr(secret, i, 1)) ...) % b.
# Group the observed (remainder, modulus) pairs by character position i.
d = defaultdict(list)
p = re.compile(r"\(SELECT unicode\(substr\(secret, (\d+), 1\)\) FROM account WHERE name=\"admin\"\) % (\d+)")
for url, data in zip(urls, datas):
    url = urllib.parse.unquote_plus(url.decode())
    soup = BeautifulSoup(data, "html.parser")
    # The remainder shows up as the number of <th> cells in the result table,
    # minus the 3 header cells that are always present.
    hits = len(soup.find_all("th")) - 3
    a, b = p.findall(url)[0]
    print(f"[+] {a}: x % {b} = {hits}")
    d[a].append([int(hits), int(b)])

# Combine the congruences with the Chinese Remainder Theorem to recover
# the characters at positions 1..49 of the secret.
flag = b""
for i in range(1, 50):
    flag += bytes([chinese_remainder_theorem(d[str(i)])[0]])
print(flag)
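
To sanity-check the last step, here is a minimal stand-alone sketch of the CRT recombination. The character value 67 ("C") and the moduli 11, 13 and 17 are made-up illustrative numbers, and the hand-rolled crt() only stands in for ptrlib's chinese_remainder_theorem:

def crt(pairs):
    # Incrementally solve x = r (mod n) for every (remainder, modulus) pair;
    # the moduli are assumed pairwise coprime so pow(m, -1, n) exists.
    x, m = 0, 1
    for r, n in pairs:
        x += m * ((r - x) * pow(m, -1, n) % n)
        m *= n
    return x % m

# 67 % 11 = 1, 67 % 13 = 2, 67 % 17 = 16; the three leaked remainders
# uniquely determine the byte because 11 * 13 * 17 > 255.
assert crt([(1, 11), (2, 13), (16, 17)]) == ord("C")  # 67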