initial commit

Gregor Michels 2025-06-26 21:47:46 +02:00
commit f10ceb07f1
3 changed files with 183 additions and 0 deletions

.gitignore vendored Normal file (1 addition)

@@ -0,0 +1 @@
environment/

environment.txt Normal file (5 additions)

@@ -0,0 +1,5 @@
certifi==2025.6.15
charset-normalizer==3.4.2
idna==3.10
requests==2.32.4
urllib3==2.5.0

main.py Executable file (177 additions)

@@ -0,0 +1,177 @@
#!/usr/bin/env python3
from bs4 import BeautifulSoup
import time
import requests
import pdb
import os
import random
import re
from enum import Enum
IP = "192.168.42.100"
class VLAN_PORT_MODE(Enum):
GENERAL = 0
ACCESS = 1
TRUNK = 2
DOT1QTUNNEL = 3
class VLAN_PORT_ACL(Enum):
EXCLUDED = 0
FORBIDDEN = 1
TAGGED = 2
UNTAGGED = 3
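# Appears to mirror the obfuscation the Zyxel web UI applies to the password
# before submitting the login form: the password characters are inserted in
# reverse order at every 5th position, the password length is encoded at
# positions 123 (tens digit) and 289 (ones digit), and every other position is
# random filler, yielding a string of 321 - len(pw) characters.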
def zyxel_password(pw: str) -> str:
    alphabet = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789"
    result = ""
    l = len(pw)
    for i in range(1, 322 - len(pw)):
        if i % 5 == 0 and l > 0:
            l -= 1
            result += pw[l]
        elif i == 123:
            if len(pw) < 10:
                result += "0"
            else:
                c = str(len(pw) // 10)
                result += c
        elif i == 289:
            result += str(len(pw) % 10)
        else:
            rnd = random.choice(list(alphabet))
            result += rnd
    return result

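# Log in against dispatcher.cgi: a GET with the obfuscated password returns an
# authId token, which is then POSTed back to obtain the HTTP_XSSID session
# cookie used by all later requests.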
def get_login_cookie(host: str, username: str, password: str) -> str:
    auth_id = requests.get(
        f"http://{host}/cgi-bin/dispatcher.cgi",
        params={"login": 1, "username": username, "password": zyxel_password(password)},
    )
    if auth_id.status_code != 200:
        raise Exception(f"error while getting auth_id {auth_id.text}")
    time.sleep(0.5)
    cookie = requests.post(
        f"http://{host}/cgi-bin/dispatcher.cgi",
        data={"authId": auth_id.text.strip(), "login_chk": "true"},
    )
    if cookie.text.strip() != "OK,":
        raise Exception(f"error while getting cookie {cookie.text}")
    return cookie.cookies.get("HTTP_XSSID")

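# Fetch an arbitrary dispatcher.cgi page by its numeric cmd id, passing any
# extra query parameters along with the session cookie, and return the raw HTML.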
def get_cmd(host: str, xssid: str, cmd: int, **params) -> str:
    params.update({"cmd": cmd})
    return requests.get(
        f"http://{host}/cgi-bin/dispatcher.cgi",
        params=params,
        cookies={"HTTP_XSSID": xssid},
    ).text

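# Parse the VLAN overview page (presumably cmd 1283, see the commented-out call
# in __main__) into {vlan_id: {"name": ..., "type": ...}}.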
def get_vlans(response: str) -> dict:
    soup = BeautifulSoup(response, "html.parser")
    data = {}
    table_body = soup.find("table", attrs={"width": "560"})
    rows = table_body.find_all("tr")
    for row in rows[1:]:
        cols = row.find_all("td")
        cols = [ele.text.strip() for ele in cols]
        try:
            _, vlan_id, vlan_name, vlan_type, _, _ = cols
            data.update(
                {
                    vlan_id: {
                        "name": vlan_name,
                        "type": vlan_type,
                    }
                }
            )
        except ValueError:
            continue
    return data

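# Parse the per-port membership page of a single VLAN (fetched via cmd 1293
# with vid=...) into {port: {"mode": VLAN_PORT_MODE, "acl": VLAN_PORT_ACL}} by
# reading each hidden vlanMode_<n> input and the checked input that follows it.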
def parse_vlan_ports(response: str) -> dict:
    soup = BeautifulSoup(response, "html.parser")
    data = {}
    # <input type="hidden" id="vlanMode_33" name="vlanMode_33" value="0">
    port_settings = soup.find_all(
        "input", attrs={"type": "hidden", "id": re.compile(r"vlanMode_\d+")}
    )
    for port_setting in port_settings:
        port = port_setting.find_previous().text.strip()
        port_type = int(port_setting.attrs.get("value"))
        acl = list(
            filter(
                lambda p: "checked" in p.attrs,
                port_setting.find_next().find_all("input"),
            )
        )[0]
        data.update(
            {
                port: {
                    "mode": VLAN_PORT_MODE(port_type),
                    "acl": VLAN_PORT_ACL(int(acl.attrs.get("value"))),
                }
            }
        )
    return data

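# Generic fallback parser: take the last <table> on the page, use its first row
# as column headers, and return one dict per remaining row (empty rows and
# unnamed columns are dropped).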
def extract_data_from_table(response: str) -> list:
    soup = BeautifulSoup(response, "html.parser")
    table_body = soup.find_all("table")[-1]
    rows = table_body.find_all("tr")
    keys = [ele.text.strip() for ele in rows[0].find_all("td")]
    entries = []
    for row in rows[1:]:
        cols = row.find_all("td")
        cols = [ele.text.strip() for ele in cols]
        entries.append({k: v for (k, v) in zip(keys, cols) if k.strip() != ""})
    return list(filter(lambda e: e, entries))

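# Re-key a list of row dicts by one of their columns, e.g. turning
# [{"Port": "1", "PVID": "1"}] with key "Port" into {"1": {"PVID": "1"}}
# (the key column itself is removed from each entry).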
def dictify(data: list, key: str) -> dict:
    result = {}
    for d in data:
        k = d[key]
        del d[key]
        result.update({k: d})
    return result

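# Quick manual test: log in with the password taken from the ADMIN_PW
# environment variable and dump the port settings of VLAN 2.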
if __name__ == "__main__":
    cookie = get_login_cookie(IP, "admin", os.environ.get("ADMIN_PW"))
    print(parse_vlan_ports(get_cmd(IP, cookie, 1293, vid=2)))
    # print(dictify(extract_data_from_table(get_cmd(IP, cookie, 1283)), "VLAN ID"))
    # print(dictify(extract_data_from_table(get_cmd(IP, cookie, 1290)), "Port"))
    # vlans = get_vlans(get_cmd(IP, cookie, 1283))
    # print(vlans)