#!/usr/bin/python3
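"""List expired domains scraped from https://domene.shop/expired.

An optional first command-line argument sets a maximum domain length
(see cleanlist); with no argument, every expired domain on the page is printed.
"""
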
import sys

# The next line is for my setup. This might not be correct for every setup!
sys.path.append("/opt/homebrew/lib/python3.11/site-packages")
import cmd
import requests
from bs4 import BeautifulSoup

try:
    max_len = int(sys.argv[1])  # optional first argument: maximum domain length
except IndexError:
    max_len = None  # no argument given: do not filter by length


def cleanlist(my_list):
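    """Return only the domains that fit the requested length.

    The "+ 3" in the length check presumably leaves room for a three-character
    TLD suffix such as ".no"; with no max_len set, the list is returned
    unchanged.
    """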
    if max_len is not None:
        return [x for x in my_list if len(x) <= (max_len + 3)]
    return my_list


def fetch():
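    """Download the expired-domains page and return its raw HTML content."""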
    URL = "https://domene.shop/expired"
    page = requests.get(URL)
    return page.content


def parse():
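    """Extract the expired domain names from the page and print them in columns."""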
    soup = BeautifulSoup(fetch(), "html.parser")
    web_links = soup.find_all("a")
    actual_web_links = [web_link["href"] for web_link in web_links]
    # Keep only the links that point at a domain, then strip the URL wrapper.
    new_list = [x for x in actual_web_links if "/?domain=" in x]
    final_list = [s.replace("/?domain=", "") for s in new_list]
    final_list = [s.replace("xn--", "") for s in final_list]  # strip the "xn--" punycode (IDN) prefix
    the_list = cleanlist(final_list)

    out = cmd.Cmd()  # Cmd.columnize() prints a list in neat columns
    if len(the_list) > 0:
        if max_len is not None:
            # With a length filter, sort by length so the shortest domains come first.
            the_list = sorted(the_list, key=len)
            out.columnize(the_list, displaywidth=80)
        else:
            # Without a filter, plain alphabetical order.
            the_list = sorted(the_list)
            out.columnize(the_list, displaywidth=140)
    else:
        print("No expired domains with the length criteria you wanted!")


if __name__ == "__main__":
    parse()