#!/usr/bin/python3
"""List recently expired domains from domene.shop in neat columns.

Usage: script [max_len] [tld]
  max_len -- keep only names whose full length fits max_len + 3 chars
             (0 or omitted means no length limit)
  tld     -- keep only names ending in this top-level domain, e.g. "no"
"""
import sys

# The next line is for my setup. This might not be correct for every setup!
sys.path.append("/opt/homebrew/lib/python3.11/site-packages")

import cmd

import requests
from bs4 import BeautifulSoup

# Optional first argument: maximum length. Missing, zero, or non-numeric
# (ValueError was previously uncaught and crashed the script) means "no limit".
try:
    max_len = int(sys.argv[1])
    if max_len == 0:
        max_len = None
except (IndexError, ValueError):
    max_len = None

# Optional second argument: top-level domain filter.
try:
    top_domain = sys.argv[2]
except IndexError:
    top_domain = None


def cleanlist(my_list):
    """Filter *my_list* by the global ``max_len`` and ``top_domain`` settings.

    Returns the length-filtered list, the TLD-filtered list, or the original
    list unchanged when no filter produced any match (original fallback kept).
    """
    by_length = []
    by_domain = []
    if max_len is not None:
        # +3 allows for the dot plus a two-character TLD on top of the name.
        by_length = [x for x in my_list if len(x) <= max_len + 3]
    if top_domain is not None:
        # Apply the TLD filter to the length-filtered list when it is
        # non-empty, otherwise to the raw input (same branching as before).
        # endswith() generalizes the original `x[-2:] ==` comparison, which
        # could never match a TLD longer than two characters (e.g. "com");
        # for two-character TLDs the two forms are identical.
        candidates = by_length if by_length else my_list
        by_domain = [x for x in candidates if x.endswith(top_domain)]
    if not by_length and not by_domain:
        return my_list  # nothing matched at all: fall back to unfiltered input
    if top_domain is None:
        return by_length
    return by_domain


def fetch():
    """Return the raw HTML bytes of the expired-domains page."""
    page = requests.get("https://domene.shop/expired")
    return page.content


def parse():
    """Scrape, filter, sort and print the expired domain names in columns."""
    soup = BeautifulSoup(fetch(), "html.parser")
    hrefs = [link["href"] for link in soup.find_all("a")]
    # Only links of the form "/?domain=example.no" are domain entries.
    domains = [h.replace("/?domain=", "") for h in hrefs if "/?domain=" in h]
    # Strip the IDN ("punycode") prefix so internationalized names read cleanly.
    domains = [d.replace("xn--", "") for d in domains]
    the_list = cleanlist(domains)
    # Alphabetical base order; the stable length sort below then keeps
    # same-length names alphabetical. (The original assigned the None result
    # of .sort() to an unused variable — the mutation was the useful part.)
    the_list.sort()
    out = cmd.Cmd()  # cmd.Cmd.columnize gives a multi-column terminal layout
    if the_list:
        if max_len is not None:
            # With a length limit in play, show shortest names first.
            out.columnize(sorted(the_list, key=len), displaywidth=80)
        else:
            out.columnize(the_list, displaywidth=140)
    else:
        print("No expired domains with the length criteria you wanted!")


if __name__ == "__main__":
    parse()