#!/usr/bin/python3
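"""Scrape domene.shop's expired-domain list and print the names that fit
a length limit given on the command line (see usage notes at the bottom)."""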

import sys

# Make packages installed under Homebrew's Python importable
# (a machine-specific path hack kept from the original environment).
sys.path.append("/opt/homebrew/lib/python3.11/site-packages")

import sqlite3  # imported but never used; see the save_to_db sketch below
import requests
from bs4 import BeautifulSoup

# Optional command-line argument: the longest domain name to report.
try:
    max_len = int(sys.argv[1])
except (IndexError, ValueError):  # no argument given, or not an integer
    max_len = 255


def cleanlist(my_list):
    # Keep only the names that fit the length criterion. The +3 presumably
    # leaves room for a three-character suffix such as ".no"; the original
    # script does not explain the offset.
    retList = []
    for x in my_list:
        if len(x) <= (max_len + 3):
            retList.append(x)
    return retList
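
# An equivalent one-liner, for reference:
#   def cleanlist(my_list):
#       return [x for x in my_list if len(x) <= max_len + 3]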


def fetch():
    URL = "https://domene.shop/expired"
    # A timeout and a status check make network failures visible instead of
    # hanging forever or silently parsing an error page.
    page = requests.get(URL, timeout=30)
    page.raise_for_status()
    return page.content


def parse():
    soup = BeautifulSoup(fetch(), "html.parser")
    web_links = soup.find_all("a", href=True)  # skip anchors without an href
    actual_web_links = [web_link["href"] for web_link in web_links]
    new_list = [x for x in actual_web_links if "/?domain=" in x]
    final_list = [s.replace("/?domain=", "") for s in new_list]
    final_list = [s.replace("xn--", "") for s in final_list]  # drop the IDN punycode prefix
    the_list = cleanlist(final_list)
    the_list.sort()  # alphabetical order first; list.sort() works in place
    # sorted() is stable, so names of equal length stay in alphabetical order
    the_list = sorted(the_list, key=len)
    if the_list:
        print(*the_list, sep="\n")
    else:
        print("No expired domains with the length criteria you wanted!")


def main():
    parse()


if __name__ == "__main__":
    main()
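
# Example invocations (the filename "expired.py" is only a placeholder;
# requires the requests and beautifulsoup4 packages):
#   python3 expired.py 5   -> names of at most five characters plus ".no"
#   python3 expired.py     -> default max_len of 255, effectively no limit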