[untested] RecognizeURLType and IsURLValid functions are working

oxeo0 2025-05-30 00:21:20 +02:00
parent 08697f5c40
commit c9a2fbcfdd
5 changed files with 179 additions and 320 deletions
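The new RecognizeURLType and IsURLValid helpers are defined in one of the other changed files and do not appear in the hunks below. A minimal sketch of the shape the call sites imply (the two function names come from this commit; the bodies, regexes and type labels here are assumptions, not the commit's code):

import re

# Hypothetical sketch only: the real implementations live in the utility module
# touched by this commit and are not shown in the diff below.
def RecognizeURLType(url):
    """Guess what kind of link this is; return None if it is not a URL we handle."""
    if not isinstance(url, str) or not url.strip():
        return None
    url = url.strip()
    if re.search(r'[a-z2-7]{56}\.onion', url):                 # v3 onion address
        return 'onion'
    if url.startswith('simplex:/') or 'simplex.chat' in url:   # SimpleX invite link (assumed form)
        return 'simplex'
    if re.match(r'^(https?://)?[\w.-]+\.[a-z]{2,}(/\S*)?$', url):
        return 'clearnet'
    return None

def IsURLValid(url):
    """Single entry point: a URL is considered valid if its type is recognized."""
    return RecognizeURLType(url) is not None

This would explain why call sites that previously chained IsUrlValid, IsOnionValid and IsSimpleXChatroomValid now make a single IsURLValid call.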


@@ -50,9 +50,6 @@ def main():
os.makedirs(participantdir)
print_colors("""
;
ED.
@@ -94,8 +91,8 @@ def main():
if os.path.isfile(urlpath):
with open(urlpath) as f:
instance = f.read().rstrip()
-if IsOnionValid(instance):
-print_colors(f"[+] Instance Name: {instance}. Valid:{IsOnionValid(instance)}")
+if IsOnionLinkValid(instance):
+print_colors(f"[+] Instance Name: {instance}. Valid:{IsOnionLinkValid(instance)}")
break
else:
print_colors(f'[-] Invalid instance name in ~/.darknet_participant_url: {instance}',is_error=True )
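IsOnionLinkValid is likewise not defined in these hunks. Judging from the example domain used in the prompts (lantern.nowherejezfoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion), a rough sketch could be (regex and body are assumptions, not the commit's code):

import re

# Assumed shape of IsOnionLinkValid: accept a v3 onion hostname, optionally with a
# subdomain and an optional scheme or path.
_ONION_LINK_RE = re.compile(r'^(https?://)?([a-z0-9-]+\.)*[a-z2-7]{56}\.onion(/\S*)?$')

def IsOnionLinkValid(url):
    return isinstance(url, str) and bool(_ONION_LINK_RE.match(url.strip().lower()))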
@@ -104,8 +101,8 @@ def main():
print_colors("[+] Instance Path doesn't exist yet")
print_colors(f"Your url will be saved here {urlpath}")
instance = input("What is your Instance domain?(ex: lantern.nowherejezfoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion): ")
-if IsOnionValid(instance):
-print_colors(f"[+] Instance Name: {instance}. Valid: {IsUrlValid(instance)}")
+if IsOnionLinkValid(instance):
+print_colors(f"[+] Instance Name: {instance}. Valid: {IsOnionLinkValid(instance)}")
instancepath=rootpath+'www/participants/'+instance
else:
print_colors(f'[-] Invalid instance name in ~/.darknet_participant_url: {instance}', is_error=True )
@@ -212,9 +209,9 @@ Maintenance:
while(IsCategoryValid(category) is not True):
category = input("What is the website Category? ")
# the url of the website (required) + check if its valid
-url=''
-while(IsUrlValid(url) is not True and IsSimpleXChatroomValid(url) is not True):
-url=input("What is the website URL ? ")
+url = ''
+while not IsURLValid(url):
+url = input("What is the website URL ? ")
# a quick description (optional) + check if its valid
desc='DEFAULT'
@@ -458,7 +455,7 @@ Maintenance:
value = input("What is the new name of the website? ")
vdf.at[index,'Name']=value
elif i == 3: # column URL
-while(IsUrlValid(value) is not True or value == ''):
+while(IsURLValid(value) is not True or value == ''):
value = input("What is the new URL of the website? ")
vdf.at[index,'URL']=value
elif i == 4: # column Sensitive
@@ -504,7 +501,7 @@ Maintenance:
value = input("What is the new name of the website? ")
uvdf.at[index,'Name']=value
elif i == 3: # column URL
-while(IsUrlValid(value) is not True or value == ''):
+while(IsURLValid(value) is not True or value == ''):
value = input("What is the new URL of the website? ")
uvdf.at[index,'URL']=value
elif i == 4: # column Sensitive
@@ -656,7 +653,7 @@ Maintenance:
csvdf.to_csv(csvfilepath, index=False)
### SANITY CHECK 1: Mark all the rows that have incorrect formatting for deletion###
-if IsUrlValid(csvdf.at[i, 'Instance']) is False or IsCategoryValid(csvdf.at[i, 'Category']) is False or IsNameValid(csvdf.at[i, 'Name']) is False or IsUrlValid(csvdf.at[i, 'URL']) is False or IsStatusValid(csvdf.at[i, 'Sensitive']) is False or IsDescriptionValid(csvdf.at[i, 'Description']) is False or IsStatusValid(csvdf.at[i, 'Status']) is False or IsScoreValid(csvdf.at[i, 'Score']) is False:
+if IsURLValid(csvdf.at[i, 'Instance']) is False or IsCategoryValid(csvdf.at[i, 'Category']) is False or IsNameValid(csvdf.at[i, 'Name']) is False or IsURLValid(csvdf.at[i, 'URL']) is False or IsStatusValid(csvdf.at[i, 'Sensitive']) is False or IsDescriptionValid(csvdf.at[i, 'Description']) is False or IsStatusValid(csvdf.at[i, 'Status']) is False or IsScoreValid(csvdf.at[i, 'Score']) is False:
#mark the row for deletion as it has invalid inputs
if i not in rows2delete:
print_colors(f"Marking row {i} for deletion, as it has invalid inputs")
@@ -788,7 +785,7 @@ Maintenance:
case 5:
print_colors("[+] Add a new webring participant (and download their files into their directory (without trusting them yet!))")
webring_participant_url = ''
-while(IsOnionValid(webring_participant_url) is not True):
+while(IsOnionLinkValid(webring_participant_url) is not True):
webring_participant_url = input("What is the onion domain of the new webring participant? (ex: lantern.nowherejezfoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion) ")
participantdir=rootpath+'www/participants/'+webring_participant_url
if os.path.isdir(participantdir):
@@ -892,7 +889,7 @@ Maintenance:
csvdf.to_csv(csvfilepath, index=False)
### SANITY CHECK 1: Mark all the rows that have incorrect formatting for deletion###
-if IsUrlValid(csvdf.at[i, 'Instance']) is False or IsCategoryValid(csvdf.at[i, 'Category']) is False or IsNameValid(csvdf.at[i, 'Name']) is False or IsUrlValid(csvdf.at[i, 'URL']) is False or IsStatusValid(csvdf.at[i, 'Sensitive']) is False or IsDescriptionValid(csvdf.at[i, 'Description']) is False or IsStatusValid(csvdf.at[i, 'Status']) is False or IsScoreValid(csvdf.at[i, 'Score']) is False:
+if IsURLValid(csvdf.at[i, 'Instance']) is False or IsCategoryValid(csvdf.at[i, 'Category']) is False or IsNameValid(csvdf.at[i, 'Name']) is False or IsURLValid(csvdf.at[i, 'URL']) is False or IsStatusValid(csvdf.at[i, 'Sensitive']) is False or IsDescriptionValid(csvdf.at[i, 'Description']) is False or IsStatusValid(csvdf.at[i, 'Status']) is False or IsScoreValid(csvdf.at[i, 'Score']) is False:
#mark the row for deletion as it has invalid inputs
if i not in rows2delete:
print_colors(f"Marking row {i} for deletion, as it has invalid inputs")
@@ -1072,7 +1069,7 @@ Maintenance:
break
else:
print_colors("[+] checking if the Word/URL is valid: ")
-if IsUrlValid(word) or IsOnionValid(word) or IsDescriptionValid(word):
+if IsURLValid(word) or IsDescriptionValid(word):
print_colors('[+] Word/URL is valid, adding the word into the sensitive wordlist')
newrow=[word]
print_colors(f"[+] NEWROW= {newrow}")
@@ -1141,7 +1138,7 @@ Maintenance:
break
else:
print_colors("[+] Checking if the Word/URL is valid: ")
-if IsUrlValid(word) or IsOnionValid(word) or IsDescriptionValid(word):
+if IsURLValid(word) or IsDescriptionValid(word):
print_colors('[+] Word/URL is valid, adding the word into the blacklist')
newrow=[word]
print_colors(f"[+] NEWROW= {newrow}")
@@ -1254,11 +1251,11 @@ Maintenance:
csvdf.to_csv(csvfilepath, index=False)
### SANITY CHECK 1: Mark all the rows that have incorrect formatting for deletion###
-if IsUrlValid(csvdf.at[i, 'Instance']) is False or IsCategoryValid(csvdf.at[i, 'Category']) is False or IsNameValid(csvdf.at[i, 'Name']) is False or IsUrlValid(csvdf.at[i, 'URL']) is False or IsStatusValid(csvdf.at[i, 'Sensitive']) is False or IsDescriptionValid(csvdf.at[i, 'Description']) is False or IsStatusValid(csvdf.at[i, 'Status']) is False or IsScoreValid(csvdf.at[i, 'Score']) is False:
+if IsURLValid(csvdf.at[i, 'Instance']) is False or IsCategoryValid(csvdf.at[i, 'Category']) is False or IsNameValid(csvdf.at[i, 'Name']) is False or IsURLValid(csvdf.at[i, 'URL']) is False or IsStatusValid(csvdf.at[i, 'Sensitive']) is False or IsDescriptionValid(csvdf.at[i, 'Description']) is False or IsStatusValid(csvdf.at[i, 'Status']) is False or IsScoreValid(csvdf.at[i, 'Score']) is False:
if i not in rows2delete:
print_colors(f"Marking row {i} for deletion, as it has invalid inputs")
#print_colors(f"{row}")
-print(IsUrlValid(csvdf.at[i, 'Instance']), IsCategoryValid(csvdf.at[i, 'Category']), IsNameValid(csvdf.at[i, 'Name']), IsUrlValid(csvdf.at[i, 'URL']), IsStatusValid(csvdf.at[i, 'Sensitive']), IsDescriptionValid(csvdf.at[i, 'Description']), IsStatusValid(csvdf.at[i, 'Status']), IsScoreValid(csvdf.at[i, 'Score']))
+print(IsURLValid(csvdf.at[i, 'Instance']), IsCategoryValid(csvdf.at[i, 'Category']), IsNameValid(csvdf.at[i, 'Name']), IsURLValid(csvdf.at[i, 'URL']), IsStatusValid(csvdf.at[i, 'Sensitive']), IsDescriptionValid(csvdf.at[i, 'Description']), IsStatusValid(csvdf.at[i, 'Status']), IsScoreValid(csvdf.at[i, 'Score']))
rows2delete.append(i)
read=input("Continue?")
@@ -1360,9 +1357,6 @@ Maintenance:
print_colors("Invalid Number",is_error=True)
continue
except Exception as e:
print_colors(f'Try again {e}',is_error=True)
break
@@ -1371,7 +1365,6 @@ Maintenance:
print_colors("No more submissions to review, exiting.")
break
case 12:
# review the crawled websites
try:
@@ -1459,12 +1452,12 @@ Maintenance:
crawled_df.to_csv(crawled_file_abs_path, index=False)
elif number == 3:
-# Delete from crawled_onion.csv
+# Delete from crawled_onion.csv
crawled_df.drop(index=i,inplace=True)
crawled_df.to_csv(crawled_file_abs_path, index=False)
elif number == 4:
-# Add to blacklist.csv
+# Add to blacklist.csv
newrow=[link]
blacklist_df.loc[-1] = newrow # adding a row
@@ -1482,15 +1475,10 @@ Maintenance:
print_colors("Invalid Number",is_error=True)
continue
except Exception as e:
print_colors(f'Try again {e}',is_error=True)
break
break
finally:
print_colors("No more crawled websites to review, exiting.")
break