diff --git a/scripts/darknet_exploration.py b/scripts/darknet_exploration.py
index e535abe..3b11af5 100644
--- a/scripts/darknet_exploration.py
+++ b/scripts/darknet_exploration.py
@@ -29,6 +29,8 @@ def main():
     templatepath=rootpath+'templates/'
     verifiedcsvfile=instancepath+'/verified.csv'
     unverifiedcsvfile=instancepath+'/unverified.csv'
+    blcsvfile=instancepath+'/blacklist.csv'
+    secsvfile=instancepath+'/sensitive.csv'
     # check if instancepath exists, if not then create the directory
     if not os.path.exists(instancepath):
         os.makedirs(instancepath)
@@ -43,6 +45,8 @@ def main():
         # now that they exist, get vdf and uvdf
         vdf = pd.read_csv(verifiedcsvfile)
         uvdf = pd.read_csv(unverifiedcsvfile)
+        bldf = pd.read_csv(blcsvfile)
+        sedf = pd.read_csv(secsvfile)
         print("[+] file exists, your Webring URL is", instance)
         isitvalid = "y"
     else:
@@ -226,8 +230,8 @@ Managing Wordlists:
                 webring_participant_url = input("What is the onion domain of the new webring participant? (ex: uptime.nowherejezfoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion) ")
                 # check if the directory exists locally or not,
                 participantdir=rootpath+'www/participants/'+webring_participant_url
-                #if not os.path.isdir(participantdir): # to test on your own instance
-                if os.path.isdir(participantdir):
+                if not os.path.isdir(participantdir): # to test on your own instance
+                #if os.path.isdir(participantdir):
                     # if it does, it means that the webring is ALREADY added
                     print("[-] Webring Participant is already listed, skipping.")
                     return False
@@ -298,15 +302,63 @@ Managing Wordlists:
                     print("[+] file written, let's read it")
                     f = open(csvfilepath,"r")
                     print(f.read())
-                    # TODO and remove all of the invalid entries !!!
-                    #######################################################################
-                    #newrow=[instance,category,name,url,sensi,desc,'','']
-                    #print("[+] NEWROW=",newrow)
-                    #uvdf.loc[-1] = newrow # adding a row
-                    #uvdf.index = uvdf.index + 1 # shifting index
-                    #uvdf = uvdf.sort_index() # sorting by index
-                    #print("[+] New row added! now writing the csv file:")
-                    #uvdf.to_csv(unverifiedcsvfile, index=False)
+
+                    ########### PERFORM SANITY CHECKS ON the webring participant's verified.csv and unverified.csv ##################
+                    for w in ['verified.csv','unverified.csv']:
+                        csvfilepath=participantdir+'/'+w
+                        csvdf = pd.read_csv(csvfilepath)
+
+                        #print(bldf[['blacklisted-words']])
+                        bldf[['blacklisted-words']].iterrows()
+                        rows2delete= [] # it is an empty list at first
+                        for i,j in csvdf.iterrows():
+                            #print("[+] Unverified.csv ROW=",i, uvdf.at[i, 'Instance'], uvdf.at[i, 'Category'], uvdf.at[i, 'Name'], uvdf.at[i, 'URL'], uvdf.at[i, 'Description'])
+                            #print("[+] Unverified.csv ROW=",i, uvdf.iloc[[i]])
+                            #row=uvdf.iloc[[i]] #it displays the index
+                            row=csvdf.loc[i,:].values.tolist()
+                            #print(i,row)
+
+                            ### SANITY CHECK 1: Mark all the rows that have incorrect formatting for deletion ###
+                            #print("[+] ROW=",i,"ROW CONTENTS=", IsUrlValid(uvdf.at[i, 'Instance']), IsCategoryValid(uvdf.at[i, 'Category']), IsNameValid(uvdf.at[i, 'Name']), IsUrlValid(uvdf.at[i, 'URL']), IsStatusValid(uvdf.at[i, 'Sensitive']), IsDescriptionValid(uvdf.at[i, 'Description']), IsStatusValid(uvdf.at[i, 'Status']), IsScoreValid(uvdf.at[i, 'Score']))
+                            if IsUrlValid(csvdf.at[i, 'Instance']) is False or IsCategoryValid(csvdf.at[i, 'Category']) is False or IsNameValid(csvdf.at[i, 'Name']) is False or IsUrlValid(csvdf.at[i, 'URL']) is False or IsStatusValid(csvdf.at[i, 'Sensitive']) is False or IsDescriptionValid(csvdf.at[i, 'Description']) is False or IsStatusValid(csvdf.at[i, 'Status']) is False or IsScoreValid(csvdf.at[i, 'Score']) is False:
+                                #mark the row for deletion as it has invalid inputs
+                                if i not in rows2delete:
+                                    print("Marking row", i,"for deletion, as it has invalid inputs")
+                                    rows2delete.append(i) #mark the row for deletion if not already done
+
+                            ### SANITY CHECK 2: Mark all rows that are not allowed (blacklist) for deletion ###
+                            for k,l in bldf.iterrows():
+                                #print("[+] Blacklisted word=",k, bldf.at[k, 'blacklisted-words'])
+                                blword=bldf.at[k, 'blacklisted-words']
+                                if any(blword in str(x) for x in row):
+                                    #print("found blacklisted word! marking row for deletion")
+                                    if i not in rows2delete:
+                                        print("Marking row", i,"for deletion, as it matches with a blacklisted word")
+                                        rows2delete.append(i) #mark the row for deletion if not already done
+
+                            ### SANITY CHECK 3: Mark all the rows that are supposed to be sensitive ###
+                            for k,l in sedf.iterrows():
+                                #print("[+] Sensitive word=",k, sedf.at[k, 'sensitive-words'])
+                                seword=sedf.at[k, 'sensitive-words']
+                                if any(seword in str(x) for x in row):
+                                    if csvdf.at[i, 'Sensitive'] != '✔️':
+                                        print("Marking row", i,"as sensitive, as it matches with a sensitive word")
+                                        csvdf.at[i, 'Sensitive']='✔️' # write the flag into the file being checked, not uvdf
+
+                        print('[-] Rows to delete: ',rows2delete)
+
+                        # TODO : MAKE SURE IT WORKS IN PROD
+                        for i in rows2delete:
+                            row=csvdf.loc[i,:].values.tolist()
+                            print('[+] REMOVING ROW :',i,row)
+                            csvdf.drop(i, inplace=True)
+                        csvdf.to_csv(csvfilepath, index=False)
+                        ##############################################
+
+
+
+
+
                 case "5":
@@ -375,6 +427,8 @@ Managing Wordlists:
                 # TODO
                 print("do you want to 1) add words or 2) remove words ?")
                 # TODO display the contents of blacklist.csv file
+                # TODO CASE 10 : cleanup all duplicates in unverified + verified.csv, based on the url (check if each url appears more than once, and if they do, remove them + write to csv file; see the drop_duplicates sketch after this file's diff)
+
             case _:
                 print("[-] Exiting")
                 return True
@@ -420,26 +474,26 @@ def IsOnionValid(url: str)-> bool:
         pattern = re.compile("^[A-Za-z0-9.]+(\.onion)?$")
         url = url.strip().removesuffix('/')
         if url.startswith('http://'):
-            print('URL starts with http')
+            #print('URL starts with http')
             # Removes the http://
             domain = url.split('/')[2]
             if pattern.fullmatch(domain) is not None:
                 if len(domain.split('.')) > 3:
                     n_subdomians = len(domain.split('.'))
                     # Checks if there is more than 1 subdomain. "subdomain.url.onion" only
-                    print(f"This domain have more than one subdomain. There are {n_subdomians} subdomains")
+                    #print(f"This domain has more than one subdomain. There are {n_subdomians} subdomains")
                     return False
                 else:
                     if len(domain) < 62:
-                        print("Domain length is less than 62.")
+                        #print("Domain length is less than 62.")
                         return False
                     return True
             elif pattern.fullmatch(domain) is None:
-                print("Domain contains invalid character.")
-                print(domain)
+                #print("Domain contains invalid character.")
+                #print(domain)
                 return False
             else:
-                print("Domain not valid")
+                #print("Domain not valid")
                 return False
         else:
             #print("URL doesn't start http")
@@ -447,25 +501,23 @@ def IsOnionValid(url: str)-> bool:
             if len(url.split('.')) > 3:
                 n_subdomians = len(url.split('.'))
                 # Checks if there is more than 1 subdomain. "subdomain.url.onion" only
-                print(f"This domain have more than one subdomain. There are {n_subdomians - 1} subdomains")
+                #print(f"This domain has more than one subdomain. There are {n_subdomians - 1} subdomains")
                 return False
             else:
                 if len(url) < 62:
-                    print("Domain length is less than 62.")
+                    #print("Domain length is less than 62.")
                     return False
                 return True
         elif pattern.fullmatch(url) is None:
-            print("Domain contains invalid character.")
-            print(url)
+            #print("Domain contains invalid character.")
+            #print(url)
             return False
         else:
-            print("Domain not valid")
+            #print("Domain not valid")
             return False
     except Exception as e:
         print(f"Error: {e}")
-
-
 def IsUrlValid(url:str)->bool:
     """
     Check if url is valid both dark net end clearnet.
@@ -475,34 +527,57 @@ def IsUrlValid(url:str)->bool:
     # if OK return True
     #if not : return False
     pattern = re.compile("^[A-Za-z0-9:/.]+$")
+    url = str(url)
     if url.endswith('.onion'):
         return IsOnionValid(url)
     else:
         if not url.__contains__('.'):
-            print("No (DOT) in clearnet url")
+            #print("No (DOT) in clearnet url")
             return False
         if pattern.fullmatch(url) is None:
-            print('Url contains invalid chars')
+            #print('Url contains invalid chars')
             return False
         return True
 
 def IsStatusValid(status: str)-> bool:
     """
-    Checks if status contains only [v,w]. Verbose only if False is returned
+    Checks if status contains only [y,n,✔️,❌]. Verbose only if False is returned
     """
-    # check if the characters are only [vx] with maximum 1 chars max
-    # if OK return True
-    #if not : return False
-    pattern = ['y','n']
-    if len(status) != 1:
-        print("Got more than one character or nothing.")
+    pattern = ['y','n','✔️','❌','','nan']
+    status = str(status)
+    status = status.strip()
+    #print('[+] STATUS = ',status.splitlines())
+    if len(status) > 4:
+        #print("Status: Got more than one character or nothing.")
         return False
     elif (status not in pattern):
-        print("Got an invalid character it must be either y or n")
+        #print("Status: Got an invalid character it must be either y, n, ✔️, or ❌ ")
         return False
-
+    return True
+
+def IsScoreValid(score:str)->bool:
+    """
+    Check the Score is only "^[0-9.,]+$" with 8 max chars.
+    """
+    # check if the characters are only [0-9.,] with maximum 8 chars max
+    #(careful with the ' and , make sure you test if it fucks the csv up or else)
+    # if OK return True
+    #if not : return False
+    pattern = re.compile("^[0-9.,]+$")
+    score = str(score)
+    score = score.strip()
+    if pattern.fullmatch(score) is None:
+        # reject scores with invalid characters
+        return False
+    elif len(score) > 8:
+        #print("score is greater than 8 chars")
+        return False
+    # empty score is fine
+    return True
+
+
 def IsDescriptionValid(desc:str)->bool:
     """
     Check the categories are only [a-zA-Z0-9,.' ] with 256 max chars.
@@ -512,12 +587,15 @@ def IsDescriptionValid(desc:str)->bool:
     # if OK return True
     #if not : return False
     pattern = re.compile("^[A-Za-z0-9-.,' ]+$")
+    desc = str(desc)
     desc.strip()
-    # empty description is fine as it's optional
+    if pattern.fullmatch(desc) is None:
+        # reject descriptions with invalid characters
+        return False
     if desc == "DEFAULT":
         return False
     elif len(desc) > 256:
-        print("desc is greater than 256 chars")
+        #print("desc is greater than 256 chars")
         return False
     return True
 
@@ -536,7 +614,8 @@ def IsCategoryValid(categories: list)-> bool:
             #print('Got an empty list or invalid chars')
             return False
         elif len(category) > 64:
-            print('Category is too long')
+            #print('Category is too long')
+            return False
         else:
             return True
 
@@ -554,12 +633,13 @@ def IsNameValid(name: str)->bool:
         #print("Got an invalid character or nothing")
         return False
     elif len(name) > 64:
-        print(f'Got a name lenght greater than 64. {len(name)}')
+        #print(f'Got a name length greater than 64. {len(name)}')
         return False
     return True
 
+
 if __name__ == '__main__':
     main()
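Not part of the patch above — a minimal sketch of the CASE 10 TODO (duplicate cleanup based on the url), assuming pandas is already imported as pd and the column layout used throughout this patch; RemoveDuplicateUrls is an illustrative name, not an existing helper:

import pandas as pd

def RemoveDuplicateUrls(csvfilepath: str) -> None:
    # hypothetical helper: keep the first occurrence of each URL, drop every later duplicate
    df = pd.read_csv(csvfilepath)
    df = df.drop_duplicates(subset=['URL'], keep='first')
    df.to_csv(csvfilepath, index=False)

# usage sketch: clean both instance files
for f in ['verified.csv', 'unverified.csv']:
    RemoveDuplicateUrls(f)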
ROW=",i, uvdf.iloc[[i]]) + #row=uvdf.iloc[[i]] #it displays the index + row=uvdf.loc[i,:].values.tolist() + #print(i,row) + + ### SANITY CHECK 1: Mark all the rows that have incorrect formatting for deletion### + #print("[+] ROW=",i,"ROW CONTENTS=", IsUrlValid(uvdf.at[i, 'Instance']), IsCategoryValid(uvdf.at[i, 'Category']), IsNameValid(uvdf.at[i, 'Name']), IsUrlValid(uvdf.at[i, 'URL']), IsStatusValid(uvdf.at[i, 'Sensitive']), IsDescriptionValid(uvdf.at[i, 'Description']), IsStatusValid(uvdf.at[i, 'Status']), IsScoreValid(uvdf.at[i, 'Score'])) + if IsUrlValid(uvdf.at[i, 'Instance']) is False or IsCategoryValid(uvdf.at[i, 'Category']) is False or IsNameValid(uvdf.at[i, 'Name']) is False or IsUrlValid(uvdf.at[i, 'URL']) is False or IsStatusValid(uvdf.at[i, 'Sensitive']) is False or IsDescriptionValid(uvdf.at[i, 'Description']) is False or IsStatusValid(uvdf.at[i, 'Status']) is False or IsScoreValid(uvdf.at[i, 'Score']) is False: + #mark the row for deletion as it has invalid inputs + if i not in rows2delete: + print("Marking row", i,"for deletion, as it has invalid inputs") + rows2delete.append(i) #mark the row for deletion if not already done + + ### SANITY CHECK 2: Mark all rows that are not allowed (blacklist) for deletion ### + for k,l in bldf.iterrows(): + #print("[+] Blacklisted word=",k, bldf.at[k, 'blacklisted-words']) + blword=bldf.at[k, 'blacklisted-words'] + if any(blword in str(x) for x in row) == True: + #print("found blacklisted word! marking row for deletion") + if i not in rows2delete: + print("Marking row", i,"for deletion, as it matches with a blacklisted word") + rows2delete.append(i) #mark the row for deletion if not already done + ### SANITY CHECK 3: Mark all the rows that are supposed to be sensitive ### + for k,l in sedf.iterrows(): + #print("[+] Sensitive word=",k, sedf.at[k, 'sensitive-words']) + seword=sedf.at[k, 'sensitive-words'] + if any(seword in str(x) for x in row) == True: + if uvdf.at[i, 'Sensitive'] != '✔️': + print("Marking row", i,"as sensitive, as it matches with a sensitive word") + uvdf.at[i, 'Sensitive']='✔️' + + print('[-] Rows to delete: ',rows2delete) + + for i in rows2delete: + row=uvdf.loc[i,:].values.tolist() + print('[+] REMOVING ROW :',i,row) + uvdf.drop(i, inplace= True) + uvdf.to_csv(unverifiedcsvfile, index=False) + ############################################## + + # list every word in the sensitive wordlist csv file + # if it matches (on any column!), mark the sensitive column as V + + + ############################################################################# + return True + ############################################################################## + + +#### PROTECTIONS AGAINST MALICIOUS CSV INPUTS #### + +def IsOnionValid(url: str)-> bool: + """ + Checks if the domain(param) is a valid onion domain and return True else False. + """ + # check if the characters are only [a-zA-Z0-9.] with maximum 128 chars max? + # check that it is only url.onion or subdomain.url.onion, + # if OK return True + #if not : return False + try: + pattern = re.compile("^[A-Za-z0-9.]+(\.onion)?$") + url = url.strip().removesuffix('/') + if url.startswith('http://'): + #print('URL starts with http') + # Removes the http:// + domain = url.split('/')[2] + if pattern.fullmatch(domain) is not None: + if len(domain.split('.')) > 3: + n_subdomians = len(domain.split('.')) + # Checks if there is more than 1 subdomain. "subdomain.url.onion" only + #print(f"This domain have more than one subdomain. 
diff --git a/scripts/tests/csvwork.py b/scripts/tests/csvwork.py
index b05f1ff..96111e2 100644
--- a/scripts/tests/csvwork.py
+++ b/scripts/tests/csvwork.py
@@ -4,37 +4,6 @@ def main():
     #print("aaa")
     csvfile="verified.csv"
     df = pd.read_csv(csvfile)
-    #for i in range (df.index.stop):
-    #    print(i,df.at[i,i])
-
-    #for i in range (df.index.stop):
-    #    for col in df.columns.values.tolist():
-    #        print(df.at[i][col])
-
-    #print(df)
-#    print("[+] Display 2 columns:")
-    #display 2 columns:
-#    print(df[['Name', 'URL']])
-
-#    print("[+] Display rows 0 to 5")
-    #display rows from 0 to 5
-#    print(df[0:5])
-
-    #display rows from 0 to 5
-#    print("[+] Display rows 3 to 5 and columns Name and URL")
-#    print(df.loc[3:5,['Name','URL']])
-    #print(df[0:1,['Name','URL']])
-
-#    print("[+] Display all rows for and columns name and URL")
-    #df.loc[startrow:endrow, startcolumn:endcolumn]
-#    print(df.loc[0:df.index.stop,['Name','URL']])
-
-    #display rows from 0 to the end of indexes
-#    print("[+] Display all rows for all columns")
-#    print(df[0:df.index.stop])
-
-
     ##############################################################################
 
     print('\n[+] list the entire csv file and get all row IDs')
@@ -83,7 +52,7 @@ def main():
     index=-1
     while (index not in filter_df.index):
         #index=int(input("\n[+] Please select a valid row: "))
        index=int("12")
-        index=int("1")
+        index=int("12")
         print("ROW=",index, 'CONTENT=', filter_df.at[index, 'Name'], filter_df.at[index, 'URL'])
@@ -120,15 +89,16 @@ def main():
     print(df)
 
-    #write the dataframe into the csv file
-    #read and print the contents of the csv file
-    #re-add that row in the csv file
-    #remove that row from the dataframe
     print()
 
-    # then select a row in it (by the ID) and display it
-    # list
+    ###########
+    # list every word in the blacklist wordlist csv file
+    # for each word, check if it matches with any of the rows in unverified.csv
+    # if it matches (on any column!), remove that row and write to the csv file
+
+    # list every word in the sensitive wordlist csv file
+    # if it matches (on any column!), mark the sensitive column as V
+
 if __name__ == '__main__':
     main()
diff --git a/scripts/tests/csvwork2.py b/scripts/tests/csvwork2.py
new file mode 100644
index 0000000..723a708
--- /dev/null
+++ b/scripts/tests/csvwork2.py
@@ -0,0 +1,255 @@
+import csv, json, pandas as pd, glob, re
+
+def main():
+    #print("aaa")
+    vcsvfile="verified.csv"
+    vdf = pd.read_csv(vcsvfile)
+    unverifiedcsvfile="unverified.csv"
+    uvdf = pd.read_csv(unverifiedcsvfile)
+    blcsvfile="blacklist.csv"
+    bldf = pd.read_csv(blcsvfile)
+    secsvfile="sensitive.csv"
+    sedf = pd.read_csv(secsvfile)
+
+    ###########
+    # list every word in the blacklist wordlist csv file
+    # for each word, check if it matches with any of the rows in unverified.csv
+    # if it matches (on any column!), remove that row and write to the csv file
+
+
+
+    ########### SANITY CHECKS ON UNVERIFIED.CSV ##################
+    #print(bldf[['blacklisted-words']])
+    bldf[['blacklisted-words']].iterrows()
+    rows2delete= [] # it is an empty list at first
+    for i,j in uvdf.iterrows():
+        #print("[+] Unverified.csv ROW=",i, uvdf.at[i, 'Instance'], uvdf.at[i, 'Category'], uvdf.at[i, 'Name'], uvdf.at[i, 'URL'], uvdf.at[i, 'Description'])
+        #print("[+] Unverified.csv ROW=",i, uvdf.iloc[[i]])
+        #row=uvdf.iloc[[i]] #it displays the index
+        row=uvdf.loc[i,:].values.tolist()
+        #print(i,row)
+
+        ### SANITY CHECK 1: Mark all the rows that have incorrect formatting for deletion ###
+        #print("[+] ROW=",i,"ROW CONTENTS=", IsUrlValid(uvdf.at[i, 'Instance']), IsCategoryValid(uvdf.at[i, 'Category']), IsNameValid(uvdf.at[i, 'Name']), IsUrlValid(uvdf.at[i, 'URL']), IsStatusValid(uvdf.at[i, 'Sensitive']), IsDescriptionValid(uvdf.at[i, 'Description']), IsStatusValid(uvdf.at[i, 'Status']), IsScoreValid(uvdf.at[i, 'Score']))
+        if IsUrlValid(uvdf.at[i, 'Instance']) is False or IsCategoryValid(uvdf.at[i, 'Category']) is False or IsNameValid(uvdf.at[i, 'Name']) is False or IsUrlValid(uvdf.at[i, 'URL']) is False or IsStatusValid(uvdf.at[i, 'Sensitive']) is False or IsDescriptionValid(uvdf.at[i, 'Description']) is False or IsStatusValid(uvdf.at[i, 'Status']) is False or IsScoreValid(uvdf.at[i, 'Score']) is False:
+            #mark the row for deletion as it has invalid inputs
+            if i not in rows2delete:
+                print("Marking row", i,"for deletion, as it has invalid inputs")
+                rows2delete.append(i) #mark the row for deletion if not already done
+
+        ### SANITY CHECK 2: Mark all rows that are not allowed (blacklist) for deletion ###
+        for k,l in bldf.iterrows():
+            #print("[+] Blacklisted word=",k, bldf.at[k, 'blacklisted-words'])
+            blword=bldf.at[k, 'blacklisted-words']
+            if any(blword in str(x) for x in row):
+                #print("found blacklisted word! marking row for deletion")
+                if i not in rows2delete:
+                    print("Marking row", i,"for deletion, as it matches with a blacklisted word")
+                    rows2delete.append(i) #mark the row for deletion if not already done
+
+        ### SANITY CHECK 3: Mark all the rows that are supposed to be sensitive ###
+        for k,l in sedf.iterrows():
+            #print("[+] Sensitive word=",k, sedf.at[k, 'sensitive-words'])
+            seword=sedf.at[k, 'sensitive-words']
+            if any(seword in str(x) for x in row):
+                if uvdf.at[i, 'Sensitive'] != '✔️':
+                    print("Marking row", i,"as sensitive, as it matches with a sensitive word")
+                    uvdf.at[i, 'Sensitive']='✔️'
+
+    print('[-] Rows to delete: ',rows2delete)
+
+    for i in rows2delete:
+        row=uvdf.loc[i,:].values.tolist()
+        print('[+] REMOVING ROW :',i,row)
+        uvdf.drop(i, inplace=True)
+    uvdf.to_csv(unverifiedcsvfile, index=False)
+    ##############################################
+
+    # list every word in the sensitive wordlist csv file
+    # if it matches (on any column!), mark the sensitive column as V
+
+
+    #############################################################################
+    return True
+    ##############################################################################
+
+
+#### PROTECTIONS AGAINST MALICIOUS CSV INPUTS ####
+
+def IsOnionValid(url: str)-> bool:
+    """
+    Checks if the domain(param) is a valid onion domain and return True else False.
+    """
+    # check if the characters are only [a-zA-Z0-9.] with maximum 128 chars max?
+    # check that it is only url.onion or subdomain.url.onion,
+    # if OK return True
+    #if not : return False
+    try:
+        pattern = re.compile("^[A-Za-z0-9.]+(\.onion)?$")
+        url = url.strip().removesuffix('/')
+        if url.startswith('http://'):
+            #print('URL starts with http')
+            # Removes the http://
+            domain = url.split('/')[2]
+            if pattern.fullmatch(domain) is not None:
+                if len(domain.split('.')) > 3:
+                    n_subdomains = len(domain.split('.'))
+                    # Checks if there is more than 1 subdomain. "subdomain.url.onion" only
+                    #print(f"This domain has more than one subdomain. There are {n_subdomains} subdomains")
+                    return False
+                else:
+                    if len(domain) < 62:
+                        #print("Domain length is less than 62.")
+                        return False
+                    return True
+            elif pattern.fullmatch(domain) is None:
+                #print("Domain contains invalid character.")
+                #print(domain)
+                return False
+            else:
+                #print("Domain not valid")
+                return False
+        else:
+            #print("URL doesn't start http")
+            if pattern.fullmatch(url) is not None:
+                if len(url.split('.')) > 3:
+                    n_subdomains = len(url.split('.'))
+                    # Checks if there is more than 1 subdomain. "subdomain.url.onion" only
+                    #print(f"This domain has more than one subdomain. There are {n_subdomains - 1} subdomains")
+                    return False
+                else:
+                    if len(url) < 62:
+                        #print("Domain length is less than 62.")
+                        return False
+                    return True
+            elif pattern.fullmatch(url) is None:
+                #print("Domain contains invalid character.")
+                #print(url)
+                return False
+            else:
+                #print("Domain not valid")
+                return False
+    except Exception as e:
+        print(f"Error: {e}")
+
+
+
+def IsUrlValid(url:str)->bool:
+    """
+    Check if url is valid both dark net and clearnet.
+    """
+    # check if the characters are only [a-zA-Z0-9.:/] with maximum 128 chars max?
+    # check that it is only http(s)://wordA.wordB or http(s)://WordC.WordB.WordC, (onion or not), clearnet is fine too (double check if those are fine!)
+    # if OK return True
+    #if not : return False
+    pattern = re.compile("^[A-Za-z0-9:/.]+$")
+    if url.endswith('.onion'):
+        return IsOnionValid(url)
+    else:
+        if not url.__contains__('.'):
+            #print("No (DOT) in clearnet url")
+            return False
+        if pattern.fullmatch(url) is None:
+            #print('Url contains invalid chars')
+            return False
+        return True
+
+def IsStatusValid(status: str)-> bool:
+    """
+    Checks if status contains only [y,n,✔️,❌]. Verbose only if False is returned
+    """
+    pattern = ['y','n','✔️','❌','','nan']
+    status = str(status)
+    status = status.strip()
+    #print('[+] STATUS = ',status.splitlines())
+    if len(status) > 4:
+        #print("Status: Got more than one character or nothing.")
+        return False
+    elif (status not in pattern):
+        #print("Status: Got an invalid character it must be either y, n, ✔️, or ❌ ")
+        return False
+
+    return True
+
+def IsScoreValid(score:str)->bool:
+    """
+    Check the Score is only "^[0-9.,]+$" with 8 max chars.
+    """
+    # check if the characters are only [0-9.,] with maximum 8 chars max
+    #(careful with the ' and , make sure you test if it fucks the csv up or else)
+    # if OK return True
+    #if not : return False
+    pattern = re.compile("^[0-9.,]+$")
+    score = str(score)
+    score = score.strip()
+    if pattern.fullmatch(score) is None:
+        # reject scores with invalid characters
+        return False
+    elif len(score) > 8:
+        #print("score is greater than 8 chars")
+        return False
+    # empty score is fine
+    return True
+
+
+def IsDescriptionValid(desc:str)->bool:
+    """
+    Check the description is only [a-zA-Z0-9-.,' ] with 256 max chars.
+    """
+    # check if the characters are only [a-zA-Z0-9.,' ] with maximum 256 chars max
+    #(careful with the ' and , make sure you test if it fucks the csv up or else)
+    # if OK return True
+    #if not : return False
+    pattern = re.compile("^[A-Za-z0-9-.,' ]+$")
+    desc = desc.strip()
+    if pattern.fullmatch(desc) is None:
+        # reject descriptions with invalid characters
+        return False
+    if desc == "DEFAULT":
+        return False
+    elif len(desc) > 256:
+        #print("desc is greater than 256 chars")
+        return False
+    return True
+
+def IsCategoryValid(categories: list)-> bool:
+    """
+    Check the categories are only [a-zA-Z0-9 ] with 64 max chars.
+    """
+    # check if the characters are only [a-zA-Z0-9 ] with maximum 64 chars max
+    #(careful with the ' and , make sure you test if it fucks the csv up or else)
+    # if OK return True
+    #if not : return False
+    pattern = re.compile("^[A-Za-z0-9 ]+$")
+    for category in categories:
+        category = category.strip()
+        if pattern.fullmatch(category) is None:
+            #print('Got an empty list or invalid chars')
+            return False
+        elif len(category) > 64:
+            #print('Category is too long')
+            return False
+        else:
+            return True
+
+def IsNameValid(name: str)->bool:
+    """
+    Check the parameter name only contains [a-zA-Z0-9 ] and is 64 chars long.
+    """
+    # check if the characters are only [a-zA-Z0-9 ] with maximum 64 chars max
+    #(careful with the ' and , make sure you test if it fucks the csv up or else)
+    # if OK return True
+    #if not : return False
+    pattern = re.compile("^[A-Za-z0-9 ]+$")
+    name = name.strip()
+    if (pattern.fullmatch(name) is None):
+        #print("Got an invalid character or nothing")
+        return False
+    elif len(name) > 64:
+        #print(f'Got a name length greater than 64. {len(name)}')
+        return False
+    return True
+
+
+
+if __name__ == '__main__':
+    main()
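For reference, a few spot checks of how the validators above behave on representative values from the test CSVs — a quick sanity sketch, assuming it runs in the same file (or after importing the functions), not an exhaustive test suite:

# expected results given the current patterns
assert IsUrlValid('http://tortaxi2dev6xjwbaydqzla77rrnth7yn2oqzjfmiuwn5h6vsk2a4syd.onion/') is True
assert IsNameValid('Tor Taxi') is True
assert IsNameValid('Tor;Taxi') is False   # ';' is outside [A-Za-z0-9 ]
assert IsStatusValid('✔️') is True
assert IsStatusValid('maybe') is False    # longer than 4 characters
assert IsScoreValid('100.0') is True
assert IsScoreValid('12345.678') is False # longer than 8 characters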
+ """ + # check if the characters are only [a-zA-Z0-9 ] with maximum 64 chars max + #(careful with the ' and , make sure you test if it fucks the csv up or else) + # if OK return True + #if not : return False + pattern = re.compile("^[A-Za-z0-9 ]+$") + for category in categories: + category.strip() + if pattern.fullmatch(category) is None: + #print('Got an empty list or invalid chars') + return False + elif len(category) > 64: + #print('Category is too long') + return False + else: + return True + +def IsNameValid(name: str)->bool: + """ + Check the parameter name only contains [a-zA-Z0-9 ] and is 64 chars long. + """ + # check if the characters are only [a-zA-Z0-9 ] with maximum 64 chars max + #(careful with the ' and , make sure you test if it fucks the csv up or else) + # if OK return True + #if not : return False + pattern = re.compile("^[A-Za-z0-9 ]+$") + name = name.strip() + if (pattern.fullmatch(name) is None): + #print("Got an invalid character or nothing") + return False + elif len(name) > 64: + #print(f'Got a name length greater than 64. {len(name)}') + return False + return True + + + +if __name__ == '__main__': + main() diff --git a/scripts/tests/sensitive.csv b/scripts/tests/sensitive.csv new file mode 100644 index 0000000..f28d586 --- /dev/null +++ b/scripts/tests/sensitive.csv @@ -0,0 +1,8 @@ +sensitive-words +Market +market +drug +drugs +Drugz +Search +Engine diff --git a/scripts/tests/unverified.csv b/scripts/tests/unverified.csv index 9003a03..1ff856e 100644 --- a/scripts/tests/unverified.csv +++ b/scripts/tests/unverified.csv @@ -1,4 +1,18 @@ Instance,Category,Name,URL,Sensitive,Description,Status,Score -uptime.nowherejezfoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion,Infos and Links,Tor Taxi,http://tortaxi2dev6xjwbaydqzla77rrnth7yn2oqzjfmiuwn5h6vsk2a4syd.onion/,✔️,"List of links to go to popular darknet places",✔️,100.0 - - +uptime.nowherejezfoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion,Infos and Links,Tor Taxi,http://tortaxi2dev6xjwbaydqzla77rrnth7yn2oqzjfmiuwn5h6vsk2a4syd.onion/,,List of links to go to popular darknet places,✔️,100.0 +uptime.nowherejezfoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion,Infos and Links,Tor Taxi,http://tortaxi2dev6xjwbaydqzla77rrnth7yn2oqzjfmiuwn5h6vsk2a4syd.onion/,,List of links to go to popular darknet places,✔️,100.0 +uptime.nowherejezfoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion,Infos and Links,Tor Taxi,http://tortaxi2dev6xjwbaydqzla77rrnth7yn2oqzjfmiuwn5h6vsk2a4syd.onion/,,List of links to go to popular darknet places,✔️,100.0 +uptime.nowherejezfoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion,Infos and Links,Tor Taxi,http://tortaxi2dev6xjwbaydqzla77rrnth7yn2oqzjfmiuwn5h6vsk2a4syd.onion/,❌,List of links to go to popular darknet places,,100.0 +uptime.nowherejezfoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion,Infos and Links,Tor Taxi,http://tortaxi2dev6xjwbaydqzla77rrnth7yn2oqzjfmiuwn5h6vsk2a4syd.onion/,✔️,List of links to go to popular darknet places,,100.0 +uptime.nowherejezfoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion,Infos and Links,Tor Taxi,http://tortaxi2dev6xjwbaydqzla77rrnth7yn2oqzjfmiuwn5h6vsk2a4syd.onion/,✔️,List of links to go to popular darknet places,,100.0 +uptime.nowherejezfoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion,Infos and Links,Tor Taxi,http://tortaxi2dev6xjwbaydqzla77rrnth7yn2oqzjfmiuwn5h6vsk2a4syd.onion/,❌,List of links to go to popular darknet places,,100.0 +uptime.nowherejezfoltodf4jiyl6r56jnzintap5vyjlidruga7fkirfsnfizflqd.onion,Infos and Links,Tor 
Taxi,http://tortaxi2dev6xjwbaydqzla77rrnth7yn2oqzjfmiuwn5h6vsk2a4syd.onion/,✔️,List of links to go to popular darknet places,✔️,500.0 +uptime.nowherejezfoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion,Infos and Links,Tor Taxi,http://tortaxi2dev6xjwbaydqzla77rrnth7yn2oqzjfmiuwn5h6vsk2a4syd.onion/,✔️,List of links to go to popular darknet places,✔️,300.0 +uptime.nowherejezfoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion,Infos and Links,Tor Taxi,http://tortaxi2dev6xjwbaydqzldruga77rrnth7yn2oqzjfmiuwn5h6vsk2a4syd.onion/,✔️,List of links to go to popular darknet places,✔️,0.0 +uptime.nowherejezfoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion,Infos and Links,Tor Taxi,http://tortaxi2dev6xjwbaydqzla77rrnth7yn2oqzjfmiuwn5h6vsk2a4syd.onion/,✔️,List of links to go to popular darknet places,✔️,100.0 +uptime.nowherejezfoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion,Infos and Links,Tor Taxi,http://tortaxi2dev6xjwbaydqzla77rrnth7yn2oqzjfmiuwn5h6vsk2a4syd.onion/,❌,List of links to go to popular darknet places,✔️,100.0 +uptime.nowherejezfoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion,Infos and Links,Tor Taxi,http://tortaxi2dev6xjwbaydqzla77rrnth7yn2oqzjfmiuwn5h6vsk2a4syd.onion/,✔️,List of links to go to popular darknet places,✔️,100.0 +uptime.nowherejezfoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion,Infos and Links,Tor Taxi,http://tortaxi2dev6xjwbaydqzla77rrnth7yn2oqzjfmiuwn5h6vsk2a4syd.onion/,✔️,List of links to go to popular darknet places,✔️,100.0 +uptime.nowherejezfoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion,Infos and Links,Tor Taxi,http://tortaxi2dev6xjwbaydqzla77rrnth7yn2oqzjfmiuwn5h6vsk2a4syd.onion/,❌,List of links to go to popular darknet places,✔️,100.0 +uptime.nowherejezfoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion,Infos and Links,Tor Taxi,http://tortaxi2dev6xjwbaydqzla77rrnth7yn2oqzjfmiuwn5h6vsk2a4syd.onion/,✔️,List of links to go to populadrugr darknet places,✔️,100.0 +uptime.nowherejezfoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion,Infos and Links,Tor Taxi,http://tortaxi2dev6xjwbaydqzla77rrnth7yn2oqzjfmiuwn5h6vsk2a4syd.onion/,✔️,List of links to go to popular darknet places,✔️,100.0 diff --git a/scripts/tests/verified.csv b/scripts/tests/verified.csv index 194eb98..e630665 100644 --- a/scripts/tests/verified.csv +++ b/scripts/tests/verified.csv @@ -1,6 +1,8 @@ Instance,Category,Name,URL,Sensitive,Description,Status,Score uptime.nowherejezfoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion,TestCategory,NewWebsite,http://newwebsitewoidwajiawdhjoidwahjoadiwhj.onion,n,"This is a new website that we add, it has this description",, uptime.nowherejezfoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion,TestCategory,NewWebsite,http://newwebsitewoidwajiawdhjoidwahjoadiwhj.onion,n,"This is a new website that we add, it has this description",, +uptime.nowherejezfoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion,TestCategory,NewWebsite,http://newwebsitewoidwajiawdhjoidwahjoadiwhj.onion,n,"This is a new website that we add, it has this description",, +uptime.nowherejezfoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion,TestCategory,NewWebsite,http://newwebsitewoidwajiawdhjoidwahjoadiwhj.onion,n,"This is a new website that we add, it has this description",, FIRSTherefoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion,Infos and Links,Psychonaut Wiki,http://vvedndyt433kopnhv6vejxnut54y5752vpxshjaqmj7ftwiu6quiv2ad.onion/,,"This is the wiki for psychonauts, it contains infos on substances and trip reports",✔️,100.0 
somewherefoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion,Infos and Links,1DNM Bible,http://biblemeowimkh3utujmhm6oh2oeb3ubjw2lpgeq3lahrfr2l6ev6zgyd.onion/,✔️,General guide on how to navigate the Darknet to buy drugs,✔️,100.0 somewherefoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion,Infos and Links,2DNM Bible,http://biblemeowimkh3utujmhm6oh2oeb3ubjw2lpgeq3lahrfr2l6ev6zgyd.onion/,✔️,General guide on how to navigate the Darknet to buy drugs,✔️,100.0 diff --git a/www/participants/uptime.nowherejezfoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion/unverified.csv b/www/participants/uptime.nowherejezfoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion/unverified.csv index c98aafd..3c69df1 100644 --- a/www/participants/uptime.nowherejezfoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion/unverified.csv +++ b/www/participants/uptime.nowherejezfoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion/unverified.csv @@ -1,12 +1 @@ Instance,Category,Name,URL,Sensitive,Description,Status,Score -uptime.nowherejezfoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion,aaa,aa,http://niggaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa.onion,y,test,, -uptime.nowherejezfoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion,bbb,aaa,ccc.com,y,,, -uptime.nowherejezfoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion,w,q,adwwawa.com,n,dwaawdwa,, -uptime.nowherejezfoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion,w,a,dwa.com,n,,, -uptime.nowherejezfoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion,wadwda,dwawdawawda,meinnigga.com,n,"This is a new website that we add, it has this description",, -uptime.nowherejezfoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion,b,a,http://newwebsitewoidwajiawdhjoidwahjoadiwhj.onion,n,"This is a new website that we add, it has this description",, -uptime.nowherejezfoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion,TestCategory,NewWebsite,http://newwebsitewoidwajiawdhjoidwahjoadiwhj.onion,n,"This is a new website that we add, it has this description",, -uptime.nowherejezfoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion,TestCategory,NewWebsite,http://newwebsitewoidwajiawdhjoidwahjoadiwhj.onion,n,"This is a new website that we add, it has this description",, -uptime.nowherejezfoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion,TestCategory,NewWebsite,http://newwebsitewoidwajiawdhjoidwahjoadiwhj.onion,n,"This is a new website that we add, it has this description",, -uptime.nowherejezfoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion,TestCategory,NewWebsite,http://newwebsitewoidwajiawdhjoidwahjoadiwhj.onion,n,"This is a new website that we add, it has this description",, -,Infos and Links,Tor Taxi,http://tortaxi2dev6xjwbaydqzla77rrnth7yn2oqzjfmiuwn5h6vsk2a4syd.onion/,✔️,List of links to go to popular darknet places,✔️,100.0 diff --git a/www/participants/uptime.nowherejezfoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion/verified.csv b/www/participants/uptime.nowherejezfoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion/verified.csv index 443af65..3c69df1 100644 --- a/www/participants/uptime.nowherejezfoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion/verified.csv +++ b/www/participants/uptime.nowherejezfoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion/verified.csv @@ -1,16 +1 @@ Instance,Category,Name,URL,Sensitive,Description,Status,Score -uptime.nowherejezfoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion,TestCategory,NewWebsite,http://newwebsitewoidwajiawdhjoidwahjoadiwhj.onion,n,"This is a new website that we add, it has this description",, 
-uptime.nowherejezfoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion,TestCategory,NewWebsite,http://newwebsitewoidwajiawdhjoidwahjoadiwhj.onion,n,"This is a new website that we add, it has this description",, -uptime.nowherejezfoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion,TestCategory,NewWebsite,http://newwebsitewoidwajiawdhjoidwahjoadiwhj.onion,n,"This is a new website that we add, it has this description",, -uptime.nowherejezfoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion,Test,Test,http://newwebsitewoidwajiawdhjoidwahjoadiwhj.onion,n,"This is a new website that we add, it has this description",, -uptime.nowherejezfoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion,TestCategory,Test,http://newwebsitewoidwajiawdhjoidwahjoadiwhj.onion,n,"This is a new website that we add, it has this description",, -uptime.nowherejezfoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion,Test,Test,http://newwebsitewoidwajiawdhjoidwahjoadiwhj.onion,n,"This is a new website that we add, it has this description",, -uptime.nowherejezfoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion,TestCategory,Test,http://newwebsitewoidwajiawdhjoidwahjoadiwhj.onion,n,"This is a new website that we add, it has this description",, -uptime.nowherejezfoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion,Test,Test,http://newwebsitewoidwajiawdhjoidwahjoadiwhj.onion,n,"This is a new website that we add, it has this description",, -uptime.nowherejezfoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion,TestCategory,Test,http://newwebsitewoidwajiawdhjoidwahjoadiwhj.onion,n,"This is a new website that we add, it has this description",, -uptime.nowherejezfoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion,TestCategory,Test,http://newwebsitewoidwajiawdhjoidwahjoadiwhj.onion,n,"This is a new website that we add, it has this description",, -uptime.nowherejezfoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion,Test,Test,http://newwebsitewoidwajiawdhjoidwahjoadiwhj.onion,n,"This is a new website that we add, it has this description",, -uptime.nowherejezfoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion,TestCategory,Test,http://newwebsitewoidwajiawdhjoidwahjoadiwhj.onion,n,"This is a new website that we add, it has this description",, -uptime.nowherejezfoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion,Test,Test,http://newwebsitewoidwajiawdhjoidwahjoadiwhj.onion,n,"This is a new website that we add, it has this description",, -,Infos and Links,Psychonaut Wiki,http://vvedndyt433kopnhv6vejxnut54y5752vpxshjaqmj7ftwiu6quiv2ad.onion/,,"This is the wiki for psychonauts, it contains infos on substances and trip reports",✔️,100.0 -,Infos and Links,DNM Bible,http://biblemeowimkh3utujmhm6oh2oeb3ubjw2lpgeq3lahrfr2l6ev6zgyd.onion/,✔️,General guide on how to navigate the Darknet to buy drugs,✔️,100.0 diff --git a/www/participants/uptime.nowherejezfoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion/webring-participants.csv b/www/participants/uptime.nowherejezfoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion/webring-participants.csv index 2641837..a2c90ae 100644 --- a/www/participants/uptime.nowherejezfoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion/webring-participants.csv +++ b/www/participants/uptime.nowherejezfoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion/webring-participants.csv @@ -1,2 +1,5 @@ Name,URL,Description,Trusted,Status,Score -Nowhere,http://uptime.nowherejezfoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion,"Darknet Webring Onion Participant",✔️,✔️,100.0 
+Nowhere,uptime.nowherejezfoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion,First instance,,, +Nowhere,uptime.nowherejezfoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion,First webring participant,,, +Nowhere,uptime.nowherejezfoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion,New webring participant,,, +Nowhere,http://uptime.nowherejezfoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion,Darknet Webring Onion Participant,✔️,✔️,100.0