mirror of
http://git.nowherejezfoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion/nihilist/darknet-lantern.git
synced 2025-05-16 20:26:58 +00:00
WIP still, but good progress on option 4
parent d891df79a2, commit 9d81e7a779
10 changed files with 420 additions and 107 deletions
@@ -29,6 +29,8 @@ def main():
     templatepath=rootpath+'templates/'
     verifiedcsvfile=instancepath+'/verified.csv'
     unverifiedcsvfile=instancepath+'/unverified.csv'
+    blcsvfile=instancepath+'/blacklist.csv'
+    secsvfile=instancepath+'/sensitive.csv'
     # check if instancepath exists, if not then create the directory
     if not os.path.exists(instancepath):
         os.makedirs(instancepath)
@@ -43,6 +45,8 @@ def main():
         # now that they exist, get vdf and uvdf
         vdf = pd.read_csv(verifiedcsvfile)
         uvdf = pd.read_csv(unverifiedcsvfile)
+        bldf = pd.read_csv(blcsvfile)
+        sedf = pd.read_csv(secsvfile)
         print("[+] file exists, your Webring URL is", instance)
         isitvalid = "y"
     else:
@@ -226,8 +230,8 @@ Managing Wordlists:
            webring_participant_url = input("What is the onion domain of the new webring participant? (ex: uptime.nowherejezfoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion) ")
            # check if the directory exists locally or not,
            participantdir=rootpath+'www/participants/'+webring_participant_url
-            #if not os.path.isdir(participantdir): # to test on your own instance
-            if os.path.isdir(participantdir):
+            if not os.path.isdir(participantdir): # to test on your own instance
+            #if os.path.isdir(participantdir):
                # if it does, it means that the webring is ALREADY added
                print("[-] Webring Participant is already listed, skipping.")
                return False
@@ -298,15 +302,63 @@ Managing Wordlists:
            print("[+] file written, let's read it")
            f = open(csvfilepath,"r")
            print(f.read())
-            # TODO and remove all of the invalid entries !!!
-            #######################################################################
-            #newrow=[instance,category,name,url,sensi,desc,'','']
-            #print("[+] NEWROW=",newrow)
-            #uvdf.loc[-1] = newrow # adding a row
-            #uvdf.index = uvdf.index + 1 # shifting index
-            #uvdf = uvdf.sort_index() # sorting by index
-            #print("[+] New row added! now writing the csv file:")
-            #uvdf.to_csv(unverifiedcsvfile, index=False)
+
+            #######################################################################
+            ########### PERFORM SANITY CHECKS ON the webring participant's verified.csv and unverified.csv ##################
+            for w in ['verified.csv','unverified.csv']:
+                csvfilepath=participantdir+'/'+w
+                csvdf = pd.read_csv(csvfilepath)
+
+                #print(bldf[['blacklisted-words']])
+                bldf[['blacklisted-words']].iterrows()
+                rows2delete= [] # it is an empty list at first
+
+                for i,j in csvdf.iterrows():
+                    #print("[+] Unverified.csv ROW=",i, uvdf.at[i, 'Instance'], uvdf.at[i, 'Category'], uvdf.at[i, 'Name'], uvdf.at[i, 'URL'], uvdf.at[i, 'Description'])
+                    #print("[+] Unverified.csv ROW=",i, uvdf.iloc[[i]])
+                    #row=uvdf.iloc[[i]] #it displays the index
+                    row=csvdf.loc[i,:].values.tolist()
+                    #print(i,row)
+
+                    ### SANITY CHECK 1: Mark all the rows that have incorrect formatting for deletion###
+                    #print("[+] ROW=",i,"ROW CONTENTS=", IsUrlValid(uvdf.at[i, 'Instance']), IsCategoryValid(uvdf.at[i, 'Category']), IsNameValid(uvdf.at[i, 'Name']), IsUrlValid(uvdf.at[i, 'URL']), IsStatusValid(uvdf.at[i, 'Sensitive']), IsDescriptionValid(uvdf.at[i, 'Description']), IsStatusValid(uvdf.at[i, 'Status']), IsScoreValid(uvdf.at[i, 'Score']))
+                    if IsUrlValid(csvdf.at[i, 'Instance']) is False or IsCategoryValid(csvdf.at[i, 'Category']) is False or IsNameValid(csvdf.at[i, 'Name']) is False or IsUrlValid(csvdf.at[i, 'URL']) is False or IsStatusValid(csvdf.at[i, 'Sensitive']) is False or IsDescriptionValid(csvdf.at[i, 'Description']) is False or IsStatusValid(csvdf.at[i, 'Status']) is False or IsScoreValid(csvdf.at[i, 'Score']) is False:
+                        #mark the row for deletion as it has invalid inputs
+                        if i not in rows2delete:
+                            print("Marking row", i,"for deletion, as it has invalid inputs")
+                            rows2delete.append(i) #mark the row for deletion if not already done
+
+                    ### SANITY CHECK 2: Mark all rows that are not allowed (blacklist) for deletion ###
+                    for k,l in bldf.iterrows():
+                        #print("[+] Blacklisted word=",k, bldf.at[k, 'blacklisted-words'])
+                        blword=bldf.at[k, 'blacklisted-words']
+                        if any(blword in str(x) for x in row) == True:
+                            #print("found blacklisted word! marking row for deletion")
+                            if i not in rows2delete:
+                                print("Marking row", i,"for deletion, as it matches with a blacklisted word")
+                                rows2delete.append(i) #mark the row for deletion if not already done
+
+                    ### SANITY CHECK 3: Mark all the rows that are supposed to be sensitive ###
+                    for k,l in sedf.iterrows():
+                        #print("[+] Sensitive word=",k, sedf.at[k, 'sensitive-words'])
+                        seword=sedf.at[k, 'sensitive-words']
+                        if any(seword in str(x) for x in row) == True:
+                            if csvdf.at[i, 'Sensitive'] != '✔️':
+                                print("Marking row", i,"as sensitive, as it matches with a sensitive word")
+                                uvdf.at[i, 'Sensitive']='✔️'
+
+                print('[-] Rows to delete: ',rows2delete)
+
+                # TODO : MAKE SURE IT WORKS IN PROD
+                for i in rows2delete:
+                    row=csvdf.loc[i,:].values.tolist()
+                    print('[+] REMOVING ROW :',i,row)
+                    csvdf.drop(i, inplace= True)
+                csvdf.to_csv(csvfilepath, index=False)
+                ##############################################
+
        case "5":
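Note on the hunk above: the new sanity-check pass marks offending row indices first and only drops them after the scan, which keeps the indices valid while iterating. A minimal standalone sketch of the same mark-then-drop pattern, on hypothetical toy data rather than a participant's real CSVs:

    import pandas as pd

    # toy stand-ins for csvdf and bldf (hypothetical values, not from the repo)
    csvdf = pd.DataFrame({
        'Name': ['Tor Taxi', 'BadSite', 'DNM Bible'],
        'URL': ['http://taxi.onion', 'http://p0rn.onion', 'http://bible.onion'],
    })
    bldf = pd.DataFrame({'blacklisted-words': ['p0rn', 'gore']})

    rows2delete = []
    for i, _ in csvdf.iterrows():
        row = csvdf.loc[i, :].values.tolist()
        for k, _ in bldf.iterrows():
            blword = bldf.at[k, 'blacklisted-words']
            # plain substring test against every cell of the row
            if any(blword in str(x) for x in row) and i not in rows2delete:
                rows2delete.append(i)

    for i in rows2delete:
        csvdf.drop(i, inplace=True)  # drop by index label, as in the diff
    print(csvdf)                     # BadSite's row is gone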
@@ -375,6 +427,8 @@ Managing Wordlists:
            # TODO print("do you want to 1) add words or 2) remove words ?")
            # TODO display the contents of blacklist.csv file

+            # TODO CASE 10 : cleanup all duplicates in unverified + verified.csv, based on the url (check if each url appears more than once, and if they do, remove them + write to csv file)
+
        case _:
            print("[-] Exiting")
            return True
@@ -420,26 +474,26 @@ def IsOnionValid(url: str)-> bool:
        pattern = re.compile("^[A-Za-z0-9.]+(\.onion)?$")
        url = url.strip().removesuffix('/')
        if url.startswith('http://'):
-            print('URL starts with http')
+            #print('URL starts with http')
            # Removes the http://
            domain = url.split('/')[2]
            if pattern.fullmatch(domain) is not None:
                if len(domain.split('.')) > 3:
                    n_subdomians = len(domain.split('.'))
                    # Checks if there is more than 1 subdomain. "subdomain.url.onion" only
-                    print(f"This domain have more than one subdomain. There are {n_subdomians} subdomains")
+                    #print(f"This domain have more than one subdomain. There are {n_subdomians} subdomains")
                    return False
                else:
                    if len(domain) < 62:
-                        print("Domain length is less than 62.")
+                        #print("Domain length is less than 62.")
                        return False
                    return True
            elif pattern.fullmatch(domain) is None:
-                print("Domain contains invalid character.")
-                print(domain)
+                #print("Domain contains invalid character.")
+                #print(domain)
                return False
            else:
-                print("Domain not valid")
+                #print("Domain not valid")
                return False
        else:
            #print("URL doesn't start http")
@@ -447,25 +501,23 @@ def IsOnionValid(url: str)-> bool:
                if len(url.split('.')) > 3:
                    n_subdomians = len(url.split('.'))
                    # Checks if there is more than 1 subdomain. "subdomain.url.onion" only
-                    print(f"This domain have more than one subdomain. There are {n_subdomians - 1} subdomains")
+                    #print(f"This domain have more than one subdomain. There are {n_subdomians - 1} subdomains")
                    return False
                else:
                    if len(url) < 62:
-                        print("Domain length is less than 62.")
+                        #print("Domain length is less than 62.")
                        return False
                    return True
            elif pattern.fullmatch(url) is None:
-                print("Domain contains invalid character.")
-                print(url)
+                #print("Domain contains invalid character.")
+                #print(url)
                return False
            else:
-                print("Domain not valid")
+                #print("Domain not valid")
                return False
    except Exception as e:
        print(f"Error: {e}")
-
-

def IsUrlValid(url:str)->bool:
    """
    Check if url is valid both dark net end clearnet.
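For reference, the `len(domain) < 62` floor in IsOnionValid lines up with v3 onion addresses: 56 base32 characters plus the 6 characters of ".onion" give 62. A quick sketch against the instance's own domain as it appears in this commit:

    import re

    pattern = re.compile(r"^[A-Za-z0-9.]+(\.onion)?$")
    domain = "uptime.nowherejezfoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion"
    print(pattern.fullmatch(domain) is not None)  # True: only [A-Za-z0-9.]
    print(len(domain.split('.')))                 # 3 labels, so the "> 3" subdomain check passes
    print(len(domain) >= 62)                      # True: at least v3-address length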
@@ -475,34 +527,57 @@ def IsUrlValid(url:str)->bool:
    # if OK return True
    #if not : return False
    pattern = re.compile("^[A-Za-z0-9:/.]+$")
+    url = str(url)
    if url.endswith('.onion'):
        return IsOnionValid(url)
    else:
        if not url.__contains__('.'):
-            print("No (DOT) in clearnet url")
+            #print("No (DOT) in clearnet url")
            return False
        if pattern.fullmatch(url) is None:
-            print('Url contains invalid chars')
+            #print('Url contains invalid chars')
            return False
        return True

def IsStatusValid(status: str)-> bool:
    """
-    Checks if status contains only [v,w]. Verbose only if False is returned
+    Checks if status contains only [v,x,❌,✔️]. Verbose only if False is returned
    """
-    # check if the characters are only [vx] with maximum 1 chars max
-    # if OK return True
-    #if not : return False
-    pattern = ['y','n']
-    if len(status) != 1:
-        print("Got more than one character or nothing.")
+    pattern = ['y','n','✔️','❌','','nan']
+    status = str(status)
+    status.strip()
+    #print('[+] STATUS = ',status.splitlines())
+    if len(status) > 4:
+        #print("Status: Got more than one character or nothing.")
        return False
    elif (status not in pattern):
-        print("Got an invalid character it must be either y or n")
+        #print("Status: Got an invalid character it must be either y, n, ✔️, or ❌ ")
        return False

    return True

+def IsScoreValid(score:str)->bool:
+    """
+    Check the Score is only "^[0-9.,]+$" with 8 max chars.
+    """
+    # check if the characters are only [a-zA-Z0-9.,' ] with maximum 256 chars max
+    #(careful with the ' and , make sure you test if it fucks the csv up or else)
+    # if OK return True
+    #if not : return False
+    pattern = re.compile("^[0-9.,]+$")
+    score = str(score)
+    score.strip()
+    if pattern.fullmatch(score) is None:
+        # empty description is fine as it's optional
+        return False
+    elif len(score) > 8:
+        #print("score is greater than 8 chars")
+        return False
+    # empty score is fine
+    return True
+
def IsDescriptionValid(desc:str)->bool:
    """
    Check the categories are only [a-zA-Z0-9,.' ] with 256 max chars.
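A few spot-checks of the reworked IsStatusValid and the new IsScoreValid, assuming both are imported from the script above; the expected results follow the new logic:

    print(IsStatusValid('✔️'))          # True: in the allowed pattern list
    print(IsStatusValid(float('nan')))  # True: str() turns a NaN cell into 'nan', which is allowed
    print(IsStatusValid('maybe'))       # False: more than 4 characters
    print(IsScoreValid('100.0'))        # True: only [0-9.,] and within 8 chars
    print(IsScoreValid('123456789'))    # False: 9 chars is over the limit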
@@ -512,12 +587,15 @@ def IsDescriptionValid(desc:str)->bool:
    # if OK return True
    #if not : return False
    pattern = re.compile("^[A-Za-z0-9-.,' ]+$")
+    desc = str(desc)
    desc.strip()
-    # empty description is fine as it's optional
+    if pattern.fullmatch(desc) is None:
+        # empty description is fine as it's optional
+        return False
    if desc == "DEFAULT":
        return False
    elif len(desc) > 256:
-        print("desc is greater than 256 chars")
+        #print("desc is greater than 256 chars")
        return False
    return True
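Two edge cases of the reworked IsDescriptionValid seem worth noting: the `+` in the pattern requires at least one character, so an empty description now fails the fullmatch despite the "empty description is fine" comment, while a NaN cell stringifies to 'nan' and passes. A small check with hypothetical values:

    import re

    pattern = re.compile("^[A-Za-z0-9-.,' ]+$")
    print(pattern.fullmatch("") is None)                      # True: "" fails the '+' pattern
    print(pattern.fullmatch(str(float('nan'))) is not None)   # True: NaN becomes 'nan'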
@@ -536,7 +614,8 @@ def IsCategoryValid(categories: list)-> bool:
            #print('Got an empty list or invalid chars')
            return False
        elif len(category) > 64:
-            print('Category is too long')
+            #print('Category is too long')
+            return False
        else:
            return True
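The added `return False` closes a real gap: before this commit an over-long category only hit a print and the function fell through to an implicit `return None`, and since `None is False` evaluates to False, the sanity checks' `IsCategoryValid(...) is False` test never flagged the row. A tiny check with a hypothetical value:

    print(IsCategoryValid(['x' * 65]))  # now False; previously None
    print(None is False)                # False: why the old fall-through slipped past the check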
@@ -554,12 +633,13 @@ def IsNameValid(name: str)->bool:
        #print("Got an invalid character or nothing")
        return False
    elif len(name) > 64:
-        print(f'Got a name lenght greater than 64. {len(name)}')
+        #print(f'Got a name length greater than 64. {len(name)}')
        return False
    return True



+
if __name__ == '__main__':
    main()
scripts/tests/blacklist.csv (new file, 7 lines)
@@ -0,0 +1,7 @@
+blacklisted-words
+porn
+pron
+p0rn
+pr0n
+gore
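These words are applied as case-sensitive substring matches over every cell of a row (the `any(blword in str(x) for x in row)` test above), so a short word can also hit harmless text. A sketch with a hypothetical row:

    blword = 'pron'
    row = ['example.onion', 'Language', 'how to pronounce words', 'http://x.onion']
    print(any(blword in str(x) for x in row))  # True: 'pron' sits inside 'pronounce'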
@@ -4,37 +4,6 @@ def main():
    #print("aaa")
    csvfile="verified.csv"
    df = pd.read_csv(csvfile)
-    #for i in range (df.index.stop):
-    # print(i,df.at[i,i])
-
-    #for i in range (df.index.stop):
-    # for col in df.columns.values.tolist():
-    # print(df.at[i][col])
-
-    #print(df)
-    # print("[+] Display 2 columns:")
-    #display 2 columns:
-    # print(df[['Name', 'URL']])
-
-    # print("[+] Display rows 0 to 5")
-    #display rows from 0 to 5
-    # print(df[0:5])
-
-    #display rows from 0 to 5
-    # print("[+] Display rows 3 to 5 and columns Name and URL")
-    # print(df.loc[3:5,['Name','URL']])
-    #print(df[0:1,['Name','URL']])
-
-    # print("[+] Display all rows for and columns name and URL")
-    #df.loc[startrow:endrow, startcolumn:endcolumn]
-    # print(df.loc[0:df.index.stop,['Name','URL']])
-
-    #display rows from 0 to the end of indexes
-    # print("[+] Display all rows for all columns")
-    # print(df[0:df.index.stop])
-
-
    ##############################################################################
    print('\n[+] list the entire csv file and get all row IDs')
@@ -83,7 +52,7 @@ def main():
    index=-1
    while (index not in filter_df.index):
        #index=int(input("\n[+] Please select a valid row: "))
-        index=int("1")
+        index=int("12")
    print("ROW=",index, 'CONTENT=', filter_df.at[index, 'Name'], filter_df.at[index, 'URL'])

@@ -120,15 +89,16 @@ def main():

    print(df)

-    #write the dataframe into the csv file
-    #read and print the contents of the csv file
-    #re-add that row in the csv file
-    #remove that row from the dataframe
    print()

-    # then select a row in it (by the ID) and display it
-    # list
+    ###########
+    # list every word in the blacklist wordlist csv file
+    # for each word, check if it matches with any of the rows in unverified.csv
+    # if it matches (on any column!), remove that row and write to the csv file
+    # list every word in the sensitive wordlist csv file
+    # if it matches (on any column!), mark the sensitive column as V

if __name__ == '__main__':
    main()
scripts/tests/csvwork2.py (new file, 255 lines)
@@ -0,0 +1,255 @@
+import csv, json, pandas as pd, glob, re
+
+def main():
+    #print("aaa")
+    vcsvfile="verified.csv"
+    vdf = pd.read_csv(vcsvfile)
+    unverifiedcsvfile="unverified.csv"
+    uvdf = pd.read_csv(unverifiedcsvfile)
+    blcsvfile="blacklist.csv"
+    bldf = pd.read_csv(blcsvfile)
+    secsvfile="sensitive.csv"
+    sedf = pd.read_csv(secsvfile)
+
+    ###########
+    # list every word in the blacklist wordlist csv file
+    # for each word, check if it matches with any of the rows in unverified.csv
+    # if it matches (on any column!), remove that row and write to the csv file
+
+    ########### SANITY CHECKS ON UNVERIFIED.CSV ##################
+    #print(bldf[['blacklisted-words']])
+    bldf[['blacklisted-words']].iterrows()
+    rows2delete= [] # it is an empty list at first
+    for i,j in uvdf.iterrows():
+        #print("[+] Unverified.csv ROW=",i, uvdf.at[i, 'Instance'], uvdf.at[i, 'Category'], uvdf.at[i, 'Name'], uvdf.at[i, 'URL'], uvdf.at[i, 'Description'])
+        #print("[+] Unverified.csv ROW=",i, uvdf.iloc[[i]])
+        #row=uvdf.iloc[[i]] #it displays the index
+        row=uvdf.loc[i,:].values.tolist()
+        #print(i,row)
+
+        ### SANITY CHECK 1: Mark all the rows that have incorrect formatting for deletion###
+        #print("[+] ROW=",i,"ROW CONTENTS=", IsUrlValid(uvdf.at[i, 'Instance']), IsCategoryValid(uvdf.at[i, 'Category']), IsNameValid(uvdf.at[i, 'Name']), IsUrlValid(uvdf.at[i, 'URL']), IsStatusValid(uvdf.at[i, 'Sensitive']), IsDescriptionValid(uvdf.at[i, 'Description']), IsStatusValid(uvdf.at[i, 'Status']), IsScoreValid(uvdf.at[i, 'Score']))
+        if IsUrlValid(uvdf.at[i, 'Instance']) is False or IsCategoryValid(uvdf.at[i, 'Category']) is False or IsNameValid(uvdf.at[i, 'Name']) is False or IsUrlValid(uvdf.at[i, 'URL']) is False or IsStatusValid(uvdf.at[i, 'Sensitive']) is False or IsDescriptionValid(uvdf.at[i, 'Description']) is False or IsStatusValid(uvdf.at[i, 'Status']) is False or IsScoreValid(uvdf.at[i, 'Score']) is False:
+            #mark the row for deletion as it has invalid inputs
+            if i not in rows2delete:
+                print("Marking row", i,"for deletion, as it has invalid inputs")
+                rows2delete.append(i) #mark the row for deletion if not already done
+
+        ### SANITY CHECK 2: Mark all rows that are not allowed (blacklist) for deletion ###
+        for k,l in bldf.iterrows():
+            #print("[+] Blacklisted word=",k, bldf.at[k, 'blacklisted-words'])
+            blword=bldf.at[k, 'blacklisted-words']
+            if any(blword in str(x) for x in row) == True:
+                #print("found blacklisted word! marking row for deletion")
+                if i not in rows2delete:
+                    print("Marking row", i,"for deletion, as it matches with a blacklisted word")
+                    rows2delete.append(i) #mark the row for deletion if not already done
+        ### SANITY CHECK 3: Mark all the rows that are supposed to be sensitive ###
+        for k,l in sedf.iterrows():
+            #print("[+] Sensitive word=",k, sedf.at[k, 'sensitive-words'])
+            seword=sedf.at[k, 'sensitive-words']
+            if any(seword in str(x) for x in row) == True:
+                if uvdf.at[i, 'Sensitive'] != '✔️':
+                    print("Marking row", i,"as sensitive, as it matches with a sensitive word")
+                    uvdf.at[i, 'Sensitive']='✔️'
+
+    print('[-] Rows to delete: ',rows2delete)
+
+    for i in rows2delete:
+        row=uvdf.loc[i,:].values.tolist()
+        print('[+] REMOVING ROW :',i,row)
+        uvdf.drop(i, inplace= True)
+    uvdf.to_csv(unverifiedcsvfile, index=False)
+    ##############################################
+
+    # list every word in the sensitive wordlist csv file
+    # if it matches (on any column!), mark the sensitive column as V
+
+    #############################################################################
+    return True
+##############################################################################
+
+#### PROTECTIONS AGAINST MALICIOUS CSV INPUTS ####
+
+def IsOnionValid(url: str)-> bool:
+    """
+    Checks if the domain(param) is a valid onion domain and return True else False.
+    """
+    # check if the characters are only [a-zA-Z0-9.] with maximum 128 chars max?
+    # check that it is only url.onion or subdomain.url.onion,
+    # if OK return True
+    #if not : return False
+    try:
+        pattern = re.compile("^[A-Za-z0-9.]+(\.onion)?$")
+        url = url.strip().removesuffix('/')
+        if url.startswith('http://'):
+            #print('URL starts with http')
+            # Removes the http://
+            domain = url.split('/')[2]
+            if pattern.fullmatch(domain) is not None:
+                if len(domain.split('.')) > 3:
+                    n_subdomians = len(domain.split('.'))
+                    # Checks if there is more than 1 subdomain. "subdomain.url.onion" only
+                    #print(f"This domain have more than one subdomain. There are {n_subdomians} subdomains")
+                    return False
+                else:
+                    if len(domain) < 62:
+                        #print("Domain length is less than 62.")
+                        return False
+                    return True
+            elif pattern.fullmatch(domain) is None:
+                #print("Domain contains invalid character.")
+                #print(domain)
+                return False
+            else:
+                #print("Domain not valid")
+                return False
+        else:
+            #print("URL doesn't start http")
+            if pattern.fullmatch(url) is not None:
+                if len(url.split('.')) > 3:
+                    n_subdomians = len(url.split('.'))
+                    # Checks if there is more than 1 subdomain. "subdomain.url.onion" only
+                    #print(f"This domain have more than one subdomain. There are {n_subdomians - 1} subdomains")
+                    return False
+                else:
+                    if len(url) < 62:
+                        #print("Domain length is less than 62.")
+                        return False
+                    return True
+            elif pattern.fullmatch(url) is None:
+                #print("Domain contains invalid character.")
+                #print(url)
+                return False
+            else:
+                #print("Domain not valid")
+                return False
+    except Exception as e:
+        print(f"Error: {e}")
+
+def IsUrlValid(url:str)->bool:
+    """
+    Check if url is valid both dark net end clearnet.
+    """
+    # check if the characters are only [a-zA-Z0-9.:/] with maximum 128 chars max?
+    # check that it is only http(s)://wordA.wordB or http(s)://WordC.WordB.WordC, (onion or not), clearnet is fine too (double check if those are fine!)
+    # if OK return True
+    #if not : return False
+    pattern = re.compile("^[A-Za-z0-9:/.]+$")
+    if url.endswith('.onion'):
+        return IsOnionValid(url)
+    else:
+        if not url.__contains__('.'):
+            #print("No (DOT) in clearnet url")
+            return False
+        if pattern.fullmatch(url) is None:
+            #print('Url contains invalid chars')
+            return False
+        return True
+
+def IsStatusValid(status: str)-> bool:
+    """
+    Checks if status contains only [v,x,❌,✔️]. Verbose only if False is returned
+    """
+    pattern = ['y','n','✔️','❌','','nan']
+    status = str(status)
+    status.strip()
+    #print('[+] STATUS = ',status.splitlines())
+    if len(status) > 4:
+        #print("Status: Got more than one character or nothing.")
+        return False
+    elif (status not in pattern):
+        #print("Status: Got an invalid character it must be either y, n, ✔️, or ❌ ")
+        return False
+
+    return True
+
+def IsScoreValid(score:str)->bool:
+    """
+    Check the Score is only "^[0-9.,]+$" with 8 max chars.
+    """
+    # check if the characters are only [a-zA-Z0-9.,' ] with maximum 256 chars max
+    #(careful with the ' and , make sure you test if it fucks the csv up or else)
+    # if OK return True
+    #if not : return False
+    pattern = re.compile("^[0-9.,]+$")
+    score = str(score)
+    score.strip()
+    if pattern.fullmatch(score) is None:
+        # empty description is fine as it's optional
+        return False
+    elif len(score) > 8:
+        #print("score is greater than 8 chars")
+        return False
+    # empty score is fine
+    return True
+
+def IsDescriptionValid(desc:str)->bool:
+    """
+    Check the categories are only [a-zA-Z0-9,.' ] with 256 max chars.
+    """
+    # check if the characters are only [a-zA-Z0-9.,' ] with maximum 256 chars max
+    #(careful with the ' and , make sure you test if it fucks the csv up or else)
+    # if OK return True
+    #if not : return False
+    pattern = re.compile("^[A-Za-z0-9-.,' ]+$")
+    desc.strip()
+    if pattern.fullmatch(desc) is None:
+        # empty description is fine as it's optional
+        return False
+    if desc == "DEFAULT":
+        return False
+    elif len(desc) > 256:
+        #print("desc is greater than 256 chars")
+        return False
+    return True
+
+def IsCategoryValid(categories: list)-> bool:
+    """
+    Check the categories are only [a-zA-Z0-9 ] with 64 max chars.
+    """
+    # check if the characters are only [a-zA-Z0-9 ] with maximum 64 chars max
+    #(careful with the ' and , make sure you test if it fucks the csv up or else)
+    # if OK return True
+    #if not : return False
+    pattern = re.compile("^[A-Za-z0-9 ]+$")
+    for category in categories:
+        category.strip()
+        if pattern.fullmatch(category) is None:
+            #print('Got an empty list or invalid chars')
+            return False
+        elif len(category) > 64:
+            #print('Category is too long')
+            return False
+        else:
+            return True
+
+def IsNameValid(name: str)->bool:
+    """
+    Check the parameter name only contains [a-zA-Z0-9 ] and is 64 chars long.
+    """
+    # check if the characters are only [a-zA-Z0-9 ] with maximum 64 chars max
+    #(careful with the ' and , make sure you test if it fucks the csv up or else)
+    # if OK return True
+    #if not : return False
+    pattern = re.compile("^[A-Za-z0-9 ]+$")
+    name = name.strip()
+    if (pattern.fullmatch(name) is None):
+        #print("Got an invalid character or nothing")
+        return False
+    elif len(name) > 64:
+        #print(f'Got a name length greater than 64. {len(name)}')
+        return False
+    return True
+
+if __name__ == '__main__':
+    main()
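csvwork2.py reads verified.csv, unverified.csv, blacklist.csv and sensitive.csv from the working directory and rewrites unverified.csv in place, so it is presumably meant to be run from scripts/tests/ (e.g. `python3 csvwork2.py`) on disposable copies of the test data. A quick way to inspect the result afterwards (a hypothetical follow-up, not part of the commit):

    import pandas as pd

    uvdf = pd.read_csv("unverified.csv")
    print(uvdf[['Name', 'Sensitive']])  # blacklisted rows dropped, sensitive rows marked '✔️'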
scripts/tests/sensitive.csv (new file, 8 lines)
@@ -0,0 +1,8 @@
+sensitive-words
+Market
+market
+drug
+drugs
+Drugz
+Search
+Engine
@@ -1,4 +1,18 @@
 Instance,Category,Name,URL,Sensitive,Description,Status,Score
-uptime.nowherejezfoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion,Infos and Links,Tor Taxi,http://tortaxi2dev6xjwbaydqzla77rrnth7yn2oqzjfmiuwn5h6vsk2a4syd.onion/,✔️,"List of links to go to popular darknet places",✔️,100.0
+uptime.nowherejezfoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion,Infos and Links,Tor Taxi,http://tortaxi2dev6xjwbaydqzla77rrnth7yn2oqzjfmiuwn5h6vsk2a4syd.onion/,,List of links to go to popular darknet places,✔️,100.0
+uptime.nowherejezfoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion,Infos and Links,Tor Taxi,http://tortaxi2dev6xjwbaydqzla77rrnth7yn2oqzjfmiuwn5h6vsk2a4syd.onion/,,List of links to go to popular darknet places,✔️,100.0
+uptime.nowherejezfoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion,Infos and Links,Tor Taxi,http://tortaxi2dev6xjwbaydqzla77rrnth7yn2oqzjfmiuwn5h6vsk2a4syd.onion/,,List of links to go to popular darknet places,✔️,100.0
+uptime.nowherejezfoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion,Infos and Links,Tor Taxi,http://tortaxi2dev6xjwbaydqzla77rrnth7yn2oqzjfmiuwn5h6vsk2a4syd.onion/,❌,List of links to go to popular darknet places,,100.0
+uptime.nowherejezfoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion,Infos and Links,Tor Taxi,http://tortaxi2dev6xjwbaydqzla77rrnth7yn2oqzjfmiuwn5h6vsk2a4syd.onion/,✔️,List of links to go to popular darknet places,,100.0
+uptime.nowherejezfoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion,Infos and Links,Tor Taxi,http://tortaxi2dev6xjwbaydqzla77rrnth7yn2oqzjfmiuwn5h6vsk2a4syd.onion/,✔️,List of links to go to popular darknet places,,100.0
+uptime.nowherejezfoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion,Infos and Links,Tor Taxi,http://tortaxi2dev6xjwbaydqzla77rrnth7yn2oqzjfmiuwn5h6vsk2a4syd.onion/,❌,List of links to go to popular darknet places,,100.0
+uptime.nowherejezfoltodf4jiyl6r56jnzintap5vyjlidruga7fkirfsnfizflqd.onion,Infos and Links,Tor Taxi,http://tortaxi2dev6xjwbaydqzla77rrnth7yn2oqzjfmiuwn5h6vsk2a4syd.onion/,✔️,List of links to go to popular darknet places,✔️,500.0
+uptime.nowherejezfoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion,Infos and Links,Tor Taxi,http://tortaxi2dev6xjwbaydqzla77rrnth7yn2oqzjfmiuwn5h6vsk2a4syd.onion/,✔️,List of links to go to popular darknet places,✔️,300.0
+uptime.nowherejezfoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion,Infos and Links,Tor Taxi,http://tortaxi2dev6xjwbaydqzldruga77rrnth7yn2oqzjfmiuwn5h6vsk2a4syd.onion/,✔️,List of links to go to popular darknet places,✔️,0.0
+uptime.nowherejezfoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion,Infos and Links,Tor Taxi,http://tortaxi2dev6xjwbaydqzla77rrnth7yn2oqzjfmiuwn5h6vsk2a4syd.onion/,✔️,List of links to go to popular darknet places,✔️,100.0
+uptime.nowherejezfoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion,Infos and Links,Tor Taxi,http://tortaxi2dev6xjwbaydqzla77rrnth7yn2oqzjfmiuwn5h6vsk2a4syd.onion/,❌,List of links to go to popular darknet places,✔️,100.0
+uptime.nowherejezfoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion,Infos and Links,Tor Taxi,http://tortaxi2dev6xjwbaydqzla77rrnth7yn2oqzjfmiuwn5h6vsk2a4syd.onion/,✔️,List of links to go to popular darknet places,✔️,100.0
+uptime.nowherejezfoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion,Infos and Links,Tor Taxi,http://tortaxi2dev6xjwbaydqzla77rrnth7yn2oqzjfmiuwn5h6vsk2a4syd.onion/,✔️,List of links to go to popular darknet places,✔️,100.0
+uptime.nowherejezfoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion,Infos and Links,Tor Taxi,http://tortaxi2dev6xjwbaydqzla77rrnth7yn2oqzjfmiuwn5h6vsk2a4syd.onion/,❌,List of links to go to popular darknet places,✔️,100.0
+uptime.nowherejezfoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion,Infos and Links,Tor Taxi,http://tortaxi2dev6xjwbaydqzla77rrnth7yn2oqzjfmiuwn5h6vsk2a4syd.onion/,✔️,List of links to go to populadrugr darknet places,✔️,100.0
+uptime.nowherejezfoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion,Infos and Links,Tor Taxi,http://tortaxi2dev6xjwbaydqzla77rrnth7yn2oqzjfmiuwn5h6vsk2a4syd.onion/,✔️,List of links to go to popular darknet places,✔️,100.0
@@ -1,6 +1,8 @@
 Instance,Category,Name,URL,Sensitive,Description,Status,Score
 uptime.nowherejezfoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion,TestCategory,NewWebsite,http://newwebsitewoidwajiawdhjoidwahjoadiwhj.onion,n,"This is a new website that we add, it has this description",,
 uptime.nowherejezfoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion,TestCategory,NewWebsite,http://newwebsitewoidwajiawdhjoidwahjoadiwhj.onion,n,"This is a new website that we add, it has this description",,
+uptime.nowherejezfoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion,TestCategory,NewWebsite,http://newwebsitewoidwajiawdhjoidwahjoadiwhj.onion,n,"This is a new website that we add, it has this description",,
+uptime.nowherejezfoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion,TestCategory,NewWebsite,http://newwebsitewoidwajiawdhjoidwahjoadiwhj.onion,n,"This is a new website that we add, it has this description",,
 FIRSTherefoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion,Infos and Links,Psychonaut Wiki,http://vvedndyt433kopnhv6vejxnut54y5752vpxshjaqmj7ftwiu6quiv2ad.onion/,,"This is the wiki for psychonauts, it contains infos on substances and trip reports",✔️,100.0
 somewherefoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion,Infos and Links,1DNM Bible,http://biblemeowimkh3utujmhm6oh2oeb3ubjw2lpgeq3lahrfr2l6ev6zgyd.onion/,✔️,General guide on how to navigate the Darknet to buy drugs,✔️,100.0
 somewherefoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion,Infos and Links,2DNM Bible,http://biblemeowimkh3utujmhm6oh2oeb3ubjw2lpgeq3lahrfr2l6ev6zgyd.onion/,✔️,General guide on how to navigate the Darknet to buy drugs,✔️,100.0
@@ -1,12 +1 @@
 Instance,Category,Name,URL,Sensitive,Description,Status,Score
-uptime.nowherejezfoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion,aaa,aa,http://niggaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa.onion,y,test,,
-uptime.nowherejezfoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion,bbb,aaa,ccc.com,y,,,
-uptime.nowherejezfoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion,w,q,adwwawa.com,n,dwaawdwa,,
-uptime.nowherejezfoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion,w,a,dwa.com,n,,,
-uptime.nowherejezfoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion,wadwda,dwawdawawda,meinnigga.com,n,"This is a new website that we add, it has this description",,
-uptime.nowherejezfoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion,b,a,http://newwebsitewoidwajiawdhjoidwahjoadiwhj.onion,n,"This is a new website that we add, it has this description",,
-uptime.nowherejezfoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion,TestCategory,NewWebsite,http://newwebsitewoidwajiawdhjoidwahjoadiwhj.onion,n,"This is a new website that we add, it has this description",,
-uptime.nowherejezfoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion,TestCategory,NewWebsite,http://newwebsitewoidwajiawdhjoidwahjoadiwhj.onion,n,"This is a new website that we add, it has this description",,
-uptime.nowherejezfoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion,TestCategory,NewWebsite,http://newwebsitewoidwajiawdhjoidwahjoadiwhj.onion,n,"This is a new website that we add, it has this description",,
-uptime.nowherejezfoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion,TestCategory,NewWebsite,http://newwebsitewoidwajiawdhjoidwahjoadiwhj.onion,n,"This is a new website that we add, it has this description",,
-,Infos and Links,Tor Taxi,http://tortaxi2dev6xjwbaydqzla77rrnth7yn2oqzjfmiuwn5h6vsk2a4syd.onion/,✔️,List of links to go to popular darknet places,✔️,100.0
@@ -1,16 +1 @@
 Instance,Category,Name,URL,Sensitive,Description,Status,Score
-uptime.nowherejezfoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion,TestCategory,NewWebsite,http://newwebsitewoidwajiawdhjoidwahjoadiwhj.onion,n,"This is a new website that we add, it has this description",,
-uptime.nowherejezfoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion,TestCategory,NewWebsite,http://newwebsitewoidwajiawdhjoidwahjoadiwhj.onion,n,"This is a new website that we add, it has this description",,
-uptime.nowherejezfoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion,TestCategory,NewWebsite,http://newwebsitewoidwajiawdhjoidwahjoadiwhj.onion,n,"This is a new website that we add, it has this description",,
-uptime.nowherejezfoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion,Test,Test,http://newwebsitewoidwajiawdhjoidwahjoadiwhj.onion,n,"This is a new website that we add, it has this description",,
-uptime.nowherejezfoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion,TestCategory,Test,http://newwebsitewoidwajiawdhjoidwahjoadiwhj.onion,n,"This is a new website that we add, it has this description",,
-uptime.nowherejezfoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion,Test,Test,http://newwebsitewoidwajiawdhjoidwahjoadiwhj.onion,n,"This is a new website that we add, it has this description",,
-uptime.nowherejezfoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion,TestCategory,Test,http://newwebsitewoidwajiawdhjoidwahjoadiwhj.onion,n,"This is a new website that we add, it has this description",,
-uptime.nowherejezfoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion,Test,Test,http://newwebsitewoidwajiawdhjoidwahjoadiwhj.onion,n,"This is a new website that we add, it has this description",,
-uptime.nowherejezfoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion,TestCategory,Test,http://newwebsitewoidwajiawdhjoidwahjoadiwhj.onion,n,"This is a new website that we add, it has this description",,
-uptime.nowherejezfoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion,TestCategory,Test,http://newwebsitewoidwajiawdhjoidwahjoadiwhj.onion,n,"This is a new website that we add, it has this description",,
-uptime.nowherejezfoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion,Test,Test,http://newwebsitewoidwajiawdhjoidwahjoadiwhj.onion,n,"This is a new website that we add, it has this description",,
-uptime.nowherejezfoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion,TestCategory,Test,http://newwebsitewoidwajiawdhjoidwahjoadiwhj.onion,n,"This is a new website that we add, it has this description",,
-uptime.nowherejezfoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion,Test,Test,http://newwebsitewoidwajiawdhjoidwahjoadiwhj.onion,n,"This is a new website that we add, it has this description",,
-,Infos and Links,Psychonaut Wiki,http://vvedndyt433kopnhv6vejxnut54y5752vpxshjaqmj7ftwiu6quiv2ad.onion/,,"This is the wiki for psychonauts, it contains infos on substances and trip reports",✔️,100.0
-,Infos and Links,DNM Bible,http://biblemeowimkh3utujmhm6oh2oeb3ubjw2lpgeq3lahrfr2l6ev6zgyd.onion/,✔️,General guide on how to navigate the Darknet to buy drugs,✔️,100.0
@@ -1,2 +1,5 @@
 Name,URL,Description,Trusted,Status,Score
-Nowhere,http://uptime.nowherejezfoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion,"Darknet Webring Onion Participant",✔️,✔️,100.0
+Nowhere,uptime.nowherejezfoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion,First instance,,,
+Nowhere,uptime.nowherejezfoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion,First webring participant,,,
+Nowhere,uptime.nowherejezfoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion,New webring participant,,,
+Nowhere,http://uptime.nowherejezfoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion,Darknet Webring Onion Participant,✔️,✔️,100.0