mirror of
http://git.nowherejezfoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion/nihilist/darknet-lantern.git
synced 2025-05-17 04:36:57 +00:00
make it cool now
This commit is contained in:
parent
f23a6e3ac8
commit
4d4567caf0
40 changed files with 504 additions and 611 deletions
BIN
scripts/old/tests/banner.png
Normal file
BIN
scripts/old/tests/banner.png
Normal file
Binary file not shown.
After Width: | Height: | Size: 22 KiB |
7
scripts/old/tests/blacklist.csv
Normal file
7
scripts/old/tests/blacklist.csv
Normal file
|
@ -0,0 +1,7 @@
|
|||
blacklisted-words
|
||||
porn
|
||||
pron
|
||||
p0rn
|
||||
pr0n
|
||||
gore
|
||||
|
|
18
scripts/old/tests/checkimagesize.py
Normal file
18
scripts/old/tests/checkimagesize.py
Normal file
|
@ -0,0 +1,18 @@
|
|||
from PIL import Image
|
||||
def main(path="banner.png", size=(240, 60)):
    """Check that the image at *path* has the expected banner dimensions.

    Args:
        path: image file to inspect (defaults to the original hardcoded
            "banner.png"; the favicon variant stays available by argument).
        size: (width, height) tuple the image must match exactly.

    Returns:
        True when the image matches *size*, False otherwise.
    """
    print('[+] checking image size')
    expected_w, expected_h = size
    # Use a context manager so the file handle is released;
    # the original Image.open() handle was never closed.
    with Image.open(path) as im:
        width, height = im.size
    print('width =', width, 'height=', height)
    if width != expected_w or height != expected_h:
        print('[-] Banner doesnt have the correct size (240x60)')
        return False
    print('[+] Banner has the correct size (240x60)')
    return True


if __name__ == '__main__':
    main()
|
||||
|
8
scripts/old/tests/checkwebringinstanceexists.py
Normal file
8
scripts/old/tests/checkwebringinstanceexists.py
Normal file
|
@ -0,0 +1,8 @@
|
|||
def main():
    """Work-in-progress stub: will verify a webring instance exists.

    Currently only prints a blank line and records the location of the
    webring participants CSV; the actual check is not implemented yet.
    """
    print()
    csvpath = '/srv/darknet-onion-webring/scripts/tests/webring-participants.csv'


if __name__ == '__main__':
    main()
|
104
scripts/old/tests/csvwork.py
Normal file
104
scripts/old/tests/csvwork.py
Normal file
|
@ -0,0 +1,104 @@
|
|||
import csv, json, pandas as pd, glob
|
||||
|
||||
def main():
    """Pandas/CSV experimentation script for verified.csv.

    Demonstrates: listing rows and row IDs, iterating rows and columns,
    filtering rows by a keyword, and prepending a new row before rewriting
    the file.  Reads and writes verified.csv in the current directory.
    """
    #print("aaa")
    csvfile="verified.csv"
    df = pd.read_csv(csvfile)

    ##############################################################################
    print('\n[+] list the entire csv file and get all row IDs')
    print(df[['Name', 'URL']])
    index=-1
    # Loop until a valid row index is chosen (interactive input() is
    # commented out; hardcoded to row 4 for this test).
    while (index > df.tail(1).index.item() or index < 0):
        #index=input("\n[+] What entry do you want to edit ? (enter the row index (ex: 4)")
        index=4
        index=int(index)
    print(df.iloc[index], "last index:", df.index.stop-1)
    print("\n[+] Number of Rows:", len(df.index)-1)

    print('\n[+] Iterate over the rows by their IDs for the 2 Columns URL and Name')
    print(df.iterrows())
    for i,j in df.iterrows():
        #print("[+] ROW=",i,"\n[+] CONTENT=\n",j)
        #print("[+] ROW=",i)
        #print("[+] ROW CONTENTS= \n",df.loc[i, ['URL','Name']])
        #print("[+] ROW CONTENTS= \n",df.loc[i, ['URL']])
        print("[+] ROW=",i,"ROW CONTENTS=", df.at[i, 'Name'], df.at[i, 'URL'])
        #print(df[0][i])

    print('\n[+] Iterate over the columns by their name:')
    columns = list(df)
    print(columns)
    print('\n[+] Iterate over the columns of the first row:')
    for i in columns:
        print('\nCOLUMN=',i)
        print('CONTENTS=',df[i][0])

    #print('[+] list the csv file by filtering a keyword and get all row IDs')
    #filterterm=input("[+] Filter the CSV file using a keyword (ex: DNM)")
    filterterm="Psy"
    filter_df = df[df.Name.str.contains(filterterm)]
    #print(filtered_df) # print all columns
    print(filter_df[['Name','URL']]) #print only 2 columns
    #print("\n[+] Number of Rows:", len(filter_df.index))
    #for index in filter_df.index:
        #print(index)
    index=-1
    # NOTE(review): loops forever if row 12 is not in the filtered index —
    # confirm the test data always keeps a "Psy" row at index 12.
    while (index not in filter_df.index):
        #index=int(input("\n[+] Please select a valid row: "))
        index=int("12")
    print("ROW=",index, 'CONTENT=', filter_df.at[index, 'Name'], filter_df.at[index, 'URL'])

    print("\n[+] Adding a new row:")
    # ask for the following:
    #unverifiedpath=instancepath+'/unverified.csv'
    instance='uptime.nowherejezfoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion'
    unverifiedpath='verified.csv'
    # the name of the website (required) + check if its valid
    # if the website name is "exit" then get out of the while loop
    #entry_name = input("What is the Website name ?")
    name="NewWebsite"
    category="TestCategory"
    # the url of the website (required) + check if its valid
    #entry_url = input("What is URL of the Website ? (ex: https://torproject.org or http://2gzyxa5ihm7nsggfxnu52rck2vv4rvmdlkiu3zzui5du4xyclen53wid.onion)")
    url="http://newwebsitewoidwajiawdhjoidwahjoadiwhj.onion"
    # a quick description (optional) + check if its valid
    #entry_desc = input("(Optional) Description of the website ? (max 256 characters) (press enter to skip)")
    desc="This is a new website that we add, it has this description"
    # sensitive ? (y/n) + check if its valid
    #entry_sensi = input("is it a sensitive website ? (ex: website related to drugs) (y/n)")
    sensi = "n"
    # Column order matches the CSV header:
    # Instance,Category,Name,URL,Sensitive,Description,Status,Score
    newrow=[instance,category,name,url,sensi,desc,'','']
    print("[+] NEWROW=",newrow)

    #add a new row (get all the new data you need first):
    # Insert at index -1 then shift all indexes up by one and re-sort,
    # which effectively prepends the new row at index 0.
    df.loc[-1] = newrow # adding a row
    df.index = df.index + 1 # shifting index
    df = df.sort_index() # sorting by index
    print("[+] New row added! now writing the csv file:")
    df.to_csv(csvfile, index=False)

    print(df)

    print()

    ###########
    # list every word in the blacklist wordlist csv file
    # for each word, check if it matches with any of the rows in unverified.csv
    # if it matches (on any column!), remove that row and write to the csv file
    # list every word in the sensitive wordlist csv file
    # if it matches (on any column!), mark the sensitive column as V


if __name__ == '__main__':
    main()
|
255
scripts/old/tests/csvwork2.py
Normal file
255
scripts/old/tests/csvwork2.py
Normal file
|
@ -0,0 +1,255 @@
|
|||
import csv, json, pandas as pd, glob, re
|
||||
|
||||
def main():
    """Sanity-check unverified.csv against wordlist CSVs and rewrite it.

    Reads verified.csv, unverified.csv, blacklist.csv and sensitive.csv from
    the current directory.  Marks unverified rows for deletion when any field
    fails validation or matches a blacklisted word, flags rows matching a
    sensitive word, then drops the marked rows and rewrites unverified.csv.

    Returns True on completion.
    """
    #print("aaa")
    # Load the four CSV files this test script operates on.
    vcsvfile="verified.csv"
    vdf = pd.read_csv(vcsvfile)
    unverifiedcsvfile="unverified.csv"
    uvdf = pd.read_csv(unverifiedcsvfile)
    blcsvfile="blacklist.csv"
    bldf = pd.read_csv(blcsvfile)
    secsvfile="sensitive.csv"
    sedf = pd.read_csv(secsvfile)

    ###########
    # list every word in the blacklist wordlist csv file
    # for each word, check if it matches with any of the rows in unverified.csv
    # if it matches (on any column!), remove that row and write to the csv file

    ########### SANITY CHECKS ON UNVERIFIED.CSV ##################
    #print(bldf[['blacklisted-words']])
    # NOTE(review): the iterrows() result below is discarded — looks like
    # leftover scratch code; confirm it can be removed.
    bldf[['blacklisted-words']].iterrows()
    rows2delete= [] # it is an empty list at first
    for i,j in uvdf.iterrows():
        #print("[+] Unverified.csv ROW=",i, uvdf.at[i, 'Instance'], uvdf.at[i, 'Category'], uvdf.at[i, 'Name'], uvdf.at[i, 'URL'], uvdf.at[i, 'Description'])
        #print("[+] Unverified.csv ROW=",i, uvdf.iloc[[i]])
        #row=uvdf.iloc[[i]] #it displays the index
        # Plain-list view of the row, used for substring matching below.
        row=uvdf.loc[i,:].values.tolist()
        #print(i,row)

        ### SANITY CHECK 1: Mark all the rows that have incorrect formatting for deletion###
        #print("[+] ROW=",i,"ROW CONTENTS=", IsUrlValid(uvdf.at[i, 'Instance']), IsCategoryValid(uvdf.at[i, 'Category']), IsNameValid(uvdf.at[i, 'Name']), IsUrlValid(uvdf.at[i, 'URL']), IsStatusValid(uvdf.at[i, 'Sensitive']), IsDescriptionValid(uvdf.at[i, 'Description']), IsStatusValid(uvdf.at[i, 'Status']), IsScoreValid(uvdf.at[i, 'Score']))
        if IsUrlValid(uvdf.at[i, 'Instance']) is False or IsCategoryValid(uvdf.at[i, 'Category']) is False or IsNameValid(uvdf.at[i, 'Name']) is False or IsUrlValid(uvdf.at[i, 'URL']) is False or IsStatusValid(uvdf.at[i, 'Sensitive']) is False or IsDescriptionValid(uvdf.at[i, 'Description']) is False or IsStatusValid(uvdf.at[i, 'Status']) is False or IsScoreValid(uvdf.at[i, 'Score']) is False:
            #mark the row for deletion as it has invalid inputs
            if i not in rows2delete:
                print("Marking row", i,"for deletion, as it has invalid inputs")
                rows2delete.append(i) #mark the row for deletion if not already done

        ### SANITY CHECK 2: Mark all rows that are not allowed (blacklist) for deletion ###
        for k,l in bldf.iterrows():
            #print("[+] Blacklisted word=",k, bldf.at[k, 'blacklisted-words'])
            blword=bldf.at[k, 'blacklisted-words']
            # Substring match against every cell of the row.
            if any(blword in str(x) for x in row) == True:
                #print("found blacklisted word! marking row for deletion")
                if i not in rows2delete:
                    print("Marking row", i,"for deletion, as it matches with a blacklisted word")
                    rows2delete.append(i) #mark the row for deletion if not already done
        ### SANITY CHECK 3: Mark all the rows that are supposed to be sensitive ###
        for k,l in sedf.iterrows():
            #print("[+] Sensitive word=",k, sedf.at[k, 'sensitive-words'])
            seword=sedf.at[k, 'sensitive-words']
            if any(seword in str(x) for x in row) == True:
                if uvdf.at[i, 'Sensitive'] != '✔️':
                    print("Marking row", i,"as sensitive, as it matches with a sensitive word")
                    uvdf.at[i, 'Sensitive']='✔️'

    print('[-] Rows to delete: ',rows2delete)

    # Drop the marked rows and persist the cleaned file.
    # NOTE(review): to_csv runs once per deleted row; hoisting it out of the
    # loop would write the file only once — confirm before changing.
    for i in rows2delete:
        row=uvdf.loc[i,:].values.tolist()
        print('[+] REMOVING ROW :',i,row)
        uvdf.drop(i, inplace= True)
        uvdf.to_csv(unverifiedcsvfile, index=False)
    ##############################################

    # list every word in the sensitive wordlist csv file
    # if it matches (on any column!), mark the sensitive column as V

    #############################################################################
    return True
|
||||
##############################################################################
|
||||
|
||||
|
||||
#### PROTECTIONS AGAINST MALICIOUS CSV INPUTS ####
|
||||
|
||||
def IsOnionValid(url: str) -> bool:
    """Check that *url* is a valid onion domain; return True/False.

    Accepts a bare domain ("xyz.onion"), an http:// URL, and at most one
    subdomain level ("sub.xyz.onion").  The domain must contain only
    [A-Za-z0-9.] and be at least 62 characters long (a v3 onion address is
    56 characters plus ".onion").
    """
    try:
        # Raw string avoids the invalid-escape warning the original "\."
        # produced inside a normal string literal.
        pattern = re.compile(r"^[A-Za-z0-9.]+(\.onion)?$")
        url = url.strip().removesuffix('/')
        # Strip the scheme, if any, so we validate the bare domain.
        # (Non-http:// inputs are validated as-is, matching the original.)
        if url.startswith('http://'):
            domain = url.split('/')[2]
        else:
            domain = url
        if pattern.fullmatch(domain) is None:
            # Domain contains an invalid character.
            return False
        if len(domain.split('.')) > 3:
            # More than one subdomain level; only "subdomain.url.onion" allowed.
            return False
        if len(domain) < 62:
            # Shorter than a v3 onion address (56 chars + ".onion").
            return False
        return True
    except Exception as e:
        print(f"Error: {e}")
        # Bug fix: the original fell through here and returned None, which
        # made callers' `... is False` checks treat the input as valid.
        return False
|
||||
|
||||
|
||||
|
||||
def IsUrlValid(url: str) -> bool:
    """Check that *url* is a plausible URL, darknet or clearnet.

    Onion domains are delegated to IsOnionValid(); anything else must
    contain at least one dot and only characters from [A-Za-z0-9:/.].
    """
    allowed = re.compile("^[A-Za-z0-9:/.]+$")
    if url.endswith('.onion'):
        return IsOnionValid(url)
    # Clearnet: require at least one dot in the host part.
    if '.' not in url:
        return False
    # Reject anything outside the allowed character set.
    if allowed.fullmatch(url) is None:
        return False
    return True
|
||||
|
||||
def IsStatusValid(status: str) -> bool:
    """Check that *status* is one of: y, n, ✔️, ❌, '' or 'nan'.

    Non-string inputs (e.g. pandas NaN) are stringified first.
    Returns True when valid, False otherwise.
    """
    allowed = ['y', 'n', '✔️', '❌', '', 'nan']
    status = str(status)
    # Bug fix: str.strip() returns a new string; the original discarded
    # the result, so surrounding whitespace was never removed.
    status = status.strip()
    if len(status) > 4:
        # Longer than any allowed marker (the ✔️/❌ emoji are 2 chars each).
        return False
    if status not in allowed:
        return False
    return True
|
||||
|
||||
def IsScoreValid(score: str) -> bool:
    """Check that *score* matches ^[0-9.,]+$ and is at most 8 chars.

    Non-string inputs (e.g. pandas floats) are stringified first.
    Returns True when valid, False otherwise.
    """
    pattern = re.compile(r"^[0-9.,]+$")
    score = str(score)
    # Bug fix: the original called score.strip() but discarded the result,
    # so whitespace around the value was never removed.
    score = score.strip()
    if pattern.fullmatch(score) is None:
        # NOTE(review): the original comments say an empty score "is fine",
        # but '' (and NaN -> 'nan') fail the pattern and return False here —
        # confirm which behavior is intended.
        return False
    if len(score) > 8:
        return False
    return True
|
||||
|
||||
|
||||
def IsDescriptionValid(desc: str) -> bool:
    """Check that *desc* contains only [A-Za-z0-9-.,' ] with 256 chars max.

    The placeholder value "DEFAULT" is rejected.  An empty string also
    fails the pattern and is rejected.
    """
    pattern = re.compile(r"^[A-Za-z0-9-.,' ]+$")
    # Bug fix: the original called desc.strip() but discarded the result,
    # so leading/trailing whitespace was never removed (and " DEFAULT "
    # slipped past the placeholder check below).
    desc = desc.strip()
    if pattern.fullmatch(desc) is None:
        # Empty or contains a character outside the allowed set.
        return False
    if desc == "DEFAULT":
        # Placeholder description, not acceptable.
        return False
    if len(desc) > 256:
        return False
    return True
|
||||
|
||||
def IsCategoryValid(categories) -> bool:
    """Check that every category contains only [A-Za-z0-9 ], 64 chars max.

    Accepts a single category string or a list of category strings
    (callers in this file pass a plain string; the original iterated its
    characters, effectively validating only the first character).
    Returns False for an empty string or empty list.
    """
    pattern = re.compile(r"^[A-Za-z0-9 ]+$")
    # Backward-compatible generalization: wrap a bare string so it is
    # validated as one category rather than character by character.
    if isinstance(categories, str):
        categories = [categories]
    if not categories:
        # Bug fix: the original fell off the loop and returned None here.
        return False
    for category in categories:
        # Bug fix: strip() result was discarded in the original.
        category = category.strip()
        if pattern.fullmatch(category) is None:
            # Empty entry or invalid character.
            return False
        if len(category) > 64:
            return False
    # Bug fix: the original returned True inside the loop after checking
    # only the first element; now every category must pass.
    return True
|
||||
|
||||
def IsNameValid(name: str) -> bool:
    """Return True when *name* (after stripping) is 1-64 characters of
    letters, digits and spaces; False otherwise."""
    allowed = re.compile("^[A-Za-z0-9 ]+$")
    stripped = name.strip()
    # Empty string or any character outside [A-Za-z0-9 ] fails the match.
    if allowed.fullmatch(stripped) is None:
        return False
    if len(stripped) > 64:
        return False
    return True
|
||||
|
||||
|
||||
|
||||
# Script entry point: run the sanity checks when executed directly.
if __name__ == '__main__':
    main()
|
BIN
scripts/old/tests/favicon.png
Executable file
BIN
scripts/old/tests/favicon.png
Executable file
Binary file not shown.
After Width: | Height: | Size: 1.9 KiB |
8
scripts/old/tests/sensitive.csv
Normal file
8
scripts/old/tests/sensitive.csv
Normal file
|
@ -0,0 +1,8 @@
|
|||
sensitive-words
|
||||
Market
|
||||
market
|
||||
drug
|
||||
drugs
|
||||
Drugz
|
||||
Search
|
||||
Engine
|
|
18
scripts/old/tests/unverified.csv
Normal file
18
scripts/old/tests/unverified.csv
Normal file
|
@ -0,0 +1,18 @@
|
|||
Instance,Category,Name,URL,Sensitive,Description,Status,Score
|
||||
uptime.nowherejezfoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion,Infos and Links,Tor Taxi,http://tortaxi2dev6xjwbaydqzla77rrnth7yn2oqzjfmiuwn5h6vsk2a4syd.onion/,,List of links to go to popular darknet places,✔️,100.0
|
||||
uptime.nowherejezfoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion,Infos and Links,Tor Taxi,http://tortaxi2dev6xjwbaydqzla77rrnth7yn2oqzjfmiuwn5h6vsk2a4syd.onion/,,List of links to go to popular darknet places,✔️,100.0
|
||||
uptime.nowherejezfoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion,Infos and Links,Tor Taxi,http://tortaxi2dev6xjwbaydqzla77rrnth7yn2oqzjfmiuwn5h6vsk2a4syd.onion/,,List of links to go to popular darknet places,✔️,100.0
|
||||
uptime.nowherejezfoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion,Infos and Links,Tor Taxi,http://tortaxi2dev6xjwbaydqzla77rrnth7yn2oqzjfmiuwn5h6vsk2a4syd.onion/,❌,List of links to go to popular darknet places,,100.0
|
||||
uptime.nowherejezfoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion,Infos and Links,Tor Taxi,http://tortaxi2dev6xjwbaydqzla77rrnth7yn2oqzjfmiuwn5h6vsk2a4syd.onion/,✔️,List of links to go to popular darknet places,,100.0
|
||||
uptime.nowherejezfoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion,Infos and Links,Tor Taxi,http://tortaxi2dev6xjwbaydqzla77rrnth7yn2oqzjfmiuwn5h6vsk2a4syd.onion/,✔️,List of links to go to popular darknet places,,100.0
|
||||
uptime.nowherejezfoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion,Infos and Links,Tor Taxi,http://tortaxi2dev6xjwbaydqzla77rrnth7yn2oqzjfmiuwn5h6vsk2a4syd.onion/,❌,List of links to go to popular darknet places,,100.0
|
||||
uptime.nowherejezfoltodf4jiyl6r56jnzintap5vyjlidruga7fkirfsnfizflqd.onion,Infos and Links,Tor Taxi,http://tortaxi2dev6xjwbaydqzla77rrnth7yn2oqzjfmiuwn5h6vsk2a4syd.onion/,✔️,List of links to go to popular darknet places,✔️,500.0
|
||||
uptime.nowherejezfoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion,Infos and Links,Tor Taxi,http://tortaxi2dev6xjwbaydqzla77rrnth7yn2oqzjfmiuwn5h6vsk2a4syd.onion/,✔️,List of links to go to popular darknet places,✔️,300.0
|
||||
uptime.nowherejezfoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion,Infos and Links,Tor Taxi,http://tortaxi2dev6xjwbaydqzldruga77rrnth7yn2oqzjfmiuwn5h6vsk2a4syd.onion/,✔️,List of links to go to popular darknet places,✔️,0.0
|
||||
uptime.nowherejezfoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion,Infos and Links,Tor Taxi,http://tortaxi2dev6xjwbaydqzla77rrnth7yn2oqzjfmiuwn5h6vsk2a4syd.onion/,✔️,List of links to go to popular darknet places,✔️,100.0
|
||||
uptime.nowherejezfoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion,Infos and Links,Tor Taxi,http://tortaxi2dev6xjwbaydqzla77rrnth7yn2oqzjfmiuwn5h6vsk2a4syd.onion/,❌,List of links to go to popular darknet places,✔️,100.0
|
||||
uptime.nowherejezfoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion,Infos and Links,Tor Taxi,http://tortaxi2dev6xjwbaydqzla77rrnth7yn2oqzjfmiuwn5h6vsk2a4syd.onion/,✔️,List of links to go to popular darknet places,✔️,100.0
|
||||
uptime.nowherejezfoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion,Infos and Links,Tor Taxi,http://tortaxi2dev6xjwbaydqzla77rrnth7yn2oqzjfmiuwn5h6vsk2a4syd.onion/,✔️,List of links to go to popular darknet places,✔️,100.0
|
||||
uptime.nowherejezfoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion,Infos and Links,Tor Taxi,http://tortaxi2dev6xjwbaydqzla77rrnth7yn2oqzjfmiuwn5h6vsk2a4syd.onion/,❌,List of links to go to popular darknet places,✔️,100.0
|
||||
uptime.nowherejezfoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion,Infos and Links,Tor Taxi,http://tortaxi2dev6xjwbaydqzla77rrnth7yn2oqzjfmiuwn5h6vsk2a4syd.onion/,✔️,List of links to go to populadrugr darknet places,✔️,100.0
|
||||
uptime.nowherejezfoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion,Infos and Links,Tor Taxi,http://tortaxi2dev6xjwbaydqzla77rrnth7yn2oqzjfmiuwn5h6vsk2a4syd.onion/,✔️,List of links to go to popular darknet places,✔️,100.0
|
|
15
scripts/old/tests/verified.csv
Normal file
15
scripts/old/tests/verified.csv
Normal file
|
@ -0,0 +1,15 @@
|
|||
Instance,Category,Name,URL,Sensitive,Description,Status,Score
|
||||
uptime.nowherejezfoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion,TestCategory,NewWebsite,http://newwebsitewoidwajiawdhjoidwahjoadiwhj.onion,n,"This is a new website that we add, it has this description",,
|
||||
uptime.nowherejezfoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion,TestCategory,NewWebsite,http://newwebsitewoidwajiawdhjoidwahjoadiwhj.onion,n,"This is a new website that we add, it has this description",,
|
||||
uptime.nowherejezfoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion,TestCategory,NewWebsite,http://newwebsitewoidwajiawdhjoidwahjoadiwhj.onion,n,"This is a new website that we add, it has this description",,
|
||||
uptime.nowherejezfoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion,TestCategory,NewWebsite,http://newwebsitewoidwajiawdhjoidwahjoadiwhj.onion,n,"This is a new website that we add, it has this description",,
|
||||
FIRSTherefoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion,Infos and Links,Psychonaut Wiki,http://vvedndyt433kopnhv6vejxnut54y5752vpxshjaqmj7ftwiu6quiv2ad.onion/,,"This is the wiki for psychonauts, it contains infos on substances and trip reports",✔️,100.0
|
||||
somewherefoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion,Infos and Links,1DNM Bible,http://biblemeowimkh3utujmhm6oh2oeb3ubjw2lpgeq3lahrfr2l6ev6zgyd.onion/,✔️,General guide on how to navigate the Darknet to buy drugs,✔️,100.0
|
||||
somewherefoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion,Infos and Links,2DNM Bible,http://biblemeowimkh3utujmhm6oh2oeb3ubjw2lpgeq3lahrfr2l6ev6zgyd.onion/,✔️,General guide on how to navigate the Darknet to buy drugs,✔️,100.0
|
||||
somewherefoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion,Infos and Links,3DNM Bible,http://biblemeowimkh3utujmhm6oh2oeb3ubjw2lpgeq3lahrfr2l6ev6zgyd.onion/,✔️,General guide on how to navigate the Darknet to buy drugs,✔️,100.0
|
||||
somewh3refoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion,Infos and Links,4DNM Bible,http://biblemeowimkh3utujmhm6oh2oeb3ubjw2lpgeq3lahrfr2l6ev6zgyd.onion/,✔️,General guide on how to navigate the Darknet to buy drugs,✔️,100.0
|
||||
somewherefoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion,Infos and Links,5DNM Bible,http://biblemeowimkh3utujmhm6oh2oeb3ubjw2lpgeq3lahrfr2l6ev6zgyd.onion/,✔️,General guide on how to navigate the Darknet to buy drugs,✔️,100.0
|
||||
somewherefoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion,Infos and Links,6DNM Bible,http://biblemeowimkh3utujmhm6oh2oeb3ubjw2lpgeq3lahrfr2l6ev6zgyd.onion/,✔️,General guide on how to navigate the Darknet to buy drugs,✔️,100.0
|
||||
somewherefoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion,Infos and Links,7DNM Bible,http://biblemeowimkh3utujmhm6oh2oeb3ubjw2lpgeq3lahrfr2l6ev6zgyd.onion/,✔️,General guide on how to navigate the Darknet to buy drugs,✔️,100.0
|
||||
nowherefoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion,Infos and Links,8DNM Bible,http://biblemeowimkh3utujmhm6oh2oeb3ubjw2lpgeq3lahrfr2l6ev6zgyd.onion/,✔️,General guide on how to navigate the Darknet to buy drugs,✔️,100.0
|
||||
LASTherefoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion,Infos and Links,Psychonaut Wiki,http://vvedndyt433kopnhv6vejxnut54y5752vpxshjaqmj7ftwiu6quiv2ad.onion/,,"This is the wiki for psychonauts, it contains infos on substances and trip reports",✔️,100.0
|
|
7
scripts/old/tests/webring-participants.csv
Normal file
7
scripts/old/tests/webring-participants.csv
Normal file
|
@ -0,0 +1,7 @@
|
|||
Name,URL,Description,Trusted,Status,Score
|
||||
Nowhere,uptime.nowherejezfoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion,,,,
|
||||
Nowhere,uptime.nowherejezfoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion,,,,
|
||||
Nowhere,uptime.nowherejezfoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion,First instance,,,
|
||||
Nowhere,uptime.nowherejezfoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion,First webring participant,,,
|
||||
Nowhere,uptime.nowherejezfoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion,New webring participant,,,
|
||||
Nowhere,http://uptime.nowherejezfoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion,Darknet Webring Onion Participant,✔️,✔️,100.0
|
|
14
scripts/old/tests/writefile.py
Normal file
14
scripts/old/tests/writefile.py
Normal file
|
@ -0,0 +1,14 @@
|
|||
import os, pwd
|
||||
def main():
    """Write the participant onion URL to ~/.darknet_participant_url,
    then read it back and print it.

    The home directory is resolved via the passwd entry for the current
    uid rather than $HOME.
    """
    urlpath = pwd.getpwuid(os.getuid()).pw_dir + "/.darknet_participant_url"
    url = "uptime.nowherejezaaa...onion"
    print(urlpath, ":", url)
    with open(urlpath, "w") as f:
        f.write(url)
    print("[+] file written, let's read it")
    # Bug fix: the original opened the read handle with a bare open()
    # and never closed it; use a context manager instead.
    with open(urlpath, "r") as f:
        print(f.read())


if __name__ == '__main__':
    main()
|
Loading…
Add table
Add a link
Reference in a new issue