mirror of
http://git.nowherejezfoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion/nihilist/darknet-lantern.git
synced 2025-07-01 18:56:40 +00:00
176 lines
7.3 KiB
Python
from dotenv import load_dotenv
|
|
|
|
|
|
import os,re,pwd
|
|
import csv
|
|
import random
|
|
import requests
|
|
import json
|
|
import pandas as pd
|
|
import glob
|
|
from utils import RecognizeURLType, IsOnionLinkValid, send_server_checks
|
|
|
|
|
|
|
|
|
|
# Resolve paths relative to this script so it works regardless of CWD.
script_abs_path = os.path.dirname(os.path.abspath(__file__))
# Proper two-argument join (the old code pre-concatenated with "/" which
# defeats os.path.join and is not portable).
env_path = os.path.join(script_abs_path, ".env")
default_env_path = os.path.join(script_abs_path, ".env.sample")

# Prefer the user-provided .env; fall back to the shipped sample defaults.
if os.path.exists(env_path):
    load_dotenv(dotenv_path=env_path)
else:
    load_dotenv(dotenv_path=default_env_path)

# Tor SOCKS proxy endpoint used to reach .onion services.
# NOTE(review): assumes TOR_HOST already carries the scheme
# (e.g. "socks5h://127.0.0.1") — confirm against the .env sample.
tor_host = os.getenv("TOR_HOST")
tor_port = os.getenv("TOR_PORT")

#apt install python3-pandas python3-requests python3-socks
|
def _get_instance():
    """Read and validate the instance onion URL from ~/.darknet_participant_url.

    Returns the instance hostname string, or None when the file is missing
    or holds an invalid onion link (an error message is printed either way).
    """
    urlpath = pwd.getpwuid(os.getuid()).pw_dir + "/.darknet_participant_url"
    if not os.path.isfile(urlpath):
        print("[-] Instance path doesn't exist yet, run darknet_exploration.py to set it up" )
        return None
    with open(urlpath) as f:
        instance = f.read().rstrip()
    # Validate once and reuse the result (was previously called twice).
    validity = IsOnionLinkValid(instance)
    if not validity:
        print('[-] Invalid instance name in ~/.darknet_participant_url:', instance)
        return None
    print("[+] Instance Name:", instance, validity)
    return instance


def _mark_up(df, i, url):
    """Record a successful check: Status=YES, Score +1 capped at 100."""
    print(url, "✔️")
    df.at[i, "Status"] = "YES"
    if df.at[i, "Score"] < 100:
        df.at[i, "Score"] = df.at[i, "Score"] + 1


def _mark_down(df, i, url):
    """Record a failed check: Status=NO, Score -1 floored at 0."""
    print(url, "❌")
    df.at[i, "Status"] = "NO"
    if df.at[i, "Score"] > 0:
        df.at[i, "Score"] = df.at[i, "Score"] - 1


def _check_row(df, i, proxies):
    """Probe the URL in row i and update its Status/Score columns in place.

    SimpleX (smp/xftp) links go through send_server_checks(); anything else
    is fetched over HTTP via the Tor proxy.
    """
    url = df.at[i, "URL"]
    try:
        urltype = RecognizeURLType(url)
        if urltype in ('smp', 'xftp'):
            # Both SimpleX server kinds use the exact same probe (the old
            # code duplicated this branch verbatim for 'smp' and 'xftp').
            resp, resp_type, failed_response = send_server_checks(url)
            # A chat error / contact summary can be transient; retry once.
            if resp_type in ["chatError", "contactSubSummary"]:
                resp, resp_type, failed_response = send_server_checks(url)
            if failed_response is None:
                _mark_up(df, i, url)
            else:
                _mark_down(df, i, url)
        else:
            # Plain web link: default to http:// when no scheme is present.
            if url.find("http://") == -1 and url.find("https://") == -1:
                url = "http://" + url
            status = requests.get(url, proxies=proxies, timeout=5).status_code
            print('[+]', url, status)
            # 502 is what the proxy returns for an unreachable onion; any
            # other status (even 4xx/5xx from the site) counts as "up".
            if status != 502:
                _mark_up(df, i, url)
            else:
                _mark_down(df, i, url)
    except (requests.ConnectionError, requests.exceptions.ReadTimeout):
        _mark_down(df, i, url)
    except ConnectionRefusedError:
        # NOTE(review): original behavior kept — a refused connection is
        # treated as the service being UP; confirm this is intentional.
        _mark_up(df, i, url)


def main():
    """Check every listed .onion link and persist updated uptime scores.

    Reads the instance's verified/unverified/webring CSVs, probes each URL
    through Tor, adjusts the per-row Score (0-100), and writes the CSVs
    back sorted. Returns False when the instance is not configured.
    """
    print('[+] ONION UPTIME CHECKER')

    # TODO get the instance name and exit if its not there
    rootpath = '/srv/darknet-lantern/'
    instance = _get_instance()
    if instance is None:
        return False

    proxies = {
        'http': f'{tor_host}:{tor_port}',
        'https': f'{tor_host}:{tor_port}'
    }

    instancepath = rootpath + 'www/participants/' + instance + '/'
    csvfiles2check = ['verified.csv', 'unverified.csv', 'webring-participants.csv']
    csvfiles2sortcat = ['verified.csv', 'unverified.csv']

    for csvfilename in csvfiles2check:
        csvfile = instancepath + csvfilename
        print('[+] Reading the CSV File:', csvfile)

        df = pd.read_csv(csvfile)
        print(df[['Name', 'URL']])
        print('[+] Checking if each .onion link is reachable:')
        for i in df.index:
            print("[+] Editing the uptime score")
            # New rows have no Score yet; start them at the maximum.
            if pd.isnull(df.at[i, "Score"]):
                df.at[i, "Score"] = 100
            print(i)
            _check_row(df, i, proxies)

        # verified/unverified are grouped by Category; other files are
        # ranked by Score (the old code computed the Score sort and then
        # threw it away for the Category files).
        if csvfilename in csvfiles2sortcat:
            df2 = df.sort_values(by=["Category"], ascending=True)
        else:
            df2 = df.sort_values(by=["Score"], ascending=False)
        df2.to_csv(csvfile, index=False)
|
# Script entry point: run the uptime checker only when executed directly.
if __name__ == "__main__":
    main()