import requests
import pandas as pd
import glob

# apt install python3-pandas python3-requests python3-socks
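
# Onion uptime checker: walks every CSV under /srv/darknet-onion-webring/www/links/,
# probes each listed .onion URL through the local Tor SOCKS proxy and keeps a
# 0-100 uptime score per link.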

def main():
    print('[+] ONION UPTIME CHECKER')
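
    # route every request through the local Tor client's SOCKS5 proxy;
    # socks5h:// makes hostname resolution happen inside Tor, which .onion addresses need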
    proxies = {
        'http': 'socks5h://127.0.0.1:9050',
        'https': 'socks5h://127.0.0.1:9050'
    }

    for csvfile in glob.glob("/srv/darknet-onion-webring/www/links/*.csv"):
        print('[+] Reading the CSV File:', csvfile)

        df = pd.read_csv(csvfile)
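        # each CSV is expected to have at least the "URL", "Status" and "Score" columns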
        print('[+] Checking if each .onion link is reachable:')
        for i in df.index:
            print("[+] Editing the uptime score")
            # if the score is empty, start it at 100
            if pd.isnull(df.at[i, "Score"]):
                df.at[i, "Score"] = 100

            print(i)
            url = df.at[i, "URL"]
            try:
                status = requests.get(url, proxies=proxies, timeout=5).status_code
                print('[+]', url, status)
                if status != 502:
                    print(url, "✔️")
                    df.at[i, "Status"] = "✔️"
                    # if uptime < 100, add 1 to the score
                    if df.at[i, "Score"] < 100:
                        df.at[i, "Score"] = df.at[i, "Score"] + 1
                else:
                    print(url, "❌")
                    df.at[i, "Status"] = "❌"
                    # if uptime > 0, subtract 1 from the score
                    if df.at[i, "Score"] > 0:
                        df.at[i, "Score"] = df.at[i, "Score"] - 1
            except (requests.ConnectionError, requests.exceptions.ReadTimeout):
                # unreachable or timed out: mark the link as down
                print(url, "❌")
                df.at[i, "Status"] = "❌"
                # if uptime > 0, subtract 1 from the score
                if df.at[i, "Score"] > 0:
                    df.at[i, "Score"] = df.at[i, "Score"] - 1

        # write the links back to the same CSV, highest score first
        df2 = df.sort_values(by=["Score"], ascending=False)
        df2.to_csv(csvfile, index=False)


if __name__ == '__main__':
    main()