v1.0.1 release candidate 1

root committed 2025-02-15 21:50:47 +01:00
parent 24bb960cd3
commit e66a37ae87
11 changed files with 917 additions and 997 deletions


@@ -1,4 +1,4 @@
import os,pwd,re
import os,re,pwd
import csv
import requests
import json
@@ -13,6 +13,7 @@ def main():
# TODO get the instance name and exit if its not there
rootpath='/srv/darknet-lantern/'
urlpath=pwd.getpwuid(os.getuid()).pw_dir+"/.darknet_participant_url"
#print(urlpath)
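As a side note, a small sketch of what the pwd lookup above evaluates to (the filename comes from the hunk; everything else is illustrative):

import os, pwd

home = pwd.getpwuid(os.getuid()).pw_dir          # home directory of the current user
urlpath = home + "/.darknet_participant_url"     # same file the code above reads
# os.path.expanduser("~") resolves to the same directory in typical setups
print(urlpath)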
@@ -24,7 +25,6 @@ def main():
with open(urlpath) as f:
instance = f.read().rstrip()
# check if the instance URL domain is valid
#print(urlpath,instance)
if IsOnionValid(instance):
print("[+] Instance Name:",instance,IsOnionValid(instance))
isitvalid="y"
@@ -35,7 +35,6 @@ def main():
print("[-] Instance path doesn't exist yet, run darknet_exploration.py to set it up" )
return False
#i=input("continue?")
proxies = {
'http': 'socks5h://127.0.0.1:9050',
'https': 'socks5h://127.0.0.1:9050'
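For context, a minimal sketch of how these SOCKS settings are typically used with requests (assumes a local Tor daemon on 127.0.0.1:9050 and the requests[socks] extra installed; the URL and the 15-second timeout are placeholders, not values from this commit):

import requests

proxies = {
    'http': 'socks5h://127.0.0.1:9050',    # socks5h: hostname resolution happens inside Tor
    'https': 'socks5h://127.0.0.1:9050',
}

url = "http://" + "a" * 56 + ".onion"      # placeholder v3 address, illustration only
try:
    status = requests.get(url, proxies=proxies, timeout=15).status_code
    print(url, status)
except requests.RequestException as e:
    print(url, "unreachable:", e)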
@@ -72,38 +71,37 @@ def main():
print('[+]',url,status)
if status != 502:
print(url,"✔️")
df.at[i,"Status"]="✔️"
df.at[i,"Status"]="YES"
#if uptime <100 do +1 to the value
if df.at[i,"Score"] < 100:
df.at[i,"Score"] = df.at[i,"Score"] + 1
else:
print(url,"")
df.at[i,"Status"]=""
df.at[i,"Status"]="NO"
#if uptime >0 do -1 to the value
if df.at[i,"Score"] > 0:
df.at[i,"Score"] = df.at[i,"Score"] - 1
except requests.ConnectionError as e:
#print(e)
print(url,"")
df.at[i,"Status"]=""
df.at[i,"Status"]="NO"
#if uptime >0 do -1 to the value
if df.at[i,"Score"] > 0:
df.at[i,"Score"] = df.at[i,"Score"] - 1
except requests.exceptions.ReadTimeout as e:
#print(e)
print(url,"")
df.at[i,"Status"]=""
df.at[i,"Status"]="NO"
#if uptime >0 do -1 to the value
if df.at[i,"Score"] > 0:
df.at[i,"Score"] = df.at[i,"Score"] - 1
df2 = df.sort_values(by=["Score"], ascending=False)
#sort by category if you are verified/unverified.csv
if csvfilename in csvfiles2sortcat:
df2 = df.sort_values(by=["Category","Name"], ascending=[True,True])
else:
df2 = df.sort_values(by="Score", ascending=False)
df2 = df.sort_values(by=["Category"], ascending=True)
#print(df2)
df2.to_csv(csvfile, index=False)
#print(df2)
def IsUrlValid(url:str)->bool:
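For readability, a hedged sketch of the scoring rule the uptime loop above applies (update_score is a hypothetical name, not a function in this file): a reachable URL gains one point up to a cap of 100, an unreachable one loses a point down to a floor of 0.

def update_score(score: int, reachable: bool) -> int:
    # hypothetical helper that only restates the rule from the hunk above
    if reachable:                       # i.e. the check returned a status other than 502
        return score + 1 if score < 100 else score
    return score - 1 if score > 0 else score

# e.g. update_score(99, True) -> 100 ; update_score(0, False) -> 0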
@@ -167,19 +165,14 @@ def IsOnionValid(url: str)-> bool:
if len(url.split('.')) > 3:
n_subdomians = len(url.split('.'))
# Checks if there is more than 1 subdomain. "subdomain.url.onion" only
#print(f"This domain have more than one subdomain. There are {n_subdomians - 1} subdomains")
return False
else:
if len(url) < 62:
#print("Domain length is less than 62.")
return False
return True
elif pattern.fullmatch(url) is None:
#print("Domain contains invalid character.")
#print(url)
return False
else:
#print("Domain not valid")
return False
except Exception as e:
print(f"Error: {e}")