Mirror of http://git.nowherejezfoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion/nihilist/darknet-lantern.git, synced 2025-07-01 18:56:40 +00:00.
starting work on option 10
commit 1b67f7a218 (parent c4ebef10a4)
3 changed files with 33 additions and 21 deletions
@@ -152,7 +152,7 @@ def is_participant_reachable(instance):
         status = requests.get(f'{url}{file_name}',proxies=conf.PROXIES, timeout=10).status_code
         if status != 200:
             return False
-    except Exception:
+    except Exception as err:
         return False
 
     return True
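This is the change the commit repeats across nearly every hunk below: the bare except Exception: handlers now bind the exception as err. The diff itself only captures the name so far; a minimal standalone sketch of where that pattern usually leads (the print-style error report is an assumption, standing in for the repo's print_colors helper):

import requests

def is_reachable(url: str, timeout: int = 10) -> bool:
    try:
        status = requests.get(url, timeout=timeout).status_code
        return status == 200
    except Exception as err:
        # With the exception bound to a name, the handler can report
        # what actually failed instead of swallowing it silently.
        print(f'[-] {url} unreachable: {err}')  # stand-in for print_colors(..., is_error=True)
        return False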
@@ -288,7 +288,7 @@ def is_row_valid(row):
             IsScoreValid(row['Score'])
         )
 
-    except Exception:
+    except Exception as err:
         return False
 
 ###################### General ######################
@@ -316,7 +316,7 @@ def merge_verification_df(receiving_df, merging_df):
         else:
             return pd.concat([receiving_df, filtered_df], ignore_index=True)
 
-    except Exception:
+    except Exception as err:
         return receiving_df
 
 def remove_duplications(df):
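For context on the merge path above: pd.concat([receiving_df, filtered_df], ignore_index=True) appends the filtered rows onto the receiving frame and renumbers the index from 0. A toy illustration, with column names assumed from the surrounding hunks:

import pandas as pd

receiving_df = pd.DataFrame({'Name': ['a'], 'URL': ['http://a.onion']})
filtered_df = pd.DataFrame({'Name': ['b'], 'URL': ['http://b.onion']})

# ignore_index=True discards the source row labels and yields 0..n-1,
# so the merged frame has one clean, contiguous index.
merged = pd.concat([receiving_df, filtered_df], ignore_index=True)
print(merged)
#   Name             URL
# 0    a  http://a.onion
# 1    b  http://b.onion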
@@ -333,7 +333,7 @@ def remove_duplications(df):
         df = df.drop_duplicates(subset='Name')
         df = df.drop_duplicates(subset='URL')
 
-    except Exception:
+    except Exception as err:
         print_colors('[-] Removing duplication failed',is_error=True)
 
     return df
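The two successive drop_duplicates calls mean a row is dropped if either its Name or its URL repeats an earlier row, keeping the first occurrence in each case. A quick sketch with made-up rows:

import pandas as pd

df = pd.DataFrame({
    'Name': ['site1', 'site1', 'site2'],
    'URL':  ['http://x.onion', 'http://y.onion', 'http://y.onion'],
})

# Each call keeps the first row for a given key and drops later repeats.
df = df.drop_duplicates(subset='Name')  # drops the second 'site1' row
df = df.drop_duplicates(subset='URL')   # no URL repeats remain after the Name pass
print(df)  # rows: (site1, x.onion) and (site2, y.onion)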
@@ -344,12 +344,14 @@ def remove_cross_dataframe_replications(main_df, sub_df):
         main_df = remove_duplications(main_df)
         sub_df = remove_duplications(sub_df)
 
-        mask = sub_df['URL'].isin(main_fd['URL']) | df_a['Name'].isin(df_b['Name'])
+        mask = sub_df['URL'].isin(main_df['URL']) | sub_df['Name'].isin(main_df['Name'])
 
         sub_df = sub_df[~mask]
 
-    except:
+    except Exception as err:
         print_colors('[-] Removing cross dataframe duplications failed',is_error=True)
+        raise err #REMOVE!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
+
 
     return main_df, sub_df
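The mask rewrite in this hunk is a real bug fix, not just a rename: the old line referenced main_fd, df_a, and df_b, none of which exist in this function, so it raised NameError on every call and the bare except: silently ate it (the temporary raise err, flagged for removal by its own comment, is what surfaces such failures during development). A small sketch of what the corrected mask does:

import pandas as pd

main_df = pd.DataFrame({'Name': ['a'], 'URL': ['http://a.onion']})
sub_df = pd.DataFrame({'Name': ['a', 'b'], 'URL': ['http://a.onion', 'http://b.onion']})

# True for each sub_df row whose URL or Name already appears in main_df.
mask = sub_df['URL'].isin(main_df['URL']) | sub_df['Name'].isin(main_df['Name'])

# Invert the mask to keep only the rows main_df does not already have.
sub_df = sub_df[~mask]
print(sub_df)  # only the 'b' row remains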
@@ -376,7 +378,7 @@ def save_local_verified_and_unverified(verified_df, unverified_df):
 
         return True
 
-    except Exception:
+    except Exception as err:
         print_colors('[-] Saving verified and unverified failed',is_error=True)
         return False
@@ -435,7 +437,7 @@ def get_participant_local_verified_and_unverified(participant):
 
         return verified_df, unverified_df
 
-    except Exception:
+    except Exception as err:
         print_colors('[-] Failed reading the verified and unverified files',is_error=True)
 
         return pd.DataFrame(), pd.DataFrame()
@@ -454,7 +456,7 @@ def get_official_participants():
         with open(conf.OFFICIAL_PARTICIPANTS_FILE, 'r') as file:
             return [line.strip() for line in file if current_instance not in line]
 
-    except Exception:
+    except Exception as err:
         print_colors('[-] Couldn\'t read official webring participants file',is_error=True )
 
 def get_local_blacklist_and_sensitive():
@@ -485,7 +487,7 @@ def get_local_blacklist_and_sensitive():
         return blacklist, sensitive_list
 
 
-    except Exception:
+    except Exception as err:
         print_colors('[-] Failed reading the blacklist and sensitive words file',is_error=True)
 
         return [], []
@@ -515,7 +517,7 @@ def get_local_verified_and_unverified():
 
         return verified_df, unverified_df
 
-    except Exception:
+    except Exception as err:
         print_colors('[-] Failed reading the verified and unverified files',is_error=True)
 
         return pd.DataFrame(), pd.DataFrame()
@@ -542,7 +544,7 @@ def get_local_webring_participants():
 
         return webring_df
 
-    except Exception:
+    except Exception as err:
         print_colors(f'[-] failed reading webring participants file',is_error=True )
         return pd.DataFrame()