mirror of
http://git.nowherejezfoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion/nihilist/darknet-lantern.git
synced 2025-05-17 04:36:57 +00:00
Replace hard-coded default parameters with paths derived from the current lantern instance
This commit is contained in:
parent
6cc297fe66
commit
c041e5df19
1 changed files with 16 additions and 1 deletions
|
@ -4,11 +4,24 @@ import requests
|
||||||
import urllib.parse
|
import urllib.parse
|
||||||
import argparse
|
import argparse
|
||||||
import os
|
import os
|
||||||
|
import pwd
|
||||||
import re
|
import re
|
||||||
from utils import print_colors, IsUrlValid
|
from utils import print_colors, IsUrlValid
|
||||||
|
|
||||||
from dotenv import load_dotenv
|
from dotenv import load_dotenv
|
||||||
|
|
||||||
# Make default parameters for arguments: resolve the current lantern
# instance and derive the per-instance CSV file paths used as CLI defaults.
rootpath = '/srv/darknet-lantern/'
# Per-user marker file holding the instance identifier; read via pwd so the
# lookup follows the real uid's home directory rather than $HOME.
urlpath = pwd.getpwuid(os.getuid()).pw_dir + "/.darknet_participant_url"

# Empty instance (no marker file) still yields usable default paths below.
instance = ""
if os.path.isfile(urlpath):
    # Explicit encoding so the read does not depend on the locale codec.
    with open(urlpath, encoding="utf-8") as f:
        instance = f.read().rstrip()

instancepath = rootpath + 'www/participants/' + instance
verifiedcsvfile = instancepath + '/verified.csv'
blcsvfile = instancepath + '/blacklist.csv'
||||||
# Command-line interface for the crawler.
parser = argparse.ArgumentParser(
    prog='Lantern crawler',
    description='Crawls .onion sites for links to more .onion sites')
parser.add_argument('-o', '--output',
    help='Output CSV file for found .onion links', type=str, default='onion_crawler.csv')
parser.add_argument('-c', '--crawler-file',
    help='Crawler CSV file to log .onion sites and the amount crawled', type=str, default='crawler.csv')
parser.add_argument('-b', '--blacklist-file', help='Blacklist CSV files to filter out sites with forbidden words in them',
    type=str, default=blcsvfile)
# BUG FIX: a positional argument only honors `default` when nargs='?';
# without it argparse treats 'verified_csv' as mandatory and the
# instance-derived default is silently ignored.
parser.add_argument('verified_csv', help='Input file to read for .onion links to crawl', type=str,
    nargs='?', default=verifiedcsvfile)
args = parser.parse_args()
# Absolute directory containing this script, for resolving relative paths.
script_abs_path = os.path.dirname(os.path.abspath(__file__))
Loading…
Add table
Add a link
Reference in a new issue