mirror of
http://git.nowherejezfoltodf4jiyl6r56jnzintap5vyjlia7fkirfsnfizflqd.onion/nihilist/darknet-lantern.git
synced 2025-05-16 20:26:58 +00:00
add dotenv
This commit is contained in:
parent
08e2b5ab20
commit
6cc297fe66
1 changed files with 13 additions and 2 deletions
|
@ -7,6 +7,8 @@ import os
|
||||||
import re
|
import re
|
||||||
from utils import print_colors, IsUrlValid
|
from utils import print_colors, IsUrlValid
|
||||||
|
|
||||||
|
from dotenv import load_dotenv
|
||||||
|
|
||||||
# Command-line interface for the Lantern crawler; these strings show up
# verbatim in the --help output.
parser = argparse.ArgumentParser(
    prog='Lantern crawler',
    description='Crawls .onion sites for links to more .onion sites',
)
|
@ -20,12 +22,21 @@ parser.add_argument('-c', '--crawler-file',
|
||||||
# Input CSV of verified .onion links to crawl.
# NOTE: argparse silently ignores `default=` on a *required* positional,
# so the intended 'verified.csv' fallback was dead code; declaring the
# argument with nargs='?' makes it optional and lets the default apply.
# Passing the argument explicitly behaves exactly as before.
parser.add_argument('verified_csv', help='Input file to read for .onion links to crawl', type=str, default='verified.csv', nargs='?')
args = parser.parse_args()
||||||
# Locate the crawler's environment file next to this script. If the
# operator has not created a .env, fall back to the shipped .env.sample
# so TOR_HOST/TOR_PORT still resolve to sane defaults.
script_abs_path = os.path.dirname(os.path.abspath(__file__))
# Fixed: os.path.join takes path components as separate arguments; the
# original passed a single pre-concatenated string (a no-op join with a
# hard-coded '/' separator).
env_path = os.path.join(script_abs_path, ".env")
default_env_path = os.path.join(script_abs_path, ".env.sample")

if os.path.exists(env_path):
    load_dotenv(dotenv_path=env_path)
else:
    load_dotenv(dotenv_path=default_env_path)
# Tor connection settings come from the environment loaded above.
# NOTE(review): TOR_HOST appears to be expected to carry the proxy
# scheme (e.g. socks5h://127.0.0.1) since none is added here — confirm
# against .env.sample.
tor_host = os.getenv("TOR_HOST")
tor_port = os.getenv("TOR_PORT")

# One shared session for the whole crawl: both schemes are routed
# through the same Tor proxy endpoint.
session = requests.session()
proxy_url = f'{tor_host}:{tor_port}'
session.proxies = {
    'http': proxy_url,
    'https': proxy_url,
}
# Identify the crawler to the sites it visits.
session.headers.update({'User-Agent': 'LanternCrawler'})
Loading…
Add table
Add a link
Reference in a new issue