add host
- small clean up
parent b51712cb87
commit 6cb493eba7
7 changed files with 29 additions and 21 deletions
@@ -36,6 +36,8 @@ pushover.user_key =
 [WEB]
 # should we enable the webserver which is just a simple, simple, simple webui to view the logs
 web.enabled = false
+# the host to listen on. localhost or 0.0.0.0 are the two common options
+web.host = localhost
 # the port to run on
 web.port = 9547
 # the app root / web folder / root / many other names . MUST contain a trailing '/'
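For orientation (not part of the commit): a minimal sketch of how the new [WEB] keys would typically be read with Python's stdlib ConfigParser. web.host is the bind address: localhost keeps the log UI reachable only from the machine itself, while 0.0.0.0 listens on every interface. The file name config.ini is an assumption, not taken from the diff.

    from configparser import ConfigParser

    parser = ConfigParser()
    parser.read('config.ini')  # assumed file name

    host = parser['WEB'].get('web.host', 'localhost')   # bind address
    port = parser['WEB'].getint('web.port')             # 9547 in the sample above
    enabled = parser['WEB'].getboolean('web.enabled')
    print(f"web UI {'enabled' if enabled else 'disabled'} at http://{host}:{port}/")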
@@ -27,7 +27,6 @@ def choose_user_agent():
 
 
 class ConfigReader(ConfigParser):
-
     required_values = {
         'DEFAULT': {
             'enabled': ('true', 'false'),
@@ -51,13 +50,13 @@ class ConfigReader(ConfigParser):
         }
     }
     default_values = {
         'DEFAULT': {
             'cookie': '',
             'user_agent': f"{choose_user_agent()}",
             'enabled': 'true',
             'minimum_points': f"{randint(20, 50)}",
             'max_entries': f"{randint(1000, 2500)}",
-            'max_time_left': f"{randint(180,500)}",
+            'max_time_left': f"{randint(180, 500)}",
             'minimum_game_points': "0",
             'blacklist_keywords': 'hentai,adult'
         },
@@ -65,7 +64,7 @@ class ConfigReader(ConfigParser):
             'wishlist.enabled': 'true',
             'wishlist.minimum_points': '1',
             'wishlist.max_entries': f"{randint(10000, 100000)}",
-            'wishlist.max_time_left': f"{randint(180,500)}"
+            'wishlist.max_time_left': f"{randint(180, 500)}"
         },
         'NOTIFICATIONS': {
             'notification.prefix': '',
@@ -75,6 +74,7 @@ class ConfigReader(ConfigParser):
         },
         'WEB': {
             'web.enabled': 'false',
+            'web.host': '0.0.0.0',
             'web.app_root': '/',
             'web.port': '9647',
             'web.ssl': 'true',
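Note that the coded default for web.host is '0.0.0.0' while the sample config file above ships with localhost. Presumably ConfigReader uses this default_values mapping to backfill any option an existing config file lacks; a rough sketch of that pattern follows (the real ConfigReader may well apply its defaults differently):

    # Assumed backfill pattern, not copied from ConfigReader.
    from configparser import ConfigParser

    def apply_defaults(parser: ConfigParser, default_values: dict) -> None:
        for section, options in default_values.items():
            if section != 'DEFAULT' and not parser.has_section(section):
                parser.add_section(section)
            for key, value in options.items():
                if not parser.has_option(section, key):
                    parser.set(section, key, value)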
@@ -127,11 +127,11 @@ class ConfigReader(ConfigParser):
             for key, values in keys.items():
                 if key not in self[section] or self[section][key] == '':
                     raise ConfigException((
                         'Missing value for "%s" under section "%s" in ' +
                         'the config file') % (key, section))
 
                 if values:
                     if self[section][key] not in values:
                         raise ConfigException((
                             'Invalid value for "%s" under section "%s" in ' +
                             'the config file') % (key, section))
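These lines are the generic validator driven by required_values: each required key must be present and non-empty, and when a tuple of allowed values is given the stored value must also be one of them. A self-contained illustration of the same two checks (ValueError stands in for the project's ConfigException; the sample data is made up):

    required = {'enabled': ('true', 'false'), 'cookie': None}
    parsed = {'enabled': 'maybe', 'cookie': 'PHPSESSID=abc'}   # hypothetical parsed config

    for key, values in required.items():
        if key not in parsed or parsed[key] == '':
            raise ValueError(f'Missing value for "{key}"')
        if values and parsed[key] not in values:
            raise ValueError(f'Invalid value for "{key}"')     # 'maybe' not in ('true', 'false')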
@@ -69,7 +69,7 @@ class EnterGiveaways:
 
     def get_soup_from_page(self, url):
         headers = {
             'User-Agent': self.user_agent
         }
         self.requests_retry_session().get(url, headers=headers)
         r = requests.get(url, cookies=self.cookie)
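A side note on these unchanged lines: the GET issued through requests_retry_session() is discarded, and the page is then fetched a second time with a plain requests.get that carries the cookie but not the spoofed User-Agent. A possible consolidation (purely hypothetical, not what this commit does, and assuming requests_retry_session() returns a requests.Session and that the method goes on to build a soup from r.text) would be a single request:

    # Hypothetical consolidation: one GET that uses the retry session,
    # the spoofed User-Agent and the session cookie together.
    import requests
    from bs4 import BeautifulSoup   # assumes the project already parses pages with bs4

    def get_soup_from_page(session: requests.Session, url: str, user_agent: str, cookie: dict) -> BeautifulSoup:
        headers = {'User-Agent': user_agent}
        r = session.get(url, headers=headers, cookies=cookie)
        return BeautifulSoup(r.text, 'html.parser')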
@@ -1,8 +1,12 @@
+import datetime
 import threading
+from datetime import timedelta, datetime
 from random import randint
 from threading import Thread
 from time import sleep
 
+from dateutil import tz
+
 import log
 from enter_giveaways import EnterGiveaways
 
@@ -52,7 +56,9 @@ class GiveawayThread(threading.Thread):
 
             logger.info("🔴 All giveaways evaluated.")
             random_seconds = randint(1740, 3540) # sometime between 29-59 minutes
-            logger.info(f"🛋 Going to sleep for {random_seconds / 60} minutes.")
+            when_to_start_again = datetime.now(tz=tz.tzlocal()) + timedelta(seconds=random_seconds)
+            logger.info(f"🛋 Going to sleep for {random_seconds / 60} minutes. "
+                        f"Will start again at {when_to_start_again}")
             sleep(random_seconds)
 
     def run(self):
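randint(1740, 3540) picks a pause of 29 to 59 minutes; the new when_to_start_again line simply adds that many seconds to the current local time so the log can state when the next evaluation round begins. The same arithmetic in isolation (values are illustrative):

    from datetime import datetime, timedelta
    from dateutil import tz

    random_seconds = 1800                                   # 30 minutes, for example
    when_to_start_again = datetime.now(tz=tz.tzlocal()) + timedelta(seconds=random_seconds)
    print(f"Going to sleep for {random_seconds / 60} minutes. "
          f"Will start again at {when_to_start_again}")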
@@ -69,4 +75,4 @@ class GiveawayThread(threading.Thread):
         # we re-raise the caught exception
         # if any was caught
         if self.exc:
             raise self.exc
@@ -1,10 +1,8 @@
 import http.client
 import urllib
 
-from sqlalchemy.orm import Session
-
-from tables import TableNotification
 import log
+from tables import TableNotification
 
 logger = log.get_logger(__name__)
 
@@ -35,13 +35,14 @@ class TableNotification(Base):
         with Session(engine) as session:
             # with how filtering of datetimes works with a sqlite backend I couldn't figure out a better way
             # to filter out the dates to local time when they are stored in utc in the db
-            within_3_days = session.query(TableNotification)\
-                .filter(func.DATE(TableNotification.created_at) >= (datetime.utcnow().date() - timedelta(days=1)))\
-                .filter(func.DATE(TableNotification.created_at) <= (datetime.utcnow().date() + timedelta(days=1)))\
+            within_3_days = session.query(TableNotification) \
+                .filter(func.DATE(TableNotification.created_at) >= (datetime.utcnow().date() - timedelta(days=1))) \
+                .filter(func.DATE(TableNotification.created_at) <= (datetime.utcnow().date() + timedelta(days=1))) \
                 .filter_by(type='won').all()
             actual = []
             for r in within_3_days:
-                if r.created_at.replace(tzinfo=tz.tzutc()).astimezone(tz.tzlocal()).date() == datetime.now(tz=tz.tzlocal()).date():
+                if r.created_at.replace(tzinfo=tz.tzutc()).astimezone(tz.tzlocal()).date() == datetime.now(
+                        tz=tz.tzlocal()).date():
                     actual.append(r)
             return actual
 
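The reformatted query keeps the same two-step approach: because created_at is stored as naive UTC in SQLite, the SQL side only narrows the results to a roughly one-day-either-side window with func.DATE, and the exact "won today in local time" test is done in Python by attaching tzutc() and converting to tzlocal(). That conversion in isolation (the timestamp is made up):

    from datetime import datetime
    from dateutil import tz

    created_at = datetime(2022, 5, 1, 23, 30)   # naive UTC value as read from the DB
    local_date = created_at.replace(tzinfo=tz.tzutc()).astimezone(tz.tzlocal()).date()
    won_today = local_date == datetime.now(tz=tz.tzlocal()).date()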
@@ -15,6 +15,7 @@ class WebServerThread(threading.Thread):
         Thread.__init__(self)
         self.exc = None
         self.config = config
+        self.host = config['WEB'].get('web.host')
         self.port = config['WEB'].getint('web.port')
         self.ssl = config['WEB'].getboolean('web.ssl')
         self.enabled = config['WEB'].getboolean('web.enabled')
@@ -63,9 +64,9 @@ class WebServerThread(threading.Thread):
         if self.enabled:
             logger.info("Webserver Enabled. Running")
             if self.ssl:
-                app.run(port=self.port, host="0.0.0.0", ssl_context='adhoc')
+                app.run(port=self.port, host=self.host, ssl_context='adhoc')
             else:
-                app.run(port=self.port, host="0.0.0.0")
+                app.run(port=self.port, host=self.host)
         else:
             logger.info("Webserver NOT Enabled.")
 
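With the bind address now taken from self.host instead of the hard-coded "0.0.0.0", setting web.host = localhost keeps the log UI private to the machine, while 0.0.0.0 (the coded default added above) restores the previous listen-on-all-interfaces behaviour, which is typically what a containerised deployment needs for a published port to stay reachable.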
@@ -83,4 +84,4 @@ class WebServerThread(threading.Thread):
         # we re-raise the caught exception
         # if any was caught
         if self.exc:
             raise self.exc