update
This commit is contained in:
496
TwitchChannelPointsMiner/TwitchChannelPointsMiner.py
Normal file
496
TwitchChannelPointsMiner/TwitchChannelPointsMiner.py
Normal file
@ -0,0 +1,496 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
import logging
|
||||
import os
|
||||
import random
|
||||
import signal
|
||||
import sys
|
||||
import threading
|
||||
import time
|
||||
import uuid
|
||||
from datetime import datetime
|
||||
from pathlib import Path
|
||||
|
||||
from TwitchChannelPointsMiner.classes.Chat import ChatPresence, ThreadChat
|
||||
from TwitchChannelPointsMiner.classes.entities.PubsubTopic import PubsubTopic
|
||||
from TwitchChannelPointsMiner.classes.entities.Streamer import (
|
||||
Streamer,
|
||||
StreamerSettings,
|
||||
)
|
||||
from TwitchChannelPointsMiner.classes.Exceptions import StreamerDoesNotExistException
|
||||
from TwitchChannelPointsMiner.classes.Settings import FollowersOrder, Priority, Settings
|
||||
from TwitchChannelPointsMiner.classes.Twitch import Twitch
|
||||
from TwitchChannelPointsMiner.classes.WebSocketsPool import WebSocketsPool
|
||||
from TwitchChannelPointsMiner.logger import LoggerSettings, configure_loggers
|
||||
from TwitchChannelPointsMiner.utils import (
|
||||
_millify,
|
||||
at_least_one_value_in_settings_is,
|
||||
check_versions,
|
||||
get_user_agent,
|
||||
internet_connection_available,
|
||||
set_default_settings,
|
||||
)
|
||||
|
||||
# Silence noisy third-party loggers so the miner's own output stays readable:
# - chardet.charsetprober - [feed] / [get_confidence]
# - requests - [Starting new HTTPS connection (1)]
# - Flask (werkzeug) request logs
# - irc.client - [process_data] / [_dispatcher] / [_handle_message]
logging.getLogger("chardet.charsetprober").setLevel(logging.ERROR)
logging.getLogger("requests").setLevel(logging.ERROR)
logging.getLogger("werkzeug").setLevel(logging.ERROR)
logging.getLogger("irc.client").setLevel(logging.ERROR)
logging.getLogger("seleniumwire").setLevel(logging.ERROR)
logging.getLogger("websocket").setLevel(logging.ERROR)

# Module-level logger for the miner itself.
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class TwitchChannelPointsMiner:
    """Top-level orchestrator: logs into Twitch, tracks a set of streamers,
    and runs the watcher / PubSub threads that farm channel points.
    """

    # __slots__ fixes the attribute set (no per-instance __dict__) and makes
    # typo'd attribute assignments fail loudly.
    __slots__ = [
        "username",
        "twitch",
        "claim_drops_startup",
        "enable_analytics",
        "disable_ssl_cert_verification",
        "disable_at_in_nickname",
        "priority",
        "streamers",
        "events_predictions",
        "minute_watcher_thread",
        "sync_campaigns_thread",
        "ws_pool",
        "session_id",
        "running",
        "start_datetime",
        "original_streamers",
        "logs_file",
        "queue_listener",
    ]
|
||||
|
||||
def __init__(
    self,
    username: str,
    password: str = None,
    claim_drops_startup: bool = False,
    enable_analytics: bool = False,
    disable_ssl_cert_verification: bool = False,
    disable_at_in_nickname: bool = False,
    # Order in which streams are picked when several are live.
    priority: list = [Priority.STREAK, Priority.DROPS, Priority.ORDER],
    # These settings are shared globally through the Settings class.
    logger_settings: LoggerSettings = LoggerSettings(),
    # Default values applied to all streamers.
    streamer_settings: StreamerSettings = StreamerSettings(),
):
    """Validate configuration, wait for Twitch connectivity, configure
    logging/analytics, and register signal handlers for clean shutdown.

    Exits the process (sys.exit) when no real username is supplied.
    """
    # Fixes TypeError: 'NoneType' object is not subscriptable
    if not username or username == "your-twitch-username":
        logger.error(
            "Please edit your runner file (usually run.py) and try again.")
        logger.error("No username, exiting...")
        sys.exit(0)

    # This disables certificate verification and allows the connection to
    # proceed, but also makes it vulnerable to man-in-the-middle (MITM) attacks.
    Settings.disable_ssl_cert_verification = disable_ssl_cert_verification

    Settings.disable_at_in_nickname = disable_at_in_nickname

    import socket

    def is_connected():
        # True when the Twitch.tv domain name resolves (cheap connectivity probe).
        try:
            # resolve the IP address of the Twitch.tv domain name
            socket.gethostbyname("twitch.tv")
            return True
        except OSError:
            pass
        return False

    # Block start-up until Twitch.tv is reachable, probing every 5 seconds;
    # log the waiting message only once.
    error_printed = False
    while not is_connected():
        if not error_printed:
            logger.error("Waiting for Twitch.tv connectivity...")
            error_printed = True
        time.sleep(5)

    # Analytics switch
    Settings.enable_analytics = enable_analytics

    if enable_analytics is True:
        # Per-user analytics folder: ./analytics/<username>
        Settings.analytics_path = os.path.join(
            Path().absolute(), "analytics", username
        )
        Path(Settings.analytics_path).mkdir(parents=True, exist_ok=True)

    self.username = username

    # Set as global config
    Settings.logger = logger_settings

    # Init as default all the missing values
    streamer_settings.default()
    streamer_settings.bet.default()
    Settings.streamer_settings = streamer_settings

    # user_agent = get_user_agent("FIREFOX")
    user_agent = get_user_agent("CHROME")
    self.twitch = Twitch(self.username, user_agent, password)

    self.claim_drops_startup = claim_drops_startup
    # Normalize: a single Priority value is wrapped into a list.
    self.priority = priority if isinstance(priority, list) else [priority]

    self.streamers = []
    self.events_predictions = {}
    self.minute_watcher_thread = None
    self.sync_campaigns_thread = None
    self.ws_pool = None

    self.session_id = str(uuid.uuid4())
    self.running = False
    self.start_datetime = None
    self.original_streamers = []

    self.logs_file, self.queue_listener = configure_loggers(
        self.username, logger_settings
    )

    # Check for the latest version of the script
    current_version, github_version = check_versions()

    logger.info(
        f"Twitch Channel Points Miner v2-{current_version} (fork by rdavydov)"
    )
    logger.info(
        "https://github.com/rdavydov/Twitch-Channel-Points-Miner-v2")

    # "0.0.0" is the sentinel returned when the GitHub check failed.
    if github_version == "0.0.0":
        logger.error(
            "Unable to detect if you have the latest version of this script"
        )
    elif current_version != github_version:
        logger.info(
            f"You are running version {current_version} of this script")
        logger.info(f"The latest version on GitHub is {github_version}")

    # Route INT/SEGV/TERM to self.end so threads and files shut down cleanly.
    for sign in [signal.SIGINT, signal.SIGSEGV, signal.SIGTERM]:
        signal.signal(sign, self.end)
|
||||
|
||||
def analytics(
    self,
    host: str = "127.0.0.1",
    port: int = 5000,
    refresh: int = 5,
    days_ago: int = 7,
):
    """Start the web analytics dashboard in a daemon thread.

    Logs an error and does nothing unless the miner was constructed with
    enable_analytics=True. `refresh` is the dashboard refresh interval in
    minutes; `days_ago` is the initial chart window.
    """
    # Guard clause: analytics must have been switched on at construction time.
    if Settings.enable_analytics is not True:
        logger.error(
            "Can't start analytics(), please set enable_analytics=True")
        return

    # Imported lazily so the Flask/pandas stack is only required when
    # the dashboard is actually used.
    from TwitchChannelPointsMiner.classes.AnalyticsServer import AnalyticsServer

    server = AnalyticsServer(
        host=host, port=port, refresh=refresh, days_ago=days_ago, username=self.username
    )
    server.daemon = True
    server.name = "Analytics Thread"
    server.start()
|
||||
|
||||
def mine(
    self,
    streamers: list = [],
    blacklist: list = [],
    followers: bool = False,
    followers_order: FollowersOrder = FollowersOrder.ASC,
):
    """Convenience alias for run() with the same arguments.

    Fix: `followers_order` was previously accepted but silently dropped when
    delegating to run(), so followed channels were always fetched in the
    default (ASC) order regardless of what the caller passed.
    """
    self.run(
        streamers=streamers,
        blacklist=blacklist,
        followers=followers,
        followers_order=followers_order,
    )
|
||||
|
||||
def run(
    self,
    streamers: list = [],
    blacklist: list = [],
    followers: bool = False,
    followers_order: FollowersOrder = FollowersOrder.ASC,
):
    """Main mining loop: log in, resolve streamers, start worker threads and
    PubSub subscriptions, then supervise the WebSocket pool until stopped.

    streamers: Streamer objects or bare usernames to mine.
    blacklist: usernames to exclude (applies to `streamers` and followers).
    followers: also mine every channel the account follows.
    followers_order: fetch order for the followers list.

    Blocks for the lifetime of the session; only one session per instance.
    """
    if self.running:
        logger.error("You can't start multiple sessions of this instance!")
    else:
        logger.info(
            f"Start session: '{self.session_id}'", extra={"emoji": ":bomb:"}
        )
        self.running = True
        self.start_datetime = datetime.now()

        self.twitch.login()

        if self.claim_drops_startup is True:
            self.twitch.claim_all_drops_from_inventory()

        # streamers_name preserves mining order; streamers_dict maps
        # username -> Streamer object (or plain username for followers).
        streamers_name: list = []
        streamers_dict: dict = {}

        for streamer in streamers:
            username = (
                streamer.username
                if isinstance(streamer, Streamer)
                else streamer.lower().strip()
            )
            if username not in blacklist:
                streamers_name.append(username)
                streamers_dict[username] = streamer

        if followers is True:
            followers_array = self.twitch.get_followers(
                order=followers_order)
            logger.info(
                f"Load {len(followers_array)} followers from your profile!",
                extra={"emoji": ":clipboard:"},
            )
            # Explicitly-listed streamers take precedence over followers.
            for username in followers_array:
                if username not in streamers_dict and username not in blacklist:
                    streamers_name.append(username)
                    streamers_dict[username] = username.lower().strip()

        logger.info(
            f"Loading data for {len(streamers_name)} streamers. Please wait...",
            extra={"emoji": ":nerd_face:"},
        )
        for username in streamers_name:
            # NOTE(review): this membership check is always true (we are
            # iterating streamers_name itself) — likely vestigial.
            if username in streamers_name:
                # Small jitter between API calls to avoid hammering Twitch.
                time.sleep(random.uniform(0.3, 0.7))
                try:
                    streamer = (
                        streamers_dict[username]
                        if isinstance(streamers_dict[username], Streamer) is True
                        else Streamer(username)
                    )
                    streamer.channel_id = self.twitch.get_channel_id(
                        username)
                    # Fill in any per-streamer settings left unset from the
                    # global defaults.
                    streamer.settings = set_default_settings(
                        streamer.settings, Settings.streamer_settings
                    )
                    streamer.settings.bet = set_default_settings(
                        streamer.settings.bet, Settings.streamer_settings.bet
                    )
                    if streamer.settings.chat != ChatPresence.NEVER:
                        streamer.irc_chat = ThreadChat(
                            self.username,
                            self.twitch.twitch_login.get_auth_token(),
                            streamer.username,
                        )
                    self.streamers.append(streamer)
                except StreamerDoesNotExistException:
                    logger.info(
                        f"Streamer {username} does not exist",
                        extra={"emoji": ":cry:"},
                    )

        # Populate the streamers with default values.
        # 1. Load channel points and auto-claim bonus
        # 2. Check if streamers are online
        # 3. DEACTIVATED: Check if the user is a moderator. (was used before the 5th of April 2021 to deactivate predictions)
        for streamer in self.streamers:
            time.sleep(random.uniform(0.3, 0.7))
            self.twitch.load_channel_points_context(streamer)
            self.twitch.check_streamer_online(streamer)
            # self.twitch.viewer_is_mod(streamer)

        # Snapshot starting balances so the final report can show gains.
        self.original_streamers = [
            streamer.channel_points for streamer in self.streamers
        ]

        # If we have at least one streamer with settings = make_predictions True
        make_predictions = at_least_one_value_in_settings_is(
            self.streamers, "make_predictions", True
        )

        # If we have at least one streamer with settings = claim_drops True
        # Spawn a thread for sync inventory and dashboard
        if (
            at_least_one_value_in_settings_is(
                self.streamers, "claim_drops", True)
            is True
        ):
            self.sync_campaigns_thread = threading.Thread(
                target=self.twitch.sync_campaigns,
                args=(self.streamers,),
            )
            self.sync_campaigns_thread.name = "Sync campaigns/inventory"
            self.sync_campaigns_thread.start()
            # Give the campaigns thread a head start before minute-watching.
            time.sleep(30)

        self.minute_watcher_thread = threading.Thread(
            target=self.twitch.send_minute_watched_events,
            args=(self.streamers, self.priority),
        )
        self.minute_watcher_thread.name = "Minute watcher"
        self.minute_watcher_thread.start()

        self.ws_pool = WebSocketsPool(
            twitch=self.twitch,
            streamers=self.streamers,
            events_predictions=self.events_predictions,
        )

        # Subscribe to community-points-user. Get update for points spent or gains
        user_id = self.twitch.twitch_login.get_user_id()

        # Fixes 'ERR_BADAUTH': without a user id the PubSub auth would fail.
        if not user_id:
            logger.error("No user_id, exiting...")
            self.end(0, 0)

        self.ws_pool.submit(
            PubsubTopic(
                "community-points-user-v1",
                user_id=user_id,
            )
        )

        # Subscribe to predictions-user-v1: updates when we place a prediction.
        if make_predictions is True:
            self.ws_pool.submit(
                PubsubTopic(
                    "predictions-user-v1",
                    user_id=user_id,
                )
            )

        # Per-streamer topics: playback state, plus optional raid /
        # prediction / moment topics depending on each streamer's settings.
        for streamer in self.streamers:
            self.ws_pool.submit(
                PubsubTopic("video-playback-by-id", streamer=streamer)
            )

            if streamer.settings.follow_raid is True:
                self.ws_pool.submit(PubsubTopic("raid", streamer=streamer))

            if streamer.settings.make_predictions is True:
                self.ws_pool.submit(
                    PubsubTopic("predictions-channel-v1",
                                streamer=streamer)
                )

            if streamer.settings.claim_moments is True:
                self.ws_pool.submit(
                    PubsubTopic("community-moments-channel-v1",
                                streamer=streamer)
                )

        # Supervision loop: reconnect stale WebSockets and refresh channel
        # points context every ~30 minutes until end() flips self.running.
        refresh_context = time.time()
        while self.running:
            time.sleep(random.uniform(20, 60))
            # Do an external control for WebSocket. Check if the thread is running
            # Check if is not None because maybe we have already created a new connection on array+1 and now index is None
            for index in range(0, len(self.ws_pool.ws)):
                if (
                    self.ws_pool.ws[index].is_reconnecting is False
                    and self.ws_pool.ws[index].elapsed_last_ping() > 10
                    and internet_connection_available() is True
                ):
                    logger.info(
                        f"#{index} - The last PING was sent more than 10 minutes ago. Reconnecting to the WebSocket..."
                    )
                    WebSocketsPool.handle_reconnection(
                        self.ws_pool.ws[index])

            if ((time.time() - refresh_context) // 60) >= 30:
                refresh_context = time.time()
                for index in range(0, len(self.streamers)):
                    if self.streamers[index].is_online:
                        self.twitch.load_channel_points_context(
                            self.streamers[index]
                        )
|
||||
|
||||
def end(self, signum, frame):
    """Shut the session down cleanly and exit the process.

    Registered as a signal handler (SIGINT/SIGSEGV/SIGTERM) — hence the
    (signum, frame) signature — and also called directly as self.end(0, 0).
    Order matters: leave chats, stop flags, close WebSockets, join worker
    threads, release mutexes, print the report, flush the log queue.
    """
    logger.info("CTRL+C Detected! Please wait just a moment!")

    # Leave every joined IRC chat and wait for its thread to finish.
    for streamer in self.streamers:
        if (
            streamer.irc_chat is not None
            and streamer.settings.chat != ChatPresence.NEVER
        ):
            streamer.leave_chat()
            if streamer.irc_chat.is_alive() is True:
                streamer.irc_chat.join()

    # Signal all loops (ours and Twitch's worker loops) to stop.
    self.running = self.twitch.running = False
    if self.ws_pool is not None:
        self.ws_pool.end()

    if self.minute_watcher_thread is not None:
        self.minute_watcher_thread.join()

    if self.sync_campaigns_thread is not None:
        self.sync_campaigns_thread.join()

    # Check if all the mutex are unlocked.
    # Prevent breaks of .json file
    for streamer in self.streamers:
        if streamer.mutex.locked():
            # Wait for the current holder to finish, then release immediately.
            streamer.mutex.acquire()
            streamer.mutex.release()

    self.__print_report()

    # Stop the queue listener to make sure all messages have been logged
    self.queue_listener.stop()

    sys.exit(0)
|
||||
|
||||
def __print_report(self):
    """Log the end-of-session summary: duration, prediction recaps, and
    per-streamer points gained relative to the start-of-session snapshot.
    """
    print("\n")
    logger.info(
        f"Ending session: '{self.session_id}'", extra={"emoji": ":stop_sign:"}
    )
    if self.logs_file is not None:
        logger.info(
            f"Logs file: {self.logs_file}", extra={"emoji": ":page_facing_up:"}
        )
    logger.info(
        f"Duration {datetime.now() - self.start_datetime}",
        extra={"emoji": ":hourglass:"},
    )

    # Recap every confirmed prediction bet placed this session.
    if self.events_predictions != {}:
        print("")
        for event_id in self.events_predictions:
            event = self.events_predictions[event_id]
            if (
                event.bet_confirmed is True
                and event.streamer.settings.make_predictions is True
            ):
                logger.info(
                    f"{event.streamer.settings.bet}",
                    extra={"emoji": ":wrench:"},
                )
                if event.streamer.settings.bet.filter_condition is not None:
                    logger.info(
                        f"{event.streamer.settings.bet.filter_condition}",
                        extra={"emoji": ":pushpin:"},
                    )
                logger.info(
                    f"{event.print_recap()}",
                    extra={"emoji": ":bar_chart:"},
                )

    print("")
    # original_streamers[i] holds streamer i's balance at session start.
    for streamer_index in range(0, len(self.streamers)):
        if self.streamers[streamer_index].history != {}:
            gained = (
                self.streamers[streamer_index].channel_points
                - self.original_streamers[streamer_index]
            )
            logger.info(
                f"{repr(self.streamers[streamer_index])}, Total Points Gained (after farming - before farming): {_millify(gained)}",
                extra={"emoji": ":robot:"},
            )
            # NOTE(review): this inner check repeats the outer one and is
            # therefore always true here.
            if self.streamers[streamer_index].history != {}:
                logger.info(
                    f"{self.streamers[streamer_index].print_history()}",
                    extra={"emoji": ":moneybag:"},
                )
|
||||
7
TwitchChannelPointsMiner/__init__.py
Normal file
7
TwitchChannelPointsMiner/__init__.py
Normal file
@ -0,0 +1,7 @@
|
||||
# -*- coding: utf-8 -*-
# Package entry point: exposes the miner class and the package version.
__version__ = "1.9.3"
from .TwitchChannelPointsMiner import TwitchChannelPointsMiner

# Public API of the package.
__all__ = [
    "TwitchChannelPointsMiner",
]
|
||||
295
TwitchChannelPointsMiner/classes/AnalyticsServer.py
Normal file
295
TwitchChannelPointsMiner/classes/AnalyticsServer.py
Normal file
@ -0,0 +1,295 @@
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
from datetime import datetime
|
||||
from pathlib import Path
|
||||
from threading import Thread
|
||||
|
||||
import pandas as pd
|
||||
from flask import Flask, Response, cli, render_template, request
|
||||
|
||||
from TwitchChannelPointsMiner.classes.Settings import Settings
|
||||
from TwitchChannelPointsMiner.utils import download_file
|
||||
|
||||
cli.show_server_banner = lambda *_: None
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def streamers_available():
    """Return the per-streamer analytics filenames (``*.json``) present in
    the configured analytics folder.
    """
    folder = Settings.analytics_path
    return [
        entry
        for entry in os.listdir(folder)
        if entry.endswith(".json") and os.path.isfile(os.path.join(folder, entry))
    ]
|
||||
|
||||
|
||||
def aggregate(df, freq="30Min"):
    """Downsample the analytics series to one row per (time bucket, event
    type), keeping the max of each column.

    'Watch'/'Claim' rows and all other event rows are aggregated separately
    and concatenated, matching the original two-pass behaviour.
    """
    base_mask = (df.z == "Watch") | (df.z == "Claim")
    grouper = [pd.Grouper(freq=freq, key="datetime"), "z"]

    base_agg = df[base_mask].groupby(grouper).max().reset_index()
    other_agg = df[~base_mask].groupby(grouper).max().reset_index()

    return pd.concat([base_agg, other_agg])
|
||||
|
||||
|
||||
def filter_datas(start_date, end_date, datas):
    """Filter a streamer's analytics payload in place to [start_date, end_date].

    start_date/end_date: "YYYY-MM-DD" strings or None (None = open-ended).
    datas: dict with optional "series" / "annotations" lists of
    {"x": ms_epoch, "y": points, "z": label} records.
    Returns the same dict with both keys filtered (and always present).

    Fixes over the original:
    - `datas["series"]` was read unconditionally before the `"series" in datas`
      membership check, raising KeyError for payloads without a series.
    - The 'No Stream' fallback indexed `[-1]` into possibly-empty records and
      built a DataFrame from a possibly-empty series (IndexError /
      AttributeError); both paths are now guarded.
    - An empty "annotations" list no longer crashes on the missing `x` column.
    """
    # Note: charts use millisecond epochs, hence the * 1000.
    # https://stackoverflow.com/questions/4676195/why-do-i-need-to-multiply-unix-timestamps-by-1000-in-javascript
    start_date = (
        datetime.strptime(start_date, "%Y-%m-%d").timestamp() * 1000
        if start_date is not None
        else 0
    )
    end_date = (
        datetime.strptime(end_date, "%Y-%m-%d")
        if end_date is not None
        else datetime.now()
    ).replace(hour=23, minute=59, second=59).timestamp() * 1000

    original_series = datas.get("series", [])

    if original_series:
        df = pd.DataFrame(original_series)
        df["datetime"] = pd.to_datetime(df.x // 1000, unit="s")
        df = df[(df.x >= start_date) & (df.x <= end_date)]
        datas["series"] = (
            df.drop(columns="datetime")
            .sort_values(by=["x", "y"], ascending=True)
            .to_dict("records")
        )
    else:
        datas["series"] = []

    # If no data is found within the timeframe, the streamer probably didn't
    # stream then: draw a flat 'No Stream' line at the last known balance.
    if len(datas["series"]) == 0 and original_series:
        df = pd.DataFrame(original_series)
        df["datetime"] = pd.to_datetime(df.x // 1000, unit="s")
        # Last known balance strictly before the requested window.
        df = df[(df.x >= 0) & (df.x <= start_date)]
        earlier = (
            df.drop(columns="datetime")
            .sort_values(by=["x", "y"], ascending=True)
            .to_dict("records")
        )
        if earlier:  # guard: no earlier data -> leave the series empty
            last_balance = earlier[-1]["y"]
            datas["series"] = [
                {"x": start_date, "y": last_balance, "z": "No Stream"},
                {"x": end_date, "y": last_balance, "z": "No Stream"},
            ]

    annotations = datas.get("annotations", [])
    if annotations:
        df = pd.DataFrame(annotations)
        df["datetime"] = pd.to_datetime(df.x // 1000, unit="s")
        df = df[(df.x >= start_date) & (df.x <= end_date)]
        datas["annotations"] = (
            df.drop(columns="datetime")
            .sort_values(by="x", ascending=True)
            .to_dict("records")
        )
    else:
        datas["annotations"] = []

    return datas
|
||||
|
||||
|
||||
def read_json(streamer, return_response=True):
    """Load a streamer's analytics JSON file, filtered by the optional
    startDate/endDate query parameters of the current request.

    Must run inside a Flask request context (reads request.args).
    Returns a flask.Response when return_response is True, otherwise a plain
    dict — which is {"error": ...} when the file is missing or malformed.
    """
    start_date = request.args.get("startDate", type=str)
    end_date = request.args.get("endDate", type=str)

    path = Settings.analytics_path
    # Accept either "name" or "name.json".
    streamer = streamer if streamer.endswith(".json") else f"{streamer}.json"

    # Check if the file exists before attempting to read it
    if not os.path.exists(os.path.join(path, streamer)):
        error_message = f"File '{streamer}' not found."
        logger.error(error_message)
        if return_response:
            return Response(json.dumps({"error": error_message}), status=404, mimetype="application/json")
        else:
            return {"error": error_message}

    try:
        with open(os.path.join(path, streamer), 'r') as file:
            data = json.load(file)
    except json.JSONDecodeError as e:
        error_message = f"Error decoding JSON in file '{streamer}': {str(e)}"
        logger.error(error_message)
        if return_response:
            return Response(json.dumps({"error": error_message}), status=500, mimetype="application/json")
        else:
            return {"error": error_message}

    # Restrict series/annotations to the requested date window.
    filtered_data = filter_datas(start_date, end_date, data)
    if return_response:
        return Response(json.dumps(filtered_data), status=200, mimetype="application/json")
    else:
        return filtered_data
|
||||
|
||||
|
||||
def get_challenge_points(streamer):
    """Return the most recent recorded channel-points balance for a streamer,
    or 0 when no series data exists (or the file could not be read).
    """
    payload = read_json(streamer, return_response=False)
    series = payload.get("series")
    if series:
        return series[-1]["y"]
    return 0
|
||||
|
||||
|
||||
def get_last_activity(streamer):
    """Return the millisecond timestamp of the streamer's latest analytics
    entry, or 0 when no series data exists (or the file could not be read).
    """
    payload = read_json(streamer, return_response=False)
    series = payload.get("series")
    if series:
        return series[-1]["x"]
    return 0
|
||||
|
||||
|
||||
def json_all():
    """Serve every streamer's full analytics payload as one JSON array of
    {"name", "data"} objects.

    Fix: the original used `streamer.strip(".json")`, which strips any of the
    characters '.', 'j', 's', 'o', 'n' from BOTH ends of the name (mangling
    names such as "json_fan.json" or "nojs.json"); only the literal ".json"
    suffix should be removed.
    """
    entries = []
    for streamer in streamers_available():
        name = streamer[:-len(".json")] if streamer.endswith(".json") else streamer
        entries.append(
            {
                "name": name,
                "data": read_json(streamer, return_response=False),
            }
        )
    return Response(
        json.dumps(entries),
        status=200,
        mimetype="application/json",
    )
|
||||
|
||||
|
||||
def index(refresh=5, days_ago=7):
    """Render the dashboard page.

    refresh is in minutes and is converted to milliseconds for the front-end
    timer; days_ago sets the initial chart window.
    """
    refresh_ms = refresh * 60 * 1000
    return render_template("charts.html", refresh=refresh_ms, daysAgo=days_ago)
|
||||
|
||||
|
||||
def streamers():
    """Serve the sorted streamer list with each one's current points and
    last-activity timestamp as JSON.
    """
    payload = [
        {
            "name": name,
            "points": get_challenge_points(name),
            "last_activity": get_last_activity(name),
        }
        for name in sorted(streamers_available())
    ]
    return Response(
        json.dumps(payload),
        status=200,
        mimetype="application/json",
    )
|
||||
|
||||
|
||||
def download_assets(assets_folder, required_files):
    """Download any of required_files missing from assets_folder (created if
    absent) from the project's remote "assets" location.
    """
    Path(assets_folder).mkdir(parents=True, exist_ok=True)
    logger.info(f"Downloading assets to {assets_folder}")

    for name in required_files:
        target = os.path.join(assets_folder, name)
        if not os.path.isfile(target):
            # download_file reports success with True; only then log it.
            if download_file(os.path.join("assets", name), target) is True:
                logger.info(f"Downloaded {name}")
|
||||
|
||||
|
||||
def check_assets():
    """Ensure the dashboard's static assets exist under ./assets, fetching
    them all when the folder or any required file is missing.
    """
    required_files = [
        "banner.png",
        "charts.html",
        "script.js",
        "style.css",
        "dark-theme.css",
    ]
    assets_folder = os.path.join(Path().absolute(), "assets")

    if not os.path.isdir(assets_folder):
        logger.info(f"Assets folder not found at {assets_folder}")
        download_assets(assets_folder, required_files)
        return

    # Folder exists: one missing file triggers a single re-download pass.
    for name in required_files:
        if not os.path.isfile(os.path.join(assets_folder, name)):
            logger.info(f"Missing file {name} in {assets_folder}")
            download_assets(assets_folder, required_files)
            break
|
||||
|
||||
# Global cursor: character offset into the log file already delivered to the
# dashboard's /log endpoint (shared across requests — single-viewer assumption).
last_sent_log_index = 0
|
||||
|
||||
class AnalyticsServer(Thread):
    """Thread hosting the Flask app that serves the analytics dashboard:
    charts page, per-streamer JSON, aggregate JSON, and a live log tail.
    """

    def __init__(
        self,
        host: str = "127.0.0.1",
        port: int = 5000,
        refresh: int = 5,
        days_ago: int = 7,
        username: str = None
    ):
        super(AnalyticsServer, self).__init__()

        # Make sure the HTML/JS/CSS assets exist before Flask tries to serve them.
        check_assets()

        self.host = host
        self.port = port
        self.refresh = refresh
        self.days_ago = days_ago
        self.username = username

        # Defined as a closure so it can capture `username` without extra
        # Flask view-argument plumbing.
        def generate_log():
            global last_sent_log_index  # Use the global variable

            # Get the last received log index from the client request parameters
            last_received_index = int(request.args.get("lastIndex", last_sent_log_index))

            logs_path = os.path.join(Path().absolute(), "logs")
            # NOTE(review): assumes the log file is named "<username>.log" —
            # confirm against the logger configuration in logger.py.
            log_file_path = os.path.join(logs_path, f"{username}.log")
            try:
                with open(log_file_path, "r") as log_file:
                    log_content = log_file.read()

                # Extract new log entries since the last received index
                new_log_entries = log_content[last_received_index:]
                last_sent_log_index = len(log_content)  # Update the last sent index

                return Response(new_log_entries, status=200, mimetype="text/plain")

            except FileNotFoundError:
                return Response("Log file not found.", status=404, mimetype="text/plain")

        # Serve templates and static files straight from the assets folder.
        self.app = Flask(
            __name__,
            template_folder=os.path.join(Path().absolute(), "assets"),
            static_folder=os.path.join(Path().absolute(), "assets"),
        )
        self.app.add_url_rule(
            "/",
            "index",
            index,
            defaults={"refresh": refresh, "days_ago": days_ago},
            methods=["GET"],
        )
        self.app.add_url_rule("/streamers", "streamers",
                              streamers, methods=["GET"])
        self.app.add_url_rule(
            "/json/<string:streamer>", "json", read_json, methods=["GET"]
        )
        self.app.add_url_rule("/json_all", "json_all",
                              json_all, methods=["GET"])
        self.app.add_url_rule(
            "/log", "log", generate_log, methods=["GET"])

    def run(self):
        """Thread entry point: blocks serving HTTP until the process exits."""
        logger.info(
            f"Analytics running on http://{self.host}:{self.port}/",
            extra={"emoji": ":globe_with_meridians:"},
        )
        self.app.run(host=self.host, port=self.port,
                     threaded=True, debug=False)
|
||||
105
TwitchChannelPointsMiner/classes/Chat.py
Normal file
105
TwitchChannelPointsMiner/classes/Chat.py
Normal file
@ -0,0 +1,105 @@
|
||||
import logging
|
||||
import time
|
||||
from enum import Enum, auto
|
||||
from threading import Thread
|
||||
|
||||
from irc.bot import SingleServerIRCBot
|
||||
|
||||
from TwitchChannelPointsMiner.constants import IRC, IRC_PORT
|
||||
from TwitchChannelPointsMiner.classes.Settings import Events, Settings
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class ChatPresence(Enum):
    """Policy controlling when the miner sits in a streamer's IRC chat."""

    ALWAYS = auto()
    NEVER = auto()
    ONLINE = auto()
    OFFLINE = auto()

    def __str__(self):
        # Render as the bare member name (e.g. "ALWAYS") in logs/settings.
        return self.name
|
||||
|
||||
|
||||
class ClientIRC(SingleServerIRCBot):
    """Minimal Twitch IRC client: joins one channel and logs messages that
    mention the bot's nickname.

    Fixes over the original: the `mention != None` comparison now uses the
    PEP 8 `is not None` form, and dead commented-out code was removed.
    """

    def __init__(self, username, token, channel):
        self.token = token
        # Twitch IRC channel names are the login prefixed with '#'.
        self.channel = "#" + channel
        self.__active = False

        super(ClientIRC, self).__init__(
            [(IRC, IRC_PORT, f"oauth:{token}")], username, username
        )

    def on_welcome(self, client, event):
        # Join the target channel as soon as the server accepts us.
        client.join(self.channel)

    def start(self):
        """Run the IRC event loop until die() clears the active flag."""
        self.__active = True
        self._connect()
        while self.__active:
            try:
                self.reactor.process_once(timeout=0.2)
                time.sleep(0.01)
            except Exception as e:
                logger.error(
                    f"Exception raised: {e}. Thread is active: {self.__active}"
                )

    def die(self, msg="Bye, cruel world!"):
        """Disconnect from the server and stop the start() loop."""
        self.connection.disconnect(msg)
        self.__active = False

    def on_pubmsg(self, connection, event):
        """Log any chat message that mentions our nickname."""
        msg = event.arguments[0]
        # With disable_at_in_nickname, match the bare nickname; otherwise
        # require the conventional "@nickname" form.
        if Settings.disable_at_in_nickname is True:
            mention = f"{self._nickname.lower()}"
        else:
            mention = f"@{self._nickname.lower()}"

        if mention is not None and mention in msg.lower():
            # event.source looks like nickname!username@nickname.tmi.twitch.tv
            nick = event.source.split("!", 1)[0]

            logger.info(f"{nick} at {self.channel} wrote: {msg}", extra={
                "emoji": ":speech_balloon:", "event": Events.CHAT_MENTION})
|
||||
|
||||
|
||||
class ThreadChat(Thread):
    """Background thread hosting a single ClientIRC connection for one channel."""

    def __deepcopy__(self, memo):
        # Streamer objects may be deep-copied; never duplicate a live IRC
        # thread — the copy simply gets no chat connection.
        return None

    def __init__(self, username, token, channel):
        super(ThreadChat, self).__init__()

        self.username = username
        self.token = token
        self.channel = channel

        # Created lazily in run() so the connection lives on this thread.
        self.chat_irc = None

    def run(self):
        """Thread entry point: connect and process IRC events until stop()."""
        self.chat_irc = ClientIRC(self.username, self.token, self.channel)
        logger.info(
            f"Join IRC Chat: {self.channel}", extra={"emoji": ":speech_balloon:"}
        )
        self.chat_irc.start()

    def stop(self):
        """Disconnect the IRC client, which lets run() (and the thread) finish."""
        if self.chat_irc is not None:
            logger.info(
                f"Leave IRC Chat: {self.channel}", extra={"emoji": ":speech_balloon:"}
            )
            self.chat_irc.die()
|
||||
24
TwitchChannelPointsMiner/classes/Discord.py
Normal file
24
TwitchChannelPointsMiner/classes/Discord.py
Normal file
@ -0,0 +1,24 @@
|
||||
from textwrap import dedent
|
||||
|
||||
import requests
|
||||
|
||||
from TwitchChannelPointsMiner.classes.Settings import Events
|
||||
|
||||
|
||||
class Discord(object):
    """Posts event notifications to a Discord webhook."""

    __slots__ = ["webhook_api", "events"]

    def __init__(self, webhook_api: str, events: list):
        self.webhook_api = webhook_api
        # Events are stored as strings so membership tests work regardless of
        # whether callers pass Events members or plain strings.
        self.events = [str(e) for e in events]

    def send(self, message: str, event: Events) -> None:
        """POST `message` to the webhook if `event` is subscribed; no-op otherwise."""
        if str(event) not in self.events:
            return
        payload = {
            "content": dedent(message),
            "username": "Twitch Channel Points Miner",
            "avatar_url": "https://i.imgur.com/X9fEkhT.png",
        }
        requests.post(url=self.webhook_api, data=payload)
|
||||
14
TwitchChannelPointsMiner/classes/Exceptions.py
Normal file
14
TwitchChannelPointsMiner/classes/Exceptions.py
Normal file
@ -0,0 +1,14 @@
|
||||
class StreamerDoesNotExistException(Exception):
    """Raised when a requested Twitch username cannot be resolved."""
    pass


class StreamerIsOfflineException(Exception):
    """Raised when an operation requires the streamer to be live."""
    pass


class WrongCookiesException(Exception):
    """Raised when stored authentication cookies are invalid or expired."""
    pass


class BadCredentialsException(Exception):
    """Raised when the provided login credentials are rejected."""
    pass
|
||||
40
TwitchChannelPointsMiner/classes/Matrix.py
Normal file
40
TwitchChannelPointsMiner/classes/Matrix.py
Normal file
@ -0,0 +1,40 @@
|
||||
from textwrap import dedent
|
||||
|
||||
import logging
|
||||
import requests
|
||||
from urllib.parse import quote
|
||||
|
||||
from TwitchChannelPointsMiner.classes.Settings import Events
|
||||
|
||||
|
||||
class Matrix(object):
    """Sends miner event notifications to a Matrix room.

    Logs in with username/password at construction time. If the login
    fails (bad password, network error, malformed response), notifications
    are disabled: access_token stays None and a message is logged instead
    of crashing miner start-up.
    """

    __slots__ = ["access_token", "homeserver", "room_id", "events"]

    def __init__(self, username: str, password: str, homeserver: str, room_id: str, events: list):
        self.homeserver = homeserver
        self.room_id = quote(room_id)
        self.events = [str(e) for e in events]

        # Password login against the Matrix client-server r0 API.
        # A network failure or non-JSON body must not abort start-up, so
        # it is treated like a bad password: no token, no notifications.
        try:
            body = requests.post(
                url=f"https://{self.homeserver}/_matrix/client/r0/login",
                json={
                    "user": username,
                    "password": password,
                    "type": "m.login.password"
                }
            ).json()
            self.access_token = body.get("access_token")
        except (requests.exceptions.RequestException, ValueError):
            self.access_token = None

        if not self.access_token:
            logging.getLogger(__name__).info("Invalid Matrix password provided. Notifications will not be sent.")

    def send(self, message: str, event: Events) -> None:
        if str(event) in self.events:
            requests.post(
                url=f"https://{self.homeserver}/_matrix/client/r0/rooms/{self.room_id}/send/m.room.message?access_token={self.access_token}",
                json={
                    "body": dedent(message),
                    "msgtype": "m.text"
                }
            )
|
||||
30
TwitchChannelPointsMiner/classes/Pushover.py
Normal file
30
TwitchChannelPointsMiner/classes/Pushover.py
Normal file
@ -0,0 +1,30 @@
|
||||
from textwrap import dedent
|
||||
|
||||
import requests
|
||||
|
||||
from TwitchChannelPointsMiner.classes.Settings import Events
|
||||
|
||||
|
||||
class Pushover(object):
    """Pushes selected miner event notifications through the Pushover API."""

    __slots__ = ["userkey", "token", "priority", "sound", "events"]

    def __init__(self, userkey: str, token: str, priority, sound, events: list):
        self.userkey = userkey
        self.token = token
        self.priority = priority
        self.sound = sound
        # Events are compared by name, so store them as strings.
        self.events = [str(e) for e in events]

    def send(self, message: str, event: Events) -> None:
        # Only forward events the user explicitly subscribed to.
        if str(event) not in self.events:
            return
        payload = {
            "user": self.userkey,
            "token": self.token,
            "message": dedent(message),
            "title": "Twitch Channel Points Miner",
            "priority": self.priority,
            "sound": self.sound,
        }
        requests.post(url="https://api.pushover.net/1/messages.json", data=payload)
|
||||
53
TwitchChannelPointsMiner/classes/Settings.py
Normal file
53
TwitchChannelPointsMiner/classes/Settings.py
Normal file
@ -0,0 +1,53 @@
|
||||
from enum import Enum, auto
|
||||
|
||||
|
||||
class Priority(Enum):
    # Watch-priority strategies used to pick which (max two) streamers
    # receive "minute watched" events each cycle.
    ORDER = auto()              # keep the order the streamers were listed in
    STREAK = auto()             # prefer streamers with a pending watch streak
    DROPS = auto()              # prefer streamers with active drop campaigns
    SUBSCRIBED = auto()         # prefer streamers with a points multiplier
    POINTS_ASCENDING = auto()   # prefer the lowest current points balance
    POINTS_DESCEDING = auto()   # sic: misspelling kept -- it is a public API name
|
||||
|
||||
|
||||
class FollowersOrder(Enum):
    # Sort direction used when fetching the followed-channels list.
    ASC = auto()
    DESC = auto()

    def __str__(self):
        # GraphQL variables expect the bare member name ("ASC"/"DESC").
        return self.name
|
||||
|
||||
|
||||
# Empty object shared between classes: attributes are assigned once at
# start-up and read from anywhere (process-wide mutable shared state).
class Settings(object):
    # logger: LoggerSettings instance (carries telegram/discord/... handlers)
    # streamer_settings: default StreamerSettings applied to each streamer
    # remaining flags: global feature toggles set by the entry point
    __slots__ = ["logger", "streamer_settings",
                 "enable_analytics", "disable_ssl_cert_verification", "disable_at_in_nickname"]
|
||||
|
||||
|
||||
class Events(Enum):
    """Loggable miner event types; notification handlers filter on these."""

    STREAMER_ONLINE = auto()
    STREAMER_OFFLINE = auto()
    GAIN_FOR_RAID = auto()
    GAIN_FOR_CLAIM = auto()
    GAIN_FOR_WATCH = auto()
    GAIN_FOR_WATCH_STREAK = auto()
    BET_WIN = auto()
    BET_LOSE = auto()
    BET_REFUND = auto()
    BET_FILTERS = auto()
    BET_GENERAL = auto()
    BET_FAILED = auto()
    BET_START = auto()
    BONUS_CLAIM = auto()
    MOMENT_CLAIM = auto()
    JOIN_RAID = auto()
    DROP_CLAIM = auto()
    DROP_STATUS = auto()
    CHAT_MENTION = auto()

    def __str__(self):
        # Handlers and user-supplied event lists match on the bare name.
        return self.name

    @classmethod
    def get(cls, key):
        """Return the attribute named str(key), or None if it does not exist.

        getattr with a default replaces the old ``str(key) in dir(cls)``
        scan: same result, single O(1) lookup instead of an O(n) dir() scan.
        """
        return getattr(cls, str(key), None)
|
||||
29
TwitchChannelPointsMiner/classes/Telegram.py
Normal file
29
TwitchChannelPointsMiner/classes/Telegram.py
Normal file
@ -0,0 +1,29 @@
|
||||
from textwrap import dedent
|
||||
|
||||
import requests
|
||||
|
||||
from TwitchChannelPointsMiner.classes.Settings import Events
|
||||
|
||||
|
||||
class Telegram(object):
    """Sends selected miner event notifications to a Telegram chat via the Bot API."""

    __slots__ = ["chat_id", "telegram_api", "events", "disable_notification"]

    def __init__(
        self, chat_id: int, token: str, events: list, disable_notification: bool = False
    ):
        self.chat_id = chat_id
        self.telegram_api = f"https://api.telegram.org/bot{token}/sendMessage"
        # Events are compared by name, so store them as strings.
        self.events = [str(e) for e in events]
        self.disable_notification = disable_notification

    def send(self, message: str, event: Events) -> None:
        # Only forward events the user explicitly subscribed to.
        if str(event) not in self.events:
            return
        payload = {
            "chat_id": self.chat_id,
            "text": dedent(message),
            "disable_web_page_preview": True,  # include link to twitch streamer?
            "disable_notification": self.disable_notification,  # no sound, notif just in tray
        }
        requests.post(url=self.telegram_api, data=payload)
|
||||
859
TwitchChannelPointsMiner/classes/Twitch.py
Normal file
859
TwitchChannelPointsMiner/classes/Twitch.py
Normal file
@ -0,0 +1,859 @@
|
||||
# For documentation on Twitch GraphQL API see:
|
||||
# https://www.apollographql.com/docs/
|
||||
# https://github.com/mauricew/twitch-graphql-api
|
||||
# Full list of available methods: https://azr.ivr.fi/schema/query.doc.html (a bit outdated)
|
||||
|
||||
|
||||
import copy
|
||||
import logging
|
||||
import os
|
||||
import random
|
||||
import re
|
||||
import string
|
||||
import time
|
||||
# from datetime import datetime
|
||||
from pathlib import Path
|
||||
from secrets import choice, token_hex
|
||||
|
||||
# import json
|
||||
# from base64 import urlsafe_b64decode
|
||||
|
||||
import requests
|
||||
|
||||
from TwitchChannelPointsMiner.classes.entities.Campaign import Campaign
|
||||
from TwitchChannelPointsMiner.classes.entities.Drop import Drop
|
||||
from TwitchChannelPointsMiner.classes.Exceptions import (
|
||||
StreamerDoesNotExistException,
|
||||
StreamerIsOfflineException,
|
||||
)
|
||||
from TwitchChannelPointsMiner.classes.Settings import (
|
||||
Events,
|
||||
FollowersOrder,
|
||||
Priority,
|
||||
Settings,
|
||||
)
|
||||
from TwitchChannelPointsMiner.classes.TwitchLogin import TwitchLogin
|
||||
from TwitchChannelPointsMiner.constants import (
|
||||
CLIENT_ID,
|
||||
CLIENT_VERSION,
|
||||
URL,
|
||||
GQLOperations,
|
||||
)
|
||||
from TwitchChannelPointsMiner.utils import (
|
||||
_millify,
|
||||
create_chunks,
|
||||
internet_connection_available,
|
||||
)
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class Twitch(object):
|
||||
__slots__ = [
|
||||
"cookies_file",
|
||||
"user_agent",
|
||||
"twitch_login",
|
||||
"running",
|
||||
"device_id",
|
||||
# "integrity",
|
||||
# "integrity_expire",
|
||||
"client_session",
|
||||
"client_version",
|
||||
"twilight_build_id_pattern",
|
||||
]
|
||||
|
||||
def __init__(self, username, user_agent, password=None):
    # Cookies are persisted under ./cookies/<username>.pkl so later runs
    # can skip the interactive login flow.
    cookies_path = os.path.join(Path().absolute(), "cookies")
    Path(cookies_path).mkdir(parents=True, exist_ok=True)
    self.cookies_file = os.path.join(cookies_path, f"{username}.pkl")
    self.user_agent = user_agent
    # Random 32-char device id, sent as X-Device-Id on every GQL request.
    self.device_id = "".join(
        choice(string.ascii_letters + string.digits) for _ in range(32)
    )
    self.twitch_login = TwitchLogin(
        CLIENT_ID, self.device_id, username, self.user_agent, password=password
    )
    self.running = True
    # self.integrity = None
    # self.integrity_expire = 0
    self.client_session = token_hex(16)
    self.client_version = CLIENT_VERSION
    # Matches window.__twilightBuildID="<uuid4>" in the twitch.tv HTML;
    # used by update_client_version() to keep Client-Version current.
    self.twilight_build_id_pattern = re.compile(
        r"window\.__twilightBuildID=\"([0-9A-Fa-f]{8}-[0-9A-Fa-f]{4}-4[0-9A-Fa-f]{3}-[89ABab][0-9A-Fa-f]{3}-[0-9A-Fa-f]{12})\";"
    )
|
||||
|
||||
def login(self):
    # First run (no cookie jar yet): run the full login flow and persist
    # the cookies on success. Later runs: restore the saved cookies and
    # re-apply the auth token from them.
    if not os.path.isfile(self.cookies_file):
        if self.twitch_login.login_flow():
            self.twitch_login.save_cookies(self.cookies_file)
    else:
        self.twitch_login.load_cookies(self.cookies_file)
        self.twitch_login.set_token(self.twitch_login.get_auth_token())
|
||||
|
||||
# === STREAMER / STREAM / INFO === #
|
||||
def update_stream(self, streamer):
    # Refresh the streamer's stream metadata (title/game/tags/viewers)
    # and rebuild the "minute-watched" beacon payload.
    if streamer.stream.update_required() is True:
        stream_info = self.get_stream_info(streamer)
        if stream_info is not None:
            streamer.stream.update(
                broadcast_id=stream_info["stream"]["id"],
                title=stream_info["broadcastSettings"]["title"],
                game=stream_info["broadcastSettings"]["game"],
                tags=stream_info["stream"]["tags"],
                viewers_count=stream_info["stream"]["viewersCount"],
            )

            event_properties = {
                "channel_id": streamer.channel_id,
                "broadcast_id": streamer.stream.broadcast_id,
                "player": "site",
                "user_id": self.twitch_login.get_user_id(),
                "live": True,
                "channel": streamer.username
            }

            # Only attach game info when drops are actually being tracked
            # for this streamer.
            if (
                streamer.stream.game_name() is not None
                and streamer.stream.game_id() is not None
                and streamer.settings.claim_drops is True
            ):
                event_properties["game"] = streamer.stream.game_name()
                event_properties["game_id"] = streamer.stream.game_id()
                # Update also the campaigns_ids so we are sure to be tracking the correct campaign
                streamer.stream.campaigns_ids = (
                    self.__get_campaign_ids_from_streamer(streamer)
                )

            streamer.stream.payload = [
                {"event": "minute-watched", "properties": event_properties}
            ]
|
||||
|
||||
def get_spade_url(self, streamer):
    """Extract the spade (analytics beacon) endpoint URL for *streamer*.

    Scrapes the channel page for the settings.js URL, then scrapes that
    script for "spade_url", storing the result on streamer.stream.spade_url.
    Logs and returns early on any failure instead of raising.
    """
    try:
        # fixes AttributeError: 'NoneType' object has no attribute 'group'
        # headers = {"User-Agent": self.user_agent}
        from TwitchChannelPointsMiner.constants import USER_AGENTS
        headers = {"User-Agent": USER_AGENTS["Linux"]["FIREFOX"]}

        main_page_request = requests.get(
            streamer.streamer_url, headers=headers)
        response = main_page_request.text
        regex_settings = "(https://static.twitchcdn.net/config/settings.*?js)"
        settings_match = re.search(regex_settings, response)
        if settings_match is None:
            # Page layout changed (or we got an interstitial): bail out
            # instead of crashing on .group(1) of a None match.
            logger.error(
                "Something went wrong during extraction of 'spade_url': no settings.js match")
            return
        settings_url = settings_match.group(1)

        settings_request = requests.get(settings_url, headers=headers)
        response = settings_request.text
        regex_spade = '"spade_url":"(.*?)"'
        spade_match = re.search(regex_spade, response)
        if spade_match is None:
            logger.error(
                "Something went wrong during extraction of 'spade_url': no spade_url match")
            return
        streamer.stream.spade_url = spade_match.group(1)
    except requests.exceptions.RequestException as e:
        logger.error(
            f"Something went wrong during extraction of 'spade_url': {e}")
|
||||
|
||||
def get_broadcast_id(self, streamer):
    """Return the id of *streamer*'s live broadcast.

    Raises StreamerIsOfflineException when the channel has no live stream;
    returns None when the GQL request itself failed (empty response).
    """
    query = copy.deepcopy(GQLOperations.WithIsStreamLiveQuery)
    query["variables"] = {"id": streamer.channel_id}
    reply = self.post_gql_request(query)
    if reply == {}:
        return None
    stream = reply["data"]["user"]["stream"]
    if stream is None:
        raise StreamerIsOfflineException
    return stream["id"]
|
||||
|
||||
def get_stream_info(self, streamer):
    """Return the GQL "user" object carrying current stream/broadcast info.

    Raises StreamerIsOfflineException if the channel is not live;
    returns None when the GQL request itself failed (empty response).
    """
    query = copy.deepcopy(
        GQLOperations.VideoPlayerStreamInfoOverlayChannel)
    query["variables"] = {"channel": streamer.username}
    reply = self.post_gql_request(query)
    if reply == {}:
        return None
    user = reply["data"]["user"]
    if user["stream"] is None:
        raise StreamerIsOfflineException
    return user
|
||||
|
||||
def check_streamer_online(self, streamer):
    # Debounce: don't re-check a channel that went offline less than
    # 60 seconds ago.
    if time.time() < streamer.offline_at + 60:
        return

    if streamer.is_online is False:
        try:
            # Coming online: (re)resolve the spade endpoint first, then
            # pull fresh stream metadata.
            self.get_spade_url(streamer)
            self.update_stream(streamer)
        except StreamerIsOfflineException:
            streamer.set_offline()
        else:
            # Only flip to online when both calls completed without
            # raising StreamerIsOfflineException.
            streamer.set_online()
    else:
        try:
            self.update_stream(streamer)
        except StreamerIsOfflineException:
            streamer.set_offline()
|
||||
|
||||
def get_channel_id(self, streamer_username):
    """Resolve a login name to its numeric channel id.

    Raises StreamerDoesNotExistException when Twitch knows no such user.
    """
    query = copy.deepcopy(GQLOperations.ReportMenuItem)
    query["variables"] = {"channelLogin": streamer_username}
    reply = self.post_gql_request(query)
    # Guard clause instead of if/else: any missing or null "user" node
    # means the login could not be resolved.
    if (
        "data" not in reply
        or "user" not in reply["data"]
        or reply["data"]["user"] is None
    ):
        raise StreamerDoesNotExistException
    return reply["data"]["user"]["id"]
|
||||
|
||||
def get_followers(
    self, limit: int = 100, order: FollowersOrder = FollowersOrder.ASC
):
    """Return the lowercase login names of every channel we follow.

    Pages through the ChannelFollows GQL query *limit* entries at a time.
    On a malformed response, gives up and returns an empty list.
    """
    query = copy.deepcopy(GQLOperations.ChannelFollows)
    query["variables"] = {"limit": limit, "order": str(order)}
    logins = []
    cursor = ""
    while True:
        query["variables"]["cursor"] = cursor
        reply = self.post_gql_request(query)
        try:
            page = reply["data"]["user"]["follows"]
            cursor = None
            for edge in page["edges"]:
                logins.append(edge["node"]["login"].lower())
                cursor = edge["cursor"]
            if page["pageInfo"]["hasNextPage"] is not True:
                return logins
        except KeyError:
            return []
|
||||
|
||||
def update_raid(self, streamer, raid):
    """Join *raid* via the JoinRaid mutation, once per distinct raid."""
    if streamer.raid == raid:
        return  # already joined this raid, nothing to do
    streamer.raid = raid
    mutation = copy.deepcopy(GQLOperations.JoinRaid)
    mutation["variables"] = {"input": {"raidID": raid.raid_id}}
    self.post_gql_request(mutation)

    logger.info(
        f"Joining raid from {streamer} to {raid.target_login}!",
        extra={"emoji": ":performing_arts:",
               "event": Events.JOIN_RAID},
    )
|
||||
|
||||
def viewer_is_mod(self, streamer):
    """Query whether our account moderates *streamer*'s channel and cache it."""
    query = copy.deepcopy(GQLOperations.ModViewChannelQuery)
    query["variables"] = {"channelLogin": streamer.username}
    reply = self.post_gql_request(query)
    try:
        is_mod = reply["data"]["user"]["self"]["isModerator"]
    except (ValueError, KeyError):
        # Malformed/empty reply: assume not a moderator.
        is_mod = False
    streamer.viewer_is_mod = is_mod
|
||||
|
||||
# === 'GLOBALS' METHODS === #
|
||||
# Create chunk of sleep of speed-up the break loop after CTRL+C
|
||||
def __chuncked_sleep(self, seconds, chunk_size=3):
    # Sleep in chunk_size slices so a shutdown (self.running set False,
    # e.g. after CTRL+C) is noticed without waiting the full interval.
    # (Name misspelling kept: it is called throughout this class.)
    slice_duration = max(seconds, 0) / chunk_size
    for _ in range(chunk_size):
        time.sleep(slice_duration)
        if not self.running:
            break
|
||||
|
||||
def __check_connection_handler(self, chunk_size):
    # A request just failed even though the success rate is usually very
    # high -- most likely the connection dropped. Block here until
    # connectivity returns, re-testing every 1-3 minutes.
    while internet_connection_available() is False:
        random_sleep = random.randint(1, 3)
        logger.warning(
            f"No internet connection available! Retry after {random_sleep}m"
        )
        self.__chuncked_sleep(random_sleep * 60, chunk_size=chunk_size)
|
||||
|
||||
def post_gql_request(self, json_data):
    """POST one GQL operation (dict) or a batch (list of dicts) to Twitch.

    Returns the decoded JSON response, or {} when the request fails.
    """
    try:
        response = requests.post(
            GQLOperations.url,
            json=json_data,
            headers={
                "Authorization": f"OAuth {self.twitch_login.get_auth_token()}",
                "Client-Id": CLIENT_ID,
                # "Client-Integrity": self.post_integrity(),
                "Client-Session-Id": self.client_session,
                "Client-Version": self.update_client_version(),
                "User-Agent": self.user_agent,
                "X-Device-Id": self.device_id,
            },
        )
        logger.debug(
            f"Data: {json_data}, Status code: {response.status_code}, Content: {response.text}"
        )
        return response.json()
    except requests.exceptions.RequestException as e:
        # json_data may be a batched list (see __get_campaigns_details),
        # in which case json_data['operationName'] would raise TypeError
        # inside this error handler -- handle both payload shapes.
        if isinstance(json_data, dict):
            operation = json_data['operationName']
        else:
            operation = [op.get("operationName") for op in json_data]
        logger.error(
            f"Error with GQLOperations ({operation}): {e}"
        )
        return {}
|
||||
|
||||
# Request for Integrity Token
|
||||
# Twitch needs Authorization, Client-Id, X-Device-Id to generate JWT which is used for authorize gql requests
|
||||
# Regenerate Integrity Token 5 minutes before expire
|
||||
"""def post_integrity(self):
|
||||
if (
|
||||
self.integrity_expire - datetime.now().timestamp() * 1000 > 5 * 60 * 1000
|
||||
and self.integrity is not None
|
||||
):
|
||||
return self.integrity
|
||||
try:
|
||||
response = requests.post(
|
||||
GQLOperations.integrity_url,
|
||||
json={},
|
||||
headers={
|
||||
"Authorization": f"OAuth {self.twitch_login.get_auth_token()}",
|
||||
"Client-Id": CLIENT_ID,
|
||||
"Client-Session-Id": self.client_session,
|
||||
"Client-Version": self.update_client_version(),
|
||||
"User-Agent": self.user_agent,
|
||||
"X-Device-Id": self.device_id,
|
||||
},
|
||||
)
|
||||
logger.debug(
|
||||
f"Data: [], Status code: {response.status_code}, Content: {response.text}"
|
||||
)
|
||||
self.integrity = response.json().get("token", None)
|
||||
# logger.info(f"integrity: {self.integrity}")
|
||||
|
||||
if self.isBadBot(self.integrity) is True:
|
||||
logger.info(
|
||||
"Uh-oh, Twitch has detected this miner as a \"Bad Bot\". Don't worry.")
|
||||
|
||||
self.integrity_expire = response.json().get("expiration", 0)
|
||||
# logger.info(f"integrity_expire: {self.integrity_expire}")
|
||||
return self.integrity
|
||||
except requests.exceptions.RequestException as e:
|
||||
logger.error(f"Error with post_integrity: {e}")
|
||||
return self.integrity
|
||||
|
||||
# verify the integrity token's contents for the "is_bad_bot" flag
|
||||
def isBadBot(self, integrity):
|
||||
stripped_token: str = self.integrity.split('.')[2] + "=="
|
||||
messy_json: str = urlsafe_b64decode(
|
||||
stripped_token.encode()).decode(errors="ignore")
|
||||
match = re.search(r'(.+)(?<="}).+$', messy_json)
|
||||
if match is None:
|
||||
# raise MinerException("Unable to parse the integrity token")
|
||||
logger.info("Unable to parse the integrity token. Don't worry.")
|
||||
return
|
||||
decoded_header = json.loads(match.group(1))
|
||||
# logger.info(f"decoded_header: {decoded_header}")
|
||||
if decoded_header.get("is_bad_bot", "false") != "false":
|
||||
return True
|
||||
else:
|
||||
return False"""
|
||||
|
||||
def update_client_version(self):
    """Scrape twitch.tv for the current twilight build id.

    Updates and returns self.client_version; on any failure the cached
    value is returned unchanged.
    """
    try:
        response = requests.get(URL)
    except requests.exceptions.RequestException as e:
        logger.error(f"Error with update_client_version: {e}")
        return self.client_version

    if response.status_code != 200:
        logger.debug(
            f"Error with update_client_version: {response.status_code}"
        )
        return self.client_version

    match = re.search(self.twilight_build_id_pattern, response.text)
    if match is None:
        logger.debug("Error with update_client_version: no match")
        return self.client_version

    self.client_version = match.group(1)
    logger.debug(f"Client version: {self.client_version}")
    return self.client_version
|
||||
|
||||
def send_minute_watched_events(self, streamers, priority, chunk_size=3):
    # Main watch loop: once a minute, pick at most two online streamers
    # according to `priority` and fire their "minute watched" beacons at
    # the spade endpoint. Runs until self.running goes False.
    while self.running:
        try:
            # Indexes of online streamers, skipping ones that came online
            # less than 30s ago (their payload may not be ready yet).
            streamers_index = [
                i
                for i in range(0, len(streamers))
                if streamers[i].is_online is True
                and (
                    streamers[i].online_at == 0
                    or (time.time() - streamers[i].online_at) > 30
                )
            ]

            for index in streamers_index:
                if (streamers[index].stream.update_elapsed() / 60) > 10:
                    # Why is this user currently online but the last update was more than 10 minutes ago?
                    # Perform a manual update and check if the user is really online.
                    self.check_streamer_online(streamers[index])

            streamers_watching = []
            for prior in priority:
                if prior == Priority.ORDER and len(streamers_watching) < 2:
                    # Get the first 2 items, they are already in order
                    streamers_watching += streamers_index[:2]

                elif (
                    prior in [Priority.POINTS_ASCENDING,
                              Priority.POINTS_DESCEDING]
                    and len(streamers_watching) < 2
                ):
                    items = [
                        {"points": streamers[index].channel_points,
                         "index": index}
                        for index in streamers_index
                    ]
                    items = sorted(
                        items,
                        key=lambda x: x["points"],
                        reverse=(
                            True if prior == Priority.POINTS_DESCEDING else False
                        ),
                    )
                    streamers_watching += [item["index"]
                                           for item in items][:2]

                elif prior == Priority.STREAK and len(streamers_watching) < 2:
                    """
                    Check if we need need to change priority based on watch streak
                    Viewers receive points for returning for x consecutive streams.
                    Each stream must be at least 10 minutes long and it must have been at least 30 minutes since the last stream ended.
                    Watch at least 6m for get the +10
                    """
                    for index in streamers_index:
                        if (
                            streamers[index].settings.watch_streak is True
                            and streamers[index].stream.watch_streak_missing is True
                            and (
                                streamers[index].offline_at == 0
                                or (
                                    (time.time() -
                                     streamers[index].offline_at)
                                    // 60
                                )
                                > 30
                            )
                            and streamers[index].stream.minute_watched < 7  # fix #425
                        ):
                            streamers_watching.append(index)
                            if len(streamers_watching) == 2:
                                break

                elif prior == Priority.DROPS and len(streamers_watching) < 2:
                    for index in streamers_index:
                        if streamers[index].drops_condition() is True:
                            streamers_watching.append(index)
                            if len(streamers_watching) == 2:
                                break

                elif prior == Priority.SUBSCRIBED and len(streamers_watching) < 2:
                    streamers_with_multiplier = [
                        index
                        for index in streamers_index
                        if streamers[index].viewer_has_points_multiplier()
                    ]
                    streamers_with_multiplier = sorted(
                        streamers_with_multiplier,
                        key=lambda x: streamers[x].total_points_multiplier(
                        ),
                        reverse=True,
                    )
                    streamers_watching += streamers_with_multiplier[:2]

            """
            Twitch has a limit - you can't watch more than 2 channels at one time.
            We take the first two streamers from the list as they have the highest priority (based on order or WatchStreak).
            """
            streamers_watching = streamers_watching[:2]

            for index in streamers_watching:
                # Spread the beacons evenly across the minute.
                next_iteration = time.time() + 60 / len(streamers_watching)

                try:
                    response = requests.post(
                        streamers[index].stream.spade_url,
                        data=streamers[index].stream.encode_payload(),
                        headers={"User-Agent": self.user_agent},
                        timeout=60,
                    )
                    logger.debug(
                        f"Send minute watched request for {streamers[index]} - Status code: {response.status_code}"
                    )
                    if response.status_code == 204:
                        streamers[index].stream.update_minute_watched()

                        """
                        Remember, you can only earn progress towards a time-based Drop on one participating channel at a time.  [ ! ! ! ]
                        You can also check your progress towards Drops within a campaign anytime by viewing the Drops Inventory.
                        For time-based Drops, if you are unable to claim the Drop in time, you will be able to claim it from the inventory page until the Drops campaign ends.
                        """

                        for campaign in streamers[index].stream.campaigns:
                            for drop in campaign.drops:
                                # We could add .has_preconditions_met condition inside is_printable
                                if (
                                    drop.has_preconditions_met is not False
                                    and drop.is_printable is True
                                ):
                                    drop_messages = [
                                        f"{streamers[index]} is streaming {streamers[index].stream}",
                                        f"Campaign: {campaign}",
                                        f"Drop: {drop}",
                                        f"{drop.progress_bar()}",
                                    ]
                                    # Log line-by-line locally, but push one
                                    # combined message to each notifier.
                                    for single_line in drop_messages:
                                        logger.info(
                                            single_line,
                                            extra={
                                                "event": Events.DROP_STATUS,
                                                "skip_telegram": True,
                                                "skip_discord": True,
                                                "skip_webhook": True,
                                                "skip_matrix": True,
                                            },
                                        )

                                    if Settings.logger.telegram is not None:
                                        Settings.logger.telegram.send(
                                            "\n".join(drop_messages),
                                            Events.DROP_STATUS,
                                        )

                                    if Settings.logger.discord is not None:
                                        Settings.logger.discord.send(
                                            "\n".join(drop_messages),
                                            Events.DROP_STATUS,
                                        )
                                    if Settings.logger.webhook is not None:
                                        Settings.logger.webhook.send(
                                            "\n".join(drop_messages),
                                            Events.DROP_STATUS,
                                        )

                except requests.exceptions.ConnectionError as e:
                    logger.error(
                        f"Error while trying to send minute watched: {e}")
                    self.__check_connection_handler(chunk_size)
                except requests.exceptions.Timeout as e:
                    logger.error(
                        f"Error while trying to send minute watched: {e}")

                self.__chuncked_sleep(
                    next_iteration - time.time(), chunk_size=chunk_size
                )

            if streamers_watching == []:
                # Nobody online: idle a minute before re-scanning.
                self.__chuncked_sleep(60, chunk_size=chunk_size)
        except Exception:
            # Never let a transient error kill the watch thread.
            logger.error(
                "Exception raised in send minute watched", exc_info=True)
|
||||
|
||||
# === CHANNEL POINTS / PREDICTION === #
|
||||
# Load the amount of current points for a channel, check if a bonus is available
|
||||
def load_channel_points_context(self, streamer):
    # Fetch the current points balance and active multipliers; also claim
    # any bonus chest that is already waiting.
    json_data = copy.deepcopy(GQLOperations.ChannelPointsContext)
    json_data["variables"] = {"channelLogin": streamer.username}

    response = self.post_gql_request(json_data)
    if response != {}:
        if response["data"]["community"] is None:
            raise StreamerDoesNotExistException
        channel = response["data"]["community"]["channel"]
        community_points = channel["self"]["communityPoints"]
        streamer.channel_points = community_points["balance"]
        streamer.activeMultipliers = community_points["activeMultipliers"]

        # A non-null availableClaim means a bonus chest is waiting.
        if community_points["availableClaim"] is not None:
            self.claim_bonus(
                streamer, community_points["availableClaim"]["id"])
|
||||
|
||||
def make_predictions(self, event):
    # Decide how much to bet and on which outcome, then place the
    # prediction -- unless the event closed, a user filter vetoes it, or
    # the computed amount is below Twitch's minimum of 10 points.
    decision = event.bet.calculate(event.streamer.channel_points)
    # selector_index = 0 if decision["choice"] == "A" else 1

    logger.info(
        f"Going to complete bet for {event}",
        extra={
            "emoji": ":four_leaf_clover:",
            "event": Events.BET_GENERAL,
        },
    )
    if event.status == "ACTIVE":
        # User-configured filter conditions may veto the bet entirely.
        skip, compared_value = event.bet.skip()
        if skip is True:
            logger.info(
                f"Skip betting for the event {event}",
                extra={
                    "emoji": ":pushpin:",
                    "event": Events.BET_FILTERS,
                },
            )
            logger.info(
                f"Skip settings {event.bet.settings.filter_condition}, current value is: {compared_value}",
                extra={
                    "emoji": ":pushpin:",
                    "event": Events.BET_FILTERS,
                },
            )
        else:
            if decision["amount"] >= 10:
                logger.info(
                    # f"Place {_millify(decision['amount'])} channel points on: {event.bet.get_outcome(selector_index)}",
                    f"Place {_millify(decision['amount'])} channel points on: {event.bet.get_outcome(decision['choice'])}",
                    extra={
                        "emoji": ":four_leaf_clover:",
                        "event": Events.BET_GENERAL,
                    },
                )

                json_data = copy.deepcopy(GQLOperations.MakePrediction)
                json_data["variables"] = {
                    "input": {
                        "eventID": event.event_id,
                        "outcomeID": decision["id"],
                        "points": decision["amount"],
                        "transactionID": token_hex(16),
                    }
                }
                response = self.post_gql_request(json_data)
                # Twitch reports prediction failures inside the payload,
                # not via HTTP status -- check the embedded error object.
                if (
                    "data" in response
                    and "makePrediction" in response["data"]
                    and "error" in response["data"]["makePrediction"]
                    and response["data"]["makePrediction"]["error"] is not None
                ):
                    error_code = response["data"]["makePrediction"]["error"]["code"]
                    logger.error(
                        f"Failed to place bet, error: {error_code}",
                        extra={
                            "emoji": ":four_leaf_clover:",
                            "event": Events.BET_FAILED,
                        },
                    )
            else:
                logger.info(
                    f"Bet won't be placed as the amount {_millify(decision['amount'])} is less than the minimum required 10",
                    extra={
                        "emoji": ":four_leaf_clover:",
                        "event": Events.BET_GENERAL,
                    },
                )
    else:
        logger.info(
            f"Oh no! The event is not active anymore! Current status: {event.status}",
            extra={
                "emoji": ":disappointed_relieved:",
                "event": Events.BET_FAILED,
            },
        )
|
||||
|
||||
def claim_bonus(self, streamer, claim_id):
    """Claim a pending channel-points bonus chest for *streamer*."""
    if Settings.logger.less is False:
        logger.info(
            f"Claiming the bonus for {streamer}!",
            extra={"emoji": ":gift:", "event": Events.BONUS_CLAIM},
        )

    mutation = copy.deepcopy(GQLOperations.ClaimCommunityPoints)
    mutation["variables"] = {
        "input": {"channelID": streamer.channel_id, "claimID": claim_id}
    }
    self.post_gql_request(mutation)
|
||||
|
||||
# === MOMENTS === #
|
||||
def claim_moment(self, streamer, moment_id):
    """Claim a community "moment" badge offered on *streamer*'s stream."""
    if Settings.logger.less is False:
        logger.info(
            f"Claiming the moment for {streamer}!",
            extra={"emoji": ":video_camera:",
                   "event": Events.MOMENT_CLAIM},
        )

    mutation = copy.deepcopy(GQLOperations.CommunityMomentCallout_Claim)
    mutation["variables"] = {"input": {"momentID": moment_id}}
    self.post_gql_request(mutation)
|
||||
|
||||
# === CAMPAIGNS / DROPS / INVENTORY === #
|
||||
def __get_campaign_ids_from_streamer(self, streamer):
    """Return the ids of drop campaigns currently advertised on this channel."""
    query = copy.deepcopy(
        GQLOperations.DropsHighlightService_AvailableDrops)
    query["variables"] = {"channelID": streamer.channel_id}
    reply = self.post_gql_request(query)
    try:
        campaigns = reply["data"]["channel"]["viewerDropCampaigns"]
        return [] if campaigns is None else [c["id"] for c in campaigns]
    except (ValueError, KeyError):
        # Malformed/empty reply: behave as if no campaigns exist.
        return []
|
||||
|
||||
def __get_inventory(self):
    """Return the current user's drop inventory, or {} when unavailable."""
    reply = self.post_gql_request(GQLOperations.Inventory)
    try:
        if reply == {}:
            return {}
        return reply["data"]["currentUser"]["inventory"]
    except (ValueError, KeyError, TypeError):
        return {}
|
||||
|
||||
def __get_drops_dashboard(self, status=None):
    """Return all drop campaigns from the dashboard.

    When *status* is given (e.g. "active"), keep only campaigns whose
    status matches it (case-insensitive). Never returns None.
    """
    reply = self.post_gql_request(GQLOperations.ViewerDropsDashboard)
    campaigns = reply["data"]["currentUser"]["dropCampaigns"] or []

    if status is not None:
        wanted = status.upper()
        campaigns = [c for c in campaigns if c["status"] == wanted] or []

    return campaigns
|
||||
|
||||
def __get_campaigns_details(self, campaigns):
    # Fetch full details for each campaign, batching 20 GQL operations
    # per HTTP request (post_gql_request accepts a list payload and the
    # reply is then a list of per-operation results).
    result = []
    chunks = create_chunks(campaigns, 20)
    for chunk in chunks:
        json_data = []
        for campaign in chunk:
            json_data.append(copy.deepcopy(
                GQLOperations.DropCampaignDetails))
            json_data[-1]["variables"] = {
                "dropID": campaign["id"],
                "channelLogin": f"{self.twitch_login.get_user_id()}",
            }

        response = self.post_gql_request(json_data)
        for r in response:
            # Skip operations whose "user" node came back null.
            if r["data"]["user"] is not None:
                result.append(r["data"]["user"]["dropCampaign"])
    return result
|
||||
|
||||
def __sync_campaigns(self, campaigns):
|
||||
# We need the inventory only for get the real updated value/progress
|
||||
# Get data from inventory and sync current status with streamers.campaigns
|
||||
inventory = self.__get_inventory()
|
||||
if inventory not in [None, {}] and inventory["dropCampaignsInProgress"] not in [
|
||||
None,
|
||||
{},
|
||||
]:
|
||||
# Iterate all campaigns from dashboard (only active, with working drops)
|
||||
# In this array we have also the campaigns never started from us (not in nventory)
|
||||
for i in range(len(campaigns)):
|
||||
campaigns[i].clear_drops() # Remove all the claimed drops
|
||||
# Iterate all campaigns currently in progress from out inventory
|
||||
for progress in inventory["dropCampaignsInProgress"]:
|
||||
if progress["id"] == campaigns[i].id:
|
||||
campaigns[i].in_inventory = True
|
||||
campaigns[i].sync_drops(
|
||||
progress["timeBasedDrops"], self.claim_drop
|
||||
)
|
||||
# Remove all the claimed drops
|
||||
campaigns[i].clear_drops()
|
||||
break
|
||||
return campaigns
|
||||
|
||||
def claim_drop(self, drop):
|
||||
logger.info(
|
||||
f"Claim {drop}", extra={"emoji": ":package:", "event": Events.DROP_CLAIM}
|
||||
)
|
||||
|
||||
json_data = copy.deepcopy(GQLOperations.DropsPage_ClaimDropRewards)
|
||||
json_data["variables"] = {
|
||||
"input": {"dropInstanceID": drop.drop_instance_id}}
|
||||
response = self.post_gql_request(json_data)
|
||||
try:
|
||||
# response["data"]["claimDropRewards"] can be null and respose["data"]["errors"] != []
|
||||
# or response["data"]["claimDropRewards"]["status"] === DROP_INSTANCE_ALREADY_CLAIMED
|
||||
if ("claimDropRewards" in response["data"]) and (
|
||||
response["data"]["claimDropRewards"] is None
|
||||
):
|
||||
return False
|
||||
elif ("errors" in response["data"]) and (response["data"]["errors"] != []):
|
||||
return False
|
||||
elif ("claimDropRewards" in response["data"]) and (
|
||||
response["data"]["claimDropRewards"]["status"]
|
||||
in ["ELIGIBLE_FOR_ALL", "DROP_INSTANCE_ALREADY_CLAIMED"]
|
||||
):
|
||||
return True
|
||||
else:
|
||||
return False
|
||||
except (ValueError, KeyError):
|
||||
return False
|
||||
|
||||
def claim_all_drops_from_inventory(self):
|
||||
inventory = self.__get_inventory()
|
||||
if inventory not in [None, {}]:
|
||||
if inventory["dropCampaignsInProgress"] not in [None, {}]:
|
||||
for campaign in inventory["dropCampaignsInProgress"]:
|
||||
for drop_dict in campaign["timeBasedDrops"]:
|
||||
drop = Drop(drop_dict)
|
||||
drop.update(drop_dict["self"])
|
||||
if drop.is_claimable is True:
|
||||
drop.is_claimed = self.claim_drop(drop)
|
||||
time.sleep(random.uniform(5, 10))
|
||||
|
||||
    def sync_campaigns(self, streamers, chunk_size=3):
        """Background loop: periodically refresh drop campaigns and attach the
        relevant ones to each streamer's current stream.

        Runs until self.running goes False; sleeps ~60s between iterations.
        """
        campaigns_update = 0
        while self.running:
            try:
                # Get update from dashboard each 60minutes
                if (
                    campaigns_update == 0
                    # or ((time.time() - campaigns_update) / 60) > 60

                    # TEMPORARY AUTO DROP CLAIMING FIX
                    # 30 minutes instead of 60 minutes
                    # NOTE(review): /30 > 30 is actually 900s = 15 minutes,
                    # not 30 — confirm the intended refresh interval.
                    or ((time.time() - campaigns_update) / 30) > 30
                    #####################################
                ):
                    campaigns_update = time.time()

                    # TEMPORARY AUTO DROP CLAIMING FIX
                    self.claim_all_drops_from_inventory()
                    #####################################

                    # Get full details from current ACTIVE campaigns
                    # Use dashboard so we can explore new drops not currently active in our Inventory
                    campaigns_details = self.__get_campaigns_details(
                        self.__get_drops_dashboard(status="ACTIVE")
                    )
                    campaigns = []

                    # Going to clear array and structure. Remove all the timeBasedDrops expired or not started yet
                    for index in range(0, len(campaigns_details)):
                        if campaigns_details[index] is not None:
                            campaign = Campaign(campaigns_details[index])
                            if campaign.dt_match is True:
                                # Remove all the drops already claimed or with dt not matching
                                campaign.clear_drops()
                                if campaign.drops != []:
                                    campaigns.append(campaign)
                        else:
                            continue

                # Divide et impera :)
                # Refresh real progress from the inventory every iteration.
                campaigns = self.__sync_campaigns(campaigns)

                # Check if user It's currently streaming the same game present in campaigns_details
                for i in range(0, len(streamers)):
                    if streamers[i].drops_condition() is True:
                        # The streamer has drops_tags enabled and currently
                        # streams a game with an active campaign; campaigns_ids
                        # also guarantees the campaign is active on the channel.
                        streamers[i].stream.campaigns = list(
                            filter(
                                lambda x: x.drops != []
                                and x.game == streamers[i].stream.game
                                and x.id in streamers[i].stream.campaigns_ids,
                                campaigns,
                            )
                        )

            except (ValueError, KeyError, requests.exceptions.ConnectionError) as e:
                logger.error(f"Error while syncing inventory: {e}")
                self.__check_connection_handler(chunk_size)

            self.__chuncked_sleep(60, chunk_size=chunk_size)
|
||||
360
TwitchChannelPointsMiner/classes/TwitchLogin.py
Normal file
360
TwitchChannelPointsMiner/classes/TwitchLogin.py
Normal file
@ -0,0 +1,360 @@
|
||||
# Based on https://github.com/derrod/twl.py
|
||||
# Original Copyright (c) 2020 Rodney
|
||||
# The MIT License (MIT)
|
||||
|
||||
import copy
|
||||
# import getpass
|
||||
import logging
|
||||
import os
|
||||
import pickle
|
||||
|
||||
# import webbrowser
|
||||
# import browser_cookie3
|
||||
|
||||
import requests
|
||||
|
||||
from TwitchChannelPointsMiner.classes.Exceptions import (
|
||||
BadCredentialsException,
|
||||
WrongCookiesException,
|
||||
)
|
||||
from TwitchChannelPointsMiner.constants import CLIENT_ID, GQLOperations, USER_AGENTS
|
||||
|
||||
from datetime import datetime, timedelta, timezone
|
||||
from time import sleep
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
"""def interceptor(request) -> str:
|
||||
if (
|
||||
request.method == 'POST'
|
||||
and request.url == 'https://passport.twitch.tv/protected_login'
|
||||
):
|
||||
import json
|
||||
body = request.body.decode('utf-8')
|
||||
data = json.loads(body)
|
||||
data['client_id'] = CLIENT_ID
|
||||
request.body = json.dumps(data).encode('utf-8')
|
||||
del request.headers['Content-Length']
|
||||
request.headers['Content-Length'] = str(len(request.body))"""
|
||||
|
||||
|
||||
class TwitchLogin(object):
    """Handles the Twitch OAuth device-code login flow and cookie persistence."""

    # __slots__ keeps per-instance memory low and catches attribute typos.
    # BUGFIX: removed a duplicated "session" entry from the slot list.
    __slots__ = [
        "client_id",
        "device_id",
        "token",
        "login_check_result",
        "session",
        "username",
        "password",
        "user_id",
        "email",
        "cookies",
        "shared_cookies",
    ]
|
||||
|
||||
def __init__(self, client_id, device_id, username, user_agent, password=None):
|
||||
self.client_id = client_id
|
||||
self.device_id = device_id
|
||||
self.token = None
|
||||
self.login_check_result = False
|
||||
self.session = requests.session()
|
||||
self.session.headers.update(
|
||||
{"Client-ID": self.client_id,
|
||||
"X-Device-Id": self.device_id, "User-Agent": user_agent}
|
||||
)
|
||||
self.username = username
|
||||
self.password = password
|
||||
self.user_id = None
|
||||
self.email = None
|
||||
|
||||
self.cookies = []
|
||||
self.shared_cookies = []
|
||||
|
||||
def login_flow(self):
|
||||
logger.info("You'll have to login to Twitch!")
|
||||
|
||||
post_data = {
|
||||
"client_id": self.client_id,
|
||||
"scopes": (
|
||||
"channel_read chat:read user_blocks_edit "
|
||||
"user_blocks_read user_follows_edit user_read"
|
||||
)
|
||||
}
|
||||
# login-fix
|
||||
use_backup_flow = False
|
||||
# use_backup_flow = True
|
||||
while True:
|
||||
logger.info("Trying the TV login method..")
|
||||
|
||||
login_response = self.send_oauth_request(
|
||||
"https://id.twitch.tv/oauth2/device", post_data)
|
||||
|
||||
# {
|
||||
# "device_code": "40 chars [A-Za-z0-9]",
|
||||
# "expires_in": 1800,
|
||||
# "interval": 5,
|
||||
# "user_code": "8 chars [A-Z]",
|
||||
# "verification_uri": "https://www.twitch.tv/activate"
|
||||
# }
|
||||
|
||||
if login_response.status_code != 200:
|
||||
logger.error("TV login response is not 200. Try again")
|
||||
break
|
||||
|
||||
login_response_json = login_response.json()
|
||||
|
||||
if "user_code" in login_response_json:
|
||||
user_code: str = login_response_json["user_code"]
|
||||
now = datetime.now(timezone.utc)
|
||||
device_code: str = login_response_json["device_code"]
|
||||
interval: int = login_response_json["interval"]
|
||||
expires_at = now + \
|
||||
timedelta(seconds=login_response_json["expires_in"])
|
||||
logger.info(
|
||||
"Open https://www.twitch.tv/activate"
|
||||
)
|
||||
logger.info(
|
||||
f"and enter this code: {user_code}"
|
||||
)
|
||||
logger.info(
|
||||
f"Hurry up! It will expire in {int(login_response_json['expires_in'] / 60)} minutes!"
|
||||
)
|
||||
# twofa = input("2FA token: ")
|
||||
# webbrowser.open_new_tab("https://www.twitch.tv/activate")
|
||||
|
||||
post_data = {
|
||||
"client_id": CLIENT_ID,
|
||||
"device_code": device_code,
|
||||
"grant_type": "urn:ietf:params:oauth:grant-type:device_code",
|
||||
}
|
||||
|
||||
while True:
|
||||
# sleep first, not like the user is gonna enter the code *that* fast
|
||||
sleep(interval)
|
||||
login_response = self.send_oauth_request(
|
||||
"https://id.twitch.tv/oauth2/token", post_data)
|
||||
if now == expires_at:
|
||||
logger.error("Code expired. Try again")
|
||||
break
|
||||
# 200 means success, 400 means the user haven't entered the code yet
|
||||
if login_response.status_code != 200:
|
||||
continue
|
||||
# {
|
||||
# "access_token": "40 chars [A-Za-z0-9]",
|
||||
# "refresh_token": "40 chars [A-Za-z0-9]",
|
||||
# "scope": [...],
|
||||
# "token_type": "bearer"
|
||||
# }
|
||||
login_response_json = login_response.json()
|
||||
if "access_token" in login_response_json:
|
||||
self.set_token(login_response_json["access_token"])
|
||||
return self.check_login()
|
||||
# except RequestInvalid:
|
||||
# the device_code has expired, request a new code
|
||||
# continue
|
||||
# invalidate_after is not None
|
||||
# account for the expiration landing during the request
|
||||
# and datetime.now(timezone.utc) >= (invalidate_after - session_timeout)
|
||||
# ):
|
||||
# raise RequestInvalid()
|
||||
else:
|
||||
if "error_code" in login_response:
|
||||
err_code = login_response["error_code"]
|
||||
|
||||
logger.error(f"Unknown error: {login_response}")
|
||||
raise NotImplementedError(
|
||||
f"Unknown TwitchAPI error code: {err_code}"
|
||||
)
|
||||
|
||||
if use_backup_flow:
|
||||
break
|
||||
|
||||
if use_backup_flow:
|
||||
# self.set_token(self.login_flow_backup(password))
|
||||
self.set_token(self.login_flow_backup())
|
||||
return self.check_login()
|
||||
|
||||
return False
|
||||
|
||||
def set_token(self, new_token):
|
||||
self.token = new_token
|
||||
self.session.headers.update({"Authorization": f"Bearer {self.token}"})
|
||||
|
||||
# def send_login_request(self, json_data):
|
||||
def send_oauth_request(self, url, json_data):
|
||||
# response = self.session.post("https://passport.twitch.tv/protected_login", json=json_data)
|
||||
"""response = self.session.post("https://passport.twitch.tv/login", json=json_data, headers={
|
||||
'Accept': 'application/vnd.twitchtv.v3+json',
|
||||
'Accept-Encoding': 'gzip',
|
||||
'Accept-Language': 'en-US',
|
||||
'Content-Type': 'application/json; charset=UTF-8',
|
||||
'Host': 'passport.twitch.tv'
|
||||
},)"""
|
||||
response = self.session.post(url, data=json_data, headers={
|
||||
'Accept': 'application/json',
|
||||
'Accept-Encoding': 'gzip',
|
||||
'Accept-Language': 'en-US',
|
||||
"Cache-Control": "no-cache",
|
||||
"Client-Id": CLIENT_ID,
|
||||
"Host": "id.twitch.tv",
|
||||
"Origin": "https://android.tv.twitch.tv",
|
||||
"Pragma": "no-cache",
|
||||
"Referer": "https://android.tv.twitch.tv/",
|
||||
"User-Agent": USER_AGENTS["Android"]["TV"],
|
||||
"X-Device-Id": self.device_id
|
||||
},)
|
||||
return response
|
||||
|
||||
def login_flow_backup(self, password=None):
|
||||
"""Backup OAuth Selenium login
|
||||
from undetected_chromedriver import ChromeOptions
|
||||
import seleniumwire.undetected_chromedriver.v2 as uc
|
||||
from selenium.webdriver.common.by import By
|
||||
from time import sleep
|
||||
|
||||
HEADLESS = False
|
||||
|
||||
options = uc.ChromeOptions()
|
||||
if HEADLESS is True:
|
||||
options.add_argument('--headless')
|
||||
options.add_argument('--log-level=3')
|
||||
options.add_argument('--disable-web-security')
|
||||
options.add_argument('--allow-running-insecure-content')
|
||||
options.add_argument('--lang=en')
|
||||
options.add_argument('--no-sandbox')
|
||||
options.add_argument('--disable-gpu')
|
||||
# options.add_argument("--user-agent=\"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/107.0.0.0 Safari/537.36\"")
|
||||
# options.add_argument("--window-size=1920,1080")
|
||||
# options.set_capability("detach", True)
|
||||
|
||||
logger.info(
|
||||
'Now a browser window will open, it will login with your data.')
|
||||
driver = uc.Chrome(
|
||||
options=options, use_subprocess=True # , executable_path=EXECUTABLE_PATH
|
||||
)
|
||||
driver.request_interceptor = interceptor
|
||||
driver.get('https://www.twitch.tv/login')
|
||||
|
||||
driver.find_element(By.ID, 'login-username').send_keys(self.username)
|
||||
driver.find_element(By.ID, 'password-input').send_keys(password)
|
||||
sleep(0.3)
|
||||
driver.execute_script(
|
||||
'document.querySelector("#root > div > div.scrollable-area > div.simplebar-scroll-content > div > div > div > div.Layout-sc-nxg1ff-0.gZaqky > form > div > div:nth-child(3) > button > div > div").click()'
|
||||
)
|
||||
|
||||
logger.info(
|
||||
'Enter your verification code in the browser and wait for the Twitch website to load, then press Enter here.'
|
||||
)
|
||||
input()
|
||||
|
||||
logger.info("Extracting cookies...")
|
||||
self.cookies = driver.get_cookies()
|
||||
# print(self.cookies)
|
||||
# driver.close()
|
||||
driver.quit()
|
||||
self.username = self.get_cookie_value("login")
|
||||
# print(f"self.username: {self.username}")
|
||||
|
||||
if not self.username:
|
||||
logger.error("Couldn't extract login, probably bad cookies.")
|
||||
return False
|
||||
|
||||
return self.get_cookie_value("auth-token")"""
|
||||
|
||||
# logger.error("Backup login flow is not available. Use a VPN or wait a while to avoid the CAPTCHA.")
|
||||
# return False
|
||||
|
||||
"""Backup OAuth login flow in case manual captcha solving is required"""
|
||||
browser = input(
|
||||
"What browser do you use? Chrome (1), Firefox (2), Other (3): "
|
||||
).strip()
|
||||
if browser not in ("1", "2"):
|
||||
logger.info("Your browser is unsupported, sorry.")
|
||||
return None
|
||||
|
||||
input(
|
||||
"Please login inside your browser of choice (NOT incognito mode) and press Enter..."
|
||||
)
|
||||
logger.info("Loading cookies saved on your computer...")
|
||||
twitch_domain = ".twitch.tv"
|
||||
if browser == "1": # chrome
|
||||
cookie_jar = browser_cookie3.chrome(domain_name=twitch_domain)
|
||||
else:
|
||||
cookie_jar = browser_cookie3.firefox(domain_name=twitch_domain)
|
||||
# logger.info(f"cookie_jar: {cookie_jar}")
|
||||
cookies_dict = requests.utils.dict_from_cookiejar(cookie_jar)
|
||||
# logger.info(f"cookies_dict: {cookies_dict}")
|
||||
self.username = cookies_dict.get("login")
|
||||
self.shared_cookies = cookies_dict
|
||||
return cookies_dict.get("auth-token")
|
||||
|
||||
def check_login(self):
|
||||
if self.login_check_result:
|
||||
return self.login_check_result
|
||||
if self.token is None:
|
||||
return False
|
||||
|
||||
self.login_check_result = self.__set_user_id()
|
||||
return self.login_check_result
|
||||
|
||||
def save_cookies(self, cookies_file):
|
||||
logger.info("Saving cookies to your computer..")
|
||||
cookies_dict = self.session.cookies.get_dict()
|
||||
# print(f"cookies_dict2pickle: {cookies_dict}")
|
||||
cookies_dict["auth-token"] = self.token
|
||||
if "persistent" not in cookies_dict: # saving user id cookies
|
||||
cookies_dict["persistent"] = self.user_id
|
||||
|
||||
# old way saves only 'auth-token' and 'persistent'
|
||||
self.cookies = []
|
||||
# cookies_dict = self.shared_cookies
|
||||
# print(f"cookies_dict2pickle: {cookies_dict}")
|
||||
for cookie_name, value in cookies_dict.items():
|
||||
self.cookies.append({"name": cookie_name, "value": value})
|
||||
# print(f"cookies2pickle: {self.cookies}")
|
||||
pickle.dump(self.cookies, open(cookies_file, "wb"))
|
||||
|
||||
def get_cookie_value(self, key):
|
||||
for cookie in self.cookies:
|
||||
if cookie["name"] == key:
|
||||
if cookie["value"] is not None:
|
||||
return cookie["value"]
|
||||
return None
|
||||
|
||||
def load_cookies(self, cookies_file):
|
||||
if os.path.isfile(cookies_file):
|
||||
self.cookies = pickle.load(open(cookies_file, "rb"))
|
||||
else:
|
||||
raise WrongCookiesException("There must be a cookies file!")
|
||||
|
||||
def get_user_id(self):
|
||||
persistent = self.get_cookie_value("persistent")
|
||||
user_id = (
|
||||
int(persistent.split("%")[
|
||||
0]) if persistent is not None else self.user_id
|
||||
)
|
||||
if user_id is None:
|
||||
if self.__set_user_id() is True:
|
||||
return self.user_id
|
||||
return user_id
|
||||
|
||||
def __set_user_id(self):
|
||||
json_data = copy.deepcopy(GQLOperations.ReportMenuItem)
|
||||
json_data["variables"] = {"channelLogin": self.username}
|
||||
response = self.session.post(GQLOperations.url, json=json_data)
|
||||
|
||||
if response.status_code == 200:
|
||||
json_response = response.json()
|
||||
if (
|
||||
"data" in json_response
|
||||
and "user" in json_response["data"]
|
||||
and json_response["data"]["user"]["id"] is not None
|
||||
):
|
||||
self.user_id = json_response["data"]["user"]["id"]
|
||||
return True
|
||||
return False
|
||||
|
||||
    def get_auth_token(self):
        # The OAuth token is persisted under the "auth-token" cookie name.
        return self.get_cookie_value("auth-token")
|
||||
65
TwitchChannelPointsMiner/classes/TwitchWebSocket.py
Normal file
65
TwitchChannelPointsMiner/classes/TwitchWebSocket.py
Normal file
@ -0,0 +1,65 @@
|
||||
import json
|
||||
import logging
|
||||
import time
|
||||
|
||||
from websocket import WebSocketApp, WebSocketConnectionClosedException
|
||||
|
||||
from TwitchChannelPointsMiner.utils import create_nonce
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class TwitchWebSocket(WebSocketApp):
    """One PubSub WebSocket connection owned by a WebSocketsPool.

    Tracks its own open/closed/reconnecting state plus the topics it listens
    to, and deduplication info for the last message received.
    """

    def __init__(self, index, parent_pool, *args, **kw):
        super().__init__(*args, **kw)
        # Position of this socket inside parent_pool.ws.
        self.index = index

        self.parent_pool = parent_pool
        self.is_closed = False
        self.is_opened = False

        self.is_reconnecting = False
        self.forced_close = False

        # Custom attribute
        self.topics = []
        self.pending_topics = []

        # Convenience references taken from the owning pool.
        self.twitch = parent_pool.twitch
        self.streamers = parent_pool.streamers
        self.events_predictions = parent_pool.events_predictions

        # Used by on_message to drop duplicate PubSub messages.
        self.last_message_timestamp = None
        self.last_message_type_channel = None

        self.last_pong = time.time()
        self.last_ping = time.time()

    # def close(self):
    #     self.forced_close = True
    #     super().close()

    def listen(self, topic, auth_token=None):
        """Send a LISTEN frame for *topic*; user topics also carry the auth token."""
        data = {"topics": [str(topic)]}
        if topic.is_user_topic() and auth_token is not None:
            data["auth_token"] = auth_token
        nonce = create_nonce()
        self.send({"type": "LISTEN", "nonce": nonce, "data": data})

    def ping(self):
        """Send a PING frame and record when it was sent."""
        self.send({"type": "PING"})
        self.last_ping = time.time()

    def send(self, request):
        """JSON-encode *request* and send it; mark the socket closed on failure."""
        try:
            request_str = json.dumps(request, separators=(",", ":"))
            logger.debug(f"#{self.index} - Send: {request_str}")
            super().send(request_str)
        except WebSocketConnectionClosedException:
            # The keep-alive loop in WebSocketsPool.on_open watches this flag.
            self.is_closed = True

    def elapsed_last_pong(self):
        # Whole minutes since the last PONG was received.
        return (time.time() - self.last_pong) // 60

    def elapsed_last_ping(self):
        # Whole minutes since the last PING was sent.
        return (time.time() - self.last_ping) // 60
|
||||
434
TwitchChannelPointsMiner/classes/WebSocketsPool.py
Normal file
434
TwitchChannelPointsMiner/classes/WebSocketsPool.py
Normal file
@ -0,0 +1,434 @@
|
||||
import json
|
||||
import logging
|
||||
import random
|
||||
import time
|
||||
# import os
|
||||
from threading import Thread, Timer
|
||||
# from pathlib import Path
|
||||
|
||||
from dateutil import parser
|
||||
|
||||
from TwitchChannelPointsMiner.classes.entities.EventPrediction import EventPrediction
|
||||
from TwitchChannelPointsMiner.classes.entities.Message import Message
|
||||
from TwitchChannelPointsMiner.classes.entities.Raid import Raid
|
||||
from TwitchChannelPointsMiner.classes.Settings import Events, Settings
|
||||
from TwitchChannelPointsMiner.classes.TwitchWebSocket import TwitchWebSocket
|
||||
from TwitchChannelPointsMiner.constants import WEBSOCKET
|
||||
from TwitchChannelPointsMiner.utils import (
|
||||
get_streamer_index,
|
||||
internet_connection_available,
|
||||
)
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class WebSocketsPool:
    """Owns the set of PubSub WebSocket connections and fans topics out to them."""

    __slots__ = ["ws", "twitch", "streamers", "events_predictions"]

    def __init__(self, twitch, streamers, events_predictions):
        # List of TwitchWebSocket instances; each handles up to 50 topics.
        self.ws = []
        self.twitch = twitch
        self.streamers = streamers
        self.events_predictions = events_predictions
|
||||
|
||||
"""
|
||||
API Limits
|
||||
- Clients can listen to up to 50 topics per connection. Trying to listen to more topics will result in an error message.
|
||||
- We recommend that a single client IP address establishes no more than 10 simultaneous connections.
|
||||
The two limits above are likely to be relaxed for approved third-party applications, as we start to better understand third-party requirements.
|
||||
"""
|
||||
|
||||
def submit(self, topic):
|
||||
# Check if we need to create a new WebSocket instance
|
||||
if self.ws == [] or len(self.ws[-1].topics) >= 50:
|
||||
self.ws.append(self.__new(len(self.ws)))
|
||||
self.__start(-1)
|
||||
|
||||
self.__submit(-1, topic)
|
||||
|
||||
def __submit(self, index, topic):
|
||||
# Topic in topics should never happen. Anyway prevent any types of duplicates
|
||||
if topic not in self.ws[index].topics:
|
||||
self.ws[index].topics.append(topic)
|
||||
|
||||
if self.ws[index].is_opened is False:
|
||||
self.ws[index].pending_topics.append(topic)
|
||||
else:
|
||||
self.ws[index].listen(topic, self.twitch.twitch_login.get_auth_token())
|
||||
|
||||
    def __new(self, index):
        """Create (but do not start) a TwitchWebSocket wired to this pool's callbacks."""
        return TwitchWebSocket(
            index=index,
            parent_pool=self,
            url=WEBSOCKET,
            on_message=WebSocketsPool.on_message,
            on_open=WebSocketsPool.on_open,
            on_error=WebSocketsPool.on_error,
            on_close=WebSocketsPool.on_close
            # on_close=WebSocketsPool.handle_reconnection, # Do nothing.
        )
|
||||
|
||||
def __start(self, index):
|
||||
if Settings.disable_ssl_cert_verification is True:
|
||||
import ssl
|
||||
|
||||
thread_ws = Thread(
|
||||
target=lambda: self.ws[index].run_forever(
|
||||
sslopt={"cert_reqs": ssl.CERT_NONE}
|
||||
)
|
||||
)
|
||||
logger.warn("SSL certificate verification is disabled! Be aware!")
|
||||
else:
|
||||
thread_ws = Thread(target=lambda: self.ws[index].run_forever())
|
||||
thread_ws.daemon = True
|
||||
thread_ws.name = f"WebSocket #{self.ws[index].index}"
|
||||
thread_ws.start()
|
||||
|
||||
def end(self):
|
||||
for index in range(0, len(self.ws)):
|
||||
self.ws[index].forced_close = True
|
||||
self.ws[index].close()
|
||||
|
||||
    @staticmethod
    def on_open(ws):
        """websocket-client callback: mark the socket open, flush pending topics,
        and start a daemon keep-alive thread that pings every ~25-30s."""
        def run():
            ws.is_opened = True
            ws.ping()

            # Topics submitted before the connection finished opening.
            for topic in ws.pending_topics:
                ws.listen(topic, ws.twitch.twitch_login.get_auth_token())

            while ws.is_closed is False:
                # Else: the ws is currently in reconnecting phase, you can't do ping or other operation.
                # Probably this ws will be closed very soon with ws.is_closed = True
                if ws.is_reconnecting is False:
                    ws.ping()  # We need ping for keep the connection alive
                    time.sleep(random.uniform(25, 30))

                    if ws.elapsed_last_pong() > 5:
                        logger.info(
                            f"#{ws.index} - The last PONG was received more than 5 minutes ago"
                        )
                        WebSocketsPool.handle_reconnection(ws)

        thread_ws = Thread(target=run)
        thread_ws.daemon = True
        thread_ws.start()
|
||||
|
||||
    @staticmethod
    def on_error(ws, error):
        """websocket-client callback: just log; on_close handles the recovery."""
        # Connection lost | [WinError 10054] An existing connection was forcibly closed by the remote host
        # Connection already closed | Connection is already closed (raise WebSocketConnectionClosedException)
        logger.error(f"#{ws.index} - WebSocket error: {error}")
|
||||
|
||||
    @staticmethod
    def on_close(ws, close_status_code, close_reason):
        """websocket-client callback: always attempt an automatic reconnection."""
        logger.info(f"#{ws.index} - WebSocket closed")
        # On close please reconnect automatically
        WebSocketsPool.handle_reconnection(ws)
|
||||
|
||||
    @staticmethod
    def handle_reconnection(ws):
        """Tear down a dead socket and, unless it was force-closed, create a
        fresh one on the same pool slot and re-subscribe all of its topics."""
        # Reconnect only if ws.is_reconnecting is False to prevent more than 1 ws from being created
        if ws.is_reconnecting is False:
            # Close the current WebSocket.
            ws.is_closed = True
            ws.keep_running = False
            # Reconnect only if ws.forced_close is False (replace the keep_running)

            # Set the current socket as reconnecting status
            # So the external ping check will be locked
            ws.is_reconnecting = True

            if ws.forced_close is False:
                logger.info(
                    f"#{ws.index} - Reconnecting to Twitch PubSub server in ~60 seconds"
                )
                time.sleep(30)

                # Wait until connectivity returns before rebuilding the socket.
                while internet_connection_available() is False:
                    random_sleep = random.randint(1, 3)
                    logger.warning(
                        f"#{ws.index} - No internet connection available! Retry after {random_sleep}m"
                    )
                    time.sleep(random_sleep * 60)

                # Why not create a new ws on the same array index? Let's try.
                # NOTE: rebinds "self" to the owning pool (static-method trick).
                self = ws.parent_pool
                # Create a new connection.
                self.ws[ws.index] = self.__new(ws.index)

                self.__start(ws.index)  # Start a new thread.
                time.sleep(30)

                # Re-subscribe every topic the dead socket was listening to.
                for topic in ws.topics:
                    self.__submit(ws.index, topic)
|
||||
|
||||
@staticmethod
|
||||
def on_message(ws, message):
|
||||
logger.debug(f"#{ws.index} - Received: {message.strip()}")
|
||||
response = json.loads(message)
|
||||
|
||||
if response["type"] == "MESSAGE":
|
||||
# We should create a Message class ...
|
||||
message = Message(response["data"])
|
||||
|
||||
# If we have more than one PubSub connection, messages may be duplicated
|
||||
# Check the concatenation between message_type.top.channel_id
|
||||
if (
|
||||
ws.last_message_type_channel is not None
|
||||
and ws.last_message_timestamp is not None
|
||||
and ws.last_message_timestamp == message.timestamp
|
||||
and ws.last_message_type_channel == message.identifier
|
||||
):
|
||||
return
|
||||
|
||||
ws.last_message_timestamp = message.timestamp
|
||||
ws.last_message_type_channel = message.identifier
|
||||
|
||||
streamer_index = get_streamer_index(ws.streamers, message.channel_id)
|
||||
if streamer_index != -1:
|
||||
try:
|
||||
if message.topic == "community-points-user-v1":
|
||||
if message.type in ["points-earned", "points-spent"]:
|
||||
balance = message.data["balance"]["balance"]
|
||||
ws.streamers[streamer_index].channel_points = balance
|
||||
# Analytics switch
|
||||
if Settings.enable_analytics is True:
|
||||
ws.streamers[streamer_index].persistent_series(
|
||||
event_type=message.data["point_gain"]["reason_code"]
|
||||
if message.type == "points-earned"
|
||||
else "Spent"
|
||||
)
|
||||
|
||||
if message.type == "points-earned":
|
||||
earned = message.data["point_gain"]["total_points"]
|
||||
reason_code = message.data["point_gain"]["reason_code"]
|
||||
|
||||
logger.info(
|
||||
f"+{earned} → {ws.streamers[streamer_index]} - Reason: {reason_code}.",
|
||||
extra={
|
||||
"emoji": ":rocket:",
|
||||
"event": Events.get(f"GAIN_FOR_{reason_code}"),
|
||||
},
|
||||
)
|
||||
ws.streamers[streamer_index].update_history(
|
||||
reason_code, earned
|
||||
)
|
||||
# Analytics switch
|
||||
if Settings.enable_analytics is True:
|
||||
ws.streamers[streamer_index].persistent_annotations(
|
||||
reason_code, f"+{earned} - {reason_code}"
|
||||
)
|
||||
elif message.type == "claim-available":
|
||||
ws.twitch.claim_bonus(
|
||||
ws.streamers[streamer_index],
|
||||
message.data["claim"]["id"],
|
||||
)
|
||||
|
||||
elif message.topic == "video-playback-by-id":
|
||||
# There is stream-up message type, but it's sent earlier than the API updates
|
||||
if message.type == "stream-up":
|
||||
ws.streamers[streamer_index].stream_up = time.time()
|
||||
elif message.type == "stream-down":
|
||||
if ws.streamers[streamer_index].is_online is True:
|
||||
ws.streamers[streamer_index].set_offline()
|
||||
elif message.type == "viewcount":
|
||||
if ws.streamers[streamer_index].stream_up_elapsed():
|
||||
ws.twitch.check_streamer_online(
|
||||
ws.streamers[streamer_index]
|
||||
)
|
||||
|
||||
elif message.topic == "raid":
|
||||
if message.type == "raid_update_v2":
|
||||
raid = Raid(
|
||||
message.message["raid"]["id"],
|
||||
message.message["raid"]["target_login"],
|
||||
)
|
||||
ws.twitch.update_raid(ws.streamers[streamer_index], raid)
|
||||
|
||||
elif message.topic == "community-moments-channel-v1":
|
||||
if message.type == "active":
|
||||
ws.twitch.claim_moment(
|
||||
ws.streamers[streamer_index], message.data["moment_id"]
|
||||
)
|
||||
|
||||
elif message.topic == "predictions-channel-v1":
|
||||
|
||||
event_dict = message.data["event"]
|
||||
event_id = event_dict["id"]
|
||||
event_status = event_dict["status"]
|
||||
|
||||
current_tmsp = parser.parse(message.timestamp)
|
||||
|
||||
if (
|
||||
message.type == "event-created"
|
||||
and event_id not in ws.events_predictions
|
||||
):
|
||||
if event_status == "ACTIVE":
|
||||
prediction_window_seconds = float(
|
||||
event_dict["prediction_window_seconds"]
|
||||
)
|
||||
# Reduce prediction window by 3/6s - Collect more accurate data for decision
|
||||
prediction_window_seconds = ws.streamers[
|
||||
streamer_index
|
||||
].get_prediction_window(prediction_window_seconds)
|
||||
event = EventPrediction(
|
||||
ws.streamers[streamer_index],
|
||||
event_id,
|
||||
event_dict["title"],
|
||||
parser.parse(event_dict["created_at"]),
|
||||
prediction_window_seconds,
|
||||
event_status,
|
||||
event_dict["outcomes"],
|
||||
)
|
||||
if (
|
||||
ws.streamers[streamer_index].is_online
|
||||
and event.closing_bet_after(current_tmsp) > 0
|
||||
):
|
||||
streamer = ws.streamers[streamer_index]
|
||||
bet_settings = streamer.settings.bet
|
||||
if (
|
||||
bet_settings.minimum_points is None
|
||||
or streamer.channel_points
|
||||
> bet_settings.minimum_points
|
||||
):
|
||||
ws.events_predictions[event_id] = event
|
||||
start_after = event.closing_bet_after(
|
||||
current_tmsp
|
||||
)
|
||||
|
||||
place_bet_thread = Timer(
|
||||
start_after,
|
||||
ws.twitch.make_predictions,
|
||||
(ws.events_predictions[event_id],),
|
||||
)
|
||||
place_bet_thread.daemon = True
|
||||
place_bet_thread.start()
|
||||
|
||||
logger.info(
|
||||
f"Place the bet after: {start_after}s for: {ws.events_predictions[event_id]}",
|
||||
extra={
|
||||
"emoji": ":alarm_clock:",
|
||||
"event": Events.BET_START,
|
||||
},
|
||||
)
|
||||
else:
|
||||
logger.info(
|
||||
f"{streamer} have only {streamer.channel_points} channel points and the minimum for bet is: {bet_settings.minimum_points}",
|
||||
extra={
|
||||
"emoji": ":pushpin:",
|
||||
"event": Events.BET_FILTERS,
|
||||
},
|
||||
)
|
||||
|
||||
elif (
|
||||
message.type == "event-updated"
|
||||
and event_id in ws.events_predictions
|
||||
):
|
||||
ws.events_predictions[event_id].status = event_status
|
||||
# Game over we can't update anymore the values... The bet was placed!
|
||||
if (
|
||||
ws.events_predictions[event_id].bet_placed is False
|
||||
and ws.events_predictions[event_id].bet.decision == {}
|
||||
):
|
||||
ws.events_predictions[event_id].bet.update_outcomes(
|
||||
event_dict["outcomes"]
|
||||
)
|
||||
|
||||
elif message.topic == "predictions-user-v1":
|
||||
event_id = message.data["prediction"]["event_id"]
|
||||
if event_id in ws.events_predictions:
|
||||
event_prediction = ws.events_predictions[event_id]
|
||||
if (
|
||||
message.type == "prediction-result"
|
||||
and event_prediction.bet_confirmed
|
||||
):
|
||||
points = event_prediction.parse_result(
|
||||
message.data["prediction"]["result"]
|
||||
)
|
||||
|
||||
decision = event_prediction.bet.get_decision()
|
||||
choice = event_prediction.bet.decision["choice"]
|
||||
|
||||
logger.info(
|
||||
(
|
||||
f"{event_prediction} - Decision: {choice}: {decision['title']} "
|
||||
f"({decision['color']}) - Result: {event_prediction.result['string']}"
|
||||
),
|
||||
extra={
|
||||
"emoji": ":bar_chart:",
|
||||
"event": Events.get(
|
||||
f"BET_{event_prediction.result['type']}"
|
||||
),
|
||||
},
|
||||
)
|
||||
|
||||
ws.streamers[streamer_index].update_history(
|
||||
"PREDICTION", points["gained"]
|
||||
)
|
||||
|
||||
# Remove duplicate history records from previous message sent in community-points-user-v1
|
||||
if event_prediction.result["type"] == "REFUND":
|
||||
ws.streamers[streamer_index].update_history(
|
||||
"REFUND",
|
||||
-points["placed"],
|
||||
counter=-1,
|
||||
)
|
||||
elif event_prediction.result["type"] == "WIN":
|
||||
ws.streamers[streamer_index].update_history(
|
||||
"PREDICTION",
|
||||
-points["won"],
|
||||
counter=-1,
|
||||
)
|
||||
|
||||
if event_prediction.result["type"]:
|
||||
# Analytics switch
|
||||
if Settings.enable_analytics is True:
|
||||
ws.streamers[
|
||||
streamer_index
|
||||
].persistent_annotations(
|
||||
event_prediction.result["type"],
|
||||
f"{ws.events_predictions[event_id].title}",
|
||||
)
|
||||
elif message.type == "prediction-made":
|
||||
event_prediction.bet_confirmed = True
|
||||
# Analytics switch
|
||||
if Settings.enable_analytics is True:
|
||||
ws.streamers[streamer_index].persistent_annotations(
|
||||
"PREDICTION_MADE",
|
||||
f"Decision: {event_prediction.bet.decision['choice']} - {event_prediction.title}",
|
||||
)
|
||||
except Exception:
|
||||
logger.error(
|
||||
f"Exception raised for topic: {message.topic} and message: {message}",
|
||||
exc_info=True,
|
||||
)
|
||||
|
||||
elif response["type"] == "RESPONSE" and len(response.get("error", "")) > 0:
|
||||
# raise RuntimeError(f"Error while trying to listen for a topic: {response}")
|
||||
error_message = response.get("error", "")
|
||||
logger.error(f"Error while trying to listen for a topic: {error_message}")
|
||||
|
||||
# Check if the error message indicates an authentication issue (ERR_BADAUTH)
|
||||
if "ERR_BADAUTH" in error_message:
|
||||
# Inform the user about the potential outdated cookie file
|
||||
username = ws.twitch.twitch_login.username
|
||||
logger.error(f"Received the ERR_BADAUTH error, most likely you have an outdated cookie file \"cookies\\{username}.pkl\". Delete this file and try again.")
|
||||
# Attempt to delete the outdated cookie file
|
||||
# try:
|
||||
# cookie_file_path = os.path.join("cookies", f"{username}.pkl")
|
||||
# if os.path.exists(cookie_file_path):
|
||||
# os.remove(cookie_file_path)
|
||||
# logger.info(f"Deleted outdated cookie file for user: {username}")
|
||||
# else:
|
||||
# logger.warning(f"Cookie file not found for user: {username}")
|
||||
# except Exception as e:
|
||||
# logger.error(f"Error occurred while deleting cookie file: {str(e)}")
|
||||
|
||||
elif response["type"] == "RECONNECT":
|
||||
logger.info(f"#{ws.index} - Reconnection required")
|
||||
WebSocketsPool.handle_reconnection(ws)
|
||||
|
||||
elif response["type"] == "PONG":
|
||||
ws.last_pong = time.time()
|
||||
26
TwitchChannelPointsMiner/classes/Webhook.py
Normal file
26
TwitchChannelPointsMiner/classes/Webhook.py
Normal file
@ -0,0 +1,26 @@
|
||||
from textwrap import dedent
|
||||
|
||||
import requests
|
||||
|
||||
from TwitchChannelPointsMiner.classes.Settings import Events
|
||||
|
||||
|
||||
class Webhook(object):
    """Forwards whitelisted miner events to a user-configured HTTP endpoint."""

    __slots__ = ["endpoint", "method", "events"]

    def __init__(self, endpoint: str, method: str, events: list):
        self.endpoint = endpoint
        self.method = method
        # Normalize Events enum members (or strings) to their string form
        # so membership tests in send() are uniform.
        self.events = [str(e) for e in events]

    def send(self, message: str, event: Events) -> None:
        """Send `message` to the endpoint if `event` is whitelisted.

        Raises:
            ValueError: if the configured method is neither GET nor POST.
        """
        if str(event) in self.events:
            # Let `requests` build the query string: this URL-encodes the
            # message (the previous manual concatenation produced invalid
            # URLs for messages containing spaces, '&', '#', ...).
            params = {"event_name": str(event), "message": message}

            # A timeout prevents the miner from hanging forever on an
            # unresponsive webhook endpoint.
            if self.method.lower() == "get":
                requests.get(url=self.endpoint, params=params, timeout=10)
            elif self.method.lower() == "post":
                requests.post(url=self.endpoint, params=params, timeout=10)
            else:
                raise ValueError("Invalid method, use POST or GET")
|
||||
0
TwitchChannelPointsMiner/classes/__init__.py
Normal file
0
TwitchChannelPointsMiner/classes/__init__.py
Normal file
315
TwitchChannelPointsMiner/classes/entities/Bet.py
Normal file
315
TwitchChannelPointsMiner/classes/entities/Bet.py
Normal file
@ -0,0 +1,315 @@
|
||||
import copy
|
||||
from enum import Enum, auto
|
||||
from random import uniform
|
||||
|
||||
from millify import millify
|
||||
|
||||
#from TwitchChannelPointsMiner.utils import char_decision_as_index, float_round
|
||||
from TwitchChannelPointsMiner.utils import float_round
|
||||
|
||||
|
||||
class Strategy(Enum):
    """Betting strategies for choosing which prediction outcome to back."""

    MOST_VOTED = auto()
    HIGH_ODDS = auto()
    PERCENTAGE = auto()
    SMART_MONEY = auto()
    SMART = auto()

    def __str__(self) -> str:
        # Render as the bare member name ("SMART"), not "Strategy.SMART".
        return self.name
|
||||
|
||||
|
||||
class Condition(Enum):
    """Comparison operators usable in a FilterCondition (see Bet.skip)."""

    GT = auto()
    LT = auto()
    GTE = auto()
    LTE = auto()

    def __str__(self) -> str:
        # Render as the bare member name ("GTE"), not "Condition.GTE".
        return self.name
|
||||
|
||||
|
||||
class OutcomeKeys(object):
    """String constants naming the statistics tracked per prediction outcome."""

    # Keys stored directly on each outcome dict of a Bet.
    PERCENTAGE_USERS = "percentage_users"
    ODDS_PERCENTAGE = "odds_percentage"
    ODDS = "odds"
    TOP_POINTS = "top_points"
    # Keys stored per-outcome and also aggregated (summed) into Bet totals.
    TOTAL_USERS = "total_users"
    TOTAL_POINTS = "total_points"
    # Virtual keys: never present on the outcome dicts themselves; callers
    # map them back to the corresponding "total_*" key (see Bet.skip).
    DECISION_USERS = "decision_users"
    DECISION_POINTS = "decision_points"
|
||||
|
||||
|
||||
class DelayMode(Enum):
    """How BetSettings.delay is interpreted when scheduling the bet."""

    FROM_START = auto()
    FROM_END = auto()
    PERCENTAGE = auto()

    def __str__(self) -> str:
        # Render as the bare member name ("FROM_END"), not "DelayMode.FROM_END".
        return self.name
|
||||
|
||||
|
||||
class FilterCondition(object):
    """A single comparison used to decide whether a bet should be skipped.

    Attributes:
        by: the OutcomeKeys statistic to compare (e.g. "total_users").
        where: a Condition operator (GT / LT / GTE / LTE).
        value: the threshold to compare against.
    """

    __slots__ = [
        "by",
        "where",
        "value",
    ]

    def __init__(self, by=None, where=None, value=None, decision=None):
        # `decision` is accepted for backward compatibility but ignored:
        # the chosen outcome is only known at bet time, not configuration time.
        self.by = by
        self.where = where
        self.value = value

    def __repr__(self):
        # Guard against `by` being None (its default): the previous
        # unconditional `self.by.upper()` raised AttributeError.
        by = self.by.upper() if self.by is not None else None
        return f"FilterCondition(by={by}, where={self.where}, value={self.value})"
|
||||
|
||||
|
||||
class BetSettings(object):
    """Configuration governing how bets are sized, filtered and timed."""

    __slots__ = [
        "strategy",
        "percentage",
        "percentage_gap",
        "max_points",
        "minimum_points",
        "stealth_mode",
        "filter_condition",
        "delay",
        "delay_mode",
    ]

    def __init__(
        self,
        strategy: Strategy = None,
        percentage: int = None,
        percentage_gap: int = None,
        max_points: int = None,
        minimum_points: int = None,
        stealth_mode: bool = None,
        filter_condition: FilterCondition = None,
        delay: float = None,
        delay_mode: DelayMode = None,
    ):
        self.strategy = strategy
        self.percentage = percentage
        self.percentage_gap = percentage_gap
        self.max_points = max_points
        self.minimum_points = minimum_points
        self.stealth_mode = stealth_mode
        self.filter_condition = filter_condition
        self.delay = delay
        self.delay_mode = delay_mode

    def default(self):
        """Fill every still-unset (None) option with its default value.

        `filter_condition` deliberately has no default: None means
        "no filter configured".
        """
        fallbacks = {
            "strategy": Strategy.SMART,
            "percentage": 5,
            "percentage_gap": 20,
            "max_points": 50000,
            "minimum_points": 0,
            "stealth_mode": False,
            "delay": 6,
            "delay_mode": DelayMode.FROM_END,
        }
        for option, fallback in fallbacks.items():
            if getattr(self, option) is None:
                setattr(self, option, fallback)

    def __repr__(self):
        return f"BetSettings(strategy={self.strategy}, percentage={self.percentage}, percentage_gap={self.percentage_gap}, max_points={self.max_points}, minimum_points={self.minimum_points}, stealth_mode={self.stealth_mode})"
|
||||
|
||||
|
||||
class Bet(object):
    """Betting state for a single prediction event.

    Holds the (cleaned) outcome dicts, running totals across outcomes, and
    the final wager produced by calculate().
    """

    __slots__ = ["outcomes", "decision", "total_users", "total_points", "settings"]

    def __init__(self, outcomes: list, settings: BetSettings):
        self.outcomes = outcomes
        self.__clear_outcomes()
        self.decision: dict = {}
        self.total_users = 0
        self.total_points = 0
        self.settings = settings

    def update_outcomes(self, outcomes):
        """Refresh per-outcome stats from fresh PubSub data, then recompute
        totals, odds and user percentages.

        Assumes `outcomes` is ordered the same as self.outcomes —
        TODO confirm against the PubSub payload.
        """
        for index in range(0, len(self.outcomes)):
            self.outcomes[index][OutcomeKeys.TOTAL_USERS] = int(
                outcomes[index][OutcomeKeys.TOTAL_USERS]
            )
            self.outcomes[index][OutcomeKeys.TOTAL_POINTS] = int(
                outcomes[index][OutcomeKeys.TOTAL_POINTS]
            )
            if outcomes[index]["top_predictors"] != []:
                # Sort by points placed by other users (descending).
                outcomes[index]["top_predictors"] = sorted(
                    outcomes[index]["top_predictors"],
                    key=lambda x: x["points"],
                    reverse=True,
                )
                # Largest single wager on this outcome (used by stealth mode).
                top_points = outcomes[index]["top_predictors"][0]["points"]
                self.outcomes[index][OutcomeKeys.TOP_POINTS] = top_points

        # Recompute totals from scratch. Inefficient, but otherwise the
        # outcome keys would be represented wrong after repeated updates.
        self.total_points = 0
        self.total_users = 0
        for index in range(0, len(self.outcomes)):
            self.total_users += self.outcomes[index][OutcomeKeys.TOTAL_USERS]
            self.total_points += self.outcomes[index][OutcomeKeys.TOTAL_POINTS]

        if (
            self.total_users > 0
            and self.total_points > 0
        ):
            for index in range(0, len(self.outcomes)):
                # Share of users that picked this outcome, in percent.
                self.outcomes[index][OutcomeKeys.PERCENTAGE_USERS] = float_round(
                    (100 * self.outcomes[index][OutcomeKeys.TOTAL_USERS]) / self.total_users
                )
                # Decimal odds: total pool / points on this outcome
                # (0 when the outcome has no points, to avoid dividing by zero).
                self.outcomes[index][OutcomeKeys.ODDS] = float_round(
                    #self.total_points / max(self.outcomes[index][OutcomeKeys.TOTAL_POINTS], 1)
                    0
                    if self.outcomes[index][OutcomeKeys.TOTAL_POINTS] == 0
                    else self.total_points / self.outcomes[index][OutcomeKeys.TOTAL_POINTS]
                )
                # Implied win probability (percent) derived from the odds.
                self.outcomes[index][OutcomeKeys.ODDS_PERCENTAGE] = float_round(
                    #100 / max(self.outcomes[index][OutcomeKeys.ODDS], 1)
                    0
                    if self.outcomes[index][OutcomeKeys.ODDS] == 0
                    else 100 / self.outcomes[index][OutcomeKeys.ODDS]
                )

        self.__clear_outcomes()

    def __repr__(self):
        return f"Bet(total_users={millify(self.total_users)}, total_points={millify(self.total_points)}), decision={self.decision})\n\t\tOutcome A({self.get_outcome(0)})\n\t\tOutcome B({self.get_outcome(1)})"

    def get_decision(self, parsed=False):
        """Return the chosen outcome dict (or its string form when parsed=True)."""
        #decision = self.outcomes[0 if self.decision["choice"] == "A" else 1]
        decision = self.outcomes[self.decision["choice"]]
        return decision if parsed is False else Bet.__parse_outcome(decision)

    @staticmethod
    def __parse_outcome(outcome):
        # Human-readable one-line summary of a single outcome.
        return f"{outcome['title']} ({outcome['color']}), Points: {millify(outcome[OutcomeKeys.TOTAL_POINTS])}, Users: {millify(outcome[OutcomeKeys.TOTAL_USERS])} ({outcome[OutcomeKeys.PERCENTAGE_USERS]}%), Odds: {outcome[OutcomeKeys.ODDS]} ({outcome[OutcomeKeys.ODDS_PERCENTAGE]}%)"

    def get_outcome(self, index):
        """Return the string summary of the outcome at `index`."""
        return Bet.__parse_outcome(self.outcomes[index])

    def __clear_outcomes(self):
        """Drop unused keys from each outcome dict and default stat keys to 0."""
        for index in range(0, len(self.outcomes)):
            # Snapshot the keys: we delete from the dict while iterating.
            keys = copy.deepcopy(list(self.outcomes[index].keys()))
            for key in keys:
                if key not in [
                    OutcomeKeys.TOTAL_USERS,
                    OutcomeKeys.TOTAL_POINTS,
                    OutcomeKeys.TOP_POINTS,
                    OutcomeKeys.PERCENTAGE_USERS,
                    OutcomeKeys.ODDS,
                    OutcomeKeys.ODDS_PERCENTAGE,
                    "title",
                    "color",
                    "id",
                ]:
                    del self.outcomes[index][key]
            # Ensure every stat key exists so later arithmetic never KeyErrors.
            for key in [
                OutcomeKeys.PERCENTAGE_USERS,
                OutcomeKeys.ODDS,
                OutcomeKeys.ODDS_PERCENTAGE,
                OutcomeKeys.TOP_POINTS,
            ]:
                if key not in self.outcomes[index]:
                    self.outcomes[index][key] = 0

    '''def __return_choice(self, key) -> str:
        return "A" if self.outcomes[0][key] > self.outcomes[1][key] else "B"'''

    def __return_choice(self, key) -> int:
        # Index of the outcome with the largest value for `key`
        # (the first outcome wins ties).
        largest=0
        for index in range(0, len(self.outcomes)):
            if self.outcomes[index][key] > self.outcomes[largest][key]:
                largest = index
        return largest

    def skip(self) -> tuple:
        """Decide whether this bet should be skipped.

        Returns (skip, compared_value): skip is True when the configured
        FilterCondition is NOT satisfied; compared_value is the statistic
        that was tested (0 when no filter is configured).

        NOTE: despite the name, this returns a tuple, not a bool.
        """
        if self.settings.filter_condition is not None:
            # key == by , condition == where
            key = self.settings.filter_condition.by
            condition = self.settings.filter_condition.where
            value = self.settings.filter_condition.value

            # The "decision_*" virtual keys map onto the chosen outcome's
            # corresponding "total_*" stat.
            fixed_key = (
                key
                if key not in [OutcomeKeys.DECISION_USERS, OutcomeKeys.DECISION_POINTS]
                else key.replace("decision", "total")
            )
            if key in [OutcomeKeys.TOTAL_USERS, OutcomeKeys.TOTAL_POINTS]:
                # Aggregate over both outcomes.
                compared_value = (
                    self.outcomes[0][fixed_key] + self.outcomes[1][fixed_key]
                )
            else:
                #outcome_index = char_decision_as_index(self.decision["choice"])
                outcome_index = self.decision["choice"]
                compared_value = self.outcomes[outcome_index][fixed_key]

            # Check if condition is satisfied
            if condition == Condition.GT:
                if compared_value > value:
                    return False, compared_value
            elif condition == Condition.LT:
                if compared_value < value:
                    return False, compared_value
            elif condition == Condition.GTE:
                if compared_value >= value:
                    return False, compared_value
            elif condition == Condition.LTE:
                if compared_value <= value:
                    return False, compared_value
            return True, compared_value  # Else skip the bet
        else:
            return False, 0  # Default don't skip the bet

    def calculate(self, balance: int) -> dict:
        """Pick an outcome per the configured Strategy and size the wager.

        Returns the decision dict {"choice", "amount", "id"} (also stored
        on self.decision). "choice" is an outcome index.
        """
        self.decision = {"choice": None, "amount": 0, "id": None}
        if self.settings.strategy == Strategy.MOST_VOTED:
            self.decision["choice"] = self.__return_choice(OutcomeKeys.TOTAL_USERS)
        elif self.settings.strategy == Strategy.HIGH_ODDS:
            self.decision["choice"] = self.__return_choice(OutcomeKeys.ODDS)
        elif self.settings.strategy == Strategy.PERCENTAGE:
            self.decision["choice"] = self.__return_choice(OutcomeKeys.ODDS_PERCENTAGE)
        elif self.settings.strategy == Strategy.SMART_MONEY:
            self.decision["choice"] = self.__return_choice(OutcomeKeys.TOP_POINTS)
        elif self.settings.strategy == Strategy.SMART:
            # SMART: follow the odds when the crowd is nearly split,
            # otherwise follow the majority.
            difference = abs(
                self.outcomes[0][OutcomeKeys.PERCENTAGE_USERS]
                - self.outcomes[1][OutcomeKeys.PERCENTAGE_USERS]
            )
            self.decision["choice"] = (
                self.__return_choice(OutcomeKeys.ODDS)
                if difference < self.settings.percentage_gap
                else self.__return_choice(OutcomeKeys.TOTAL_USERS)
            )

        if self.decision["choice"] is not None:
            #index = char_decision_as_index(self.decision["choice"])
            index = self.decision["choice"]
            self.decision["id"] = self.outcomes[index]["id"]
            # Wager: a percentage of the balance, capped at max_points.
            self.decision["amount"] = min(
                int(balance * (self.settings.percentage / 100)),
                self.settings.max_points,
            )
            if (
                self.settings.stealth_mode is True
                and self.decision["amount"]
                >= self.outcomes[index][OutcomeKeys.TOP_POINTS]
            ):
                # Stealth mode: stay just below the current top bettor so
                # this bet does not top the leaderboard.
                reduce_amount = uniform(1, 5)
                self.decision["amount"] = (
                    self.outcomes[index][OutcomeKeys.TOP_POINTS] - reduce_amount
                )
            self.decision["amount"] = int(self.decision["amount"])
        return self.decision
|
||||
74
TwitchChannelPointsMiner/classes/entities/Campaign.py
Normal file
74
TwitchChannelPointsMiner/classes/entities/Campaign.py
Normal file
@ -0,0 +1,74 @@
|
||||
from datetime import datetime
|
||||
|
||||
from TwitchChannelPointsMiner.classes.entities.Drop import Drop
|
||||
from TwitchChannelPointsMiner.classes.Settings import Settings
|
||||
|
||||
|
||||
class Campaign(object):
    """A Twitch drops campaign together with its time-based drops."""

    __slots__ = [
        "id",
        "game",
        "name",
        "status",
        "in_inventory",
        "end_at",
        "start_at",
        "dt_match",
        "drops",
        "channels",
    ]

    def __init__(self, dict):
        self.id = dict["id"]
        self.game = dict["game"]
        self.name = dict["name"]
        self.status = dict["status"]
        allowed_channels = dict["allow"]["channels"]
        if allowed_channels is None:
            self.channels = []
        else:
            self.channels = [channel["id"] for channel in allowed_channels]
        self.in_inventory = False

        time_format = "%Y-%m-%dT%H:%M:%SZ"
        self.end_at = datetime.strptime(dict["endAt"], time_format)
        self.start_at = datetime.strptime(dict["startAt"], time_format)
        # True while "now" falls inside the campaign's time window.
        self.dt_match = self.start_at < datetime.now() < self.end_at

        self.drops = [Drop(drop_dict) for drop_dict in dict["timeBasedDrops"]]

    def __repr__(self):
        return f"Campaign(id={self.id}, name={self.name}, game={self.game}, in_inventory={self.in_inventory})"

    def __str__(self):
        if Settings.logger.less:
            return f"{self.name}, Game: {self.game['displayName']} - Drops: {len(self.drops)} pcs. - In inventory: {self.in_inventory}"
        return self.__repr__()

    def clear_drops(self):
        """Keep only drops that are inside their time window and unclaimed."""
        self.drops = [
            drop
            for drop in self.drops
            if drop.dt_match is True and drop.is_claimed is False
        ]

    def __eq__(self, other):
        # Campaigns are identified by their Twitch id; any other type
        # compares unequal.
        return self.id == other.id if isinstance(other, self.__class__) else False

    def sync_drops(self, drops, callback):
        """Merge inventory progress into our drops and claim claimable ones.

        For each inventory record, find the matching drop by id, update it
        with [currentMinutesWatched, hasPreconditionsMet, dropInstanceID,
        isClaimed], and invoke `callback` to claim it when possible.
        """
        for inventory_drop in drops:
            for drop in self.drops:
                if inventory_drop["id"] == drop.id:
                    drop.update(inventory_drop["self"])
                    # Once every condition is met, the drop can be claimed.
                    if drop.is_claimable is True:
                        drop.is_claimed = callback(drop)
                    break
|
||||
103
TwitchChannelPointsMiner/classes/entities/Drop.py
Normal file
103
TwitchChannelPointsMiner/classes/entities/Drop.py
Normal file
@ -0,0 +1,103 @@
|
||||
from datetime import datetime
|
||||
|
||||
from TwitchChannelPointsMiner.classes.Settings import Settings
|
||||
from TwitchChannelPointsMiner.utils import percentage
|
||||
|
||||
|
||||
class Drop(object):
    """A single time-based drop reward and its watch/claim progress."""

    __slots__ = [
        "id",
        "name",
        "benefit",
        "minutes_required",
        "has_preconditions_met",
        "current_minutes_watched",
        "drop_instance_id",
        "is_claimed",
        "is_claimable",
        "percentage_progress",
        "end_at",
        "start_at",
        "dt_match",
        "is_printable",
    ]

    def __init__(self, dict):
        self.id = dict["id"]
        self.name = dict["name"]
        # Deduplicated, comma-joined names of the reward benefits.
        self.benefit = ", ".join(
            list(set([bf["benefit"]["name"] for bf in dict["benefitEdges"]]))
        )
        self.minutes_required = dict["requiredMinutesWatched"]

        self.has_preconditions_met = None  # [True, False], None we don't know
        self.current_minutes_watched = 0
        self.drop_instance_id = None
        self.is_claimed = False
        self.is_claimable = False
        self.is_printable = False
        self.percentage_progress = 0

        self.end_at = datetime.strptime(dict["endAt"], "%Y-%m-%dT%H:%M:%SZ")
        self.start_at = datetime.strptime(dict["startAt"], "%Y-%m-%dT%H:%M:%SZ")
        # True while "now" falls inside the drop's availability window.
        self.dt_match = self.start_at < datetime.now() < self.end_at

    def update(
        self,
        progress,
    ):
        """Merge inventory progress data into this drop.

        NOTE: is_printable must be computed BEFORE the counters below are
        overwritten, because it compares the new values against the old ones.
        """
        self.has_preconditions_met = progress["hasPreconditionsMet"]

        updated_percentage = percentage(
            progress["currentMinutesWatched"], self.minutes_required
        )
        # True when the new percentage lands exactly on a multiple of 25%.
        quarter = round((updated_percentage / 25), 4).is_integer()
        self.is_printable = (
            # The new currentMinutesWatched are GT than previous
            progress["currentMinutesWatched"] > self.current_minutes_watched
            and (
                # The drop is printable when we have new updated values and:
                # - the percentage changed and lands on a quarter
                #   (current_minutes_watched != 0 skips the bootstrap phase)
                # - or we watched 1 minute and the previous value is 0
                #   (we just started collecting a new drop)
                (
                    updated_percentage > self.percentage_progress
                    and quarter is True
                    and self.current_minutes_watched != 0
                )
                or (
                    progress["currentMinutesWatched"] == 1
                    and self.current_minutes_watched == 0
                )
            )
        )

        self.current_minutes_watched = progress["currentMinutesWatched"]
        self.drop_instance_id = progress["dropInstanceID"]
        self.is_claimed = progress["isClaimed"]
        # Claimable once Twitch has issued an instance id and it is unclaimed.
        self.is_claimable = (
            self.is_claimed is False and self.drop_instance_id is not None
        )
        self.percentage_progress = updated_percentage

    def __repr__(self):
        return f"Drop(id={self.id}, name={self.name}, benefit={self.benefit}, minutes_required={self.minutes_required}, has_preconditions_met={self.has_preconditions_met}, current_minutes_watched={self.current_minutes_watched}, percentage_progress={self.percentage_progress}%, drop_instance_id={self.drop_instance_id}, is_claimed={self.is_claimed})"

    def __str__(self):
        return (
            f"{self.name} ({self.benefit}) {self.current_minutes_watched}/{self.minutes_required} ({self.percentage_progress}%)"
            if Settings.logger.less
            else self.__repr__()
        )

    def progress_bar(self):
        """Render a 50-character textual progress bar for this drop."""
        progress = self.percentage_progress // 2
        remaining = (100 - self.percentage_progress) // 2
        # Integer division can lose a character; pad so the bar stays 50 wide.
        if remaining + progress < 50:
            remaining += 50 - (remaining + progress)
        return f"|{('█' * progress)}{(' ' * remaining)}|\t{self.percentage_progress}% [{self.current_minutes_watched}/{self.minutes_required}]"

    def __eq__(self, other):
        # Drops are identified solely by their Twitch drop id.
        if isinstance(other, self.__class__):
            return self.id == other.id
        else:
            return False
|
||||
94
TwitchChannelPointsMiner/classes/entities/EventPrediction.py
Normal file
94
TwitchChannelPointsMiner/classes/entities/EventPrediction.py
Normal file
@ -0,0 +1,94 @@
|
||||
from TwitchChannelPointsMiner.classes.entities.Bet import Bet
|
||||
from TwitchChannelPointsMiner.classes.entities.Streamer import Streamer
|
||||
from TwitchChannelPointsMiner.classes.Settings import Settings
|
||||
from TwitchChannelPointsMiner.utils import _millify, float_round
|
||||
|
||||
|
||||
class EventPrediction(object):
    """A channel-points prediction event on a streamer's channel."""

    __slots__ = [
        "streamer",
        "event_id",
        "title",
        "created_at",
        "prediction_window_seconds",
        "status",
        "result",
        "box_fillable",
        "bet_confirmed",
        "bet_placed",
        "bet",
    ]

    def __init__(
        self,
        streamer: Streamer,
        event_id,
        title,
        created_at,
        prediction_window_seconds,
        status,
        outcomes,
    ):
        self.streamer = streamer

        self.event_id = event_id
        self.title = title.strip()
        self.created_at = created_at
        self.prediction_window_seconds = prediction_window_seconds
        self.status = status
        # Outcome summary, filled in by parse_result().
        self.result: dict = {"string": "", "type": None, "gained": 0}

        self.box_fillable = False
        # True once Twitch acknowledged the wager ("prediction-made").
        self.bet_confirmed = False
        # True once we have submitted the wager.
        self.bet_placed = False
        self.bet = Bet(outcomes, streamer.settings.bet)

    def __repr__(self):
        return f"EventPrediction(event_id={self.event_id}, streamer={self.streamer}, title={self.title})"

    def __str__(self):
        return (
            f"EventPrediction: {self.streamer} - {self.title}"
            if Settings.logger.less
            else self.__repr__()
        )

    def elapsed(self, timestamp):
        """Seconds elapsed from event creation to `timestamp` (rounded)."""
        return float_round((timestamp - self.created_at).total_seconds())

    def closing_bet_after(self, timestamp):
        """Seconds left (at `timestamp`) before the prediction window closes."""
        return float_round(self.prediction_window_seconds - self.elapsed(timestamp))

    def print_recap(self) -> str:
        """Multi-line recap of the event, its bet and the final result."""
        return f"{self}\n\t\t{self.bet}\n\t\tResult: {self.result['string']}"

    def parse_result(self, result) -> dict:
        """Digest a "prediction-result" payload into placed/won/gained points.

        Stores a summary in self.result and returns the points dict.
        """
        result_type = result["type"]

        points = {}
        # A refund means nothing was effectively wagered.
        points["placed"] = (
            self.bet.decision["amount"] if result_type != "REFUND" else 0
        )
        # NOTE(review): on a REFUND this keeps result["points_won"] even when
        # falsy — presumably the refunded amount; confirm against the payload.
        points["won"] = (
            result["points_won"]
            if result["points_won"] or result_type == "REFUND"
            else 0
        )
        points["gained"] = (
            points["won"] - points["placed"] if result_type != "REFUND" else 0
        )
        points["prefix"] = "+" if points["gained"] >= 0 else ""

        action = (
            "Lost"
            if result_type == "LOSE"
            else ("Refunded" if result_type == "REFUND" else "Gained")
        )

        self.result = {
            "string": f"{result_type}, {action}: {points['prefix']}{_millify(points['gained'])}",
            "type": result_type,
            "gained": points["gained"],
        }

        return points
|
||||
69
TwitchChannelPointsMiner/classes/entities/Message.py
Normal file
69
TwitchChannelPointsMiner/classes/entities/Message.py
Normal file
@ -0,0 +1,69 @@
|
||||
import json
|
||||
|
||||
from TwitchChannelPointsMiner.utils import server_time
|
||||
|
||||
|
||||
class Message(object):
    """A parsed PubSub message: topic, payload, timestamp and origin channel."""

    __slots__ = [
        "topic",
        "topic_user",
        "message",
        "type",
        "data",
        "timestamp",
        "channel_id",
        "identifier",
    ]

    def __init__(self, data):
        # Topics look like "<topic-name>.<user-or-channel-id>".
        self.topic, self.topic_user = data["topic"].split(".")

        self.message = json.loads(data["message"])
        self.type = self.message["type"]

        self.data = self.message["data"] if "data" in self.message else None

        self.timestamp = self.__get_timestamp()
        self.channel_id = self.__get_channel_id()

        self.identifier = f"{self.type}.{self.topic}.{self.channel_id}"

    def __repr__(self):
        return f"{self.message}"

    def __str__(self):
        return f"{self.message}"

    def __get_timestamp(self):
        """Return the payload timestamp, falling back to the server time."""
        if self.data is None:
            return server_time(self.message)
        if "timestamp" in self.data:
            return self.data["timestamp"]
        return server_time(self.data)

    def __get_channel_id(self):
        """Extract the channel id from the payload.

        Different PubSub topics nest the channel id under different keys;
        fall back to the id embedded in the topic when none is present.
        (Flattened from a five-level nested conditional expression;
        lookup order is unchanged.)
        """
        if self.data is not None:
            for container_key in ("prediction", "claim"):
                if container_key in self.data:
                    return self.data[container_key]["channel_id"]
            if "channel_id" in self.data:
                return self.data["channel_id"]
            if "balance" in self.data:
                return self.data["balance"]["channel_id"]
        return self.topic_user
|
||||
16
TwitchChannelPointsMiner/classes/entities/PubsubTopic.py
Normal file
16
TwitchChannelPointsMiner/classes/entities/PubsubTopic.py
Normal file
@ -0,0 +1,16 @@
|
||||
class PubsubTopic(object):
    """A PubSub topic bound either to the logged-in user or to a streamer."""

    __slots__ = ["topic", "user_id", "streamer"]

    def __init__(self, topic, user_id=None, streamer=None):
        self.topic = topic
        self.user_id = user_id
        self.streamer = streamer

    def is_user_topic(self):
        # A topic without a streamer targets the logged-in user.
        return self.streamer is None

    def __str__(self):
        # Twitch expects "<topic>.<id>" where the id is either the user's
        # or the streamer's channel id.
        suffix = self.user_id if self.is_user_topic() else self.streamer.channel_id
        return f"{self.topic}.{suffix}"
|
||||
12
TwitchChannelPointsMiner/classes/entities/Raid.py
Normal file
12
TwitchChannelPointsMiner/classes/entities/Raid.py
Normal file
@ -0,0 +1,12 @@
|
||||
class Raid(object):
    """An in-progress raid, identified by its raid id."""

    __slots__ = ["raid_id", "target_login"]

    def __init__(self, raid_id, target_login):
        self.raid_id = raid_id
        self.target_login = target_login

    def __eq__(self, other):
        # Two raids are the same event iff their ids match; any other
        # type compares unequal.
        return self.raid_id == other.raid_id if isinstance(other, self.__class__) else False
|
||||
107
TwitchChannelPointsMiner/classes/entities/Stream.py
Normal file
107
TwitchChannelPointsMiner/classes/entities/Stream.py
Normal file
@ -0,0 +1,107 @@
|
||||
import json
|
||||
import logging
|
||||
import time
|
||||
from base64 import b64encode
|
||||
|
||||
from TwitchChannelPointsMiner.classes.Settings import Settings
|
||||
from TwitchChannelPointsMiner.constants import DROP_ID
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class Stream(object):
    """Live-stream state for a streamer: metadata, drops info and watch time."""

    __slots__ = [
        "broadcast_id",
        "title",
        "game",
        "tags",
        "drops_tags",
        "campaigns",
        "campaigns_ids",
        "viewers_count",
        "spade_url",
        "payload",
        "watch_streak_missing",
        "minute_watched",
        "__last_update",
        "__minute_watched_timestamp",
    ]

    def __init__(self):
        self.broadcast_id = None

        self.title = None
        self.game = {}
        self.tags = []

        self.drops_tags = False
        self.campaigns = []
        self.campaigns_ids = []

        self.viewers_count = 0
        self.__last_update = 0

        self.spade_url = None
        self.payload = None

        self.init_watch_streak()

    def encode_payload(self) -> dict:
        """Return the spade payload as compact JSON, base64-encoded."""
        serialized = json.dumps(self.payload, separators=(",", ":"))
        encoded = b64encode(serialized.encode("utf-8")).decode("utf-8")
        return {"data": encoded}

    def update(self, broadcast_id, title, game, tags, viewers_count):
        """Refresh stream metadata and recompute the drops-enabled flag."""
        self.broadcast_id = broadcast_id
        self.title = title.strip()
        self.game = game
        # #343 temporary workaround
        self.tags = tags or []
        # ------------------------
        self.viewers_count = viewers_count

        tag_ids = [tag["id"] for tag in self.tags]
        self.drops_tags = DROP_ID in tag_ids and self.game != {}
        self.__last_update = time.time()

        logger.debug(f"Update: {self}")

    def __repr__(self):
        return f"Stream(title={self.title}, game={self.__str_game()}, tags={self.__str_tags()})"

    def __str__(self):
        return f"{self.title}" if Settings.logger.less else self.__repr__()

    def __str_tags(self):
        if self.tags == []:
            return None
        return ", ".join([tag["localizedName"] for tag in self.tags])

    def __str_game(self):
        return self.game["displayName"] if self.game not in [{}, None] else None

    def game_name(self):
        return self.game["name"] if self.game not in [{}, None] else None

    def game_id(self):
        return self.game["id"] if self.game not in [{}, None] else None

    def update_required(self):
        # Metadata is refreshed at most once every two minutes.
        return self.__last_update == 0 or self.update_elapsed() >= 120

    def update_elapsed(self):
        if self.__last_update == 0:
            return 0
        return time.time() - self.__last_update

    def init_watch_streak(self):
        """Reset per-broadcast watch-streak bookkeeping."""
        self.watch_streak_missing = True
        self.minute_watched = 0
        self.__minute_watched_timestamp = 0

    def update_minute_watched(self):
        # Accumulate minutes elapsed since the previous call; the first
        # call only records the starting timestamp.
        if self.__minute_watched_timestamp != 0:
            elapsed_minutes = (time.time() - self.__minute_watched_timestamp) / 60
            self.minute_watched += round(elapsed_minutes, 5)
        self.__minute_watched_timestamp = time.time()
|
||||
284
TwitchChannelPointsMiner/classes/entities/Streamer.py
Normal file
284
TwitchChannelPointsMiner/classes/entities/Streamer.py
Normal file
@ -0,0 +1,284 @@
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import time
|
||||
from datetime import datetime
|
||||
from threading import Lock
|
||||
|
||||
from TwitchChannelPointsMiner.classes.Chat import ChatPresence, ThreadChat
|
||||
from TwitchChannelPointsMiner.classes.entities.Bet import BetSettings, DelayMode
|
||||
from TwitchChannelPointsMiner.classes.entities.Stream import Stream
|
||||
from TwitchChannelPointsMiner.classes.Settings import Events, Settings
|
||||
from TwitchChannelPointsMiner.constants import URL
|
||||
from TwitchChannelPointsMiner.utils import _millify
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class StreamerSettings(object):
    """Per-streamer behaviour switches.

    Every option defaults to None meaning "not configured"; default()
    fills the unset ones with their defaults (booleans -> True,
    bet -> BetSettings(), chat -> ChatPresence.ONLINE).
    """

    __slots__ = [
        "make_predictions",
        "follow_raid",
        "claim_drops",
        "claim_moments",
        "watch_streak",
        "bet",
        "chat",
    ]

    def __init__(
        self,
        make_predictions: bool = None,
        follow_raid: bool = None,
        claim_drops: bool = None,
        claim_moments: bool = None,
        watch_streak: bool = None,
        bet: BetSettings = None,
        chat: ChatPresence = None,
    ):
        self.make_predictions = make_predictions
        self.follow_raid = follow_raid
        self.claim_drops = claim_drops
        self.claim_moments = claim_moments
        self.watch_streak = watch_streak
        self.bet = bet
        self.chat = chat

    def default(self):
        """Replace every still-unset (None) option with its default value."""
        for name in [
            "make_predictions",
            "follow_raid",
            "claim_drops",
            "claim_moments",
            "watch_streak",
        ]:
            if getattr(self, name) is None:
                setattr(self, name, True)
        if self.bet is None:
            self.bet = BetSettings()
        if self.chat is None:
            self.chat = ChatPresence.ONLINE

    def __repr__(self):
        # Bug fix: this previously mislabeled itself as "BetSettings(...)".
        return f"StreamerSettings(make_predictions={self.make_predictions}, follow_raid={self.follow_raid}, claim_drops={self.claim_drops}, claim_moments={self.claim_moments}, watch_streak={self.watch_streak}, bet={self.bet}, chat={self.chat})"
|
||||
|
||||
class Streamer(object):
    """State holder for a single tracked streamer.

    Combines identity (username/channel_id), live-state bookkeeping,
    channel-point history, the current Stream, the IRC chat thread and
    the persisted analytics (JSON series/annotations) for that streamer.
    """

    __slots__ = [
        "username",
        "channel_id",
        "settings",
        "is_online",
        "stream_up",
        "online_at",
        "offline_at",
        "channel_points",
        "minute_watched_requests",
        "viewer_is_mod",
        "activeMultipliers",
        "irc_chat",
        "stream",
        "raid",
        "history",
        "streamer_url",
        "mutex",
    ]

    def __init__(self, username, settings=None):
        self.username: str = username.lower().strip()
        self.channel_id: str = ""
        self.settings = settings
        self.is_online = False
        self.stream_up = 0
        self.online_at = 0
        self.offline_at = 0
        self.channel_points = 0
        self.minute_watched_requests = None
        self.viewer_is_mod = False
        self.activeMultipliers = None
        self.irc_chat = None

        self.stream = Stream()

        self.raid = None
        self.history = {}

        self.streamer_url = f"{URL}/{self.username}"

        # Serializes access to the per-streamer analytics JSON file.
        self.mutex = Lock()

    def __repr__(self):
        return f"Streamer(username={self.username}, channel_id={self.channel_id}, channel_points={_millify(self.channel_points)})"

    def __str__(self):
        return (
            f"{self.username} ({_millify(self.channel_points)} points)"
            if Settings.logger.less
            else self.__repr__()
        )

    def set_offline(self):
        """Mark the streamer offline (idempotent) and update chat presence."""
        if self.is_online is True:
            self.offline_at = time.time()
            self.is_online = False

        self.toggle_chat()

        logger.info(
            f"{self} is Offline!",
            extra={
                "emoji": ":sleeping:",
                "event": Events.STREAMER_OFFLINE,
            },
        )

    def set_online(self):
        """Mark the streamer online (idempotent), reset the watch streak
        for the new stream and update chat presence."""
        if self.is_online is False:
            self.online_at = time.time()
            self.is_online = True
            self.stream.init_watch_streak()

        self.toggle_chat()

        logger.info(
            f"{self} is Online!",
            extra={
                "emoji": ":partying_face:",
                "event": Events.STREAMER_ONLINE,
            },
        )

    def print_history(self):
        """Human-readable summary of all non-zero point-gain reasons."""
        return ", ".join(
            [
                f"{key}({self.history[key]['counter']} times, {_millify(self.history[key]['amount'])} gained)"
                for key in sorted(self.history)
                if self.history[key]["counter"] != 0
            ]
        )

    def update_history(self, reason_code, earned, counter=1):
        """Accumulate counter/amount per reason; a WATCH_STREAK reward also
        marks the current stream's streak as already claimed."""
        if reason_code not in self.history:
            self.history[reason_code] = {"counter": 0, "amount": 0}
        self.history[reason_code]["counter"] += counter
        self.history[reason_code]["amount"] += earned

        if reason_code == "WATCH_STREAK":
            self.stream.watch_streak_missing = False

    def stream_up_elapsed(self):
        """True when no stream-up event was seen, or it is older than 120s."""
        return self.stream_up == 0 or ((time.time() - self.stream_up) > 120)

    def drops_condition(self):
        """True when drops should be mined for this streamer right now."""
        return (
            self.settings.claim_drops is True
            and self.is_online is True
            # and self.stream.drops_tags is True
            and self.stream.campaigns_ids != []
        )

    def viewer_has_points_multiplier(self):
        """True when at least one active channel-points multiplier exists."""
        return self.activeMultipliers is not None and len(self.activeMultipliers) > 0

    def total_points_multiplier(self):
        """Sum of all active multiplier factors (0 when none are known)."""
        if self.activeMultipliers is None:
            return 0
        return sum(multiplier["factor"] for multiplier in self.activeMultipliers)

    def get_prediction_window(self, prediction_window_seconds):
        """Seconds into the prediction window at which the bet should be
        placed, according to the configured delay mode."""
        delay_mode = self.settings.bet.delay_mode
        delay = self.settings.bet.delay
        if delay_mode == DelayMode.FROM_START:
            return min(delay, prediction_window_seconds)
        elif delay_mode == DelayMode.FROM_END:
            return max(prediction_window_seconds - delay, 0)
        elif delay_mode == DelayMode.PERCENTAGE:
            return prediction_window_seconds * delay
        else:
            return prediction_window_seconds

    # === ANALYTICS === #
    def persistent_annotations(self, event_type, event_text):
        """Persist a colored chart annotation for notable events."""
        event_type = event_type.upper()
        if event_type in ["WATCH_STREAK", "WIN", "PREDICTION_MADE", "LOSE"]:
            colors = {
                "WATCH_STREAK": "#45c1ff",  # blue
                "PREDICTION_MADE": "#ffe045",  # yellow
                "WIN": "#36b535",  # green
                "LOSE": "#ff4545",  # red
            }
            primary_color = colors[event_type]
            data = {
                "borderColor": primary_color,
                "label": {
                    "style": {"color": "#000", "background": primary_color},
                    "text": event_text,
                },
            }
            self.__save_json("annotations", data)

    def persistent_series(self, event_type="Watch"):
        """Persist a channel-points datapoint for the analytics chart."""
        self.__save_json("series", event_type=event_type)

    def __save_json(self, key, data=None, event_type="Watch"):
        """Append a datapoint under `key` in <analytics_path>/<username>.json.

        Bug fix: `data` used to default to a shared mutable dict ({}), so
        keys leaked between calls; it now defaults to a fresh dict.
        """
        data = {} if data is None else data
        # https://stackoverflow.com/questions/4676195/why-do-i-need-to-multiply-unix-timestamps-by-1000-in-javascript
        now = datetime.now().replace(microsecond=0)
        data.update({"x": round(datetime.timestamp(now) * 1000)})

        if key == "series":
            data.update({"y": self.channel_points})
            if event_type is not None:
                data.update({"z": event_type.replace("_", " ").title()})

        fname = os.path.join(Settings.analytics_path, f"{self.username}.json")
        temp_fname = fname + ".temp"  # write-then-rename keeps the file valid on crash

        with self.mutex:
            # Load the existing data (bug fix: the file handle is now closed).
            if os.path.isfile(fname):
                with open(fname, "r") as existing_file:
                    json_data = json.load(existing_file)
            else:
                json_data = {}
            json_data.setdefault(key, []).append(data)

            # Write to a temporary file, then atomically replace the original.
            with open(temp_fname, "w") as temp_file:
                json.dump(json_data, temp_file, indent=4)
            os.replace(temp_fname, fname)

    def leave_chat(self):
        """Stop the IRC chat thread and pre-build a fresh one so it can be
        started again later (threads can only be started once)."""
        if self.irc_chat is not None:
            self.irc_chat.stop()

            # Recreate a new thread to start again
            # raise RuntimeError("threads can only be started once")
            self.irc_chat = ThreadChat(
                self.irc_chat.username,
                self.irc_chat.token,
                self.username,
            )

    def __join_chat(self):
        if self.irc_chat is not None:
            if self.irc_chat.is_alive() is False:
                self.irc_chat.start()

    def toggle_chat(self):
        """Join/leave IRC chat according to settings.chat and online state."""
        if self.settings.chat == ChatPresence.ALWAYS:
            self.__join_chat()
        elif self.settings.chat != ChatPresence.NEVER:
            if self.is_online is True:
                if self.settings.chat == ChatPresence.ONLINE:
                    self.__join_chat()
                elif self.settings.chat == ChatPresence.OFFLINE:
                    self.leave_chat()
            else:
                if self.settings.chat == ChatPresence.ONLINE:
                    self.leave_chat()
                elif self.settings.chat == ChatPresence.OFFLINE:
                    self.__join_chat()
||||
199
TwitchChannelPointsMiner/constants.py
Normal file
199
TwitchChannelPointsMiner/constants.py
Normal file
@ -0,0 +1,199 @@
|
||||
# Twitch endpoints
URL = "https://www.twitch.tv"
IRC = "irc.chat.twitch.tv"
IRC_PORT = 6667
WEBSOCKET = "wss://pubsub-edge.twitch.tv/v1"

# GraphQL client identification.  Alternatives kept for reference:
#   "kimne78kx3ncx6brgo4mv6wki5h1ko"   -> Browser
#   "kd1unb4b3q4t58fwlpcbzcbnm76a8fp"  -> Android App
#   "851cqzxpb9bqu9z6galo155du"        -> iOS App
CLIENT_ID = "ue6666qo983tsx6so1t0vnawi233wa"  # TV

# Tag id that marks a stream as Drops-enabled.
DROP_ID = "c2542d6d-cd10-4532-919b-3d19f30a768b"

# Alternative: "32d439b2-bd5b-4e35-b82a-fae10b04da70" (Android App)
CLIENT_VERSION = "ef928475-9403-42f2-8a34-55784bd08e16"  # Browser

# User-Agent strings, keyed by platform then browser/app flavor.
USER_AGENTS = {
    "Windows": {
        "CHROME": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/108.0.0.0 Safari/537.36",
        "FIREFOX": "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:84.0) Gecko/20100101 Firefox/84.0",
    },
    "Linux": {
        "CHROME": "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/88.0.4324.96 Safari/537.36",
        "FIREFOX": "Mozilla/5.0 (X11; Linux x86_64; rv:85.0) Gecko/20100101 Firefox/85.0",
    },
    "Android": {
        # "Dalvik/2.1.0 (Linux; U; Android 7.1.2; SM-G975N Build/N2G48C) tv.twitch.android.app/13.4.1/1304010" (older App UA)
        "App": "Dalvik/2.1.0 (Linux; U; Android 7.1.2; SM-G977N Build/LMY48Z) tv.twitch.android.app/14.3.2/1403020",
        "TV": "Mozilla/5.0 (Linux; Android 7.1; Smart Box C1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/108.0.0.0 Safari/537.36"
    }
}

# Upstream repository branch used for raw-file downloads (see utils.download_file).
BRANCH = "master"
GITHUB_url = (
    "https://raw.githubusercontent.com/rdavydov/Twitch-Channel-Points-Miner-v2/"
    + BRANCH
)
||||
|
||||
|
||||
class GQLOperations:
    """Catalogue of the Twitch GraphQL persisted queries used by the miner.

    Each attribute is the ready-to-send request body: operationName,
    optional variables, and the persistedQuery extension with its
    sha256 hash.
    """

    url = "https://gql.twitch.tv/gql"
    integrity_url = "https://gql.twitch.tv/integrity"

    def _persisted(operation_name, sha256_hash, variables=None):
        # Class-body-only builder (deleted below); keeps key order
        # operationName -> variables -> extensions like the hand-written dicts.
        query = {"operationName": operation_name}
        if variables is not None:
            query["variables"] = variables
        query["extensions"] = {
            "persistedQuery": {"version": 1, "sha256Hash": sha256_hash}
        }
        return query

    WithIsStreamLiveQuery = _persisted(
        "WithIsStreamLiveQuery",
        "04e46329a6786ff3a81c01c50bfa5d725902507a0deb83b0edbf7abe7a3716ea",
    )
    VideoPlayerStreamInfoOverlayChannel = _persisted(
        "VideoPlayerStreamInfoOverlayChannel",
        "a5f2e34d626a9f4f5c0204f910bab2194948a9502089be558bb6e779a9e1b3d2",
    )
    ClaimCommunityPoints = _persisted(
        "ClaimCommunityPoints",
        "46aaeebe02c99afdf4fc97c7c0cba964124bf6b0af229395f1f6d1feed05b3d0",
    )
    CommunityMomentCallout_Claim = _persisted(
        "CommunityMomentCallout_Claim",
        "e2d67415aead910f7f9ceb45a77b750a1e1d9622c936d832328a0689e054db62",
    )
    DropsPage_ClaimDropRewards = _persisted(
        "DropsPage_ClaimDropRewards",
        "a455deea71bdc9015b78eb49f4acfbce8baa7ccbedd28e549bb025bd0f751930",
    )
    ChannelPointsContext = _persisted(
        "ChannelPointsContext",
        "1530a003a7d374b0380b79db0be0534f30ff46e61cffa2bc0e2468a909fbc024",
    )
    JoinRaid = _persisted(
        "JoinRaid",
        "c6a332a86d1087fbbb1a8623aa01bd1313d2386e7c63be60fdb2d1901f01a4ae",
    )
    ModViewChannelQuery = _persisted(
        "ModViewChannelQuery",
        "df5d55b6401389afb12d3017c9b2cf1237164220c8ef4ed754eae8188068a807",
    )
    Inventory = _persisted(
        "Inventory",
        "37fea486d6179047c41d0f549088a4c3a7dd60c05c70956a1490262f532dccd9",
        variables={"fetchRewardCampaigns": True},
    )
    MakePrediction = _persisted(
        "MakePrediction",
        "b44682ecc88358817009f20e69d75081b1e58825bb40aa53d5dbadcc17c881d8",
    )
    ViewerDropsDashboard = _persisted(
        "ViewerDropsDashboard",
        "8d5d9b5e3f088f9d1ff39eb2caab11f7a4cf7a3353da9ce82b5778226ff37268",
        variables={"fetchRewardCampaigns": True},
    )
    DropCampaignDetails = _persisted(
        "DropCampaignDetails",
        "f6396f5ffdde867a8f6f6da18286e4baf02e5b98d14689a69b5af320a4c7b7b8",
    )
    DropsHighlightService_AvailableDrops = _persisted(
        "DropsHighlightService_AvailableDrops",
        "9a62a09bce5b53e26e64a671e530bc599cb6aab1e5ba3cbd5d85966d3940716f",
    )
    # Used to replace https://api.twitch.tv/helix/users?login={username}
    ReportMenuItem = _persisted(
        "ReportMenuItem",
        "8f3628981255345ca5e5453dfd844efffb01d6413a9931498836e6268692a30c",
    )
    # NOTE: a 1-tuple in the original source, preserved as-is for callers.
    PersonalSections = (
        _persisted(
            "PersonalSections",
            "9fbdfb00156f754c26bde81eb47436dee146655c92682328457037da1a48ed39",
            variables={
                "input": {
                    "sectionInputs": ["FOLLOWED_SECTION"],
                    "recommendationContext": {"platform": "web"},
                },
                "channelLogin": None,
                "withChannelUser": False,
                "creatorAnniversariesExperimentEnabled": False,
            },
        ),
    )
    ChannelFollows = _persisted(
        "ChannelFollows",
        "eecf815273d3d949e5cf0085cc5084cd8a1b5b7b6f7990cf43cb0beadf546907",
        variables={"limit": 100, "order": "ASC"},
    )

    del _persisted
||||
342
TwitchChannelPointsMiner/logger.py
Normal file
342
TwitchChannelPointsMiner/logger.py
Normal file
@ -0,0 +1,342 @@
|
||||
import logging
|
||||
import os
|
||||
import platform
|
||||
import queue
|
||||
import pytz
|
||||
import sys
|
||||
from datetime import datetime
|
||||
from logging.handlers import QueueHandler, QueueListener, TimedRotatingFileHandler
|
||||
from pathlib import Path
|
||||
|
||||
import emoji
|
||||
from colorama import Fore, init
|
||||
|
||||
from TwitchChannelPointsMiner.classes.Discord import Discord
|
||||
from TwitchChannelPointsMiner.classes.Webhook import Webhook
|
||||
from TwitchChannelPointsMiner.classes.Matrix import Matrix
|
||||
from TwitchChannelPointsMiner.classes.Settings import Events
|
||||
from TwitchChannelPointsMiner.classes.Telegram import Telegram
|
||||
from TwitchChannelPointsMiner.classes.Pushover import Pushover
|
||||
from TwitchChannelPointsMiner.utils import remove_emoji
|
||||
|
||||
|
||||
# Fore: BLACK, RED, GREEN, YELLOW, BLUE, MAGENTA, CYAN, WHITE, RESET.
|
||||
class ColorPalette(object):
    """Per-event console colors.

    Every Events member gets a color attribute (default Fore.RESET, with
    BET_WIN green and BET_LOSE red). Keyword arguments may override them,
    given either as a raw colorama Fore escape code or as a color name
    string (e.g. "green").
    """

    def __init__(self, **kwargs):
        # Init with default values RESET for all and GREEN and RED only for WIN and LOSE bet
        # Then set args from kwargs
        for k in Events:
            setattr(self, str(k), Fore.RESET)
        setattr(self, "BET_WIN", Fore.GREEN)
        setattr(self, "BET_LOSE", Fore.RED)

        for k in kwargs:
            # Only attributes created above (i.e. known events) may be overridden.
            if k.upper() in dir(self) and getattr(self, k.upper()) is not None:
                # Value supplied as a raw colorama escape sequence.
                if kwargs[k] in [
                    Fore.BLACK,
                    Fore.RED,
                    Fore.GREEN,
                    Fore.YELLOW,
                    Fore.BLUE,
                    Fore.MAGENTA,
                    Fore.CYAN,
                    Fore.WHITE,
                    Fore.RESET,
                ]:
                    setattr(self, k.upper(), kwargs[k])
                # Value supplied as a color name; map it to the Fore attribute.
                elif kwargs[k].upper() in [
                    "BLACK",
                    "RED",
                    "GREEN",
                    "YELLOW",
                    "BLUE",
                    "MAGENTA",
                    "CYAN",
                    "WHITE",
                    "RESET",
                ]:
                    setattr(self, k.upper(), getattr(Fore, kwargs[k].upper()))

    def get(self, key):
        """Return the color for `key` (an Events member), Fore.RESET if unknown."""
        color = getattr(self, str(key)) if str(key) in dir(self) else None
        return Fore.RESET if color is None else color
||||
|
||||
|
||||
class LoggerSettings:
    """Configuration bundle consumed by configure_loggers().

    Collects console/file logging options plus the optional notification
    integrations (Telegram, Discord, generic webhook, Matrix, Pushover).
    """

    __slots__ = [
        "save",
        "less",
        "console_level",
        "console_username",
        "time_zone",
        "file_level",
        "emoji",
        "colored",
        "color_palette",
        "auto_clear",
        "telegram",
        "discord",
        "webhook",
        "matrix",
        "pushover",
        "username"
    ]

    def __init__(
        self,
        save: bool = True,
        less: bool = False,
        console_level: int = logging.INFO,
        console_username: bool = False,
        time_zone: "str | None" = None,
        file_level: int = logging.DEBUG,
        emoji: bool = platform.system() != "Windows",
        colored: bool = False,
        # NOTE(review): this default ColorPalette() instance is created once
        # at import time and shared by every LoggerSettings that doesn't pass
        # its own palette — harmless while palettes are treated as read-only.
        color_palette: ColorPalette = ColorPalette(),
        auto_clear: bool = True,
        telegram: "Telegram | None" = None,
        discord: "Discord | None" = None,
        webhook: "Webhook | None" = None,
        matrix: "Matrix | None" = None,
        pushover: "Pushover | None" = None,
        username: "str | None" = None
    ):
        self.save = save  # also write a log file (see configure_loggers)
        self.less = less  # shorter console/file format
        self.console_level = console_level
        self.console_username = console_username  # prefix "[username] " on console
        self.time_zone = time_zone  # pytz zone name, e.g. "Europe/Rome"; None = local time
        self.file_level = file_level
        self.emoji = emoji  # emoji rendering defaults to off on Windows consoles
        self.colored = colored
        self.color_palette = color_palette
        self.auto_clear = auto_clear  # daily-rotated single file vs timestamped per-run files
        self.telegram = telegram
        self.discord = discord
        self.webhook = webhook
        self.matrix = matrix
        self.pushover = pushover
        self.username = username  # filled in by configure_loggers, not by callers
||||
|
||||
|
||||
class FileFormatter(logging.Formatter):
    """Formatter for the log file that can render timestamps in a
    configured (pytz) time zone instead of local time."""

    def __init__(self, *, fmt, settings: "LoggerSettings", datefmt=None):
        self.settings = settings
        self.timezone = None
        if settings.time_zone:
            try:
                self.timezone = pytz.timezone(settings.time_zone)
            except pytz.UnknownTimeZoneError:
                logging.error(
                    f"File logger: invalid time zone: {settings.time_zone}")
            else:
                logging.info(f"File logger time zone set to: {self.timezone}")
        super().__init__(fmt=fmt, datefmt=datefmt)

    def formatTime(self, record, datefmt=None):
        """Render record.created honoring the configured time zone.

        A None tzinfo makes fromtimestamp() fall back to naive local time,
        matching the behavior when no time zone was configured.
        """
        stamp = datetime.fromtimestamp(record.created, self.timezone)
        return stamp.strftime(datefmt or self.default_time_format)
||||
|
||||
|
||||
class GlobalFormatter(logging.Formatter):
    """Console formatter that also fans records out to notification services.

    Besides timestamp/emoji/color handling, format() forwards every record
    that carries an `event` attribute to the configured Telegram / Discord /
    webhook / Matrix / Pushover integrations. Each integration can be
    suppressed per-record by setting a `skip_<service>` attribute.
    """

    def __init__(self, *, fmt, settings: LoggerSettings, datefmt=None):
        self.settings = settings
        # Optional pytz tzinfo for timestamps; stays None for local time.
        self.timezone = None
        if settings.time_zone:
            try:
                self.timezone = pytz.timezone(settings.time_zone)
                logging.info(
                    f"Console logger time zone set to: {self.timezone}")
            except pytz.UnknownTimeZoneError:
                logging.error(
                    f"Console logger: invalid time zone: {settings.time_zone}")
        logging.Formatter.__init__(self, fmt=fmt, datefmt=datefmt)

    def formatTime(self, record, datefmt=None):
        """Render record.created in the configured time zone (local otherwise)."""
        if self.timezone:
            dt = datetime.fromtimestamp(record.created, self.timezone)
        else:
            dt = datetime.fromtimestamp(record.created)
        return dt.strftime(datefmt or self.default_time_format)

    def format(self, record):
        # emoji_is_present guards against emojizing the same record twice
        # (one record may pass through several handlers).
        record.emoji_is_present = (
            record.emoji_is_present if hasattr(
                record, "emoji_is_present") else False
        )
        if (
            hasattr(record, "emoji")
            and self.settings.emoji is True
            and record.emoji_is_present is False
        ):
            record.msg = emoji.emojize(
                f"{record.emoji} {record.msg.strip()}", language="alias"
            )
            record.emoji_is_present = True

        if self.settings.emoji is False:
            # ASCII fallback for the arrow used in bet/points messages.
            if "\u2192" in record.msg:
                record.msg = record.msg.replace("\u2192", "-->")

            # With the update of Stream class, the Stream Title may contain emoji
            # Full remove using a method from utils.
            record.msg = remove_emoji(record.msg)

        # Prefix "[username] " (or "") prepared by configure_loggers.
        record.msg = self.settings.username + record.msg

        # Only records tagged with an event are forwarded to integrations.
        if hasattr(record, "event"):
            self.telegram(record)
            self.discord(record)
            self.webhook(record)
            self.matrix(record)
            self.pushover(record)

        # NOTE(review): this branch reads record.event unconditionally, so a
        # record without an `event` attribute would raise AttributeError when
        # colored output is enabled — confirm all colored records carry one.
        if self.settings.colored is True:
            record.msg = (
                f"{self.settings.color_palette.get(record.event)}{record.msg}"
            )

        return super().format(record)

    def telegram(self, record):
        # Equivalent to hasattr(record, "skip_telegram"): per-record opt-out.
        skip_telegram = False if hasattr(
            record, "skip_telegram") is False else True

        # The chat_id check filters out the placeholder value from the sample config.
        if (
            self.settings.telegram is not None
            and skip_telegram is False
            and self.settings.telegram.chat_id != 123456789
        ):
            self.settings.telegram.send(record.msg, record.event)

    def discord(self, record):
        # Per-record opt-out, same pattern as telegram().
        skip_discord = False if hasattr(
            record, "skip_discord") is False else True

        # The webhook_api check filters out the placeholder value from the sample config.
        if (
            self.settings.discord is not None
            and skip_discord is False
            and self.settings.discord.webhook_api
            != "https://discord.com/api/webhooks/0123456789/0a1B2c3D4e5F6g7H8i9J"
        ):
            self.settings.discord.send(record.msg, record.event)

    def webhook(self, record):
        # Per-record opt-out, same pattern as telegram().
        skip_webhook = False if hasattr(
            record, "skip_webhook") is False else True

        # The endpoint check filters out the placeholder value from the sample config.
        if (
            self.settings.webhook is not None
            and skip_webhook is False
            and self.settings.webhook.endpoint
            != "https://example.com/webhook"
        ):
            self.settings.webhook.send(record.msg, record.event)

    def matrix(self, record):
        # Per-record opt-out, same pattern as telegram().
        skip_matrix = False if hasattr(
            record, "skip_matrix") is False else True

        # Requires a real room_id and a non-empty access token.
        if (
            self.settings.matrix is not None
            and skip_matrix is False
            and self.settings.matrix.room_id != "..."
            and self.settings.matrix.access_token
        ):
            self.settings.matrix.send(record.msg, record.event)

    def pushover(self, record):
        # Per-record opt-out, same pattern as telegram().
        skip_pushover = False if hasattr(
            record, "skip_pushover") is False else True

        # Both tokens must differ from the sample-config placeholders.
        if (
            self.settings.pushover is not None
            and skip_pushover is False
            and self.settings.pushover.userkey != "YOUR-ACCOUNT-TOKEN"
            and self.settings.pushover.token != "YOUR-APPLICATION-TOKEN"
        ):
            self.settings.pushover.send(record.msg, record.event)
||||
|
||||
|
||||
def configure_loggers(username, settings):
    """Set up queue-based logging for the miner.

    All records flow through a queue so emitting (console, optional file,
    notification services) happens on the QueueListener thread instead of
    the caller's thread.

    Returns a tuple (logs_file, queue_listener); logs_file is None when
    settings.save is False.
    """
    if settings.colored is True:
        init(autoreset=True)  # colorama

    # Queue handler that will handle the logger queue
    logger_queue = queue.Queue(-1)
    queue_handler = QueueHandler(logger_queue)
    root_logger = logging.getLogger()
    root_logger.setLevel(logging.DEBUG)
    # Add the queue handler to the root logger
    # Send log messages to another thread through the queue
    root_logger.addHandler(queue_handler)

    # Adding a username to the format based on settings
    console_username = "" if settings.console_username is False else f"[{username}] "

    settings.username = console_username

    console_handler = logging.StreamHandler(sys.stdout)
    console_handler.setLevel(settings.console_level)
    console_handler.setFormatter(
        GlobalFormatter(
            fmt=(
                "%(asctime)s - %(levelname)s - [%(funcName)s]: %(message)s"
                if settings.less is False
                else "%(asctime)s - %(message)s"
            ),
            datefmt=(
                "%d/%m/%y %H:%M:%S" if settings.less is False else "%d/%m %H:%M:%S"
            ),
            settings=settings,
        )
    )

    if settings.save is True:
        logs_path = os.path.join(Path().absolute(), "logs")
        Path(logs_path).mkdir(parents=True, exist_ok=True)
        if settings.auto_clear is True:
            # One file per user, rotated daily, keeping a week of backups.
            logs_file = os.path.join(
                logs_path,
                f"{username}.log",
            )
            file_handler = TimedRotatingFileHandler(
                logs_file,
                when="D",
                interval=1,
                backupCount=7,
                encoding="utf-8",
                delay=False,
            )
        else:
            # Timestamped file per run; reuse the console formatter's time zone.
            # Bug fix: the old code compared the tzinfo against False (it is
            # None or a tzinfo, never False) and would have produced "" — an
            # invalid argument for datetime.now(). None is the correct
            # "no zone configured" value and is accepted by datetime.now().
            tz = console_handler.formatter.timezone
            logs_file = os.path.join(
                logs_path,
                f"{username}.{datetime.now(tz).strftime('%Y%m%d-%H%M%S')}.log",
            )
            file_handler = logging.FileHandler(logs_file, "w", "utf-8")

        file_handler.setFormatter(
            FileFormatter(
                fmt="%(asctime)s - %(levelname)s - %(name)s - [%(funcName)s]: %(message)s",
                datefmt="%d/%m/%y %H:%M:%S",
                settings=settings
            )
        )
        file_handler.setLevel(settings.file_level)

        # Add logger handlers to the logger queue and start the process
        queue_listener = QueueListener(
            logger_queue, file_handler, console_handler, respect_handler_level=True
        )
        queue_listener.start()
        return logs_file, queue_listener
    else:
        queue_listener = QueueListener(
            logger_queue, console_handler, respect_handler_level=True
        )
        queue_listener.start()
        return None, queue_listener
||||
212
TwitchChannelPointsMiner/utils.py
Normal file
212
TwitchChannelPointsMiner/utils.py
Normal file
@ -0,0 +1,212 @@
|
||||
import platform
|
||||
import re
|
||||
import socket
|
||||
import time
|
||||
from copy import deepcopy
|
||||
from datetime import datetime, timezone
|
||||
from os import path
|
||||
from random import randrange
|
||||
|
||||
import requests
|
||||
from millify import millify
|
||||
|
||||
from TwitchChannelPointsMiner.constants import USER_AGENTS, GITHUB_url
|
||||
|
||||
|
||||
def _millify(input, precision=2):
    """Human-readable shorthand for large numbers (e.g. 1500 -> "1.5k").

    Thin wrapper around millify.millify; `input` keeps its original
    (builtin-shadowing) parameter name for backward compatibility.
    """
    return millify(input, precision)
||||
|
||||
|
||||
def get_streamer_index(streamers: list, channel_id) -> int:
    """Return the index of the streamer whose channel_id matches, or -1.

    Both sides are compared as strings so int and str ids interoperate.
    """
    wanted = str(channel_id)
    for index, streamer in enumerate(streamers):
        if str(streamer.channel_id) == wanted:
            return index
    return -1
||||
|
||||
|
||||
def float_round(number, ndigits=2):
    """Round `number` to `ndigits` decimal places, coercing to float first."""
    return round(float(number), ndigits)
||||
|
||||
|
||||
def server_time(message_data):
    """ISO-8601 UTC timestamp (with a trailing "Z") for a PubSub message.

    Uses message_data["server_time"] when available, otherwise now.
    """
    if message_data is not None and "server_time" in message_data:
        stamp = message_data["server_time"]
    else:
        stamp = time.time()
    return datetime.fromtimestamp(stamp, timezone.utc).isoformat() + "Z"
||||
|
||||
|
||||
# https://en.wikipedia.org/wiki/Cryptographic_nonce
def create_nonce(length=30) -> str:
    """Random alphanumeric nonce of `length` characters ([0-9a-zA-Z])."""
    chars = []
    for _ in range(length):
        roll = randrange(0, 62)  # 10 digits + 26 lowercase + 26 uppercase
        if roll < 10:
            chars.append(chr(ord("0") + roll))
        elif roll < 36:
            chars.append(chr(ord("a") + roll - 10))
        else:
            chars.append(chr(ord("A") + roll - 36))
    return "".join(chars)
||||
|
||||
# for mobile-token
def get_user_agent(browser: str) -> str:
    """Return the User-Agent string used for Twitch requests.

    Currently pinned to the Android-TV agent regardless of `browser`;
    the per-platform lookup (USER_AGENTS[platform.system()][browser])
    is intentionally disabled.
    """
    return USER_AGENTS["Android"]["TV"]
||||
|
||||
|
||||
# Compiled once at import time: remove_emoji() may run for every log record,
# so the (large) character-class pattern should not be rebuilt per call.
_EMOJI_PATTERN = re.compile(
    "["
    "\U0001F600-\U0001F64F"  # emoticons
    "\U0001F300-\U0001F5FF"  # symbols & pictographs
    "\U0001F680-\U0001F6FF"  # transport & map symbols
    "\U0001F1E0-\U0001F1FF"  # flags (iOS)
    "\U00002500-\U00002587"  # chinese char
    "\U00002589-\U00002BEF"  # I need Unicode Character "█" (U+2588)
    "\U00002702-\U000027B0"
    "\U00002702-\U000027B0"
    "\U000024C2-\U00002587"
    "\U00002589-\U0001F251"
    "\U0001f926-\U0001f937"
    "\U00010000-\U0010ffff"
    "\u2640-\u2642"
    "\u2600-\u2B55"
    "\u200d"
    "\u23cf"
    "\u23e9"
    "\u231a"
    "\ufe0f"  # dingbats
    "\u3030"
    "\u231b"
    "\u2328"
    "\u23cf"
    "\u23e9"
    "\u23ea"
    "\u23eb"
    "\u23ec"
    "\u23ed"
    "\u23ee"
    "\u23ef"
    "\u23f0"
    "\u23f1"
    "\u23f2"
    "\u23f3"
    "]+",
    flags=re.UNICODE,
)


def remove_emoji(string: str) -> str:
    """Strip emoji/pictographic characters from `string`.

    U+2588 (full block, used for progress bars) is deliberately excluded
    from the removal ranges and therefore preserved.
    """
    return _EMOJI_PATTERN.sub(r"", string)
||||
|
||||
|
||||
def at_least_one_value_in_settings_is(items, attr, value=True):
    """True if any item's `.settings.<attr>` equals `value`."""
    return any(getattr(item.settings, attr) == value for item in items)
||||
|
||||
|
||||
def copy_values_if_none(settings, defaults):
    """Fill every ``None`` attribute of *settings* from *defaults* (in place).

    Only plain data attributes are considered: dunder names and callables
    are skipped. Returns the mutated *settings* object.
    """
    attr_names = [
        name
        for name in dir(settings)
        if not name.startswith("__") and not callable(getattr(settings, name))
    ]
    for name in attr_names:
        if getattr(settings, name) is None:
            setattr(settings, name, getattr(defaults, name))
    return settings
|
||||
|
||||
|
||||
def set_default_settings(settings, defaults):
    """Return *settings* completed with *defaults*.

    If no settings object was provided at all, hand back a deep copy of
    the defaults; otherwise fill only the attributes the caller left as
    ``None`` (partial settings are common).
    """
    if settings is None:
        return deepcopy(defaults)
    return copy_values_if_none(settings, defaults)
|
||||
|
||||
|
||||
# Dead code deliberately kept by the author as a reference; wrapped in a
# string so it never executes.
'''def char_decision_as_index(char):
    return 0 if char == "A" else 1'''
|
||||
|
||||
|
||||
def internet_connection_available(host="8.8.8.8", port=53, timeout=3):
    """Return True if a TCP connection to ``host:port`` succeeds.

    Defaults probe Google's public DNS resolver on port 53, a cheap way
    to test general internet reachability.

    Fixes over the previous version:
    - the socket is now closed (``with``) instead of leaked;
    - the timeout is set on this socket only, instead of mutating the
      process-wide default via ``socket.setdefaulttimeout``.
    """
    try:
        with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as sock:
            sock.settimeout(timeout)
            sock.connect((host, port))
        return True
    except socket.error:
        return False
|
||||
|
||||
|
||||
def percentage(a, b):
|
||||
return 0 if a == 0 else int((a / b) * 100)
|
||||
|
||||
|
||||
def create_chunks(lst, n):
    """Split *lst* into consecutive sublists of at most *n* elements."""
    chunks = []
    for start in range(0, len(lst), n):
        chunks.append(lst[start:start + n])
    return chunks
|
||||
|
||||
|
||||
def download_file(name, fpath):
    """Download *name* from the project's GitHub raw URL into *fpath*.

    Returns True on success, False on any non-200 response.

    Fixes over the previous version:
    - header name was misspelled ``"User-Anget"``, so no User-Agent was
      ever actually sent;
    - the URL is built with ``"/".join`` (matching ``check_versions``)
      instead of ``os.path.join``, which produced backslashes on Windows;
    - the streamed response is closed via ``with`` instead of leaked;
    - failure now returns an explicit False (previously an implicit
      ``None`` — still falsy, so callers are unaffected).
    """
    url = "/".join(s.strip("/") for s in [GITHUB_url, name])
    with requests.get(
        url,
        headers={"User-Agent": get_user_agent("FIREFOX")},
        stream=True,
    ) as r:
        if r.status_code != 200:
            return False
        with open(fpath, "wb") as f:
            for chunk in r.iter_content(chunk_size=1024):
                if chunk:
                    f.write(chunk)
    return True
|
||||
|
||||
|
||||
def read(fname):
    """Return the text content of *fname*, resolved relative to this module.

    The file is decoded as UTF-8 and — unlike the previous version, which
    leaked the handle — closed deterministically via ``with``.
    """
    with open(path.join(path.dirname(__file__), fname), encoding="utf-8") as f:
        return f.read()
|
||||
|
||||
|
||||
def init2dict(content):
    """Parse ``__key__ = "value"`` assignments in *content* into a dict.

    Used to pull metadata (e.g. ``__version__``) out of an ``__init__.py``
    without importing it.
    """
    assignment = re.compile(r"""__([a-z]+)__ = "([^"]+)""")
    return {key: value for key, value in assignment.findall(content)}
|
||||
|
||||
|
||||
def check_versions():
    """Return ``(current_version, github_version)`` as strings.

    The local version is read from the package's ``__init__.py``; the
    remote one is fetched from the GitHub raw URL. Either side falls back
    to ``"0.0.0"`` on any failure (missing file, no network, bad parse).
    """
    try:
        local_info = init2dict(read("__init__.py"))
        current_version = local_info.get("version", "0.0.0")
    except Exception:
        current_version = "0.0.0"
    try:
        url = "/".join(
            part.strip("/")
            for part in [GITHUB_url, "TwitchChannelPointsMiner", "__init__.py"]
        )
        remote_info = init2dict(requests.get(url).text)
        github_version = remote_info.get("version", "0.0.0")
    except Exception:
        github_version = "0.0.0"
    return current_version, github_version
|
||||
Reference in New Issue
Block a user