diff --git a/TwitchChannelPointsMiner/TwitchChannelPointsMiner.py b/TwitchChannelPointsMiner/TwitchChannelPointsMiner.py
index c1b5ddd4..f1d451d7 100644
--- a/TwitchChannelPointsMiner/TwitchChannelPointsMiner.py
+++ b/TwitchChannelPointsMiner/TwitchChannelPointsMiner.py
@@ -54,6 +54,7 @@ class TwitchChannelPointsMiner:
         "username",
         "twitch",
         "claim_drops_startup",
+        "twitch_dash_update",
         "enable_analytics",
         "disable_ssl_cert_verification",
         "disable_at_in_nickname",
@@ -76,6 +77,7 @@ def __init__(
         username: str,
         password: str = None,
         claim_drops_startup: bool = False,
+        twitch_dash_update: int = 60,
         enable_analytics: bool = False,
         disable_ssl_cert_verification: bool = False,
         disable_at_in_nickname: bool = False,
@@ -140,6 +142,7 @@ def is_connected():
 
         self.twitch = Twitch(self.username, user_agent, password)
         self.claim_drops_startup = claim_drops_startup
+        self.twitch_dash_update = twitch_dash_update
         self.priority = priority if isinstance(priority, list) else [priority]
 
         self.streamers: list[Streamer] = []
@@ -315,7 +318,7 @@ def run(
         ):
             self.sync_campaigns_thread = threading.Thread(
                 target=self.twitch.sync_campaigns,
-                args=(self.streamers,),
+                args=(self.streamers, self.twitch_dash_update),
             )
             self.sync_campaigns_thread.name = "Sync campaigns/inventory"
             self.sync_campaigns_thread.start()
diff --git a/TwitchChannelPointsMiner/classes/Twitch.py b/TwitchChannelPointsMiner/classes/Twitch.py
index a6b7d2a1..4c89d095 100644
--- a/TwitchChannelPointsMiner/classes/Twitch.py
+++ b/TwitchChannelPointsMiner/classes/Twitch.py
@@ -150,11 +150,10 @@ def get_spade_url(self, streamer):
             settings_request = requests.get(settings_url, headers=headers)
             response = settings_request.text
             regex_spade = '"spade_url":"(.*?)"'
-            streamer.stream.spade_url = re.search(
-                regex_spade, response).group(1)
+            streamer.stream.spade_url = re.search(regex_spade, response).group(1)
         except requests.exceptions.RequestException as e:
-            logger.error(
-                f"Something went wrong during extraction of 'spade_url': {e}")
+            logger.error(f"Something went wrong during extraction of 'spade_url': {e}")
+            logger.exception("message")
 
     def get_broadcast_id(self, streamer):
         json_data = copy.deepcopy(GQLOperations.WithIsStreamLiveQuery)
@@ -296,6 +295,7 @@ def post_gql_request(self, json_data):
             logger.error(
                 f"Error with GQLOperations ({json_data['operationName']}): {e}"
             )
+            logger.exception("message")
             return {}
 
     # Request for Integrity Token
@@ -335,6 +335,7 @@ def post_gql_request(self, json_data):
             return self.integrity
         except requests.exceptions.RequestException as e:
             logger.error(f"Error with post_integrity: {e}")
+            logger.exception("message")
             return self.integrity
 
     # verify the integrity token's contents for the "is_bad_bot" flag
@@ -371,6 +372,7 @@ def update_client_version(self):
             return self.client_version
         except requests.exceptions.RequestException as e:
             logger.error(f"Error with update_client_version: {e}")
+            logger.exception("message")
             return self.client_version
 
     def send_minute_watched_events(self, streamers, priority, chunk_size=3):
@@ -642,12 +644,12 @@ def send_minute_watched_events(self, streamers, priority, chunk_size=3):
                             )
 
                     except requests.exceptions.ConnectionError as e:
-                        logger.error(
-                            f"Error while trying to send minute watched: {e}")
+                        logger.error(f"Error while trying to send minute watched: {e}")
+                        logger.exception("message")
                         self.__check_connection_handler(chunk_size)
                     except requests.exceptions.Timeout as e:
-                        logger.error(
-                            f"Error while trying to send minute watched: {e}")
+                        logger.error(f"Error while trying to send minute watched: {e}")
+                        logger.exception("message")
 
                     self.__chuncked_sleep(
                         next_iteration - time.time(), chunk_size=chunk_size
@@ -657,8 +659,8 @@ def send_minute_watched_events(self, streamers, priority, chunk_size=3):
                 # self.__chuncked_sleep(60, chunk_size=chunk_size)
                 self.__chuncked_sleep(20, chunk_size=chunk_size)
             except Exception:
-                logger.error(
-                    "Exception raised in send minute watched", exc_info=True)
+                logger.error("Exception raised in send minute watched", exc_info=True)
+                logger.exception("message")
 
     # === CHANNEL POINTS / PREDICTION === #
     # Load the amount of current points for a channel, check if a bonus is available
@@ -851,8 +853,13 @@ def __get_campaigns_details(self, campaigns):
             response = self.post_gql_request(json_data)
             for r in response:
-                if r["data"]["user"] is not None:
-                    result.append(r["data"]["user"]["dropCampaign"])
+                try:
+                    if r["data"]["user"] is not None:
+                        result.append(r["data"]["user"]["dropCampaign"])
+                except KeyError as e:
+                    logger.debug(
+                        f"KeyError: r['data']['user'] {e}"
+                    )
 
         return result
 
     def __sync_campaigns(self, campaigns):
@@ -919,18 +926,15 @@ def claim_all_drops_from_inventory(self):
                     drop.is_claimed = self.claim_drop(drop)
                     time.sleep(random.uniform(5, 10))
 
-    def sync_campaigns(self, streamers, chunk_size=3):
+    def sync_campaigns(self, streamers, twitch_dash_update, chunk_size=3):
         campaigns_update = 0
+        campaigns = []
         while self.running:
             try:
-                # Get update from dashboard each 60minutes
+                # Get update from dashboard every X minutes (set by the twitch_dash_update parameter)
                 if (
                     campaigns_update == 0
-                    # or ((time.time() - campaigns_update) / 60) > 60
-                    # TEMPORARY AUTO DROP CLAIMING FIX
-                    # 30 minutes instead of 60 minutes
-                    or ((time.time() - campaigns_update) / 30) > 30
-                    #####################################
+                    or ((time.time() - campaigns_update) / 60) > twitch_dash_update
                 ):
                     campaigns_update = time.time()
 
@@ -943,8 +947,6 @@ def sync_campaigns(self, streamers, chunk_size=3):
                     campaigns_details = self.__get_campaigns_details(
                         self.__get_drops_dashboard(status="ACTIVE")
                     )
-                    campaigns = []
-
                     # Going to clear array and structure. Remove all the timeBasedDrops expired or not started yet
                     for index in range(0, len(campaigns_details)):
                         if campaigns_details[index] is not None:
@@ -977,6 +979,7 @@ def sync_campaigns(self, streamers, chunk_size=3):
 
             except (ValueError, KeyError, requests.exceptions.ConnectionError) as e:
                 logger.error(f"Error while syncing inventory: {e}")
+                logger.exception("message")
                 self.__check_connection_handler(chunk_size)
 
             self.__chuncked_sleep(60, chunk_size=chunk_size)
diff --git a/TwitchChannelPointsMiner/constants.py b/TwitchChannelPointsMiner/constants.py
index c06bcedf..24cbd6be 100644
--- a/TwitchChannelPointsMiner/constants.py
+++ b/TwitchChannelPointsMiner/constants.py
@@ -30,9 +30,12 @@
     }
 }
 
+GITHUB_USER = "rdavydov"
 BRANCH = "master"
 GITHUB_url = (
-    "https://raw.githubusercontent.com/rdavydov/Twitch-Channel-Points-Miner-v2/"
+    "https://raw.githubusercontent.com/"
+    + GITHUB_USER
+    + "/Twitch-Channel-Points-Miner-v2/"
     + BRANCH
 )
diff --git a/delete_pycache.sh b/delete_pycache.sh
new file mode 100755
index 00000000..bca342c0
--- /dev/null
+++ b/delete_pycache.sh
@@ -0,0 +1,4 @@
+#!/bin/bash
+echo cleaning __pycache__
+find . -name "__pycache__" -exec rm -rf {} \; 2>/dev/null
+echo Done!
diff --git a/example.py b/example.py
index 17060bab..67f1df21 100644
--- a/example.py
+++ b/example.py
@@ -19,6 +19,7 @@
     username="your-twitch-username",
     password="write-your-secure-psw",  # If no password will be provided, the script will ask interactively
     claim_drops_startup=False,  # If you want to auto claim all drops from Twitch inventory on the startup
+    twitch_dash_update=60,  # How often (in minutes) the Twitch dashboard is checked for drops progress. Default: 60 minutes
     priority=[  # Custom priority in this case for example:
         Priority.STREAK,  # - We want first of all to catch all watch streak from all streamers
         Priority.DROPS,  # - When we don't have anymore watch streak to catch, wait until all drops are collected over the streamers
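Usage sketch (illustrative, not part of the patch): with the example.py change above, the new twitch_dash_update keyword sets how many minutes pass between drops-dashboard syncs in sync_campaigns. A minimal configuration, assuming the import used by example.py and an arbitrary 30-minute interval:

# Minimal sketch: override the default 60-minute dashboard sync interval.
# The 30-minute value is only an example.
from TwitchChannelPointsMiner import TwitchChannelPointsMiner

twitch_miner = TwitchChannelPointsMiner(
    username="your-twitch-username",
    claim_drops_startup=False,
    twitch_dash_update=30,  # sync_campaigns re-reads the drops dashboard every 30 minutes
)
# Then add streamers and start mining as in the rest of example.py.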