5 changes: 4 additions & 1 deletion TwitchChannelPointsMiner/TwitchChannelPointsMiner.py
@@ -54,6 +54,7 @@ class TwitchChannelPointsMiner:
         "username",
         "twitch",
         "claim_drops_startup",
+        "twitch_dash_update",
         "enable_analytics",
         "disable_ssl_cert_verification",
         "disable_at_in_nickname",
@@ -76,6 +77,7 @@ def __init__(
         username: str,
         password: str = None,
         claim_drops_startup: bool = False,
+        twitch_dash_update: int = 60,
         enable_analytics: bool = False,
         disable_ssl_cert_verification: bool = False,
         disable_at_in_nickname: bool = False,
@@ -140,6 +142,7 @@ def is_connected():
         self.twitch = Twitch(self.username, user_agent, password)

         self.claim_drops_startup = claim_drops_startup
+        self.twitch_dash_update = twitch_dash_update
         self.priority = priority if isinstance(priority, list) else [priority]

         self.streamers: list[Streamer] = []
@@ -315,7 +318,7 @@ def run(
         ):
             self.sync_campaigns_thread = threading.Thread(
                 target=self.twitch.sync_campaigns,
-                args=(self.streamers,),
+                args=(self.streamers, self.twitch_dash_update),
             )
             self.sync_campaigns_thread.name = "Sync campaigns/inventory"
             self.sync_campaigns_thread.start()
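Taken together, the changes in this file add one user-facing knob. A minimal sketch of how it is meant to be used, with the import and username taken from the project's example.py (the 30-minute value is illustrative):

# Sketch: passing the new refresh interval to the miner.
from TwitchChannelPointsMiner import TwitchChannelPointsMiner

twitch_miner = TwitchChannelPointsMiner(
    username="your-twitch-username",
    twitch_dash_update=30,  # poll the drops dashboard every 30 minutes instead of the default 60
)
# run() then forwards the value into Twitch.sync_campaigns via the
# "Sync campaigns/inventory" thread, as shown in the hunk above.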
45 changes: 24 additions & 21 deletions TwitchChannelPointsMiner/classes/Twitch.py
@@ -150,11 +150,10 @@ def get_spade_url(self, streamer):
            settings_request = requests.get(settings_url, headers=headers)
            response = settings_request.text
            regex_spade = '"spade_url":"(.*?)"'
-            streamer.stream.spade_url = re.search(
-                regex_spade, response).group(1)
+            streamer.stream.spade_url = re.search(regex_spade, response).group(1)
        except requests.exceptions.RequestException as e:
-            logger.error(
-                f"Something went wrong during extraction of 'spade_url': {e}")
+            logger.error(f"Something went wrong during extraction of 'spade_url': {e}")
+            logger.exception("message")

    def get_broadcast_id(self, streamer):
        json_data = copy.deepcopy(GQLOperations.WithIsStreamLiveQuery)
@@ -296,6 +295,7 @@ def post_gql_request(self, json_data):
            logger.error(
                f"Error with GQLOperations ({json_data['operationName']}): {e}"
            )
+            logger.exception("message")
            return {}

    # Request for Integrity Token
@@ -335,6 +335,7 @@ def post_gql_request(self, json_data):
            return self.integrity
        except requests.exceptions.RequestException as e:
            logger.error(f"Error with post_integrity: {e}")
+            logger.exception("message")
            return self.integrity

    # verify the integrity token's contents for the "is_bad_bot" flag
@@ -371,6 +372,7 @@ def update_client_version(self):
            return self.client_version
        except requests.exceptions.RequestException as e:
            logger.error(f"Error with update_client_version: {e}")
+            logger.exception("message")
            return self.client_version

    def send_minute_watched_events(self, streamers, priority, chunk_size=3):
@@ -642,12 +644,12 @@ def send_minute_watched_events(self, streamers, priority, chunk_size=3):
                    )

                except requests.exceptions.ConnectionError as e:
-                    logger.error(
-                        f"Error while trying to send minute watched: {e}")
+                    logger.error(f"Error while trying to send minute watched: {e}")
+                    logger.exception("message")
                    self.__check_connection_handler(chunk_size)
                except requests.exceptions.Timeout as e:
-                    logger.error(
-                        f"Error while trying to send minute watched: {e}")
+                    logger.error(f"Error while trying to send minute watched: {e}")
+                    logger.exception("message")

                self.__chuncked_sleep(
                    next_iteration - time.time(), chunk_size=chunk_size
@@ -657,8 +659,8 @@ def send_minute_watched_events(self, streamers, priority, chunk_size=3):
            # self.__chuncked_sleep(60, chunk_size=chunk_size)
            self.__chuncked_sleep(20, chunk_size=chunk_size)
        except Exception:
-            logger.error(
-                "Exception raised in send minute watched", exc_info=True)
+            logger.error("Exception raised in send minute watched", exc_info=True)
+            logger.exception("message")

    # === CHANNEL POINTS / PREDICTION === #
    # Load the amount of current points for a channel, check if a bonus is available
@@ -851,8 +853,13 @@ def __get_campaigns_details(self, campaigns):

        response = self.post_gql_request(json_data)
        for r in response:
-            if r["data"]["user"] is not None:
-                result.append(r["data"]["user"]["dropCampaign"])
+            try:
+                if r["data"]["user"] is not None:
+                    result.append(r["data"]["user"]["dropCampaign"])
+            except KeyError as e:
+                logger.debug(
+                    f"KeyError: r['data']['user'] {e}"
+                )
        return result

    def __sync_campaigns(self, campaigns):
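The try/except added above guards against responses where the nested keys are missing. For comparison, a .get() chain achieves the same without raising; a sketch, not part of this PR:

# Hypothetical refactor using .get() chains instead of catching KeyError.
for r in response:
    user = (r.get("data") or {}).get("user")
    if user is not None:
        result.append(user.get("dropCampaign"))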
@@ -919,18 +926,15 @@ def claim_all_drops_from_inventory(self):
                drop.is_claimed = self.claim_drop(drop)
                time.sleep(random.uniform(5, 10))

-    def sync_campaigns(self, streamers, chunk_size=3):
+    def sync_campaigns(self, streamers, twitch_dash_update, chunk_size=3):
        campaigns_update = 0
+        campaigns = []
        while self.running:
            try:
-                # Get update from dashboard each 60minutes
+                # Get an update from the dashboard every X minutes (X = the twitch_dash_update argument)
                if (
                    campaigns_update == 0
-                    # or ((time.time() - campaigns_update) / 60) > 60
-                    # TEMPORARY AUTO DROP CLAIMING FIX
-                    # 30 minutes instead of 60 minutes
-                    or ((time.time() - campaigns_update) / 30) > 30
-                    #####################################
+                    or ((time.time() - campaigns_update) / 60) > twitch_dash_update
                ):
                    campaigns_update = time.time()

@@ -943,8 +947,6 @@ def sync_campaigns(self, streamers, chunk_size=3):
                    campaigns_details = self.__get_campaigns_details(
                        self.__get_drops_dashboard(status="ACTIVE")
                    )
-                    campaigns = []
-
                    # Going to clear array and structure. Remove all the timeBasedDrops expired or not started yet
                    for index in range(0, len(campaigns_details)):
                        if campaigns_details[index] is not None:
@@ -977,6 +979,7 @@

            except (ValueError, KeyError, requests.exceptions.ConnectionError) as e:
                logger.error(f"Error while syncing inventory: {e}")
+                logger.exception("message")
                self.__check_connection_handler(chunk_size)

            self.__chuncked_sleep(60, chunk_size=chunk_size)
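The rewritten refresh condition divides the elapsed seconds by 60 to get minutes and compares the result against the threshold. A standalone sketch of the timing check (names follow the diff; values are illustrative):

import time

twitch_dash_update = 60  # minutes between dashboard refreshes
campaigns_update = 0     # epoch seconds of the last refresh; 0 forces an immediate one

def dashboard_refresh_due() -> bool:
    # True on the first pass, then whenever more than twitch_dash_update minutes have elapsed.
    return (
        campaigns_update == 0
        or ((time.time() - campaigns_update) / 60) > twitch_dash_update
    )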
5 changes: 4 additions & 1 deletion TwitchChannelPointsMiner/constants.py
@@ -30,9 +30,12 @@
    }
}

+GITHUB_USER = "rdavydov"
BRANCH = "master"
GITHUB_url = (
-    "https://raw.githubusercontent.com/rdavydov/Twitch-Channel-Points-Miner-v2/"
+    "https://raw.githubusercontent.com/"
+    + GITHUB_USER
+    + "/Twitch-Channel-Points-Miner-v2/"
    + BRANCH
)

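The concatenation now composes to https://raw.githubusercontent.com/rdavydov/Twitch-Channel-Points-Miner-v2/master. The same result could be written with an f-string; a sketch of the equivalent, alternative style only:

GITHUB_USER = "rdavydov"
BRANCH = "master"
GITHUB_url = (
    f"https://raw.githubusercontent.com/{GITHUB_USER}"
    f"/Twitch-Channel-Points-Miner-v2/{BRANCH}"
)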
4 changes: 4 additions & 0 deletions delete_pycache.sh
@@ -0,0 +1,4 @@
+#!/bin/bash
+echo cleaning __pycache__
+find . -name "__pycache__" -exec rm -rf {} \; 2>/dev/null
+echo Done!
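For Windows or shell-less environments, the same cleanup can be done in Python; a sketch equivalent to delete_pycache.sh, not shipped by this PR:

# Recursively delete every __pycache__ directory under the current directory.
import pathlib
import shutil

for cache_dir in list(pathlib.Path(".").rglob("__pycache__")):
    shutil.rmtree(cache_dir, ignore_errors=True)
print("Done!")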
1 change: 1 addition & 0 deletions example.py
@@ -19,6 +19,7 @@
    username="your-twitch-username",
    password="write-your-secure-psw",  # If no password is provided, the script will ask for it interactively
    claim_drops_startup=False,  # Set to True to auto-claim all drops from the Twitch inventory on startup
+    twitch_dash_update=60,  # How often, in minutes, the Twitch dashboard is polled for drop progress. Defaults to 60 minutes
    priority=[  # Custom priority, in this example:
        Priority.STREAK,  # - First, catch all watch streaks from all streamers
        Priority.DROPS,  # - When there are no more watch streaks to catch, wait until all drops are collected across the streamers