Skip to content

Commit c44b385

Browse files
committed
HOTFIX (#20):
- temporarily disabled cache (sorry beatsaver!) * some ugly hacks as a hotfix for the time being TODO: proper fix & refactoring
1 parent 1339b74 commit c44b385

File tree

2 files changed

+97
-81
lines changed

2 files changed

+97
-81
lines changed

cache.py

Lines changed: 26 additions & 24 deletions
Original file line numberDiff line numberDiff line change
@@ -17,7 +17,7 @@
1717
headers = {"User-Agent": "ARBSMapDo V1"}
1818

1919
# Used to avoid spamming beatsaver API
20-
beatsaver_scraped_data_url = "https://raw.githubusercontent.com/andruzzzhka/BeatSaberScrappedData/master/beatSaverScrappedData.zip"
20+
BEATSAVER_SCRAPED_DATA_URL = "https://github.com/andruzzzhka/BeatSaberScrappedData/raw/master/combinedScrappedData.zip"
2121

2222

2323
class Cache:
@@ -27,36 +27,38 @@ def __init__(self, arbsmapdo_config):
2727
self.tmp_dir = Path(arbsmapdo_config["tmp_dir"])
2828
self.tmp_dir.mkdir(exist_ok=True)
2929
self.download_dir = Path(arbsmapdo_config["download_dir"])
30-
self.beatsaver_cachefile = Path(arbsmapdo_config["beatsaver_cachefile"])
31-
self.levelhash_cachefile = Path(arbsmapdo_config["levelhash_cachefile"])
30+
self.beatsaver_cachefile = Path(
31+
arbsmapdo_config["beatsaver_cachefile"])
32+
self.levelhash_cachefile = Path(
33+
arbsmapdo_config["levelhash_cachefile"])
3234
self.rescan = arbsmapdo_config["rescan"]
3335

34-
self._beatsaver_cache, self.local_cache_last_downloaded = self.load_beatsaver_cache_from_andruzzzhka_scrapes()
36+
# self.load_beatsaver_cache_from_andruzzzhka_scrapes()
37+
self._beatsaver_cache, self.local_cache_last_downloaded = {}, {}
3538
self.levelhash_cache = self.load_levelhash_cache()
3639
self.update_levelhash_cache()
3740

3841
def _update_andruzzzhka_scrapes(self):
3942
print("Updating Local BeatSaver Cache. This helps avoiding spamming the API hundreds of times.")
40-
print("Downloading beatSaverScrappedData (helps to avoid spamming beatsaver API)...")
43+
print(
44+
"Downloading beatSaverScrappedData (helps to avoid spamming beatsaver API)...")
4145

4246
self.beatsaver_cachefile.unlink(missing_ok=True)
4347

4448
dl_filename = str(self.tmp_dir.joinpath("andruzzzhka_scrape.zip"))
45-
wget.download(beatsaver_scraped_data_url, dl_filename)
49+
wget.download(BEATSAVER_SCRAPED_DATA_URL, dl_filename)
4650

4751
# Unzip
4852
try:
4953
with zipfile.ZipFile(str(dl_filename), "r") as zip_file:
5054
zip_file.extractall(str(self.tmp_dir))
5155
# Replace old local cache by updated version
52-
os.replace(self.tmp_dir.joinpath("beatSaverScrappedData.json"), self.beatsaver_cachefile)
56+
os.replace(self.tmp_dir.joinpath(
57+
"combinedScrappedData.json"), self.beatsaver_cachefile)
5358
except zipfile.BadZipFile as e:
5459
# Workaround for https://github.com/andruzzzhka/BeatSaberScrappedData/issues/6
55-
print(f"Error when extracting zipfile:\n{e}\nDownloading uncompressed json instead (will be slower!)...")
56-
wget.download("https://raw.githubusercontent.com/andruzzzhka/BeatSaberScrappedData/master/beatSaverScrappedData.json",
57-
out=str(self.beatsaver_cachefile))
60+
print(f"Error when extracting zipfile:\n{e}")
5861

59-
last_updated = time.time()
6062
print("\nCache ready.")
6163

6264
def load_beatsaver_cache_from_andruzzzhka_scrapes(self):
@@ -70,8 +72,8 @@ def load_beatsaver_cache_from_andruzzzhka_scrapes(self):
7072

7173
# Elapsed is given in seconds. The scrapes of andruzzzhka get updated once per day.
7274
if elapsed > 86400:
73-
update = True
74-
75+
update = True
76+
7577
# Update cache if neccessary
7678
if update:
7779
self._update_andruzzzhka_scrapes()
@@ -83,7 +85,7 @@ def load_beatsaver_cache_from_andruzzzhka_scrapes(self):
8385

8486
cache_dict = dict()
8587
for levelinfo in scraped_cache_raw:
86-
cache_dict[levelinfo["hash"].lower()] = levelinfo
88+
cache_dict[levelinfo["Hash"].lower()] = levelinfo
8789

8890
return cache_dict, last_modified
8991

@@ -96,16 +98,18 @@ def _get_beatsaver_info_by_api(self, level_id):
9698
try:
9799
if len(level_id) == 40:
98100
# Is sha1-hash
99-
response = requests.get("https://beatsaver.com/api/maps/by-hash/{id}".format(id=level_id), headers=headers)
101+
response = requests.get(
102+
"https://beatsaver.com/api/maps/hash/{id}".format(id=level_id), headers=headers)
100103
else:
101104
# Treat as level key
102-
response = requests.get("https://beatsaver.com/api/maps/detail/{id}".format(id=level_id), headers=headers)
105+
response = requests.get(
106+
"https://beatsaver.com/api/maps/detail/{id}".format(id=level_id), headers=headers)
103107
json = response.json()
104108
except JSONDecodeError:
105109
print("Failed to get level {} from Beat Saver.".format(level_id))
106110
return None
107111
return json
108-
112+
109113
def get_beatsaver_info(self, level_id):
110114
"""
111115
Uses information from the cache (hashes only) or calls the beatsaver API (hashes & keys)
@@ -135,7 +139,7 @@ def load_levelhash_cache(self):
135139
return hashcache
136140
else:
137141
return dict()
138-
142+
139143
def save_levelhash_cache(self):
140144
# Save updates to the cachefile
141145
with open(self.levelhash_cachefile, "w+", encoding="UTF-8") as fp:
@@ -149,12 +153,10 @@ def update_levelhash_cache(self):
149153
# If this is not the case -> calculate the hash and store to hashcache
150154
if entry.name not in self.levelhash_cache.keys():
151155
if entry.is_dir():
152-
levelhash = utils.calculate_Level_hash_from_dir(self.download_dir.joinpath(entry.name))
156+
levelhash = utils.calculate_Level_hash_from_dir(
157+
self.download_dir.joinpath(entry.name))
153158
self.levelhash_cache[entry.name] = levelhash
154159
if entry.is_file() and entry.suffix == ".zip":
155-
levelhash = utils.calculate_Level_hash_from_zip(self.download_dir.joinpath(entry.name))
160+
levelhash = utils.calculate_Level_hash_from_zip(
161+
self.download_dir.joinpath(entry.name))
156162
self.levelhash_cache[entry.name] = levelhash
157-
158-
159-
160-

0 commit comments

Comments
 (0)