Skip to content

Instantly share code, notes, and snippets.

@ohaiibuzzle
Last active June 28, 2024 19:49
Show Gist options
  • Save ohaiibuzzle/f861094fc7afdd50c65ea1a1877e728d to your computer and use it in GitHub Desktop.
import os
import subprocess
import json
import urllib
import urllib.request
import urllib.parse
API_HOST = "hk4e-api-os.hoyoverse.com"
def get_wish_urls():
    """Scan the Genshin Impact WebKit cache for candidate wish-history URLs.

    Runs a shell pipeline that dumps printable strings from every cached
    WebKit file, keeps lines mentioning the e20190909gacha-v3 page, and
    extracts anything that looks like an https URL up to ``game_biz=``.

    Returns:
        list[bytes]: one raw candidate URL per pipeline output line
        (may be empty if no cache entries match).
    """
    # Raw string: find's -exec requires a literal "\;", which in a normal
    # string literal is an invalid escape (SyntaxWarning on modern Python).
    # The original pipeline ended in "| more"; that pager is a no-op (and
    # can block) when stdout is not a terminal, so it is dropped here.
    cmd = (
        r"find ~/Library/Containers/com.miHoYo.GenshinImpact/Data/Library/Caches/WebKit"
        r" -type f -exec strings {} \;"
        " | grep e20190909gacha-v3"
        " | egrep '(https.+?game_biz=)'"
        " | uniq"
    )
    output = subprocess.check_output(cmd, shell=True)
    # check_output returns bytes; callers expect raw bytes lines.
    return list(output.splitlines())
def check_url(url) -> bool:
    """Check whether a captured wish-history URL still has a valid authkey.

    The query string of *url* (which carries the authkey) is grafted onto
    the official getGachaLog endpoint on API_HOST, and a tiny 5-item probe
    request is issued. The API answers ``retcode == 0`` only while the
    authkey is still accepted.

    Args:
        url: raw candidate URL as *bytes* (as produced by get_wish_urls).

    Returns:
        True if the API accepts the authkey; False on any failure
        (network error, timeout, bad JSON, non-zero retcode).
    """
    uri = urllib.parse.urlparse(url)
    path = "gacha_info/api/getGachaLog"
    # Keep the captured authkey (and the rest of the query) but pin the
    # probe parameters to a minimal, known-good request.
    query = urllib.parse.parse_qs(uri.query)
    query["gacha_type"] = "301"  # character event banner
    query["size"] = "5"          # smallest useful page size
    # NOTE: the original assigned "lang" twice ("en", then "en-us");
    # only the final "en-us" value ever took effect, so keep just that.
    query["lang"] = "en-us"
    encoded = urllib.parse.urlencode(query, doseq=True)
    # url is bytes, so the parsed scheme is bytes and must be decoded.
    probe = uri.scheme.decode("utf-8") + "://" + API_HOST + "/" + path + "?" + encoded
    try:
        response = urllib.request.urlopen(probe, timeout=10)
        data = json.loads(response.read())
        # retcode 0 means the authkey was accepted.
        return data["retcode"] == 0
    except Exception:
        # Deliberately broad: any failure at all means "not a valid URL".
        return False
def main():
    """Find the first still-valid wish-history URL and copy it to the clipboard.

    Probes each candidate from get_wish_urls() with check_url(); on the
    first hit, prints it and pipes it into macOS ``pbcopy``. Prints a
    not-found message when no candidate validates.
    """
    for url in get_wish_urls():
        if not check_url(url):
            continue
        print("Found valid url: " + url.decode("utf-8"))
        # pbcopy reads from stdin; feeding the bytes directly avoids the
        # shell-quoting hazard of the original `echo "{url}" | pbcopy`
        # (and does not append echo's trailing newline).
        subprocess.run(["pbcopy"], input=url, check=False)
        print("Copied to clipboard.")
        break
    else:
        # for/else: runs only when the loop finished without a break.
        print("No valid url found.")
    print("Done.")


if __name__ == "__main__":
    main()
@TheFermi0n
Copy link

In line 12, change v2 to v3 (so the pattern reads grep e20190909gacha-v3) and the script is able to extract the new link.

@Computron010
Copy link

In line 20, change "event/gacha_info/api/getGachaLog" to "gacha_info/api/getGachaLog", and the script appears to pick up the links fine now.

Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment