Created
October 10, 2023 18:22
-
-
Save TigerWalts/38411ca8e7dd0be969a4ac2f5b798880 to your computer and use it in GitHub Desktop.
Python script that syncs Better Questing quest progress data from one player to another
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
""" | |
Syncs Better Questing quest progress data from one player to another | |
python3 bq_sync_player.py [-h] [-u] [-c] file source destination | |
-h Help | |
-u Mark quest rewards as already claimed (cannot use with -c) | |
-c Mark quest rewards as unclaimed (cannot use with -u) | |
file The file path to the QuestProgress.json file | |
source The UUID of the player to copy quest progress from (Check NameCache.json) | |
destination The UUID of the player to copy quest progress to (Check NameCache.json) | |
""" | |
import argparse | |
import json | |
import pathlib | |
import shutil | |
import sys | |
from typing import Any | |
def backup_file(filep: pathlib.Path):
    """Copy *filep* to a numbered sibling backup (<stem>.bak.N.json).

    Scans upward from N=1 until an unused backup name is found, so existing
    backups are never overwritten.
    """
    revision = 1
    while True:
        candidate = filep.with_suffix(f'.bak.{revision}.json')
        if not candidate.exists():
            break
        revision += 1
    shutil.copy(filep.as_posix(), candidate.as_posix())
def load_data(filep: pathlib.Path) -> dict:
    """Read *filep* and return its parsed JSON content."""
    with filep.open() as handle:
        return json.load(handle)
def save_data(filep: pathlib.Path, data: dict):
    """Write *data* to *filep* as JSON with 2-space indentation."""
    with filep.open('w') as handle:
        json.dump(data, handle, indent=2)
def process_completed(data: dict, src: str, dst: str, claim: str) -> dict:
    """Rebuild a quest's 'completed' table so *dst* mirrors *src*'s entry.

    Any existing entry for *dst* is dropped, the survivors are re-indexed,
    and — when *src* has an entry — a copy of it is appended with its uuid
    fields pointed at *dst*.  *claim* controls the copied 'claimed:*' fields:
    'claim' forces 1, 'unclaim' forces 0, anything else keeps the copied value.
    """
    if not data:
        return data
    # Keys look like '<index>:<type-suffix>'; keep the suffix when re-indexing.
    index_suffix = tuple(data.keys())[0].partition(':')[2]

    # Find src's entry (last match wins, matching the original scan order).
    donor = None
    for entry in data.values():
        for key, value in entry.items():
            if key.startswith('uuid:') and value == src:
                donor = entry.copy()

    def targets_dst(entry: dict) -> bool:
        # An entry belongs to dst if any of its uuid fields equals dst.
        return any(k.startswith('uuid:') and v == dst for k, v in entry.items())

    kept = [entry for entry in data.values() if not targets_dst(entry)]
    result = {f'{i}:{index_suffix}': entry for i, entry in enumerate(kept)}

    if donor is not None:
        for key, value in donor.items():
            if key.startswith('claimed:'):
                donor[key] = 0 if claim == 'unclaim' else 1 if claim == 'claim' else value
            if key.startswith('uuid:'):
                donor[key] = dst
        result[f'{len(result)}:{index_suffix}'] = donor
    return result
def process_complete_users(data: dict, src: str, dst: str) -> dict:
    """Sync a task's 'completeUsers' list so dst's presence matches src's."""
    if not data:
        return data
    has_src = src in data.values()
    has_dst = dst in data.values()
    if has_src and has_dst:
        # Already in sync; hand the table back untouched.
        return data
    index_suffix = tuple(data.keys())[0].partition(':')[2]
    if has_src:
        # src completed this task but dst isn't recorded yet: append dst.
        extended = data.copy()
        extended[f'{len(extended)}:{index_suffix}'] = dst
        return extended
    # src hasn't completed it: drop dst (if present) and re-index the rest.
    survivors = [uuid for uuid in data.values() if uuid != dst]
    return {f'{i}:{index_suffix}': uuid for i, uuid in enumerate(survivors)}
def process_user_progress(data: dict, src: str, dst: str) -> dict:
    """Rebuild a task's 'userProgress' table so *dst* carries a copy of *src*'s progress.

    Drops any existing dst entry, re-indexes what remains, and — when src has
    an entry — appends a copy of it with its uuid fields pointed at dst.
    """
    if not data:
        return data
    # Keys look like '<index>:<type-suffix>'; keep the suffix when re-indexing.
    index_suffix = tuple(data.keys())[0].partition(':')[2]

    # Find src's entry (last match wins, matching the original scan order).
    donor = None
    for entry in data.values():
        for key, value in entry.items():
            if key.startswith('uuid:') and value == src:
                donor = entry.copy()

    kept = [
        entry for entry in data.values()
        if not any(k.startswith('uuid:') and v == dst for k, v in entry.items())
    ]
    result = {f'{i}:{index_suffix}': entry for i, entry in enumerate(kept)}

    if donor is not None:
        for key in donor:
            if key.startswith('uuid:'):
                donor[key] = dst
        result[f'{len(result)}:{index_suffix}'] = donor
    return result
def process_task_attr(key: str, data: Any, src: str, dst: str) -> dict:
    """Dispatch one task attribute to its sync routine.

    'userProgress:*' and 'completeUsers:*' tables are rewritten; every other
    attribute is returned unchanged.
    """
    if key.startswith('userProgress:'):
        return process_user_progress(data, src, dst)
    if key.startswith('completeUsers:'):
        return process_complete_users(data, src, dst)
    return data
def process_task(data: dict, src: str, dst: str) -> dict:
    """Apply attribute-level syncing to every attribute of a single task."""
    synced = {}
    for attr_key, attr_value in data.items():
        synced[attr_key] = process_task_attr(attr_key, attr_value, src, dst)
    return synced
def process_tasks(data: dict, src: str, dst: str) -> dict:
    """Sync every task entry under a quest's 'tasks' table."""
    synced = {}
    for task_id, task in data.items():
        synced[task_id] = process_task(task, src, dst)
    return synced
def process_quest_attr(key: str, data: Any, src: str, dst: str, claim: str) -> dict:
    """Dispatch one quest attribute to its sync routine.

    'tasks:*' and 'completed:*' tables are rewritten; every other attribute
    is returned unchanged.  *claim* only affects the 'completed' table.
    """
    if key.startswith('tasks:'):
        return process_tasks(data, src, dst)
    if key.startswith('completed:'):
        return process_completed(data, src, dst, claim)
    return data
def process_quest(data: dict, src: str, dst: str, claim: str) -> dict:
    """Sync every attribute of a single quest entry."""
    synced = {}
    for attr_key, attr_value in data.items():
        synced[attr_key] = process_quest_attr(attr_key, attr_value, src, dst, claim)
    return synced
def process_quest_progress(data: dict, src: str, dst: str, claim: str) -> dict:
    """Sync every quest in the questProgress table."""
    synced = {}
    for quest_id, quest in data.items():
        synced[quest_id] = process_quest(quest, src, dst, claim)
    return synced
def process_root(data: dict, src: str, dst: str, claim: str) -> dict:
    """Locate the 'questProgress:*' table and return a new root holding its synced copy.

    Raises IndexError when the root has no 'questProgress:*' key, matching the
    original behavior.  Note the returned root contains ONLY that key.
    """
    matches = [key for key in data.keys() if key.startswith('questProgress:')]
    progress_key = matches[0]
    return {progress_key: process_quest_progress(data[progress_key], src, dst, claim)}
def main() -> int:
    """Parse CLI arguments, back up the progress file, and sync src -> dst.

    Returns 0 on success, 1 on a usage or file error (suitable for sys.exit).
    """
    parser = argparse.ArgumentParser('bq_sync_player')
    parser.add_argument('file', type=str, help='path to the QuestProgress.json file')
    parser.add_argument('source', type=str, help='UUID of the player to copy progress from')
    parser.add_argument('destination', type=str, help='UUID of the player to copy progress to')
    parser.add_argument('-u', '--unclaim', action='store_true',
                        help='mark copied quest rewards as unclaimed (cannot use with -c)')
    parser.add_argument('-c', '--claim', action='store_true',
                        help='mark copied quest rewards as already claimed (cannot use with -u)')
    args = parser.parse_args()
    if args.unclaim and args.claim:
        # The two flags are contradictory; refuse rather than guess.
        # (Fixed typo "clain" and dropped the placeholder-less f-string.)
        print("Cannot use claim and unclaim flags at the same time", file=sys.stderr)
        return 1
    claim = 'copy'
    if args.claim:
        claim = 'claim'
    if args.unclaim:
        claim = 'unclaim'
    filep = pathlib.Path(args.file)
    if not filep.exists():
        print(f"File not found {filep}", file=sys.stderr)
        return 1
    backup_file(filep)  # always keep a numbered .bak copy before rewriting
    data = load_data(filep)
    data = process_root(data, args.source, args.destination, claim)
    save_data(filep, data)
    return 0


if __name__ == "__main__":
    sys.exit(main())
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment