Last active
April 20, 2020 04:48
-
-
Save huderlem/4cb4cbfb0d6e17faa40db03e64ebc513 to your computer and use it in GitHub Desktop.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
# pokemap2json.py
#
# This script processes a pokeruby or pokeemerald project and generates the map data JSON files for it.
# It creates:
#   data/maps/map_groups.json
#   data/layouts/layouts.json
#   data/maps/<map_name>/map.json
#
# In order to convert your project to use the new JSON map data, perform the following steps:
#   1. Run pokemap2json.py on your project.
#      Example: "python pokemap2json.py emerald path/to/my/project"
#   2. Commit all the JSON files it creates.
#   3. Merge pret/master.
#      This will result in merge conflicts in all of the .json files for maps you've changed.
#      Resolve all of the .json file merge conflicts by choosing the changes from your branch (merge strategy "ours"), not pret/master.
#      If your project is very up-to-date with pret/master and you don't expect any non-JSON merge conflicts, you can use "git merge -s ours master" to do the merging automatically.
#      After resolving any merge conflicts, make sure to delete the `connections.inc`, `header.inc`, and `events.inc` files from each map data directory. These files will be generated from the JSON and are no longer needed.
#   4. If you wish to compare the ROM hashes before and after the JSON conversion, you will need to manually add the "connections_order" field to data/maps/map_groups.json.
#   5. Build your ROM. (Make sure to run ./build_tools.sh, since there is a new JSON conversion tool in tools/mapsjson.)
#
import os | |
import re | |
import sys | |
import json | |
from collections import OrderedDict | |
def strip_comment(line):
    """Return *line* with any trailing '@' assembly comment removed.

    A '@' inside a double-quoted string literal is NOT a comment marker.
    Bug fix: the original tracked in_string but never checked it, so it
    truncated at the first '@' even when it appeared inside a string.
    """
    in_string = False
    for i, char in enumerate(line):
        if char == '"':
            in_string = not in_string
        elif char == '@' and not in_string:
            return line[:i]
    return line
def parse_asm(text):
    """Tokenize assembly source into a list of [macro, arg, ...] lists.

    Label definitions become ['.label', <name>]; every other non-empty
    line becomes [<macro>, <arg1>, <arg2>, ...] with comma-separated
    arguments stripped of surrounding whitespace.
    """
    parsed = []
    for raw in text.splitlines():
        line = strip_comment(raw.strip())
        if not line.strip():
            continue
        if ':' in line:
            # Everything before the first colon is the label name.
            parsed.append(['.label', line.split(':', 1)[0]])
            continue
        ws = re.search(r'\s+', line)
        if ws is None:
            # Bare macro with no arguments.
            parsed.append([line])
        else:
            cut = ws.start()
            args = [a.strip() for a in re.split(r'\s*,\s*', line[cut:])]
            parsed.append([line[:cut]] + args)
    return parsed
def get_label_macros(commands, label):
    """Return the commands that follow *label*, up to the next label.

    Consecutive labels with nothing between them are treated as aliases:
    collection continues past a new label until at least one command has
    been gathered for the requested label.
    """
    collecting = False
    collected = []
    for params in commands:
        if params[0] == '.label':
            if params[1] == label:
                collecting = True
            elif collecting and collected:
                break
        elif collecting:
            collected.append(params)
    return collected
def get_label_values(commands, label):
    """Return the flattened argument values belonging to *label*.

    Assembler directives are skipped; the arguments of the remaining
    macros are concatenated in order. Bug fix: '.ifndef' was previously
    misspelled 'ifndef' (missing the leading dot), so that directive was
    never actually skipped.
    """
    label_commands = get_label_macros(commands, label)
    label_values = []
    for params in label_commands:
        if params[0] in ('.align', '.ifdef', '.ifndef'):
            continue
        label_values += params[1:]
    return label_values
def parse_map_groups(project_root):
    """Parse data/maps/groups.inc and return (map_groups, map_names).

    map_groups maps each group label to {'group_id': int, 'map_names': [...]};
    map_names is the flat list of all map labels. Exits the process on
    any structural error. (print statements converted to the py2/py3-
    compatible function form.)
    """
    filepath = os.path.join(project_root, 'data/maps/groups.inc')
    if not os.path.exists(filepath):
        print('ERROR: Map groups file "%s" does not exist' % (filepath))
        sys.exit()
    print('Parsing map groups from "%s"' % (filepath))
    with open(filepath) as f:
        content = f.read()
    commands = parse_asm(content)
    # First pass: locate the group-pointers table (label containing
    # "MapGroups") and assign each group label its index.
    in_group_pointers = False
    map_groups = {}
    map_names = []
    map_groups_table_label = ''
    group_id = 0
    for params in commands:
        macro = params[0]
        if macro == '.label':
            if in_group_pointers:
                break
            if re.search('MapGroups', params[1]) is not None:
                map_groups_table_label = params[1]
                in_group_pointers = True
        elif macro == '.4byte':
            if in_group_pointers:
                for val in params[1:]:
                    map_groups[val] = {'group_id': group_id, 'map_names': []}
                    group_id += 1
    # Second pass: associate each group's .4byte map labels with it.
    group_id = -1
    group_name = ''
    for params in commands:
        macro = params[0]
        if macro == '.label':
            group_name = params[1]
            if group_name == map_groups_table_label:
                group_id = -1
                continue
            if group_name not in map_groups:
                print('ERROR: Map group name "%s" is not listed in the map groups table (i.e. gMapGroups)' % (group_name))
                sys.exit()
            group_id = map_groups[group_name]['group_id']
        elif macro == '.4byte':
            if group_id != -1:
                for map_name in params[1:]:
                    # A map may belong to at most one group.
                    if map_name in map_names:
                        print('ERROR: Map "%s" is listed in more than one map group' % (map_name))
                        sys.exit()
                    map_groups[group_name]['map_names'].append(map_name)
                    map_names.append(map_name)
    return map_groups, map_names
def get_layout_constant_from_name(layout_name):
    """Convert a CamelCase layout name into its LAYOUT_* constant name."""
    # Insert underscores at lower->upper transitions, then upper-case.
    snake = re.sub(r'([a-z])([A-Z])', r'\1_\2', layout_name)
    constant = re.sub(r'_+', r'_', 'LAYOUT_' + snake.upper())
    # Special case: "SSTidal" doesn't follow the CamelCase convention.
    return constant.replace("SSTIDAL", "SS_TIDAL")
def parse_layout(filepath):
    """Parse a single data/layouts/<name>/layout.inc into an OrderedDict.

    The layout label is derived from the directory name in *filepath*.
    Exits the process if the path is malformed, the file is missing, or
    no layout data is found. (print statements converted to the
    py2/py3-compatible function form; pointless os.path.join(filepath)
    no-op removed.)
    """
    match = re.match(r'.*data/layouts/(.+)/layout.inc', filepath)
    if not match:
        print('ERROR: Layout label could not be determined from filepath "%s"' % (filepath))
        sys.exit()
    layout_name = match.group(1)
    layout_label = layout_name + '_Layout'
    if not os.path.exists(filepath):
        print('ERROR: Layout file "%s" does not exist' % (filepath))
        sys.exit()
    with open(filepath) as f:
        content = f.read()
    commands = get_label_values(parse_asm(content), layout_label)
    if not len(commands):
        # Try adding an 's' to pluralize "_Layouts". Some maps have that...
        layout_label = layout_label + 's'
        commands = get_label_values(parse_asm(content), layout_label)
        if not len(commands):
            print('ERROR: Could not find layout data in "%s"' % (filepath))
            sys.exit()
    border_label = commands[2]
    blockdata_label = commands[3]
    border_commands = get_label_values(parse_asm(content), border_label)
    blockdata_commands = get_label_values(parse_asm(content), blockdata_label)
    return OrderedDict([
        ('id', get_layout_constant_from_name(layout_name)),
        ('name', layout_label),
        ('width', int(commands[0], 0)),
        ('height', int(commands[1], 0)),
        ('primary_tileset', commands[4]),
        ('secondary_tileset', commands[5]),
        ('border_filepath', border_commands[0].strip('"')),
        ('blockdata_filepath', blockdata_commands[0].strip('"'))])
def parse_layouts(project_root):
    """Parse the layouts table and every layout file it includes.

    Returns an OrderedDict with the layouts table label and the list of
    parsed layout OrderedDicts, in inclusion order. Exits the process
    on missing files. (print statements converted to the py2/py3-
    compatible function form.)
    """
    filepath = os.path.join(project_root, 'data/layouts_table.inc')
    if not os.path.exists(filepath):
        print('ERROR: Layouts table file "%s" does not exist' % (filepath))
        sys.exit()
    print('Parsing layouts order from "%s"' % (filepath))
    with open(filepath) as f:
        content = f.read()
    commands = parse_asm(content)
    # Walk the pointers table (label containing "MapLayouts") and record
    # each layout label's index. NOTE: layout_names is currently unused
    # beyond this loop; kept for parity with the original behavior.
    in_layout_pointers = False
    layouts_table_label = ''
    layout_names = {}
    layout_id = 0
    for params in commands:
        macro = params[0]
        if macro == '.label':
            if in_layout_pointers:
                break
            if re.search('MapLayouts', params[1]) is not None:
                layouts_table_label = params[1]
                in_layout_pointers = True
        elif macro == '.4byte':
            if in_layout_pointers:
                for val in params[1:]:
                    layout_names[val] = layout_id
                    layout_id += 1
    filepath = os.path.join(project_root, 'data/layouts.inc')
    if not os.path.exists(filepath):
        print('ERROR: Layouts include file "%s" does not exist' % (filepath))
        sys.exit()
    print('Parsing layout inclusions from "%s"' % (filepath))
    with open(filepath) as f:
        content = f.read()
    layouts = []
    commands = parse_asm(content)
    for params in commands:
        if params[0] == '.include':
            layout_filepath = os.path.join(project_root, params[1].strip('"'))
            layouts.append(parse_layout(layout_filepath))
    return OrderedDict([('layouts_table_label', layouts_table_label), ('layouts', layouts)])
# Same as layout constants, but with a MAP_ prefix.
def get_map_constant_from_name(map_name):
    """Convert a CamelCase map name into its MAP_* constant name."""
    snake = re.sub(r'([a-z])([A-Z])', r'\1_\2', map_name)
    constant = re.sub(r'_+', r'_', 'MAP_' + snake.upper())
    # Special case: "SSTidal" doesn't follow the CamelCase convention.
    return constant.replace("SSTIDAL", "SS_TIDAL")
def parse_int_or_bool(value):
    """Parse *value* as an integer literal, or a TRUE/FALSE keyword.

    Returns int(value, 0) for numeric literals (any base), 1 for 'true'
    (case-insensitive), 0 otherwise. Bug fix: the bare 'except:' is
    narrowed to the exceptions int() can actually raise, so unrelated
    errors (e.g. KeyboardInterrupt) are no longer swallowed.
    """
    try:
        return int(value, 0)
    except (ValueError, TypeError):
        return 1 if value.lower() == 'true' else 0
def parse_map(project_root, project_type, map_name):
    """Parse one map's header.inc, connections.inc, and events.inc.

    project_type is 'ruby' or 'emerald'; they differ in the header's
    trailing fields (emerald packs four name=value flags into the
    map_header_flags macro). Returns an OrderedDict of JSON-ready map
    fields. Exits the process on missing files or malformed macros.
    (print statements converted to the py2/py3-compatible function form.)
    """
    filepath = os.path.join(project_root, 'data/maps/' + map_name + '/header.inc')
    if not os.path.exists(filepath):
        print('ERROR: Map header file "%s" does not exist' % (filepath))
        sys.exit()
    with open(filepath) as f:
        content = f.read()
    map_data = OrderedDict()
    commands = get_label_values(parse_asm(content), map_name)
    events_label = commands[1]
    scripts_label = commands[2]
    connections_label = commands[3]
    map_data['id'] = get_map_constant_from_name(map_name)
    map_data['name'] = map_name
    map_data['layout'] = get_layout_constant_from_name(commands[0].replace("_Layout", ""))
    map_data['music'] = commands[4]
    map_data['region_map_section'] = commands[6]
    map_data['requires_flash'] = bool(int(commands[7]))
    map_data['weather'] = commands[8]
    map_data['map_type'] = commands[9]
    if project_type == 'emerald':
        # Each flag arrives as "name=value"; validate the name, keep the value.
        if not commands[11].startswith('allow_bike'):
            print('ERROR: Expected "allow_bike" in map_header_flags macro in map header "%s", but found "%s" instead' % (filepath, commands[11]))
            sys.exit()
        map_data['allow_bike'] = bool(int(commands[11].split('=')[-1]))
        if not commands[12].startswith('allow_escape_rope'):
            print('ERROR: Expected "allow_escape_rope" in map_header_flags macro in map header "%s", but found "%s" instead' % (filepath, commands[12]))
            sys.exit()
        map_data['allow_escape_rope'] = bool(int(commands[12].split('=')[-1]))
        if not commands[13].startswith('allow_run'):
            print('ERROR: Expected "allow_run" in map_header_flags macro in map header "%s", but found "%s" instead' % (filepath, commands[13]))
            sys.exit()
        map_data['allow_running'] = bool(int(commands[13].split('=')[-1]))
        if not commands[14].startswith('show_map_name'):
            print('ERROR: Expected "show_map_name" in map_header_flags macro in map header "%s", but found "%s" instead' % (filepath, commands[14]))
            sys.exit()
        map_data['show_map_name'] = bool(int(commands[14].split('=')[-1]))
        map_data['battle_scene'] = commands[15]
    elif project_type == 'ruby':
        map_data['show_map_name'] = bool(int(commands[11]))
        map_data['battle_scene'] = commands[12]
    # Read connections.inc (optional: absent file means connections is None,
    # present-but-empty means an empty list).
    map_data['connections'] = None
    filepath = os.path.join(project_root, 'data/maps/' + map_name + '/connections.inc')
    if os.path.exists(filepath):
        with open(filepath) as f:
            content = f.read()
        map_data['connections'] = []
        commands = parse_asm(content)
        label_commands = get_label_values(commands, connections_label)
        if len(label_commands) > 1:
            connections_list_label = label_commands[1]
            connections = get_label_macros(commands, connections_list_label)
            for params in connections:
                macro = params[0]
                if macro == 'connection':
                    connection = OrderedDict()
                    if not params[1] in ['up', 'right', 'down', 'left', 'dive', 'emerge']:
                        print('ERROR: Invalid connection direction "%s" in "%s"' % (params[1], filepath))
                        sys.exit()
                    connection['direction'] = params[1]
                    connection['offset'] = int(params[2], 0)
                    connection['map'] = params[3]
                    map_data['connections'].append(connection)
    # Read events.inc (required).
    filepath = os.path.join(project_root, 'data/maps/' + map_name + '/events.inc')
    if not os.path.exists(filepath):
        print('ERROR: Map events file "%s" does not exist' % (filepath))
        sys.exit()
    with open(filepath) as f:
        content = f.read()
    commands = parse_asm(content)
    labels = get_label_values(commands, events_label)
    if len(labels) == 4:
        # The events label points at four sub-lists, in this fixed order.
        map_data['object_events'] = []
        map_data['warp_events'] = []
        map_data['coord_events'] = []
        map_data['bg_events'] = []
        object_events_label = labels[0]
        warp_events_label = labels[1]
        coord_events_label = labels[2]
        bg_events_label = labels[3]
        object_events = get_label_macros(commands, object_events_label)
        for params in object_events:
            if params[0] == 'object_event':
                object_event = OrderedDict()
                object_event['graphics_id'] = params[2]
                object_event['x'] = int(params[4], 0)
                object_event['y'] = int(params[5], 0)
                object_event['elevation'] = int(params[6], 0)
                object_event['movement_type'] = params[7]
                object_event['movement_range_x'] = int(params[8], 0)
                object_event['movement_range_y'] = int(params[9], 0)
                object_event['trainer_type'] = parse_int_or_bool(params[10])
                object_event['trainer_sight_or_berry_tree_id'] = int(params[11], 0)
                object_event['script'] = params[12]
                object_event['flag'] = params[13]
                map_data['object_events'].append(object_event)
        warp_events = get_label_macros(commands, warp_events_label)
        for params in warp_events:
            if params[0] == 'warp_def':
                warp_event = OrderedDict()
                warp_event['x'] = int(params[1], 0)
                warp_event['y'] = int(params[2], 0)
                warp_event['elevation'] = int(params[3], 0)
                # NOTE: dest_map/dest_warp_id are intentionally swapped
                # relative to the macro's argument order.
                warp_event['dest_map'] = params[5]
                warp_event['dest_warp_id'] = int(params[4], 0)
                map_data['warp_events'].append(warp_event)
        coord_events = get_label_macros(commands, coord_events_label)
        for params in coord_events:
            if params[0] == 'coord_event':
                coord_event = OrderedDict()
                coord_event['type'] = 'trigger'
                coord_event['x'] = int(params[1], 0)
                coord_event['y'] = int(params[2], 0)
                coord_event['elevation'] = int(params[3], 0)
                coord_event['var'] = params[4]
                coord_event['var_value'] = int(params[5], 0)
                coord_event['script'] = params[6]
                map_data['coord_events'].append(coord_event)
            elif params[0] == 'coord_weather_event':
                weather_event = OrderedDict()
                weather_event['type'] = 'weather'
                weather_event['x'] = int(params[1], 0)
                weather_event['y'] = int(params[2], 0)
                weather_event['elevation'] = int(params[3], 0)
                weather_event['weather'] = params[4]
                map_data['coord_events'].append(weather_event)
        bg_events = get_label_macros(commands, bg_events_label)
        for params in bg_events:
            if params[0] == 'bg_event':
                bg_event = OrderedDict()
                bg_event['type'] = 'sign'
                bg_event['x'] = int(params[1], 0)
                bg_event['y'] = int(params[2], 0)
                bg_event['elevation'] = int(params[3], 0)
                bg_event['player_facing_dir'] = params[4]
                bg_event['script'] = params[5]
                map_data['bg_events'].append(bg_event)
            elif params[0] == 'bg_hidden_item_event':
                hidden_item_event = OrderedDict()
                hidden_item_event['type'] = 'hidden_item'
                hidden_item_event['x'] = int(params[1], 0)
                hidden_item_event['y'] = int(params[2], 0)
                hidden_item_event['elevation'] = int(params[3], 0)
                hidden_item_event['item'] = params[4]
                hidden_item_event['flag'] = params[5]
                map_data['bg_events'].append(hidden_item_event)
            elif params[0] == 'bg_secret_base_event':
                secret_base_event = OrderedDict()
                secret_base_event['type'] = 'secret_base'
                secret_base_event['x'] = int(params[1], 0)
                secret_base_event['y'] = int(params[2], 0)
                secret_base_event['elevation'] = int(params[3], 0)
                secret_base_event['secret_base_id'] = params[4]
                map_data['bg_events'].append(secret_base_event)
    elif not events_label.startswith(map_name):
        # map is sharing events with another map
        map_data['shared_events_map'] = events_label.replace('_MapEvents', '')
    else:
        print('ERROR: Failed to determine map events for "%s"' % (filepath))
        sys.exit()
    if not scripts_label.startswith(map_name):
        map_data['shared_scripts_map'] = scripts_label.replace('_MapScripts', '')
    return map_data
def parse_maps(project_root, project_type, map_names):
    """Parse every map in *map_names* into a dict keyed by map name."""
    return {name: parse_map(project_root, project_type, name)
            for name in map_names}
def write_map_groups_json(map_groups, project_root):
    """Write data/maps/map_groups.json, groups ordered by group_id.

    The output has a 'group_order' key listing group names in id order,
    followed by one key per group mapping to its map names. (print
    statement converted to the py2/py3-compatible function form.)
    """
    data = OrderedDict([('group_order', sorted(map_groups, key=lambda v: map_groups[v]['group_id']))])
    for group_name in data['group_order']:
        data[group_name] = map_groups[group_name]['map_names']
    filepath = os.path.join(project_root, 'data/maps/map_groups.json')
    with open(filepath, 'w') as f:
        json.dump(data, f, indent=2, separators=(',', ': '))
    print('Generated "%s"' % (filepath))
def write_layouts_json(layouts, project_root):
    """Write the parsed layouts data to data/layouts/layouts.json.

    (print statement converted to the py2/py3-compatible function form.)
    """
    filepath = os.path.join(project_root, 'data/layouts/layouts.json')
    with open(filepath, 'w') as f:
        json.dump(layouts, f, indent=2, separators=(',', ': '))
    print('Generated "%s"' % (filepath))
def write_maps_json(map_data, project_root):
    """Write each map's JSON to data/maps/<map_name>/map.json.

    Bug fix: dict.iteritems() is Python-2-only; replaced with .items()
    (which also works on py2). print converted to function form.
    """
    for map_name, value in map_data.items():
        filepath = os.path.join(project_root, 'data/maps/' + map_name + '/map.json')
        with open(filepath, 'w') as f:
            json.dump(value, f, indent=2, separators=(',', ': '))
        print('Generated "%s"' % (filepath))
def validate(map_groups, map_names, layouts_data, map_data):
    """Sanity-check the parsed maps against known names, layouts, and ids.

    Exits the process with an error message on the first inconsistency;
    returns None on success. Bug fix: the connection error message used
    the undefined name 'connections' instead of 'connection', raising a
    NameError whenever an invalid connection was reported. Also fixes
    the duplicated word in the shared-events error message, and converts
    print statements to the py2/py3-compatible function form.
    """
    layout_ids = [v['id'] for v in layouts_data['layouts']]
    map_ids = [map_data[v]['id'] for v in map_data] + ['MAP_NONE']
    event_keys = ('object_events', 'warp_events', 'coord_events', 'bg_events')
    for map_name in map_data:
        # Ensure map has known name.
        if map_name not in map_names:
            print('ERROR: Unknown map name "%s"' % (map_name))
            sys.exit()
        # Ensure map has known layout.
        layout_id = map_data[map_name]['layout']
        if layout_id not in layout_ids:
            print('ERROR: Unknown layout id "%s" for map "%s"' % (layout_id, map_name))
            sys.exit()
        # Ensure map's connection destinations are valid maps.
        if map_data[map_name]['connections'] is not None:
            for connection in map_data[map_name]['connections']:
                if connection['map'] not in map_ids:
                    print('ERROR: Unknown map id "%s" for connection in map "%s"' % (connection['map'], map_name))
                    sys.exit()
        present = [k in map_data[map_name] for k in event_keys]
        if not any(present):
            # No events at all: the map must share another map's events.
            if 'shared_events_map' not in map_data[map_name]:
                print('ERROR: Map "%s" does not have any events defined, so it must have a "shared_events_map", but it does not' % (map_name))
                sys.exit()
            if map_data[map_name]['shared_events_map'] not in map_names:
                print('ERROR: Map "%s" has an unknown "shared_events_map": %s' % (map_name, map_data[map_name]['shared_events_map']))
                sys.exit()
        elif not all(present):
            print('ERROR: Map "%s" must have all of the following defined: "object_events", "warp_events", "coord_events", "bg_events"' % (map_name))
            sys.exit()
        else:
            # Ensure map's warp destinations are valid maps.
            for warp in map_data[map_name]['warp_events']:
                if warp['dest_map'] not in map_ids:
                    print('ERROR: Unknown map id "%s" for warp event in map "%s"' % (warp['dest_map'], map_name))
                    sys.exit()
        if 'shared_scripts_map' in map_data[map_name]:
            if map_data[map_name]['shared_scripts_map'] not in map_names:
                print('ERROR: Map "%s" has an unknown "shared_scripts_map": %s' % (map_name, map_data[map_name]['shared_scripts_map']))
                sys.exit()
def main():
    """Entry point: validate arguments, parse the project, emit JSON files.

    Usage: python pokemap2json.py <project_type> <project_path>, where
    project_type is 'ruby' or 'emerald'. (print statements converted to
    the py2/py3-compatible function form.)
    """
    if len(sys.argv) != 3:
        print('USAGE: python pokemap2json.py <project_type> <project_path>')
        sys.exit()
    project_type = sys.argv[1]
    if project_type not in ['ruby', 'emerald']:
        print('ERROR: Project type must be either "ruby" or "emerald"')
        sys.exit()
    project_root = sys.argv[2]
    if not os.path.exists(project_root):
        print('ERROR: Project directory "%s" does not exist' % (project_root))
        sys.exit()
    print('Converting map data to json for project "%s"' % (project_root))
    map_groups, map_names = parse_map_groups(project_root)
    print('Detected %s map groups containing %s total maps' % (len(map_groups), len(map_names)))
    layouts_data = parse_layouts(project_root)
    map_data = parse_maps(project_root, project_type, map_names)
    validate(map_groups, map_names, layouts_data, map_data)
    write_map_groups_json(map_groups, project_root)
    write_layouts_json(layouts_data, project_root)
    write_maps_json(map_data, project_root)
# Run the conversion only when executed as a script, not when imported.
if __name__ == '__main__':
    main()
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment