author     Bryan Bishop <kanzure@gmail.com>    2016-08-27 19:12:23 -0500
committer  Bryan Bishop <kanzure@gmail.com>    2016-08-27 19:12:23 -0500
commit     30ec94681f8dda48cd66ecdbf1d7a0a82666f145 (patch)
tree       f07e8bc5043b47dee7f980c35fc3a5846f2008ae
parent     857c18b6d1ca9c7597f78d2b4e851fedeb33c01b (diff)
parent     74c620d01ad59bfb09cf4111ace549b925fcb9ab (diff)
Merge branch 'master' into py3-continued
Conflicts:
	pokemontools/crystal.py
-rw-r--r--  README.md                                 |   6
-rw-r--r--  pokemontools/crystal.py                   |  67
-rw-r--r--  pokemontools/crystalparts/old_parsers.py  |  63
-rw-r--r--  pokemontools/labels.py                    |   2
-rw-r--r--  tests/integration/tests.py                | 107
-rw-r--r--  tests/tests.py                            |  26
6 files changed, 168 insertions, 103 deletions
diff --git a/README.md b/README.md
index 9c3ca92..3bbfa1c 100644
--- a/README.md
+++ b/README.md
@@ -39,6 +39,12 @@ Run the tests with:
nosetests-2.7
```
+There might be a great deal of spammy output. Drop some of the spam with:
+
+```
+nosetests-2.7 tests.integration.tests --nocapture --nologcapture
+```
+
# see also
* [Pokémon Crystal source code](https://github.com/kanzure/pokecrystal)
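
The `--nocapture --nologcapture` flags above stop nose from swallowing stdout and from re-emitting captured log records. If the remaining noise comes from pokemontools' own `logging.debug()` calls, a rough sketch of another way to quiet it (not part of this commit) is to raise the root logger threshold before the suite runs:

```
# not part of this commit: raise the root logger level so the logging.debug()
# calls scattered through pokemontools/crystal.py are dropped
import logging
logging.getLogger().setLevel(logging.WARNING)
```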
diff --git a/pokemontools/crystal.py b/pokemontools/crystal.py
index d08c15d..74763e0 100644
--- a/pokemontools/crystal.py
+++ b/pokemontools/crystal.py
@@ -135,13 +135,7 @@ def load_rom(filename=None):
and then loads the rom if necessary."""
if filename == None:
filename = os.path.join(conf.path, "baserom.gbc")
- global rom
- if rom != romstr.RomStr(None) and rom != None:
- return rom
- if not isinstance(rom, romstr.RomStr):
- return direct_load_rom(filename=filename)
- elif os.lstat(filename).st_size != len(rom):
- return direct_load_rom(filename)
+ return direct_load_rom(filename)
def direct_load_asm(filename=None):
if filename == None:
@@ -903,6 +897,8 @@ class PointerLabelParam(MultiByteParam):
label = None
elif result.address != caddress:
label = None
+ elif hasattr(result, "keys") and "label" in result.keys():
+ label = result["label"]
elif result != None:
label = None
@@ -1457,21 +1453,27 @@ def read_event_flags():
global event_flags
constants = wram.read_constants(os.path.join(conf.path, 'constants.asm'))
event_flags = dict(filter(lambda key_value: key_value[1].startswith('EVENT_'), constants.items()))
+ return event_flags
engine_flags = None
def read_engine_flags():
global engine_flags
constants = wram.read_constants(os.path.join(conf.path, 'constants.asm'))
engine_flags = dict(filter(lambda key_value1: key_value1[1].startswith('ENGINE_'), constants.items()))
+ return engine_flags
class EventFlagParam(MultiByteParam):
def to_asm(self):
- if event_flags is None: read_event_flags()
+ global event_flags
+ if event_flags is None:
+ event_flags = read_event_flags()
return event_flags.get(self.parsed_number) or MultiByteParam.to_asm(self)
class EngineFlagParam(MultiByteParam):
def to_asm(self):
- if engine_flags is None: read_engine_flags()
+ global engine_flags
+ if engine_flags is None:
+ engine_flags = read_engine_flags()
return engine_flags.get(self.parsed_number) or MultiByteParam.to_asm(self)
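
The hunk above makes `read_event_flags()`/`read_engine_flags()` return their result and has the `to_asm()` methods fill the module-level cache on first use. A minimal sketch of that lazy-cache shape, with illustrative names standing in for the real constants parser:

```
# illustrative names, not the real module: the constants file is only read the
# first time a flag needs to be rendered, then reused for every later call
_flag_cache = None

def read_flags():
    """Stand-in parser; the real code reads constants.asm via wram.read_constants."""
    return {0x01: "EVENT_EXAMPLE_FLAG"}

def flag_to_asm(parsed_number):
    global _flag_cache
    if _flag_cache is None:          # first call: populate the cache
        _flag_cache = read_flags()
    # fall back to the raw number when no named constant is known
    return _flag_cache.get(parsed_number) or hex(parsed_number)
```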
@@ -4905,7 +4907,7 @@ class MapHeader(object):
output += "db " + ", ".join([self.location_on_world_map.to_asm(), self.music.to_asm(), self.time_of_day.to_asm(), self.fishing_group.to_asm()])
return output
-def parse_map_header_at(address, map_group=None, map_id=None, all_map_headers=None, debug=True):
+def parse_map_header_at(address, map_group=None, map_id=None, all_map_headers=None, rom=None, debug=True):
"""parses an arbitrary map header at some address"""
logging.debug("parsing a map header at {0}".format(hex(address)))
map_header = MapHeader(address, map_group=map_group, map_id=map_id, debug=debug)
@@ -6111,11 +6113,13 @@ def parse_map_header_by_id(*args, **kwargs):
map_header_offset = offset + ((map_id - 1) * map_header_byte_size)
return parse_map_header_at(map_header_offset, all_map_headers=all_map_headers, map_group=map_group, map_id=map_id)
-def parse_all_map_headers(map_names, all_map_headers=None, debug=True):
+def parse_all_map_headers(map_names, all_map_headers=None, _parse_map_header_at=None, rom=None, debug=True):
"""
Calls parse_map_header_at for each map in each map group. Updates the
map_names structure.
"""
+ if _parse_map_header_at == None:
+ _parse_map_header_at = parse_map_header_at
if "offset" not in map_names[1]:
raise Exception("dunno what to do - map_names should have groups with pre-calculated offsets by now")
for (group_id, group_data) in map_names.items():
@@ -6132,7 +6136,7 @@ def parse_all_map_headers(map_names, all_map_headers=None, debug=True):
map_header_offset = offset + ((map_id - 1) * map_header_byte_size)
map_names[group_id][map_id]["header_offset"] = map_header_offset
- new_parsed_map = parse_map_header_at(map_header_offset, map_group=group_id, map_id=map_id, all_map_headers=all_map_headers, debug=debug)
+ new_parsed_map = _parse_map_header_at(map_header_offset, map_group=group_id, map_id=map_id, all_map_headers=all_map_headers, rom=rom, debug=debug)
map_names[group_id][map_id]["header_new"] = new_parsed_map
class PokedexEntryPointerTable(object):
@@ -6457,12 +6461,15 @@ def split_incbin_line_into_three(line, start_address, byte_count, rom_file=None)
output += "INCBIN \"baserom.gbc\",$" + hex(third[0])[2:] + ",$" + hex(third[1])[2:] # no newline
return output
-def generate_diff_insert(line_number, newline, debug=False):
+def generate_diff_insert(line_number, newline, _asm=None, debug=False):
"""generates a diff between the old main.asm and the new main.asm
note: requires python2.7 i think? b/c of subprocess.check_output"""
global asm
- original = "\n".join(line for line in asm)
- newfile = deepcopy(asm)
+ if _asm == None:
+ _asm = asm
+
+ original = "\n".join(line for line in _asm)
+ newfile = deepcopy(_asm)
newfile[line_number] = newline # possibly inserting multiple lines
newfile = "\n".join(line for line in newfile)
@@ -6473,6 +6480,10 @@ def generate_diff_insert(line_number, newline, debug=False):
original_filename = "ejroqjfoad.temp"
newfile_filename = "fjiqefo.temp"
+ main_path = os.path.join(conf.path, "main.asm")
+ if os.path.exists(main_path):
+ original_filename = main_path
+
original_fh = open(original_filename, "w")
original_fh.write(original)
original_fh.close()
@@ -6487,9 +6498,9 @@ def generate_diff_insert(line_number, newline, debug=False):
CalledProcessError = None
try:
- diffcontent = subprocess.check_output("diff -u " + os.path.join(conf.path, "main.asm") + " " + newfile_filename, shell=True)
+ diffcontent = subprocess.check_output("diff -u " + original_filename + " " + newfile_filename, shell=True)
except (AttributeError, CalledProcessError):
- p = subprocess.Popen(["diff", "-u", os.path.join(conf.path, "main.asm"), newfile_filename], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+ p = subprocess.Popen(["diff", "-u", original_filename, newfile_filename], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
out, err = p.communicate()
diffcontent = out
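
The two hunks above route `generate_diff_insert()` through an injectable `_asm` list and an explicit `original_filename`, keeping the older `check_output`/`Popen` fallback. A self-contained sketch of that fallback (the `unified_diff` helper and its arguments are illustrative, not the module's API): `diff -u` exits with status 1 when the files differ, so the `check_output` path treats `CalledProcessError` as success, while the `AttributeError` path covers interpreters older than Python 2.7 that lack `check_output` entirely.

```
import subprocess

def unified_diff(old_path, new_path):
    """Return `diff -u` output for two files (hypothetical helper)."""
    try:
        # diff exits 1 when the files differ, which check_output (python 2.7+)
        # reports as CalledProcessError; the diff text is still in e.output
        return subprocess.check_output(["diff", "-u", old_path, new_path])
    except subprocess.CalledProcessError as e:
        return e.output
    except AttributeError:
        # no check_output at all (python < 2.7): fall back to Popen
        p = subprocess.Popen(["diff", "-u", old_path, new_path],
                             stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        out, _err = p.communicate()
        return out
```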
@@ -7077,11 +7088,18 @@ def get_ram_label(address):
return wram.wram_labels[address][-1]
return None
-def get_label_for(address):
+def get_label_for(address, _all_labels=None, _script_parse_table=None):
"""
returns a label assigned to a particular address
"""
global all_labels
+ global script_parse_table
+
+ if _all_labels == None:
+ _all_labels = all_labels
+
+ if _script_parse_table == None:
+ _script_parse_table = script_parse_table
if address == None:
return None
@@ -7093,15 +7111,17 @@ def get_label_for(address):
return None
# the old way
- for thing in all_labels:
+ for thing in _all_labels:
if thing["address"] == address:
return thing["label"]
# the new way
- obj = script_parse_table[address]
+ obj = _script_parse_table[address]
if obj:
if hasattr(obj, "label"):
return obj.label.name
+ elif hasattr(obj, "keys") and "label" in obj.keys():
+ return obj["label"]
else:
return "AlreadyParsedNoDefaultUnknownLabel_" + hex(address)
@@ -7320,13 +7340,14 @@ def add_map_offsets_into_map_names(map_group_offsets, map_names=None):
rom_parsed = False
-def parse_rom(rom=None):
+def parse_rom(rom=None, _skip_wram_labels=False, _parse_map_header_at=None, debug=False):
if not rom:
# read the rom and figure out the offsets for maps
rom = direct_load_rom()
# make wram.wram_labels available
- setup_wram_labels()
+ if not _skip_wram_labels:
+ setup_wram_labels()
# figure out the map offsets
map_group_offsets = load_map_group_offsets(map_group_pointer_table=map_group_pointer_table, map_group_count=map_group_count, rom=rom)
@@ -7335,7 +7356,7 @@ def parse_rom(rom=None):
add_map_offsets_into_map_names(map_group_offsets, map_names=map_names)
# parse map header bytes for each map
- parse_all_map_headers(map_names, all_map_headers=all_map_headers)
+ parse_all_map_headers(map_names, all_map_headers=all_map_headers, _parse_map_header_at=_parse_map_header_at, rom=rom, debug=debug)
# find trainers based on scripts and map headers
# this can only happen after parsing the entire map and map scripts
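
Most of the crystal.py changes above share one shape: a function that used to reach for a module global (`rom`, `asm`, `all_labels`, `script_parse_table`, or `parse_map_header_at` itself) now takes an optional parameter that defaults to that global, so production callers stay unchanged while tests can inject a fixture. A minimal sketch of the pattern, mirroring `get_label_for()` with illustrative data:

```
# illustrative sketch of the injection pattern threaded through crystal.py
all_labels = []  # module-level default, populated elsewhere

def get_label_for(address, _all_labels=None):
    if _all_labels is None:        # no override given: use the global
        _all_labels = all_labels
    for entry in _all_labels:
        if entry["address"] == address:
            return entry["label"]
    return None

# a test can now supply its own fixture instead of mutating the global:
assert get_label_for(5, _all_labels=[{"address": 5, "label": "poop"}]) == "poop"
```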
diff --git a/pokemontools/crystalparts/old_parsers.py b/pokemontools/crystalparts/old_parsers.py
index 15878d2..81d79ed 100644
--- a/pokemontools/crystalparts/old_parsers.py
+++ b/pokemontools/crystalparts/old_parsers.py
@@ -2,18 +2,37 @@
Some old methods rescued from crystal.py
"""
+import logging
+
import pokemontools.pointers as pointers
+from pokemontools.crystal import (
+ rom_interval,
+ old_parse_xy_trigger_bytes,
+ parse_script_engine_script_at,
+ calculate_pointer_from_bytes_at,
+ parse_text_engine_script_at,
+
+ # constants
+ second_map_header_byte_size,
+ warp_byte_size,
+ trigger_byte_size,
+ signpost_byte_size,
+ people_event_byte_size,
+)
+
+import pokemontools.helpers as helpers
+
map_header_byte_size = 9
all_map_headers = []
-def old_parse_map_script_header_at(address, map_group=None, map_id=None, debug=True):
+def old_parse_map_script_header_at(address, map_group=None, map_id=None, rom=None, debug=True):
logging.debug("starting to parse the map's script header..")
#[[Number1 of pointers] Number1 * [2byte pointer to script][00][00]]
ptr_line_size = 4 #[2byte pointer to script][00][00]
trigger_ptr_cnt = ord(rom[address])
- trigger_pointers = helpers.grouper(rom_interval(address+1, trigger_ptr_cnt * ptr_line_size, strings=False), count=ptr_line_size)
+ trigger_pointers = helpers.grouper(rom_interval(address+1, trigger_ptr_cnt * ptr_line_size, rom=rom, strings=False), count=ptr_line_size)
triggers = {}
for index, trigger_pointer in enumerate(trigger_pointers):
logging.debug("parsing a trigger header...")
@@ -34,7 +53,7 @@ def old_parse_map_script_header_at(address, map_group=None, map_id=None, debug=T
#[[Number2 of pointers] Number2 * [hook number][2byte pointer to script]]
callback_ptr_line_size = 3
callback_ptr_cnt = ord(rom[address])
- callback_ptrs = helpers.grouper(rom_interval(address+1, callback_ptr_cnt * callback_ptr_line_size, strings=False), count=callback_ptr_line_size)
+ callback_ptrs = helpers.grouper(rom_interval(address+1, callback_ptr_cnt * callback_ptr_line_size, rom=rom, strings=False), count=callback_ptr_line_size)
callback_pointers = {}
callbacks = {}
for index, callback_line in enumerate(callback_ptrs):
@@ -64,10 +83,10 @@ def old_parse_map_script_header_at(address, map_group=None, map_id=None, debug=T
}
-def old_parse_map_header_at(address, map_group=None, map_id=None, debug=True):
+def old_parse_map_header_at(address, map_group=None, map_id=None, all_map_headers=None, rom=None, debug=True):
"""parses an arbitrary map header at some address"""
logging.debug("parsing a map header at {0}".format(hex(address)))
- bytes = rom_interval(address, map_header_byte_size, strings=False, debug=debug)
+ bytes = rom_interval(address, map_header_byte_size, rom=rom, strings=False, debug=debug)
bank = bytes[0]
tileset = bytes[1]
permission = bytes[2]
@@ -89,19 +108,19 @@ def old_parse_map_header_at(address, map_group=None, map_id=None, debug=True):
"fishing": fishing_group,
}
logging.debug("second map header address is {0}".format(hex(second_map_header_address)))
- map_header["second_map_header"] = old_parse_second_map_header_at(second_map_header_address, debug=debug)
+ map_header["second_map_header"] = old_parse_second_map_header_at(second_map_header_address, rom=rom, debug=debug)
event_header_address = map_header["second_map_header"]["event_address"]
script_header_address = map_header["second_map_header"]["script_address"]
# maybe event_header and script_header should be put under map_header["second_map_header"]
- map_header["event_header"] = old_parse_map_event_header_at(event_header_address, map_group=map_group, map_id=map_id, debug=debug)
- map_header["script_header"] = old_parse_map_script_header_at(script_header_address, map_group=map_group, map_id=map_id, debug=debug)
+ map_header["event_header"] = old_parse_map_event_header_at(event_header_address, map_group=map_group, map_id=map_id, rom=rom, debug=debug)
+ map_header["script_header"] = old_parse_map_script_header_at(script_header_address, map_group=map_group, map_id=map_id, rom=rom, debug=debug)
return map_header
all_second_map_headers = []
-def old_parse_second_map_header_at(address, map_group=None, map_id=None, debug=True):
+def old_parse_second_map_header_at(address, map_group=None, map_id=None, rom=None, debug=True):
"""each map has a second map header"""
- bytes = rom_interval(address, second_map_header_byte_size, strings=False)
+ bytes = rom_interval(address, second_map_header_byte_size, rom=rom, strings=False)
border_block = bytes[0]
height = bytes[1]
width = bytes[2]
@@ -150,7 +169,7 @@ def old_parse_warp_bytes(some_bytes, debug=True):
})
return warps
-def old_parse_signpost_bytes(some_bytes, bank=None, map_group=None, map_id=None, debug=True):
+def old_parse_signpost_bytes(some_bytes, bank=None, map_group=None, map_id=None, rom=None, debug=True):
assert len(some_bytes) % signpost_byte_size == 0, "wrong number of bytes"
signposts = []
for bytes in helpers.grouper(some_bytes, count=signpost_byte_size):
@@ -216,7 +235,7 @@ def old_parse_signpost_bytes(some_bytes, bank=None, map_group=None, map_id=None,
signposts.append(spost)
return signposts
-def old_parse_people_event_bytes(some_bytes, address=None, map_group=None, map_id=None, debug=True):
+def old_parse_people_event_bytes(some_bytes, address=None, map_group=None, map_id=None, rom=None, debug=True):
"""parse some number of people-events from the data
see http://hax.iimarck.us/files/scriptingcodes_eng.htm#Scripthdr
@@ -297,7 +316,7 @@ def old_parse_people_event_bytes(some_bytes, address=None, map_group=None, map_i
"parsing a trainer (person-event) at x={x} y={y}"
.format(x=x, y=y)
)
- parsed_trainer = parse_trainer_header_at(ptr_address, map_group=map_group, map_id=map_id)
+ parsed_trainer = old_parse_trainer_header_at(ptr_address, map_group=map_group, map_id=map_id, rom=rom)
extra_portion = {
"event_type": "trainer",
"trainer_data_address": ptr_address,
@@ -340,9 +359,9 @@ def old_parse_people_event_bytes(some_bytes, address=None, map_group=None, map_i
people_events.append(people_event)
return people_events
-def old_parse_trainer_header_at(address, map_group=None, map_id=None, debug=True):
+def old_parse_trainer_header_at(address, map_group=None, map_id=None, rom=None, debug=True):
bank = pointers.calculate_bank(address)
- bytes = rom_interval(address, 12, strings=False)
+ bytes = rom_interval(address, 12, rom=rom, strings=False)
bit_number = bytes[0] + (bytes[1] << 8)
trainer_group = bytes[2]
trainer_id = bytes[3]
@@ -382,7 +401,7 @@ def old_parse_trainer_header_at(address, map_group=None, map_id=None, debug=True
"script_talk_again": script_talk_again,
}
-def old_parse_map_event_header_at(address, map_group=None, map_id=None, debug=True):
+def old_parse_map_event_header_at(address, map_group=None, map_id=None, rom=None, debug=True):
"""parse crystal map event header byte structure thing"""
returnable = {}
@@ -396,29 +415,29 @@ def old_parse_map_event_header_at(address, map_group=None, map_id=None, debug=Tr
# warps
warp_count = ord(rom[address+2])
warp_byte_count = warp_byte_size * warp_count
- warps = rom_interval(address+3, warp_byte_count)
+ warps = rom_interval(address+3, warp_byte_count, rom=rom)
after_warps = address + 3 + warp_byte_count
returnable.update({"warp_count": warp_count, "warps": old_parse_warp_bytes(warps)})
# triggers (based on xy location)
trigger_count = ord(rom[after_warps])
trigger_byte_count = trigger_byte_size * trigger_count
- triggers = rom_interval(after_warps+1, trigger_byte_count)
+ triggers = rom_interval(after_warps+1, trigger_byte_count, rom=rom)
after_triggers = after_warps + 1 + trigger_byte_count
returnable.update({"xy_trigger_count": trigger_count, "xy_triggers": old_parse_xy_trigger_bytes(triggers, bank=bank, map_group=map_group, map_id=map_id)})
# signposts
signpost_count = ord(rom[after_triggers])
signpost_byte_count = signpost_byte_size * signpost_count
- signposts = rom_interval(after_triggers+1, signpost_byte_count)
+ signposts = rom_interval(after_triggers+1, signpost_byte_count, rom=rom)
after_signposts = after_triggers + 1 + signpost_byte_count
- returnable.update({"signpost_count": signpost_count, "signposts": old_parse_signpost_bytes(signposts, bank=bank, map_group=map_group, map_id=map_id)})
+ returnable.update({"signpost_count": signpost_count, "signposts": old_parse_signpost_bytes(signposts, bank=bank, map_group=map_group, map_id=map_id, rom=rom)})
# people events
people_event_count = ord(rom[after_signposts])
people_event_byte_count = people_event_byte_size * people_event_count
- people_events_bytes = rom_interval(after_signposts+1, people_event_byte_count)
- people_events = old_parse_people_event_bytes(people_events_bytes, address=after_signposts+1, map_group=map_group, map_id=map_id)
+ people_events_bytes = rom_interval(after_signposts+1, people_event_byte_count, rom=rom)
+ people_events = old_parse_people_event_bytes(people_events_bytes, address=after_signposts+1, map_group=map_group, map_id=map_id, rom=rom)
returnable.update({"people_event_count": people_event_count, "people_events": people_events})
return returnable
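
The old_parsers.py changes thread an explicit `rom=` through every helper that reads map data, because each structure in these headers is a count byte followed by fixed-size records. A self-contained sketch of that counted-record layout, with stand-ins for `rom_interval()` and `helpers.grouper()` and made-up byte values:

```
# made-up bytes: one count byte, then `count` records of 4 bytes each
rom = b"\x02" + b"\x10\x20\x00\x00" + b"\x30\x40\x00\x00"   # 2 trigger pointers

def rom_interval(address, length, rom=None, strings=False):
    """Stand-in for pokemontools' helper: return `length` ints from `rom`."""
    return [ord(rom[i:i+1]) for i in range(address, address + length)]

def grouper(seq, count):
    """Stand-in for helpers.grouper: chunk a flat list into rows of `count`."""
    return [seq[i:i+count] for i in range(0, len(seq), count)]

ptr_line_size = 4                                   # [2-byte pointer][00][00]
count = ord(rom[0:1])                               # count byte
rows = grouper(rom_interval(1, count * ptr_line_size, rom=rom), ptr_line_size)
assert rows == [[0x10, 0x20, 0x00, 0x00], [0x30, 0x40, 0x00, 0x00]]
```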
diff --git a/pokemontools/labels.py b/pokemontools/labels.py
index d30c1dc..f8e8ec2 100644
--- a/pokemontools/labels.py
+++ b/pokemontools/labels.py
@@ -192,6 +192,8 @@ def line_has_label(line):
return False
if line[0] == "\"":
return False
+ if line[0] == ":":
+ return False
return True
def get_label_from_line(line):
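
The labels.py hunk adds one more early exit to `line_has_label()`: a line whose first character is `:` is not a label definition. Only the tail of the function appears in this diff, so the earlier guards in the sketch below are assumptions for illustration, not the module's actual checks:

```
def line_has_label(line):
    if ":" not in line:        # assumption: a label line needs a colon
        return False
    if line[0] == ";":         # assumption: skip comment lines
        return False
    if line[0] == "\"":
        return False
    if line[0] == ":":         # new in this commit: reject a bare leading colon
        return False
    return True
```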
diff --git a/tests/integration/tests.py b/tests/integration/tests.py
index 4f96699..79fadda 100644
--- a/tests/integration/tests.py
+++ b/tests/integration/tests.py
@@ -47,7 +47,7 @@ from pokemontools.crystalparts.old_parsers import (
)
from pokemontools.crystal import (
- rom,
+ script_parse_table,
load_rom,
rom_until,
direct_load_rom,
@@ -86,14 +86,20 @@ from pokemontools.crystal import (
isolate_incbins,
process_incbins,
get_labels_between,
- generate_diff_insert,
rom_text_at,
get_label_for,
split_incbin_line_into_three,
reset_incbins,
+ parse_rom,
+
+ # globals
+ engine_flags,
)
+import pokemontools.wram
+
import unittest
+import mock
class BasicTestCase(unittest.TestCase):
"this is where i cram all of my unit tests together"
@@ -114,13 +120,10 @@ class BasicTestCase(unittest.TestCase):
self.failUnless(isinstance(rom, RomStr))
def test_load_rom(self):
- global rom
- rom = None
- load_rom()
- self.failIf(rom == None)
- rom = RomStr(None)
- load_rom()
- self.failIf(rom == RomStr(None))
+ rom = load_rom()
+ self.assertNotEqual(rom, None)
+ rom = load_rom()
+ self.assertNotEqual(rom, RomStr(None))
def test_load_asm(self):
asm = load_asm()
@@ -131,7 +134,9 @@ class BasicTestCase(unittest.TestCase):
def test_rom_file_existence(self):
"ROM file must exist"
- self.failUnless("baserom.gbc" in os.listdir("../"))
+ dirname = os.path.dirname(__file__)
+ filenames = os.listdir(os.path.join(os.path.abspath(dirname), "../../"))
+ self.failUnless("baserom.gbc" in filenames)
def test_rom_md5(self):
"ROM file must have the correct md5 sum"
@@ -151,24 +156,24 @@ class BasicTestCase(unittest.TestCase):
interval = 10
correct_strings = ['0x0', '0xc3', '0x6e', '0x1', '0xce',
'0xed', '0x66', '0x66', '0xcc', '0xd']
- byte_strings = rom_interval(address, interval, strings=True)
+ byte_strings = rom_interval(address, interval, rom=self.rom, strings=True)
self.assertEqual(byte_strings, correct_strings)
correct_ints = [0, 195, 110, 1, 206, 237, 102, 102, 204, 13]
- ints = rom_interval(address, interval, strings=False)
+ ints = rom_interval(address, interval, rom=self.rom, strings=False)
self.assertEqual(ints, correct_ints)
def test_rom_until(self):
address = 0x1337
byte = 0x13
- bytes = rom_until(address, byte, strings=True)
+ bytes = rom_until(address, byte, rom=self.rom, strings=True)
self.failUnless(len(bytes) == 3)
self.failUnless(bytes[0] == '0xd5')
- bytes = rom_until(address, byte, strings=False)
+ bytes = rom_until(address, byte, rom=self.rom, strings=False)
self.failUnless(len(bytes) == 3)
self.failUnless(bytes[0] == 0xd5)
def test_how_many_until(self):
- how_many = how_many_until(chr(0x13), 0x1337)
+ how_many = how_many_until(chr(0x13), 0x1337, self.rom)
self.assertEqual(how_many, 3)
def test_calculate_pointer_from_bytes_at(self):
@@ -237,8 +242,8 @@ class TestEncodedText(unittest.TestCase):
def test_parse_text_engine_script_at(self):
p = parse_text_engine_script_at(0x197185, debug=False)
- self.assertEqual(len(p.commands), 2)
- self.assertEqual(len(p.commands[0]["lines"]), 41)
+ self.assertEqual(len(p.commands), 1)
+ self.assertEqual(p.commands[0].to_asm().count("\n"), 40)
class TestScript(unittest.TestCase):
"""for testing parse_script_engine_script_at and script parsing in
@@ -302,9 +307,10 @@ class TestByteParams(unittest.TestCase):
class TestMultiByteParam(unittest.TestCase):
def setup_for(self, somecls, byte_size=2, address=443, **kwargs):
+ self.rom = load_rom()
self.cls = somecls(address=address, size=byte_size, **kwargs)
self.assertEqual(self.cls.address, address)
- self.assertEqual(self.cls.bytes, rom_interval(address, byte_size, strings=False))
+ self.assertEqual(self.cls.bytes, rom_interval(address, byte_size, rom=self.rom, strings=False))
self.assertEqual(self.cls.size, byte_size)
def test_two_byte_param(self):
@@ -318,52 +324,63 @@ class TestMultiByteParam(unittest.TestCase):
self.setup_for(PointerLabelParam, bank=None)
# assuming no label at this location..
self.assertEqual(self.cls.to_asm(), "$f0c0")
- global all_labels
- # hm.. maybe all_labels should be using a class?
- all_labels = [{"label": "poop", "address": 0xf0c0,
- "offset": 0xf0c0, "bank": 0,
- "line_number": 2
- }]
+ global script_parse_table
+ script_parse_table[0xf0c0:0xf0c0 + 1] = {"label": "poop", "bank": 0, "line_number": 2}
self.assertEqual(self.cls.to_asm(), "poop")
class TestPostParsing(unittest.TestCase):
"""tests that must be run after parsing all maps"""
+
+ @classmethod
+ def setUpClass(cls):
+ cls.rom = direct_load_rom()
+
+ pokemontools.wram.wram_labels = {}
+
+ with mock.patch("pokemontools.crystal.read_engine_flags", return_value={}):
+ with mock.patch("pokemontools.crystal.read_event_flags", return_value={}):
+ with mock.patch("pokemontools.crystal.setup_wram_labels", return_value={}):
+ parse_rom(rom=cls.rom, _skip_wram_labels=True, _parse_map_header_at=old_parse_map_header_at, debug=False)
+
def test_signpost_counts(self):
- self.assertEqual(len(map_names[1][1]["signposts"]), 0)
- self.assertEqual(len(map_names[1][2]["signposts"]), 2)
- self.assertEqual(len(map_names[10][5]["signposts"]), 7)
+ self.assertEqual(len(map_names[1][1]["header_new"]["event_header"]["signposts"]), 0)
+ self.assertEqual(len(map_names[1][2]["header_new"]["event_header"]["signposts"]), 2)
+ self.assertEqual(len(map_names[10][5]["header_new"]["event_header"]["signposts"]), 7)
def test_warp_counts(self):
- self.assertEqual(map_names[10][5]["warp_count"], 9)
- self.assertEqual(map_names[18][5]["warp_count"], 3)
- self.assertEqual(map_names[15][1]["warp_count"], 2)
+ self.assertEqual(map_names[10][5]["header_new"]["event_header"]["warp_count"], 9)
+ self.assertEqual(map_names[18][5]["header_new"]["event_header"]["warp_count"], 3)
+ self.assertEqual(map_names[15][1]["header_new"]["event_header"]["warp_count"], 2)
def test_map_sizes(self):
- self.assertEqual(map_names[15][1]["height"], 18)
- self.assertEqual(map_names[15][1]["width"], 10)
- self.assertEqual(map_names[7][1]["height"], 4)
- self.assertEqual(map_names[7][1]["width"], 4)
+ self.assertEqual(map_names[15][1]["header_new"]["second_map_header"]["height"], 18)
+ self.assertEqual(map_names[15][1]["header_new"]["second_map_header"]["width"], 10)
+ self.assertEqual(map_names[7][1]["header_new"]["second_map_header"]["height"], 4)
+ self.assertEqual(map_names[7][1]["header_new"]["second_map_header"]["width"], 4)
def test_map_connection_counts(self):
- self.assertEqual(map_names[7][1]["connections"], 0)
- self.assertEqual(map_names[10][1]["connections"], 12)
- self.assertEqual(map_names[10][2]["connections"], 12)
- self.assertEqual(map_names[11][1]["connections"], 9) # or 13?
+ #print map_names[10][5]
+ #print map_names[10][5].keys()
+ #print map_names[10][5]["header_new"].keys()
+ self.assertEqual(map_names[7][1]["header_new"]["second_map_header"]["connections"], 0)
+ self.assertEqual(map_names[10][1]["header_new"]["second_map_header"]["connections"], 12)
+ self.assertEqual(map_names[10][2]["header_new"]["second_map_header"]["connections"], 12)
+ self.assertEqual(map_names[11][1]["header_new"]["second_map_header"]["connections"], 9) # or 13?
def test_second_map_header_address(self):
- self.assertEqual(map_names[11][1]["second_map_header_address"], 0x9509c)
- self.assertEqual(map_names[1][5]["second_map_header_address"], 0x95bd0)
+ self.assertEqual(map_names[11][1]["header_new"]["second_map_header_address"], 0x9509c)
+ self.assertEqual(map_names[1][5]["header_new"]["second_map_header_address"], 0x95bd0)
def test_event_address(self):
- self.assertEqual(map_names[17][5]["event_address"], 0x194d67)
- self.assertEqual(map_names[23][3]["event_address"], 0x1a9ec9)
+ self.assertEqual(map_names[17][5]["header_new"]["second_map_header"]["event_address"], 0x194d67)
+ self.assertEqual(map_names[23][3]["header_new"]["second_map_header"]["event_address"], 0x1a9ec9)
def test_people_event_counts(self):
- self.assertEqual(len(map_names[23][3]["people_events"]), 4)
- self.assertEqual(len(map_names[10][3]["people_events"]), 9)
+ self.assertEqual(len(map_names[23][3]["header_new"]["event_header"]["people_events"]), 4)
+ self.assertEqual(len(map_names[10][3]["header_new"]["event_header"]["people_events"]), 9)
class TestMapParsing(unittest.TestCase):
- def test_parse_all_map_headers(self):
+ def xtest_parse_all_map_headers(self):
global parse_map_header_at, old_parse_map_header_at, counter
counter = 0
for k in map_names.keys():
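
`TestPostParsing.setUpClass` above uses `mock.patch` as a context manager so `parse_rom()` can run against a freshly loaded ROM without reading `constants.asm` or setting up WRAM labels. A minimal, self-contained sketch of that patching pattern (the assertion is illustrative):

```
import mock  # the `mock` backport on Python 2; unittest.mock on Python 3

import pokemontools.crystal as crystal

# while the patch is active, the real constants-file parser is replaced by a
# stub that returns an empty dict and touches no files
with mock.patch("pokemontools.crystal.read_event_flags", return_value={}):
    assert crystal.read_event_flags() == {}
# leaving the with-block restores the original function automatically
```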
diff --git a/tests/tests.py b/tests/tests.py
index 4398f03..4c7f7a4 100644
--- a/tests/tests.py
+++ b/tests/tests.py
@@ -171,9 +171,12 @@ class TestCram(unittest.TestCase):
self.assertEqual(map_name(7, 0x11), "Cerulean City")
def test_load_map_group_offsets(self):
- addresses = load_map_group_offsets()
+ rom = load_rom()
+ map_group_pointer_table = 0x94000
+ map_group_count = 26
+ addresses = load_map_group_offsets(map_group_pointer_table, map_group_count, rom=rom)
self.assertEqual(len(addresses), 26, msg="there should be 26 map groups")
- addresses = load_map_group_offsets()
+ addresses = load_map_group_offsets(map_group_pointer_table, map_group_count, rom=rom)
self.assertEqual(len(addresses), 26, msg="there should still be 26 map groups")
self.assertIn(0x94034, addresses)
for address in addresses:
@@ -204,15 +207,12 @@ class TestCram(unittest.TestCase):
self.failUnless("EQU" in r)
def test_get_label_for(self):
- global all_labels
- temp = copy(all_labels)
# this is basd on the format defined in get_labels_between
all_labels = [{"label": "poop", "address": 0x5,
"offset": 0x5, "bank": 0,
"line_number": 2
}]
- self.assertEqual(get_label_for(5), "poop")
- all_labels = temp
+ self.assertEqual(get_label_for(5, _all_labels=all_labels), "poop")
def test_generate_map_constant_labels(self):
ids = generate_map_constant_labels()
@@ -228,8 +228,8 @@ class TestCram(unittest.TestCase):
def test_get_map_constant_label_by_id(self):
global map_internal_ids
map_internal_ids = generate_map_constant_labels()
- self.assertEqual(get_map_constant_label_by_id(0), "OLIVINE_POKECENTER_1F")
- self.assertEqual(get_map_constant_label_by_id(1), "OLIVINE_GYM")
+ self.assertEqual(get_map_constant_label_by_id(0, map_internal_ids), "OLIVINE_POKECENTER_1F")
+ self.assertEqual(get_map_constant_label_by_id(1, map_internal_ids), "OLIVINE_GYM")
def test_is_valid_address(self):
self.assertTrue(is_valid_address(0))
@@ -470,7 +470,8 @@ class TestAsmList(unittest.TestCase):
self.assertEqual(processed_incbins[0]["line"], incbin_lines[0])
self.assertEqual(processed_incbins[2]["line"], incbin_lines[1])
- def test_reset_incbins(self):
+ # TODO: use mocks before re-enabling this test
+ def xtest_reset_incbins(self):
global asm, incbin_lines, processed_incbins
# temporarily override the functions
global load_asm, isolate_incbins, process_incbins
@@ -529,16 +530,15 @@ class TestAsmList(unittest.TestCase):
self.assertEqual(largest[1]["line"], asm[3])
def test_generate_diff_insert(self):
- global asm
asm = ['first line', 'second line', 'third line',
'INCBIN "baserom.gbc",$90,$200 - $90',
'fifth line', 'last line',
- 'INCBIN "baserom.gbc",$33F,$4000 - $33F']
- diff = generate_diff_insert(0, "the real first line", debug=False)
+ 'INCBIN "baserom.gbc",$33F,$4000 - $33F', '\n']
+ diff = generate_diff_insert(0, "the real first line", _asm=asm, debug=False)
self.assertIn("the real first line", diff)
self.assertIn("INCBIN", diff)
self.assertNotIn("No newline at end of file", diff)
- self.assertIn("+"+asm[1], diff)
+ self.assertIn("+the real first line", diff)
class TestTextScript(unittest.TestCase):
"""for testing 'in-script' commands, etc."""