author | Celeste Brault <theredstoneblock0@gmail.com> | 2019-06-09 23:29:11 +0200
---|---|---
committer | IIMarckus <iimarckus@gmail.com> | 2019-06-10 19:07:44 -0600
commit | 013ae78d7415ca75b4e648be5f2c906e16bd13b9 (patch) |
tree | 7c2039a2d94ea164cf8c9356bcd091f0f3144afc |
parent | 3ea6f22c48ef9ba053757dbbde65ce059726e229 (diff) |
Update tools
-rw-r--r-- | Makefile | 95
-rw-r--r-- | audio.asm | 2
-rwxr-xr-x | data/sgb_border.asm | 2
-rwxr-xr-x | engine/HoF_room_pc.asm | 2
-rw-r--r-- | engine/bank3c.asm | 1
m--------- | extras | 0
-rw-r--r-- | gfx.py | 1931
-rw-r--r-- | gfx/game_boy.png (renamed from gfx/game_boy.norepeat.png) | bin | 246 -> 246 bytes
-rw-r--r-- | gfx/pikachu/unknown_e41d2.png | bin | 249 -> 5952 bytes
-rw-r--r-- | gfx/pikachu/unknown_e444b.png | bin | 353 -> 11020 bytes
-rw-r--r-- | gfx/pikachu/unknown_e4a99.png | bin | 261 -> 5766 bytes
-rw-r--r-- | gfx/pokemon_yellow.png (renamed from gfx/pokemon_yellow.t6.png) | bin | 781 -> 781 bytes
-rw-r--r-- | gfx/surfing_pikachu_1.png (renamed from gfx/surfing_pikachu_1.t4.png) | bin | 3766 -> 3766 bytes
-rw-r--r-- | gfx/surfing_pikachu_1c.png (renamed from gfx/surfing_pikachu_1c.t5.png) | bin | 1318 -> 1318 bytes
-rw-r--r-- | gfx/surfing_pikachu_3.png (renamed from gfx/surfing_pikachu_3.t1.png) | bin | 2783 -> 2783 bytes
-rw-r--r-- | gfx/theend.png (renamed from gfx/theend.interleave.png) | bin | 139 -> 139 bytes
-rw-r--r-- | gfx/tilesets/cavern.png (renamed from gfx/tilesets/cavern.t14.png) | bin | 1022 -> 1022 bytes
-rw-r--r-- | gfx/tilesets/cemetery.png (renamed from gfx/tilesets/cemetery.t4.png) | bin | 928 -> 928 bytes
-rw-r--r-- | gfx/tilesets/club.png (renamed from gfx/tilesets/club.t5.png) | bin | 899 -> 899 bytes
-rw-r--r-- | gfx/tilesets/house.png (renamed from gfx/tilesets/house.t2.png) | bin | 1163 -> 1163 bytes
-rw-r--r-- | gfx/tilesets/interior.png (renamed from gfx/tilesets/interior.t1.png) | bin | 1141 -> 1141 bytes
-rw-r--r-- | gfx/tilesets/lab.png (renamed from gfx/tilesets/lab.t4.png) | bin | 1051 -> 1051 bytes
-rw-r--r-- | gfx/tilesets/lobby.png (renamed from gfx/tilesets/lobby.t2.png) | bin | 966 -> 966 bytes
-rw-r--r-- | gfx/tilesets/mansion.png (renamed from gfx/tilesets/mansion.t2.png) | bin | 960 -> 960 bytes
-rw-r--r-- | gfx/tilesets/museum.png (renamed from gfx/tilesets/museum.t1.png) | bin | 1119 -> 1119 bytes
-rw-r--r-- | gfx/tilesets/plateau.png (renamed from gfx/tilesets/plateau.t10.png) | bin | 1011 -> 1011 bytes
-rw-r--r-- | gfx/tilesets/redshouse2.png (renamed from gfx/tilesets/redshouse2.t7.png) | bin | 845 -> 845 bytes
-rw-r--r-- | gfx/tilesets/ship.png (renamed from gfx/tilesets/ship.t6.png) | bin | 865 -> 865 bytes
-rw-r--r-- | gfx/tilesets/shipport.png (renamed from gfx/tilesets/shipport.t2.png) | bin | 1125 -> 1125 bytes
-rw-r--r-- | gfx/tilesets/underground.png (renamed from gfx/tilesets/underground.t7.png) | bin | 272 -> 272 bytes
-rwxr-xr-x | main.asm | 34
-rw-r--r-- | tools/.gitignore | 3
-rw-r--r-- | tools/Makefile | 16
-rw-r--r-- | tools/common.h | 40
-rw-r--r-- | tools/gfx.c | 296
-rw-r--r-- | tools/gfx.py | 267
-rw-r--r-- | tools/pkmncompress.c | 451
-rw-r--r-- | tools/pokemontools/__init__.py | 1
-rw-r--r-- | tools/pokemontools/gfx.py | 938
-rw-r--r-- | tools/pokemontools/lz.py | 580
-rw-r--r-- | tools/pokemontools/pcm.py | 156
-rw-r--r-- | tools/pokemontools/png.py | 2650
-rw-r--r-- | tools/scan_includes.c | 135
43 files changed, 5621 insertions, 1979 deletions
@@ -1,57 +1,94 @@
 PYTHON := python
-MD5 := md5sum -c --quiet
+pcm := $(PYTHON) tools/pokemontools/pcm.py pcm
 
-2bpp := $(PYTHON) gfx.py 2bpp
-1bpp := $(PYTHON) gfx.py 1bpp
-pcm := $(PYTHON) extras/pokemontools/pcm.py pcm
-pic := $(PYTHON) extras/pokemontools/pic.py compress
-includes := $(PYTHON) extras/pokemontools/scan_includes.py
+rom := pokeyellow.gbc
+
+objs := audio.o main.o text.o wram.o
+
+
+### Build tools
 
-objs := \
-	audio.o \
-	main.o \
-	text.o \
-	wram.o
+MD5 := md5sum -c
 
-$(foreach obj, $(objs:.o=), \
-	$(eval $(obj)_dep := $(shell $(includes) $(obj).asm)) \
-)
+RGBDS ?=
+RGBASM ?= $(RGBDS)rgbasm
+RGBFIX ?= $(RGBDS)rgbfix
+RGBGFX ?= $(RGBDS)rgbgfx
+RGBLINK ?= $(RGBDS)rgblink
+
+### Build targets
 
 .SUFFIXES:
-.SUFFIXES: .asm .o .gbc .png .2bpp .1bpp .pic .wav .pcm
 .SECONDEXPANSION:
-# Suppress annoying intermediate file deletion messages.
-.PRECIOUS: %.2bpp
-.PHONY: all clean yellow compare
-
-rom := pokeyellow.gbc
+.PRECIOUS:
+.SECONDARY:
+.PHONY: all clean yellow tidy compare tools
 
 all: $(rom)
 yellow: $(rom)
 
 # For contributors to make sure a change didn't affect the contents of the rom.
-compare: yellow
+compare: $(rom)
 	@$(MD5) roms.md5
 
 clean:
 	rm -f $(rom) $(objs) $(rom:.gbc=.sym)
 	find . \( -iname '*.1bpp' -o -iname '*.2bpp' -o -iname '*.pic' -o -iname '*.pcm' \) -exec rm {} +
+	$(MAKE) clean -C tools/
+
+tidy:
+	rm -f $(rom) $(objs) $(rom:.gbc=.sym)
+	$(MAKE) clean -C tools/
+
+tools:
+	$(MAKE) -C tools/
+
+# Build tools when building the rom.
+# This has to happen before the rules are processed, since that's when scan_includes is run.
+ifeq (,$(filter clean tools,$(MAKECMDGOALS)))
+$(info $(shell $(MAKE) -C tools))
+endif
+
 %.asm: ;
-$(objs): %.o: %.asm $$(%_dep)
-	rgbasm -h -o $@ $*.asm
+
+%.o: dep = $(shell tools/scan_includes $(@D)/$*.asm)
+$(objs): %.o: %.asm $$(dep)
+	$(RGBASM) -h -o $@ $*.asm
 
 opts = -cjsv -k 01 -l 0x33 -m 0x1b -p 0 -r 03 -t "POKEMON YELLOW"
 
 $(rom): $(objs)
-	rgblink -n $*.sym -l pokeyellow.link -o $@ $^
-	rgbfix $(opts) $@
-	sort $(rom:.gbc=.sym) -o $(rom:.gbc=.sym)
+	$(RGBLINK) -n pokeyellow.sym -l pokeyellow.link -o $@ $^
+	$(RGBFIX) $(opts) $@
+	sort $(rom:.gbc=.sym) -o $(rom:.gbc=.sym)
+
+### Misc file-specific graphics rules
+
+gfx/game_boy.2bpp: tools/gfx += --remove-duplicates
+gfx/theend.2bpp: tools/gfx += --interleave --png=$<
+gfx/tilesets/%.2bpp: tools/gfx += --trim-whitespace
+gfx/pokemon_yellow.2bpp: tools/gfx += --trim-whitespace
+gfx/surfing_pikachu_1c.2bpp: tools/gfx += --trim-whitespace
+gfx/surfing_pikachu_3.2bpp: tools/gfx += --trim-whitespace
+gfx/surfing_pikachu_1.2bpp: tools/gfx += --trim-whitespace
+
+### Catch-all graphics rules
 
 %.png: ;
-%.2bpp: %.png ; @$(2bpp) $<
-%.1bpp: %.png ; @$(1bpp) $<
-%.pic: %.2bpp ; @$(pic) $<
+
+%.2bpp: %.png
+	$(RGBGFX) $(rgbgfx) -o $@ $<
+	$(if $(tools/gfx),\
+		tools/gfx $(tools/gfx) -o $@ $@)
+
+%.1bpp: %.png
+	$(RGBGFX) -d1 $(rgbgfx) -o $@ $<
+	$(if $(tools/gfx),\
+		tools/gfx $(tools/gfx) -d1 -o $@ $@)
+
+%.pic: %.2bpp
+	tools/pkmncompress $< $@
+
 %.wav: ;
 %.pcm: %.wav ; @$(pcm) $<
@@ -652,7 +652,7 @@ SECTION "Audio Engine 4", ROMX ; BANK $20
 SurfingPikachu1Graphics1:: INCBIN "gfx/surfing_pikachu_1a.2bpp"
 SurfingPikachu1Graphics2:: INCBIN "gfx/surfing_pikachu_1b.2bpp"
-SurfingPikachu1Graphics3:: INCBIN "gfx/surfing_pikachu_1c.t5.2bpp"
+SurfingPikachu1Graphics3:: INCBIN "gfx/surfing_pikachu_1c.2bpp"
 
 INCLUDE "audio/engine_4.asm"
diff --git a/data/sgb_border.asm b/data/sgb_border.asm
index 0080b4d2..cc592487 100755
--- a/data/sgb_border.asm
+++ b/data/sgb_border.asm
@@ -84,4 +84,4 @@ BorderPalettes:
 	RGB 31, 31, 25
 
 SGBBorderGraphics:
-	INCBIN "gfx/pokemon_yellow.t6.2bpp"
+	INCBIN "gfx/pokemon_yellow.2bpp"
diff --git a/engine/HoF_room_pc.asm b/engine/HoF_room_pc.asm
index 9c1c0fd2..c070e8c8 100755
--- a/engine/HoF_room_pc.asm
+++ b/engine/HoF_room_pc.asm
@@ -319,5 +319,5 @@ INCLUDE "data/credits_order.asm"
 INCLUDE "text/credits_text.asm"
 
 TheEndGfx:
-	INCBIN "gfx/theend.interleave.2bpp"
+	INCBIN "gfx/theend.2bpp"
 TheEndGfxEnd:
diff --git a/engine/bank3c.asm b/engine/bank3c.asm
index a9bfea8d..10f7ffdd 100644
--- a/engine/bank3c.asm
+++ b/engine/bank3c.asm
@@ -104,6 +104,7 @@ SetMapSpecificScriptFlagsOnMapReload:
 
 BeachHouse_GFX:
 	INCBIN "gfx/tilesets/beachhouse.2bpp"
+	ds 384
 BeachHouse_Block:
 	INCBIN "gfx/blocksets/beachhouse.bst"
diff --git a/extras b/extras
deleted file mode 160000
-Subproject 0e1798937a4bf723813574281d0dc12c471c919
diff --git a/gfx.py b/gfx.py
deleted file mode 100644
index e2788648..00000000
--- a/gfx.py
+++ /dev/null
@@ -1,1931 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import os
-import sys
-sys.path.insert(0,(os.path.abspath(os.path.dirname(__file__) + 'extras/pokemontools'))) # correct module path to pokemontools
-import png
-from math import sqrt, floor, ceil
-import argparse
-
-import configuration
-config = configuration.Config()
-
-import pokemon_constants
-import trainers
-import romstr
-
-
-def load_rom():
-    rom = romstr.RomStr.load(filename=config.rom_path)
-    return rom
-
-
-def split(list_, interval):
-    """
-    Split a list by length.
-    """
-    for i in xrange(0, len(list_), interval):
-        j = min(i + interval, len(list_))
-        yield list_[i:j]
-
-
-def hex_dump(data, length=0x10):
-    """
-    just use hexdump -C
-    """
-    margin = len('%x' % len(data))
-    output = []
-    address = 0
-    for line in split(data, length):
-        output += [
-            hex(address)[2:].zfill(margin) +
-            ' | ' +
-            ' '.join('%.2x' % byte for byte in line)
-        ]
-        address += length
-    return '\n'.join(output)
-
-
-def get_tiles(image):
-    """
-    Split a 2bpp image into 8x8 tiles.
-    """
-    return list(split(image, 0x10))
-
-def connect(tiles):
-    """
-    Combine 8x8 tiles into a 2bpp image.
-    """
-    return [byte for tile in tiles for byte in tile]
-
-def transpose(tiles, width=None):
-    """
-    Transpose a tile arrangement along line y=-x.
- - 00 01 02 03 04 05 00 06 0c 12 18 1e - 06 07 08 09 0a 0b 01 07 0d 13 19 1f - 0c 0d 0e 0f 10 11 <-> 02 08 0e 14 1a 20 - 12 13 14 15 16 17 03 09 0f 15 1b 21 - 18 19 1a 1b 1c 1d 04 0a 10 16 1c 22 - 1e 1f 20 21 22 23 05 0b 11 17 1d 23 - """ - if width == None: - width = int(sqrt(len(tiles))) # assume square image - tiles = sorted(enumerate(tiles), key= lambda (i, tile): i % width) - return [tile for i, tile in tiles] - -def transpose_tiles(image, width=None): - return connect(transpose(get_tiles(image), width)) - -def interleave(tiles, width): - """ - 00 01 02 03 04 05 00 02 04 06 08 0a - 06 07 08 09 0a 0b 01 03 05 07 09 0b - 0c 0d 0e 0f 10 11 --> 0c 0e 10 12 14 16 - 12 13 14 15 16 17 0d 0f 11 13 15 17 - 18 19 1a 1b 1c 1d 18 1a 1c 1e 20 22 - 1e 1f 20 21 22 23 19 1b 1d 1f 21 23 - """ - interleaved = [] - left, right = split(tiles[::2], width), split(tiles[1::2], width) - for l, r in zip(left, right): - interleaved += l + r - return interleaved - -def deinterleave(tiles, width): - """ - 00 02 04 06 08 0a 00 01 02 03 04 05 - 01 03 05 07 09 0b 06 07 08 09 0a 0b - 0c 0e 10 12 14 16 --> 0c 0d 0e 0f 10 11 - 0d 0f 11 13 15 17 12 13 14 15 16 17 - 18 1a 1c 1e 20 22 18 19 1a 1b 1c 1d - 19 1b 1d 1f 21 23 1e 1f 20 21 22 23 - """ - deinterleaved = [] - rows = list(split(tiles, width)) - for left, right in zip(rows[::2], rows[1::2]): - for l, r in zip(left, right): - deinterleaved += [l, r] - return deinterleaved - -def interleave_tiles(image, width): - return connect(interleave(get_tiles(image), width)) - -def deinterleave_tiles(image, width): - return connect(deinterleave(get_tiles(image), width)) - - -def condense_tiles_to_map(image): - tiles = get_tiles(image) - new_tiles = [] - tilemap = [] - for tile in tiles: - if tile not in new_tiles: - new_tiles += [tile] - tilemap += [new_tiles.index(tile)] - new_image = connect(new_tiles) - return new_image, tilemap - - -def to_file(filename, data): - file = open(filename, 'wb') - for byte in data: - file.write('%c' % byte) - file.close() - - - -""" -A rundown of Pokemon Crystal's compression scheme: - -Control commands occupy bits 5-7. -Bits 0-4 serve as the first parameter <n> for each command. -""" -lz_commands = { - 'literal': 0, # n values for n bytes - 'iterate': 1, # one value for n bytes - 'alternate': 2, # alternate two values for n bytes - 'blank': 3, # zero for n bytes -} - -""" -Repeater commands repeat any data that was just decompressed. -They take an additional signed parameter <s> to mark a relative starting point. -These wrap around (positive from the start, negative from the current position). -""" -lz_commands.update({ - 'repeat': 4, # n bytes starting from s - 'flip': 5, # n bytes in reverse bit order starting from s - 'reverse': 6, # n bytes backwards starting from s -}) - -""" -The long command is used when 5 bits aren't enough. Bits 2-4 contain a new control code. -Bits 0-1 are appended to a new byte as 8-9, allowing a 10-bit parameter. -""" -lz_commands.update({ - 'long': 7, # n is now 10 bits for a new control code -}) -max_length = 1 << 10 # can't go higher than 10 bits -lowmax = 1 << 5 # standard 5-bit param - -""" -If 0xff is encountered instead of a command, decompression ends. -""" -lz_end = 0xff - - -class Compressed: - - """ - Compress arbitrary data, usually 2bpp. - """ - - def __init__(self, image=None, mode='horiz', size=None): - assert image, 'need something to compress!' 
- image = list(image) - self.image = image - self.pic = [] - self.animtiles = [] - - # only transpose pic (animtiles were never transposed in decompression) - if size != None: - for byte in range((size*size)*16): - self.pic += image[byte] - for byte in range(((size*size)*16),len(image)): - self.animtiles += image[byte] - else: - self.pic = image - - if mode == 'vert': - self.tiles = get_tiles(self.pic) - self.tiles = transpose(self.tiles) - self.pic = connect(self.tiles) - - self.image = self.pic + self.animtiles - - self.end = len(self.image) - - self.byte = None - self.address = 0 - - self.stream = [] - - self.zeros = [] - self.alts = [] - self.iters = [] - self.repeats = [] - self.flips = [] - self.reverses = [] - self.literals = [] - - self.output = [] - - self.compress() - - - def compress(self): - """ - Incomplete, but outputs working compressed data. - """ - - self.address = 0 - - # todo - #self.scanRepeats() - - while ( self.address < self.end ): - - #if (self.repeats): - # self.doRepeats() - - #if (self.flips): - # self.doFlips() - - #if (self.reverses): - # self.doReverses - - if (self.checkWhitespace()): - self.doLiterals() - self.doWhitespace() - - elif (self.checkIter()): - self.doLiterals() - self.doIter() - - elif (self.checkAlts()): - self.doLiterals() - self.doAlts() - - else: # doesn't fit any pattern -> literal - self.addLiteral() - self.next() - - self.doStream() - - # add any literals we've been sitting on - self.doLiterals() - - # done - self.output.append(lz_end) - - - def getCurByte(self): - if self.address < self.end: - self.byte = ord(self.image[self.address]) - else: self.byte = None - - def next(self): - self.address += 1 - self.getCurByte() - - def addLiteral(self): - self.getCurByte() - self.literals.append(self.byte) - if len(self.literals) > max_length: - raise Exception, "literals exceeded max length and the compressor didn't catch it" - elif len(self.literals) == max_length: - self.doLiterals() - - def doLiterals(self): - if len(self.literals) > lowmax: - self.output.append( (lz_commands['long'] << 5) | (lz_commands['literal'] << 2) | ((len(self.literals) - 1) >> 8) ) - self.output.append( (len(self.literals) - 1) & 0xff ) - elif len(self.literals) > 0: - self.output.append( (lz_commands['literal'] << 5) | (len(self.literals) - 1) ) - for byte in self.literals: - self.output.append(byte) - self.literals = [] - - def doStream(self): - for byte in self.stream: - self.output.append(byte) - self.stream = [] - - - def scanRepeats(self): - """ - Works, but doesn't do flipped/reversed streams yet. - - This takes up most of the compress time and only saves a few bytes. - It might be more effective to exclude it entirely. 
- """ - - self.repeats = [] - self.flips = [] - self.reverses = [] - - # make a 5-letter word list of the sequence - letters = 5 # how many bytes it costs to use a repeat over a literal - # any shorter and it's not worth the trouble - num_words = len(self.image) - letters - words = [] - for i in range(self.address,num_words): - word = [] - for j in range(letters): - word.append( ord(self.image[i+j]) ) - words.append((word, i)) - - zeros = [] - for zero in range(letters): - zeros.append( 0 ) - - # check for matches - def get_matches(): - # TODO: - # append to 3 different match lists instead of yielding to one - # - #flipped = [] - #for byte in enumerate(this[0]): - # flipped.append( sum(1<<(7-i) for i in range(8) if (this[0][byte])>>i&1) ) - #reversed = this[0][::-1] - # - for whereabout, this in enumerate(words): - for that in range(whereabout+1,len(words)): - if words[that][0] == this[0]: - if words[that][1] - this[1] >= letters: - # remove zeros - if this[0] != zeros: - yield [this[0], this[1], words[that][1]] - - matches = list(get_matches()) - - # remove more zeros - buffer = [] - for match in matches: - # count consecutive zeros in a word - num_zeros = 0 - highest = 0 - for j in range(letters): - if match[0][j] == 0: - num_zeros += 1 - else: - if highest < num_zeros: highest = num_zeros - num_zeros = 0 - if highest < 4: - # any more than 3 zeros in a row isn't worth it - # (and likely to already be accounted for) - buffer.append(match) - matches = buffer - - # combine overlapping matches - buffer = [] - for this, match in enumerate(matches): - if this < len(matches) - 1: # special case for the last match - if matches[this+1][1] <= (match[1] + len(match[0])): # check overlap - if match[1] + len(match[0]) < match[2]: - # next match now contains this match's bytes too - # this only appends the last byte (assumes overlaps are +1 - match[0].append(matches[this+1][0][-1]) - matches[this+1] = match - elif match[1] + len(match[0]) == match[2]: - # we've run into the thing we matched - buffer.append(match) - # else we've gone past it and we can ignore it - else: # no more overlaps - buffer.append(match) - else: # last match, so there's nothing to check - buffer.append(match) - matches = buffer - - # remove alternating sequences - buffer = [] - for match in matches: - for i in range(6 if letters > 6 else letters): - if match[0][i] != match[0][i&1]: - buffer.append(match) - break - matches = buffer - - self.repeats = matches - - - def doRepeats(self): - """doesn't output the right values yet""" - - unusedrepeats = [] - for repeat in self.repeats: - if self.address >= repeat[2]: - - # how far in we are - length = (len(repeat[0]) - (self.address - repeat[2])) - - # decide which side we're copying from - if (self.address - repeat[1]) <= 0x80: - self.doLiterals() - self.stream.append( (lz_commands['repeat'] << 5) | length - 1 ) - - # wrong? - self.stream.append( (((self.address - repeat[1])^0xff)+1)&0xff ) - - else: - self.doLiterals() - self.stream.append( (lz_commands['repeat'] << 5) | length - 1 ) - - # wrong? 
- self.stream.append(repeat[1]>>8) - self.stream.append(repeat[1]&0xff) - - #print hex(self.address) + ': ' + hex(len(self.output)) + ' ' + hex(length) - self.address += length - - else: unusedrepeats.append(repeat) - - self.repeats = unusedrepeats - - - def checkWhitespace(self): - self.zeros = [] - self.getCurByte() - original_address = self.address - - if ( self.byte == 0 ): - while ( self.byte == 0 ) & ( len(self.zeros) <= max_length ): - self.zeros.append(self.byte) - self.next() - if len(self.zeros) > 1: - return True - self.address = original_address - return False - - def doWhitespace(self): - if (len(self.zeros) + 1) >= lowmax: - self.stream.append( (lz_commands['long'] << 5) | (lz_commands['blank'] << 2) | ((len(self.zeros) - 1) >> 8) ) - self.stream.append( (len(self.zeros) - 1) & 0xff ) - elif len(self.zeros) > 1: - self.stream.append( lz_commands['blank'] << 5 | (len(self.zeros) - 1) ) - else: - raise Exception, "checkWhitespace() should prevent this from happening" - - - def checkAlts(self): - self.alts = [] - self.getCurByte() - original_address = self.address - num_alts = 0 - - # make sure we don't check for alts at the end of the file - if self.address+3 >= self.end: return False - - self.alts.append(self.byte) - self.alts.append(ord(self.image[self.address+1])) - - # are we onto smething? - if ( ord(self.image[self.address+2]) == self.alts[0] ): - cur_alt = 0 - while (ord(self.image[(self.address)+1]) == self.alts[num_alts&1]) & (num_alts <= max_length): - num_alts += 1 - self.next() - # include the last alternated byte - num_alts += 1 - self.address = original_address - if num_alts > lowmax: - return True - elif num_alts > 2: - return True - return False - - def doAlts(self): - original_address = self.address - self.getCurByte() - - #self.alts = [] - #num_alts = 0 - - #self.alts.append(self.byte) - #self.alts.append(ord(self.image[self.address+1])) - - #i = 0 - #while (ord(self.image[self.address+1]) == self.alts[i^1]) & (num_alts <= max_length): - # num_alts += 1 - # i ^=1 - # self.next() - ## include the last alternated byte - #num_alts += 1 - - num_alts = len(self.iters) + 1 - - if num_alts > lowmax: - self.stream.append( (lz_commands['long'] << 5) | (lz_commands['alternate'] << 2) | ((num_alts - 1) >> 8) ) - self.stream.append( num_alts & 0xff ) - self.stream.append( self.alts[0] ) - self.stream.append( self.alts[1] ) - elif num_alts > 2: - self.stream.append( (lz_commands['alternate'] << 5) | (num_alts - 1) ) - self.stream.append( self.alts[0] ) - self.stream.append( self.alts[1] ) - else: - raise Exception, "checkAlts() should prevent this from happening" - - self.address = original_address - self.address += num_alts - - - def checkIter(self): - self.iters = [] - self.getCurByte() - iter = self.byte - original_address = self.address - while (self.byte == iter) & (len(self.iters) < max_length): - self.iters.append(self.byte) - self.next() - self.address = original_address - if len(self.iters) > 3: - # 3 or fewer isn't worth the trouble and actually longer - # if part of a larger literal set - return True - - return False - - def doIter(self): - self.getCurByte() - iter = self.byte - original_address = self.address - - self.iters = [] - while (self.byte == iter) & (len(self.iters) < max_length): - self.iters.append(self.byte) - self.next() - - if (len(self.iters) - 1) >= lowmax: - self.stream.append( (lz_commands['long'] << 5) | (lz_commands['iterate'] << 2) | ((len(self.iters)-1) >> 8) ) - self.stream.append( (len(self.iters) - 1) & 0xff ) - self.stream.append( iter 
) - elif len(self.iters) > 3: - # 3 or fewer isn't worth the trouble and actually longer - # if part of a larger literal set - self.stream.append( (lz_commands['iterate'] << 5) | (len(self.iters) - 1) ) - self.stream.append( iter ) - else: - self.address = original_address - raise Exception, "checkIter() should prevent this from happening" - - -class Decompressed: - """ - Parse compressed data, usually 2bpp. - - parameters: - [compressed data] - [tile arrangement] default: 'vert' - [size of pic] default: None - [start] (optional) - - splits output into pic [size] and animation tiles if applicable - data can be fed in from rom if [start] is specified - """ - - def __init__(self, lz=None, mode=None, size=None, start=0): - # todo: play nice with Compressed - - assert lz, 'need something to compress!' - self.lz = lz - - self.byte = None - self.address = 0 - self.start = start - - self.output = [] - - self.decompress() - - debug = False - # print tuple containing start and end address - if debug: print '(' + hex(self.start) + ', ' + hex(self.start + self.address+1) + '),' - - # only transpose pic - self.pic = [] - self.animtiles = [] - - if size != None: - self.tiles = get_tiles(self.output) - self.pic = connect(self.tiles[:(size*size)]) - self.animtiles = connect(self.tiles[(size*size):]) - else: self.pic = self.output - - if mode == 'vert': - self.tiles = get_tiles(self.pic) - self.tiles = transpose(self.tiles) - self.pic = connect(self.tiles) - - self.output = self.pic + self.animtiles - - - def decompress(self): - """ - Replica of crystal's decompression. - """ - - self.output = [] - - while True: - self.getCurByte() - - if (self.byte == lz_end): - break - - self.cmd = (self.byte & 0b11100000) >> 5 - - if self.cmd == lz_commands['long']: # 10-bit param - self.cmd = (self.byte & 0b00011100) >> 2 - self.length = (self.byte & 0b00000011) << 8 - self.next() - self.length += self.byte + 1 - else: # 5-bit param - self.length = (self.byte & 0b00011111) + 1 - - # literals - if self.cmd == lz_commands['literal']: - self.doLiteral() - elif self.cmd == lz_commands['iterate']: - self.doIter() - elif self.cmd == lz_commands['alternate']: - self.doAlt() - elif self.cmd == lz_commands['blank']: - self.doZeros() - - else: # repeaters - self.next() - if self.byte > 0x7f: # negative - self.displacement = self.byte & 0x7f - self.displacement = len(self.output) - self.displacement - 1 - else: # positive - self.displacement = self.byte * 0x100 - self.next() - self.displacement += self.byte - - if self.cmd == lz_commands['flip']: - self.doFlip() - elif self.cmd == lz_commands['reverse']: - self.doReverse() - else: # lz_commands['repeat'] - self.doRepeat() - - self.address += 1 - #self.next() # somewhat of a hack - - - def getCurByte(self): - self.byte = ord(self.lz[self.start+self.address]) - - def next(self): - self.address += 1 - self.getCurByte() - - def doLiteral(self): - """ - Copy data directly. - """ - for byte in range(self.length): - self.next() - self.output.append(self.byte) - - def doIter(self): - """ - Write one byte repeatedly. - """ - self.next() - for byte in range(self.length): - self.output.append(self.byte) - - def doAlt(self): - """ - Write alternating bytes. - """ - self.alts = [] - self.next() - self.alts.append(self.byte) - self.next() - self.alts.append(self.byte) - - for byte in range(self.length): - self.output.append(self.alts[byte&1]) - - def doZeros(self): - """ - Write zeros. 
- """ - for byte in range(self.length): - self.output.append(0x00) - - def doFlip(self): - """ - Repeat flipped bytes from output. - - eg 11100100 -> 00100111 - quat 3 2 1 0 -> 0 2 1 3 - """ - for byte in range(self.length): - flipped = sum(1<<(7-i) for i in range(8) if self.output[self.displacement+byte]>>i&1) - self.output.append(flipped) - - def doReverse(self): - """ - Repeat reversed bytes from output. - """ - for byte in range(self.length): - self.output.append(self.output[self.displacement-byte]) - - def doRepeat(self): - """ - Repeat bytes from output. - """ - for byte in range(self.length): - self.output.append(self.output[self.displacement+byte]) - - - -sizes = [ - 5, 6, 7, 5, 6, 7, 5, 6, 7, 5, 5, 7, 5, 5, 7, 5, - 6, 7, 5, 6, 5, 7, 5, 7, 5, 7, 5, 6, 5, 6, 7, 5, - 6, 7, 5, 6, 6, 7, 5, 6, 5, 7, 5, 6, 7, 5, 7, 5, - 7, 5, 7, 5, 7, 5, 7, 5, 7, 5, 7, 5, 6, 7, 5, 6, - 7, 5, 7, 7, 5, 6, 7, 5, 6, 5, 6, 6, 6, 7, 5, 7, - 5, 6, 6, 5, 7, 6, 7, 5, 7, 5, 7, 7, 6, 6, 7, 6, - 7, 5, 7, 5, 5, 7, 7, 5, 6, 7, 6, 7, 6, 7, 7, 7, - 6, 6, 7, 5, 6, 6, 7, 6, 6, 6, 7, 6, 6, 6, 7, 7, - 6, 7, 7, 5, 5, 6, 6, 6, 6, 5, 6, 5, 6, 7, 7, 7, - 7, 7, 5, 6, 7, 7, 5, 5, 6, 7, 5, 6, 7, 5, 6, 7, - 6, 6, 5, 7, 6, 6, 5, 7, 7, 6, 6, 5, 5, 5, 5, 7, - 5, 6, 5, 6, 7, 7, 5, 7, 6, 7, 5, 6, 7, 5, 5, 6, - 6, 5, 6, 6, 6, 6, 7, 6, 5, 6, 7, 5, 7, 6, 6, 7, - 6, 6, 5, 7, 5, 6, 6, 5, 7, 5, 6, 5, 6, 6, 5, 6, - 6, 7, 7, 6, 7, 7, 5, 7, 6, 7, 7, 5, 7, 5, 6, 6, - 6, 7, 7, 7, 7, 5, 6, 7, 7, 7, 5, -] - -def make_sizes(): - """ - Front pics have specified sizes. - """ - rom = load_rom() - top = 251 - base_stats = 0x51424 - # print monster sizes - address = base_stats + 0x11 - - output = '' - - for id in range(top): - size = (ord(rom[address])) & 0x0f - if id % 16 == 0: output += '\n\t' - output += str(size) + ', ' - address += 0x20 - - print output - - - -def decompress_fx_by_id(id, fxs=0xcfcf6): - rom = load_rom() - address = fxs + id*4 # len_fxptr - # get size - num_tiles = ord(rom[address]) # # tiles - # get pointer - bank = ord(rom[address+1]) - address = (ord(rom[address+3]) << 8) + ord(rom[address+2]) - address = (bank * 0x4000) + (address & 0x3fff) - # decompress - fx = Decompressed(rom, 'horiz', num_tiles, address) - return fx - -def decompress_fx(num_fx=40): - for id in range(num_fx): - fx = decompress_fx_by_id(id) - filename = './gfx/fx/' + str(id).zfill(3) + '.2bpp' # ./gfx/fx/039.2bpp - to_file(filename, fx.pic) - - -num_pics = 2 -front = 0 -back = 1 - -monsters = 0x120000 -num_monsters = 251 - -unowns = 0x124000 -num_unowns = 26 -unown_dex = 201 - -def decompress_monster_by_id(id=0, type=front): - rom = load_rom() - # no unowns here - if id + 1 == unown_dex: return None - # get size - if type == front: - size = sizes[id] - else: size = None - # get pointer - address = monsters + (id*2 + type)*3 # bank, address - bank = ord(rom[address]) + 0x36 # crystal - address = (ord(rom[address+2]) << 8) + ord(rom[address+1]) - address = (bank * 0x4000) + (address & 0x3fff) - # decompress - monster = Decompressed(rom, 'vert', size, address) - return monster - -def decompress_monsters(type=front): - for id in range(num_monsters): - # decompress - monster = decompress_monster_by_id(id, type) - if monster != None: # no unowns here - if not type: # front - filename = 'front.2bpp' - folder = './gfx/pics/' + str(id+1).zfill(3) + '/' - to_file(folder+filename, monster.pic) - filename = 'tiles.2bpp' - folder = './gfx/pics/' + str(id+1).zfill(3) + '/' - to_file(folder+filename, monster.animtiles) - else: # back - filename = 'back.2bpp' - folder = 
'./gfx/pics/' + str(id+1).zfill(3) + '/' - to_file(folder+filename, monster.pic) - - -def decompress_unown_by_id(letter, type=front): - rom = load_rom() - # get size - if type == front: - size = sizes[unown_dex-1] - else: size = None - # get pointer - address = unowns + (letter*2 + type)*3 # bank, address - bank = ord(rom[address]) + 0x36 # crystal - address = (ord(rom[address+2]) << 8) + ord(rom[address+1]) - address = (bank * 0x4000) + (address & 0x3fff) - # decompress - unown = Decompressed(rom, 'vert', size, address) - return unown - -def decompress_unowns(type=front): - for letter in range(num_unowns): - # decompress - unown = decompress_unown_by_id(letter, type) - - if not type: # front - filename = 'front.2bpp' - folder = './gfx/pics/' + str(unown_dex).zfill(3) + chr(ord('a') + letter) + '/' - to_file(folder+filename, unown.pic) - filename = 'tiles.2bpp' - folder = './gfx/anim/' - to_file(folder+filename, unown.animtiles) - else: # back - filename = 'back.2bpp' - folder = './gfx/pics/' + str(unown_dex).zfill(3) + chr(ord('a') + letter) + '/' - to_file(folder+filename, unown.pic) - - -trainers = 0x128000 -num_trainers = 67 - -def decompress_trainer_by_id(id): - rom = load_rom() - # get pointer - address = trainers + id*3 # bank, address - bank = ord(rom[address]) + 0x36 # crystal - address = (ord(rom[address+2]) << 8) + ord(rom[address+1]) - address = (bank * 0x4000) + (address & 0x3fff) - # decompress - trainer = Decompressed(rom, 'vert', None, address) - return trainer - -def decompress_trainers(): - for id in range(num_trainers): - # decompress - trainer = decompress_trainer_by_id(id) - filename = './gfx/trainers/' + str(id).zfill(3) + '.2bpp' # ./gfx/trainers/066.2bpp - to_file(filename, trainer.pic) - - -# in order of use (sans repeats) -intro_gfx = [ - ('logo', 0x109407), - ('001', 0xE641D), # tilemap - ('unowns', 0xE5F5D), - ('pulse', 0xE634D), - ('002', 0xE63DD), # tilemap - ('003', 0xE5ECD), # tilemap - ('background', 0xE5C7D), - ('004', 0xE5E6D), # tilemap - ('005', 0xE647D), # tilemap - ('006', 0xE642D), # tilemap - ('pichu_wooper', 0xE592D), - ('suicune_run', 0xE555D), - ('007', 0xE655D), # tilemap - ('008', 0xE649D), # tilemap - ('009', 0xE76AD), # tilemap - ('suicune_jump', 0xE6DED), - ('unown_back', 0xE785D), - ('010', 0xE764D), # tilemap - ('011', 0xE6D0D), # tilemap - ('suicune_close', 0xE681D), - ('012', 0xE6C3D), # tilemap - ('013', 0xE778D), # tilemap - ('suicune_back', 0xE72AD), - ('014', 0xE76BD), # tilemap - ('015', 0xE676D), # tilemap - ('crystal_unowns', 0xE662D), - ('017', 0xE672D), # tilemap -] - -def decompress_intro(): - rom = load_rom() - for name, address in intro_gfx: - filename = './gfx/intro/' + name + '.2bpp' - gfx = Decompressed( rom, 'horiz', None, address ) - to_file(filename, gfx.output) - - -title_gfx = [ - ('suicune', 0x10EF46), - ('logo', 0x10F326), - ('crystal', 0x10FCEE), -] - -def decompress_title(): - rom = load_rom() - for name, address in title_gfx: - filename = './gfx/title/' + name + '.2bpp' - gfx = Decompressed( rom, 'horiz', None, address ) - to_file(filename, gfx.output) - -def decompress_tilesets(): - rom = load_rom() - tileset_headers = 0x4d596 - len_tileset = 15 - num_tilesets = 0x25 - for tileset in range(num_tilesets): - ptr = tileset*len_tileset + tileset_headers - address = (ord(rom[ptr])*0x4000) + (((ord(rom[ptr+1]))+ord(rom[ptr+2])*0x100)&0x3fff) - tiles = Decompressed( rom, 'horiz', None, address ) - filename = './gfx/tilesets/'+str(tileset).zfill(2)+'.2bpp' - to_file( filename, tiles.output ) - #print '(' + hex(address) 
+ ', '+ hex(address+tiles.address+1) + '),' - -misc = [ - ('player', 0x2BA1A, 'vert'), - ('dude', 0x2BBAA, 'vert'), - ('town_map', 0xF8BA0, 'horiz'), - ('pokegear', 0x1DE2E4, 'horiz'), - ('pokegear_sprites', 0x914DD, 'horiz'), -] -def decompress_misc(): - rom = load_rom() - for name, address, mode in misc: - filename = './gfx/misc/' + name + '.2bpp' - gfx = Decompressed( rom, mode, None, address ) - to_file(filename, gfx.output) - -def decompress_all(debug=False): - """ - Decompress all known compressed data in baserom. - """ - - if debug: print 'fronts' - decompress_monsters(front) - if debug: print 'backs' - decompress_monsters(back) - if debug: print 'unown fronts' - decompress_unowns(front) - if debug: print 'unown backs' - decompress_unowns(back) - - if debug: print 'trainers' - decompress_trainers() - - if debug: print 'fx' - decompress_fx() - - if debug: print 'intro' - decompress_intro() - - if debug: print 'title' - decompress_title() - - if debug: print 'tilesets' - decompress_tilesets() - - if debug: print 'misc' - decompress_misc() - - return - - -def decompress_from_address(address, mode='horiz', filename='de.2bpp', size=None): - """ - Write decompressed data from an address to a 2bpp file. - """ - rom = load_rom() - image = Decompressed(rom, mode, size, address) - to_file(filename, image.pic) - - -def decompress_file(filein, fileout, mode='horiz', size=None): - f = open(filein, 'rb') - image = f.read() - f.close() - - de = Decompressed(image, mode, size) - - to_file(fileout, de.pic) - - -def compress_file(filein, fileout, mode='horiz'): - f = open(filein, 'rb') - image = f.read() - f.close() - - lz = Compressed(image, mode) - - to_file(fileout, lz.output) - - - - -def compress_monster_frontpic(id, fileout): - mode = 'vert' - - fpic = './gfx/pics/' + str(id).zfill(3) + '/front.2bpp' - fanim = './gfx/pics/' + str(id).zfill(3) + '/tiles.2bpp' - - pic = open(fpic, 'rb').read() - anim = open(fanim, 'rb').read() - image = pic + anim - - lz = Compressed(image, mode, sizes[id-1]) - - out = './gfx/pics/' + str(id).zfill(3) + '/front.lz' - - to_file(out, lz.output) - - - -def get_uncompressed_gfx(start, num_tiles, filename): - """ - Grab tiles directly from rom and write to file. 
- """ - rom = load_rom() - bytes_per_tile = 0x10 - length = num_tiles*bytes_per_tile - end = start + length - image = [] - for address in range(start,end): - image.append(ord(rom[address])) - to_file(filename, image) - - - -def bin_to_rgb(word): - red = word & 0b11111 - word >>= 5 - green = word & 0b11111 - word >>= 5 - blue = word & 0b11111 - return (red, green, blue) - -def rgb_from_rom(address, length=0x80): - rom = load_rom() - return convert_binary_pal_to_text(rom[address:address+length]) - -def convert_binary_pal_to_text_by_filename(filename): - with open(filename) as f: - pal = bytearray(f.read()) - return convert_binary_pal_to_text(pal) - -def convert_binary_pal_to_text(pal): - output = '' - words = [hi * 0x100 + lo for lo, hi in zip(pal[::2], pal[1::2])] - for word in words: - red, green, blue = ['%.2d' % c for c in bin_to_rgb(word)] - output += '\tRGB ' + ', '.join((red, green, blue)) - output += '\n' - return output - -def read_rgb_macros(lines): - colors = [] - for line in lines: - macro = line.split(" ")[0].strip() - if macro == 'RGB': - params = ' '.join(line.split(" ")[1:]).split(',') - red, green, blue = [int(v) for v in params] - colors += [[red, green, blue]] - return colors - - -def rewrite_binary_pals_to_text(filenames): - for filename in filenames: - pal_text = convert_binary_pal_to_text_by_filename(filename) - with open(filename, 'w') as out: - out.write(pal_text) - - -def dump_monster_pals(): - rom = load_rom() - - pals = 0xa8d6 - pal_length = 0x4 - for mon in range(251): - - name = pokemon_constants.pokemon_constants[mon+1].title().replace('_','') - num = str(mon+1).zfill(3) - dir = 'gfx/pics/'+num+'/' - - address = pals + mon*pal_length*2 - - - pal_data = [] - for byte in range(pal_length): - pal_data.append(ord(rom[address])) - address += 1 - - filename = 'normal.pal' - to_file('../'+dir+filename, pal_data) - - spacing = ' ' * (15 - len(name)) - #print name+'Palette:'+spacing+' INCBIN "'+dir+filename+'"' - - - pal_data = [] - for byte in range(pal_length): - pal_data.append(ord(rom[address])) - address += 1 - - filename = 'shiny.pal' - to_file('../'+dir+filename, pal_data) - - spacing = ' ' * (10 - len(name)) - #print name+'ShinyPalette:'+spacing+' INCBIN "'+dir+filename+'"' - - -def dump_trainer_pals(): - rom = load_rom() - - pals = 0xb0d2 - pal_length = 0x4 - for trainer in range(67): - - name = trainers.trainer_group_names[trainer+1]['constant'].title().replace('_','') - num = str(trainer).zfill(3) - dir = 'gfx/trainers/' - - address = pals + trainer*pal_length - - pal_data = [] - for byte in range(pal_length): - pal_data.append(ord(rom[address])) - address += 1 - - filename = num+'.pal' - to_file('../'+dir+filename, pal_data) - - spacing = ' ' * (12 - len(name)) - print name+'Palette:'+spacing+' INCBIN"'+dir+filename+'"' - - - -def flatten(planar): - """ - Flatten planar 2bpp image data into a quaternary pixel map. - """ - strips = [] - for bottom, top in split(planar, 2): - bottom = ord(bottom) - top = ord(top) - strip = [] - for i in xrange(7,-1,-1): - color = ( - (bottom >> i & 1) + - (top *2 >> i & 2) - ) - strip += [color] - strips += strip - return strips - - -def to_lines(image, width): - """ - Convert a tiled quaternary pixel map to lines of quaternary pixels. 
- """ - tile_width = 8 - tile_height = 8 - num_columns = width / tile_width - height = len(image) / width - - lines = [] - for cur_line in xrange(height): - tile_row = cur_line / tile_height - line = [] - for column in xrange(num_columns): - anchor = ( - num_columns * tile_row * tile_width * tile_height + - column * tile_width * tile_height + - cur_line % tile_height * tile_width - ) - line += image[anchor : anchor + tile_width] - lines += [line] - return lines - - -def dmg2rgb(word): - """ - For PNGs. - """ - def shift(value): - while True: - yield value & (2**5 - 1) - value >>= 5 - word = shift(word) - # distribution is less even w/ << 3 - red, green, blue = [int(color * 8.25) for color in [word.next() for _ in xrange(3)]] - alpha = 255 - return (red, green, blue, alpha) - - -def rgb_to_dmg(color): - """ - For PNGs. - """ - word = (color['r'] / 8) - word += (color['g'] / 8) << 5 - word += (color['b'] / 8) << 10 - return word - - -def pal_to_png(filename): - """ - Interpret a .pal file as a png palette. - """ - with open(filename) as rgbs: - colors = read_rgb_macros(rgbs.readlines()) - a = 255 - palette = [] - for color in colors: - # even distribution over 000-255 - r, g, b = [int(hue * 8.25) for hue in color] - palette += [(r, g, b, a)] - white = (255,255,255,255) - black = (000,000,000,255) - if white not in palette and len(palette) < 4: - palette = [white] + palette - if black not in palette and len(palette) < 4: - palette = palette + [black] - return palette - - -def png_to_rgb(palette): - """ - Convert a png palette to rgb macros. - """ - output = '' - for color in palette: - r, g, b = [color[c] / 8 for c in 'rgb'] - output += '\tRGB ' + ', '.join(['%.2d' % hue for hue in (r, g, b)]) - output += '\n' - return output - - -def read_filename_arguments(filename): - int_args = { - 'w': 'width', - 'h': 'height', - 't': 'tile_padding', - } - parsed_arguments = {} - arguments = os.path.splitext(filename)[0].split('.')[1:] - for argument in arguments: - arg = argument[0] - param = argument[1:] - if param.isdigit(): - arg = int_args.get(arg, False) - if arg: - parsed_arguments[arg] = int(param) - elif len(argument) == 3: - w, x, h = argument[:3] - if w.isdigit() and h.isdigit() and x == 'x': - parsed_arguments['pic_dimensions'] = (int(w), int(h)) - elif argument == 'interleave': - parsed_arguments['interleave'] = True - elif argument == 'norepeat': - parsed_arguments['norepeat'] = True - elif argument == 'arrange': - parsed_arguments['norepeat'] = True - parsed_arguments['tilemap'] = True - return parsed_arguments - - -def export_2bpp_to_png(filein, fileout=None, pal_file=None, height=0, width=0, tile_padding=0, pic_dimensions=None): - - if fileout == None: - fileout = os.path.splitext(filein)[0] + '.png' - - image = open(filein, 'rb').read() - - arguments = { - 'width': width, - 'height': height, - 'pal_file': pal_file, - 'tile_padding': tile_padding, - 'pic_dimensions': pic_dimensions, - } - arguments.update(read_filename_arguments(filein)) - - if pal_file == None: - if os.path.exists(os.path.splitext(fileout)[0]+'.pal'): - arguments['pal_file'] = os.path.splitext(fileout)[0]+'.pal' - - result = convert_2bpp_to_png(image, **arguments) - width, height, palette, greyscale, bitdepth, px_map = result - - w = png.Writer( - width, - height, - palette=palette, - compression=9, - greyscale=greyscale, - bitdepth=bitdepth - ) - with open(fileout, 'wb') as f: - w.write(f, px_map) - - -def convert_2bpp_to_png(image, **kwargs): - """ - Convert a planar 2bpp graphic to png. 
- """ - - width = kwargs.get('width', 0) - height = kwargs.get('height', 0) - tile_padding = kwargs.get('tile_padding', 0) - pic_dimensions = kwargs.get('pic_dimensions', None) - pal_file = kwargs.get('pal_file', None) - interleave = kwargs.get('interleave', False) - - # Width must be specified to interleave. - if interleave and width: - image = ''.join(interleave_tiles(image, width / 8)) - - # Pad the image by a given number of tiles if asked. - image += chr(0) * 0x10 * tile_padding - - # Some images are transposed in blocks. - if pic_dimensions: - w, h = pic_dimensions - if not width: width = w * 8 - - pic_length = w * h * 0x10 - - trailing = len(image) % pic_length - - pic = [] - for i in xrange(0, len(image) - trailing, pic_length): - pic += transpose_tiles(image[i:i+pic_length], w) - image = ''.join(pic) + image[len(image) - trailing:] - - # Pad out trailing lines. - image += chr(0) * 0x10 * ((w - (len(image) / 0x10) % h) % w) - - def px_length(img): - return len(img) * 4 - def tile_length(img): - return len(img) * 4 / (8*8) - - if width and height: - tile_width = width / 8 - more_tile_padding = (tile_width - (tile_length(image) % tile_width or tile_width)) - image += chr(0) * 0x10 * more_tile_padding - - elif width and not height: - tile_width = width / 8 - more_tile_padding = (tile_width - (tile_length(image) % tile_width or tile_width)) - image += chr(0) * 0x10 * more_tile_padding - height = px_length(image) / width - - elif height and not width: - tile_height = height / 8 - more_tile_padding = (tile_height - (tile_length(image) % tile_height or tile_height)) - image += chr(0) * 0x10 * more_tile_padding - width = px_length(image) / height - - # at least one dimension should be given - if width * height != px_length(image): - # look for possible combos of width/height that would form a rectangle - matches = [] - # Height need not be divisible by 8, but width must. - # See pokered gfx/minimize_pic.1bpp. - for w in range(8, px_length(image) / 2 + 1, 8): - h = px_length(image) / w - if w * h == px_length(image): - matches += [(w, h)] - # go for the most square image - if len(matches): - width, height = sorted(matches, key= lambda (w, h): (h % 8 != 0, w + h))[0] # favor height - else: - raise Exception, 'Image can\'t be divided into tiles (%d px)!' 
% (px_length(image)) - - # convert tiles to lines - lines = to_lines(flatten(image), width) - - if pal_file == None: - palette = None - greyscale = True - bitdepth = 2 - px_map = [[3 - pixel for pixel in line] for line in lines] - - else: # gbc color - palette = pal_to_png(pal_file) - greyscale = False - bitdepth = 8 - px_map = [[pixel for pixel in line] for line in lines] - - return width, height, palette, greyscale, bitdepth, px_map - - -def export_png_to_2bpp(filein, fileout=None, palout=None, tile_padding=0, pic_dimensions=None): - - arguments = { - 'tile_padding': tile_padding, - 'pic_dimensions': pic_dimensions, - } - arguments.update(read_filename_arguments(filein)) - - image, palette, tmap = png_to_2bpp(filein, **arguments) - - if fileout == None: - fileout = os.path.splitext(filein)[0] + '.2bpp' - to_file(fileout, image) - - if tmap != None: - mapout = os.path.splitext(fileout)[0] + '.tilemap' - to_file(mapout, tmap) - - if palout == None: - palout = os.path.splitext(fileout)[0] + '.pal' - export_palette(palette, palout) - - -def get_image_padding(width, height, wstep=8, hstep=8): - - padding = { - 'left': 0, - 'right': 0, - 'top': 0, - 'bottom': 0, - } - - if width % wstep and width >= wstep: - pad = float(width % wstep) / 2 - padding['left'] = int(ceil(pad)) - padding['right'] = int(floor(pad)) - - if height % hstep and height >= hstep: - pad = float(height % hstep) / 2 - padding['top'] = int(ceil(pad)) - padding['bottom'] = int(floor(pad)) - - return padding - - -def png_to_2bpp(filein, **kwargs): - """ - Convert a png image to planar 2bpp. - """ - - tile_padding = kwargs.get('tile_padding', 0) - pic_dimensions = kwargs.get('pic_dimensions', None) - interleave = kwargs.get('interleave', False) - norepeat = kwargs.get('norepeat', False) - tilemap = kwargs.get('tilemap', False) - - with open(filein, 'rb') as data: - width, height, rgba, info = png.Reader(data).asRGBA8() - rgba = list(rgba) - greyscale = info['greyscale'] - - # png.Reader returns flat pixel data. 
Nested is easier to work with - len_px = 4 # rgba - image = [] - palette = [] - for line in rgba: - newline = [] - for px in xrange(0, len(line), len_px): - color = { 'r': line[px ], - 'g': line[px+1], - 'b': line[px+2], - 'a': line[px+3], } - newline += [color] - if color not in palette: - palette += [color] - image += [newline] - - assert len(palette) <= 4, 'Palette should be 4 colors, is really %d' % len(palette) - - # Pad out smaller palettes with greyscale colors - hues = { - 'white': { 'r': 0xff, 'g': 0xff, 'b': 0xff, 'a': 0xff }, - 'black': { 'r': 0x00, 'g': 0x00, 'b': 0x00, 'a': 0xff }, - 'grey': { 'r': 0x55, 'g': 0x55, 'b': 0x55, 'a': 0xff }, - 'gray': { 'r': 0xaa, 'g': 0xaa, 'b': 0xaa, 'a': 0xff }, - } - for hue in hues.values(): - if len(palette) >= 4: - break - if hue not in palette: - palette += [hue] - - # Sort palettes by luminance - def luminance(color): - rough = { 'r': 4.7, - 'g': 1.4, - 'b': 13.8, } - return sum(color[key] * rough[key] for key in rough.keys()) - palette.sort(key=luminance) - - # Game Boy palette order - palette.reverse() - - # Map pixels to quaternary color ids - padding = get_image_padding(width, height) - width += padding['left'] + padding['right'] - height += padding['top'] + padding['bottom'] - pad = [0] - - qmap = [] - qmap += pad * width * padding['top'] - for line in image: - qmap += pad * padding['left'] - for color in line: - qmap += [palette.index(color)] - qmap += pad * padding['right'] - qmap += pad * width * padding['bottom'] - - # Graphics are stored in tiles instead of lines - tile_width = 8 - tile_height = 8 - num_columns = max(width, tile_width) / tile_width - num_rows = max(height, tile_height) / tile_height - image = [] - - for row in xrange(num_rows): - for column in xrange(num_columns): - - # Split it up into strips to convert to planar data - for strip in xrange(min(tile_height, height)): - anchor = ( - row * num_columns * tile_width * tile_height + - column * tile_width + - strip * width - ) - line = qmap[anchor : anchor + tile_width] - bottom, top = 0, 0 - for bit, quad in enumerate(line): - bottom += (quad & 1) << (7 - bit) - top += (quad /2 & 1) << (7 - bit) - image += [bottom, top] - - if pic_dimensions: - w, h = pic_dimensions - - tiles = get_tiles(image) - pic_length = w * h - tile_width = width / 8 - trailing = len(tiles) % pic_length - new_image = [] - for block in xrange(len(tiles) / pic_length): - offset = (h * tile_width) * ((block * w) / tile_width) + ((block * w) % tile_width) - pic = [] - for row in xrange(h): - index = offset + (row * tile_width) - pic += tiles[index:index + w] - new_image += transpose(pic, w) - new_image += tiles[len(tiles) - trailing:] - image = connect(new_image) - - # Remove any tile padding used to make the png rectangular. - image = image[:len(image) - tile_padding * 0x10] - - if interleave: - image = deinterleave_tiles(image, num_columns) - - if norepeat: - image, tmap = condense_tiles_to_map(image) - if not tilemap: - tmap = None - - return image, palette, tmap - - -def export_palette(palette, filename): - """ - Export a palette from png to rgb macros in a .pal file. - """ - - if os.path.exists(filename): - - # Pic palettes are 2 colors (black/white are added later). 
- with open(filename) as rgbs: - colors = read_rgb_macros(rgbs.readlines()) - - if len(colors) == 2: - palette = palette[1:3] - - text = png_to_rgb(palette) - with open(filename, 'w') as out: - out.write(text) - - -def png_to_lz(filein): - - name = os.path.splitext(filein)[0] - - export_png_to_2bpp(filein) - image = open(name+'.2bpp', 'rb').read() - to_file(name+'.2bpp'+'.lz', Compressed(image).output) - - - -def convert_2bpp_to_1bpp(data): - """ - Convert planar 2bpp image data to 1bpp. Assume images are two colors. - """ - return data[::2] - -def convert_1bpp_to_2bpp(data): - """ - Convert 1bpp image data to planar 2bpp (black/white). - """ - output = [] - for i in data: - output += [i, i] - return output - - -def export_2bpp_to_1bpp(filename): - name, extension = os.path.splitext(filename) - image = open(filename, 'rb').read() - image = convert_2bpp_to_1bpp(image) - to_file(name + '.1bpp', image) - -def export_1bpp_to_2bpp(filename): - name, extension = os.path.splitext(filename) - image = open(filename, 'rb').read() - image = convert_1bpp_to_2bpp(image) - to_file(name + '.2bpp', image) - - -def export_1bpp_to_png(filename, fileout=None): - - if fileout == None: - fileout = os.path.splitext(filename)[0] + '.png' - - arguments = read_filename_arguments(filename) - - image = open(filename, 'rb').read() - image = convert_1bpp_to_2bpp(image) - - result = convert_2bpp_to_png(image, **arguments) - width, height, palette, greyscale, bitdepth, px_map = result - - w = png.Writer(width, height, palette=palette, compression=9, greyscale=greyscale, bitdepth=bitdepth) - with open(fileout, 'wb') as f: - w.write(f, px_map) - - -def export_png_to_1bpp(filename, fileout=None): - - if fileout == None: - fileout = os.path.splitext(filename)[0] + '.1bpp' - - arguments = read_filename_arguments(filename) - image = png_to_1bpp(filename, **arguments) - - to_file(fileout, image) - -def png_to_1bpp(filename, **kwargs): - image, palette, tmap = png_to_2bpp(filename, **kwargs) - return convert_2bpp_to_1bpp(image) - - -def mass_to_png(debug=False): - # greyscale - for root, dirs, files in os.walk('./gfx/'): - for name in files: - if debug: print os.path.splitext(name), os.path.join(root, name) - if os.path.splitext(name)[1] == '.2bpp': - export_2bpp_to_png(os.path.join(root, name)) - -def mass_to_colored_png(debug=False): - # greyscale, unless a palette is detected - for root, dirs, files in os.walk('./gfx/'): - if 'pics' not in root and 'trainers' not in root: - for name in files: - if debug: print os.path.splitext(name), os.path.join(root, name) - if os.path.splitext(name)[1] == '.2bpp': - export_2bpp_to_png(os.path.join(root, name)) - os.utime(os.path.join(root, name), None) - elif os.path.splitext(name)[1] == '.1bpp': - export_1bpp_to_png(os.path.join(root, name)) - os.utime(os.path.join(root, name), None) - - # only monster and trainer pics for now - for root, dirs, files in os.walk('./gfx/pics/'): - for name in files: - if debug: print os.path.splitext(name), os.path.join(root, name) - if os.path.splitext(name)[1] == '.2bpp': - if 'normal.pal' in files: - export_2bpp_to_png(os.path.join(root, name), None, os.path.join(root, 'normal.pal')) - else: - export_2bpp_to_png(os.path.join(root, name)) - os.utime(os.path.join(root, name), None) - - for root, dirs, files in os.walk('./gfx/trainers/'): - for name in files: - if debug: print os.path.splitext(name), os.path.join(root, name) - if os.path.splitext(name)[1] == '.2bpp': - export_2bpp_to_png(os.path.join(root, name)) - os.utime(os.path.join(root, name), None) 
- - -def mass_decompress(debug=False): - for root, dirs, files in os.walk('./gfx/'): - for name in files: - if 'lz' in name: - if '/pics' in root: - if 'front' in name: - id = root.split('pics/')[1][:3] - if id != 'egg': - with open(os.path.join(root, name), 'rb') as lz: de = Decompressed(lz.read(), 'vert', sizes[int(id)-1]) - else: - with open(os.path.join(root, name), 'rb') as lz: de = Decompressed(lz.read(), 'vert', 4) - to_file(os.path.join(root, 'front.2bpp'), de.pic) - to_file(os.path.join(root, 'tiles.2bpp'), de.animtiles) - elif 'back' in name: - with open(os.path.join(root, name), 'rb') as lz: de = Decompressed(lz.read(), 'vert') - to_file(os.path.join(root, 'back.2bpp'), de.output) - elif '/trainers' in root or '/fx' in root: - with open(os.path.join(root, name), 'rb') as lz: de = Decompressed(lz.read(), 'vert') - to_file(os.path.join(root, os.path.splitext(name)[0]+'.2bpp'), de.output) - else: - with open(os.path.join(root, name), 'rb') as lz: de = Decompressed(lz.read()) - to_file(os.path.join(root, os.path.splitext(name)[0]+'.2bpp'), de.output) - os.utime(os.path.join(root, name), None) - -def append_terminator_to_lzs(directory): - # fix lzs that were extracted with a missing terminator - for root, dirs, files in os.walk(directory): - for file in files: - if '.lz' in file: - data = open(root+file,'rb').read() - if data[-1] != chr(0xff): - data += chr(0xff) - new = open(root+file,'wb') - new.write(data) - new.close() - -def export_lz_to_png(filename): - """ - Convert a lz file to png. Dump a 2bpp file too. - """ - assert filename[-3:] == ".lz" - lz_data = open(filename, "rb").read() - - bpp = Decompressed(lz_data).output - bpp_filename = os.path.splitext(filename)[0] - to_file(bpp_filename, bpp) - - export_2bpp_to_png(bpp_filename) - - # touch the lz file so it doesn't get remade - os.utime(filename, None) - -def dump_tileset_pngs(): - """ - Convert .lz format tilesets into .png format tilesets. - - Also, leaves a bunch of wonderful .2bpp files everywhere for your amusement. - """ - for tileset_id in range(37): - tileset_filename = "./gfx/tilesets/" + str(tileset_id).zfill(2) + ".lz" - export_lz_to_png(tileset_filename) - -def decompress_frontpic(lz_file): - """ - Convert the pic portion of front.lz to front.2bpp - """ - lz = open(lz_file, 'rb').read() - to_file(Decompressed(lz).pic, os.path.splitext(filein)[0] + '.2bpp') - -def decompress_frontpic_anim(lz_file): - """ - Convert the animation tile portion of front.lz to tiles.2bpp - """ - lz = open(lz_file, 'rb').read() - to_file(Decompressed(lz).animtiles, 'tiles.2bpp') - -def expand_pic_palettes(): - """ - Add white and black to palette files with fewer than 4 colors. - - Pokemon Crystal only defines two colors for a pic palette to - save space, filling in black/white at runtime. - Instead of managing palette files of varying length, black - and white are added to pic palettes and excluded from incbins. 
- """ - for root, dirs, files in os.walk('./gfx/'): - if 'gfx/pics' in root or 'gfx/trainers' in root: - for name in files: - if os.path.splitext(name)[1] == '.pal': - filename = os.path.join(root, name) - palette = bytearray(open(filename, 'rb').read()) - w = bytearray([0xff, 0x7f]) - b = bytearray([0x00, 0x00]) - if len(palette) == 4: - with open(filename, 'wb') as out: - out.write(w + palette + b) - - -def convert_to_2bpp(filenames=[]): - for filename in filenames: - filename, name, extension = try_decompress(filename) - if extension == '.1bpp': - export_1bpp_to_2bpp(filename) - elif extension == '.2bpp': - pass - elif extension == '.png': - export_png_to_2bpp(filename) - else: - raise Exception, "Don't know how to convert {} to 2bpp!".format(filename) - -def convert_to_1bpp(filenames=[]): - for filename in filenames: - filename, name, extension = try_decompress(filename) - if extension == '.1bpp': - pass - elif extension == '.2bpp': - export_2bpp_to_1bpp(filename) - elif extension == '.png': - export_png_to_1bpp(filename) - else: - raise Exception, "Don't know how to convert {} to 1bpp!".format(filename) - -def convert_to_png(filenames=[]): - for filename in filenames: - filename, name, extension = try_decompress(filename) - if extension == '.1bpp': - export_1bpp_to_png(filename) - elif extension == '.2bpp': - export_2bpp_to_png(filename) - elif extension == '.png': - pass - else: - raise Exception, "Don't know how to convert {} to png!".format(filename) - -def compress(filenames=[]): - for filename in filenames: - data = open(filename, 'rb').read() - lz_data = Compressed(data).output - to_file(filename + '.lz', lz_data) - -def decompress(filenames=[]): - for filename in filenames: - name, extension = os.path.splitext(filename) - lz_data = open(filename, 'rb').read() - data = Decompressed(lz_data).output - to_file(name, data) - -def try_decompress(filename): - """ - Try to decompress a graphic when determining the filetype. - This skips the manual unlz step when attempting - to convert lz-compressed graphics to png. - """ - name, extension = os.path.splitext(filename) - if extension == '.lz': - decompress([filename]) - filename = name - name, extension = os.path.splitext(filename) - return filename, name, extension - - -def main(): - ap = argparse.ArgumentParser() - ap.add_argument('mode') - ap.add_argument('filenames', nargs='*') - args = ap.parse_args() - - method = { - '2bpp': convert_to_2bpp, - '1bpp': convert_to_1bpp, - 'png': convert_to_png, - 'lz': compress, - 'unlz': decompress, - }.get(args.mode, None) - - if method == None: - raise Exception, "Unknown conversion method!" 
- - method(args.filenames) - - -if __name__ == "__main__": - main() - diff --git a/gfx/game_boy.norepeat.png b/gfx/game_boy.png Binary files differindex 62144956..62144956 100644 --- a/gfx/game_boy.norepeat.png +++ b/gfx/game_boy.png diff --git a/gfx/pikachu/unknown_e41d2.png b/gfx/pikachu/unknown_e41d2.png Binary files differindex df027a11..7d57c684 100644 --- a/gfx/pikachu/unknown_e41d2.png +++ b/gfx/pikachu/unknown_e41d2.png diff --git a/gfx/pikachu/unknown_e444b.png b/gfx/pikachu/unknown_e444b.png Binary files differindex 26128b23..dd13d24c 100644 --- a/gfx/pikachu/unknown_e444b.png +++ b/gfx/pikachu/unknown_e444b.png diff --git a/gfx/pikachu/unknown_e4a99.png b/gfx/pikachu/unknown_e4a99.png Binary files differindex 202cbd2f..75a32629 100644 --- a/gfx/pikachu/unknown_e4a99.png +++ b/gfx/pikachu/unknown_e4a99.png diff --git a/gfx/pokemon_yellow.t6.png b/gfx/pokemon_yellow.png Binary files differindex 7c86a1ec..7c86a1ec 100644 --- a/gfx/pokemon_yellow.t6.png +++ b/gfx/pokemon_yellow.png diff --git a/gfx/surfing_pikachu_1.t4.png b/gfx/surfing_pikachu_1.png Binary files differindex abaf4ba4..abaf4ba4 100644 --- a/gfx/surfing_pikachu_1.t4.png +++ b/gfx/surfing_pikachu_1.png diff --git a/gfx/surfing_pikachu_1c.t5.png b/gfx/surfing_pikachu_1c.png Binary files differindex eeedb243..eeedb243 100644 --- a/gfx/surfing_pikachu_1c.t5.png +++ b/gfx/surfing_pikachu_1c.png diff --git a/gfx/surfing_pikachu_3.t1.png b/gfx/surfing_pikachu_3.png Binary files differindex 91d4a607..91d4a607 100644 --- a/gfx/surfing_pikachu_3.t1.png +++ b/gfx/surfing_pikachu_3.png diff --git a/gfx/theend.interleave.png b/gfx/theend.png Binary files differindex 025ebb61..025ebb61 100644 --- a/gfx/theend.interleave.png +++ b/gfx/theend.png diff --git a/gfx/tilesets/cavern.t14.png b/gfx/tilesets/cavern.png Binary files differindex 5397fd65..5397fd65 100644 --- a/gfx/tilesets/cavern.t14.png +++ b/gfx/tilesets/cavern.png diff --git a/gfx/tilesets/cemetery.t4.png b/gfx/tilesets/cemetery.png Binary files differindex b1df015a..b1df015a 100644 --- a/gfx/tilesets/cemetery.t4.png +++ b/gfx/tilesets/cemetery.png diff --git a/gfx/tilesets/club.t5.png b/gfx/tilesets/club.png Binary files differindex 05a55879..05a55879 100644 --- a/gfx/tilesets/club.t5.png +++ b/gfx/tilesets/club.png diff --git a/gfx/tilesets/house.t2.png b/gfx/tilesets/house.png Binary files differindex b5d67c95..b5d67c95 100644 --- a/gfx/tilesets/house.t2.png +++ b/gfx/tilesets/house.png diff --git a/gfx/tilesets/interior.t1.png b/gfx/tilesets/interior.png Binary files differindex 3123d81c..3123d81c 100644 --- a/gfx/tilesets/interior.t1.png +++ b/gfx/tilesets/interior.png diff --git a/gfx/tilesets/lab.t4.png b/gfx/tilesets/lab.png Binary files differindex 4a114337..4a114337 100644 --- a/gfx/tilesets/lab.t4.png +++ b/gfx/tilesets/lab.png diff --git a/gfx/tilesets/lobby.t2.png b/gfx/tilesets/lobby.png Binary files differindex f2a1c180..f2a1c180 100644 --- a/gfx/tilesets/lobby.t2.png +++ b/gfx/tilesets/lobby.png diff --git a/gfx/tilesets/mansion.t2.png b/gfx/tilesets/mansion.png Binary files differindex 557c23b4..557c23b4 100644 --- a/gfx/tilesets/mansion.t2.png +++ b/gfx/tilesets/mansion.png diff --git a/gfx/tilesets/museum.t1.png b/gfx/tilesets/museum.png Binary files differindex 372e6751..372e6751 100644 --- a/gfx/tilesets/museum.t1.png +++ b/gfx/tilesets/museum.png diff --git a/gfx/tilesets/plateau.t10.png b/gfx/tilesets/plateau.png Binary files differindex b87162d8..b87162d8 100644 --- a/gfx/tilesets/plateau.t10.png +++ b/gfx/tilesets/plateau.png diff --git 
a/gfx/tilesets/redshouse2.t7.png b/gfx/tilesets/redshouse2.png Binary files differindex 5f210d04..5f210d04 100644 --- a/gfx/tilesets/redshouse2.t7.png +++ b/gfx/tilesets/redshouse2.png diff --git a/gfx/tilesets/ship.t6.png b/gfx/tilesets/ship.png Binary files differindex cf5b6b95..cf5b6b95 100644 --- a/gfx/tilesets/ship.t6.png +++ b/gfx/tilesets/ship.png diff --git a/gfx/tilesets/shipport.t2.png b/gfx/tilesets/shipport.png Binary files differindex fa5bbb27..fa5bbb27 100644 --- a/gfx/tilesets/shipport.t2.png +++ b/gfx/tilesets/shipport.png diff --git a/gfx/tilesets/underground.t7.png b/gfx/tilesets/underground.png Binary files differindex baa86c17..baa86c17 100644 --- a/gfx/tilesets/underground.t7.png +++ b/gfx/tilesets/underground.png @@ -825,7 +825,7 @@ INCLUDE "data/cries.asm" INCLUDE "engine/battle/trainer_ai.asm" INCLUDE "engine/battle/draw_hud_pokeball_gfx.asm" -TradingAnimationGraphics: INCBIN "gfx/game_boy.norepeat.2bpp" +TradingAnimationGraphics: INCBIN "gfx/game_boy.2bpp" INCBIN "gfx/link_cable.2bpp" TradingAnimationGraphicsEnd: @@ -1758,20 +1758,22 @@ INCLUDE "engine/hidden_object_functions18.asm" SECTION "bank19", ROMX Overworld_GFX: INCBIN "gfx/tilesets/overworld.2bpp" +ds 32 Overworld_Block: INCBIN "gfx/blocksets/overworld.bst" RedsHouse1_GFX: -RedsHouse2_GFX: INCBIN "gfx/tilesets/redshouse2.t7.2bpp" +RedsHouse2_GFX: INCBIN "gfx/tilesets/redshouse2.2bpp" +ds 16 RedsHouse1_Block: RedsHouse2_Block: INCBIN "gfx/blocksets/redshouse2.bst" -House_GFX: INCBIN "gfx/tilesets/house.t2.2bpp" +House_GFX: INCBIN "gfx/tilesets/house.2bpp" House_Block: INCBIN "gfx/blocksets/house.bst" -Mansion_GFX: INCBIN "gfx/tilesets/mansion.t2.2bpp" +Mansion_GFX: INCBIN "gfx/tilesets/mansion.2bpp" Mansion_Block: INCBIN "gfx/blocksets/mansion.bst" -ShipPort_GFX: INCBIN "gfx/tilesets/shipport.t2.2bpp" +ShipPort_GFX: INCBIN "gfx/tilesets/shipport.2bpp" ShipPort_Block: INCBIN "gfx/blocksets/shipport.bst" -Interior_GFX: INCBIN "gfx/tilesets/interior.t1.2bpp" +Interior_GFX: INCBIN "gfx/tilesets/interior.2bpp" Interior_Block: INCBIN "gfx/blocksets/interior.bst" -Plateau_GFX: INCBIN "gfx/tilesets/plateau.t10.2bpp" +Plateau_GFX: INCBIN "gfx/tilesets/plateau.2bpp" Plateau_Block: INCBIN "gfx/blocksets/plateau.bst" @@ -1789,7 +1791,7 @@ Mart_Block: Pokecenter_Block: INCBIN "gfx/blocksets/pokecenter.bst" ForestGate_GFX: Gate_GFX: -Museum_GFX: INCBIN "gfx/tilesets/museum.t1.2bpp" +Museum_GFX: INCBIN "gfx/tilesets/museum.2bpp" ForestGate_Block: Gate_Block: Museum_Block: INCBIN "gfx/blocksets/museum.bst" @@ -1801,19 +1803,19 @@ Facility_Block: INCBIN "gfx/blocksets/facility.bst" SECTION "bank1B", ROMX -Cemetery_GFX: INCBIN "gfx/tilesets/cemetery.t4.2bpp" +Cemetery_GFX: INCBIN "gfx/tilesets/cemetery.2bpp" Cemetery_Block: INCBIN "gfx/blocksets/cemetery.bst" -Cavern_GFX: INCBIN "gfx/tilesets/cavern.t14.2bpp" +Cavern_GFX: INCBIN "gfx/tilesets/cavern.2bpp" Cavern_Block: INCBIN "gfx/blocksets/cavern.bst" -Lobby_GFX: INCBIN "gfx/tilesets/lobby.t2.2bpp" +Lobby_GFX: INCBIN "gfx/tilesets/lobby.2bpp" Lobby_Block: INCBIN "gfx/blocksets/lobby.bst" -Ship_GFX: INCBIN "gfx/tilesets/ship.t6.2bpp" +Ship_GFX: INCBIN "gfx/tilesets/ship.2bpp" Ship_Block: INCBIN "gfx/blocksets/ship.bst" -Lab_GFX: INCBIN "gfx/tilesets/lab.t4.2bpp" +Lab_GFX: INCBIN "gfx/tilesets/lab.2bpp" Lab_Block: INCBIN "gfx/blocksets/lab.bst" -Club_GFX: INCBIN "gfx/tilesets/club.t5.2bpp" +Club_GFX: INCBIN "gfx/tilesets/club.2bpp" Club_Block: INCBIN "gfx/blocksets/club.bst" -Underground_GFX: INCBIN "gfx/tilesets/underground.t7.2bpp" +Underground_GFX: INCBIN 
"gfx/tilesets/underground.2bpp" Underground_Block: INCBIN "gfx/blocksets/underground.bst" @@ -2112,7 +2114,7 @@ INCLUDE "engine/overworld/is_player_just_outside_map.asm" INCLUDE "engine/printer.asm" INCLUDE "engine/diploma_3a.asm" -SurfingPikachu3Graphics: INCBIN "gfx/surfing_pikachu_3.t1.2bpp" +SurfingPikachu3Graphics: INCBIN "gfx/surfing_pikachu_3.2bpp" SurfingPikachu3GraphicsEnd: INCLUDE "engine/unknown_ea3ea.asm" diff --git a/tools/.gitignore b/tools/.gitignore new file mode 100644 index 00000000..967af106 --- /dev/null +++ b/tools/.gitignore @@ -0,0 +1,3 @@ +scan_includes +gfx +pkmncompress diff --git a/tools/Makefile b/tools/Makefile new file mode 100644 index 00000000..13bab1fb --- /dev/null +++ b/tools/Makefile @@ -0,0 +1,16 @@ +.PHONY: all clean + +CC := gcc +CFLAGS := -O3 -std=c99 -Wall -Wextra + +tools := scan_includes gfx pkmncompress + +all: $(tools) + @: + +clean: + rm -f $(tools) + +gfx: common.h +%: %.c + $(CC) $(CFLAGS) -o $@ $< diff --git a/tools/common.h b/tools/common.h new file mode 100644 index 00000000..4da0b2ef --- /dev/null +++ b/tools/common.h @@ -0,0 +1,40 @@ +#ifndef GUARD_COMMON_H +#define GUARD_COMMON_H + +int __getopt_long_i__; +#define getopt_long(c, v, s, l) getopt_long(c, v, s, l, &__getopt_long_i__) + +FILE *fopen_verbose(char *filename, char *mode) { + FILE *f = fopen(filename, mode); + if (!f) { + fprintf(stderr, "Could not open file: \"%s\"\n", filename); + } + return f; +} + +uint8_t *read_u8(char *filename, int *size) { + FILE *f = fopen_verbose(filename, "rb"); + if (!f) { + exit(1); + } + fseek(f, 0, SEEK_END); + *size = ftell(f); + rewind(f); + uint8_t *data = malloc(*size); + if (*size != (int)fread(data, 1, *size, f)) { + fprintf(stderr, "Could not read file: \"%s\"\n", filename); + exit(1); + } + fclose(f); + return data; +} + +void write_u8(char *filename, uint8_t *data, int size) { + FILE *f = fopen_verbose(filename, "wb"); + if (f) { + fwrite(data, 1, size, f); + fclose(f); + } +} + +#endif // GUARD_COMMON_H diff --git a/tools/gfx.c b/tools/gfx.c new file mode 100644 index 00000000..8c4066ab --- /dev/null +++ b/tools/gfx.c @@ -0,0 +1,296 @@ +#include <stdio.h> +#include <stdlib.h> +#include <stdbool.h> +#include <getopt.h> +#include <string.h> +#include <stdint.h> + +#include "common.h" + +static void usage(void) { + fprintf(stderr, "Usage: gfx [--trim-whitespace] [--remove-whitespace] [--interleave] [--remove-duplicates [--keep-whitespace]] [--remove-xflip] [--remove-yflip] [--png filename] [-d depth] [-h] [-o outfile] infile\n"); +} + +static void error(char *message) { + fputs(message, stderr); + fputs("\n", stderr); +} + +struct Options { + int trim_whitespace; + int remove_whitespace; + int help; + char *outfile; + int depth; + int interleave; + int remove_duplicates; + int keep_whitespace; + int remove_xflip; + int remove_yflip; + char *png_file; +}; + +struct Options Options = { + .depth = 2, +}; + +void get_args(int argc, char *argv[]) { + struct option long_options[] = { + {"remove-whitespace", no_argument, &Options.remove_whitespace, 1}, + {"trim-whitespace", no_argument, &Options.trim_whitespace, 1}, + {"interleave", no_argument, &Options.interleave, 1}, + {"remove-duplicates", no_argument, &Options.remove_duplicates, 1}, + {"keep-whitespace", no_argument, &Options.keep_whitespace, 1}, + {"remove-xflip", no_argument, &Options.remove_xflip, 1}, + {"remove-yflip", no_argument, &Options.remove_yflip, 1}, + {"png", required_argument, 0, 'p'}, + {"depth", required_argument, 0, 'd'}, + {"help", no_argument, 0, 'h'}, + {0} + }; + for 
(int opt = 0; opt != -1;) { + switch (opt = getopt_long(argc, argv, "ho:d:p:", long_options)) { + case 'h': + Options.help = true; + break; + case 'o': + Options.outfile = optarg; + break; + case 'd': + Options.depth = strtoul(optarg, NULL, 0); + break; + case 'p': + Options.png_file = optarg; + break; + case 0: + case -1: + break; + default: + usage(); + exit(1); + break; + } + } +} + +struct Graphic { + int size; + uint8_t *data; +}; + +bool is_whitespace(uint8_t *tile, int tile_size) { + uint8_t WHITESPACE = 0; + for (int i = 0; i < tile_size; i++) { + if (tile[i] != WHITESPACE) { + return false; + } + } + return true; +} + +void trim_whitespace(struct Graphic *graphic) { + int tile_size = Options.depth * 8; + for (int i = graphic->size - tile_size; i > 0; i -= tile_size) { + if (is_whitespace(&graphic->data[i], tile_size)) { + graphic->size = i; + } else { + break; + } + } +} + +void remove_whitespace(struct Graphic *graphic) { + int tile_size = Options.depth * 8; + if (Options.interleave) tile_size *= 2; + int i = 0; + for (int j = 0; i < graphic->size && j < graphic->size; i += tile_size, j += tile_size) { + while (is_whitespace(&graphic->data[j], tile_size)) { + j += tile_size; + } + if (j >= graphic->size) { + break; + } + if (j > i) { + memcpy(&graphic->data[i], &graphic->data[j], tile_size); + } + } + graphic->size = i; +} + +bool tile_exists(uint8_t *tile, uint8_t *tiles, int tile_size, int num_tiles) { + for (int i = 0; i < num_tiles; i++) { + bool match = true; + for (int j = 0; j < tile_size; j++) { + if (tile[j] != tiles[i * tile_size + j]) { + match = false; + } + } + if (match) { + return true; + } + } + return false; +} + +void remove_duplicates(struct Graphic *graphic) { + int tile_size = Options.depth * 8; + if (Options.interleave) tile_size *= 2; + int num_tiles = 0; + for (int i = 0, j = 0; i < graphic->size && j < graphic->size; i += tile_size, j += tile_size) { + while (tile_exists(&graphic->data[j], graphic->data, tile_size, num_tiles)) { + if (Options.keep_whitespace && is_whitespace(&graphic->data[j], tile_size)) { + break; + } + j += tile_size; + } + if (j >= graphic->size) { + break; + } + if (j > i) { + memcpy(&graphic->data[i], &graphic->data[j], tile_size); + } + num_tiles++; + } + graphic->size = num_tiles * tile_size; +} + +bool flip_exists(uint8_t *tile, uint8_t *tiles, int tile_size, int num_tiles, bool xflip, bool yflip) { + uint8_t *flip = calloc(tile_size, 1); + int half_size = tile_size / 2; + for (int i = 0; i < tile_size; i++) { + int byte = i; + if (yflip) { + byte = tile_size - 1 - (i ^ 1); + if (Options.interleave && i < half_size) { + byte = half_size - 1 - (i ^ 1); + } + } + if (xflip) { + for (int bit = 0; bit < 8; bit++) { + flip[byte] |= ((tile[i] >> bit) & 1) << (7 - bit); + } + } else { + flip[byte] = tile[i]; + } + } + if (tile_exists(flip, tiles, tile_size, num_tiles)) { + return true; + } + return false; +} + +void remove_flip(struct Graphic *graphic, bool xflip, bool yflip) { + int tile_size = Options.depth * 8; + if (Options.interleave) tile_size *= 2; + int num_tiles = 0; + for (int i = 0, j = 0; i < graphic->size && j < graphic->size; i += tile_size, j += tile_size) { + while (flip_exists(&graphic->data[j], graphic->data, tile_size, num_tiles, xflip, yflip)) { + if (Options.keep_whitespace && is_whitespace(&graphic->data[j], tile_size)) { + break; + } + j += tile_size; + } + if (j >= graphic->size) { + break; + } + if (j > i) { + memcpy(&graphic->data[i], &graphic->data[j], tile_size); + } + num_tiles++; + } + graphic->size = 
num_tiles * tile_size; +} + +void interleave(struct Graphic *graphic, int width) { + int tile_size = Options.depth * 8; + int width_tiles = width / 8; + int num_tiles = graphic->size / tile_size; + uint8_t *interleaved = malloc(graphic->size); + for (int i = 0; i < num_tiles; i++) { + int tile = i * 2; + int row = i / width_tiles; + tile -= width_tiles * row; + if (row % 2) { + tile -= width_tiles; + tile += 1; + } + memcpy(&interleaved[tile * tile_size], &graphic->data[i * tile_size], tile_size); + } + graphic->size = num_tiles * tile_size; + memcpy(graphic->data, interleaved, graphic->size); + free(interleaved); +} + +int png_get_width(char *filename) { + FILE *f = fopen_verbose(filename, "rb"); + if (!f) { + exit(1); + } + + const int OFFSET_WIDTH = 16; + uint8_t bytes[4]; + fseek(f, OFFSET_WIDTH, SEEK_SET); + size_t size = 4; + size_t result = fread(bytes, 1, size, f); + fclose(f); + if (result != size) { + fprintf(stderr, "Could not read file at offset 0x%x: \"%s\"\n", OFFSET_WIDTH, filename); + exit(1); + } + + int width = 0; + for (int i = 0; i < 4; i++) { + width |= bytes[i] << (8 * (3 - i)); + } + return width; +} + + +int main(int argc, char *argv[]) { + get_args(argc, argv); + argc -= optind; + argv += optind; + if (Options.help) { + usage(); + return 0; + } + if (argc < 1) { + usage(); + exit(1); + } + char *infile = argv[0]; + struct Graphic graphic; + graphic.data = read_u8(infile, &graphic.size); + if (Options.trim_whitespace) { + trim_whitespace(&graphic); + } + if (Options.interleave) { + if (!Options.png_file) { + error("interleave: need --png to infer dimensions"); + usage(); + exit(1); + } + int width = png_get_width(Options.png_file); + interleave(&graphic, width); + } + if (Options.remove_duplicates) { + remove_duplicates(&graphic); + } + if (Options.remove_xflip) { + remove_flip(&graphic, true, false); + } + if (Options.remove_yflip) { + remove_flip(&graphic, false, true); + } + if (Options.remove_xflip && Options.remove_yflip) { + remove_flip(&graphic, true, true); + } + if (Options.remove_whitespace) { + remove_whitespace(&graphic); + } + if (Options.outfile) { + write_u8(Options.outfile, graphic.data, graphic.size); + } + free(graphic.data); + return 0; +} diff --git a/tools/gfx.py b/tools/gfx.py new file mode 100644 index 00000000..a3d95562 --- /dev/null +++ b/tools/gfx.py @@ -0,0 +1,267 @@ +#!/usr/bin/env python2 +# -*- coding: utf-8 -*- + +"""Supplementary scripts for graphics conversion.""" + +import os +import argparse + +from pokemontools import gfx, lz + + +# Graphics with inverted tilemaps that aren't covered by filepath_rules. 
+pics = [ + 'gfx/shrink1', + 'gfx/shrink2', +] + +def recursive_read(filename): + def recurse(filename_): + lines = [] + for line in open(filename_): + if 'include "' in line.lower(): + lines += recurse(line.split('"')[1]) + else: + lines += [line] + return lines + lines = recurse(filename) + return ''.join(lines) + +base_stats = None +def get_base_stats(): + global base_stats + if not base_stats: + base_stats = recursive_read('data/base_stats.asm') + return base_stats + +def get_pokemon_dimensions(path): + try: + byte = bytearray(open(path, 'rb').read())[0] + width = byte & 0xf + height = (byte >> 8) & 0xf + return width, height + except: + return None + + +def get_animation_frames(path=None, w=7, h=7, bitmask_path=None, frame_path=None): + """Retrieve animation frame tilemaps from generated frame/bitmask data.""" + if not path: + path = bitmask_path + if not path: + path = frame_path + if not path: + raise Exception("need at least one of path, bitmask_path or frame_path") + + if not bitmask_path: + bitmask_path = os.path.join(os.path.split(path)[0], 'bitmask.asm') + if not frame_path: + frame_path = os.path.join(os.path.split(path)[0], 'frames.asm') + bitmask_lines = open(bitmask_path).readlines() + frame_lines = open(frame_path).readlines() + + bitmask_length = w * h + + bitmasks = [] + bitmask = [] + for line in bitmask_lines: + if '\tdb ' in line: + value = line.split('\tdb ')[1].strip().replace('%', '0b') + value = int(value, 0) + #print line.strip(), value, len(bitmasks), len(bitmask) + for bit in xrange(8): + bitmask += [(value >> bit) & 1] + if len(bitmask) >= bitmask_length: + bitmasks += [bitmask] + bitmask = [] + break + if bitmask: + bitmasks += [bitmask] + + frames = [] + frame_labels = [] + i = 0 + for line in frame_lines: + if '\tdw ' in line: + frame_labels += [line.split('\tdw ')[1].strip()] + else: + for part in line.split(): + part = part.strip() + if part in frame_labels: + frames += [(part, i)] + i += 1 + + results = [] + + for label, i in frames: + result = [] + + # get the bitmask and tile ids for each frame + # don't care if we read past bounds, so just read the rest of the file + values = [] + for line in frame_lines[i:]: + if '\tdb ' in line: + values += line.split('\tdb ')[1].split(';')[0].split(',') + + #print bitmasks + #print values[0] + #print int(values[0].replace('$', '0x'), 0) + bitmask = bitmasks[int(values[0].replace('$', '0x'), 0)] + tiles = values[1:] + k = 0 + j = 0 + for bit in bitmask: + if bit: + result += [int(tiles[k].replace('$', '0x'), 0)] + k += 1 + else: + result += [j] + j += 1 + + results += [result] + + return results + +def get_animated_graphics(path, w=7, h=7, bitmask_path=None, frame_path=None): + frames = get_animation_frames(path, w, h, bitmask_path, frame_path) + new_path = path.replace('.animated.2bpp', '.2bpp') + tiles = gfx.get_tiles(bytearray(open(path, 'rb').read())) + new_tiles = tiles[:w * h] + for frame in frames: + for tile in frame: + new_tiles += [tiles[tile]] + new_graphic = gfx.connect(new_tiles) + print new_path, list(new_graphic) + open(new_path, 'wb').write(bytearray(new_graphic)) + return new_path + +def filepath_rules(filepath): + """Infer attributes of certain graphics by their location in the filesystem.""" + args = {} + + filedir, filename = os.path.split(filepath) + if filedir.startswith('./'): + filedir = filedir[2:] + + name, ext = os.path.splitext(filename) + if ext == '.lz': + name, ext = os.path.splitext(name) + + pokemon_name = '' + + if 'gfx/pokemon/' in filedir: + pokemon_name = filedir.split('/')[-1] + 
if pokemon_name.startswith('unown_'): + index = filedir.find(pokemon_name) + if index != -1: + filedir = filedir[:index + len('unown')] + filedir[index + len('unown_a'):] + if name == 'front' or name == 'front.animated': + args['pal_file'] = os.path.join(filedir, 'normal.pal') + args['pic'] = True + args['animate'] = True + elif name == 'back': + args['pal_file'] = os.path.join(filedir, 'normal.pal') + args['pic'] = True + + elif 'gfx/trainers' in filedir: + args['pic'] = True + + elif os.path.join(filedir, name) in pics: + args['pic'] = True + + elif filedir == 'gfx/tilesets': + args['tileset'] = True + + if args.get('pal_file'): + if os.path.exists(args['pal_file']): + args['palout'] = args['pal_file'] + else: + del args['pal_file'] + + if args.get('pic'): + if ext == '.png': + w, h = gfx.png.Reader(filepath).asRGBA8()[:2] + w = min(w/8, h/8) + args['pic_dimensions'] = w, w + elif ext == '.2bpp': + if pokemon_name and name == 'front' or name == 'front.animated': + w, h = get_pokemon_dimensions(filepath.replace(ext, '.dimensions')) or (7, 7) + args['pic_dimensions'] = w, w + elif pokemon_name and name == 'back': + args['pic_dimensions'] = 6, 6 + else: + args['pic_dimensions'] = 7, 7 + + if args.get('tileset'): + args['width'] = 128 + return args + + +def to_1bpp(filename, **kwargs): + name, ext = os.path.splitext(filename) + if ext == '.1bpp': pass + elif ext == '.2bpp': gfx.export_2bpp_to_1bpp(filename, **kwargs) + elif ext == '.png': gfx.export_png_to_1bpp(filename, **kwargs) + elif ext == '.lz': + decompress(filename, **kwargs) + to_1bpp(name, **kwargs) + +def to_2bpp(filename, **kwargs): + name, ext = os.path.splitext(filename) + if ext == '.1bpp': gfx.export_1bpp_to_2bpp(filename, **kwargs) + elif ext == '.2bpp': pass + elif ext == '.png': gfx.export_png_to_2bpp(filename, **kwargs) + elif ext == '.lz': + decompress(filename, **kwargs) + to_2bpp(name, **kwargs) + +def to_png(filename, **kwargs): + name, ext = os.path.splitext(filename) + if ext == '.1bpp': gfx.export_1bpp_to_png(filename, **kwargs) + elif ext == '.2bpp' and name.endswith('.animated'): + w, h = kwargs.get('pic_dimensions') or (7, 7) + new_path = get_animated_graphics(filename, w=w, h=h) + return to_png(new_path, **kwargs) + elif ext == '.2bpp': gfx.export_2bpp_to_png(filename, **kwargs) + elif ext == '.png': pass + elif ext == '.lz': + decompress(filename, **kwargs) + to_png(name, **kwargs) + +def compress(filename, **kwargs): + data = open(filename, 'rb').read() + lz_data = lz.Compressed(data).output + open(filename + '.lz', 'wb').write(bytearray(lz_data)) + +def decompress(filename, **kwargs): + lz_data = open(filename, 'rb').read() + data = lz.Decompressed(lz_data).output + name, ext = os.path.splitext(filename) + open(name, 'wb').write(bytearray(data)) + + +methods = { + '2bpp': to_2bpp, + '1bpp': to_1bpp, + 'png': to_png, + 'lz': compress, + 'unlz': decompress, +} + +def main(method_name, filenames=None): + if filenames is None: filenames = [] + for filename in filenames: + args = filepath_rules(filename) + method = methods.get(method_name) + if method: + method(filename, **args) + +def get_args(): + ap = argparse.ArgumentParser() + ap.add_argument('method_name') + ap.add_argument('filenames', nargs='*') + args = ap.parse_args() + return args + +if __name__ == '__main__': + main(**get_args().__dict__) diff --git a/tools/pkmncompress.c b/tools/pkmncompress.c new file mode 100644 index 00000000..db707653 --- /dev/null +++ b/tools/pkmncompress.c @@ -0,0 +1,451 @@ +/* + * Copyright © 2013 stag019 <stag019@gmail.com> + 
* + * Permission to use, copy, modify, and distribute this software for any + * purpose with or without fee is hereby granted, provided that the above + * copyright notice and this permission notice appear in all copies. + * + * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES + * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF + * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR + * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES + * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN + * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF + * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + */ + +#include <stdlib.h> +#include <stdio.h> +#include <stdint.h> +#include <string.h> + +typedef uint8_t u8; + +u8 *compressed = NULL; +int xrows = 0; +int xwidth = 0; +int curbit = 0; +int curbyte = 0; + +void writebit(int bit) +{ + if (++curbit == 8) + { + curbyte++; + curbit = 0; + } + compressed[curbyte] |= bit << (7 - curbit); +} + +void method_1(u8 *RAM) +{ + int i; + int j; + int nibble_1; + int nibble_2 = 0; + int code_1; + int code_2; + int table; + static int method_1[2][0x10] = { + {0x0, 0x1, 0x3, 0x2, 0x6, 0x7, 0x5, 0x4, 0xC, 0xD, 0xF, 0xE, 0xA, 0xB, 0x9, 0x8}, + {0x8, 0x9, 0xB, 0xA, 0xE, 0xF, 0xD, 0xC, 0x4, 0x5, 0x7, 0x6, 0x2, 0x3, 0x1, 0x0} + }; + + for (i = 0; i < xrows * xwidth * 8; i++) + { + j = i / xrows; + j += i % xrows * xwidth * 8; + if (!(i % xrows)) + { + nibble_2 = 0; + } + nibble_1 = (RAM[j] >> 4) & 0x0F; + table = 0; + if (nibble_2 & 1) + { + table = 1; + } + code_1 = method_1[table][nibble_1]; + nibble_2 = RAM[j] & 0x0F; + table = 0; + if (nibble_1 & 1) + { + table = 1; + } + code_2 = method_1[table][nibble_2]; + RAM[j] = (code_1 << 4) | code_2; + } +} + +// "Get the previous power of 2. Deriving the bitcount from that seems to be faster on average than using the lookup table." 
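// Worked illustration of the bit twiddling below (added note, not in the
// original source): for nums == 5, v settles on 3, the largest (2^k)-1 that
// does not exceed nums+1, so bitcount == 1 and number == 3; the bits written
// are 1, 0, 1, 1 -- the same output RLE_old() derives from its lookup table.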
+void RLE(int nums) +{ + int v; + int j; + int bitcount; + int number; + + bitcount = -1; + v = ++nums; + v++; + v |= v >> 1; + v |= v >> 2; + v |= v >> 4; + v |= v >> 8; + v |= v >> 16; + v -= v >> 1; + v--; + + number = nums - v; + while(v) { + v >>= 1; + bitcount++; + } + for(j = 0; j < bitcount; j++) { + writebit(1); + } + writebit(0); + for(j = bitcount; j >= 0; j--) { + writebit((number >> j) & 1); + } +} + +void RLE_old(int nums) +{ + int search; + int i; + int j; + int bitcount; + int number; + static int RLE[0x10] = {0x0001, 0x0003, 0x0007, 0x000F, 0x001F, 0x003F, 0x007F, 0x00FF, 0x01FF, 0x03FF, 0x07FF, 0x0FFF, 0x1FFF, 0x3FFF, 0x7FFF, 0xFFFF}; + + bitcount = -1; + search = ++nums; + while (search > 0) + { + for (i = 0; i < 0xF; i++) + { + if (RLE[i] == search) + { + bitcount = i; + break; + } + } + if (bitcount != -1) + { + break; + } + search--; + } + number = nums - RLE[bitcount]; + for (j = 0; j < bitcount; j++) + { + writebit(1); + } + writebit(0); + for (j = bitcount; j >= 0; j--) + { + writebit((number >> j) & 1); + } +} + +void data_packet(u8 *bitgroups, int bgi) +{ + int i; + for (i = 0; i < bgi; i++) + { + writebit((bitgroups[i] >> 1) & 1); + writebit(bitgroups[i] & 1); + } +} + +int interpret_compress(u8 *RAM_1, u8 *RAM_2, int interpretation, int switchram) +{ + u8 *_1_RAM; + u8 *_2_RAM; + int i; + int ram; + int type; + int nums; + u8 *bitgroups; + int x; + int y; + int byte; + int bit; + int bitgroup; + int bgi = 0; + + int ram_size = xrows * xwidth * 8; + _1_RAM = (u8 *)calloc(ram_size, 1); + _2_RAM = (u8 *)calloc(ram_size, 1); + if (switchram) + { + memcpy(_1_RAM, RAM_2, ram_size); + memcpy(_2_RAM, RAM_1, ram_size); + } + else + { + memcpy(_1_RAM, RAM_1, ram_size); + memcpy(_2_RAM, RAM_2, ram_size); + } + + switch(interpretation) + { + case 1: + method_1(_1_RAM); + method_1(_2_RAM); + break; + case 2: + case 3: + for (i = 0; i < xrows * xwidth * 8; i++) + { + _2_RAM[i] ^= _1_RAM[i]; + } + method_1(_1_RAM); + break; + } + if (interpretation == 3) + { + method_1(_2_RAM); + } + + curbit = 7; + curbyte = 0; + compressed = (u8 *)calloc(0x310, 1); + compressed[0] = (xrows << 4) | xwidth; + writebit(switchram); + + for (ram = 0; ram < 2; ram++) + { + type = 0; + nums = 0; + bitgroups = (u8 *)calloc(0x1000, 1); + + for (x = 0; x < xwidth; x++) + { + for (bit = 0; bit < 8; bit += 2) + { + byte = x * xrows * 8; + for (y=0; y < xrows * 8; y++) + { + if (ram) + { + bitgroup = (_2_RAM[byte] >> (6 - bit)) & 3; + } + else + { + bitgroup = (_1_RAM[byte] >> (6 - bit)) & 3; + } + if (!bitgroup) + { + if (!type) + { + writebit(0); + } + else if (type == 1) + { + nums++; + } + else + { + data_packet(bitgroups, bgi); + writebit(0); + writebit(0); + } + type = 1; + free(bitgroups); + bitgroups = (u8 *)calloc(0x1000, 1); + bgi = 0; + } + else + { + if (!type) + { + writebit(1); + } + else if (type == 1) + { + RLE(nums); + } + type = -1; + bitgroups[bgi++] = bitgroup; + nums = 0; + } + byte++; + } + } + } + if (type == 1) + { + RLE(nums); + } + else + { + data_packet(bitgroups, bgi); + } + if (!ram) + { + if (interpretation < 2) + { + writebit(0); + } + else + { + writebit(1); + writebit(interpretation - 2); + } + } + } + free(bitgroups); + free(_1_RAM); + free(_2_RAM); + return (curbyte + 1) * 8 + curbit; +} + +int compress(u8 *data, int width, int height) +{ + u8 *RAM_1; + u8 *RAM_2; + int i; + int mode; + int order; + int newsize; + int compressedsize; + int size = -1; + u8 *current = NULL; + int ram_size; + + xrows = height; + xwidth = width; + + ram_size = xrows * xwidth * 8; + + RAM_1 
= (u8 *)calloc(ram_size, 1); + RAM_2 = (u8 *)calloc(ram_size, 1); + + for (i = 0; i < xrows * xwidth * 8; i++) + { + RAM_1[i] = data[(i << 1)]; + RAM_2[i] = data[(i << 1) | 1]; + } + + for (mode = 1; mode < 4; mode++) + { + for (order = 0; order < 2; order++) + { + if (!(mode == 1 && order == 0)) + { + newsize = interpret_compress(RAM_1, RAM_2, mode, order); + if (size == -1 || newsize < size) + { + if (current != NULL) + { + free(current); + } + current = (u8 *)calloc(0x310, 1); + memcpy(current, compressed, newsize / 8); + free(compressed); + size = newsize; + } + } + } + } + compressed = (u8 *)calloc(0x310, 1); + compressedsize = size / 8; + memcpy(compressed, current, compressedsize); + free(current); + + free(RAM_1); + free(RAM_2); + + return compressedsize; +} + +uint8_t *transpose_tiles(uint8_t *data, int width, int height) +{ + int i; + int j; + int tile_size = 0x10; + + int size = width * height * tile_size; + u8 *transposed = calloc(size, 1); + for (i = 0; i < size; i++) + { + j = (i / 0x10) * width * 0x10; + j = (j % size) + 0x10 * (j / size) + (i % 0x10); + transposed[j] = data[i]; + } + + free(data); + + return transposed; +} + +int main(int argc, char *argv[]) +{ + int width = 0; + int height = 0; + int transpose = 1; + + if (argc != 3) + { + fputs("Usage: pkmncompress infile.2bpp outfile.pic\n", stderr); + return EXIT_FAILURE; + } + + char *infile = argv[1]; + char *outfile = argv[2]; + + FILE *f = fopen(infile, "rb"); + if (!f) { + fprintf(stderr, "failed to open for reading: '%s'\n", infile); + return EXIT_FAILURE; + } + fseek(f, 0, SEEK_END); + int filesize = ftell(f); + + for (int i = 0; i < 32; i++) { + width = i; + height = i; + if (width * height * 16 >= filesize) { + break; + } + } + if (width * height * 16 < filesize) { + fprintf(stderr, "file too big: '%s' (%x)\n", infile, filesize); + return EXIT_FAILURE; + } + if (width * height * 16 > filesize) { + fprintf(stderr, "wrong filesize for '%s' (%x). must be a square image of 16-byte tiles\n", infile, filesize); + return EXIT_FAILURE; + } + + u8 *data = (u8 *)calloc(filesize, 1); + fseek(f, 0, SEEK_SET); + int size = fread(data, 1, filesize, f); + fclose(f); + if (size != filesize) { + fprintf(stderr, "failed to read: '%s'\n", infile); + return EXIT_FAILURE; + } + + if (transpose) { + data = transpose_tiles(data, width, height); + } + + int compressed_size = compress(data, width, height); + + free(data); + + f = fopen(outfile, "wb"); + if (!f) { + fprintf(stderr, "failed to open for writing: '%s'\n", outfile); + return EXIT_FAILURE; + } + fwrite(compressed, 1, compressed_size, f); + fclose(f); + + free(compressed); + + return EXIT_SUCCESS; +} diff --git a/tools/pokemontools/__init__.py b/tools/pokemontools/__init__.py new file mode 100644 index 00000000..a3e75900 --- /dev/null +++ b/tools/pokemontools/__init__.py @@ -0,0 +1 @@ +# A subset of https://github.com/pret/pokemon-reverse-engineering-tools diff --git a/tools/pokemontools/gfx.py b/tools/pokemontools/gfx.py new file mode 100644 index 00000000..2979b5a7 --- /dev/null +++ b/tools/pokemontools/gfx.py @@ -0,0 +1,938 @@ +# -*- coding: utf-8 -*- + +import os +import sys +import png +from math import sqrt, floor, ceil +import argparse +import operator + +from lz import Compressed, Decompressed + + +def split(list_, interval): + """ + Split a list by length. 
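    A short added illustration:

        >>> list(split(range(5), 2))
        [[0, 1], [2, 3], [4]]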
+ """ + for i in xrange(0, len(list_), interval): + j = min(i + interval, len(list_)) + yield list_[i:j] + + +def hex_dump(data, length=0x10): + """ + just use hexdump -C + """ + margin = len('%x' % len(data)) + output = [] + address = 0 + for line in split(data, length): + output += [ + hex(address)[2:].zfill(margin) + + ' | ' + + ' '.join('%.2x' % byte for byte in line) + ] + address += length + return '\n'.join(output) + + +def get_tiles(image): + """ + Split a 2bpp image into 8x8 tiles. + """ + return list(split(image, 0x10)) + +def connect(tiles): + """ + Combine 8x8 tiles into a 2bpp image. + """ + return [byte for tile in tiles for byte in tile] + +def transpose(tiles, width=None): + """ + Transpose a tile arrangement along line y=-x. + + 00 01 02 03 04 05 00 06 0c 12 18 1e + 06 07 08 09 0a 0b 01 07 0d 13 19 1f + 0c 0d 0e 0f 10 11 <-> 02 08 0e 14 1a 20 + 12 13 14 15 16 17 03 09 0f 15 1b 21 + 18 19 1a 1b 1c 1d 04 0a 10 16 1c 22 + 1e 1f 20 21 22 23 05 0b 11 17 1d 23 + + 00 01 02 03 00 04 08 + 04 05 06 07 <-> 01 05 09 + 08 09 0a 0b 02 06 0a + 03 07 0b + """ + if width == None: + width = int(sqrt(len(tiles))) # assume square image + tiles = sorted(enumerate(tiles), key= lambda (i, tile): i % width) + return [tile for i, tile in tiles] + +def transpose_tiles(image, width=None): + return connect(transpose(get_tiles(image), width)) + +def interleave(tiles, width): + """ + 00 01 02 03 04 05 00 02 04 06 08 0a + 06 07 08 09 0a 0b 01 03 05 07 09 0b + 0c 0d 0e 0f 10 11 --> 0c 0e 10 12 14 16 + 12 13 14 15 16 17 0d 0f 11 13 15 17 + 18 19 1a 1b 1c 1d 18 1a 1c 1e 20 22 + 1e 1f 20 21 22 23 19 1b 1d 1f 21 23 + """ + interleaved = [] + left, right = split(tiles[::2], width), split(tiles[1::2], width) + for l, r in zip(left, right): + interleaved += l + r + return interleaved + +def deinterleave(tiles, width): + """ + 00 02 04 06 08 0a 00 01 02 03 04 05 + 01 03 05 07 09 0b 06 07 08 09 0a 0b + 0c 0e 10 12 14 16 --> 0c 0d 0e 0f 10 11 + 0d 0f 11 13 15 17 12 13 14 15 16 17 + 18 1a 1c 1e 20 22 18 19 1a 1b 1c 1d + 19 1b 1d 1f 21 23 1e 1f 20 21 22 23 + """ + deinterleaved = [] + rows = list(split(tiles, width)) + for left, right in zip(rows[::2], rows[1::2]): + for l, r in zip(left, right): + deinterleaved += [l, r] + return deinterleaved + +def interleave_tiles(image, width): + return connect(interleave(get_tiles(image), width)) + +def deinterleave_tiles(image, width): + return connect(deinterleave(get_tiles(image), width)) + + +def condense_image_to_map(image, pic=0): + """ + Reduce an image of adjacent frames to an image containing a base frame and any unrepeated tiles. + Returns the new image and the corresponding tilemap used to reconstruct the input image. + + If <pic> is 0, ignore the concept of frames. This behavior might be better off as another function. + """ + tiles = get_tiles(image) + new_tiles, tilemap = condense_tiles_to_map(tiles, pic) + new_image = connect(new_tiles) + return new_image, tilemap + +def condense_tiles_to_map(tiles, pic=0): + """ + Reduce a sequence of tiles representing adjacent frames to a base frame and any unrepeated tiles. + Returns the new tiles and the corresponding tilemap used to reconstruct the input tile sequence. + + If <pic> is 0, ignore the concept of frames. This behavior might be better off as another function. + """ + + # Leave the first frame intact for pics. 
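    # Illustration, mirroring test_condense_tiles_to_map below: with pic=4,
    # the tile sequence 'abcadbae' condenses to 'abcade' with the tilemap
    # [0, 1, 2, 3, 4, 1, 0, 5].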
+ new_tiles = tiles[:pic] + tilemap = range(pic) + + for i, tile in enumerate(tiles[pic:]): + if tile not in new_tiles: + new_tiles.append(tile) + + if pic: + # Match the first frame exactly where possible. + # This reduces the space needed to replace tiles in pic animations. + # For example, if a tile is repeated twice in the first frame, + # but at the same relative index as the second tile, use the second index. + # When creating a bitmask later, the second index would not require a replacement, but the first index would have. + pic_i = i % pic + if tile == new_tiles[pic_i]: + tilemap.append(pic_i) + else: + tilemap.append(new_tiles.index(tile)) + else: + tilemap.append(new_tiles.index(tile)) + return new_tiles, tilemap + +def test_condense_tiles_to_map(): + test = condense_tiles_to_map(list('abcadbae')) + if test != (list('abcde'), [0, 1, 2, 0, 3, 1, 0, 4]): + raise Exception(test) + test = condense_tiles_to_map(list('abcadbae'), 2) + if test != (list('abcde'), [0, 1, 2, 0, 3, 1, 0, 4]): + raise Exception(test) + test = condense_tiles_to_map(list('abcadbae'), 4) + if test != (list('abcade'), [0, 1, 2, 3, 4, 1, 0, 5]): + raise Exception(test) + test = condense_tiles_to_map(list('abcadbea'), 4) + if test != (list('abcade'), [0, 1, 2, 3, 4, 1, 5, 3]): + raise Exception(test) + + +def to_file(filename, data): + """ + Apparently open(filename, 'wb').write(bytearray(data)) won't work. + """ + file = open(filename, 'wb') + for byte in data: + file.write('%c' % byte) + file.close() + + +def decompress_file(filein, fileout=None): + image = bytearray(open(filein).read()) + de = Decompressed(image) + + if fileout == None: + fileout = os.path.splitext(filein)[0] + to_file(fileout, de.output) + + +def compress_file(filein, fileout=None): + image = bytearray(open(filein).read()) + lz = Compressed(image) + + if fileout == None: + fileout = filein + '.lz' + to_file(fileout, lz.output) + + +def bin_to_rgb(word): + red = word & 0b11111 + word >>= 5 + green = word & 0b11111 + word >>= 5 + blue = word & 0b11111 + return (red, green, blue) + +def convert_binary_pal_to_text_by_filename(filename): + pal = bytearray(open(filename).read()) + return convert_binary_pal_to_text(pal) + +def convert_binary_pal_to_text(pal): + output = '' + words = [hi * 0x100 + lo for lo, hi in zip(pal[::2], pal[1::2])] + for word in words: + red, green, blue = ['%.2d' % c for c in bin_to_rgb(word)] + output += '\tRGB ' + ', '.join((red, green, blue)) + output += '\n' + return output + +def read_rgb_macros(lines): + colors = [] + for line in lines: + macro = line.split(" ")[0].strip() + if macro == 'RGB': + params = ' '.join(line.split(" ")[1:]).split(',') + red, green, blue = [int(v) for v in params] + colors += [[red, green, blue]] + return colors + + +def rewrite_binary_pals_to_text(filenames): + for filename in filenames: + pal_text = convert_binary_pal_to_text_by_filename(filename) + with open(filename, 'w') as out: + out.write(pal_text) + + +def flatten(planar): + """ + Flatten planar 2bpp image data into a quaternary pixel map. + """ + strips = [] + for bottom, top in split(planar, 2): + bottom = bottom + top = top + strip = [] + for i in xrange(7,-1,-1): + color = ( + (bottom >> i & 1) + + (top *2 >> i & 2) + ) + strip += [color] + strips += strip + return strips + +def to_lines(image, width): + """ + Convert a tiled quaternary pixel map to lines of quaternary pixels. 
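    For a 16-pixel-wide image (two 8x8 tiles), output line 0 is row 0 of
    tile 0 followed by row 0 of tile 1, and so on down the image (added
    illustration).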
+ """ + tile_width = 8 + tile_height = 8 + num_columns = width / tile_width + height = len(image) / width + + lines = [] + for cur_line in xrange(height): + tile_row = cur_line / tile_height + line = [] + for column in xrange(num_columns): + anchor = ( + num_columns * tile_row * tile_width * tile_height + + column * tile_width * tile_height + + cur_line % tile_height * tile_width + ) + line += image[anchor : anchor + tile_width] + lines += [line] + return lines + + +def dmg2rgb(word): + """ + For PNGs. + """ + def shift(value): + while True: + yield value & (2**5 - 1) + value >>= 5 + word = shift(word) + # distribution is less even w/ << 3 + red, green, blue = [int(color * 8.25) for color in [word.next() for _ in xrange(3)]] + alpha = 255 + return (red, green, blue, alpha) + + +def rgb_to_dmg(color): + """ + For PNGs. + """ + word = (color['r'] / 8) + word += (color['g'] / 8) << 5 + word += (color['b'] / 8) << 10 + return word + + +def pal_to_png(filename): + """ + Interpret a .pal file as a png palette. + """ + with open(filename) as rgbs: + colors = read_rgb_macros(rgbs.readlines()) + a = 255 + palette = [] + for color in colors: + # even distribution over 000-255 + r, g, b = [int(hue * 8.25) for hue in color] + palette += [(r, g, b, a)] + white = (255,255,255,255) + black = (000,000,000,255) + if white not in palette and len(palette) < 4: + palette = [white] + palette + if black not in palette and len(palette) < 4: + palette = palette + [black] + return palette + + +def png_to_rgb(palette): + """ + Convert a png palette to rgb macros. + """ + output = '' + for color in palette: + r, g, b = [color[c] / 8 for c in 'rgb'] + output += '\tRGB ' + ', '.join(['%.2d' % hue for hue in (r, g, b)]) + output += '\n' + return output + + +def read_filename_arguments(filename): + """ + Infer graphics conversion arguments given a filename. + + Arguments are separated with '.'. + """ + parsed_arguments = {} + + int_arguments = { + 'w': 'width', + 'h': 'height', + 't': 'tile_padding', + } + arguments = os.path.splitext(filename)[0].lstrip('.').split('.')[1:] + for argument in arguments: + + # Check for integer arguments first (i.e. "w128"). + arg = argument[0] + param = argument[1:] + if param.isdigit(): + arg = int_arguments.get(arg, False) + if arg: + parsed_arguments[arg] = int(param) + + elif argument == 'arrange': + parsed_arguments['norepeat'] = True + parsed_arguments['tilemap'] = True + + # Pic dimensions (i.e. "6x6"). 
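        # e.g. a hypothetical 'front.6x6.png' yields pic_dimensions == (6, 6),
        # while a 'w128' argument is caught by the integer check above.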
+ elif 'x' in argument and any(map(str.isdigit, argument)): + w, h = argument.split('x') + if w.isdigit() and h.isdigit(): + parsed_arguments['pic_dimensions'] = (int(w), int(h)) + + else: + parsed_arguments[argument] = True + + return parsed_arguments + + +def export_2bpp_to_png(filein, fileout=None, pal_file=None, height=0, width=0, tile_padding=0, pic_dimensions=None, **kwargs): + + if fileout == None: + fileout = os.path.splitext(filein)[0] + '.png' + + image = open(filein, 'rb').read() + + arguments = { + 'width': width, + 'height': height, + 'pal_file': pal_file, + 'tile_padding': tile_padding, + 'pic_dimensions': pic_dimensions, + } + arguments.update(read_filename_arguments(filein)) + + if pal_file == None: + if os.path.exists(os.path.splitext(fileout)[0]+'.pal'): + arguments['pal_file'] = os.path.splitext(fileout)[0]+'.pal' + + result = convert_2bpp_to_png(image, **arguments) + width, height, palette, greyscale, bitdepth, px_map = result + + w = png.Writer( + width, + height, + palette=palette, + compression=9, + greyscale=greyscale, + bitdepth=bitdepth + ) + with open(fileout, 'wb') as f: + w.write(f, px_map) + + +def convert_2bpp_to_png(image, **kwargs): + """ + Convert a planar 2bpp graphic to png. + """ + + image = bytearray(image) + + pad_color = bytearray([0]) + + width = kwargs.get('width', 0) + height = kwargs.get('height', 0) + tile_padding = kwargs.get('tile_padding', 0) + pic_dimensions = kwargs.get('pic_dimensions', None) + pal_file = kwargs.get('pal_file', None) + interleave = kwargs.get('interleave', False) + + # Width must be specified to interleave. + if interleave and width: + image = interleave_tiles(image, width / 8) + + # Pad the image by a given number of tiles if asked. + image += pad_color * 0x10 * tile_padding + + # Some images are transposed in blocks. + if pic_dimensions: + w, h = pic_dimensions + if not width: width = w * 8 + + pic_length = w * h * 0x10 + + trailing = len(image) % pic_length + + pic = [] + for i in xrange(0, len(image) - trailing, pic_length): + pic += transpose_tiles(image[i:i+pic_length], h) + image = bytearray(pic) + image[len(image) - trailing:] + + # Pad out trailing lines. + image += pad_color * 0x10 * ((w - (len(image) / 0x10) % h) % w) + + def px_length(img): + return len(img) * 4 + def tile_length(img): + return len(img) * 4 / (8*8) + + if width and height: + tile_width = width / 8 + more_tile_padding = (tile_width - (tile_length(image) % tile_width or tile_width)) + image += pad_color * 0x10 * more_tile_padding + + elif width and not height: + tile_width = width / 8 + more_tile_padding = (tile_width - (tile_length(image) % tile_width or tile_width)) + image += pad_color * 0x10 * more_tile_padding + height = px_length(image) / width + + elif height and not width: + tile_height = height / 8 + more_tile_padding = (tile_height - (tile_length(image) % tile_height or tile_height)) + image += pad_color * 0x10 * more_tile_padding + width = px_length(image) / height + + # at least one dimension should be given + if width * height != px_length(image): + # look for possible combos of width/height that would form a rectangle + matches = [] + # Height need not be divisible by 8, but width must. + # See pokered gfx/minimize_pic.1bpp. 
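    # Added example: a 16-tile (256-byte) graphic with no dimensions given
    # yields candidates such as 8x128, 16x64 and 32x32; the sort below picks
    # 32x32, the most square option whose height is divisible by 8.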
+ for w in range(8, px_length(image) / 2 + 1, 8): + h = px_length(image) / w + if w * h == px_length(image): + matches += [(w, h)] + # go for the most square image + if len(matches): + width, height = sorted(matches, key= lambda (w, h): (h % 8 != 0, w + h))[0] # favor height + else: + raise Exception, 'Image can\'t be divided into tiles (%d px)!' % (px_length(image)) + + # convert tiles to lines + lines = to_lines(flatten(image), width) + + if pal_file == None: + palette = None + greyscale = True + bitdepth = 2 + px_map = [[3 - pixel for pixel in line] for line in lines] + + else: # gbc color + palette = pal_to_png(pal_file) + greyscale = False + bitdepth = 8 + px_map = [[pixel for pixel in line] for line in lines] + + return width, height, palette, greyscale, bitdepth, px_map + + +def get_pic_animation(tmap, w, h): + """ + Generate pic animation data from a combined tilemap of each frame. + """ + frame_text = '' + bitmask_text = '' + + frames = list(split(tmap, w * h)) + base = frames.pop(0) + bitmasks = [] + + for i in xrange(len(frames)): + frame_text += '\tdw .frame{}\n'.format(i + 1) + + for i, frame in enumerate(frames): + bitmask = map(operator.ne, frame, base) + if bitmask not in bitmasks: + bitmasks.append(bitmask) + which_bitmask = bitmasks.index(bitmask) + + mask = iter(bitmask) + masked_frame = filter(lambda _: mask.next(), frame) + + frame_text += '.frame{}\n'.format(i + 1) + frame_text += '\tdb ${:02x} ; bitmask\n'.format(which_bitmask) + if masked_frame: + frame_text += '\tdb {}\n'.format(', '.join( + map('${:02x}'.format, masked_frame) + )) + + for i, bitmask in enumerate(bitmasks): + bitmask_text += '; {}\n'.format(i) + for byte in split(bitmask, 8): + byte = int(''.join(map(int.__repr__, reversed(byte))), 2) + bitmask_text += '\tdb %{:08b}\n'.format(byte) + + return frame_text, bitmask_text + + +def export_png_to_2bpp(filein, fileout=None, palout=None, **kwargs): + + arguments = { + 'tile_padding': 0, + 'pic_dimensions': None, + 'animate': False, + 'stupid_bitmask_hack': [], + } + arguments.update(kwargs) + arguments.update(read_filename_arguments(filein)) + + image, arguments = png_to_2bpp(filein, **arguments) + + if fileout == None: + fileout = os.path.splitext(filein)[0] + '.2bpp' + to_file(fileout, image) + + tmap = arguments.get('tmap') + + if tmap != None and arguments['animate'] and arguments['pic_dimensions']: + # Generate pic animation data. + frame_text, bitmask_text = get_pic_animation(tmap, *arguments['pic_dimensions']) + + frames_path = os.path.join(os.path.split(fileout)[0], 'frames.asm') + with open(frames_path, 'w') as out: + out.write(frame_text) + + bitmask_path = os.path.join(os.path.split(fileout)[0], 'bitmask.asm') + + # The following Pokemon have a bitmask dummied out. 
+ for exception in arguments['stupid_bitmask_hack']: + if exception in bitmask_path: + bitmasks = bitmask_text.split(';') + bitmasks[-1] = bitmasks[-1].replace('1', '0') + bitmask_text = ';'.join(bitmasks) + + with open(bitmask_path, 'w') as out: + out.write(bitmask_text) + + elif tmap != None and arguments.get('tilemap', False): + tilemap_path = os.path.splitext(fileout)[0] + '.tilemap' + to_file(tilemap_path, tmap) + + palette = arguments.get('palette') + if palout == None: + palout = os.path.splitext(fileout)[0] + '.pal' + export_palette(palette, palout) + + +def get_image_padding(width, height, wstep=8, hstep=8): + + padding = { + 'left': 0, + 'right': 0, + 'top': 0, + 'bottom': 0, + } + + if width % wstep and width >= wstep: + pad = float(width % wstep) / 2 + padding['left'] = int(ceil(pad)) + padding['right'] = int(floor(pad)) + + if height % hstep and height >= hstep: + pad = float(height % hstep) / 2 + padding['top'] = int(ceil(pad)) + padding['bottom'] = int(floor(pad)) + + return padding + + +def png_to_2bpp(filein, **kwargs): + """ + Convert a png image to planar 2bpp. + """ + + arguments = { + 'tile_padding': 0, + 'pic_dimensions': False, + 'interleave': False, + 'norepeat': False, + 'tilemap': False, + } + arguments.update(kwargs) + + if type(filein) is str: + filein = open(filein) + + assert type(filein) is file + + width, height, rgba, info = png.Reader(filein).asRGBA8() + + # png.Reader returns flat pixel data. Nested is easier to work with + len_px = len('rgba') + image = [] + palette = [] + for line in rgba: + newline = [] + for px in xrange(0, len(line), len_px): + color = dict(zip('rgba', line[px:px+len_px])) + if color not in palette: + if len(palette) < 4: + palette += [color] + else: + # TODO Find the nearest match + print 'WARNING: %s: Color %s truncated to' % (filein, color), + color = sorted(palette, key=lambda x: sum(x.values()))[0] + print color + newline += [color] + image += [newline] + + assert len(palette) <= 4, '%s: palette should be 4 colors, is really %d (%s)' % (filein, len(palette), palette) + + # Pad out smaller palettes with greyscale colors + greyscale = { + 'black': { 'r': 0x00, 'g': 0x00, 'b': 0x00, 'a': 0xff }, + 'grey': { 'r': 0x55, 'g': 0x55, 'b': 0x55, 'a': 0xff }, + 'gray': { 'r': 0xaa, 'g': 0xaa, 'b': 0xaa, 'a': 0xff }, + 'white': { 'r': 0xff, 'g': 0xff, 'b': 0xff, 'a': 0xff }, + } + preference = 'white', 'black', 'grey', 'gray' + for hue in map(greyscale.get, preference): + if len(palette) >= 4: + break + if hue not in palette: + palette += [hue] + + palette.sort(key=lambda x: sum(x.values())) + + # Game Boy palette order + palette.reverse() + + # Map pixels to quaternary color ids + padding = get_image_padding(width, height) + width += padding['left'] + padding['right'] + height += padding['top'] + padding['bottom'] + pad = bytearray([0]) + + qmap = [] + qmap += pad * width * padding['top'] + for line in image: + qmap += pad * padding['left'] + for color in line: + qmap += [palette.index(color)] + qmap += pad * padding['right'] + qmap += pad * width * padding['bottom'] + + # Graphics are stored in tiles instead of lines + tile_width = 8 + tile_height = 8 + num_columns = max(width, tile_width) / tile_width + num_rows = max(height, tile_height) / tile_height + image = [] + + for row in xrange(num_rows): + for column in xrange(num_columns): + + # Split it up into strips to convert to planar data + for strip in xrange(min(tile_height, height)): + anchor = ( + row * num_columns * tile_width * tile_height + + column * tile_width + + strip * width 
+ ) + line = qmap[anchor : anchor + tile_width] + bottom, top = 0, 0 + for bit, quad in enumerate(line): + bottom += (quad & 1) << (7 - bit) + top += (quad /2 & 1) << (7 - bit) + image += [bottom, top] + + dim = arguments['pic_dimensions'] + if dim: + if type(dim) in (tuple, list): + w, h = dim + else: + # infer dimensions based on width. + w = width / tile_width + h = height / tile_height + if h % w == 0: + h = w + + tiles = get_tiles(image) + pic_length = w * h + tile_width = width / 8 + trailing = len(tiles) % pic_length + new_image = [] + for block in xrange(len(tiles) / pic_length): + offset = (h * tile_width) * ((block * w) / tile_width) + ((block * w) % tile_width) + pic = [] + for row in xrange(h): + index = offset + (row * tile_width) + pic += tiles[index:index + w] + new_image += transpose(pic, w) + new_image += tiles[len(tiles) - trailing:] + image = connect(new_image) + + # Remove any tile padding used to make the png rectangular. + image = image[:len(image) - arguments['tile_padding'] * 0x10] + + tmap = None + + if arguments['interleave']: + image = deinterleave_tiles(image, num_columns) + + if arguments['pic_dimensions']: + image, tmap = condense_image_to_map(image, w * h) + elif arguments['norepeat']: + image, tmap = condense_image_to_map(image) + if not arguments['tilemap']: + tmap = None + + arguments.update({ 'palette': palette, 'tmap': tmap, }) + + return image, arguments + + +def export_palette(palette, filename): + """ + Export a palette from png to rgb macros in a .pal file. + """ + + if os.path.exists(filename): + + # Pic palettes are 2 colors (black/white are added later). + with open(filename) as rgbs: + colors = read_rgb_macros(rgbs.readlines()) + + if len(colors) == 2: + palette = palette[1:3] + + text = png_to_rgb(palette) + with open(filename, 'w') as out: + out.write(text) + + +def png_to_lz(filein): + + name = os.path.splitext(filein)[0] + + export_png_to_2bpp(filein) + image = open(name+'.2bpp', 'rb').read() + to_file(name+'.2bpp'+'.lz', Compressed(image).output) + + +def convert_2bpp_to_1bpp(data): + """ + Convert planar 2bpp image data to 1bpp. Assume images are two colors. + """ + return data[::2] + +def convert_1bpp_to_2bpp(data): + """ + Convert 1bpp image data to planar 2bpp (black/white). 
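    Each byte is written to both bitplanes, so a set bit becomes 2bpp pixel
    value 3 and a clear bit stays 0. Added illustration:

        >>> convert_1bpp_to_2bpp([0b10110100])
        [180, 180]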
+ """ + output = [] + for i in data: + output += [i, i] + return output + + +def export_2bpp_to_1bpp(filename): + name, extension = os.path.splitext(filename) + image = open(filename, 'rb').read() + image = convert_2bpp_to_1bpp(image) + to_file(name + '.1bpp', image) + +def export_1bpp_to_2bpp(filename): + name, extension = os.path.splitext(filename) + image = open(filename, 'rb').read() + image = convert_1bpp_to_2bpp(image) + to_file(name + '.2bpp', image) + + +def export_1bpp_to_png(filename, fileout=None): + + if fileout == None: + fileout = os.path.splitext(filename)[0] + '.png' + + arguments = read_filename_arguments(filename) + + image = open(filename, 'rb').read() + image = convert_1bpp_to_2bpp(image) + + result = convert_2bpp_to_png(image, **arguments) + width, height, palette, greyscale, bitdepth, px_map = result + + w = png.Writer(width, height, palette=palette, compression=9, greyscale=greyscale, bitdepth=bitdepth) + with open(fileout, 'wb') as f: + w.write(f, px_map) + + +def export_png_to_1bpp(filename, fileout=None): + + if fileout == None: + fileout = os.path.splitext(filename)[0] + '.1bpp' + + arguments = read_filename_arguments(filename) + image = png_to_1bpp(filename, **arguments) + + to_file(fileout, image) + +def png_to_1bpp(filename, **kwargs): + image, kwargs = png_to_2bpp(filename, **kwargs) + return convert_2bpp_to_1bpp(image) + + +def convert_to_2bpp(filenames=[]): + for filename in filenames: + filename, name, extension = try_decompress(filename) + if extension == '.1bpp': + export_1bpp_to_2bpp(filename) + elif extension == '.2bpp': + pass + elif extension == '.png': + export_png_to_2bpp(filename) + else: + raise Exception, "Don't know how to convert {} to 2bpp!".format(filename) + +def convert_to_1bpp(filenames=[]): + for filename in filenames: + filename, name, extension = try_decompress(filename) + if extension == '.1bpp': + pass + elif extension == '.2bpp': + export_2bpp_to_1bpp(filename) + elif extension == '.png': + export_png_to_1bpp(filename) + else: + raise Exception, "Don't know how to convert {} to 1bpp!".format(filename) + +def convert_to_png(filenames=[]): + for filename in filenames: + filename, name, extension = try_decompress(filename) + if extension == '.1bpp': + export_1bpp_to_png(filename) + elif extension == '.2bpp': + export_2bpp_to_png(filename) + elif extension == '.png': + pass + else: + raise Exception, "Don't know how to convert {} to png!".format(filename) + +def compress(filenames=[]): + for filename in filenames: + data = open(filename, 'rb').read() + lz_data = Compressed(data).output + to_file(filename + '.lz', lz_data) + +def decompress(filenames=[]): + for filename in filenames: + name, extension = os.path.splitext(filename) + lz_data = open(filename, 'rb').read() + data = Decompressed(lz_data).output + to_file(name, data) + +def try_decompress(filename): + """ + Try to decompress a graphic when determining the filetype. + This skips the manual unlz step when attempting + to convert lz-compressed graphics to png. 
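    For instance, a hypothetical 'gfx/foo.2bpp.lz' is decompressed to
    'gfx/foo.2bpp', and ('gfx/foo.2bpp', 'gfx/foo', '.2bpp') is returned.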
+ """ + name, extension = os.path.splitext(filename) + if extension == '.lz': + decompress([filename]) + filename = name + name, extension = os.path.splitext(filename) + return filename, name, extension + + +def main(): + ap = argparse.ArgumentParser() + ap.add_argument('mode') + ap.add_argument('filenames', nargs='*') + args = ap.parse_args() + + method = { + '2bpp': convert_to_2bpp, + '1bpp': convert_to_1bpp, + 'png': convert_to_png, + 'lz': compress, + 'unlz': decompress, + }.get(args.mode, None) + + if method == None: + raise Exception, "Unknown conversion method!" + + method(args.filenames) + +if __name__ == "__main__": + main() diff --git a/tools/pokemontools/lz.py b/tools/pokemontools/lz.py new file mode 100644 index 00000000..aef5c641 --- /dev/null +++ b/tools/pokemontools/lz.py @@ -0,0 +1,580 @@ +# -*- coding: utf-8 -*- +""" +Pokemon Crystal data de/compression. +""" + +""" +A rundown of Pokemon Crystal's compression scheme: + +Control commands occupy bits 5-7. +Bits 0-4 serve as the first parameter <n> for each command. +""" +lz_commands = { + 'literal': 0, # n values for n bytes + 'iterate': 1, # one value for n bytes + 'alternate': 2, # alternate two values for n bytes + 'blank': 3, # zero for n bytes +} + +""" +Repeater commands repeat any data that was just decompressed. +They take an additional signed parameter <s> to mark a relative starting point. +These wrap around (positive from the start, negative from the current position). +""" +lz_commands.update({ + 'repeat': 4, # n bytes starting from s + 'flip': 5, # n bytes in reverse bit order starting from s + 'reverse': 6, # n bytes backwards starting from s +}) + +""" +The long command is used when 5 bits aren't enough. Bits 2-4 contain a new control code. +Bits 0-1 are appended to a new byte as 8-9, allowing a 10-bit parameter. +""" +lz_commands.update({ + 'long': 7, # n is now 10 bits for a new control code +}) +max_length = 1 << 10 # can't go higher than 10 bits +lowmax = 1 << 5 # standard 5-bit param + +""" +If 0xff is encountered instead of a command, decompression ends. +""" +lz_end = 0xff + + +bit_flipped = [ + sum(((byte >> i) & 1) << (7 - i) for i in xrange(8)) + for byte in xrange(0x100) +] + + +class Compressed: + + """ + Usage: + lz = Compressed(data).output + or + lz = Compressed().compress(data) + or + c = Compressed() + c.data = data + lz = c.compress() + + There are some issues with reproducing the target compressor. + Some notes are listed here: + - the criteria for detecting a lookback is inconsistent + - sometimes lookbacks that are mostly 0s are pruned, sometimes not + - target appears to skip ahead if it can use a lookback soon, stopping the current command short or in some cases truncating it with literals. + - this has been implemented, but the specifics are unknown + - self.min_scores: It's unknown if blank's minimum score should be 1 or 2. Most likely it's 1, with some other hack to account for edge cases. 
+ - may be related to the above + - target does not appear to compress backwards + """ + + def __init__(self, *args, **kwargs): + + self.min_scores = { + 'blank': 1, + 'iterate': 2, + 'alternate': 3, + 'repeat': 3, + 'reverse': 3, + 'flip': 3, + } + + self.preference = [ + 'repeat', + 'blank', + 'flip', + 'reverse', + 'iterate', + 'alternate', + #'literal', + ] + + self.lookback_methods = 'repeat', 'reverse', 'flip' + + self.__dict__.update({ + 'data': None, + 'commands': lz_commands, + 'debug': False, + 'literal_only': False, + }) + + self.arg_names = 'data', 'commands', 'debug', 'literal_only' + + self.__dict__.update(kwargs) + self.__dict__.update(dict(zip(self.arg_names, args))) + + if self.data is not None: + self.compress() + + def compress(self, data=None): + if data is not None: + self.data = data + + self.data = list(bytearray(self.data)) + + self.indexes = {} + self.lookbacks = {} + for method in self.lookback_methods: + self.lookbacks[method] = {} + + self.address = 0 + self.end = len(self.data) + self.output = [] + self.literal = None + + while self.address < self.end: + + if self.score(): + self.do_literal() + self.do_winner() + + else: + if self.literal == None: + self.literal = self.address + self.address += 1 + + self.do_literal() + + self.output += [lz_end] + return self.output + + def reset_scores(self): + self.scores = {} + self.offsets = {} + self.helpers = {} + for method in self.min_scores.iterkeys(): + self.scores[method] = 0 + + def bit_flip(self, byte): + return bit_flipped[byte] + + def do_literal(self): + if self.literal != None: + length = abs(self.address - self.literal) + start = min(self.literal, self.address + 1) + self.helpers['literal'] = self.data[start:start+length] + self.do_cmd('literal', length) + self.literal = None + + def score(self): + self.reset_scores() + + map(self.score_literal, ['iterate', 'alternate', 'blank']) + + for method in self.lookback_methods: + self.scores[method], self.offsets[method] = self.find_lookback(method, self.address) + + self.stop_short() + + return any( + score + > self.min_scores[method] + int(score > lowmax) + for method, score in self.scores.iteritems() + ) + + def stop_short(self): + """ + If a lookback is close, reduce the scores of other commands. + """ + best_method, best_score = max( + self.scores.items(), + key = lambda x: ( + x[1], + -self.preference.index(x[0]) + ) + ) + for method in self.lookback_methods: + min_score = self.min_scores[method] + for address in xrange(self.address+1, self.address+best_score): + length, index = self.find_lookback(method, address) + if length > max(min_score, best_score): + # BUG: lookbacks can reduce themselves. This appears to be a bug in the target also. 
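+                    # (The clamp below caps every current score at the distance to the
+                    # upcoming lookback, including the scores of the lookback commands
+                    # themselves, so the lookback that is currently winning can be
+                    # truncated by the very one it is making room for.)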
+ for m, score in self.scores.items(): + self.scores[m] = min(score, address - self.address) + + + def read(self, address=None): + if address is None: + address = self.address + if 0 <= address < len(self.data): + return self.data[address] + return None + + def find_all_lookbacks(self): + for method in self.lookback_methods: + for address, byte in enumerate(self.data): + self.find_lookback(method, address) + + def find_lookback(self, method, address=None): + """Temporarily stubbed, because the real function doesn't run in polynomial time.""" + return 0, None + + def broken_find_lookback(self, method, address=None): + if address is None: + address = self.address + + existing = self.lookbacks.get(method, {}).get(address) + if existing != None: + return existing + + lookback = 0, None + + # Better to not carelessly optimize at the moment. + """ + if address < 2: + return lookback + """ + + byte = self.read(address) + if byte is None: + return lookback + + direction, mutate = { + 'repeat': ( 1, int), + 'reverse': (-1, int), + 'flip': ( 1, self.bit_flip), + }[method] + + # Doesn't seem to help + """ + if mutate == self.bit_flip: + if byte == 0: + self.lookbacks[method][address] = lookback + return lookback + """ + + data_len = len(self.data) + is_two_byte_index = lambda index: int(index < address - 0x7f) + + for index in self.get_indexes(mutate(byte)): + + if index >= address: + break + + old_length, old_index = lookback + if direction == 1: + if old_length > data_len - index: break + else: + if old_length > index: continue + + if self.read(index) in [None]: continue + + length = 1 # we know there's at least one match, or we wouldn't be checking this index + while 1: + this_byte = self.read(address + length) + that_byte = self.read(index + length * direction) + if that_byte == None or this_byte != mutate(that_byte): + break + length += 1 + + score = length - is_two_byte_index(index) + old_score = old_length - is_two_byte_index(old_index) + if score >= old_score or (score == old_score and length > old_length): + # XXX maybe avoid two-byte indexes when possible + if score >= lookback[0] - is_two_byte_index(lookback[1]): + lookback = length, index + + self.lookbacks[method][address] = lookback + return lookback + + def get_indexes(self, byte): + if not self.indexes.has_key(byte): + self.indexes[byte] = [] + index = -1 + while 1: + try: + index = self.data.index(byte, index + 1) + except ValueError: + break + self.indexes[byte].append(index) + return self.indexes[byte] + + def score_literal(self, method): + address = self.address + + compare = { + 'blank': [0], + 'iterate': [self.read(address)], + 'alternate': [self.read(address), self.read(address + 1)], + }[method] + + # XXX may or may not be correct + if method == 'alternate' and compare[0] == 0: + return + + length = 0 + while self.read(address + length) == compare[length % len(compare)]: + length += 1 + + self.scores[method] = length + self.helpers[method] = compare + + def do_winner(self): + winners = filter( + lambda (method, score): + score + > self.min_scores[method] + int(score > lowmax), + self.scores.iteritems() + ) + winners.sort( + key = lambda (method, score): ( + -(score - self.min_scores[method] - int(score > lowmax)), + self.preference.index(method) + ) + ) + winner, score = winners[0] + + length = min(score, max_length) + self.do_cmd(winner, length) + self.address += length + + def do_cmd(self, cmd, length): + start_address = self.address + + cmd_length = length - 1 + + output = [] + + if length > lowmax: + output.append( + 
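+                # First byte of a long command: bits 5-7 are the 'long' marker (0b111),
+                # bits 2-4 the real command, and bits 0-1 the high two bits of
+                # (length - 1); the following byte carries the low eight bits.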
(self.commands['long'] << 5) + + (self.commands[cmd] << 2) + + (cmd_length >> 8) + ) + output.append( + cmd_length & 0xff + ) + else: + output.append( + (self.commands[cmd] << 5) + + cmd_length + ) + + self.helpers['blank'] = [] # quick hack + output += self.helpers.get(cmd, []) + + if cmd in self.lookback_methods: + offset = self.offsets[cmd] + # Negative offsets are one byte. + # Positive offsets are two. + if 0 < start_address - offset - 1 <= 0x7f: + offset = (start_address - offset - 1) | 0x80 + output += [offset] + else: + output += [offset / 0x100, offset % 0x100] # big endian + + if self.debug: + print ' '.join(map(str, [ + cmd, length, '\t', + ' '.join(map('{:02x}'.format, output)), + self.data[start_address:start_address+length] if cmd in self.lookback_methods else '', + ])) + + self.output += output + + + +class Decompressed: + """ + Interpret and decompress lz-compressed data, usually 2bpp. + """ + + """ + Usage: + data = Decompressed(lz).output + or + data = Decompressed().decompress(lz) + or + d = Decompressed() + d.lz = lz + data = d.decompress() + + To decompress from offset 0x80000 in a rom: + data = Decompressed(rom, start=0x80000).output + """ + + lz = None + start = 0 + commands = lz_commands + debug = False + + arg_names = 'lz', 'start', 'commands', 'debug' + + def __init__(self, *args, **kwargs): + self.__dict__.update(dict(zip(self.arg_names, args))) + self.__dict__.update(kwargs) + + self.command_names = dict(map(reversed, self.commands.items())) + self.address = self.start + + if self.lz is not None: + self.decompress() + + if self.debug: print self.command_list() + + + def command_list(self): + """ + Print a list of commands that were used. Useful for debugging. + """ + + text = '' + + output_address = 0 + for name, attrs in self.used_commands: + length = attrs['length'] + address = attrs['address'] + offset = attrs['offset'] + direction = attrs['direction'] + + text += '{2:03x} {0}: {1}'.format(name, length, output_address) + text += '\t' + ' '.join( + '{:02x}'.format(int(byte)) + for byte in self.lz[ address : address + attrs['cmd_length'] ] + ) + + if offset is not None: + repeated_data = self.output[ offset : offset + length * direction : direction ] + if name == 'flip': + repeated_data = map(bit_flipped.__getitem__, repeated_data) + text += ' [' + ' '.join(map('{:02x}'.format, repeated_data)) + ']' + + text += '\n' + output_address += length + + return text + + + def decompress(self, lz=None): + + if lz is not None: + self.lz = lz + + self.lz = bytearray(self.lz) + + self.used_commands = [] + self.output = [] + + while 1: + + cmd_address = self.address + self.offset = None + self.direction = None + + if (self.byte == lz_end): + self.next() + break + + self.cmd = (self.byte & 0b11100000) >> 5 + + if self.cmd_name == 'long': + # 10-bit length + self.cmd = (self.byte & 0b00011100) >> 2 + self.length = (self.next() & 0b00000011) * 0x100 + self.length += self.next() + 1 + else: + # 5-bit length + self.length = (self.next() & 0b00011111) + 1 + + self.__class__.__dict__[self.cmd_name](self) + + self.used_commands += [( + self.cmd_name, + { + 'length': self.length, + 'address': cmd_address, + 'offset': self.offset, + 'cmd_length': self.address - cmd_address, + 'direction': self.direction, + } + )] + + # Keep track of the data we just decompressed. 
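+        # (This slice is exactly the bytes consumed from the stream, so its
+        # length gives the compressed size of the data just decoded.)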
+ self.compressed_data = self.lz[self.start : self.address] + + + @property + def byte(self): + return self.lz[ self.address ] + + def next(self): + byte = self.byte + self.address += 1 + return byte + + @property + def cmd_name(self): + return self.command_names.get(self.cmd) + + + def get_offset(self): + + if self.byte >= 0x80: # negative + # negative + offset = self.next() & 0x7f + offset = len(self.output) - offset - 1 + else: + # positive + offset = self.next() * 0x100 + offset += self.next() + + self.offset = offset + + + def literal(self): + """ + Copy data directly. + """ + self.output += self.lz[ self.address : self.address + self.length ] + self.address += self.length + + def iterate(self): + """ + Write one byte repeatedly. + """ + self.output += [self.next()] * self.length + + def alternate(self): + """ + Write alternating bytes. + """ + alts = [self.next(), self.next()] + self.output += [ alts[x & 1] for x in xrange(self.length) ] + + def blank(self): + """ + Write zeros. + """ + self.output += [0] * self.length + + def flip(self): + """ + Repeat flipped bytes from output. + + Example: 11100100 -> 00100111 + """ + self._repeat(table=bit_flipped) + + def reverse(self): + """ + Repeat reversed bytes from output. + """ + self._repeat(direction=-1) + + def repeat(self): + """ + Repeat bytes from output. + """ + self._repeat() + + def _repeat(self, direction=1, table=None): + self.get_offset() + self.direction = direction + # Note: appends must be one at a time (this way, repeats can draw from themselves if required) + for i in xrange(self.length): + byte = self.output[ self.offset + i * direction ] + self.output.append( table[byte] if table else byte ) diff --git a/tools/pokemontools/pcm.py b/tools/pokemontools/pcm.py new file mode 100644 index 00000000..428d5730 --- /dev/null +++ b/tools/pokemontools/pcm.py @@ -0,0 +1,156 @@ +# pcm.py +# Converts between .wav files and 1-bit pcm data. (pcm = pulse-code modulation) + +import argparse +import os +import struct +import wave + + +BASE_SAMPLE_RATE = 22050 + +def convert_to_wav(filenames=[]): + """ + Converts a file containing 1-bit pcm data into a .wav file. + """ + for filename in filenames: + with open(filename, 'rb') as pcm_file: + # Generate array of on/off pcm values. + samples = [] + byte = pcm_file.read(1) + while byte != "": + byte = struct.unpack('B', byte)[0] + for i in range(8): + bit_index = 7 - i + value = (byte >> bit_index) & 1 + samples.append(value) + byte = pcm_file.read(1) + + # Write a .wav file using the pcm data. + name, extension = os.path.splitext(filename) + wav_filename = name + '.wav' + wave_file = wave.open(wav_filename, 'w') + wave_file.setframerate(BASE_SAMPLE_RATE) + wave_file.setnchannels(1) + wave_file.setsampwidth(1) + + for value in samples: + if value > 0: + value = 0xff + + packed_value = struct.pack('B', value) + wave_file.writeframesraw(packed_value) + + wave_file.close() + + +def convert_to_pcm(filenames=[]): + """ + Converts a .wav file into 1-bit pcm data. + Samples in the .wav file are simply clamped to on/off. + + This currently works correctly on .wav files with the following attributes: + 1. Sample Width = 1 or 2 bytes (Some wave files use 3 bytes per sample...) + 2. Arbitrary sample sample_rate + 3. 
Mono or Stereo (1 or 2 channels) + """ + for filename in filenames: + samples, average_sample = get_wav_samples(filename) + + # Generate a list of clamped samples + clamped_samples = [] + for sample in samples: + # Clamp the raw sample to on/off + if sample < average_sample: + clamped_samples.append(0) + else: + clamped_samples.append(1) + + # The pcm data must be a multiple of 8, so pad the clamped samples with 0. + while len(clamped_samples) % 8 != 0: + clamped_samples.append(0) + + # Pack the 1-bit samples together. + packed_samples = bytearray() + for i in range(0, len(clamped_samples), 8): + # Read 8 pcm values to pack one byte. + packed_value = 0 + for j in range(8): + packed_value <<= 1 + packed_value += clamped_samples[i + j] + packed_samples.append(packed_value) + + # Open the output .pcm file, and write all 1-bit samples. + name, extension = os.path.splitext(filename) + pcm_filename = name + '.pcm' + with open(pcm_filename, 'wb') as out_file: + out_file.write(packed_samples) + + +def get_wav_samples(filename): + """ + Reads the given .wav file and returns a list of its samples after re-sampling + to BASE_SAMPLE_RATE. + Also returns the average sample amplitude. + """ + wav_file = wave.open(filename, 'r') + sample_width = wav_file.getsampwidth() + sample_count = wav_file.getnframes() + sample_rate = wav_file.getframerate() + num_channels = wav_file.getnchannels() + + samples = bytearray(wav_file.readframes(sample_count)) + + # Unpack the values based on the sample byte width. + unpacked_samples = [] + for i in range(0, len(samples), sample_width): + if sample_width == 1: + fmt = 'B' + elif sample_width == 2: + fmt = 'h' + else: + # todo: support 3-byte sample width + raise (Exception, "Unsupported sample width: " + str(sample_width)) + + value = struct.unpack(fmt, samples[i:i + sample_width])[0] + unpacked_samples.append(value) + + # Only keep the samples from the first audio channel. + unpacked_samples = unpacked_samples[::num_channels] + + # Approximate the BASE_SAMPLE_RATE. + # Also find the average amplitude of the samples. + resampled_samples = [] + total_value = 0 + interval = float(sample_rate) / BASE_SAMPLE_RATE + index = 0 + while index < sample_count: + sample = unpacked_samples[int(index)] + total_value += sample + + resampled_samples.append(sample) + index += interval + + average_sample = float(total_value) / len(resampled_samples) + + return resampled_samples, average_sample + + +def main(): + ap = argparse.ArgumentParser() + ap.add_argument('mode') + ap.add_argument('filenames', nargs='*') + args = ap.parse_args() + + method = { + 'wav': convert_to_wav, + 'pcm': convert_to_pcm, + }.get(args.mode, None) + + if method == None: + raise (Exception, "Unknown conversion method!") + + method(args.filenames) + +if __name__ == "__main__": + main() diff --git a/tools/pokemontools/png.py b/tools/pokemontools/png.py new file mode 100644 index 00000000..db6da128 --- /dev/null +++ b/tools/pokemontools/png.py @@ -0,0 +1,2650 @@ +#!/usr/bin/env python + +from __future__ import print_function + +# png.py - PNG encoder/decoder in pure Python +# +# Copyright (C) 2006 Johann C. Rocholl <johann@browsershots.org> +# Portions Copyright (C) 2009 David Jones <drj@pobox.com> +# And probably portions Copyright (C) 2006 Nicko van Someren <nicko@nicko.org> +# +# Original concept by Johann C. Rocholl. 
+# +# LICENCE (MIT) +# +# Permission is hereby granted, free of charge, to any person +# obtaining a copy of this software and associated documentation files +# (the "Software"), to deal in the Software without restriction, +# including without limitation the rights to use, copy, modify, merge, +# publish, distribute, sublicense, and/or sell copies of the Software, +# and to permit persons to whom the Software is furnished to do so, +# subject to the following conditions: +# +# The above copyright notice and this permission notice shall be +# included in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. + +""" +Pure Python PNG Reader/Writer + +This Python module implements support for PNG images (see PNG +specification at http://www.w3.org/TR/2003/REC-PNG-20031110/ ). It reads +and writes PNG files with all allowable bit depths +(1/2/4/8/16/24/32/48/64 bits per pixel) and colour combinations: +greyscale (1/2/4/8/16 bit); RGB, RGBA, LA (greyscale with alpha) with +8/16 bits per channel; colour mapped images (1/2/4/8 bit). +Adam7 interlacing is supported for reading and +writing. A number of optional chunks can be specified (when writing) +and understood (when reading): ``tRNS``, ``bKGD``, ``gAMA``. + +For help, type ``import png; help(png)`` in your python interpreter. + +A good place to start is the :class:`Reader` and :class:`Writer` +classes. + +Requires Python 2.3. Limited support is available for Python 2.2, but +not everything works. Best with Python 2.4 and higher. Installation is +trivial, but see the ``README.txt`` file (with the source distribution) +for details. + +This file can also be used as a command-line utility to convert +`Netpbm <http://netpbm.sourceforge.net/>`_ PNM files to PNG, and the +reverse conversion from PNG to PNM. The interface is similar to that +of the ``pnmtopng`` program from Netpbm. Type ``python png.py --help`` +at the shell prompt for usage and a list of options. + +A note on spelling and terminology +---------------------------------- + +Generally British English spelling is used in the documentation. So +that's "greyscale" and "colour". This not only matches the author's +native language, it's also used by the PNG specification. + +The major colour models supported by PNG (and hence by PyPNG) are: +greyscale, RGB, greyscale--alpha, RGB--alpha. These are sometimes +referred to using the abbreviations: L, RGB, LA, RGBA. In this case +each letter abbreviates a single channel: *L* is for Luminance or Luma +or Lightness which is the channel used in greyscale images; *R*, *G*, +*B* stand for Red, Green, Blue, the components of a colour image; *A* +stands for Alpha, the opacity channel (used for transparency effects, +but higher values are more opaque, so it makes sense to call it +opacity). + +A note on formats +----------------- + +When getting pixel data out of this module (reading) and presenting +data to this module (writing) there are a number of ways the data could +be represented as a Python value. 
Generally this module uses one of +three formats called "flat row flat pixel", "boxed row flat pixel", and +"boxed row boxed pixel". Basically the concern is whether each pixel +and each row comes in its own little tuple (box), or not. + +Consider an image that is 3 pixels wide by 2 pixels high, and each pixel +has RGB components: + +Boxed row flat pixel:: + + list([R,G,B, R,G,B, R,G,B], + [R,G,B, R,G,B, R,G,B]) + +Each row appears as its own list, but the pixels are flattened so +that three values for one pixel simply follow the three values for +the previous pixel. This is the most common format used, because it +provides a good compromise between space and convenience. PyPNG regards +itself as at liberty to replace any sequence type with any sufficiently +compatible other sequence type; in practice each row is an array (from +the array module), and the outer list is sometimes an iterator rather +than an explicit list (so that streaming is possible). + +Flat row flat pixel:: + + [R,G,B, R,G,B, R,G,B, + R,G,B, R,G,B, R,G,B] + +The entire image is one single giant sequence of colour values. +Generally an array will be used (to save space), not a list. + +Boxed row boxed pixel:: + + list([ (R,G,B), (R,G,B), (R,G,B) ], + [ (R,G,B), (R,G,B), (R,G,B) ]) + +Each row appears in its own list, but each pixel also appears in its own +tuple. A serious memory burn in Python. + +In all cases the top row comes first, and for each row the pixels are +ordered from left-to-right. Within a pixel the values appear in the +order, R-G-B-A (or L-A for greyscale--alpha). + +There is a fourth format, mentioned because it is used internally, +is close to what lies inside a PNG file itself, and has some support +from the public API. This format is called packed. When packed, +each row is a sequence of bytes (integers from 0 to 255), just as +it is before PNG scanline filtering is applied. When the bit depth +is 8 this is essentially the same as boxed row flat pixel; when the +bit depth is less than 8, several pixels are packed into each byte; +when the bit depth is 16 (the only value more than 8 that is supported +by the PNG image format) each pixel value is decomposed into 2 bytes +(and `packed` is a misnomer). This format is used by the +:meth:`Writer.write_packed` method. It isn't usually a convenient +format, but may be just right if the source data for the PNG image +comes from something that uses a similar format (for example, 1-bit +BMPs, or another PNG file). + +And now, my famous members +-------------------------- +""" + +__version__ = "0.0.18" + +import itertools +import math +# http://www.python.org/doc/2.4.4/lib/module-operator.html +import operator +import struct +import sys +# http://www.python.org/doc/2.4.4/lib/module-warnings.html +import warnings +import zlib + +from array import array +from functools import reduce + +try: + # `cpngfilters` is a Cython module: it must be compiled by + # Cython for this import to work. + # If this import does work, then it overrides pure-python + # filtering functions defined later in this file (see `class + # pngfilters`). + import cpngfilters as pngfilters +except ImportError: + pass + + +__all__ = ['Image', 'Reader', 'Writer', 'write_chunks', 'from_array'] + + +# The PNG signature. 
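+# (The eight bytes below are b'\x89PNG\r\n\x1a\n'.)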
+# http://www.w3.org/TR/PNG/#5PNG-file-signature +_signature = struct.pack('8B', 137, 80, 78, 71, 13, 10, 26, 10) + +_adam7 = ((0, 0, 8, 8), + (4, 0, 8, 8), + (0, 4, 4, 8), + (2, 0, 4, 4), + (0, 2, 2, 4), + (1, 0, 2, 2), + (0, 1, 1, 2)) + +def group(s, n): + # See http://www.python.org/doc/2.6/library/functions.html#zip + return list(zip(*[iter(s)]*n)) + +def isarray(x): + return isinstance(x, array) + +def tostring(row): + return row.tostring() + +def interleave_planes(ipixels, apixels, ipsize, apsize): + """ + Interleave (colour) planes, e.g. RGB + A = RGBA. + + Return an array of pixels consisting of the `ipsize` elements of + data from each pixel in `ipixels` followed by the `apsize` elements + of data from each pixel in `apixels`. Conventionally `ipixels` + and `apixels` are byte arrays so the sizes are bytes, but it + actually works with any arrays of the same type. The returned + array is the same type as the input arrays which should be the + same type as each other. + """ + + itotal = len(ipixels) + atotal = len(apixels) + newtotal = itotal + atotal + newpsize = ipsize + apsize + # Set up the output buffer + # See http://www.python.org/doc/2.4.4/lib/module-array.html#l2h-1356 + out = array(ipixels.typecode) + # It's annoying that there is no cheap way to set the array size :-( + out.extend(ipixels) + out.extend(apixels) + # Interleave in the pixel data + for i in range(ipsize): + out[i:newtotal:newpsize] = ipixels[i:itotal:ipsize] + for i in range(apsize): + out[i+ipsize:newtotal:newpsize] = apixels[i:atotal:apsize] + return out + +def check_palette(palette): + """Check a palette argument (to the :class:`Writer` class) + for validity. Returns the palette as a list if okay; raises an + exception otherwise. + """ + + # None is the default and is allowed. + if palette is None: + return None + + p = list(palette) + if not (0 < len(p) <= 256): + raise ValueError("a palette must have between 1 and 256 entries") + seen_triple = False + for i,t in enumerate(p): + if len(t) not in (3,4): + raise ValueError( + "palette entry %d: entries must be 3- or 4-tuples." % i) + if len(t) == 3: + seen_triple = True + if seen_triple and len(t) == 4: + raise ValueError( + "palette entry %d: all 4-tuples must precede all 3-tuples" % i) + for x in t: + if int(x) != x or not(0 <= x <= 255): + raise ValueError( + "palette entry %d: values must be integer: 0 <= x <= 255" % i) + return p + +def check_sizes(size, width, height): + """Check that these arguments, in supplied, are consistent. + Return a (width, height) pair. + """ + + if not size: + return width, height + + if len(size) != 2: + raise ValueError( + "size argument should be a pair (width, height)") + if width is not None and width != size[0]: + raise ValueError( + "size[0] (%r) and width (%r) should match when both are used." + % (size[0], width)) + if height is not None and height != size[1]: + raise ValueError( + "size[1] (%r) and height (%r) should match when both are used." + % (size[1], height)) + return size + +def check_color(c, greyscale, which): + """Checks that a colour argument for transparent or + background options is the right form. Returns the colour + (which, if it's a bar integer, is "corrected" to a 1-tuple). 
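+
+    For example, ``transparent=255`` for a greyscale image is returned as
+    ``(255,)``; for a colour image the value must already be a 3-tuple.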
+ """ + + if c is None: + return c + if greyscale: + try: + len(c) + except TypeError: + c = (c,) + if len(c) != 1: + raise ValueError("%s for greyscale must be 1-tuple" % + which) + if not isinteger(c[0]): + raise ValueError( + "%s colour for greyscale must be integer" % which) + else: + if not (len(c) == 3 and + isinteger(c[0]) and + isinteger(c[1]) and + isinteger(c[2])): + raise ValueError( + "%s colour must be a triple of integers" % which) + return c + +class Error(Exception): + def __str__(self): + return self.__class__.__name__ + ': ' + ' '.join(self.args) + +class FormatError(Error): + """Problem with input file format. In other words, PNG file does + not conform to the specification in some way and is invalid. + """ + +class ChunkError(FormatError): + pass + + +class Writer: + """ + PNG encoder in pure Python. + """ + + def __init__(self, width=None, height=None, + size=None, + greyscale=False, + alpha=False, + bitdepth=8, + palette=None, + transparent=None, + background=None, + gamma=None, + compression=None, + interlace=False, + bytes_per_sample=None, # deprecated + planes=None, + colormap=None, + maxval=None, + chunk_limit=2**20, + x_pixels_per_unit = None, + y_pixels_per_unit = None, + unit_is_meter = False): + """ + Create a PNG encoder object. + + Arguments: + + width, height + Image size in pixels, as two separate arguments. + size + Image size (w,h) in pixels, as single argument. + greyscale + Input data is greyscale, not RGB. + alpha + Input data has alpha channel (RGBA or LA). + bitdepth + Bit depth: from 1 to 16. + palette + Create a palette for a colour mapped image (colour type 3). + transparent + Specify a transparent colour (create a ``tRNS`` chunk). + background + Specify a default background colour (create a ``bKGD`` chunk). + gamma + Specify a gamma value (create a ``gAMA`` chunk). + compression + zlib compression level: 0 (none) to 9 (more compressed); + default: -1 or None. + interlace + Create an interlaced image. + chunk_limit + Write multiple ``IDAT`` chunks to save memory. + x_pixels_per_unit + Number of pixels a unit along the x axis (write a + `pHYs` chunk). + y_pixels_per_unit + Number of pixels a unit along the y axis (write a + `pHYs` chunk). Along with `x_pixel_unit`, this gives + the pixel size ratio. + unit_is_meter + `True` to indicate that the unit (for the `pHYs` + chunk) is metre. + + The image size (in pixels) can be specified either by using the + `width` and `height` arguments, or with the single `size` + argument. If `size` is used it should be a pair (*width*, + *height*). + + `greyscale` and `alpha` are booleans that specify whether + an image is greyscale (or colour), and whether it has an + alpha channel (or not). + + `bitdepth` specifies the bit depth of the source pixel values. + Each source pixel value must be an integer between 0 and + ``2**bitdepth-1``. For example, 8-bit images have values + between 0 and 255. PNG only stores images with bit depths of + 1,2,4,8, or 16. When `bitdepth` is not one of these values, + the next highest valid bit depth is selected, and an ``sBIT`` + (significant bits) chunk is generated that specifies the + original precision of the source image. In this case the + supplied pixel values will be rescaled to fit the range of + the selected bit depth. + + The details of which bit depth / colour model combinations the + PNG file format supports directly, are somewhat arcane + (refer to the PNG specification for full details). 
Briefly: + "small" bit depths (1,2,4) are only allowed with greyscale and + colour mapped images; colour mapped images cannot have bit depth + 16. + + For colour mapped images (in other words, when the `palette` + argument is specified) the `bitdepth` argument must match one of + the valid PNG bit depths: 1, 2, 4, or 8. (It is valid to have a + PNG image with a palette and an ``sBIT`` chunk, but the meaning + is slightly different; it would be awkward to press the + `bitdepth` argument into service for this.) + + The `palette` option, when specified, causes a colour + mapped image to be created: the PNG colour type is set to 3; + `greyscale` must not be set; `alpha` must not be set; + `transparent` must not be set; the bit depth must be 1,2,4, + or 8. When a colour mapped image is created, the pixel values + are palette indexes and the `bitdepth` argument specifies the + size of these indexes (not the size of the colour values in + the palette). + + The palette argument value should be a sequence of 3- or + 4-tuples. 3-tuples specify RGB palette entries; 4-tuples + specify RGBA palette entries. If both 4-tuples and 3-tuples + appear in the sequence then all the 4-tuples must come + before all the 3-tuples. A ``PLTE`` chunk is created; if there + are 4-tuples then a ``tRNS`` chunk is created as well. The + ``PLTE`` chunk will contain all the RGB triples in the same + sequence; the ``tRNS`` chunk will contain the alpha channel for + all the 4-tuples, in the same sequence. Palette entries + are always 8-bit. + + If specified, the `transparent` and `background` parameters must + be a tuple with three integer values for red, green, blue, or + a simple integer (or singleton tuple) for a greyscale image. + + If specified, the `gamma` parameter must be a positive number + (generally, a `float`). A ``gAMA`` chunk will be created. + Note that this will not change the values of the pixels as + they appear in the PNG file, they are assumed to have already + been converted appropriately for the gamma specified. + + The `compression` argument specifies the compression level to + be used by the ``zlib`` module. Values from 1 to 9 specify + compression, with 9 being "more compressed" (usually smaller + and slower, but it doesn't always work out that way). 0 means + no compression. -1 and ``None`` both mean that the default + level of compession will be picked by the ``zlib`` module + (which is generally acceptable). + + If `interlace` is true then an interlaced image is created + (using PNG's so far only interace method, *Adam7*). This does + not affect how the pixels should be presented to the encoder, + rather it changes how they are arranged into the PNG file. + On slow connexions interlaced images can be partially decoded + by the browser to give a rough view of the image that is + successively refined as more image data appears. + + .. note :: + + Enabling the `interlace` option requires the entire image + to be processed in working memory. + + `chunk_limit` is used to limit the amount of memory used whilst + compressing the image. In order to avoid using large amounts of + memory, multiple ``IDAT`` chunks may be created. + """ + + # At the moment the `planes` argument is ignored; + # its purpose is to act as a dummy so that + # ``Writer(x, y, **info)`` works, where `info` is a dictionary + # returned by Reader.read and friends. + # Ditto for `colormap`. 
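+        #
+        # A rough usage sketch of this class (file name illustrative):
+        #
+        #     w = Writer(width=2, height=2, greyscale=True, bitdepth=8)
+        #     with open('tiny.png', 'wb') as f:
+        #         w.write(f, [[0, 255], [255, 0]])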
+ + width, height = check_sizes(size, width, height) + del size + + if width <= 0 or height <= 0: + raise ValueError("width and height must be greater than zero") + if not isinteger(width) or not isinteger(height): + raise ValueError("width and height must be integers") + # http://www.w3.org/TR/PNG/#7Integers-and-byte-order + if width > 2**32-1 or height > 2**32-1: + raise ValueError("width and height cannot exceed 2**32-1") + + if alpha and transparent is not None: + raise ValueError( + "transparent colour not allowed with alpha channel") + + if bytes_per_sample is not None: + warnings.warn('please use bitdepth instead of bytes_per_sample', + DeprecationWarning) + if bytes_per_sample not in (0.125, 0.25, 0.5, 1, 2): + raise ValueError( + "bytes per sample must be .125, .25, .5, 1, or 2") + bitdepth = int(8*bytes_per_sample) + del bytes_per_sample + if not isinteger(bitdepth) or bitdepth < 1 or 16 < bitdepth: + raise ValueError("bitdepth (%r) must be a positive integer <= 16" % + bitdepth) + + self.rescale = None + palette = check_palette(palette) + if palette: + if bitdepth not in (1,2,4,8): + raise ValueError("with palette, bitdepth must be 1, 2, 4, or 8") + if transparent is not None: + raise ValueError("transparent and palette not compatible") + if alpha: + raise ValueError("alpha and palette not compatible") + if greyscale: + raise ValueError("greyscale and palette not compatible") + else: + # No palette, check for sBIT chunk generation. + if alpha or not greyscale: + if bitdepth not in (8,16): + targetbitdepth = (8,16)[bitdepth > 8] + self.rescale = (bitdepth, targetbitdepth) + bitdepth = targetbitdepth + del targetbitdepth + else: + assert greyscale + assert not alpha + if bitdepth not in (1,2,4,8,16): + if bitdepth > 8: + targetbitdepth = 16 + elif bitdepth == 3: + targetbitdepth = 4 + else: + assert bitdepth in (5,6,7) + targetbitdepth = 8 + self.rescale = (bitdepth, targetbitdepth) + bitdepth = targetbitdepth + del targetbitdepth + + if bitdepth < 8 and (alpha or not greyscale and not palette): + raise ValueError( + "bitdepth < 8 only permitted with greyscale or palette") + if bitdepth > 8 and palette: + raise ValueError( + "bit depth must be 8 or less for images with palette") + + transparent = check_color(transparent, greyscale, 'transparent') + background = check_color(background, greyscale, 'background') + + # It's important that the true boolean values (greyscale, alpha, + # colormap, interlace) are converted to bool because Iverson's + # convention is relied upon later on. + self.width = width + self.height = height + self.transparent = transparent + self.background = background + self.gamma = gamma + self.greyscale = bool(greyscale) + self.alpha = bool(alpha) + self.colormap = bool(palette) + self.bitdepth = int(bitdepth) + self.compression = compression + self.chunk_limit = chunk_limit + self.interlace = bool(interlace) + self.palette = palette + self.x_pixels_per_unit = x_pixels_per_unit + self.y_pixels_per_unit = y_pixels_per_unit + self.unit_is_meter = bool(unit_is_meter) + + self.color_type = 4*self.alpha + 2*(not greyscale) + 1*self.colormap + assert self.color_type in (0,2,3,4,6) + + self.color_planes = (3,1)[self.greyscale or self.colormap] + self.planes = self.color_planes + self.alpha + # :todo: fix for bitdepth < 8 + self.psize = (self.bitdepth/8) * self.planes + + def make_palette(self): + """Create the byte sequences for a ``PLTE`` and if necessary a + ``tRNS`` chunk. Returned as a pair (*p*, *t*). *t* will be + ``None`` if no ``tRNS`` chunk is necessary. 
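+
+        For example, a palette of two 3-tuples yields a 6-byte *p* with
+        ``None`` for *t*; any 4-tuples contribute their alpha values to *t*,
+        one byte per entry.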
+ """ + + p = array('B') + t = array('B') + + for x in self.palette: + p.extend(x[0:3]) + if len(x) > 3: + t.append(x[3]) + p = tostring(p) + t = tostring(t) + if t: + return p,t + return p,None + + def write(self, outfile, rows): + """Write a PNG image to the output file. `rows` should be + an iterable that yields each row in boxed row flat pixel + format. The rows should be the rows of the original image, + so there should be ``self.height`` rows of ``self.width * + self.planes`` values. If `interlace` is specified (when + creating the instance), then an interlaced PNG file will + be written. Supply the rows in the normal image order; + the interlacing is carried out internally. + + .. note :: + + Interlacing will require the entire image to be in working + memory. + """ + + if self.interlace: + fmt = 'BH'[self.bitdepth > 8] + a = array(fmt, itertools.chain(*rows)) + return self.write_array(outfile, a) + + nrows = self.write_passes(outfile, rows) + if nrows != self.height: + raise ValueError( + "rows supplied (%d) does not match height (%d)" % + (nrows, self.height)) + + def write_passes(self, outfile, rows, packed=False): + """ + Write a PNG image to the output file. + + Most users are expected to find the :meth:`write` or + :meth:`write_array` method more convenient. + + The rows should be given to this method in the order that + they appear in the output file. For straightlaced images, + this is the usual top to bottom ordering, but for interlaced + images the rows should have already been interlaced before + passing them to this function. + + `rows` should be an iterable that yields each row. When + `packed` is ``False`` the rows should be in boxed row flat pixel + format; when `packed` is ``True`` each row should be a packed + sequence of bytes. + """ + + # http://www.w3.org/TR/PNG/#5PNG-file-signature + outfile.write(_signature) + + # http://www.w3.org/TR/PNG/#11IHDR + write_chunk(outfile, b'IHDR', + struct.pack("!2I5B", self.width, self.height, + self.bitdepth, self.color_type, + 0, 0, self.interlace)) + + # See :chunk:order + # http://www.w3.org/TR/PNG/#11gAMA + if self.gamma is not None: + write_chunk(outfile, b'gAMA', + struct.pack("!L", int(round(self.gamma*1e5)))) + + # See :chunk:order + # http://www.w3.org/TR/PNG/#11sBIT + if self.rescale: + write_chunk(outfile, b'sBIT', + struct.pack('%dB' % self.planes, + *[self.rescale[0]]*self.planes)) + + # :chunk:order: Without a palette (PLTE chunk), ordering is + # relatively relaxed. With one, gAMA chunk must precede PLTE + # chunk which must precede tRNS and bKGD. + # See http://www.w3.org/TR/PNG/#5ChunkOrdering + if self.palette: + p,t = self.make_palette() + write_chunk(outfile, b'PLTE', p) + if t: + # tRNS chunk is optional. Only needed if palette entries + # have alpha. 
+ write_chunk(outfile, b'tRNS', t) + + # http://www.w3.org/TR/PNG/#11tRNS + if self.transparent is not None: + if self.greyscale: + write_chunk(outfile, b'tRNS', + struct.pack("!1H", *self.transparent)) + else: + write_chunk(outfile, b'tRNS', + struct.pack("!3H", *self.transparent)) + + # http://www.w3.org/TR/PNG/#11bKGD + if self.background is not None: + if self.greyscale: + write_chunk(outfile, b'bKGD', + struct.pack("!1H", *self.background)) + else: + write_chunk(outfile, b'bKGD', + struct.pack("!3H", *self.background)) + + # http://www.w3.org/TR/PNG/#11pHYs + if self.x_pixels_per_unit is not None and self.y_pixels_per_unit is not None: + tup = (self.x_pixels_per_unit, self.y_pixels_per_unit, int(self.unit_is_meter)) + write_chunk(outfile, b'pHYs', struct.pack("!LLB",*tup)) + + # http://www.w3.org/TR/PNG/#11IDAT + if self.compression is not None: + compressor = zlib.compressobj(self.compression) + else: + compressor = zlib.compressobj() + + # Choose an extend function based on the bitdepth. The extend + # function packs/decomposes the pixel values into bytes and + # stuffs them onto the data array. + data = array('B') + if self.bitdepth == 8 or packed: + extend = data.extend + elif self.bitdepth == 16: + # Decompose into bytes + def extend(sl): + fmt = '!%dH' % len(sl) + data.extend(array('B', struct.pack(fmt, *sl))) + else: + # Pack into bytes + assert self.bitdepth < 8 + # samples per byte + spb = int(8/self.bitdepth) + def extend(sl): + a = array('B', sl) + # Adding padding bytes so we can group into a whole + # number of spb-tuples. + l = float(len(a)) + extra = math.ceil(l / float(spb))*spb - l + a.extend([0]*int(extra)) + # Pack into bytes + l = group(a, spb) + l = [reduce(lambda x,y: + (x << self.bitdepth) + y, e) for e in l] + data.extend(l) + if self.rescale: + oldextend = extend + factor = \ + float(2**self.rescale[1]-1) / float(2**self.rescale[0]-1) + def extend(sl): + oldextend([int(round(factor*x)) for x in sl]) + + # Build the first row, testing mostly to see if we need to + # changed the extend function to cope with NumPy integer types + # (they cause our ordinary definition of extend to fail, so we + # wrap it). See + # http://code.google.com/p/pypng/issues/detail?id=44 + enumrows = enumerate(rows) + del rows + + # First row's filter type. + data.append(0) + # :todo: Certain exceptions in the call to ``.next()`` or the + # following try would indicate no row data supplied. + # Should catch. + i,row = next(enumrows) + try: + # If this fails... + extend(row) + except: + # ... try a version that converts the values to int first. + # Not only does this work for the (slightly broken) NumPy + # types, there are probably lots of other, unknown, "nearly" + # int types it works for. + def wrapmapint(f): + return lambda sl: f([int(x) for x in sl]) + extend = wrapmapint(extend) + del wrapmapint + extend(row) + + for i,row in enumrows: + # Add "None" filter type. Currently, it's essential that + # this filter type be used for every scanline as we do not + # mark the first row of a reduced pass image; that means we + # could accidentally compute the wrong filtered scanline if + # we used "up", "average", or "paeth" on such a line. + data.append(0) + extend(row) + if len(data) > self.chunk_limit: + compressed = compressor.compress(tostring(data)) + if len(compressed): + write_chunk(outfile, b'IDAT', compressed) + # Because of our very witty definition of ``extend``, + # above, we must re-use the same ``data`` object. 
Hence + # we use ``del`` to empty this one, rather than create a + # fresh one (which would be my natural FP instinct). + del data[:] + if len(data): + compressed = compressor.compress(tostring(data)) + else: + compressed = b'' + flushed = compressor.flush() + if len(compressed) or len(flushed): + write_chunk(outfile, b'IDAT', compressed + flushed) + # http://www.w3.org/TR/PNG/#11IEND + write_chunk(outfile, b'IEND') + return i+1 + + def write_array(self, outfile, pixels): + """ + Write an array in flat row flat pixel format as a PNG file on + the output file. See also :meth:`write` method. + """ + + if self.interlace: + self.write_passes(outfile, self.array_scanlines_interlace(pixels)) + else: + self.write_passes(outfile, self.array_scanlines(pixels)) + + def write_packed(self, outfile, rows): + """ + Write PNG file to `outfile`. The pixel data comes from `rows` + which should be in boxed row packed format. Each row should be + a sequence of packed bytes. + + Technically, this method does work for interlaced images but it + is best avoided. For interlaced images, the rows should be + presented in the order that they appear in the file. + + This method should not be used when the source image bit depth + is not one naturally supported by PNG; the bit depth should be + 1, 2, 4, 8, or 16. + """ + + if self.rescale: + raise Error("write_packed method not suitable for bit depth %d" % + self.rescale[0]) + return self.write_passes(outfile, rows, packed=True) + + def convert_pnm(self, infile, outfile): + """ + Convert a PNM file containing raw pixel data into a PNG file + with the parameters set in the writer object. Works for + (binary) PGM, PPM, and PAM formats. + """ + + if self.interlace: + pixels = array('B') + pixels.fromfile(infile, + (self.bitdepth/8) * self.color_planes * + self.width * self.height) + self.write_passes(outfile, self.array_scanlines_interlace(pixels)) + else: + self.write_passes(outfile, self.file_scanlines(infile)) + + def convert_ppm_and_pgm(self, ppmfile, pgmfile, outfile): + """ + Convert a PPM and PGM file containing raw pixel data into a + PNG outfile with the parameters set in the writer object. + """ + pixels = array('B') + pixels.fromfile(ppmfile, + (self.bitdepth/8) * self.color_planes * + self.width * self.height) + apixels = array('B') + apixels.fromfile(pgmfile, + (self.bitdepth/8) * + self.width * self.height) + pixels = interleave_planes(pixels, apixels, + (self.bitdepth/8) * self.color_planes, + (self.bitdepth/8)) + if self.interlace: + self.write_passes(outfile, self.array_scanlines_interlace(pixels)) + else: + self.write_passes(outfile, self.array_scanlines(pixels)) + + def file_scanlines(self, infile): + """ + Generates boxed rows in flat pixel format, from the input file + `infile`. It assumes that the input file is in a "Netpbm-like" + binary format, and is positioned at the beginning of the first + pixel. The number of pixels to read is taken from the image + dimensions (`width`, `height`, `planes`) and the number of bytes + per value is implied by the image `bitdepth`. 
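+
+        For example, an 8-bit RGB image 4 pixels wide reads 12 bytes per row;
+        at bit depth 16 the same row occupies 24 bytes and is unpacked as
+        big-endian 16-bit values.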
+ """ + + # Values per row + vpr = self.width * self.planes + row_bytes = vpr + if self.bitdepth > 8: + assert self.bitdepth == 16 + row_bytes *= 2 + fmt = '>%dH' % vpr + def line(): + return array('H', struct.unpack(fmt, infile.read(row_bytes))) + else: + def line(): + scanline = array('B', infile.read(row_bytes)) + return scanline + for y in range(self.height): + yield line() + + def array_scanlines(self, pixels): + """ + Generates boxed rows (flat pixels) from flat rows (flat pixels) + in an array. + """ + + # Values per row + vpr = self.width * self.planes + stop = 0 + for y in range(self.height): + start = stop + stop = start + vpr + yield pixels[start:stop] + + def array_scanlines_interlace(self, pixels): + """ + Generator for interlaced scanlines from an array. `pixels` is + the full source image in flat row flat pixel format. The + generator yields each scanline of the reduced passes in turn, in + boxed row flat pixel format. + """ + + # http://www.w3.org/TR/PNG/#8InterlaceMethods + # Array type. + fmt = 'BH'[self.bitdepth > 8] + # Value per row + vpr = self.width * self.planes + for xstart, ystart, xstep, ystep in _adam7: + if xstart >= self.width: + continue + # Pixels per row (of reduced image) + ppr = int(math.ceil((self.width-xstart)/float(xstep))) + # number of values in reduced image row. + row_len = ppr*self.planes + for y in range(ystart, self.height, ystep): + if xstep == 1: + offset = y * vpr + yield pixels[offset:offset+vpr] + else: + row = array(fmt) + # There's no easier way to set the length of an array + row.extend(pixels[0:row_len]) + offset = y * vpr + xstart * self.planes + end_offset = (y+1) * vpr + skip = self.planes * xstep + for i in range(self.planes): + row[i::self.planes] = \ + pixels[offset+i:end_offset:skip] + yield row + +def write_chunk(outfile, tag, data=b''): + """ + Write a PNG chunk to the output file, including length and + checksum. + """ + + # http://www.w3.org/TR/PNG/#5Chunk-layout + outfile.write(struct.pack("!I", len(data))) + outfile.write(tag) + outfile.write(data) + checksum = zlib.crc32(tag) + checksum = zlib.crc32(data, checksum) + checksum &= 2**32-1 + outfile.write(struct.pack("!I", checksum)) + +def write_chunks(out, chunks): + """Create a PNG file by writing out the chunks.""" + + out.write(_signature) + for chunk in chunks: + write_chunk(out, *chunk) + +def filter_scanline(type, line, fo, prev=None): + """Apply a scanline filter to a scanline. `type` specifies the + filter type (0 to 4); `line` specifies the current (unfiltered) + scanline as a sequence of bytes; `prev` specifies the previous + (unfiltered) scanline as a sequence of bytes. `fo` specifies the + filter offset; normally this is size of a pixel in bytes (the number + of bytes per sample times the number of channels), but when this is + < 1 (for bit depths < 8) then the filter offset is 1. + """ + + assert 0 <= type < 5 + + # The output array. Which, pathetically, we extend one-byte at a + # time (fortunately this is linear). 
+ out = array('B', [type]) + + def sub(): + ai = -fo + for x in line: + if ai >= 0: + x = (x - line[ai]) & 0xff + out.append(x) + ai += 1 + def up(): + for i,x in enumerate(line): + x = (x - prev[i]) & 0xff + out.append(x) + def average(): + ai = -fo + for i,x in enumerate(line): + if ai >= 0: + x = (x - ((line[ai] + prev[i]) >> 1)) & 0xff + else: + x = (x - (prev[i] >> 1)) & 0xff + out.append(x) + ai += 1 + def paeth(): + # http://www.w3.org/TR/PNG/#9Filter-type-4-Paeth + ai = -fo # also used for ci + for i,x in enumerate(line): + a = 0 + b = prev[i] + c = 0 + + if ai >= 0: + a = line[ai] + c = prev[ai] + p = a + b - c + pa = abs(p - a) + pb = abs(p - b) + pc = abs(p - c) + if pa <= pb and pa <= pc: + Pr = a + elif pb <= pc: + Pr = b + else: + Pr = c + + x = (x - Pr) & 0xff + out.append(x) + ai += 1 + + if not prev: + # We're on the first line. Some of the filters can be reduced + # to simpler cases which makes handling the line "off the top" + # of the image simpler. "up" becomes "none"; "paeth" becomes + # "left" (non-trivial, but true). "average" needs to be handled + # specially. + if type == 2: # "up" + type = 0 + elif type == 3: + prev = [0]*len(line) + elif type == 4: # "paeth" + type = 1 + if type == 0: + out.extend(line) + elif type == 1: + sub() + elif type == 2: + up() + elif type == 3: + average() + else: # type == 4 + paeth() + return out + + +def from_array(a, mode=None, info={}): + """Create a PNG :class:`Image` object from a 2- or 3-dimensional + array. One application of this function is easy PIL-style saving: + ``png.from_array(pixels, 'L').save('foo.png')``. + + Unless they are specified using the *info* parameter, the PNG's + height and width are taken from the array size. For a 3 dimensional + array the first axis is the height; the second axis is the width; + and the third axis is the channel number. Thus an RGB image that is + 16 pixels high and 8 wide will use an array that is 16x8x3. For 2 + dimensional arrays the first axis is the height, but the second axis + is ``width*channels``, so an RGB image that is 16 pixels high and 8 + wide will use a 2-dimensional array that is 16x24 (each row will be + 8*3 = 24 sample values). + + *mode* is a string that specifies the image colour format in a + PIL-style mode. It can be: + + ``'L'`` + greyscale (1 channel) + ``'LA'`` + greyscale with alpha (2 channel) + ``'RGB'`` + colour image (3 channel) + ``'RGBA'`` + colour image with alpha (4 channel) + + The mode string can also specify the bit depth (overriding how this + function normally derives the bit depth, see below). Appending + ``';16'`` to the mode will cause the PNG to be 16 bits per channel; + any decimal from 1 to 16 can be used to specify the bit depth. + + When a 2-dimensional array is used *mode* determines how many + channels the image has, and so allows the width to be derived from + the second array dimension. + + The array is expected to be a ``numpy`` array, but it can be any + suitable Python sequence. For example, a list of lists can be used: + ``png.from_array([[0, 255, 0], [255, 0, 255]], 'L')``. The exact + rules are: ``len(a)`` gives the first dimension, height; + ``len(a[0])`` gives the second dimension; ``len(a[0][0])`` gives the + third dimension, unless an exception is raised in which case a + 2-dimensional array is assumed. It's slightly more complicated than + that because an iterator of rows can be used, and it all still + works. Using an iterator allows data to be streamed efficiently. 
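+
+    A 3-dimensional example:
+    ``png.from_array([[[0,0,0], [255,255,255]], [[255,0,0], [0,0,255]]], 'RGB')``
+    describes a 2 pixel by 2 pixel RGB image.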
+ + The bit depth of the PNG is normally taken from the array element's + datatype (but if *mode* specifies a bitdepth then that is used + instead). The array element's datatype is determined in a way which + is supposed to work both for ``numpy`` arrays and for Python + ``array.array`` objects. A 1 byte datatype will give a bit depth of + 8, a 2 byte datatype will give a bit depth of 16. If the datatype + does not have an implicit size, for example it is a plain Python + list of lists, as above, then a default of 8 is used. + + The *info* parameter is a dictionary that can be used to specify + metadata (in the same style as the arguments to the + :class:`png.Writer` class). For this function the keys that are + useful are: + + height + overrides the height derived from the array dimensions and allows + *a* to be an iterable. + width + overrides the width derived from the array dimensions. + bitdepth + overrides the bit depth derived from the element datatype (but + must match *mode* if that also specifies a bit depth). + + Generally anything specified in the + *info* dictionary will override any implicit choices that this + function would otherwise make, but must match any explicit ones. + For example, if the *info* dictionary has a ``greyscale`` key then + this must be true when mode is ``'L'`` or ``'LA'`` and false when + mode is ``'RGB'`` or ``'RGBA'``. + """ + + # We abuse the *info* parameter by modifying it. Take a copy here. + # (Also typechecks *info* to some extent). + info = dict(info) + + # Syntax check mode string. + bitdepth = None + try: + # Assign the 'L' or 'RGBA' part to `gotmode`. + if mode.startswith('L'): + gotmode = 'L' + mode = mode[1:] + elif mode.startswith('RGB'): + gotmode = 'RGB' + mode = mode[3:] + else: + raise Error() + if mode.startswith('A'): + gotmode += 'A' + mode = mode[1:] + + # Skip any optional ';' + while mode.startswith(';'): + mode = mode[1:] + + # Parse optional bitdepth + if mode: + try: + bitdepth = int(mode) + except (TypeError, ValueError): + raise Error() + except Error: + raise Error("mode string should be 'RGB' or 'L;16' or similar.") + mode = gotmode + + # Get bitdepth from *mode* if possible. + if bitdepth: + if info.get('bitdepth') and bitdepth != info['bitdepth']: + raise Error("mode bitdepth (%d) should match info bitdepth (%d)." % + (bitdepth, info['bitdepth'])) + info['bitdepth'] = bitdepth + + # Fill in and/or check entries in *info*. + # Dimensions. + if 'size' in info: + # Check width, height, size all match where used. + for dimension,axis in [('width', 0), ('height', 1)]: + if dimension in info: + if info[dimension] != info['size'][axis]: + raise Error( + "info[%r] should match info['size'][%r]." % + (dimension, axis)) + info['width'],info['height'] = info['size'] + if 'height' not in info: + try: + l = len(a) + except TypeError: + raise Error( + "len(a) does not work, supply info['height'] instead.") + info['height'] = l + # Colour format. + if 'greyscale' in info: + if bool(info['greyscale']) != ('L' in mode): + raise Error("info['greyscale'] should match mode.") + info['greyscale'] = 'L' in mode + if 'alpha' in info: + if bool(info['alpha']) != ('A' in mode): + raise Error("info['alpha'] should match mode.") + info['alpha'] = 'A' in mode + + planes = len(mode) + if 'planes' in info: + if info['planes'] != planes: + raise Error("info['planes'] should match mode.") + + # In order to work out whether we the array is 2D or 3D we need its + # first row, which requires that we take a copy of its iterator. 
+ # We may also need the first row to derive width and bitdepth. + a,t = itertools.tee(a) + row = next(t) + del t + try: + row[0][0] + threed = True + testelement = row[0] + except (IndexError, TypeError): + threed = False + testelement = row + if 'width' not in info: + if threed: + width = len(row) + else: + width = len(row) // planes + info['width'] = width + + if threed: + # Flatten the threed rows + a = (itertools.chain.from_iterable(x) for x in a) + + if 'bitdepth' not in info: + try: + dtype = testelement.dtype + # goto the "else:" clause. Sorry. + except AttributeError: + try: + # Try a Python array.array. + bitdepth = 8 * testelement.itemsize + except AttributeError: + # We can't determine it from the array element's + # datatype, use a default of 8. + bitdepth = 8 + else: + # If we got here without exception, we now assume that + # the array is a numpy array. + if dtype.kind == 'b': + bitdepth = 1 + else: + bitdepth = 8 * dtype.itemsize + info['bitdepth'] = bitdepth + + for thing in 'width height bitdepth greyscale alpha'.split(): + assert thing in info + return Image(a, info) + +# So that refugee's from PIL feel more at home. Not documented. +fromarray = from_array + +class Image: + """A PNG image. You can create an :class:`Image` object from + an array of pixels by calling :meth:`png.from_array`. It can be + saved to disk with the :meth:`save` method. + """ + + def __init__(self, rows, info): + """ + .. note :: + + The constructor is not public. Please do not call it. + """ + + self.rows = rows + self.info = info + + def save(self, file): + """Save the image to *file*. If *file* looks like an open file + descriptor then it is used, otherwise it is treated as a + filename and a fresh file is opened. + + In general, you can only call this method once; after it has + been called the first time and the PNG image has been saved, the + source data will have been streamed, and cannot be streamed + again. + """ + + w = Writer(**self.info) + + try: + file.write + def close(): pass + except AttributeError: + file = open(file, 'wb') + def close(): file.close() + + try: + w.write(file, self.rows) + finally: + close() + +class _readable: + """ + A simple file-like interface for strings and arrays. + """ + + def __init__(self, buf): + self.buf = buf + self.offset = 0 + + def read(self, n): + r = self.buf[self.offset:self.offset+n] + if isarray(r): + r = r.tostring() + self.offset += n + return r + +try: + str(b'dummy', 'ascii') +except TypeError: + as_str = str +else: + def as_str(x): + return str(x, 'ascii') + +class Reader: + """ + PNG decoder in pure Python. + """ + + def __init__(self, _guess=None, **kw): + """ + Create a PNG decoder object. + + The constructor expects exactly one keyword argument. If you + supply a positional argument instead, it will guess the input + type. You can choose among the following keyword arguments: + + filename + Name of input file (a PNG file). + file + A file-like object (object with a read() method). + bytes + ``array`` or ``string`` with PNG data. + + """ + if ((_guess is not None and len(kw) != 0) or + (_guess is None and len(kw) != 1)): + raise TypeError("Reader() takes exactly 1 argument") + + # Will be the first 8 bytes, later on. See validate_signature. + self.signature = None + self.transparent = None + # A pair of (len,type) if a chunk has been read but its data and + # checksum have not (in other words the file position is just + # past the 4 bytes that specify the chunk type). See preamble + # method for how this is used. 
+ self.atchunk = None + + if _guess is not None: + if isarray(_guess): + kw["bytes"] = _guess + elif isinstance(_guess, str): + kw["filename"] = _guess + elif hasattr(_guess, 'read'): + kw["file"] = _guess + + if "filename" in kw: + self.file = open(kw["filename"], "rb") + elif "file" in kw: + self.file = kw["file"] + elif "bytes" in kw: + self.file = _readable(kw["bytes"]) + else: + raise TypeError("expecting filename, file or bytes array") + + + def chunk(self, seek=None, lenient=False): + """ + Read the next PNG chunk from the input file; returns a + (*type*, *data*) tuple. *type* is the chunk's type as a + byte string (all PNG chunk types are 4 bytes long). + *data* is the chunk's data content, as a byte string. + + If the optional `seek` argument is + specified then it will keep reading chunks until it either runs + out of file or finds the type specified by the argument. Note + that in general the order of chunks in PNGs is unspecified, so + using `seek` can cause you to miss chunks. + + If the optional `lenient` argument evaluates to `True`, + checksum failures will raise warnings rather than exceptions. + """ + + self.validate_signature() + + while True: + # http://www.w3.org/TR/PNG/#5Chunk-layout + if not self.atchunk: + self.atchunk = self.chunklentype() + length, type = self.atchunk + self.atchunk = None + data = self.file.read(length) + if len(data) != length: + raise ChunkError('Chunk %s too short for required %i octets.' + % (type, length)) + checksum = self.file.read(4) + if len(checksum) != 4: + raise ChunkError('Chunk %s too short for checksum.' % type) + if seek and type != seek: + continue + verify = zlib.crc32(type) + verify = zlib.crc32(data, verify) + # Whether the output from zlib.crc32 is signed or not varies + # according to hideous implementation details, see + # http://bugs.python.org/issue1202 . + # We coerce it to be positive here (in a way which works on + # Python 2.3 and older). + verify &= 2**32 - 1 + verify = struct.pack('!I', verify) + if checksum != verify: + (a, ) = struct.unpack('!I', checksum) + (b, ) = struct.unpack('!I', verify) + message = "Checksum error in %s chunk: 0x%08X != 0x%08X." % (type, a, b) + if lenient: + warnings.warn(message, RuntimeWarning) + else: + raise ChunkError(message) + return type, data + + def chunks(self): + """Return an iterator that will yield each chunk as a + (*chunktype*, *content*) pair. + """ + + while True: + t,v = self.chunk() + yield t,v + if t == b'IEND': + break + + def undo_filter(self, filter_type, scanline, previous): + """Undo the filter for a scanline. `scanline` is a sequence of + bytes that does not include the initial filter type byte. + `previous` is decoded previous scanline (for straightlaced + images this is the previous pixel row, but for interlaced + images, it is the previous scanline in the reduced image, which + in general is not the previous pixel row in the final image). + When there is no previous scanline (the first row of a + straightlaced image, or the first row in one of the passes in an + interlaced image), then this argument should be ``None``. + + The scanline will have the effects of filtering removed, and the + result will be returned as a fresh sequence of bytes. + """ + + # :todo: Would it be better to update scanline in place? 
+ # Yes, with the Cython extension making the undo_filter fast, + # updating scanline inplace makes the code 3 times faster + # (reading 50 images of 800x800 went from 40s to 16s) + result = scanline + + if filter_type == 0: + return result + + if filter_type not in (1,2,3,4): + raise FormatError('Invalid PNG Filter Type.' + ' See http://www.w3.org/TR/2003/REC-PNG-20031110/#9Filters .') + + # Filter unit. The stride from one pixel to the corresponding + # byte from the previous pixel. Normally this is the pixel + # size in bytes, but when this is smaller than 1, the previous + # byte is used instead. + fu = max(1, self.psize) + + # For the first line of a pass, synthesize a dummy previous + # line. An alternative approach would be to observe that on the + # first line 'up' is the same as 'null', 'paeth' is the same + # as 'sub', with only 'average' requiring any special case. + if not previous: + previous = array('B', [0]*len(scanline)) + + def sub(): + """Undo sub filter.""" + + ai = 0 + # Loop starts at index fu. Observe that the initial part + # of the result is already filled in correctly with + # scanline. + for i in range(fu, len(result)): + x = scanline[i] + a = result[ai] + result[i] = (x + a) & 0xff + ai += 1 + + def up(): + """Undo up filter.""" + + for i in range(len(result)): + x = scanline[i] + b = previous[i] + result[i] = (x + b) & 0xff + + def average(): + """Undo average filter.""" + + ai = -fu + for i in range(len(result)): + x = scanline[i] + if ai < 0: + a = 0 + else: + a = result[ai] + b = previous[i] + result[i] = (x + ((a + b) >> 1)) & 0xff + ai += 1 + + def paeth(): + """Undo Paeth filter.""" + + # Also used for ci. + ai = -fu + for i in range(len(result)): + x = scanline[i] + if ai < 0: + a = c = 0 + else: + a = result[ai] + c = previous[ai] + b = previous[i] + p = a + b - c + pa = abs(p - a) + pb = abs(p - b) + pc = abs(p - c) + if pa <= pb and pa <= pc: + pr = a + elif pb <= pc: + pr = b + else: + pr = c + result[i] = (x + pr) & 0xff + ai += 1 + + # Call appropriate filter algorithm. Note that 0 has already + # been dealt with. + (None, + pngfilters.undo_filter_sub, + pngfilters.undo_filter_up, + pngfilters.undo_filter_average, + pngfilters.undo_filter_paeth)[filter_type](fu, scanline, previous, result) + return result + + def deinterlace(self, raw): + """ + Read raw pixel data, undo filters, deinterlace, and flatten. + Return in flat row flat pixel format. + """ + + # Values per row (of the target image) + vpr = self.width * self.planes + + # Make a result array, and make it big enough. Interleaving + # writes to the output array randomly (well, not quite), so the + # entire output array must be in memory. + fmt = 'BH'[self.bitdepth > 8] + a = array(fmt, [0]*vpr*self.height) + source_offset = 0 + + for xstart, ystart, xstep, ystep in _adam7: + if xstart >= self.width: + continue + # The previous (reconstructed) scanline. None at the + # beginning of a pass to indicate that there is no previous + # line. + recon = None + # Pixels per row (reduced pass image) + ppr = int(math.ceil((self.width-xstart)/float(xstep))) + # Row size in bytes for this pass. 
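+                # (Worked example, not original text: a 4-bit greyscale pass
+                # that is 10 pixels wide has psize = 0.5, so
+                # row_size = ceil(0.5 * 10) = 5 bytes.)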
+ row_size = int(math.ceil(self.psize * ppr)) + for y in range(ystart, self.height, ystep): + filter_type = raw[source_offset] + source_offset += 1 + scanline = raw[source_offset:source_offset+row_size] + source_offset += row_size + recon = self.undo_filter(filter_type, scanline, recon) + # Convert so that there is one element per pixel value + flat = self.serialtoflat(recon, ppr) + if xstep == 1: + assert xstart == 0 + offset = y * vpr + a[offset:offset+vpr] = flat + else: + offset = y * vpr + xstart * self.planes + end_offset = (y+1) * vpr + skip = self.planes * xstep + for i in range(self.planes): + a[offset+i:end_offset:skip] = \ + flat[i::self.planes] + return a + + def iterboxed(self, rows): + """Iterator that yields each scanline in boxed row flat pixel + format. `rows` should be an iterator that yields the bytes of + each row in turn. + """ + + def asvalues(raw): + """Convert a row of raw bytes into a flat row. Result will + be a freshly allocated object, not shared with + argument. + """ + + if self.bitdepth == 8: + return array('B', raw) + if self.bitdepth == 16: + raw = tostring(raw) + return array('H', struct.unpack('!%dH' % (len(raw)//2), raw)) + assert self.bitdepth < 8 + width = self.width + # Samples per byte + spb = 8//self.bitdepth + out = array('B') + mask = 2**self.bitdepth - 1 + shifts = [self.bitdepth * i + for i in reversed(list(range(spb)))] + for o in raw: + out.extend([mask&(o>>i) for i in shifts]) + return out[:width] + + return map(asvalues, rows) + + def serialtoflat(self, bytes, width=None): + """Convert serial format (byte stream) pixel data to flat row + flat pixel. + """ + + if self.bitdepth == 8: + return bytes + if self.bitdepth == 16: + bytes = tostring(bytes) + return array('H', + struct.unpack('!%dH' % (len(bytes)//2), bytes)) + assert self.bitdepth < 8 + if width is None: + width = self.width + # Samples per byte + spb = 8//self.bitdepth + out = array('B') + mask = 2**self.bitdepth - 1 + shifts = list(map(self.bitdepth.__mul__, reversed(list(range(spb))))) + l = width + for o in bytes: + out.extend([(mask&(o>>s)) for s in shifts][:l]) + l -= spb + if l <= 0: + l = width + return out + + def iterstraight(self, raw): + """Iterator that undoes the effect of filtering, and yields + each row in serialised format (as a sequence of bytes). + Assumes input is straightlaced. `raw` should be an iterable + that yields the raw bytes in chunks of arbitrary size. + """ + + # length of row, in bytes + rb = self.row_bytes + a = array('B') + # The previous (reconstructed) scanline. None indicates first + # line of image. + recon = None + for some in raw: + a.extend(some) + while len(a) >= rb + 1: + filter_type = a[0] + scanline = a[1:rb+1] + del a[:rb+1] + recon = self.undo_filter(filter_type, scanline, recon) + yield recon + if len(a) != 0: + # :file:format We get here with a file format error: + # when the available bytes (after decompressing) do not + # pack into exact rows. + raise FormatError( + 'Wrong size for decompressed IDAT chunk.') + assert len(a) == 0 + + def validate_signature(self): + """If signature (header) has not been read then read and + validate it; otherwise do nothing. + """ + + if self.signature: + return + self.signature = self.file.read(8) + if self.signature != _signature: + raise FormatError("PNG file has invalid signature.") + + def preamble(self, lenient=False): + """ + Extract the image metadata by reading the initial part of + the PNG file up to the start of the ``IDAT`` chunk. 
All the + chunks that precede the ``IDAT`` chunk are read and either + processed for metadata or discarded. + + If the optional `lenient` argument evaluates to `True`, checksum + failures will raise warnings rather than exceptions. + """ + + self.validate_signature() + + while True: + if not self.atchunk: + self.atchunk = self.chunklentype() + if self.atchunk is None: + raise FormatError( + 'This PNG file has no IDAT chunks.') + if self.atchunk[1] == b'IDAT': + return + self.process_chunk(lenient=lenient) + + def chunklentype(self): + """Reads just enough of the input to determine the next + chunk's length and type, returned as a (*length*, *type*) pair + where *type* is a string. If there are no more chunks, ``None`` + is returned. + """ + + x = self.file.read(8) + if not x: + return None + if len(x) != 8: + raise FormatError( + 'End of file whilst reading chunk length and type.') + length,type = struct.unpack('!I4s', x) + if length > 2**31-1: + raise FormatError('Chunk %s is too large: %d.' % (type,length)) + return length,type + + def process_chunk(self, lenient=False): + """Process the next chunk and its data. This only processes the + following chunk types, all others are ignored: ``IHDR``, + ``PLTE``, ``bKGD``, ``tRNS``, ``gAMA``, ``sBIT``, ``pHYs``. + + If the optional `lenient` argument evaluates to `True`, + checksum failures will raise warnings rather than exceptions. + """ + + type, data = self.chunk(lenient=lenient) + method = '_process_' + as_str(type) + m = getattr(self, method, None) + if m: + m(data) + + def _process_IHDR(self, data): + # http://www.w3.org/TR/PNG/#11IHDR + if len(data) != 13: + raise FormatError('IHDR chunk has incorrect length.') + (self.width, self.height, self.bitdepth, self.color_type, + self.compression, self.filter, + self.interlace) = struct.unpack("!2I5B", data) + + check_bitdepth_colortype(self.bitdepth, self.color_type) + + if self.compression != 0: + raise Error("unknown compression method %d" % self.compression) + if self.filter != 0: + raise FormatError("Unknown filter method %d," + " see http://www.w3.org/TR/2003/REC-PNG-20031110/#9Filters ." + % self.filter) + if self.interlace not in (0,1): + raise FormatError("Unknown interlace method %d," + " see http://www.w3.org/TR/2003/REC-PNG-20031110/#8InterlaceMethods ." + % self.interlace) + + # Derived values + # http://www.w3.org/TR/PNG/#6Colour-values + colormap = bool(self.color_type & 1) + greyscale = not (self.color_type & 2) + alpha = bool(self.color_type & 4) + color_planes = (3,1)[greyscale or colormap] + planes = color_planes + alpha + + self.colormap = colormap + self.greyscale = greyscale + self.alpha = alpha + self.color_planes = color_planes + self.planes = planes + self.psize = float(self.bitdepth)/float(8) * planes + if int(self.psize) == self.psize: + self.psize = int(self.psize) + self.row_bytes = int(math.ceil(self.width * self.psize)) + # Stores PLTE chunk if present, and is used to check + # chunk ordering constraints. + self.plte = None + # Stores tRNS chunk if present, and is used to check chunk + # ordering constraints. + self.trns = None + # Stores sbit chunk if present. 
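+        # (Illustrative note, not original text: an RGB565 source, for
+        # example, is stored as 8-bit RGB with an sBIT chunk of 5, 6, 5;
+        # asDirect() later rescales using the largest of those values.)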
+ self.sbit = None + + def _process_PLTE(self, data): + # http://www.w3.org/TR/PNG/#11PLTE + if self.plte: + warnings.warn("Multiple PLTE chunks present.") + self.plte = data + if len(data) % 3 != 0: + raise FormatError( + "PLTE chunk's length should be a multiple of 3.") + if len(data) > (2**self.bitdepth)*3: + raise FormatError("PLTE chunk is too long.") + if len(data) == 0: + raise FormatError("Empty PLTE is not allowed.") + + def _process_bKGD(self, data): + try: + if self.colormap: + if not self.plte: + warnings.warn( + "PLTE chunk is required before bKGD chunk.") + self.background = struct.unpack('B', data) + else: + self.background = struct.unpack("!%dH" % self.color_planes, + data) + except struct.error: + raise FormatError("bKGD chunk has incorrect length.") + + def _process_tRNS(self, data): + # http://www.w3.org/TR/PNG/#11tRNS + self.trns = data + if self.colormap: + if not self.plte: + warnings.warn("PLTE chunk is required before tRNS chunk.") + else: + if len(data) > len(self.plte)/3: + # Was warning, but promoted to Error as it + # would otherwise cause pain later on. + raise FormatError("tRNS chunk is too long.") + else: + if self.alpha: + raise FormatError( + "tRNS chunk is not valid with colour type %d." % + self.color_type) + try: + self.transparent = \ + struct.unpack("!%dH" % self.color_planes, data) + except struct.error: + raise FormatError("tRNS chunk has incorrect length.") + + def _process_gAMA(self, data): + try: + self.gamma = struct.unpack("!L", data)[0] / 100000.0 + except struct.error: + raise FormatError("gAMA chunk has incorrect length.") + + def _process_sBIT(self, data): + self.sbit = data + if (self.colormap and len(data) != 3 or + not self.colormap and len(data) != self.planes): + raise FormatError("sBIT chunk has incorrect length.") + + def _process_pHYs(self, data): + # http://www.w3.org/TR/PNG/#11pHYs + self.phys = data + fmt = "!LLB" + if len(data) != struct.calcsize(fmt): + raise FormatError("pHYs chunk has incorrect length.") + self.x_pixels_per_unit, self.y_pixels_per_unit, unit = struct.unpack(fmt,data) + self.unit_is_meter = bool(unit) + + def read(self, lenient=False): + """ + Read the PNG file and decode it. Returns (`width`, `height`, + `pixels`, `metadata`). + + May use excessive memory. + + `pixels` are returned in boxed row flat pixel format. + + If the optional `lenient` argument evaluates to True, + checksum failures will raise warnings rather than exceptions. + """ + + def iteridat(): + """Iterator that yields all the ``IDAT`` chunks as strings.""" + while True: + try: + type, data = self.chunk(lenient=lenient) + except ValueError as e: + raise ChunkError(e.args[0]) + if type == b'IEND': + # http://www.w3.org/TR/PNG/#11IEND + break + if type != b'IDAT': + continue + # type == b'IDAT' + # http://www.w3.org/TR/PNG/#11IDAT + if self.colormap and not self.plte: + warnings.warn("PLTE chunk is required before IDAT chunk") + yield data + + def iterdecomp(idat): + """Iterator that yields decompressed strings. `idat` should + be an iterator that yields the ``IDAT`` chunk data. + """ + + # Currently, with no max_length parameter to decompress, + # this routine will do one yield per IDAT chunk: Not very + # incremental. + d = zlib.decompressobj() + # Each IDAT chunk is passed to the decompressor, then any + # remaining state is decompressed out. + for data in idat: + # :todo: add a max_length argument here to limit output + # size. 
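+                # (Sketch of what that would involve, not original code:
+                # zlib's decompressobj accepts d.decompress(data, max_length),
+                # and any unconsumed input is then left in d.unconsumed_tail,
+                # which would have to be fed back in on a later call.)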
+ yield array('B', d.decompress(data)) + yield array('B', d.flush()) + + self.preamble(lenient=lenient) + raw = iterdecomp(iteridat()) + + if self.interlace: + raw = array('B', itertools.chain(*raw)) + arraycode = 'BH'[self.bitdepth>8] + # Like :meth:`group` but producing an array.array object for + # each row. + pixels = map(lambda *row: array(arraycode, row), + *[iter(self.deinterlace(raw))]*self.width*self.planes) + else: + pixels = self.iterboxed(self.iterstraight(raw)) + meta = dict() + for attr in 'greyscale alpha planes bitdepth interlace'.split(): + meta[attr] = getattr(self, attr) + meta['size'] = (self.width, self.height) + for attr in 'gamma transparent background'.split(): + a = getattr(self, attr, None) + if a is not None: + meta[attr] = a + if self.plte: + meta['palette'] = self.palette() + return self.width, self.height, pixels, meta + + + def read_flat(self): + """ + Read a PNG file and decode it into flat row flat pixel format. + Returns (*width*, *height*, *pixels*, *metadata*). + + May use excessive memory. + + `pixels` are returned in flat row flat pixel format. + + See also the :meth:`read` method which returns pixels in the + more stream-friendly boxed row flat pixel format. + """ + + x, y, pixel, meta = self.read() + arraycode = 'BH'[meta['bitdepth']>8] + pixel = array(arraycode, itertools.chain(*pixel)) + return x, y, pixel, meta + + def palette(self, alpha='natural'): + """Returns a palette that is a sequence of 3-tuples or 4-tuples, + synthesizing it from the ``PLTE`` and ``tRNS`` chunks. These + chunks should have already been processed (for example, by + calling the :meth:`preamble` method). All the tuples are the + same size: 3-tuples if there is no ``tRNS`` chunk, 4-tuples when + there is a ``tRNS`` chunk. Assumes that the image is colour type + 3 and therefore a ``PLTE`` chunk is required. + + If the `alpha` argument is ``'force'`` then an alpha channel is + always added, forcing the result to be a sequence of 4-tuples. + """ + + if not self.plte: + raise FormatError( + "Required PLTE chunk is missing in colour type 3 image.") + plte = group(array('B', self.plte), 3) + if self.trns or alpha == 'force': + trns = array('B', self.trns or '') + trns.extend([255]*(len(plte)-len(trns))) + plte = list(map(operator.add, plte, group(trns, 1))) + return plte + + def asDirect(self): + """Returns the image data as a direct representation of an + ``x * y * planes`` array. This method is intended to remove the + need for callers to deal with palettes and transparency + themselves. Images with a palette (colour type 3) + are converted to RGB or RGBA; images with transparency (a + ``tRNS`` chunk) are converted to LA or RGBA as appropriate. + When returned in this format the pixel values represent the + colour value directly without needing to refer to palettes or + transparency information. + + Like the :meth:`read` method this method returns a 4-tuple: + + (*width*, *height*, *pixels*, *meta*) + + This method normally returns pixel values with the bit depth + they have in the source image, but when the source PNG has an + ``sBIT`` chunk it is inspected and can reduce the bit depth of + the result pixels; pixel values will be reduced according to + the bit depth specified in the ``sBIT`` chunk (PNG nerds should + note a single result bit depth is used for all channels; the + maximum of the ones specified in the ``sBIT`` chunk. An RGB565 + image will be rescaled to 6-bit RGB666). 
+ + The *meta* dictionary that is returned reflects the `direct` + format and not the original source image. For example, an RGB + source image with a ``tRNS`` chunk to represent a transparent + colour, will have ``planes=3`` and ``alpha=False`` for the + source image, but the *meta* dictionary returned by this method + will have ``planes=4`` and ``alpha=True`` because an alpha + channel is synthesized and added. + + *pixels* is the pixel data in boxed row flat pixel format (just + like the :meth:`read` method). + + All the other aspects of the image data are not changed. + """ + + self.preamble() + + # Simple case, no conversion necessary. + if not self.colormap and not self.trns and not self.sbit: + return self.read() + + x,y,pixels,meta = self.read() + + if self.colormap: + meta['colormap'] = False + meta['alpha'] = bool(self.trns) + meta['bitdepth'] = 8 + meta['planes'] = 3 + bool(self.trns) + plte = self.palette() + def iterpal(pixels): + for row in pixels: + row = [plte[x] for x in row] + yield array('B', itertools.chain(*row)) + pixels = iterpal(pixels) + elif self.trns: + # It would be nice if there was some reasonable way + # of doing this without generating a whole load of + # intermediate tuples. But tuples does seem like the + # easiest way, with no other way clearly much simpler or + # much faster. (Actually, the L to LA conversion could + # perhaps go faster (all those 1-tuples!), but I still + # wonder whether the code proliferation is worth it) + it = self.transparent + maxval = 2**meta['bitdepth']-1 + planes = meta['planes'] + meta['alpha'] = True + meta['planes'] += 1 + typecode = 'BH'[meta['bitdepth']>8] + def itertrns(pixels): + for row in pixels: + # For each row we group it into pixels, then form a + # characterisation vector that says whether each + # pixel is opaque or not. Then we convert + # True/False to 0/maxval (by multiplication), + # and add it as the extra channel. + row = group(row, planes) + opa = map(it.__ne__, row) + opa = map(maxval.__mul__, opa) + opa = list(zip(opa)) # convert to 1-tuples + yield array(typecode, + itertools.chain(*map(operator.add, row, opa))) + pixels = itertrns(pixels) + targetbitdepth = None + if self.sbit: + sbit = struct.unpack('%dB' % len(self.sbit), self.sbit) + targetbitdepth = max(sbit) + if targetbitdepth > meta['bitdepth']: + raise Error('sBIT chunk %r exceeds bitdepth %d' % + (sbit,self.bitdepth)) + if min(sbit) <= 0: + raise Error('sBIT chunk %r has a 0-entry' % sbit) + if targetbitdepth == meta['bitdepth']: + targetbitdepth = None + if targetbitdepth: + shift = meta['bitdepth'] - targetbitdepth + meta['bitdepth'] = targetbitdepth + def itershift(pixels): + for row in pixels: + yield [p >> shift for p in row] + pixels = itershift(pixels) + return x,y,pixels,meta + + def asFloat(self, maxval=1.0): + """Return image pixels as per :meth:`asDirect` method, but scale + all pixel values to be floating point values between 0.0 and + *maxval*. 
+ """ + + x,y,pixels,info = self.asDirect() + sourcemaxval = 2**info['bitdepth']-1 + del info['bitdepth'] + info['maxval'] = float(maxval) + factor = float(maxval)/float(sourcemaxval) + def iterfloat(): + for row in pixels: + yield [factor * p for p in row] + return x,y,iterfloat(),info + + def _as_rescale(self, get, targetbitdepth): + """Helper used by :meth:`asRGB8` and :meth:`asRGBA8`.""" + + width,height,pixels,meta = get() + maxval = 2**meta['bitdepth'] - 1 + targetmaxval = 2**targetbitdepth - 1 + factor = float(targetmaxval) / float(maxval) + meta['bitdepth'] = targetbitdepth + def iterscale(): + for row in pixels: + yield [int(round(x*factor)) for x in row] + if maxval == targetmaxval: + return width, height, pixels, meta + else: + return width, height, iterscale(), meta + + def asRGB8(self): + """Return the image data as an RGB pixels with 8-bits per + sample. This is like the :meth:`asRGB` method except that + this method additionally rescales the values so that they + are all between 0 and 255 (8-bit). In the case where the + source image has a bit depth < 8 the transformation preserves + all the information; where the source image has bit depth + > 8, then rescaling to 8-bit values loses precision. No + dithering is performed. Like :meth:`asRGB`, an alpha channel + in the source image will raise an exception. + + This function returns a 4-tuple: + (*width*, *height*, *pixels*, *metadata*). + *width*, *height*, *metadata* are as per the + :meth:`read` method. + + *pixels* is the pixel data in boxed row flat pixel format. + """ + + return self._as_rescale(self.asRGB, 8) + + def asRGBA8(self): + """Return the image data as RGBA pixels with 8-bits per + sample. This method is similar to :meth:`asRGB8` and + :meth:`asRGBA`: The result pixels have an alpha channel, *and* + values are rescaled to the range 0 to 255. The alpha channel is + synthesized if necessary (with a small speed penalty). + """ + + return self._as_rescale(self.asRGBA, 8) + + def asRGB(self): + """Return image as RGB pixels. RGB colour images are passed + through unchanged; greyscales are expanded into RGB + triplets (there is a small speed overhead for doing this). + + An alpha channel in the source image will raise an + exception. + + The return values are as for the :meth:`read` method + except that the *metadata* reflect the returned pixels, not the + source image. In particular, for this method + ``metadata['greyscale']`` will be ``False``. + """ + + width,height,pixels,meta = self.asDirect() + if meta['alpha']: + raise Error("will not convert image with alpha channel to RGB") + if not meta['greyscale']: + return width,height,pixels,meta + meta['greyscale'] = False + typecode = 'BH'[meta['bitdepth'] > 8] + def iterrgb(): + for row in pixels: + a = array(typecode, [0]) * 3 * width + for i in range(3): + a[i::3] = row + yield a + return width,height,iterrgb(),meta + + def asRGBA(self): + """Return image as RGBA pixels. Greyscales are expanded into + RGB triplets; an alpha channel is synthesized if necessary. + The return values are as for the :meth:`read` method + except that the *metadata* reflect the returned pixels, not the + source image. In particular, for this method + ``metadata['greyscale']`` will be ``False``, and + ``metadata['alpha']`` will be ``True``. 
+ """ + + width,height,pixels,meta = self.asDirect() + if meta['alpha'] and not meta['greyscale']: + return width,height,pixels,meta + typecode = 'BH'[meta['bitdepth'] > 8] + maxval = 2**meta['bitdepth'] - 1 + maxbuffer = struct.pack('=' + typecode, maxval) * 4 * width + def newarray(): + return array(typecode, maxbuffer) + + if meta['alpha'] and meta['greyscale']: + # LA to RGBA + def convert(): + for row in pixels: + # Create a fresh target row, then copy L channel + # into first three target channels, and A channel + # into fourth channel. + a = newarray() + pngfilters.convert_la_to_rgba(row, a) + yield a + elif meta['greyscale']: + # L to RGBA + def convert(): + for row in pixels: + a = newarray() + pngfilters.convert_l_to_rgba(row, a) + yield a + else: + assert not meta['alpha'] and not meta['greyscale'] + # RGB to RGBA + def convert(): + for row in pixels: + a = newarray() + pngfilters.convert_rgb_to_rgba(row, a) + yield a + meta['alpha'] = True + meta['greyscale'] = False + return width,height,convert(),meta + +def check_bitdepth_colortype(bitdepth, colortype): + """Check that `bitdepth` and `colortype` are both valid, + and specified in a valid combination. Returns if valid, + raise an Exception if not valid. + """ + + if bitdepth not in (1,2,4,8,16): + raise FormatError("invalid bit depth %d" % bitdepth) + if colortype not in (0,2,3,4,6): + raise FormatError("invalid colour type %d" % colortype) + # Check indexed (palettized) images have 8 or fewer bits + # per pixel; check only indexed or greyscale images have + # fewer than 8 bits per pixel. + if colortype & 1 and bitdepth > 8: + raise FormatError( + "Indexed images (colour type %d) cannot" + " have bitdepth > 8 (bit depth %d)." + " See http://www.w3.org/TR/2003/REC-PNG-20031110/#table111 ." + % (bitdepth, colortype)) + if bitdepth < 8 and colortype not in (0,3): + raise FormatError("Illegal combination of bit depth (%d)" + " and colour type (%d)." + " See http://www.w3.org/TR/2003/REC-PNG-20031110/#table111 ." + % (bitdepth, colortype)) + +def isinteger(x): + try: + return int(x) == x + except (TypeError, ValueError): + return False + + +# === Support for users without Cython === + +try: + pngfilters +except NameError: + class pngfilters(object): + def undo_filter_sub(filter_unit, scanline, previous, result): + """Undo sub filter.""" + + ai = 0 + # Loops starts at index fu. Observe that the initial part + # of the result is already filled in correctly with + # scanline. + for i in range(filter_unit, len(result)): + x = scanline[i] + a = result[ai] + result[i] = (x + a) & 0xff + ai += 1 + undo_filter_sub = staticmethod(undo_filter_sub) + + def undo_filter_up(filter_unit, scanline, previous, result): + """Undo up filter.""" + + for i in range(len(result)): + x = scanline[i] + b = previous[i] + result[i] = (x + b) & 0xff + undo_filter_up = staticmethod(undo_filter_up) + + def undo_filter_average(filter_unit, scanline, previous, result): + """Undo up filter.""" + + ai = -filter_unit + for i in range(len(result)): + x = scanline[i] + if ai < 0: + a = 0 + else: + a = result[ai] + b = previous[i] + result[i] = (x + ((a + b) >> 1)) & 0xff + ai += 1 + undo_filter_average = staticmethod(undo_filter_average) + + def undo_filter_paeth(filter_unit, scanline, previous, result): + """Undo Paeth filter.""" + + # Also used for ci. 
+ ai = -filter_unit + for i in range(len(result)): + x = scanline[i] + if ai < 0: + a = c = 0 + else: + a = result[ai] + c = previous[ai] + b = previous[i] + p = a + b - c + pa = abs(p - a) + pb = abs(p - b) + pc = abs(p - c) + if pa <= pb and pa <= pc: + pr = a + elif pb <= pc: + pr = b + else: + pr = c + result[i] = (x + pr) & 0xff + ai += 1 + undo_filter_paeth = staticmethod(undo_filter_paeth) + + def convert_la_to_rgba(row, result): + for i in range(3): + result[i::4] = row[0::2] + result[3::4] = row[1::2] + convert_la_to_rgba = staticmethod(convert_la_to_rgba) + + def convert_l_to_rgba(row, result): + """Convert a grayscale image to RGBA. This method assumes + the alpha channel in result is already correctly + initialized. + """ + for i in range(3): + result[i::4] = row + convert_l_to_rgba = staticmethod(convert_l_to_rgba) + + def convert_rgb_to_rgba(row, result): + """Convert an RGB image to RGBA. This method assumes the + alpha channel in result is already correctly initialized. + """ + for i in range(3): + result[i::4] = row[i::3] + convert_rgb_to_rgba = staticmethod(convert_rgb_to_rgba) + + +# === Command Line Support === + +def read_pam_header(infile): + """ + Read (the rest of a) PAM header. `infile` should be positioned + immediately after the initial 'P7' line (at the beginning of the + second line). Returns are as for `read_pnm_header`. + """ + + # Unlike PBM, PGM, and PPM, we can read the header a line at a time. + header = dict() + while True: + l = infile.readline().strip() + if l == b'ENDHDR': + break + if not l: + raise EOFError('PAM ended prematurely') + if l[0] == b'#': + continue + l = l.split(None, 1) + if l[0] not in header: + header[l[0]] = l[1] + else: + header[l[0]] += b' ' + l[1] + + required = [b'WIDTH', b'HEIGHT', b'DEPTH', b'MAXVAL'] + WIDTH,HEIGHT,DEPTH,MAXVAL = required + present = [x for x in required if x in header] + if len(present) != len(required): + raise Error('PAM file must specify WIDTH, HEIGHT, DEPTH, and MAXVAL') + width = int(header[WIDTH]) + height = int(header[HEIGHT]) + depth = int(header[DEPTH]) + maxval = int(header[MAXVAL]) + if (width <= 0 or + height <= 0 or + depth <= 0 or + maxval <= 0): + raise Error( + 'WIDTH, HEIGHT, DEPTH, MAXVAL must all be positive integers') + return 'P7', width, height, depth, maxval + +def read_pnm_header(infile, supported=(b'P5', b'P6')): + """ + Read a PNM header, returning (format,width,height,depth,maxval). + `width` and `height` are in pixels. `depth` is the number of + channels in the image; for PBM and PGM it is synthesized as 1, for + PPM as 3; for PAM images it is read from the header. `maxval` is + synthesized (as 1) for PBM images. + """ + + # Generally, see http://netpbm.sourceforge.net/doc/ppm.html + # and http://netpbm.sourceforge.net/doc/pam.html + + # Technically 'P7' must be followed by a newline, so by using + # rstrip() we are being liberal in what we accept. I think this + # is acceptable. + type = infile.read(3).rstrip() + if type not in supported: + raise NotImplementedError('file format %s not supported' % type) + if type == b'P7': + # PAM header parsing is completely different. + return read_pam_header(infile) + # Expected number of tokens in header (3 for P4, 4 for P6) + expected = 4 + pbm = (b'P1', b'P4') + if type in pbm: + expected = 3 + header = [type] + + # We have to read the rest of the header byte by byte because the + # final whitespace character (immediately following the MAXVAL in + # the case of P6) may not be a newline. 
Of course all PNM files in + # the wild use a newline at this point, so it's tempting to use + # readline; but it would be wrong. + def getc(): + c = infile.read(1) + if not c: + raise Error('premature EOF reading PNM header') + return c + + c = getc() + while True: + # Skip whitespace that precedes a token. + while c.isspace(): + c = getc() + # Skip comments. + while c == '#': + while c not in b'\n\r': + c = getc() + if not c.isdigit(): + raise Error('unexpected character %s found in header' % c) + # According to the specification it is legal to have comments + # that appear in the middle of a token. + # This is bonkers; I've never seen it; and it's a bit awkward to + # code good lexers in Python (no goto). So we break on such + # cases. + token = b'' + while c.isdigit(): + token += c + c = getc() + # Slight hack. All "tokens" are decimal integers, so convert + # them here. + header.append(int(token)) + if len(header) == expected: + break + # Skip comments (again) + while c == '#': + while c not in '\n\r': + c = getc() + if not c.isspace(): + raise Error('expected header to end with whitespace, not %s' % c) + + if type in pbm: + # synthesize a MAXVAL + header.append(1) + depth = (1,3)[type == b'P6'] + return header[0], header[1], header[2], depth, header[3] + +def write_pnm(file, width, height, pixels, meta): + """Write a Netpbm PNM/PAM file. + """ + + bitdepth = meta['bitdepth'] + maxval = 2**bitdepth - 1 + # Rudely, the number of image planes can be used to determine + # whether we are L (PGM), LA (PAM), RGB (PPM), or RGBA (PAM). + planes = meta['planes'] + # Can be an assert as long as we assume that pixels and meta came + # from a PNG file. + assert planes in (1,2,3,4) + if planes in (1,3): + if 1 == planes: + # PGM + # Could generate PBM if maxval is 1, but we don't (for one + # thing, we'd have to convert the data, not just blat it + # out). + fmt = 'P5' + else: + # PPM + fmt = 'P6' + header = '%s %d %d %d\n' % (fmt, width, height, maxval) + if planes in (2,4): + # PAM + # See http://netpbm.sourceforge.net/doc/pam.html + if 2 == planes: + tupltype = 'GRAYSCALE_ALPHA' + else: + tupltype = 'RGB_ALPHA' + header = ('P7\nWIDTH %d\nHEIGHT %d\nDEPTH %d\nMAXVAL %d\n' + 'TUPLTYPE %s\nENDHDR\n' % + (width, height, planes, maxval, tupltype)) + file.write(header.encode('ascii')) + # Values per row + vpr = planes * width + # struct format + fmt = '>%d' % vpr + if maxval > 0xff: + fmt = fmt + 'H' + else: + fmt = fmt + 'B' + for row in pixels: + file.write(struct.pack(fmt, *row)) + file.flush() + +def color_triple(color): + """ + Convert a command line colour value to a RGB triple of integers. + FIXME: Somewhere we need support for greyscale backgrounds etc. + """ + if color.startswith('#') and len(color) == 4: + return (int(color[1], 16), + int(color[2], 16), + int(color[3], 16)) + if color.startswith('#') and len(color) == 7: + return (int(color[1:3], 16), + int(color[3:5], 16), + int(color[5:7], 16)) + elif color.startswith('#') and len(color) == 13: + return (int(color[1:5], 16), + int(color[5:9], 16), + int(color[9:13], 16)) + +def _add_common_options(parser): + """Call *parser.add_option* for each of the options that are + common between this PNG--PNM conversion tool and the gen + tool. 
+ """ + parser.add_option("-i", "--interlace", + default=False, action="store_true", + help="create an interlaced PNG file (Adam7)") + parser.add_option("-t", "--transparent", + action="store", type="string", metavar="#RRGGBB", + help="mark the specified colour as transparent") + parser.add_option("-b", "--background", + action="store", type="string", metavar="#RRGGBB", + help="save the specified background colour") + parser.add_option("-g", "--gamma", + action="store", type="float", metavar="value", + help="save the specified gamma value") + parser.add_option("-c", "--compression", + action="store", type="int", metavar="level", + help="zlib compression level (0-9)") + return parser + +def _main(argv): + """ + Run the PNG encoder with options from the command line. + """ + + # Parse command line arguments + from optparse import OptionParser + version = '%prog ' + __version__ + parser = OptionParser(version=version) + parser.set_usage("%prog [options] [imagefile]") + parser.add_option('-r', '--read-png', default=False, + action='store_true', + help='Read PNG, write PNM') + parser.add_option("-a", "--alpha", + action="store", type="string", metavar="pgmfile", + help="alpha channel transparency (RGBA)") + _add_common_options(parser) + + (options, args) = parser.parse_args(args=argv[1:]) + + # Convert options + if options.transparent is not None: + options.transparent = color_triple(options.transparent) + if options.background is not None: + options.background = color_triple(options.background) + + # Prepare input and output files + if len(args) == 0: + infilename = '-' + infile = sys.stdin + elif len(args) == 1: + infilename = args[0] + infile = open(infilename, 'rb') + else: + parser.error("more than one input file") + outfile = sys.stdout + if sys.platform == "win32": + import msvcrt, os + msvcrt.setmode(sys.stdout.fileno(), os.O_BINARY) + + if options.read_png: + # Encode PNG to PPM + png = Reader(file=infile) + width,height,pixels,meta = png.asDirect() + write_pnm(outfile, width, height, pixels, meta) + else: + # Encode PNM to PNG + format, width, height, depth, maxval = \ + read_pnm_header(infile, (b'P5',b'P6',b'P7')) + # When it comes to the variety of input formats, we do something + # rather rude. Observe that L, LA, RGB, RGBA are the 4 colour + # types supported by PNG and that they correspond to 1, 2, 3, 4 + # channels respectively. So we use the number of channels in + # the source image to determine which one we have. We do not + # care about TUPLTYPE. 
+ greyscale = depth <= 2 + pamalpha = depth in (2,4) + supported = [2**x-1 for x in range(1,17)] + try: + mi = supported.index(maxval) + except ValueError: + raise NotImplementedError( + 'your maxval (%s) not in supported list %s' % + (maxval, str(supported))) + bitdepth = mi+1 + writer = Writer(width, height, + greyscale=greyscale, + bitdepth=bitdepth, + interlace=options.interlace, + transparent=options.transparent, + background=options.background, + alpha=bool(pamalpha or options.alpha), + gamma=options.gamma, + compression=options.compression) + if options.alpha: + pgmfile = open(options.alpha, 'rb') + format, awidth, aheight, adepth, amaxval = \ + read_pnm_header(pgmfile, 'P5') + if amaxval != '255': + raise NotImplementedError( + 'maxval %s not supported for alpha channel' % amaxval) + if (awidth, aheight) != (width, height): + raise ValueError("alpha channel image size mismatch" + " (%s has %sx%s but %s has %sx%s)" + % (infilename, width, height, + options.alpha, awidth, aheight)) + writer.convert_ppm_and_pgm(infile, pgmfile, outfile) + else: + writer.convert_pnm(infile, outfile) + + +if __name__ == '__main__': + try: + _main(sys.argv) + except Error as e: + print(e, file=sys.stderr) diff --git a/tools/scan_includes.c b/tools/scan_includes.c new file mode 100644 index 00000000..63af3bcf --- /dev/null +++ b/tools/scan_includes.c @@ -0,0 +1,135 @@ +#include <stdio.h> +#include <stdlib.h> +#include <string.h> +#include <stdbool.h> +#include <getopt.h> + +void usage(void) { + printf("Usage: scan_includes [-h] [-s] filename\n" + "-h, --help\n" + " Print usage and exit\n" + "-s, --strict\n" + " Fail if a file cannot be read\n"); +} + +struct Options { + bool help; + bool strict; +}; + +struct Options Options = {0}; + +void scan_file(char* filename) { + FILE *f = fopen(filename, "rb"); + if (!f) { + if (Options.strict) { + fprintf(stderr, "Could not open file: '%s'\n", filename); + exit(1); + } else { + return; + } + } + + fseek(f, 0, SEEK_END); + long size = ftell(f); + rewind(f); + + char *buffer = malloc(size + 1); + char *orig = buffer; + size = fread(buffer, 1, size, f); + buffer[size] = '\0'; + fclose(f); + + for (; buffer && (buffer - orig < size); buffer++) { + bool is_include = false; + bool is_incbin = false; + switch (*buffer) { + case ';': + buffer = strchr(buffer, '\n'); + if (!buffer) { + fprintf(stderr, "%s: no newline at end of file\n", filename); + break; + } + break; + + case '"': + buffer++; + buffer = strchr(buffer, '"'); + if (!buffer) { + fprintf(stderr, "%s: unterminated string\n", filename); + break; + } + buffer++; + break; + + case 'i': + case 'I': + if ((strncmp(buffer, "INCBIN", 6) == 0) || (strncmp(buffer, "incbin", 6) == 0)) { + is_incbin = true; + } else if ((strncmp(buffer, "INCLUDE", 7) == 0) || (strncmp(buffer, "include", 7) == 0)) { + is_include = true; + } + if (is_incbin || is_include) { + buffer = strchr(buffer, '"'); + if (!buffer) { + break; + } + buffer++; + int length = strcspn(buffer, "\""); + char *include = malloc(length + 1); + strncpy(include, buffer, length); + include[length] = '\0'; + printf("%s ", include); + if (is_include) { + scan_file(include); + } + free(include); + buffer = strchr(buffer, '"'); + } + break; + + } + if (!buffer) { + break; + } + + } + + free(orig); +} + +int main(int argc, char* argv[]) { + int i = 0; + struct option long_options[] = { + {"strict", no_argument, 0, 's'}, + {"help", no_argument, 0, 'h'}, + {0} + }; + int opt = -1; + while ((opt = getopt_long(argc, argv, "sh", long_options, &i)) != -1) { + switch (opt) { 
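+		/* Usage sketch, not part of the original tool source:
+		 * `scan_includes file.asm` prints every INCLUDE/INCBIN path it finds,
+		 * recursing into INCLUDEd files; with -s/--strict an unreadable file
+		 * is a fatal error instead of being silently skipped. */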
+ case 's': + Options.strict = true; + break; + case 'h': + Options.help = true; + break; + default: + usage(); + exit(1); + break; + } + } + argc -= optind; + argv += optind; + if (Options.help) { + usage(); + return 0; + } + if (argc < 1) { + usage(); + exit(1); + } + scan_file(argv[0]); + return 0; +} |