Rename rac.Reader to rac.Parser
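
The low-level type that walks a RAC index is renamed from rac.Reader to
rac.Parser; its fields and behavior are unchanged, and lib/raczlib is only
touched to update the type of its racReader field. A minimal, hypothetical
usage sketch of the renamed API (assuming the usual bytes/io imports
alongside lib/rac; the helper name parseAll is illustrative, not part of
the library), mirroring the loop in the updated example_test.go:

	// parseAll collects every chunk in a RAC file held in memory.
	func parseAll(encoded []byte) ([]rac.Chunk, error) {
		p := &rac.Parser{
			ReadSeeker:     bytes.NewReader(encoded),
			CompressedSize: int64(len(encoded)),
		}
		chunks := []rac.Chunk(nil)
		for {
			// NextChunk returns io.EOF after the last (non-empty) chunk.
			c, err := p.NextChunk()
			if err == io.EOF {
				return chunks, nil
			} else if err != nil {
				return nil, err
			}
			chunks = append(chunks, c)
		}
	}
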
diff --git a/lib/rac/example_test.go b/lib/rac/example_test.go
index 7689ce2..d78b5d2 100644
--- a/lib/rac/example_test.go
+++ b/lib/rac/example_test.go
@@ -161,13 +161,13 @@
 	// original "One sheep.\nTwo sheep\.Three sheep.\n" source.
 
 	fmt.Printf("Decoded:\n")
-	r := &rac.Reader{
+	p := &rac.Parser{
 		ReadSeeker:     bytes.NewReader(encoded),
 		CompressedSize: int64(len(encoded)),
 	}
 	zr := io.ReadCloser(nil)
 	for {
-		chunk, err := r.NextChunk()
+		chunk, err := p.NextChunk()
 		if err == io.EOF {
 			break
 		} else if err != nil {
diff --git a/lib/rac/parser.go b/lib/rac/parser.go
index 4765322..25f3cea 100644
--- a/lib/rac/parser.go
+++ b/lib/rac/parser.go
@@ -61,7 +61,7 @@
 func (r *Range) Empty() bool { return r[0] == r[1] }
 func (r *Range) Size() int64 { return r[1] - r[0] }
 
-// Chunk is a compressed chunk returned by a Reader.
+// Chunk is a compressed chunk returned by a Parser.
 //
 // See the RAC specification for further discussion.
 type Chunk struct {
@@ -80,29 +80,29 @@
 	return (16 * int(arity)) + 16
 }
 
-// rNode is the Reader's representation of a node.
+// pNode is the Parser's representation of a node.
 //
 // None of its methods, other than valid, should be called unless valid returns
 // true.
-type rNode [4096]byte
+type pNode [4096]byte
 
-func (b *rNode) arity() int     { return int(b[3]) }
-func (b *rNode) codec() Codec   { return Codec(b[(8*int(b[3]))+7]) }
-func (b *rNode) cPtrMax() int64 { return u48LE(b[(16*int(b[3]))+8:]) }
-func (b *rNode) dPtrMax() int64 { return u48LE(b[8*int(b[3]):]) }
-func (b *rNode) version() uint8 { return b[(16*int(b[3]))+14] }
+func (b *pNode) arity() int     { return int(b[3]) }
+func (b *pNode) codec() Codec   { return Codec(b[(8*int(b[3]))+7]) }
+func (b *pNode) cPtrMax() int64 { return u48LE(b[(16*int(b[3]))+8:]) }
+func (b *pNode) dPtrMax() int64 { return u48LE(b[8*int(b[3]):]) }
+func (b *pNode) version() uint8 { return b[(16*int(b[3]))+14] }
 
-func (b *rNode) cLen(i int) uint8 {
+func (b *pNode) cLen(i int) uint8 {
 	base := (8 * int(b[3])) + 14
 	return b[(8*i)+base]
 }
 
-func (b *rNode) cOff(i int, cBias int64) int64 {
+func (b *pNode) cOff(i int, cBias int64) int64 {
 	base := (8 * int(b[3])) + 8
 	return cBias + u48LE(b[(8*i)+base:])
 }
 
-func (b *rNode) cOffRange(i int, cBias int64) Range {
+func (b *pNode) cOffRange(i int, cBias int64) Range {
 	m := cBias + b.cPtrMax()
 	if i >= b.arity() {
 		return Range{m, m}
@@ -116,18 +116,18 @@
 	return Range{cOff, m}
 }
 
-func (b *rNode) dOff(i int, dBias int64) int64 {
+func (b *pNode) dOff(i int, dBias int64) int64 {
 	if i == 0 {
 		return dBias
 	}
 	return dBias + u48LE(b[8*i:])
 }
 
-func (b *rNode) dOffRange(i int, dBias int64) Range {
+func (b *pNode) dOffRange(i int, dBias int64) Range {
 	return Range{b.dOff(i, dBias), b.dOff(i+1, dBias)}
 }
 
-func (b *rNode) dSize(i int) int64 {
+func (b *pNode) dSize(i int) int64 {
 	x := int64(0)
 	if i > 0 {
 		x = u48LE(b[8*i:])
@@ -135,20 +135,20 @@
 	return u48LE(b[(8*i)+8:]) - x
 }
 
-func (b *rNode) sTag(i int) uint8 {
+func (b *pNode) sTag(i int) uint8 {
 	base := (8 * int(b[3])) + 15
 	return b[(8*i)+base]
 }
 
-func (b *rNode) tTag(i int) uint8 {
+func (b *pNode) tTag(i int) uint8 {
 	return b[(8*i)+7]
 }
 
-func (b *rNode) isLeaf(i int) bool {
+func (b *pNode) isLeaf(i int) bool {
 	return b[(8*i)+7] != 0xFE
 }
 
-func (b *rNode) findChunkContaining(dOff int64, dBias int64) int {
+func (b *pNode) findChunkContaining(dOff int64, dBias int64) int {
 	// TODO: binary search instead of linear search.
 	for i, n := 0, b.arity(); i < n; i++ {
 		if dOff < b.dOff(i+1, dBias) {
@@ -160,7 +160,7 @@
 	panic("rac: internal error: could not find containing chunk")
 }
 
-func (b *rNode) chunk(i int, cBias int64, dBias int64) Chunk {
+func (b *pNode) chunk(i int, cBias int64, dBias int64) Chunk {
 	sTag := b.sTag(i)
 	tTag := b.tTag(i)
 	return Chunk{
@@ -174,7 +174,7 @@
 	}
 }
 
-func (b *rNode) valid() bool {
+func (b *pNode) valid() bool {
 	// Check the magic and arity.
 	if (b[0] != magic[0]) || (b[1] != magic[1]) || (b[2] != magic[2]) || (b[3] == 0) {
 		return false
@@ -241,10 +241,10 @@
 	return true
 }
 
-// Reader reads a RAC file.
+// Parser parses a RAC file.
 //
 // Do not modify its exported fields after calling any of its methods.
-type Reader struct {
+type Parser struct {
 	// ReadSeeker is where the RAC-encoded data is read from.
 	//
 	// It may also implement io.ReaderAt, in which case its ReadAt method will
@@ -264,7 +264,7 @@
 	// Zero is an invalid value, as an empty file is not a valid RAC file.
 	CompressedSize int64
 
-	// initialized is set true after the first call on this Reader.
+	// initialized is set true after the first call on this Parser.
 	initialized bool
 
 	// rootNodeArity is the root node's arity.
@@ -299,10 +299,10 @@
 	currNodeDBias int64
 
 	// currNode is the 4096 byte buffer to hold the current node.
-	currNode rNode
+	currNode pNode
 }
 
-func (r *Reader) checkParameters() error {
+func (r *Parser) checkParameters() error {
 	if r.ReadSeeker == nil {
 		r.err = errors.New("rac: invalid ReadSeeker")
 		return r.err
@@ -314,7 +314,7 @@
 	return nil
 }
 
-func (r *Reader) initialize() error {
+func (r *Parser) initialize() error {
 	if r.err != nil {
 		return r.err
 	}
@@ -337,7 +337,7 @@
 	return nil
 }
 
-func (r *Reader) findRootNode() error {
+func (r *Parser) findRootNode() error {
 	// Look at the start of the compressed file.
 	if err := readAt(r.ReadSeeker, r.currNode[:4], 0); err != nil {
 		r.err = err
@@ -369,7 +369,7 @@
 	return errors.New("rac: invalid input: missing index root node")
 }
 
-func (r *Reader) tryRootNode(arity uint8, fromEnd bool) (found bool, ioErr error) {
+func (r *Parser) tryRootNode(arity uint8, fromEnd bool) (found bool, ioErr error) {
 	if arity == 0 {
 		return false, nil
 	}
@@ -400,7 +400,7 @@
 // load loads a node from the RAC file into r.currNode. It does not check that
 // the result is valid, and the caller should do so if it doesn't already know
 // that it is valid.
-func (r *Reader) load(cOffset int64, arity uint8) error {
+func (r *Parser) load(cOffset int64, arity uint8) error {
 	if arity == 0 {
 		r.err = errors.New("rac: internal error: inconsistent arity")
 		return r.err
@@ -413,7 +413,7 @@
 	return nil
 }
 
-func (r *Reader) loadAndValidate(cOffset int64,
+func (r *Parser) loadAndValidate(cOffset int64,
 	parentCodec Codec, parentVersion uint8, parentCOffMax int64,
 	childCBias int64, childDSize int64) error {
 
@@ -457,7 +457,7 @@
 }
 
 // DecompressedSize returns the total size of the decompressed data.
-func (r *Reader) DecompressedSize() (int64, error) {
+func (r *Parser) DecompressedSize() (int64, error) {
 	if err := r.initialize(); err != nil {
 		return 0, err
 	}
@@ -468,7 +468,7 @@
 // dSpaceOffset. That chunk does not necessarily start at dSpaceOffset.
 //
 // It is an error to seek to a negative value.
-func (r *Reader) SeekToChunkContaining(dSpaceOffset int64) error {
+func (r *Parser) SeekToChunkContaining(dSpaceOffset int64) error {
 	if err := r.initialize(); err != nil {
 		return err
 	}
@@ -486,7 +486,7 @@
 //
 // Empty chunks (those that contain no decompressed data, only metadata) are
 // skipped.
-func (r *Reader) NextChunk() (Chunk, error) {
+func (r *Parser) NextChunk() (Chunk, error) {
 	if err := r.initialize(); err != nil {
 		return Chunk{}, err
 	}
@@ -512,7 +512,7 @@
 	}
 }
 
-func (r *Reader) resolveSeekPosition() error {
+func (r *Parser) resolveSeekPosition() error {
 	// Load the root node. It has already been validated, during initialize.
 	if err := r.load(r.rootNodeCOffset, r.rootNodeArity); err != nil {
 		return err
diff --git a/lib/rac/rac_test.go b/lib/rac/rac_test.go
index 4808bcc..7d59a72 100644
--- a/lib/rac/rac_test.go
+++ b/lib/rac/rac_test.go
@@ -351,13 +351,13 @@
 		t.Fatalf("\ngot:\n%s\nwant:\n%s", gotHexDump, wantHexDump)
 	}
 
-	r := &Reader{
+	p := &Parser{
 		ReadSeeker:     bytes.NewReader(encoded),
 		CompressedSize: int64(len(encoded)),
 	}
 	gotPrimaries := []byte(nil)
 	for {
-		c, err := r.NextChunk()
+		c, err := p.NextChunk()
 		if err == io.EOF {
 			break
 		} else if err != nil {
@@ -450,12 +450,12 @@
 				`DRangeSize:0x44, C0:"Cc...", C1:"Rr...", C2:"Ss..."`
 		}
 
-		r := &Reader{
+		p := &Parser{
 			ReadSeeker:     bytes.NewReader(tc.compressed),
 			CompressedSize: int64(len(tc.compressed)),
 		}
 
-		if gotDecompressedSize, err := r.DecompressedSize(); err != nil {
+		if gotDecompressedSize, err := p.DecompressedSize(); err != nil {
 			t.Errorf("%q test case: %v", tc.name, err)
 			continue loop
 		} else if gotDecompressedSize != wantDecompressedSize {
@@ -468,7 +468,7 @@
 		description := &bytes.Buffer{}
 		prevDRange1 := int64(0)
 		for {
-			c, err := r.NextChunk()
+			c, err := p.NextChunk()
 			if err == io.EOF {
 				break
 			} else if err != nil {
@@ -506,18 +506,18 @@
 		}
 
 		// NextChunk should return io.EOF.
-		if _, err := r.NextChunk(); err != io.EOF {
+		if _, err := p.NextChunk(); err != io.EOF {
 			t.Errorf("%q test case: NextChunk: got %v, want io.EOF", tc.name, err)
 			continue loop
 		}
 
-		if err := r.SeekToChunkContaining(0x30); err != nil {
+		if err := p.SeekToChunkContaining(0x30); err != nil {
 			t.Errorf("%q test case: SeekToChunkContaining: %v", tc.name, err)
 			continue loop
 		}
 
 		// NextChunk should return the "Bb..." chunk.
-		if c, err := r.NextChunk(); err != nil {
+		if c, err := p.NextChunk(); err != nil {
 			t.Errorf("%q test case: NextChunk: %v", tc.name, err)
 			continue loop
 		} else if got, want := snippet(c.CPrimary), "Bb..."; got != want {
diff --git a/lib/raczlib/reader.go b/lib/raczlib/reader.go
index 53a6a5e..d03a1ef 100644
--- a/lib/raczlib/reader.go
+++ b/lib/raczlib/reader.go
@@ -85,7 +85,7 @@
 	err error
 
 	// racReader is the low-level (Codec-agnostic) RAC reader.
-	racReader rac.Reader
+	racReader rac.Parser
 
 	// These two fields combine for a 3-state state machine:
 	//