Commit ae04e842 authored by Jan Mercl

Adjust for weekly.2011-11-09

parent 5d882cb9
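For context, this commit tracks the weekly.2011-11-09 Go snapshot, in which os.Error gave way to the built-in error interface: the hunks below replace os.Error with error, os.NewError with errors.New, and os.EOF with io.EOF. A small standalone sketch of the post-rename style (illustrative only, not code from this repository):

package main

import (
	"errors" // errors.New replaces os.NewError
	"fmt"
	"io" // io.EOF replaces os.EOF
	"strings"
)

// parse is a hypothetical function written in the post-rename style: it
// returns the built-in error type instead of the removed os.Error.
func parse(s string) (int, error) {
	if s == "" {
		return 0, errors.New("empty input")
	}
	return len(s), nil
}

func main() {
	if _, err := parse(""); err != nil {
		fmt.Println(err) // empty input
	}
	// io.EOF is now the end-of-input sentinel.
	_, _, err := strings.NewReader("").ReadRune()
	fmt.Println(err == io.EOF) // true
}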
@@ -226,7 +226,7 @@ func BenchmarkNFA(b *testing.B) {
 	var v visitor
 	for i := 0; i < b.N; i++ {
 		v = visitor{s: lex.Scanner("test-go-scanner", nil)}
-		filepath.Walk(runtime.GOROOT()+"/src", func(pth string, info *os.FileInfo, err os.Error) os.Error {
+		filepath.Walk(runtime.GOROOT()+"/src", func(pth string, info *os.FileInfo, err error) error {
 			if err != nil {
 				panic(err)
 			}
......
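The benchmark's filepath.Walk callback now returns the built-in error instead of os.Error; the *os.FileInfo parameter is the API of that snapshot. For comparison, a present-day sketch of the same walk (current filepath.Walk passes an os.FileInfo interface value, and returning the error is preferable to panicking):

package main

import (
	"fmt"
	"os"
	"path/filepath"
	"runtime"
)

func main() {
	// Walk GOROOT/src as the benchmark does, printing regular files.
	root := filepath.Join(runtime.GOROOT(), "src")
	err := filepath.Walk(root, func(path string, info os.FileInfo, err error) error {
		if err != nil {
			return err // propagate instead of panicking
		}
		if !info.IsDir() {
			fmt.Println(path)
		}
		return nil
	})
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
	}
}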
@@ -213,13 +213,13 @@ type Lexer struct {
 type StartSetID int
 //TODO:full docs
-func CompileLexer(starts [][]int, tokdefs map[string]int, grammar, start string) (lexer *Lexer, err os.Error) {
+func CompileLexer(starts [][]int, tokdefs map[string]int, grammar, start string) (lexer *Lexer, err error) {
 	lexer = &Lexer{}
 	defer func() {
 		if e := recover(); e != nil {
 			lexer = nil
-			err = e.(os.Error)
+			err = e.(error)
 		}
 	}()
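CompileLexer reports failures by panicking internally and converting the panic back into its err result in the deferred recover; with os.Error gone, the type assertion becomes e.(error). A minimal standalone sketch of that pattern (the compile function below is illustrative, not part of the package):

package main

import (
	"errors"
	"fmt"
)

// compile panics with an error value on bad input; the deferred recover
// turns the panic back into the function's err result, mirroring the
// pattern used by CompileLexer and ParseRE.
func compile(src string) (ok bool, err error) {
	defer func() {
		if e := recover(); e != nil {
			ok = false
			err = e.(error) // was e.(os.Error) before the rename
		}
	}()
	if src == "" {
		panic(errors.New("empty source"))
	}
	return true, nil
}

func main() {
	if _, err := compile(""); err != nil {
		fmt.Println("compile failed:", err)
	}
}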
@@ -303,7 +303,7 @@ func CompileLexer(starts [][]int, tokdefs map[string]int, grammar, start string)
 // MustCompileLexer is like CompileLexer but panics if the definitions cannot be compiled.
 // It simplifies safe initialization of global variables holding compiled Lexers.
 func MustCompileLexer(starts [][]int, tokdefs map[string]int, grammar, start string) (lexer *Lexer) {
-	var err os.Error
+	var err error
 	if lexer, err = CompileLexer(starts, tokdefs, grammar, start); err != nil {
 		if list, ok := err.(scanner.ErrorList); ok {
 			scanner.PrintError(os.Stderr, list)
......
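As the doc comment says, MustCompileLexer exists so a compiled Lexer can initialize a package-level variable without explicit error handling, panicking (and printing a scanner.ErrorList, if any) when the definitions are bad. A generic sketch of that Must* convention, assuming nothing about the lexer package beyond the doc comment:

package main

import (
	"fmt"
	"strconv"
)

// mustParse follows the same convention as MustCompileLexer: call the
// fallible function and panic on error, so the result can safely
// initialize a package-level variable.
func mustParse(s string) int {
	n, err := strconv.Atoi(s)
	if err != nil {
		panic(err)
	}
	return n
}

// Safe initialization of a global, the use case the doc comment describes.
var answer = mustParse("42")

func main() {
	fmt.Println(answer) // 42
}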
@@ -10,8 +10,8 @@
 package lexer
 
 import (
+	"errors"
 	"fmt"
-	"os"
 	"strings"
 	"unicode"
 )
@@ -23,14 +23,14 @@ func qs(s string) string {
 // ParseRE compiles a regular expression re into Nfa, returns the re component starting
 // and accepting states or an Error if any.
-func (n *Nfa) ParseRE(name, re string) (in, out *NfaState, err os.Error) {
+func (n *Nfa) ParseRE(name, re string) (in, out *NfaState, err error) {
 	s := NewScannerSource(name, strings.NewReader(re))
 	defer func() {
 		if e := recover(); e != nil {
 			in, out = nil, nil
 			pos := s.CurrentRune().Position
-			err = fmt.Errorf(`%s - "%s^%s" - %s`, pos, qs(re[:pos.Offset]), qs(re[pos.Offset:]), e.(os.Error))
+			err = fmt.Errorf(`%s - "%s^%s" - %s`, pos, qs(re[:pos.Offset]), qs(re[pos.Offset:]), e.(error))
 		}
 	}()
@@ -245,7 +245,7 @@ func (s *ScannerSource) hex() (v int) {
 		return v - 'A' + 10
 	}
-	panic(os.NewError("expected hex digit"))
+	panic(errors.New("expected hex digit"))
 }
 
 func (s *ScannerSource) expect(rune int) {
......
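ParseRE also recovers from internal panics, but additionally decorates the error with the scan position, quoting the regular expression split at the failure offset with a caret. A standalone sketch of that reporting style (simplified: it uses a plain offset instead of the package's token.Position and skips the qs quoting):

package main

import (
	"errors"
	"fmt"
)

// decorate marks the failure position in a regular expression with a caret,
// in the spirit of ParseRE's error message.
func decorate(re string, off int, cause error) error {
	return fmt.Errorf(`offset %d - "%s^%s" - %s`, off, re[:off], re[off:], cause)
}

func main() {
	err := decorate(`a(b|c`, 5, errors.New("unexpected end of expression"))
	fmt.Println(err) // offset 5 - "a(b|c^" - unexpected end of expression
}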
@@ -12,14 +12,13 @@ package lexer
 import (
 	"go/token"
 	"io"
-	"os"
 )
 
 // EOFReader implements a RuneReader allways returning 0 (EOF)
 type EOFReader int
 
-func (r EOFReader) ReadRune() (rune int, size int, err os.Error) {
-	return 0, 0, os.EOF
+func (r EOFReader) ReadRune() (rune int, size int, err error) {
+	return 0, 0, io.EOF
 }
 
 type source struct {
@@ -69,7 +68,7 @@ func (s *Source) Read() (r ScannerRune) {
 	for {
 		r.Position = s.Position()
 		r.Rune, r.Size, r.Err = s.tos.reader.ReadRune()
-		if r.Err == nil || r.Err != os.EOF {
+		if r.Err == nil || r.Err != io.EOF {
 			p := &s.tos.position
 			p.Offset += r.Size
 			if r.Rune != '\n' {
@@ -98,7 +97,7 @@ type ScannerRune struct {
 	Position token.Position // Starting position of Rune
 	Rune int // Rune value
 	Size int // Rune size
-	Err os.Error // os.EOF or nil. Any other value invalidates all other fields of a ScannerRune.
+	Err error // os.EOF or nil. Any other value invalidates all other fields of a ScannerRune.
 }
 
 // ScannerSource is a Source with one ScannerRune look behind and an on demand one ScannerRune lookahead.
......
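The last file switches the end-of-input sentinel from os.EOF to io.EOF: EOFReader keeps reporting end of input, and Source.Read keeps reading until it sees that sentinel. A standalone sketch of the same idea against the io.RuneReader interface (eofReader is a local stand-in; the real EOFReader still declared rune as an int in this snapshot):

package main

import (
	"fmt"
	"io"
	"strings"
)

// eofReader always reports end of input, like the package's EOFReader.
type eofReader struct{}

func (eofReader) ReadRune() (r rune, size int, err error) {
	return 0, 0, io.EOF
}

// readAll drains an io.RuneReader, stopping on io.EOF (formerly os.EOF).
func readAll(rr io.RuneReader) []rune {
	var out []rune
	for {
		r, _, err := rr.ReadRune()
		if err != nil {
			return out
		}
		out = append(out, r)
	}
}

func main() {
	fmt.Println(string(readAll(strings.NewReader("abc")))) // abc
	fmt.Println(len(readAll(eofReader{})))                 // 0
}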