Make indentation work with REPL
peachpit-site committed Feb 1, 2025
1 parent 9a37f57 commit 243c40b
Showing 12 changed files with 108 additions and 47 deletions.
1 change: 1 addition & 0 deletions go.mod
@@ -31,6 +31,7 @@ require (
github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.17.15 // indirect
github.com/aws/aws-sdk-go-v2/service/s3 v1.58.2 // indirect
github.com/aws/smithy-go v1.20.3 // indirect
github.com/chzyer/readline v1.5.1 // indirect
github.com/coreos/go-oidc/v3 v3.5.0 // indirect
github.com/danieljoos/wincred v1.2.2 // indirect
github.com/databricks/databricks-sql-go v1.5.7 // indirect
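
The commit switches the REPL's line editor from github.com/lmorg/readline to github.com/chzyer/readline (added above and in go.sum below). As a rough sketch — not part of this commit, error handling elided just as it is in repl.go — these are the two chzyer/readline calls the new REPL loop relies on, readline.New and ReadlineWithDefault:

package main

import (
    "fmt"
    "strings"

    "github.com/chzyer/readline"
)

func main() {
    // readline.New builds a line editor with a fixed prompt.
    rl, err := readline.New("example → ")
    if err != nil {
        panic(err)
    }
    defer rl.Close()
    // ReadlineWithDefault pre-fills the edit buffer; the REPL uses this to
    // carry the previous line's leading whitespace into a continuation line.
    line, _ := rl.ReadlineWithDefault("\t")
    fmt.Println("you typed:", strings.TrimSpace(line))
}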
5 changes: 5 additions & 0 deletions go.sum
@@ -47,6 +47,10 @@ github.com/aws/aws-sdk-go-v2/service/s3 v1.58.2 h1:sZXIzO38GZOU+O0C+INqbH7C2yALw
github.com/aws/aws-sdk-go-v2/service/s3 v1.58.2/go.mod h1:Lcxzg5rojyVPU/0eFwLtcyTaek/6Mtic5B1gJo7e/zE=
github.com/aws/smithy-go v1.20.3 h1:ryHwveWzPV5BIof6fyDvor6V3iUL7nTfiTKXHiW05nE=
github.com/aws/smithy-go v1.20.3/go.mod h1:krry+ya/rV9RDcV/Q16kpu6ypI4K2czasz0NC3qS14E=
github.com/chzyer/logex v1.2.1/go.mod h1:JLbx6lG2kDbNRFnfkgvh4eRJRPX1QCoOIWomwysCBrQ=
github.com/chzyer/readline v1.5.1 h1:upd/6fQk4src78LMRzh5vItIt361/o4uq553V8B5sGI=
github.com/chzyer/readline v1.5.1/go.mod h1:Eh+b79XXUwfKfcPLepksvw2tcLE/Ct21YObkaSkeBlk=
github.com/chzyer/test v1.0.0/go.mod h1:2JlltgoNkt4TW/z9V/IzDdFaMTM2JPIi26O1pF38GC8=
github.com/coreos/go-oidc/v3 v3.5.0 h1:VxKtbccHZxs8juq7RdJntSqtXFtde9YpNpGn0yqgEHw=
github.com/coreos/go-oidc/v3 v3.5.0/go.mod h1:ecXRtV4romGPeO6ieExAsUK9cb/3fp9hXNz1tlv8PIM=
github.com/coreos/go-systemd/v22 v22.3.3-0.20220203105225-a9a7ef127534/go.mod h1:Y58oyj3AT4RCenI/lSvhwexgC+NSVTIJ3seZv2GcEnc=
@@ -279,6 +283,7 @@ golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBc
golang.org/x/sys v0.0.0-20210927094055-39ccf1dd6fa6/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20211007075335-d3039528d8ac/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20211019181941-9d821ace8654/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220310020820-b874c991c1a5/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220412211240-33da011f77ad/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
10 changes: 10 additions & 0 deletions source/compiler/test-files/eof_test.pf
@@ -0,0 +1,10 @@
// A regression test to make sure that the last function declared in a program
// both compiles and runs.

def

troz(i) :
    i

zort(i) :
    i
1 change: 1 addition & 0 deletions source/hub/hub.go
@@ -1494,6 +1494,7 @@ var (
GOOD_BULLET = Green(" ▪ ")
BROKEN = Red(" ✖ ")
PROMPT = "→ "
INDENT_PROMPT = " "
ERROR = "$Error$"
RT_ERROR = "$Error$"
HUB_ERROR = "$Hub error$"
64 changes: 43 additions & 21 deletions source/hub/repl.go
@@ -2,13 +2,15 @@ package hub

import (
"io"
"regexp"
"strings"

"github.com/lmorg/readline"
"github.com/chzyer/readline"
)

func StartHub(hub *Hub, in io.Reader, out io.Writer) {
rline := readline.NewInstance()
var rline *readline.Instance
colonOrEmdash, _ := regexp.Compile(`.*[\w\s]*(:|---)[\s]*$`)
for {

// The hub's CurrentForm setting allows it to ask for information from the user instead of
@@ -19,23 +21,15 @@ func StartHub(hub *Hub, in io.Reader, out io.Writer) {

for {
queryString := hub.CurrentForm.Fields[len(hub.CurrentForm.Result)]
// A * at the beginning of the query string indicates that the answer should be
// masked.
if queryString[0] == '*' {
queryString = queryString[1:]
rline.PasswordMask = '▪'
}

// The readline utility doesn't like multiline prompts, so we must kludge a little.
pos := strings.LastIndex(queryString, "\n")
if pos == -1 {
rline.SetPrompt(queryString + ": ")
rline, _ = readline.New(queryString + ": ")
} else {
hub.WriteString(queryString[:pos+1])
rline.SetPrompt(queryString[pos+1:] + ": ")
rline, _ = readline.New(queryString[pos+1:] + ": ")
}
line, _ := rline.Readline()
rline.PasswordMask = 0
hub.CurrentForm.Result[hub.CurrentForm.Fields[len(hub.CurrentForm.Result)]] = line
if len(hub.CurrentForm.Result) == len(hub.CurrentForm.Fields) {
hub.CurrentForm.Call(hub.CurrentForm)
@@ -45,25 +39,53 @@ func StartHub(hub *Hub, in io.Reader, out io.Writer) {
continue
}

rline.SetPrompt(makePrompt(hub))
line, _ := rline.Readline()

line = strings.TrimSpace(line)
ws := ""
input := ""
c := 0
for {
rline, _ = readline.New(makePrompt(hub, ws != ""))
line, _ := rline.ReadlineWithDefault(ws)
c++
input = input + line + "\n"
ws = ""
for _, c := range line {
if c == ' ' || c == '\t' {
ws = ws + string(c)
} else {
break
}
}
if colonOrEmdash.Match([]byte(line)) {
ws = ws + "\t"
}
if ws == "" {
break
}
}
input = strings.TrimSpace(input)

_, quitCharm := hub.Do(line, hub.Username, hub.Password, hub.currentServiceName())
if quitCharm {
_, quit := hub.Do(input, hub.Username, hub.Password, hub.currentServiceName())
if quit {
break
}
}
}

func makePrompt(hub *Hub) string {
func makePrompt(hub *Hub, indented bool) string {
symbol := PROMPT
left := hub.currentServiceName()
if indented {
symbol = INDENT_PROMPT
left = strings.Repeat(" ", len(left))
}
if hub.currentServiceName() == "" {
return PROMPT
return symbol
}
promptText := hub.currentServiceName() + " " + PROMPT
promptText := left + " " + symbol
if hub.CurrentServiceIsBroken() {
promptText = Red(promptText)
}
return promptText
}
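
To make the indentation handling above easier to follow, here is a self-contained sketch — not part of the commit, names illustrative — of the continuation loop with the readline calls replaced by a fixed slice of already-entered lines: a line whose trailing non-space content is a colon or an emdash keeps the loop reading, and each line's leading whitespace (plus one tab after a colon or emdash) becomes the pre-filled indent for the next prompt.

package main

import (
    "fmt"
    "regexp"
    "strings"
)

var colonOrEmdash = regexp.MustCompile(`.*[\w\s]*(:|---)[\s]*$`)

// gatherInput mimics the REPL loop above. Each element of lines stands for
// what one call to ReadlineWithDefault would return, indentation included.
func gatherInput(lines []string) string {
    ws := ""
    input := ""
    i := 0
    for {
        line := lines[i]
        i++
        input = input + line + "\n"
        ws = ""
        for _, c := range line { // copy this line's leading whitespace forward
            if c == ' ' || c == '\t' {
                ws = ws + string(c)
            } else {
                break
            }
        }
        if colonOrEmdash.MatchString(line) {
            ws = ws + "\t" // a trailing ":" or "---" opens a deeper block
        }
        if ws == "" || i == len(lines) { // the slice bound guards this sketch only
            break
        }
    }
    return strings.TrimSpace(input)
}

func main() {
    // A one-liner: nothing to continue, a single read suffices.
    fmt.Printf("%q\n", gatherInput([]string{"troz 42"}))
    // A declaration: the colon asks for an indented body, and the final line
    // (with its pre-filled indent deleted by the user) ends the input.
    fmt.Printf("%q\n", gatherInput([]string{"troz(i) :", "\ti", ""}))
}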


18 changes: 9 additions & 9 deletions source/initializer/initializer.go
@@ -118,9 +118,6 @@ func StartCompiler(scriptFilepath, sourcecode string, db *sql.DB, hubServices ma
// We then carry out five phases of initialization each of which is performed recursively on all of the
// modules in the dependency tree before moving on to the next. (The need to do this is in fact what
// defines the phases, so you shouldn't bother looking for some deeper logic in that.)
//
// NOTE that these five phases are repeated in an un-DRY way in `test_helper.go` in this package, and that
// any changes here will also need to be reflected there.
result := iz.ParseEverythingFromSourcecode(vm.BlankVm(db), parser.NewCommonParserBindle(), compiler.NewCommonCompilerBindle(), scriptFilepath, sourcecode, "")
if iz.ErrorsExist() {
iz.cp.P.Common.IsBroken = true
@@ -335,7 +332,7 @@ func (iz *initializer) MakeParserAndTokenizedProgram() {

tok = iz.p.TokenizedCode.NextToken() // note that we've already removed leading newlines.
if settings.SHOW_RELEXER && !(settings.IGNORE_BOILERPLATE && settings.ThingsToIgnore.Contains(tok.Source)) {
println(tok.Type, tok.Literal)
println(text.PURPLE + tok.Type, tok.Literal + text.RESET)
}

if tok.Type == token.EOF { // An empty file should still initiate a service, but one with no data.
@@ -351,9 +348,9 @@ func (iz *initializer) MakeParserAndTokenizedProgram() {

line := token.NewCodeChunk()

for tok = iz.p.TokenizedCode.NextToken(); tok.Type != token.EOF; tok = iz.p.TokenizedCode.NextToken() {
for tok = iz.p.TokenizedCode.NextToken(); true; tok = iz.p.TokenizedCode.NextToken() {
if settings.SHOW_RELEXER && !(settings.IGNORE_BOILERPLATE && settings.ThingsToIgnore.Contains(tok.Source)) {
println(tok.Type, tok.Literal)
println(text.PURPLE + tok.Type, tok.Literal + text.RESET)
}
if token.TokenTypeIsHeadword(tok.Type) {
if tok.Literal == "import" {
@@ -390,7 +387,7 @@ func (iz *initializer) MakeParserAndTokenizedProgram() {
indentCount--
}
}
if ((tok.Type == token.NEWLINE) && !lastTokenWasColon && indentCount == 0 && line.Length() != 0) ||
if ((tok.Type == token.NEWLINE || tok.Type == token.EOF) && !lastTokenWasColon && indentCount == 0 && line.Length() != 0) ||
tok.Type == token.GOCODE {
if tok.Type == token.GOCODE {
line.Append(tok)
@@ -423,6 +420,7 @@ func (iz *initializer) MakeParserAndTokenizedProgram() {
} else {
iz.addTokenizedDeclaration(functionDeclaration, line, IsPrivate)
}

case VarSection, ConstSection:
// As a wretched kludge, we will now weaken some of the commas on the LHS of
// the assignment so that it parses properly. (TODO: at this point it would be much easier to
@@ -461,8 +459,7 @@ func (iz *initializer) MakeParserAndTokenizedProgram() {
colonMeansFunctionOrCommand = (currentSection == CmdSection || currentSection == DefSection)
continue
}

if (tok.Type == token.NEWLINE) && line.Length() == 0 {
if tok.Type == token.NEWLINE && line.Length() == 0 {
continue
}

@@ -473,6 +470,9 @@ func (iz *initializer) MakeParserAndTokenizedProgram() {
iz.addWordsToParser(line)
}
line.Append(tok)
if tok.Type == token.EOF {
break
}
}

iz.p.Common.Errors = err.MergeErrors(iz.p.TokenizedCode.(*lexer.Relexer).GetErrors(), iz.p.Common.Errors)
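
With the change to MakeParserAndTokenizedProgram above, a tokenized declaration is now flushed on EOF as well as on NEWLINE, and the loop breaks only after the EOF token itself has been handled, so the last declaration of a file with no trailing newline is still compiled. A stand-in sketch of that control flow (the token types and helpers here are hypothetical simplifications, not the real token package):

package main

import "fmt"

// tokenKind is a stand-in for token.TokenType.
type tokenKind int

const (
    WORD tokenKind = iota
    NEWLINE
    EOF
)

type tok struct {
    kind tokenKind
    lit  string
}

// chunkDeclarations emits the current chunk on NEWLINE *or* EOF and exits
// only after the EOF token has been seen, so nothing at the end is dropped.
func chunkDeclarations(stream []tok) [][]string {
    var chunks [][]string
    var line []string
    for _, t := range stream {
        if (t.kind == NEWLINE || t.kind == EOF) && len(line) != 0 {
            chunks = append(chunks, line)
            line = nil
        }
        if t.kind == EOF {
            break
        }
        if t.kind == WORD {
            line = append(line, t.lit)
        }
    }
    return chunks
}

func main() {
    // The last declaration of a file with no trailing newline.
    stream := []tok{{WORD, "zort(i)"}, {WORD, ":"}, {WORD, "i"}, {EOF, ""}}
    fmt.Println(chunkDeclarations(stream)) // [[zort(i) : i]]
}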
31 changes: 21 additions & 10 deletions source/lexer/lexer.go
@@ -64,7 +64,15 @@ func (l *Lexer) NextToken() token.Token {

switch l.ch {
case 0:
return l.NewToken(token.EOF, "EOF")
level := l.whitespaceStack.Find("")
if level > 0 {
for i := 0; i < level; i++ {
l.whitespaceStack.Pop()
}
return l.MakeToken(token.END, fmt.Sprint(level))
} else {
return l.NewToken(token.EOF, "EOF")
}
case '\n':
return l.NewToken(token.NEWLINE, ";")
case '\\':
@@ -198,7 +206,7 @@ func (l *Lexer) NextToken() token.Token {
l.readChar()
return l.NewToken(tType, text)
case token.EMDASH:
return l.MakeToken(tType, l.readSnippet())
return l.MakeToken(tType, strings.TrimSpace(l.readSnippet()))
default:
return l.NewToken(tType, lit)
}
@@ -393,8 +401,10 @@ func (l *Lexer) readSnippet() string {
}
// There are two possibilities. Either we found a non-whitespace character, and the whole snippet is on the same line as the
// `---` token, or we found a newline and the snippet is indented on the succeeding lines. Just like with a colon.
if l.peekChar() == '\n' { // --- then we have to mess with whitespace.
l.readChar()
if l.peekChar() == '\n' || l.peekChar() == '\r' { // --- then we have to mess with whitespace.
for l.peekChar() == '\n' || l.peekChar() == '\r' {
l.readChar()
}
langIndent := ""
stackTop, ok := l.whitespaceStack.HeadValue()
if !ok {
Expand All @@ -417,8 +427,8 @@ func (l *Lexer) readSnippet() string {
return result
}
}
if strings.HasPrefix(stackTop, currentWhitespace) || currentWhitespace == "\n" { // Then we've unindented. Dobby is free!
if currentWhitespace != "\n" {
if strings.HasPrefix(stackTop, currentWhitespace) || currentWhitespace == "\n" || currentWhitespace == "\r" || currentWhitespace == string(rune(0)) { // Then we've unindented. Dobby is free!
if currentWhitespace != "\n" && currentWhitespace != "\r" && currentWhitespace == string(rune(0)) {
l.snippetWhitespace = currentWhitespace
}
return result
Expand All @@ -427,18 +437,19 @@ func (l *Lexer) readSnippet() string {
l.Throw("lex/emdash/indent/c", l.NewToken(token.ILLEGAL, "bad emdash"))
return result
}
for l.peekChar() != '\n' && l.peekChar() != 0 {
for l.peekChar() != '\n' && l.peekChar() != '\r' && l.peekChar() != 0 {
l.readChar()
result = result + string(l.ch)
}
if l.peekChar() == 0 {
l.readChar()
return result
}
l.readChar()
result = result + "\n"
}
} else {
for l.peekChar() != '\n' && l.peekChar() != 0 {
for l.peekChar() != '\n' && l.peekChar() != '\r' && l.peekChar() != 0 {
l.readChar()
result = result + string(l.ch)
}
@@ -621,7 +632,7 @@ func isHexDigit(ch rune) bool {

func isProtectedPunctuationOrWhitespace(ch rune) bool {
return ch == '(' || ch == ')' || ch == '[' || ch == ']' || ch == '{' || ch == '}' || ch == ' ' || ch == ',' ||
ch == ':' || ch == ';' || ch == '\t' || ch == '\n' || ch == 0
ch == ':' || ch == ';' || ch == '\t' || ch == '\n' || ch == '\r' || ch == 0
}

func isSymbol(ch rune) bool {
@@ -646,7 +657,7 @@ func (l *Lexer) NewToken(tokenType token.TokenType, st string) token.Token {

func (l *Lexer) MakeToken(tokenType token.TokenType, st string) token.Token {
if settings.SHOW_LEXER && !(settings.IGNORE_BOILERPLATE && settings.ThingsToIgnore.Contains(l.source)) {
fmt.Println(tokenType, st, l.line, l.tstart)
fmt.Println(tokenType, st)
}
return token.Token{Type: tokenType, Literal: st, Source: l.source, Line: l.line, ChStart: l.tstart, ChEnd: l.char}
}
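
The other half of the fix is near the top of NextToken above: when the lexer reaches end of input with indentation levels still open, it now pops them off the whitespace stack and emits an END token whose literal records how many blocks were closed, and only then reports EOF. A rough sketch of that unwinding — not part of the commit, using a plain slice in place of the lexer's whitespaceStack:

package main

import "fmt"

// closeBlocksAtEOF pops every indent still sitting above the outermost
// (empty) level and reports how many blocks were implicitly closed; the
// lexer turns that count into the literal of a single END token.
func closeBlocksAtEOF(stack []string) int {
    popped := 0
    for len(stack) > 0 && stack[len(stack)-1] != "" {
        stack = stack[:len(stack)-1]
        popped++
    }
    return popped
}

func main() {
    // End of file reached inside the indented body of the last function,
    // as in eof_test.pf: one block is still open.
    fmt.Println(closeBlocksAtEOF([]string{"", "\t"})) // 1
}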
7 changes: 6 additions & 1 deletion source/lexer/relexer.go
@@ -222,8 +222,13 @@ func (rl *Relexer) NextSemanticToken() token.Token {
return rl.burnToken()
}
rl.curTok.Literal = strconv.Itoa(n - 1)
return token.Token{Type: token.NEWLINE, Literal: ";", Line: rl.curTok.Line,
if rl.nexTok.Type == token.EOF {
return token.Token{Type: token.EOF, Literal: ";", Line: rl.curTok.Line,
ChStart: 0, ChEnd: 0, Source: rl.curTok.Source}
} else {
return token.Token{Type: token.NEWLINE, Literal: ";", Line: rl.curTok.Line,
ChStart: 0, ChEnd: 0, Source: rl.curTok.Source}
}
default:
rl.nestingLevel = rl.nestingLevel - 1
rl.curTok.Literal = strconv.Itoa(n - 1)
2 changes: 1 addition & 1 deletion source/lexer/relexer_test.go
@@ -32,7 +32,7 @@ func TestNextTokenForRelexer(t *testing.T) {
{token.COLON, ":", 3},
{token.INT, "2", 3},
{token.RPAREN, "<-|", 4},
{token.NEWLINE, ";", 0},
{token.EOF, ";", 0},
}

rl := NewRelexer("", input)
4 changes: 2 additions & 2 deletions source/parser/getters.go
@@ -91,7 +91,7 @@ func (p *Parser) extractSig(args []ast.Node) ast.StringSig {
varName := ""
varType := "*"
switch arg := arg.(type) {
case *ast.SuffixExpression:
case *ast.SuffixExpression:
if !p.TypeExists(arg.Operator) {
p.Throw("parse/sig/type/a", &arg.Token)
return nil
@@ -115,7 +115,7 @@ func (p *Parser) extractSig(args []ast.Node) ast.StringSig {
default:
p.Throw("parse/sig/ident/a", inner.GetToken())
return nil

}
case *ast.Identifier:
if p.Endfixes.Contains(arg.Value) {
4 changes: 2 additions & 2 deletions source/settings/settings.go
@@ -30,7 +30,7 @@ const (
OMIT_BUILTINS = false // If true then the file builtins.pf, world.pf, etc, will not be added to the service. Note that this means the hub won't work.
IGNORE_BOILERPLATE = true // Should usually be left true. Means that the first five flags below won't show instrumentation when compiling builtins.pf, world.pf, etc.

FUNCTION_TO_PEEK = "" // Shows the function table entry and function tree associated with the function named in the string, if non-empty.
FUNCTION_TO_PEEK = "zort" // Shows the function table entry and function tree associated with the function named in the string, if non-empty.

// These do what it sounds like.
SHOW_LEXER = false
@@ -45,7 +45,7 @@ const (
SHOW_GOLANG = false
SHOW_API_SERIALIZATION = false
SHOW_EXTERNAL_STUBS = false
SHOW_TESTS = false // Says whether the tests should say what is being tested, useful if one of them crashes and we don't know which.
SHOW_TESTS = true // Says whether the tests should say what is being tested, useful if one of them crashes and we don't know which.
)

var PipefishHomeDirectory string
8 changes: 7 additions & 1 deletion source/vm/vm_test.go
@@ -11,7 +11,13 @@ import (
"github.com/tim-hardcastle/Pipefish/source/text"

)

func TestEof(t *testing.T) {
tests := []test_helper.TestItem{
{`troz 42`, `42`},
{`zort 42`, `42`},
}
test_helper.RunTest(t, "eof_test.pf", tests, test_helper.TestValues)
}
func TestLiterals(t *testing.T) {
tests := []test_helper.TestItem{
{`"foo"`, `"foo"`},
