diff --git a/language/lexer.go b/language/lexer.go
index 5ef54b6..c07d6fe 100644
--- a/language/lexer.go
+++ b/language/lexer.go
@@ -53,6 +53,7 @@ const (
 
 	BOOL   // true, false
 	NULL   // null
+	MULTILINE_STRING // """abc\ndef"""
 )
 
 func (tokenType TokenType) String() string {
@@ -111,6 +112,8 @@ func (tokenType TokenType) String() string {
 		return "Boolean"
 	case NULL:
 		return "null"
+	case MULTILINE_STRING:
+		return "MultilineString"
 	default:
 		return "Unknown"
 	}
@@ -197,6 +200,9 @@ func (lexer *Lexer) Emit(tokenType TokenType) {
 			panic(err)
 		}
 	}
+	if tokenType == MULTILINE_STRING {
+		value = value[3 : len(value)-3]
+	}
 	lexer.Tokens <- Token{tokenType, value, startPos, endPos}
 	lexer.Start = lexer.Pos
 	lexer.Width = 0
@@ -351,6 +357,9 @@ func LexText(lexer *Lexer) StateFn {
 			lexer.Backup()
 			return LexComment
 		case rn == '"':
+			if lexer.AcceptString("\"\"") {
+				return LexMultilineQuote
+			}
 			lexer.Backup()
 			return LexQuote
 		case rn == '.':
@@ -421,7 +430,10 @@ func LexComment(lexer *Lexer) StateFn {
 	}
 	for {
 		switch rn := lexer.Next(); rn {
-		case -1, '\u000A', '\u000D':
+		case -1:
+			lexer.Backup()
+			return LexText
+		case '\u000A', '\u000D':
 			if rn == '\u000D' && lexer.Peek() == '\u000A' {
 				lexer.Next()
 			}
@@ -438,6 +450,31 @@ func LexComment(lexer *Lexer) StateFn {
 	return LexText
 }
 
+// LexMultilineQuote consumes a """-delimited multiline description; the
+// opening `"""` has already been accepted by LexText. It emits a
+// MULTILINE_STRING token on the closing `"""` and errors out otherwise.
+func LexMultilineQuote(lexer *Lexer) StateFn {
+	for {
+		switch rn := lexer.Next(); rn {
+		case -1:
+			// EOF before the closing quotes: fail instead of looping forever.
+			return lexer.Errorf(`GraphQL Syntax Error (%d:%d) Unterminated multiline description`, lexer.Line, lexer.Column)
+		case '\u000A', '\u000D':
+			if rn == '\u000D' && lexer.Peek() == '\u000A' {
+				lexer.Next()
+			}
+			lexer.Line += 1
+			lexer.Column = 1
+		case '"':
+			if lexer.AcceptString("\"\"") {
+				lexer.Emit(MULTILINE_STRING)
+				return LexText
+			}
+			return lexer.Errorf(`GraphQL Syntax Error (%d:%d) Invalid multiline description`, lexer.Line, lexer.Column-1)
+		}
+	}
+}
+
 func LexQuote(lexer *Lexer) StateFn {
 	quote := lexer.Next()
 	index := 1
diff --git a/language/parser.go b/language/parser.go
index 449c299..6fef694 100644
--- a/language/parser.go
+++ b/language/parser.go
@@ -112,19 +112,31 @@
 func (parser *Parser) description() (string, error) {
 	text := ""
 	isBody := false
 	token := parser.lookahead
-	for token.Type == DESCRIPTION {
-		text += strings.Trim(token.Val[2:], " ")
-		if isBody {
-			text += " "
-		} else {
-			isBody = true
+	switch token.Type {
+	case STRING:
+		for token.Type == STRING {
+			// Join consecutive single-line descriptions with a single space:
+			// insert the separator before each string after the first.
+			if isBody {
+				text += " "
+			} else {
+				isBody = true
+			}
+			text += strings.Trim(token.Val, " ")
+			err := parser.match(STRING)
+			if err != nil {
+				return text, err
+			}
+			token = parser.lookahead
 		}
-		err := parser.match(DESCRIPTION)
+	case MULTILINE_STRING:
+		err := parser.match(MULTILINE_STRING)
 		if err != nil {
 			return text, err
 		}
-		token = parser.lookahead
+		text = strings.Trim(token.Val, "\n")
 	}
+
 	return text, nil
 }
diff --git a/language/parser_test.go b/language/parser_test.go
index 472f125..15eca56 100644
--- a/language/parser_test.go
+++ b/language/parser_test.go
@@ -1367,6 +1367,41 @@ input Hello {
 		})
 		So(err, ShouldNotEqual, nil)
 	})
+
+	Convey("multiline descriptions", func() {
+		result, err = parser.Parse(&ParseParams{
+			Source: `
+# Comment
+"""
+Multiline Description on Scalar
+Line 2
+"""
+scalar Scalar
+
+"""
+Multiline Description on Object
+Line 2
+"""
+type Object {
+	"""
+	Multiline Description on Field
+	Line 2
+	"""
+	field: String!
+}
+`,
+		})
+		So(err, ShouldEqual, nil)
+
+		scalar := result.Definitions[0].(*ScalarTypeDefinition)
+		object := result.Definitions[1].(*ObjectTypeDefinition)
+		field := object.Fields[0]
+
+		So(scalar.Description, ShouldEqual, "Multiline Description on Scalar\nLine 2")
+		So(object.Description, ShouldEqual, "Multiline Description on Object\nLine 2")
+		So(field.Description, ShouldEqual, "\tMultiline Description on Field\n\tLine 2\n\t")
+	})
+
 	})
 }
 