GraphQL master FF for review (#18445)
* Initial work on a graphql API
* Added receipts, and more transaction fields.
* Finish receipts, add logs
* Add transactionCount to block
* Add types and .
* Update Block type to be compatible with ethql
* Rename nonce to transactionCount in Account, to be compatible with ethql
* Update transaction, receipt and log to match ethql
* Add query operator, for a range of blocks
* Added ommerCount to Block
* Add transactionAt and ommerAt to Block
* Added sendRawTransaction mutation
* Add Call and EstimateGas to graphQL API
* Refactored to use hexutil.Bytes instead of HexBytes
* Replace BigNum with hexutil.Big
* Refactor call and estimateGas to use ethapi struct type
* Replace ethgraphql.Address with common.Address
* Replace ethgraphql.Hash with common.Hash
* Converted most quantities to Long instead of Int
* Add support for logs
* Fix bug in runFilter
* Restructured Transaction to work primarily with headers, so uncle data is reported properly
* Add gasPrice API
* Add protocolVersion API
* Add syncing API
* Moved schema into its own source file
* Move some single-use args types into anonymous structs
* Add doc-comments
* Fixed backend fetching to use context
* Added (very) basic tests
* Add documentation to the graphql schema
* Fix reversion for formatting of big numbers
* Correct spelling error
* s/BigInt/Long/
* Update common/types.go
* Fixes in response to review
* Fix lint error
* Updated calls on private functions
* Fix typo in graphql.go
* Rollback ethapi breaking changes for graphql support

Co-Authored-By: Arachnid <arachnid@notdot.net>
committed by Guillaume Ballet
parent 105008b6a1
commit f91312dbdb
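The commit message above describes the new GraphQL API added to the node. Below is a minimal usage sketch, not part of this commit, that issues a query using only fields named in the commit message (gasPrice, protocolVersion, transactionCount, ommerCount). The endpoint URL and port are assumptions, as is the behaviour of `block` with no arguments resolving to the latest block; adjust to however the node actually exposes GraphQL.

package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"io/ioutil"
	"net/http"
)

func main() {
	// Query fields mentioned in the commit message; `block` with no
	// arguments is assumed here to resolve to the latest block.
	query := `{ gasPrice protocolVersion block { transactionCount ommerCount } }`

	body, err := json.Marshal(map[string]string{"query": query})
	if err != nil {
		panic(err)
	}

	// Hypothetical local endpoint; the path and port are assumptions.
	resp, err := http.Post("http://localhost:8547/graphql", "application/json", bytes.NewReader(body))
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	out, err := ioutil.ReadAll(resp.Body)
	if err != nil {
		panic(err)
	}
	fmt.Println(string(out)) // raw JSON response, e.g. {"data":{...}}
}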
vendor/github.com/graph-gophers/graphql-go/internal/common/directive.go (generated, vendored, new file, 32 lines added)
@@ -0,0 +1,32 @@
package common

type Directive struct {
	Name Ident
	Args ArgumentList
}

func ParseDirectives(l *Lexer) DirectiveList {
	var directives DirectiveList
	for l.Peek() == '@' {
		l.ConsumeToken('@')
		d := &Directive{}
		d.Name = l.ConsumeIdentWithLoc()
		d.Name.Loc.Column--
		if l.Peek() == '(' {
			d.Args = ParseArguments(l)
		}
		directives = append(directives, d)
	}
	return directives
}

type DirectiveList []*Directive

func (l DirectiveList) Get(name string) *Directive {
	for _, d := range l {
		if d.Name.Name == name {
			return d
		}
	}
	return nil
}
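For context, a brief sketch of how this directive parser is driven. This is hypothetical illustration code, written as if it lived inside the same internal common package (the package cannot be imported from outside the module); it lexes a directive such as `@deprecated(reason: ...)` and looks up its argument by name.

package common

import "fmt"

// parseDirectivesSketch is illustrative only, not part of the vendored file.
func parseDirectivesSketch() {
	l := NewLexer(`@deprecated(reason: "use newField")`)
	err := l.CatchSyntaxError(func() {
		l.Consume() // prime the lexer with the first token
		ds := ParseDirectives(l)
		if d := ds.Get("deprecated"); d != nil {
			if reason, ok := d.Args.Get("reason"); ok {
				fmt.Println(reason.Value(nil)) // prints: use newField
			}
		}
	})
	if err != nil {
		fmt.Println(err)
	}
}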
vendor/github.com/graph-gophers/graphql-go/internal/common/lexer.go (generated, vendored, new file, 161 lines added)
@@ -0,0 +1,161 @@
package common

import (
	"fmt"
	"strings"
	"text/scanner"

	"github.com/graph-gophers/graphql-go/errors"
)

type syntaxError string

type Lexer struct {
	sc          *scanner.Scanner
	next        rune
	descComment string
}

type Ident struct {
	Name string
	Loc  errors.Location
}

func NewLexer(s string) *Lexer {
	sc := &scanner.Scanner{
		Mode: scanner.ScanIdents | scanner.ScanInts | scanner.ScanFloats | scanner.ScanStrings,
	}
	sc.Init(strings.NewReader(s))

	return &Lexer{sc: sc}
}

func (l *Lexer) CatchSyntaxError(f func()) (errRes *errors.QueryError) {
	defer func() {
		if err := recover(); err != nil {
			if err, ok := err.(syntaxError); ok {
				errRes = errors.Errorf("syntax error: %s", err)
				errRes.Locations = []errors.Location{l.Location()}
				return
			}
			panic(err)
		}
	}()

	f()
	return
}

func (l *Lexer) Peek() rune {
	return l.next
}

// Consume whitespace and tokens equivalent to whitespace (e.g. commas and comments).
//
// Consumed comment characters will build the description for the next type or field encountered.
// The description is available from `DescComment()`, and will be reset every time `Consume()` is
// executed.
func (l *Lexer) Consume() {
	l.descComment = ""
	for {
		l.next = l.sc.Scan()

		if l.next == ',' {
			// Similar to white space and line terminators, commas (',') are used to improve the
			// legibility of source text and separate lexical tokens but are otherwise syntactically and
			// semantically insignificant within GraphQL documents.
			//
			// http://facebook.github.io/graphql/draft/#sec-Insignificant-Commas
			continue
		}

		if l.next == '#' {
			// GraphQL source documents may contain single-line comments, starting with the '#' marker.
			//
			// A comment can contain any Unicode code point except `LineTerminator` so a comment always
			// consists of all code points starting with the '#' character up to but not including the
			// line terminator.

			l.consumeComment()
			continue
		}

		break
	}
}

func (l *Lexer) ConsumeIdent() string {
	name := l.sc.TokenText()
	l.ConsumeToken(scanner.Ident)
	return name
}

func (l *Lexer) ConsumeIdentWithLoc() Ident {
	loc := l.Location()
	name := l.sc.TokenText()
	l.ConsumeToken(scanner.Ident)
	return Ident{name, loc}
}

func (l *Lexer) ConsumeKeyword(keyword string) {
	if l.next != scanner.Ident || l.sc.TokenText() != keyword {
		l.SyntaxError(fmt.Sprintf("unexpected %q, expecting %q", l.sc.TokenText(), keyword))
	}
	l.Consume()
}

func (l *Lexer) ConsumeLiteral() *BasicLit {
	lit := &BasicLit{Type: l.next, Text: l.sc.TokenText()}
	l.Consume()
	return lit
}

func (l *Lexer) ConsumeToken(expected rune) {
	if l.next != expected {
		l.SyntaxError(fmt.Sprintf("unexpected %q, expecting %s", l.sc.TokenText(), scanner.TokenString(expected)))
	}
	l.Consume()
}

func (l *Lexer) DescComment() string {
	return l.descComment
}

func (l *Lexer) SyntaxError(message string) {
	panic(syntaxError(message))
}

func (l *Lexer) Location() errors.Location {
	return errors.Location{
		Line:   l.sc.Line,
		Column: l.sc.Column,
	}
}

// consumeComment consumes all characters from `#` to the first encountered line terminator.
// The characters are appended to `l.descComment`.
func (l *Lexer) consumeComment() {
	if l.next != '#' {
		return
	}

	// TODO: count and trim whitespace so we can dedent any following lines.
	if l.sc.Peek() == ' ' {
		l.sc.Next()
	}

	if l.descComment != "" {
		// TODO: use a bytes.Buffer or strings.Builder instead of this.
		l.descComment += "\n"
	}

	for {
		next := l.sc.Next()
		if next == '\r' || next == '\n' || next == scanner.EOF {
			break
		}

		// TODO: use a bytes.Buffer or strings.Build instead of this.
		l.descComment += string(next)
	}
}
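A small hypothetical sketch (again written as if inside the common package) of the comment handling above: `#` comments consumed by Consume() accumulate into the description returned by DescComment() for the next token, and are reset on the following Consume().

package common

import "fmt"

// lexerDescSketch is illustrative only, not part of the vendored file.
func lexerDescSketch() {
	l := NewLexer("# The account's current balance,\n# in wei.\nbalance")
	err := l.CatchSyntaxError(func() {
		l.Consume()                   // skips both comment lines, stops at the `balance` ident
		fmt.Println(l.DescComment())  // "The account's current balance,\nin wei."
		fmt.Println(l.ConsumeIdent()) // "balance"
	})
	if err != nil {
		fmt.Println(err)
	}
}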
vendor/github.com/graph-gophers/graphql-go/internal/common/literals.go (generated, vendored, new file, 206 lines added)
@@ -0,0 +1,206 @@
package common

import (
	"strconv"
	"strings"
	"text/scanner"

	"github.com/graph-gophers/graphql-go/errors"
)

type Literal interface {
	Value(vars map[string]interface{}) interface{}
	String() string
	Location() errors.Location
}

type BasicLit struct {
	Type rune
	Text string
	Loc  errors.Location
}

func (lit *BasicLit) Value(vars map[string]interface{}) interface{} {
	switch lit.Type {
	case scanner.Int:
		value, err := strconv.ParseInt(lit.Text, 10, 32)
		if err != nil {
			panic(err)
		}
		return int32(value)

	case scanner.Float:
		value, err := strconv.ParseFloat(lit.Text, 64)
		if err != nil {
			panic(err)
		}
		return value

	case scanner.String:
		value, err := strconv.Unquote(lit.Text)
		if err != nil {
			panic(err)
		}
		return value

	case scanner.Ident:
		switch lit.Text {
		case "true":
			return true
		case "false":
			return false
		default:
			return lit.Text
		}

	default:
		panic("invalid literal")
	}
}

func (lit *BasicLit) String() string {
	return lit.Text
}

func (lit *BasicLit) Location() errors.Location {
	return lit.Loc
}

type ListLit struct {
	Entries []Literal
	Loc     errors.Location
}

func (lit *ListLit) Value(vars map[string]interface{}) interface{} {
	entries := make([]interface{}, len(lit.Entries))
	for i, entry := range lit.Entries {
		entries[i] = entry.Value(vars)
	}
	return entries
}

func (lit *ListLit) String() string {
	entries := make([]string, len(lit.Entries))
	for i, entry := range lit.Entries {
		entries[i] = entry.String()
	}
	return "[" + strings.Join(entries, ", ") + "]"
}

func (lit *ListLit) Location() errors.Location {
	return lit.Loc
}

type ObjectLit struct {
	Fields []*ObjectLitField
	Loc    errors.Location
}

type ObjectLitField struct {
	Name  Ident
	Value Literal
}

func (lit *ObjectLit) Value(vars map[string]interface{}) interface{} {
	fields := make(map[string]interface{}, len(lit.Fields))
	for _, f := range lit.Fields {
		fields[f.Name.Name] = f.Value.Value(vars)
	}
	return fields
}

func (lit *ObjectLit) String() string {
	entries := make([]string, 0, len(lit.Fields))
	for _, f := range lit.Fields {
		entries = append(entries, f.Name.Name+": "+f.Value.String())
	}
	return "{" + strings.Join(entries, ", ") + "}"
}

func (lit *ObjectLit) Location() errors.Location {
	return lit.Loc
}

type NullLit struct {
	Loc errors.Location
}

func (lit *NullLit) Value(vars map[string]interface{}) interface{} {
	return nil
}

func (lit *NullLit) String() string {
	return "null"
}

func (lit *NullLit) Location() errors.Location {
	return lit.Loc
}

type Variable struct {
	Name string
	Loc  errors.Location
}

func (v Variable) Value(vars map[string]interface{}) interface{} {
	return vars[v.Name]
}

func (v Variable) String() string {
	return "$" + v.Name
}

func (v *Variable) Location() errors.Location {
	return v.Loc
}

func ParseLiteral(l *Lexer, constOnly bool) Literal {
	loc := l.Location()
	switch l.Peek() {
	case '$':
		if constOnly {
			l.SyntaxError("variable not allowed")
			panic("unreachable")
		}
		l.ConsumeToken('$')
		return &Variable{l.ConsumeIdent(), loc}

	case scanner.Int, scanner.Float, scanner.String, scanner.Ident:
		lit := l.ConsumeLiteral()
		if lit.Type == scanner.Ident && lit.Text == "null" {
			return &NullLit{loc}
		}
		lit.Loc = loc
		return lit
	case '-':
		l.ConsumeToken('-')
		lit := l.ConsumeLiteral()
		lit.Text = "-" + lit.Text
		lit.Loc = loc
		return lit
	case '[':
		l.ConsumeToken('[')
		var list []Literal
		for l.Peek() != ']' {
			list = append(list, ParseLiteral(l, constOnly))
		}
		l.ConsumeToken(']')
		return &ListLit{list, loc}

	case '{':
		l.ConsumeToken('{')
		var fields []*ObjectLitField
		for l.Peek() != '}' {
			name := l.ConsumeIdentWithLoc()
			l.ConsumeToken(':')
			value := ParseLiteral(l, constOnly)
			fields = append(fields, &ObjectLitField{name, value})
		}
		l.ConsumeToken('}')
		return &ObjectLit{fields, loc}

	default:
		l.SyntaxError("invalid value")
		panic("unreachable")
	}
}
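A hypothetical sketch of ParseLiteral in action (inside the same package): it parses an object literal that contains a variable, then evaluates the result against a variable map at Value() time.

package common

import "fmt"

// parseLiteralSketch is illustrative only, not part of the vendored file.
func parseLiteralSketch() {
	l := NewLexer(`{gas: 21000, to: "0xdead", value: $amount}`)

	var lit Literal
	err := l.CatchSyntaxError(func() {
		l.Consume()
		lit = ParseLiteral(l, false) // constOnly=false, so $amount is allowed
	})
	if err != nil {
		fmt.Println(err)
		return
	}

	fmt.Println(lit.String()) // {gas: 21000, to: "0xdead", value: $amount}

	// Variables are substituted from the vars map only when the literal is evaluated.
	fmt.Println(lit.Value(map[string]interface{}{"amount": 1000000}))
	// prints something like: map[gas:21000 to:0xdead value:1000000]
}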
vendor/github.com/graph-gophers/graphql-go/internal/common/types.go (generated, vendored, new file, 80 lines added)
@@ -0,0 +1,80 @@
package common

import (
	"github.com/graph-gophers/graphql-go/errors"
)

type Type interface {
	Kind() string
	String() string
}

type List struct {
	OfType Type
}

type NonNull struct {
	OfType Type
}

type TypeName struct {
	Ident
}

func (*List) Kind() string     { return "LIST" }
func (*NonNull) Kind() string  { return "NON_NULL" }
func (*TypeName) Kind() string { panic("TypeName needs to be resolved to actual type") }

func (t *List) String() string    { return "[" + t.OfType.String() + "]" }
func (t *NonNull) String() string { return t.OfType.String() + "!" }
func (*TypeName) String() string  { panic("TypeName needs to be resolved to actual type") }

func ParseType(l *Lexer) Type {
	t := parseNullType(l)
	if l.Peek() == '!' {
		l.ConsumeToken('!')
		return &NonNull{OfType: t}
	}
	return t
}

func parseNullType(l *Lexer) Type {
	if l.Peek() == '[' {
		l.ConsumeToken('[')
		ofType := ParseType(l)
		l.ConsumeToken(']')
		return &List{OfType: ofType}
	}

	return &TypeName{Ident: l.ConsumeIdentWithLoc()}
}

type Resolver func(name string) Type

func ResolveType(t Type, resolver Resolver) (Type, *errors.QueryError) {
	switch t := t.(type) {
	case *List:
		ofType, err := ResolveType(t.OfType, resolver)
		if err != nil {
			return nil, err
		}
		return &List{OfType: ofType}, nil
	case *NonNull:
		ofType, err := ResolveType(t.OfType, resolver)
		if err != nil {
			return nil, err
		}
		return &NonNull{OfType: ofType}, nil
	case *TypeName:
		refT := resolver(t.Name)
		if refT == nil {
			err := errors.Errorf("Unknown type %q.", t.Name)
			err.Rule = "KnownTypeNames"
			err.Locations = []errors.Location{t.Loc}
			return nil, err
		}
		return refT, nil
	default:
		return t, nil
	}
}
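A hypothetical sketch of ParseType and ResolveType together (inside the same package): the List/NonNull wrappers are parsed structurally, and the inner TypeName is swapped for a concrete type by the resolver. The namedType stand-in below exists only for this sketch; in the real library the resolver would return a schema type.

package common

import "fmt"

// namedType is a toy stand-in for a resolved schema type, used only in this sketch.
type namedType string

func (n namedType) Kind() string   { return "OBJECT" }
func (n namedType) String() string { return string(n) }

// parseTypeSketch is illustrative only, not part of the vendored file.
func parseTypeSketch() {
	l := NewLexer("[Transaction!]!")

	var t Type
	err := l.CatchSyntaxError(func() {
		l.Consume()
		t = ParseType(l) // NonNull{List{NonNull{TypeName("Transaction")}}}
	})
	if err != nil {
		fmt.Println(err)
		return
	}

	// Resolve every TypeName through the toy resolver; returning nil here
	// would instead produce a "KnownTypeNames" QueryError.
	resolved, qerr := ResolveType(t, func(name string) Type { return namedType(name) })
	if qerr != nil {
		fmt.Println(qerr)
		return
	}
	fmt.Println(resolved.Kind(), resolved.String()) // NON_NULL [Transaction!]!
}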
vendor/github.com/graph-gophers/graphql-go/internal/common/values.go (generated, vendored, new file, 78 lines added)
@@ -0,0 +1,78 @@
package common

import (
	"github.com/graph-gophers/graphql-go/errors"
)

// http://facebook.github.io/graphql/draft/#InputValueDefinition
type InputValue struct {
	Name    Ident
	Type    Type
	Default Literal
	Desc    string
	Loc     errors.Location
	TypeLoc errors.Location
}

type InputValueList []*InputValue

func (l InputValueList) Get(name string) *InputValue {
	for _, v := range l {
		if v.Name.Name == name {
			return v
		}
	}
	return nil
}

func ParseInputValue(l *Lexer) *InputValue {
	p := &InputValue{}
	p.Loc = l.Location()
	p.Desc = l.DescComment()
	p.Name = l.ConsumeIdentWithLoc()
	l.ConsumeToken(':')
	p.TypeLoc = l.Location()
	p.Type = ParseType(l)
	if l.Peek() == '=' {
		l.ConsumeToken('=')
		p.Default = ParseLiteral(l, true)
	}
	return p
}

type Argument struct {
	Name  Ident
	Value Literal
}

type ArgumentList []Argument

func (l ArgumentList) Get(name string) (Literal, bool) {
	for _, arg := range l {
		if arg.Name.Name == name {
			return arg.Value, true
		}
	}
	return nil, false
}

func (l ArgumentList) MustGet(name string) Literal {
	value, ok := l.Get(name)
	if !ok {
		panic("argument not found")
	}
	return value
}

func ParseArguments(l *Lexer) ArgumentList {
	var args ArgumentList
	l.ConsumeToken('(')
	for l.Peek() != ')' {
		name := l.ConsumeIdentWithLoc()
		l.ConsumeToken(':')
		value := ParseLiteral(l, false)
		args = append(args, Argument{Name: name, Value: value})
	}
	l.ConsumeToken(')')
	return args
}
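Finally, a hypothetical sketch of ParseInputValue (inside the same package): the preceding `#` comment becomes the description, and the trailing `= ...` becomes the default literal, parsed in const-only mode so variables are rejected.

package common

import "fmt"

// parseInputValueSketch is illustrative only, not part of the vendored file.
func parseInputValueSketch() {
	src := `# Sender of the call.
from: Address = "0x0000000000000000000000000000000000000000"`
	l := NewLexer(src)

	var iv *InputValue
	err := l.CatchSyntaxError(func() {
		l.Consume()
		iv = ParseInputValue(l)
	})
	if err != nil {
		fmt.Println(err)
		return
	}

	fmt.Println(iv.Name.Name)             // from
	fmt.Println(iv.Desc)                  // Sender of the call.
	fmt.Println(iv.Type.(*TypeName).Name) // Address (still unresolved at this point)
	fmt.Println(iv.Default.String())      // "0x0000000000000000000000000000000000000000"
}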