d2parser: Add imports support

Includes rewriting @file.d2 to @file (the trailing .d2 is dropped from the import path).
Anmol Sethi 2023-05-23 17:32:05 -07:00
parent 962f3445f5
commit 89e1e324d7
9 changed files with 381 additions and 19 deletions
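For orientation, a minimal sketch (not part of this commit) of how the new import syntax surfaces through the d2parser API; the Parse signature and the AST field accesses are assumed from the existing d2parser package and the tests added in this commit:

package main

import (
	"fmt"
	"strings"

	"oss.terrastruct.com/d2/d2parser"
)

func main() {
	// "@file.d2" parses as an Import value; the trailing ".d2" is dropped,
	// so the stored path element is just "file".
	ast, err := d2parser.Parse("sketch.d2", strings.NewReader("x: @file.d2"), nil)
	if err != nil {
		panic(err)
	}
	imp := ast.Nodes[0].MapKey.Value.Import
	fmt.Println(imp.Path[0].Unbox().ScalarString()) // file
	fmt.Println(imp.Spread)                         // false
}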


@@ -63,6 +63,7 @@ var _ Node = &DoubleQuotedString{}
var _ Node = &SingleQuotedString{}
var _ Node = &BlockString{}
var _ Node = &Substitution{}
var _ Node = &Import{}
var _ Node = &Array{}
var _ Node = &Map{}
@@ -277,6 +278,7 @@ var _ MapNode = &Comment{}
var _ MapNode = &BlockComment{}
var _ MapNode = &Key{}
var _ MapNode = &Substitution{}
var _ MapNode = &Import{}
// ArrayNode is implemented by nodes that may be children of Arrays.
type ArrayNode interface {
@@ -288,6 +290,7 @@ type ArrayNode interface {
var _ ArrayNode = &Comment{}
var _ ArrayNode = &BlockComment{}
var _ ArrayNode = &Substitution{}
var _ ArrayNode = &Import{}
// Value is implemented by nodes that may be values of a key.
type Value interface {
@@ -334,6 +337,7 @@ func (s *DoubleQuotedString) node() {}
func (s *SingleQuotedString) node() {}
func (s *BlockString) node() {}
func (s *Substitution) node() {}
func (i *Import) node() {}
func (a *Array) node() {}
func (m *Map) node() {}
func (k *Key) node() {}
@@ -351,6 +355,7 @@ func (s *DoubleQuotedString) Type() string { return "double quoted string" }
func (s *SingleQuotedString) Type() string { return "single quoted string" }
func (s *BlockString) Type() string { return s.Tag + " block string" }
func (s *Substitution) Type() string { return "substitution" }
func (i *Import) Type() string { return "import" }
func (a *Array) Type() string { return "array" }
func (m *Map) Type() string { return "map" }
func (k *Key) Type() string { return "map key" }
@@ -368,6 +373,7 @@ func (s *DoubleQuotedString) GetRange() Range { return s.Range }
func (s *SingleQuotedString) GetRange() Range { return s.Range }
func (s *BlockString) GetRange() Range { return s.Range }
func (s *Substitution) GetRange() Range { return s.Range }
func (i *Import) GetRange() Range { return i.Range }
func (a *Array) GetRange() Range { return a.Range }
func (m *Map) GetRange() Range { return m.Range }
func (k *Key) GetRange() Range { return k.Range }
@@ -379,6 +385,7 @@ func (c *Comment) mapNode() {}
func (c *BlockComment) mapNode() {}
func (k *Key) mapNode() {}
func (s *Substitution) mapNode() {}
func (i *Import) mapNode() {}
func (c *Comment) arrayNode() {}
func (c *BlockComment) arrayNode() {}
@@ -390,6 +397,7 @@ func (s *DoubleQuotedString) arrayNode() {}
func (s *SingleQuotedString) arrayNode() {}
func (s *BlockString) arrayNode() {}
func (s *Substitution) arrayNode() {}
func (i *Import) arrayNode() {}
func (a *Array) arrayNode() {}
func (m *Map) arrayNode() {}
@@ -402,6 +410,7 @@ func (s *SingleQuotedString) value() {}
func (s *BlockString) value() {}
func (a *Array) value() {}
func (m *Map) value() {}
func (i *Import) value() {}
func (n *Null) scalar() {}
func (b *Boolean) scalar() {}
@@ -722,11 +731,19 @@ type Substitution struct {
Path []*StringBox `json:"path"`
}
type Import struct {
Range Range `json:"range"`
Spread bool `json:"spread"`
Path []*StringBox `json:"path"`
}
// MapNodeBox is used to box MapNode for JSON persistence.
type MapNodeBox struct {
Comment *Comment `json:"comment,omitempty"`
BlockComment *BlockComment `json:"block_comment,omitempty"`
Substitution *Substitution `json:"substitution,omitempty"`
Import *Import `json:"import,omitempty"`
MapKey *Key `json:"map_key,omitempty"`
}
@@ -739,6 +756,8 @@ func MakeMapNodeBox(n MapNode) MapNodeBox {
box.BlockComment = n
case *Substitution:
box.Substitution = n
case *Import:
box.Import = n
case *Key:
box.MapKey = n
}
@@ -753,6 +772,8 @@ func (mb MapNodeBox) Unbox() MapNode {
return mb.BlockComment
case mb.Substitution != nil:
return mb.Substitution
case mb.Import != nil:
return mb.Import
case mb.MapKey != nil:
return mb.MapKey
default:
@@ -765,6 +786,7 @@ type ArrayNodeBox struct {
Comment *Comment `json:"comment,omitempty"`
BlockComment *BlockComment `json:"block_comment,omitempty"`
Substitution *Substitution `json:"substitution,omitempty"`
Import *Import `json:"import,omitempty"`
Null *Null `json:"null,omitempty"`
Boolean *Boolean `json:"boolean,omitempty"`
Number *Number `json:"number,omitempty"`
@@ -785,6 +807,8 @@ func MakeArrayNodeBox(an ArrayNode) ArrayNodeBox {
ab.BlockComment = an
case *Substitution:
ab.Substitution = an
case *Import:
ab.Import = an
case *Null:
ab.Null = an
case *Boolean:
@@ -815,6 +839,8 @@ func (ab ArrayNodeBox) Unbox() ArrayNode {
return ab.BlockComment
case ab.Substitution != nil:
return ab.Substitution
case ab.Import != nil:
return ab.Import
case ab.Null != nil:
return ab.Null
case ab.Boolean != nil:
@@ -849,6 +875,7 @@ type ValueBox struct {
BlockString *BlockString `json:"block_string,omitempty"`
Array *Array `json:"array,omitempty"`
Map *Map `json:"map,omitempty"`
Import *Import `json:"import,omitempty"`
}
func (vb ValueBox) Unbox() Value {
@@ -871,6 +898,8 @@ func (vb ValueBox) Unbox() Value {
return vb.Array
case vb.Map != nil:
return vb.Map
case vb.Import != nil:
return vb.Import
default:
return nil
}
@@ -897,6 +926,8 @@ func MakeValueBox(v Value) ValueBox {
vb.Array = v
case *Map:
vb.Map = v
case *Import:
vb.Import = v
}
return vb
}


@@ -62,6 +62,8 @@ func (p *printer) node(n d2ast.Node) {
p.blockString(n)
case *d2ast.Substitution:
p.substitution(n)
case *d2ast.Import:
p._import(n)
case *d2ast.Array:
p.array(n)
case *d2ast.Map:
@@ -203,6 +205,14 @@ func (p *printer) substitution(s *d2ast.Substitution) {
p.sb.WriteByte('}')
}
func (p *printer) _import(i *d2ast.Import) {
if i.Spread {
p.sb.WriteString("...")
}
p.sb.WriteString("@")
p.path(i.Path)
}
func (p *printer) array(a *d2ast.Array) {
p.sb.WriteByte('[')
if !a.Range.OneLine() {


@@ -619,6 +619,14 @@ x <= y
exp: `x <- = y
`,
},
{
name: "import",
in: `
x: @file.d2
`,
exp: `x: @file
`,
},
}
for _, tc := range testCases {


@@ -448,17 +448,30 @@ func (p *parser) parseMapNode(r rune) d2ast.MapNodeBox {
box.BlockComment = p.parseBlockComment()
return box
case '.':
s, eof := p.peekn(3)
s, eof := p.peekn(2)
if eof {
break
}
if s != "..$" {
if s != ".." {
p.rewind()
break
}
p.commit()
box.Substitution = p.parseSubstitution(true)
return box
r, eof := p.peek()
if eof {
break
}
if r == '$' {
p.commit()
box.Substitution = p.parseSubstitution(true)
return box
}
if r == '@' {
p.commit()
box.Import = p.parseImport(true)
return box
}
p.rewind()
break
}
p.replay(r)
@@ -1502,17 +1515,30 @@ func (p *parser) parseArrayNode(r rune) d2ast.ArrayNodeBox {
box.BlockComment = p.parseBlockComment()
return box
case '.':
s, eof := p.peekn(3)
s, eof := p.peekn(2)
if eof {
break
}
if s != "..$" {
if s != ".." {
p.rewind()
break
}
p.commit()
box.Substitution = p.parseSubstitution(true)
return box
r, eof := p.peek()
if eof {
break
}
if r == '$' {
p.commit()
box.Substitution = p.parseSubstitution(true)
return box
}
if r == '@' {
p.commit()
box.Import = p.parseImport(true)
return box
}
p.rewind()
break
}
p.replay(r)
@@ -1529,6 +1555,7 @@ func (p *parser) parseArrayNode(r rune) d2ast.ArrayNodeBox {
box.BlockString = vbox.BlockString
box.Array = vbox.Array
box.Map = vbox.Map
box.Import = vbox.Import
return box
}
@@ -1549,6 +1576,9 @@ func (p *parser) parseValue() d2ast.ValueBox {
case '{':
box.Map = p.parseMap(false)
return box
case '@':
box.Import = p.parseImport(false)
return box
}
p.replay(r)
@@ -1659,6 +1689,30 @@ func (p *parser) parseSubstitution(spread bool) *d2ast.Substitution {
return subst
}
func (p *parser) parseImport(spread bool) *d2ast.Import {
imp := &d2ast.Import{
Range: d2ast.Range{
Path: p.path,
Start: p.pos.SubtractString("@", p.utf16),
},
Spread: spread,
}
defer imp.Range.End.From(&p.pos)
if imp.Spread {
imp.Range.Start = imp.Range.Start.SubtractString("...", p.utf16)
}
k := p.parseKey()
if k != nil {
// Rewrite @file.d2 to @file by dropping the trailing "d2" path element.
if len(k.Path) > 1 && k.Path[0].UnquotedString != nil && k.Path[1].Unbox().ScalarString() == "d2" {
k.Path = append(k.Path[:1], k.Path[2:]...)
}
imp.Path = k.Path
}
return imp
}
// func marshalKey(k *d2ast.Key) string {
// var sb strings.Builder
// for i, s := range k.Path {


@@ -13,20 +13,19 @@ import (
"oss.terrastruct.com/d2/d2parser"
)
type testCase struct {
name string
text string
assert func(t testing.TB, ast *d2ast.Map, err error)
}
// TODO: next step for parser is writing as many tests and grouping them nicely
// TODO: add assertions
// to layout *all* expected behavior.
func TestParse(t *testing.T) {
t.Parallel()
testCases := []struct {
name string
text string
assert func(t testing.TB, ast *d2ast.Map, err error)
// exp is in testdata/d2parser/TestParse/${name}.json
}{
var testCases = []testCase{
{
name: "empty",
text: ``,
@@ -394,7 +393,53 @@ c-
},
}
for _, tc := range testCases {
t.Run("import", testImport)
runa(t, testCases)
}
func testImport(t *testing.T) {
t.Parallel()
tca := []testCase{
{
text: "x: @file",
assert: func(t testing.TB, ast *d2ast.Map, err error) {
assert.Success(t, err)
assert.Equal(t, "file", ast.Nodes[0].MapKey.Value.Import.Path[0].Unbox().ScalarString())
},
},
{
text: "x: @file.d2",
assert: func(t testing.TB, ast *d2ast.Map, err error) {
assert.Success(t, err)
assert.Equal(t, "file", ast.Nodes[0].MapKey.Value.Import.Path[0].Unbox().ScalarString())
},
},
{
text: "...@file.d2",
assert: func(t testing.TB, ast *d2ast.Map, err error) {
assert.Success(t, err)
assert.True(t, ast.Nodes[0].Import.Spread)
assert.Equal(t, "file", ast.Nodes[0].Import.Path[0].Unbox().ScalarString())
},
},
{
text: "x: [...@file.d2]",
assert: func(t testing.TB, ast *d2ast.Map, err error) {
assert.Success(t, err)
imp := ast.Nodes[0].MapKey.Value.Array.Nodes[0].Import
assert.True(t, imp.Spread)
assert.Equal(t, "file", imp.Path[0].Unbox().ScalarString())
},
},
}
runa(t, tca)
}
func runa(t *testing.T, tca []testCase) {
for _, tc := range tca {
tc := tc
t.Run(tc.name, func(t *testing.T) {
t.Parallel()

testdata/d2parser/TestParse/import/#00.exp.json

@@ -0,0 +1,49 @@
{
"ast": {
"range": "d2/testdata/d2parser/TestParse/import/#00.d2,0:0:0-0:8:8",
"nodes": [
{
"map_key": {
"range": "d2/testdata/d2parser/TestParse/import/#00.d2,0:0:0-0:8:8",
"key": {
"range": "d2/testdata/d2parser/TestParse/import/#00.d2,0:0:0-0:1:1",
"path": [
{
"unquoted_string": {
"range": "d2/testdata/d2parser/TestParse/import/#00.d2,0:0:0-0:1:1",
"value": [
{
"string": "x",
"raw_string": "x"
}
]
}
}
]
},
"primary": {},
"value": {
"import": {
"range": "d2/testdata/d2parser/TestParse/import/#00.d2,0:3:3-0:8:8",
"spread": false,
"path": [
{
"unquoted_string": {
"range": "d2/testdata/d2parser/TestParse/import/#00.d2,0:4:4-0:8:8",
"value": [
{
"string": "file",
"raw_string": "file"
}
]
}
}
]
}
}
}
}
]
},
"err": null
}

testdata/d2parser/TestParse/import/#01.exp.json

@@ -0,0 +1,60 @@
{
"ast": {
"range": "d2/testdata/d2parser/TestParse/import/#01.d2,0:0:0-0:11:11",
"nodes": [
{
"map_key": {
"range": "d2/testdata/d2parser/TestParse/import/#01.d2,0:0:0-0:11:11",
"key": {
"range": "d2/testdata/d2parser/TestParse/import/#01.d2,0:0:0-0:1:1",
"path": [
{
"unquoted_string": {
"range": "d2/testdata/d2parser/TestParse/import/#01.d2,0:0:0-0:1:1",
"value": [
{
"string": "x",
"raw_string": "x"
}
]
}
}
]
},
"primary": {},
"value": {
"import": {
"range": "d2/testdata/d2parser/TestParse/import/#01.d2,0:3:3-0:11:11",
"spread": false,
"path": [
{
"unquoted_string": {
"range": "d2/testdata/d2parser/TestParse/import/#01.d2,0:4:4-0:8:8",
"value": [
{
"string": "file",
"raw_string": "file"
}
]
}
},
{
"unquoted_string": {
"range": "d2/testdata/d2parser/TestParse/import/#01.d2,0:9:9-0:11:11",
"value": [
{
"string": "d2",
"raw_string": "d2"
}
]
}
}
]
}
}
}
}
]
},
"err": null
}

testdata/d2parser/TestParse/import/#02.exp.json

@@ -0,0 +1,38 @@
{
"ast": {
"range": "d2/testdata/d2parser/TestParse/import/#02.d2,0:0:0-0:11:11",
"nodes": [
{
"import": {
"range": "d2/testdata/d2parser/TestParse/import/#02.d2,0:0:0-0:11:11",
"spread": true,
"path": [
{
"unquoted_string": {
"range": "d2/testdata/d2parser/TestParse/import/#02.d2,0:4:4-0:8:8",
"value": [
{
"string": "file",
"raw_string": "file"
}
]
}
},
{
"unquoted_string": {
"range": "d2/testdata/d2parser/TestParse/import/#02.d2,0:9:9-0:11:11",
"value": [
{
"string": "d2",
"raw_string": "d2"
}
]
}
}
]
}
}
]
},
"err": null
}

testdata/d2parser/TestParse/import/#03.exp.json

@@ -0,0 +1,67 @@
{
"ast": {
"range": "d2/testdata/d2parser/TestParse/import/#03.d2,0:0:0-0:16:16",
"nodes": [
{
"map_key": {
"range": "d2/testdata/d2parser/TestParse/import/#03.d2,0:0:0-0:16:16",
"key": {
"range": "d2/testdata/d2parser/TestParse/import/#03.d2,0:0:0-0:1:1",
"path": [
{
"unquoted_string": {
"range": "d2/testdata/d2parser/TestParse/import/#03.d2,0:0:0-0:1:1",
"value": [
{
"string": "x",
"raw_string": "x"
}
]
}
}
]
},
"primary": {},
"value": {
"array": {
"range": "d2/testdata/d2parser/TestParse/import/#03.d2,0:3:3-0:15:15",
"nodes": [
{
"import": {
"range": "d2/testdata/d2parser/TestParse/import/#03.d2,0:4:4-0:15:15",
"spread": true,
"path": [
{
"unquoted_string": {
"range": "d2/testdata/d2parser/TestParse/import/#03.d2,0:8:8-0:12:12",
"value": [
{
"string": "file",
"raw_string": "file"
}
]
}
},
{
"unquoted_string": {
"range": "d2/testdata/d2parser/TestParse/import/#03.d2,0:13:13-0:15:15",
"value": [
{
"string": "d2",
"raw_string": "d2"
}
]
}
}
]
}
}
]
}
}
}
}
]
},
"err": null
}