d2ir: wip

This commit is contained in:
Anmol Sethi 2023-01-16 03:52:37 -08:00
parent 989fdb0fe5
commit 61aef74975
No known key found for this signature in database
GPG key ID: 25BC68888A99A8BA
20 changed files with 624 additions and 139 deletions

View file

@ -1,83 +0,0 @@
package d2ir
import (
"fmt"
"oss.terrastruct.com/d2/d2ast"
"oss.terrastruct.com/d2/d2format"
"oss.terrastruct.com/d2/d2parser"
)
// compiler accumulates errors while lowering a d2ast.Map into the IR.
// The zero value is ready to use.
type compiler struct {
// err collects every compile error; reported as a whole if non-empty.
err d2parser.ParseError
}
// errorf records a compile error at node n, prefixing the formatted
// message with the node's source range.
func (c *compiler) errorf(n d2ast.Node, f string, v ...interface{}) {
	args := append([]interface{}{n.GetRange()}, v...)
	c.err.Errors = append(c.err.Errors, d2ast.Error{
		Range:   n.GetRange(),
		Message: fmt.Sprintf("%v: "+f, args...),
	})
}
// Apply compiles ast into dst. It returns the accumulated
// d2parser.ParseError if any errors occurred, nil otherwise.
func Apply(dst *Map, ast *d2ast.Map) error {
	c := &compiler{}
	c.compileMap(dst, ast)
	if c.err.Empty() {
		return nil
	}
	return c.err
}
// compileMap compiles every node of ast into dst.
func (c *compiler) compileMap(dst *Map, ast *d2ast.Map) {
	for _, node := range ast.Nodes {
		if node.MapKey != nil {
			c.compileField(dst, node.MapKey)
			continue
		}
		if node.Substitution != nil {
			panic("TODO")
		}
	}
}
// compileField compiles a keyed node into a field of dst, setting its
// primary scalar and/or composite (array or map) value. Keys that also
// declare edges are left untouched beyond ensuring the field exists.
func (c *compiler) compileField(dst *Map, k *d2ast.Key) {
	if k.Key == nil || len(k.Key.Path) == 0 {
		return
	}
	f, ok := dst.Ensure(d2format.KeyPath(k.Key))
	if !ok {
		c.errorf(k, "cannot index into array")
		return
	}
	if len(k.Edges) != 0 {
		return
	}
	if p := k.Primary.Unbox(); p != nil {
		f.Primary = &Scalar{
			parent: f,
			Value:  p,
		}
	}
	switch {
	case k.Value.Array != nil:
		a := &Array{parent: f}
		c.compileArray(a, k.Value.Array)
		f.Composite = a
	case k.Value.Map != nil:
		m := &Map{parent: f}
		c.compileMap(m, k.Value.Map)
		f.Composite = m
	case k.Value.ScalarBox().Unbox() != nil:
		f.Primary = &Scalar{
			parent: f,
			Value:  k.Value.ScalarBox().Unbox(),
		}
	}
}
// compileArray compiles a into dst. Not yet implemented.
func (c *compiler) compileArray(dst *Array, a *d2ast.Array) {
	// fmt.Sprintf with no format arguments was redundant (staticcheck S1039).
	panic("TODO")
}

171
d2ir/compile.go Normal file
View file

@ -0,0 +1,171 @@
package d2ir
import (
"fmt"
"oss.terrastruct.com/d2/d2ast"
"oss.terrastruct.com/d2/d2format"
"oss.terrastruct.com/d2/d2parser"
)
// compiler accumulates errors while lowering a d2ast.Map into the IR.
// The zero value is ready to use.
type compiler struct {
// err collects every compile error; reported as a whole if non-empty.
err d2parser.ParseError
}
// errorf records a compile error at node n. The formatted message is
// always prefixed with the node's source range.
func (c *compiler) errorf(n d2ast.Node, f string, v ...interface{}) {
	r := n.GetRange()
	msg := fmt.Sprintf("%v: "+f, append([]interface{}{r}, v...)...)
	c.err.Errors = append(c.err.Errors, d2ast.Error{Range: r, Message: msg})
}
// Compile compiles ast into dst. It returns the accumulated
// d2parser.ParseError if any errors occurred, nil otherwise.
func Compile(dst *Map, ast *d2ast.Map) error {
	c := &compiler{}
	c.compileMap(dst, ast)
	if c.err.Empty() {
		return nil
	}
	return c.err
}
// compileMap compiles every node of ast into dst.
func (c *compiler) compileMap(dst *Map, ast *d2ast.Map) {
	for _, node := range ast.Nodes {
		if node.MapKey != nil {
			c.compileKey(dst, node.MapKey)
			continue
		}
		if node.Substitution != nil {
			panic("TODO")
		}
	}
}
// compileKey dispatches k to field or edge compilation depending on
// whether the key declares any edges.
func (c *compiler) compileKey(dst *Map, k *d2ast.Key) {
	if len(k.Edges) > 0 {
		c.compileEdges(dst, k)
		return
	}
	c.compileField(dst, k)
}
// compileField compiles a non-edge key into a field of dst, setting the
// field's primary scalar and/or composite (array or map) value.
func (c *compiler) compileField(dst *Map, k *d2ast.Key) {
	f, err := dst.Ensure(d2format.KeyPath(k.Key))
	if err != nil {
		// Use a constant format string: passing err.Error() as the format
		// would misinterpret any "%" in the message (go vet printf).
		c.errorf(k, "%v", err)
		return
	}
	if k.Primary.Unbox() != nil {
		f.Primary = &Scalar{
			parent: f,
			Value:  k.Primary.Unbox(),
		}
	}
	if k.Value.Array != nil {
		a := &Array{
			parent: f,
		}
		c.compileArray(a, k.Value.Array)
		f.Composite = a
	} else if k.Value.Map != nil {
		// Merge into an existing map composite rather than replacing it,
		// so repeated keys accumulate.
		if f_m, ok := f.Composite.(*Map); ok {
			c.compileMap(f_m, k.Value.Map)
		} else {
			m := &Map{
				parent: f,
			}
			c.compileMap(m, k.Value.Map)
			f.Composite = m
		}
	} else if k.Value.ScalarBox().Unbox() != nil {
		f.Primary = &Scalar{
			parent: f,
			Value:  k.Value.ScalarBox().Unbox(),
		}
	}
}
// compileEdges compiles a key that declares one or more edges. If the key
// has a path prefix (e.g. a.b.(x -> y)), the edges are created inside that
// field's map; both endpoints of every edge are ensured as fields.
func (c *compiler) compileEdges(dst *Map, k *d2ast.Key) {
	if k.Key != nil && len(k.Key.Path) > 0 {
		f, err := dst.Ensure(d2format.KeyPath(k.Key))
		if err != nil {
			// Constant format string: err.Error() may contain "%" (go vet printf).
			c.errorf(k, "%v", err)
			return
		}
		if f_m, ok := f.Composite.(*Map); ok {
			dst = f_m
		} else {
			m := &Map{
				parent: f,
			}
			f.Composite = m
			dst = m
		}
	}
	eida := NewEdgeIDs(k)
	for i, eid := range eida {
		var e *Edge
		if eid.Index != nil {
			// An explicit index may only reference an existing edge.
			ea := dst.GetEdges(eid)
			if len(ea) == 0 {
				c.errorf(k.Edges[i], "indexed edge does not exist")
				continue
			}
			e = ea[0]
		} else {
			var err error
			e, err = dst.EnsureEdge(eid)
			if err != nil {
				c.errorf(k.Edges[i], "%v", err)
				continue
			}
		}
		// Both endpoints must exist as fields.
		_, err := dst.Ensure(eid.SrcPath)
		if err != nil {
			c.errorf(k.Edges[i].Src, "%v", err)
			continue
		}
		_, err = dst.Ensure(eid.DstPath)
		if err != nil {
			c.errorf(k.Edges[i].Dst, "%v", err)
			continue
		}
		if k.EdgeKey != nil {
			// (x -> y).label style: compile the edge key as a field of the
			// edge's own map.
			if e.Map == nil {
				e.Map = &Map{
					parent: e,
				}
			}
			tmpk := &d2ast.Key{
				Range: k.EdgeKey.Range,
				Key:   k.EdgeKey,
			}
			c.compileField(e.Map, tmpk)
		} else {
			if k.Primary.Unbox() != nil {
				e.Primary = &Scalar{
					parent: e,
					Value:  k.Primary.Unbox(),
				}
			} else if k.Value.Map != nil {
				if e.Map == nil {
					e.Map = &Map{
						parent: e,
					}
				}
				c.compileMap(e.Map, k.Value.Map)
			} else if k.Value.Unbox() != nil {
				c.errorf(k.Value.Unbox(), "edges cannot be assigned arrays")
				continue
			}
		}
	}
}
// compileArray compiles a into dst. Not yet implemented.
func (c *compiler) compileArray(dst *Array, a *d2ast.Array) {
	// fmt.Sprintf with no format arguments was redundant (staticcheck S1039).
	panic("TODO")
}

View file

@ -11,7 +11,6 @@ import (
"oss.terrastruct.com/util-go/diff"
"oss.terrastruct.com/d2/d2ast"
"oss.terrastruct.com/d2/d2format"
"oss.terrastruct.com/d2/d2ir"
"oss.terrastruct.com/d2/d2parser"
)
@ -21,13 +20,13 @@ type testCase struct {
run func(testing.TB, *d2ir.Map)
}
func TestApply(t *testing.T) {
func TestCompile(t *testing.T) {
t.Parallel()
t.Run("simple", testApplySimple)
t.Run("roots", testCompileRoots)
}
func testApplySimple(t *testing.T) {
func testCompileRoots(t *testing.T) {
t.Parallel()
tca := []testCase{
@ -118,7 +117,7 @@ func testApplySimple(t *testing.T) {
run: func(t testing.TB, m *d2ir.Map) {
err := parse(t, m, `x._ -> z`)
assert.Success(t, err)
assertField(t, m, 2, 1, nil)
assertField(t, m, 3, 1, nil)
assertField(t, m, 0, 0, nil, "x")
assertField(t, m, 0, 0, nil, "z")
@ -149,7 +148,7 @@ func parse(t testing.TB, dst *d2ir.Map, text string) error {
ast, err := d2parser.Parse(d2Path, strings.NewReader(text), nil)
assert.Success(t, err)
err = d2ir.Apply(dst, ast)
err = d2ir.Compile(dst, ast)
if err != nil {
return err
}
@ -182,10 +181,9 @@ func assertField(t testing.TB, n d2ir.Node, nfields, nedges int, primary interfa
}
var f *d2ir.Field
var ok bool
if len(ida) > 0 {
f, ok = m.Get(ida)
if !ok {
f = m.Get(ida)
if f == nil {
t.Fatalf("expected field %v in map %s", ida, m)
}
p = f.Primary
@ -196,8 +194,8 @@ func assertField(t testing.TB, n d2ir.Node, nfields, nedges int, primary interfa
}
}
assert.Equal(t, nfields, m.FieldCount())
assert.Equal(t, nedges, m.EdgeCount())
assert.Equal(t, nfields, m.FieldCountRecursive())
assert.Equal(t, nedges, m.EdgeCountRecursive())
if !makeScalar(p).Equal(makeScalar(primary)) {
t.Fatalf("expected primary %#v but got %s", primary, p)
}
@ -205,24 +203,13 @@ func assertField(t testing.TB, n d2ir.Node, nfields, nedges int, primary interfa
return f
}
// parseEdgeID parses a single-edge map key string (e.g. "(x -> y)[0]")
// into a d2ir.EdgeID for use in test assertions.
// NOTE(review): k.EdgeIndex.Int is dereferenced unconditionally, so the
// input must carry an explicit edge index or this panics — confirm all
// callers pass indexed edge keys.
func parseEdgeID(t testing.TB, eids string) *d2ir.EdgeID {
t.Helper()
k, err := d2parser.ParseMapKey(eids)
assert.Success(t, err)
return &d2ir.EdgeID{
SrcPath: d2format.KeyPath(k.Edges[0].Src),
SrcArrow: k.Edges[0].SrcArrow == "<",
DstPath: d2format.KeyPath(k.Edges[0].Dst),
DstArrow: k.Edges[0].DstArrow == ">",
Index: *k.EdgeIndex.Int,
}
}
func assertEdge(t testing.TB, n d2ir.Node, nfields int, primary interface{}, eids string) *d2ir.Edge {
t.Helper()
eid := parseEdgeID(t, eids)
k, err := d2parser.ParseMapKey(eids)
assert.Success(t, err)
eid := d2ir.NewEdgeIDs(k)[0]
var m *d2ir.Map
switch n := n.(type) {
@ -239,12 +226,13 @@ func assertEdge(t testing.TB, n d2ir.Node, nfields int, primary interface{}, eid
t.Fatalf("unexpected d2ir.Node %T", n)
}
e, ok := m.GetEdge(eid)
if !ok {
t.Fatalf("expected edge %v in map %s but not found", eid, m)
ea := m.GetEdges(eid)
if len(ea) != 1 {
t.Fatalf("expected single edge %v in map %s but not found", eid, m)
}
e := ea[0]
assert.Equal(t, nfields, e.Map.FieldCount())
assert.Equal(t, nfields, e.Map.FieldCountRecursive())
if !makeScalar(e.Primary).Equal(makeScalar(primary)) {
t.Fatalf("expected primary %#v but %s", primary, e.Primary)
}

View file

@ -2,6 +2,7 @@ package d2ir
import (
"encoding/json"
"errors"
"fmt"
"strings"
@ -155,7 +156,8 @@ type EdgeID struct {
DstPath []string `json:"dst_path"`
DstArrow bool `json:"dst_arrow"`
Index int `json:"index"`
// If nil, then any EdgeID with equal src/dst/arrows matches.
Index *int `json:"index"`
}
func (eid *EdgeID) Copy() *EdgeID {
@ -167,9 +169,11 @@ func (eid *EdgeID) Copy() *EdgeID {
return eid
}
func (eid *EdgeID) Equal(eid2 *EdgeID) bool {
if eid.Index != eid2.Index {
return false
func (eid *EdgeID) Match(eid2 *EdgeID) bool {
if eid.Index != nil && eid2.Index != nil {
if *eid.Index != *eid2.Index {
return false
}
}
if len(eid.SrcPath) != len(eid2.SrcPath) {
@ -272,29 +276,40 @@ type RefContext struct {
Scope *d2ast.Map `json:"-"`
}
func (m *Map) FieldCount() int {
func (m *Map) FieldCountRecursive() int {
if m == nil {
return 0
}
acc := len(m.Fields)
for _, f := range m.Fields {
if f_m, ok := f.Composite.(*Map); ok {
acc += f_m.FieldCount()
acc += f_m.FieldCountRecursive()
}
}
for _, e := range m.Edges {
if e.Map != nil {
acc += e.Map.FieldCountRecursive()
}
}
return acc
}
func (m *Map) EdgeCount() int {
func (m *Map) EdgeCountRecursive() int {
if m == nil {
return 0
}
acc := len(m.Edges)
for _, e := range m.Edges {
if e.Map != nil {
acc += e.Map.EdgeCount()
acc += e.Map.EdgeCountRecursive()
}
}
return acc
}
func (m *Map) Get(ida []string) (*Field, bool) {
func (m *Map) Get(ida []string) *Field {
if len(ida) == 0 {
return nil, false
return nil
}
s := ida[0]
@ -305,18 +320,18 @@ func (m *Map) Get(ida []string) (*Field, bool) {
continue
}
if len(rest) == 0 {
return f, true
return f
}
if f_m, ok := f.Composite.(*Map); ok {
return f_m.Get(rest)
}
}
return nil, false
return nil
}
func (m *Map) Ensure(ida []string) (*Field, bool) {
func (m *Map) Ensure(ida []string) (*Field, error) {
if len(ida) == 0 {
return nil, false
return nil, errors.New("empty ida")
}
s := ida[0]
@ -327,13 +342,13 @@ func (m *Map) Ensure(ida []string) (*Field, bool) {
continue
}
if len(rest) == 0 {
return f, true
return f, nil
}
switch fc := f.Composite.(type) {
case *Map:
return fc.Ensure(rest)
case *Array:
return nil, false
return nil, errors.New("cannot index into array")
}
f.Composite = &Map{
parent: f,
@ -347,7 +362,7 @@ func (m *Map) Ensure(ida []string) (*Field, bool) {
}
m.Fields = append(m.Fields, f)
if len(rest) == 0 {
return f, true
return f, nil
}
f.Composite = &Map{
parent: f,
@ -378,25 +393,58 @@ func (m *Map) Delete(ida []string) bool {
return false
}
func (m *Map) GetEdge(eid *EdgeID) (*Edge, bool) {
func (m *Map) GetEdges(eid *EdgeID) []*Edge {
common, eid := eid.trimCommon()
if len(common) > 0 {
f, ok := m.Get(common)
if !ok {
return nil, false
f := m.Get(common)
if f == nil {
return nil
}
if f_m, ok := f.Composite.(*Map); ok {
return f_m.GetEdge(eid)
return f_m.GetEdges(eid)
}
return nil, false
return nil
}
var ea []*Edge
for _, e := range m.Edges {
if e.ID.Equal(eid) {
return e, true
if e.ID.Match(eid) {
ea = append(ea, e)
}
}
return nil, false
return ea
}
// EnsureEdge returns a new edge identified by eid, creating any missing
// fields along the src/dst common path prefix. The new edge is assigned
// the next free index among edges with the same endpoints.
func (m *Map) EnsureEdge(eid *EdgeID) (*Edge, error) {
// Descend into the common path prefix shared by src and dst first.
common, eid := eid.trimCommon()
if len(common) > 0 {
f, err := m.Ensure(common)
if err != nil {
return nil, err
}
switch fc := f.Composite.(type) {
case *Map:
return fc.EnsureEdge(eid)
case *Array:
return nil, errors.New("cannot index into array")
}
// The field has no composite yet; give it a map to hold the edge.
f.Composite = &Map{
parent: f,
}
return f.Composite.(*Map).EnsureEdge(eid)
}
// Clear the index so GetEdges matches all edges with equal endpoints,
// then assign this edge the next index in that sequence.
eid.Index = nil
ea := m.GetEdges(eid)
index := len(ea)
eid.Index = &index
e := &Edge{
parent: m,
ID: eid,
}
m.Edges = append(m.Edges, e)
return e, nil
}
func (m *Map) String() string {
@ -406,3 +454,18 @@ func (m *Map) String() string {
}
return string(b)
}
// NewEdgeIDs constructs an EdgeID for every edge declared by k. If the key
// carries an explicit edge index (e.g. (x -> y)[0]), it is attached to the
// first edge's ID; an ID with a nil Index matches any edge with the same
// endpoints.
func NewEdgeIDs(k *d2ast.Key) (eida []*EdgeID) {
	for _, ke := range k.Edges {
		eida = append(eida, &EdgeID{
			SrcPath:  d2format.KeyPath(ke.Src),
			SrcArrow: ke.SrcArrow == "<",
			DstPath:  d2format.KeyPath(ke.Dst),
			DstArrow: ke.DstArrow == ">",
		})
	}
	// Guard len(eida) > 0: a key with an edge index but no edges must not
	// panic on eida[0].
	if len(eida) > 0 && k.EdgeIndex != nil && k.EdgeIndex.Int != nil {
		eida[0].Index = k.EdgeIndex.Int
	}
	return eida
}

25
testdata/d2ir/TestCompile/roots/edge.exp.json generated vendored Normal file
View file

@ -0,0 +1,25 @@
{
"fields": [
{
"name": "x"
},
{
"name": "y"
}
],
"edges": [
{
"edge_id": {
"src_path": [
"x"
],
"src_arrow": false,
"dst_path": [
"y"
],
"dst_arrow": true,
"index": 0
}
}
]
}

8
testdata/d2ir/TestCompile/roots/field.exp.json generated vendored Normal file
View file

@ -0,0 +1,8 @@
{
"fields": [
{
"name": "x"
}
],
"edges": null
}

19
testdata/d2ir/TestCompile/roots/field/label.exp.json generated vendored Normal file
View file

@ -0,0 +1,19 @@
{
"fields": [
{
"name": "x",
"primary": {
"value": {
"range": "d2/testdata/d2ir/TestCompile/roots/field/label.d2,0:3:3-0:6:6",
"value": [
{
"string": "yes",
"raw_string": "yes"
}
]
}
}
}
],
"edges": null
}

View file

@ -0,0 +1,27 @@
{
"fields": [
{
"name": "x",
"composite": {
"fields": [
{
"name": "y",
"primary": {
"value": {
"range": "d2/testdata/d2ir/TestCompile/roots/field/label/nested.d2,0:5:5-0:8:8",
"value": [
{
"string": "yes",
"raw_string": "yes"
}
]
}
}
}
],
"edges": null
}
}
],
"edges": null
}

43
testdata/d2ir/TestCompile/roots/nested.exp.json generated vendored Normal file
View file

@ -0,0 +1,43 @@
{
"fields": [
{
"name": "x",
"composite": {
"fields": [
{
"name": "y"
}
],
"edges": null
}
},
{
"name": "z",
"composite": {
"fields": [
{
"name": "p"
}
],
"edges": null
}
}
],
"edges": [
{
"edge_id": {
"src_path": [
"x",
"y"
],
"src_arrow": false,
"dst_path": [
"z",
"p"
],
"dst_arrow": true,
"index": 0
}
}
]
}

27
testdata/d2ir/TestCompile/roots/primary.exp.json generated vendored Normal file
View file

@ -0,0 +1,27 @@
{
"fields": [
{
"name": "x",
"primary": {
"value": {
"range": "d2/testdata/d2ir/TestCompile/roots/primary.d2,0:3:3-0:6:6",
"value": [
{
"string": "yes",
"raw_string": "yes"
}
]
}
},
"composite": {
"fields": [
{
"name": "pqrs"
}
],
"edges": null
}
}
],
"edges": null
}

View file

@ -0,0 +1,35 @@
{
"fields": [
{
"name": "x",
"composite": {
"fields": [
{
"name": "y",
"primary": {
"value": {
"range": "d2/testdata/d2ir/TestCompile/roots/primary/nested.d2,0:5:5-0:8:8",
"value": [
{
"string": "yes",
"raw_string": "yes"
}
]
}
},
"composite": {
"fields": [
{
"name": "pqrs"
}
],
"edges": null
}
}
],
"edges": null
}
}
],
"edges": null
}

View file

@ -0,0 +1,34 @@
{
"fields": [
{
"name": "x",
"composite": {
"fields": [
{
"name": "_"
}
],
"edges": null
}
},
{
"name": "z"
}
],
"edges": [
{
"edge_id": {
"src_path": [
"x",
"_"
],
"src_arrow": false,
"dst_path": [
"z"
],
"dst_arrow": true,
"index": 0
}
}
]
}

4
testdata/d2ir/TestCompile/simple/edge.exp.json generated vendored Normal file
View file

@ -0,0 +1,4 @@
{
"fields": null,
"edges": null
}

8
testdata/d2ir/TestCompile/simple/field.exp.json generated vendored Normal file
View file

@ -0,0 +1,8 @@
{
"fields": [
{
"name": "x"
}
],
"edges": null
}

19
testdata/d2ir/TestCompile/simple/field/label.exp.json generated vendored Normal file
View file

@ -0,0 +1,19 @@
{
"fields": [
{
"name": "x",
"primary": {
"value": {
"range": "d2/testdata/d2ir/TestCompile/simple/field/label.d2,0:3:3-0:6:6",
"value": [
{
"string": "yes",
"raw_string": "yes"
}
]
}
}
}
],
"edges": null
}

View file

@ -0,0 +1,27 @@
{
"fields": [
{
"name": "x",
"composite": {
"fields": [
{
"name": "y",
"primary": {
"value": {
"range": "d2/testdata/d2ir/TestCompile/simple/field/label/nested.d2,0:5:5-0:8:8",
"value": [
{
"string": "yes",
"raw_string": "yes"
}
]
}
}
}
],
"edges": null
}
}
],
"edges": null
}

4
testdata/d2ir/TestCompile/simple/nested.exp.json generated vendored Normal file
View file

@ -0,0 +1,4 @@
{
"fields": null,
"edges": null
}

27
testdata/d2ir/TestCompile/simple/primary.exp.json generated vendored Normal file
View file

@ -0,0 +1,27 @@
{
"fields": [
{
"name": "x",
"primary": {
"value": {
"range": "d2/testdata/d2ir/TestCompile/simple/primary.d2,0:3:3-0:6:6",
"value": [
{
"string": "yes",
"raw_string": "yes"
}
]
}
},
"composite": {
"fields": [
{
"name": "pqrs"
}
],
"edges": null
}
}
],
"edges": null
}

View file

@ -0,0 +1,35 @@
{
"fields": [
{
"name": "x",
"composite": {
"fields": [
{
"name": "y",
"primary": {
"value": {
"range": "d2/testdata/d2ir/TestCompile/simple/primary/nested.d2,0:5:5-0:8:8",
"value": [
{
"string": "yes",
"raw_string": "yes"
}
]
}
},
"composite": {
"fields": [
{
"name": "pqrs"
}
],
"edges": null
}
}
],
"edges": null
}
}
],
"edges": null
}

View file

@ -0,0 +1,4 @@
{
"fields": null,
"edges": null
}