d2ir: Fully implement scenarios/steps

Anmol Sethi 2023-01-18 07:15:16 -08:00
parent 7d89174a1b
commit d239b8dad1
12 changed files with 6198 additions and 340 deletions


@ -658,6 +658,13 @@ func MakeKeyPath(a []string) *KeyPath {
return kp
}
func (kp *KeyPath) IDA() (ida []string) {
for _, el := range kp.Path {
ida = append(ida, el.Unbox().ScalarString())
}
return ida
}
type Edge struct {
Range Range `json:"range"`

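For orientation (not part of the diff): the new KeyPath.IDA helper flattens a parsed key path into its plain string segments, which d2ir below uses to build edge IDs. A minimal sketch of the intended behavior, assuming d2parser.ParseMapKey as used in the tests further down:

package main

import (
	"fmt"

	"oss.terrastruct.com/d2/d2parser"
)

func main() {
	// Hypothetical usage: parse a map key and flatten its path segments.
	k, err := d2parser.ParseMapKey("a.b.c")
	if err != nil {
		panic(err)
	}
	fmt.Println(k.Key.IDA()) // [a b c]
}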

@ -14,17 +14,62 @@ func (c *compiler) errorf(n d2ast.Node, f string, v ...interface{}) {
}
func Compile(ast *d2ast.Map) (*Map, error) {
m := &Map{}
c := &compiler{}
c.compile(m, ast)
m := &Map{}
c.compileMap(m, ast)
c.compileScenarios(m)
c.compileSteps(m)
if !c.err.Empty() {
return nil, c.err
}
return m, nil
}
func (c *compiler) compile(dst *Map, ast *d2ast.Map) {
c.compileMap(dst, ast)
func (c *compiler) compileScenarios(m *Map) {
scenariosf := m.GetField("scenarios")
if scenariosf == nil {
return
}
scenarios := scenariosf.Map()
if scenarios == nil {
return
}
for _, sf := range scenarios.Fields {
if sf.Map() == nil {
sf.Composite = &Map{
parent: sf,
}
}
base := m.Copy(sf).(*Map)
sf.Composite = Overlay(base, sf.Map())
}
}
func (c *compiler) compileSteps(m *Map) {
stepsf := m.GetField("steps")
if stepsf == nil {
return
}
steps := stepsf.Map()
if steps == nil {
return
}
for i, sf := range steps.Fields {
if sf.Map() == nil {
sf.Composite = &Map{
parent: sf,
}
}
var base *Map
if i == 0 {
base = m.Copy(sf).(*Map)
} else {
base = steps.Fields[i-1].Map().Copy(sf).(*Map)
}
sf.Composite = Overlay(base, sf.Map())
}
}
func (c *compiler) compileMap(dst *Map, ast *d2ast.Map) {
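In plain terms (not part of the diff): each scenario gets a copy of the enclosing board overlaid with the scenario's own body, and each step gets a copy of the previous step (or of the board itself for the first step) overlaid with the step's body. A minimal end-to-end sketch, assuming d2parser.Parse keeps its (path, reader, options) signature and using the QueryOne helper added later in this commit:

package main

import (
	"fmt"
	"strings"

	"oss.terrastruct.com/d2/d2ir"
	"oss.terrastruct.com/d2/d2parser"
)

func main() {
	src := `x -> y
steps: {
  one: { p }
  two: { q }
}`
	ast, err := d2parser.Parse("sketch.d2", strings.NewReader(src), nil)
	if err != nil {
		panic(err)
	}
	m, err := d2ir.Compile(ast)
	if err != nil {
		panic(err)
	}
	// steps.one is the root board overlaid with { p }; steps.two is
	// steps.one overlaid with { q }, so it contains x, y, p and q.
	for _, id := range []string{"steps.two.x", "steps.two.p", "steps.two.q"} {
		n, err := m.QueryOne(id)
		fmt.Println(id, n != nil, err)
	}
}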
@ -57,7 +102,7 @@ func (c *compiler) compileField(dst *Map, kp *d2ast.KeyPath, refctx *RefContext)
}
if refctx.Key.Primary.Unbox() != nil {
f.Primary = &Scalar{
f.Primary_ = &Scalar{
parent: f,
Value: refctx.Key.Primary.Unbox(),
}
@ -69,16 +114,14 @@ func (c *compiler) compileField(dst *Map, kp *d2ast.KeyPath, refctx *RefContext)
c.compileArray(a, refctx.Key.Value.Array)
f.Composite = a
} else if refctx.Key.Value.Map != nil {
f_m := ChildMap(f)
if f_m == nil {
f_m = &Map{
if f.Map() == nil {
f.Composite = &Map{
parent: f,
}
f.Composite = f_m
}
c.compileMap(f_m, refctx.Key.Value.Map)
c.compileMap(f.Map(), refctx.Key.Value.Map)
} else if refctx.Key.Value.ScalarBox().Unbox() != nil {
f.Primary = &Scalar{
f.Primary_ = &Scalar{
parent: f,
Value: refctx.Key.Value.ScalarBox().Unbox(),
}
@ -96,14 +139,12 @@ func (c *compiler) compileEdges(dst *Map, refctx *RefContext) {
c.errorf(refctx.Key.Key, "cannot index into array")
return
}
f_m := ChildMap(f)
if f_m == nil {
f_m = &Map{
if f.Map() == nil {
f.Composite = &Map{
parent: f,
}
f.Composite = f_m
}
dst = f_m
dst = f.Map()
}
eida := NewEdgeIDs(refctx.Key)
@ -144,25 +185,25 @@ func (c *compiler) compileEdges(dst *Map, refctx *RefContext) {
}
if refctx.Key.EdgeKey != nil {
if e.Map == nil {
e.Map = &Map{
if e.Map_ == nil {
e.Map_ = &Map{
parent: e,
}
}
c.compileField(e.Map, refctx.Key.EdgeKey, refctx)
c.compileField(e.Map_, refctx.Key.EdgeKey, refctx)
} else {
if refctx.Key.Primary.Unbox() != nil {
e.Primary = &Scalar{
e.Primary_ = &Scalar{
parent: e,
Value: refctx.Key.Primary.Unbox(),
}
} else if refctx.Key.Value.Map != nil {
if e.Map == nil {
e.Map = &Map{
if e.Map_ == nil {
e.Map_ = &Map{
parent: e,
}
}
c.compileMap(e.Map, refctx.Key.Value.Map)
c.compileMap(e.Map_, refctx.Key.Value.Map)
} else if refctx.Key.Value.Unbox() != nil {
c.errorf(refctx.Key.Value.Unbox(), "edges cannot be assigned arrays")
continue


@ -20,7 +20,9 @@ func TestCompile(t *testing.T) {
t.Run("fields", testCompileFields)
t.Run("edges", testCompileEdges)
t.Run("layer", testCompileLayers)
t.Run("layers", testCompileLayers)
t.Run("scenarios", testCompileScenarios)
t.Run("steps", testCompileSteps)
}
type testCase struct {
@ -57,23 +59,20 @@ func compile(t testing.TB, text string) (*d2ir.Map, error) {
return m, nil
}
func assertField(t testing.TB, n d2ir.Node, nfields, nedges int, primary interface{}, ida ...string) *d2ir.Field {
func assertQueryOne(t testing.TB, n d2ir.Node, nfields, nedges int, primary interface{}, idStr string) d2ir.Node {
t.Helper()
m := d2ir.ChildMap(n)
if m == nil {
t.Fatalf("nil m from %T", n)
}
p := d2ir.ToScalar(n)
m := n.Map()
p := n.Primary()
var f *d2ir.Field
if len(ida) > 0 {
f = m.GetField(ida...)
if f == nil {
t.Fatalf("expected field %v in map %s", ida, m)
}
p = f.Primary
m = d2ir.ChildMap(f)
if idStr != "" {
var err error
n, err = m.QueryOne(idStr)
assert.Success(t, err)
assert.NotEqual(t, n, nil)
p = n.Primary()
m = n.Map()
}
assert.Equal(t, nfields, m.FieldCountRecursive())
@ -82,34 +81,7 @@ func assertField(t testing.TB, n d2ir.Node, nfields, nedges int, primary interfa
t.Fatalf("expected primary %#v but got %s", primary, p)
}
return f
}
func assertEdge(t testing.TB, n d2ir.Node, nfields int, primary interface{}, eids string) *d2ir.Edge {
t.Helper()
k, err := d2parser.ParseMapKey(eids)
assert.Success(t, err)
eid := d2ir.NewEdgeIDs(k)[0]
m := d2ir.ChildMap(n)
if m == nil {
t.Fatalf("nil m from %T", n)
}
ea := m.GetEdges(eid)
if len(ea) != 1 {
t.Fatalf("expected single edge %v in map %s but not found", eid, m)
}
e := ea[0]
assert.Equal(t, nfields, e.Map.FieldCountRecursive())
if !makeScalar(e.Primary).Equal(makeScalar(primary)) {
t.Fatalf("expected primary %#v but %s", primary, e.Primary)
}
return e
return n
}
func makeScalar(v interface{}) *d2ir.Scalar {
@ -148,16 +120,15 @@ func makeScalar(v interface{}) *d2ir.Scalar {
func testCompileFields(t *testing.T) {
t.Parallel()
t.Run("primary", testCompileFieldPrimary)
tca := []testCase{
{
name: "root",
run: func(t testing.TB) {
m, err := compile(t, `x`)
assert.Success(t, err)
assertField(t, m, 1, 0, nil)
assertQueryOne(t, m, 1, 0, nil, "")
assertField(t, m, 0, 0, nil, "x")
assertQueryOne(t, m, 0, 0, nil, "x")
},
},
{
@ -165,9 +136,9 @@ func testCompileFields(t *testing.T) {
run: func(t testing.TB) {
m, err := compile(t, `x: yes`)
assert.Success(t, err)
assertField(t, m, 1, 0, nil)
assertQueryOne(t, m, 1, 0, nil, "")
assertField(t, m, 0, 0, "yes", "x")
assertQueryOne(t, m, 0, 0, "yes", "x")
},
},
{
@ -175,10 +146,10 @@ func testCompileFields(t *testing.T) {
run: func(t testing.TB) {
m, err := compile(t, `x.y: yes`)
assert.Success(t, err)
assertField(t, m, 2, 0, nil)
assertQueryOne(t, m, 2, 0, nil, "")
assertField(t, m, 1, 0, nil, "x")
assertField(t, m, 0, 0, "yes", "x", "y")
assertQueryOne(t, m, 1, 0, nil, "x")
assertQueryOne(t, m, 0, 0, "yes", "x.y")
},
},
{
@ -186,44 +157,56 @@ func testCompileFields(t *testing.T) {
run: func(t testing.TB) {
m, err := compile(t, `x: [1;2;3;4]`)
assert.Success(t, err)
assertField(t, m, 1, 0, nil)
assertQueryOne(t, m, 1, 0, nil, "")
f := assertField(t, m, 0, 0, nil, "x")
f := assertQueryOne(t, m, 0, 0, nil, "x").(*d2ir.Field)
assert.String(t, `[1; 2; 3; 4]`, f.Composite.String())
},
},
}
runa(t, tca)
}
func testCompileFieldPrimary(t *testing.T) {
t.Parallel()
tca := []testCase{
{
name: "root",
name: "null",
run: func(t testing.TB) {
m, err := compile(t, `x: yes { pqrs }`)
m, err := compile(t, `pq: pq
pq: null`)
assert.Success(t, err)
assertField(t, m, 2, 0, nil)
assertField(t, m, 1, 0, "yes", "x")
assertField(t, m, 0, 0, nil, "x", "pqrs")
},
},
{
name: "nested",
run: func(t testing.TB) {
m, err := compile(t, `x.y: yes { pqrs }`)
assert.Success(t, err)
assertField(t, m, 3, 0, nil)
assertField(t, m, 2, 0, nil, "x")
assertField(t, m, 1, 0, "yes", "x", "y")
assertField(t, m, 0, 0, nil, "x", "y", "pqrs")
assertQueryOne(t, m, 1, 0, nil, "")
// null doesn't delete pq from the *Map so that language tooling
// keeps the references.
// Instead, d2compiler will ensure it doesn't get rendered.
assertQueryOne(t, m, 0, 0, nil, "pq")
},
},
}
runa(t, tca)
t.Run("primary", func(t *testing.T) {
t.Parallel()
tca := []testCase{
{
name: "root",
run: func(t testing.TB) {
m, err := compile(t, `x: yes { pqrs }`)
assert.Success(t, err)
assertQueryOne(t, m, 2, 0, nil, "")
assertQueryOne(t, m, 1, 0, "yes", "x")
assertQueryOne(t, m, 0, 0, nil, "x.pqrs")
},
},
{
name: "nested",
run: func(t testing.TB) {
m, err := compile(t, `x.y: yes { pqrs }`)
assert.Success(t, err)
assertQueryOne(t, m, 3, 0, nil, "")
assertQueryOne(t, m, 2, 0, nil, "x")
assertQueryOne(t, m, 1, 0, "yes", "x.y")
assertQueryOne(t, m, 0, 0, nil, "x.y.pqrs")
},
},
}
runa(t, tca)
})
}
func testCompileEdges(t *testing.T) {
@ -234,11 +217,11 @@ func testCompileEdges(t *testing.T) {
run: func(t testing.TB) {
m, err := compile(t, `x -> y`)
assert.Success(t, err)
assertField(t, m, 2, 1, nil)
assertEdge(t, m, 0, nil, `(x -> y)[0]`)
assertQueryOne(t, m, 2, 1, nil, "")
assertQueryOne(t, m, 0, 0, nil, `(x -> y)[0]`)
assertField(t, m, 0, 0, nil, "x")
assertField(t, m, 0, 0, nil, "y")
assertQueryOne(t, m, 0, 0, nil, "x")
assertQueryOne(t, m, 0, 0, nil, "y")
},
},
{
@ -246,15 +229,15 @@ func testCompileEdges(t *testing.T) {
run: func(t testing.TB) {
m, err := compile(t, `x.y -> z.p`)
assert.Success(t, err)
assertField(t, m, 4, 1, nil)
assertQueryOne(t, m, 4, 1, nil, "")
assertField(t, m, 1, 0, nil, "x")
assertField(t, m, 0, 0, nil, "x", "y")
assertQueryOne(t, m, 1, 0, nil, "x")
assertQueryOne(t, m, 0, 0, nil, "x.y")
assertField(t, m, 1, 0, nil, "z")
assertField(t, m, 0, 0, nil, "z", "p")
assertQueryOne(t, m, 1, 0, nil, "z")
assertQueryOne(t, m, 0, 0, nil, "z.p")
assertEdge(t, m, 0, nil, "(x.y -> z.p)[0]")
assertQueryOne(t, m, 0, 0, nil, "(x.y -> z.p)[0]")
},
},
{
@ -262,46 +245,49 @@ func testCompileEdges(t *testing.T) {
run: func(t testing.TB) {
m, err := compile(t, `p: { _.x -> z }`)
assert.Success(t, err)
assertField(t, m, 3, 1, nil)
assertQueryOne(t, m, 3, 1, nil, "")
assertField(t, m, 0, 0, nil, "x")
assertField(t, m, 1, 0, nil, "p")
assertQueryOne(t, m, 0, 0, nil, "x")
assertQueryOne(t, m, 1, 0, nil, "p")
assertEdge(t, m, 0, nil, "(x -> p.z)[0]")
assertQueryOne(t, m, 0, 0, nil, "(x -> p.z)[0]")
},
},
{
name: "chain",
run: func(t testing.TB) {
m, err := compile(t, `a -> b -> c -> d`)
assert.Success(t, err)
assertQueryOne(t, m, 4, 3, nil, "")
assertQueryOne(t, m, 0, 0, nil, "a")
assertQueryOne(t, m, 0, 0, nil, "b")
assertQueryOne(t, m, 0, 0, nil, "c")
assertQueryOne(t, m, 0, 0, nil, "d")
assertQueryOne(t, m, 0, 0, nil, "(a -> b)[0]")
assertQueryOne(t, m, 0, 0, nil, "(b -> c)[0]")
assertQueryOne(t, m, 0, 0, nil, "(c -> d)[0]")
},
},
}
runa(t, tca)
}
func testCompileLayers(t *testing.T) {
t.Parallel()
t.Run("errs", func(t *testing.T) {
t.Parallel()
tca := []testCase{
{
name: "bad_edge/1",
name: "bad_edge",
run: func(t testing.TB) {
_, err := compile(t, `layers.x -> layers.y`)
assert.ErrorString(t, err, `TestCompile/layer/errs/bad_edge/1.d2:1:1: cannot create edges between layers, scenarios or steps`)
},
},
{
name: "bad_edge/2",
run: func(t testing.TB) {
_, err := compile(t, `layers -> scenarios`)
assert.ErrorString(t, err, `TestCompile/layer/errs/bad_edge/2.d2:1:1: cannot create edges between layers, scenarios or steps`)
},
},
{
name: "bad_edge/3",
run: func(t testing.TB) {
_, err := compile(t, `layers.x.y -> steps.z.p`)
assert.ErrorString(t, err, `TestCompile/layer/errs/bad_edge/3.d2:1:1: cannot create edges between layers, scenarios or steps`)
_, err := compile(t, `(x -> y): { p -> q }`)
assert.ErrorString(t, err, `TestCompile/edges/errs/bad_edge.d2:1:13: cannot create edge inside edge`)
},
},
}
runa(t, tca)
})
}
func testCompileLayers(t *testing.T) {
t.Parallel()
tca := []testCase{
{
name: "root",
@ -312,13 +298,119 @@ layers: {
}`)
assert.Success(t, err)
assertField(t, m, 7, 1, nil)
assertEdge(t, m, 0, nil, `(x -> y)[0]`)
assertQueryOne(t, m, 7, 1, nil, "")
assertQueryOne(t, m, 0, 0, nil, `(x -> y)[0]`)
assertField(t, m, 0, 0, nil, "x")
assertField(t, m, 0, 0, nil, "y")
assertQueryOne(t, m, 0, 0, nil, "x")
assertQueryOne(t, m, 0, 0, nil, "y")
assertField(t, m, 3, 0, nil, "layers", "bingo")
assertQueryOne(t, m, 3, 0, nil, "layers.bingo")
},
},
}
runa(t, tca)
t.Run("errs", func(t *testing.T) {
t.Parallel()
tca := []testCase{
{
name: "bad_edge/1",
run: func(t testing.TB) {
_, err := compile(t, `layers.x -> layers.y`)
assert.ErrorString(t, err, `TestCompile/layers/errs/bad_edge/1.d2:1:1: cannot create edges between layers, scenarios or steps`)
},
},
{
name: "bad_edge/2",
run: func(t testing.TB) {
_, err := compile(t, `layers -> scenarios`)
assert.ErrorString(t, err, `TestCompile/layers/errs/bad_edge/2.d2:1:1: cannot create edges between layers, scenarios or steps`)
},
},
{
name: "bad_edge/3",
run: func(t testing.TB) {
_, err := compile(t, `layers.x.y -> steps.z.p`)
assert.ErrorString(t, err, `TestCompile/layers/errs/bad_edge/3.d2:1:1: cannot create edges between layers, scenarios or steps`)
},
},
}
runa(t, tca)
})
}
func testCompileScenarios(t *testing.T) {
t.Parallel()
tca := []testCase{
{
name: "root",
run: func(t testing.TB) {
m, err := compile(t, `x -> y
scenarios: {
bingo: { p.q.z }
nuclear: { quiche }
}`)
assert.Success(t, err)
assertQueryOne(t, m, 13, 3, nil, "")
assertQueryOne(t, m, 0, 0, nil, "x")
assertQueryOne(t, m, 0, 0, nil, "y")
assertQueryOne(t, m, 0, 0, nil, `(x -> y)[0]`)
assertQueryOne(t, m, 5, 1, nil, "scenarios.bingo")
assertQueryOne(t, m, 0, 0, nil, "scenarios.bingo.x")
assertQueryOne(t, m, 0, 0, nil, "scenarios.bingo.y")
assertQueryOne(t, m, 0, 0, nil, `scenarios.bingo.(x -> y)[0]`)
assertQueryOne(t, m, 2, 0, nil, "scenarios.bingo.p")
assertQueryOne(t, m, 1, 0, nil, "scenarios.bingo.p.q")
assertQueryOne(t, m, 0, 0, nil, "scenarios.bingo.p.q.z")
assertQueryOne(t, m, 3, 1, nil, "scenarios.nuclear")
assertQueryOne(t, m, 0, 0, nil, "scenarios.nuclear.x")
assertQueryOne(t, m, 0, 0, nil, "scenarios.nuclear.y")
assertQueryOne(t, m, 0, 0, nil, `scenarios.nuclear.(x -> y)[0]`)
assertQueryOne(t, m, 0, 0, nil, "scenarios.nuclear.quiche")
},
},
}
runa(t, tca)
}
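As a note outside the diff on the expected counts above: the root map holds x, y, scenarios, scenarios.bingo and scenarios.nuclear (5 fields), bingo's board inherits x and y and adds p, q and z (5 more), and nuclear's board inherits x and y and adds quiche (3 more): 5 + 5 + 3 = 13 fields. The x -> y edge exists once at the root and once per scenario, hence 3 edges.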
func testCompileSteps(t *testing.T) {
t.Parallel()
tca := []testCase{
{
name: "root",
run: func(t testing.TB) {
m, err := compile(t, `x -> y
steps: {
bingo: { p.q.z }
nuclear: { quiche }
}`)
assert.Success(t, err)
assertQueryOne(t, m, 16, 3, nil, "")
assertQueryOne(t, m, 0, 0, nil, "x")
assertQueryOne(t, m, 0, 0, nil, "y")
assertQueryOne(t, m, 0, 0, nil, `(x -> y)[0]`)
assertQueryOne(t, m, 5, 1, nil, "steps.bingo")
assertQueryOne(t, m, 0, 0, nil, "steps.bingo.x")
assertQueryOne(t, m, 0, 0, nil, "steps.bingo.y")
assertQueryOne(t, m, 0, 0, nil, `steps.bingo.(x -> y)[0]`)
assertQueryOne(t, m, 2, 0, nil, "steps.bingo.p")
assertQueryOne(t, m, 1, 0, nil, "steps.bingo.p.q")
assertQueryOne(t, m, 0, 0, nil, "steps.bingo.p.q.z")
assertQueryOne(t, m, 6, 1, nil, "steps.nuclear")
assertQueryOne(t, m, 0, 0, nil, "steps.nuclear.x")
assertQueryOne(t, m, 0, 0, nil, "steps.nuclear.y")
assertQueryOne(t, m, 0, 0, nil, `steps.nuclear.(x -> y)[0]`)
assertQueryOne(t, m, 2, 0, nil, "steps.nuclear.p")
assertQueryOne(t, m, 1, 0, nil, "steps.nuclear.p.q")
assertQueryOne(t, m, 0, 0, nil, "steps.nuclear.p.q.z")
assertQueryOne(t, m, 0, 0, nil, "steps.nuclear.quiche")
},
},
}
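Again outside the diff: the step counts differ from the scenario counts because steps accumulate. steps.nuclear starts from steps.bingo, so it carries p, q and z in addition to quiche. That gives the root map x, y, steps, steps.bingo and steps.nuclear (5 fields), bingo's board x, y, p, q, z (5 more), and nuclear's board x, y, p, q, z, quiche (6 more): 5 + 5 + 6 = 16 fields, with one (x -> y)[0] edge per map for 3 edges in total.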


@ -20,6 +20,8 @@ type Node interface {
node()
Copy(parent Node) Node
Parent() Node
Primary() *Scalar
Map() *Map
ast() d2ast.Node
fmt.Stringer
@ -61,6 +63,23 @@ func (n *Edge) Parent() Node { return n.parent }
func (n *Array) Parent() Node { return n.parent }
func (n *Map) Parent() Node { return n.parent }
func (n *Scalar) Primary() *Scalar { return n }
func (n *Field) Primary() *Scalar { return n.Primary_ }
func (n *Edge) Primary() *Scalar { return n.Primary_ }
func (n *Array) Primary() *Scalar { return nil }
func (n *Map) Primary() *Scalar { return nil }
func (n *Scalar) Map() *Map { return nil }
func (n *Field) Map() *Map {
if n.Composite == nil {
return nil
}
return n.Composite.Map()
}
func (n *Edge) Map() *Map { return n.Map_ }
func (n *Array) Map() *Map { return nil }
func (n *Map) Map() *Map { return n }
func (n *Scalar) value() {}
func (n *Array) value() {}
func (n *Map) value() {}
@ -94,7 +113,6 @@ func (s *Scalar) Equal(s2 *Scalar) bool {
}
}
return s.Value.Type() == s2.Value.Type() && s.Value.ScalarString() == s2.Value.ScalarString()
}
type Map struct {
@ -103,17 +121,19 @@ type Map struct {
Edges []*Edge `json:"edges"`
}
// Copy copies the map m without layers/scenarios/steps.
func (m *Map) Copy(newp Node) Node {
tmp := *m
m = &tmp
m.parent = newp
m.Fields = append([]*Field(nil), m.Fields...)
for i := range m.Fields {
if hasLayerKeywords(m.Fields[i].Name) != -1 {
pfields := m.Fields
m.Fields = make([]*Field, 0, len(pfields))
for _, f := range pfields {
if hasLayerKeywords(f.Name) != -1 {
continue
}
m.Fields[i] = m.Fields[i].Copy(m).(*Field)
m.Fields = append(m.Fields, f.Copy(m).(*Field))
}
m.Edges = append([]*Edge(nil), m.Edges...)
for i := range m.Edges {
@ -130,9 +150,9 @@ func (m *Map) Root() bool {
type LayerKind string
const (
LayerLayer    LayerKind = "layer"
LayerScenario LayerKind = "scenario"
LayerStep     LayerKind = "step"
)
// NodeLayerKind reports whether n represents the root of a layer.
@ -166,7 +186,7 @@ type Field struct {
Name string `json:"name"`
Primary *Scalar `json:"primary,omitempty"`
Primary_ *Scalar `json:"primary,omitempty"`
Composite Composite `json:"composite,omitempty"`
References []FieldReference `json:"references,omitempty"`
@ -178,8 +198,8 @@ func (f *Field) Copy(newp Node) Node {
f.parent = newp.(*Map)
f.References = append([]FieldReference(nil), f.References...)
if f.Primary != nil {
f.Primary = f.Primary.Copy(f).(*Scalar)
if f.Primary_ != nil {
f.Primary_ = f.Primary_.Copy(f).(*Scalar)
}
if f.Composite != nil {
f.Composite = f.Composite.Copy(f).(Composite)
@ -200,15 +220,16 @@ type EdgeID struct {
func NewEdgeIDs(k *d2ast.Key) (eida []*EdgeID) {
for _, ke := range k.Edges {
eida = append(eida, &EdgeID{
SrcPath: d2format.KeyPath(ke.Src),
eid := &EdgeID{
SrcPath: ke.Src.IDA(),
SrcArrow: ke.SrcArrow == "<",
DstPath: d2format.KeyPath(ke.Dst),
DstPath: ke.Dst.IDA(),
DstArrow: ke.DstArrow == ">",
})
}
if k.EdgeIndex != nil && k.EdgeIndex.Int != nil {
eida[0].Index = k.EdgeIndex.Int
}
if k.EdgeIndex != nil {
eid.Index = k.EdgeIndex.Int
}
eida = append(eida, eid)
}
return eida
}
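A small sketch outside the diff of the new EdgeID construction, assuming d2parser.ParseMapKey as in the tests above: the ID now carries the plain path segments from IDA() rather than formatted strings, and the edge index is applied per ID.

package main

import (
	"fmt"

	"oss.terrastruct.com/d2/d2ir"
	"oss.terrastruct.com/d2/d2parser"
)

func main() {
	k, err := d2parser.ParseMapKey(`(x.y -> z)[0]`)
	if err != nil {
		panic(err)
	}
	// One EdgeID per edge in the key.
	eid := d2ir.NewEdgeIDs(k)[0]
	fmt.Println(eid.SrcPath, eid.DstPath, *eid.Index) // [x y] [z] 0
}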
@ -298,8 +319,8 @@ type Edge struct {
ID *EdgeID `json:"edge_id"`
Primary *Scalar `json:"primary,omitempty"`
Map *Map `json:"map,omitempty"`
Primary_ *Scalar `json:"primary,omitempty"`
Map_ *Map `json:"map,omitempty"`
References []EdgeReference `json:"references,omitempty"`
}
@ -310,11 +331,11 @@ func (e *Edge) Copy(newp Node) Node {
e.parent = newp.(*Map)
e.References = append([]EdgeReference(nil), e.References...)
if e.Primary != nil {
e.Primary = e.Primary.Copy(e).(*Scalar)
if e.Primary_ != nil {
e.Primary_ = e.Primary_.Copy(e).(*Scalar)
}
if e.Map != nil {
e.Map = e.Map.Copy(e).(*Map)
if e.Map_ != nil {
e.Map_ = e.Map_.Copy(e).(*Map)
}
return e
}
@ -410,14 +431,13 @@ func (m *Map) FieldCountRecursive() int {
}
acc := len(m.Fields)
for _, f := range m.Fields {
f_m := ChildMap(f)
if f_m != nil {
acc += f_m.FieldCountRecursive()
if f.Map() != nil {
acc += f.Map().FieldCountRecursive()
}
}
for _, e := range m.Edges {
if e.Map != nil {
acc += e.Map.FieldCountRecursive()
if e.Map_ != nil {
acc += e.Map_.FieldCountRecursive()
}
}
return acc
@ -429,14 +449,13 @@ func (m *Map) EdgeCountRecursive() int {
}
acc := len(m.Edges)
for _, f := range m.Fields {
f_m := ChildMap(f)
if f_m != nil {
acc += f_m.EdgeCountRecursive()
if f.Map() != nil {
acc += f.Map().EdgeCountRecursive()
}
}
for _, e := range m.Edges {
if e.Map != nil {
acc += e.Map.EdgeCountRecursive()
if e.Map_ != nil {
acc += e.Map_.EdgeCountRecursive()
}
}
return acc
@ -471,9 +490,8 @@ func (m *Map) getField(ida []string) *Field {
if len(rest) == 0 {
return f
}
f_m := ChildMap(f)
if f_m != nil {
return f_m.getField(rest)
if f.Map() != nil {
return f.Map().getField(rest)
}
}
return nil
@ -522,14 +540,12 @@ func (m *Map) ensureField(i int, kp *d2ast.KeyPath, refctx *RefContext) (*Field,
if _, ok := f.Composite.(*Array); ok {
return nil, d2parser.Errorf(kp.Path[i].Unbox(), "cannot index into array")
}
f_m := ChildMap(f)
if f_m == nil {
f_m = &Map{
if f.Map() == nil {
f.Composite = &Map{
parent: f,
}
f.Composite = f_m
}
return f_m.ensureField(i+1, kp, refctx)
return f.Map().ensureField(i+1, kp, refctx)
}
f := &Field{
@ -545,11 +561,10 @@ func (m *Map) ensureField(i int, kp *d2ast.KeyPath, refctx *RefContext) (*Field,
if i+1 == len(kp.Path) {
return f, nil
}
f_m := &Map{
f.Composite = &Map{
parent: f,
}
f.Composite = f_m
return f_m.ensureField(i+1, kp, refctx)
return f.Map().ensureField(i+1, kp, refctx)
}
func (m *Map) DeleteField(ida []string) bool {
@ -568,9 +583,8 @@ func (m *Map) DeleteField(ida []string) bool {
copy(m.Fields[i:], m.Fields[i+1:])
return true
}
f_m := ChildMap(f)
if f_m != nil {
return f_m.DeleteField(rest)
if f.Map() != nil {
return f.Map().DeleteField(rest)
}
}
return false
@ -587,9 +601,8 @@ func (m *Map) GetEdges(eid *EdgeID) []*Edge {
if f == nil {
return nil
}
f_m := ChildMap(f)
if f_m != nil {
return f_m.GetEdges(eid)
if f.Map() != nil {
return f.Map().GetEdges(eid)
}
return nil
}
@ -604,6 +617,10 @@ func (m *Map) GetEdges(eid *EdgeID) []*Edge {
}
func (m *Map) CreateEdge(eid *EdgeID, refctx *RefContext) (*Edge, error) {
if ParentEdge(m) != nil {
return nil, d2parser.Errorf(refctx.Edge, "cannot create edge inside edge")
}
eid, m, err := eid.resolveUnderscores(m)
if err != nil {
return nil, d2parser.Errorf(refctx.Edge, err.Error())
@ -620,14 +637,12 @@ func (m *Map) CreateEdge(eid *EdgeID, refctx *RefContext) (*Edge, error) {
if _, ok := f.Composite.(*Array); ok {
return nil, d2parser.Errorf(refctx.Edge.Src, "cannot index into array")
}
f_m := ChildMap(f)
if f_m == nil {
f_m = &Map{
if f.Map() == nil {
f.Composite = &Map{
parent: f,
}
f.Composite = f_m
}
return f_m.CreateEdge(eid, refctx)
return f.Map().CreateEdge(eid, refctx)
}
ij := hasLayerKeywords(eid.SrcPath...)
@ -680,8 +695,8 @@ func (f *Field) ast() d2ast.Node {
},
}
if f.Primary != nil {
k.Primary = d2ast.MakeValueBox(f.Primary.ast().(d2ast.Value)).ScalarBox()
if f.Primary_ != nil {
k.Primary = d2ast.MakeValueBox(f.Primary_.ast().(d2ast.Value)).ScalarBox()
}
if f.Composite != nil {
k.Value = d2ast.MakeValueBox(f.Composite.ast().(d2ast.Value))
@ -706,11 +721,11 @@ func (e *Edge) ast() d2ast.Node {
Edges: []*d2ast.Edge{astEdge},
}
if e.Primary != nil {
k.Primary = d2ast.MakeValueBox(e.Primary.ast().(d2ast.Value)).ScalarBox()
if e.Primary_ != nil {
k.Primary = d2ast.MakeValueBox(e.Primary_.ast().(d2ast.Value)).ScalarBox()
}
if e.Map != nil {
k.Value = d2ast.MakeValueBox(e.Map.ast().(*d2ast.Map))
if e.Map_ != nil {
k.Value = d2ast.MakeValueBox(e.Map_.ast().(*d2ast.Map))
}
return k
@ -761,41 +776,16 @@ func (m *Map) appendFieldReferences(i int, kp *d2ast.KeyPath, refctx *RefContext
if i+1 == len(kp.Path) {
return
}
f_m := ChildMap(f)
if f_m != nil {
f_m.appendFieldReferences(i+1, kp, refctx)
}
}
func ChildMap(n Node) *Map {
switch n := n.(type) {
case *Map:
return n
case *Field:
return ChildMap(n.Composite)
case *Edge:
return n.Map
default:
return nil
}
}
func ToScalar(n Node) *Scalar {
switch n := n.(type) {
case *Field:
return n.Primary
case *Edge:
return n.Primary
default:
return nil
if f.Map() != nil {
f.Map().appendFieldReferences(i+1, kp, refctx)
}
}
func ParentMap(n Node) *Map {
for n.Parent() != nil {
n = n.Parent()
if n_m, ok := n.(*Map); ok {
return n_m
if m, ok := n.(*Map); ok {
return m
}
}
return nil
@ -804,15 +794,16 @@ func ParentMap(n Node) *Map {
func ParentField(n Node) *Field {
for n.Parent() != nil {
n = n.Parent()
if n_f, ok := n.(*Field); ok {
return n_f
if f, ok := n.(*Field); ok {
return f
}
}
return nil
}
func ParentLayer(n Node) Node {
func ParentLayer(n Node) *Map {
for {
// ParentMap and not ParentField so we get the root layer too.
m := ParentMap(n)
if m == nil {
return nil
@ -824,6 +815,16 @@ func ParentLayer(n Node) Node {
}
}
func ParentEdge(n Node) *Edge {
for n.Parent() != nil {
n = n.Parent()
if e, ok := n.(*Edge); ok {
return e
}
}
return nil
}
func countUnderscores(p []string) int {
var count int
for _, el := range p {


@ -27,7 +27,7 @@ func TestCopy(t *testing.T) {
}
m2 := &d2ir.Map{
Fields: []*d2ir.Field{
{Primary: s},
{Primary_: s},
},
}
@ -35,13 +35,13 @@ func TestCopy(t *testing.T) {
f := &d2ir.Field{
Name: keyStr,
Primary: s,
Primary_: s,
Composite: a,
}
e := &d2ir.Edge{
Primary: s,
Map: m2,
Primary_: s,
Map_: m2,
}
m := &d2ir.Map{
@ -54,7 +54,7 @@ func TestCopy(t *testing.T) {
assert.Equal(t, m, m.Fields[0].Parent())
assert.Equal(t, keyStr, m.Fields[0].Name)
assert.Equal(t, m.Fields[0], m.Fields[0].Primary.Parent())
assert.Equal(t, m.Fields[0], m.Fields[0].Primary_.Parent())
assert.Equal(t, m.Fields[0], m.Fields[0].Composite.(*d2ir.Array).Parent())
assert.Equal(t,
@ -63,9 +63,9 @@ func TestCopy(t *testing.T) {
)
assert.Equal(t, m, m.Edges[0].Parent())
assert.Equal(t, m.Edges[0], m.Edges[0].Primary.Parent())
assert.Equal(t, m.Edges[0], m.Edges[0].Map.Parent())
assert.Equal(t, m.Edges[0], m.Edges[0].Primary_.Parent())
assert.Equal(t, m.Edges[0], m.Edges[0].Map_.Parent())
assert.Equal(t, m.Edges[0].Map, m.Edges[0].Map.Fields[0].Parent())
assert.Equal(t, m.Edges[0].Map.Fields[0], m.Edges[0].Map.Fields[0].Primary.Parent())
assert.Equal(t, m.Edges[0].Map_, m.Edges[0].Map_.Fields[0].Parent())
assert.Equal(t, m.Edges[0].Map_.Fields[0], m.Edges[0].Map_.Fields[0].Primary_.Parent())
}

d2ir/merge.go (new file, 45 lines)

@ -0,0 +1,45 @@
package d2ir
func Overlay(base, overlay *Map) *Map {
for _, of := range overlay.Fields {
bf := base.GetField(of.Name)
if bf == nil {
base.Fields = append(base.Fields, of.Copy(base).(*Field))
continue
}
if of.Primary_ != nil {
bf.Primary_ = of.Primary_.Copy(bf).(*Scalar)
}
switch ofc := of.Composite.(type) {
case *Array:
bf.Composite = ofc.Copy(bf).(*Array)
case *Map:
if bf.Map() != nil {
bf.Composite = Overlay(bf.Map(), ofc)
} else {
bf.Composite = of.Composite.Copy(bf).(*Map)
}
}
}
for _, oe := range overlay.Edges {
bea := base.GetEdges(oe.ID)
if len(bea) == 0 {
base.Edges = append(base.Edges, oe.Copy(base).(*Edge))
continue
}
be := bea[0]
if oe.Primary_ != nil {
be.Primary_ = oe.Primary_.Copy(be).(*Scalar)
}
if oe.Map_ != nil {
if be.Map_ != nil {
be.Map_ = Overlay(be.Map(), oe.Map_)
} else {
be.Map_ = oe.Map_.Copy(be).(*Map)
}
}
}
return base
}
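Overlay mutates and returns base: fields missing from base are copied in, an overlay scalar replaces the base one, nested maps are merged recursively, and edges are matched by ID in the same way. A minimal sketch outside the diff, on hand-built maps:

package main

import (
	"fmt"

	"oss.terrastruct.com/d2/d2ir"
)

func main() {
	base := &d2ir.Map{Fields: []*d2ir.Field{{Name: "x"}, {Name: "y"}}}
	over := &d2ir.Map{Fields: []*d2ir.Field{{Name: "y"}, {Name: "z"}}}

	m := d2ir.Overlay(base, over)
	// y already existed so it is left as-is; z is copied in from the overlay.
	fmt.Println(m.GetField("x") != nil, m.GetField("y") != nil, m.GetField("z") != nil)
}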

d2ir/query.go (new file, 61 lines)

@ -0,0 +1,61 @@
package d2ir
import (
"fmt"
"oss.terrastruct.com/d2/d2parser"
)
// Query is only for tests and debugging.
func (m *Map) Query(idStr string) (na []Node, _ error) {
k, err := d2parser.ParseMapKey(idStr)
if err != nil {
return nil, err
}
if k.Key != nil {
f := m.GetField(k.Key.IDA()...)
if f == nil {
return nil, nil
}
if len(k.Edges) == 0 {
na = append(na, f)
return na, nil
}
m = f.Map()
if m == nil {
return nil, nil
}
}
eida := NewEdgeIDs(k)
for _, eid := range eida {
ea := m.GetEdges(eid)
for _, e := range ea {
if k.EdgeKey == nil {
na = append(na, e)
} else if e.Map_ != nil {
f := e.Map_.GetField(k.EdgeKey.IDA()...)
if f != nil {
na = append(na, f)
}
}
}
}
return na, nil
}
func (m *Map) QueryOne(idStr string) (Node, error) {
na, err := m.Query(idStr)
if err != nil {
return nil, err
}
if len(na) == 0 {
return nil, nil
}
if len(na) > 1 {
return nil, fmt.Errorf("expected only one query result but got: %#v", na)
}
return na[0], nil
}
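As the comment says, Query and QueryOne exist for tests and debugging: the ID string is parsed with d2parser.ParseMapKey, field paths resolve through GetField, and edge IDs (optionally prefixed with a board path, as in the tests above) resolve through GetEdges. A small sketch outside the diff of the lookup behavior:

package main

import (
	"fmt"
	"strings"

	"oss.terrastruct.com/d2/d2ir"
	"oss.terrastruct.com/d2/d2parser"
)

func main() {
	ast, err := d2parser.Parse("q.d2", strings.NewReader("x -> y"), nil)
	if err != nil {
		panic(err)
	}
	m, err := d2ir.Compile(ast)
	if err != nil {
		panic(err)
	}

	e, _ := m.QueryOne(`(x -> y)[0]`) // the single edge
	f, _ := m.QueryOne("x")           // the x field
	missing, _ := m.QueryOne("z")     // nil, nil: no such field
	fmt.Println(e != nil, f != nil, missing == nil)
}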

testdata/d2ir/TestCompile/edges/chain.exp.json (new generated file, 1652 lines; diff suppressed because it is too large)

testdata/d2ir/TestCompile/fields/null.exp.json (new generated file, 131 lines)

@ -0,0 +1,131 @@
{
"fields": [
{
"name": "pq",
"primary": {
"value": {
"range": "TestCompile/fields/null.d2,1:4:11-1:8:15"
}
},
"references": [
{
"string": {
"range": "TestCompile/fields/null.d2,0:0:0-0:2:2",
"value": [
{
"string": "pq",
"raw_string": "pq"
}
]
},
"key_path": {
"range": "TestCompile/fields/null.d2,0:0:0-0:2:2",
"path": [
{
"unquoted_string": {
"range": "TestCompile/fields/null.d2,0:0:0-0:2:2",
"value": [
{
"string": "pq",
"raw_string": "pq"
}
]
}
}
]
},
"context": {
"key": {
"range": "TestCompile/fields/null.d2,0:0:0-0:6:6",
"key": {
"range": "TestCompile/fields/null.d2,0:0:0-0:2:2",
"path": [
{
"unquoted_string": {
"range": "TestCompile/fields/null.d2,0:0:0-0:2:2",
"value": [
{
"string": "pq",
"raw_string": "pq"
}
]
}
}
]
},
"primary": {},
"value": {
"unquoted_string": {
"range": "TestCompile/fields/null.d2,0:4:4-0:6:6",
"value": [
{
"string": "pq",
"raw_string": "pq"
}
]
}
}
},
"edge": null
}
},
{
"string": {
"range": "TestCompile/fields/null.d2,1:0:7-1:2:9",
"value": [
{
"string": "pq",
"raw_string": "pq"
}
]
},
"key_path": {
"range": "TestCompile/fields/null.d2,1:0:7-1:2:9",
"path": [
{
"unquoted_string": {
"range": "TestCompile/fields/null.d2,1:0:7-1:2:9",
"value": [
{
"string": "pq",
"raw_string": "pq"
}
]
}
}
]
},
"context": {
"key": {
"range": "TestCompile/fields/null.d2,1:0:7-1:8:15",
"key": {
"range": "TestCompile/fields/null.d2,1:0:7-1:2:9",
"path": [
{
"unquoted_string": {
"range": "TestCompile/fields/null.d2,1:0:7-1:2:9",
"value": [
{
"string": "pq",
"raw_string": "pq"
}
]
}
}
]
},
"primary": {},
"value": {
"null": {
"range": "TestCompile/fields/null.d2,1:4:11-1:8:15"
}
}
},
"edge": null
}
}
]
}
],
"edges": null
}


@ -5,7 +5,7 @@
"references": [
{
"string": {
"range": "TestCompile/layer/root.d2,0:0:0-0:1:1",
"range": "TestCompile/layers/root.d2,0:0:0-0:1:1",
"value": [
{
"string": "x",
@ -14,11 +14,11 @@
]
},
"key_path": {
"range": "TestCompile/layer/root.d2,0:0:0-0:2:2",
"range": "TestCompile/layers/root.d2,0:0:0-0:2:2",
"path": [
{
"unquoted_string": {
"range": "TestCompile/layer/root.d2,0:0:0-0:1:1",
"range": "TestCompile/layers/root.d2,0:0:0-0:1:1",
"value": [
{
"string": "x",
@ -31,16 +31,16 @@
},
"context": {
"key": {
"range": "TestCompile/layer/root.d2,0:0:0-0:6:6",
"range": "TestCompile/layers/root.d2,0:0:0-0:6:6",
"edges": [
{
"range": "TestCompile/layer/root.d2,0:0:0-0:6:6",
"range": "TestCompile/layers/root.d2,0:0:0-0:6:6",
"src": {
"range": "TestCompile/layer/root.d2,0:0:0-0:2:2",
"range": "TestCompile/layers/root.d2,0:0:0-0:2:2",
"path": [
{
"unquoted_string": {
"range": "TestCompile/layer/root.d2,0:0:0-0:1:1",
"range": "TestCompile/layers/root.d2,0:0:0-0:1:1",
"value": [
{
"string": "x",
@ -53,11 +53,11 @@
},
"src_arrow": "",
"dst": {
"range": "TestCompile/layer/root.d2,0:4:4-0:6:6",
"range": "TestCompile/layers/root.d2,0:4:4-0:6:6",
"path": [
{
"unquoted_string": {
"range": "TestCompile/layer/root.d2,0:5:5-0:6:6",
"range": "TestCompile/layers/root.d2,0:5:5-0:6:6",
"value": [
{
"string": "y",
@ -75,13 +75,13 @@
"value": {}
},
"edge": {
"range": "TestCompile/layer/root.d2,0:0:0-0:6:6",
"range": "TestCompile/layers/root.d2,0:0:0-0:6:6",
"src": {
"range": "TestCompile/layer/root.d2,0:0:0-0:2:2",
"range": "TestCompile/layers/root.d2,0:0:0-0:2:2",
"path": [
{
"unquoted_string": {
"range": "TestCompile/layer/root.d2,0:0:0-0:1:1",
"range": "TestCompile/layers/root.d2,0:0:0-0:1:1",
"value": [
{
"string": "x",
@ -94,11 +94,11 @@
},
"src_arrow": "",
"dst": {
"range": "TestCompile/layer/root.d2,0:4:4-0:6:6",
"range": "TestCompile/layers/root.d2,0:4:4-0:6:6",
"path": [
{
"unquoted_string": {
"range": "TestCompile/layer/root.d2,0:5:5-0:6:6",
"range": "TestCompile/layers/root.d2,0:5:5-0:6:6",
"value": [
{
"string": "y",
@ -120,7 +120,7 @@
"references": [
{
"string": {
"range": "TestCompile/layer/root.d2,0:5:5-0:6:6",
"range": "TestCompile/layers/root.d2,0:5:5-0:6:6",
"value": [
{
"string": "y",
@ -129,11 +129,11 @@
]
},
"key_path": {
"range": "TestCompile/layer/root.d2,0:4:4-0:6:6",
"range": "TestCompile/layers/root.d2,0:4:4-0:6:6",
"path": [
{
"unquoted_string": {
"range": "TestCompile/layer/root.d2,0:5:5-0:6:6",
"range": "TestCompile/layers/root.d2,0:5:5-0:6:6",
"value": [
{
"string": "y",
@ -146,16 +146,16 @@
},
"context": {
"key": {
"range": "TestCompile/layer/root.d2,0:0:0-0:6:6",
"range": "TestCompile/layers/root.d2,0:0:0-0:6:6",
"edges": [
{
"range": "TestCompile/layer/root.d2,0:0:0-0:6:6",
"range": "TestCompile/layers/root.d2,0:0:0-0:6:6",
"src": {
"range": "TestCompile/layer/root.d2,0:0:0-0:2:2",
"range": "TestCompile/layers/root.d2,0:0:0-0:2:2",
"path": [
{
"unquoted_string": {
"range": "TestCompile/layer/root.d2,0:0:0-0:1:1",
"range": "TestCompile/layers/root.d2,0:0:0-0:1:1",
"value": [
{
"string": "x",
@ -168,11 +168,11 @@
},
"src_arrow": "",
"dst": {
"range": "TestCompile/layer/root.d2,0:4:4-0:6:6",
"range": "TestCompile/layers/root.d2,0:4:4-0:6:6",
"path": [
{
"unquoted_string": {
"range": "TestCompile/layer/root.d2,0:5:5-0:6:6",
"range": "TestCompile/layers/root.d2,0:5:5-0:6:6",
"value": [
{
"string": "y",
@ -190,13 +190,13 @@
"value": {}
},
"edge": {
"range": "TestCompile/layer/root.d2,0:0:0-0:6:6",
"range": "TestCompile/layers/root.d2,0:0:0-0:6:6",
"src": {
"range": "TestCompile/layer/root.d2,0:0:0-0:2:2",
"range": "TestCompile/layers/root.d2,0:0:0-0:2:2",
"path": [
{
"unquoted_string": {
"range": "TestCompile/layer/root.d2,0:0:0-0:1:1",
"range": "TestCompile/layers/root.d2,0:0:0-0:1:1",
"value": [
{
"string": "x",
@ -209,11 +209,11 @@
},
"src_arrow": "",
"dst": {
"range": "TestCompile/layer/root.d2,0:4:4-0:6:6",
"range": "TestCompile/layers/root.d2,0:4:4-0:6:6",
"path": [
{
"unquoted_string": {
"range": "TestCompile/layer/root.d2,0:5:5-0:6:6",
"range": "TestCompile/layers/root.d2,0:5:5-0:6:6",
"value": [
{
"string": "y",
@ -251,7 +251,7 @@
"references": [
{
"string": {
"range": "TestCompile/layer/root.d2,2:14:31-2:15:32",
"range": "TestCompile/layers/root.d2,2:14:31-2:15:32",
"value": [
{
"string": "z",
@ -260,11 +260,11 @@
]
},
"key_path": {
"range": "TestCompile/layer/root.d2,2:10:27-2:16:33",
"range": "TestCompile/layers/root.d2,2:10:27-2:16:33",
"path": [
{
"unquoted_string": {
"range": "TestCompile/layer/root.d2,2:10:27-2:11:28",
"range": "TestCompile/layers/root.d2,2:10:27-2:11:28",
"value": [
{
"string": "p",
@ -275,7 +275,7 @@
},
{
"unquoted_string": {
"range": "TestCompile/layer/root.d2,2:12:29-2:13:30",
"range": "TestCompile/layers/root.d2,2:12:29-2:13:30",
"value": [
{
"string": "q",
@ -286,7 +286,7 @@
},
{
"unquoted_string": {
"range": "TestCompile/layer/root.d2,2:14:31-2:15:32",
"range": "TestCompile/layers/root.d2,2:14:31-2:15:32",
"value": [
{
"string": "z",
@ -299,13 +299,13 @@
},
"context": {
"key": {
"range": "TestCompile/layer/root.d2,2:10:27-2:16:33",
"range": "TestCompile/layers/root.d2,2:10:27-2:16:33",
"key": {
"range": "TestCompile/layer/root.d2,2:10:27-2:16:33",
"range": "TestCompile/layers/root.d2,2:10:27-2:16:33",
"path": [
{
"unquoted_string": {
"range": "TestCompile/layer/root.d2,2:10:27-2:11:28",
"range": "TestCompile/layers/root.d2,2:10:27-2:11:28",
"value": [
{
"string": "p",
@ -316,7 +316,7 @@
},
{
"unquoted_string": {
"range": "TestCompile/layer/root.d2,2:12:29-2:13:30",
"range": "TestCompile/layers/root.d2,2:12:29-2:13:30",
"value": [
{
"string": "q",
@ -327,7 +327,7 @@
},
{
"unquoted_string": {
"range": "TestCompile/layer/root.d2,2:14:31-2:15:32",
"range": "TestCompile/layers/root.d2,2:14:31-2:15:32",
"value": [
{
"string": "z",
@ -352,7 +352,7 @@
"references": [
{
"string": {
"range": "TestCompile/layer/root.d2,2:12:29-2:13:30",
"range": "TestCompile/layers/root.d2,2:12:29-2:13:30",
"value": [
{
"string": "q",
@ -361,11 +361,11 @@
]
},
"key_path": {
"range": "TestCompile/layer/root.d2,2:10:27-2:16:33",
"range": "TestCompile/layers/root.d2,2:10:27-2:16:33",
"path": [
{
"unquoted_string": {
"range": "TestCompile/layer/root.d2,2:10:27-2:11:28",
"range": "TestCompile/layers/root.d2,2:10:27-2:11:28",
"value": [
{
"string": "p",
@ -376,7 +376,7 @@
},
{
"unquoted_string": {
"range": "TestCompile/layer/root.d2,2:12:29-2:13:30",
"range": "TestCompile/layers/root.d2,2:12:29-2:13:30",
"value": [
{
"string": "q",
@ -387,7 +387,7 @@
},
{
"unquoted_string": {
"range": "TestCompile/layer/root.d2,2:14:31-2:15:32",
"range": "TestCompile/layers/root.d2,2:14:31-2:15:32",
"value": [
{
"string": "z",
@ -400,13 +400,13 @@
},
"context": {
"key": {
"range": "TestCompile/layer/root.d2,2:10:27-2:16:33",
"range": "TestCompile/layers/root.d2,2:10:27-2:16:33",
"key": {
"range": "TestCompile/layer/root.d2,2:10:27-2:16:33",
"range": "TestCompile/layers/root.d2,2:10:27-2:16:33",
"path": [
{
"unquoted_string": {
"range": "TestCompile/layer/root.d2,2:10:27-2:11:28",
"range": "TestCompile/layers/root.d2,2:10:27-2:11:28",
"value": [
{
"string": "p",
@ -417,7 +417,7 @@
},
{
"unquoted_string": {
"range": "TestCompile/layer/root.d2,2:12:29-2:13:30",
"range": "TestCompile/layers/root.d2,2:12:29-2:13:30",
"value": [
{
"string": "q",
@ -428,7 +428,7 @@
},
{
"unquoted_string": {
"range": "TestCompile/layer/root.d2,2:14:31-2:15:32",
"range": "TestCompile/layers/root.d2,2:14:31-2:15:32",
"value": [
{
"string": "z",
@ -453,7 +453,7 @@
"references": [
{
"string": {
"range": "TestCompile/layer/root.d2,2:10:27-2:11:28",
"range": "TestCompile/layers/root.d2,2:10:27-2:11:28",
"value": [
{
"string": "p",
@ -462,11 +462,11 @@
]
},
"key_path": {
"range": "TestCompile/layer/root.d2,2:10:27-2:16:33",
"range": "TestCompile/layers/root.d2,2:10:27-2:16:33",
"path": [
{
"unquoted_string": {
"range": "TestCompile/layer/root.d2,2:10:27-2:11:28",
"range": "TestCompile/layers/root.d2,2:10:27-2:11:28",
"value": [
{
"string": "p",
@ -477,7 +477,7 @@
},
{
"unquoted_string": {
"range": "TestCompile/layer/root.d2,2:12:29-2:13:30",
"range": "TestCompile/layers/root.d2,2:12:29-2:13:30",
"value": [
{
"string": "q",
@ -488,7 +488,7 @@
},
{
"unquoted_string": {
"range": "TestCompile/layer/root.d2,2:14:31-2:15:32",
"range": "TestCompile/layers/root.d2,2:14:31-2:15:32",
"value": [
{
"string": "z",
@ -501,13 +501,13 @@
},
"context": {
"key": {
"range": "TestCompile/layer/root.d2,2:10:27-2:16:33",
"range": "TestCompile/layers/root.d2,2:10:27-2:16:33",
"key": {
"range": "TestCompile/layer/root.d2,2:10:27-2:16:33",
"range": "TestCompile/layers/root.d2,2:10:27-2:16:33",
"path": [
{
"unquoted_string": {
"range": "TestCompile/layer/root.d2,2:10:27-2:11:28",
"range": "TestCompile/layers/root.d2,2:10:27-2:11:28",
"value": [
{
"string": "p",
@ -518,7 +518,7 @@
},
{
"unquoted_string": {
"range": "TestCompile/layer/root.d2,2:12:29-2:13:30",
"range": "TestCompile/layers/root.d2,2:12:29-2:13:30",
"value": [
{
"string": "q",
@ -529,7 +529,7 @@
},
{
"unquoted_string": {
"range": "TestCompile/layer/root.d2,2:14:31-2:15:32",
"range": "TestCompile/layers/root.d2,2:14:31-2:15:32",
"value": [
{
"string": "z",
@ -554,7 +554,7 @@
"references": [
{
"string": {
"range": "TestCompile/layer/root.d2,2:1:18-2:6:23",
"range": "TestCompile/layers/root.d2,2:1:18-2:6:23",
"value": [
{
"string": "bingo",
@ -563,11 +563,11 @@
]
},
"key_path": {
"range": "TestCompile/layer/root.d2,2:1:18-2:6:23",
"range": "TestCompile/layers/root.d2,2:1:18-2:6:23",
"path": [
{
"unquoted_string": {
"range": "TestCompile/layer/root.d2,2:1:18-2:6:23",
"range": "TestCompile/layers/root.d2,2:1:18-2:6:23",
"value": [
{
"string": "bingo",
@ -580,13 +580,13 @@
},
"context": {
"key": {
"range": "TestCompile/layer/root.d2,2:1:18-2:17:34",
"range": "TestCompile/layers/root.d2,2:1:18-2:17:34",
"key": {
"range": "TestCompile/layer/root.d2,2:1:18-2:6:23",
"range": "TestCompile/layers/root.d2,2:1:18-2:6:23",
"path": [
{
"unquoted_string": {
"range": "TestCompile/layer/root.d2,2:1:18-2:6:23",
"range": "TestCompile/layers/root.d2,2:1:18-2:6:23",
"value": [
{
"string": "bingo",
@ -600,17 +600,17 @@
"primary": {},
"value": {
"map": {
"range": "TestCompile/layer/root.d2,2:8:25-2:16:33",
"range": "TestCompile/layers/root.d2,2:8:25-2:16:33",
"nodes": [
{
"map_key": {
"range": "TestCompile/layer/root.d2,2:10:27-2:16:33",
"range": "TestCompile/layers/root.d2,2:10:27-2:16:33",
"key": {
"range": "TestCompile/layer/root.d2,2:10:27-2:16:33",
"range": "TestCompile/layers/root.d2,2:10:27-2:16:33",
"path": [
{
"unquoted_string": {
"range": "TestCompile/layer/root.d2,2:10:27-2:11:28",
"range": "TestCompile/layers/root.d2,2:10:27-2:11:28",
"value": [
{
"string": "p",
@ -621,7 +621,7 @@
},
{
"unquoted_string": {
"range": "TestCompile/layer/root.d2,2:12:29-2:13:30",
"range": "TestCompile/layers/root.d2,2:12:29-2:13:30",
"value": [
{
"string": "q",
@ -632,7 +632,7 @@
},
{
"unquoted_string": {
"range": "TestCompile/layer/root.d2,2:14:31-2:15:32",
"range": "TestCompile/layers/root.d2,2:14:31-2:15:32",
"value": [
{
"string": "z",
@ -662,7 +662,7 @@
"references": [
{
"string": {
"range": "TestCompile/layer/root.d2,1:0:7-1:6:13",
"range": "TestCompile/layers/root.d2,1:0:7-1:6:13",
"value": [
{
"string": "layers",
@ -671,11 +671,11 @@
]
},
"key_path": {
"range": "TestCompile/layer/root.d2,1:0:7-1:6:13",
"range": "TestCompile/layers/root.d2,1:0:7-1:6:13",
"path": [
{
"unquoted_string": {
"range": "TestCompile/layer/root.d2,1:0:7-1:6:13",
"range": "TestCompile/layers/root.d2,1:0:7-1:6:13",
"value": [
{
"string": "layers",
@ -688,13 +688,13 @@
},
"context": {
"key": {
"range": "TestCompile/layer/root.d2,1:0:7-3:1:36",
"range": "TestCompile/layers/root.d2,1:0:7-3:1:36",
"key": {
"range": "TestCompile/layer/root.d2,1:0:7-1:6:13",
"range": "TestCompile/layers/root.d2,1:0:7-1:6:13",
"path": [
{
"unquoted_string": {
"range": "TestCompile/layer/root.d2,1:0:7-1:6:13",
"range": "TestCompile/layers/root.d2,1:0:7-1:6:13",
"value": [
{
"string": "layers",
@ -708,17 +708,17 @@
"primary": {},
"value": {
"map": {
"range": "TestCompile/layer/root.d2,1:8:15-3:0:35",
"range": "TestCompile/layers/root.d2,1:8:15-3:0:35",
"nodes": [
{
"map_key": {
"range": "TestCompile/layer/root.d2,2:1:18-2:17:34",
"range": "TestCompile/layers/root.d2,2:1:18-2:17:34",
"key": {
"range": "TestCompile/layer/root.d2,2:1:18-2:6:23",
"range": "TestCompile/layers/root.d2,2:1:18-2:6:23",
"path": [
{
"unquoted_string": {
"range": "TestCompile/layer/root.d2,2:1:18-2:6:23",
"range": "TestCompile/layers/root.d2,2:1:18-2:6:23",
"value": [
{
"string": "bingo",
@ -732,17 +732,17 @@
"primary": {},
"value": {
"map": {
"range": "TestCompile/layer/root.d2,2:8:25-2:16:33",
"range": "TestCompile/layers/root.d2,2:8:25-2:16:33",
"nodes": [
{
"map_key": {
"range": "TestCompile/layer/root.d2,2:10:27-2:16:33",
"range": "TestCompile/layers/root.d2,2:10:27-2:16:33",
"key": {
"range": "TestCompile/layer/root.d2,2:10:27-2:16:33",
"range": "TestCompile/layers/root.d2,2:10:27-2:16:33",
"path": [
{
"unquoted_string": {
"range": "TestCompile/layer/root.d2,2:10:27-2:11:28",
"range": "TestCompile/layers/root.d2,2:10:27-2:11:28",
"value": [
{
"string": "p",
@ -753,7 +753,7 @@
},
{
"unquoted_string": {
"range": "TestCompile/layer/root.d2,2:12:29-2:13:30",
"range": "TestCompile/layers/root.d2,2:12:29-2:13:30",
"value": [
{
"string": "q",
@ -764,7 +764,7 @@
},
{
"unquoted_string": {
"range": "TestCompile/layer/root.d2,2:14:31-2:15:32",
"range": "TestCompile/layers/root.d2,2:14:31-2:15:32",
"value": [
{
"string": "z",
@ -811,16 +811,16 @@
{
"context": {
"key": {
"range": "TestCompile/layer/root.d2,0:0:0-0:6:6",
"range": "TestCompile/layers/root.d2,0:0:0-0:6:6",
"edges": [
{
"range": "TestCompile/layer/root.d2,0:0:0-0:6:6",
"range": "TestCompile/layers/root.d2,0:0:0-0:6:6",
"src": {
"range": "TestCompile/layer/root.d2,0:0:0-0:2:2",
"range": "TestCompile/layers/root.d2,0:0:0-0:2:2",
"path": [
{
"unquoted_string": {
"range": "TestCompile/layer/root.d2,0:0:0-0:1:1",
"range": "TestCompile/layers/root.d2,0:0:0-0:1:1",
"value": [
{
"string": "x",
@ -833,11 +833,11 @@
},
"src_arrow": "",
"dst": {
"range": "TestCompile/layer/root.d2,0:4:4-0:6:6",
"range": "TestCompile/layers/root.d2,0:4:4-0:6:6",
"path": [
{
"unquoted_string": {
"range": "TestCompile/layer/root.d2,0:5:5-0:6:6",
"range": "TestCompile/layers/root.d2,0:5:5-0:6:6",
"value": [
{
"string": "y",
@ -855,13 +855,13 @@
"value": {}
},
"edge": {
"range": "TestCompile/layer/root.d2,0:0:0-0:6:6",
"range": "TestCompile/layers/root.d2,0:0:0-0:6:6",
"src": {
"range": "TestCompile/layer/root.d2,0:0:0-0:2:2",
"range": "TestCompile/layers/root.d2,0:0:0-0:2:2",
"path": [
{
"unquoted_string": {
"range": "TestCompile/layer/root.d2,0:0:0-0:1:1",
"range": "TestCompile/layers/root.d2,0:0:0-0:1:1",
"value": [
{
"string": "x",
@ -874,11 +874,11 @@
},
"src_arrow": "",
"dst": {
"range": "TestCompile/layer/root.d2,0:4:4-0:6:6",
"range": "TestCompile/layers/root.d2,0:4:4-0:6:6",
"path": [
{
"unquoted_string": {
"range": "TestCompile/layer/root.d2,0:5:5-0:6:6",
"range": "TestCompile/layers/root.d2,0:5:5-0:6:6",
"value": [
{
"string": "y",

testdata/d2ir/TestCompile/scenarios/root.exp.json (new generated file, 1759 lines; diff suppressed because it is too large)

testdata/d2ir/TestCompile/steps/root.exp.json (new generated file, 2069 lines; diff suppressed because it is too large)