Merge pull request #2395 from alixander/glob-filter-endpoints
d2ir: add support for nested property filtering on edges
alixander authored Mar 1, 2025
2 parents e111c22 + 54af9d1 commit dd4e6bc
Showing 5 changed files with 2,989 additions and 0 deletions.
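
For context, this change lets an ampersand glob filter match edges based on properties of their endpoint objects, rather than only the edge's own keys. The following D2 sketch is illustrative only, adapted from the test cases added below (the object names and styles are arbitrary):

a.style.fill: blue
b.style.fill: red

# dash only the edges whose source object is filled blue
(* -> *)[*]: {
  &src.style.fill: blue
  style.stroke-dash: 3
}

a -> b  # matches: a.style.fill is blue
b -> a  # does not match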
1 change: 1 addition & 0 deletions ci/release/changelogs/next.md
@@ -2,6 +2,7 @@

- Icons: connections can include icons [#12](https://github.com/terrastruct/d2/issues/12)
- Syntax: `suspend`/`unsuspend` to define models and instantiate them [#2394](https://github.com/terrastruct/d2/pull/2394)
- Globs: support for filtering edges based on properties of endpoint nodes (e.g., `&src.style.fill: blue`) [#2395](https://github.com/terrastruct/d2/pull/2395)

#### Improvements 🧹

106 changes: 106 additions & 0 deletions d2compiler/compile_test.go
@@ -5499,6 +5499,112 @@ d -> d: "suspend"
				assert.Equal(t, 1, len(g.Edges))
			},
		},
		{
			name: "edge-glob-ampersand-filter/1",
			run: func(t *testing.T) {
				g, _ := assertCompile(t, `
(* -> *)[*]: {
  &src: a
  style.stroke-dash: 3
}
(* -> *)[*]: {
  &dst: c
  style.stroke: blue
}
(* -> *)[*]: {
  &src: b
  &dst: c
  style.fill: red
}
a -> b
b -> c
a -> c
`, ``)
				tassert.Equal(t, 3, len(g.Edges))

				tassert.Equal(t, "a", g.Edges[0].Src.ID)
				tassert.Equal(t, "b", g.Edges[0].Dst.ID)
				tassert.Equal(t, "3", g.Edges[0].Style.StrokeDash.Value)
				tassert.Equal(t, (*d2graph.Scalar)(nil), g.Edges[0].Style.Stroke)
				tassert.Equal(t, (*d2graph.Scalar)(nil), g.Edges[0].Style.Fill)

				tassert.Equal(t, "b", g.Edges[1].Src.ID)
				tassert.Equal(t, "c", g.Edges[1].Dst.ID)
				tassert.Equal(t, "blue", g.Edges[1].Style.Stroke.Value)
				tassert.Equal(t, (*d2graph.Scalar)(nil), g.Edges[1].Style.StrokeDash)
				tassert.Equal(t, "red", g.Edges[1].Style.Fill.Value)

				tassert.Equal(t, "a", g.Edges[2].Src.ID)
				tassert.Equal(t, "c", g.Edges[2].Dst.ID)
				tassert.Equal(t, "3", g.Edges[2].Style.StrokeDash.Value)
				tassert.Equal(t, "blue", g.Edges[2].Style.Stroke.Value)
				tassert.Equal(t, (*d2graph.Scalar)(nil), g.Edges[2].Style.Fill)
			},
		},
		{
			name: "edge-glob-ampersand-filter/2",
			run: func(t *testing.T) {
				g, _ := assertCompile(t, `
a: {
  shape: circle
  style: {
    fill: blue
    opacity: 0.8
  }
}
b: {
  shape: rectangle
  style: {
    fill: red
    opacity: 0.5
  }
}
c: {
  shape: diamond
  style.fill: green
  style.opacity: 0.8
}
(* -> *)[*]: {
  &src.style.fill: blue
  style.stroke-dash: 3
}
(* -> *)[*]: {
  &dst.style.opacity: 0.8
  style.stroke: cyan
}
(* -> *)[*]: {
  &src.shape: rectangle
  &dst.style.fill: green
  style.stroke-width: 5
}
a -> b
b -> c
a -> c
`, ``)

				tassert.Equal(t, 3, len(g.Edges))

				tassert.Equal(t, "a", g.Edges[0].Src.ID)
				tassert.Equal(t, "b", g.Edges[0].Dst.ID)
				tassert.Equal(t, "3", g.Edges[0].Style.StrokeDash.Value)
				tassert.Equal(t, (*d2graph.Scalar)(nil), g.Edges[0].Style.Stroke)
				tassert.Equal(t, (*d2graph.Scalar)(nil), g.Edges[0].Style.StrokeWidth)

				tassert.Equal(t, "b", g.Edges[1].Src.ID)
				tassert.Equal(t, "c", g.Edges[1].Dst.ID)
				tassert.Equal(t, "cyan", g.Edges[1].Style.Stroke.Value)
				tassert.Equal(t, (*d2graph.Scalar)(nil), g.Edges[1].Style.StrokeDash)
				tassert.Equal(t, "5", g.Edges[1].Style.StrokeWidth.Value)

				tassert.Equal(t, "a", g.Edges[2].Src.ID)
				tassert.Equal(t, "c", g.Edges[2].Dst.ID)
				tassert.Equal(t, "3", g.Edges[2].Style.StrokeDash.Value)
				tassert.Equal(t, "cyan", g.Edges[2].Style.Stroke.Value)
				tassert.Equal(t, (*d2graph.Scalar)(nil), g.Edges[2].Style.StrokeWidth)
			},
		},
	}

	for _, tc := range tca {
96 changes: 96 additions & 0 deletions d2ir/compile.go
@@ -694,6 +694,63 @@ func (c *compiler) ampersandFilter(refctx *RefContext) bool {
		return true
	}

	keyPath := refctx.Key.Key
	if keyPath == nil || len(keyPath.Path) == 0 {
		return false
	}

	// &src.<path> / &dst.<path>: filter an edge by a property of its endpoint node.
	firstPart := keyPath.Path[0].Unbox().ScalarString()
	if (firstPart == "src" || firstPart == "dst") && len(keyPath.Path) > 1 {
		if len(c.mapRefContextStack) == 0 {
			return false
		}

		edge := ParentEdge(refctx.ScopeMap)
		if edge == nil {
			return false
		}

		// Resolve the endpoint object referenced by src/dst.
		var nodePath []d2ast.String
		if firstPart == "src" {
			nodePath = edge.ID.SrcPath
		} else {
			nodePath = edge.ID.DstPath
		}

		rootMap := RootMap(refctx.ScopeMap)
		node := rootMap.GetField(nodePath...)
		if node == nil || node.Map() == nil {
			return false
		}

		// Re-run the filter against the endpoint's map using the remainder of the key path.
		propKeyPath := &d2ast.KeyPath{
			Path: keyPath.Path[1:],
		}

		propKey := &d2ast.Key{
			Key:   propKeyPath,
			Value: refctx.Key.Value,
		}

		propRefCtx := &RefContext{
			Key:      propKey,
			ScopeMap: node.Map(),
			ScopeAST: refctx.ScopeAST,
		}

		fa, err := node.Map().EnsureField(propKeyPath, propRefCtx, false, c)
		if err != nil || len(fa) == 0 {
			return false
		}

		for _, f := range fa {
			if c._ampersandFilter(f, propRefCtx) {
				return true
			}
		}
		return false
	}

	fa, err := refctx.ScopeMap.EnsureField(refctx.Key.Key, refctx, false, c)
	if err != nil {
		c.err.Errors = append(c.err.Errors, err.(d2ast.Error))
@@ -796,6 +853,45 @@ func (c *compiler) ampersandFilter(refctx *RefContext) bool {
			f.Primary_ = n.Primary()
		}
		return c._ampersandFilter(f, refctx)
	case "src":
		// &src: match when the filter value equals the edge's full source path.
		if len(c.mapRefContextStack) == 0 {
			return false
		}

		edge := ParentEdge(refctx.ScopeMap)
		if edge == nil {
			return false
		}

		filterValue := refctx.Key.Value.ScalarBox().Unbox().ScalarString()

		var srcParts []string
		for _, part := range edge.ID.SrcPath {
			srcParts = append(srcParts, part.ScalarString())
		}
		srcPath := strings.Join(srcParts, ".")

		return srcPath == filterValue

	case "dst":
		// &dst: match when the filter value equals the edge's full destination path.
		if len(c.mapRefContextStack) == 0 {
			return false
		}

		edge := ParentEdge(refctx.ScopeMap)
		if edge == nil {
			return false
		}

		filterValue := refctx.Key.Value.ScalarBox().Unbox().ScalarString()

		var dstParts []string
		for _, part := range edge.ID.DstPath {
			dstParts = append(dstParts, part.ScalarString())
		}
		dstPath := strings.Join(dstParts, ".")

		return dstPath == filterValue
	default:
		return false
	}
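
As a usage note for the plain (non-nested) src/dst cases above: the filter value is compared against the edge endpoint's dot-joined ID path. A minimal D2 sketch mirroring the first test case added in this commit (object names are arbitrary):

(* -> *)[*]: {
  &src: b
  &dst: c
  style.fill: red
}

a -> b
b -> c  # only this edge satisfies both filters and is filled red
a -> c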
