Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Removing EXPERIMENTAL_GENERAL_RULE_REFS feature flag #6252

Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Jump to
Jump to file
Failed to load files.
Diff view
Diff view
62 changes: 2 additions & 60 deletions ast/compile.go
Expand Up @@ -7,7 +7,6 @@ package ast
import (
"fmt"
"io"
"os"
"sort"
"strconv"
"strings"
Expand Down Expand Up @@ -146,7 +145,6 @@ type Compiler struct {
keepModules bool // whether to keep the unprocessed, parse modules (below)
parsedModules map[string]*Module // parsed, but otherwise unprocessed modules, kept track of when keepModules is true
useTypeCheckAnnotations bool // whether to provide annotated information (schemas) to the type checker
generalRuleRefsEnabled bool
}

// CompilerStage defines the interface for stages in the compiler.
Expand Down Expand Up @@ -335,8 +333,6 @@ func NewCompiler() *Compiler {
{"BuildComprehensionIndices", "compile_stage_rebuild_comprehension_indices", c.buildComprehensionIndices},
}

_, c.generalRuleRefsEnabled = os.LookupEnv("EXPERIMENTAL_GENERAL_RULE_REFS")

return c
}

Expand Down Expand Up @@ -1005,50 +1001,8 @@ func (c *Compiler) checkRuleConflicts() {
// data.p.q[r][s] { r := input.r; s := input.s }
// data.p[q].r.s { q := input.q }

if c.generalRuleRefsEnabled {
if r.Ref().IsGround() && len(node.Children) > 0 {
conflicts = node.flattenChildren()
}
} else { // TODO: Remove when general rule refs are enabled by default.
if r.Head.RuleKind() == SingleValue && len(node.Children) > 0 {
if len(ref) > 1 && !ref[len(ref)-1].IsGround() { // p.q[x] and p.q.s.t => check grandchildren
for _, c := range node.Children {
grandchildrenFound := false

if len(c.Values) > 0 {
childRules := extractRules(c.Values)
for _, childRule := range childRules {
childRef := childRule.Ref()
if childRule.Head.RuleKind() == SingleValue && !childRef[len(childRef)-1].IsGround() {
// The child is a partial object rule, so it's effectively "generating" grandchildren.
grandchildrenFound = true
break
}
}
}

if len(c.Children) > 0 {
grandchildrenFound = true
}

if grandchildrenFound {
conflicts = node.flattenChildren()
break
}
}
} else { // p.q.s and p.q.s.t => any children are in conflict
conflicts = node.flattenChildren()
}
}

// Multi-value rules may not have any other rules in their extent; e.g.:
//
// data.p[v] { v := ... }
// data.p.q := 42 # In direct conflict with data.p[v], which is constructing a set and cannot have values assigned to a sub-path.

if r.Head.RuleKind() == MultiValue && len(node.Children) > 0 {
conflicts = node.flattenChildren()
}
if r.Ref().IsGround() && len(node.Children) > 0 {
conflicts = node.flattenChildren()
}

if r.Head.RuleKind() == SingleValue && r.Head.Ref().IsGround() {
Expand Down Expand Up @@ -1811,18 +1765,6 @@ func (c *Compiler) rewriteRuleHeadRefs() {
}

for i := 1; i < len(ref); i++ {
// NOTE: Unless enabled via the EXPERIMENTAL_GENERAL_RULE_REFS env var, non-string values in the refs are forbidden
// except for the last position, e.g.
// OK: p.q.r[s]
// NOT OK: p[q].r.s
// TODO: Remove when general rule refs are enabled by default.
if !c.generalRuleRefsEnabled && i != len(ref)-1 { // last
if _, ok := ref[i].Value.(String); !ok {
c.err(NewError(TypeErr, rule.Loc(), "rule head must only contain string terms (except for last): %v", ref[i]))
continue
}
}

// Rewrite so that any non-scalar elements that in the last position of
// the rule are vars:
// p.q.r[y.z] { ... } => p.q.r[__local0__] { __local0__ = y.z }
Expand Down
128 changes: 0 additions & 128 deletions ast/compile_test.go
Expand Up @@ -467,8 +467,6 @@ func toRef(s string) Ref {
}

func TestCompilerCheckRuleHeadRefs(t *testing.T) {
t.Setenv("EXPERIMENTAL_GENERAL_RULE_REFS", "true")

tests := []struct {
note string
modules []*Module
Expand Down Expand Up @@ -607,64 +605,6 @@ func TestCompilerCheckRuleHeadRefs(t *testing.T) {
}
}

// TODO: Remove when general rule refs are enabled by default.
//
// Verifies the legacy (flag-disabled) restriction on rule head refs: every
// position except the last must be a string term. Each table entry compiles a
// module whose rule head violates that restriction and expects the compiler
// to report the corresponding rego_type_error.
func TestCompilerCheckRuleHeadRefsWithGeneralRuleRefsDisabled(t *testing.T) {

	tests := []struct {
		note     string    // sub-test name
		modules  []*Module // modules to compile
		expected *Rule     // expected rewritten rule (only checked when err is empty)
		err      string    // expected compiler error string (empty means success expected)
	}{
		{
			// Var in a non-final ref position (p.q[i].r) is rejected.
			note: "ref contains var",
			modules: modules(
				`package x
p.q[i].r = 1 { i := 10 }`,
			),
			err: "rego_type_error: rule head must only contain string terms (except for last): i",
		},
		{
			// A ref term (arr[0]) nested in a non-final position is rejected.
			note: "invalid: ref in ref",
			modules: modules(
				`package x
p.q[arr[0]].r { i := 10 }`,
			),
			err: "rego_type_error: rule head must only contain string terms (except for last): arr[0]",
		},
		{
			// A non-string scalar (10) in a non-final position is rejected.
			note: "invalid: non-string in ref (not last position)",
			modules: modules(
				`package x
p.q[10].r { true }`,
			),
			err: "rego_type_error: rule head must only contain string terms (except for last): 10",
		},
	}

	for _, tc := range tests {
		t.Run(tc.note, func(t *testing.T) {
			// Build the compiler's module map keyed by table index.
			mods := make(map[string]*Module, len(tc.modules))
			for i, m := range tc.modules {
				mods[fmt.Sprint(i)] = m
			}
			c := NewCompiler()
			c.Modules = mods
			// Run compilation only up to (and including) the stage under test.
			compileStages(c, c.rewriteRuleHeadRefs)
			if tc.err != "" {
				assertCompilerErrorStrings(t, c, []string{tc.err})
			} else {
				if len(c.Errors) > 0 {
					t.Fatalf("expected no errors, got %v", c.Errors)
				}
				if tc.expected != nil {
					// On success, the first rule of the first module must match
					// the expected rewritten form.
					assertRulesEqual(t, tc.expected, mods["0"].Rules[0])
				}
			}
		})
	}
}

func TestRuleTreeWithDotsInHeads(t *testing.T) {

// TODO(sr): multi-val with var key in ref
Expand Down Expand Up @@ -1940,8 +1880,6 @@ func TestCompilerCheckRuleConflictsDefaultFunction(t *testing.T) {
}

func TestCompilerCheckRuleConflictsDotsInRuleHeads(t *testing.T) {
t.Setenv("EXPERIMENTAL_GENERAL_RULE_REFS", "true")

tests := []struct {
note string
modules []*Module
Expand Down Expand Up @@ -2187,70 +2125,6 @@ func TestCompilerCheckRuleConflictsDotsInRuleHeads(t *testing.T) {
}
}

// TODO: Remove when general rule refs are enabled by default.
//
// Exercises rule-conflict detection with the EXPERIMENTAL_GENERAL_RULE_REFS
// env var unset: overlapping single-value rules with unknown keys must be
// reported as conflicts (or head-ref type errors) by checkRuleConflicts.
func TestGeneralRuleRefsDisabled(t *testing.T) {
	// EXPERIMENTAL_GENERAL_RULE_REFS env var not set

	type testCase struct {
		note    string    // sub-test name
		modules []*Module // modules to compile together
		err     string    // expected compiler error (empty means no errors expected)
	}

	cases := []testCase{
		{
			note: "single-value with other rule overlap, unknown key",
			modules: modules(
				`package pkg
p.q[r] = x { r = input.key; x = input.foo }
p.q.r.s = x { true }
`),
			err: "rego_type_error: rule data.pkg.p.q[r] conflicts with [data.pkg.p.q.r.s]",
		},
		{
			note: "single-value with other rule overlap, unknown ref var and key",
			modules: modules(
				`package pkg
p.q[r][s] = x { r = input.key1; s = input.key2; x = input.foo }
p.q.r.s.t = x { true }
`),
			err: "rego_type_error: rule head must only contain string terms (except for last): r",
		},
		{
			note: "single-value partial object with other partial object rule overlap, unknown keys (regression test for #5855; invalidated by multi-var refs)",
			modules: modules(
				`package pkg
p[r] := x { r = input.key; x = input.bar }
p.q[r] := x { r = input.key; x = input.bar }
`),
			err: "rego_type_error: rule data.pkg.p[r] conflicts with [data.pkg.p.q[r]]",
		},
		{
			note: "single-value partial object with other partial object (implicit 'true' value) rule overlap, unknown keys",
			modules: modules(
				`package pkg
p[r] := x { r = input.key; x = input.bar }
p.q[r] { r = input.key }
`),
			err: "rego_type_error: rule data.pkg.p[r] conflicts with [data.pkg.p.q[r]]",
		},
	}

	for _, tt := range cases {
		t.Run(tt.note, func(t *testing.T) {
			// Key each module by its index in the table entry.
			moduleSet := make(map[string]*Module, len(tt.modules))
			for idx, mod := range tt.modules {
				moduleSet[fmt.Sprint(idx)] = mod
			}

			compiler := NewCompiler()
			compiler.Modules = moduleSet
			// Run compilation up to the conflict-checking stage only.
			compileStages(compiler, compiler.checkRuleConflicts)

			// An empty err means we expect a clean compile.
			expected := []string{}
			if tt.err != "" {
				expected = []string{tt.err}
			}
			assertCompilerErrorStrings(t, compiler, expected)
		})
	}
}

func TestCompilerCheckUndefinedFuncs(t *testing.T) {

module := `
Expand Down Expand Up @@ -6861,8 +6735,6 @@ func TestCompilerMockVirtualDocumentPartially(t *testing.T) {
}

func TestCompilerCheckUnusedAssignedVar(t *testing.T) {
t.Setenv("EXPERIMENTAL_GENERAL_RULE_REFS", "true")

type testCase struct {
note string
module string
Expand Down
22 changes: 7 additions & 15 deletions ast/parser.go
Expand Up @@ -11,7 +11,6 @@ import (
"io"
"math/big"
"net/url"
"os"
"regexp"
"sort"
"strconv"
Expand Down Expand Up @@ -98,14 +97,13 @@ func (e *parsedTermCacheItem) String() string {

// ParserOptions defines the options for parsing Rego statements.
type ParserOptions struct {
Capabilities *Capabilities
ProcessAnnotation bool
AllFutureKeywords bool
FutureKeywords []string
SkipRules bool
JSONOptions *astJSON.Options
unreleasedKeywords bool // TODO(sr): cleanup
generalRuleRefsEnabled bool
Capabilities *Capabilities
ProcessAnnotation bool
AllFutureKeywords bool
FutureKeywords []string
SkipRules bool
JSONOptions *astJSON.Options
unreleasedKeywords bool // TODO(sr): cleanup
}

// NewParser creates and initializes a Parser.
Expand All @@ -114,7 +112,6 @@ func NewParser() *Parser {
s: &state{},
po: ParserOptions{},
}
_, p.po.generalRuleRefsEnabled = os.LookupEnv("EXPERIMENTAL_GENERAL_RULE_REFS")
return p
}

Expand Down Expand Up @@ -599,11 +596,6 @@ func (p *Parser) parseRules() []*Rule {
return []*Rule{&rule}
}

if !p.po.generalRuleRefsEnabled && usesContains && !rule.Head.Reference.IsGround() {
p.error(p.s.Loc(), "multi-value rules need ground refs")
return nil
}

// back-compat with `p[x] { ... }``
hasIf := p.s.tok == tokens.If

Expand Down
2 changes: 0 additions & 2 deletions format/format_test.go
Expand Up @@ -78,8 +78,6 @@ func TestFormatSourceError(t *testing.T) {
}

func TestFormatSource(t *testing.T) {
t.Setenv("EXPERIMENTAL_GENERAL_RULE_REFS", "true")

regoFiles, err := filepath.Glob("testfiles/*.rego")
if err != nil {
panic(err)
Expand Down
2 changes: 0 additions & 2 deletions internal/planner/planner_test.go
Expand Up @@ -16,8 +16,6 @@ import (
)

func TestPlannerHelloWorld(t *testing.T) {
t.Setenv("EXPERIMENTAL_GENERAL_RULE_REFS", "true")

// NOTE(tsandall): These tests are not meant to give comprehensive coverage
// of the planner. Currently we have a suite of end-to-end tests in the
// test/wasm/ directory that are specified in YAML, compiled into Wasm, and
Expand Down