diff --git a/pkg/air/gadgets/bits.go b/pkg/air/gadgets/bits.go index 08d2489..1c8330f 100644 --- a/pkg/air/gadgets/bits.go +++ b/pkg/air/gadgets/bits.go @@ -51,7 +51,7 @@ func ApplyBitwidthGadget(col uint, nbits uint, schema *air.Schema) { // Create Column + Constraint es[i] = air.NewColumnAccess(index+i, 0).Mul(air.NewConst(coefficient)) - schema.AddRangeConstraint(index+i, &fr256) + schema.AddRangeConstraint(index+i, fr256) // Update coefficient coefficient.Mul(&coefficient, &fr256) } diff --git a/pkg/air/schema.go b/pkg/air/schema.go index 5ad317c..eb5b555 100644 --- a/pkg/air/schema.go +++ b/pkg/air/schema.go @@ -22,6 +22,9 @@ type LookupConstraint = *constraint.LookupConstraint[*ColumnAccess] // VanishingConstraint captures the essence of a vanishing constraint at the AIR level. type VanishingConstraint = *constraint.VanishingConstraint[constraint.ZeroTest[Expr]] +// RangeConstraint captures the essence of a range constraints at the AIR level. +type RangeConstraint = *constraint.RangeConstraint[*ColumnAccess] + // PermutationConstraint captures the essence of a permutation constraint at the AIR level. // Specifically, it represents a constraint that one (or more) columns are a permutation of another. type PermutationConstraint = *constraint.PermutationConstraint @@ -150,8 +153,11 @@ func (p *Schema) AddVanishingConstraint(handle string, context trace.Context, do } // AddRangeConstraint appends a new range constraint. 
-func (p *Schema) AddRangeConstraint(column uint, bound *fr.Element) { - p.constraints = append(p.constraints, constraint.NewRangeConstraint(column, bound)) +func (p *Schema) AddRangeConstraint(column uint, bound fr.Element) { + col := p.Columns().Nth(column) + handle := col.QualifiedName(p) + tc := constraint.NewRangeConstraint[*ColumnAccess](handle, col.Context(), NewColumnAccess(column, 0), bound) + p.constraints = append(p.constraints, tc) } // ============================================================================ diff --git a/pkg/binfile/constraint.go b/pkg/binfile/constraint.go index 5e81c64..ff797ad 100644 --- a/pkg/binfile/constraint.go +++ b/pkg/binfile/constraint.go @@ -13,6 +13,7 @@ type jsonConstraint struct { Vanishes *jsonVanishingConstraint Permutation *jsonPermutationConstraint Lookup *jsonLookupConstraint + InRange *jsonRangeConstraint } type jsonDomain struct { @@ -38,6 +39,12 @@ type jsonLookupConstraint struct { To []jsonTypedExpr `json:"including"` } +type jsonRangeConstraint struct { + Handle string `json:"handle"` + Expr jsonTypedExpr `json:"exp"` + Max jsonExprConst `json:"max"` +} + // ============================================================================= // Translation // ============================================================================= @@ -70,6 +77,16 @@ func (e jsonConstraint) addToSchema(colmap map[uint]uint, schema *hir.Schema) { } // Add constraint schema.AddLookupConstraint(e.Lookup.Handle, sourceCtx, targetCtx, sources, targets) + } else if e.InRange != nil { + // Translate the vanishing expression + expr := e.InRange.Expr.ToHir(colmap, schema) + // Determine enclosing module + ctx := expr.Context(schema) + // Convert bound into max + bound := e.InRange.Max.ToField() + handle := expr.Lisp(schema).String(true) + // Construct the vanishing constraint + schema.AddRangeConstraint(handle, ctx, expr, bound) } else if e.Permutation == nil { // Catch all panic("Unknown JSON constraint encountered") diff --git 
a/pkg/binfile/constraint_set.go b/pkg/binfile/constraint_set.go index 07b1056..bbee210 100644 --- a/pkg/binfile/constraint_set.go +++ b/pkg/binfile/constraint_set.go @@ -174,8 +174,9 @@ func allocateRegisters(cs *constraintSet, schema *hir.Schema) map[uint]uint { // Add column for this cid := schema.AddDataColumn(ctx, handle.column, col_type) // Check whether a type constraint required or not. - if c.MustProve { - schema.AddTypeConstraint(cid, col_type) + if c.MustProve && col_type.AsUint() != nil { + bound := col_type.AsUint().Bound() + schema.AddRangeConstraint(c.Handle, ctx, &hir.ColumnAccess{Column: cid, Shift: 0}, *bound) } } } diff --git a/pkg/binfile/expr.go b/pkg/binfile/expr.go index 424eced..479cfdf 100644 --- a/pkg/binfile/expr.go +++ b/pkg/binfile/expr.go @@ -76,6 +76,20 @@ func (e *jsonTypedExpr) ToHir(colmap map[uint]uint, schema *hir.Schema) hir.Expr // ToHir converts a big integer represented as a sequence of unsigned 32bit // words into HIR constant expression. func (e *jsonExprConst) ToHir(schema *hir.Schema) hir.Expr { + return &hir.Constant{Val: e.ToField()} +} + +func (e *jsonExprConst) ToField() fr.Element { + var num fr.Element + // + val := e.ToBigInt() + // Construct Field Value + num.SetBigInt(val) + // + return num +} + +func (e *jsonExprConst) ToBigInt() *big.Int { sign := int(e.BigInt[0].(float64)) words := e.BigInt[1].([]any) // Begin @@ -100,12 +114,8 @@ func (e *jsonExprConst) ToHir(schema *hir.Schema) hir.Expr { } else { panic(fmt.Sprintf("Unknown BigInt sign: %d", sign)) } - // Construct Field Value - var num fr.Element - - num.SetBigInt(val) - // Done! 
- return &hir.Constant{Val: num} + // Done + return val } func (e *jsonExprColumn) ToHir(colmap map[uint]uint, schema *hir.Schema) hir.Expr { diff --git a/pkg/hir/lower.go b/pkg/hir/lower.go index e58dffa..8df3efa 100644 --- a/pkg/hir/lower.go +++ b/pkg/hir/lower.go @@ -6,7 +6,6 @@ import ( "github.com/consensys/gnark-crypto/ecc/bls12-377/fr" "github.com/consensys/go-corset/pkg/mir" sc "github.com/consensys/go-corset/pkg/schema" - "github.com/consensys/go-corset/pkg/schema/constraint" ) // LowerToMir lowers (or refines) an HIR table into an MIR schema. That means @@ -53,8 +52,12 @@ func lowerConstraintToMir(c sc.Constraint, schema *mir.Schema) { for _, mir_expr := range mir_exprs { schema.AddVanishingConstraint(v.Handle(), v.Context(), v.Domain(), mir_expr) } - } else if v, ok := c.(*constraint.TypeConstraint); ok { - schema.AddTypeConstraint(v.Target(), v.Type()) + } else if v, ok := c.(RangeConstraint); ok { + mir_exprs := v.Target().LowerTo(schema) + // Add individual constraints arising + for _, mir_expr := range mir_exprs { + schema.AddRangeConstraint(v.Handle(), v.Context(), mir_expr, v.Bound()) + } } else { // Should be unreachable as no other constraint types can be added to a // schema. 
diff --git a/pkg/hir/parser.go b/pkg/hir/parser.go index d9b2fdc..74713eb 100644 --- a/pkg/hir/parser.go +++ b/pkg/hir/parser.go @@ -102,6 +102,8 @@ func (p *hirParser) parseDeclaration(s sexp.SExp) error { return p.parseLookupDeclaration(e) } else if e.Len() == 3 && e.MatchSymbols(1, "definterleaved") { return p.parseInterleavingDeclaration(e) + } else if e.Len() == 3 && e.MatchSymbols(1, "definrange") { + return p.parseRangeDeclaration(e) } } // Error @@ -176,7 +178,11 @@ func (p *hirParser) parseColumnDeclaration(e sexp.SExp) error { } // Register column cid := p.env.AddDataColumn(p.module, columnName, columnType) - p.env.schema.AddTypeConstraint(cid, columnType) + // Apply type constraint (if applicable) + if columnType.AsUint() != nil { + bound := columnType.AsUint().Bound() + p.env.schema.AddRangeConstraint(columnName, p.module, &ColumnAccess{cid, 0}, *bound) + } // return nil } @@ -409,6 +415,37 @@ func (p *hirParser) parseInterleavingDeclaration(l *sexp.List) error { return nil } +// Parse a range constraint +func (p *hirParser) parseRangeDeclaration(l *sexp.List) error { + var bound fr.Element + // Check bound + if l.Get(2).AsSymbol() == nil { + return p.translator.SyntaxError(l.Get(2), "malformed bound") + } + // Parse bound + if _, err := bound.SetString(l.Get(2).AsSymbol().Value); err != nil { + return p.translator.SyntaxError(l.Get(2), "malformed bound") + } + // Parse expression + expr, err := p.translator.Translate(l.Get(1)) + if err != nil { + return err + } + // Determine evaluation context of expression. + ctx := expr.Context(p.env.schema) + // Sanity check we have a sensible context here. 
+ if ctx.IsConflicted() { + return p.translator.SyntaxError(l.Get(1), "conflicting evaluation context") + } else if ctx.IsVoid() { + return p.translator.SyntaxError(l.Get(1), "empty evaluation context") + } + // + handle := l.Get(1).String(true) + p.env.schema.AddRangeConstraint(handle, ctx, expr, bound) + // + return nil +} + // Parse a property assertion func (p *hirParser) parseAssertionDeclaration(elements []sexp.SExp) error { handle := elements[1].AsSymbol().Value diff --git a/pkg/hir/schema.go b/pkg/hir/schema.go index 73bfcd2..0d8d5a3 100644 --- a/pkg/hir/schema.go +++ b/pkg/hir/schema.go @@ -3,6 +3,7 @@ package hir import ( "fmt" + "github.com/consensys/gnark-crypto/ecc/bls12-377/fr" "github.com/consensys/go-corset/pkg/schema" sc "github.com/consensys/go-corset/pkg/schema" "github.com/consensys/go-corset/pkg/schema/assignment" @@ -24,6 +25,9 @@ type VanishingConstraint = *constraint.VanishingConstraint[ZeroArrayTest] // certain expression forms cannot be permitted (e.g. the use of lists). type LookupConstraint = *constraint.LookupConstraint[UnitExpr] +// RangeConstraint captures the essence of a range constraints at the HIR level. +type RangeConstraint = *constraint.RangeConstraint[MaxExpr] + // PropertyAssertion captures the notion of an arbitrary property which should // hold for all acceptable traces. However, such a property is not enforced by // the prover. @@ -127,12 +131,11 @@ func (p *Schema) AddVanishingConstraint(handle string, context trace.Context, do constraint.NewVanishingConstraint(handle, context, domain, ZeroArrayTest{expr})) } -// AddTypeConstraint appends a new range constraint. -func (p *Schema) AddTypeConstraint(target uint, t sc.Type) { +// AddRangeConstraint appends a new range constraint with a raw bound. +func (p *Schema) AddRangeConstraint(handle string, context trace.Context, expr Expr, bound fr.Element) { // Check whether is a field type, as these can actually be ignored. 
- if t.AsField() == nil { - p.constraints = append(p.constraints, constraint.NewTypeConstraint(target, t)) - } + maxExpr := MaxExpr{expr} + p.constraints = append(p.constraints, constraint.NewRangeConstraint[MaxExpr](handle, context, maxExpr, bound)) } // AddPropertyAssertion appends a new property assertion. diff --git a/pkg/hir/util.go b/pkg/hir/util.go index 13a988f..69d3e8f 100644 --- a/pkg/hir/util.go +++ b/pkg/hir/util.go @@ -2,6 +2,7 @@ package hir import ( "github.com/consensys/gnark-crypto/ecc/bls12-377/fr" + "github.com/consensys/go-corset/pkg/mir" sc "github.com/consensys/go-corset/pkg/schema" "github.com/consensys/go-corset/pkg/sexp" tr "github.com/consensys/go-corset/pkg/trace" @@ -133,3 +134,80 @@ func (e UnitExpr) RequiredCells(row int, trace tr.Trace) *util.AnySortedSet[tr.C func (e UnitExpr) Lisp(schema sc.Schema) sexp.SExp { return e.expr.Lisp(schema) } + +// ============================================================================ +// MaxExpr +// ============================================================================ + +// MaxExpr is an adaptor for a general expression which can be used in +// situations where an Evaluable expression is required. This performs a +// similar function to the ZeroArrayTest, but actually produces a value. +// Specifically, the value produced is always the maximum of all values +// produced. This is only useful in specific situations (e.g. checking range +// constraints). +type MaxExpr struct { + // + expr Expr +} + +// NewMaxExpr constructs a max adaptor around an HIR expression. Since an HIR +// expression can produce multiple values on a given row, evaluating the +// adaptor yields the maximum of all values produced on that row. This is +// primarily useful for checking range constraints, where every possible +// value of the expression must lie below a given bound. 
+func NewMaxExpr(expr Expr) MaxExpr { + return MaxExpr{expr} +} + +// EvalAt evaluates a column access at a given row in a trace, which returns the +// value at that row of the column in question or nil is that row is +// out-of-bounds. +func (e MaxExpr) EvalAt(k int, trace tr.Trace) fr.Element { + vals := e.expr.EvalAllAt(k, trace) + // + max := fr.NewElement(0) + // + for _, v := range vals { + if max.Cmp(&v) < 0 { + max = v + } + } + // + return max +} + +// Bounds returns max shift in either the negative (left) or positive +// direction (right). +func (e MaxExpr) Bounds() util.Bounds { + return e.expr.Bounds() +} + +// Context determines the evaluation context (i.e. enclosing module) for this +// expression. +func (e MaxExpr) Context(schema sc.Schema) tr.Context { + return e.expr.Context(schema) +} + +// RequiredColumns returns the set of columns on which this term depends. +// That is, columns whose values may be accessed when evaluating this term +// on a given trace. +func (e MaxExpr) RequiredColumns() *util.SortedSet[uint] { + return e.expr.RequiredColumns() +} + +// RequiredCells returns the set of trace cells on which this term depends. +// In this case, that is the empty set. +func (e MaxExpr) RequiredCells(row int, trace tr.Trace) *util.AnySortedSet[tr.CellRef] { + return e.expr.RequiredCells(row, trace) +} + +// LowerTo lowers a max expressions down to one or more expressions at the MIR level. +func (e MaxExpr) LowerTo(schema *mir.Schema) []mir.Expr { + return e.expr.LowerTo(schema) +} + +// Lisp converts this schema element into a simple S-Expression, for example +// so it can be printed. 
+func (e MaxExpr) Lisp(schema sc.Schema) sexp.SExp { + return e.expr.Lisp(schema) +} diff --git a/pkg/mir/lower.go b/pkg/mir/lower.go index b5b1e81..1ca8654 100644 --- a/pkg/mir/lower.go +++ b/pkg/mir/lower.go @@ -2,11 +2,11 @@ package mir import ( "fmt" + "math/big" "github.com/consensys/go-corset/pkg/air" air_gadgets "github.com/consensys/go-corset/pkg/air/gadgets" sc "github.com/consensys/go-corset/pkg/schema" - "github.com/consensys/go-corset/pkg/schema/constraint" "github.com/consensys/go-corset/pkg/trace" ) @@ -66,30 +66,9 @@ func lowerConstraintToAir(c sc.Constraint, schema *air.Schema) { if v, ok := c.(LookupConstraint); ok { lowerLookupConstraintToAir(v, schema) } else if v, ok := c.(VanishingConstraint); ok { - air_expr := lowerExprTo(v.Context(), v.Constraint().Expr, schema) - // Check whether this is a constant - constant := air_expr.AsConstant() - // Check for compile-time constants - if constant != nil && !constant.IsZero() { - panic(fmt.Sprintf("constraint %s cannot vanish!", v.Handle())) - } else if constant == nil { - schema.AddVanishingConstraint(v.Handle(), v.Context(), v.Domain(), air_expr) - } - } else if v, ok := c.(*constraint.TypeConstraint); ok { - if t := v.Type().AsUint(); t != nil { - // Yes, a constraint is implied. Now, decide whether to use a range - // constraint or just a vanishing constraint. - if t.HasBound(2) { - // u1 => use vanishing constraint X * (X - 1) - air_gadgets.ApplyBinaryGadget(v.Target(), schema) - } else if t.HasBound(256) { - // u2..8 use range constraints - schema.AddRangeConstraint(v.Target(), t.Bound()) - } else { - // u9+ use byte decompositions. - air_gadgets.ApplyBitwidthGadget(v.Target(), t.BitWidth(), schema) - } - } + lowerVanishingConstraintToAir(v, schema) + } else if v, ok := c.(RangeConstraint); ok { + lowerRangeConstraintToAir(v, schema) } else { // Should be unreachable as no other constraint types can be added to a // schema. 
@@ -97,6 +76,52 @@ func lowerConstraintToAir(c sc.Constraint, schema *air.Schema) { } } +// Lower a vanishing constraint to the AIR level. This is relatively +// straightforward and simply relies on lowering the expression being +// constrained. This may result in the generation of computed columns, e.g. to +// hold inverses, etc. +func lowerVanishingConstraintToAir(v VanishingConstraint, schema *air.Schema) { + air_expr := lowerExprTo(v.Context(), v.Constraint().Expr, schema) + // Check whether this is a constant + constant := air_expr.AsConstant() + // Check for compile-time constants + if constant != nil && !constant.IsZero() { + panic(fmt.Sprintf("constraint %s cannot vanish!", v.Handle())) + } else if constant == nil { + schema.AddVanishingConstraint(v.Handle(), v.Context(), v.Domain(), air_expr) + } +} + +// Lower a range constraint to the AIR level. The challenge here is that a +// range constraint at the AIR level cannot use arbitrary expressions; rather it +// can only constrain columns directly. Therefore, whenever a general +// expression is encountered, we must generate a computed column to hold the +// value of that expression, along with appropriate constraints to enforce the +// expected value. +func lowerRangeConstraintToAir(v RangeConstraint, schema *air.Schema) { + // Lower target expression + target := lowerExprTo(v.Context(), v.Target(), schema) + // Expand target expression (if necessary) + column := air_gadgets.Expand(v.Context(), target, schema) + // Yes, a constraint is implied. Now, decide whether to use a range + // constraint or just a vanishing constraint. + if v.BoundedAtMost(2) { + // u1 => use vanishing constraint X * (X - 1) + air_gadgets.ApplyBinaryGadget(column, schema) + } else if v.BoundedAtMost(256) { + // u2..8 use range constraints + schema.AddRangeConstraint(column, v.Bound()) + } else { + // u9+ use byte decompositions. 
+ var bi big.Int + // Convert bound into big int + elem := v.Bound() + elem.BigInt(&bi) + // Apply bitwidth gadget + air_gadgets.ApplyBitwidthGadget(column, uint(bi.BitLen()-1), schema) + } +} + // Lower a lookup constraint to the AIR level. The challenge here is that a // lookup constraint at the AIR level cannot use arbitrary expressions; rather, // it can only access columns directly. Therefore, whenever a general diff --git a/pkg/mir/schema.go b/pkg/mir/schema.go index 1116beb..3e114b8 100644 --- a/pkg/mir/schema.go +++ b/pkg/mir/schema.go @@ -3,6 +3,7 @@ package mir import ( "fmt" + "github.com/consensys/gnark-crypto/ecc/bls12-377/fr" "github.com/consensys/go-corset/pkg/schema" "github.com/consensys/go-corset/pkg/schema/assignment" "github.com/consensys/go-corset/pkg/schema/constraint" @@ -22,6 +23,9 @@ type LookupConstraint = *constraint.LookupConstraint[Expr] // zero. type VanishingConstraint = *constraint.VanishingConstraint[constraint.ZeroTest[Expr]] +// RangeConstraint captures the essence of a range constraints at the MIR level. +type RangeConstraint = *constraint.RangeConstraint[Expr] + // PropertyAssertion captures the notion of an arbitrary property which should // hold for all acceptable traces. However, such a property is not enforced by // the prover. @@ -122,12 +126,9 @@ func (p *Schema) AddVanishingConstraint(handle string, context trace.Context, do constraint.NewVanishingConstraint(handle, context, domain, constraint.ZeroTest[Expr]{Expr: expr})) } -// AddTypeConstraint appends a new range constraint. -func (p *Schema) AddTypeConstraint(target uint, t schema.Type) { - // Check whether is a field type, as these can actually be ignored. - if t.AsField() == nil { - p.constraints = append(p.constraints, constraint.NewTypeConstraint(target, t)) - } +// AddRangeConstraint appends a new range constraint. 
+func (p *Schema) AddRangeConstraint(handle string, context trace.Context, expr Expr, bound fr.Element) { + p.constraints = append(p.constraints, constraint.NewRangeConstraint(handle, context, expr, bound)) } // AddPropertyAssertion appends a new property assertion. diff --git a/pkg/schema/constraint/range.go b/pkg/schema/constraint/range.go index 448219a..a3af4d5 100644 --- a/pkg/schema/constraint/range.go +++ b/pkg/schema/constraint/range.go @@ -10,89 +10,115 @@ import ( "github.com/consensys/go-corset/pkg/trace" ) -// RangeFailure provides structural information about a failing range constraint. +// RangeFailure provides structural information about a failing range constraint. type RangeFailure struct { - msg string + // Handle of the failing constraint + handle string + // Constraint expression + expr sc.Evaluable + // Row on which the constraint failed + row uint } // Message provides a suitable error message func (p *RangeFailure) Message() string { - return p.msg + // Construct useful error message + return fmt.Sprintf("expression \"%s\" out-of-bounds (row %d)", p.handle, p.row) } func (p *RangeFailure) String() string { - return p.msg + return p.Message() } -// RangeConstraint restricts all values in a given column to be within -// a range [0..n) for some bound n. For example, a bound of 256 would -// restrict all values to be bytes. At this time, range constraints -// are explicitly limited at the arithmetic level to bounds of at most -// 256 (i.e. to ensuring bytes). This restriction is somewhat -// arbitrary and is determined by the underlying prover. -type RangeConstraint struct { - // Column index to be constrained. - column uint - // The actual constraint itself, namely an expression which - // should evaluate to zero. NOTE: an fr.Element is used here - // to store the bound simply to make the necessary comparison - // against table data more direct. 
- bound *fr.Element +// RangeConstraint restricts all values for a given expression to be within a +// range [0..n) for some bound n. Any bound is supported, and the system will +// choose the best underlying implementation as needed. +type RangeConstraint[E sc.Evaluable] struct { + // A unique identifier for this constraint. This is primarily useful for + // debugging. + handle string + // Evaluation context for this constraint which must match that of the + // constrained expression itself. + context trace.Context + // The expression whose values are being constrained. Every evaluation of + // this expression, on every row of the enclosing module, must produce a + // value which lies strictly below the given bound. + expr E + // The upper bound for this constraint. Specifically, every evaluation of + // the expression should produce a value strictly below this bound. NOTE: + // an fr.Element is used here to store the bound simply to make the + // necessary comparison against table data more direct. + bound fr.Element } // NewRangeConstraint constructs a new Range constraint! -func NewRangeConstraint(column uint, bound *fr.Element) *RangeConstraint { - var n fr.Element = fr.NewElement(256) - if bound.Cmp(&n) > 0 { - panic("Range constraint for bitwidth above 8 not supported") - } +func NewRangeConstraint[E sc.Evaluable](handle string, context trace.Context, + expr E, bound fr.Element) *RangeConstraint[E] { + return &RangeConstraint[E]{handle, context, expr, bound} +} - return &RangeConstraint{column, bound} +// Handle returns a unique identifier for this constraint. +// +//nolint:revive +func (p *RangeConstraint[E]) Handle() string { + return p.handle } -// Accepts checks whether a range constraint evaluates to zero on -// every row of a table. If so, return nil otherwise return an error. 
-func (p *RangeConstraint) Accepts(tr trace.Trace) schema.Failure { - column := tr.Column(p.column) - height := tr.Height(column.Context()) - // Iterate all rows of the module +// Context returns the evaluation context for this constraint. +// +//nolint:revive +func (p *RangeConstraint[E]) Context() trace.Context { + return p.context +} + +// Target returns the target expression for this constraint. +func (p *RangeConstraint[E]) Target() E { + return p.expr +} + +// Bound returns the upper bound for this constraint. Specifically, any +// evaluation of the target expression should produce a value strictly below +// this bound. +func (p *RangeConstraint[E]) Bound() fr.Element { + return p.bound +} + +// BoundedAtMost determines whether the bound for this constraint is at most a given bound. +func (p *RangeConstraint[E]) BoundedAtMost(bound uint) bool { + var n fr.Element = fr.NewElement(uint64(bound)) + return p.bound.Cmp(&n) <= 0 +} + +// Accepts checks whether a range constraint holds on every row of a table. If so, return +// nil otherwise return an error. +// +//nolint:revive +func (p *RangeConstraint[E]) Accepts(tr trace.Trace) schema.Failure { + // Determine height of enclosing module + height := tr.Height(p.context) + // Iterate every row for k := 0; k < int(height); k++ { // Get the value on the kth row - kth := column.Get(k) - // Perform the bounds check - if kth.Cmp(p.bound) >= 0 { - name := column.Name() - // Construct useful error message - msg := fmt.Sprintf("value out-of-bounds (row %d, %s)", kth, name) + kth := p.expr.EvalAt(k, tr) + // Perform the range check + if kth.Cmp(&p.bound) >= 0 { // Evaluation failure - return &RangeFailure{msg} + return &RangeFailure{p.handle, p.expr, uint(k)} } } // All good return nil } -// Column returns the index of the column subjected to the constraint. -func (p *RangeConstraint) Column() uint { - return p.column -} - -// Bound returns the range boundary of the constraint. 
+// Lisp converts this schema element into a simple S-Expression, for example so +// it can be printed. // -// Note: the bound is returned in the form of a uint because this is simpler -// and more straightforward to understand. -func (p *RangeConstraint) Bound() uint64 { - return p.bound.Uint64() -} - -// Lisp converts this schema element into a simple S-Expression, for example -// so it can be printed. -func (p *RangeConstraint) Lisp(schema sc.Schema) sexp.SExp { - col := schema.Columns().Nth(p.column) +//nolint:revive +func (p *RangeConstraint[E]) Lisp(schema sc.Schema) sexp.SExp { // return sexp.NewList([]sexp.SExp{ - sexp.NewSymbol("range"), - sexp.NewSymbol(col.QualifiedName(schema)), + sexp.NewSymbol("definrange"), + p.expr.Lisp(schema), sexp.NewSymbol(p.bound.String()), }) } diff --git a/pkg/schema/constraint/type.go b/pkg/schema/constraint/type.go deleted file mode 100644 index 42d7bc5..0000000 --- a/pkg/schema/constraint/type.go +++ /dev/null @@ -1,87 +0,0 @@ -package constraint - -import ( - "fmt" - - "github.com/consensys/go-corset/pkg/schema" - sc "github.com/consensys/go-corset/pkg/schema" - "github.com/consensys/go-corset/pkg/sexp" - "github.com/consensys/go-corset/pkg/trace" -) - -// TypeFailure provides structural information about a failing type constraint. -type TypeFailure struct { - msg string -} - -// Message provides a suitable error message -func (p *TypeFailure) Message() string { - return p.msg -} - -func (p *TypeFailure) String() string { - return p.msg -} - -// TypeConstraint restricts all values in a given column to be within -// a range [0..n) for some bound n. Any bound is supported, and the system will -// choose the best underlying implementation as needed. -type TypeConstraint struct { - // Column to be constrained. - column uint - // The actual constraint itself, namely an expression which - // should evaluate to zero. 
NOTE: an fr.Element is used here - // to store the bound simply to make the necessary comparison - // against table data more direct. - expected schema.Type -} - -// NewTypeConstraint constructs a new Range constraint! -func NewTypeConstraint(column uint, expected schema.Type) *TypeConstraint { - return &TypeConstraint{column, expected} -} - -// Target returns the target column for this constraint. -func (p *TypeConstraint) Target() uint { - return p.column -} - -// Type returns the expected for all values in the target column. -func (p *TypeConstraint) Type() schema.Type { - return p.expected -} - -// Accepts checks whether a range constraint evaluates to zero on -// every row of a table. If so, return nil otherwise return an error. -func (p *TypeConstraint) Accepts(tr trace.Trace) schema.Failure { - column := tr.Column(p.column) - // Determine height - height := tr.Height(column.Context()) - // Iterate every row - for k := 0; k < int(height); k++ { - // Get the value on the kth row - kth := column.Get(k) - // Perform the type check - if !p.expected.Accept(kth) { - name := column.Name() - // Construct useful error message - msg := fmt.Sprintf("value out-of-bounds (row %d, %s)", kth, name) - // Evaluation failure - return &TypeFailure{msg} - } - } - // All good - return nil -} - -// Lisp converts this schema element into a simple S-Expression, for example -// so it can be printed. -func (p *TypeConstraint) Lisp(schema sc.Schema) sexp.SExp { - col := schema.Columns().Nth(p.column) - // - return sexp.NewList([]sexp.SExp{ - sexp.NewSymbol("type"), - sexp.NewSymbol(col.QualifiedName(schema)), - sexp.NewSymbol(p.expected.String()), - }) -} diff --git a/pkg/schema/constraint/vanishing.go b/pkg/schema/constraint/vanishing.go index 4ce503e..f67e347 100644 --- a/pkg/schema/constraint/vanishing.go +++ b/pkg/schema/constraint/vanishing.go @@ -151,6 +151,8 @@ func (p *VanishingConstraint[T]) Domain() *int { // Context returns the evaluation context for this constraint. 
Every constraint // must be situated within exactly one module in order to be well-formed. +// +//nolint:revive func (p *VanishingConstraint[T]) Context() tr.Context { return p.context } diff --git a/pkg/schema/schemas.go b/pkg/schema/schemas.go index b9657e5..82d76ba 100644 --- a/pkg/schema/schemas.go +++ b/pkg/schema/schemas.go @@ -73,6 +73,8 @@ func ContextOfColumns(cols []uint, schema Schema) tr.Context { // can fail to adhere to the schema for a variety of reasons, such as having a // constraint which does not hold. Observe that this does not check assertions // within the schema hold. +// +//nolint:revive func Accepts(batchsize uint, schema Schema, trace tr.Trace) []Failure { errors := make([]Failure, 0) // Initialise batch number (for debugging purposes) diff --git a/pkg/test/ir_test.go b/pkg/test/ir_test.go index dcdce8f..c6c2ae3 100644 --- a/pkg/test/ir_test.go +++ b/pkg/test/ir_test.go @@ -288,6 +288,30 @@ func Test_Type_03(t *testing.T) { Check(t, "type_03") } +// =================================================================== +// Range Constraints +// =================================================================== + +func Test_Range_01(t *testing.T) { + Check(t, "range_01") +} + +func Test_Range_02(t *testing.T) { + Check(t, "range_02") +} + +func Test_Range_03(t *testing.T) { + Check(t, "range_03") +} + +func Test_Range_04(t *testing.T) { + Check(t, "range_04") +} + +func Test_Range_05(t *testing.T) { + Check(t, "range_05") +} + // =================================================================== // Constant Propagation // =================================================================== diff --git a/testdata/range_01.accepts b/testdata/range_01.accepts new file mode 100644 index 0000000..7039f38 --- /dev/null +++ b/testdata/range_01.accepts @@ -0,0 +1,17 @@ +{ "X1": [], "X4": [], "X8": [] } +{ "X1": [0], "X4": [0], "X8": [0] } +{ "X1": [1], "X4": [1], "X8": [0] } +{ "X1": [1], "X4": [2], "X8": [1] } +{ "X1": [0], "X4": [3], "X8": [4] } +{ 
"X1": [1], "X4": [4], "X8": [7] } +{ "X1": [0], "X4": [5], "X8": [124] } +{ "X1": [1], "X4": [6], "X8": [133] } +{ "X1": [0], "X4": [7], "X8": [155] } +{ "X1": [0], "X4": [8], "X8": [181] } +{ "X1": [0], "X4": [9], "X8": [201] } +{ "X1": [0], "X4": [10], "X8": [243] } +{ "X1": [0], "X4": [11], "X8": [250] } +{ "X1": [0], "X4": [12], "X8": [252] } +{ "X1": [0], "X4": [13], "X8": [253] } +{ "X1": [0], "X4": [14], "X8": [254] } +{ "X1": [0], "X4": [15], "X8": [255] } diff --git a/testdata/range_01.lisp b/testdata/range_01.lisp new file mode 100644 index 0000000..5ca78b4 --- /dev/null +++ b/testdata/range_01.lisp @@ -0,0 +1,5 @@ +(defcolumns X1 X4 X8) + +(definrange X1 2) +(definrange X4 16) +(definrange X8 256) diff --git a/testdata/range_01.rejects b/testdata/range_01.rejects new file mode 100644 index 0000000..3082bdf --- /dev/null +++ b/testdata/range_01.rejects @@ -0,0 +1,12 @@ +{ "X1": [-1], "X4": [0], "X8": [0] } +{ "X1": [0], "X4": [-1], "X8": [0] } +{ "X1": [0], "X4": [0], "X8": [-1] } +{ "X1": [2], "X4": [0], "X8": [0] } +{ "X1": [3], "X4": [0], "X8": [0] } +{ "X1": [0], "X4": [16], "X8": [0] } +{ "X1": [0], "X4": [17], "X8": [0] } +{ "X1": [0], "X4": [18], "X8": [0] } +{ "X1": [0], "X4": [0], "X8": [256] } +{ "X1": [0], "X4": [0], "X8": [16384] } +{ "X1": [0], "X4": [0], "X8": [32765] } +{ "X1": [0], "X4": [0], "X8": [65535] } diff --git a/testdata/range_02.accepts b/testdata/range_02.accepts new file mode 100644 index 0000000..3f72fc1 --- /dev/null +++ b/testdata/range_02.accepts @@ -0,0 +1,27 @@ +{ "X16": [0], "X32": [0] } +{ "X16": [1], "X32": [1] } +{ "X16": [2], "X32": [2] } +{ "X16": [255], "X32": [65535] } +{ "X16": [256], "X32": [65536] } +{ "X16": [16380], "X32": [16777212] } +{ "X16": [16381], "X32": [16777213] } +{ "X16": [16382], "X32": [16777214] } +{ "X16": [16383], "X32": [16777215] } +{ "X16": [16384], "X32": [16777216] } +{ "X16": [16385], "X32": [16777217] } +{ "X16": [16386], "X32": [16777218] } +{ "X16": [16387], "X32": [16777219] } +{ 
"X16": [32764], "X32": [268435452] } +{ "X16": [32765], "X32": [268435453] } +{ "X16": [32766], "X32": [268435454] } +{ "X16": [32767], "X32": [268435455] } +{ "X16": [32768], "X32": [268435456] } +{ "X16": [32769], "X32": [268435457] } +{ "X16": [32770], "X32": [268435458] } +{ "X16": [32771], "X32": [268435459] } +{ "X16": [65530], "X32": [4294967290] } +{ "X16": [65531], "X32": [4294967291] } +{ "X16": [65532], "X32": [4294967292] } +{ "X16": [65533], "X32": [4294967293] } +{ "X16": [65534], "X32": [4294967294] } +{ "X16": [65535], "X32": [4294967295] } diff --git a/testdata/range_02.lisp b/testdata/range_02.lisp new file mode 100644 index 0000000..4b88822 --- /dev/null +++ b/testdata/range_02.lisp @@ -0,0 +1,4 @@ +(defcolumns X16 X32) + +(definrange X16 65536) +(definrange X32 4294967296) diff --git a/testdata/range_02.rejects b/testdata/range_02.rejects new file mode 100644 index 0000000..866107c --- /dev/null +++ b/testdata/range_02.rejects @@ -0,0 +1,17 @@ +{ "X16": [-255], "X32": [0] } +{ "X16": [-10], "X32": [0] } +{ "X16": [-4], "X32": [0] } +{ "X16": [-1], "X32": [0] } +{ "X16": [0], "X32": [-255] } +{ "X16": [0], "X32": [-10] } +{ "X16": [0], "X32": [-4] } +{ "X16": [0], "X32": [-1] } +{ "X16": [65536], "X32": [0] } +{ "X16": [65537], "X32": [0] } +{ "X16": [65538], "X32": [0] } +{ "X16": [65539], "X32": [0] } +{ "X16": [0], "X32": [4294967296] } +{ "X16": [0], "X32": [4294967297] } +{ "X16": [0], "X32": [4294967298] } +{ "X16": [0], "X32": [4294967299] } +;; Raw diff --git a/testdata/range_03.accepts b/testdata/range_03.accepts new file mode 100644 index 0000000..3a85929 --- /dev/null +++ b/testdata/range_03.accepts @@ -0,0 +1,64 @@ +{ "X": [] } +{ "X": [0] } +{ "X": [1] } +{ "X": [2] } +{ "X": [3] } +{ "X": [4] } +{ "X": [5] } +{ "X": [6] } +;; +{ "X": [0,0] } +{ "X": [0,1] } +{ "X": [0,2] } +{ "X": [0,3] } +{ "X": [0,4] } +{ "X": [0,5] } +{ "X": [0,6] } +;; +{ "X": [1,0] } +{ "X": [1,1] } +{ "X": [1,2] } +{ "X": [1,3] } +{ "X": [1,4] } +{ "X": [1,5] 
} +{ "X": [1,6] } +;; +{ "X": [2,0] } +{ "X": [2,1] } +{ "X": [2,2] } +{ "X": [2,3] } +{ "X": [2,4] } +{ "X": [2,5] } +{ "X": [2,6] } +;; +{ "X": [3,0] } +{ "X": [3,1] } +{ "X": [3,2] } +{ "X": [3,3] } +{ "X": [3,4] } +{ "X": [3,5] } +{ "X": [3,6] } +;; +{ "X": [4,0] } +{ "X": [4,1] } +{ "X": [4,2] } +{ "X": [4,3] } +{ "X": [4,4] } +{ "X": [4,5] } +{ "X": [4,6] } +;; +{ "X": [5,0] } +{ "X": [5,1] } +{ "X": [5,2] } +{ "X": [5,3] } +{ "X": [5,4] } +{ "X": [5,5] } +{ "X": [5,6] } +;; +{ "X": [6,0] } +{ "X": [6,1] } +{ "X": [6,2] } +{ "X": [6,3] } +{ "X": [6,4] } +{ "X": [6,5] } +{ "X": [6,6] } diff --git a/testdata/range_03.lisp b/testdata/range_03.lisp new file mode 100644 index 0000000..fcdba30 --- /dev/null +++ b/testdata/range_03.lisp @@ -0,0 +1,3 @@ +(defcolumns X) + +(definrange (+ 1 X) 8) diff --git a/testdata/range_03.rejects b/testdata/range_03.rejects new file mode 100644 index 0000000..62bd528 --- /dev/null +++ b/testdata/range_03.rejects @@ -0,0 +1,68 @@ +{ "X": [7] } +{ "X": [8] } +{ "X": [9] } +{ "X": [10] } +;; +{ "X": [7,0] } +{ "X": [7,1] } +{ "X": [7,2] } +{ "X": [7,3] } +{ "X": [7,4] } +{ "X": [7,5] } +{ "X": [7,6] } +;; +{ "X": [0,7] } +{ "X": [1,7] } +{ "X": [2,7] } +{ "X": [3,7] } +{ "X": [4,7] } +{ "X": [5,7] } +{ "X": [6,7] } +;; +{ "X": [8,0] } +{ "X": [8,1] } +{ "X": [8,2] } +{ "X": [8,3] } +{ "X": [8,4] } +{ "X": [8,5] } +{ "X": [8,6] } +;; +{ "X": [0,8] } +{ "X": [1,8] } +{ "X": [2,8] } +{ "X": [3,8] } +{ "X": [4,8] } +{ "X": [5,8] } +{ "X": [6,8] } +;; +{ "X": [9,0] } +{ "X": [9,1] } +{ "X": [9,2] } +{ "X": [9,3] } +{ "X": [9,4] } +{ "X": [9,5] } +{ "X": [9,6] } +;; +{ "X": [0,9] } +{ "X": [1,9] } +{ "X": [2,9] } +{ "X": [3,9] } +{ "X": [4,9] } +{ "X": [5,9] } +{ "X": [6,9] } +;; +{ "X": [10,0] } +{ "X": [10,1] } +{ "X": [10,2] } +{ "X": [10,3] } +{ "X": [10,4] } +{ "X": [10,5] } +{ "X": [10,6] } +;; +{ "X": [0,10] } +{ "X": [1,10] } +{ "X": [2,10] } +{ "X": [3,10] } +{ "X": [4,10] } +{ "X": [5,10] } +{ "X": [6,10] } diff --git 
a/testdata/range_04.accepts b/testdata/range_04.accepts new file mode 100644 index 0000000..bd2d795 --- /dev/null +++ b/testdata/range_04.accepts @@ -0,0 +1,37 @@ +{ "X": [] } +;; +{ "X": [0] } +{ "X": [1] } +{ "X": [2] } +{ "X": [3] } +{ "X": [4] } +;; +{ "X": [0,0] } +{ "X": [0,1] } +{ "X": [0,2] } +{ "X": [0,3] } +{ "X": [0,4] } +;; +{ "X": [1,0] } +{ "X": [1,1] } +{ "X": [1,2] } +{ "X": [1,3] } +{ "X": [1,4] } +;; +{ "X": [2,0] } +{ "X": [2,1] } +{ "X": [2,2] } +{ "X": [2,3] } +{ "X": [2,4] } +;; +{ "X": [3,0] } +{ "X": [3,1] } +{ "X": [3,2] } +{ "X": [3,3] } +{ "X": [3,4] } +;; +{ "X": [4,0] } +{ "X": [4,1] } +{ "X": [4,2] } +{ "X": [4,3] } +{ "X": [4,4] } diff --git a/testdata/range_04.lisp b/testdata/range_04.lisp new file mode 100644 index 0000000..a47362f --- /dev/null +++ b/testdata/range_04.lisp @@ -0,0 +1,2 @@ +(defcolumns X) +(definrange X 5) diff --git a/testdata/range_04.rejects b/testdata/range_04.rejects new file mode 100644 index 0000000..0306a99 --- /dev/null +++ b/testdata/range_04.rejects @@ -0,0 +1,34 @@ +{ "X": [5] } +{ "X": [6] } +{ "X": [7] } +{ "X": [8] } +;; +{ "X": [5,0] } +{ "X": [5,1] } +{ "X": [5,2] } +{ "X": [5,3] } +{ "X": [5,4] } +;; +{ "X": [6,0] } +{ "X": [6,1] } +{ "X": [6,2] } +{ "X": [6,3] } +{ "X": [6,4] } +;; +{ "X": [7,0] } +{ "X": [7,1] } +{ "X": [7,2] } +{ "X": [7,3] } +{ "X": [7,4] } +;; +{ "X": [8,0] } +{ "X": [8,1] } +{ "X": [8,2] } +{ "X": [8,3] } +{ "X": [8,4] } +;; +{ "X": [9,0] } +{ "X": [9,1] } +{ "X": [9,2] } +{ "X": [9,3] } +{ "X": [9,4] } diff --git a/testdata/range_05.accepts b/testdata/range_05.accepts new file mode 100644 index 0000000..3fe37c2 --- /dev/null +++ b/testdata/range_05.accepts @@ -0,0 +1,82 @@ +{ "X": [], "Y": [] } +;; +{ "X": [0], "Y": [0] } +{ "X": [1], "Y": [0] } +{ "X": [2], "Y": [0] } +{ "X": [3], "Y": [0] } +{ "X": [4], "Y": [0] } +{ "X": [5], "Y": [0] } +{ "X": [6], "Y": [0] } +{ "X": [7], "Y": [0] } +;; +{ "X": [1], "Y": [1] } +{ "X": [2], "Y": [1] } +{ "X": [3], "Y": [1] } +{ "X": [4], 
"Y": [1] } +{ "X": [5], "Y": [1] } +{ "X": [6], "Y": [1] } +{ "X": [7], "Y": [1] } +{ "X": [8], "Y": [1] } +;; +{ "X": [2], "Y": [2] } +{ "X": [3], "Y": [2] } +{ "X": [4], "Y": [2] } +{ "X": [5], "Y": [2] } +{ "X": [6], "Y": [2] } +{ "X": [7], "Y": [2] } +{ "X": [8], "Y": [2] } +{ "X": [9], "Y": [2] } +;; +{ "X": [3], "Y": [3] } +{ "X": [4], "Y": [3] } +{ "X": [5], "Y": [3] } +{ "X": [6], "Y": [3] } +{ "X": [7], "Y": [3] } +{ "X": [8], "Y": [3] } +{ "X": [9], "Y": [3] } +{ "X": [10], "Y": [3] } +;; +{ "X": [4], "Y": [4] } +{ "X": [5], "Y": [4] } +{ "X": [6], "Y": [4] } +{ "X": [7], "Y": [4] } +{ "X": [8], "Y": [4] } +{ "X": [9], "Y": [4] } +{ "X": [10], "Y": [4] } +{ "X": [11], "Y": [4] } +;; +{ "X": [5], "Y": [5] } +{ "X": [6], "Y": [5] } +{ "X": [7], "Y": [5] } +{ "X": [8], "Y": [5] } +{ "X": [9], "Y": [5] } +{ "X": [10], "Y": [5] } +{ "X": [11], "Y": [5] } +{ "X": [12], "Y": [5] } +;; +{ "X": [6], "Y": [6] } +{ "X": [7], "Y": [6] } +{ "X": [8], "Y": [6] } +{ "X": [9], "Y": [6] } +{ "X": [10], "Y": [6] } +{ "X": [11], "Y": [6] } +{ "X": [12], "Y": [6] } +{ "X": [13], "Y": [6] } +;; +{ "X": [7], "Y": [7] } +{ "X": [8], "Y": [7] } +{ "X": [9], "Y": [7] } +{ "X": [10], "Y": [7] } +{ "X": [11], "Y": [7] } +{ "X": [12], "Y": [7] } +{ "X": [13], "Y": [7] } +{ "X": [14], "Y": [7] } +;; +{ "X": [8], "Y": [8] } +{ "X": [9], "Y": [8] } +{ "X": [10], "Y": [8] } +{ "X": [11], "Y": [8] } +{ "X": [12], "Y": [8] } +{ "X": [13], "Y": [8] } +{ "X": [14], "Y": [8] } +{ "X": [15], "Y": [8] } diff --git a/testdata/range_05.lisp b/testdata/range_05.lisp new file mode 100644 index 0000000..143da59 --- /dev/null +++ b/testdata/range_05.lisp @@ -0,0 +1,2 @@ +(defcolumns X Y) +(definrange (- X Y) 8) diff --git a/testdata/range_05.rejects b/testdata/range_05.rejects new file mode 100644 index 0000000..ec2f681 --- /dev/null +++ b/testdata/range_05.rejects @@ -0,0 +1,31 @@ +{ "X": [0], "Y": [1] } +{ "X": [0], "Y": [2] } +{ "X": [0], "Y": [3] } +;; +{ "X": [1], "Y": [2] } +{ "X": [1], "Y": 
[3] } +{ "X": [1], "Y": [4] } +;; +{ "X": [2], "Y": [3] } +{ "X": [2], "Y": [4] } +{ "X": [2], "Y": [5] } +;; +{ "X": [3], "Y": [4] } +{ "X": [3], "Y": [5] } +{ "X": [3], "Y": [6] } +;; +{ "X": [4], "Y": [5] } +{ "X": [4], "Y": [6] } +{ "X": [4], "Y": [7] } +;; +{ "X": [5], "Y": [6] } +{ "X": [5], "Y": [7] } +{ "X": [5], "Y": [8] } +;; +{ "X": [6], "Y": [7] } +{ "X": [6], "Y": [8] } +{ "X": [6], "Y": [9] } +;; +{ "X": [7], "Y": [8] } +{ "X": [7], "Y": [9] } +{ "X": [7], "Y": [10] }