From 1048f815abb1f27e9c84ab5b9568a3673c12a50a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=81lvaro=20Rodr=C3=ADguez?= Date: Tue, 13 Feb 2024 16:13:05 +0100 Subject: [PATCH 01/39] feat!: Restrict bit sizes (#4235) # Description Restricts allowed bit sizes for integer types to 1,8,32 and 64. ## Problem\* Resolves https://github.com/noir-lang/noir/issues/4162 ## Summary\* ## Additional Context ## Documentation\* Check one: - [ ] No documentation needed. - [x] Documentation included in this PR. - [ ] **[Exceptional Case]** Documentation to be submitted in a separate PR. # PR Checklist\* - [x] I have tested the changes locally. - [ ] I have formatted the changes with [Prettier](https://prettier.io/) and/or `cargo fmt` on default settings. --------- Co-authored-by: kevaundray --- aztec_macros/src/lib.rs | 5 +- .../src/ssa/ssa_gen/context.rs | 4 +- compiler/noirc_frontend/src/ast/mod.rs | 63 +++++++++++++++++-- .../src/hir/type_check/errors.rs | 3 +- .../noirc_frontend/src/hir/type_check/stmt.rs | 1 + compiler/noirc_frontend/src/hir_def/types.rs | 13 ++-- compiler/noirc_frontend/src/lexer/errors.rs | 10 --- compiler/noirc_frontend/src/lexer/lexer.rs | 2 +- compiler/noirc_frontend/src/lexer/token.rs | 8 +-- .../src/monomorphization/ast.rs | 7 ++- .../src/monomorphization/mod.rs | 15 ++--- compiler/noirc_frontend/src/parser/errors.rs | 13 ++-- compiler/noirc_frontend/src/parser/parser.rs | 22 +++---- cspell.json | 1 + .../docs/noir/concepts/data_types/integers.md | 9 +-- .../integer_literal_overflow/src/main.nr | 2 +- .../restricted_bit_sizes}/Nargo.toml | 3 +- .../restricted_bit_sizes/src/main.nr | 5 ++ .../regression_2854/Prover.toml | 1 - .../regression_2854/src/main.nr | 3 - tooling/noirc_abi/src/lib.rs | 2 +- 21 files changed, 114 insertions(+), 78 deletions(-) rename test_programs/{execution_success/regression_2854 => compile_failure/restricted_bit_sizes}/Nargo.toml (63%) create mode 100644 test_programs/compile_failure/restricted_bit_sizes/src/main.nr delete mode 
100644 test_programs/execution_success/regression_2854/Prover.toml delete mode 100644 test_programs/execution_success/regression_2854/src/main.nr diff --git a/aztec_macros/src/lib.rs b/aztec_macros/src/lib.rs index 0f054c262bf..51a8b5361a6 100644 --- a/aztec_macros/src/lib.rs +++ b/aztec_macros/src/lib.rs @@ -1129,7 +1129,10 @@ fn create_context(ty: &str, params: &[Param]) -> Result, AztecMac add_array_to_hasher( &id, &UnresolvedType { - typ: UnresolvedTypeData::Integer(Signedness::Unsigned, 32), + typ: UnresolvedTypeData::Integer( + Signedness::Unsigned, + noirc_frontend::IntegerBitSize::ThirtyTwo, + ), span: None, }, ) diff --git a/compiler/noirc_evaluator/src/ssa/ssa_gen/context.rs b/compiler/noirc_evaluator/src/ssa/ssa_gen/context.rs index 6ee7f312660..845ffd15413 100644 --- a/compiler/noirc_evaluator/src/ssa/ssa_gen/context.rs +++ b/compiler/noirc_evaluator/src/ssa/ssa_gen/context.rs @@ -219,8 +219,8 @@ impl<'a> FunctionContext<'a> { let element_types = Self::convert_type(element).flatten(); Type::Array(Rc::new(element_types), *len as usize) } - ast::Type::Integer(Signedness::Signed, bits) => Type::signed(*bits), - ast::Type::Integer(Signedness::Unsigned, bits) => Type::unsigned(*bits), + ast::Type::Integer(Signedness::Signed, bits) => Type::signed((*bits).into()), + ast::Type::Integer(Signedness::Unsigned, bits) => Type::unsigned((*bits).into()), ast::Type::Bool => Type::unsigned(1), ast::Type::String(len) => Type::Array(Rc::new(vec![Type::char()]), *len as usize), ast::Type::FmtString(_, _) => { diff --git a/compiler/noirc_frontend/src/ast/mod.rs b/compiler/noirc_frontend/src/ast/mod.rs index 1223f822af3..29edbaca594 100644 --- a/compiler/noirc_frontend/src/ast/mod.rs +++ b/compiler/noirc_frontend/src/ast/mod.rs @@ -28,6 +28,55 @@ use crate::{ }; use iter_extended::vecmap; +#[derive(Debug, PartialEq, Eq, Clone, Copy, Hash, Ord, PartialOrd)] +pub enum IntegerBitSize { + One, + Eight, + ThirtyTwo, + SixtyFour, +} + +impl IntegerBitSize { + pub fn 
allowed_sizes() -> Vec { + vec![Self::One, Self::Eight, Self::ThirtyTwo, Self::SixtyFour] + } +} + +impl From for u32 { + fn from(size: IntegerBitSize) -> u32 { + use IntegerBitSize::*; + match size { + One => 1, + Eight => 8, + ThirtyTwo => 32, + SixtyFour => 64, + } + } +} + +pub struct InvalidIntegerBitSizeError(pub u32); + +impl TryFrom for IntegerBitSize { + type Error = InvalidIntegerBitSizeError; + + fn try_from(value: u32) -> Result { + use IntegerBitSize::*; + match value { + 1 => Ok(One), + 8 => Ok(Eight), + 32 => Ok(ThirtyTwo), + 64 => Ok(SixtyFour), + _ => Err(InvalidIntegerBitSizeError(value)), + } + } +} + +impl core::fmt::Display for IntegerBitSize { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "{}", u32::from(*self)) + } +} + /// The parser parses types as 'UnresolvedType's which /// require name resolution to resolve any type names used /// for structs within, but are otherwise identical to Types. @@ -35,7 +84,7 @@ use iter_extended::vecmap; pub enum UnresolvedTypeData { FieldElement, Array(Option, Box), // [4]Witness = Array(4, Witness) - Integer(Signedness, u32), // u32 = Integer(unsigned, 32) + Integer(Signedness, IntegerBitSize), // u32 = Integer(unsigned, ThirtyTwo) Bool, Expression(UnresolvedTypeExpression), String(Option), @@ -197,11 +246,17 @@ impl UnresolvedType { } impl UnresolvedTypeData { - pub fn from_int_token(token: IntType) -> UnresolvedTypeData { + pub fn from_int_token( + token: IntType, + ) -> Result { use {IntType::*, UnresolvedTypeData::Integer}; match token { - Signed(num_bits) => Integer(Signedness::Signed, num_bits), - Unsigned(num_bits) => Integer(Signedness::Unsigned, num_bits), + Signed(num_bits) => { + Ok(Integer(Signedness::Signed, IntegerBitSize::try_from(num_bits)?)) + } + Unsigned(num_bits) => { + Ok(Integer(Signedness::Unsigned, IntegerBitSize::try_from(num_bits)?)) + } } } diff --git a/compiler/noirc_frontend/src/hir/type_check/errors.rs 
b/compiler/noirc_frontend/src/hir/type_check/errors.rs index 3967d7642f7..96d30100d8b 100644 --- a/compiler/noirc_frontend/src/hir/type_check/errors.rs +++ b/compiler/noirc_frontend/src/hir/type_check/errors.rs @@ -8,6 +8,7 @@ use crate::hir_def::expr::HirBinaryOp; use crate::hir_def::types::Type; use crate::BinaryOpKind; use crate::FunctionReturnType; +use crate::IntegerBitSize; use crate::Signedness; #[derive(Error, Debug, Clone, PartialEq, Eq)] @@ -67,7 +68,7 @@ pub enum TypeCheckError { #[error("Integers must have the same signedness LHS is {sign_x:?}, RHS is {sign_y:?}")] IntegerSignedness { sign_x: Signedness, sign_y: Signedness, span: Span }, #[error("Integers must have the same bit width LHS is {bit_width_x}, RHS is {bit_width_y}")] - IntegerBitWidth { bit_width_x: u32, bit_width_y: u32, span: Span }, + IntegerBitWidth { bit_width_x: IntegerBitSize, bit_width_y: IntegerBitSize, span: Span }, #[error("{kind} cannot be used in an infix operation")] InvalidInfixOp { kind: &'static str, span: Span }, #[error("{kind} cannot be used in a unary operation")] diff --git a/compiler/noirc_frontend/src/hir/type_check/stmt.rs b/compiler/noirc_frontend/src/hir/type_check/stmt.rs index 1bd6c16277b..d6a19bb74be 100644 --- a/compiler/noirc_frontend/src/hir/type_check/stmt.rs +++ b/compiler/noirc_frontend/src/hir/type_check/stmt.rs @@ -351,6 +351,7 @@ impl<'interner> TypeChecker<'interner> { HirExpression::Literal(HirLiteral::Integer(value, false)) => { let v = value.to_u128(); if let Type::Integer(_, bit_count) = annotated_type { + let bit_count: u32 = (*bit_count).into(); let max = 1 << bit_count; if v >= max { self.errors.push(TypeCheckError::OverflowingAssignment { diff --git a/compiler/noirc_frontend/src/hir_def/types.rs b/compiler/noirc_frontend/src/hir_def/types.rs index 30ca7054a77..14f8a8e8639 100644 --- a/compiler/noirc_frontend/src/hir_def/types.rs +++ b/compiler/noirc_frontend/src/hir_def/types.rs @@ -8,6 +8,7 @@ use std::{ use crate::{ 
hir::type_check::TypeCheckError, node_interner::{ExprId, NodeInterner, TraitId, TypeAliasId}, + IntegerBitSize, }; use iter_extended::vecmap; use noirc_errors::{Location, Span}; @@ -27,8 +28,8 @@ pub enum Type { Array(Box, Box), /// A primitive integer type with the given sign and bit count. - /// E.g. `u32` would be `Integer(Unsigned, 32)` - Integer(Signedness, u32), + /// E.g. `u32` would be `Integer(Unsigned, ThirtyTwo)` + Integer(Signedness, IntegerBitSize), /// The primitive `bool` type. Bool, @@ -538,7 +539,7 @@ impl Type { } pub fn default_range_loop_type() -> Type { - Type::Integer(Signedness::Unsigned, 64) + Type::Integer(Signedness::Unsigned, IntegerBitSize::SixtyFour) } pub fn type_variable(id: TypeVariableId) -> Type { @@ -1621,8 +1622,10 @@ impl From<&Type> for PrintableType { PrintableType::Array { length, typ: Box::new(typ.into()) } } Type::Integer(sign, bit_width) => match sign { - Signedness::Unsigned => PrintableType::UnsignedInteger { width: *bit_width }, - Signedness::Signed => PrintableType::SignedInteger { width: *bit_width }, + Signedness::Unsigned => { + PrintableType::UnsignedInteger { width: (*bit_width).into() } + } + Signedness::Signed => PrintableType::SignedInteger { width: (*bit_width).into() }, }, Type::TypeVariable(binding, TypeVariableKind::IntegerOrField) => { match &*binding.borrow() { diff --git a/compiler/noirc_frontend/src/lexer/errors.rs b/compiler/noirc_frontend/src/lexer/errors.rs index a2a4056f1d0..35a07c11e0a 100644 --- a/compiler/noirc_frontend/src/lexer/errors.rs +++ b/compiler/noirc_frontend/src/lexer/errors.rs @@ -17,8 +17,6 @@ pub enum LexerErrorKind { InvalidIntegerLiteral { span: Span, found: String }, #[error("{:?} is not a valid attribute", found)] MalformedFuncAttribute { span: Span, found: String }, - #[error("Integer type is larger than the maximum supported size of u127")] - TooManyBits { span: Span, max: u32, got: u32 }, #[error("Logical and used instead of bitwise and")] LogicalAnd { span: Span }, 
#[error("Unterminated block comment")] @@ -45,7 +43,6 @@ impl LexerErrorKind { LexerErrorKind::NotADoubleChar { span, .. } => *span, LexerErrorKind::InvalidIntegerLiteral { span, .. } => *span, LexerErrorKind::MalformedFuncAttribute { span, .. } => *span, - LexerErrorKind::TooManyBits { span, .. } => *span, LexerErrorKind::LogicalAnd { span } => *span, LexerErrorKind::UnterminatedBlockComment { span } => *span, LexerErrorKind::UnterminatedStringLiteral { span } => *span, @@ -85,13 +82,6 @@ impl LexerErrorKind { format!(" {found} is not a valid attribute"), *span, ), - LexerErrorKind::TooManyBits { span, max, got } => ( - "Integer literal too large".to_string(), - format!( - "The maximum number of bits needed to represent a field is {max}, This integer type needs {got} bits" - ), - *span, - ), LexerErrorKind::LogicalAnd { span } => ( "Noir has no logical-and (&&) operator since short-circuiting is much less efficient when compiling to circuits".to_string(), "Try `&` instead, or use `if` only if you require short-circuiting".to_string(), diff --git a/compiler/noirc_frontend/src/lexer/lexer.rs b/compiler/noirc_frontend/src/lexer/lexer.rs index fd8168e36c6..cf66ece0c30 100644 --- a/compiler/noirc_frontend/src/lexer/lexer.rs +++ b/compiler/noirc_frontend/src/lexer/lexer.rs @@ -307,7 +307,7 @@ impl<'a> Lexer<'a> { // Check if word an int type // if no error occurred, then it is either a valid integer type or it is not an int type - let parsed_token = IntType::lookup_int_type(&word, Span::inclusive(start, end))?; + let parsed_token = IntType::lookup_int_type(&word)?; // Check if it is an int type if let Some(int_type_token) = parsed_token { diff --git a/compiler/noirc_frontend/src/lexer/token.rs b/compiler/noirc_frontend/src/lexer/token.rs index 5d08ab03ad3..f7c07c5f5db 100644 --- a/compiler/noirc_frontend/src/lexer/token.rs +++ b/compiler/noirc_frontend/src/lexer/token.rs @@ -306,7 +306,7 @@ impl IntType { // XXX: Result // Is not the best API. 
We could split this into two functions. One that checks if the the // word is a integer, which only returns an Option - pub(crate) fn lookup_int_type(word: &str, span: Span) -> Result, LexerErrorKind> { + pub(crate) fn lookup_int_type(word: &str) -> Result, LexerErrorKind> { // Check if the first string is a 'u' or 'i' let is_signed = if word.starts_with('i') { @@ -324,12 +324,6 @@ impl IntType { Err(_) => return Ok(None), }; - let max_bits = FieldElement::max_num_bits() / 2; - - if str_as_u32 > max_bits { - return Err(LexerErrorKind::TooManyBits { span, max: max_bits, got: str_as_u32 }); - } - if is_signed { Ok(Some(Token::IntType(IntType::Signed(str_as_u32)))) } else { diff --git a/compiler/noirc_frontend/src/monomorphization/ast.rs b/compiler/noirc_frontend/src/monomorphization/ast.rs index 73e7ef372ab..e4e619d5d92 100644 --- a/compiler/noirc_frontend/src/monomorphization/ast.rs +++ b/compiler/noirc_frontend/src/monomorphization/ast.rs @@ -6,7 +6,8 @@ use noirc_errors::{ }; use crate::{ - hir_def::function::FunctionSignature, BinaryOpKind, Distinctness, Signedness, Visibility, + hir_def::function::FunctionSignature, BinaryOpKind, Distinctness, IntegerBitSize, Signedness, + Visibility, }; /// The monomorphized AST is expression-based, all statements are also @@ -217,8 +218,8 @@ pub struct Function { #[derive(Debug, PartialEq, Eq, Clone, Hash)] pub enum Type { Field, - Array(/*len:*/ u64, Box), // Array(4, Field) = [Field; 4] - Integer(Signedness, /*bits:*/ u32), // u32 = Integer(unsigned, 32) + Array(/*len:*/ u64, Box), // Array(4, Field) = [Field; 4] + Integer(Signedness, /*bits:*/ IntegerBitSize), // u32 = Integer(unsigned, ThirtyTwo) Bool, String(/*len:*/ u64), // String(4) = str[4] FmtString(/*len:*/ u64, Box), diff --git a/compiler/noirc_frontend/src/monomorphization/mod.rs b/compiler/noirc_frontend/src/monomorphization/mod.rs index 21c095eb877..62950c9d4f7 100644 --- a/compiler/noirc_frontend/src/monomorphization/mod.rs +++ 
b/compiler/noirc_frontend/src/monomorphization/mod.rs @@ -27,8 +27,8 @@ use crate::{ }, node_interner::{self, DefinitionKind, NodeInterner, StmtId, TraitImplKind, TraitMethodId}, token::FunctionAttribute, - ContractFunctionType, FunctionKind, Type, TypeBinding, TypeBindings, TypeVariable, - TypeVariableKind, UnaryOp, Visibility, + ContractFunctionType, FunctionKind, IntegerBitSize, Type, TypeBinding, TypeBindings, + TypeVariable, TypeVariableKind, UnaryOp, Visibility, }; use self::ast::{Definition, FuncId, Function, LocalId, Program}; @@ -354,6 +354,7 @@ impl<'interner> Monomorphizer<'interner> { match typ { ast::Type::Field => Literal(Integer(-value, typ, location)), ast::Type::Integer(_, bit_size) => { + let bit_size: u32 = bit_size.into(); let base = 1_u128 << bit_size; Literal(Integer(FieldElement::from(base) - value, typ, location)) } @@ -1109,19 +1110,19 @@ impl<'interner> Monomorphizer<'interner> { } "modulus_le_bits" => { let bits = FieldElement::modulus().to_radix_le(2); - Some(self.modulus_array_literal(bits, 1, location)) + Some(self.modulus_array_literal(bits, IntegerBitSize::One, location)) } "modulus_be_bits" => { let bits = FieldElement::modulus().to_radix_be(2); - Some(self.modulus_array_literal(bits, 1, location)) + Some(self.modulus_array_literal(bits, IntegerBitSize::One, location)) } "modulus_be_bytes" => { let bytes = FieldElement::modulus().to_bytes_be(); - Some(self.modulus_array_literal(bytes, 8, location)) + Some(self.modulus_array_literal(bytes, IntegerBitSize::Eight, location)) } "modulus_le_bytes" => { let bytes = FieldElement::modulus().to_bytes_le(); - Some(self.modulus_array_literal(bytes, 8, location)) + Some(self.modulus_array_literal(bytes, IntegerBitSize::Eight, location)) } _ => None, }; @@ -1133,7 +1134,7 @@ impl<'interner> Monomorphizer<'interner> { fn modulus_array_literal( &self, bytes: Vec, - arr_elem_bits: u32, + arr_elem_bits: IntegerBitSize, location: Location, ) -> ast::Expression { use ast::*; diff --git 
a/compiler/noirc_frontend/src/parser/errors.rs b/compiler/noirc_frontend/src/parser/errors.rs index 9158c68db72..43a1f96f13f 100644 --- a/compiler/noirc_frontend/src/parser/errors.rs +++ b/compiler/noirc_frontend/src/parser/errors.rs @@ -1,6 +1,7 @@ use crate::lexer::errors::LexerErrorKind; use crate::lexer::token::Token; use crate::Expression; +use crate::IntegerBitSize; use small_ord_set::SmallOrdSet; use thiserror::Error; @@ -40,8 +41,8 @@ pub enum ParserErrorReason { NoFunctionAttributesAllowedOnStruct, #[error("Assert statements can only accept string literals")] AssertMessageNotString, - #[error("Integer bit size {0} won't be supported")] - DeprecatedBitSize(u32), + #[error("Integer bit size {0} isn't supported")] + InvalidBitSize(u32), #[error("{0}")] Lexer(LexerErrorKind), } @@ -132,8 +133,6 @@ impl std::fmt::Display for ParserError { } } -pub(crate) static ALLOWED_INTEGER_BIT_SIZES: &[u32] = &[1, 8, 32, 64]; - impl From for Diagnostic { fn from(error: ParserError) -> Diagnostic { match error.reason { @@ -149,9 +148,9 @@ impl From for Diagnostic { "The 'comptime' keyword has been deprecated. 
It can be removed without affecting your program".into(), error.span, ), - ParserErrorReason::DeprecatedBitSize(bit_size) => Diagnostic::simple_warning( - format!("Use of deprecated bit size {}", bit_size), - format!("Bit sizes for integers will be restricted to {}", ALLOWED_INTEGER_BIT_SIZES.iter().map(|n| n.to_string()).collect::>().join(", ")), + ParserErrorReason::InvalidBitSize(bit_size) => Diagnostic::simple_error( + format!("Use of invalid bit size {}", bit_size), + format!("Allowed bit sizes for integers are {}", IntegerBitSize::allowed_sizes().iter().map(|n| n.to_string()).collect::>().join(", ")), error.span, ), ParserErrorReason::ExperimentalFeature(_) => Diagnostic::simple_warning( diff --git a/compiler/noirc_frontend/src/parser/parser.rs b/compiler/noirc_frontend/src/parser/parser.rs index b1ec18f5ec5..8bcd7670716 100644 --- a/compiler/noirc_frontend/src/parser/parser.rs +++ b/compiler/noirc_frontend/src/parser/parser.rs @@ -23,7 +23,6 @@ //! prevent other parsers from being tried afterward since there is no longer an error. Thus, they should //! be limited to cases like the above `fn` example where it is clear we shouldn't back out of the //! current parser to try alternative parsers in a `choice` expression. 
-use super::errors::ALLOWED_INTEGER_BIT_SIZES; use super::{ foldl_with_span, labels::ParsingRuleLabel, parameter_name_recovery, parameter_recovery, parenthesized, then_commit, then_commit_ignore, top_level_statement_recovery, ExprParser, @@ -36,7 +35,7 @@ use crate::ast::{ }; use crate::lexer::Lexer; use crate::parser::{force, ignore_then_commit, statement_recovery}; -use crate::token::{Attribute, Attributes, IntType, Keyword, SecondaryAttribute, Token, TokenKind}; +use crate::token::{Attribute, Attributes, Keyword, SecondaryAttribute, Token, TokenKind}; use crate::{ BinaryOp, BinaryOpKind, BlockExpression, ConstrainKind, ConstrainStatement, Distinctness, ForLoopStatement, ForRange, FunctionDefinition, FunctionReturnType, FunctionVisibility, Ident, @@ -1093,19 +1092,14 @@ fn int_type() -> impl NoirParser { Err(ParserError::expected_label(ParsingRuleLabel::IntegerType, unexpected, span)) } })) - .validate(|int_type, span, emit| { - let bit_size = match int_type.1 { - IntType::Signed(bit_size) | IntType::Unsigned(bit_size) => bit_size, - }; - if !ALLOWED_INTEGER_BIT_SIZES.contains(&bit_size) { - emit(ParserError::with_reason( - ParserErrorReason::DeprecatedBitSize(bit_size), - span, - )); - } - int_type + .validate(|(_, token), span, emit| { + UnresolvedTypeData::from_int_token(token) + .map(|data| data.with_span(span)) + .unwrap_or_else(|err| { + emit(ParserError::with_reason(ParserErrorReason::InvalidBitSize(err.0), span)); + UnresolvedType::error(span) + }) }) - .map_with_span(|(_, token), span| UnresolvedTypeData::from_int_token(token).with_span(span)) } fn named_type(type_parser: impl NoirParser) -> impl NoirParser { diff --git a/cspell.json b/cspell.json index 19e9a175ce0..2acca0633d3 100644 --- a/cspell.json +++ b/cspell.json @@ -106,6 +106,7 @@ "lvalue", "Maddiaa", "mathbb", + "memfs", "merkle", "metas", "minreq", diff --git a/docs/docs/noir/concepts/data_types/integers.md b/docs/docs/noir/concepts/data_types/integers.md index 30135d76e4a..4d58d96fed5 100644 
--- a/docs/docs/noir/concepts/data_types/integers.md +++ b/docs/docs/noir/concepts/data_types/integers.md @@ -5,7 +5,7 @@ keywords: [noir, integer types, methods, examples, arithmetic] sidebar_position: 1 --- -An integer type is a range constrained field type. The Noir frontend supports arbitrarily-sized, both unsigned and signed integer types. +An integer type is a range constrained field type. The Noir frontend supports both unsigned and signed integer types. The allowed sizes are 1, 8, 32 and 64 bits. :::info @@ -45,13 +45,6 @@ fn main() { The bit size determines the maximum and minimum range of value the integer type can store. For example, an `i8` variable can store a value in the range of -128 to 127 (i.e. $\\-2^{7}\\$ to $\\2^{7}-1\\$). -:::tip - -If you are using the default proving backend with Noir, both even (e.g. _u2_, _i2_) and odd (e.g. _u3_, _i3_) arbitrarily-sized integer types up to 127 bits (i.e. _u127_ and _i127_) are supported. - -::: - - ## 128 bits Unsigned Integers The built-in structure `U128` allows you to use 128-bit unsigned integers almost like a native integer type. 
However, there are some differences to keep in mind: diff --git a/test_programs/compile_failure/integer_literal_overflow/src/main.nr b/test_programs/compile_failure/integer_literal_overflow/src/main.nr index d89505c0085..e4d21b5c3b9 100644 --- a/test_programs/compile_failure/integer_literal_overflow/src/main.nr +++ b/test_programs/compile_failure/integer_literal_overflow/src/main.nr @@ -2,4 +2,4 @@ fn main() { foo(1234) } -fn foo(_x: u4) {} +fn foo(_x: u8) {} diff --git a/test_programs/execution_success/regression_2854/Nargo.toml b/test_programs/compile_failure/restricted_bit_sizes/Nargo.toml similarity index 63% rename from test_programs/execution_success/regression_2854/Nargo.toml rename to test_programs/compile_failure/restricted_bit_sizes/Nargo.toml index fb2b3c42fdd..36f8253e8e7 100644 --- a/test_programs/execution_success/regression_2854/Nargo.toml +++ b/test_programs/compile_failure/restricted_bit_sizes/Nargo.toml @@ -1,6 +1,5 @@ [package] -name = "regression_2854" +name = "restricted_bit_sizes" type = "bin" authors = [""] - [dependencies] diff --git a/test_programs/compile_failure/restricted_bit_sizes/src/main.nr b/test_programs/compile_failure/restricted_bit_sizes/src/main.nr new file mode 100644 index 00000000000..01e72bfcfd7 --- /dev/null +++ b/test_programs/compile_failure/restricted_bit_sizes/src/main.nr @@ -0,0 +1,5 @@ +use dep::std::assert_constant; + +fn main() -> pub u63 { + 5 +} diff --git a/test_programs/execution_success/regression_2854/Prover.toml b/test_programs/execution_success/regression_2854/Prover.toml deleted file mode 100644 index 07890234a19..00000000000 --- a/test_programs/execution_success/regression_2854/Prover.toml +++ /dev/null @@ -1 +0,0 @@ -x = "3" diff --git a/test_programs/execution_success/regression_2854/src/main.nr b/test_programs/execution_success/regression_2854/src/main.nr deleted file mode 100644 index eccff8225b6..00000000000 --- a/test_programs/execution_success/regression_2854/src/main.nr +++ /dev/null @@ -1,3 +0,0 
@@ -fn main(x: Field) -> pub i127 { - x as i127 -} diff --git a/tooling/noirc_abi/src/lib.rs b/tooling/noirc_abi/src/lib.rs index 1fc257c1676..2560e46b01d 100644 --- a/tooling/noirc_abi/src/lib.rs +++ b/tooling/noirc_abi/src/lib.rs @@ -142,7 +142,7 @@ impl AbiType { Signedness::Signed => Sign::Signed, }; - Self::Integer { sign, width: *bit_width } + Self::Integer { sign, width: (*bit_width).into() } } Type::TypeVariable(binding, TypeVariableKind::IntegerOrField) => { match &*binding.borrow() { From ea47d4a67c6a18e4a7d3a49079d9eb24a1026a25 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=81lvaro=20Rodr=C3=ADguez?= Date: Tue, 13 Feb 2024 16:17:27 +0100 Subject: [PATCH 02/39] fix: Brillig range check with consistent bit size (#4357) # Description ## Problem\* Partial work towards #4275 ## Summary\* ## Additional Context Tested in aztec-packages here https://github.com/AztecProtocol/aztec-packages/pull/4556 ## Documentation\* Check one: - [x] No documentation needed. - [ ] Documentation included in this PR. - [ ] **[Exceptional Case]** Documentation to be submitted in a separate PR. # PR Checklist\* - [x] I have tested the changes locally. - [x] I have formatted the changes with [Prettier](https://prettier.io/) and/or `cargo fmt` on default settings. 
--- .../src/brillig/brillig_gen/brillig_block.rs | 20 ++++++++++++------- 1 file changed, 13 insertions(+), 7 deletions(-) diff --git a/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs b/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs index 84b719f29aa..7697d7e65fa 100644 --- a/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs +++ b/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs @@ -619,23 +619,29 @@ impl<'block> BrilligBlock<'block> { ); } Instruction::RangeCheck { value, max_bit_size, assert_message } => { - let left = self.convert_ssa_register_value(*value, dfg); - let max = BigUint::from(2_u128).pow(*max_bit_size); - let right = self.brillig_context.allocate_register(); - self.brillig_context.const_instruction( - right, + let value = self.convert_ssa_register_value(*value, dfg); + // Cast original value to field + let left = self.brillig_context.allocate_register(); + self.convert_cast(left, value, &Type::field()); + + // Create a field constant with the max + let max = BigUint::from(2_u128).pow(*max_bit_size) - BigUint::from(1_u128); + let right = self.brillig_context.make_constant( FieldElement::from_be_bytes_reduce(&max.to_bytes_be()).into(), FieldElement::max_num_bits(), ); + // Check if lte max let brillig_binary_op = BrilligBinaryOp::Integer { - op: BinaryIntOp::LessThan, - bit_size: max_bit_size + 1, + op: BinaryIntOp::LessThanEquals, + bit_size: FieldElement::max_num_bits(), }; let condition = self.brillig_context.allocate_register(); self.brillig_context.binary_instruction(left, right, condition, brillig_binary_op); + self.brillig_context.constrain_instruction(condition, assert_message.clone()); self.brillig_context.deallocate_register(condition); + self.brillig_context.deallocate_register(left); self.brillig_context.deallocate_register(right); } Instruction::IncrementRc { value } => { From 20e6bf12c0eb79e70092216c862d6bada9daf1bf Mon Sep 17 00:00:00 2001 From: Tom French 
<15848336+TomAFrench@users.noreply.github.com> Date: Tue, 13 Feb 2024 15:19:16 +0000 Subject: [PATCH 03/39] chore: add `memfs` to cspell (#4358) # Description ## Problem\* Resolves ## Summary\* This fixes an issue where some codegened docs has an unrecognised word in it. ## Additional Context ## Documentation\* Check one: - [ ] No documentation needed. - [x] Documentation included in this PR. - [ ] **[Exceptional Case]** Documentation to be submitted in a separate PR. # PR Checklist\* - [x] I have tested the changes locally. - [x] I have formatted the changes with [Prettier](https://prettier.io/) and/or `cargo fmt` on default settings. From d202ee6a3b10852417152e5c1bb4f66a31570cb0 Mon Sep 17 00:00:00 2001 From: Aztec Bot <49558828+AztecBot@users.noreply.github.com> Date: Tue, 13 Feb 2024 10:28:07 -0500 Subject: [PATCH 04/39] feat: Sync from aztec-packages (#4359) BEGIN_COMMIT_OVERRIDE chore: switch noir pull to master branch (https://github.com/AztecProtocol/aztec-packages/pull/4581) END_COMMIT_OVERRIDE --- .../array_dynamic_blackbox_input/Nargo.toml | 7 ----- .../array_dynamic_blackbox_input/Prover.toml | 4 --- .../array_dynamic_blackbox_input/src/main.nr | 27 ------------------- 3 files changed, 38 deletions(-) delete mode 100644 test_programs/execution_success/array_dynamic_blackbox_input/Nargo.toml delete mode 100644 test_programs/execution_success/array_dynamic_blackbox_input/Prover.toml delete mode 100644 test_programs/execution_success/array_dynamic_blackbox_input/src/main.nr diff --git a/test_programs/execution_success/array_dynamic_blackbox_input/Nargo.toml b/test_programs/execution_success/array_dynamic_blackbox_input/Nargo.toml deleted file mode 100644 index 15d437a8f8d..00000000000 --- a/test_programs/execution_success/array_dynamic_blackbox_input/Nargo.toml +++ /dev/null @@ -1,7 +0,0 @@ -[package] -name = "array_dynamic_blackbox_input" -type = "bin" -authors = [""] -compiler_version = ">=0.23.0" - -[dependencies] \ No newline at end of file diff --git 
a/test_programs/execution_success/array_dynamic_blackbox_input/Prover.toml b/test_programs/execution_success/array_dynamic_blackbox_input/Prover.toml deleted file mode 100644 index 3797c0b0038..00000000000 --- a/test_programs/execution_success/array_dynamic_blackbox_input/Prover.toml +++ /dev/null @@ -1,4 +0,0 @@ -index = "1" -leaf = ["51", "109", "224", "175", "60", "42", "79", "222", "117", "255", "174", "79", "126", "242", "74", "34", "100", "35", "20", "200", "109", "89", "191", "219", "41", "10", "118", "217", "165", "224", "215", "109"] -path = ["0", "1", "2", "3", "4", "5", "6", "7", "8", "9", "10", "11", "12", "13", "14", "15", "16", "17", "18", "19", "20", "21", "22", "23", "24", "25", "26", "27", "28", "29", "30", "31", "32", "33", "34", "35", "36", "37", "38", "39", "40", "41", "42", "43", "44", "45", "46", "47", "48", "49", "50", "51", "52", "53", "54", "55", "56", "57", "58", "59", "60", "61", "62", "63"] -root = ["243", "212", "223", "132", "202", "119", "167", "60", "162", "158", "66", "192", "88", "114", "34", "191", "202", "195", "19", "102", "150", "88", "222", "176", "35", "51", "110", "97", "204", "224", "253", "171"] diff --git a/test_programs/execution_success/array_dynamic_blackbox_input/src/main.nr b/test_programs/execution_success/array_dynamic_blackbox_input/src/main.nr deleted file mode 100644 index aabf7fc9d5c..00000000000 --- a/test_programs/execution_success/array_dynamic_blackbox_input/src/main.nr +++ /dev/null @@ -1,27 +0,0 @@ -fn main(leaf: [u8; 32], path: [u8; 64], index: u32, root: [u8; 32]) { - compute_root(leaf, path, index, root); -} - -fn compute_root(leaf: [u8; 32], path: [u8; 64], _index: u32, root: [u8; 32]) { - let mut current = leaf; - let mut index = _index; - - for i in 0..2 { - let mut hash_input = [0; 64]; - let offset = i * 32; - let is_right = (index & 1) != 0; - let a = if is_right { 32 } else { 0 }; - let b = if is_right { 0 } else { 32 }; - - for j in 0..32 { - hash_input[j + a] = current[j]; - hash_input[j + b] 
= path[offset + j]; - } - - current = dep::std::hash::sha256(hash_input); - index = index >> 1; - } - - // Regression for issue #4258 - assert(root == current); -} \ No newline at end of file From c956be870fb47403a6da6585fce6bea2d40ee268 Mon Sep 17 00:00:00 2001 From: Michael J Klein Date: Tue, 13 Feb 2024 11:05:22 -0500 Subject: [PATCH 05/39] feat: TypeVariableKind for just Integers (#4118) # Description Add a new `TypeVariableKind` specifically for Integers: until this PR, `IntegerOrField` is used. ## Problem\* May resolve https://github.com/noir-lang/noir/issues/3639, https://github.com/noir-lang/noir/pull/4290 ## Summary\* Adds the new `TypeVariableKind` - [x] Unify - [x] Test - [ ] Docs ## Additional Context ## Documentation\* Check one: - [ ] No documentation needed. - [x] Documentation included in this PR. - [ ] **[Exceptional Case]** Documentation to be submitted in a separate PR. # PR Checklist\* - [x] I have tested the changes locally. - [x] I have formatted the changes with [Prettier](https://prettier.io/) and/or `cargo fmt` on default settings. --------- Co-authored-by: Tom French <15848336+TomAFrench@users.noreply.github.com> Co-authored-by: jfecher --- .../noirc_frontend/src/hir/type_check/expr.rs | 21 ++++- compiler/noirc_frontend/src/hir_def/types.rs | 78 ++++++++++++++++--- .../src/monomorphization/mod.rs | 14 ++-- compiler/noirc_frontend/src/node_interner.rs | 1 + tooling/noirc_abi/src/lib.rs | 11 ++- 5 files changed, 99 insertions(+), 26 deletions(-) diff --git a/compiler/noirc_frontend/src/hir/type_check/expr.rs b/compiler/noirc_frontend/src/hir/type_check/expr.rs index b3180e0dd20..62099d2d6a6 100644 --- a/compiler/noirc_frontend/src/hir/type_check/expr.rs +++ b/compiler/noirc_frontend/src/hir/type_check/expr.rs @@ -565,6 +565,7 @@ impl<'interner> TypeChecker<'interner> { Type::Integer(..) 
| Type::FieldElement | Type::TypeVariable(_, TypeVariableKind::IntegerOrField) + | Type::TypeVariable(_, TypeVariableKind::Integer) | Type::Bool => (), Type::TypeVariable(_, _) => { @@ -805,7 +806,7 @@ impl<'interner> TypeChecker<'interner> { // Matches on TypeVariable must be first to follow any type // bindings. - (TypeVariable(int, _), other) | (other, TypeVariable(int, _)) => { + (TypeVariable(int, int_kind), other) | (other, TypeVariable(int, int_kind)) => { if let TypeBinding::Bound(binding) = &*int.borrow() { return self.comparator_operand_type_rules(other, binding, op, span); } @@ -823,7 +824,13 @@ impl<'interner> TypeChecker<'interner> { } let mut bindings = TypeBindings::new(); - if other.try_bind_to_polymorphic_int(int, &mut bindings).is_ok() + if other + .try_bind_to_polymorphic_int( + int, + &mut bindings, + *int_kind == TypeVariableKind::Integer, + ) + .is_ok() || other == &Type::Error { Type::apply_type_bindings(bindings); @@ -1081,7 +1088,7 @@ impl<'interner> TypeChecker<'interner> { // Matches on TypeVariable must be first so that we follow any type // bindings. 
- (TypeVariable(int, _), other) | (other, TypeVariable(int, _)) => { + (TypeVariable(int, int_kind), other) | (other, TypeVariable(int, int_kind)) => { if let TypeBinding::Bound(binding) = &*int.borrow() { return self.infix_operand_type_rules(binding, op, other, span); } @@ -1114,7 +1121,13 @@ impl<'interner> TypeChecker<'interner> { } let mut bindings = TypeBindings::new(); - if other.try_bind_to_polymorphic_int(int, &mut bindings).is_ok() + if other + .try_bind_to_polymorphic_int( + int, + &mut bindings, + *int_kind == TypeVariableKind::Integer, + ) + .is_ok() || other == &Type::Error { Type::apply_type_bindings(bindings); diff --git a/compiler/noirc_frontend/src/hir_def/types.rs b/compiler/noirc_frontend/src/hir_def/types.rs index 14f8a8e8639..1f8f236a818 100644 --- a/compiler/noirc_frontend/src/hir_def/types.rs +++ b/compiler/noirc_frontend/src/hir_def/types.rs @@ -442,6 +442,10 @@ pub enum TypeVariableKind { /// type annotations on each integer literal. IntegerOrField, + /// A generic integer type. This is a more specific kind of TypeVariable + /// that can only be bound to Type::Integer, or other polymorphic integers. + Integer, + /// A potentially constant array size. This will only bind to itself, Type::NotConstant, or /// Type::Constant(n) with a matching size. This defaults to Type::Constant(n) if still unbound /// during monomorphization. 
@@ -747,6 +751,13 @@ impl std::fmt::Display for Type { Signedness::Unsigned => write!(f, "u{num_bits}"), }, Type::TypeVariable(var, TypeVariableKind::Normal) => write!(f, "{}", var.borrow()), + Type::TypeVariable(binding, TypeVariableKind::Integer) => { + if let TypeBinding::Unbound(_) = &*binding.borrow() { + write!(f, "{}", TypeVariableKind::Integer.default_type()) + } else { + write!(f, "{}", binding.borrow()) + } + } Type::TypeVariable(binding, TypeVariableKind::IntegerOrField) => { if let TypeBinding::Unbound(_) = &*binding.borrow() { // Show a Field by default if this TypeVariableKind::IntegerOrField is unbound, since that is @@ -911,6 +922,7 @@ impl Type { Ok(()) } TypeVariableKind::IntegerOrField => Err(UnificationError), + TypeVariableKind::Integer => Err(UnificationError), }, } } @@ -925,6 +937,7 @@ impl Type { &self, var: &TypeVariable, bindings: &mut TypeBindings, + only_integer: bool, ) -> Result<(), UnificationError> { let target_id = match &*var.borrow() { TypeBinding::Bound(_) => unreachable!(), @@ -940,7 +953,30 @@ impl Type { Type::TypeVariable(self_var, TypeVariableKind::IntegerOrField) => { let borrow = self_var.borrow(); match &*borrow { - TypeBinding::Bound(typ) => typ.try_bind_to_polymorphic_int(var, bindings), + TypeBinding::Bound(typ) => { + typ.try_bind_to_polymorphic_int(var, bindings, only_integer) + } + // Avoid infinitely recursive bindings + TypeBinding::Unbound(id) if *id == target_id => Ok(()), + TypeBinding::Unbound(new_target_id) => { + if only_integer { + // Integer is more specific than IntegerOrField so we bind the type + // variable to Integer instead. 
+ let clone = Type::TypeVariable(var.clone(), TypeVariableKind::Integer); + bindings.insert(*new_target_id, (self_var.clone(), clone)); + } else { + bindings.insert(target_id, (var.clone(), this.clone())); + } + Ok(()) + } + } + } + Type::TypeVariable(self_var, TypeVariableKind::Integer) => { + let borrow = self_var.borrow(); + match &*borrow { + TypeBinding::Bound(typ) => { + typ.try_bind_to_polymorphic_int(var, bindings, only_integer) + } // Avoid infinitely recursive bindings TypeBinding::Unbound(id) if *id == target_id => Ok(()), TypeBinding::Unbound(_) => { @@ -949,18 +985,23 @@ impl Type { } } } - Type::TypeVariable(binding, TypeVariableKind::Normal) => { - let borrow = binding.borrow(); + Type::TypeVariable(self_var, TypeVariableKind::Normal) => { + let borrow = self_var.borrow(); match &*borrow { - TypeBinding::Bound(typ) => typ.try_bind_to_polymorphic_int(var, bindings), + TypeBinding::Bound(typ) => { + typ.try_bind_to_polymorphic_int(var, bindings, only_integer) + } // Avoid infinitely recursive bindings TypeBinding::Unbound(id) if *id == target_id => Ok(()), TypeBinding::Unbound(new_target_id) => { - // IntegerOrField is more specific than TypeVariable so we bind the type - // variable to IntegerOrField instead. 
- let clone = - Type::TypeVariable(var.clone(), TypeVariableKind::IntegerOrField); - bindings.insert(*new_target_id, (binding.clone(), clone)); + // Bind to the most specific type variable kind + let clone_kind = if only_integer { + TypeVariableKind::Integer + } else { + TypeVariableKind::IntegerOrField + }; + let clone = Type::TypeVariable(var.clone(), clone_kind); + bindings.insert(*new_target_id, (self_var.clone(), clone)); Ok(()) } } @@ -1050,7 +1091,16 @@ impl Type { (TypeVariable(var, Kind::IntegerOrField), other) | (other, TypeVariable(var, Kind::IntegerOrField)) => { other.try_unify_to_type_variable(var, bindings, |bindings| { - other.try_bind_to_polymorphic_int(var, bindings) + let only_integer = false; + other.try_bind_to_polymorphic_int(var, bindings, only_integer) + }) + } + + (TypeVariable(var, Kind::Integer), other) + | (other, TypeVariable(var, Kind::Integer)) => { + other.try_unify_to_type_variable(var, bindings, |bindings| { + let only_integer = true; + other.try_bind_to_polymorphic_int(var, bindings, only_integer) }) } @@ -1599,6 +1649,7 @@ impl TypeVariableKind { pub(crate) fn default_type(&self) -> Type { match self { TypeVariableKind::IntegerOrField | TypeVariableKind::Normal => Type::default_int_type(), + TypeVariableKind::Integer => Type::default_range_loop_type(), TypeVariableKind::Constant(length) => Type::Constant(*length), } } @@ -1627,6 +1678,10 @@ impl From<&Type> for PrintableType { } Signedness::Signed => PrintableType::SignedInteger { width: (*bit_width).into() }, }, + Type::TypeVariable(binding, TypeVariableKind::Integer) => match &*binding.borrow() { + TypeBinding::Bound(typ) => typ.into(), + TypeBinding::Unbound(_) => Type::default_range_loop_type().into(), + }, Type::TypeVariable(binding, TypeVariableKind::IntegerOrField) => { match &*binding.borrow() { TypeBinding::Bound(typ) => typ.into(), @@ -1685,6 +1740,9 @@ impl std::fmt::Debug for Type { Type::TypeVariable(binding, TypeVariableKind::IntegerOrField) => { write!(f, 
"IntOrField{:?}", binding) } + Type::TypeVariable(binding, TypeVariableKind::Integer) => { + write!(f, "Int{:?}", binding) + } Type::TypeVariable(binding, TypeVariableKind::Constant(n)) => { write!(f, "{}{:?}", n, binding) } diff --git a/compiler/noirc_frontend/src/monomorphization/mod.rs b/compiler/noirc_frontend/src/monomorphization/mod.rs index 62950c9d4f7..e9adf26ec98 100644 --- a/compiler/noirc_frontend/src/monomorphization/mod.rs +++ b/compiler/noirc_frontend/src/monomorphization/mod.rs @@ -803,12 +803,14 @@ impl<'interner> Monomorphizer<'interner> { // Default any remaining unbound type variables. // This should only happen if the variable in question is unused // and within a larger generic type. - let default = - if self.is_range_loop && matches!(kind, TypeVariableKind::IntegerOrField) { - Type::default_range_loop_type() - } else { - kind.default_type() - }; + let default = if self.is_range_loop + && (matches!(kind, TypeVariableKind::IntegerOrField) + || matches!(kind, TypeVariableKind::Integer)) + { + Type::default_range_loop_type() + } else { + kind.default_type() + }; let monomorphized_default = self.convert_type(&default); binding.bind(default); diff --git a/compiler/noirc_frontend/src/node_interner.rs b/compiler/noirc_frontend/src/node_interner.rs index 0051c1b4f5f..7420d4598d9 100644 --- a/compiler/noirc_frontend/src/node_interner.rs +++ b/compiler/noirc_frontend/src/node_interner.rs @@ -1700,6 +1700,7 @@ fn get_type_method_key(typ: &Type) -> Option { Type::Array(_, _) => Some(Array), Type::Integer(_, _) => Some(FieldOrInt), Type::TypeVariable(_, TypeVariableKind::IntegerOrField) => Some(FieldOrInt), + Type::TypeVariable(_, TypeVariableKind::Integer) => Some(FieldOrInt), Type::Bool => Some(Bool), Type::String(_) => Some(String), Type::FmtString(_, _) => Some(FmtString), diff --git a/tooling/noirc_abi/src/lib.rs b/tooling/noirc_abi/src/lib.rs index 2560e46b01d..52a3e3a19e9 100644 --- a/tooling/noirc_abi/src/lib.rs +++ b/tooling/noirc_abi/src/lib.rs @@ 
-144,12 +144,11 @@ impl AbiType { Self::Integer { sign, width: (*bit_width).into() } } - Type::TypeVariable(binding, TypeVariableKind::IntegerOrField) => { - match &*binding.borrow() { - TypeBinding::Bound(typ) => Self::from_type(context, typ), - TypeBinding::Unbound(_) => Self::from_type(context, &Type::default_int_type()), - } - } + Type::TypeVariable(binding, TypeVariableKind::IntegerOrField) + | Type::TypeVariable(binding, TypeVariableKind::Integer) => match &*binding.borrow() { + TypeBinding::Bound(typ) => Self::from_type(context, typ), + TypeBinding::Unbound(_) => Self::from_type(context, &Type::default_int_type()), + }, Type::Bool => Self::Boolean, Type::String(size) => { let size = size From 39af6cc453be428a05d64f63ed7ae9a8c48b5b21 Mon Sep 17 00:00:00 2001 From: Tom French <15848336+TomAFrench@users.noreply.github.com> Date: Tue, 13 Feb 2024 19:00:14 +0000 Subject: [PATCH 06/39] chore(ci): bump node to 18.19.0 (#4350) # Description ## Problem\* Resolves ## Summary\* Bumps the node version used in CI to the latest v18 version. ## Additional Context ## Documentation\* Check one: - [ ] No documentation needed. - [ ] Documentation included in this PR. - [ ] **[Exceptional Case]** Documentation to be submitted in a separate PR. # PR Checklist\* - [ ] I have tested the changes locally. - [ ] I have formatted the changes with [Prettier](https://prettier.io/) and/or `cargo fmt` on default settings. 
--- .github/actions/setup/action.yml | 2 +- .github/workflows/release.yml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/actions/setup/action.yml b/.github/actions/setup/action.yml index b265a63d29a..d0e83dedf67 100644 --- a/.github/actions/setup/action.yml +++ b/.github/actions/setup/action.yml @@ -7,7 +7,7 @@ runs: - uses: actions/setup-node@v4 id: node with: - node-version: 18.17.1 + node-version: 18.19.0 cache: 'yarn' cache-dependency-path: 'yarn.lock' diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 71a0ab6d894..83e8e479181 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -45,7 +45,7 @@ jobs: - uses: actions/setup-node@v3 with: - node-version: 18.17.1 + node-version: 18.19.0 cache: 'yarn' cache-dependency-path: 'yarn.lock' From c44ef14847a436733206b6dd9590a7ab214ecd97 Mon Sep 17 00:00:00 2001 From: jfecher Date: Tue, 13 Feb 2024 13:55:37 -0600 Subject: [PATCH 07/39] feat: Allow type aliases to reference other aliases (#4353) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit # Description ## Problem\* Resolves ## Summary\* Allows type aliases to reference other aliases, using the dependency graph to error if there are ever cycles. To prevent infinite recursion in the type checker, aliases now have their own `Type::Alias` node which is used to set the inner aliased type to `Type::Error` in the case of a cycle to break the cycle. ## Additional Context Example error: ``` error: Dependency cycle found ┌─ /home/user/Code/Noir/noir/short/src/main.nr:2:1 │ 2 │ type B = A; │ ---------- 'B' recursively depends on itself: B -> A -> B │ ``` ## Documentation\* Check one: - [ ] No documentation needed. - [x] Documentation included in this PR. - [ ] **[Exceptional Case]** Documentation to be submitted in a separate PR. # PR Checklist\* - [x] I have tested the changes locally. 
- [x] I have formatted the changes with [Prettier](https://prettier.io/) and/or `cargo fmt` on default settings. --------- Co-authored-by: kevaundray --- .../src/hir/resolution/resolver.rs | 44 +++++--- .../src/hir/resolution/type_aliases.rs | 6 +- .../noirc_frontend/src/hir/type_check/expr.rs | 8 ++ .../noirc_frontend/src/hir/type_check/stmt.rs | 6 +- compiler/noirc_frontend/src/hir_def/types.rs | 106 ++++++++++++++---- .../src/monomorphization/mod.rs | 2 + compiler/noirc_frontend/src/node_interner.rs | 30 +++-- .../noirc_frontend/src/resolve_locations.rs | 4 +- compiler/noirc_frontend/src/tests.rs | 22 ++++ docs/docs/noir/concepts/data_types/index.md | 14 +++ tooling/noirc_abi/src/lib.rs | 3 +- 11 files changed, 190 insertions(+), 55 deletions(-) diff --git a/compiler/noirc_frontend/src/hir/resolution/resolver.rs b/compiler/noirc_frontend/src/hir/resolution/resolver.rs index bef5e113428..d4aae133b35 100644 --- a/compiler/noirc_frontend/src/hir/resolution/resolver.rs +++ b/compiler/noirc_frontend/src/hir/resolution/resolver.rs @@ -29,7 +29,7 @@ use crate::hir::def_map::{LocalModuleId, ModuleDefId, TryFromModuleDefId, MAIN_F use crate::hir_def::stmt::{HirAssignStatement, HirForStatement, HirLValue, HirPattern}; use crate::node_interner::{ DefinitionId, DefinitionKind, DependencyId, ExprId, FuncId, GlobalId, NodeInterner, StmtId, - StructId, TraitId, TraitImplId, TraitMethodId, + StructId, TraitId, TraitImplId, TraitMethodId, TypeAliasId, }; use crate::{ hir::{def_map::CrateDefMap, resolution::path_resolver::PathResolver}, @@ -39,9 +39,9 @@ use crate::{ use crate::{ ArrayLiteral, ContractFunctionType, Distinctness, ForRange, FunctionDefinition, FunctionReturnType, FunctionVisibility, Generics, LValue, NoirStruct, NoirTypeAlias, Param, - Path, PathKind, Pattern, Shared, StructType, Type, TypeAliasType, TypeVariable, - TypeVariableKind, UnaryOp, UnresolvedGenerics, UnresolvedTraitConstraint, UnresolvedType, - UnresolvedTypeData, UnresolvedTypeExpression, Visibility, 
ERROR_IDENT, + Path, PathKind, Pattern, Shared, StructType, Type, TypeAlias, TypeVariable, TypeVariableKind, + UnaryOp, UnresolvedGenerics, UnresolvedTraitConstraint, UnresolvedType, UnresolvedTypeData, + UnresolvedTypeExpression, Visibility, ERROR_IDENT, }; use fm::FileId; use iter_extended::vecmap; @@ -573,16 +573,19 @@ impl<'a> Resolver<'a> { let span = path.span(); let mut args = vecmap(args, |arg| self.resolve_type_inner(arg, new_variables)); - if let Some(type_alias_type) = self.lookup_type_alias(path.clone()) { - let expected_generic_count = type_alias_type.generics.len(); - let type_alias_string = type_alias_type.to_string(); - let id = type_alias_type.id; + if let Some(type_alias) = self.lookup_type_alias(path.clone()) { + let type_alias = type_alias.borrow(); + let expected_generic_count = type_alias.generics.len(); + let type_alias_string = type_alias.to_string(); + let id = type_alias.id; self.verify_generics_count(expected_generic_count, &mut args, span, || { type_alias_string }); - let result = self.interner.get_type_alias(id).get_type(&args); + if let Some(item) = self.current_item { + self.interner.add_type_alias_dependency(item, id); + } // Collecting Type Alias references [Location]s to be used by LSP in order // to resolve the definition of the type alias @@ -593,9 +596,8 @@ impl<'a> Resolver<'a> { // equal to another type alias. Fixing this fully requires an analysis to create a DFG // of definition ordering, but for now we have an explicit check here so that we at // least issue an error that the type was not found instead of silently passing. 
- if result != Type::Error { - return result; - } + let alias = self.interner.get_type_alias(id); + return Type::Alias(alias, args); } match self.lookup_struct_or_error(path) { @@ -752,12 +754,15 @@ impl<'a> Resolver<'a> { resolved_type } - pub fn resolve_type_aliases( + pub fn resolve_type_alias( mut self, unresolved: NoirTypeAlias, + alias_id: TypeAliasId, ) -> (Type, Generics, Vec) { let generics = self.add_generics(&unresolved.generics); self.resolve_local_globals(); + + self.current_item = Some(DependencyId::Alias(alias_id)); let typ = self.resolve_type(unresolved.typ); (typ, generics, self.errors) @@ -1120,6 +1125,17 @@ impl<'a> Resolver<'a> { } } } + Type::Alias(alias, generics) => { + for (i, generic) in generics.iter().enumerate() { + if let Type::NamedGeneric(type_variable, name) = generic { + if alias.borrow().generic_is_numeric(i) { + found.insert(name.to_string(), type_variable.clone()); + } + } else { + Self::find_numeric_generics_in_type(generic, found); + } + } + } Type::MutableReference(element) => Self::find_numeric_generics_in_type(element, found), Type::String(length) => { if let Type::NamedGeneric(type_variable, name) = length.as_ref() { @@ -1791,7 +1807,7 @@ impl<'a> Resolver<'a> { } } - fn lookup_type_alias(&mut self, path: Path) -> Option<&TypeAliasType> { + fn lookup_type_alias(&mut self, path: Path) -> Option> { self.lookup(path).ok().map(|id| self.interner.get_type_alias(id)) } diff --git a/compiler/noirc_frontend/src/hir/resolution/type_aliases.rs b/compiler/noirc_frontend/src/hir/resolution/type_aliases.rs index f66f6c8dfa7..2e5ce611a7f 100644 --- a/compiler/noirc_frontend/src/hir/resolution/type_aliases.rs +++ b/compiler/noirc_frontend/src/hir/resolution/type_aliases.rs @@ -17,7 +17,7 @@ pub(crate) fn resolve_type_aliases( crate_id: CrateId, ) -> Vec<(CompilationError, FileId)> { let mut errors: Vec<(CompilationError, FileId)> = vec![]; - for (type_id, unresolved_typ) in type_aliases { + for (alias_id, unresolved_typ) in type_aliases { 
let path_resolver = StandardPathResolver::new(ModuleId { local_id: unresolved_typ.module_id, krate: crate_id, @@ -25,9 +25,9 @@ pub(crate) fn resolve_type_aliases( let file = unresolved_typ.file_id; let (typ, generics, resolver_errors) = Resolver::new(&mut context.def_interner, &path_resolver, &context.def_maps, file) - .resolve_type_aliases(unresolved_typ.type_alias_def); + .resolve_type_alias(unresolved_typ.type_alias_def, alias_id); errors.extend(resolver_errors.iter().cloned().map(|e| (e.into(), file))); - context.def_interner.set_type_alias(type_id, typ, generics); + context.def_interner.set_type_alias(alias_id, typ, generics); } errors } diff --git a/compiler/noirc_frontend/src/hir/type_check/expr.rs b/compiler/noirc_frontend/src/hir/type_check/expr.rs index 62099d2d6a6..96a79152f69 100644 --- a/compiler/noirc_frontend/src/hir/type_check/expr.rs +++ b/compiler/noirc_frontend/src/hir/type_check/expr.rs @@ -844,6 +844,10 @@ impl<'interner> TypeChecker<'interner> { }) } } + (Alias(alias, args), other) | (other, Alias(alias, args)) => { + let alias = alias.borrow().get_type(args); + self.comparator_operand_type_rules(&alias, other, op, span) + } (Integer(sign_x, bit_width_x), Integer(sign_y, bit_width_y)) => { if sign_x != sign_y { return Err(TypeCheckError::IntegerSignedness { @@ -1141,6 +1145,10 @@ impl<'interner> TypeChecker<'interner> { }) } } + (Alias(alias, args), other) | (other, Alias(alias, args)) => { + let alias = alias.borrow().get_type(args); + self.infix_operand_type_rules(&alias, op, other, span) + } (Integer(sign_x, bit_width_x), Integer(sign_y, bit_width_y)) => { if sign_x != sign_y { return Err(TypeCheckError::IntegerSignedness { diff --git a/compiler/noirc_frontend/src/hir/type_check/stmt.rs b/compiler/noirc_frontend/src/hir/type_check/stmt.rs index d6a19bb74be..03d61b93e0c 100644 --- a/compiler/noirc_frontend/src/hir/type_check/stmt.rs +++ b/compiler/noirc_frontend/src/hir/type_check/stmt.rs @@ -93,7 +93,7 @@ impl<'interner> 
TypeChecker<'interner> { match pattern { HirPattern::Identifier(ident) => self.interner.push_definition_type(ident.id, typ), HirPattern::Mutable(pattern, _) => self.bind_pattern(pattern, typ), - HirPattern::Tuple(fields, location) => match typ { + HirPattern::Tuple(fields, location) => match typ.follow_bindings() { Type::Tuple(field_types) if field_types.len() == fields.len() => { for (field, field_type) in fields.iter().zip(field_types) { self.bind_pattern(field, field_type); @@ -120,12 +120,12 @@ impl<'interner> TypeChecker<'interner> { source: Source::Assignment, }); - if let Type::Struct(struct_type, generics) = struct_type { + if let Type::Struct(struct_type, generics) = struct_type.follow_bindings() { let struct_type = struct_type.borrow(); for (field_name, field_pattern) in fields { if let Some((type_field, _)) = - struct_type.get_field(&field_name.0.contents, generics) + struct_type.get_field(&field_name.0.contents, &generics) { self.bind_pattern(field_pattern, type_field); } diff --git a/compiler/noirc_frontend/src/hir_def/types.rs b/compiler/noirc_frontend/src/hir_def/types.rs index 1f8f236a818..98b47f17cd4 100644 --- a/compiler/noirc_frontend/src/hir_def/types.rs +++ b/compiler/noirc_frontend/src/hir_def/types.rs @@ -45,13 +45,18 @@ pub enum Type { /// The unit type `()`. Unit, + /// A tuple type with the given list of fields in the order they appear in source code. + Tuple(Vec), + /// A user-defined struct type. The `Shared` field here refers to /// the shared definition for each instance of this struct type. The `Vec` /// represents the generic arguments (if any) to this struct type. Struct(Shared, Vec), - /// A tuple type with the given list of fields in the order they appear in source code. - Tuple(Vec), + /// A user-defined alias to another type. Similar to a Struct, this carries a shared + /// reference to the definition of the alias along with any generics that may have + /// been applied to the alias. 
+ Alias(Shared, Vec), /// TypeVariables are stand-in variables for some type which is not yet known. /// They are not to be confused with NamedGenerics. While the later mostly works @@ -117,11 +122,16 @@ impl Type { let typ = typ.as_ref(); (length as u32) * typ.field_count() } - Type::Struct(ref def, args) => { + Type::Struct(def, args) => { let struct_type = def.borrow(); let fields = struct_type.get_fields(args); fields.iter().fold(0, |acc, (_, field_type)| acc + field_type.field_count()) } + Type::Alias(def, _) => { + // It is safe to access `typ` without instantiating generics here since generics + // cannot change the number of fields in `typ`. + def.borrow().typ.field_count() + } Type::Tuple(fields) => { fields.iter().fold(0, |acc, field_typ| acc + field_typ.field_count()) } @@ -310,7 +320,7 @@ impl std::fmt::Display for StructType { /// Wrap around an unsolved type #[derive(Debug, Clone, Eq)] -pub struct TypeAliasType { +pub struct TypeAlias { pub name: Ident, pub id: TypeAliasId, pub typ: Type, @@ -318,40 +328,33 @@ pub struct TypeAliasType { pub location: Location, } -impl std::hash::Hash for TypeAliasType { +impl std::hash::Hash for TypeAlias { fn hash(&self, state: &mut H) { self.id.hash(state); } } -impl PartialEq for TypeAliasType { +impl PartialEq for TypeAlias { fn eq(&self, other: &Self) -> bool { self.id == other.id } } -impl std::fmt::Display for TypeAliasType { +impl std::fmt::Display for TypeAlias { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - write!(f, "{}", self.name)?; - - if !self.generics.is_empty() { - let generics = vecmap(&self.generics, |binding| binding.borrow().to_string()); - write!(f, "{}", generics.join(", "))?; - } - - Ok(()) + write!(f, "{}", self.name) } } -impl TypeAliasType { +impl TypeAlias { pub fn new( id: TypeAliasId, name: Ident, location: Location, typ: Type, generics: Generics, - ) -> TypeAliasType { - TypeAliasType { id, typ, name, location, generics } + ) -> TypeAlias { + TypeAlias { id, typ, 
name, location, generics } } pub fn set_type_and_generics(&mut self, new_typ: Type, new_generics: Generics) { @@ -372,6 +375,14 @@ impl TypeAliasType { self.typ.substitute(&substitutions) } + + /// True if the given index is the same index as a generic type of this alias + /// which is expected to be a numeric generic. + /// This is needed because we infer type kinds in Noir and don't have extensive kind checking. + pub fn generic_is_numeric(&self, index_of_generic: usize) -> bool { + let target_id = self.generics[index_of_generic].0; + self.typ.contains_numeric_typevar(target_id) + } } /// A shared, mutable reference to some T. @@ -642,6 +653,13 @@ impl Type { } }) } + Type::Alias(alias, generics) => generics.iter().enumerate().any(|(i, generic)| { + if named_generic_id_matches_target(generic) { + alias.borrow().generic_is_numeric(i) + } else { + generic.contains_numeric_typevar(target_id) + } + }), Type::MutableReference(element) => element.contains_numeric_typevar(target_id), Type::String(length) => named_generic_id_matches_target(length), Type::FmtString(length, elements) => { @@ -678,6 +696,11 @@ impl Type { | Type::TraitAsType(..) | Type::NotConstant => false, + // This function is called during name resolution before we've verified aliases + // are not cyclic. As a result, it wouldn't be safe to check this alias' definition + // to see if the aliased type is valid. + Type::Alias(..) 
=> false, + Type::Array(length, element) => { length.is_valid_for_program_input() && element.is_valid_for_program_input() } @@ -784,6 +807,14 @@ impl std::fmt::Display for Type { write!(f, "{}<{}>", s.borrow(), args.join(", ")) } } + Type::Alias(alias, args) => { + let args = vecmap(args, |arg| arg.to_string()); + if args.is_empty() { + write!(f, "{}", alias.borrow()) + } else { + write!(f, "{}<{}>", alias.borrow(), args.join(", ")) + } + } Type::TraitAsType(_id, name, generics) => { write!(f, "impl {}", name)?; if !generics.is_empty() { @@ -875,7 +906,7 @@ impl Type { TypeBinding::Unbound(id) => *id, }; - let this = self.substitute(bindings); + let this = self.substitute(bindings).follow_bindings(); match &this { Type::Constant(length) if *length == target_length => { @@ -944,7 +975,7 @@ impl Type { TypeBinding::Unbound(id) => *id, }; - let this = self.substitute(bindings); + let this = self.substitute(bindings).follow_bindings(); match &this { Type::FieldElement | Type::Integer(..) => { bindings.insert(target_id, (var.clone(), this)); @@ -1088,6 +1119,11 @@ impl Type { match (self, other) { (Error, _) | (_, Error) => Ok(()), + (Alias(alias, args), other) | (other, Alias(alias, args)) => { + let alias = alias.borrow().get_type(args); + alias.try_unify(other, bindings) + } + (TypeVariable(var, Kind::IntegerOrField), other) | (other, TypeVariable(var, Kind::IntegerOrField)) => { other.try_unify_to_type_variable(var, bindings, |bindings| { @@ -1458,7 +1494,7 @@ impl Type { Type::NamedGeneric(binding, _) | Type::TypeVariable(binding, _) => { substitute_binding(binding) } - // Do not substitute_helper fields, it ca, substitute_bound_typevarsn lead to infinite recursion + // Do not substitute_helper fields, it can lead to infinite recursion // and we should not match fields when type checking anyway. 
Type::Struct(fields, args) => { let args = vecmap(args, |arg| { @@ -1466,6 +1502,12 @@ impl Type { }); Type::Struct(fields.clone(), args) } + Type::Alias(alias, args) => { + let args = vecmap(args, |arg| { + arg.substitute_helper(type_bindings, substitute_bound_typevars) + }); + Type::Alias(alias.clone(), args) + } Type::Tuple(fields) => { let fields = vecmap(fields, |field| { field.substitute_helper(type_bindings, substitute_bound_typevars) @@ -1514,7 +1556,9 @@ impl Type { let field_occurs = fields.occurs(target_id); len_occurs || field_occurs } - Type::Struct(_, generic_args) => generic_args.iter().any(|arg| arg.occurs(target_id)), + Type::Struct(_, generic_args) | Type::Alias(_, generic_args) => { + generic_args.iter().any(|arg| arg.occurs(target_id)) + } Type::Tuple(fields) => fields.iter().any(|field| field.occurs(target_id)), Type::NamedGeneric(binding, _) | Type::TypeVariable(binding, _) => { match &*binding.borrow() { @@ -1565,6 +1609,11 @@ impl Type { let args = vecmap(args, |arg| arg.follow_bindings()); Struct(def.clone(), args) } + Alias(def, args) => { + // We don't need to vecmap(args, follow_bindings) since we're recursively + // calling follow_bindings here already. 
+ def.borrow().get_type(args).follow_bindings() + } Tuple(args) => Tuple(vecmap(args, |arg| arg.follow_bindings())), TypeVariable(var, _) | NamedGeneric(var, _) => { if let TypeBinding::Bound(typ) = &*var.borrow() { @@ -1703,6 +1752,7 @@ impl From<&Type> for PrintableType { let fields = vecmap(fields, |(name, typ)| (name, typ.into())); PrintableType::Struct { fields, name: struct_type.name.to_string() } } + Type::Alias(alias, args) => alias.borrow().get_type(args).into(), Type::TraitAsType(_, _, _) => unreachable!(), Type::Tuple(types) => PrintableType::Tuple { types: vecmap(types, |typ| typ.into()) }, Type::TypeVariable(_, _) => unreachable!(), @@ -1749,9 +1799,17 @@ impl std::fmt::Debug for Type { Type::Struct(s, args) => { let args = vecmap(args, |arg| format!("{:?}", arg)); if args.is_empty() { - write!(f, "{:?}", s.borrow()) + write!(f, "{}", s.borrow()) + } else { + write!(f, "{}<{}>", s.borrow(), args.join(", ")) + } + } + Type::Alias(alias, args) => { + let args = vecmap(args, |arg| format!("{:?}", arg)); + if args.is_empty() { + write!(f, "{}", alias.borrow()) } else { - write!(f, "{:?}<{}>", s.borrow(), args.join(", ")) + write!(f, "{}<{}>", alias.borrow(), args.join(", ")) } } Type::TraitAsType(_id, name, generics) => { diff --git a/compiler/noirc_frontend/src/monomorphization/mod.rs b/compiler/noirc_frontend/src/monomorphization/mod.rs index e9adf26ec98..f691a0c9065 100644 --- a/compiler/noirc_frontend/src/monomorphization/mod.rs +++ b/compiler/noirc_frontend/src/monomorphization/mod.rs @@ -823,6 +823,8 @@ impl<'interner> Monomorphizer<'interner> { ast::Type::Tuple(fields) } + HirType::Alias(def, args) => self.convert_type(&def.borrow().get_type(args)), + HirType::Tuple(fields) => { let fields = vecmap(fields, |x| self.convert_type(x)); ast::Type::Tuple(fields) diff --git a/compiler/noirc_frontend/src/node_interner.rs b/compiler/noirc_frontend/src/node_interner.rs index 7420d4598d9..9a45268d111 100644 --- a/compiler/noirc_frontend/src/node_interner.rs 
+++ b/compiler/noirc_frontend/src/node_interner.rs @@ -29,7 +29,7 @@ use crate::hir_def::{ use crate::token::{Attributes, SecondaryAttribute}; use crate::{ BinaryOpKind, ContractFunctionType, FunctionDefinition, FunctionVisibility, Generics, Shared, - TypeAliasType, TypeBindings, TypeVariable, TypeVariableId, TypeVariableKind, + TypeAlias, TypeBindings, TypeVariable, TypeVariableId, TypeVariableKind, }; /// An arbitrary number to limit the recursion depth when searching for trait impls. @@ -90,11 +90,12 @@ pub struct NodeInterner { structs: HashMap>, struct_attributes: HashMap, - // Type Aliases map. + + // Maps TypeAliasId -> Shared // // Map type aliases to the actual type. // When resolving types, check against this map to see if a type alias is defined. - pub(crate) type_aliases: Vec, + pub(crate) type_aliases: Vec>, // Trait map. // @@ -604,13 +605,13 @@ impl NodeInterner { pub fn push_type_alias(&mut self, typ: &UnresolvedTypeAlias) -> TypeAliasId { let type_id = TypeAliasId(self.type_aliases.len()); - self.type_aliases.push(TypeAliasType::new( + self.type_aliases.push(Shared::new(TypeAlias::new( type_id, typ.type_alias_def.name.clone(), Location::new(typ.type_alias_def.span, typ.file_id), Type::Error, vecmap(&typ.type_alias_def.generics, |_| TypeVariable::unbound(TypeVariableId(0))), - )); + ))); type_id } @@ -632,7 +633,7 @@ impl NodeInterner { pub fn set_type_alias(&mut self, type_id: TypeAliasId, typ: Type, generics: Generics) { let type_alias_type = &mut self.type_aliases[type_id.0]; - type_alias_type.set_type_and_generics(typ, generics); + type_alias_type.borrow_mut().set_type_and_generics(typ, generics); } /// Returns the interned statement corresponding to `stmt_id` @@ -957,8 +958,8 @@ impl NodeInterner { self.traits.get(&id) } - pub fn get_type_alias(&self, id: TypeAliasId) -> &TypeAliasType { - &self.type_aliases[id.0] + pub fn get_type_alias(&self, id: TypeAliasId) -> Shared { + self.type_aliases[id.0].clone() } pub fn get_global(&self, global_id: 
GlobalId) -> &GlobalInfo { @@ -1539,6 +1540,10 @@ impl NodeInterner { self.add_dependency(dependent, DependencyId::Function(dependency)); } + pub fn add_type_alias_dependency(&mut self, dependent: DependencyId, dependency: TypeAliasId) { + self.add_dependency(dependent, DependencyId::Alias(dependency)); + } + fn add_dependency(&mut self, dependent: DependencyId, dependency: DependencyId) { let dependent_index = self.get_or_insert_dependency(dependent); let dependency_index = self.get_or_insert_dependency(dependency); @@ -1585,6 +1590,12 @@ impl NodeInterner { } DependencyId::Alias(alias_id) => { let alias = self.get_type_alias(alias_id); + // If type aliases form a cycle, we have to manually break the cycle + // here to prevent infinite recursion in the type checker. + alias.borrow_mut().typ = Type::Error; + + // push_error will borrow the alias so we have to drop the mutable borrow + let alias = alias.borrow(); push_error(alias.name.to_string(), &scc, i, alias.location); break; } @@ -1606,7 +1617,7 @@ impl NodeInterner { DependencyId::Struct(id) => Cow::Owned(self.get_struct(id).borrow().name.to_string()), DependencyId::Function(id) => Cow::Borrowed(self.function_name(&id)), DependencyId::Alias(id) => { - Cow::Borrowed(self.get_type_alias(id).name.0.contents.as_ref()) + Cow::Owned(self.get_type_alias(id).borrow().name.to_string()) } DependencyId::Global(id) => { Cow::Borrowed(self.get_global(id).ident.0.contents.as_ref()) @@ -1709,6 +1720,7 @@ fn get_type_method_key(typ: &Type) -> Option { Type::Function(_, _, _) => Some(Function), Type::NamedGeneric(_, _) => Some(Generic), Type::MutableReference(element) => get_type_method_key(element), + Type::Alias(alias, _) => get_type_method_key(&alias.borrow().typ), // We do not support adding methods to these types Type::TypeVariable(_, _) diff --git a/compiler/noirc_frontend/src/resolve_locations.rs b/compiler/noirc_frontend/src/resolve_locations.rs index cfb88966b9d..b5f1b1d0c64 100644 --- 
a/compiler/noirc_frontend/src/resolve_locations.rs +++ b/compiler/noirc_frontend/src/resolve_locations.rs @@ -212,6 +212,8 @@ impl NodeInterner { self.type_alias_ref .iter() .find(|(_, named_type_location)| named_type_location.span.contains(&location.span)) - .map(|(type_alias_id, _found_location)| self.get_type_alias(*type_alias_id).location) + .map(|(type_alias_id, _found_location)| { + self.get_type_alias(*type_alias_id).borrow().location + }) } } diff --git a/compiler/noirc_frontend/src/tests.rs b/compiler/noirc_frontend/src/tests.rs index 1deff446d7e..8a56b337398 100644 --- a/compiler/noirc_frontend/src/tests.rs +++ b/compiler/noirc_frontend/src/tests.rs @@ -1184,4 +1184,26 @@ fn lambda$f1(mut env$l1: (Field)) -> Field { "#; assert_eq!(get_program_errors(src).len(), 1); } + + #[test] + fn deny_cyclic_type_aliases() { + let src = r#" + type A = B; + type B = A; + fn main() {} + "#; + assert_eq!(get_program_errors(src).len(), 1); + } + + #[test] + fn ensure_nested_type_aliases_type_check() { + let src = r#" + type A = B; + type B = u8; + fn main() { + let _a: A = 0 as u16; + } + "#; + assert_eq!(get_program_errors(src).len(), 1); + } } diff --git a/docs/docs/noir/concepts/data_types/index.md b/docs/docs/noir/concepts/data_types/index.md index 3c9cd4c2437..97b3b2cb094 100644 --- a/docs/docs/noir/concepts/data_types/index.md +++ b/docs/docs/noir/concepts/data_types/index.md @@ -91,6 +91,20 @@ fn main() { } ``` +Type aliases can even refer to other aliases. An error will be issued if they form a cycle: + +```rust +// Ok! +type A = B; +type B = Field; + +type Bad1 = Bad2; + +// error: Dependency cycle found +type Bad2 = Bad1; +// ^^^^^^^^^^^ 'Bad2' recursively depends on itself: Bad2 -> Bad1 -> Bad2 +``` + ### BigInt You can achieve BigInt functionality using the [Noir BigInt](https://github.com/shuklaayush/noir-bigint) library. 
diff --git a/tooling/noirc_abi/src/lib.rs b/tooling/noirc_abi/src/lib.rs index 52a3e3a19e9..26feab65d83 100644 --- a/tooling/noirc_abi/src/lib.rs +++ b/tooling/noirc_abi/src/lib.rs @@ -157,7 +157,7 @@ impl AbiType { Self::String { length: size } } - Type::Struct(def, ref args) => { + Type::Struct(def, args) => { let struct_type = def.borrow(); let fields = struct_type.get_fields(args); let fields = vecmap(fields, |(name, typ)| (name, Self::from_type(context, &typ))); @@ -166,6 +166,7 @@ impl AbiType { context.fully_qualified_struct_path(context.root_crate_id(), struct_type.id); Self::Struct { fields, path } } + Type::Alias(def, args) => Self::from_type(context, &def.borrow().get_type(args)), Type::Tuple(fields) => { let fields = vecmap(fields, |typ| Self::from_type(context, typ)); Self::Tuple { fields } From 9256a7db3769ee26d99852460762302fd9e80884 Mon Sep 17 00:00:00 2001 From: Tom French <15848336+TomAFrench@users.noreply.github.com> Date: Wed, 14 Feb 2024 15:08:11 +0000 Subject: [PATCH 08/39] chore: fix JS package publishing (#4366) # Description ## Problem\* Resolves ## Summary\* Publishing of Noir packages is currently failing due to a `_` being used in place of a `-`. ## Additional Context ## Documentation\* Check one: - [x] No documentation needed. - [ ] Documentation included in this PR. - [ ] **[Exceptional Case]** Documentation to be submitted in a separate PR. # PR Checklist\* - [x] I have tested the changes locally. - [x] I have formatted the changes with [Prettier](https://prettier.io/) and/or `cargo fmt` on default settings. 
--- .github/workflows/publish-es-packages.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/publish-es-packages.yml b/.github/workflows/publish-es-packages.yml index 231a6124785..fa245883ced 100644 --- a/.github/workflows/publish-es-packages.yml +++ b/.github/workflows/publish-es-packages.yml @@ -114,7 +114,7 @@ jobs: - uses: actions/download-artifact@v4 with: - name: acvm_js + name: acvm-js path: acvm-repo/acvm_js - uses: actions/download-artifact@v4 From e0ad0b2b31f6d46be75d23aec6a82850a9c4bd75 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Gustavo=20Gir=C3=A1ldez?= Date: Wed, 14 Feb 2024 10:21:11 -0500 Subject: [PATCH 09/39] feat: DAP Preflight and debugger compilation options (#4185) # Description ## Problem\* Part of #3015 ## Summary\* This PR adds a preflight mode to DAP in order to make it easier to identify and report back problems to the user when compiling the project for debugging. This preflight mode is invoked from the VS.Code extension before starting the debugging session, and with the same arguments as those that will be used for the session. If the compiler finds any error either loading or compiling the project, the error is reported to stderr which allows the extension to parse the output and present the diagnostic messages to the user. This also changes the default compilation mode to output Brillig code and adds new commands line options to Nargo's `debug` command and launch options to the DAP mode to control the mode and whether to inject instrumentation code to track variables or not. The `debug` options are: - `--acir-mode`, force output of ACIR, which disables instrumentation by default - `--skip-instrumentation={true,false}` to control injection of instrumentation code to track variables values during the debugging session. Similarly, for DAP two launch options can be provided: `generateAcir` and `skipInstrumentation`. 
## Additional Context The default is to output in Brillig mode with instrumentation for tracking variables, as this makes it easier to follow along with stepping through the code. If ACIR mode is selected, instrumentation is disabled by default. Instrumentation can be forcefully enabled or disabled by the provided CLI option. ## Documentation\* Check one: - [ ] No documentation needed. - [ ] Documentation included in this PR. - [X] **[Exceptional Case]** Documentation to be submitted in a separate PR. # PR Checklist\* - [X] I have tested the changes locally. - [X] I have formatted the changes with [Prettier](https://prettier.io/) and/or `cargo fmt` on default settings. --------- Co-authored-by: Martin Verzilli --- cspell.json | 1 + tooling/debugger/src/dap.rs | 7 +- tooling/debugger/src/errors.rs | 19 ++++ tooling/debugger/src/lib.rs | 1 + tooling/nargo_cli/src/cli/dap_cmd.rs | 148 +++++++++++++++++-------- tooling/nargo_cli/src/cli/debug_cmd.rs | 85 +++++++++----- tooling/nargo_cli/src/errors.rs | 3 +- 7 files changed, 192 insertions(+), 72 deletions(-) create mode 100644 tooling/debugger/src/errors.rs diff --git a/cspell.json b/cspell.json index 2acca0633d3..be6b7c5c7e8 100644 --- a/cspell.json +++ b/cspell.json @@ -90,6 +90,7 @@ "indexmap", "injective", "Inlines", + "instrumenter", "interner", "intrinsics", "jmp", diff --git a/tooling/debugger/src/dap.rs b/tooling/debugger/src/dap.rs index dd9a30d50da..184018e9fcc 100644 --- a/tooling/debugger/src/dap.rs +++ b/tooling/debugger/src/dap.rs @@ -510,7 +510,12 @@ impl<'a, R: Read, W: Write, B: BlackBoxFunctionSolver> DapSession<'a, R, W, B> { }; let found_index = match line_to_opcodes.binary_search_by(|x| x.0.cmp(&line)) { Ok(index) => line_to_opcodes[index].1, - Err(index) => line_to_opcodes[index].1, + Err(index) => { + if index >= line_to_opcodes.len() { + return None; + } + line_to_opcodes[index].1 + } }; Some(found_index) } diff --git a/tooling/debugger/src/errors.rs b/tooling/debugger/src/errors.rs new file 
mode 100644 index 00000000000..4578987d715 --- /dev/null +++ b/tooling/debugger/src/errors.rs @@ -0,0 +1,19 @@ +use thiserror::Error; + +#[derive(Debug, Error)] +pub enum DapError { + #[error("{0}")] + PreFlightGenericError(String), + + #[error(transparent)] + LoadError(#[from] LoadError), + + #[error(transparent)] + ServerError(#[from] dap::errors::ServerError), +} + +#[derive(Debug, Error)] +pub enum LoadError { + #[error("{0}")] + Generic(String), +} diff --git a/tooling/debugger/src/lib.rs b/tooling/debugger/src/lib.rs index 35014f9a8c8..4a25e3417a0 100644 --- a/tooling/debugger/src/lib.rs +++ b/tooling/debugger/src/lib.rs @@ -1,5 +1,6 @@ mod context; mod dap; +pub mod errors; mod foreign_calls; mod repl; mod source_code_printer; diff --git a/tooling/nargo_cli/src/cli/dap_cmd.rs b/tooling/nargo_cli/src/cli/dap_cmd.rs index 7c7e6056901..f4df309f1c9 100644 --- a/tooling/nargo_cli/src/cli/dap_cmd.rs +++ b/tooling/nargo_cli/src/cli/dap_cmd.rs @@ -3,38 +3,52 @@ use acvm::acir::native_types::WitnessMap; use backend_interface::Backend; use clap::Args; use nargo::constants::PROVER_INPUT_FILE; -use nargo::ops::compile_program_with_debug_instrumenter; use nargo::workspace::Workspace; -use nargo::{insert_all_files_for_workspace_into_file_manager, parse_all}; use nargo_toml::{get_package_manifest, resolve_workspace_from_toml, PackageSelection}; use noirc_abi::input_parser::Format; -use noirc_driver::{ - file_manager_with_stdlib, CompileOptions, CompiledProgram, NOIR_ARTIFACT_VERSION_STRING, -}; +use noirc_driver::{CompileOptions, CompiledProgram, NOIR_ARTIFACT_VERSION_STRING}; use noirc_frontend::graph::CrateName; use std::io::{BufReader, BufWriter, Read, Write}; use std::path::Path; -use dap::errors::ServerError; use dap::requests::Command; use dap::responses::ResponseBody; use dap::server::Server; use dap::types::Capabilities; use serde_json::Value; -use super::compile_cmd::report_errors; -use super::debug_cmd::instrument_package_files; +use 
super::debug_cmd::compile_bin_package_for_debugging; use super::fs::inputs::read_inputs_from_file; use crate::errors::CliError; use super::NargoConfig; +use noir_debugger::errors::{DapError, LoadError}; + #[derive(Debug, Clone, Args)] pub(crate) struct DapCommand { /// Override the expression width requested by the backend. #[arg(long, value_parser = parse_expression_width)] expression_width: Option, + + #[clap(long)] + preflight_check: bool, + + #[clap(long)] + preflight_project_folder: Option, + + #[clap(long)] + preflight_package: Option, + + #[clap(long)] + preflight_prover_name: Option, + + #[clap(long)] + preflight_generate_acir: bool, + + #[clap(long)] + preflight_skip_instrumentation: bool, } fn parse_expression_width(input: &str) -> Result { @@ -50,8 +64,6 @@ fn parse_expression_width(input: &str) -> Result) -> Option { let Ok(toml_path) = get_package_manifest(Path::new(project_folder)) else { eprintln!("ERROR: Failed to get package manifest"); @@ -72,55 +84,51 @@ fn find_workspace(project_folder: &str, package: Option<&str>) -> Option) -> String { + match package { + Some(pkg) => format!( + r#"Noir Debugger could not load program from {}, package {}"#, + project_folder, pkg + ), + None => format!(r#"Noir Debugger could not load program from {}"#, project_folder), + } +} + fn load_and_compile_project( project_folder: &str, package: Option<&str>, prover_name: &str, expression_width: ExpressionWidth, + acir_mode: bool, + skip_instrumentation: bool, ) -> Result<(CompiledProgram, WitnessMap), LoadError> { - let workspace = - find_workspace(project_folder, package).ok_or(LoadError("Cannot open workspace"))?; - + let workspace = find_workspace(project_folder, package) + .ok_or(LoadError::Generic(workspace_not_found_error_msg(project_folder, package)))?; let package = workspace .into_iter() .find(|p| p.is_binary()) - .ok_or(LoadError("No matching binary packages found in workspace"))?; - - let mut workspace_file_manager = 
file_manager_with_stdlib(std::path::Path::new("")); - insert_all_files_for_workspace_into_file_manager(&workspace, &mut workspace_file_manager); - let mut parsed_files = parse_all(&workspace_file_manager); + .ok_or(LoadError::Generic("No matching binary packages found in workspace".into()))?; - let compile_options = - CompileOptions { instrument_debug: true, force_brillig: true, ..CompileOptions::default() }; - - let debug_state = instrument_package_files(&mut parsed_files, &workspace_file_manager, package); - - let compilation_result = compile_program_with_debug_instrumenter( - &workspace_file_manager, - &parsed_files, + let compiled_program = compile_bin_package_for_debugging( + &workspace, package, - &compile_options, - None, - debug_state, - ); - - let compiled_program = report_errors( - compilation_result, - &workspace_file_manager, - compile_options.deny_warnings, - compile_options.silence_warnings, + acir_mode, + skip_instrumentation, + CompileOptions::default(), ) - .map_err(|_| LoadError("Failed to compile project"))?; + .map_err(|_| LoadError::Generic("Failed to compile project".into()))?; let compiled_program = nargo::ops::transform_program(compiled_program, expression_width); let (inputs_map, _) = read_inputs_from_file(&package.root_dir, prover_name, Format::Toml, &compiled_program.abi) - .map_err(|_| LoadError("Failed to read program inputs"))?; + .map_err(|_| { + LoadError::Generic(format!("Failed to read program inputs from {}", prover_name)) + })?; let initial_witness = compiled_program .abi .encode(&inputs_map, None) - .map_err(|_| LoadError("Failed to encode inputs"))?; + .map_err(|_| LoadError::Generic("Failed to encode inputs".into()))?; Ok((compiled_program, initial_witness)) } @@ -128,7 +136,7 @@ fn load_and_compile_project( fn loop_uninitialized_dap( mut server: Server, expression_width: ExpressionWidth, -) -> Result<(), ServerError> { +) -> Result<(), DapError> { loop { let req = match server.poll_request()? 
{ Some(req) => req, @@ -163,6 +171,13 @@ fn loop_uninitialized_dap( .and_then(|v| v.as_str()) .unwrap_or(PROVER_INPUT_FILE); + let generate_acir = + additional_data.get("generateAcir").and_then(|v| v.as_bool()).unwrap_or(false); + let skip_instrumentation = additional_data + .get("skipInstrumentation") + .and_then(|v| v.as_bool()) + .unwrap_or(generate_acir); + eprintln!("Project folder: {}", project_folder); eprintln!("Package: {}", package.unwrap_or("(default)")); eprintln!("Prover name: {}", prover_name); @@ -172,6 +187,8 @@ fn loop_uninitialized_dap( package, prover_name, expression_width, + generate_acir, + skip_instrumentation, ) { Ok((compiled_program, initial_witness)) => { server.respond(req.ack()?)?; @@ -186,8 +203,8 @@ fn loop_uninitialized_dap( )?; break; } - Err(LoadError(message)) => { - server.respond(req.error(message))?; + Err(LoadError::Generic(message)) => { + server.respond(req.error(message.as_str()))?; } } } @@ -206,17 +223,58 @@ fn loop_uninitialized_dap( Ok(()) } +fn run_preflight_check( + expression_width: ExpressionWidth, + args: DapCommand, +) -> Result<(), DapError> { + let project_folder = if let Some(project_folder) = args.preflight_project_folder { + project_folder + } else { + return Err(DapError::PreFlightGenericError("Noir Debugger could not initialize because the IDE (for example, VS Code) did not specify a project folder to debug.".into())); + }; + + let package = args.preflight_package.as_deref(); + let prover_name = args.preflight_prover_name.as_deref().unwrap_or(PROVER_INPUT_FILE); + + let _ = load_and_compile_project( + project_folder.as_str(), + package, + prover_name, + expression_width, + args.preflight_generate_acir, + args.preflight_skip_instrumentation, + )?; + + Ok(()) +} + pub(crate) fn run( backend: &Backend, args: DapCommand, _config: NargoConfig, ) -> Result<(), CliError> { + let expression_width = + args.expression_width.unwrap_or_else(|| backend.get_backend_info_or_default()); + + // When the --preflight-check 
flag is present, we run Noir's DAP server in "pre-flight mode", which test runs + // the DAP initialization code without actually starting the DAP server. + // + // This lets the client IDE present any initialization issues (compiler version mismatches, missing prover files, etc) + // in its own interface. + // + // This was necessary due to the VS Code project being reluctant to let extension authors capture + // stderr output generated by a DAP server wrapped in DebugAdapterExecutable. + // + // Exposing this preflight mode lets us gracefully handle errors that happen *before* + // the DAP loop is established, which otherwise are considered "out of band" by the maintainers of the DAP spec. + // More details here: https://github.com/microsoft/vscode/issues/108138 + if args.preflight_check { + return run_preflight_check(expression_width, args).map_err(CliError::DapError); + } + let output = BufWriter::new(std::io::stdout()); let input = BufReader::new(std::io::stdin()); let server = Server::new(input, output); - let expression_width = - args.expression_width.unwrap_or_else(|| backend.get_backend_info_or_default()); - loop_uninitialized_dap(server, expression_width).map_err(CliError::DapError) } diff --git a/tooling/nargo_cli/src/cli/debug_cmd.rs b/tooling/nargo_cli/src/cli/debug_cmd.rs index b3ee9137530..6fcfee91457 100644 --- a/tooling/nargo_cli/src/cli/debug_cmd.rs +++ b/tooling/nargo_cli/src/cli/debug_cmd.rs @@ -7,8 +7,10 @@ use clap::Args; use fm::FileManager; use nargo::artifacts::debug::DebugArtifact; use nargo::constants::PROVER_INPUT_FILE; -use nargo::ops::compile_program_with_debug_instrumenter; +use nargo::errors::CompileError; +use nargo::ops::{compile_program, compile_program_with_debug_instrumenter}; use nargo::package::Package; +use nargo::workspace::Workspace; use nargo::{insert_all_files_for_workspace_into_file_manager, parse_all}; use nargo_toml::{get_package_manifest, resolve_workspace_from_toml, PackageSelection}; use 
noirc_abi::input_parser::{Format, InputValue}; @@ -42,6 +44,14 @@ pub(crate) struct DebugCommand { #[clap(flatten)] compile_options: CompileOptions, + + /// Force ACIR output (disabling instrumentation) + #[clap(long)] + acir_mode: bool, + + /// Disable vars debug instrumentation (enabled by default) + #[clap(long)] + skip_instrumentation: Option, } pub(crate) fn run( @@ -49,9 +59,8 @@ pub(crate) fn run( args: DebugCommand, config: NargoConfig, ) -> Result<(), CliError> { - // Override clap default for compiler option flag - let mut args = args.clone(); - args.compile_options.instrument_debug = true; + let acir_mode = args.acir_mode; + let skip_instrumentation = args.skip_instrumentation.unwrap_or(acir_mode); let toml_path = get_package_manifest(&config.program_dir)?; let selection = args.package.map_or(PackageSelection::DefaultOrAll, PackageSelection::Selected); @@ -66,10 +75,6 @@ pub(crate) fn run( .expression_width .unwrap_or_else(|| backend.get_backend_info_or_default()); - let mut workspace_file_manager = file_manager_with_stdlib(std::path::Path::new("")); - insert_all_files_for_workspace_into_file_manager(&workspace, &mut workspace_file_manager); - let mut parsed_files = parse_all(&workspace_file_manager); - let Some(package) = workspace.into_iter().find(|p| p.is_binary()) else { println!( "No matching binary packages found in workspace. Only binary packages can be debugged." 
@@ -77,23 +82,12 @@ pub(crate) fn run( return Ok(()); }; - let debug_instrumenter = - instrument_package_files(&mut parsed_files, &workspace_file_manager, package); - - let compilation_result = compile_program_with_debug_instrumenter( - &workspace_file_manager, - &parsed_files, + let compiled_program = compile_bin_package_for_debugging( + &workspace, package, - &args.compile_options, - None, - debug_instrumenter, - ); - - let compiled_program = report_errors( - compilation_result, - &workspace_file_manager, - args.compile_options.deny_warnings, - args.compile_options.silence_warnings, + acir_mode, + skip_instrumentation, + args.compile_options.clone(), )?; let compiled_program = nargo::ops::transform_program(compiled_program, expression_width); @@ -101,9 +95,50 @@ pub(crate) fn run( run_async(package, compiled_program, &args.prover_name, &args.witness_name, target_dir) } +pub(crate) fn compile_bin_package_for_debugging( + workspace: &Workspace, + package: &Package, + acir_mode: bool, + skip_instrumentation: bool, + compile_options: CompileOptions, +) -> Result { + let mut workspace_file_manager = file_manager_with_stdlib(std::path::Path::new("")); + insert_all_files_for_workspace_into_file_manager(workspace, &mut workspace_file_manager); + let mut parsed_files = parse_all(&workspace_file_manager); + + let compile_options = CompileOptions { + instrument_debug: !skip_instrumentation, + force_brillig: !acir_mode, + ..compile_options + }; + + let compilation_result = if !skip_instrumentation { + let debug_state = + instrument_package_files(&mut parsed_files, &workspace_file_manager, package); + + compile_program_with_debug_instrumenter( + &workspace_file_manager, + &parsed_files, + package, + &compile_options, + None, + debug_state, + ) + } else { + compile_program(&workspace_file_manager, &parsed_files, package, &compile_options, None) + }; + + report_errors( + compilation_result, + &workspace_file_manager, + compile_options.deny_warnings, + 
compile_options.silence_warnings, + ) +} + /// Add debugging instrumentation to all parsed files belonging to the package /// being compiled -pub(crate) fn instrument_package_files( +fn instrument_package_files( parsed_files: &mut ParsedFiles, file_manager: &FileManager, package: &Package, diff --git a/tooling/nargo_cli/src/errors.rs b/tooling/nargo_cli/src/errors.rs index 4636772231b..c2996f53420 100644 --- a/tooling/nargo_cli/src/errors.rs +++ b/tooling/nargo_cli/src/errors.rs @@ -2,6 +2,7 @@ use acvm::acir::native_types::WitnessMapError; use hex::FromHexError; use nargo::{errors::CompileError, NargoError}; use nargo_toml::ManifestError; +use noir_debugger::errors::DapError; use noirc_abi::errors::{AbiError, InputParserError}; use std::path::PathBuf; use thiserror::Error; @@ -54,7 +55,7 @@ pub(crate) enum CliError { LspError(#[from] async_lsp::Error), #[error(transparent)] - DapError(#[from] dap::errors::ServerError), + DapError(#[from] DapError), /// Error from Nargo #[error(transparent)] From dcd7a1e561a68504b9038ffbb3c80f5c981f9f0c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=81lvaro=20Rodr=C3=ADguez?= Date: Wed, 14 Feb 2024 18:03:44 +0100 Subject: [PATCH 10/39] fix: Consistent bit size for truncate (#4370) # Description ## Problem\* Work towards #4369 ## Summary\* Uses consistent bit sizes for truncate and starts a refactor where we start to track bit sizes for values in brillig IR ## Additional Context ## Documentation\* Check one: - [x] No documentation needed. - [ ] Documentation included in this PR. - [ ] **[Exceptional Case]** Documentation to be submitted in a separate PR. # PR Checklist\* - [x] I have tested the changes locally. - [x] I have formatted the changes with [Prettier](https://prettier.io/) and/or `cargo fmt` on default settings. 
--- .../brillig/brillig_gen/brillig_black_box.rs | 102 ++++---- .../src/brillig/brillig_gen/brillig_block.rs | 219 ++++++++++-------- .../brillig_gen/brillig_block_variables.rs | 27 ++- .../src/brillig/brillig_gen/brillig_fn.rs | 2 +- .../brillig/brillig_gen/brillig_slice_ops.rs | 74 +++--- .../noirc_evaluator/src/brillig/brillig_ir.rs | 91 ++++---- .../src/brillig/brillig_ir/artifact.rs | 4 +- .../brillig/brillig_ir/brillig_variable.rs | 20 +- .../src/brillig/brillig_ir/entry_point.rs | 44 ++-- 9 files changed, 331 insertions(+), 252 deletions(-) diff --git a/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_black_box.rs b/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_black_box.rs index dfe23b45034..d542240a40c 100644 --- a/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_black_box.rs +++ b/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_black_box.rs @@ -56,12 +56,12 @@ pub(crate) fn convert_black_box_call( } BlackBoxFunc::Keccak256 => { if let ( - [message, BrilligVariable::Simple(array_size)], + [message, BrilligVariable::SingleAddr(array_size)], [BrilligVariable::BrilligArray(result_array)], ) = (function_arguments, function_results) { let mut message_vector = convert_array_or_vector(brillig_context, message, bb_func); - message_vector.size = *array_size; + message_vector.size = array_size.address; brillig_context.black_box_op_instruction(BlackBoxOp::Keccak256 { message: message_vector.to_heap_vector(), @@ -88,7 +88,7 @@ pub(crate) fn convert_black_box_call( BlackBoxFunc::EcdsaSecp256k1 => { if let ( [BrilligVariable::BrilligArray(public_key_x), BrilligVariable::BrilligArray(public_key_y), BrilligVariable::BrilligArray(signature), message], - [BrilligVariable::Simple(result_register)], + [BrilligVariable::SingleAddr(result_register)], ) = (function_arguments, function_results) { let message_hash_vector = @@ -98,7 +98,7 @@ pub(crate) fn convert_black_box_call( public_key_x: public_key_x.to_heap_array(), public_key_y: 
public_key_y.to_heap_array(), signature: signature.to_heap_array(), - result: *result_register, + result: result_register.address, }); } else { unreachable!( @@ -109,7 +109,7 @@ pub(crate) fn convert_black_box_call( BlackBoxFunc::EcdsaSecp256r1 => { if let ( [BrilligVariable::BrilligArray(public_key_x), BrilligVariable::BrilligArray(public_key_y), BrilligVariable::BrilligArray(signature), message], - [BrilligVariable::Simple(result_register)], + [BrilligVariable::SingleAddr(result_register)], ) = (function_arguments, function_results) { let message_hash_vector = @@ -119,7 +119,7 @@ pub(crate) fn convert_black_box_call( public_key_x: public_key_x.to_heap_array(), public_key_y: public_key_y.to_heap_array(), signature: signature.to_heap_array(), - result: *result_register, + result: result_register.address, }); } else { unreachable!( @@ -130,14 +130,14 @@ pub(crate) fn convert_black_box_call( BlackBoxFunc::PedersenCommitment => { if let ( - [message, BrilligVariable::Simple(domain_separator)], + [message, BrilligVariable::SingleAddr(domain_separator)], [BrilligVariable::BrilligArray(result_array)], ) = (function_arguments, function_results) { let message_vector = convert_array_or_vector(brillig_context, message, bb_func); brillig_context.black_box_op_instruction(BlackBoxOp::PedersenCommitment { inputs: message_vector.to_heap_vector(), - domain_separator: *domain_separator, + domain_separator: domain_separator.address, output: result_array.to_heap_array(), }); } else { @@ -146,15 +146,15 @@ pub(crate) fn convert_black_box_call( } BlackBoxFunc::PedersenHash => { if let ( - [message, BrilligVariable::Simple(domain_separator)], - [BrilligVariable::Simple(result)], + [message, BrilligVariable::SingleAddr(domain_separator)], + [BrilligVariable::SingleAddr(result)], ) = (function_arguments, function_results) { let message_vector = convert_array_or_vector(brillig_context, message, bb_func); brillig_context.black_box_op_instruction(BlackBoxOp::PedersenHash { inputs: 
message_vector.to_heap_vector(), - domain_separator: *domain_separator, - output: *result, + domain_separator: domain_separator.address, + output: result.address, }); } else { unreachable!("ICE: Pedersen hash expects one array argument, a register for the domain separator, and one register result") @@ -162,18 +162,18 @@ pub(crate) fn convert_black_box_call( } BlackBoxFunc::SchnorrVerify => { if let ( - [BrilligVariable::Simple(public_key_x), BrilligVariable::Simple(public_key_y), BrilligVariable::BrilligArray(signature), message], - [BrilligVariable::Simple(result_register)], + [BrilligVariable::SingleAddr(public_key_x), BrilligVariable::SingleAddr(public_key_y), BrilligVariable::BrilligArray(signature), message], + [BrilligVariable::SingleAddr(result_register)], ) = (function_arguments, function_results) { let message_hash = convert_array_or_vector(brillig_context, message, bb_func); let signature = brillig_context.array_to_vector(signature); brillig_context.black_box_op_instruction(BlackBoxOp::SchnorrVerify { - public_key_x: *public_key_x, - public_key_y: *public_key_y, + public_key_x: public_key_x.address, + public_key_y: public_key_y.address, message: message_hash.to_heap_vector(), signature: signature.to_heap_vector(), - result: *result_register, + result: result_register.address, }); } else { unreachable!("ICE: Schnorr verify expects two registers for the public key, an array for signature, an array for the message hash and one result register") @@ -181,13 +181,13 @@ pub(crate) fn convert_black_box_call( } BlackBoxFunc::FixedBaseScalarMul => { if let ( - [BrilligVariable::Simple(low), BrilligVariable::Simple(high)], + [BrilligVariable::SingleAddr(low), BrilligVariable::SingleAddr(high)], [BrilligVariable::BrilligArray(result_array)], ) = (function_arguments, function_results) { brillig_context.black_box_op_instruction(BlackBoxOp::FixedBaseScalarMul { - low: *low, - high: *high, + low: low.address, + high: high.address, result: result_array.to_heap_array(), 
}); } else { @@ -198,15 +198,15 @@ pub(crate) fn convert_black_box_call( } BlackBoxFunc::EmbeddedCurveAdd => { if let ( - [BrilligVariable::Simple(input1_x), BrilligVariable::Simple(input1_y), BrilligVariable::Simple(input2_x), BrilligVariable::Simple(input2_y)], + [BrilligVariable::SingleAddr(input1_x), BrilligVariable::SingleAddr(input1_y), BrilligVariable::SingleAddr(input2_x), BrilligVariable::SingleAddr(input2_y)], [BrilligVariable::BrilligArray(result_array)], ) = (function_arguments, function_results) { brillig_context.black_box_op_instruction(BlackBoxOp::EmbeddedCurveAdd { - input1_x: *input1_x, - input1_y: *input1_y, - input2_x: *input2_x, - input2_y: *input2_y, + input1_x: input1_x.address, + input1_y: input1_y.address, + input2_x: input2_x.address, + input2_y: input2_y.address, result: result_array.to_heap_array(), }); } else { @@ -229,14 +229,14 @@ pub(crate) fn convert_black_box_call( ), BlackBoxFunc::BigIntAdd => { if let ( - [BrilligVariable::Simple(lhs), BrilligVariable::Simple(rhs)], - [BrilligVariable::Simple(output)], + [BrilligVariable::SingleAddr(lhs), BrilligVariable::SingleAddr(rhs)], + [BrilligVariable::SingleAddr(output)], ) = (function_arguments, function_results) { brillig_context.black_box_op_instruction(BlackBoxOp::BigIntAdd { - lhs: *lhs, - rhs: *rhs, - output: *output, + lhs: lhs.address, + rhs: rhs.address, + output: output.address, }); } else { unreachable!( @@ -246,14 +246,14 @@ pub(crate) fn convert_black_box_call( } BlackBoxFunc::BigIntSub => { if let ( - [BrilligVariable::Simple(lhs), BrilligVariable::Simple(rhs)], - [BrilligVariable::Simple(output)], + [BrilligVariable::SingleAddr(lhs), BrilligVariable::SingleAddr(rhs)], + [BrilligVariable::SingleAddr(output)], ) = (function_arguments, function_results) { brillig_context.black_box_op_instruction(BlackBoxOp::BigIntSub { - lhs: *lhs, - rhs: *rhs, - output: *output, + lhs: lhs.address, + rhs: rhs.address, + output: output.address, }); } else { unreachable!( @@ -263,14 +263,14 @@ 
pub(crate) fn convert_black_box_call( } BlackBoxFunc::BigIntMul => { if let ( - [BrilligVariable::Simple(lhs), BrilligVariable::Simple(rhs)], - [BrilligVariable::Simple(output)], + [BrilligVariable::SingleAddr(lhs), BrilligVariable::SingleAddr(rhs)], + [BrilligVariable::SingleAddr(output)], ) = (function_arguments, function_results) { brillig_context.black_box_op_instruction(BlackBoxOp::BigIntMul { - lhs: *lhs, - rhs: *rhs, - output: *output, + lhs: lhs.address, + rhs: rhs.address, + output: output.address, }); } else { unreachable!( @@ -280,14 +280,14 @@ pub(crate) fn convert_black_box_call( } BlackBoxFunc::BigIntDiv => { if let ( - [BrilligVariable::Simple(lhs), BrilligVariable::Simple(rhs)], - [BrilligVariable::Simple(output)], + [BrilligVariable::SingleAddr(lhs), BrilligVariable::SingleAddr(rhs)], + [BrilligVariable::SingleAddr(output)], ) = (function_arguments, function_results) { brillig_context.black_box_op_instruction(BlackBoxOp::BigIntDiv { - lhs: *lhs, - rhs: *rhs, - output: *output, + lhs: lhs.address, + rhs: rhs.address, + output: output.address, }); } else { unreachable!( @@ -296,7 +296,7 @@ pub(crate) fn convert_black_box_call( } } BlackBoxFunc::BigIntFromLeBytes => { - if let ([inputs, modulus], [BrilligVariable::Simple(output)]) = + if let ([inputs, modulus], [BrilligVariable::SingleAddr(output)]) = (function_arguments, function_results) { let inputs_vector = convert_array_or_vector(brillig_context, inputs, bb_func); @@ -304,7 +304,7 @@ pub(crate) fn convert_black_box_call( brillig_context.black_box_op_instruction(BlackBoxOp::BigIntFromLeBytes { inputs: inputs_vector.to_heap_vector(), modulus: modulus_vector.to_heap_vector(), - output: *output, + output: output.address, }); } else { unreachable!( @@ -314,12 +314,12 @@ pub(crate) fn convert_black_box_call( } BlackBoxFunc::BigIntToLeBytes => { if let ( - [BrilligVariable::Simple(input)], + [BrilligVariable::SingleAddr(input)], [BrilligVariable::BrilligVector(result_vector)], ) = (function_arguments, 
function_results) { brillig_context.black_box_op_instruction(BlackBoxOp::BigIntToLeBytes { - input: *input, + input: input.address, output: result_vector.to_heap_vector(), }); } else { @@ -330,7 +330,7 @@ pub(crate) fn convert_black_box_call( } BlackBoxFunc::Poseidon2Permutation => { if let ( - [message, BrilligVariable::Simple(state_len)], + [message, BrilligVariable::SingleAddr(state_len)], [BrilligVariable::BrilligArray(result_array)], ) = (function_arguments, function_results) { @@ -338,7 +338,7 @@ pub(crate) fn convert_black_box_call( brillig_context.black_box_op_instruction(BlackBoxOp::Poseidon2Permutation { message: message_vector.to_heap_vector(), output: result_array.to_heap_array(), - len: *state_len, + len: state_len.address, }); } else { unreachable!("ICE: Poseidon2Permutation expects one array argument, a length and one array result") diff --git a/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs b/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs index 7697d7e65fa..c299daa158a 100644 --- a/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs +++ b/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs @@ -1,5 +1,5 @@ use crate::brillig::brillig_ir::brillig_variable::{ - type_to_heap_value_type, BrilligArray, BrilligVariable, BrilligVector, + type_to_heap_value_type, BrilligArray, BrilligVariable, BrilligVector, SingleAddrVariable, }; use crate::brillig::brillig_ir::{ BrilligBinaryOp, BrilligContext, BRILLIG_INTEGER_ARITHMETIC_BIT_SIZE, @@ -117,9 +117,9 @@ impl<'block> BrilligBlock<'block> { ) { match terminator_instruction { TerminatorInstruction::JmpIf { condition, then_destination, else_destination } => { - let condition = self.convert_ssa_register_value(*condition, dfg); + let condition = self.convert_ssa_single_addr_value(*condition, dfg); self.brillig_context.jump_if_instruction( - condition, + condition.address, self.create_block_label_for_current_function(*then_destination), ); 
self.brillig_context.jump_instruction( @@ -164,10 +164,10 @@ impl<'block> BrilligBlock<'block> { fn pass_variable(&mut self, source: BrilligVariable, destination: BrilligVariable) { match (source, destination) { ( - BrilligVariable::Simple(source_register), - BrilligVariable::Simple(destination_register), + BrilligVariable::SingleAddr(source_var), + BrilligVariable::SingleAddr(destination_var), ) => { - self.brillig_context.mov_instruction(destination_register, source_register); + self.brillig_context.mov_instruction(destination_var.address, source_var.address); } ( BrilligVariable::BrilligArray(BrilligArray { @@ -241,16 +241,19 @@ impl<'block> BrilligBlock<'block> { match instruction { Instruction::Binary(binary) => { - let result_register = self.variables.define_register_variable( + let result_var = self.variables.define_single_addr_variable( self.function_context, self.brillig_context, dfg.instruction_results(instruction_id)[0], dfg, ); - self.convert_ssa_binary(binary, dfg, result_register); + self.convert_ssa_binary(binary, dfg, result_var); } Instruction::Constrain(lhs, rhs, assert_message) => { - let condition = self.brillig_context.allocate_register(); + let condition = SingleAddrVariable { + address: self.brillig_context.allocate_register(), + bit_size: 1, + }; self.convert_ssa_binary( &Binary { lhs: *lhs, rhs: *rhs, operator: BinaryOp::Eq }, @@ -281,12 +284,12 @@ impl<'block> BrilligBlock<'block> { None }; - self.brillig_context.constrain_instruction(condition, assert_message); - self.brillig_context.deallocate_register(condition); + self.brillig_context.constrain_instruction(condition.address, assert_message); + self.brillig_context.deallocate_register(condition.address); } Instruction::Allocate => { let result_value = dfg.instruction_results(instruction_id)[0]; - let address_register = self.variables.define_register_variable( + let address_register = self.variables.define_single_addr_variable( self.function_context, self.brillig_context, result_value, 
@@ -296,15 +299,16 @@ impl<'block> BrilligBlock<'block> { Type::Reference(element) => match *element { Type::Array(..) => { self.brillig_context - .allocate_array_reference_instruction(address_register); + .allocate_array_reference_instruction(address_register.address); } Type::Slice(..) => { self.brillig_context - .allocate_vector_reference_instruction(address_register); + .allocate_vector_reference_instruction(address_register.address); } _ => { - self.brillig_context - .allocate_simple_reference_instruction(address_register); + self.brillig_context.allocate_single_addr_reference_instruction( + address_register.address, + ); } }, _ => { @@ -313,10 +317,11 @@ impl<'block> BrilligBlock<'block> { } } Instruction::Store { address, value } => { - let address_register = self.convert_ssa_register_value(*address, dfg); + let address_var = self.convert_ssa_single_addr_value(*address, dfg); let source_variable = self.convert_ssa_value(*value, dfg); - self.brillig_context.store_variable_instruction(address_register, source_variable); + self.brillig_context + .store_variable_instruction(address_var.address, source_variable); } Instruction::Load { address } => { let target_variable = self.variables.define_variable( @@ -326,34 +331,34 @@ impl<'block> BrilligBlock<'block> { dfg, ); - let address_register = self.convert_ssa_register_value(*address, dfg); + let address_variable = self.convert_ssa_single_addr_value(*address, dfg); - self.brillig_context.load_variable_instruction(target_variable, address_register); + self.brillig_context + .load_variable_instruction(target_variable, address_variable.address); } Instruction::Not(value) => { - let condition_register = self.convert_ssa_register_value(*value, dfg); - let result_register = self.variables.define_register_variable( + let condition_register = self.convert_ssa_single_addr_value(*value, dfg); + let result_register = self.variables.define_single_addr_variable( self.function_context, self.brillig_context, 
dfg.instruction_results(instruction_id)[0], dfg, ); - let bit_size = get_bit_size_from_ssa_type(&dfg.type_of_value(*value)); - self.brillig_context.not_instruction(condition_register, bit_size, result_register); + self.brillig_context.not_instruction(condition_register, result_register); } Instruction::Call { func, arguments } => match &dfg[*func] { Value::ForeignFunction(func_name) => { let result_ids = dfg.instruction_results(instruction_id); let input_registers = vecmap(arguments, |value_id| { - self.convert_ssa_value(*value_id, dfg).to_register_or_memory() + self.convert_ssa_value(*value_id, dfg).to_value_or_array() }); let input_value_types = vecmap(arguments, |value_id| { let value_type = dfg.type_of_value(*value_id); type_to_heap_value_type(&value_type) }); let output_registers = vecmap(result_ids, |value_id| { - self.allocate_external_call_result(*value_id, dfg).to_register_or_memory() + self.allocate_external_call_result(*value_id, dfg).to_value_or_array() }); let output_value_types = vecmap(result_ids, |value_id| { let value_type = dfg.type_of_value(*value_id); @@ -431,7 +436,7 @@ impl<'block> BrilligBlock<'block> { ); } Value::Intrinsic(Intrinsic::ArrayLen) => { - let result_register = self.variables.define_register_variable( + let result_variable = self.variables.define_single_addr_variable( self.function_context, self.brillig_context, dfg.instruction_results(instruction_id)[0], @@ -443,10 +448,11 @@ impl<'block> BrilligBlock<'block> { // or an array in the case of an array. 
if let Type::Numeric(_) = dfg.type_of_value(param_id) { let len_variable = self.convert_ssa_value(arguments[0], dfg); - let len_register_index = len_variable.extract_register(); - self.brillig_context.mov_instruction(result_register, len_register_index); + let length = len_variable.extract_single_addr(); + self.brillig_context + .mov_instruction(result_variable.address, length.address); } else { - self.convert_ssa_array_len(arguments[0], result_register, dfg); + self.convert_ssa_array_len(arguments[0], result_variable.address, dfg); } } Value::Intrinsic( @@ -465,13 +471,13 @@ impl<'block> BrilligBlock<'block> { ); } Value::Intrinsic(Intrinsic::ToRadix(endianness)) => { - let source = self.convert_ssa_register_value(arguments[0], dfg); - let radix = self.convert_ssa_register_value(arguments[1], dfg); - let limb_count = self.convert_ssa_register_value(arguments[2], dfg); + let source = self.convert_ssa_single_addr_value(arguments[0], dfg); + let radix = self.convert_ssa_single_addr_value(arguments[1], dfg); + let limb_count = self.convert_ssa_single_addr_value(arguments[2], dfg); let results = dfg.instruction_results(instruction_id); - let target_len = self.variables.define_register_variable( + let target_len = self.variables.define_single_addr_variable( self.function_context, self.brillig_context, results[0], @@ -489,19 +495,19 @@ impl<'block> BrilligBlock<'block> { .extract_vector(); // Update the user-facing slice length - self.brillig_context.mov_instruction(target_len, limb_count); + self.brillig_context.mov_instruction(target_len.address, limb_count.address); self.brillig_context.radix_instruction( - source, + source.address, target_vector, - radix, - limb_count, + radix.address, + limb_count.address, matches!(endianness, Endian::Big), ); } Value::Intrinsic(Intrinsic::ToBits(endianness)) => { - let source = self.convert_ssa_register_value(arguments[0], dfg); - let limb_count = self.convert_ssa_register_value(arguments[1], dfg); + let source = 
self.convert_ssa_single_addr_value(arguments[0], dfg); + let limb_count = self.convert_ssa_single_addr_value(arguments[1], dfg); let results = dfg.instruction_results(instruction_id); @@ -511,7 +517,7 @@ impl<'block> BrilligBlock<'block> { results[0], dfg, ); - let target_len = target_len_variable.extract_register(); + let target_len = target_len_variable.extract_single_addr(); let target_vector = match self.variables.define_variable( self.function_context, @@ -523,7 +529,7 @@ impl<'block> BrilligBlock<'block> { self.brillig_context.array_to_vector(&array) } BrilligVariable::BrilligVector(vector) => vector, - BrilligVariable::Simple(..) => unreachable!("ICE: ToBits on non-array"), + BrilligVariable::SingleAddr(..) => unreachable!("ICE: ToBits on non-array"), }; let radix = self @@ -531,13 +537,13 @@ impl<'block> BrilligBlock<'block> { .make_constant(2_usize.into(), FieldElement::max_num_bits()); // Update the user-facing slice length - self.brillig_context.mov_instruction(target_len, limb_count); + self.brillig_context.mov_instruction(target_len.address, limb_count.address); self.brillig_context.radix_instruction( - source, + source.address, target_vector, radix, - limb_count, + limb_count.address, matches!(endianness, Endian::Big), ); @@ -549,29 +555,29 @@ impl<'block> BrilligBlock<'block> { }, Instruction::Truncate { value, bit_size, .. 
} => { let result_ids = dfg.instruction_results(instruction_id); - let destination_register = self.variables.define_register_variable( + let destination_register = self.variables.define_single_addr_variable( self.function_context, self.brillig_context, result_ids[0], dfg, ); - let source_register = self.convert_ssa_register_value(*value, dfg); + let source_register = self.convert_ssa_single_addr_value(*value, dfg); self.brillig_context.truncate_instruction( destination_register, source_register, *bit_size, ); } - Instruction::Cast(value, typ) => { + Instruction::Cast(value, _) => { let result_ids = dfg.instruction_results(instruction_id); - let destination_register = self.variables.define_register_variable( + let destination_variable = self.variables.define_single_addr_variable( self.function_context, self.brillig_context, result_ids[0], dfg, ); - let source_register = self.convert_ssa_register_value(*value, dfg); - self.convert_cast(destination_register, source_register, typ); + let source_variable = self.convert_ssa_single_addr_value(*value, dfg); + self.convert_cast(destination_variable, source_variable); } Instruction::ArrayGet { array, index } => { let result_ids = dfg.instruction_results(instruction_id); @@ -589,17 +595,17 @@ impl<'block> BrilligBlock<'block> { _ => unreachable!("ICE: array get on non-array"), }; - let index_register = self.convert_ssa_register_value(*index, dfg); - self.validate_array_index(array_variable, index_register); + let index_variable = self.convert_ssa_single_addr_value(*index, dfg); + self.validate_array_index(array_variable, index_variable); self.retrieve_variable_from_array( array_pointer, - index_register, + index_variable.address, destination_variable, ); } Instruction::ArraySet { array, index, value, .. 
} => { let source_variable = self.convert_ssa_value(*array, dfg); - let index_register = self.convert_ssa_register_value(*index, dfg); + let index_register = self.convert_ssa_single_addr_value(*index, dfg); let value_variable = self.convert_ssa_value(*value, dfg); let result_ids = dfg.instruction_results(instruction_id); @@ -614,15 +620,18 @@ impl<'block> BrilligBlock<'block> { self.convert_ssa_array_set( source_variable, destination_variable, - index_register, + index_register.address, value_variable, ); } Instruction::RangeCheck { value, max_bit_size, assert_message } => { - let value = self.convert_ssa_register_value(*value, dfg); + let value = self.convert_ssa_single_addr_value(*value, dfg); // Cast original value to field - let left = self.brillig_context.allocate_register(); - self.convert_cast(left, value, &Type::field()); + let left = SingleAddrVariable { + address: self.brillig_context.allocate_register(), + bit_size: FieldElement::max_num_bits(), + }; + self.convert_cast(left, value); // Create a field constant with the max let max = BigUint::from(2_u128).pow(*max_bit_size) - BigUint::from(1_u128); @@ -637,11 +646,16 @@ impl<'block> BrilligBlock<'block> { bit_size: FieldElement::max_num_bits(), }; let condition = self.brillig_context.allocate_register(); - self.brillig_context.binary_instruction(left, right, condition, brillig_binary_op); + self.brillig_context.binary_instruction( + left.address, + right, + condition, + brillig_binary_op, + ); self.brillig_context.constrain_instruction(condition, assert_message.clone()); self.brillig_context.deallocate_register(condition); - self.brillig_context.deallocate_register(left); + self.brillig_context.deallocate_register(left.address); self.brillig_context.deallocate_register(right); } Instruction::IncrementRc { value } => { @@ -730,7 +744,7 @@ impl<'block> BrilligBlock<'block> { fn validate_array_index( &mut self, array_variable: BrilligVariable, - index_register: MemoryAddress, + index_register: 
SingleAddrVariable, ) { let (size_as_register, should_deallocate_size) = match array_variable { BrilligVariable::BrilligArray(BrilligArray { size, .. }) => { @@ -743,7 +757,7 @@ impl<'block> BrilligBlock<'block> { let condition = self.brillig_context.allocate_register(); self.brillig_context.memory_op( - index_register, + index_register.address, size_as_register, condition, BinaryIntOp::LessThan, @@ -765,8 +779,12 @@ impl<'block> BrilligBlock<'block> { destination_variable: BrilligVariable, ) { match destination_variable { - BrilligVariable::Simple(destination_register) => { - self.brillig_context.array_get(array_pointer, index_register, destination_register); + BrilligVariable::SingleAddr(destination_register) => { + self.brillig_context.array_get( + array_pointer, + index_register, + destination_register.address, + ); } BrilligVariable::BrilligArray(..) | BrilligVariable::BrilligVector(..) => { let reference = self.brillig_context.allocate_register(); @@ -868,8 +886,8 @@ impl<'block> BrilligBlock<'block> { value_variable: BrilligVariable, ) { match value_variable { - BrilligVariable::Simple(value_register) => { - ctx.array_set(destination_pointer, index_register, value_register); + BrilligVariable::SingleAddr(value_variable) => { + ctx.array_set(destination_pointer, index_register, value_variable.address); } BrilligVariable::BrilligArray(_) => { let reference: MemoryAddress = ctx.allocate_register(); @@ -924,7 +942,7 @@ impl<'block> BrilligBlock<'block> { results[0], dfg, ) { - BrilligVariable::Simple(register_index) => register_index, + BrilligVariable::SingleAddr(register_index) => register_index, _ => unreachable!("ICE: first value of a slice must be a register index"), }; @@ -940,7 +958,7 @@ impl<'block> BrilligBlock<'block> { self.convert_ssa_value(*arg, dfg) }); - self.update_slice_length(target_len, arguments[0], dfg, BinaryIntOp::Add); + self.update_slice_length(target_len.address, arguments[0], dfg, BinaryIntOp::Add); 
self.slice_push_back_operation(target_vector, source_vector, &item_values); } @@ -951,7 +969,7 @@ impl<'block> BrilligBlock<'block> { results[0], dfg, ) { - BrilligVariable::Simple(register_index) => register_index, + BrilligVariable::SingleAddr(register_index) => register_index, _ => unreachable!("ICE: first value of a slice must be a register index"), }; @@ -966,7 +984,7 @@ impl<'block> BrilligBlock<'block> { self.convert_ssa_value(*arg, dfg) }); - self.update_slice_length(target_len, arguments[0], dfg, BinaryIntOp::Add); + self.update_slice_length(target_len.address, arguments[0], dfg, BinaryIntOp::Add); self.slice_push_front_operation(target_vector, source_vector, &item_values); } @@ -977,7 +995,7 @@ impl<'block> BrilligBlock<'block> { results[0], dfg, ) { - BrilligVariable::Simple(register_index) => register_index, + BrilligVariable::SingleAddr(register_index) => register_index, _ => unreachable!("ICE: first value of a slice must be a register index"), }; @@ -999,7 +1017,7 @@ impl<'block> BrilligBlock<'block> { ) }); - self.update_slice_length(target_len, arguments[0], dfg, BinaryIntOp::Sub); + self.update_slice_length(target_len.address, arguments[0], dfg, BinaryIntOp::Sub); self.slice_pop_back_operation(target_vector, source_vector, &pop_variables); } @@ -1010,7 +1028,7 @@ impl<'block> BrilligBlock<'block> { results[element_size], dfg, ) { - BrilligVariable::Simple(register_index) => register_index, + BrilligVariable::SingleAddr(register_index) => register_index, _ => unreachable!("ICE: first value of a slice must be a register index"), }; @@ -1031,7 +1049,7 @@ impl<'block> BrilligBlock<'block> { ); let target_vector = target_variable.extract_vector(); - self.update_slice_length(target_len, arguments[0], dfg, BinaryIntOp::Sub); + self.update_slice_length(target_len.address, arguments[0], dfg, BinaryIntOp::Sub); self.slice_pop_front_operation(target_vector, source_vector, &pop_variables); } @@ -1042,7 +1060,7 @@ impl<'block> BrilligBlock<'block> { results[0], 
dfg, ) { - BrilligVariable::Simple(register_index) => register_index, + BrilligVariable::SingleAddr(register_index) => register_index, _ => unreachable!("ICE: first value of a slice must be a register index"), }; @@ -1058,13 +1076,13 @@ impl<'block> BrilligBlock<'block> { // Remove if indexing in insert is changed to flattened indexing // https://github.com/noir-lang/noir/issues/1889#issuecomment-1668048587 - let user_index = self.convert_ssa_register_value(arguments[2], dfg); + let user_index = self.convert_ssa_single_addr_value(arguments[2], dfg); let converted_index = self.brillig_context.make_usize_constant(element_size.into()); self.brillig_context.memory_op( converted_index, - user_index, + user_index.address, converted_index, BinaryIntOp::Mul, ); @@ -1073,7 +1091,7 @@ impl<'block> BrilligBlock<'block> { self.convert_ssa_value(*arg, dfg) }); - self.update_slice_length(target_len, arguments[0], dfg, BinaryIntOp::Add); + self.update_slice_length(target_len.address, arguments[0], dfg, BinaryIntOp::Add); self.slice_insert_operation(target_vector, source_vector, converted_index, &items); self.brillig_context.deallocate_register(converted_index); @@ -1085,7 +1103,7 @@ impl<'block> BrilligBlock<'block> { results[0], dfg, ) { - BrilligVariable::Simple(register_index) => register_index, + BrilligVariable::SingleAddr(register_index) => register_index, _ => unreachable!("ICE: first value of a slice must be a register index"), }; @@ -1101,12 +1119,12 @@ impl<'block> BrilligBlock<'block> { // Remove if indexing in remove is changed to flattened indexing // https://github.com/noir-lang/noir/issues/1889#issuecomment-1668048587 - let user_index = self.convert_ssa_register_value(arguments[2], dfg); + let user_index = self.convert_ssa_single_addr_value(arguments[2], dfg); let converted_index = self.brillig_context.make_usize_constant(element_size.into()); self.brillig_context.memory_op( converted_index, - user_index, + user_index.address, converted_index, BinaryIntOp::Mul, ); 
@@ -1120,7 +1138,7 @@ impl<'block> BrilligBlock<'block> { ) }); - self.update_slice_length(target_len, arguments[0], dfg, BinaryIntOp::Sub); + self.update_slice_length(target_len.address, arguments[0], dfg, BinaryIntOp::Sub); self.slice_remove_operation( target_vector, @@ -1152,18 +1170,18 @@ impl<'block> BrilligBlock<'block> { binary_op: BinaryIntOp, ) { let source_len_variable = self.convert_ssa_value(source_value, dfg); - let source_len = source_len_variable.extract_register(); + let source_len = source_len_variable.extract_single_addr(); - self.brillig_context.usize_op(source_len, target_len, binary_op, 1); + self.brillig_context.usize_op(source_len.address, target_len, binary_op, 1); } /// Converts an SSA cast to a sequence of Brillig opcodes. /// Casting is only necessary when shrinking the bit size of a numeric value. - fn convert_cast(&mut self, destination: MemoryAddress, source: MemoryAddress, typ: &Type) { + fn convert_cast(&mut self, destination: SingleAddrVariable, source: SingleAddrVariable) { // We assume that `source` is a valid `target_type` as it's expected that a truncate instruction was emitted // to ensure this is the case. - self.brillig_context.cast_instruction(destination, source, get_bit_size_from_ssa_type(typ)); + self.brillig_context.cast_instruction(destination, source); } /// Converts the Binary instruction into a sequence of Brillig opcodes. 
@@ -1171,18 +1189,23 @@ impl<'block> BrilligBlock<'block> { &mut self, binary: &Binary, dfg: &DataFlowGraph, - result_register: MemoryAddress, + result_variable: SingleAddrVariable, ) { let binary_type = type_of_binary_operation(dfg[binary.lhs].get_type(), dfg[binary.rhs].get_type()); - let left = self.convert_ssa_register_value(binary.lhs, dfg); - let right = self.convert_ssa_register_value(binary.rhs, dfg); + let left = self.convert_ssa_single_addr_value(binary.lhs, dfg); + let right = self.convert_ssa_single_addr_value(binary.rhs, dfg); let brillig_binary_op = convert_ssa_binary_op_to_brillig_binary_op(binary.operator, &binary_type); - self.brillig_context.binary_instruction(left, right, result_register, brillig_binary_op); + self.brillig_context.binary_instruction( + left.address, + right.address, + result_variable.address, + brillig_binary_op, + ); } /// Converts an SSA `ValueId` into a `RegisterOrMemory`. Initializes if necessary. @@ -1204,10 +1227,10 @@ impl<'block> BrilligBlock<'block> { } else { let new_variable = self.variables.allocate_constant(self.brillig_context, value_id, dfg); - let register_index = new_variable.extract_register(); + let register_index = new_variable.extract_single_addr(); self.brillig_context.const_instruction( - register_index, + register_index.address, (*constant).into(), get_bit_size_from_ssa_type(typ), ); @@ -1273,10 +1296,10 @@ impl<'block> BrilligBlock<'block> { // value. let new_variable = self.variables.allocate_constant(self.brillig_context, value_id, dfg); - let register_index = new_variable.extract_register(); + let register_index = new_variable.extract_single_addr(); self.brillig_context.const_instruction( - register_index, + register_index.address, value_id.to_usize().into(), 32, ); @@ -1289,13 +1312,13 @@ impl<'block> BrilligBlock<'block> { } /// Converts an SSA `ValueId` into a `MemoryAddress`. Initializes if necessary. 
- fn convert_ssa_register_value( + fn convert_ssa_single_addr_value( &mut self, value_id: ValueId, dfg: &DataFlowGraph, - ) -> MemoryAddress { + ) -> SingleAddrVariable { let variable = self.convert_ssa_value(value_id, dfg); - variable.extract_register() + variable.extract_single_addr() } fn allocate_external_call_result( diff --git a/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block_variables.rs b/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block_variables.rs index b4c96de1969..f463bd4de4d 100644 --- a/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block_variables.rs +++ b/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block_variables.rs @@ -1,10 +1,9 @@ -use acvm::brillig_vm::brillig::MemoryAddress; use fxhash::{FxHashMap as HashMap, FxHashSet as HashSet}; use crate::{ brillig::brillig_ir::{ - brillig_variable::{BrilligArray, BrilligVariable, BrilligVector}, - BrilligContext, + brillig_variable::{BrilligArray, BrilligVariable, BrilligVector, SingleAddrVariable}, + BrilligContext, BRILLIG_MEMORY_ADDRESSING_BIT_SIZE, }, ssa::ir::{ basic_block::BasicBlockId, @@ -71,15 +70,15 @@ impl BlockVariables { } /// Defines a variable that fits in a single register and returns the allocated register. - pub(crate) fn define_register_variable( + pub(crate) fn define_single_addr_variable( &mut self, function_context: &mut FunctionContext, brillig_context: &mut BrilligContext, value: ValueId, dfg: &DataFlowGraph, - ) -> MemoryAddress { + ) -> SingleAddrVariable { let variable = self.define_variable(function_context, brillig_context, value, dfg); - variable.extract_register() + variable.extract_single_addr() } /// Removes a variable so it's not used anymore within this block. 
@@ -190,12 +189,22 @@ pub(crate) fn allocate_value( let typ = dfg.type_of_value(value_id); match typ { - Type::Numeric(_) | Type::Reference(_) | Type::Function => { + Type::Numeric(numeric_type) => BrilligVariable::SingleAddr(SingleAddrVariable { + address: brillig_context.allocate_register(), + bit_size: numeric_type.bit_size(), + }), + Type::Reference(_) => BrilligVariable::SingleAddr(SingleAddrVariable { + address: brillig_context.allocate_register(), + bit_size: BRILLIG_MEMORY_ADDRESSING_BIT_SIZE, + }), + Type::Function => { // NB. function references are converted to a constant when // translating from SSA to Brillig (to allow for debugger // instrumentation to work properly) - let register = brillig_context.allocate_register(); - BrilligVariable::Simple(register) + BrilligVariable::SingleAddr(SingleAddrVariable { + address: brillig_context.allocate_register(), + bit_size: 32, + }) } Type::Array(item_typ, elem_count) => { let pointer_register = brillig_context.allocate_register(); diff --git a/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_fn.rs b/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_fn.rs index e96a756a9ee..b5da8296ba5 100644 --- a/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_fn.rs +++ b/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_fn.rs @@ -74,7 +74,7 @@ impl FunctionContext { fn ssa_type_to_parameter(typ: &Type) -> BrilligParameter { match typ { Type::Numeric(_) | Type::Reference(_) => { - BrilligParameter::Simple(get_bit_size_from_ssa_type(typ)) + BrilligParameter::SingleAddr(get_bit_size_from_ssa_type(typ)) } Type::Array(item_type, size) => BrilligParameter::Array( vecmap(item_type.iter(), |item_typ| { diff --git a/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_slice_ops.rs b/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_slice_ops.rs index 933396be0cb..3fc0e981165 100644 --- a/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_slice_ops.rs +++ 
b/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_slice_ops.rs @@ -334,7 +334,7 @@ mod tests { use crate::brillig::brillig_gen::brillig_fn::FunctionContext; use crate::brillig::brillig_ir::artifact::BrilligParameter; use crate::brillig::brillig_ir::brillig_variable::{ - BrilligArray, BrilligVariable, BrilligVector, + BrilligArray, BrilligVariable, BrilligVector, SingleAddrVariable, }; use crate::brillig::brillig_ir::tests::{ create_and_run_vm, create_context, create_entry_point_bytecode, @@ -379,13 +379,13 @@ mod tests { ) { let arguments = vec![ BrilligParameter::Array( - vec![BrilligParameter::Simple(BRILLIG_MEMORY_ADDRESSING_BIT_SIZE)], + vec![BrilligParameter::SingleAddr(BRILLIG_MEMORY_ADDRESSING_BIT_SIZE)], array.len(), ), - BrilligParameter::Simple(BRILLIG_MEMORY_ADDRESSING_BIT_SIZE), + BrilligParameter::SingleAddr(BRILLIG_MEMORY_ADDRESSING_BIT_SIZE), ]; let returns = vec![BrilligParameter::Array( - vec![BrilligParameter::Simple(BRILLIG_MEMORY_ADDRESSING_BIT_SIZE)], + vec![BrilligParameter::SingleAddr(BRILLIG_MEMORY_ADDRESSING_BIT_SIZE)], array.len() + 1, )]; @@ -397,7 +397,10 @@ mod tests { size: array.len(), rc: context.allocate_register(), }; - let item_to_insert = context.allocate_register(); + let item_to_insert = SingleAddrVariable { + address: context.allocate_register(), + bit_size: BRILLIG_MEMORY_ADDRESSING_BIT_SIZE, + }; // Cast the source array to a vector let source_vector = context.array_to_vector(&array_variable); @@ -415,13 +418,13 @@ mod tests { block.slice_push_back_operation( target_vector, source_vector, - &[BrilligVariable::Simple(item_to_insert)], + &[BrilligVariable::SingleAddr(item_to_insert)], ); } else { block.slice_push_front_operation( target_vector, source_vector, - &[BrilligVariable::Simple(item_to_insert)], + &[BrilligVariable::SingleAddr(item_to_insert)], ); } @@ -472,15 +475,15 @@ mod tests { expected_return_item: Value, ) { let arguments = vec![BrilligParameter::Array( - 
vec![BrilligParameter::Simple(BRILLIG_MEMORY_ADDRESSING_BIT_SIZE)], + vec![BrilligParameter::SingleAddr(BRILLIG_MEMORY_ADDRESSING_BIT_SIZE)], array.len(), )]; let returns = vec![ BrilligParameter::Array( - vec![BrilligParameter::Simple(BRILLIG_MEMORY_ADDRESSING_BIT_SIZE)], + vec![BrilligParameter::SingleAddr(BRILLIG_MEMORY_ADDRESSING_BIT_SIZE)], array.len() - 1, ), - BrilligParameter::Simple(BRILLIG_MEMORY_ADDRESSING_BIT_SIZE), + BrilligParameter::SingleAddr(BRILLIG_MEMORY_ADDRESSING_BIT_SIZE), ]; let (_, mut function_context, mut context) = create_test_environment(); @@ -501,7 +504,10 @@ mod tests { size: context.allocate_register(), rc: context.allocate_register(), }; - let removed_item = context.allocate_register(); + let removed_item = SingleAddrVariable { + address: context.allocate_register(), + bit_size: BRILLIG_MEMORY_ADDRESSING_BIT_SIZE, + }; let mut block = create_brillig_block(&mut function_context, &mut context); @@ -509,17 +515,21 @@ mod tests { block.slice_pop_back_operation( target_vector, source_vector, - &[BrilligVariable::Simple(removed_item)], + &[BrilligVariable::SingleAddr(removed_item)], ); } else { block.slice_pop_front_operation( target_vector, source_vector, - &[BrilligVariable::Simple(removed_item)], + &[BrilligVariable::SingleAddr(removed_item)], ); } - context.return_instruction(&[target_vector.pointer, target_vector.rc, removed_item]); + context.return_instruction(&[ + target_vector.pointer, + target_vector.rc, + removed_item.address, + ]); let bytecode = create_entry_point_bytecode(context, arguments, returns).byte_code; let expected_return: Vec<_> = @@ -559,14 +569,14 @@ mod tests { ) { let arguments = vec![ BrilligParameter::Array( - vec![BrilligParameter::Simple(BRILLIG_MEMORY_ADDRESSING_BIT_SIZE)], + vec![BrilligParameter::SingleAddr(BRILLIG_MEMORY_ADDRESSING_BIT_SIZE)], array.len(), ), - BrilligParameter::Simple(BRILLIG_MEMORY_ADDRESSING_BIT_SIZE), - BrilligParameter::Simple(BRILLIG_MEMORY_ADDRESSING_BIT_SIZE), + 
BrilligParameter::SingleAddr(BRILLIG_MEMORY_ADDRESSING_BIT_SIZE), + BrilligParameter::SingleAddr(BRILLIG_MEMORY_ADDRESSING_BIT_SIZE), ]; let returns = vec![BrilligParameter::Array( - vec![BrilligParameter::Simple(BRILLIG_MEMORY_ADDRESSING_BIT_SIZE)], + vec![BrilligParameter::SingleAddr(BRILLIG_MEMORY_ADDRESSING_BIT_SIZE)], array.len() + 1, )]; @@ -578,7 +588,10 @@ mod tests { size: array.len(), rc: context.allocate_register(), }; - let item_to_insert = context.allocate_register(); + let item_to_insert = SingleAddrVariable { + address: context.allocate_register(), + bit_size: BRILLIG_MEMORY_ADDRESSING_BIT_SIZE, + }; let index_to_insert = context.allocate_register(); // Cast the source array to a vector @@ -597,7 +610,7 @@ mod tests { target_vector, source_vector, index_to_insert, - &[BrilligVariable::Simple(item_to_insert)], + &[BrilligVariable::SingleAddr(item_to_insert)], ); context.return_instruction(&[target_vector.pointer, target_vector.rc]); @@ -676,17 +689,17 @@ mod tests { ) { let arguments = vec![ BrilligParameter::Array( - vec![BrilligParameter::Simple(BRILLIG_MEMORY_ADDRESSING_BIT_SIZE)], + vec![BrilligParameter::SingleAddr(BRILLIG_MEMORY_ADDRESSING_BIT_SIZE)], array.len(), ), - BrilligParameter::Simple(BRILLIG_MEMORY_ADDRESSING_BIT_SIZE), + BrilligParameter::SingleAddr(BRILLIG_MEMORY_ADDRESSING_BIT_SIZE), ]; let returns = vec![ BrilligParameter::Array( - vec![BrilligParameter::Simple(BRILLIG_MEMORY_ADDRESSING_BIT_SIZE)], + vec![BrilligParameter::SingleAddr(BRILLIG_MEMORY_ADDRESSING_BIT_SIZE)], array.len() - 1, ), - BrilligParameter::Simple(BRILLIG_MEMORY_ADDRESSING_BIT_SIZE), + BrilligParameter::SingleAddr(BRILLIG_MEMORY_ADDRESSING_BIT_SIZE), ]; let (_, mut function_context, mut context) = create_test_environment(); @@ -708,7 +721,10 @@ mod tests { size: context.allocate_register(), rc: context.allocate_register(), }; - let removed_item = context.allocate_register(); + let removed_item = SingleAddrVariable { + address: context.allocate_register(), + 
bit_size: BRILLIG_MEMORY_ADDRESSING_BIT_SIZE, + }; let mut block = create_brillig_block(&mut function_context, &mut context); @@ -716,10 +732,14 @@ mod tests { target_vector, source_vector, index_to_insert, - &[BrilligVariable::Simple(removed_item)], + &[BrilligVariable::SingleAddr(removed_item)], ); - context.return_instruction(&[target_vector.pointer, target_vector.size, removed_item]); + context.return_instruction(&[ + target_vector.pointer, + target_vector.size, + removed_item.address, + ]); let calldata: Vec<_> = array.into_iter().chain(vec![index]).collect(); diff --git a/compiler/noirc_evaluator/src/brillig/brillig_ir.rs b/compiler/noirc_evaluator/src/brillig/brillig_ir.rs index 8bbde88c89e..073b0e6f59f 100644 --- a/compiler/noirc_evaluator/src/brillig/brillig_ir.rs +++ b/compiler/noirc_evaluator/src/brillig/brillig_ir.rs @@ -15,7 +15,7 @@ use crate::ssa::ir::dfg::CallStack; use self::{ artifact::{BrilligArtifact, UnresolvedJumpLocation}, - brillig_variable::{BrilligArray, BrilligVariable, BrilligVector}, + brillig_variable::{BrilligArray, BrilligVariable, BrilligVector, SingleAddrVariable}, registers::BrilligRegistersContext, }; use acvm::{ @@ -27,6 +27,7 @@ use acvm::{ FieldElement, }; use debug_show::DebugShow; +use num_bigint::BigUint; /// Integer arithmetic in Brillig is limited to 127 bit /// integers. 
@@ -189,7 +190,7 @@ impl BrilligContext { self.deallocate_register(size_register); } - pub(crate) fn allocate_simple_reference_instruction( + pub(crate) fn allocate_single_addr_reference_instruction( &mut self, pointer_register: MemoryAddress, ) { @@ -295,18 +296,21 @@ impl BrilligContext { // Loop body // Check if iterator < iteration_count - let iterator_less_than_iterations = self.allocate_register(); + let iterator_less_than_iterations = + SingleAddrVariable { address: self.allocate_register(), bit_size: 1 }; + self.memory_op( iterator_register, iteration_count, - iterator_less_than_iterations, + iterator_less_than_iterations.address, BinaryIntOp::LessThan, ); let (exit_loop_section, exit_loop_label) = self.reserve_next_section_label(); - self.not_instruction(iterator_less_than_iterations, 1, iterator_less_than_iterations); - self.jump_if_instruction(iterator_less_than_iterations, exit_loop_label); + self.not_instruction(iterator_less_than_iterations, iterator_less_than_iterations); + + self.jump_if_instruction(iterator_less_than_iterations.address, exit_loop_label); // Call the on iteration function on_iteration(self, iterator_register); @@ -320,7 +324,7 @@ impl BrilligContext { self.enter_section(exit_loop_section); // Deallocate our temporary registers - self.deallocate_register(iterator_less_than_iterations); + self.deallocate_register(iterator_less_than_iterations.address); self.deallocate_register(iterator_register); } @@ -507,12 +511,15 @@ impl BrilligContext { /// Cast truncates the value to the given bit size and converts the type of the value in memory to that bit size. 
pub(crate) fn cast_instruction( &mut self, - destination: MemoryAddress, - source: MemoryAddress, - bit_size: u32, + destination: SingleAddrVariable, + source: SingleAddrVariable, ) { - self.debug_show.cast_instruction(destination, source, bit_size); - self.push_opcode(BrilligOpcode::Cast { destination, source, bit_size }); + self.debug_show.cast_instruction(destination.address, source.address, destination.bit_size); + self.push_opcode(BrilligOpcode::Cast { + destination: destination.address, + source: source.address, + bit_size: destination.bit_size, + }); } /// Processes a binary instruction according `operation`. @@ -564,21 +571,20 @@ impl BrilligContext { /// in Brillig. pub(crate) fn not_instruction( &mut self, - input: MemoryAddress, - bit_size: u32, - result: MemoryAddress, + input: SingleAddrVariable, + result: SingleAddrVariable, ) { - self.debug_show.not_instruction(input, bit_size, result); + self.debug_show.not_instruction(input.address, input.bit_size, result.address); // Compile !x as ((-1) - x) - let u_max = FieldElement::from(2_i128).pow(&FieldElement::from(bit_size as i128)) + let u_max = FieldElement::from(2_i128).pow(&FieldElement::from(input.bit_size as i128)) - FieldElement::one(); - let max = self.make_constant(Value::from(u_max), bit_size); + let max = self.make_constant(Value::from(u_max), input.bit_size); let opcode = BrilligOpcode::BinaryIntOp { - destination: result, + destination: result.address, op: BinaryIntOp::Sub, - bit_size, + bit_size: input.bit_size, lhs: max, - rhs: input, + rhs: input.address, }; self.push_opcode(opcode); self.deallocate_register(max); @@ -626,8 +632,8 @@ impl BrilligContext { variable_pointer: MemoryAddress, ) { match destination { - BrilligVariable::Simple(register_index) => { - self.load_instruction(register_index, variable_pointer); + BrilligVariable::SingleAddr(single_addr) => { + self.load_instruction(single_addr.address, variable_pointer); } BrilligVariable::BrilligArray(BrilligArray { pointer, size: _, 
rc }) => { self.load_instruction(pointer, variable_pointer); @@ -676,8 +682,8 @@ impl BrilligContext { source: BrilligVariable, ) { match source { - BrilligVariable::Simple(register_index) => { - self.store_instruction(variable_pointer, register_index); + BrilligVariable::SingleAddr(single_addr) => { + self.store_instruction(variable_pointer, single_addr.address); } BrilligVariable::BrilligArray(BrilligArray { pointer, size: _, rc }) => { self.store_instruction(variable_pointer, pointer); @@ -717,31 +723,36 @@ impl BrilligContext { /// For Brillig, all integer operations will overflow as its cheap. pub(crate) fn truncate_instruction( &mut self, - destination_of_truncated_value: MemoryAddress, - value_to_truncate: MemoryAddress, + destination_of_truncated_value: SingleAddrVariable, + value_to_truncate: SingleAddrVariable, bit_size: u32, ) { self.debug_show.truncate_instruction( - destination_of_truncated_value, - value_to_truncate, + destination_of_truncated_value.address, + value_to_truncate.address, bit_size, ); assert!( - bit_size <= BRILLIG_INTEGER_ARITHMETIC_BIT_SIZE, - "tried to truncate to a bit size greater than allowed {bit_size}" + bit_size <= value_to_truncate.bit_size, + "tried to truncate to a bit size {} greater than the variable size {}", + bit_size, + value_to_truncate.bit_size + ); + + let mask = BigUint::from(2_u32).pow(bit_size) - BigUint::from(1_u32); + let mask_constant = self.make_constant( + FieldElement::from_be_bytes_reduce(&mask.to_bytes_be()).into(), + value_to_truncate.bit_size, ); - // The brillig VM performs all arithmetic operations modulo 2**bit_size - // So to truncate any value to a target bit size we can just issue a no-op arithmetic operation - // With bit size equal to target_bit_size - let zero_register = self.make_constant(Value::from(FieldElement::zero()), bit_size); self.binary_instruction( - value_to_truncate, - zero_register, - destination_of_truncated_value, - BrilligBinaryOp::Integer { op: BinaryIntOp::Add, bit_size }, + 
value_to_truncate.address, + mask_constant, + destination_of_truncated_value.address, + BrilligBinaryOp::Integer { op: BinaryIntOp::And, bit_size: value_to_truncate.bit_size }, ); - self.deallocate_register(zero_register); + + self.deallocate_register(mask_constant); } /// Emits a stop instruction diff --git a/compiler/noirc_evaluator/src/brillig/brillig_ir/artifact.rs b/compiler/noirc_evaluator/src/brillig/brillig_ir/artifact.rs index 4ef8c9d1dfc..d10dcf13d9f 100644 --- a/compiler/noirc_evaluator/src/brillig/brillig_ir/artifact.rs +++ b/compiler/noirc_evaluator/src/brillig/brillig_ir/artifact.rs @@ -6,8 +6,8 @@ use crate::ssa::ir::dfg::CallStack; /// Represents a parameter or a return value of a function. #[derive(Debug, Clone)] pub(crate) enum BrilligParameter { - /// A simple parameter or return value. Holds the bit size of the parameter. - Simple(u32), + /// A single address parameter or return value. Holds the bit size of the parameter. + SingleAddr(u32), /// An array parameter or return value. Holds the type of an array item and its size. Array(Vec, usize), /// A slice parameter or return value. Holds the type of a slice item. 
diff --git a/compiler/noirc_evaluator/src/brillig/brillig_ir/brillig_variable.rs b/compiler/noirc_evaluator/src/brillig/brillig_ir/brillig_variable.rs index 856fb709fa9..48ad3c5bae4 100644 --- a/compiler/noirc_evaluator/src/brillig/brillig_ir/brillig_variable.rs +++ b/compiler/noirc_evaluator/src/brillig/brillig_ir/brillig_variable.rs @@ -5,6 +5,12 @@ use serde::{Deserialize, Serialize}; use crate::ssa::ir::types::Type; +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, Copy)] +pub(crate) struct SingleAddrVariable { + pub(crate) address: MemoryAddress, + pub(crate) bit_size: u32, +} + /// The representation of a noir array in the Brillig IR #[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, Copy)] pub(crate) struct BrilligArray { @@ -52,15 +58,15 @@ impl BrilligVector { /// The representation of a noir value in the Brillig IR #[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, Copy)] pub(crate) enum BrilligVariable { - Simple(MemoryAddress), + SingleAddr(SingleAddrVariable), BrilligArray(BrilligArray), BrilligVector(BrilligVector), } impl BrilligVariable { - pub(crate) fn extract_register(self) -> MemoryAddress { + pub(crate) fn extract_single_addr(self) -> SingleAddrVariable { match self { - BrilligVariable::Simple(register_index) => register_index, + BrilligVariable::SingleAddr(single_addr) => single_addr, _ => unreachable!("ICE: Expected register, got {self:?}"), } } @@ -81,15 +87,17 @@ impl BrilligVariable { pub(crate) fn extract_registers(self) -> Vec { match self { - BrilligVariable::Simple(register_index) => vec![register_index], + BrilligVariable::SingleAddr(single_addr) => vec![single_addr.address], BrilligVariable::BrilligArray(array) => array.extract_registers(), BrilligVariable::BrilligVector(vector) => vector.extract_registers(), } } - pub(crate) fn to_register_or_memory(self) -> ValueOrArray { + pub(crate) fn to_value_or_array(self) -> ValueOrArray { match self { - BrilligVariable::Simple(register_index) => 
ValueOrArray::MemoryAddress(register_index), + BrilligVariable::SingleAddr(single_addr) => { + ValueOrArray::MemoryAddress(single_addr.address) + } BrilligVariable::BrilligArray(array) => ValueOrArray::HeapArray(array.to_heap_array()), BrilligVariable::BrilligVector(vector) => { ValueOrArray::HeapVector(vector.to_heap_vector()) diff --git a/compiler/noirc_evaluator/src/brillig/brillig_ir/entry_point.rs b/compiler/noirc_evaluator/src/brillig/brillig_ir/entry_point.rs index 0eb4c8c31bd..9d186f9bc60 100644 --- a/compiler/noirc_evaluator/src/brillig/brillig_ir/entry_point.rs +++ b/compiler/noirc_evaluator/src/brillig/brillig_ir/entry_point.rs @@ -1,6 +1,6 @@ use super::{ artifact::{BrilligArtifact, BrilligParameter}, - brillig_variable::{BrilligArray, BrilligVariable}, + brillig_variable::{BrilligArray, BrilligVariable, SingleAddrVariable}, debug_show::DebugShow, registers::BrilligRegistersContext, BrilligContext, ReservedRegisters, BRILLIG_MEMORY_ADDRESSING_BIT_SIZE, @@ -63,10 +63,13 @@ impl BrilligContext { let mut argument_variables: Vec<_> = arguments .iter() .map(|argument| match argument { - BrilligParameter::Simple(_) => { - let simple_address = self.allocate_register(); - let var = BrilligVariable::Simple(simple_address); - self.mov_instruction(simple_address, MemoryAddress(current_calldata_pointer)); + BrilligParameter::SingleAddr(bit_size) => { + let single_address = self.allocate_register(); + let var = BrilligVariable::SingleAddr(SingleAddrVariable { + address: single_address, + bit_size: *bit_size, + }); + self.mov_instruction(single_address, MemoryAddress(current_calldata_pointer)); current_calldata_pointer += 1; var } @@ -116,7 +119,7 @@ impl BrilligContext { fn flat_bit_sizes(param: &BrilligParameter) -> Box + '_> { match param { - BrilligParameter::Simple(bit_size) => Box::new(std::iter::once(*bit_size)), + BrilligParameter::SingleAddr(bit_size) => Box::new(std::iter::once(*bit_size)), BrilligParameter::Array(item_types, item_count) => Box::new( 
(0..*item_count).flat_map(move |_| item_types.iter().flat_map(flat_bit_sizes)), ), @@ -139,7 +142,7 @@ impl BrilligContext { /// Computes the size of a parameter if it was flattened fn flattened_size(param: &BrilligParameter) -> usize { match param { - BrilligParameter::Simple(_) => 1, + BrilligParameter::SingleAddr(_) => 1, BrilligParameter::Array(item_types, item_count) => { let item_size: usize = item_types.iter().map(BrilligContext::flattened_size).sum(); item_count * item_size @@ -157,7 +160,7 @@ impl BrilligContext { /// Computes the size of a parameter if it was flattened fn has_nested_arrays(tuple: &[BrilligParameter]) -> bool { - tuple.iter().any(|param| !matches!(param, BrilligParameter::Simple(_))) + tuple.iter().any(|param| !matches!(param, BrilligParameter::SingleAddr(_))) } /// Deflatten an array by recursively allocating nested arrays and copying the plain values. @@ -194,7 +197,7 @@ impl BrilligContext { self.make_usize_constant((target_item_base_index + subitem_index).into()); match subitem { - BrilligParameter::Simple(_) => { + BrilligParameter::SingleAddr(_) => { self.array_get( flattened_array_pointer, source_index, @@ -279,7 +282,12 @@ impl BrilligContext { let returned_variables: Vec<_> = return_parameters .iter() .map(|return_parameter| match return_parameter { - BrilligParameter::Simple(_) => BrilligVariable::Simple(self.allocate_register()), + BrilligParameter::SingleAddr(bit_size) => { + BrilligVariable::SingleAddr(SingleAddrVariable { + address: self.allocate_register(), + bit_size: *bit_size, + }) + } BrilligParameter::Array(item_types, item_count) => { BrilligVariable::BrilligArray(BrilligArray { pointer: self.allocate_register(), @@ -301,10 +309,10 @@ impl BrilligContext { for (return_param, returned_variable) in return_parameters.iter().zip(&returned_variables) { match return_param { - BrilligParameter::Simple(_) => { + BrilligParameter::SingleAddr(_) => { self.mov_instruction( MemoryAddress(return_data_index), - 
returned_variable.extract_register(), + returned_variable.extract_single_addr().address, ); return_data_index += 1; } @@ -359,7 +367,7 @@ impl BrilligContext { self.make_usize_constant((target_item_base_index + target_offset).into()); match subitem { - BrilligParameter::Simple(_) => { + BrilligParameter::SingleAddr(_) => { self.array_get( deflattened_array_pointer, source_index, @@ -468,12 +476,12 @@ mod tests { ]; let arguments = vec![BrilligParameter::Array( vec![ - BrilligParameter::Array(vec![BrilligParameter::Simple(8)], 2), - BrilligParameter::Simple(8), + BrilligParameter::Array(vec![BrilligParameter::SingleAddr(8)], 2), + BrilligParameter::SingleAddr(8), ], 2, )]; - let returns = vec![BrilligParameter::Simple(8)]; + let returns = vec![BrilligParameter::SingleAddr(8)]; let mut context = create_context(); @@ -506,8 +514,8 @@ mod tests { ]; let array_param = BrilligParameter::Array( vec![ - BrilligParameter::Array(vec![BrilligParameter::Simple(8)], 2), - BrilligParameter::Simple(8), + BrilligParameter::Array(vec![BrilligParameter::SingleAddr(8)], 2), + BrilligParameter::SingleAddr(8), ], 2, ); From ba2c541ec45de92bba98de34771b73cbb7865c93 Mon Sep 17 00:00:00 2001 From: Maxim Vezenov Date: Wed, 14 Feb 2024 18:09:02 +0000 Subject: [PATCH 11/39] fix(acir): Use types on dynamic arrays (#4364) # Description ## Problem\* Resolves #4356 Supercedes https://github.com/noir-lang/noir/pull/4360 ## Summary\* An ACIR dynamic array is a pointer to flat memory. We have been treating this flat memory as a list of fields, however, this breaks if we do in fact need accurate numeric type information such as when working black box function inputs. For example for hash inputs we set up the byte array based upon the bit size. This needs to be the correct bit size or else we will get a lot of extra garbage when calling `fetch_nearest_bytes` on a FieldElement. This PR attaches a list of `Vec` to the `AcirDynamicArray` structure. 
This gives us the expected output result for `sha` then. We probably could restrict the `AcirDynamicArray` to be created only through a constructor where we check that the `value_types` match the supplied len in size. I left it for a follow-up as this is a quick fix but I can do it as part of this PR. ## Additional Context ## Documentation\* Check one: - [ ] No documentation needed. - [ ] Documentation included in this PR. - [ ] **[Exceptional Case]** Documentation to be submitted in a separate PR. # PR Checklist\* - [ ] I have tested the changes locally. - [ ] I have formatted the changes with [Prettier](https://prettier.io/) and/or `cargo fmt` on default settings. --------- Co-authored-by: TomAFrench --- .../src/ssa/acir_gen/acir_ir/acir_variable.rs | 17 +++- .../noirc_evaluator/src/ssa/acir_gen/mod.rs | 87 ++++++++++++++++--- .../array_dynamic_blackbox_input/Nargo.toml | 7 ++ .../array_dynamic_blackbox_input/Prover.toml | 4 + .../array_dynamic_blackbox_input/src/main.nr | 27 ++++++ .../Nargo.toml | 7 ++ .../Prover.toml | 23 +++++ .../src/main.nr | 20 +++++ 8 files changed, 177 insertions(+), 15 deletions(-) create mode 100644 test_programs/execution_success/array_dynamic_blackbox_input/Nargo.toml create mode 100644 test_programs/execution_success/array_dynamic_blackbox_input/Prover.toml create mode 100644 test_programs/execution_success/array_dynamic_blackbox_input/src/main.nr create mode 100644 test_programs/execution_success/array_dynamic_nested_blackbox_input/Nargo.toml create mode 100644 test_programs/execution_success/array_dynamic_nested_blackbox_input/Prover.toml create mode 100644 test_programs/execution_success/array_dynamic_nested_blackbox_input/src/main.nr diff --git a/compiler/noirc_evaluator/src/ssa/acir_gen/acir_ir/acir_variable.rs b/compiler/noirc_evaluator/src/ssa/acir_gen/acir_ir/acir_variable.rs index 2360d053887..fb11bae556c 100644 --- a/compiler/noirc_evaluator/src/ssa/acir_gen/acir_ir/acir_variable.rs +++ 
b/compiler/noirc_evaluator/src/ssa/acir_gen/acir_ir/acir_variable.rs @@ -67,6 +67,13 @@ impl AcirType { pub(crate) fn unsigned(bit_size: u32) -> Self { AcirType::NumericType(NumericType::Unsigned { bit_size }) } + + pub(crate) fn to_numeric_type(&self) -> NumericType { + match self { + AcirType::NumericType(numeric_type) => *numeric_type, + AcirType::Array(_, _) => unreachable!("cannot fetch a numeric type for an array type"), + } + } } impl From for AcirType { @@ -88,6 +95,12 @@ impl<'a> From<&'a SsaType> for AcirType { } } +impl From for AcirType { + fn from(value: NumericType) -> Self { + AcirType::NumericType(value) + } +} + #[derive(Debug, Default)] /// Context object which holds the relationship between /// `Variables`(AcirVar) and types such as `Expression` and `Witness` @@ -1415,13 +1428,13 @@ impl AcirContext { } Ok(values) } - AcirValue::DynamicArray(AcirDynamicArray { block_id, len, .. }) => { + AcirValue::DynamicArray(AcirDynamicArray { block_id, len, value_types, .. }) => { try_vecmap(0..len, |i| { let index_var = self.add_constant(i); Ok::<(AcirVar, AcirType), InternalError>(( self.read_from_memory(block_id, &index_var)?, - AcirType::field(), + value_types[i].into(), )) }) } diff --git a/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs b/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs index 46be6efcadd..8d4d0668534 100644 --- a/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs +++ b/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs @@ -99,6 +99,18 @@ pub(crate) struct AcirDynamicArray { block_id: BlockId, /// Length of the array len: usize, + /// An ACIR dynamic array is a flat structure, so we use + /// the inner structure of an `AcirType::NumericType` directly. + /// Some usages of ACIR arrays (e.g. black box functions) require the bit size + /// of every value to be known, thus we store the types as part of the dynamic + /// array definition. + /// + /// A dynamic non-homogenous array can potentially have values of differing types. 
+ /// Thus, we store a vector of types rather than a single type, as a dynamic non-homogenous array + /// is still represented in ACIR by a single `AcirDynamicArray` structure. + /// + /// The length of the value types vector must match the `len` field in this structure. + value_types: Vec, /// Identification for the ACIR dynamic array /// inner element type sizes array element_type_sizes: Option, @@ -150,6 +162,16 @@ impl AcirValue { AcirValue::DynamicArray(_) => unimplemented!("Cannot flatten a dynamic array"), } } + + fn flat_numeric_types(self) -> Vec { + match self { + AcirValue::Array(_) => { + self.flatten().into_iter().map(|(_, typ)| typ.to_numeric_type()).collect() + } + AcirValue::DynamicArray(AcirDynamicArray { value_types, .. }) => value_types, + _ => unreachable!("An AcirValue::Var cannot be used as an array value"), + } + } } impl Ssa { @@ -1007,9 +1029,15 @@ impl Context { } else { None }; + + let value_types = self.convert_value(array_id, dfg).flat_numeric_types(); + // Compiler sanity check + assert_eq!(value_types.len(), array_len, "ICE: The length of the flattened type array should match the length of the dynamic array"); + let result_value = AcirValue::DynamicArray(AcirDynamicArray { block_id: result_block_id, len: array_len, + value_types, element_type_sizes, }); self.define_result(dfg, instruction, result_value); @@ -1093,7 +1121,7 @@ impl Context { &mut self, array_typ: &Type, array_id: ValueId, - array_acir_value: Option, + supplied_acir_value: Option<&AcirValue>, dfg: &DataFlowGraph, ) -> Result { let element_type_sizes = self.internal_block_id(&array_id); @@ -1119,26 +1147,23 @@ impl Context { Value::Instruction { .. } | Value::Param { .. } => { // An instruction representing the slice means it has been processed previously during ACIR gen. // Use the previously defined result of an array operation to fetch the internal type information. 
- let array_acir_value = if let Some(array_acir_value) = array_acir_value { - array_acir_value - } else { - self.convert_value(array_id, dfg) - }; + let array_acir_value = &self.convert_value(array_id, dfg); + let array_acir_value = supplied_acir_value.unwrap_or(array_acir_value); match array_acir_value { AcirValue::DynamicArray(AcirDynamicArray { element_type_sizes: inner_elem_type_sizes, .. }) => { if let Some(inner_elem_type_sizes) = inner_elem_type_sizes { - if self.initialized_arrays.contains(&inner_elem_type_sizes) { - let type_sizes_array_len = self.internal_mem_block_lengths.get(&inner_elem_type_sizes).copied().ok_or_else(|| + if self.initialized_arrays.contains(inner_elem_type_sizes) { + let type_sizes_array_len = *self.internal_mem_block_lengths.get(inner_elem_type_sizes).ok_or_else(|| InternalError::General { message: format!("Array {array_id}'s inner element type sizes array does not have a tracked length"), call_stack: self.acir_context.get_call_stack(), } )?; self.copy_dynamic_array( - inner_elem_type_sizes, + *inner_elem_type_sizes, element_type_sizes, type_sizes_array_len, )?; @@ -1683,15 +1708,24 @@ impl Context { Some(self.init_element_type_sizes_array( &slice_typ, slice_contents, - Some(new_slice_val), + Some(&new_slice_val), dfg, )?) } else { None }; + + let value_types = new_slice_val.flat_numeric_types(); + assert_eq!( + value_types.len(), + new_elem_size, + "ICE: Value types array must match new slice size" + ); + let result = AcirValue::DynamicArray(AcirDynamicArray { block_id: result_block_id, len: new_elem_size, + value_types, element_type_sizes, }); Ok(vec![AcirValue::Var(new_slice_length, AcirType::field()), result]) @@ -1738,15 +1772,24 @@ impl Context { Some(self.init_element_type_sizes_array( &slice_typ, slice_contents, - Some(new_slice_val), + Some(&new_slice_val), dfg, )?) 
} else { None }; + + let value_types = new_slice_val.flat_numeric_types(); + assert_eq!( + value_types.len(), + new_slice_size, + "ICE: Value types array must match new slice size" + ); + let result = AcirValue::DynamicArray(AcirDynamicArray { block_id: result_block_id, len: new_slice_size, + value_types, element_type_sizes, }); @@ -1943,15 +1986,24 @@ impl Context { Some(self.init_element_type_sizes_array( &slice_typ, slice_contents, - Some(slice), + Some(&slice), dfg, )?) } else { None }; + + let value_types = slice.flat_numeric_types(); + assert_eq!( + value_types.len(), + slice_size, + "ICE: Value types array must match new slice size" + ); + let result = AcirValue::DynamicArray(AcirDynamicArray { block_id: result_block_id, len: slice_size, + value_types, element_type_sizes, }); @@ -2059,15 +2111,24 @@ impl Context { Some(self.init_element_type_sizes_array( &slice_typ, slice_contents, - Some(new_slice_val), + Some(&new_slice_val), dfg, )?) } else { None }; + + let value_types = new_slice_val.flat_numeric_types(); + assert_eq!( + value_types.len(), + slice_size, + "ICE: Value types array must match new slice size" + ); + let result = AcirValue::DynamicArray(AcirDynamicArray { block_id: result_block_id, len: slice_size, + value_types, element_type_sizes, }); diff --git a/test_programs/execution_success/array_dynamic_blackbox_input/Nargo.toml b/test_programs/execution_success/array_dynamic_blackbox_input/Nargo.toml new file mode 100644 index 00000000000..03da304acc3 --- /dev/null +++ b/test_programs/execution_success/array_dynamic_blackbox_input/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "array_dynamic_blackbox_input" +type = "bin" +authors = [""] +compiler_version = ">=0.24.0" + +[dependencies] \ No newline at end of file diff --git a/test_programs/execution_success/array_dynamic_blackbox_input/Prover.toml b/test_programs/execution_success/array_dynamic_blackbox_input/Prover.toml new file mode 100644 index 00000000000..cc60eb8a8ba --- /dev/null +++ 
b/test_programs/execution_success/array_dynamic_blackbox_input/Prover.toml @@ -0,0 +1,4 @@ +index = "1" +leaf = ["51", "109", "224", "175", "60", "42", "79", "222", "117", "255", "174", "79", "126", "242", "74", "34", "100", "35", "20", "200", "109", "89", "191", "219", "41", "10", "118", "217", "165", "224", "215", "109"] +path = ["0", "1", "2", "3", "4", "5", "6", "7", "8", "9", "10", "11", "12", "13", "14", "15", "16", "17", "18", "19", "20", "21", "22", "23", "24", "25", "26", "27", "28", "29", "30", "31", "32", "33", "34", "35", "36", "37", "38", "39", "40", "41", "42", "43", "44", "45", "46", "47", "48", "49", "50", "51", "52", "53", "54", "55", "56", "57", "58", "59", "60", "61", "62", "63"] +root = [79, 230, 126, 184, 98, 125, 226, 58, 117, 45, 140, 15, 72, 118, 89, 173, 117, 161, 166, 0, 214, 125, 13, 16, 113, 81, 173, 156, 97, 15, 57, 216] diff --git a/test_programs/execution_success/array_dynamic_blackbox_input/src/main.nr b/test_programs/execution_success/array_dynamic_blackbox_input/src/main.nr new file mode 100644 index 00000000000..aabf7fc9d5c --- /dev/null +++ b/test_programs/execution_success/array_dynamic_blackbox_input/src/main.nr @@ -0,0 +1,27 @@ +fn main(leaf: [u8; 32], path: [u8; 64], index: u32, root: [u8; 32]) { + compute_root(leaf, path, index, root); +} + +fn compute_root(leaf: [u8; 32], path: [u8; 64], _index: u32, root: [u8; 32]) { + let mut current = leaf; + let mut index = _index; + + for i in 0..2 { + let mut hash_input = [0; 64]; + let offset = i * 32; + let is_right = (index & 1) != 0; + let a = if is_right { 32 } else { 0 }; + let b = if is_right { 0 } else { 32 }; + + for j in 0..32 { + hash_input[j + a] = current[j]; + hash_input[j + b] = path[offset + j]; + } + + current = dep::std::hash::sha256(hash_input); + index = index >> 1; + } + + // Regression for issue #4258 + assert(root == current); +} \ No newline at end of file diff --git a/test_programs/execution_success/array_dynamic_nested_blackbox_input/Nargo.toml 
b/test_programs/execution_success/array_dynamic_nested_blackbox_input/Nargo.toml new file mode 100644 index 00000000000..07d867d433f --- /dev/null +++ b/test_programs/execution_success/array_dynamic_nested_blackbox_input/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "array_dynamic_nested_blackbox_input" +type = "bin" +authors = [""] +compiler_version = ">=0.24.0" + +[dependencies] \ No newline at end of file diff --git a/test_programs/execution_success/array_dynamic_nested_blackbox_input/Prover.toml b/test_programs/execution_success/array_dynamic_nested_blackbox_input/Prover.toml new file mode 100644 index 00000000000..1f291532414 --- /dev/null +++ b/test_programs/execution_success/array_dynamic_nested_blackbox_input/Prover.toml @@ -0,0 +1,23 @@ +y = "3" +hash_result = [50, 53, 90, 252, 105, 236, 223, 30, 135, 229, 193, 172, 51, 139, 8, 32, 188, 104, 151, 115, 129, 168, 27, 71, 203, 47, 40, 228, 89, 177, 129, 100] + +[[x]] +a = "1" +b = ["2", "3", "20"] + +[x.bar] +inner = ["100", "101", "102"] + +[[x]] +a = "4" # idx = 3, flattened start idx = 7 +b = ["5", "6", "21"] # idx = 4, flattened start idx = 8 + +[x.bar] +inner = ["103", "104", "105"] # idx = 5, flattened start idx = 11 + +[[x]] +a = "7" +b = ["8", "9", "22"] + +[x.bar] +inner = ["106", "107", "108"] \ No newline at end of file diff --git a/test_programs/execution_success/array_dynamic_nested_blackbox_input/src/main.nr b/test_programs/execution_success/array_dynamic_nested_blackbox_input/src/main.nr new file mode 100644 index 00000000000..8faaf69dfc8 --- /dev/null +++ b/test_programs/execution_success/array_dynamic_nested_blackbox_input/src/main.nr @@ -0,0 +1,20 @@ +struct Bar { + inner: [u8; 3], +} + +struct Foo { + a: Field, + b: [Field; 3], + bar: Bar, +} + +fn main(mut x: [Foo; 3], y: pub Field, hash_result: pub [u8; 32]) { + // Simple dynamic array set for entire inner most array + x[y - 1].bar.inner = [106, 107, 10]; + let mut hash_input = x[y - 1].bar.inner; + // Make sure that we are passing a 
dynamic array to the black box function call + // by setting the array using a dynamic index here + hash_input[y - 1] = 0; + let hash = dep::std::hash::sha256(hash_input); + assert_eq(hash, hash_result); +} From 2952bf1960074413df538cee928c62b93d76ceb8 Mon Sep 17 00:00:00 2001 From: Tom French <15848336+TomAFrench@users.noreply.github.com> Date: Wed, 14 Feb 2024 19:04:24 +0000 Subject: [PATCH 12/39] chore(ci): add warning for external contributors force pushing (#4373) # Description ## Problem\* Resolves ## Summary\* This PR adds a sticky comment to any PRs which are opened from another repository which warns against force pushing once we've started reviewing the PR. ## Additional Context ## Documentation\* Check one: - [x] No documentation needed. - [ ] Documentation included in this PR. - [ ] **[Exceptional Case]** Documentation to be submitted in a separate PR. # PR Checklist\* - [x] I have tested the changes locally. - [x] I have formatted the changes with [Prettier](https://prettier.io/) and/or `cargo fmt` on default settings. --------- Co-authored-by: jfecher --- .github/EXTERNAL_CONTRIBUTOR_PR_COMMENT.md | 5 +++++ .github/workflows/pull-request-title.yml | 15 +++++++++++++++ 2 files changed, 20 insertions(+) create mode 100644 .github/EXTERNAL_CONTRIBUTOR_PR_COMMENT.md diff --git a/.github/EXTERNAL_CONTRIBUTOR_PR_COMMENT.md b/.github/EXTERNAL_CONTRIBUTOR_PR_COMMENT.md new file mode 100644 index 00000000000..4031bcdb61c --- /dev/null +++ b/.github/EXTERNAL_CONTRIBUTOR_PR_COMMENT.md @@ -0,0 +1,5 @@ +Thank you for your contribution to the Noir language. + +Please **do not force push to this branch** after the Noir team have reviewed this PR. Doing so will only delay us merging your PR as we will need to start the review process from scratch. + +Thanks for your understanding. 
\ No newline at end of file diff --git a/.github/workflows/pull-request-title.yml b/.github/workflows/pull-request-title.yml index 4b8a626a94e..7c2822aa954 100644 --- a/.github/workflows/pull-request-title.yml +++ b/.github/workflows/pull-request-title.yml @@ -27,3 +27,18 @@ jobs: fix feat chore + + force-push-comment: + name: Warn external contributors about force-pushing + runs-on: ubuntu-latest + if: github.repository != 'noir-lang/noir' && github.event_name == 'pull_request_target' + permissions: + pull-requests: write + + steps: + - name: Post comment on force pushes + uses: marocchino/sticky-pull-request-comment@v2 + with: + path: ./.github/EXTERNAL_CONTRIBUTOR_PR_COMMENT.md + + \ No newline at end of file From da1281fc755e55aee52beab467b6d58734f42f4a Mon Sep 17 00:00:00 2001 From: Tom French <15848336+TomAFrench@users.noreply.github.com> Date: Wed, 14 Feb 2024 19:37:44 +0000 Subject: [PATCH 13/39] chore: fix trigger for force-push sticky comment (#4377) # Description ## Problem\* Resolves ## Summary\* #4373 isn't being triggered properly currently. ## Additional Context ## Documentation\* Check one: - [ ] No documentation needed. - [ ] Documentation included in this PR. - [ ] **[Exceptional Case]** Documentation to be submitted in a separate PR. # PR Checklist\* - [ ] I have tested the changes locally. - [ ] I have formatted the changes with [Prettier](https://prettier.io/) and/or `cargo fmt` on default settings. 
--- .github/workflows/pull-request-title.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/pull-request-title.yml b/.github/workflows/pull-request-title.yml index 7c2822aa954..8f863160cf1 100644 --- a/.github/workflows/pull-request-title.yml +++ b/.github/workflows/pull-request-title.yml @@ -31,7 +31,7 @@ jobs: force-push-comment: name: Warn external contributors about force-pushing runs-on: ubuntu-latest - if: github.repository != 'noir-lang/noir' && github.event_name == 'pull_request_target' + if: ${{ github.event_name == 'pull_request_target' && github.event.pull_request.head.repo.full_name != 'noir-lang/noir' }} permissions: pull-requests: write From 8536c7c8ea8fc6b740b2ae6d1aef3bc7e1907b8c Mon Sep 17 00:00:00 2001 From: Tom French <15848336+TomAFrench@users.noreply.github.com> Date: Thu, 15 Feb 2024 11:47:11 +0000 Subject: [PATCH 14/39] fix: only add `.nr` files to file manager (#4380) # Description ## Problem\* Resolves https://github.com/noir-lang/noir/issues/4379 ## Summary\* This PR adds a filter so we only add files with a `.nr` extension to the file manager. ## Additional Context ## Documentation\* Check one: - [x] No documentation needed. - [ ] Documentation included in this PR. - [ ] **[Exceptional Case]** Documentation to be submitted in a separate PR. # PR Checklist\* - [x] I have tested the changes locally. - [x] I have formatted the changes with [Prettier](https://prettier.io/) and/or `cargo fmt` on default settings. 
--- tooling/nargo/src/lib.rs | 26 +++++++++++++++++++------- 1 file changed, 19 insertions(+), 7 deletions(-) diff --git a/tooling/nargo/src/lib.rs b/tooling/nargo/src/lib.rs index 0fdff8b202f..e12bf4d4ad1 100644 --- a/tooling/nargo/src/lib.rs +++ b/tooling/nargo/src/lib.rs @@ -16,7 +16,7 @@ pub mod workspace; use std::collections::BTreeMap; -use fm::FileManager; +use fm::{FileManager, FILE_EXTENSION}; use noirc_driver::{add_dep, prepare_crate, prepare_dependency}; use noirc_frontend::{ graph::{CrateId, CrateName}, @@ -69,8 +69,8 @@ fn insert_all_files_for_package_into_file_manager( .clone(); // Get all files in the package and add them to the file manager - let paths = - get_all_paths_in_dir(entry_path_parent).expect("could not get all paths in the package"); + let paths = get_all_noir_source_in_dir(entry_path_parent) + .expect("could not get all paths in the package"); for path in paths { let source = std::fs::read_to_string(path.as_path()) .unwrap_or_else(|_| panic!("could not read file {:?} into string", path)); @@ -125,6 +125,15 @@ pub fn prepare_package<'file_manager, 'parsed_files>( (context, crate_id) } +// Get all Noir source files in the directory and subdirectories. +// +// Panics: If the path is not a path to a directory. +fn get_all_noir_source_in_dir(dir: &std::path::Path) -> std::io::Result> { + get_all_paths_in_dir(dir, |path| { + path.extension().map_or(false, |extension| extension == FILE_EXTENSION) + }) +} + // Get all paths in the directory and subdirectories. // // Panics: If the path is not a path to a directory. @@ -132,7 +141,10 @@ pub fn prepare_package<'file_manager, 'parsed_files>( // TODO: Along with prepare_package, this function is an abstraction leak // TODO: given that this crate should not know about the file manager. 
// TODO: We can clean this up in a future refactor -fn get_all_paths_in_dir(dir: &std::path::Path) -> std::io::Result> { +fn get_all_paths_in_dir( + dir: &std::path::Path, + predicate: fn(&std::path::Path) -> bool, +) -> std::io::Result> { assert!(dir.is_dir(), "directory {dir:?} is not a path to a directory"); let mut paths = Vec::new(); @@ -142,9 +154,9 @@ fn get_all_paths_in_dir(dir: &std::path::Path) -> std::io::Result Date: Thu, 15 Feb 2024 21:04:37 +0900 Subject: [PATCH 15/39] chore(github): Auto tag Dev Rel on doc-changing PRs (#4375) # Description ## Problem\* Documentation-changing PRs do not currently notify @noir-lang/developerrelations, which the team prefers (let me know below if that is not actually the case). ## Summary\* Extend the documentation preview action to auto-comment and tag `@noir-lang/developerrelations` on doc-changing PRs. Preview of how it looks: https://github.com/Savio-Sou/noir/pull/3#issuecomment-1944321469 ## Documentation\* Check one: - [ ] No documentation needed. - [x] Documentation included in this PR. - [ ] **[Exceptional Case]** Documentation to be submitted in a separate PR. # PR Checklist\* - [x] I have tested the changes locally. - [x] I have formatted the changes with [Prettier](https://prettier.io/) and/or `cargo fmt` on default settings. 
Co-authored-by: Tom French <15848336+TomAFrench@users.noreply.github.com> --- .github/workflows/docs-pr.yml | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/.github/workflows/docs-pr.yml b/.github/workflows/docs-pr.yml index 712fb100ba6..dddb309a3a4 100644 --- a/.github/workflows/docs-pr.yml +++ b/.github/workflows/docs-pr.yml @@ -114,3 +114,16 @@ jobs: NETLIFY_AUTH_TOKEN: ${{ secrets.NETLIFY_AUTH_TOKEN }} NETLIFY_SITE_ID: ${{ secrets.NETLIFY_SITE_ID }} timeout-minutes: 1 + + add_comment: + needs: [deploy_preview] + runs-on: ubuntu-latest + permissions: + pull-requests: write + steps: + - name: Tag dev rel in comment + uses: marocchino/sticky-pull-request-comment@v2 + with: + message: | + FYI @noir-lang/developerrelations on Noir doc changes. + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} \ No newline at end of file From 4f4f24f5ce2726b6955404139d4946086fd246e5 Mon Sep 17 00:00:00 2001 From: Tom French <15848336+TomAFrench@users.noreply.github.com> Date: Thu, 15 Feb 2024 18:01:05 +0000 Subject: [PATCH 16/39] chore: clippy fix (#4387) # Description ## Problem\* Resolves ## Summary\* As we're bumping the MSRV in #4385, we're getting a whole new version of clippy which is picking up more stuff. This PR applies clippy + cargo fmt changes from rustc 1.76.0 to reduce the diff on #4385 ## Additional Context ## Documentation\* Check one: - [x] No documentation needed. - [ ] Documentation included in this PR. - [ ] **[Exceptional Case]** Documentation to be submitted in a separate PR. # PR Checklist\* - [x] I have tested the changes locally. - [x] I have formatted the changes with [Prettier](https://prettier.io/) and/or `cargo fmt` on default settings. 
--- acvm-repo/acir_field/src/generic_ark.rs | 114 +++++++++--------- .../compiler/optimizers/redundant_range.rs | 5 +- acvm-repo/acvm/src/pwg/blackbox/bigint.rs | 2 +- aztec_macros/src/lib.rs | 8 +- compiler/noirc_errors/src/debug_info.rs | 2 +- .../src/brillig/brillig_gen/brillig_block.rs | 2 +- .../noirc_evaluator/src/ssa/ssa_gen/mod.rs | 4 +- compiler/noirc_frontend/src/debug/mod.rs | 4 +- .../noirc_frontend/src/hir/type_check/expr.rs | 2 +- compiler/noirc_frontend/src/lexer/token.rs | 42 +++---- .../src/monomorphization/mod.rs | 4 +- compiler/noirc_frontend/src/node_interner.rs | 4 +- compiler/noirc_frontend/src/parser/parser.rs | 48 ++++---- tooling/debugger/src/context.rs | 2 +- tooling/debugger/src/dap.rs | 57 +++++---- tooling/debugger/src/foreign_calls.rs | 8 +- tooling/lsp/src/lib.rs | 10 +- tooling/nargo/src/artifacts/debug_vars.rs | 20 +-- tooling/nargo/src/lib.rs | 3 +- tooling/nargo_cli/src/cli/check_cmd.rs | 36 +++--- tooling/nargo_cli/src/cli/dap_cmd.rs | 3 +- tooling/nargo_cli/src/cli/debug_cmd.rs | 3 +- tooling/nargo_fmt/src/rewrite/infix.rs | 4 +- tooling/nargo_fmt/src/visitor/expr.rs | 1 - 24 files changed, 199 insertions(+), 189 deletions(-) diff --git a/acvm-repo/acir_field/src/generic_ark.rs b/acvm-repo/acir_field/src/generic_ark.rs index dc54d271beb..3178011a075 100644 --- a/acvm-repo/acir_field/src/generic_ark.rs +++ b/acvm-repo/acir_field/src/generic_ark.rs @@ -429,63 +429,6 @@ impl SubAssign for FieldElement { } } -#[cfg(test)] -mod tests { - #[test] - fn and() { - let max = 10_000u32; - - let num_bits = (std::mem::size_of::() * 8) as u32 - max.leading_zeros(); - - for x in 0..max { - let x = crate::generic_ark::FieldElement::::from(x as i128); - let res = x.and(&x, num_bits); - assert_eq!(res.to_be_bytes(), x.to_be_bytes()); - } - } - - #[test] - fn serialize_fixed_test_vectors() { - // Serialized field elements from of 0, -1, -2, -3 - let hex_strings = vec![ - "0000000000000000000000000000000000000000000000000000000000000000", - 
"30644e72e131a029b85045b68181585d2833e84879b9709143e1f593f0000000", - "30644e72e131a029b85045b68181585d2833e84879b9709143e1f593efffffff", - "30644e72e131a029b85045b68181585d2833e84879b9709143e1f593effffffe", - ]; - - for (i, string) in hex_strings.into_iter().enumerate() { - let minus_i_field_element = - -crate::generic_ark::FieldElement::::from(i as i128); - assert_eq!(minus_i_field_element.to_hex(), string); - } - } - - #[test] - fn deserialize_even_and_odd_length_hex() { - // Test cases of (odd, even) length hex strings - let hex_strings = - vec![("0x0", "0x00"), ("0x1", "0x01"), ("0x002", "0x0002"), ("0x00003", "0x000003")]; - for (i, case) in hex_strings.into_iter().enumerate() { - let i_field_element = - crate::generic_ark::FieldElement::::from(i as i128); - let odd_field_element = - crate::generic_ark::FieldElement::::from_hex(case.0).unwrap(); - let even_field_element = - crate::generic_ark::FieldElement::::from_hex(case.1).unwrap(); - - assert_eq!(i_field_element, odd_field_element); - assert_eq!(odd_field_element, even_field_element); - } - } - - #[test] - fn max_num_bits_smoke() { - let max_num_bits_bn254 = crate::generic_ark::FieldElement::::max_num_bits(); - assert_eq!(max_num_bits_bn254, 254); - } -} - fn mask_vector_le(bytes: &mut [u8], num_bits: usize) { // reverse to big endian format bytes.reverse(); @@ -543,3 +486,60 @@ fn superscript(n: u64) -> String { panic!("{}", n.to_string() + " can't be converted to superscript."); } } + +#[cfg(test)] +mod tests { + #[test] + fn and() { + let max = 10_000u32; + + let num_bits = (std::mem::size_of::() * 8) as u32 - max.leading_zeros(); + + for x in 0..max { + let x = crate::generic_ark::FieldElement::::from(x as i128); + let res = x.and(&x, num_bits); + assert_eq!(res.to_be_bytes(), x.to_be_bytes()); + } + } + + #[test] + fn serialize_fixed_test_vectors() { + // Serialized field elements from of 0, -1, -2, -3 + let hex_strings = vec![ + "0000000000000000000000000000000000000000000000000000000000000000", + 
"30644e72e131a029b85045b68181585d2833e84879b9709143e1f593f0000000", + "30644e72e131a029b85045b68181585d2833e84879b9709143e1f593efffffff", + "30644e72e131a029b85045b68181585d2833e84879b9709143e1f593effffffe", + ]; + + for (i, string) in hex_strings.into_iter().enumerate() { + let minus_i_field_element = + -crate::generic_ark::FieldElement::::from(i as i128); + assert_eq!(minus_i_field_element.to_hex(), string); + } + } + + #[test] + fn deserialize_even_and_odd_length_hex() { + // Test cases of (odd, even) length hex strings + let hex_strings = + vec![("0x0", "0x00"), ("0x1", "0x01"), ("0x002", "0x0002"), ("0x00003", "0x000003")]; + for (i, case) in hex_strings.into_iter().enumerate() { + let i_field_element = + crate::generic_ark::FieldElement::::from(i as i128); + let odd_field_element = + crate::generic_ark::FieldElement::::from_hex(case.0).unwrap(); + let even_field_element = + crate::generic_ark::FieldElement::::from_hex(case.1).unwrap(); + + assert_eq!(i_field_element, odd_field_element); + assert_eq!(odd_field_element, even_field_element); + } + } + + #[test] + fn max_num_bits_smoke() { + let max_num_bits_bn254 = crate::generic_ark::FieldElement::::max_num_bits(); + assert_eq!(max_num_bits_bn254, 254); + } +} diff --git a/acvm-repo/acvm/src/compiler/optimizers/redundant_range.rs b/acvm-repo/acvm/src/compiler/optimizers/redundant_range.rs index 64fe5291cc6..c6ca18d30ae 100644 --- a/acvm-repo/acvm/src/compiler/optimizers/redundant_range.rs +++ b/acvm-repo/acvm/src/compiler/optimizers/redundant_range.rs @@ -72,12 +72,9 @@ impl RangeOptimizer { } } - Opcode::BlackBoxFuncCall(BlackBoxFuncCall::RANGE { input: FunctionInput { witness, num_bits }, - }) => { - Some((*witness, *num_bits)) - } + }) => Some((*witness, *num_bits)), _ => None, }) else { diff --git a/acvm-repo/acvm/src/pwg/blackbox/bigint.rs b/acvm-repo/acvm/src/pwg/blackbox/bigint.rs index 986afaa3ce7..f094bb1ba20 100644 --- a/acvm-repo/acvm/src/pwg/blackbox/bigint.rs +++ 
b/acvm-repo/acvm/src/pwg/blackbox/bigint.rs @@ -69,7 +69,7 @@ impl BigIntSolver { pub(crate) fn bigint_to_bytes( &self, input: u32, - outputs: &Vec, + outputs: &[Witness], initial_witness: &mut WitnessMap, ) -> Result<(), OpcodeResolutionError> { let bigint = self.get_bigint(input, BlackBoxFunc::BigIntToLeBytes)?; diff --git a/aztec_macros/src/lib.rs b/aztec_macros/src/lib.rs index 51a8b5361a6..21e3dd56e0d 100644 --- a/aztec_macros/src/lib.rs +++ b/aztec_macros/src/lib.rs @@ -809,7 +809,7 @@ fn get_serialized_length( ) -> Result { let (struct_name, maybe_stored_in_state) = match typ { Type::Struct(struct_type, generics) => { - Ok((struct_type.borrow().name.0.contents.clone(), generics.get(0))) + Ok((struct_type.borrow().name.0.contents.clone(), generics.first())) } _ => Err(AztecMacroError::CouldNotAssignStorageSlots { secondary_message: Some("State storage variable must be a struct".to_string()), @@ -859,7 +859,7 @@ fn get_serialized_length( let serialized_trait_impl_shared = interner.get_trait_implementation(*serialized_trait_impl_id); let serialized_trait_impl = serialized_trait_impl_shared.borrow(); - match serialized_trait_impl.trait_generics.get(0).unwrap() { + match serialized_trait_impl.trait_generics.first().unwrap() { Type::Constant(value) => Ok(*value), _ => Err(AztecMacroError::CouldNotAssignStorageSlots { secondary_message: None }), } @@ -946,9 +946,7 @@ fn assign_storage_slots( let slot_arg_expression = interner.expression(&new_call_expression.arguments[1]); let current_storage_slot = match slot_arg_expression { - HirExpression::Literal(HirLiteral::Integer(slot, _)) => { - Ok(slot.borrow().to_u128()) - } + HirExpression::Literal(HirLiteral::Integer(slot, _)) => Ok(slot.to_u128()), _ => Err(( AztecMacroError::CouldNotAssignStorageSlots { secondary_message: Some( diff --git a/compiler/noirc_errors/src/debug_info.rs b/compiler/noirc_errors/src/debug_info.rs index 25722aac57f..67ec851d46d 100644 --- a/compiler/noirc_errors/src/debug_info.rs +++ 
b/compiler/noirc_errors/src/debug_info.rs @@ -90,7 +90,7 @@ impl DebugInfo { for (opcode_location, locations) in self.locations.iter() { for location in locations.iter() { - let opcodes = accumulator.entry(*location).or_insert(Vec::new()); + let opcodes = accumulator.entry(*location).or_default(); opcodes.push(opcode_location); } } diff --git a/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs b/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs index c299daa158a..f01f60252f6 100644 --- a/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs +++ b/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs @@ -269,7 +269,7 @@ impl<'block> BrilligBlock<'block> { unreachable!("expected a call instruction") }; - let Value::Function(func_id) = &dfg[*func] else { + let Value::Function(func_id) = &dfg[*func] else { unreachable!("expected a function value") }; diff --git a/compiler/noirc_evaluator/src/ssa/ssa_gen/mod.rs b/compiler/noirc_evaluator/src/ssa/ssa_gen/mod.rs index ecc8bf87597..8f2c923d62c 100644 --- a/compiler/noirc_evaluator/src/ssa/ssa_gen/mod.rs +++ b/compiler/noirc_evaluator/src/ssa/ssa_gen/mod.rs @@ -684,9 +684,7 @@ impl<'a> FunctionContext<'a> { &mut self, assert_message: &Option>, ) -> Result>, RuntimeError> { - let Some(assert_message_expr) = assert_message else { - return Ok(None) - }; + let Some(assert_message_expr) = assert_message else { return Ok(None) }; if let ast::Expression::Literal(ast::Literal::Str(assert_message)) = assert_message_expr.as_ref() diff --git a/compiler/noirc_frontend/src/debug/mod.rs b/compiler/noirc_frontend/src/debug/mod.rs index 9056e821e8d..a88567fcaf9 100644 --- a/compiler/noirc_frontend/src/debug/mod.rs +++ b/compiler/noirc_frontend/src/debug/mod.rs @@ -96,7 +96,7 @@ impl DebugInstrumenter { self.walk_scope(&mut func.body.0, func.span); // prepend fn params: - func.body.0 = vec![set_fn_params, func.body.0.clone()].concat(); + func.body.0 = [set_fn_params, 
func.body.0.clone()].concat(); } // Modify a vector of statements in-place, adding instrumentation for sets and drops. @@ -130,7 +130,7 @@ impl DebugInstrumenter { let span = Span::empty(span.end()); // drop scope variables - let scope_vars = self.scope.pop().unwrap_or(HashMap::default()); + let scope_vars = self.scope.pop().unwrap_or_default(); let drop_vars_stmts = scope_vars.values().map(|var_id| build_drop_var_stmt(*var_id, span)); statements.extend(drop_vars_stmts); diff --git a/compiler/noirc_frontend/src/hir/type_check/expr.rs b/compiler/noirc_frontend/src/hir/type_check/expr.rs index 96a79152f69..b6bb5984bcd 100644 --- a/compiler/noirc_frontend/src/hir/type_check/expr.rs +++ b/compiler/noirc_frontend/src/hir/type_check/expr.rs @@ -65,7 +65,7 @@ impl<'interner> TypeChecker<'interner> { let elem_types = vecmap(&arr, |arg| self.check_expression(arg)); let first_elem_type = elem_types - .get(0) + .first() .cloned() .unwrap_or_else(|| self.interner.next_type_variable()); diff --git a/compiler/noirc_frontend/src/lexer/token.rs b/compiler/noirc_frontend/src/lexer/token.rs index f7c07c5f5db..fe12132e202 100644 --- a/compiler/noirc_frontend/src/lexer/token.rs +++ b/compiler/noirc_frontend/src/lexer/token.rs @@ -774,6 +774,27 @@ impl Keyword { } } +pub struct Tokens(pub Vec); + +type TokenMapIter = Map, fn(SpannedToken) -> (Token, Span)>; + +impl<'a> From for chumsky::Stream<'a, Token, Span, TokenMapIter> { + fn from(tokens: Tokens) -> Self { + let end_of_input = match tokens.0.last() { + Some(spanned_token) => spanned_token.to_span(), + None => Span::single_char(0), + }; + + fn get_span(token: SpannedToken) -> (Token, Span) { + let span = token.to_span(); + (token.into_token(), span) + } + + let iter = tokens.0.into_iter().map(get_span as fn(_) -> _); + chumsky::Stream::from_iter(end_of_input, iter) + } +} + #[cfg(test)] mod keywords { use strum::IntoEnumIterator; @@ -796,24 +817,3 @@ mod keywords { } } } - -pub struct Tokens(pub Vec); - -type TokenMapIter = Map, 
fn(SpannedToken) -> (Token, Span)>; - -impl<'a> From for chumsky::Stream<'a, Token, Span, TokenMapIter> { - fn from(tokens: Tokens) -> Self { - let end_of_input = match tokens.0.last() { - Some(spanned_token) => spanned_token.to_span(), - None => Span::single_char(0), - }; - - fn get_span(token: SpannedToken) -> (Token, Span) { - let span = token.to_span(); - (token.into_token(), span) - } - - let iter = tokens.0.into_iter().map(get_span as fn(_) -> _); - chumsky::Stream::from_iter(end_of_input, iter) - } -} diff --git a/compiler/noirc_frontend/src/monomorphization/mod.rs b/compiler/noirc_frontend/src/monomorphization/mod.rs index f691a0c9065..cfe671d7d58 100644 --- a/compiler/noirc_frontend/src/monomorphization/mod.rs +++ b/compiler/noirc_frontend/src/monomorphization/mod.rs @@ -733,7 +733,9 @@ impl<'interner> Monomorphizer<'interner> { } DefinitionKind::Global(global_id) => { let Some(let_) = self.interner.get_global_let_statement(*global_id) else { - unreachable!("Globals should have a corresponding let statement by monomorphization") + unreachable!( + "Globals should have a corresponding let statement by monomorphization" + ) }; self.expr(let_.expression) } diff --git a/compiler/noirc_frontend/src/node_interner.rs b/compiler/noirc_frontend/src/node_interner.rs index 9a45268d111..815bc4c5e9c 100644 --- a/compiler/noirc_frontend/src/node_interner.rs +++ b/compiler/noirc_frontend/src/node_interner.rs @@ -1147,7 +1147,7 @@ impl NodeInterner { }) .collect() }) - .unwrap_or(vec![]) + .unwrap_or_default() } /// Similar to `lookup_trait_implementation` but does not apply any type bindings on success. 
@@ -1670,7 +1670,7 @@ impl Methods { for method in self.iter() { match interner.function_meta(&method).typ.instantiate(interner).0 { Type::Function(args, _, _) => { - if let Some(object) = args.get(0) { + if let Some(object) = args.first() { let mut bindings = TypeBindings::new(); if object.try_unify(typ, &mut bindings).is_ok() { diff --git a/compiler/noirc_frontend/src/parser/parser.rs b/compiler/noirc_frontend/src/parser/parser.rs index 8bcd7670716..1cb81e26a0a 100644 --- a/compiler/noirc_frontend/src/parser/parser.rs +++ b/compiler/noirc_frontend/src/parser/parser.rs @@ -833,7 +833,7 @@ where ignore_then_commit(keyword(Keyword::Assert), parenthesized(argument_parser)) .labelled(ParsingRuleLabel::Statement) .validate(|expressions, span, _| { - let condition = expressions.get(0).unwrap_or(&Expression::error(span)).clone(); + let condition = expressions.first().unwrap_or(&Expression::error(span)).clone(); let message = expressions.get(1).cloned(); StatementKind::Constrain(ConstrainStatement(condition, message, ConstrainKind::Assert)) }) @@ -851,7 +851,7 @@ where .validate(|exprs: Vec, span, _| { let predicate = Expression::new( ExpressionKind::Infix(Box::new(InfixExpression { - lhs: exprs.get(0).unwrap_or(&Expression::error(span)).clone(), + lhs: exprs.first().unwrap_or(&Expression::error(span)).clone(), rhs: exprs.get(1).unwrap_or(&Expression::error(span)).clone(), operator: Spanned::from(span, BinaryOpKind::Equal), })), @@ -2483,7 +2483,7 @@ mod test { #[test] fn return_validation() { - let cases = vec![ + let cases = [ Case { source: "{ return 42; }", expect: concat!("{\n", " Error\n", "}",), @@ -2512,7 +2512,7 @@ mod test { #[test] fn expr_no_constructors() { - let cases = vec![ + let cases = [ Case { source: "{ if structure { a: 1 } {} }", expect: concat!( @@ -2567,10 +2567,10 @@ mod test { #[test] fn parse_raw_string_expr() { let cases = vec![ - Case { source: r##" r"foo" "##, expect: r##"r"foo""##, errors: 0 }, + Case { source: r#" r"foo" "#, expect: 
r#"r"foo""#, errors: 0 }, Case { source: r##" r#"foo"# "##, expect: r##"r#"foo"#"##, errors: 0 }, // backslash - Case { source: r##" r"\\" "##, expect: r##"r"\\""##, errors: 0 }, + Case { source: r#" r"\\" "#, expect: r#"r"\\""#, errors: 0 }, Case { source: r##" r#"\"# "##, expect: r##"r#"\"#"##, errors: 0 }, Case { source: r##" r#"\\"# "##, expect: r##"r#"\\"#"##, errors: 0 }, Case { source: r##" r#"\\\"# "##, expect: r##"r#"\\\"#"##, errors: 0 }, @@ -2582,27 +2582,27 @@ mod test { }, Case { source: r##" r#"\\\\\\\\"# "##, expect: r##"r#"\\\\\\\\"#"##, errors: 0 }, // mismatch - errors: - Case { source: r###" r#"foo"## "###, expect: r###"r#"foo"#"###, errors: 1 }, - Case { source: r###" r##"foo"# "###, expect: "(none)", errors: 2 }, + Case { source: r###" r#"foo"## "###, expect: r##"r#"foo"#"##, errors: 1 }, + Case { source: r##" r##"foo"# "##, expect: "(none)", errors: 2 }, // mismatch: short: - Case { source: r###" r"foo"# "###, expect: r###"r"foo""###, errors: 1 }, - Case { source: r###" r#"foo" "###, expect: "(none)", errors: 2 }, + Case { source: r##" r"foo"# "##, expect: r#"r"foo""#, errors: 1 }, + Case { source: r#" r#"foo" "#, expect: "(none)", errors: 2 }, // empty string - Case { source: r####"r"""####, expect: r####"r"""####, errors: 0 }, + Case { source: r#"r"""#, expect: r#"r"""#, errors: 0 }, Case { source: r####"r###""###"####, expect: r####"r###""###"####, errors: 0 }, // miscellaneous - Case { source: r###" r#\"foo\"# "###, expect: "plain::r", errors: 2 }, - Case { source: r###" r\"foo\" "###, expect: "plain::r", errors: 1 }, - Case { source: r###" r##"foo"# "###, expect: "(none)", errors: 2 }, + Case { source: r##" r#\"foo\"# "##, expect: "plain::r", errors: 2 }, + Case { source: r#" r\"foo\" "#, expect: "plain::r", errors: 1 }, + Case { source: r##" r##"foo"# "##, expect: "(none)", errors: 2 }, // missing 'r' letter - Case { source: r###" ##"foo"# "###, expect: r#""foo""#, errors: 2 }, - Case { source: r###" #"foo" "###, expect: "plain::foo", 
errors: 2 }, + Case { source: r##" ##"foo"# "##, expect: r#""foo""#, errors: 2 }, + Case { source: r#" #"foo" "#, expect: "plain::foo", errors: 2 }, // whitespace - Case { source: r###" r #"foo"# "###, expect: "plain::r", errors: 2 }, - Case { source: r###" r# "foo"# "###, expect: "plain::r", errors: 3 }, - Case { source: r###" r#"foo" # "###, expect: "(none)", errors: 2 }, + Case { source: r##" r #"foo"# "##, expect: "plain::r", errors: 2 }, + Case { source: r##" r# "foo"# "##, expect: "plain::r", errors: 3 }, + Case { source: r#" r#"foo" # "#, expect: "(none)", errors: 2 }, // after identifier - Case { source: r###" bar#"foo"# "###, expect: "plain::bar", errors: 2 }, + Case { source: r##" bar#"foo"# "##, expect: "plain::bar", errors: 2 }, // nested Case { source: r###"r##"foo r#"bar"# r"baz" ### bye"##"###, @@ -2617,10 +2617,10 @@ mod test { #[test] fn parse_raw_string_lit() { let lit_cases = vec![ - Case { source: r##" r"foo" "##, expect: r##"r"foo""##, errors: 0 }, + Case { source: r#" r"foo" "#, expect: r#"r"foo""#, errors: 0 }, Case { source: r##" r#"foo"# "##, expect: r##"r#"foo"#"##, errors: 0 }, // backslash - Case { source: r##" r"\\" "##, expect: r##"r"\\""##, errors: 0 }, + Case { source: r#" r"\\" "#, expect: r#"r"\\""#, errors: 0 }, Case { source: r##" r#"\"# "##, expect: r##"r#"\"#"##, errors: 0 }, Case { source: r##" r#"\\"# "##, expect: r##"r#"\\"#"##, errors: 0 }, Case { source: r##" r#"\\\"# "##, expect: r##"r#"\\\"#"##, errors: 0 }, @@ -2632,8 +2632,8 @@ mod test { }, Case { source: r##" r#"\\\\\\\\"# "##, expect: r##"r#"\\\\\\\\"#"##, errors: 0 }, // mismatch - errors: - Case { source: r###" r#"foo"## "###, expect: r###"r#"foo"#"###, errors: 1 }, - Case { source: r###" r##"foo"# "###, expect: "(none)", errors: 2 }, + Case { source: r###" r#"foo"## "###, expect: r##"r#"foo"#"##, errors: 1 }, + Case { source: r##" r##"foo"# "##, expect: "(none)", errors: 2 }, ]; check_cases_with_errors(&lit_cases[..], literal()); diff --git 
a/tooling/debugger/src/context.rs b/tooling/debugger/src/context.rs index 5ab2c63c365..515edf0bb06 100644 --- a/tooling/debugger/src/context.rs +++ b/tooling/debugger/src/context.rs @@ -138,7 +138,7 @@ impl<'a, B: BlackBoxFunctionSolver> DebugContext<'a, B> { }) .collect() }) - .unwrap_or(vec![]) + .unwrap_or_default() } /// Returns the current call stack with expanded source locations. In diff --git a/tooling/debugger/src/dap.rs b/tooling/debugger/src/dap.rs index 184018e9fcc..7e67a26b257 100644 --- a/tooling/debugger/src/dap.rs +++ b/tooling/debugger/src/dap.rs @@ -115,7 +115,8 @@ impl<'a, R: Read, W: Write, B: BlackBoxFunctionSolver> DapSession<'a, R, W, B> { let source_location = source_locations[0]; let span = source_location.span; let file_id = source_location.file; - let Ok(line_index) = &simple_files[&file_id].line_index((), span.start() as usize) else { + let Ok(line_index) = &simple_files[&file_id].line_index((), span.start() as usize) + else { return; }; let line_number = line_index + 1; @@ -143,7 +144,7 @@ impl<'a, R: Read, W: Write, B: BlackBoxFunctionSolver> DapSession<'a, R, W, B> { pub fn run_loop(&mut self) -> Result<(), ServerError> { self.running = self.context.get_current_opcode_location().is_some(); - if self.running && matches!(self.context.get_current_source_location(), None) { + if self.running && self.context.get_current_source_location().is_none() { // TODO: remove this? 
This is to ensure that the tool has a proper // source location to show when first starting the debugger, but // maybe the default behavior should be to start executing until the @@ -297,7 +298,7 @@ impl<'a, R: Read, W: Write, B: BlackBoxFunctionSolver> DapSession<'a, R, W, B> { } } // the actual opcodes - while count > 0 && !matches!(opcode_location, None) { + while count > 0 && opcode_location.is_some() { instructions.push(DisassembledInstruction { address: format!("{}", opcode_location.unwrap()), instruction: self.context.render_opcode_at_location(&opcode_location), @@ -446,29 +447,31 @@ impl<'a, R: Read, W: Write, B: BlackBoxFunctionSolver> DapSession<'a, R, W, B> { // compute breakpoints to set and return let mut breakpoints_to_set: Vec<(OpcodeLocation, i64)> = vec![]; - let breakpoints: Vec = args.breakpoints.iter().map(|breakpoint| { - let Ok(location) = OpcodeLocation::from_str(breakpoint.instruction_reference.as_str()) else { - return Breakpoint { - verified: false, - message: Some(String::from("Missing instruction reference")), - ..Breakpoint::default() - }; - }; - if !self.context.is_valid_opcode_location(&location) { - return Breakpoint { - verified: false, - message: Some(String::from("Invalid opcode location")), - ..Breakpoint::default() + let breakpoints: Vec = args + .breakpoints + .iter() + .map(|breakpoint| { + let Ok(location) = + OpcodeLocation::from_str(breakpoint.instruction_reference.as_str()) + else { + return Breakpoint { + verified: false, + message: Some(String::from("Missing instruction reference")), + ..Breakpoint::default() + }; }; - } - let id = self.get_next_breakpoint_id(); - breakpoints_to_set.push((location, id)); - Breakpoint { - id: Some(id), - verified: true, - ..Breakpoint::default() - } - }).collect(); + if !self.context.is_valid_opcode_location(&location) { + return Breakpoint { + verified: false, + message: Some(String::from("Invalid opcode location")), + ..Breakpoint::default() + }; + } + let id = 
self.get_next_breakpoint_id(); + breakpoints_to_set.push((location, id)); + Breakpoint { id: Some(id), verified: true, ..Breakpoint::default() } + }) + .collect(); // actually set the computed breakpoints self.instruction_breakpoints = breakpoints_to_set; @@ -539,7 +542,9 @@ impl<'a, R: Read, W: Write, B: BlackBoxFunctionSolver> DapSession<'a, R, W, B> { let Some(location) = self.find_opcode_for_source_location(&file_id, line) else { return Breakpoint { verified: false, - message: Some(String::from("Source location cannot be matched to opcode location")), + message: Some(String::from( + "Source location cannot be matched to opcode location", + )), ..Breakpoint::default() }; }; diff --git a/tooling/debugger/src/foreign_calls.rs b/tooling/debugger/src/foreign_calls.rs index 01676adfef3..68c4d3947b0 100644 --- a/tooling/debugger/src/foreign_calls.rs +++ b/tooling/debugger/src/foreign_calls.rs @@ -100,7 +100,7 @@ impl ForeignCallExecutor for DefaultDebugForeignCallExecutor { Ok(ForeignCallResult::default().into()) } Some(DebugForeignCall::MemberAssign(arity)) => { - if let Some(ForeignCallParam::Single(var_id_value)) = foreign_call.inputs.get(0) { + if let Some(ForeignCallParam::Single(var_id_value)) = foreign_call.inputs.first() { let arity = arity as usize; let var_id = debug_var_id(var_id_value); let n = foreign_call.inputs.len(); @@ -116,7 +116,11 @@ impl ForeignCallExecutor for DefaultDebugForeignCallExecutor { .collect(); let values: Vec = (0..n - 1 - arity) .flat_map(|i| { - foreign_call.inputs.get(1 + i).map(|fci| fci.values()).unwrap_or(vec![]) + foreign_call + .inputs + .get(1 + i) + .map(|fci| fci.values()) + .unwrap_or_default() }) .collect(); self.debug_vars.assign_field(var_id, indexes, &values); diff --git a/tooling/lsp/src/lib.rs b/tooling/lsp/src/lib.rs index a0e024c70fd..be9b83e02f6 100644 --- a/tooling/lsp/src/lib.rs +++ b/tooling/lsp/src/lib.rs @@ -222,11 +222,15 @@ pub(crate) fn resolve_workspace_for_source_path(file_path: &Path) -> Result 
ParsedFiles { cache_misses .into_iter() .map(|(id, _, _, parse_results)| (id, parse_results)) - .chain(cache_hits.into_iter()) + .chain(cache_hits) .collect() } else { parse_all(file_manager) diff --git a/tooling/nargo/src/artifacts/debug_vars.rs b/tooling/nargo/src/artifacts/debug_vars.rs index b5559ca53c8..20f2637f7d6 100644 --- a/tooling/nargo/src/artifacts/debug_vars.rs +++ b/tooling/nargo/src/artifacts/debug_vars.rs @@ -18,23 +18,25 @@ impl DebugVars { self.active .iter() .filter_map(|var_id| { - self.variables - .get(var_id) - .and_then(|debug_var| { - let Some(value) = self.values.get(var_id) else { return None; }; - let Some(ptype) = self.types.get(&debug_var.debug_type_id) else { return None; }; - Some((debug_var.name.as_str(), value, ptype)) - }) + self.variables.get(var_id).and_then(|debug_var| { + let Some(value) = self.values.get(var_id) else { + return None; + }; + let Some(ptype) = self.types.get(&debug_var.debug_type_id) else { + return None; + }; + Some((debug_var.name.as_str(), value, ptype)) + }) }) .collect() } pub fn insert_variables(&mut self, vars: &DebugVariables) { - self.variables.extend(vars.clone().into_iter()); + self.variables.extend(vars.clone()); } pub fn insert_types(&mut self, types: &DebugTypes) { - self.types.extend(types.clone().into_iter()); + self.types.extend(types.clone()); } pub fn assign_var(&mut self, var_id: DebugVarId, values: &[Value]) { diff --git a/tooling/nargo/src/lib.rs b/tooling/nargo/src/lib.rs index e12bf4d4ad1..3deced041f8 100644 --- a/tooling/nargo/src/lib.rs +++ b/tooling/nargo/src/lib.rs @@ -65,8 +65,7 @@ fn insert_all_files_for_package_into_file_manager( let entry_path_parent = package .entry_path .parent() - .unwrap_or_else(|| panic!("The entry path is expected to be a single file within a directory and so should have a parent {:?}", package.entry_path)) - .clone(); + .unwrap_or_else(|| panic!("The entry path is expected to be a single file within a directory and so should have a parent {:?}", 
package.entry_path)); // Get all files in the package and add them to the file manager let paths = get_all_noir_source_in_dir(entry_path_parent) diff --git a/tooling/nargo_cli/src/cli/check_cmd.rs b/tooling/nargo_cli/src/cli/check_cmd.rs index a8b9dbdeeb2..4da06d2536a 100644 --- a/tooling/nargo_cli/src/cli/check_cmd.rs +++ b/tooling/nargo_cli/src/cli/check_cmd.rs @@ -142,6 +142,24 @@ fn create_input_toml_template( toml::to_string(&map).unwrap() } +/// Run the lexing, parsing, name resolution, and type checking passes and report any warnings +/// and errors found. +pub(crate) fn check_crate_and_report_errors( + context: &mut Context, + crate_id: CrateId, + deny_warnings: bool, + disable_macros: bool, + silence_warnings: bool, +) -> Result<(), CompileError> { + let result = check_crate(context, crate_id, deny_warnings, disable_macros); + super::compile_cmd::report_errors( + result, + &context.file_manager, + deny_warnings, + silence_warnings, + ) +} + #[cfg(test)] mod tests { use noirc_abi::{AbiParameter, AbiType, AbiVisibility, Sign}; @@ -189,21 +207,3 @@ d2 = ["", "", ""] assert_eq!(toml_str, expected_toml_str); } } - -/// Run the lexing, parsing, name resolution, and type checking passes and report any warnings -/// and errors found. 
-pub(crate) fn check_crate_and_report_errors( - context: &mut Context, - crate_id: CrateId, - deny_warnings: bool, - disable_macros: bool, - silence_warnings: bool, -) -> Result<(), CompileError> { - let result = check_crate(context, crate_id, deny_warnings, disable_macros); - super::compile_cmd::report_errors( - result, - &context.file_manager, - deny_warnings, - silence_warnings, - ) -} diff --git a/tooling/nargo_cli/src/cli/dap_cmd.rs b/tooling/nargo_cli/src/cli/dap_cmd.rs index f4df309f1c9..ba4f91609ef 100644 --- a/tooling/nargo_cli/src/cli/dap_cmd.rs +++ b/tooling/nargo_cli/src/cli/dap_cmd.rs @@ -159,7 +159,8 @@ fn loop_uninitialized_dap( server.respond(req.error("Missing launch arguments"))?; continue; }; - let Some(Value::String(ref project_folder)) = additional_data.get("projectFolder") else { + let Some(Value::String(ref project_folder)) = additional_data.get("projectFolder") + else { server.respond(req.error("Missing project folder argument"))?; continue; }; diff --git a/tooling/nargo_cli/src/cli/debug_cmd.rs b/tooling/nargo_cli/src/cli/debug_cmd.rs index 6fcfee91457..130a07b5c90 100644 --- a/tooling/nargo_cli/src/cli/debug_cmd.rs +++ b/tooling/nargo_cli/src/cli/debug_cmd.rs @@ -147,8 +147,7 @@ fn instrument_package_files( let entry_path_parent = package .entry_path .parent() - .unwrap_or_else(|| panic!("The entry path is expected to be a single file within a directory and so should have a parent {:?}", package.entry_path)) - .clone(); + .unwrap_or_else(|| panic!("The entry path is expected to be a single file within a directory and so should have a parent {:?}", package.entry_path)); let mut debug_instrumenter = DebugInstrumenter::default(); diff --git a/tooling/nargo_fmt/src/rewrite/infix.rs b/tooling/nargo_fmt/src/rewrite/infix.rs index 15f5fe23aae..5d2b387496a 100644 --- a/tooling/nargo_fmt/src/rewrite/infix.rs +++ b/tooling/nargo_fmt/src/rewrite/infix.rs @@ -96,7 +96,9 @@ pub(crate) fn flatten( result.push(rewrite); - let Some(pop) = stack.pop() else 
{ break; }; + let Some(pop) = stack.pop() else { + break; + }; match &pop.kind { ExpressionKind::Infix(infix) => { diff --git a/tooling/nargo_fmt/src/visitor/expr.rs b/tooling/nargo_fmt/src/visitor/expr.rs index 9b36911b1af..2cd0e881e84 100644 --- a/tooling/nargo_fmt/src/visitor/expr.rs +++ b/tooling/nargo_fmt/src/visitor/expr.rs @@ -202,7 +202,6 @@ pub(crate) fn format_seq( reduce: bool, ) -> String { let mut nested_indent = shape; - let shape = shape; nested_indent.indent.block_indent(visitor.config); From 0a1d109f478c997da5c43876fd12464af638bb15 Mon Sep 17 00:00:00 2001 From: jfecher Date: Thu, 15 Feb 2024 12:22:03 -0600 Subject: [PATCH 17/39] fix: Use correct type for numeric generics (#4386) # Description ## Problem\* Resolves #4290 ## Summary\* Previously, the monomorphizer would assume all numeric generics were Fields, but this was not necessarily true. ## Additional Context ## Documentation\* Check one: - [x] No documentation needed. - [ ] Documentation included in this PR. - [ ] **[Exceptional Case]** Documentation to be submitted in a separate PR. # PR Checklist\* - [x] I have tested the changes locally. - [x] I have formatted the changes with [Prettier](https://prettier.io/) and/or `cargo fmt` on default settings. 
--- compiler/noirc_frontend/src/monomorphization/mod.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/compiler/noirc_frontend/src/monomorphization/mod.rs b/compiler/noirc_frontend/src/monomorphization/mod.rs index cfe671d7d58..31a254d9f0a 100644 --- a/compiler/noirc_frontend/src/monomorphization/mod.rs +++ b/compiler/noirc_frontend/src/monomorphization/mod.rs @@ -714,7 +714,6 @@ impl<'interner> Monomorphizer<'interner> { let mutable = definition.mutable; let location = Some(ident.location); let name = definition.name.clone(); - let typ = self.interner.id_type(expr_id); let definition = self.lookup_function(*func_id, expr_id, &typ, None); let typ = self.convert_type(&typ); let ident = ast::Ident { location, mutable, definition, name, typ: typ.clone() }; @@ -755,7 +754,8 @@ impl<'interner> Monomorphizer<'interner> { let value = FieldElement::from(value as u128); let location = self.interner.id_location(expr_id); - ast::Expression::Literal(ast::Literal::Integer(value, ast::Type::Field, location)) + let typ = self.convert_type(&typ); + ast::Expression::Literal(ast::Literal::Integer(value, typ, location)) } } } From 1fbc6ab19a91a9cf6b12a56fb8f02a44fa7592f3 Mon Sep 17 00:00:00 2001 From: Maxim Vezenov Date: Thu, 15 Feb 2024 19:32:45 +0000 Subject: [PATCH 18/39] chore: Test for printing array of strings (#4389) # Description ## Problem\* Resolves #2903 ## Summary\* I added a function `regression_2903` to the `debug_logs` test that shows we can now accurately print arrays of strings. This was fixed in previous formatting work. I also removed `std::` prefixes from `println` now that we have a prelude. ## Additional Context ## Documentation\* Check one: - [X] No documentation needed. - [ ] Documentation included in this PR. - [ ] **[Exceptional Case]** Documentation to be submitted in a separate PR. # PR Checklist\* - [X] I have tested the changes locally. 
- [X] I have formatted the changes with [Prettier](https://prettier.io/) and/or `cargo fmt` on default settings. --- .../execution_success/debug_logs/src/main.nr | 76 +++++++++++-------- 1 file changed, 43 insertions(+), 33 deletions(-) diff --git a/test_programs/execution_success/debug_logs/src/main.nr b/test_programs/execution_success/debug_logs/src/main.nr index 49e0041594a..c628a9ae6a4 100644 --- a/test_programs/execution_success/debug_logs/src/main.nr +++ b/test_programs/execution_success/debug_logs/src/main.nr @@ -1,77 +1,76 @@ -use dep::std; - -fn main(x: Field, y: pub Field) { +fn main(x: Field, y: pub Field) { let string = "i: {i}, j: {j}"; - std::println(string); + println(string); // TODO: fmtstr cannot be printed // let fmt_str: fmtstr<14, (Field, Field)> = f"i: {x}, j: {y}"; // let fmt_fmt_str = f"fmtstr: {fmt_str}, i: {x}"; - // std::println(fmt_fmt_str); + // println(fmt_fmt_str); // A `fmtstr` lets you easily perform string interpolation. let fmt_str: fmtstr<14, (Field, Field)> = f"i: {x}, j: {y}"; let fmt_str = string_identity(fmt_str); - std::println(fmt_str); + println(fmt_str); let fmt_str_no_type = f"i: {x}, j: {y}"; - std::println(fmt_str_no_type); + println(fmt_str_no_type); let fmt_str_generic = string_with_generics(fmt_str_no_type); - std::println(fmt_str_generic); + println(fmt_str_generic); let s = myStruct { y: x, x: y }; - std::println(s); + println(s); - std::println(f"randomstring{x}{x}"); + println(f"randomstring{x}{x}"); let fmt_str = string_with_partial_generics(f"i: {x}, s: {s}"); - std::println(fmt_str); + println(fmt_str); - std::println(x); - std::println([x, y]); + println(x); + println([x, y]); let foo = fooStruct { my_struct: s, foo: 15 }; - std::println(f"s: {s}, foo: {foo}"); + println(f"s: {s}, foo: {foo}"); - std::println(f"x: 0, y: 1"); + println(f"x: 0, y: 1"); let s_2 = myStruct { x: 20, y: 30 }; - std::println(f"s1: {s}, s2: {s_2}"); + println(f"s1: {s}, s2: {s_2}"); let bar = fooStruct { my_struct: s_2, foo: 20 }; - 
std::println(f"foo1: {foo}, foo2: {bar}"); + println(f"foo1: {foo}, foo2: {bar}"); let struct_string = if x != 5 { f"{foo}" } else { f"{bar}" }; - std::println(struct_string); + println(struct_string); let one_tuple = (1, 2, 3); let another_tuple = (4, 5, 6); - std::println(f"one_tuple: {one_tuple}, another_tuple: {another_tuple}"); - std::println(one_tuple); + println(f"one_tuple: {one_tuple}, another_tuple: {another_tuple}"); + println(one_tuple); let tuples_nested = (one_tuple, another_tuple); - std::println(f"tuples_nested: {tuples_nested}"); - std::println(tuples_nested); + println(f"tuples_nested: {tuples_nested}"); + println(tuples_nested); + regression_2903(); regression_2906(); let first_array = [1, 2, 3]; let second_array = [4, 5, 6]; let arrays_nested = [first_array, second_array]; - std::println(f"first_array: {first_array}, second_array: {second_array}"); - std::println(f"arrays_nested: {arrays_nested}"); + println(f"first_array: {first_array}, second_array: {second_array}"); + println(f"arrays_nested: {arrays_nested}"); let free_lambda = |x| x + 1; let sentinel: u32 = 8888; - std::println(f"free_lambda: {free_lambda}, sentinel: {sentinel}"); - std::println(free_lambda); + println(f"free_lambda: {free_lambda}, sentinel: {sentinel}"); + println(free_lambda); let one = 1; let closured_lambda = |x| x + one; - std::println(f"closured_lambda: {closured_lambda}, sentinel: {sentinel}"); - std::println(closured_lambda); + println(f"closured_lambda: {closured_lambda}, sentinel: {sentinel}"); + println(closured_lambda); } fn string_identity(string: fmtstr<14, (Field, Field)>) -> fmtstr<14, (Field, Field)> { @@ -96,19 +95,30 @@ struct fooStruct { foo: Field, } +fn regression_2903() { + let v : [str<1>; 1] = ["1"; 1]; + println(v); // will print [1] + + let a = v[0]; + println(a); // will print `1` + + let bytes = [ "aaa", "bbb", "ccc" ]; + println(bytes); +} + fn regression_2906() { let array_two_vals = [1, 2]; - dep::std::println(f"array_two_vals: 
{array_two_vals}"); + println(f"array_two_vals: {array_two_vals}"); let label_two_vals = "12"; - dep::std::println(f"label_two_vals: {label_two_vals}"); + println(f"label_two_vals: {label_two_vals}"); let array_five_vals = [1, 2, 3, 4, 5]; - dep::std::println(f"array_five_vals: {array_five_vals}"); + println(f"array_five_vals: {array_five_vals}"); let label_five_vals = "12345"; - dep::std::println(f"label_five_vals: {label_five_vals}"); + println(f"label_five_vals: {label_five_vals}"); - dep::std::println(f"array_five_vals: {array_five_vals}, label_five_vals: {label_five_vals}"); + println(f"array_five_vals: {array_five_vals}, label_five_vals: {label_five_vals}"); } From 2fc95d2d82b3220267ce7d5815e7073e00ef1360 Mon Sep 17 00:00:00 2001 From: Tom French <15848336+TomAFrench@users.noreply.github.com> Date: Thu, 15 Feb 2024 20:07:16 +0000 Subject: [PATCH 19/39] chore(ci)!: Bump MSRV to 1.72.1 and enforce that ACVM can be published using updated lockfile (#4385) # Description ## Problem\* Resolves #4384 ## Summary\* This PR resolves #4384 by updating the lockfile so that it's using the latest versions of the dependencies used by all of the ACVM packages and then runs the test suite against it to ensure that we still work correctly. ## Additional Context ## Documentation\* Check one: - [x] No documentation needed. - [ ] Documentation included in this PR. - [ ] **[Exceptional Case]** Documentation to be submitted in a separate PR. # PR Checklist\* - [x] I have tested the changes locally. - [x] I have formatted the changes with [Prettier](https://prettier.io/) and/or `cargo fmt` on default settings. 
--------- Co-authored-by: kevaundray --- .../workflows/test-rust-workspace-msrv.yml | 112 ++++++++++++++++++ flake.nix | 2 +- rust-toolchain.toml | 2 +- 3 files changed, 114 insertions(+), 2 deletions(-) create mode 100644 .github/workflows/test-rust-workspace-msrv.yml diff --git a/.github/workflows/test-rust-workspace-msrv.yml b/.github/workflows/test-rust-workspace-msrv.yml new file mode 100644 index 00000000000..02444b52856 --- /dev/null +++ b/.github/workflows/test-rust-workspace-msrv.yml @@ -0,0 +1,112 @@ +name: Test (MSRV check) + +# TL;DR https://github.com/noir-lang/noir/issues/4384 +# +# This workflow acts to ensure that we can publish to crates.io, we need this extra check as libraries don't respect the Cargo.lock file committed in this repository. +# We must then always be able to build the workspace using the latest versions of all of our dependencies, so we explicitly update them and build in this workflow. + +on: + pull_request: + merge_group: + push: + branches: + - master + +# This will cancel previous runs when a branch or PR is updated +concurrency: + group: ${{ github.workflow }}-${{ github.head_ref || github.ref || github.run_id }} + cancel-in-progress: true + +jobs: + build-test-artifacts: + name: Build test artifacts + runs-on: ubuntu-latest + timeout-minutes: 30 + + steps: + - name: Checkout + uses: actions/checkout@v4 + + - name: Setup toolchain + uses: dtolnay/rust-toolchain@1.71.1 + with: + targets: x86_64-unknown-linux-gnu + + # We force the ACVM crate and all of its dependencies to update their dependencies + # This ensures that we'll be able to build the crates when they're being published. 
+ - name: Update Cargo.lock + run: | + cargo update --package acvm --aggressive + cargo update --package bn254_blackbox_solver --aggressive + + - uses: Swatinem/rust-cache@v2 + with: + key: x86_64-unknown-linux-gnu-msrv-check + cache-on-failure: true + save-if: ${{ github.event_name != 'merge_group' }} + + - name: Install nextest + uses: taiki-e/install-action@v2 + with: + tool: nextest@0.9.67 + + - name: Build and archive tests + run: cargo nextest archive --workspace --release --archive-file nextest-archive.tar.zst + + - name: Upload archive to workflow + uses: actions/upload-artifact@v4 + with: + name: nextest-archive + path: nextest-archive.tar.zst + + run-tests: + name: "Run tests (partition ${{matrix.partition}})" + runs-on: ubuntu-latest + needs: [build-test-artifacts] + strategy: + fail-fast: false + matrix: + partition: [1, 2, 3, 4] + steps: + - uses: actions/checkout@v4 + + - name: Setup toolchain + uses: dtolnay/rust-toolchain@1.71.1 + with: + targets: x86_64-unknown-linux-gnu + + - name: Install nextest + uses: taiki-e/install-action@v2 + with: + tool: nextest@0.9.67 + + - name: Download archive + uses: actions/download-artifact@v4 + with: + name: nextest-archive + - name: Run tests + run: | + cargo nextest run --archive-file nextest-archive.tar.zst \ + --partition count:${{ matrix.partition }}/4 + + # This is a job which depends on all test jobs and reports the overall status. + # This allows us to add/remove test jobs without having to update the required workflows. + tests-end: + name: Rust End + runs-on: ubuntu-latest + # We want this job to always run (even if the dependant jobs fail) as we want this job to fail rather than skipping. + if: ${{ always() }} + needs: + - run-tests + + steps: + - name: Report overall success + run: | + if [[ $FAIL == true ]]; then + exit 1 + else + exit 0 + fi + env: + # We treat any cancelled, skipped or failing jobs as a failure for the workflow as a whole. 
+ FAIL: ${{ contains(needs.*.result, 'failure') || contains(needs.*.result, 'cancelled') || contains(needs.*.result, 'skipped') }} diff --git a/flake.nix b/flake.nix index 659df12f260..f0d0a2eaebb 100644 --- a/flake.nix +++ b/flake.nix @@ -44,7 +44,7 @@ rustToolchain = fenix.packages.${system}.fromToolchainFile { file = ./rust-toolchain.toml; - sha256 = "sha256-R0F0Risbr74xg9mEYydyebx/z0Wu6HI0/KWwrV30vZo="; + sha256 = "sha256-dxE7lmCFWlq0nl/wKcmYvpP9zqQbBitAQgZ1zx9Ooik="; }; craneLib = (crane.mkLib pkgs).overrideToolchain rustToolchain; diff --git a/rust-toolchain.toml b/rust-toolchain.toml index b6f7edc4bde..955e24485fc 100644 --- a/rust-toolchain.toml +++ b/rust-toolchain.toml @@ -1,5 +1,5 @@ [toolchain] -channel = "1.71.1" +channel = "1.72.1" components = [ "rust-src" ] targets = [ "wasm32-unknown-unknown", "wasm32-wasi", "aarch64-apple-darwin" ] profile = "default" From a7cc16b85f642232939f2a23755700228c27912d Mon Sep 17 00:00:00 2001 From: Tom French <15848336+TomAFrench@users.noreply.github.com> Date: Thu, 15 Feb 2024 20:08:38 +0000 Subject: [PATCH 20/39] chore(ci): add alerts for failed publishes (#4388) # Description ## Problem\* Resolves ## Summary\* This PR adds an action to open an issue if either the ACVM crates or JS packages fail to publish. ## Additional Context ## Documentation\* Check one: - [x] No documentation needed. - [ ] Documentation included in this PR. - [ ] **[Exceptional Case]** Documentation to be submitted in a separate PR. # PR Checklist\* - [x] I have tested the changes locally. - [x] I have formatted the changes with [Prettier](https://prettier.io/) and/or `cargo fmt` on default settings. 
Co-authored-by: kevaundray --- .github/CRATES_IO_PUBLISH_FAILED.md | 10 ++++++++++ .github/JS_PUBLISH_FAILED.md | 11 +++++++++++ .github/workflows/publish-acvm.yml | 13 +++++++++++++ .github/workflows/publish-es-packages.yml | 13 +++++++++++++ 4 files changed, 47 insertions(+) create mode 100644 .github/CRATES_IO_PUBLISH_FAILED.md create mode 100644 .github/JS_PUBLISH_FAILED.md diff --git a/.github/CRATES_IO_PUBLISH_FAILED.md b/.github/CRATES_IO_PUBLISH_FAILED.md new file mode 100644 index 00000000000..ec4de319772 --- /dev/null +++ b/.github/CRATES_IO_PUBLISH_FAILED.md @@ -0,0 +1,10 @@ +--- +title: "ACVM crates failed to publish" +assignees: TomAFrench kevaundray savio-sou +--- + +The {{env.CRATE_VERSION}} release of the ACVM crates failed. + +Check the [Publish ACVM]({{env.WORKFLOW_URL}}) workflow for details. + +This issue was raised by the workflow `{{env.WORKFLOW_NAME}}` diff --git a/.github/JS_PUBLISH_FAILED.md b/.github/JS_PUBLISH_FAILED.md new file mode 100644 index 00000000000..5b9f79aac1f --- /dev/null +++ b/.github/JS_PUBLISH_FAILED.md @@ -0,0 +1,11 @@ +--- +title: "JS packages failed to publish" +assignees: TomAFrench kevaundray savio-sou +labels: js +--- + +The {{env.NPM_TAG}} release of the JS packages failed. + +Check the [Publish JS packages]({{env.WORKFLOW_URL}}) workflow for details. 
+ +This issue was raised by the workflow `{{env.WORKFLOW_NAME}}` diff --git a/.github/workflows/publish-acvm.yml b/.github/workflows/publish-acvm.yml index 0251aaa0377..e19a61fff4f 100644 --- a/.github/workflows/publish-acvm.yml +++ b/.github/workflows/publish-acvm.yml @@ -62,3 +62,16 @@ jobs: cargo publish --package acvm env: CARGO_REGISTRY_TOKEN: ${{ secrets.ACVM_CRATES_IO_TOKEN }} + + # Raise an issue if any package failed to publish + - name: Alert on failed publish + uses: JasonEtco/create-an-issue@v2 + if: ${{ failure() }} + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + CRATE_VERSION: ${{ inputs.noir-ref }} + WORKFLOW_NAME: ${{ github.workflow }} + WORKFLOW_URL: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }} + with: + update_existing: true + filename: .github/JS_PUBLISH_FAILED.md \ No newline at end of file diff --git a/.github/workflows/publish-es-packages.yml b/.github/workflows/publish-es-packages.yml index fa245883ced..d4cd356a138 100644 --- a/.github/workflows/publish-es-packages.yml +++ b/.github/workflows/publish-es-packages.yml @@ -143,3 +143,16 @@ jobs: - name: Publish ES Packages run: yarn publish:all --access public --tag ${{ inputs.npm-tag }} + + # Raise an issue if any package failed to publish + - name: Alert on failed publish + uses: JasonEtco/create-an-issue@v2 + if: ${{ failure() }} + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + NPM_TAG: ${{ inputs.npm-tag }} + WORKFLOW_NAME: ${{ github.workflow }} + WORKFLOW_URL: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }} + with: + update_existing: true + filename: .github/JS_PUBLISH_FAILED.md \ No newline at end of file From f77f702e0cfb81dcce4dd97e274b831e887ba5d2 Mon Sep 17 00:00:00 2001 From: josh crites Date: Thu, 15 Feb 2024 21:11:53 -0500 Subject: [PATCH 21/39] fix(docs): Update noirjs_app for 0.23 (#4378) # Description Updates the tutorial to work with v 0.23.0 ## Problem\* Resolves ## Summary\* ## Additional 
Context ## Documentation\* Check one: - [ ] No documentation needed. - [ ] Documentation included in this PR. - [ ] **[Exceptional Case]** Documentation to be submitted in a separate PR. # PR Checklist\* - [ ] I have tested the changes locally. - [ ] I have formatted the changes with [Prettier](https://prettier.io/) and/or `cargo fmt` on default settings. --- .../version-v0.23.0/tutorials/noirjs_app.md | 22 ++++++++++++++++--- 1 file changed, 19 insertions(+), 3 deletions(-) diff --git a/docs/versioned_docs/version-v0.23.0/tutorials/noirjs_app.md b/docs/versioned_docs/version-v0.23.0/tutorials/noirjs_app.md index ad76dd255cc..82899217e61 100644 --- a/docs/versioned_docs/version-v0.23.0/tutorials/noirjs_app.md +++ b/docs/versioned_docs/version-v0.23.0/tutorials/noirjs_app.md @@ -14,9 +14,9 @@ You can find the complete app code for this guide [here](https://github.com/noir :::note -Feel free to use whatever versions, just keep in mind that Nargo and the NoirJS packages are meant to be in sync. For example, Nargo 0.19.x matches `noir_js@0.19.x`, etc. +Feel free to use whatever versions, just keep in mind that Nargo and the NoirJS packages are meant to be in sync. For example, Nargo 0.23.x matches `noir_js@0.23.x`, etc. -In this guide, we will be pinned to 0.19.4. +In this guide, we will be pinned to 0.23.0. ::: @@ -80,7 +80,7 @@ To do this this, go back to the previous folder (`cd ..`) and create a new vite You should see `vite-project` appear in your root folder. 
This seems like a good time to `cd` into it and install our NoirJS packages: ```bash -npm i @noir-lang/backend_barretenberg@0.19.4 @noir-lang/noir_js@0.19.4 +npm i @noir-lang/backend_barretenberg@0.23.0 @noir-lang/noir_js@0.23.0 vite-plugin-top-level-await ``` :::info @@ -99,6 +99,22 @@ At this point in the tutorial, your folder structure should look like this: #### Some cleanup +Add a `vite.config.js` file containing the following: + +```js +import { defineConfig } from 'vite'; +import topLevelAwait from "vite-plugin-top-level-await"; + +export default defineConfig({ + plugins: [ + topLevelAwait({ + promiseExportName: "__tla", + promiseImportName: i => `__tla_${i}` + }) + ] +}) +``` + `npx create vite` is amazing but it creates a bunch of files we don't really need for our simple example. Actually, let's just delete everything except for `index.html`, `main.js` and `package.json`. I feel lighter already. ![my heart is ready for you, noir.js](@site/static/img/memes/titanic.jpeg) From 70866aea976d59dbcbd4af34067fdd8f46555673 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=81lvaro=20Rodr=C3=ADguez?= Date: Fri, 16 Feb 2024 10:59:53 +0100 Subject: [PATCH 22/39] fix: Enforce matching types of binary ops in SSA (#4391) # Description ## Problem\* Resolves https://github.com/noir-lang/noir/issues/4275 ## Summary\* Adds a check in insert_binary to make sure we don't start codegening non-matching binary ops again. ## Additional Context ## Documentation\* Check one: - [x] No documentation needed. - [ ] Documentation included in this PR. - [ ] **[Exceptional Case]** Documentation to be submitted in a separate PR. # PR Checklist\* - [x] I have tested the changes locally. - [x] I have formatted the changes with [Prettier](https://prettier.io/) and/or `cargo fmt` on default settings. 
--- compiler/noirc_evaluator/src/ssa/function_builder/mod.rs | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/compiler/noirc_evaluator/src/ssa/function_builder/mod.rs b/compiler/noirc_evaluator/src/ssa/function_builder/mod.rs index 9e17595a033..fe71b876879 100644 --- a/compiler/noirc_evaluator/src/ssa/function_builder/mod.rs +++ b/compiler/noirc_evaluator/src/ssa/function_builder/mod.rs @@ -216,6 +216,11 @@ impl FunctionBuilder { operator: BinaryOp, rhs: ValueId, ) -> ValueId { + assert_eq!( + self.type_of_value(lhs), + self.type_of_value(rhs), + "ICE - Binary instruction operands must have the same type" + ); let instruction = Instruction::Binary(Binary { lhs, rhs, operator }); self.insert_instruction(instruction, None).first() } From b283637e092038eb296c468168aec2d41e1c2734 Mon Sep 17 00:00:00 2001 From: josh crites Date: Fri, 16 Feb 2024 12:13:50 -0500 Subject: [PATCH 23/39] fix(docs): update install versions (#4396) # Description Updates the install scripts to reference the correct versions. ## Problem\* Resolves #4271 ## Summary\* ## Additional Context ## Documentation\* Check one: - [ ] No documentation needed. - [ ] Documentation included in this PR. - [ ] **[Exceptional Case]** Documentation to be submitted in a separate PR. # PR Checklist\* - [ ] I have tested the changes locally. - [ ] I have formatted the changes with [Prettier](https://prettier.io/) and/or `cargo fmt` on default settings. 
--- .../getting_started/installation/other_install_methods.md | 6 +++--- .../getting_started/installation/other_install_methods.md | 6 +++--- .../getting_started/installation/other_install_methods.md | 6 +++--- 3 files changed, 9 insertions(+), 9 deletions(-) diff --git a/docs/docs/getting_started/installation/other_install_methods.md b/docs/docs/getting_started/installation/other_install_methods.md index 489f1eda802..076f26dfd94 100644 --- a/docs/docs/getting_started/installation/other_install_methods.md +++ b/docs/docs/getting_started/installation/other_install_methods.md @@ -112,7 +112,7 @@ Paste and run the following in the terminal to extract and install the binary: ```bash mkdir -p $HOME/.nargo/bin && \ -curl -o $HOME/.nargo/bin/nargo-aarch64-apple-darwin.tar.gz -L https://github.com/noir-lang/noir/releases/download/v0.6.0/nargo-aarch64-apple-darwin.tar.gz && \ +curl -o $HOME/.nargo/bin/nargo-aarch64-apple-darwin.tar.gz -L https://github.com/noir-lang/noir/releases/download/v0.24.0/nargo-aarch64-apple-darwin.tar.gz && \ tar -xvf $HOME/.nargo/bin/nargo-aarch64-apple-darwin.tar.gz -C $HOME/.nargo/bin/ && \ echo '\nexport PATH=$PATH:$HOME/.nargo/bin' >> ~/.zshrc && \ source ~/.zshrc @@ -122,7 +122,7 @@ source ~/.zshrc ```bash mkdir -p $HOME/.nargo/bin && \ -curl -o $HOME/.nargo/bin/nargo-x86_64-apple-darwin.tar.gz -L https://github.com/noir-lang/noir/releases/download/v0.6.0/nargo-x86_64-apple-darwin.tar.gz && \ +curl -o $HOME/.nargo/bin/nargo-x86_64-apple-darwin.tar.gz -L https://github.com/noir-lang/noir/releases/download/v0.24.0/nargo-x86_64-apple-darwin.tar.gz && \ tar -xvf $HOME/.nargo/bin/nargo-x86_64-apple-darwin.tar.gz -C $HOME/.nargo/bin/ && \ echo '\nexport PATH=$PATH:$HOME/.nargo/bin' >> ~/.zshrc && \ source ~/.zshrc @@ -132,7 +132,7 @@ source ~/.zshrc ```bash mkdir -p $HOME/.nargo/bin && \ -curl -o $HOME/.nargo/bin/nargo-x86_64-unknown-linux-gnu.tar.gz -L https://github.com/noir-lang/noir/releases/download/v0.6.0/nargo-x86_64-unknown-linux-gnu.tar.gz 
&& \ +curl -o $HOME/.nargo/bin/nargo-x86_64-unknown-linux-gnu.tar.gz -L https://github.com/noir-lang/noir/releases/download/v0.24.0/nargo-x86_64-unknown-linux-gnu.tar.gz && \ tar -xvf $HOME/.nargo/bin/nargo-x86_64-unknown-linux-gnu.tar.gz -C $HOME/.nargo/bin/ && \ echo -e '\nexport PATH=$PATH:$HOME/.nargo/bin' >> ~/.bashrc && \ source ~/.bashrc diff --git a/docs/versioned_docs/version-v0.23.0/getting_started/installation/other_install_methods.md b/docs/versioned_docs/version-v0.23.0/getting_started/installation/other_install_methods.md index a532f83750e..746633b628d 100644 --- a/docs/versioned_docs/version-v0.23.0/getting_started/installation/other_install_methods.md +++ b/docs/versioned_docs/version-v0.23.0/getting_started/installation/other_install_methods.md @@ -48,7 +48,7 @@ Paste and run the following in the terminal to extract and install the binary: ```bash mkdir -p $HOME/.nargo/bin && \ -curl -o $HOME/.nargo/bin/nargo-aarch64-apple-darwin.tar.gz -L https://github.com/noir-lang/noir/releases/download/v0.6.0/nargo-aarch64-apple-darwin.tar.gz && \ +curl -o $HOME/.nargo/bin/nargo-aarch64-apple-darwin.tar.gz -L https://github.com/noir-lang/noir/releases/download/v0.23.0/nargo-aarch64-apple-darwin.tar.gz && \ tar -xvf $HOME/.nargo/bin/nargo-aarch64-apple-darwin.tar.gz -C $HOME/.nargo/bin/ && \ echo '\nexport PATH=$PATH:$HOME/.nargo/bin' >> ~/.zshrc && \ source ~/.zshrc @@ -58,7 +58,7 @@ source ~/.zshrc ```bash mkdir -p $HOME/.nargo/bin && \ -curl -o $HOME/.nargo/bin/nargo-x86_64-apple-darwin.tar.gz -L https://github.com/noir-lang/noir/releases/download/v0.6.0/nargo-x86_64-apple-darwin.tar.gz && \ +curl -o $HOME/.nargo/bin/nargo-x86_64-apple-darwin.tar.gz -L https://github.com/noir-lang/noir/releases/download/v0.23.0/nargo-x86_64-apple-darwin.tar.gz && \ tar -xvf $HOME/.nargo/bin/nargo-x86_64-apple-darwin.tar.gz -C $HOME/.nargo/bin/ && \ echo '\nexport PATH=$PATH:$HOME/.nargo/bin' >> ~/.zshrc && \ source ~/.zshrc @@ -68,7 +68,7 @@ source ~/.zshrc ```bash mkdir -p 
$HOME/.nargo/bin && \ -curl -o $HOME/.nargo/bin/nargo-x86_64-unknown-linux-gnu.tar.gz -L https://github.com/noir-lang/noir/releases/download/v0.6.0/nargo-x86_64-unknown-linux-gnu.tar.gz && \ +curl -o $HOME/.nargo/bin/nargo-x86_64-unknown-linux-gnu.tar.gz -L https://github.com/noir-lang/noir/releases/download/v0.23.0/nargo-x86_64-unknown-linux-gnu.tar.gz && \ tar -xvf $HOME/.nargo/bin/nargo-x86_64-unknown-linux-gnu.tar.gz -C $HOME/.nargo/bin/ && \ echo -e '\nexport PATH=$PATH:$HOME/.nargo/bin' >> ~/.bashrc && \ source ~/.bashrc diff --git a/docs/versioned_docs/version-v0.24.0/getting_started/installation/other_install_methods.md b/docs/versioned_docs/version-v0.24.0/getting_started/installation/other_install_methods.md index 489f1eda802..076f26dfd94 100644 --- a/docs/versioned_docs/version-v0.24.0/getting_started/installation/other_install_methods.md +++ b/docs/versioned_docs/version-v0.24.0/getting_started/installation/other_install_methods.md @@ -112,7 +112,7 @@ Paste and run the following in the terminal to extract and install the binary: ```bash mkdir -p $HOME/.nargo/bin && \ -curl -o $HOME/.nargo/bin/nargo-aarch64-apple-darwin.tar.gz -L https://github.com/noir-lang/noir/releases/download/v0.6.0/nargo-aarch64-apple-darwin.tar.gz && \ +curl -o $HOME/.nargo/bin/nargo-aarch64-apple-darwin.tar.gz -L https://github.com/noir-lang/noir/releases/download/v0.24.0/nargo-aarch64-apple-darwin.tar.gz && \ tar -xvf $HOME/.nargo/bin/nargo-aarch64-apple-darwin.tar.gz -C $HOME/.nargo/bin/ && \ echo '\nexport PATH=$PATH:$HOME/.nargo/bin' >> ~/.zshrc && \ source ~/.zshrc @@ -122,7 +122,7 @@ source ~/.zshrc ```bash mkdir -p $HOME/.nargo/bin && \ -curl -o $HOME/.nargo/bin/nargo-x86_64-apple-darwin.tar.gz -L https://github.com/noir-lang/noir/releases/download/v0.6.0/nargo-x86_64-apple-darwin.tar.gz && \ +curl -o $HOME/.nargo/bin/nargo-x86_64-apple-darwin.tar.gz -L https://github.com/noir-lang/noir/releases/download/v0.24.0/nargo-x86_64-apple-darwin.tar.gz && \ tar -xvf 
$HOME/.nargo/bin/nargo-x86_64-apple-darwin.tar.gz -C $HOME/.nargo/bin/ && \ echo '\nexport PATH=$PATH:$HOME/.nargo/bin' >> ~/.zshrc && \ source ~/.zshrc @@ -132,7 +132,7 @@ source ~/.zshrc ```bash mkdir -p $HOME/.nargo/bin && \ -curl -o $HOME/.nargo/bin/nargo-x86_64-unknown-linux-gnu.tar.gz -L https://github.com/noir-lang/noir/releases/download/v0.6.0/nargo-x86_64-unknown-linux-gnu.tar.gz && \ +curl -o $HOME/.nargo/bin/nargo-x86_64-unknown-linux-gnu.tar.gz -L https://github.com/noir-lang/noir/releases/download/v0.24.0/nargo-x86_64-unknown-linux-gnu.tar.gz && \ tar -xvf $HOME/.nargo/bin/nargo-x86_64-unknown-linux-gnu.tar.gz -C $HOME/.nargo/bin/ && \ echo -e '\nexport PATH=$PATH:$HOME/.nargo/bin' >> ~/.bashrc && \ source ~/.bashrc From 46f22045bd11f96ab95a5c2a338d19dd049d8635 Mon Sep 17 00:00:00 2001 From: Michael J Klein Date: Fri, 16 Feb 2024 14:38:24 -0500 Subject: [PATCH 24/39] chore: remove dependency on generational-arena (#4207) # Description ## Problem\* Resolves https://github.com/noir-lang/noir/issues/15#issuecomment-1914950104 ## Summary\* Replaces `generational-arena`'s `Arena` class with a thin wrapper around `Vec` ## Additional Context The thin wrapper is helpful for: 1. `insert` returning the index 2. `iter` iterating over the `(index, item)` pairs ## Documentation\* Check one: - [x] No documentation needed. - [ ] Documentation included in this PR. - [ ] **[Exceptional Case]** Documentation to be submitted in a separate PR. # PR Checklist\* - [x] I have tested the changes locally. - [ ] I have formatted the changes with [Prettier](https://prettier.io/) and/or `cargo fmt` on default settings. 
--------- Co-authored-by: kevaundray Co-authored-by: Jake Fecher Co-authored-by: jfecher --- Cargo.lock | 12 --- Cargo.toml | 4 - aztec_macros/src/lib.rs | 9 +- .../noirc_frontend/src/hir/def_map/mod.rs | 2 +- .../src/hir/resolution/resolver.rs | 2 +- .../noirc_frontend/src/hir/type_check/expr.rs | 15 ++-- .../noirc_frontend/src/hir/type_check/mod.rs | 4 +- .../noirc_frontend/src/hir/type_check/stmt.rs | 2 +- compiler/noirc_frontend/src/hir_def/types.rs | 9 +- .../src/monomorphization/debug.rs | 4 +- .../src/monomorphization/mod.rs | 4 +- compiler/noirc_frontend/src/node_interner.rs | 57 +++++------- compiler/utils/arena/Cargo.toml | 5 -- compiler/utils/arena/src/lib.rs | 88 ++++++++++++++++++- deny.toml | 2 +- 15 files changed, 134 insertions(+), 85 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index a7d721ef097..4d8b12d5379 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -213,9 +213,6 @@ checksum = "a4668cab20f66d8d020e1fbc0ebe47217433c1b6c8f2040faf858554e394ace6" [[package]] name = "arena" version = "0.24.0" -dependencies = [ - "generational-arena", -] [[package]] name = "ark-bls12-381" @@ -1842,15 +1839,6 @@ dependencies = [ "byteorder", ] -[[package]] -name = "generational-arena" -version = "0.2.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "877e94aff08e743b651baaea359664321055749b398adff8740a7399af7796e7" -dependencies = [ - "cfg-if 1.0.0", -] - [[package]] name = "generic-array" version = "0.14.7" diff --git a/Cargo.toml b/Cargo.toml index 4f95e3b0821..77058554aff 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -69,12 +69,10 @@ noirc_errors = { path = "compiler/noirc_errors" } noirc_evaluator = { path = "compiler/noirc_evaluator" } noirc_frontend = { path = "compiler/noirc_frontend" } noirc_printable_type = { path = "compiler/noirc_printable_type" } -noir_wasm = { path = "compiler/wasm" } # Noir tooling workspace dependencies nargo = { path = "tooling/nargo" } nargo_fmt = { path = "tooling/nargo_fmt" } -nargo_cli = { path = 
"tooling/nargo_cli" } nargo_toml = { path = "tooling/nargo_toml" } noir_lsp = { path = "tooling/lsp" } noir_debugger = { path = "tooling/debugger" } @@ -97,8 +95,6 @@ getrandom = "0.2" # Debugger dap = "0.4.1-alpha1" - -cfg-if = "1.0.0" clap = { version = "4.3.19", features = ["derive", "env"] } codespan = { version = "0.11.1", features = ["serialization"] } codespan-lsp = "0.11.1" diff --git a/aztec_macros/src/lib.rs b/aztec_macros/src/lib.rs index 21e3dd56e0d..0b93dbaa634 100644 --- a/aztec_macros/src/lib.rs +++ b/aztec_macros/src/lib.rs @@ -697,7 +697,7 @@ fn collect_traits(context: &HirContext) -> Vec { crates .flat_map(|crate_id| context.def_map(&crate_id).map(|def_map| def_map.modules())) .flatten() - .flat_map(|(_, module)| { + .flat_map(|module| { module.type_definitions().filter_map(|typ| { if let ModuleDefId::TraitId(struct_id) = typ { Some(struct_id) @@ -763,11 +763,11 @@ fn transform_event( HirExpression::Literal(HirLiteral::Str(signature)) if signature == SIGNATURE_PLACEHOLDER => { - let selector_literal_id = first_arg_id; + let selector_literal_id = *first_arg_id; let structure = interner.get_struct(struct_id); let signature = event_signature(&structure.borrow()); - interner.update_expression(*selector_literal_id, |expr| { + interner.update_expression(selector_literal_id, |expr| { *expr = HirExpression::Literal(HirLiteral::Str(signature.clone())); }); @@ -833,7 +833,7 @@ fn get_serialized_length( let serialized_trait_impl_kind = traits .iter() - .filter_map(|&trait_id| { + .find_map(|&trait_id| { let r#trait = interner.get_trait(trait_id); if r#trait.borrow().name.0.contents == "Serialize" && r#trait.borrow().generics.len() == 1 @@ -846,7 +846,6 @@ fn get_serialized_length( None } }) - .next() .ok_or(AztecMacroError::CouldNotAssignStorageSlots { secondary_message: Some("Stored data must implement Serialize trait".to_string()), })?; diff --git a/compiler/noirc_frontend/src/hir/def_map/mod.rs b/compiler/noirc_frontend/src/hir/def_map/mod.rs index 
8c985e88e0b..8e0dacc294b 100644 --- a/compiler/noirc_frontend/src/hir/def_map/mod.rs +++ b/compiler/noirc_frontend/src/hir/def_map/mod.rs @@ -31,7 +31,7 @@ pub struct LocalModuleId(pub Index); impl LocalModuleId { pub fn dummy_id() -> LocalModuleId { - LocalModuleId(Index::from_raw_parts(std::usize::MAX, std::u64::MAX)) + LocalModuleId(Index::dummy()) } } diff --git a/compiler/noirc_frontend/src/hir/resolution/resolver.rs b/compiler/noirc_frontend/src/hir/resolution/resolver.rs index d4aae133b35..f05a69be7c2 100644 --- a/compiler/noirc_frontend/src/hir/resolution/resolver.rs +++ b/compiler/noirc_frontend/src/hir/resolution/resolver.rs @@ -1463,7 +1463,7 @@ impl<'a> Resolver<'a> { // they're used in expressions. We must do this here since the type // checker does not check definition kinds and otherwise expects // parameters to already be typed. - if self.interner.id_type(hir_ident.id) == Type::Error { + if self.interner.definition_type(hir_ident.id) == Type::Error { let typ = Type::polymorphic_integer(self.interner); self.interner.push_definition_type(hir_ident.id, typ); } diff --git a/compiler/noirc_frontend/src/hir/type_check/expr.rs b/compiler/noirc_frontend/src/hir/type_check/expr.rs index b6bb5984bcd..a669a4a246e 100644 --- a/compiler/noirc_frontend/src/hir/type_check/expr.rs +++ b/compiler/noirc_frontend/src/hir/type_check/expr.rs @@ -284,8 +284,9 @@ impl<'interner> TypeChecker<'interner> { Type::Tuple(vecmap(&elements, |elem| self.check_expression(elem))) } HirExpression::Lambda(lambda) => { - let captured_vars = - vecmap(lambda.captures, |capture| self.interner.id_type(capture.ident.id)); + let captured_vars = vecmap(lambda.captures, |capture| { + self.interner.definition_type(capture.ident.id) + }); let env_type: Type = if captured_vars.is_empty() { Type::Unit } else { Type::Tuple(captured_vars) }; @@ -308,7 +309,7 @@ impl<'interner> TypeChecker<'interner> { } }; - self.interner.push_expr_type(expr_id, typ.clone()); + self.interner.push_expr_type(*expr_id, 
typ.clone()); typ } @@ -459,7 +460,7 @@ impl<'interner> TypeChecker<'interner> { operator: UnaryOp::MutableReference, rhs: method_call.object, })); - self.interner.push_expr_type(&new_object, new_type); + self.interner.push_expr_type(new_object, new_type); self.interner.push_expr_location(new_object, location.span, location.file); new_object }); @@ -485,7 +486,7 @@ impl<'interner> TypeChecker<'interner> { operator: UnaryOp::Dereference { implicitly_added: true }, rhs: object, })); - self.interner.push_expr_type(&object, element.as_ref().clone()); + self.interner.push_expr_type(object, element.as_ref().clone()); self.interner.push_expr_location(object, location.span, location.file); // Recursively dereference to allow for converting &mut &mut T to T @@ -682,8 +683,8 @@ impl<'interner> TypeChecker<'interner> { operator: crate::UnaryOp::Dereference { implicitly_added: true }, rhs: old_lhs, })); - this.interner.push_expr_type(&old_lhs, lhs_type); - this.interner.push_expr_type(access_lhs, element); + this.interner.push_expr_type(old_lhs, lhs_type); + this.interner.push_expr_type(*access_lhs, element); let old_location = this.interner.id_location(old_lhs); this.interner.push_expr_location(*access_lhs, span, old_location.file); diff --git a/compiler/noirc_frontend/src/hir/type_check/mod.rs b/compiler/noirc_frontend/src/hir/type_check/mod.rs index 8952ba83586..225f5756d7a 100644 --- a/compiler/noirc_frontend/src/hir/type_check/mod.rs +++ b/compiler/noirc_frontend/src/hir/type_check/mod.rs @@ -458,7 +458,7 @@ mod test { } fn local_module_id(&self) -> LocalModuleId { - LocalModuleId(arena::Index::from_raw_parts(0, 0)) + LocalModuleId(arena::Index::unsafe_zeroed()) } fn module_id(&self) -> ModuleId { @@ -509,7 +509,7 @@ mod test { let mut def_maps = BTreeMap::new(); let file = FileId::default(); - let mut modules = arena::Arena::new(); + let mut modules = arena::Arena::default(); let location = Location::new(Default::default(), file); modules.insert(ModuleData::new(None, 
location, false)); diff --git a/compiler/noirc_frontend/src/hir/type_check/stmt.rs b/compiler/noirc_frontend/src/hir/type_check/stmt.rs index 03d61b93e0c..370b4ee7b17 100644 --- a/compiler/noirc_frontend/src/hir/type_check/stmt.rs +++ b/compiler/noirc_frontend/src/hir/type_check/stmt.rs @@ -192,7 +192,7 @@ impl<'interner> TypeChecker<'interner> { mutable = definition.mutable; } - let typ = self.interner.id_type(ident.id).instantiate(self.interner).0; + let typ = self.interner.definition_type(ident.id).instantiate(self.interner).0; typ.follow_bindings() }; diff --git a/compiler/noirc_frontend/src/hir_def/types.rs b/compiler/noirc_frontend/src/hir_def/types.rs index 98b47f17cd4..d4d8a948460 100644 --- a/compiler/noirc_frontend/src/hir_def/types.rs +++ b/compiler/noirc_frontend/src/hir_def/types.rs @@ -1672,11 +1672,10 @@ fn convert_array_expression_to_slice( interner.push_expr_location(call, location.span, location.file); interner.push_expr_location(func, location.span, location.file); - interner.push_expr_type(&call, target_type.clone()); - interner.push_expr_type( - &func, - Type::Function(vec![array_type], Box::new(target_type), Box::new(Type::Unit)), - ); + interner.push_expr_type(call, target_type.clone()); + + let func_type = Type::Function(vec![array_type], Box::new(target_type), Box::new(Type::Unit)); + interner.push_expr_type(func, func_type); } impl BinaryTypeOperator { diff --git a/compiler/noirc_frontend/src/monomorphization/debug.rs b/compiler/noirc_frontend/src/monomorphization/debug.rs index d36816e3d37..5837d67660a 100644 --- a/compiler/noirc_frontend/src/monomorphization/debug.rs +++ b/compiler/noirc_frontend/src/monomorphization/debug.rs @@ -143,7 +143,7 @@ impl<'interner> Monomorphizer<'interner> { let index_id = self.interner.push_expr(HirExpression::Literal( HirLiteral::Integer(field_index.into(), false), )); - self.interner.push_expr_type(&index_id, crate::Type::FieldElement); + self.interner.push_expr_type(index_id, crate::Type::FieldElement); 
self.interner.push_expr_location( index_id, call.location.span, @@ -171,7 +171,7 @@ impl<'interner> Monomorphizer<'interner> { fn intern_var_id(&mut self, var_id: DebugVarId, location: &Location) -> ExprId { let var_id_literal = HirLiteral::Integer((var_id.0 as u128).into(), false); let expr_id = self.interner.push_expr(HirExpression::Literal(var_id_literal)); - self.interner.push_expr_type(&expr_id, crate::Type::FieldElement); + self.interner.push_expr_type(expr_id, crate::Type::FieldElement); self.interner.push_expr_location(expr_id, location.span, location.file); expr_id } diff --git a/compiler/noirc_frontend/src/monomorphization/mod.rs b/compiler/noirc_frontend/src/monomorphization/mod.rs index 31a254d9f0a..0f243e47bbe 100644 --- a/compiler/noirc_frontend/src/monomorphization/mod.rs +++ b/compiler/noirc_frontend/src/monomorphization/mod.rs @@ -696,7 +696,7 @@ impl<'interner> Monomorphizer<'interner> { let mutable = definition.mutable; let definition = self.lookup_local(ident.id)?; - let typ = self.convert_type(&self.interner.id_type(ident.id)); + let typ = self.convert_type(&self.interner.definition_type(ident.id)); Some(ast::Ident { location: Some(ident.location), mutable, definition, name, typ }) } @@ -1040,7 +1040,7 @@ impl<'interner> Monomorphizer<'interner> { ) { match hir_argument { HirExpression::Ident(ident) => { - let typ = self.interner.id_type(ident.id); + let typ = self.interner.definition_type(ident.id); let typ: Type = typ.follow_bindings(); let is_fmt_str = match typ { // A format string has many different possible types that need to be handled. 
diff --git a/compiler/noirc_frontend/src/node_interner.rs b/compiler/noirc_frontend/src/node_interner.rs index 815bc4c5e9c..7d533947f65 100644 --- a/compiler/noirc_frontend/src/node_interner.rs +++ b/compiler/noirc_frontend/src/node_interner.rs @@ -75,13 +75,14 @@ pub struct NodeInterner { // Type checking map // - // Notice that we use `Index` as the Key and not an ExprId or IdentId - // Therefore, If a raw index is passed in, then it is not safe to assume that it will have - // a Type, as not all Ids have types associated to them. - // Further note, that an ExprId and an IdentId will never have the same underlying Index - // Because we use one Arena to store all Definitions/Nodes + // This should only be used with indices from the `nodes` arena. + // Otherwise the indices used may overwrite other existing indices. + // Each type for each index is filled in during type checking. id_to_type: HashMap, + // Similar to `id_to_type` but maps definitions to their type + definition_to_type: HashMap, + // Struct map. // // Each struct definition is possibly shared across multiple type nodes. 
@@ -277,12 +278,6 @@ impl DefinitionId { } } -impl From for Index { - fn from(id: DefinitionId) -> Self { - Index::from_raw_parts(id.0, u64::MAX) - } -} - /// An ID for a global value #[derive(Debug, Eq, PartialEq, Hash, Clone, Copy)] pub struct GlobalId(usize); @@ -302,7 +297,7 @@ impl StmtId { // This can be anything, as the program will ultimately fail // after resolution pub fn dummy_id() -> StmtId { - StmtId(Index::from_raw_parts(std::usize::MAX, 0)) + StmtId(Index::dummy()) } } @@ -311,7 +306,7 @@ pub struct ExprId(Index); impl ExprId { pub fn empty_block_id() -> ExprId { - ExprId(Index::from_raw_parts(0, 0)) + ExprId(Index::unsafe_zeroed()) } } #[derive(Debug, Eq, PartialEq, Hash, Copy, Clone)] @@ -322,7 +317,7 @@ impl FuncId { // This can be anything, as the program will ultimately fail // after resolution pub fn dummy_id() -> FuncId { - FuncId(Index::from_raw_parts(std::usize::MAX, 0)) + FuncId(Index::dummy()) } } @@ -396,23 +391,9 @@ macro_rules! into_index { }; } -macro_rules! 
partialeq { - ($id_type:ty) => { - impl PartialEq for &$id_type { - fn eq(&self, other: &usize) -> bool { - let (index, _) = self.0.into_raw_parts(); - index == *other - } - } - }; -} - into_index!(ExprId); into_index!(StmtId); -partialeq!(ExprId); -partialeq!(StmtId); - /// A Definition enum specifies anything that we can intern in the NodeInterner /// We use one Arena for all types that can be interned as that has better cache locality /// This data structure is never accessed directly, so API wise there is no difference between using @@ -496,6 +477,7 @@ impl Default for NodeInterner { id_to_location: HashMap::new(), definitions: vec![], id_to_type: HashMap::new(), + definition_to_type: HashMap::new(), structs: HashMap::new(), struct_attributes: HashMap::new(), type_aliases: Vec::new(), @@ -545,10 +527,15 @@ impl NodeInterner { } /// Store the type for an interned expression - pub fn push_expr_type(&mut self, expr_id: &ExprId, typ: Type) { + pub fn push_expr_type(&mut self, expr_id: ExprId, typ: Type) { self.id_to_type.insert(expr_id.into(), typ); } + /// Store the type for an interned expression + pub fn push_definition_type(&mut self, definition_id: DefinitionId, typ: Type) { + self.definition_to_type.insert(definition_id, typ); + } + pub fn push_empty_trait(&mut self, type_id: TraitId, unresolved_trait: &UnresolvedTrait) { let self_type_typevar_id = self.next_type_variable_id(); @@ -660,11 +647,6 @@ impl NodeInterner { } } - /// Store the type for an interned Identifier - pub fn push_definition_type(&mut self, definition_id: DefinitionId, typ: Type) { - self.id_to_type.insert(definition_id.into(), typ); - } - /// Store [Location] of [Type] reference pub fn push_type_ref_location(&mut self, typ: Type, location: Location) { self.type_ref_locations.push((typ, location)); @@ -980,8 +962,13 @@ impl NodeInterner { self.id_to_type.get(&index.into()).cloned().unwrap_or(Type::Error) } + /// Returns the type of the definition or `Type::Error` if it was not found. 
+ pub fn definition_type(&self, id: DefinitionId) -> Type { + self.definition_to_type.get(&id).cloned().unwrap_or(Type::Error) + } + pub fn id_type_substitute_trait_as_type(&self, def_id: DefinitionId) -> Type { - let typ = self.id_type(def_id); + let typ = self.definition_type(def_id); if let Type::Function(args, ret, env) = &typ { let def = self.definition(def_id); if let Type::TraitAsType(..) = ret.as_ref() { diff --git a/compiler/utils/arena/Cargo.toml b/compiler/utils/arena/Cargo.toml index e82201a2cf4..41c6ebc9a8b 100644 --- a/compiler/utils/arena/Cargo.toml +++ b/compiler/utils/arena/Cargo.toml @@ -4,8 +4,3 @@ version.workspace = true authors.workspace = true edition.workspace = true license.workspace = true - -# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html - -[dependencies] -generational-arena = "0.2.8" diff --git a/compiler/utils/arena/src/lib.rs b/compiler/utils/arena/src/lib.rs index fc19f44ab6e..2d117304e16 100644 --- a/compiler/utils/arena/src/lib.rs +++ b/compiler/utils/arena/src/lib.rs @@ -3,5 +3,89 @@ #![warn(unreachable_pub)] #![warn(clippy::semicolon_if_nothing_returned)] -// For now we use a wrapper around generational-arena -pub use generational_arena::{Arena, Index}; +#[derive(Clone, Copy, Debug, Eq, Ord, PartialEq, PartialOrd, Hash)] +pub struct Index(usize); + +impl Index { + #[cfg(test)] + pub fn test_new(index: usize) -> Index { + Self(index) + } + + /// Return a dummy index (max value internally). + /// This should be avoided over `Option` if possible. + pub fn dummy() -> Self { + Self(usize::MAX) + } + + /// Return the zeroed index. This is unsafe since we don't know + /// if this is a valid index for any particular map yet. 
+ pub fn unsafe_zeroed() -> Self { + Self(0) + } +} + +#[derive(Clone, Debug)] +pub struct Arena { + pub vec: Vec, +} + +impl Default for Arena { + fn default() -> Self { + Self { vec: Vec::new() } + } +} + +impl core::ops::Index for Arena { + type Output = T; + + fn index(&self, index: Index) -> &Self::Output { + self.vec.index(index.0) + } +} + +impl core::ops::IndexMut for Arena { + fn index_mut(&mut self, index: Index) -> &mut Self::Output { + self.vec.index_mut(index.0) + } +} + +impl IntoIterator for Arena { + type Item = T; + + type IntoIter = as IntoIterator>::IntoIter; + + fn into_iter(self) -> Self::IntoIter { + self.vec.into_iter() + } +} + +impl<'a, T> IntoIterator for &'a Arena { + type Item = &'a T; + + type IntoIter = <&'a Vec as IntoIterator>::IntoIter; + + fn into_iter(self) -> Self::IntoIter { + self.vec.iter() + } +} + +impl Arena { + pub fn insert(&mut self, item: T) -> Index { + let index = self.vec.len(); + self.vec.push(item); + Index(index) + } + + pub fn get(&self, index: Index) -> Option<&T> { + self.vec.get(index.0) + } + + pub fn get_mut(&mut self, index: Index) -> Option<&mut T> { + self.vec.get_mut(index.0) + } + + pub fn iter(&self) -> impl Iterator { + self.vec.iter().enumerate().map(|(index, item)| (Index(index), item)) + } +} diff --git a/deny.toml b/deny.toml index a3e506984c9..72150f08a3c 100644 --- a/deny.toml +++ b/deny.toml @@ -54,7 +54,7 @@ allow = [ "LicenseRef-ring", # https://github.com/rustls/webpki/blob/main/LICENSE ISC Style "LicenseRef-rustls-webpki", - # bitmaps 2.1.0, generational-arena 0.2.9,im 15.1.0 + # bitmaps 2.1.0, im 15.1.0 "MPL-2.0", # Boost Software License "BSL-1.0", From 5051ec4d434a9e5cf405c68357faaf213e68de9e Mon Sep 17 00:00:00 2001 From: Tom French <15848336+TomAFrench@users.noreply.github.com> Date: Sat, 17 Feb 2024 20:44:54 +0000 Subject: [PATCH 25/39] fix: correct invalid brillig codegen for `EmbeddedCurvePoint.add` (#4382) # Description ## Problem\* Resolves 
https://github.com/noir-lang/noir/issues/4260 ## Summary\* The error is explained in the comment I've added to the stdlib. This is a quick fix and we can clean it up once we're making serialisation changes in `aztec-packages` again. ## Additional Context ## Documentation\* Check one: - [x] No documentation needed. - [ ] Documentation included in this PR. - [ ] **[Exceptional Case]** Documentation to be submitted in a separate PR. # PR Checklist\* - [x] I have tested the changes locally. - [x] I have formatted the changes with [Prettier](https://prettier.io/) and/or `cargo fmt` on default settings. --- noir_stdlib/src/scalar_mul.nr | 11 ++++++++++- .../execution_success/brillig_scalar_mul/src/main.nr | 9 +++++++++ 2 files changed, 19 insertions(+), 1 deletion(-) diff --git a/noir_stdlib/src/scalar_mul.nr b/noir_stdlib/src/scalar_mul.nr index 26378e4839a..1a7f1ad707c 100644 --- a/noir_stdlib/src/scalar_mul.nr +++ b/noir_stdlib/src/scalar_mul.nr @@ -32,5 +32,14 @@ pub fn fixed_base_embedded_curve( // docs:end:fixed_base_embedded_curve {} +// This is a hack as returning an `EmbeddedCurvePoint` from a foreign function in brillig returns a [BrilligVariable::SingleAddr; 2] rather than BrilligVariable::BrilligArray +// as is defined in the brillig bytecode format. This is a workaround which allows us to fix this without modifying the serialization format. 
+fn embedded_curve_add(point1: EmbeddedCurvePoint, point2: EmbeddedCurvePoint) -> EmbeddedCurvePoint { + let point_array = embedded_curve_add_array_return(point1, point2); + let x = point_array[0]; + let y = point_array[1]; + EmbeddedCurvePoint { x, y } +} + #[foreign(embedded_curve_add)] -fn embedded_curve_add(_point1: EmbeddedCurvePoint, _point2: EmbeddedCurvePoint) -> EmbeddedCurvePoint {} +fn embedded_curve_add_array_return(_point1: EmbeddedCurvePoint, _point2: EmbeddedCurvePoint) -> [Field; 2] {} diff --git a/test_programs/execution_success/brillig_scalar_mul/src/main.nr b/test_programs/execution_success/brillig_scalar_mul/src/main.nr index ab2f79eb815..c7c3a85a4ff 100644 --- a/test_programs/execution_success/brillig_scalar_mul/src/main.nr +++ b/test_programs/execution_success/brillig_scalar_mul/src/main.nr @@ -20,4 +20,13 @@ unconstrained fn main( let res = std::scalar_mul::fixed_base_embedded_curve(priv_key, 0); assert(res[0] == pub_x); assert(res[1] == pub_y); + + let pub_point= std::scalar_mul::EmbeddedCurvePoint { x: pub_x, y: pub_y }; + let g1_y = 17631683881184975370165255887551781615748388533673675138860; + let g1= std::scalar_mul::EmbeddedCurvePoint { x: 1, y: g1_y }; + + let res = pub_point.double(); + let double = g1.add(g1); + + assert(double.x == res.x); } From 8aa39c042a4b689ef03fd761323163bac50aee1d Mon Sep 17 00:00:00 2001 From: Tom French <15848336+TomAFrench@users.noreply.github.com> Date: Sat, 17 Feb 2024 20:47:04 +0000 Subject: [PATCH 26/39] chore: bump webpack dependencies (#4346) # Description ## Problem\* Resolves ## Summary\* Bumping all the webpack dependencies to see if that helps with build stability. ## Additional Context ## Documentation\* Check one: - [ ] No documentation needed. - [ ] Documentation included in this PR. - [ ] **[Exceptional Case]** Documentation to be submitted in a separate PR. # PR Checklist\* - [ ] I have tested the changes locally. 
- [ ] I have formatted the changes with [Prettier](https://prettier.io/) and/or `cargo fmt` on default settings. --- compiler/wasm/package.json | 9 +- compiler/wasm/webpack.config.ts | 10 +- yarn.lock | 483 ++++++++++++++++++++++++++++---- 3 files changed, 432 insertions(+), 70 deletions(-) diff --git a/compiler/wasm/package.json b/compiler/wasm/package.json index b71058b3367..67584a2def1 100644 --- a/compiler/wasm/package.json +++ b/compiler/wasm/package.json @@ -61,10 +61,10 @@ "assert": "^2.1.0", "browserify-fs": "^1.0.0", "chai": "^4.3.10", - "copy-webpack-plugin": "^11.0.0", + "copy-webpack-plugin": "^12.0.2", "eslint": "^8.56.0", "eslint-plugin-prettier": "^5.0.0", - "html-webpack-plugin": "^5.5.4", + "html-webpack-plugin": "^5.6.0", "memfs": "^4.6.0", "mocha": "^10.2.0", "mocha-each": "^2.0.1", @@ -78,8 +78,9 @@ "typescript": "~5.2.2", "unzipit": "^1.4.3", "url": "^0.11.3", - "webpack": "^5.49.0", - "webpack-cli": "^4.7.2" + "webpack": "^5.90.1", + "webpack-cli": "^5.1.4", + "webpack-dev-server": "^5.0.0" }, "dependencies": { "@noir-lang/types": "workspace:*", diff --git a/compiler/wasm/webpack.config.ts b/compiler/wasm/webpack.config.ts index d5d70df2b8a..456c5d82dca 100644 --- a/compiler/wasm/webpack.config.ts +++ b/compiler/wasm/webpack.config.ts @@ -1,6 +1,6 @@ import { resolve, join } from 'path'; import webpack from 'webpack'; -import 'webpack-dev-server'; +import type { Configuration as DevServerConfiguration } from 'webpack-dev-server'; import WasmPackPlugin from '@wasm-tool/wasm-pack-plugin'; import HtmlWebpackPlugin from 'html-webpack-plugin'; import CopyWebpackPlugin from 'copy-webpack-plugin'; @@ -25,6 +25,10 @@ const config: webpack.Configuration = { }, }; +const devServerConfig: DevServerConfiguration = { + static: join(__dirname, 'dist'), +}; + const webConfig: webpack.Configuration = { name: 'web', entry: './src/index.mts', @@ -74,9 +78,7 @@ const webConfig: webpack.Configuration = { }, ], }, - devServer: { - static: join(__dirname, 'dist'), 
- }, + devServer: devServerConfig, resolve: { ...config.resolve, alias: { diff --git a/yarn.lock b/yarn.lock index 84cf3e593c6..ace7959279f 100644 --- a/yarn.lock +++ b/yarn.lock @@ -4050,6 +4050,16 @@ __metadata: languageName: node linkType: hard +"@jridgewell/trace-mapping@npm:^0.3.20": + version: 0.3.22 + resolution: "@jridgewell/trace-mapping@npm:0.3.22" + dependencies: + "@jridgewell/resolve-uri": ^3.1.0 + "@jridgewell/sourcemap-codec": ^1.4.14 + checksum: ac7dd2cfe0b479aa1b81776d40d789243131cc792dc8b6b6a028c70fcd6171958ae1a71bf67b618ffe3c0c3feead9870c095ee46a5e30319410d92976b28f498 + languageName: node + linkType: hard + "@leichtgewicht/ip-codec@npm:^2.0.1": version: 2.0.4 resolution: "@leichtgewicht/ip-codec@npm:2.0.4" @@ -4466,10 +4476,10 @@ __metadata: assert: ^2.1.0 browserify-fs: ^1.0.0 chai: ^4.3.10 - copy-webpack-plugin: ^11.0.0 + copy-webpack-plugin: ^12.0.2 eslint: ^8.56.0 eslint-plugin-prettier: ^5.0.0 - html-webpack-plugin: ^5.5.4 + html-webpack-plugin: ^5.6.0 memfs: ^4.6.0 mocha: ^10.2.0 mocha-each: ^2.0.1 @@ -4484,8 +4494,9 @@ __metadata: typescript: ~5.2.2 unzipit: ^1.4.3 url: ^0.11.3 - webpack: ^5.49.0 - webpack-cli: ^4.7.2 + webpack: ^5.90.1 + webpack-cli: ^5.1.4 + webpack-dev-server: ^5.0.0 languageName: unknown linkType: soft @@ -5267,6 +5278,13 @@ __metadata: languageName: node linkType: hard +"@sindresorhus/merge-streams@npm:^2.1.0": + version: 2.1.0 + resolution: "@sindresorhus/merge-streams@npm:2.1.0" + checksum: 8aa91a3fca68d4ba78f81cad80f2dc280fa82b6c49c9fa5fe37438b6b9082cf993adb2309163f924bef9d7173b2fae6bb40fc4070a344cbab8bcc19eb1ee0b7c + languageName: node + linkType: hard + "@sinonjs/commons@npm:^2.0.0": version: 2.0.0 resolution: "@sinonjs/commons@npm:2.0.0" @@ -5621,7 +5639,7 @@ __metadata: languageName: node linkType: hard -"@types/bonjour@npm:^3.5.9": +"@types/bonjour@npm:^3.5.13, @types/bonjour@npm:^3.5.9": version: 3.5.13 resolution: "@types/bonjour@npm:3.5.13" dependencies: @@ -5663,7 +5681,7 @@ __metadata: languageName: node 
linkType: hard -"@types/connect-history-api-fallback@npm:^1.3.5": +"@types/connect-history-api-fallback@npm:^1.3.5, @types/connect-history-api-fallback@npm:^1.5.4": version: 1.5.4 resolution: "@types/connect-history-api-fallback@npm:1.5.4" dependencies: @@ -5753,7 +5771,7 @@ __metadata: languageName: node linkType: hard -"@types/estree@npm:*, @types/estree@npm:1.0.5, @types/estree@npm:^1.0.0": +"@types/estree@npm:*, @types/estree@npm:1.0.5, @types/estree@npm:^1.0.0, @types/estree@npm:^1.0.5": version: 1.0.5 resolution: "@types/estree@npm:1.0.5" checksum: dd8b5bed28e6213b7acd0fb665a84e693554d850b0df423ac8076cc3ad5823a6bc26b0251d080bdc545af83179ede51dd3f6fa78cad2c46ed1f29624ddf3e41a @@ -5779,7 +5797,7 @@ __metadata: languageName: node linkType: hard -"@types/express@npm:*, @types/express@npm:^4.17.13": +"@types/express@npm:*, @types/express@npm:^4.17.13, @types/express@npm:^4.17.21": version: 4.17.21 resolution: "@types/express@npm:4.17.21" dependencies: @@ -6233,6 +6251,13 @@ __metadata: languageName: node linkType: hard +"@types/retry@npm:0.12.2": + version: 0.12.2 + resolution: "@types/retry@npm:0.12.2" + checksum: e5675035717b39ce4f42f339657cae9637cf0c0051cf54314a6a2c44d38d91f6544be9ddc0280587789b6afd056be5d99dbe3e9f4df68c286c36321579b1bf4a + languageName: node + linkType: hard + "@types/sax@npm:^1.2.1": version: 1.2.7 resolution: "@types/sax@npm:1.2.7" @@ -6275,7 +6300,7 @@ __metadata: languageName: node linkType: hard -"@types/serve-index@npm:^1.9.1": +"@types/serve-index@npm:^1.9.1, @types/serve-index@npm:^1.9.4": version: 1.9.4 resolution: "@types/serve-index@npm:1.9.4" dependencies: @@ -6284,7 +6309,7 @@ __metadata: languageName: node linkType: hard -"@types/serve-static@npm:*, @types/serve-static@npm:^1.13.10": +"@types/serve-static@npm:*, @types/serve-static@npm:^1.13.10, @types/serve-static@npm:^1.15.5": version: 1.15.5 resolution: "@types/serve-static@npm:1.15.5" dependencies: @@ -6311,7 +6336,7 @@ __metadata: languageName: node linkType: hard 
-"@types/sockjs@npm:^0.3.33": +"@types/sockjs@npm:^0.3.33, @types/sockjs@npm:^0.3.36": version: 0.3.36 resolution: "@types/sockjs@npm:0.3.36" dependencies: @@ -6343,7 +6368,7 @@ __metadata: languageName: node linkType: hard -"@types/ws@npm:^8.5.5": +"@types/ws@npm:^8.5.10, @types/ws@npm:^8.5.5": version: 8.5.10 resolution: "@types/ws@npm:8.5.10" dependencies: @@ -7181,36 +7206,36 @@ __metadata: languageName: node linkType: hard -"@webpack-cli/configtest@npm:^1.2.0": - version: 1.2.0 - resolution: "@webpack-cli/configtest@npm:1.2.0" +"@webpack-cli/configtest@npm:^2.1.1": + version: 2.1.1 + resolution: "@webpack-cli/configtest@npm:2.1.1" peerDependencies: - webpack: 4.x.x || 5.x.x - webpack-cli: 4.x.x - checksum: a2726cd9ec601d2b57e5fc15e0ebf5200a8892065e735911269ac2038e62be4bfc176ea1f88c2c46ff09b4d05d4c10ae045e87b3679372483d47da625a327e28 + webpack: 5.x.x + webpack-cli: 5.x.x + checksum: 9f9f9145c2d05471fc83d426db1df85cf49f329836b0c4b9f46b6948bed4b013464c00622b136d2a0a26993ce2306976682592245b08ee717500b1db45009a72 languageName: node linkType: hard -"@webpack-cli/info@npm:^1.5.0": - version: 1.5.0 - resolution: "@webpack-cli/info@npm:1.5.0" - dependencies: - envinfo: ^7.7.3 +"@webpack-cli/info@npm:^2.0.2": + version: 2.0.2 + resolution: "@webpack-cli/info@npm:2.0.2" peerDependencies: - webpack-cli: 4.x.x - checksum: 7f56fe037cd7d1fd5c7428588519fbf04a0cad33925ee4202ffbafd00f8ec1f2f67d991245e687d50e0f3e23f7b7814273d56cb9f7da4b05eed47c8d815c6296 + webpack: 5.x.x + webpack-cli: 5.x.x + checksum: 8f9a178afca5c82e113aed1efa552d64ee5ae4fdff63fe747c096a981ec74f18a5d07bd6e89bbe6715c3e57d96eea024a410e58977169489fe1df044c10dd94e languageName: node linkType: hard -"@webpack-cli/serve@npm:^1.7.0": - version: 1.7.0 - resolution: "@webpack-cli/serve@npm:1.7.0" +"@webpack-cli/serve@npm:^2.0.5": + version: 2.0.5 + resolution: "@webpack-cli/serve@npm:2.0.5" peerDependencies: - webpack-cli: 4.x.x + webpack: 5.x.x + webpack-cli: 5.x.x peerDependenciesMeta: webpack-dev-server: optional: 
true - checksum: d475e8effa23eb7ff9a48b14d4de425989fd82f906ce71c210921cc3852327c22873be00c35e181a25a6bd03d424ae2b83e7f3b3f410ac7ee31b128ab4ac7713 + checksum: 75f0e54681796d567a71ac3e2781d2901a8d8cf1cdfc82f261034dddac59a8343e8c3bc5e32b4bb9d6766759ba49fb29a5cd86ef1701d79c506fe886bb63ac75 languageName: node linkType: hard @@ -8050,6 +8075,16 @@ __metadata: languageName: node linkType: hard +"bonjour-service@npm:^1.2.1": + version: 1.2.1 + resolution: "bonjour-service@npm:1.2.1" + dependencies: + fast-deep-equal: ^3.1.3 + multicast-dns: ^7.2.5 + checksum: b65b3e6e3a07e97f2da5806afb76f3946d5a6426b72e849a0236dc3c9d3612fb8c5359ebade4be7eb63f74a37670c53a53be2ff17f4f709811fda77f600eb25b + languageName: node + linkType: hard + "boolbase@npm:^1.0.0": version: 1.0.0 resolution: "boolbase@npm:1.0.0" @@ -8300,6 +8335,15 @@ __metadata: languageName: node linkType: hard +"bundle-name@npm:^4.1.0": + version: 4.1.0 + resolution: "bundle-name@npm:4.1.0" + dependencies: + run-applescript: ^7.0.0 + checksum: 1d966c8d2dbf4d9d394e53b724ac756c2414c45c01340b37743621f59cc565a435024b394ddcb62b9b335d1c9a31f4640eb648c3fec7f97ee74dc0694c9beb6c + languageName: node + linkType: hard + "bytes@npm:3.0.0": version: 3.0.0 resolution: "bytes@npm:3.0.0" @@ -8669,6 +8713,25 @@ __metadata: languageName: node linkType: hard +"chokidar@npm:^3.6.0": + version: 3.6.0 + resolution: "chokidar@npm:3.6.0" + dependencies: + anymatch: ~3.1.2 + braces: ~3.0.2 + fsevents: ~2.3.2 + glob-parent: ~5.1.2 + is-binary-path: ~2.1.0 + is-glob: ~4.0.1 + normalize-path: ~3.0.0 + readdirp: ~3.6.0 + dependenciesMeta: + fsevents: + optional: true + checksum: d2f29f499705dcd4f6f3bbed79a9ce2388cf530460122eed3b9c48efeab7a4e28739c6551fd15bec9245c6b9eeca7a32baa64694d64d9b6faeb74ddb8c4a413d + languageName: node + linkType: hard + "chownr@npm:^1.1.1": version: 1.1.4 resolution: "chownr@npm:1.1.4" @@ -9101,7 +9164,7 @@ __metadata: languageName: node linkType: hard -"commander@npm:^7.0.0, commander@npm:^7.2.0": +"commander@npm:^7.2.0": 
version: 7.2.0 resolution: "commander@npm:7.2.0" checksum: 53501cbeee61d5157546c0bef0fedb6cdfc763a882136284bed9a07225f09a14b82d2a84e7637edfd1a679fb35ed9502fd58ef1d091e6287f60d790147f68ddc @@ -9327,6 +9390,22 @@ __metadata: languageName: node linkType: hard +"copy-webpack-plugin@npm:^12.0.2": + version: 12.0.2 + resolution: "copy-webpack-plugin@npm:12.0.2" + dependencies: + fast-glob: ^3.3.2 + glob-parent: ^6.0.1 + globby: ^14.0.0 + normalize-path: ^3.0.0 + schema-utils: ^4.2.0 + serialize-javascript: ^6.0.2 + peerDependencies: + webpack: ^5.1.0 + checksum: 98127735336c6db5924688486d3a1854a41835963d0c0b81695b2e3d58c6675164be7d23dee7090b84a56d3c9923175d3d0863ac1942bcc3317d2efc1962b927 + languageName: node + linkType: hard + "core-js-compat@npm:^3.31.0, core-js-compat@npm:^3.33.1": version: 3.34.0 resolution: "core-js-compat@npm:3.34.0" @@ -9949,6 +10028,13 @@ __metadata: languageName: node linkType: hard +"default-browser-id@npm:^5.0.0": + version: 5.0.0 + resolution: "default-browser-id@npm:5.0.0" + checksum: 185bfaecec2c75fa423544af722a3469b20704c8d1942794a86e4364fe7d9e8e9f63241a5b769d61c8151993bc65833a5b959026fa1ccea343b3db0a33aa6deb + languageName: node + linkType: hard + "default-browser@npm:^4.0.0": version: 4.0.0 resolution: "default-browser@npm:4.0.0" @@ -9961,6 +10047,16 @@ __metadata: languageName: node linkType: hard +"default-browser@npm:^5.2.1": + version: 5.2.1 + resolution: "default-browser@npm:5.2.1" + dependencies: + bundle-name: ^4.1.0 + default-browser-id: ^5.0.0 + checksum: afab7eff7b7f5f7a94d9114d1ec67273d3fbc539edf8c0f80019879d53aa71e867303c6f6d7cffeb10a6f3cfb59d4f963dba3f9c96830b4540cc7339a1bf9840 + languageName: node + linkType: hard + "default-gateway@npm:^6.0.3": version: 6.0.3 resolution: "default-gateway@npm:6.0.3" @@ -12202,6 +12298,20 @@ __metadata: languageName: node linkType: hard +"globby@npm:^14.0.0": + version: 14.0.1 + resolution: "globby@npm:14.0.1" + dependencies: + "@sindresorhus/merge-streams": ^2.1.0 + fast-glob: ^3.3.2 + 
ignore: ^5.2.4 + path-type: ^5.0.0 + slash: ^5.1.0 + unicorn-magic: ^0.1.0 + checksum: 33568444289afb1135ad62d52d5e8412900cec620e3b6ece533afa46d004066f14b97052b643833d7cf4ee03e7fac571430130cde44c333df91a45d313105170 + languageName: node + linkType: hard + "gopd@npm:^1.0.1": version: 1.0.1 resolution: "gopd@npm:1.0.1" @@ -12791,7 +12901,7 @@ __metadata: languageName: node linkType: hard -"html-entities@npm:^2.3.2": +"html-entities@npm:^2.3.2, html-entities@npm:^2.4.0": version: 2.4.0 resolution: "html-entities@npm:2.4.0" checksum: 25bea32642ce9ebd0eedc4d24381883ecb0335ccb8ac26379a0958b9b16652fdbaa725d70207ce54a51db24103436a698a8e454397d3ba8ad81460224751f1dc @@ -12860,7 +12970,7 @@ __metadata: languageName: node linkType: hard -"html-webpack-plugin@npm:^5.5.0, html-webpack-plugin@npm:^5.5.3, html-webpack-plugin@npm:^5.5.4": +"html-webpack-plugin@npm:^5.5.0, html-webpack-plugin@npm:^5.5.3": version: 5.5.4 resolution: "html-webpack-plugin@npm:5.5.4" dependencies: @@ -12875,6 +12985,27 @@ __metadata: languageName: node linkType: hard +"html-webpack-plugin@npm:^5.6.0": + version: 5.6.0 + resolution: "html-webpack-plugin@npm:5.6.0" + dependencies: + "@types/html-minifier-terser": ^6.0.0 + html-minifier-terser: ^6.0.2 + lodash: ^4.17.21 + pretty-error: ^4.0.0 + tapable: ^2.0.0 + peerDependencies: + "@rspack/core": 0.x || 1.x + webpack: ^5.20.0 + peerDependenciesMeta: + "@rspack/core": + optional: true + webpack: + optional: true + checksum: 32a6e41da538e798fd0be476637d7611a5e8a98a3508f031996e9eb27804dcdc282cb01f847cf5d066f21b49cfb8e21627fcf977ffd0c9bea81cf80e5a65070d + languageName: node + linkType: hard + "htmlparser2@npm:^6.1.0": version: 6.1.0 resolution: "htmlparser2@npm:6.1.0" @@ -13305,10 +13436,10 @@ __metadata: languageName: node linkType: hard -"interpret@npm:^2.2.0": - version: 2.2.0 - resolution: "interpret@npm:2.2.0" - checksum: f51efef7cb8d02da16408ffa3504cd6053014c5aeb7bb8c223727e053e4235bf565e45d67028b0c8740d917c603807aa3c27d7bd2f21bf20b6417e2bb3e5fd6e 
+"interpret@npm:^3.1.1": + version: 3.1.1 + resolution: "interpret@npm:3.1.1" + checksum: 35cebcf48c7351130437596d9ab8c8fe131ce4038da4561e6d665f25640e0034702a031cf7e3a5cea60ac7ac548bf17465e0571ede126f3d3a6933152171ac82 languageName: node linkType: hard @@ -13351,7 +13482,7 @@ __metadata: languageName: node linkType: hard -"ipaddr.js@npm:^2.0.1": +"ipaddr.js@npm:^2.0.1, ipaddr.js@npm:^2.1.0": version: 2.1.0 resolution: "ipaddr.js@npm:2.1.0" checksum: 807a054f2bd720c4d97ee479d6c9e865c233bea21f139fb8dabd5a35c4226d2621c42e07b4ad94ff3f82add926a607d8d9d37c625ad0319f0e08f9f2bd1968e2 @@ -13609,6 +13740,13 @@ __metadata: languageName: node linkType: hard +"is-network-error@npm:^1.0.0": + version: 1.0.1 + resolution: "is-network-error@npm:1.0.1" + checksum: 165d61500c4186c62db5a3a693d6bfa14ca40fe9b471ef4cd4f27b20ef6760880faf5386dc01ca9867531631782941fedaa94521d09959edf71f046e393c7b91 + languageName: node + linkType: hard + "is-npm@npm:^5.0.0": version: 5.0.0 resolution: "is-npm@npm:5.0.0" @@ -13792,6 +13930,15 @@ __metadata: languageName: node linkType: hard +"is-wsl@npm:^3.1.0": + version: 3.1.0 + resolution: "is-wsl@npm:3.1.0" + dependencies: + is-inside-container: ^1.0.0 + checksum: f9734c81f2f9cf9877c5db8356bfe1ff61680f1f4c1011e91278a9c0564b395ae796addb4bf33956871041476ec82c3e5260ed57b22ac91794d4ae70a1d2f0a9 + languageName: node + linkType: hard + "is-yarn-global@npm:^0.3.0": version: 0.3.0 resolution: "is-yarn-global@npm:0.3.0" @@ -14330,7 +14477,7 @@ __metadata: languageName: node linkType: hard -"launch-editor@npm:^2.6.0": +"launch-editor@npm:^2.6.0, launch-editor@npm:^2.6.1": version: 2.6.1 resolution: "launch-editor@npm:2.6.1" dependencies: @@ -16565,6 +16712,18 @@ __metadata: languageName: node linkType: hard +"open@npm:^10.0.3": + version: 10.0.3 + resolution: "open@npm:10.0.3" + dependencies: + default-browser: ^5.2.1 + define-lazy-prop: ^3.0.0 + is-inside-container: ^1.0.0 + is-wsl: ^3.1.0 + checksum: 
3c4b4eb3c08210f7b7b3f3311d36440f4b83f0641ac70e5e56d637f48d4a7736e0fd49a604eebe0a55c51223d77f9ced11912223cab12d5e9fdc866727c6cb1d + languageName: node + linkType: hard + "open@npm:^8.0.2, open@npm:^8.0.9, open@npm:^8.4.0": version: 8.4.2 resolution: "open@npm:8.4.2" @@ -16748,6 +16907,17 @@ __metadata: languageName: node linkType: hard +"p-retry@npm:^6.2.0": + version: 6.2.0 + resolution: "p-retry@npm:6.2.0" + dependencies: + "@types/retry": 0.12.2 + is-network-error: ^1.0.0 + retry: ^0.13.1 + checksum: 6003573c559ee812329c9c3ede7ba12a783fdc8dd70602116646e850c920b4597dc502fe001c3f9526fca4e93275045db7a27341c458e51db179c1374a01ac44 + languageName: node + linkType: hard + "p-try@npm:^1.0.0": version: 1.0.0 resolution: "p-try@npm:1.0.0" @@ -17043,6 +17213,13 @@ __metadata: languageName: node linkType: hard +"path-type@npm:^5.0.0": + version: 5.0.0 + resolution: "path-type@npm:5.0.0" + checksum: 15ec24050e8932c2c98d085b72cfa0d6b4eeb4cbde151a0a05726d8afae85784fc5544f733d8dfc68536587d5143d29c0bd793623fad03d7e61cc00067291cd5 + languageName: node + linkType: hard + "pathval@npm:^1.1.1": version: 1.1.1 resolution: "pathval@npm:1.1.1" @@ -18273,12 +18450,12 @@ __metadata: languageName: node linkType: hard -"rechoir@npm:^0.7.0": - version: 0.7.1 - resolution: "rechoir@npm:0.7.1" +"rechoir@npm:^0.8.0": + version: 0.8.0 + resolution: "rechoir@npm:0.8.0" dependencies: - resolve: ^1.9.0 - checksum: 2a04aab4e28c05fcd6ee6768446bc8b859d8f108e71fc7f5bcbc5ef25e53330ce2c11d10f82a24591a2df4c49c4f61feabe1fd11f844c66feedd4cd7bb61146a + resolve: ^1.20.0 + checksum: ad3caed8afdefbc33fbc30e6d22b86c35b3d51c2005546f4e79bcc03c074df804b3640ad18945e6bef9ed12caedc035655ec1082f64a5e94c849ff939dc0a788 languageName: node linkType: hard @@ -18730,7 +18907,7 @@ __metadata: languageName: node linkType: hard -"resolve@npm:^1.1.6, resolve@npm:^1.14.2, resolve@npm:^1.19.0, resolve@npm:^1.22.1, resolve@npm:^1.3.2, resolve@npm:^1.9.0": +"resolve@npm:^1.1.6, resolve@npm:^1.14.2, resolve@npm:^1.19.0, 
resolve@npm:^1.20.0, resolve@npm:^1.22.1, resolve@npm:^1.3.2": version: 1.22.8 resolution: "resolve@npm:1.22.8" dependencies: @@ -18752,7 +18929,7 @@ __metadata: languageName: node linkType: hard -"resolve@patch:resolve@^1.1.6#~builtin, resolve@patch:resolve@^1.14.2#~builtin, resolve@patch:resolve@^1.19.0#~builtin, resolve@patch:resolve@^1.22.1#~builtin, resolve@patch:resolve@^1.3.2#~builtin, resolve@patch:resolve@^1.9.0#~builtin": +"resolve@patch:resolve@^1.1.6#~builtin, resolve@patch:resolve@^1.14.2#~builtin, resolve@patch:resolve@^1.19.0#~builtin, resolve@patch:resolve@^1.20.0#~builtin, resolve@patch:resolve@^1.22.1#~builtin, resolve@patch:resolve@^1.3.2#~builtin": version: 1.22.8 resolution: "resolve@patch:resolve@npm%3A1.22.8#~builtin::version=1.22.8&hash=c3c19d" dependencies: @@ -18966,6 +19143,13 @@ __metadata: languageName: node linkType: hard +"run-applescript@npm:^7.0.0": + version: 7.0.0 + resolution: "run-applescript@npm:7.0.0" + checksum: b02462454d8b182ad4117e5d4626e9e6782eb2072925c9fac582170b0627ae3c1ea92ee9b2df7daf84b5e9ffe14eb1cf5fb70bc44b15c8a0bfcdb47987e2410c + languageName: node + linkType: hard + "run-parallel-limit@npm:^1.1.0": version: 1.1.0 resolution: "run-parallel-limit@npm:1.1.0" @@ -19070,7 +19254,7 @@ __metadata: languageName: node linkType: hard -"schema-utils@npm:^4.0.0": +"schema-utils@npm:^4.0.0, schema-utils@npm:^4.2.0": version: 4.2.0 resolution: "schema-utils@npm:4.2.0" dependencies: @@ -19118,7 +19302,7 @@ __metadata: languageName: node linkType: hard -"selfsigned@npm:^2.1.1": +"selfsigned@npm:^2.1.1, selfsigned@npm:^2.4.1": version: 2.4.1 resolution: "selfsigned@npm:2.4.1" dependencies: @@ -19223,6 +19407,15 @@ __metadata: languageName: node linkType: hard +"serialize-javascript@npm:^6.0.2": + version: 6.0.2 + resolution: "serialize-javascript@npm:6.0.2" + dependencies: + randombytes: ^2.1.0 + checksum: 
c4839c6206c1d143c0f80763997a361310305751171dd95e4b57efee69b8f6edd8960a0b7fbfc45042aadff98b206d55428aee0dc276efe54f100899c7fa8ab7 + languageName: node + linkType: hard + "serve-handler@npm:6.1.5, serve-handler@npm:^6.1.3, serve-handler@npm:^6.1.5": version: 6.1.5 resolution: "serve-handler@npm:6.1.5" @@ -19490,6 +19683,13 @@ __metadata: languageName: node linkType: hard +"slash@npm:^5.1.0": + version: 5.1.0 + resolution: "slash@npm:5.1.0" + checksum: 70434b34c50eb21b741d37d455110258c42d2cf18c01e6518aeb7299f3c6e626330c889c0c552b5ca2ef54a8f5a74213ab48895f0640717cacefeef6830a1ba4 + languageName: node + linkType: hard + "slice-ansi@npm:^4.0.0": version: 4.0.0 resolution: "slice-ansi@npm:4.0.0" @@ -20114,6 +20314,28 @@ __metadata: languageName: node linkType: hard +"terser-webpack-plugin@npm:^5.3.10": + version: 5.3.10 + resolution: "terser-webpack-plugin@npm:5.3.10" + dependencies: + "@jridgewell/trace-mapping": ^0.3.20 + jest-worker: ^27.4.5 + schema-utils: ^3.1.1 + serialize-javascript: ^6.0.1 + terser: ^5.26.0 + peerDependencies: + webpack: ^5.1.0 + peerDependenciesMeta: + "@swc/core": + optional: true + esbuild: + optional: true + uglify-js: + optional: true + checksum: bd6e7596cf815f3353e2a53e79cbdec959a1b0276f5e5d4e63e9d7c3c5bb5306df567729da287d1c7b39d79093e56863c569c42c6c24cc34c76aa313bd2cbcea + languageName: node + linkType: hard + "terser-webpack-plugin@npm:^5.3.3, terser-webpack-plugin@npm:^5.3.7, terser-webpack-plugin@npm:^5.3.9": version: 5.3.9 resolution: "terser-webpack-plugin@npm:5.3.9" @@ -20150,6 +20372,20 @@ __metadata: languageName: node linkType: hard +"terser@npm:^5.26.0": + version: 5.27.0 + resolution: "terser@npm:5.27.0" + dependencies: + "@jridgewell/source-map": ^0.3.3 + acorn: ^8.8.2 + commander: ^2.20.0 + source-map-support: ~0.5.20 + bin: + terser: bin/terser + checksum: c165052cfea061e8512e9b9ba42a098c2ff6382886ae122b040fd5b6153443070cc2dcb4862269f1669c09c716763e856125a355ff984aa72be525d6fffd8729 + languageName: node + linkType: hard + 
"text-table@npm:^0.2.0": version: 0.2.0 resolution: "text-table@npm:0.2.0" @@ -20765,6 +21001,13 @@ __metadata: languageName: node linkType: hard +"unicorn-magic@npm:^0.1.0": + version: 0.1.0 + resolution: "unicorn-magic@npm:0.1.0" + checksum: 48c5882ca3378f380318c0b4eb1d73b7e3c5b728859b060276e0a490051d4180966beeb48962d850fd0c6816543bcdfc28629dcd030bb62a286a2ae2acb5acb6 + languageName: node + linkType: hard + "unified@npm:9.2.0": version: 9.2.0 resolution: "unified@npm:9.2.0" @@ -21409,36 +21652,35 @@ __metadata: languageName: node linkType: hard -"webpack-cli@npm:^4.7.2": - version: 4.10.0 - resolution: "webpack-cli@npm:4.10.0" +"webpack-cli@npm:^5.1.4": + version: 5.1.4 + resolution: "webpack-cli@npm:5.1.4" dependencies: "@discoveryjs/json-ext": ^0.5.0 - "@webpack-cli/configtest": ^1.2.0 - "@webpack-cli/info": ^1.5.0 - "@webpack-cli/serve": ^1.7.0 + "@webpack-cli/configtest": ^2.1.1 + "@webpack-cli/info": ^2.0.2 + "@webpack-cli/serve": ^2.0.5 colorette: ^2.0.14 - commander: ^7.0.0 + commander: ^10.0.1 cross-spawn: ^7.0.3 + envinfo: ^7.7.3 fastest-levenshtein: ^1.0.12 import-local: ^3.0.2 - interpret: ^2.2.0 - rechoir: ^0.7.0 + interpret: ^3.1.1 + rechoir: ^0.8.0 webpack-merge: ^5.7.3 peerDependencies: - webpack: 4.x.x || 5.x.x + webpack: 5.x.x peerDependenciesMeta: "@webpack-cli/generators": optional: true - "@webpack-cli/migrate": - optional: true webpack-bundle-analyzer: optional: true webpack-dev-server: optional: true bin: webpack-cli: bin/cli.js - checksum: 2ff5355ac348e6b40f2630a203b981728834dca96d6d621be96249764b2d0fc01dd54edfcc37f02214d02935de2cf0eefd6ce689d970d154ef493f01ba922390 + checksum: 3a4ad0d0342a6815c850ee4633cc2a8a5dae04f918e7847f180bf24ab400803cf8a8943707ffbed03eb20fe6ce647f996f60a2aade87b0b4a9954da3da172ce0 languageName: node linkType: hard @@ -21457,6 +21699,24 @@ __metadata: languageName: node linkType: hard +"webpack-dev-middleware@npm:^7.0.0": + version: 7.0.0 + resolution: "webpack-dev-middleware@npm:7.0.0" + dependencies: + colorette: 
^2.0.10 + memfs: ^4.6.0 + mime-types: ^2.1.31 + range-parser: ^1.2.1 + schema-utils: ^4.0.0 + peerDependencies: + webpack: ^5.0.0 + peerDependenciesMeta: + webpack: + optional: true + checksum: 90f6c87c80bd5849c34f3a1761ac7dc1b123def2e6e9922f55102ff4b7532538641fa8c7169ce8254b0d471c27d882cdf4a1c32979952474fc8eacc8b3447915 + languageName: node + linkType: hard + "webpack-dev-server@npm:^4.15.1, webpack-dev-server@npm:^4.9.3": version: 4.15.1 resolution: "webpack-dev-server@npm:4.15.1" @@ -21504,6 +21764,53 @@ __metadata: languageName: node linkType: hard +"webpack-dev-server@npm:^5.0.0": + version: 5.0.0 + resolution: "webpack-dev-server@npm:5.0.0" + dependencies: + "@types/bonjour": ^3.5.13 + "@types/connect-history-api-fallback": ^1.5.4 + "@types/express": ^4.17.21 + "@types/serve-index": ^1.9.4 + "@types/serve-static": ^1.15.5 + "@types/sockjs": ^0.3.36 + "@types/ws": ^8.5.10 + ansi-html-community: ^0.0.8 + bonjour-service: ^1.2.1 + chokidar: ^3.6.0 + colorette: ^2.0.10 + compression: ^1.7.4 + connect-history-api-fallback: ^2.0.0 + default-gateway: ^6.0.3 + express: ^4.17.3 + graceful-fs: ^4.2.6 + html-entities: ^2.4.0 + http-proxy-middleware: ^2.0.3 + ipaddr.js: ^2.1.0 + launch-editor: ^2.6.1 + open: ^10.0.3 + p-retry: ^6.2.0 + rimraf: ^5.0.5 + schema-utils: ^4.2.0 + selfsigned: ^2.4.1 + serve-index: ^1.9.1 + sockjs: ^0.3.24 + spdy: ^4.0.2 + webpack-dev-middleware: ^7.0.0 + ws: ^8.16.0 + peerDependencies: + webpack: ^5.0.0 + peerDependenciesMeta: + webpack: + optional: true + webpack-cli: + optional: true + bin: + webpack-dev-server: bin/webpack-dev-server.js + checksum: 419d1af6b6164900fb01168c3ef965fe8d27a78939ef8f5c602f82af5be8a2b68a0b015df564623dd69996d5265c679202c5970b59797e83cf322e47bbcd6022 + languageName: node + linkType: hard + "webpack-merge@npm:^5.7.3, webpack-merge@npm:^5.8.0, webpack-merge@npm:^5.9.0": version: 5.10.0 resolution: "webpack-merge@npm:5.10.0" @@ -21522,7 +21829,7 @@ __metadata: languageName: node linkType: hard -"webpack@npm:^5.49.0, 
webpack@npm:^5.73.0, webpack@npm:^5.88.1": +"webpack@npm:^5.73.0, webpack@npm:^5.88.1": version: 5.89.0 resolution: "webpack@npm:5.89.0" dependencies: @@ -21559,6 +21866,43 @@ __metadata: languageName: node linkType: hard +"webpack@npm:^5.90.1": + version: 5.90.1 + resolution: "webpack@npm:5.90.1" + dependencies: + "@types/eslint-scope": ^3.7.3 + "@types/estree": ^1.0.5 + "@webassemblyjs/ast": ^1.11.5 + "@webassemblyjs/wasm-edit": ^1.11.5 + "@webassemblyjs/wasm-parser": ^1.11.5 + acorn: ^8.7.1 + acorn-import-assertions: ^1.9.0 + browserslist: ^4.21.10 + chrome-trace-event: ^1.0.2 + enhanced-resolve: ^5.15.0 + es-module-lexer: ^1.2.1 + eslint-scope: 5.1.1 + events: ^3.2.0 + glob-to-regexp: ^0.4.1 + graceful-fs: ^4.2.9 + json-parse-even-better-errors: ^2.3.1 + loader-runner: ^4.2.0 + mime-types: ^2.1.27 + neo-async: ^2.6.2 + schema-utils: ^3.2.0 + tapable: ^2.1.1 + terser-webpack-plugin: ^5.3.10 + watchpack: ^2.4.0 + webpack-sources: ^3.2.3 + peerDependenciesMeta: + webpack-cli: + optional: true + bin: + webpack: bin/webpack.js + checksum: a7be844d5720a0c6282fec012e6fa34b1137dff953c5d48bf2ef066a6c27c1dbc92a9b9effc05ee61c9fe269499266db9782073f2d82a589d3c5c966ffc56584 + languageName: node + linkType: hard + "webpackbar@npm:^5.0.2": version: 5.0.2 resolution: "webpackbar@npm:5.0.2" @@ -21833,6 +22177,21 @@ __metadata: languageName: node linkType: hard +"ws@npm:^8.16.0": + version: 8.16.0 + resolution: "ws@npm:8.16.0" + peerDependencies: + bufferutil: ^4.0.1 + utf-8-validate: ">=5.0.2" + peerDependenciesMeta: + bufferutil: + optional: true + utf-8-validate: + optional: true + checksum: feb3eecd2bae82fa8a8beef800290ce437d8b8063bdc69712725f21aef77c49cb2ff45c6e5e7fce622248f9c7abaee506bae0a9064067ffd6935460c7357321b + languageName: node + linkType: hard + "xdg-basedir@npm:^4.0.0": version: 4.0.0 resolution: "xdg-basedir@npm:4.0.0" From d2585e738a63208fca3c9e26242e896d7f1df1e4 Mon Sep 17 00:00:00 2001 From: Tom French <15848336+TomAFrench@users.noreply.github.com> Date: Sat, 
17 Feb 2024 21:15:21 +0000 Subject: [PATCH 27/39] feat: update error message when trying to load workspace as dependency (#4393) # Description ## Problem\* Resolves ## Summary\* This addresses an issue that a user in the discord is running into where they're trying to load a workspace as a dependency and can't figure out how to fix their Nargo.toml. ## Additional Context We should probably be embedding links to the docs but that would require some setup so that we point to the correct version, etc. ## Documentation\* Check one: - [x] No documentation needed. - [ ] Documentation included in this PR. - [ ] **[Exceptional Case]** Documentation to be submitted in a separate PR. # PR Checklist\* - [x] I have tested the changes locally. - [x] I have formatted the changes with [Prettier](https://prettier.io/) and/or `cargo fmt` on default settings. Co-authored-by: kevaundray --- tooling/nargo_toml/src/errors.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tooling/nargo_toml/src/errors.rs b/tooling/nargo_toml/src/errors.rs index 440895056c3..77fe77bcdbb 100644 --- a/tooling/nargo_toml/src/errors.rs +++ b/tooling/nargo_toml/src/errors.rs @@ -28,7 +28,7 @@ pub enum ManifestError { #[error("Nargo.toml is badly formed, could not parse.\n\n {0}")] MalformedFile(#[from] toml::de::Error), - #[error("Unexpected workspace definition found in {0}")] + #[error("Unexpected workspace definition found in {0}. 
If you're attempting to load this as a dependency, you may need to add a `directory` field to your `Nargo.toml` to show which package within the workspace to use")] UnexpectedWorkspace(PathBuf), #[error("Cannot find file {entry} which was specified as the `entry` field in {toml}")] From 6169a5b2d85d22fcd1ac9f7fd90514f88d4ef8a6 Mon Sep 17 00:00:00 2001 From: jfecher Date: Sat, 17 Feb 2024 18:52:28 -0600 Subject: [PATCH 28/39] chore: Update Vec docs (#4400) # Description ## Problem\* Working towards #4348 ## Summary\* We'll soon have bounded vec and hashmap types documented so I thought I'd create a new `containers` folder for documentation of the `std::containers` module. ## Additional Context Minor updates in the Vec docs: - Removed implementation details from methods - Reordered `Vec::len` up to the top since it is important - Minor grammar fixes ## Documentation\* Check one: - [ ] No documentation needed. - [x] Documentation included in this PR. - [ ] **[Exceptional Case]** Documentation to be submitted in a separate PR. # PR Checklist\* - [x] I have tested the changes locally. - [x] I have formatted the changes with [Prettier](https://prettier.io/) and/or `cargo fmt` on default settings. Co-authored-by: kevaundray --- .../containers/vec.mdx} | 68 +++++++------------ 1 file changed, 24 insertions(+), 44 deletions(-) rename docs/docs/noir/{concepts/data_types/vectors.mdx => standard_library/containers/vec.mdx} (71%) diff --git a/docs/docs/noir/concepts/data_types/vectors.mdx b/docs/docs/noir/standard_library/containers/vec.mdx similarity index 71% rename from docs/docs/noir/concepts/data_types/vectors.mdx rename to docs/docs/noir/standard_library/containers/vec.mdx index aed13183719..1954f05bc76 100644 --- a/docs/docs/noir/concepts/data_types/vectors.mdx +++ b/docs/docs/noir/standard_library/containers/vec.mdx @@ -1,6 +1,6 @@ --- title: Vectors -description: Delve into the Vector data type in Noir. 
Learn about its methods, practical examples, and best practices for using Vectors in your Noir code. +description: Delve into the Vec data type in Noir. Learn about its methods, practical examples, and best practices for using Vectors in your Noir code. keywords: [noir, vector type, methods, examples, dynamic arrays] sidebar_position: 6 --- @@ -9,7 +9,7 @@ import Experimental from '@site/src/components/Notes/_experimental.mdx'; -A vector is a collection type similar to Rust's Vector type. It's convenient way to use slices as mutable arrays. +A vector is a collection type similar to Rust's `Vec` type. In Noir, it is a convenient way to use slices as mutable arrays. Example: @@ -28,9 +28,7 @@ assert(vector.len() == 5); Creates a new, empty vector. ```rust -pub fn new() -> Self { - Self { slice: [] } -} +pub fn new() -> Self ``` Example: @@ -45,9 +43,7 @@ assert(empty_vector.len() == 0); Creates a vector containing each element from a given slice. Mutations to the resulting vector will not affect the original slice. ```rust -pub fn from_slice(slice: [T]) -> Self { - Self { slice } -} +pub fn from_slice(slice: [T]) -> Self ``` Example: @@ -58,14 +54,27 @@ let vector_from_slice = Vec::from_slice(arr); assert(vector_from_slice.len() == 3); ``` +### len + +Returns the number of elements in the vector. + +```rust +pub fn len(self) -> Field +``` + +Example: + +```rust +let empty_vector: Vec = Vec::new(); +assert(empty_vector.len() == 0); +``` + ### get Retrieves an element from the vector at a given index. Panics if the index points beyond the vector's end. ```rust -pub fn get(self, index: Field) -> T { - self.slice[index] -} +pub fn get(self, index: Field) -> T ``` Example: @@ -80,9 +89,7 @@ assert(vector.get(1) == 20); Adds a new element to the vector's end, returning a new vector with a length one greater than the original unmodified vector. 
```rust -pub fn push(&mut self, elem: T) { - self.slice = self.slice.push_back(elem); -} +pub fn push(&mut self, elem: T) ``` Example: @@ -98,11 +105,7 @@ assert(vector.len() == 1); Removes an element from the vector's end, returning a new vector with a length one less than the original vector, along with the removed element. Panics if the vector's length is zero. ```rust -pub fn pop(&mut self) -> T { - let (popped_slice, last_elem) = self.slice.pop_back(); - self.slice = popped_slice; - last_elem -} +pub fn pop(&mut self) -> T ``` Example: @@ -119,9 +122,7 @@ assert(vector.len() == 1); Inserts an element at a specified index, shifting subsequent elements to the right. ```rust -pub fn insert(&mut self, index: Field, elem: T) { - self.slice = self.slice.insert(index, elem); -} +pub fn insert(&mut self, index: Field, elem: T) ``` Example: @@ -137,11 +138,7 @@ assert(vector.get(1) == 20); Removes an element at a specified index, shifting subsequent elements to the left, and returns the removed element. ```rust -pub fn remove(&mut self, index: Field) -> T { - let (new_slice, elem) = self.slice.remove(index); - self.slice = new_slice; - elem -} +pub fn remove(&mut self, index: Field) -> T ``` Example: @@ -152,20 +149,3 @@ let removed_elem = vector.remove(1); assert(removed_elem == 20); assert(vector.len() == 2); ``` - -### len - -Returns the number of elements in the vector. - -```rust -pub fn len(self) -> Field { - self.slice.len() -} -``` - -Example: - -```rust -let empty_vector: Vec = Vec::new(); -assert(empty_vector.len() == 0); -``` From 722dc969e8b09e5a6fd56b094b6939e9330233f6 Mon Sep 17 00:00:00 2001 From: Tom French <15848336+TomAFrench@users.noreply.github.com> Date: Sun, 18 Feb 2024 18:33:37 +0000 Subject: [PATCH 29/39] chore: fix docker test workflows (#4308) # Description ## Problem\* Resolves ## Summary\* This adds a change which didn't make it into #4306 so that we only run node tests on the node image and vice versa. 
## Additional Context ## Documentation\* Check one: - [x] No documentation needed. - [ ] Documentation included in this PR. - [ ] **[Exceptional Case]** Documentation to be submitted in a separate PR. # PR Checklist\* - [x] I have tested the changes locally. - [x] I have formatted the changes with [Prettier](https://prettier.io/) and/or `cargo fmt` on default settings. --- .github/scripts/integration-test-browser.sh | 2 +- .github/scripts/integration-test-node.sh | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/scripts/integration-test-browser.sh b/.github/scripts/integration-test-browser.sh index c9cda58aab8..12195a88928 100755 --- a/.github/scripts/integration-test-browser.sh +++ b/.github/scripts/integration-test-browser.sh @@ -2,4 +2,4 @@ set -eu ./.github/scripts/playwright-install.sh -yarn workspace integration-tests test +yarn workspace integration-tests test:browser \ No newline at end of file diff --git a/.github/scripts/integration-test-node.sh b/.github/scripts/integration-test-node.sh index 7260ca4bb0f..b7f00c65620 100755 --- a/.github/scripts/integration-test-node.sh +++ b/.github/scripts/integration-test-node.sh @@ -2,4 +2,4 @@ set -eu apt-get install libc++-dev -y -yarn workspace integration-tests test +yarn workspace integration-tests test:node From b5e5c30f4db52c79ef556e80660f39db369b1911 Mon Sep 17 00:00:00 2001 From: Tom French <15848336+TomAFrench@users.noreply.github.com> Date: Wed, 21 Feb 2024 17:39:09 +0000 Subject: [PATCH 30/39] chore!: bump msrv to 1.73.0 (#4406) # Description ## Problem\* Resolves ## Summary\* Bumpalo bumped their MSRV in a [patch/minor release](https://github.com/fitzgen/bumpalo/commit/f8597ceb3600807a902fa9692fb43c49e7b63b27) and wasmer is using an unlocked dependency so we need to bump to match. ## Additional Context ## Documentation\* Check one: - [x] No documentation needed. - [ ] Documentation included in this PR. - [ ] **[Exceptional Case]** Documentation to be submitted in a separate PR. 
# PR Checklist\* - [x] I have tested the changes locally. - [x] I have formatted the changes with [Prettier](https://prettier.io/) and/or `cargo fmt` on default settings. --- .github/workflows/docs-pr.yml | 2 +- .github/workflows/formatting.yml | 2 +- .github/workflows/gates_report.yml | 2 +- .github/workflows/publish-acvm.yml | 2 +- .github/workflows/publish-es-packages.yml | 6 +++--- .github/workflows/publish-nargo.yml | 4 ++-- .github/workflows/test-js-packages.yml | 8 ++++---- .github/workflows/test-rust-workspace-msrv.yml | 4 ++-- .github/workflows/test-rust-workspace.yml | 4 ++-- Cargo.toml | 2 +- Dockerfile.ci | 2 +- README.md | 2 +- .../getting_started/installation/other_install_methods.md | 2 +- flake.nix | 2 +- rust-toolchain.toml | 2 +- tooling/nargo/build.rs | 4 ++-- tooling/nargo_cli/build.rs | 4 ++-- 17 files changed, 27 insertions(+), 27 deletions(-) diff --git a/.github/workflows/docs-pr.yml b/.github/workflows/docs-pr.yml index dddb309a3a4..5d0b72c6ad8 100644 --- a/.github/workflows/docs-pr.yml +++ b/.github/workflows/docs-pr.yml @@ -55,7 +55,7 @@ jobs: uses: actions/checkout@v4 - name: Setup toolchain - uses: dtolnay/rust-toolchain@1.71.1 + uses: dtolnay/rust-toolchain@1.73.0 - uses: Swatinem/rust-cache@v2 with: diff --git a/.github/workflows/formatting.yml b/.github/workflows/formatting.yml index 97736e2415e..43fd6daa91d 100644 --- a/.github/workflows/formatting.yml +++ b/.github/workflows/formatting.yml @@ -32,7 +32,7 @@ jobs: uses: actions/checkout@v4 - name: Setup toolchain - uses: dtolnay/rust-toolchain@1.71.1 + uses: dtolnay/rust-toolchain@1.73.0 with: targets: ${{ matrix.target }} components: clippy, rustfmt diff --git a/.github/workflows/gates_report.yml b/.github/workflows/gates_report.yml index 39416e628a9..f3f798fc5ea 100644 --- a/.github/workflows/gates_report.yml +++ b/.github/workflows/gates_report.yml @@ -18,7 +18,7 @@ jobs: uses: actions/checkout@v4 - name: Setup toolchain - uses: dtolnay/rust-toolchain@1.71.1 + uses: 
dtolnay/rust-toolchain@1.73.0 - uses: Swatinem/rust-cache@v2 with: diff --git a/.github/workflows/publish-acvm.yml b/.github/workflows/publish-acvm.yml index e19a61fff4f..959cd8e4bca 100644 --- a/.github/workflows/publish-acvm.yml +++ b/.github/workflows/publish-acvm.yml @@ -18,7 +18,7 @@ jobs: ref: ${{ inputs.noir-ref }} - name: Setup toolchain - uses: dtolnay/rust-toolchain@1.71.1 + uses: dtolnay/rust-toolchain@1.73.0 # These steps are in a specific order so crate dependencies are updated first - name: Publish acir_field diff --git a/.github/workflows/publish-es-packages.yml b/.github/workflows/publish-es-packages.yml index d4cd356a138..b22a26c685e 100644 --- a/.github/workflows/publish-es-packages.yml +++ b/.github/workflows/publish-es-packages.yml @@ -22,7 +22,7 @@ jobs: uses: actions/checkout@v4 - name: Setup toolchain - uses: dtolnay/rust-toolchain@1.71.1 + uses: dtolnay/rust-toolchain@1.73.0 - uses: Swatinem/rust-cache@v2 with: @@ -51,7 +51,7 @@ jobs: ref: ${{ inputs.noir-ref }} - name: Setup toolchain - uses: dtolnay/rust-toolchain@1.71.1 + uses: dtolnay/rust-toolchain@1.73.0 - uses: Swatinem/rust-cache@v2 with: @@ -83,7 +83,7 @@ jobs: uses: actions/checkout@v4 - name: Setup toolchain - uses: dtolnay/rust-toolchain@1.71.1 + uses: dtolnay/rust-toolchain@1.73.0 - uses: Swatinem/rust-cache@v2 with: diff --git a/.github/workflows/publish-nargo.yml b/.github/workflows/publish-nargo.yml index 085ab013e4e..e47e1a13053 100644 --- a/.github/workflows/publish-nargo.yml +++ b/.github/workflows/publish-nargo.yml @@ -46,7 +46,7 @@ jobs: echo "MACOSX_DEPLOYMENT_TARGET=$(xcrun -sdk macosx$(sw_vers -productVersion) --show-sdk-platform-version)" >> $GITHUB_ENV - name: Setup toolchain - uses: dtolnay/rust-toolchain@1.71.1 + uses: dtolnay/rust-toolchain@1.73.0 with: targets: ${{ matrix.target }} @@ -120,7 +120,7 @@ jobs: ref: ${{ inputs.tag || env.GITHUB_REF }} - name: Setup toolchain - uses: dtolnay/rust-toolchain@1.71.1 + uses: dtolnay/rust-toolchain@1.73.0 with: targets: 
${{ matrix.target }} diff --git a/.github/workflows/test-js-packages.yml b/.github/workflows/test-js-packages.yml index 1afd11c94fa..eb9c50d82dd 100644 --- a/.github/workflows/test-js-packages.yml +++ b/.github/workflows/test-js-packages.yml @@ -22,7 +22,7 @@ jobs: uses: actions/checkout@v4 - name: Setup toolchain - uses: dtolnay/rust-toolchain@1.71.1 + uses: dtolnay/rust-toolchain@1.73.0 - uses: Swatinem/rust-cache@v2 with: @@ -55,7 +55,7 @@ jobs: uses: actions/checkout@v4 - name: Setup toolchain - uses: dtolnay/rust-toolchain@1.71.1 + uses: dtolnay/rust-toolchain@1.73.0 - uses: Swatinem/rust-cache@v2 with: @@ -86,7 +86,7 @@ jobs: uses: actions/checkout@v4 - name: Setup toolchain - uses: dtolnay/rust-toolchain@1.71.1 + uses: dtolnay/rust-toolchain@1.73.0 - uses: Swatinem/rust-cache@v2 with: @@ -121,7 +121,7 @@ jobs: uses: actions/checkout@v4 - name: Setup toolchain - uses: dtolnay/rust-toolchain@1.71.1 + uses: dtolnay/rust-toolchain@1.73.0 - uses: Swatinem/rust-cache@v2 with: diff --git a/.github/workflows/test-rust-workspace-msrv.yml b/.github/workflows/test-rust-workspace-msrv.yml index 02444b52856..061fc65ca8b 100644 --- a/.github/workflows/test-rust-workspace-msrv.yml +++ b/.github/workflows/test-rust-workspace-msrv.yml @@ -28,7 +28,7 @@ jobs: uses: actions/checkout@v4 - name: Setup toolchain - uses: dtolnay/rust-toolchain@1.71.1 + uses: dtolnay/rust-toolchain@1.73.0 with: targets: x86_64-unknown-linux-gnu @@ -71,7 +71,7 @@ jobs: - uses: actions/checkout@v4 - name: Setup toolchain - uses: dtolnay/rust-toolchain@1.71.1 + uses: dtolnay/rust-toolchain@1.73.0 with: targets: x86_64-unknown-linux-gnu diff --git a/.github/workflows/test-rust-workspace.yml b/.github/workflows/test-rust-workspace.yml index bb31ab7873a..c12dcaba0ba 100644 --- a/.github/workflows/test-rust-workspace.yml +++ b/.github/workflows/test-rust-workspace.yml @@ -23,7 +23,7 @@ jobs: uses: actions/checkout@v4 - name: Setup toolchain - uses: dtolnay/rust-toolchain@1.71.1 + uses: 
dtolnay/rust-toolchain@1.73.0 with: targets: x86_64-unknown-linux-gnu @@ -59,7 +59,7 @@ jobs: - uses: actions/checkout@v4 - name: Setup toolchain - uses: dtolnay/rust-toolchain@1.71.1 + uses: dtolnay/rust-toolchain@1.73.0 with: targets: x86_64-unknown-linux-gnu diff --git a/Cargo.toml b/Cargo.toml index 77058554aff..7d5da7b00d0 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -45,7 +45,7 @@ version = "0.24.0" # x-release-please-end authors = ["The Noir Team "] edition = "2021" -rust-version = "1.71.1" +rust-version = "1.73.0" license = "MIT OR Apache-2.0" repository = "https://github.com/noir-lang/noir/" diff --git a/Dockerfile.ci b/Dockerfile.ci index a73ce4ab969..e0dc030980c 100644 --- a/Dockerfile.ci +++ b/Dockerfile.ci @@ -1,4 +1,4 @@ -FROM rust:1.71.1-slim-bookworm as base +FROM rust:1.73.0-slim-bookworm as base RUN apt-get update && apt-get upgrade -y && apt-get install build-essential git -y WORKDIR /usr/src/noir ENV PATH="${PATH}:/usr/src/noir/target/release" diff --git a/README.md b/README.md index 771c3f1c74d..5c93512ae26 100644 --- a/README.md +++ b/README.md @@ -54,7 +54,7 @@ Concretely the following items are on the road map: ## Minimum Rust version -This crate's minimum supported rustc version is 1.71.1. +This crate's minimum supported rustc version is 1.73.0. ## Working on this project diff --git a/docs/docs/getting_started/installation/other_install_methods.md b/docs/docs/getting_started/installation/other_install_methods.md index 076f26dfd94..a35e34aaf9c 100644 --- a/docs/docs/getting_started/installation/other_install_methods.md +++ b/docs/docs/getting_started/installation/other_install_methods.md @@ -212,7 +212,7 @@ code . #### Building and testing Assuming you are using `direnv` to populate your environment, building and testing the project can be done -with the typical `cargo build`, `cargo test`, and `cargo clippy` commands. 
You'll notice that the `cargo` version matches the version we specify in `rust-toolchain.toml`, which is 1.71.1 at the time of this writing. +with the typical `cargo build`, `cargo test`, and `cargo clippy` commands. You'll notice that the `cargo` version matches the version we specify in `rust-toolchain.toml`, which is 1.73.0 at the time of this writing. If you want to build the entire project in an isolated sandbox, you can use Nix commands: diff --git a/flake.nix b/flake.nix index f0d0a2eaebb..4c5db8bfaae 100644 --- a/flake.nix +++ b/flake.nix @@ -44,7 +44,7 @@ rustToolchain = fenix.packages.${system}.fromToolchainFile { file = ./rust-toolchain.toml; - sha256 = "sha256-dxE7lmCFWlq0nl/wKcmYvpP9zqQbBitAQgZ1zx9Ooik="; + sha256 = "sha256-rLP8+fTxnPHoR96ZJiCa/5Ans1OojI7MLsmSqR2ip8o="; }; craneLib = (crane.mkLib pkgs).overrideToolchain rustToolchain; diff --git a/rust-toolchain.toml b/rust-toolchain.toml index 955e24485fc..0e5ac891ce9 100644 --- a/rust-toolchain.toml +++ b/rust-toolchain.toml @@ -1,5 +1,5 @@ [toolchain] -channel = "1.72.1" +channel = "1.73.0" components = [ "rust-src" ] targets = [ "wasm32-unknown-unknown", "wasm32-wasi", "aarch64-apple-darwin" ] profile = "default" diff --git a/tooling/nargo/build.rs b/tooling/nargo/build.rs index 4fa7f58892a..ab2b7579132 100644 --- a/tooling/nargo/build.rs +++ b/tooling/nargo/build.rs @@ -2,8 +2,8 @@ use rustc_version::{version, Version}; fn check_rustc_version() { assert!( - version().unwrap() >= Version::parse("1.71.1").unwrap(), - "The minimal supported rustc version is 1.71.1." + version().unwrap() >= Version::parse("1.73.0").unwrap(), + "The minimal supported rustc version is 1.73.0." 
); } diff --git a/tooling/nargo_cli/build.rs b/tooling/nargo_cli/build.rs index 57aa487f66a..1ca12b75dfb 100644 --- a/tooling/nargo_cli/build.rs +++ b/tooling/nargo_cli/build.rs @@ -6,8 +6,8 @@ use std::{env, fs}; fn check_rustc_version() { assert!( - version().unwrap() >= Version::parse("1.71.1").unwrap(), - "The minimal supported rustc version is 1.71.1." + version().unwrap() >= Version::parse("1.73.0").unwrap(), + "The minimal supported rustc version is 1.73.0." ); } From 292a972dfb23dd7c664be87916cccc313d7b134d Mon Sep 17 00:00:00 2001 From: Michael J Klein Date: Thu, 22 Feb 2024 06:06:54 -0500 Subject: [PATCH 31/39] chore: rename parameter 'filter' to 'level' in 'init_log_level' (#4403) # Description ## Problem\* Resolves https://github.com/noir-lang/noir/issues/3879 ## Summary\* ## Additional Context ## Documentation\* Check one: - [x] No documentation needed. - [ ] Documentation included in this PR. - [ ] **[Exceptional Case]** Documentation to be submitted in a separate PR. # PR Checklist\* - [x] I have tested the changes locally. - [x] I have formatted the changes with [Prettier](https://prettier.io/) and/or `cargo fmt` on default settings. 
--- compiler/wasm/src/lib.rs | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/compiler/wasm/src/lib.rs b/compiler/wasm/src/lib.rs index 6d737a0ea6d..174d9b9ce9c 100644 --- a/compiler/wasm/src/lib.rs +++ b/compiler/wasm/src/lib.rs @@ -32,12 +32,12 @@ pub struct BuildInfo { } #[wasm_bindgen] -pub fn init_log_level(filter: String) { +pub fn init_log_level(level: String) { // Set the static variable from Rust use std::sync::Once; - let filter: EnvFilter = - filter.parse().expect("Could not parse log filter while initializing logger"); + let level_filter: EnvFilter = + level.parse().expect("Could not parse log filter while initializing logger"); static SET_HOOK: Once = Once::new(); SET_HOOK.call_once(|| { @@ -46,7 +46,7 @@ pub fn init_log_level(filter: String) { .without_time() .with_writer(MakeWebConsoleWriter::new()); - tracing_subscriber::registry().with(fmt_layer.with_filter(filter)).init(); + tracing_subscriber::registry().with(fmt_layer.with_filter(level_filter)).init(); }); } From 49822511710a7f1c42b8ed343e80456f8e6db2d9 Mon Sep 17 00:00:00 2001 From: Tom French <15848336+TomAFrench@users.noreply.github.com> Date: Thu, 22 Feb 2024 11:07:15 +0000 Subject: [PATCH 32/39] fix: add handling to `noir_wasm` for projects without dependencies (#4344) # Description ## Problem\* Resolves https://github.com/noir-lang/noir/issues/4338 ## Summary\* This PR returns an empty dependencies map rather than undefined if the package being compiled doesn't have any dependencies. I've also updated the test suite so it also compiles more than just a contract ## Additional Context ## Documentation\* Check one: - [x] No documentation needed. - [ ] Documentation included in this PR. - [ ] **[Exceptional Case]** Documentation to be submitted in a separate PR. # PR Checklist\* - [x] I have tested the changes locally. - [x] I have formatted the changes with [Prettier](https://prettier.io/) and/or `cargo fmt` on default settings. 
--------- Co-authored-by: kevaundray --- compiler/wasm/src/noir/package.ts | 2 +- compiler/wasm/src/types/noir_artifact.ts | 2 + .../wasm/src/types/noir_package_config.ts | 2 +- .../test/compiler/browser/compile.test.ts | 79 +++++++++++++++++++ .../browser/compile_with_deps.test.ts | 43 ---------- .../wasm/test/compiler/node/compile.test.ts | 39 +++++++++ .../compiler/node/compile_with_deps.test.ts | 20 ----- ...pile_with_deps.test.ts => compile.test.ts} | 60 ++++++++++++-- compiler/wasm/test/shared.ts | 27 +++++-- 9 files changed, 197 insertions(+), 77 deletions(-) create mode 100644 compiler/wasm/test/compiler/browser/compile.test.ts delete mode 100644 compiler/wasm/test/compiler/browser/compile_with_deps.test.ts create mode 100644 compiler/wasm/test/compiler/node/compile.test.ts delete mode 100644 compiler/wasm/test/compiler/node/compile_with_deps.test.ts rename compiler/wasm/test/compiler/shared/{compile_with_deps.test.ts => compile.test.ts} (52%) diff --git a/compiler/wasm/src/noir/package.ts b/compiler/wasm/src/noir/package.ts index a2496a03b3a..81178e6ae96 100644 --- a/compiler/wasm/src/noir/package.ts +++ b/compiler/wasm/src/noir/package.ts @@ -91,7 +91,7 @@ export class Package { * Gets this package's dependencies. */ public getDependencies(): Record { - return this.#config.dependencies; + return this.#config.dependencies ?? {}; } /** diff --git a/compiler/wasm/src/types/noir_artifact.ts b/compiler/wasm/src/types/noir_artifact.ts index 350a4053a9a..e636212a487 100644 --- a/compiler/wasm/src/types/noir_artifact.ts +++ b/compiler/wasm/src/types/noir_artifact.ts @@ -73,6 +73,8 @@ export interface ContractArtifact { * The compilation result of an Noir contract. */ export interface ProgramArtifact { + /** Version of noir used for the build. */ + noir_version: string; /** The hash of the circuit. */ hash?: number; /** * The ABI of the function. 
*/ diff --git a/compiler/wasm/src/types/noir_package_config.ts b/compiler/wasm/src/types/noir_package_config.ts index 5f07c380cf3..0203763039a 100644 --- a/compiler/wasm/src/types/noir_package_config.ts +++ b/compiler/wasm/src/types/noir_package_config.ts @@ -20,7 +20,7 @@ type NoirPackageConfigSchema = { backend?: string; license?: string; }; - dependencies: Record; + dependencies?: Record; }; /** diff --git a/compiler/wasm/test/compiler/browser/compile.test.ts b/compiler/wasm/test/compiler/browser/compile.test.ts new file mode 100644 index 00000000000..b7e6c27427f --- /dev/null +++ b/compiler/wasm/test/compiler/browser/compile.test.ts @@ -0,0 +1,79 @@ +/* eslint-disable @typescript-eslint/ban-ts-comment */ +import { getPaths } from '../../shared'; +import { expect } from '@esm-bundle/chai'; +import { compile, createFileManager } from '@noir-lang/noir_wasm'; +import { ContractArtifact, ProgramArtifact } from '../../../src/types/noir_artifact'; +import { shouldCompileContractIdentically, shouldCompileProgramIdentically } from '../shared/compile.test'; + +const paths = getPaths('.'); + +async function getFile(path: string) { + // @ts-ignore + const basePath = new URL('./../../', import.meta.url).toString().replace(/\/$/g, ''); + const url = `${basePath}${path.replace('.', '')}`; + const response = await fetch(url); + return response; +} + +// eslint-disable-next-line @typescript-eslint/no-explicit-any +async function getPrecompiledSource(path: string): Promise { + const response = await getFile(path); + const compiledData = await response.text(); + return JSON.parse(compiledData); +} + +describe('noir-compiler/browser', () => { + shouldCompileProgramIdentically( + async () => { + const { simpleScriptExpectedArtifact } = paths; + const fm = createFileManager('/'); + const files = Object.values(paths).filter((fileOrDir) => /^\.?\/.*\..*$/.test(fileOrDir)); + for (const path of files) { + console.log(path); + await fm.writeFile(path, (await getFile(path)).body as 
ReadableStream); + } + const nargoArtifact = (await getPrecompiledSource(simpleScriptExpectedArtifact)) as ProgramArtifact; + const noirWasmArtifact = await compile(fm, '/fixtures/simple'); + + return { nargoArtifact, noirWasmArtifact }; + }, + expect, + 60 * 20e3, + ); + + shouldCompileProgramIdentically( + async () => { + const { depsScriptExpectedArtifact } = paths; + const fm = createFileManager('/'); + const files = Object.values(paths).filter((fileOrDir) => /^\.?\/.*\..*$/.test(fileOrDir)); + for (const path of files) { + console.log(path); + await fm.writeFile(path, (await getFile(path)).body as ReadableStream); + } + const nargoArtifact = (await getPrecompiledSource(depsScriptExpectedArtifact)) as ProgramArtifact; + const noirWasmArtifact = await compile(fm, '/fixtures/with-deps'); + + return { nargoArtifact, noirWasmArtifact }; + }, + expect, + 60 * 20e3, + ); + + shouldCompileContractIdentically( + async () => { + const { contractExpectedArtifact } = paths; + const fm = createFileManager('/'); + const files = Object.values(paths).filter((fileOrDir) => /^\.?\/.*\..*$/.test(fileOrDir)); + for (const path of files) { + console.log(path); + await fm.writeFile(path, (await getFile(path)).body as ReadableStream); + } + const nargoArtifact = (await getPrecompiledSource(contractExpectedArtifact)) as ContractArtifact; + const noirWasmArtifact = await compile(fm, '/fixtures/noir-contract'); + + return { nargoArtifact, noirWasmArtifact }; + }, + expect, + 60 * 20e3, + ); +}); diff --git a/compiler/wasm/test/compiler/browser/compile_with_deps.test.ts b/compiler/wasm/test/compiler/browser/compile_with_deps.test.ts deleted file mode 100644 index 0d1e22e288f..00000000000 --- a/compiler/wasm/test/compiler/browser/compile_with_deps.test.ts +++ /dev/null @@ -1,43 +0,0 @@ -/* eslint-disable @typescript-eslint/ban-ts-comment */ -import { getPaths } from '../../shared'; -import { expect } from '@esm-bundle/chai'; -import { compile, createFileManager } from 
'@noir-lang/noir_wasm'; -import { ContractArtifact } from '../../../src/types/noir_artifact'; -import { shouldCompileIdentically } from '../shared/compile_with_deps.test'; - -const paths = getPaths('.'); - -async function getFile(path: string) { - // @ts-ignore - const basePath = new URL('./../../', import.meta.url).toString().replace(/\/$/g, ''); - const url = `${basePath}${path.replace('.', '')}`; - const response = await fetch(url); - return response; -} - -// eslint-disable-next-line @typescript-eslint/no-explicit-any -async function getPrecompiledSource(path: string): Promise { - const response = await getFile(path); - const compiledData = await response.text(); - return JSON.parse(compiledData); -} - -describe('noir-compiler/browser', () => { - shouldCompileIdentically( - async () => { - const { contractExpectedArtifact } = paths; - const fm = createFileManager('/'); - const files = Object.values(paths).filter((fileOrDir) => /^\.?\/.*\..*$/.test(fileOrDir)); - for (const path of files) { - console.log(path); - await fm.writeFile(path, (await getFile(path)).body as ReadableStream); - } - const nargoArtifact = (await getPrecompiledSource(contractExpectedArtifact)) as ContractArtifact; - const noirWasmArtifact = await compile(fm, '/fixtures/noir-contract'); - - return { nargoArtifact, noirWasmArtifact }; - }, - expect, - 60 * 20e3, - ); -}); diff --git a/compiler/wasm/test/compiler/node/compile.test.ts b/compiler/wasm/test/compiler/node/compile.test.ts new file mode 100644 index 00000000000..9af98195825 --- /dev/null +++ b/compiler/wasm/test/compiler/node/compile.test.ts @@ -0,0 +1,39 @@ +import { join, resolve } from 'path'; +import { getPaths } from '../../shared'; + +import { expect } from 'chai'; +import { compile, createFileManager } from '@noir-lang/noir_wasm'; +import { readFile } from 'fs/promises'; +import { ContractArtifact, ProgramArtifact } from '../../../src/types/noir_artifact'; +import { shouldCompileContractIdentically, 
shouldCompileProgramIdentically } from '../shared/compile.test'; + +const basePath = resolve(join(__dirname, '../../')); + +describe('noir-compiler/node', () => { + shouldCompileProgramIdentically(async () => { + const { simpleScriptProjectPath, simpleScriptExpectedArtifact } = getPaths(basePath); + + const fm = createFileManager(simpleScriptProjectPath); + const nargoArtifact = JSON.parse((await readFile(simpleScriptExpectedArtifact)).toString()) as ProgramArtifact; + const noirWasmArtifact = await compile(fm); + return { nargoArtifact, noirWasmArtifact }; + }, expect); + + shouldCompileProgramIdentically(async () => { + const { depsScriptProjectPath, depsScriptExpectedArtifact } = getPaths(basePath); + + const fm = createFileManager(depsScriptProjectPath); + const nargoArtifact = JSON.parse((await readFile(depsScriptExpectedArtifact)).toString()) as ProgramArtifact; + const noirWasmArtifact = await compile(fm); + return { nargoArtifact, noirWasmArtifact }; + }, expect); + + shouldCompileContractIdentically(async () => { + const { contractProjectPath, contractExpectedArtifact } = getPaths(basePath); + + const fm = createFileManager(contractProjectPath); + const nargoArtifact = JSON.parse((await readFile(contractExpectedArtifact)).toString()) as ContractArtifact; + const noirWasmArtifact = await compile(fm); + return { nargoArtifact, noirWasmArtifact }; + }, expect); +}); diff --git a/compiler/wasm/test/compiler/node/compile_with_deps.test.ts b/compiler/wasm/test/compiler/node/compile_with_deps.test.ts deleted file mode 100644 index 2a402dc9d02..00000000000 --- a/compiler/wasm/test/compiler/node/compile_with_deps.test.ts +++ /dev/null @@ -1,20 +0,0 @@ -import { join, resolve } from 'path'; -import { getPaths } from '../../shared'; - -import { expect } from 'chai'; -import { compile, createFileManager } from '@noir-lang/noir_wasm'; -import { readFile } from 'fs/promises'; -import { ContractArtifact } from '../../../src/types/noir_artifact'; -import { 
shouldCompileIdentically } from '../shared/compile_with_deps.test'; - -const basePath = resolve(join(__dirname, '../../')); -const { contractProjectPath, contractExpectedArtifact } = getPaths(basePath); - -describe('noir-compiler/node', () => { - shouldCompileIdentically(async () => { - const fm = createFileManager(contractProjectPath); - const nargoArtifact = JSON.parse((await readFile(contractExpectedArtifact)).toString()) as ContractArtifact; - const noirWasmArtifact = await compile(fm); - return { nargoArtifact, noirWasmArtifact }; - }, expect); -}); diff --git a/compiler/wasm/test/compiler/shared/compile_with_deps.test.ts b/compiler/wasm/test/compiler/shared/compile.test.ts similarity index 52% rename from compiler/wasm/test/compiler/shared/compile_with_deps.test.ts rename to compiler/wasm/test/compiler/shared/compile.test.ts index 0960cba0665..88e8e8c8e5a 100644 --- a/compiler/wasm/test/compiler/shared/compile_with_deps.test.ts +++ b/compiler/wasm/test/compiler/shared/compile.test.ts @@ -6,9 +6,47 @@ import { DebugFileMap, DebugInfo, NoirFunctionEntry, + ProgramArtifact, + ProgramCompilationArtifacts, } from '../../../src/types/noir_artifact'; -export function shouldCompileIdentically( +export function shouldCompileProgramIdentically( + compileFn: () => Promise<{ nargoArtifact: ProgramArtifact; noirWasmArtifact: CompilationResult }>, + expect: typeof Expect, + timeout = 5000, +) { + it('both nargo and noir_wasm should compile identically', async () => { + // Compile! 
+ const { nargoArtifact, noirWasmArtifact } = await compileFn(); + + // Prepare nargo artifact + const [_nargoDebugInfos, nargoFileMap] = deleteProgramDebugMetadata(nargoArtifact); + normalizeVersion(nargoArtifact); + + // Prepare noir-wasm artifact + const noirWasmProgram = (noirWasmArtifact as unknown as ProgramCompilationArtifacts).program; + expect(noirWasmProgram).not.to.be.undefined; + const [_noirWasmDebugInfos, norWasmFileMap] = deleteProgramDebugMetadata(noirWasmProgram); + normalizeVersion(noirWasmProgram); + + // We first compare both programs without considering debug info + delete (noirWasmProgram as Partial).hash; + delete (nargoArtifact as Partial).hash; + expect(nargoArtifact).to.deep.eq(noirWasmProgram); + + // Compare the file maps, ignoring keys, since those depend on the order in which files are visited, + // which may change depending on the file manager implementation. Also ignores paths, since the base + // path is reported differently between nargo and noir-wasm. + expect(getSources(nargoFileMap)).to.have.members(getSources(norWasmFileMap)); + + // Compare the debug symbol information, ignoring the actual ids used for file identifiers.
+ // Debug symbol info looks like the following, what we need is to ignore the 'file' identifiers + // {"locations":{"0":[{"span":{"start":141,"end":156},"file":39},{"span":{"start":38,"end":76},"file":38},{"span":{"start":824,"end":862},"file":23}]}} + // expect(nargoDebugInfos).to.deep.eq(noirWasmDebugInfos); + }).timeout(timeout); +} + +export function shouldCompileContractIdentically( compileFn: () => Promise<{ nargoArtifact: ContractArtifact; noirWasmArtifact: CompilationResult }>, expect: typeof Expect, timeout = 5000, @@ -18,13 +56,13 @@ export function shouldCompileIdentically( const { nargoArtifact, noirWasmArtifact } = await compileFn(); // Prepare nargo artifact - const [nargoDebugInfos, nargoFileMap] = deleteDebugMetadata(nargoArtifact); + const [nargoDebugInfos, nargoFileMap] = deleteContractDebugMetadata(nargoArtifact); normalizeVersion(nargoArtifact); // Prepare noir-wasm artifact - const noirWasmContract = (noirWasmArtifact as ContractCompilationArtifacts).contract; + const noirWasmContract = (noirWasmArtifact as unknown as ContractCompilationArtifacts).contract; expect(noirWasmContract).not.to.be.undefined; - const [noirWasmDebugInfos, norWasmFileMap] = deleteDebugMetadata(noirWasmContract); + const [noirWasmDebugInfos, norWasmFileMap] = deleteContractDebugMetadata(noirWasmContract); normalizeVersion(noirWasmContract); // We first compare both contracts without considering debug info @@ -43,7 +81,7 @@ export function shouldCompileIdentically( } /** Remove commit identifier from version, which may not match depending on cached nargo and noir-wasm */ -function normalizeVersion(contract: ContractArtifact) { +function normalizeVersion(contract: ProgramArtifact | ContractArtifact) { contract.noir_version = contract.noir_version.replace(/\+.+$/, ''); } @@ -57,8 +95,18 @@ function extractDebugInfos(fns: NoirFunctionEntry[]) { }); } +/** Deletes all debug info from a program and returns it. 
*/ +function deleteProgramDebugMetadata(program: ProgramArtifact) { + const debugSymbols = inflateDebugSymbols(program.debug_symbols); + const fileMap = program.file_map; + + delete (program as Partial).debug_symbols; + delete (program as Partial).file_map; + return [debugSymbols, fileMap]; +} + /** Deletes all debug info from a contract and returns it. */ -function deleteDebugMetadata(contract: ContractArtifact) { +function deleteContractDebugMetadata(contract: ContractArtifact) { contract.functions.sort((a, b) => a.name.localeCompare(b.name)); const fileMap = contract.file_map; delete (contract as Partial).file_map; diff --git a/compiler/wasm/test/shared.ts b/compiler/wasm/test/shared.ts index 9181919ff39..9f4d417a614 100644 --- a/compiler/wasm/test/shared.ts +++ b/compiler/wasm/test/shared.ts @@ -1,14 +1,23 @@ export function getPaths(basePath: string) { const fixtures = `${basePath}/fixtures`; - const simpleScriptSourcePath = `${fixtures}/simple/src/main.nr`; - const simpleScriptExpectedArtifact = `${fixtures}/simple/target/noir_wasm_testing.json`; + const simpleScriptProjectPath = `${fixtures}/simple`; + const simpleScriptSourcePath = `${simpleScriptProjectPath}/src/main.nr`; + const simpleScriptTOMLPath = `${simpleScriptProjectPath}/Nargo.toml`; + const simpleScriptExpectedArtifact = `${simpleScriptProjectPath}/target/noir_wasm_testing.json`; - const depsScriptSourcePath = `${fixtures}/with-deps/src/main.nr`; - const depsScriptExpectedArtifact = `${fixtures}/with-deps/target/noir_wasm_testing.json`; + const depsScriptProjectPath = `${fixtures}/with-deps`; + const depsScriptSourcePath = `${depsScriptProjectPath}/src/main.nr`; + const depsScriptTOMLPath = `${depsScriptProjectPath}/Nargo.toml`; + const depsScriptExpectedArtifact = `${depsScriptProjectPath}/target/noir_wasm_testing.json`; - const libASourcePath = `${fixtures}/deps/lib-a/src/lib.nr`; - const libBSourcePath = `${fixtures}/deps/lib-b/src/lib.nr`; + const libAProjectPath = `${fixtures}/deps/lib-a`; + 
const libASourcePath = `${libAProjectPath}/src/lib.nr`; + const libATOMLPath = `${libAProjectPath}/Nargo.toml`; + + const libBProjectPath = `${fixtures}/deps/lib-b`; + const libBSourcePath = `${libBProjectPath}/src/lib.nr`; + const libBTOMLPath = `${libBProjectPath}/Nargo.toml`; const contractProjectPath = `${fixtures}/noir-contract`; const contractSourcePath = `${contractProjectPath}/src/main.nr`; @@ -22,12 +31,18 @@ export function getPaths(basePath: string) { const libCTOMLPath = `${libCProjectPath}/Nargo.toml`; return { + simpleScriptProjectPath, simpleScriptSourcePath, + simpleScriptTOMLPath, simpleScriptExpectedArtifact, + depsScriptProjectPath, depsScriptSourcePath, + depsScriptTOMLPath, depsScriptExpectedArtifact, libASourcePath, + libATOMLPath, libBSourcePath, + libBTOMLPath, contractProjectPath, contractSourcePath, contractTOMLPath, From e3829213d8411f84e117a14b43816967925095e0 Mon Sep 17 00:00:00 2001 From: Tom French <15848336+TomAFrench@users.noreply.github.com> Date: Thu, 22 Feb 2024 11:07:46 +0000 Subject: [PATCH 33/39] feat(ci): Use wasm-opt when compiling wasm packages (#4334) # Description ## Problem\* Resolves ## Summary\* We're currently building unoptimised wasm binaries in CI as wasm-opt isn't installed. This PR installs wasm-opt to perform these optimisations. ## Additional Context ## Documentation\* Check one: - [ ] No documentation needed. - [ ] Documentation included in this PR. - [ ] **[Exceptional Case]** Documentation to be submitted in a separate PR. # PR Checklist\* - [ ] I have tested the changes locally. - [ ] I have formatted the changes with [Prettier](https://prettier.io/) and/or `cargo fmt` on default settings. 
--- .github/scripts/wasm-opt-install.sh | 8 ++++++++ .github/workflows/publish-es-packages.yml | 9 +++++++++ .github/workflows/test-js-packages.yml | 9 +++++++++ 3 files changed, 26 insertions(+) create mode 100755 .github/scripts/wasm-opt-install.sh diff --git a/.github/scripts/wasm-opt-install.sh b/.github/scripts/wasm-opt-install.sh new file mode 100755 index 00000000000..cbdeb8f2bfe --- /dev/null +++ b/.github/scripts/wasm-opt-install.sh @@ -0,0 +1,8 @@ +#!/bin/bash +set -eu + +cd $(dirname "$0") + +./cargo-binstall-install.sh + +cargo-binstall wasm-opt --version 0.116.0 -y diff --git a/.github/workflows/publish-es-packages.yml b/.github/workflows/publish-es-packages.yml index b22a26c685e..f72a97b2684 100644 --- a/.github/workflows/publish-es-packages.yml +++ b/.github/workflows/publish-es-packages.yml @@ -32,6 +32,9 @@ jobs: - name: Install Yarn dependencies uses: ./.github/actions/setup + - name: Install wasm-opt + run: ./.github/scripts/wasm-opt-install.sh + - name: Build noirc_abi run: ./.github/scripts/noirc-abi-build.sh @@ -61,6 +64,9 @@ jobs: - name: Install Yarn dependencies uses: ./.github/actions/setup + - name: Install wasm-opt + run: ./.github/scripts/wasm-opt-install.sh + - name: Build noir_js_types run: yarn workspace @noir-lang/types build @@ -93,6 +99,9 @@ jobs: - name: Install Yarn dependencies uses: ./.github/actions/setup + - name: Install wasm-opt + run: ./.github/scripts/wasm-opt-install.sh + - name: Build acvm_js run: ./.github/scripts/acvm_js-build.sh diff --git a/.github/workflows/test-js-packages.yml b/.github/workflows/test-js-packages.yml index eb9c50d82dd..b3908ee5d3e 100644 --- a/.github/workflows/test-js-packages.yml +++ b/.github/workflows/test-js-packages.yml @@ -66,6 +66,9 @@ jobs: - name: Install Yarn dependencies uses: ./.github/actions/setup + - name: Install wasm-opt + run: ./.github/scripts/wasm-opt-install.sh + - name: Build noirc_abi run: ./.github/scripts/noirc-abi-build.sh @@ -97,6 +100,9 @@ jobs: - name: Install Yarn 
dependencies uses: ./.github/actions/setup + - name: Install wasm-opt + run: ./.github/scripts/wasm-opt-install.sh + - name: Build noir_js_types run: yarn workspace @noir-lang/types build @@ -132,6 +138,9 @@ jobs: - name: Install Yarn dependencies uses: ./.github/actions/setup + - name: Install wasm-opt + run: ./.github/scripts/wasm-opt-install.sh + - name: Build acvm_js run: ./.github/scripts/acvm_js-build.sh From ceb8001e213066bd8a01d90657951ce5f1419f3a Mon Sep 17 00:00:00 2001 From: paulallensuxs <114240091+paulallensuxs@users.noreply.github.com> Date: Thu, 22 Feb 2024 12:46:10 +0000 Subject: [PATCH 34/39] chore: Add #[recursive] Explainer to Documentation (#4399) # Description ## Problem Step towards [#4392](https://github.com/noir-lang/noir/issues/4392) The documentation for Recursive Proofs was lacking information on the newly introduced `#[recursive]` attribute, which is crucial for understanding how to mark circuits for recursive proof generation. ## Summary This pull request updates the Recursive Proofs documentation page to include a comprehensive section on the `#[recursive]` attribute. It explains the attribute's purpose, how it should be used, and provides an example demonstrating its application within a circuit definition. ## Additional Context The introduction of the `#[recursive]` attribute simplifies the process of designating circuits for recursive proofs, eliminating the need for manual flagging in the tooling infrastructure. ## Documentation - [ ] No documentation needed. - [x] Documentation included in this PR. - [ ] **[Exceptional Case]** Documentation to be submitted in a separate PR. # PR Checklist - [x] I have tested the changes locally. - [x] I have formatted the changes with [Prettier](https://prettier.io/) and/or `cargo fmt` on default settings. 
--------- Co-authored-by: Savio <72797635+Savio-Sou@users.noreply.github.com> --- docs/docs/noir/standard_library/recursion.md | 20 +++++++++++++++++++ .../noir/standard_library/recursion.md | 20 +++++++++++++++++++ 2 files changed, 40 insertions(+) diff --git a/docs/docs/noir/standard_library/recursion.md b/docs/docs/noir/standard_library/recursion.md index f252150c8b5..9337499dac8 100644 --- a/docs/docs/noir/standard_library/recursion.md +++ b/docs/docs/noir/standard_library/recursion.md @@ -8,6 +8,26 @@ Noir supports recursively verifying proofs, meaning you verify the proof of a No Read [the explainer on recursion](../../explainers/explainer-recursion.md) to know more about this function and the [guide on how to use it.](../../how_to/how-to-recursion.md) +## The `#[recursive]` Attribute + +In Noir, the `#[recursive]` attribute is used to indicate that a circuit is designed for recursive proof generation. When applied, it informs the compiler and the tooling that the circuit should be compiled in a way that makes its proofs suitable for recursive verification. This attribute eliminates the need for manual flagging of recursion at the tooling level, streamlining the proof generation process for recursive circuits. + +### Example usage with `#[recursive]` + +```rust +#[recursive] +fn main(x: Field, y: pub Field) { + assert(x == y, "x and y are not equal"); +} + +// This marks the circuit as recursion-friendly and indicates that proofs generated from this circuit +// are intended for recursive verification. +``` + +By incorporating this attribute directly in the circuit's definition, tooling like Nargo and NoirJS can automatically execute recursive-specific duties for Noir programs (e.g. recursive-friendly proof artifact generation) without additional flags or configurations. 
+ +## Verifying Recursive Proofs + ```rust #[foreign(verify_proof)] fn verify_proof(_verification_key : [Field], _proof : [Field], _public_input : Field, _key_hash : Field) {} diff --git a/docs/versioned_docs/version-v0.24.0/noir/standard_library/recursion.md b/docs/versioned_docs/version-v0.24.0/noir/standard_library/recursion.md index f252150c8b5..9337499dac8 100644 --- a/docs/versioned_docs/version-v0.24.0/noir/standard_library/recursion.md +++ b/docs/versioned_docs/version-v0.24.0/noir/standard_library/recursion.md @@ -8,6 +8,26 @@ Noir supports recursively verifying proofs, meaning you verify the proof of a No Read [the explainer on recursion](../../explainers/explainer-recursion.md) to know more about this function and the [guide on how to use it.](../../how_to/how-to-recursion.md) +## The `#[recursive]` Attribute + +In Noir, the `#[recursive]` attribute is used to indicate that a circuit is designed for recursive proof generation. When applied, it informs the compiler and the tooling that the circuit should be compiled in a way that makes its proofs suitable for recursive verification. This attribute eliminates the need for manual flagging of recursion at the tooling level, streamlining the proof generation process for recursive circuits. + +### Example usage with `#[recursive]` + +```rust +#[recursive] +fn main(x: Field, y: pub Field) { + assert(x == y, "x and y are not equal"); +} + +// This marks the circuit as recursion-friendly and indicates that proofs generated from this circuit +// are intended for recursive verification. +``` + +By incorporating this attribute directly in the circuit's definition, tooling like Nargo and NoirJS can automatically execute recursive-specific duties for Noir programs (e.g. recursive-friendly proof artifact generation) without additional flags or configurations. 
+ +## Verifying Recursive Proofs + ```rust #[foreign(verify_proof)] fn verify_proof(_verification_key : [Field], _proof : [Field], _public_input : Field, _key_hash : Field) {} From 601fd9afc502236af1db0c4492698ba2298c7501 Mon Sep 17 00:00:00 2001 From: jfecher Date: Thu, 22 Feb 2024 15:31:36 +0000 Subject: [PATCH 35/39] fix!: Ban Fields in for loop indices and bitwise ops (#4376) # Description ## Problem\* Resolves https://github.com/noir-lang/noir/issues/3639 Resolves https://github.com/noir-lang/noir/issues/4193 ## Summary\* Uses the new TypeVariableKind::Integer in for loops and bitwise operations to prevent `Field` types from being used there. Removes the old `delayed_type_checks` hack. ## Additional Context ## Documentation\* Check one: - [x] No documentation needed. - [ ] Documentation included in this PR. - [ ] **[Exceptional Case]** Documentation to be submitted in a separate PR. # PR Checklist\* - [x] I have tested the changes locally. - [x] I have formatted the changes with [Prettier](https://prettier.io/) and/or `cargo fmt` on default settings. 
--------- Co-authored-by: kevaundray Co-authored-by: TomAFrench --- .../src/ssa/function_builder/mod.rs | 5 + .../src/ssa/ir/instruction/call.rs | 12 +- compiler/noirc_evaluator/src/ssa/ir/types.rs | 5 + .../src/ssa/ssa_gen/context.rs | 10 +- .../noirc_evaluator/src/ssa/ssa_gen/mod.rs | 9 +- compiler/noirc_frontend/src/ast/expression.rs | 18 +-- .../src/hir/resolution/resolver.rs | 2 +- .../noirc_frontend/src/hir/type_check/expr.rs | 148 +++++++----------- .../noirc_frontend/src/hir/type_check/mod.rs | 31 +--- .../noirc_frontend/src/hir/type_check/stmt.rs | 13 +- compiler/noirc_frontend/src/hir_def/expr.rs | 13 -- compiler/noirc_frontend/src/hir_def/types.rs | 17 +- .../src/monomorphization/mod.rs | 7 +- compiler/noirc_frontend/src/tests.rs | 2 +- noir_stdlib/src/array.nr | 10 +- noir_stdlib/src/collections/bounded_vec.nr | 20 ++- noir_stdlib/src/collections/vec.nr | 8 +- noir_stdlib/src/field.nr | 2 +- noir_stdlib/src/hash/poseidon.nr | 8 +- noir_stdlib/src/hash/poseidon/bn254.nr | 16 +- noir_stdlib/src/slice.nr | 4 +- .../execution_success/array_len/src/main.nr | 6 +- .../brillig_cow_regression/src/main.nr | 14 +- .../brillig_oracle/Prover.toml | 2 +- .../brillig_oracle/src/main.nr | 4 +- .../brillig_slices/src/main.nr | 2 +- .../global_consts/src/baz.nr | 2 +- .../global_consts/src/foo.nr | 6 +- .../global_consts/src/foo/bar.nr | 4 +- .../global_consts/src/main.nr | 14 +- .../slice_dynamic_index/src/main.nr | 6 +- .../execution_success/slices/src/main.nr | 2 +- 32 files changed, 184 insertions(+), 238 deletions(-) diff --git a/compiler/noirc_evaluator/src/ssa/function_builder/mod.rs b/compiler/noirc_evaluator/src/ssa/function_builder/mod.rs index fe71b876879..9d27554dcaa 100644 --- a/compiler/noirc_evaluator/src/ssa/function_builder/mod.rs +++ b/compiler/noirc_evaluator/src/ssa/function_builder/mod.rs @@ -115,6 +115,11 @@ impl FunctionBuilder { self.numeric_constant(value.into(), Type::field()) } + /// Insert a numeric constant into the current function of type 
Type::length_type() + pub(crate) fn length_constant(&mut self, value: impl Into) -> ValueId { + self.numeric_constant(value.into(), Type::length_type()) + } + /// Insert an array constant into the current function with the given element values. pub(crate) fn array_constant(&mut self, elements: im::Vector, typ: Type) -> ValueId { self.current_function.dfg.make_array(elements, typ) diff --git a/compiler/noirc_evaluator/src/ssa/ir/instruction/call.rs b/compiler/noirc_evaluator/src/ssa/ir/instruction/call.rs index 4217a3d4710..9349d58c4d9 100644 --- a/compiler/noirc_evaluator/src/ssa/ir/instruction/call.rs +++ b/compiler/noirc_evaluator/src/ssa/ir/instruction/call.rs @@ -77,7 +77,7 @@ pub(super) fn simplify_call( Intrinsic::ArrayLen => { if let Some(length) = dfg.try_get_array_length(arguments[0]) { let length = FieldElement::from(length as u128); - SimplifyResult::SimplifiedTo(dfg.make_constant(length, Type::field())) + SimplifyResult::SimplifiedTo(dfg.make_constant(length, Type::length_type())) } else if matches!(dfg.type_of_value(arguments[1]), Type::Slice(_)) { SimplifyResult::SimplifiedTo(arguments[0]) } else { @@ -283,7 +283,7 @@ fn update_slice_length( operator: BinaryOp, block: BasicBlockId, ) -> ValueId { - let one = dfg.make_constant(FieldElement::one(), Type::field()); + let one = dfg.make_constant(FieldElement::one(), Type::length_type()); let instruction = Instruction::Binary(Binary { lhs: slice_len, operator, rhs: one }); let call_stack = dfg.get_value_call_stack(slice_len); dfg.insert_instruction_and_results(instruction, block, None, call_stack).first() @@ -296,8 +296,8 @@ fn simplify_slice_push_back( dfg: &mut DataFlowGraph, block: BasicBlockId, ) -> SimplifyResult { - // The capacity must be an integer so that we can compare it against the slice length which is represented as a field - let capacity = dfg.make_constant((slice.len() as u128).into(), Type::unsigned(64)); + // The capacity must be an integer so that we can compare it against the slice 
length + let capacity = dfg.make_constant((slice.len() as u128).into(), Type::length_type()); let len_equals_capacity_instr = Instruction::Binary(Binary { lhs: arguments[0], operator: BinaryOp::Eq, rhs: capacity }); let call_stack = dfg.get_value_call_stack(arguments[0]); @@ -362,7 +362,7 @@ fn simplify_slice_pop_back( let new_slice_length = update_slice_length(arguments[0], dfg, BinaryOp::Sub, block); - let element_size = dfg.make_constant((element_count as u128).into(), Type::field()); + let element_size = dfg.make_constant((element_count as u128).into(), Type::length_type()); let flattened_len_instr = Instruction::binary(BinaryOp::Mul, arguments[0], element_size); let mut flattened_len = dfg .insert_instruction_and_results(flattened_len_instr, block, None, CallStack::new()) @@ -478,7 +478,7 @@ fn make_constant_slice( let typ = Type::Slice(Rc::new(vec![typ])); let length = FieldElement::from(result_constants.len() as u128); - (dfg.make_constant(length, Type::field()), dfg.make_array(result_constants.into(), typ)) + (dfg.make_constant(length, Type::length_type()), dfg.make_array(result_constants.into(), typ)) } /// Returns a slice (represented by a tuple (len, slice)) of constants corresponding to the limbs of the radix decomposition. diff --git a/compiler/noirc_evaluator/src/ssa/ir/types.rs b/compiler/noirc_evaluator/src/ssa/ir/types.rs index 8dc9e67db79..ea3f5393245 100644 --- a/compiler/noirc_evaluator/src/ssa/ir/types.rs +++ b/compiler/noirc_evaluator/src/ssa/ir/types.rs @@ -90,6 +90,11 @@ impl Type { Type::Numeric(NumericType::NativeField) } + /// Creates the type of an array's length. + pub(crate) fn length_type() -> Type { + Type::unsigned(64) + } + /// Returns the bit size of the provided numeric type. 
/// /// # Panics diff --git a/compiler/noirc_evaluator/src/ssa/ssa_gen/context.rs b/compiler/noirc_evaluator/src/ssa/ssa_gen/context.rs index 845ffd15413..9c760c013a9 100644 --- a/compiler/noirc_evaluator/src/ssa/ssa_gen/context.rs +++ b/compiler/noirc_evaluator/src/ssa/ssa_gen/context.rs @@ -192,7 +192,7 @@ impl<'a> FunctionContext<'a> { ast::Type::Slice(elements) => { let element_types = Self::convert_type(elements).flatten(); Tree::Branch(vec![ - Tree::Leaf(f(Type::field())), + Tree::Leaf(f(Type::length_type())), Tree::Leaf(f(Type::Slice(Rc::new(element_types)))), ]) } @@ -640,13 +640,13 @@ impl<'a> FunctionContext<'a> { let result_alloc = self.builder.set_location(location).insert_allocate(Type::bool()); let true_value = self.builder.numeric_constant(1u128, Type::bool()); self.builder.insert_store(result_alloc, true_value); - let zero = self.builder.field_constant(0u128); + let zero = self.builder.length_constant(0u128); self.builder.terminate_with_jmp(loop_start, vec![zero]); // loop_start self.builder.switch_to_block(loop_start); - let i = self.builder.add_block_parameter(loop_start, Type::field()); - let array_length = self.builder.field_constant(array_length as u128); + let i = self.builder.add_block_parameter(loop_start, Type::length_type()); + let array_length = self.builder.length_constant(array_length as u128); let v0 = self.builder.insert_binary(i, BinaryOp::Lt, array_length); self.builder.terminate_with_jmpif(v0, loop_body, loop_end); @@ -658,7 +658,7 @@ impl<'a> FunctionContext<'a> { let v4 = self.builder.insert_load(result_alloc, Type::bool()); let v5 = self.builder.insert_binary(v4, BinaryOp::And, v3); self.builder.insert_store(result_alloc, v5); - let one = self.builder.field_constant(1u128); + let one = self.builder.length_constant(1u128); let v6 = self.builder.insert_binary(i, BinaryOp::Add, one); self.builder.terminate_with_jmp(loop_start, vec![v6]); diff --git a/compiler/noirc_evaluator/src/ssa/ssa_gen/mod.rs 
b/compiler/noirc_evaluator/src/ssa/ssa_gen/mod.rs index 8f2c923d62c..6f59fa13274 100644 --- a/compiler/noirc_evaluator/src/ssa/ssa_gen/mod.rs +++ b/compiler/noirc_evaluator/src/ssa/ssa_gen/mod.rs @@ -128,6 +128,7 @@ impl<'a> FunctionContext<'a> { } fn codegen_expression(&mut self, expr: &Expression) -> Result { + eprintln!("Codegen {expr}"); match expr { Expression::Ident(ident) => Ok(self.codegen_ident(ident)), Expression::Literal(literal) => self.codegen_literal(literal), @@ -196,7 +197,7 @@ impl<'a> FunctionContext<'a> { } ast::Type::Slice(_) => { let slice_length = - self.builder.field_constant(array.contents.len() as u128); + self.builder.length_constant(array.contents.len() as u128); let slice_contents = self.codegen_array_checked(elements, typ[1].clone())?; Tree::Branch(vec![slice_length.into(), slice_contents]) @@ -221,7 +222,7 @@ impl<'a> FunctionContext<'a> { // A caller needs multiple pieces of information to make use of a format string // The message string, the number of fields to be formatted, and the fields themselves let string = self.codegen_string(string); - let field_count = self.builder.field_constant(*number_of_fields as u128); + let field_count = self.builder.length_constant(*number_of_fields as u128); let fields = self.codegen_expression(fields)?; Ok(Tree::Branch(vec![string, field_count.into(), fields])) @@ -347,8 +348,10 @@ impl<'a> FunctionContext<'a> { } fn codegen_binary(&mut self, binary: &ast::Binary) -> Result { + eprintln!("Start binary"); let lhs = self.codegen_non_tuple_expression(&binary.lhs)?; let rhs = self.codegen_non_tuple_expression(&binary.rhs)?; + eprintln!("Insert binary"); Ok(self.insert_binary(lhs, binary.operator, rhs, binary.location)) } @@ -615,7 +618,7 @@ impl<'a> FunctionContext<'a> { { match intrinsic { Intrinsic::SliceInsert => { - let one = self.builder.field_constant(1u128); + let one = self.builder.length_constant(1u128); // We add one here in the case of a slice insert as a slice insert at the length of the 
slice // can be converted to a slice push back diff --git a/compiler/noirc_frontend/src/ast/expression.rs b/compiler/noirc_frontend/src/ast/expression.rs index c78deaf6dbb..2a252633a29 100644 --- a/compiler/noirc_frontend/src/ast/expression.rs +++ b/compiler/noirc_frontend/src/ast/expression.rs @@ -236,7 +236,15 @@ impl BinaryOpKind { } pub fn is_valid_for_field_type(self) -> bool { - matches!(self, BinaryOpKind::Equal | BinaryOpKind::NotEqual) + matches!( + self, + BinaryOpKind::Add + | BinaryOpKind::Subtract + | BinaryOpKind::Multiply + | BinaryOpKind::Divide + | BinaryOpKind::Equal + | BinaryOpKind::NotEqual + ) } pub fn as_string(self) -> &'static str { @@ -280,14 +288,6 @@ impl BinaryOpKind { BinaryOpKind::Modulo => Token::Percent, } } - - pub fn is_bit_shift(&self) -> bool { - matches!(self, BinaryOpKind::ShiftRight | BinaryOpKind::ShiftLeft) - } - - pub fn is_modulo(&self) -> bool { - matches!(self, BinaryOpKind::Modulo) - } } #[derive(PartialEq, PartialOrd, Eq, Ord, Hash, Debug, Copy, Clone)] diff --git a/compiler/noirc_frontend/src/hir/resolution/resolver.rs b/compiler/noirc_frontend/src/hir/resolution/resolver.rs index f05a69be7c2..7f9e48353a7 100644 --- a/compiler/noirc_frontend/src/hir/resolution/resolver.rs +++ b/compiler/noirc_frontend/src/hir/resolution/resolver.rs @@ -1464,7 +1464,7 @@ impl<'a> Resolver<'a> { // checker does not check definition kinds and otherwise expects // parameters to already be typed. 
if self.interner.definition_type(hir_ident.id) == Type::Error { - let typ = Type::polymorphic_integer(self.interner); + let typ = Type::polymorphic_integer_or_field(self.interner); self.interner.push_definition_type(hir_ident.id, typ); } } diff --git a/compiler/noirc_frontend/src/hir/type_check/expr.rs b/compiler/noirc_frontend/src/hir/type_check/expr.rs index a669a4a246e..b78f07c88f2 100644 --- a/compiler/noirc_frontend/src/hir/type_check/expr.rs +++ b/compiler/noirc_frontend/src/hir/type_check/expr.rs @@ -104,7 +104,7 @@ impl<'interner> TypeChecker<'interner> { Type::Array(Box::new(length), Box::new(elem_type)) } HirLiteral::Bool(_) => Type::Bool, - HirLiteral::Integer(_, _) => Type::polymorphic_integer(self.interner), + HirLiteral::Integer(_, _) => Type::polymorphic_integer_or_field(self.interner), HirLiteral::Str(string) => { let len = Type::Constant(string.len() as u64); Type::String(Box::new(len)) @@ -529,13 +529,15 @@ impl<'interner> TypeChecker<'interner> { let index_type = self.check_expression(&index_expr.index); let span = self.interner.expr_span(&index_expr.index); - index_type.unify(&Type::polymorphic_integer(self.interner), &mut self.errors, || { - TypeCheckError::TypeMismatch { + index_type.unify( + &Type::polymorphic_integer_or_field(self.interner), + &mut self.errors, + || TypeCheckError::TypeMismatch { expected_typ: "an integer".to_owned(), expr_typ: index_type.to_string(), expr_span: span, - } - }); + }, + ); // When writing `a[i]`, if `a : &mut ...` then automatically dereference `a` as many // times as needed to get the underlying array. @@ -807,43 +809,13 @@ impl<'interner> TypeChecker<'interner> { // Matches on TypeVariable must be first to follow any type // bindings. 
- (TypeVariable(int, int_kind), other) | (other, TypeVariable(int, int_kind)) => { - if let TypeBinding::Bound(binding) = &*int.borrow() { + (TypeVariable(var, _), other) | (other, TypeVariable(var, _)) => { + if let TypeBinding::Bound(binding) = &*var.borrow() { return self.comparator_operand_type_rules(other, binding, op, span); } - if !op.kind.is_valid_for_field_type() && (other.is_bindable() || other.is_field()) { - let other = other.follow_bindings(); - - self.push_delayed_type_check(Box::new(move || { - if other.is_field() || other.is_bindable() { - Err(TypeCheckError::InvalidComparisonOnField { span }) - } else { - Ok(()) - } - })); - } - - let mut bindings = TypeBindings::new(); - if other - .try_bind_to_polymorphic_int( - int, - &mut bindings, - *int_kind == TypeVariableKind::Integer, - ) - .is_ok() - || other == &Type::Error - { - Type::apply_type_bindings(bindings); - Ok((Bool, false)) - } else { - Err(TypeCheckError::TypeMismatchWithSource { - expected: lhs_type.clone(), - actual: rhs_type.clone(), - span, - source: Source::Binary, - }) - } + self.bind_type_variables_for_infix(lhs_type, op, rhs_type, span); + Ok((Bool, false)) } (Alias(alias, args), other) | (other, Alias(alias, args)) => { let alias = alias.borrow().get_type(args); @@ -1071,6 +1043,38 @@ impl<'interner> TypeChecker<'interner> { } } + fn bind_type_variables_for_infix( + &mut self, + lhs_type: &Type, + op: &HirBinaryOp, + rhs_type: &Type, + span: Span, + ) { + self.unify(lhs_type, rhs_type, || TypeCheckError::TypeMismatchWithSource { + expected: lhs_type.clone(), + actual: rhs_type.clone(), + source: Source::Binary, + span, + }); + + // In addition to unifying both types, we also have to bind either + // the lhs or rhs to an integer type variable. This ensures if both lhs + // and rhs are type variables, that they will have the correct integer + // type variable kind instead of TypeVariableKind::Normal. 
+ let target = if op.kind.is_valid_for_field_type() { + Type::polymorphic_integer_or_field(self.interner) + } else { + Type::polymorphic_integer(self.interner) + }; + + self.unify(lhs_type, &target, || TypeCheckError::TypeMismatchWithSource { + expected: lhs_type.clone(), + actual: rhs_type.clone(), + source: Source::Binary, + span, + }); + } + // Given a binary operator and another type. This method will produce the output type // and a boolean indicating whether to use the trait impl corresponding to the operator // or not. A value of false indicates the caller to use a primitive operation for this @@ -1093,58 +1097,15 @@ impl<'interner> TypeChecker<'interner> { // Matches on TypeVariable must be first so that we follow any type // bindings. - (TypeVariable(int, int_kind), other) | (other, TypeVariable(int, int_kind)) => { + (TypeVariable(int, _), other) | (other, TypeVariable(int, _)) => { if let TypeBinding::Bound(binding) = &*int.borrow() { return self.infix_operand_type_rules(binding, op, other, span); } - if (op.is_modulo() || op.is_bitwise()) && (other.is_bindable() || other.is_field()) - { - let other = other.follow_bindings(); - let kind = op.kind; - // This will be an error if these types later resolve to a Field, or stay - // polymorphic as the bit size will be unknown. Delay this error until the function - // finishes resolving so we can still allow cases like `let x: u8 = 1 << 2;`. 
- self.push_delayed_type_check(Box::new(move || { - if other.is_field() { - if kind == BinaryOpKind::Modulo { - Err(TypeCheckError::FieldModulo { span }) - } else { - Err(TypeCheckError::InvalidBitwiseOperationOnField { span }) - } - } else if other.is_bindable() { - Err(TypeCheckError::AmbiguousBitWidth { span }) - } else if kind.is_bit_shift() && other.is_signed() { - Err(TypeCheckError::TypeCannotBeUsed { - typ: other, - place: "bit shift", - span, - }) - } else { - Ok(()) - } - })); - } - let mut bindings = TypeBindings::new(); - if other - .try_bind_to_polymorphic_int( - int, - &mut bindings, - *int_kind == TypeVariableKind::Integer, - ) - .is_ok() - || other == &Type::Error - { - Type::apply_type_bindings(bindings); - Ok((other.clone(), false)) - } else { - Err(TypeCheckError::TypeMismatchWithSource { - expected: lhs_type.clone(), - actual: rhs_type.clone(), - source: Source::Binary, - span, - }) - } + self.bind_type_variables_for_infix(lhs_type, op, rhs_type, span); + + // Both types are unified so the choice of which to return is arbitrary + Ok((other.clone(), false)) } (Alias(alias, args), other) | (other, Alias(alias, args)) => { let alias = alias.borrow().get_type(args); @@ -1169,11 +1130,12 @@ impl<'interner> TypeChecker<'interner> { } // The result of two Fields is always a witness (FieldElement, FieldElement) => { - if op.is_bitwise() { - return Err(TypeCheckError::InvalidBitwiseOperationOnField { span }); - } - if op.is_modulo() { - return Err(TypeCheckError::FieldModulo { span }); + if !op.kind.is_valid_for_field_type() { + if op.kind == BinaryOpKind::Modulo { + return Err(TypeCheckError::FieldModulo { span }); + } else { + return Err(TypeCheckError::InvalidBitwiseOperationOnField { span }); + } } Ok((FieldElement, false)) } @@ -1213,7 +1175,7 @@ impl<'interner> TypeChecker<'interner> { self.errors .push(TypeCheckError::InvalidUnaryOp { kind: rhs_type.to_string(), span }); } - let expected = Type::polymorphic_integer(self.interner); + let expected = 
Type::polymorphic_integer_or_field(self.interner); rhs_type.unify(&expected, &mut self.errors, || TypeCheckError::InvalidUnaryOp { kind: rhs_type.to_string(), span, diff --git a/compiler/noirc_frontend/src/hir/type_check/mod.rs b/compiler/noirc_frontend/src/hir/type_check/mod.rs index 225f5756d7a..21d1c75a0f2 100644 --- a/compiler/noirc_frontend/src/hir/type_check/mod.rs +++ b/compiler/noirc_frontend/src/hir/type_check/mod.rs @@ -21,10 +21,7 @@ use crate::{ use self::errors::Source; -type TypeCheckFn = Box Result<(), TypeCheckError>>; - pub struct TypeChecker<'interner> { - delayed_type_checks: Vec, interner: &'interner mut NodeInterner, errors: Vec, current_function: Option, @@ -80,15 +77,7 @@ pub fn type_check_func(interner: &mut NodeInterner, func_id: FuncId) -> Vec (noirc_e impl<'interner> TypeChecker<'interner> { fn new(interner: &'interner mut NodeInterner) -> Self { - Self { - delayed_type_checks: Vec::new(), - interner, - errors: Vec::new(), - trait_constraints: Vec::new(), - current_function: None, - } - } - - pub fn push_delayed_type_check(&mut self, f: TypeCheckFn) { - self.delayed_type_checks.push(f); + Self { interner, errors: Vec::new(), trait_constraints: Vec::new(), current_function: None } } - fn check_function_body(&mut self, body: &ExprId) -> (Type, Vec) { - let body_type = self.check_expression(body); - (body_type, std::mem::take(&mut self.delayed_type_checks)) + fn check_function_body(&mut self, body: &ExprId) -> Type { + self.check_expression(body) } pub fn check_global( @@ -198,7 +176,6 @@ impl<'interner> TypeChecker<'interner> { interner: &'interner mut NodeInterner, ) -> Vec { let mut this = Self { - delayed_type_checks: Vec::new(), interner, errors: Vec::new(), trait_constraints: Vec::new(), diff --git a/compiler/noirc_frontend/src/hir/type_check/stmt.rs b/compiler/noirc_frontend/src/hir/type_check/stmt.rs index 370b4ee7b17..358bea86922 100644 --- a/compiler/noirc_frontend/src/hir/type_check/stmt.rs +++ 
b/compiler/noirc_frontend/src/hir/type_check/stmt.rs @@ -73,13 +73,10 @@ impl<'interner> TypeChecker<'interner> { let expected_type = Type::polymorphic_integer(self.interner); - self.unify(&start_range_type, &expected_type, || { - TypeCheckError::TypeCannotBeUsed { - typ: start_range_type.clone(), - place: "for loop", - span: range_span, - } - .add_context("The range of a loop must be known at compile-time") + self.unify(&start_range_type, &expected_type, || TypeCheckError::TypeCannotBeUsed { + typ: start_range_type.clone(), + place: "for loop", + span: range_span, }); self.interner.push_definition_type(for_loop.identifier.id, start_range_type); @@ -235,7 +232,7 @@ impl<'interner> TypeChecker<'interner> { let expr_span = self.interner.expr_span(index); index_type.unify( - &Type::polymorphic_integer(self.interner), + &Type::polymorphic_integer_or_field(self.interner), &mut self.errors, || TypeCheckError::TypeMismatch { expected_typ: "an integer".to_owned(), diff --git a/compiler/noirc_frontend/src/hir_def/expr.rs b/compiler/noirc_frontend/src/hir_def/expr.rs index 75ed68af0f6..b4c590de491 100644 --- a/compiler/noirc_frontend/src/hir_def/expr.rs +++ b/compiler/noirc_frontend/src/hir_def/expr.rs @@ -94,19 +94,6 @@ impl HirBinaryOp { let location = Location::new(op.span(), file); HirBinaryOp { location, kind } } - - pub fn is_bitwise(&self) -> bool { - use BinaryOpKind::*; - matches!(self.kind, And | Or | Xor | ShiftRight | ShiftLeft) - } - - pub fn is_bit_shift(&self) -> bool { - self.kind.is_bit_shift() - } - - pub fn is_modulo(&self) -> bool { - self.kind.is_modulo() - } } #[derive(Debug, Clone)] diff --git a/compiler/noirc_frontend/src/hir_def/types.rs b/compiler/noirc_frontend/src/hir_def/types.rs index d4d8a948460..e105da1ccf0 100644 --- a/compiler/noirc_frontend/src/hir_def/types.rs +++ b/compiler/noirc_frontend/src/hir_def/types.rs @@ -571,13 +571,20 @@ impl Type { Type::TypeVariable(var, kind) } - pub fn polymorphic_integer(interner: &mut NodeInterner) -> Type 
{ + pub fn polymorphic_integer_or_field(interner: &mut NodeInterner) -> Type { let id = interner.next_type_variable_id(); let kind = TypeVariableKind::IntegerOrField; let var = TypeVariable::unbound(id); Type::TypeVariable(var, kind) } + pub fn polymorphic_integer(interner: &mut NodeInterner) -> Type { + let id = interner.next_type_variable_id(); + let kind = TypeVariableKind::Integer; + let var = TypeVariable::unbound(id); + Type::TypeVariable(var, kind) + } + /// A bit of an awkward name for this function - this function returns /// true for type variables or polymorphic integers which are unbound. /// NamedGenerics will always be false as although they are bindable, @@ -964,7 +971,7 @@ impl Type { /// Try to bind a PolymorphicInt variable to self, succeeding if self is an integer, field, /// other PolymorphicInt type, or type variable. If successful, the binding is placed in the /// given TypeBindings map rather than linked immediately. - pub fn try_bind_to_polymorphic_int( + fn try_bind_to_polymorphic_int( &self, var: &TypeVariable, bindings: &mut TypeBindings, @@ -977,7 +984,11 @@ impl Type { let this = self.substitute(bindings).follow_bindings(); match &this { - Type::FieldElement | Type::Integer(..) => { + Type::Integer(..) 
=> { + bindings.insert(target_id, (var.clone(), this)); + Ok(()) + } + Type::FieldElement if !only_integer => { bindings.insert(target_id, (var.clone(), this)); Ok(()) } diff --git a/compiler/noirc_frontend/src/monomorphization/mod.rs b/compiler/noirc_frontend/src/monomorphization/mod.rs index 0f243e47bbe..2e714da21c6 100644 --- a/compiler/noirc_frontend/src/monomorphization/mod.rs +++ b/compiler/noirc_frontend/src/monomorphization/mod.rs @@ -27,8 +27,8 @@ use crate::{ }, node_interner::{self, DefinitionKind, NodeInterner, StmtId, TraitImplKind, TraitMethodId}, token::FunctionAttribute, - ContractFunctionType, FunctionKind, IntegerBitSize, Type, TypeBinding, TypeBindings, - TypeVariable, TypeVariableKind, UnaryOp, Visibility, + ContractFunctionType, FunctionKind, IntegerBitSize, Signedness, Type, TypeBinding, + TypeBindings, TypeVariable, TypeVariableKind, UnaryOp, Visibility, }; use self::ast::{Definition, FuncId, Function, LocalId, Program}; @@ -1107,7 +1107,8 @@ impl<'interner> Monomorphizer<'interner> { return match opcode.as_str() { "modulus_num_bits" => { let bits = (FieldElement::max_num_bits() as u128).into(); - let typ = ast::Type::Field; + let typ = + ast::Type::Integer(Signedness::Unsigned, IntegerBitSize::SixtyFour); Some(ast::Expression::Literal(ast::Literal::Integer(bits, typ, location))) } "zeroed" => { diff --git a/compiler/noirc_frontend/src/tests.rs b/compiler/noirc_frontend/src/tests.rs index 8a56b337398..c18379f1c26 100644 --- a/compiler/noirc_frontend/src/tests.rs +++ b/compiler/noirc_frontend/src/tests.rs @@ -956,7 +956,7 @@ mod test { #[test] fn resolve_for_expr() { let src = r#" - fn main(x : Field) { + fn main(x : u64) { for i in 1..20 { let _z = x + i; }; diff --git a/noir_stdlib/src/array.nr b/noir_stdlib/src/array.nr index 995af6c4c6f..7871b1a6f9a 100644 --- a/noir_stdlib/src/array.nr +++ b/noir_stdlib/src/array.nr @@ -4,10 +4,10 @@ use crate::cmp::{Ord}; // by the methods in the `slice` module impl [T; N] { #[builtin(array_len)] - pub 
fn len(self) -> Field {} + pub fn len(self) -> u64 {} pub fn sort(self) -> Self where T: Ord { - self.sort_via(|a, b| a <= b) + self.sort_via(|a: T, b: T| a <= b) } pub fn sort_via(self, ordering: fn[Env](T, T) -> bool) -> Self { @@ -31,7 +31,7 @@ impl [T; N] { } /// Returns the index of the elements in the array that would sort it, using the provided custom sorting function. - unconstrained fn get_sorting_index(self, ordering: fn[Env](T, T) -> bool) -> [Field; N] { + unconstrained fn get_sorting_index(self, ordering: fn[Env](T, T) -> bool) -> [u64; N] { let mut result = [0;N]; let mut a = self; for i in 0..N { @@ -117,7 +117,7 @@ impl [T; N] { // helper function used to look up the position of a value in an array of Field // Note that function returns 0 if the value is not found -unconstrained fn find_index(a: [Field;N], find: Field) -> Field { +unconstrained fn find_index(a: [u64; N], find: u64) -> u64 { let mut result = 0; for i in 0..a.len() { if a[i] == find { @@ -125,4 +125,4 @@ unconstrained fn find_index(a: [Field;N], find: Field) -> Field { } } result -} \ No newline at end of file +} diff --git a/noir_stdlib/src/collections/bounded_vec.nr b/noir_stdlib/src/collections/bounded_vec.nr index 332fefa63f9..a4aa4823f38 100644 --- a/noir_stdlib/src/collections/bounded_vec.nr +++ b/noir_stdlib/src/collections/bounded_vec.nr @@ -1,8 +1,6 @@ struct BoundedVec { storage: [T; MaxLen], - // TODO: change this to return a u64 as Noir now - // uses u64 for indexing - len: Field, + len: u64, empty_value: T, } @@ -11,27 +9,27 @@ impl BoundedVec { BoundedVec { storage: [initial_value; MaxLen], len: 0, empty_value: initial_value } } - pub fn get(mut self: Self, index: Field) -> T { - assert(index as u64 < self.len as u64); + pub fn get(mut self: Self, index: u64) -> T { + assert(index as u64 < self.len); self.storage[index] } - pub fn get_unchecked(mut self: Self, index: Field) -> T { + pub fn get_unchecked(mut self: Self, index: u64) -> T { self.storage[index] } pub fn 
push(&mut self, elem: T) { - assert(self.len as u64 < MaxLen as u64, "push out of bounds"); + assert(self.len < MaxLen as u64, "push out of bounds"); self.storage[self.len] = elem; self.len += 1; } - pub fn len(self) -> Field { + pub fn len(self) -> u64 { self.len } - pub fn max_len(_self: BoundedVec) -> Field { + pub fn max_len(_self: BoundedVec) -> u64{ MaxLen } @@ -59,7 +57,7 @@ impl BoundedVec { for i in 0..Len { exceeded_len |= i == append_len; if !exceeded_len { - self.storage[self.len + (i as Field)] = vec.get_unchecked(i as Field); + self.storage[self.len + i] = vec.get_unchecked(i); } } self.len = new_len; @@ -85,4 +83,4 @@ impl BoundedVec { } ret } -} \ No newline at end of file +} diff --git a/noir_stdlib/src/collections/vec.nr b/noir_stdlib/src/collections/vec.nr index 43d68e1d1e7..2e7945be827 100644 --- a/noir_stdlib/src/collections/vec.nr +++ b/noir_stdlib/src/collections/vec.nr @@ -17,7 +17,7 @@ impl Vec { /// Get an element from the vector at the given index. /// Panics if the given index /// points beyond the end of the vector. 
- pub fn get(self, index: Field) -> T { + pub fn get(self, index: u64) -> T { self.slice[index] } @@ -40,20 +40,20 @@ impl Vec { /// Insert an element at a specified index, shifting all elements /// after it to the right - pub fn insert(&mut self, index: Field, elem: T) { + pub fn insert(&mut self, index: u64, elem: T) { self.slice = self.slice.insert(index, elem); } /// Remove an element at a specified index, shifting all elements /// after it to the left, returning the removed element - pub fn remove(&mut self, index: Field) -> T { + pub fn remove(&mut self, index: u64) -> T { let (new_slice, elem) = self.slice.remove(index); self.slice = new_slice; elem } /// Returns the number of elements in the vector - pub fn len(self) -> Field { + pub fn len(self) -> u64 { self.slice.len() } } diff --git a/noir_stdlib/src/field.nr b/noir_stdlib/src/field.nr index 66fb50119f9..a7278d85999 100644 --- a/noir_stdlib/src/field.nr +++ b/noir_stdlib/src/field.nr @@ -89,7 +89,7 @@ impl Field { } #[builtin(modulus_num_bits)] -pub fn modulus_num_bits() -> Field {} +pub fn modulus_num_bits() -> u64 {} #[builtin(modulus_be_bits)] pub fn modulus_be_bits() -> [u1] {} diff --git a/noir_stdlib/src/hash/poseidon.nr b/noir_stdlib/src/hash/poseidon.nr index 3f4de73c0db..b1a7c4a2367 100644 --- a/noir_stdlib/src/hash/poseidon.nr +++ b/noir_stdlib/src/hash/poseidon.nr @@ -21,7 +21,7 @@ pub fn config( // Input checks let mul = crate::wrapping_mul(t as u8, (rf + rp)); assert(mul == ark.len() as u8); - assert(t * t == mds.len()); + assert(t * t == mds.len() as Field); assert(alpha != 0); PoseidonConfig { t, rf, rp, alpha, ark, mds } @@ -30,7 +30,7 @@ pub fn config( fn permute(pos_conf: PoseidonConfig, mut state: [Field; O]) -> [Field; O] { let PoseidonConfig {t, rf, rp, alpha, ark, mds} = pos_conf; - assert(t == state.len()); + assert(t == state.len() as Field); let mut count = 0; // for r in 0..rf + rp @@ -47,7 +47,7 @@ fn permute(pos_conf: PoseidonConfig, mut state: [Field; O]) -> [F } state = 
apply_matrix(mds, state); // Apply MDS matrix - count = count + t; + count = count + t as u64; } state @@ -85,7 +85,7 @@ fn absorb( fn check_security(rate: Field, width: Field, security: Field) -> bool { let n = modulus_num_bits(); - ((n - 1) * (width - rate) / 2) as u8 > security as u8 + ((n - 1) as Field * (width - rate) / 2) as u8 > security as u8 } // A*x where A is an n x n matrix in row-major order and x an n-vector fn apply_matrix(a: [Field; M], x: [Field; N]) -> [Field; N] { diff --git a/noir_stdlib/src/hash/poseidon/bn254.nr b/noir_stdlib/src/hash/poseidon/bn254.nr index 0db6d9546dc..37b08e3c8fb 100644 --- a/noir_stdlib/src/hash/poseidon/bn254.nr +++ b/noir_stdlib/src/hash/poseidon/bn254.nr @@ -9,12 +9,12 @@ use crate::hash::poseidon::apply_matrix; #[field(bn254)] pub fn permute(pos_conf: PoseidonConfig, mut state: [Field; O]) -> [Field; O] { let PoseidonConfig {t, rf: config_rf, rp: config_rp, alpha, ark, mds} = pos_conf; - let rf = 8; - let rp = [56, 57, 56, 60, 60, 63, 64, 63, 60, 66, 60, 65, 70, 60, 64, 68][state.len() - 2]; + let rf: u8 = 8; + let rp: u8 = [56, 57, 56, 60, 60, 63, 64, 63, 60, 66, 60, 65, 70, 60, 64, 68][state.len() - 2]; - assert(t == state.len()); - assert(rf == config_rf as Field); - assert(rp == config_rp as Field); + assert(t == state.len() as Field); + assert(rf == config_rf); + assert(rp == config_rp); let mut count = 0; // First half of full rounds @@ -27,7 +27,7 @@ pub fn permute(pos_conf: PoseidonConfig, mut state: [Field; O]) - } state = apply_matrix(mds, state); // Apply MDS matrix - count = count + t; + count = count + t as u64; } // Partial rounds for _r in 0..rp { @@ -37,7 +37,7 @@ pub fn permute(pos_conf: PoseidonConfig, mut state: [Field; O]) - state[0] = state[0].pow_32(alpha); state = apply_matrix(mds, state); // Apply MDS matrix - count = count + t; + count = count + t as u64; } // Second half of full rounds for _r in 0..rf / 2 { @@ -49,7 +49,7 @@ pub fn permute(pos_conf: PoseidonConfig, mut state: [Field; O]) - } 
state = apply_matrix(mds, state); // Apply MDS matrix - count = count + t; + count = count + t as u64; } state diff --git a/noir_stdlib/src/slice.nr b/noir_stdlib/src/slice.nr index aa4b73edc1a..bb5c43e497b 100644 --- a/noir_stdlib/src/slice.nr +++ b/noir_stdlib/src/slice.nr @@ -24,13 +24,13 @@ impl [T] { /// Insert an element at a specified index, shifting all elements /// after it to the right #[builtin(slice_insert)] - pub fn insert(self, index: Field, elem: T) -> Self { } + pub fn insert(self, index: u64, elem: T) -> Self { } /// Remove an element at a specified index, shifting all elements /// after it to the left, returning the altered slice and /// the removed element #[builtin(slice_remove)] - pub fn remove(self, index: Field) -> (Self, T) { } + pub fn remove(self, index: u64) -> (Self, T) { } // Append each element of the `other` slice to the end of `self`. // This returns a new slice and leaves both input slices unchanged. diff --git a/test_programs/execution_success/array_len/src/main.nr b/test_programs/execution_success/array_len/src/main.nr index b60762f4636..f846cfb9844 100644 --- a/test_programs/execution_success/array_len/src/main.nr +++ b/test_programs/execution_success/array_len/src/main.nr @@ -1,12 +1,12 @@ -fn len_plus_1(array: [T; N]) -> Field { +fn len_plus_1(array: [T; N]) -> u64 { array.len() + 1 } -fn add_lens(a: [T; N], b: [Field; M]) -> Field { +fn add_lens(a: [T; N], b: [Field; M]) -> u64 { a.len() + b.len() } -fn nested_call(b: [Field; N]) -> Field { +fn nested_call(b: [Field; N]) -> u64 { len_plus_1(b) } diff --git a/test_programs/execution_success/brillig_cow_regression/src/main.nr b/test_programs/execution_success/brillig_cow_regression/src/main.nr index 974c17dfbc9..74aeda18261 100644 --- a/test_programs/execution_success/brillig_cow_regression/src/main.nr +++ b/test_programs/execution_success/brillig_cow_regression/src/main.nr @@ -1,12 +1,12 @@ // Tests a performance regression found in aztec-packages with brillig cow optimization 
-global MAX_NEW_COMMITMENTS_PER_TX: Field = 64; -global MAX_NEW_NULLIFIERS_PER_TX: Field = 64; -global MAX_NEW_L2_TO_L1_MSGS_PER_TX: Field = 2; -global MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX: Field = 16; -global MAX_NEW_CONTRACTS_PER_TX: Field = 1; -global NUM_ENCRYPTED_LOGS_HASHES_PER_TX: Field = 1; -global NUM_UNENCRYPTED_LOGS_HASHES_PER_TX: Field = 1; +global MAX_NEW_COMMITMENTS_PER_TX = 64; +global MAX_NEW_NULLIFIERS_PER_TX = 64; +global MAX_NEW_L2_TO_L1_MSGS_PER_TX = 2; +global MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX = 16; +global MAX_NEW_CONTRACTS_PER_TX = 1; +global NUM_ENCRYPTED_LOGS_HASHES_PER_TX = 1; +global NUM_UNENCRYPTED_LOGS_HASHES_PER_TX = 1; global NUM_FIELDS_PER_SHA256 = 2; global CALLDATA_HASH_INPUT_SIZE = 169; global CALL_DATA_HASH_LOG_FIELDS = 4; diff --git a/test_programs/execution_success/brillig_oracle/Prover.toml b/test_programs/execution_success/brillig_oracle/Prover.toml index 2b26a4ce471..161f4fb62c0 100644 --- a/test_programs/execution_success/brillig_oracle/Prover.toml +++ b/test_programs/execution_success/brillig_oracle/Prover.toml @@ -1,2 +1,2 @@ -x = "10" +_x = "10" diff --git a/test_programs/execution_success/brillig_oracle/src/main.nr b/test_programs/execution_success/brillig_oracle/src/main.nr index 490b7b605e3..6a9e5806621 100644 --- a/test_programs/execution_success/brillig_oracle/src/main.nr +++ b/test_programs/execution_success/brillig_oracle/src/main.nr @@ -2,7 +2,7 @@ use dep::std::slice; use dep::std::test::OracleMock; // Tests oracle usage in brillig/unconstrained functions -fn main(x: Field) { +fn main(_x: Field) { let size = 20; // TODO: Add a method along the lines of `(0..size).to_array()`. 
let mut mock_oracle_response = [0; 20]; @@ -17,7 +17,7 @@ fn main(x: Field) { let _ = OracleMock::mock("get_number_sequence").with_params(size).returns((20, mock_oracle_response)); let _ = OracleMock::mock("get_reverse_number_sequence").with_params(size).returns((20, reversed_mock_oracle_response)); - get_number_sequence_wrapper(size); + get_number_sequence_wrapper(size as Field); } // Define oracle functions which we have mocked above diff --git a/test_programs/execution_success/brillig_slices/src/main.nr b/test_programs/execution_success/brillig_slices/src/main.nr index 48bc8a76bb8..847c41de25c 100644 --- a/test_programs/execution_success/brillig_slices/src/main.nr +++ b/test_programs/execution_success/brillig_slices/src/main.nr @@ -131,7 +131,7 @@ unconstrained fn merge_slices_mutate_in_loop(x: Field, y: Field) -> [Field] { let mut slice = [0; 2]; if x != y { for i in 0..5 { - slice = slice.push_back(i); + slice = slice.push_back(i as Field); } } else { slice = slice.push_back(x); diff --git a/test_programs/execution_success/global_consts/src/baz.nr b/test_programs/execution_success/global_consts/src/baz.nr index 4271de81118..384cf9d3569 100644 --- a/test_programs/execution_success/global_consts/src/baz.nr +++ b/test_programs/execution_success/global_consts/src/baz.nr @@ -1,5 +1,5 @@ pub fn from_baz(x: [Field; crate::foo::MAGIC_NUMBER]) { for i in 0..crate::foo::MAGIC_NUMBER { - assert(x[i] == crate::foo::MAGIC_NUMBER); + assert(x[i] == crate::foo::MAGIC_NUMBER as Field); } } diff --git a/test_programs/execution_success/global_consts/src/foo.nr b/test_programs/execution_success/global_consts/src/foo.nr index 7b0ae75b74b..413b9c3a74b 100644 --- a/test_programs/execution_success/global_consts/src/foo.nr +++ b/test_programs/execution_success/global_consts/src/foo.nr @@ -1,11 +1,11 @@ mod bar; -global N: Field = 5; -global MAGIC_NUMBER: Field = 3; +global N: u64 = 5; +global MAGIC_NUMBER: u64 = 3; global TYPE_INFERRED = 42; pub fn from_foo(x: [Field; bar::N]) { for 
i in 0..bar::N { - assert(x[i] == bar::N); + assert(x[i] == bar::N as Field); } } diff --git a/test_programs/execution_success/global_consts/src/foo/bar.nr b/test_programs/execution_success/global_consts/src/foo/bar.nr index b8d0b85b0f3..5404c9cf1e3 100644 --- a/test_programs/execution_success/global_consts/src/foo/bar.nr +++ b/test_programs/execution_success/global_consts/src/foo/bar.nr @@ -1,5 +1,5 @@ -global N: Field = 5; +global N: u64 = 5; pub fn from_bar(x: Field) -> Field { - x * N + x * N as Field } diff --git a/test_programs/execution_success/global_consts/src/main.nr b/test_programs/execution_success/global_consts/src/main.nr index 25cc0e4dd36..3c8ecc67a0c 100644 --- a/test_programs/execution_success/global_consts/src/main.nr +++ b/test_programs/execution_success/global_consts/src/main.nr @@ -3,7 +3,7 @@ mod baz; global M: Field = 32; global L: Field = 10; // Unused globals currently allowed -global N: Field = 5; +global N: u64 = 5; global T_LEN = 2; // Type inference is allowed on globals // Globals can reference other globals @@ -36,12 +36,12 @@ fn main( let test_struct = Dummy { x: d, y: c }; for i in 0..foo::MAGIC_NUMBER { - assert(c[i] == foo::MAGIC_NUMBER); - assert(test_struct.y[i] == foo::MAGIC_NUMBER); + assert(c[i] == foo::MAGIC_NUMBER as Field); + assert(test_struct.y[i] == foo::MAGIC_NUMBER as Field); assert(test_struct.y[i] != NESTED[1][0].v); } - assert(N != M); + assert(N as Field != M); let expected: u32 = 42; assert(foo::TYPE_INFERRED == expected); @@ -62,12 +62,12 @@ fn main( arrays_neq(a, b); - let t: [Field; T_LEN] = [N, M]; + let t: [Field; T_LEN] = [N as Field, M]; assert(t[1] == 32); assert(15 == my_submodule::my_helper()); - let add_submodules_N = my_submodule::N + foo::bar::N; + let add_submodules_N = my_submodule::N + foo::bar::N as Field; assert(15 == add_submodules_N); let add_from_bar_N = my_submodule::N + foo::bar::from_bar(1); assert(15 == add_from_bar_N); @@ -75,7 +75,7 @@ fn main( let sugared = [0; my_submodule::N + 2]; 
assert(sugared[my_submodule::N + 1] == 0); - let arr: [Field; my_submodule::N] = [N; 10]; + let arr: [Field; my_submodule::N] = [N as Field; 10]; assert((arr[0] == 5) & (arr[9] == 5)); foo::from_foo(d); diff --git a/test_programs/execution_success/slice_dynamic_index/src/main.nr b/test_programs/execution_success/slice_dynamic_index/src/main.nr index 374d2ba4c26..41fc9a645c1 100644 --- a/test_programs/execution_success/slice_dynamic_index/src/main.nr +++ b/test_programs/execution_success/slice_dynamic_index/src/main.nr @@ -6,7 +6,7 @@ fn main(x: Field) { fn regression_dynamic_slice_index(x: Field, y: Field) { let mut slice = []; for i in 0..5 { - slice = slice.push_back(i); + slice = slice.push_back(i as Field); } assert(slice.len() == 5); @@ -124,12 +124,12 @@ fn dynamic_slice_merge_if(mut slice: [Field], x: Field) { assert(first_elem == 12); assert(rest_of_slice.len() == 6); - slice = rest_of_slice.insert(x - 2, 20); + slice = rest_of_slice.insert(x as u64 - 2, 20); assert(slice[2] == 20); assert(slice[6] == 30); assert(slice.len() == 7); - let (removed_slice, removed_elem) = slice.remove(x - 1); + let (removed_slice, removed_elem) = slice.remove(x as u64 - 1); // The deconstructed tuple assigns to the slice but is not seen outside of the if statement // without a direct assignment slice = removed_slice; diff --git a/test_programs/execution_success/slices/src/main.nr b/test_programs/execution_success/slices/src/main.nr index c377d2e5b2f..eca42a660c4 100644 --- a/test_programs/execution_success/slices/src/main.nr +++ b/test_programs/execution_success/slices/src/main.nr @@ -167,7 +167,7 @@ fn merge_slices_mutate_in_loop(x: Field, y: Field) -> [Field] { let mut slice = [0; 2]; if x != y { for i in 0..5 { - slice = slice.push_back(i); + slice = slice.push_back(i as Field); } } else { slice = slice.push_back(x); From 650ffc5053cdca4b6ad2e027fa1f4fd90ef64871 Mon Sep 17 00:00:00 2001 From: Nikita Masych <92444221+NikitaMasych@users.noreply.github.com> Date: Fri, 23 Feb 
2024 16:55:17 +0200 Subject: [PATCH 36/39] feat: Add HashMap to the stdlib (#4242) # Description This PR shall bring HashMap into the `stdlib` of Noir. ## Problem\* Resolves #4241 ## Summary\* Implementation of `HashMap` with open addressing and quadratic probing scheme. Since Noir requires knowing loop bounds (and recursive calls) at compile time, `HashMap` is of fixed capacity and **no** dynamic resize is accomplished with regard to load factor. Furthermore, contribution includes implementation of `PedersenHasher` to be used for now. One can examine potentially better and less heavy prehash functions. I tried to conform with best practices of engineering, however since Noir is in rapid development, there are certain things which may be optimized in future, both from the code style and performance point of view. ## Additional Context I put the `PedersenHasher` among the `poseidon.nr` and `mimc.nr`, so one can consider moving declaration of other pedersen-related functionality there, however that would be a breaking change. ## Documentation\* Check one: - [ ] No documentation needed. - [ ] Documentation included in this PR. - [x] **[Exceptional Case]** Documentation to be submitted in a separate PR. # PR Checklist\* - [x] I have tested the changes locally. - [x] I have formatted the changes with [Prettier](https://prettier.io/) and/or `cargo fmt` on default settings. 
--- noir_stdlib/src/collections.nr | 1 + noir_stdlib/src/collections/map.nr | 456 ++++++++++++++++++ noir_stdlib/src/hash.nr | 52 ++ noir_stdlib/src/hash/pedersen.nr | 24 + .../hashmap_load_factor/Nargo.toml | 6 + .../hashmap_load_factor/Prover.toml | 26 + .../hashmap_load_factor/src/main.nr | 35 ++ .../execution_success/hashmap/Nargo.toml | 6 + .../execution_success/hashmap/Prover.toml | 26 + .../execution_success/hashmap/src/main.nr | 192 ++++++++ .../execution_success/hashmap/src/utils.nr | 10 + 11 files changed, 834 insertions(+) create mode 100644 noir_stdlib/src/collections/map.nr create mode 100644 noir_stdlib/src/hash/pedersen.nr create mode 100644 test_programs/compile_failure/hashmap_load_factor/Nargo.toml create mode 100644 test_programs/compile_failure/hashmap_load_factor/Prover.toml create mode 100644 test_programs/compile_failure/hashmap_load_factor/src/main.nr create mode 100644 test_programs/execution_success/hashmap/Nargo.toml create mode 100644 test_programs/execution_success/hashmap/Prover.toml create mode 100644 test_programs/execution_success/hashmap/src/main.nr create mode 100644 test_programs/execution_success/hashmap/src/utils.nr diff --git a/noir_stdlib/src/collections.nr b/noir_stdlib/src/collections.nr index 177ca96816f..2d952f4d6cd 100644 --- a/noir_stdlib/src/collections.nr +++ b/noir_stdlib/src/collections.nr @@ -1,2 +1,3 @@ mod vec; mod bounded_vec; +mod map; diff --git a/noir_stdlib/src/collections/map.nr b/noir_stdlib/src/collections/map.nr new file mode 100644 index 00000000000..d9eb83ff5dc --- /dev/null +++ b/noir_stdlib/src/collections/map.nr @@ -0,0 +1,456 @@ +use crate::cmp::Eq; +use crate::collections::vec::Vec; +use crate::option::Option; +use crate::default::Default; +use crate::hash::{Hash, Hasher, BuildHasher}; + +// We use load factor α_max = 0.75. +// Upon exceeding it, assert will fail in order to inform the user +// about performance degradation, so that he can adjust the capacity. 
+global MAX_LOAD_FACTOR_NUMERATOR = 3; +global MAX_LOAD_FACTOR_DEN0MINATOR = 4; + +// Hash table with open addressing and quadratic probing. +// Size of the underlying table must be known at compile time. +// It is advised to select capacity N as a power of two, or a prime number +// because utilized probing scheme is best tailored for it. +struct HashMap { + _table: [Slot; N], + + // Amount of valid elements in the map. + _len: u64, + + _build_hasher: B +} + +// Data unit in the HashMap table. +// In case Noir adds support for enums in the future, this +// should be refactored to have three states: +// 1. (key, value) +// 2. (empty) +// 3. (deleted) +struct Slot { + _key_value: Option<(K, V)>, + _is_deleted: bool, +} + +impl Default for Slot{ + fn default() -> Self{ + Slot{ + _key_value: Option::none(), + _is_deleted: false + } + } +} + +impl Slot { + fn is_valid(self) -> bool { + !self._is_deleted & self._key_value.is_some() + } + + fn is_available(self) -> bool { + self._is_deleted | self._key_value.is_none() + } + + fn key_value(self) -> Option<(K, V)> { + self._key_value + } + + fn key_value_unchecked(self) -> (K, V) { + self._key_value.unwrap_unchecked() + } + + fn set(&mut self, key: K, value: V) { + self._key_value = Option::some((key, value)); + self._is_deleted = false; + } + + // Shall not override `_key_value` with Option::none(), + // because we must be able to differentiate empty + // and deleted slots for lookup. + fn mark_deleted(&mut self) { + self._is_deleted = true; + } +} + +// While conducting lookup, we iterate attempt from 0 to N - 1 due to heuristic, +// that if we have went that far without finding desired, +// it is very unlikely to be after - performance will be heavily degraded. +impl HashMap { + // Creates a new instance of HashMap with specified BuildHasher. 
+ pub fn with_hasher(_build_hasher: B) -> Self + where + B: BuildHasher { + let _table = [Slot::default(); N]; + let _len = 0; + Self { _table, _len, _build_hasher } + } + + // Clears the map, removing all key-value entries. + pub fn clear(&mut self) { + self._table = [Slot::default(); N]; + self._len = 0; + } + + // Returns true if the map contains a value for the specified key. + pub fn contains_key( + self, + key: K + ) -> bool + where + K: Hash + Eq, + B: BuildHasher, + H: Hasher { + self.get(key).is_some() + } + + // Returns true if the map contains no elements. + pub fn is_empty(self) -> bool { + self._len == 0 + } + + // Get the Option<(K, V) array of valid entries + // with a length of map capacity. First len() elements + // are safe to unwrap_unchecked(), whilst remaining + // are guaranteed to be Option::none(). + // + // This design is reasoned by compile-time limitations and + // temporary nested slices ban. + pub fn entries(self) -> [Option<(K, V)>; N] { + let mut entries = [Option::none(); N]; + let mut valid_amount = 0; + + for slot in self._table { + if slot.is_valid() { + entries[valid_amount] = slot.key_value(); + valid_amount += 1; + } + } + + let msg = f"Amount of valid elements should have been {self._len} times, but got {valid_amount}."; + assert(valid_amount == self._len, msg); + + entries + } + + // Get the Option array of valid keys + // with a length of map capacity. First len() elements + // are safe to unwrap_unchecked(), whilst remaining + // are guaranteed to be Option::none(). + // + // This design is reasoned by compile-time limitations and + // temporary nested slices ban. 
+ pub fn keys(self) -> [Option; N] { + let mut keys = [Option::none(); N]; + let mut valid_amount = 0; + + for slot in self._table { + if slot.is_valid() { + let (key, _) = slot.key_value_unchecked(); + keys[valid_amount] = Option::some(key); + valid_amount += 1; + } + } + + let msg = f"Amount of valid elements should have been {self._len} times, but got {valid_amount}."; + assert(valid_amount == self._len, msg); + + keys + } + + // Get the Option array of valid values + // with a length of map capacity. First len() elements + // are safe to unwrap_unchecked(), whilst remaining + // are guaranteed to be Option::none(). + // + // This design is reasoned by compile-time limitations and + // temporary nested slices ban. + pub fn values(self) -> [Option; N] { + let mut values = [Option::none(); N]; + let mut valid_amount = 0; + + for slot in self._table { + if slot.is_valid() { + let (_, value) = slot.key_value_unchecked(); + values[valid_amount] = Option::some(value); + valid_amount += 1; + } + } + + let msg = f"Amount of valid elements should have been {self._len} times, but got {valid_amount}."; + assert(valid_amount == self._len, msg); + + values + } + + // For each key-value entry applies mutator function. + pub fn iter_mut( + &mut self, + f: fn(K, V) -> (K, V) + ) + where + K: Eq + Hash, + B: BuildHasher, + H: Hasher { + let mut entries = self.entries(); + let mut new_map = HashMap::with_hasher(self._build_hasher); + + for i in 0..N { + if i < self._len { + let entry = entries[i].unwrap_unchecked(); + let (key, value) = f(entry.0, entry.1); + new_map.insert(key, value); + } + } + + self._table = new_map._table; + } + + // For each key applies mutator function. 
+ pub fn iter_keys_mut( + &mut self, + f: fn(K) -> K + ) + where + K: Eq + Hash, + B: BuildHasher, + H: Hasher { + let mut entries = self.entries(); + let mut new_map = HashMap::with_hasher(self._build_hasher); + + for i in 0..N { + if i < self._len { + let entry = entries[i].unwrap_unchecked(); + let (key, value) = (f(entry.0), entry.1); + new_map.insert(key, value); + } + } + + self._table = new_map._table; + } + + // For each value applies mutator function. + pub fn iter_values_mut(&mut self, f: fn(V) -> V) { + for i in 0..N { + let mut slot = self._table[i]; + if slot.is_valid() { + let (key, value) = slot.key_value_unchecked(); + slot.set(key, f(value)); + self._table[i] = slot; + } + } + } + + // Retains only the elements specified by the predicate. + pub fn retain(&mut self, f: fn(K, V) -> bool) { + for index in 0..N { + let mut slot = self._table[index]; + if slot.is_valid() { + let (key, value) = slot.key_value_unchecked(); + if !f(key, value) { + slot.mark_deleted(); + self._len -= 1; + self._table[index] = slot; + } + } + } + } + + // Amount of active key-value entries. + pub fn len(self) -> u64 { + self._len + } + + // Get the compile-time map capacity. + pub fn capacity(_self: Self) -> u64 { + N + } + + // Get the value by key. If it does not exist, returns none(). + pub fn get( + self, + key: K + ) -> Option + where + K: Eq + Hash, + B: BuildHasher, + H: Hasher { + let mut result = Option::none(); + + let hash = self.hash(key); + let mut break = false; + + for attempt in 0..N { + if !break { + let index = self.quadratic_probe(hash, attempt as u64); + let slot = self._table[index]; + + // Not marked as deleted and has key-value. + if slot.is_valid() { + let (current_key, value) = slot.key_value_unchecked(); + if current_key == key { + result = Option::some(value); + break = true; + } + } + } + } + + result + } + + // Insert key-value entry. In case key was already present, value is overridden. 
+ pub fn insert( + &mut self, + key: K, + value: V + ) + where + K: Eq + Hash, + B: BuildHasher, + H: Hasher { + self.assert_load_factor(); + + let hash = self.hash(key); + let mut break = false; + + for attempt in 0..N { + if !break { + let index = self.quadratic_probe(hash, attempt as u64); + let mut slot = self._table[index]; + let mut insert = false; + + // Either marked as deleted or has unset key-value. + if slot.is_available() { + insert = true; + self._len += 1; + } else { + let (current_key, _) = slot.key_value_unchecked(); + if current_key == key { + insert = true; + } + } + + if insert { + slot.set(key, value); + self._table[index] = slot; + break = true; + } + } + } + } + + // Remove key-value entry. If key is not present, HashMap remains unchanged. + pub fn remove( + &mut self, + key: K + ) + where + K: Eq + Hash, + B: BuildHasher, + H: Hasher { + let hash = self.hash(key); + let mut break = false; + + for attempt in 0..N { + if !break { + let index = self.quadratic_probe(hash, attempt as u64); + let mut slot = self._table[index]; + + // Not marked as deleted and has key-value. + if slot.is_valid() { + let (current_key, _) = slot.key_value_unchecked(); + if current_key == key { + slot.mark_deleted(); + self._table[index] = slot; + self._len -= 1; + break = true; + } + } + } + } + } + + // Apply HashMap's hasher onto key to obtain pre-hash for probing. + fn hash( + self, + key: K + ) -> u64 + where + K: Hash, + B: BuildHasher, + H: Hasher { + let mut hasher = self._build_hasher.build_hasher(); + key.hash(&mut hasher); + hasher.finish() as u64 + } + + // Probing scheme: quadratic function. + // We use 0.5 constant near variadic attempt and attempt^2 monomials. + // This ensures good uniformity of distribution for table sizes + // equal to prime numbers or powers of two. 
+ fn quadratic_probe(_self: Self, hash: u64, attempt: u64) -> u64 { + (hash + (attempt + attempt * attempt) / 2) % N + } + + // Amount of elements in the table in relation to available slots exceeds α_max. + // To avoid a comparatively more expensive division operation + // we conduct cross-multiplication instead. + // n / m >= MAX_LOAD_FACTOR_NUMERATOR / MAX_LOAD_FACTOR_DEN0MINATOR + // n * MAX_LOAD_FACTOR_DEN0MINATOR >= m * MAX_LOAD_FACTOR_NUMERATOR + fn assert_load_factor(self) { + let lhs = self._len * MAX_LOAD_FACTOR_DEN0MINATOR; + let rhs = self._table.len() as u64 * MAX_LOAD_FACTOR_NUMERATOR; + let exceeded = lhs >= rhs; + assert(!exceeded, "Load factor is exceeded, consider increasing the capacity."); + } +} + +// Equality class on HashMap has to test that they have +// equal sets of key-value entries, +// thus one is a subset of the other and vice versa. +impl Eq for HashMap +where + K: Eq + Hash, + V: Eq, + B: BuildHasher, + H: Hasher +{ + fn eq(self, other: HashMap) -> bool{ + let mut equal = false; + + if self.len() == other.len(){ + equal = true; + for slot in self._table{ + // Not marked as deleted and has key-value. 
+ if equal & slot.is_valid(){ + let (key, value) = slot.key_value_unchecked(); + let other_value = other.get(key); + + if other_value.is_none(){ + equal = false; + }else{ + let other_value = other_value.unwrap_unchecked(); + if value != other_value{ + equal = false; + } + } + } + } + } + + equal + } +} + +impl Default for HashMap +where + B: BuildHasher + Default, + H: Hasher + Default +{ + fn default() -> Self{ + let _build_hasher = B::default(); + let map: HashMap = HashMap::with_hasher(_build_hasher); + map + } +} diff --git a/noir_stdlib/src/hash.nr b/noir_stdlib/src/hash.nr index cc864039a90..7a931f7c047 100644 --- a/noir_stdlib/src/hash.nr +++ b/noir_stdlib/src/hash.nr @@ -1,5 +1,8 @@ mod poseidon; mod mimc; +mod pedersen; + +use crate::default::Default; #[foreign(sha256)] // docs:start:sha256 @@ -74,3 +77,52 @@ pub fn poseidon2_permutation(_input: [u8; N], _state_length: u32) -> [u8; N] #[foreign(sha256_compression)] pub fn sha256_compression(_input: [u32; 16], _state: [u32; 8]) -> [u32; 8] {} + +// Generic hashing support. +// Partially ported and impacted by rust. + +// Hash trait shall be implemented per type. +trait Hash{ + fn hash(self, state: &mut H) where H: Hasher; +} + +// Hasher trait shall be implemented by algorithms to provide hash-agnostic means. +// TODO: consider making the types generic here ([u8], [Field], etc.) +trait Hasher{ + fn finish(self) -> Field; + + fn write(&mut self, input: [Field]); +} + +// BuildHasher is a factory trait, responsible for production of specific Hasher. +trait BuildHasher where H: Hasher{ + fn build_hasher(self) -> H; +} + +struct BuildHasherDefault; + +impl BuildHasher for BuildHasherDefault +where + H: Hasher + Default +{ + fn build_hasher(_self: Self) -> H{ + H::default() + } +} + +impl Default for BuildHasherDefault +where + H: Hasher + Default +{ + fn default() -> Self{ + BuildHasherDefault{} + } +} + +// TODO: add implementations for the remainder of primitive types. 
+impl Hash for Field{ + fn hash(self, state: &mut H) where H: Hasher{ + let input: [Field] = [self]; + H::write(state, input); + } +} diff --git a/noir_stdlib/src/hash/pedersen.nr b/noir_stdlib/src/hash/pedersen.nr new file mode 100644 index 00000000000..ace6851099d --- /dev/null +++ b/noir_stdlib/src/hash/pedersen.nr @@ -0,0 +1,24 @@ +use crate::hash::{Hasher, pedersen_hash}; +use crate::default::Default; + +struct PedersenHasher{ + _state: [Field] +} + +impl Hasher for PedersenHasher { + fn finish(self) -> Field { + pedersen_hash(self._state) + } + + fn write(&mut self, input: [Field]){ + self._state = self._state.append(input); + } +} + +impl Default for PedersenHasher{ + fn default() -> Self{ + PedersenHasher{ + _state: [] + } + } +} diff --git a/test_programs/compile_failure/hashmap_load_factor/Nargo.toml b/test_programs/compile_failure/hashmap_load_factor/Nargo.toml new file mode 100644 index 00000000000..92da5a357f4 --- /dev/null +++ b/test_programs/compile_failure/hashmap_load_factor/Nargo.toml @@ -0,0 +1,6 @@ +[package] +name = "hashmap_load_factor" +type = "bin" +authors = [""] + +[dependencies] \ No newline at end of file diff --git a/test_programs/compile_failure/hashmap_load_factor/Prover.toml b/test_programs/compile_failure/hashmap_load_factor/Prover.toml new file mode 100644 index 00000000000..e54319c61e9 --- /dev/null +++ b/test_programs/compile_failure/hashmap_load_factor/Prover.toml @@ -0,0 +1,26 @@ +# Expected 6 key-value entries for hashmap capacity of 8. +# These must be distinct (both key-to-key, and value-to-value) for correct testing. 
+ +[[input]] +key = 2 +value = 17 + +[[input]] +key = 3 +value = 19 + +[[input]] +key = 5 +value = 23 + +[[input]] +key = 7 +value = 29 + +[[input]] +key = 11 +value = 31 + +[[input]] +key = 41 +value = 43 \ No newline at end of file diff --git a/test_programs/compile_failure/hashmap_load_factor/src/main.nr b/test_programs/compile_failure/hashmap_load_factor/src/main.nr new file mode 100644 index 00000000000..ade43f898e1 --- /dev/null +++ b/test_programs/compile_failure/hashmap_load_factor/src/main.nr @@ -0,0 +1,35 @@ +use dep::std::collections::map::HashMap; +use dep::std::hash::BuildHasherDefault; +use dep::std::hash::pedersen::PedersenHasher; + +struct Entry{ + key: Field, + value: Field +} + +global HASHMAP_CAP = 8; +global HASHMAP_LEN = 6; + +fn allocate_hashmap() -> HashMap> { + HashMap::default() +} + +fn main(input: [Entry; HASHMAP_LEN]) { + test_load_factor(input); +} + +// In this test we exceed load factor: +// α_max = 0.75, thus for capacity of 8 and length of 6 +// insertion of new unique key (7-th) should throw assertion error. +fn test_load_factor(input: [Entry; HASHMAP_LEN]) { + let mut hashmap = allocate_hashmap(); + + for entry in input { + hashmap.insert(entry.key, entry.value); + } + + // We use prime numbers for testing, + // therefore it is guaranteed that doubling key we get unique value.
+ let key = input[0].key * 2; + hashmap.insert(key, input[0].value); +} diff --git a/test_programs/execution_success/hashmap/Nargo.toml b/test_programs/execution_success/hashmap/Nargo.toml new file mode 100644 index 00000000000..c09debc9833 --- /dev/null +++ b/test_programs/execution_success/hashmap/Nargo.toml @@ -0,0 +1,6 @@ +[package] +name = "hashmap" +type = "bin" +authors = [""] + +[dependencies] \ No newline at end of file diff --git a/test_programs/execution_success/hashmap/Prover.toml b/test_programs/execution_success/hashmap/Prover.toml new file mode 100644 index 00000000000..84d4c0733e4 --- /dev/null +++ b/test_programs/execution_success/hashmap/Prover.toml @@ -0,0 +1,26 @@ +# Input: 6 key-value entries for hashmap capacity of 8. +# These must be distinct (both key-to-key, and value-to-value) for correct testing. + +[[input]] +key = 2 +value = 17 + +[[input]] +key = 3 +value = 19 + +[[input]] +key = 5 +value = 23 + +[[input]] +key = 7 +value = 29 + +[[input]] +key = 11 +value = 31 + +[[input]] +key = 41 +value = 43 \ No newline at end of file diff --git a/test_programs/execution_success/hashmap/src/main.nr b/test_programs/execution_success/hashmap/src/main.nr new file mode 100644 index 00000000000..597a5c0b7de --- /dev/null +++ b/test_programs/execution_success/hashmap/src/main.nr @@ -0,0 +1,192 @@ +mod utils; + +use dep::std::collections::map::HashMap; +use dep::std::hash::BuildHasherDefault; +use dep::std::hash::pedersen::PedersenHasher; +use dep::std::cmp::Eq; + +use utils::cut; + +type K = Field; +type V = Field; + +// It is more convenient and readable to use structs as input. 
+struct Entry{ + key: Field, + value: Field +} + +global HASHMAP_CAP = 8; +global HASHMAP_LEN = 6; + +global FIELD_CMP = |a: Field, b: Field| a.lt(b); + +global K_CMP = FIELD_CMP; +global V_CMP = FIELD_CMP; +global KV_CMP = |a: (K, V), b: (K, V)| a.0.lt(b.0); + +global ALLOCATE_HASHMAP = || -> HashMap> + HashMap::default(); + +fn main(input: [Entry; HASHMAP_LEN]) { + test_sequential(input[0].key, input[0].value); + test_multiple_equal_insert(input[1].key, input[1].value); + test_value_override(input[2].key, input[2].value, input[3].value); + test_insert_and_methods(input); + test_hashmaps_equality(input); + test_retain(); + test_iterators(); + test_mut_iterators(); +} + +// Insert, get, remove. +fn test_sequential(key: K, value: V) { + let mut hashmap = ALLOCATE_HASHMAP(); + assert(hashmap.is_empty(), "New HashMap should be empty."); + + hashmap.insert(key, value); + assert(hashmap.len() == 1, "HashMap after one insert should have a length of 1 element."); + + let got = hashmap.get(key); + assert(got.is_some(), "Got none value."); + let got = got.unwrap_unchecked(); + assert(value == got, f"Inserted {value} but got {got} for the same key."); + + hashmap.remove(key); + assert(hashmap.is_empty(), "HashMap after one insert and corresponding removal should be empty."); + let got = hashmap.get(key); + assert(got.is_none(), "Value has been removed, but is still available (not none)."); +} + +// Insert same pair several times. +fn test_multiple_equal_insert(key: K, value: V) { + let mut hashmap = ALLOCATE_HASHMAP(); + assert(hashmap.is_empty(), "New HashMap should be empty."); + + for _ in 0..HASHMAP_LEN { + hashmap.insert(key, value); + } + + let len = hashmap.len(); + assert(len == 1, f"HashMap length must be 1, got {len}."); + + let got = hashmap.get(key); + assert(got.is_some(), "Got none value."); + let got = got.unwrap_unchecked(); + assert(value == got, f"Inserted {value} but got {got} for the same key."); +} + +// Override value for existing pair. 
+fn test_value_override(key: K, value: V, new_value: V) { + let mut hashmap = ALLOCATE_HASHMAP(); + assert(hashmap.is_empty(), "New hashmap should be empty."); + + hashmap.insert(key, value); + hashmap.insert(key, new_value); + assert(hashmap.len() == 1, "HashMap length is invalid."); + + let got = hashmap.get(key); + assert(got.is_some(), "Got none value."); + let got = got.unwrap_unchecked(); + assert(got == new_value, f"Expected {new_value}, but got {got}."); +} + +// Insert several distinct pairs and test auxiliary methods. +fn test_insert_and_methods(input: [Entry; HASHMAP_LEN]) { + let mut hashmap = ALLOCATE_HASHMAP(); + assert(hashmap.is_empty(), "New HashMap should be empty."); + + for entry in input { + hashmap.insert(entry.key, entry.value); + } + + assert(hashmap.len() == HASHMAP_LEN, "hashmap.len() does not match input length."); + + for entry in input { + assert(hashmap.contains_key(entry.key), f"Not found inserted key {entry.key}."); + } + + hashmap.clear(); + assert(hashmap.is_empty(), "HashMap after clear() should be empty."); +} + +// Insert several pairs and test retaining. +fn test_retain() { + let mut hashmap = ALLOCATE_HASHMAP(); + assert(hashmap.is_empty(), "New HashMap should be empty."); + + let (key, value) = (5, 11); + hashmap.insert(key, value); + let (key, value) = (2, 13); + hashmap.insert(key, value); + let (key, value) = (11, 5); + hashmap.insert(key, value); + + let predicate = |key: K, value: V| -> bool {key * value == 55}; + hashmap.retain(predicate); + + assert(hashmap.len() == 2, "HashMap should have retained 2 elements."); + assert(hashmap.get(2).is_none(), "Pair should have been removed, since it does not match predicate."); +} + +// Equality trait check.
+fn test_hashmaps_equality(input: [Entry; HASHMAP_LEN]) { + let mut hashmap_1 = ALLOCATE_HASHMAP(); + let mut hashmap_2 = ALLOCATE_HASHMAP(); + + for entry in input { + hashmap_1.insert(entry.key, entry.value); + hashmap_2.insert(entry.key, entry.value); + } + + assert(hashmap_1 == hashmap_2, "HashMaps should be equal."); + + hashmap_2.remove(input[0].key); + + assert(hashmap_1 != hashmap_2, "HashMaps should not be equal."); +} + +// Test entries, keys, values. +fn test_iterators() { + let mut hashmap = ALLOCATE_HASHMAP(); + + hashmap.insert(2, 3); + hashmap.insert(5, 7); + hashmap.insert(11, 13); + + let keys: [K; 3] = cut(hashmap.keys()).map(|k: Option| k.unwrap_unchecked()).sort_via(K_CMP); + let values: [V; 3] = cut(hashmap.values()).map(|v: Option| v.unwrap_unchecked()).sort_via(V_CMP); + let entries: [(K, V); 3] = cut(hashmap.entries()).map(|e: Option<(K, V)>| e.unwrap_unchecked()).sort_via(KV_CMP); + + assert(keys == [2, 5, 11], "Got incorrect iteration of keys."); + assert(values == [3, 7, 13], "Got incorrect iteration of values."); + assert(entries == [(2, 3), (5, 7), (11, 13)], "Got incorrect iteration of entries."); +} + +// Test mutable iteration over keys, values and entries. 
+fn test_mut_iterators() { + let mut hashmap = ALLOCATE_HASHMAP(); + + hashmap.insert(2, 3); + hashmap.insert(5, 7); + hashmap.insert(11, 13); + + let f = |k: K| -> K{ k * 3}; + hashmap.iter_keys_mut(f); + + let f = |v: V| -> V{ v * 5}; + hashmap.iter_values_mut(f); + + let keys: [K; 3] = cut(hashmap.keys()).map(|k: Option| k.unwrap_unchecked()).sort_via(K_CMP); + let values: [V; 3] = cut(hashmap.values()).map(|v: Option| v.unwrap_unchecked()).sort_via(V_CMP); + + assert(keys == [6, 15, 33], f"Got incorrect iteration of keys: {keys}"); + assert(values == [15, 35, 65], "Got incorrect iteration of values."); + + let f = |k: K, v: V| -> (K, V){(k * 2, v * 2)}; + hashmap.iter_mut(f); + + let entries: [(K, V); 3] = cut(hashmap.entries()).map(|e: Option<(K, V)>| e.unwrap_unchecked()).sort_via(KV_CMP); + + assert(entries == [(12, 30), (30, 70), (66, 130)], "Got incorrect iteration of entries."); +} diff --git a/test_programs/execution_success/hashmap/src/utils.nr b/test_programs/execution_success/hashmap/src/utils.nr new file mode 100644 index 00000000000..45c9ca9bbf7 --- /dev/null +++ b/test_programs/execution_success/hashmap/src/utils.nr @@ -0,0 +1,10 @@ +// Compile-time: cuts the M first elements from the [T; N] array. +pub(crate) fn cut(input: [T; N]) -> [T; M] { + assert(M as u64 < N as u64, "M should be less than N."); + + let mut new = [dep::std::unsafe::zeroed(); M]; + for i in 0..M { + new[i] = input[i]; + } + new +} From 16d5f18c68cc3da1d11c98e101e3942d2437c3a8 Mon Sep 17 00:00:00 2001 From: Maxim Vezenov Date: Fri, 23 Feb 2024 15:06:15 +0000 Subject: [PATCH 37/39] chore(ssa): Remove mem2reg run before flattening (#4415) # Description ## Problem\* Before https://github.com/noir-lang/noir/pull/4240 we needed mem2reg to be run as to not panic when fetching slice lengths. ## Summary\* After the linked PR we have an improved strategy for tracking slice capacities by generating a slice capacities map before merging of values.
This should enable us to remove a mem2reg pass that is run before flattening. ## Additional Context ## Documentation\* Check one: - [ ] No documentation needed. - [ ] Documentation included in this PR. - [ ] **[Exceptional Case]** Documentation to be submitted in a separate PR. # PR Checklist\* - [ ] I have tested the changes locally. - [ ] I have formatted the changes with [Prettier](https://prettier.io/) and/or `cargo fmt` on default settings. --- compiler/noirc_evaluator/src/ssa.rs | 5 ----- 1 file changed, 5 deletions(-) diff --git a/compiler/noirc_evaluator/src/ssa.rs b/compiler/noirc_evaluator/src/ssa.rs index d19c4467235..0bb81efe977 100644 --- a/compiler/noirc_evaluator/src/ssa.rs +++ b/compiler/noirc_evaluator/src/ssa.rs @@ -54,11 +54,6 @@ pub(crate) fn optimize_into_acir( .try_run_pass(Ssa::evaluate_assert_constant, "After Assert Constant:")? .try_run_pass(Ssa::unroll_loops, "After Unrolling:")? .run_pass(Ssa::simplify_cfg, "After Simplifying:") - // Run mem2reg before flattening to handle any promotion - // of values that can be accessed after loop unrolling. - // If there are slice mergers uncovered by loop unrolling - // and this pass is missed, slice merging will fail inside of flattening. - .run_pass(Ssa::mem2reg, "After Mem2Reg:") .run_pass(Ssa::flatten_cfg, "After Flattening:") .run_pass(Ssa::remove_bit_shifts, "After Removing Bit Shifts:") // Run mem2reg once more with the flattened CFG to catch any remaining loads/stores From 27c66b3d0741e68ed591ae8a16b47b30bc87175f Mon Sep 17 00:00:00 2001 From: Tom French <15848336+TomAFrench@users.noreply.github.com> Date: Sat, 24 Feb 2024 14:00:22 +0000 Subject: [PATCH 38/39] fix: remove print from monomorphization pass (#4417) # Description ## Problem\* Resolves ## Summary\* We're currently printing out every expression we're monomorphising. ## Additional Context ## Documentation\* Check one: - [x] No documentation needed. - [ ] Documentation included in this PR. 
- [ ] **[Exceptional Case]** Documentation to be submitted in a separate PR. # PR Checklist\* - [x] I have tested the changes locally. - [x] I have formatted the changes with [Prettier](https://prettier.io/) and/or `cargo fmt` on default settings. --- compiler/noirc_evaluator/src/ssa/ssa_gen/mod.rs | 1 - 1 file changed, 1 deletion(-) diff --git a/compiler/noirc_evaluator/src/ssa/ssa_gen/mod.rs b/compiler/noirc_evaluator/src/ssa/ssa_gen/mod.rs index 6f59fa13274..d8857f9e599 100644 --- a/compiler/noirc_evaluator/src/ssa/ssa_gen/mod.rs +++ b/compiler/noirc_evaluator/src/ssa/ssa_gen/mod.rs @@ -128,7 +128,6 @@ impl<'a> FunctionContext<'a> { } fn codegen_expression(&mut self, expr: &Expression) -> Result { - eprintln!("Codegen {expr}"); match expr { Expression::Ident(ident) => Ok(self.codegen_ident(ident)), Expression::Literal(literal) => self.codegen_literal(literal), From 33860678a642a76d8251ef42ffbe6d8a5a013528 Mon Sep 17 00:00:00 2001 From: Tom French <15848336+TomAFrench@users.noreply.github.com> Date: Sat, 24 Feb 2024 14:15:25 +0000 Subject: [PATCH 39/39] chore: remove unwanted prints (#4419) # Description ## Problem\* Resolves ## Summary\* This removes some unwanted prints which were left in from #4376 ## Additional Context ## Documentation\* Check one: - [x] No documentation needed. - [ ] Documentation included in this PR. - [ ] **[Exceptional Case]** Documentation to be submitted in a separate PR. # PR Checklist\* - [x] I have tested the changes locally. - [x] I have formatted the changes with [Prettier](https://prettier.io/) and/or `cargo fmt` on default settings. 
--- compiler/noirc_evaluator/src/ssa/ssa_gen/mod.rs | 2 -- 1 file changed, 2 deletions(-) diff --git a/compiler/noirc_evaluator/src/ssa/ssa_gen/mod.rs b/compiler/noirc_evaluator/src/ssa/ssa_gen/mod.rs index d8857f9e599..d95295ae3c9 100644 --- a/compiler/noirc_evaluator/src/ssa/ssa_gen/mod.rs +++ b/compiler/noirc_evaluator/src/ssa/ssa_gen/mod.rs @@ -347,10 +347,8 @@ impl<'a> FunctionContext<'a> { } fn codegen_binary(&mut self, binary: &ast::Binary) -> Result { - eprintln!("Start binary"); let lhs = self.codegen_non_tuple_expression(&binary.lhs)?; let rhs = self.codegen_non_tuple_expression(&binary.rhs)?; - eprintln!("Insert binary"); Ok(self.insert_binary(lhs, binary.operator, rhs, binary.location)) }