Skip to content

Commit

Permalink
Merge branch 'master' into michaeljklein/vec-arena
Browse files Browse the repository at this point in the history
  • Loading branch information
michaeljklein authored Feb 16, 2024
2 parents 7885574 + b283637 commit 881373d
Show file tree
Hide file tree
Showing 37 changed files with 438 additions and 238 deletions.
10 changes: 10 additions & 0 deletions .github/CRATES_IO_PUBLISH_FAILED.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,10 @@
---
title: "ACVM crates failed to publish"
assignees: TomAFrench kevaundray savio-sou
---

The {{env.CRATE_VERSION}} release of the ACVM crates failed.

Check the [Publish ACVM]({{env.WORKFLOW_URL}}) workflow for details.

This issue was raised by the workflow `{{env.WORKFLOW_NAME}}`.
11 changes: 11 additions & 0 deletions .github/JS_PUBLISH_FAILED.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,11 @@
---
title: "JS packages failed to publish"
assignees: TomAFrench kevaundray savio-sou
labels: js
---

The {{env.NPM_TAG}} release of the JS packages failed.

Check the [Publish JS packages]({{env.WORKFLOW_URL}}) workflow for details.

This issue was raised by the workflow `{{env.WORKFLOW_NAME}}`.
13 changes: 13 additions & 0 deletions .github/workflows/publish-acvm.yml
Original file line number Diff line number Diff line change
Expand Up @@ -62,3 +62,16 @@ jobs:
cargo publish --package acvm
env:
CARGO_REGISTRY_TOKEN: ${{ secrets.ACVM_CRATES_IO_TOKEN }}

# Raise an issue if any package failed to publish
- name: Alert on failed publish
  uses: JasonEtco/create-an-issue@v2
  # Only runs when an earlier step in this job has failed.
  if: ${{ failure() }}
  env:
    # Interpolated into the issue template referenced below.
    GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
    CRATE_VERSION: ${{ inputs.noir-ref }}
    WORKFLOW_NAME: ${{ github.workflow }}
    WORKFLOW_URL: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}
  with:
    # Re-use a single open issue rather than filing duplicates.
    update_existing: true
    # Bug fix: this is the ACVM crates publish workflow, so it must use the
    # crates.io template (which consumes CRATE_VERSION), not the JS one
    # (which consumes NPM_TAG — never set here). The JS filename was a
    # copy-paste leftover from publish-es-packages.yml.
    filename: .github/CRATES_IO_PUBLISH_FAILED.md
13 changes: 13 additions & 0 deletions .github/workflows/publish-es-packages.yml
Original file line number Diff line number Diff line change
Expand Up @@ -143,3 +143,16 @@ jobs:

- name: Publish ES Packages
run: yarn publish:all --access public --tag ${{ inputs.npm-tag }}

# Raise an issue if any package failed to publish
- name: Alert on failed publish
  uses: JasonEtco/create-an-issue@v2
  # Only runs when an earlier step in this job has failed.
  if: ${{ failure() }}
  env:
    # Interpolated into the JS_PUBLISH_FAILED.md issue template below.
    GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
    NPM_TAG: ${{ inputs.npm-tag }}
    WORKFLOW_NAME: ${{ github.workflow }}
    WORKFLOW_URL: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}
  with:
    # Re-use a single open issue rather than filing duplicates.
    update_existing: true
    filename: .github/JS_PUBLISH_FAILED.md
112 changes: 112 additions & 0 deletions .github/workflows/test-rust-workspace-msrv.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,112 @@
name: Test (MSRV check)

# TL;DR https://github.com/noir-lang/noir/issues/4384
#
# This workflow acts to ensure that we can publish to crates.io; we need this extra check as libraries don't respect the Cargo.lock file committed in this repository.
# We must then always be able to build the workspace using the latest versions of all of our dependencies, so we explicitly update them and build in this workflow.

on:
  pull_request:
  merge_group:
  push:
    branches:
      - master

# This will cancel previous runs when a branch or PR is updated
concurrency:
  group: ${{ github.workflow }}-${{ github.head_ref || github.ref || github.run_id }}
  cancel-in-progress: true

jobs:
  # Builds the workspace against freshly-updated ACVM dependencies and archives
  # the compiled test binaries so the partitioned `run-tests` jobs can reuse them.
  build-test-artifacts:
    name: Build test artifacts
    runs-on: ubuntu-latest
    timeout-minutes: 30

    steps:
      - name: Checkout
        uses: actions/checkout@v4

      # NOTE(review): the action reference below appears mangled (likely by an
      # email-protection scraper) — confirm the intended pinned toolchain version.
      - name: Setup toolchain
        uses: dtolnay/[email protected]
        with:
          targets: x86_64-unknown-linux-gnu

      # We force the ACVM crate and all of its dependencies to update their dependencies
      # This ensures that we'll be able to build the crates when they're being published.
      - name: Update Cargo.lock
        run: |
          cargo update --package acvm --aggressive
          cargo update --package bn254_blackbox_solver --aggressive

      - uses: Swatinem/rust-cache@v2
        with:
          key: x86_64-unknown-linux-gnu-msrv-check
          cache-on-failure: true
          # Don't overwrite the cache from merge-queue runs.
          save-if: ${{ github.event_name != 'merge_group' }}

      # NOTE(review): the tool version below also appears scrape-mangled — confirm.
      - name: Install nextest
        uses: taiki-e/install-action@v2
        with:
          tool: [email protected]

      - name: Build and archive tests
        run: cargo nextest archive --workspace --release --archive-file nextest-archive.tar.zst

      - name: Upload archive to workflow
        uses: actions/upload-artifact@v4
        with:
          name: nextest-archive
          path: nextest-archive.tar.zst

  # Downloads the archived test binaries and runs one quarter of the suite per job.
  run-tests:
    name: "Run tests (partition ${{matrix.partition}})"
    runs-on: ubuntu-latest
    needs: [build-test-artifacts]
    strategy:
      # Let every partition finish even if one fails, so all failures surface at once.
      fail-fast: false
      matrix:
        partition: [1, 2, 3, 4]
    steps:
      - uses: actions/checkout@v4

      # NOTE(review): scrape-mangled action version — confirm (see build job).
      - name: Setup toolchain
        uses: dtolnay/[email protected]
        with:
          targets: x86_64-unknown-linux-gnu

      - name: Install nextest
        uses: taiki-e/install-action@v2
        with:
          tool: [email protected]

      - name: Download archive
        uses: actions/download-artifact@v4
        with:
          name: nextest-archive

      - name: Run tests
        run: |
          cargo nextest run --archive-file nextest-archive.tar.zst \
            --partition count:${{ matrix.partition }}/4

  # This is a job which depends on all test jobs and reports the overall status.
  # This allows us to add/remove test jobs without having to update the required workflows.
  tests-end:
    name: Rust End
    runs-on: ubuntu-latest
    # We want this job to always run (even if the dependant jobs fail) as we want this job to fail rather than skipping.
    if: ${{ always() }}
    needs:
      - run-tests

    steps:
      - name: Report overall success
        run: |
          if [[ $FAIL == true ]]; then
            exit 1
          else
            exit 0
          fi
        env:
          # We treat any cancelled, skipped or failing jobs as a failure for the workflow as a whole.
          FAIL: ${{ contains(needs.*.result, 'failure') || contains(needs.*.result, 'cancelled') || contains(needs.*.result, 'skipped') }}
114 changes: 57 additions & 57 deletions acvm-repo/acir_field/src/generic_ark.rs
Original file line number Diff line number Diff line change
Expand Up @@ -429,63 +429,6 @@ impl<F: PrimeField> SubAssign for FieldElement<F> {
}
}

#[cfg(test)]
mod tests {
#[test]
fn and() {
let max = 10_000u32;

let num_bits = (std::mem::size_of::<u32>() * 8) as u32 - max.leading_zeros();

for x in 0..max {
let x = crate::generic_ark::FieldElement::<ark_bn254::Fr>::from(x as i128);
let res = x.and(&x, num_bits);
assert_eq!(res.to_be_bytes(), x.to_be_bytes());
}
}

#[test]
fn serialize_fixed_test_vectors() {
// Serialized field elements from of 0, -1, -2, -3
let hex_strings = vec![
"0000000000000000000000000000000000000000000000000000000000000000",
"30644e72e131a029b85045b68181585d2833e84879b9709143e1f593f0000000",
"30644e72e131a029b85045b68181585d2833e84879b9709143e1f593efffffff",
"30644e72e131a029b85045b68181585d2833e84879b9709143e1f593effffffe",
];

for (i, string) in hex_strings.into_iter().enumerate() {
let minus_i_field_element =
-crate::generic_ark::FieldElement::<ark_bn254::Fr>::from(i as i128);
assert_eq!(minus_i_field_element.to_hex(), string);
}
}

#[test]
fn deserialize_even_and_odd_length_hex() {
// Test cases of (odd, even) length hex strings
let hex_strings =
vec![("0x0", "0x00"), ("0x1", "0x01"), ("0x002", "0x0002"), ("0x00003", "0x000003")];
for (i, case) in hex_strings.into_iter().enumerate() {
let i_field_element =
crate::generic_ark::FieldElement::<ark_bn254::Fr>::from(i as i128);
let odd_field_element =
crate::generic_ark::FieldElement::<ark_bn254::Fr>::from_hex(case.0).unwrap();
let even_field_element =
crate::generic_ark::FieldElement::<ark_bn254::Fr>::from_hex(case.1).unwrap();

assert_eq!(i_field_element, odd_field_element);
assert_eq!(odd_field_element, even_field_element);
}
}

#[test]
fn max_num_bits_smoke() {
let max_num_bits_bn254 = crate::generic_ark::FieldElement::<ark_bn254::Fr>::max_num_bits();
assert_eq!(max_num_bits_bn254, 254);
}
}

fn mask_vector_le(bytes: &mut [u8], num_bits: usize) {
// reverse to big endian format
bytes.reverse();
Expand Down Expand Up @@ -543,3 +486,60 @@ fn superscript(n: u64) -> String {
panic!("{}", n.to_string() + " can't be converted to superscript.");
}
}

#[cfg(test)]
mod tests {
    /// Shorthand for the BN254 field element type exercised by these tests.
    type Bn254 = crate::generic_ark::FieldElement<ark_bn254::Fr>;

    #[test]
    fn and() {
        let max = 10_000u32;

        // Number of significant bits in `max`.
        let num_bits = (std::mem::size_of::<u32>() * 8) as u32 - max.leading_zeros();

        // `x & x == x` must hold for every element in range.
        for value in 0..max {
            let element = Bn254::from(value as i128);
            let conjunction = element.and(&element, num_bits);
            assert_eq!(conjunction.to_be_bytes(), element.to_be_bytes());
        }
    }

    #[test]
    fn serialize_fixed_test_vectors() {
        // Expected hex serializations of 0, -1, -2 and -3 respectively.
        let expected_hex = [
            "0000000000000000000000000000000000000000000000000000000000000000",
            "30644e72e131a029b85045b68181585d2833e84879b9709143e1f593f0000000",
            "30644e72e131a029b85045b68181585d2833e84879b9709143e1f593efffffff",
            "30644e72e131a029b85045b68181585d2833e84879b9709143e1f593effffffe",
        ];

        for (i, hex) in expected_hex.iter().enumerate() {
            let negated = -Bn254::from(i as i128);
            assert_eq!(negated.to_hex(), *hex);
        }
    }

    #[test]
    fn deserialize_even_and_odd_length_hex() {
        // Pairs of (odd-length, even-length) hex encodings of the index `i`.
        let cases =
            [("0x0", "0x00"), ("0x1", "0x01"), ("0x002", "0x0002"), ("0x00003", "0x000003")];

        for (i, &(odd_hex, even_hex)) in cases.iter().enumerate() {
            let expected = Bn254::from(i as i128);
            let from_odd = Bn254::from_hex(odd_hex).unwrap();
            let from_even = Bn254::from_hex(even_hex).unwrap();

            // Both encodings decode to the same value as the plain integer.
            assert_eq!(expected, from_odd);
            assert_eq!(from_odd, from_even);
        }
    }

    #[test]
    fn max_num_bits_smoke() {
        // The BN254 scalar field modulus fits in 254 bits.
        assert_eq!(Bn254::max_num_bits(), 254);
    }
}
5 changes: 1 addition & 4 deletions acvm-repo/acvm/src/compiler/optimizers/redundant_range.rs
Original file line number Diff line number Diff line change
Expand Up @@ -72,12 +72,9 @@ impl RangeOptimizer {
}
}


Opcode::BlackBoxFuncCall(BlackBoxFuncCall::RANGE {
input: FunctionInput { witness, num_bits },
}) => {
Some((*witness, *num_bits))
}
}) => Some((*witness, *num_bits)),

_ => None,
}) else {
Expand Down
2 changes: 1 addition & 1 deletion acvm-repo/acvm/src/pwg/blackbox/bigint.rs
Original file line number Diff line number Diff line change
Expand Up @@ -69,7 +69,7 @@ impl BigIntSolver {
pub(crate) fn bigint_to_bytes(
&self,
input: u32,
outputs: &Vec<Witness>,
outputs: &[Witness],
initial_witness: &mut WitnessMap,
) -> Result<(), OpcodeResolutionError> {
let bigint = self.get_bigint(input, BlackBoxFunc::BigIntToLeBytes)?;
Expand Down
8 changes: 3 additions & 5 deletions aztec_macros/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -809,7 +809,7 @@ fn get_serialized_length(
) -> Result<u64, AztecMacroError> {
let (struct_name, maybe_stored_in_state) = match typ {
Type::Struct(struct_type, generics) => {
Ok((struct_type.borrow().name.0.contents.clone(), generics.get(0)))
Ok((struct_type.borrow().name.0.contents.clone(), generics.first()))
}
_ => Err(AztecMacroError::CouldNotAssignStorageSlots {
secondary_message: Some("State storage variable must be a struct".to_string()),
Expand Down Expand Up @@ -858,7 +858,7 @@ fn get_serialized_length(
let serialized_trait_impl_shared = interner.get_trait_implementation(*serialized_trait_impl_id);
let serialized_trait_impl = serialized_trait_impl_shared.borrow();

match serialized_trait_impl.trait_generics.get(0).unwrap() {
match serialized_trait_impl.trait_generics.first().unwrap() {
Type::Constant(value) => Ok(*value),
_ => Err(AztecMacroError::CouldNotAssignStorageSlots { secondary_message: None }),
}
Expand Down Expand Up @@ -945,9 +945,7 @@ fn assign_storage_slots(
let slot_arg_expression = interner.expression(&new_call_expression.arguments[1]);

let current_storage_slot = match slot_arg_expression {
HirExpression::Literal(HirLiteral::Integer(slot, _)) => {
Ok(slot.borrow().to_u128())
}
HirExpression::Literal(HirLiteral::Integer(slot, _)) => Ok(slot.to_u128()),
_ => Err((
AztecMacroError::CouldNotAssignStorageSlots {
secondary_message: Some(
Expand Down
2 changes: 1 addition & 1 deletion compiler/noirc_errors/src/debug_info.rs
Original file line number Diff line number Diff line change
Expand Up @@ -90,7 +90,7 @@ impl DebugInfo {

for (opcode_location, locations) in self.locations.iter() {
for location in locations.iter() {
let opcodes = accumulator.entry(*location).or_insert(Vec::new());
let opcodes = accumulator.entry(*location).or_default();
opcodes.push(opcode_location);
}
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -269,7 +269,7 @@ impl<'block> BrilligBlock<'block> {
unreachable!("expected a call instruction")
};

let Value::Function(func_id) = &dfg[*func] else {
let Value::Function(func_id) = &dfg[*func] else {
unreachable!("expected a function value")
};

Expand Down
5 changes: 5 additions & 0 deletions compiler/noirc_evaluator/src/ssa/function_builder/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -216,6 +216,11 @@ impl FunctionBuilder {
operator: BinaryOp,
rhs: ValueId,
) -> ValueId {
assert_eq!(
self.type_of_value(lhs),
self.type_of_value(rhs),
"ICE - Binary instruction operands must have the same type"
);
let instruction = Instruction::Binary(Binary { lhs, rhs, operator });
self.insert_instruction(instruction, None).first()
}
Expand Down
4 changes: 1 addition & 3 deletions compiler/noirc_evaluator/src/ssa/ssa_gen/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -684,9 +684,7 @@ impl<'a> FunctionContext<'a> {
&mut self,
assert_message: &Option<Box<Expression>>,
) -> Result<Option<Box<ConstrainError>>, RuntimeError> {
let Some(assert_message_expr) = assert_message else {
return Ok(None)
};
let Some(assert_message_expr) = assert_message else { return Ok(None) };

if let ast::Expression::Literal(ast::Literal::Str(assert_message)) =
assert_message_expr.as_ref()
Expand Down
Loading

0 comments on commit 881373d

Please sign in to comment.