Merge pull request #315 from monadicus/deps-snarkos
feat: reconcile 2.0, event streams, version enforcement
Meshiest authored Dec 6, 2024
2 parents c52b686 + cb59208 commit 88073d5
Showing 141 changed files with 7,724 additions and 3,101 deletions.
178 changes: 94 additions & 84 deletions Cargo.lock

Large diffs are not rendered by default.

10 changes: 6 additions & 4 deletions Cargo.toml
@@ -81,6 +81,8 @@ reqwest = { version = "0.12", default-features = false, features = [
] }
# Can't update this cause snarkos/vm
rocksdb = { version = "0.21", default-features = false }
rustls = { version = "0.23.15", features = ["ring"] }
semver = { version = "1.0", features = ["serde"] }
serde = { version = "1", default-features = false, features = [
"alloc",
"derive",
@@ -129,9 +131,9 @@ snops-common = { path = "./crates/common" }
# snarkos-node-metrics = { version = "3.0" }
# snarkvm = { version = "1.0", features = ["rocks"] }

snarkos-account = { git = "https://github.com/AleoNet/snarkOS", rev = "c6de459" }
snarkos-node = { git = "https://github.com/AleoNet/snarkOS", rev = "c6de459" }
snarkos-node-metrics = { git = "https://github.com/AleoNet/snarkOS", rev = "c6de459" }
snarkvm = { git = "https://github.com/AleoNet/snarkVM", rev = "4eb83d7", default-features = false, features = [
snarkos-account = { git = "https://github.com/AleoNet/snarkOS", rev = "ba41197" }
snarkos-node = { git = "https://github.com/AleoNet/snarkOS", rev = "ba41197" }
snarkos-node-metrics = { git = "https://github.com/AleoNet/snarkOS", rev = "ba41197" }
snarkvm = { git = "https://github.com/AleoNet/snarkVM", rev = "1de86e7", default-features = false, features = [
"rocks",
] }
12 changes: 6 additions & 6 deletions README.md
@@ -37,7 +37,7 @@ To learn more about `snops` we recommend checking out the mdbook [here](https://
The controlplane is the webserver that communicates to agents how to
run snarkOS, or what transactions to execute.

1. In another terminal, build the cli: `cargo install --path ./crates/snops-cli`
1. In another terminal, install the cli: `cargo install --path ./crates/snops-cli`, or build with `cargo xtask build cli` and use from `target/release-big/snops-cli`.

The cli is used to interact with the controlplane and manage environments.
It provides JSON based output. We recommend pairing our cli with [`jq`](https://jqlang.github.io/jq/) when leveraging other scripts and tools
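
As a small sketch of that pairing, any `snops-cli` command's JSON output can be piped straight into `jq`; the filter below is only illustrative, since the exact fields depend on the command:

```sh
# Pretty-print the latest block reported by the controlplane.
snops-cli env block | jq

# Pull out a single field for use in scripts (the field name here is illustrative).
snops-cli env block | jq '.header'
```
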
@@ -64,15 +64,15 @@ To learn more about `snops` we recommend checking out the mdbook [here](https://
Each of these can be dynamically configured as snarkos nodes. The default
agent configuration should connect to a locally operated controlplane.

### Local Isonets
### Local Isolated Networks (Isonets)

This example requires 4 agents and the control plane to be running.
This example requires 4 agents and the control plane to be running. It allows you to run a devnet with a custom genesis block.

1. Start the environment: `snops-cli env prepare specs/test-4-validators.yaml`
1. Start the environment: `snops-cli env apply specs/test-4-validators.yaml`
1. Check the current network height: `snops-cli env height`
1. Look at the latest block: `snops-cli env block`
1. Look at the genesis block: `snops-cli env block 0`
1. Stop the environment: `snops-cli env clean`
1. Stop the environment: `snops-cli env delete`
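
Taken together, a typical isonet session built from the commands above might look like this (a sketch; output omitted):

```sh
# Apply the 4-validator topology, inspect the chain, then tear the environment down.
snops-cli env apply specs/test-4-validators.yaml
snops-cli env height      # current network height
snops-cli env block       # latest block
snops-cli env block 0     # genesis block
snops-cli env delete      # stop the environment
```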

### Isonet Transfers

@@ -156,7 +156,7 @@ Deploying and executing Aleo programs on your isonets is easiest with snops. You
`snarkos-aot` provides various CLI tools to help with developing and executing
Aleo programs as well as interact with snarkOS ledgers.

Build `snarkos-aot` with: `cargo install --profile release-big -p snarkos-aot`.
Build `snarkos-aot` with: `cargo xtask build aot`.
The compiled binary can be found in `target/release-big/snarkos-aot`.
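
A minimal sketch of that build flow (the `--help` invocation is an assumption about the binary's CLI; everything else is quoted from the steps above):

```sh
# Build the AOT tooling via the xtask, then run the resulting binary.
cargo xtask build aot
./target/release-big/snarkos-aot --help   # --help assumed for a clap-style CLI
```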

Use the `NETWORK` environment variable to specify `mainnet` (default),
6 changes: 3 additions & 3 deletions crates/agent/Cargo.toml
@@ -1,6 +1,6 @@
[package]
name = "snops-agent"
version = "0.1.0"
version = "0.2.0"
edition = "2021"
license = "MIT"
description = "A snarkops agent for communicating with snarkos nodes and the control plane"
@@ -14,17 +14,18 @@ mangen = ["snops-common/mangen"]
[dependencies]
anyhow.workspace = true
axum = { workspace = true, features = ["http2", "json", "tokio", "ws"] }
bincode.workspace = true
chrono.workspace = true
clap.workspace = true
dashmap.workspace = true
futures.workspace = true
futures-util.workspace = true
http.workspace = true
httpdate.workspace = true
indexmap.workspace = true
local-ip-address.workspace = true
nix = { workspace = true, features = ["signal"] }
reqwest = { workspace = true, features = ["json", "stream"] }
rustls.workspace = true
serde_json.workspace = true
sha2.workspace = true
simple_moving_average.workspace = true
@@ -43,4 +44,3 @@ tracing-appender.workspace = true
tracing.workspace = true
tracing-subscriber.workspace = true
url.workspace = true
rustls = { version = "0.23.15", features = ["ring"] }
149 changes: 91 additions & 58 deletions crates/agent/src/api.rs
@@ -12,7 +12,8 @@ use reqwest::IntoUrl;
use sha2::{Digest, Sha256};
use snops_common::{
binaries::{BinaryEntry, BinarySource},
state::TransferStatusUpdate,
rpc::error::ReconcileError,
state::{TransferId, TransferStatusUpdate},
util::sha256_file,
};
use tokio::{fs::File, io::AsyncWriteExt};
@@ -24,6 +25,7 @@ const TRANSFER_UPDATE_RATE: Duration = Duration::from_secs(2);

/// Download a file. Returns a None if 404.
pub async fn download_file(
tx_id: TransferId,
client: &reqwest::Client,
url: impl IntoUrl,
to: impl AsRef<Path>,
@@ -35,8 +37,7 @@ pub async fn download_file(
return Ok(None);
}

// create a new transfer
let tx_id = transfers::next_id();
// start a new transfer
transfer_tx.send((
tx_id,
TransferStatusUpdate::Start {
@@ -98,26 +99,6 @@ pub async fn download_file(
Ok(Some((file, sha256, downloaded)))
}

pub async fn check_file(
url: impl IntoUrl,
to: &Path,
transfer_tx: TransferTx,
) -> anyhow::Result<()> {
let client = reqwest::Client::new();

if !should_download_file(&client, url.as_str(), to, None)
.await
.unwrap_or(true)
{
return Ok(());
}

info!("downloading {to:?}");
download_file(&client, url, to, transfer_tx).await?;

Ok(())
}

pub async fn check_binary(
binary: &BinaryEntry,
base_url: &str,
@@ -136,23 +117,30 @@

// this also checks for sha256 differences, along with last modified time
// against the target
if !should_download_file(&client, &source_url, path, Some(binary))
.await
.unwrap_or(true)
{
let file_issues = get_file_issues(
&client,
&source_url,
path,
binary.size,
binary.sha256.as_deref(),
false,
)
.await;

if file_issues.is_ok_and(|issues| issues.is_none()) {
// check permissions and ensure 0o755
let perms = path.metadata()?.permissions();
if perms.mode() != 0o755 {
tokio::fs::set_permissions(path, std::fs::Permissions::from_mode(0o755)).await?;
}

// TODO: check sha256 and size

return Ok(());
}
info!("downloading binary update to {}: {binary}", path.display());

let Some((file, sha256, size)) = download_file(&client, &source_url, path, transfer_tx).await?
let tx_id = transfers::next_id();
let Some((file, sha256, size)) =
download_file(tx_id, &client, &source_url, path, transfer_tx).await?
else {
bail!("downloading binary returned 404");
};
@@ -186,47 +174,92 @@ pub async fn check_binary(
Ok(())
}

pub async fn should_download_file(
#[derive(Debug)]
pub enum BadFileReason {
/// File is missing
NotFound,
/// File size mismatch
Size,
/// SHA256 mismatch
Sha256,
/// A new version is available based on modified header
Stale,
}

pub async fn get_file_issues(
client: &reqwest::Client,
loc: &str,
path: &Path,
binary: Option<&BinaryEntry>,
) -> anyhow::Result<bool> {
if !path.exists() {
return Ok(true);
src: &str,
dst: &Path,
size: Option<u64>,
sha256: Option<&str>,
offline: bool,
) -> Result<Option<BadFileReason>, ReconcileError> {
if !dst.try_exists().unwrap_or(false) {
return Ok(Some(BadFileReason::NotFound));
}

let meta = tokio::fs::metadata(&path).await?;
let meta = tokio::fs::metadata(&dst)
.await
.map_err(|e| ReconcileError::FileStatError(dst.to_path_buf(), e.to_string()))?;
let local_content_length = meta.len();

// if the binary entry is provided, check if the file size and sha256 match
if let Some(binary) = binary {
// file size is incorrect
if binary.size.is_some_and(|s| s != local_content_length) {
return Ok(true);
}
// file size is incorrect
if size.is_some_and(|s| s != local_content_length) {
return Ok(Some(BadFileReason::Size));
}

// if sha256 is present, only download if the sha256 is different
if let Some(sha256) = binary.sha256.as_ref() {
return Ok(sha256_file(&path.to_path_buf())? != sha256.to_ascii_lowercase());
}
// if sha256 is present, only download if the sha256 is different
if let Some(sha256) = sha256 {
let bad_sha256 = sha256_file(&dst.to_path_buf())
.map_err(|e| ReconcileError::FileReadError(dst.to_path_buf(), e.to_string()))?
!= sha256.to_ascii_lowercase();
return Ok(bad_sha256.then_some(BadFileReason::Sha256));
}

// if we're offline, don't download
if offline {
return Ok(None);
}

// check last modified
let res = client.head(loc).send().await?;
let res = client
.head(src)
.send()
.await
.map_err(|e| ReconcileError::HttpError {
method: String::from("HEAD"),
url: src.to_owned(),
error: e.to_string(),
})?;

let Some(last_modified_header) = res.headers().get(http::header::LAST_MODIFIED) else {
return Ok(true);
let Some(last_modified_header) = res
.headers()
.get(http::header::LAST_MODIFIED)
// parse as a string
.and_then(|e| e.to_str().ok())
else {
return Ok(Some(BadFileReason::Stale));
};

let Some(content_length_header) = res.headers().get(http::header::CONTENT_LENGTH) else {
return Ok(true);
let Some(remote_content_length) = res
.headers()
.get(http::header::CONTENT_LENGTH)
// parse the header as a u64
.and_then(|e| e.to_str().ok().and_then(|s| s.parse::<u64>().ok()))
else {
return Ok(Some(BadFileReason::Size));
};

let remote_last_modified = httpdate::parse_http_date(last_modified_header.to_str()?)?;
let local_last_modified = meta.modified()?;

let remote_content_length = content_length_header.to_str()?.parse::<u64>()?;

Ok(remote_last_modified > local_last_modified || remote_content_length != local_content_length)
let remote_last_modified = httpdate::parse_http_date(last_modified_header);
let local_last_modified = meta
.modified()
.map_err(|e| ReconcileError::FileStatError(dst.to_path_buf(), e.to_string()))?;

let is_stale = remote_last_modified
.map(|res| res > local_last_modified)
.unwrap_or(true);
Ok(is_stale
.then_some(BadFileReason::Stale)
.or_else(|| (remote_content_length != local_content_length).then_some(BadFileReason::Size)))
}
38 changes: 35 additions & 3 deletions crates/agent/src/cli.rs
@@ -10,9 +10,11 @@ use std::{
use clap::CommandFactory;
use clap::Parser;
use http::Uri;
use snops_common::state::{AgentId, AgentModeOptions, PortConfig};
use snops_common::state::{AgentId, AgentModeOptions, NetworkId, PortConfig, StorageId};
use tracing::{info, warn};

use crate::net;

pub const ENV_ENDPOINT: &str = "SNOPS_ENDPOINT";
pub const ENV_ENDPOINT_DEFAULT: &str = "127.0.0.1:1234";

@@ -119,6 +121,9 @@ impl Cli {

let mut query = format!("/agent?mode={}", u8::from(self.modes));

// Add agent version
query.push_str(&format!("&version={}", env!("CARGO_PKG_VERSION")));

// add &id=
query.push_str(&format!("&id={}", self.id));

@@ -127,13 +132,13 @@
if fs::metadata(file).is_ok() {
query.push_str("&local_pk=true");
} else {
warn!("private-key-file flag ignored as the file was not found: {file:?}")
warn!("Private-key-file flag ignored as the file was not found: {file:?}")
}
}

// add &labels= if id is present
if let Some(labels) = &self.labels {
info!("using labels: {:?}", labels);
info!("Using labels: {:?}", labels);
query.push_str(&format!(
"&labels={}",
labels
@@ -167,4 +172,31 @@
ws_uri,
)
}

pub fn addrs(&self) -> (Vec<IpAddr>, Option<IpAddr>) {
let internal_addrs = match (self.internal, self.external) {
// use specified internal address
(Some(internal), _) => vec![internal],
// use no internal address if the external address is loopback
(None, Some(external)) if external.is_loopback() => vec![],
// otherwise, get the local network interfaces available to this node
(None, _) => net::get_internal_addrs().expect("failed to get network interfaces"),
};

let external_addr = self.external;
if let Some(addr) = external_addr {
info!("Using external addr: {}", addr);
} else {
info!("Skipping external addr");
}

(internal_addrs, external_addr)
}

pub fn storage_path(&self, network: NetworkId, storage_id: StorageId) -> PathBuf {
let mut path = self.path.join("storage");
path.push(network.to_string());
path.push(storage_id.to_string());
path
}
}
