Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

[WIP] nydus-image: modify compact subcommand #1489

Open
wants to merge 2 commits into
base: master
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
36 changes: 28 additions & 8 deletions builder/src/compact.rs
Original file line number Diff line number Diff line change
Expand Up @@ -79,7 +79,7 @@ impl ChunkKey {
match c {
ChunkWrapper::V5(_) => Self::Digest(*c.id()),
ChunkWrapper::V6(_) => Self::Offset(c.blob_index(), c.compressed_offset()),
ChunkWrapper::Ref(_) => unimplemented!("unsupport ChunkWrapper::Ref(c)"),
ChunkWrapper::Ref(_) => Self::Offset(c.blob_index(), c.compressed_offset()),
}
}
}
Expand Down Expand Up @@ -332,6 +332,20 @@ impl BlobCompactor {
cs.add_chunk(&chunk.inner);
}
}
} else if let Some(c) = all_chunks.get_chunk(&chunk_key) {
let mut chunk_inner = chunk.inner.deref().clone();
apply_chunk_change(c, &mut chunk_inner)?;
chunk.inner = Arc::new(chunk_inner);
} else {
all_chunks.add_chunk(&chunk.inner);
// add to per blob ChunkSet
let blob_index = chunk.inner.blob_index() as usize;
if self.states[blob_index].is_invalid() {
self.states[blob_index] = State::Original(ChunkSet::new());
}
if let State::Original(cs) = &mut self.states[blob_index] {
cs.add_chunk(&chunk.inner);
}
}

// construct blobs/chunk --> nodes index map
Expand Down Expand Up @@ -404,7 +418,7 @@ impl BlobCompactor {
}

fn prepare_to_rebuild(&mut self, idx: usize) -> Result<()> {
if !self.states[idx].is_rebuild() {
if self.states[idx].is_rebuild() {
return Ok(());
}

Expand Down Expand Up @@ -443,8 +457,8 @@ impl BlobCompactor {
};

info!(
"compactor: original blob size {}, used data ratio {}%",
blob_info.blob_id, used_ratio
"compactor: original blob id is {}, blob size is {}, used data ratio {}%",
blob_info.blob_id, blob_info.compressed_blob_size, used_ratio
);
if used_ratio < ratio {
self.prepare_to_rebuild(idx)?;
Expand Down Expand Up @@ -531,7 +545,7 @@ impl BlobCompactor {
let ori_blob_ids = self.original_blob_ids();
ensure!(self.states.len() == self.ori_blob_mgr.len());

for idx in 0..self.states.len() {
for idx in (0..self.states.len()).rev() {
match &self.states[idx] {
State::Original(_) | State::ChunkDict => {
info!("compactor: keep original data blob {}", ori_blob_ids[idx]);
Expand Down Expand Up @@ -613,6 +627,13 @@ impl BlobCompactor {
Features::new(),
false,
);
if rs.meta.is_v5() {
build_ctx.fs_version = RafsVersion::V5;
info!("Version:V5");
} else {
build_ctx.fs_version = RafsVersion::V6;
info!("Version:V6");
}
let mut bootstrap_mgr =
BootstrapManager::new(Some(ArtifactStorage::SingleFile(d_bootstrap)), None);
let mut bootstrap_ctx = bootstrap_mgr.create_ctx()?;
Expand Down Expand Up @@ -786,7 +807,6 @@ mod tests {
}

#[test]
#[should_panic = "not implemented: unsupport ChunkWrapper::Ref(c)"]
fn test_chunk_key_from() {
let cw = ChunkWrapper::new(RafsVersion::V5);
matches!(ChunkKey::from(&cw), ChunkKey::Digest(_));
Expand All @@ -807,7 +827,7 @@ mod tests {
reserved: 0,
}) as Arc<dyn BlobChunkInfo>;
let cw = ChunkWrapper::Ref(chunk);
ChunkKey::from(&cw);
matches!(ChunkKey::from(&cw), ChunkKey::Offset(_, _));
}

#[test]
Expand Down Expand Up @@ -1190,7 +1210,7 @@ mod tests {
compactor.ori_blob_mgr.add_blob(blob_ctx4);
compactor.ori_blob_mgr.add_blob(blob_ctx5);

compactor.states[0] = State::Invalid;
compactor.states[4] = State::Invalid;

let tmp_dir = TempDir::new().unwrap();
let dir = tmp_dir.as_path().to_str().unwrap();
Expand Down
49 changes: 49 additions & 0 deletions smoke/tests/image_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -140,6 +140,55 @@ func (i *ImageTestSuite) TestConvertAndCopyImage(t *testing.T, ctx tool.Context,
tool.RunWithoutOutput(t, checkCmd)
}

// TestCompactImages yields one compaction test case per image in the
// scenario matrix (currently only nginx:latest).
func (i *ImageTestSuite) TestCompactImages() test.Generator {
	scenarios := tool.DescartesIterator{}
	scenarios.Dimension(paramImage, []interface{}{"nginx:latest"})

	return func() (name string, testCase test.Case) {
		if !scenarios.HasNext() {
			// Matrix exhausted: return zero values to signal the runner.
			return
		}
		s := scenarios.Next()
		ctx := tool.DefaultContext(i.T)
		img := i.prepareImage(i.T, s.GetString(paramImage))

		name = s.Str()
		testCase = func(t *testing.T) {
			i.TestCompact(t, *ctx, img)
		}
		return
	}
}

// TestCompact converts `source` into a nydus image and then exercises the
// builder's `compact` subcommand against the bootstrap that conversion
// produced in this test's private work directory.
func (i *ImageTestSuite) TestCompact(t *testing.T, ctx tool.Context, source string) {
	// Prepare an isolated work directory; tear it down when the test ends.
	ctx.PrepareWorkDir(t)
	defer ctx.Destroy(t)

	target := fmt.Sprintf("%s-nydus-%s", source, uuid.NewString())
	fsVersion := fmt.Sprintf("--fs-version %s", ctx.Build.FSVersion)
	logLevel := "--log-level warn"

	// Convert image
	convertCmd := fmt.Sprintf(
		"%s %s convert --source %s --target %s %s --nydus-image %s --work-dir %s",
		ctx.Binary.Nydusify, logLevel, source, target, fsVersion, ctx.Binary.Builder, ctx.Env.WorkDir,
	)
	tool.RunWithoutOutput(t, convertCmd)

	// Compact image.
	// NOTE(review): these paths were previously hard-coded to one CI
	// runner's absolute filesystem layout (and to a redis bootstrap
	// unrelated to `source`), so the test could only ever pass on that
	// single machine. Derive them from the per-test work directory
	// instead. TODO: confirm the exact bootstrap location produced by
	// `nydusify convert`, and generate the compact/backend config files
	// here rather than assuming they pre-exist.
	bootstrapPath := fmt.Sprintf("%s/nydus_bootstrap", ctx.Env.WorkDir)
	compactConfigPath := fmt.Sprintf("%s/compact.json", ctx.Env.WorkDir)
	backendConfigPath := fmt.Sprintf("%s/backend-config.json", ctx.Env.WorkDir)

	compactCmd := fmt.Sprintf(
		"%s %s compact --bootstrap %s --compact-config %s --backend-config %s",
		ctx.Binary.Builder, logLevel, bootstrapPath, compactConfigPath, backendConfigPath,
	)
	tool.RunWithoutOutput(t, compactCmd)
}

func (i *ImageTestSuite) prepareImage(t *testing.T, image string) string {
if i.preparedImages == nil {
i.preparedImages = make(map[string]string)
Expand Down
8 changes: 4 additions & 4 deletions src/bin/nydus-image/main.rs
Original file line number Diff line number Diff line change
Expand Up @@ -654,8 +654,8 @@ fn prepare_cmd_args(bti_string: &'static str) -> App {
.required(true),
)
.arg(
Arg::new("config")
.long("config")
Arg::new("compact-config")
.long("compact-config")
.short('C')
.help("config to compactor")
.required(true),
Expand Down Expand Up @@ -1293,7 +1293,7 @@ impl Command {
Self::get_configuration(matches).context("failed to get configuration information")?;
config
.internal
.set_blob_accessible(matches.get_one::<String>("config").is_some());
.set_blob_accessible(matches.get_one::<String>("bootstrap").is_some());
let bootstrap_path = PathBuf::from(Self::get_bootstrap(matches)?);
let dst_bootstrap = match matches.get_one::<String>("output-bootstrap") {
None => bootstrap_path.with_extension("bootstrap.compact"),
Expand All @@ -1313,7 +1313,7 @@ impl Command {

let backend = Self::get_backend(matches, "compactor")?;

let config_file_path = matches.get_one::<String>("config").unwrap();
let config_file_path = matches.get_one::<String>("compact-config").unwrap();
let file = File::open(config_file_path)
.with_context(|| format!("failed to open config file {}", config_file_path))?;
let config = serde_json::from_reader(file)
Expand Down
5 changes: 4 additions & 1 deletion storage/src/meta/chunk_info_v1.rs
Original file line number Diff line number Diff line change
Expand Up @@ -58,7 +58,10 @@ impl BlobMetaChunkInfo for BlobChunkInfoV1Ondisk {
}

/// Store `offset` into the offset bits of `uncomp_info` selected by
/// `BLOB_CC_V1_CHUNK_UNCOMP_OFFSET_MASK`.
///
/// The assertion only verifies that the out-of-mask portion of `offset`
/// has no bits set at position 32 or above (`>> 32` then truncate to
/// `u32`); out-of-mask bits below bit 32 are NOT checked and are
/// silently discarded by the masked store below.
/// NOTE(review): this is strictly weaker than asserting
/// `offset & !MASK == 0` — confirm that silently dropping the low
/// out-of-mask bits (rather than panicking) is the intended behavior.
fn set_uncompressed_offset(&mut self, offset: u64) {
    assert_eq!(
        ((offset & !BLOB_CC_V1_CHUNK_UNCOMP_OFFSET_MASK) >> 32) as u32,
        0
    );
    // Clear the offset field, then OR in the masked new value
    // (both halves converted via to_le for on-disk layout).
    self.uncomp_info &= u64::to_le(!BLOB_CC_V1_CHUNK_UNCOMP_OFFSET_MASK);
    self.uncomp_info |= u64::to_le(offset & BLOB_CC_V1_CHUNK_UNCOMP_OFFSET_MASK);
}
Expand Down
Loading