diff --git a/mdio/dataset.h b/mdio/dataset.h
index b5154f7..9b0c581 100644
--- a/mdio/dataset.h
+++ b/mdio/dataset.h
@@ -799,7 +799,11 @@ class Dataset {
       }
     }
   )";
-  nlohmann::json base = nlohmann::json::parse(baseStr);
+  nlohmann::json base = nlohmann::json::parse(baseStr, nullptr, false);
+  if (base.is_discarded()) {
+    return absl::Status(absl::StatusCode::kInternal,
+                        "Failed to parse base JSON.");
+  }
   if (found >= 0) {
     base["field"] = specJson["metadata"]["dtype"][found][0];
   } else {
diff --git a/mdio/dataset_test.cc b/mdio/dataset_test.cc
index e25d813..9bf3374 100644
--- a/mdio/dataset_test.cc
+++ b/mdio/dataset_test.cc
@@ -529,4 +529,28 @@ TEST(Dataset, commitSlicedMetadata) {
   EXPECT_TRUE(commitRes.status().ok()) << commitRes.status();
 }
 
+
+TEST(Dataset, openNonExistent) {
+  auto json_vars = GetToyExample();
+
+  auto datasetRes =
+      mdio::Dataset::from_json(json_vars, "zarrs/DNE", mdio::constants::kOpen);
+  ASSERT_FALSE(datasetRes.status().ok())
+      << "Opened a non-existent dataset without error!";
+}
+
+TEST(Dataset, kCreateOverExisting) {
+  auto json_vars = GetToyExample();
+
+  auto datasetRes = mdio::Dataset::from_json(json_vars, "zarrs/acceptance",
+                                             mdio::constants::kCreateClean);
+  ASSERT_TRUE(datasetRes.status().ok()) << datasetRes.status();
+
+  datasetRes = mdio::Dataset::from_json(json_vars, "zarrs/acceptance",
+                                        mdio::constants::kCreate);
+  ASSERT_FALSE(datasetRes.status().ok())
+      << "Created a dataset over an existing "
+         "one without error!";
+}
+
 }  // namespace
diff --git a/mdio/dataset_validator.h b/mdio/dataset_validator.h
index 10f566a..99f0c14 100644
--- a/mdio/dataset_validator.h
+++ b/mdio/dataset_validator.h
@@ -43,7 +43,11 @@ bool contains(const std::unordered_set& set,
  * InvalidArgumentError if validation fails for any reason
  */
absl::Status validate_schema(nlohmann::json& spec /*NOLINT*/) {
-  nlohmann::json targetSchema = nlohmann::json::parse(kDatasetSchema);
+  nlohmann::json targetSchema =
+      nlohmann::json::parse(kDatasetSchema, nullptr, false);
+  if (targetSchema.is_discarded()) {
+    return absl::NotFoundError("Failed to load schema");
+  }
 
   nlohmann::json_schema::json_validator validator(
       nullptr, nlohmann::json_schema::default_string_format_check);
diff --git a/mdio/variable.h b/mdio/variable.h
index 9b8aca8..84889a2 100644
--- a/mdio/variable.h
+++ b/mdio/variable.h
@@ -455,8 +455,8 @@ Future> OpenVariable(const nlohmann::json& json_store,
   // go read the attributes return json ...
   auto parse = [](const tensorstore::kvstore::ReadResult& kvs_read,
                   const ::nlohmann::json& spec) {
-    // FIXME - if attributes supplied then validate with values
-    auto attributes = nlohmann::json::parse(std::string(kvs_read.value));
+    auto attributes =
+        nlohmann::json::parse(std::string(kvs_read.value), nullptr, false);
     return attributes;
   };
 