author    Oliver Reiche <oliver.reiche@huawei.com>  2024-06-27 17:12:06 +0200
committer Oliver Reiche <oliver.reiche@huawei.com>  2024-06-28 10:48:05 +0200
commit    2e954a436b7c990e12eb98de9a6a9ba995dd9429 (patch)
tree      a215cf9b4fb8fe5e479ee7aa507f137b670bc660 /src
parent    67cab970091d5b23c07890deb29018c7eeb4edbc (diff)
Use (un)expected for CAS access
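
This switches the CAS split/splice helpers from returning std::variant<LargeObjectError, T> to expected<T, LargeObjectError> (src/utils/cpp/expected.hpp), so callers test the result directly and read the failure via .error() instead of probing the variant with std::get_if. Below is a minimal sketch of the resulting calling pattern; it uses std::expected (C++23) as a stand-in for the project's expected type, and SplitBlob/Error here are simplified placeholders, not the repository's API.

    // Sketch only: std::expected stands in for src/utils/cpp/expected.hpp;
    // SplitBlob and Error are simplified placeholders, not project code.
    #include <expected>
    #include <iostream>
    #include <string>
    #include <vector>

    struct Error {  // plays the role of LargeObjectError
        std::string message;
    };

    // Hypothetical splitter: yields the chunks on success, an Error otherwise.
    auto SplitBlob(std::string const& blob)
        -> std::expected<std::vector<std::string>, Error> {
        if (blob.empty()) {
            return std::unexpected{Error{"could not split an empty blob"}};
        }
        return std::vector<std::string>{blob.substr(0, blob.size() / 2),
                                        blob.substr(blob.size() / 2)};
    }

    int main() {
        auto split = SplitBlob("example-data");
        if (split) {  // success: dereference to reach the value
            std::cout << "parts: " << split->size() << '\n';
        }
        else {  // failure: .error() carries the diagnostic
            std::cout << split.error().message << '\n';
        }
        return 0;
    }

The same shape appears in cas_utils.cpp below: on success the value is moved out with *std::move(split), on failure std::move(split).error() is forwarded to ToGrpc().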
Diffstat (limited to 'src')
-rw-r--r--  src/buildtool/execution_api/execution_service/cas_utils.cpp  | 19
-rw-r--r--  src/buildtool/storage/TARGETS                                 |  1
-rw-r--r--  src/buildtool/storage/compactifier.cpp                        | 10
-rw-r--r--  src/buildtool/storage/large_object_cas.hpp                    |  8
-rw-r--r--  src/buildtool/storage/large_object_cas.tpp                    | 56
-rw-r--r--  src/buildtool/storage/local_cas.hpp                           | 17
-rw-r--r--  src/buildtool/storage/local_cas.tpp                           | 43
7 files changed, 69 insertions, 85 deletions
diff --git a/src/buildtool/execution_api/execution_service/cas_utils.cpp b/src/buildtool/execution_api/execution_service/cas_utils.cpp
index 8ab7b131..7dee7dc7 100644
--- a/src/buildtool/execution_api/execution_service/cas_utils.cpp
+++ b/src/buildtool/execution_api/execution_service/cas_utils.cpp
@@ -93,14 +93,11 @@ auto CASUtils::SplitBlobFastCDC(bazel_re::Digest const& blob_digest,
: storage.CAS().SplitBlob(blob_digest);
// Process result:
- if (auto* result = std::get_if<std::vector<bazel_re::Digest>>(&split)) {
- return std::move(*result);
+ if (split) {
+ return *std::move(split);
}
// Process errors
- if (auto* error = std::get_if<LargeObjectError>(&split)) {
- return ToGrpc(std::move(*error));
- }
- return grpc::Status{grpc::StatusCode::INTERNAL, "an unknown error"};
+ return ToGrpc(std::move(split).error());
}
auto CASUtils::SpliceBlob(bazel_re::Digest const& blob_digest,
@@ -114,12 +111,8 @@ auto CASUtils::SpliceBlob(bazel_re::Digest const& blob_digest,
: storage.CAS().SpliceBlob(blob_digest, chunk_digests, false);
// Process result:
- if (auto* result = std::get_if<bazel_re::Digest>(&splice)) {
- return std::move(*result);
+ if (splice) {
+ return *std::move(splice);
}
- // Process errors
- if (auto* error = std::get_if<LargeObjectError>(&splice)) {
- return ToGrpc(std::move(*error));
- }
- return grpc::Status{grpc::StatusCode::INTERNAL, "an unknown error"};
+ return ToGrpc(std::move(splice).error());
}
diff --git a/src/buildtool/storage/TARGETS b/src/buildtool/storage/TARGETS
index c9551874..ec395ec5 100644
--- a/src/buildtool/storage/TARGETS
+++ b/src/buildtool/storage/TARGETS
@@ -54,6 +54,7 @@
, ["src/buildtool/build_engine/expression", "expression"]
, ["src/utils/cpp", "file_locking"]
, ["src/utils/cpp", "gsl"]
+ , ["src/utils/cpp", "expected"]
, ["@", "gsl", "", "gsl"]
, ["@", "json", "", "json"]
, ["@", "fmt", "", "fmt"]
diff --git a/src/buildtool/storage/compactifier.cpp b/src/buildtool/storage/compactifier.cpp
index c8d65e8e..e9997efb 100644
--- a/src/buildtool/storage/compactifier.cpp
+++ b/src/buildtool/storage/compactifier.cpp
@@ -19,7 +19,6 @@
#include <filesystem>
#include <functional>
#include <optional>
-#include <variant>
#include <vector>
#include "src/buildtool/common/bazel_types.hpp"
@@ -283,21 +282,18 @@ template <ObjectType kType>
// Split the entry:
auto split_result = IsTreeObject(kType) ? task.cas.SplitTree(*digest)
: task.cas.SplitBlob(*digest);
- auto* parts = std::get_if<std::vector<bazel_re::Digest>>(&split_result);
- if (parts == nullptr) {
- auto* error = std::get_if<LargeObjectError>(&split_result);
- auto const error_message = error ? std::move(*error).Message() : "";
+ if (not split_result) {
task.Log(LogLevel::Error,
"Failed to split {}\nDigest: {}\nMessage: {}",
path.string(),
digest->hash(),
- error_message);
+ std::move(split_result).error().Message());
return false;
}
// If the file cannot actually be split (the threshold is too low), the
// file must not be deleted.
- if (parts->size() < 2) {
+ if (split_result->size() < 2) {
task.Log(LogLevel::Debug,
"{} cannot be compactified. The compactification "
"threshold is too low.",
diff --git a/src/buildtool/storage/large_object_cas.hpp b/src/buildtool/storage/large_object_cas.hpp
index cb81d8b5..6f287379 100644
--- a/src/buildtool/storage/large_object_cas.hpp
+++ b/src/buildtool/storage/large_object_cas.hpp
@@ -19,13 +19,13 @@
#include <optional>
#include <string>
#include <utility>
-#include <variant>
#include <vector>
#include "src/buildtool/common/bazel_types.hpp"
#include "src/buildtool/file_system/file_storage.hpp"
#include "src/buildtool/file_system/object_type.hpp"
#include "src/buildtool/storage/config.hpp"
+#include "src/utils/cpp/expected.hpp"
#include "src/utils/cpp/tmp_dir.hpp"
template <bool>
@@ -124,7 +124,7 @@ class LargeObjectCAS final {
/// \return A set of chunks the resulting object is composed of
/// or an error on failure.
[[nodiscard]] auto Split(bazel_re::Digest const& digest) const noexcept
- -> std::variant<LargeObjectError, std::vector<bazel_re::Digest>>;
+ -> expected<std::vector<bazel_re::Digest>, LargeObjectError>;
/// \brief Splice an object based on the reconstruction rules from the
/// storage. This method doesn't check whether the result of splicing is
@@ -133,7 +133,7 @@ class LargeObjectCAS final {
/// \return A temporary directory that contains a single file
/// "result" on success or an error on failure.
[[nodiscard]] auto TrySplice(bazel_re::Digest const& digest) const noexcept
- -> std::variant<LargeObjectError, LargeObject>;
+ -> expected<LargeObject, LargeObjectError>;
/// \brief Splice an object from parts. This method doesn't check whether
/// the result of splicing is already in the CAS.
@@ -143,7 +143,7 @@ class LargeObjectCAS final {
/// "result" on success or an error on failure.
[[nodiscard]] auto Splice(bazel_re::Digest const& digest,
std::vector<bazel_re::Digest> const& parts)
- const noexcept -> std::variant<LargeObjectError, LargeObject>;
+ const noexcept -> expected<LargeObject, LargeObjectError>;
/// \brief Uplink large entry from this generation to latest LocalCAS
/// generation. For the large entry it's parts get promoted first and then
diff --git a/src/buildtool/storage/large_object_cas.tpp b/src/buildtool/storage/large_object_cas.tpp
index db2f46a9..d216c8e8 100644
--- a/src/buildtool/storage/large_object_cas.tpp
+++ b/src/buildtool/storage/large_object_cas.tpp
@@ -119,7 +119,7 @@ auto LargeObjectCAS<kDoGlobalUplink, kType>::WriteEntry(
template <bool kDoGlobalUplink, ObjectType kType>
auto LargeObjectCAS<kDoGlobalUplink, kType>::Split(
bazel_re::Digest const& digest) const noexcept
- -> std::variant<LargeObjectError, std::vector<bazel_re::Digest>> {
+ -> expected<std::vector<bazel_re::Digest>, LargeObjectError> {
if (auto large_entry = ReadEntry(digest)) {
return std::move(*large_entry);
}
@@ -138,17 +138,17 @@ auto LargeObjectCAS<kDoGlobalUplink, kType>::Split(
}
if (not file_path) {
- return LargeObjectError{
- LargeObjectErrorCode::FileNotFound,
- fmt::format("could not find {}", digest.hash())};
+ return unexpected{
+ LargeObjectError{LargeObjectErrorCode::FileNotFound,
+ fmt::format("could not find {}", digest.hash())}};
}
// Split file into chunks:
FileChunker chunker{*file_path};
if (not chunker.IsOpen()) {
- return LargeObjectError{
- LargeObjectErrorCode::Internal,
- fmt::format("could not split {}", digest.hash())};
+ return unexpected{
+ LargeObjectError{LargeObjectErrorCode::Internal,
+ fmt::format("could not split {}", digest.hash())}};
}
std::vector<bazel_re::Digest> parts;
@@ -156,19 +156,19 @@ auto LargeObjectCAS<kDoGlobalUplink, kType>::Split(
while (auto chunk = chunker.NextChunk()) {
auto part = local_cas_.StoreBlob(*chunk, /*is_executable=*/false);
if (not part) {
- return LargeObjectError{LargeObjectErrorCode::Internal,
- "could not store a part."};
+ return unexpected{LargeObjectError{
+ LargeObjectErrorCode::Internal, "could not store a part."}};
}
parts.push_back(std::move(*part));
}
} catch (...) {
- return LargeObjectError{LargeObjectErrorCode::Internal,
- "an unknown error occured."};
+ return unexpected{LargeObjectError{LargeObjectErrorCode::Internal,
+ "an unknown error occured."}};
}
if (not chunker.Finished()) {
- return LargeObjectError{
- LargeObjectErrorCode::Internal,
- fmt::format("could not split {}", digest.hash())};
+ return unexpected{
+ LargeObjectError{LargeObjectErrorCode::Internal,
+ fmt::format("could not split {}", digest.hash())}};
}
std::ignore = WriteEntry(digest, parts);
@@ -178,12 +178,12 @@ auto LargeObjectCAS<kDoGlobalUplink, kType>::Split(
template <bool kDoGlobalUplink, ObjectType kType>
auto LargeObjectCAS<kDoGlobalUplink, kType>::TrySplice(
bazel_re::Digest const& digest) const noexcept
- -> std::variant<LargeObjectError, LargeObject> {
+ -> expected<LargeObject, LargeObjectError> {
auto parts = ReadEntry(digest);
if (not parts) {
- return LargeObjectError{
+ return unexpected{LargeObjectError{
LargeObjectErrorCode::FileNotFound,
- fmt::format("could not find large entry for {}", digest.hash())};
+ fmt::format("could not find large entry for {}", digest.hash())}};
}
return Splice(digest, *parts);
}
@@ -192,14 +192,14 @@ template <bool kDoGlobalUplink, ObjectType kType>
auto LargeObjectCAS<kDoGlobalUplink, kType>::Splice(
bazel_re::Digest const& digest,
std::vector<bazel_re::Digest> const& parts) const noexcept
- -> std::variant<LargeObjectError, LargeObject> {
+ -> expected<LargeObject, LargeObjectError> {
// Create temporary space for splicing:
LargeObject large_object;
if (not large_object.IsValid()) {
- return LargeObjectError{
+ return unexpected{LargeObjectError{
LargeObjectErrorCode::Internal,
fmt::format("could not create a temporary space for {}",
- digest.hash())};
+ digest.hash())}};
}
// Splice the object from parts
@@ -208,32 +208,32 @@ auto LargeObjectCAS<kDoGlobalUplink, kType>::Splice(
for (auto const& part : parts) {
auto part_path = local_cas_.BlobPath(part, /*is_executable=*/false);
if (not part_path) {
- return LargeObjectError{
+ return unexpected{LargeObjectError{
LargeObjectErrorCode::FileNotFound,
- fmt::format("could not find the part {}", part.hash())};
+ fmt::format("could not find the part {}", part.hash())}};
}
auto part_content = FileSystemManager::ReadFile(*part_path);
if (not part_content) {
- return LargeObjectError{
+ return unexpected{LargeObjectError{
LargeObjectErrorCode::Internal,
fmt::format("could not read the part content {}",
- part.hash())};
+ part.hash())}};
}
if (stream.good()) {
stream << *part_content;
}
else {
- return LargeObjectError{
+ return unexpected{LargeObjectError{
LargeObjectErrorCode::Internal,
- fmt::format("could not splice {}", digest.hash())};
+ fmt::format("could not splice {}", digest.hash())}};
}
}
stream.close();
} catch (...) {
- return LargeObjectError{LargeObjectErrorCode::Internal,
- "an unknown error occured"};
+ return unexpected{LargeObjectError{LargeObjectErrorCode::Internal,
+ "an unknown error occured"}};
}
return large_object;
}
diff --git a/src/buildtool/storage/local_cas.hpp b/src/buildtool/storage/local_cas.hpp
index 9309ef5d..b29c870b 100644
--- a/src/buildtool/storage/local_cas.hpp
+++ b/src/buildtool/storage/local_cas.hpp
@@ -18,7 +18,6 @@
#include <filesystem>
#include <optional>
#include <unordered_set>
-#include <variant>
#include <vector>
#include "gsl/gsl"
@@ -26,6 +25,7 @@
#include "src/buildtool/file_system/object_cas.hpp"
#include "src/buildtool/storage/garbage_collector.hpp"
#include "src/buildtool/storage/large_object_cas.hpp"
+#include "src/utils/cpp/expected.hpp"
/// \brief The local (logical) CAS for storing blobs and trees.
/// Blobs can be stored/queried as executable or non-executable. Trees might be
@@ -145,7 +145,7 @@ class LocalCAS {
/// \returns Digests of the parts of the large object or an
/// error code on failure.
[[nodiscard]] auto SplitBlob(bazel_re::Digest const& digest) const noexcept
- -> std::variant<LargeObjectError, std::vector<bazel_re::Digest>> {
+ -> expected<std::vector<bazel_re::Digest>, LargeObjectError> {
return cas_file_large_.Split(digest);
}
@@ -158,7 +158,7 @@ class LocalCAS {
[[nodiscard]] auto SpliceBlob(bazel_re::Digest const& digest,
std::vector<bazel_re::Digest> const& parts,
bool is_executable) const noexcept
- -> std::variant<LargeObjectError, bazel_re::Digest> {
+ -> expected<bazel_re::Digest, LargeObjectError> {
return is_executable ? Splice<ObjectType::Executable>(digest, parts)
: Splice<ObjectType::File>(digest, parts);
}
@@ -176,7 +176,7 @@ class LocalCAS {
/// \returns Digests of the parts of the large object or an
/// error code on failure.
[[nodiscard]] auto SplitTree(bazel_re::Digest const& digest) const noexcept
- -> std::variant<LargeObjectError, std::vector<bazel_re::Digest>> {
+ -> expected<std::vector<bazel_re::Digest>, LargeObjectError> {
return cas_tree_large_.Split(digest);
}
@@ -187,7 +187,7 @@ class LocalCAS {
/// failure.
[[nodiscard]] auto SpliceTree(bazel_re::Digest const& digest,
std::vector<bazel_re::Digest> const& parts)
- const noexcept -> std::variant<LargeObjectError, bazel_re::Digest> {
+ const noexcept -> expected<bazel_re::Digest, LargeObjectError> {
return Splice<ObjectType::Tree>(digest, parts);
}
@@ -315,7 +315,7 @@ class LocalCAS {
template <ObjectType kType>
[[nodiscard]] auto Splice(bazel_re::Digest const& digest,
std::vector<bazel_re::Digest> const& parts)
- const noexcept -> std::variant<LargeObjectError, bazel_re::Digest>;
+ const noexcept -> expected<bazel_re::Digest, LargeObjectError>;
};
#ifndef BOOTSTRAP_BUILD_TOOL
@@ -334,8 +334,9 @@ template <ObjectType kType>
auto LocalCAS<kDoGlobalUplink>::Splice(
bazel_re::Digest const& digest,
std::vector<bazel_re::Digest> const& parts) const noexcept
- -> std::variant<LargeObjectError, bazel_re::Digest> {
- return LargeObjectError{LargeObjectErrorCode::Internal, "not allowed"};
+ -> expected<bazel_re::Digest, LargeObjectError> {
+ return unexpected{
+ LargeObjectError{LargeObjectErrorCode::Internal, "not allowed"}};
}
#endif
diff --git a/src/buildtool/storage/local_cas.tpp b/src/buildtool/storage/local_cas.tpp
index e03d3062..e0cb3883 100644
--- a/src/buildtool/storage/local_cas.tpp
+++ b/src/buildtool/storage/local_cas.tpp
@@ -284,9 +284,8 @@ requires(kIsLocalGeneration) auto LocalCAS<kDoGlobalUplink>::TrySplice(
-> std::optional<LargeObject> {
auto spliced = IsTreeObject(kType) ? cas_tree_large_.TrySplice(digest)
: cas_file_large_.TrySplice(digest);
- auto* large = std::get_if<LargeObject>(&spliced);
- return large and large->IsValid() ? std::optional{std::move(*large)}
- : std::nullopt;
+ return spliced and spliced->IsValid() ? std::optional{std::move(*spliced)}
+ : std::nullopt;
}
template <bool kDoGlobalUplink>
@@ -343,7 +342,7 @@ template <ObjectType kType>
auto LocalCAS<kDoGlobalUplink>::Splice(
bazel_re::Digest const& digest,
std::vector<bazel_re::Digest> const& parts) const noexcept
- -> std::variant<LargeObjectError, bazel_re::Digest> {
+ -> expected<bazel_re::Digest, LargeObjectError> {
static constexpr bool kIsTree = IsTreeObject(kType);
static constexpr bool kIsExec = IsExecutableObject(kType);
@@ -353,39 +352,32 @@ auto LocalCAS<kDoGlobalUplink>::Splice(
}
// Splice the result from parts:
- std::optional<LargeObject> large_object;
auto splice_result = kIsTree ? cas_tree_large_.Splice(digest, parts)
: cas_file_large_.Splice(digest, parts);
- if (auto* result = std::get_if<LargeObject>(&splice_result)) {
- large_object = *result;
- }
- else if (auto* error = std::get_if<LargeObjectError>(&splice_result)) {
- return std::move(*error);
- }
- else {
- return LargeObjectError{
- LargeObjectErrorCode::Internal,
- fmt::format("could not splice {}", digest.hash())};
+ if (not splice_result) {
+ return unexpected{std::move(splice_result).error()};
}
+ auto const& large_object = *splice_result;
+
// Check digest consistency:
// Using Store{Tree, Blob} to calculate the resulting hash and later
// decide whether the result is valid is unreasonable, because these
// methods can refer to a file that existed before. The direct hash
// calculation is done instead.
- auto const file_path = large_object->GetPath();
+ auto const& file_path = large_object.GetPath();
auto spliced_digest = ObjectCAS<kType>::CreateDigest(file_path);
if (not spliced_digest) {
- return LargeObjectError{LargeObjectErrorCode::Internal,
- "could not calculate digest"};
+ return unexpected{LargeObjectError{LargeObjectErrorCode::Internal,
+ "could not calculate digest"}};
}
if (not detail::CheckDigestConsistency(*spliced_digest, digest)) {
- return LargeObjectError{
+ return unexpected{LargeObjectError{
LargeObjectErrorCode::InvalidResult,
fmt::format("actual result {} differs from the expected one {}",
spliced_digest->hash(),
- digest.hash())};
+ digest.hash())}};
}
// Check tree invariants:
@@ -394,12 +386,12 @@ auto LocalCAS<kDoGlobalUplink>::Splice(
// Read tree entries:
auto const tree_data = FileSystemManager::ReadFile(file_path);
if (not tree_data) {
- return LargeObjectError{
+ return unexpected{LargeObjectError{
LargeObjectErrorCode::Internal,
- fmt::format("could not read tree {}", digest.hash())};
+ fmt::format("could not read tree {}", digest.hash())}};
}
if (auto error = CheckTreeInvariant(digest, *tree_data)) {
- return std::move(*error);
+ return unexpected{std::move(*error)};
}
}
}
@@ -410,8 +402,9 @@ auto LocalCAS<kDoGlobalUplink>::Splice(
if (stored_digest) {
return std::move(*stored_digest);
}
- return LargeObjectError{LargeObjectErrorCode::Internal,
- fmt::format("could not splice {}", digest.hash())};
+ return unexpected{
+ LargeObjectError{LargeObjectErrorCode::Internal,
+ fmt::format("could not splice {}", digest.hash())}};
}
#endif // INCLUDED_SRC_BUILDTOOL_STORAGE_LOCAL_CAS_TPP