Diffstat (limited to 'src/buildtool/execution_api')
11 files changed, 57 insertions, 51 deletions
diff --git a/src/buildtool/execution_api/bazel_msg/bazel_msg_factory.cpp b/src/buildtool/execution_api/bazel_msg/bazel_msg_factory.cpp
index 34fbccb4..6eb35e4c 100644
--- a/src/buildtool/execution_api/bazel_msg/bazel_msg_factory.cpp
+++ b/src/buildtool/execution_api/bazel_msg/bazel_msg_factory.cpp
@@ -347,7 +347,7 @@ auto BazelMsgFactory::CreateDirectoryDigestFromTree(
             return std::nullopt;
         }
 
-        auto digest = bundle->blob.digest;
+        auto digest = bundle->blob.GetDigest();
         try {
             if (not process_blob(std::move(bundle->blob))) {
                 return std::nullopt;
@@ -921,12 +921,12 @@ auto BazelMsgFactory::CreateActionDigestFromCommandLine(
         return std::nullopt;
     }
 
-    auto action = CreateActionBundle(cmd->digest, request);
+    auto action = CreateActionBundle(cmd->GetDigest(), request);
     if (not action) {
         return std::nullopt;
     }
 
-    auto result = action->digest;
+    auto result = action->GetDigest();
     if (request.store_blob) {
         std::invoke(*request.store_blob, *std::move(cmd));
         std::invoke(*request.store_blob, *std::move(action));
diff --git a/src/buildtool/execution_api/common/blob_tree.cpp b/src/buildtool/execution_api/common/blob_tree.cpp
index a0d362b4..7224406a 100644
--- a/src/buildtool/execution_api/common/blob_tree.cpp
+++ b/src/buildtool/execution_api/common/blob_tree.cpp
@@ -43,7 +43,8 @@ auto BlobTree::FromDirectoryTree(DirectoryTreePtr const& tree,
             if (not blob_tree) {
                 return std::nullopt;
             }
-            auto raw_id = FromHexString((*blob_tree)->Blob().digest.hash());
+            auto raw_id =
+                FromHexString((*blob_tree)->blob_.GetDigest().hash());
             if (not raw_id) {
                 return std::nullopt;
             }
diff --git a/src/buildtool/execution_api/common/blob_tree.hpp b/src/buildtool/execution_api/common/blob_tree.hpp
index 47b3c3ae..2d5a8a25 100644
--- a/src/buildtool/execution_api/common/blob_tree.hpp
+++ b/src/buildtool/execution_api/common/blob_tree.hpp
@@ -38,7 +38,7 @@ class BlobTree {
     [[nodiscard]] auto Blob() const noexcept -> ArtifactBlob { return blob_; }
 
     [[nodiscard]] auto IsTree() const noexcept -> bool {
-        return blob_.digest.IsTree();
+        return blob_.GetDigest().IsTree();
     }
 
     /// \brief Create a `BlobTree` from a `DirectoryTree`.
diff --git a/src/buildtool/execution_api/common/common_api.cpp b/src/buildtool/execution_api/common/common_api.cpp
index 1c696731..68b49be3 100644
--- a/src/buildtool/execution_api/common/common_api.cpp
+++ b/src/buildtool/execution_api/common/common_api.cpp
@@ -106,7 +106,7 @@ auto CommonUploadBlobTree(BlobTreePtr const& blob_tree,
 {
     auto back_map = BackMap<ArtifactDigest, BlobTreePtr>::Make(
         &*blob_tree,
-        [](BlobTreePtr const& node) { return node->Blob().digest; });
+        [](BlobTreePtr const& node) { return node->Blob().GetDigest(); });
     if (back_map == nullptr) {
         Logger::Log(LogLevel::Error,
                     "Failed to retrieve the missing tree blobs for upload");
@@ -188,7 +188,7 @@ auto CommonUploadTreeNative(IExecutionApi const& api,
     auto tree_blob = (*blob_tree)->Blob();
     // Upload blob tree if tree is not available at the remote side (content
     // first).
-    if (not api.IsAvailable(tree_blob.digest)) {
+    if (not api.IsAvailable(tree_blob.GetDigest())) {
         if (not CommonUploadBlobTree(*blob_tree, api)) {
             Logger::Log(LogLevel::Debug,
                         "failed to upload blob tree for build root.");
@@ -201,7 +201,7 @@ auto CommonUploadTreeNative(IExecutionApi const& api,
             return std::nullopt;
         }
     }
-    return tree_blob.digest;
+    return tree_blob.GetDigest();
 }
 
 auto UpdateContainerAndUpload(
@@ -214,7 +214,7 @@ auto UpdateContainerAndUpload(
     // that we never store unnecessarily more data in the container than we need
     // per remote transfer.
     try {
-        if (blob.data->size() > MessageLimits::kMaxGrpcLength) {
+        if (blob.GetContentSize() > MessageLimits::kMaxGrpcLength) {
             // large blobs use individual stream upload
             if (not uploader(
                     std::unordered_set<ArtifactBlob>{{std::move(blob)}})) {
@@ -225,10 +225,10 @@ auto UpdateContainerAndUpload(
         if (not container->contains(blob)) {
             std::size_t content_size = 0;
             for (auto const& blob : *container) {
-                content_size += blob.data->size();
+                content_size += blob.GetContentSize();
             }
 
-            if (content_size + blob.data->size() >
+            if (content_size + blob.GetContentSize() >
                 MessageLimits::kMaxGrpcLength) {
                 // swap away from original container to allow move during
                 // upload
diff --git a/src/buildtool/execution_api/local/local_api.cpp b/src/buildtool/execution_api/local/local_api.cpp
index 5a332ab8..c9352d25 100644
--- a/src/buildtool/execution_api/local/local_api.cpp
+++ b/src/buildtool/execution_api/local/local_api.cpp
@@ -237,9 +237,10 @@ auto LocalApi::Upload(std::unordered_set<ArtifactBlob>&& blobs,
         blobs.end(),
         [&cas = local_context_.storage->CAS()](ArtifactBlob const& blob) {
             auto const cas_digest =
-                blob.digest.IsTree() ? cas.StoreTree(*blob.data)
-                                     : cas.StoreBlob(*blob.data, blob.is_exec);
-            return cas_digest and *cas_digest == blob.digest;
+                blob.GetDigest().IsTree()
+                    ? cas.StoreTree(*blob.ReadContent())
+                    : cas.StoreBlob(*blob.ReadContent(), blob.IsExecutable());
+            return cas_digest and *cas_digest == blob.GetDigest();
         });
 }
 
diff --git a/src/buildtool/execution_api/remote/bazel/bazel_api.cpp b/src/buildtool/execution_api/remote/bazel/bazel_api.cpp
index 39000194..ea6b71b9 100644
--- a/src/buildtool/execution_api/remote/bazel/bazel_api.cpp
+++ b/src/buildtool/execution_api/remote/bazel/bazel_api.cpp
@@ -73,9 +73,9 @@ namespace {
         return false;
     }
     for (auto& blob : blobs) {
-        auto const info = back_map->GetReference(blob.digest);
-        blob.is_exec =
-            info.has_value() and IsExecutableObject(info.value()->type);
+        auto const info = back_map->GetReference(blob.GetDigest());
+        blob.SetExecutable(info.has_value() and
+                           IsExecutableObject(info.value()->type));
         // Collect blob and upload to other CAS if transfer size reached.
         if (not UpdateContainerAndUpload(
                 &container,
@@ -239,7 +239,7 @@ auto BazelApi::CreateAction(
             auto const& type = artifacts_info[gpos].type;
             if (not FileSystemManager::WriteFileAs</*kSetEpochTime=*/true,
                                                    /*kSetWritable=*/true>(
-                    *blobs[pos].data, output_paths[gpos], type)) {
+                    *blobs[pos].ReadContent(), output_paths[gpos], type)) {
                 Logger::Log(LogLevel::Warning,
                             "staging to output path {} failed.",
                             output_paths[gpos].string());
@@ -486,7 +486,7 @@ auto BazelApi::CreateAction(
         -> std::optional<std::string> {
         auto reader = network_->CreateReader();
         if (auto blob = reader.ReadSingleBlob(artifact_info.digest)) {
-            return *blob->data;
+            return *blob->ReadContent();
         }
         return std::nullopt;
     }
@@ -520,7 +520,7 @@ auto BazelApi::CreateAction(
         targets->reserve(digests.size());
         for (auto blobs : reader.ReadIncrementally(&digests)) {
             for (auto const& blob : blobs) {
-                targets->emplace_back(*blob.data);
+                targets->emplace_back(*blob.ReadContent());
             }
         }
     });
diff --git a/src/buildtool/execution_api/remote/bazel/bazel_cas_client.cpp b/src/buildtool/execution_api/remote/bazel/bazel_cas_client.cpp
index ed9a00a5..20b4b750 100644
--- a/src/buildtool/execution_api/remote/bazel/bazel_cas_client.cpp
+++ b/src/buildtool/execution_api/remote/bazel/bazel_cas_client.cpp
@@ -46,7 +46,7 @@ namespace {
 
 [[nodiscard]] auto GetContentSize(ArtifactBlob const& blob) noexcept
     -> std::size_t {
-    return blob.data->size();
+    return blob.GetContentSize();
 }
 
 template <typename TRequest,
@@ -382,15 +382,15 @@ auto BazelCasClient::UpdateSingleBlob(std::string const& instance_name,
     logger_.Emit(LogLevel::Trace, [&blob]() {
         std::ostringstream oss{};
         oss << "upload single blob" << std::endl;
-        oss << fmt::format(" - {}", blob.digest.hash()) << std::endl;
+        oss << fmt::format(" - {}", blob.GetDigest().hash()) << std::endl;
         return oss.str();
     });
 
     if (not stream_->Write(instance_name, blob)) {
         logger_.Emit(LogLevel::Error,
                      "Failed to write {}:{}",
-                     blob.digest.hash(),
-                     blob.digest.size());
+                     blob.GetDigest().hash(),
+                     blob.GetDigest().size());
         return false;
     }
     return true;
@@ -599,8 +599,9 @@ auto BazelCasClient::BatchUpdateBlobs(std::string const& instance_name,
         request.set_instance_name(instance_name);
 
         auto& r = *request.add_requests();
-        (*r.mutable_digest()) = ArtifactDigestFactory::ToBazel(blob.digest);
-        r.set_data(*blob.data);
+        (*r.mutable_digest()) =
+            ArtifactDigestFactory::ToBazel(blob.GetDigest());
+        r.set_data(*blob.ReadContent());
         return request;
     };
 
@@ -623,7 +624,7 @@ auto BazelCasClient::BatchUpdateBlobs(std::string const& instance_name,
             logger_.Emit(
                 LogLevel::Warning,
                 "BatchUpdateBlobs: Failed to prepare request for {}",
-                it->digest.hash());
+                it->GetDigest().hash());
             ++it;
             continue;
         }
@@ -681,7 +682,7 @@ auto BazelCasClient::BatchUpdateBlobs(std::string const& instance_name,
         std::ostringstream oss{};
         oss << "upload blobs" << std::endl;
         for (auto const& blob : blobs) {
-            oss << fmt::format(" - {}", blob.digest.hash()) << std::endl;
+            oss << fmt::format(" - {}", blob.GetDigest().hash()) << std::endl;
         }
         oss << "received blobs" << std::endl;
         for (auto const& digest : updated) {
@@ -701,7 +702,8 @@ auto BazelCasClient::BatchUpdateBlobs(std::string const& instance_name,
     std::unordered_set<ArtifactBlob> missing_blobs;
     missing_blobs.reserve(missing);
     for (auto const& blob : blobs) {
-        auto bazel_digest = ArtifactDigestFactory::ToBazel(blob.digest);
+        auto bazel_digest =
+            ArtifactDigestFactory::ToBazel(blob.GetDigest());
         if (not updated.contains(bazel_digest)) {
             missing_blobs.emplace(blob);
         }
diff --git a/src/buildtool/execution_api/remote/bazel/bazel_network.cpp b/src/buildtool/execution_api/remote/bazel/bazel_network.cpp
index a11ff408..1a8e6d8f 100644
--- a/src/buildtool/execution_api/remote/bazel/bazel_network.cpp
+++ b/src/buildtool/execution_api/remote/bazel/bazel_network.cpp
@@ -90,7 +90,7 @@ auto BazelNetwork::DoUploadBlobs(
     // First upload all blobs that must use bytestream api because of their
     // size:
     for (auto it = blobs.begin(); it != blobs.end();) {
-        if (it->data->size() <= MessageLimits::kMaxGrpcLength) {
+        if (it->GetContentSize() <= MessageLimits::kMaxGrpcLength) {
             ++it;
             continue;
         }
@@ -114,7 +114,7 @@ auto BazelNetwork::UploadBlobs(std::unordered_set<ArtifactBlob>&& blobs,
                                bool skip_find_missing) noexcept -> bool {
     if (not skip_find_missing) {
         auto const back_map = BackMap<ArtifactDigest, ArtifactBlob>::Make(
-            &blobs, [](ArtifactBlob const& blob) { return blob.digest; });
+            &blobs, [](ArtifactBlob const& blob) { return blob.GetDigest(); });
         if (back_map == nullptr) {
             return false;
         }
diff --git a/src/buildtool/execution_api/remote/bazel/bazel_network_reader.cpp b/src/buildtool/execution_api/remote/bazel/bazel_network_reader.cpp
index ed637535..7fe90b72 100644
--- a/src/buildtool/execution_api/remote/bazel/bazel_network_reader.cpp
+++ b/src/buildtool/execution_api/remote/bazel/bazel_network_reader.cpp
@@ -67,7 +67,7 @@ auto BazelNetworkReader::ReadDirectory(ArtifactDigest const& digest)
 
     if (auto blob = ReadSingleBlob(digest)) {
         return BazelMsgFactory::MessageFromString<bazel_re::Directory>(
-            *blob->data);
+            *blob->ReadContent());
     }
     Logger::Log(
         LogLevel::Debug, "Directory {} not found in CAS", digest.hash());
@@ -94,7 +94,7 @@ auto BazelNetworkReader::ReadGitTree(ArtifactDigest const& digest)
             }
             bool valid = std::all_of(
                 blobs.begin(), blobs.end(), [](ArtifactBlob const& blob) {
-                    return PathIsNonUpwards(*blob.data);
+                    return PathIsNonUpwards(*blob.ReadContent());
                 });
             if (not valid) {
                 return false;
             }
@@ -104,7 +104,7 @@ auto BazelNetworkReader::ReadGitTree(ArtifactDigest const& digest)
         return true;
     };
 
-    std::string const& content = *read_blob->data;
+    std::string const content = *read_blob->ReadContent();
     return GitRepo::ReadTreeData(content,
                                  hash_function_.HashTreeData(content).Bytes(),
                                  check_symlinks,
@@ -122,7 +122,7 @@ auto BazelNetworkReader::DumpRawTree(Artifact::ObjectInfo const& info,
     }
 
     try {
-        return std::invoke(dumper, *read_blob->data);
+        return std::invoke(dumper, *read_blob->ReadContent());
     } catch (...) {
         return false;
     }
@@ -193,7 +193,8 @@ auto BazelNetworkReader::BatchReadBlobs(
 
     // Map digests to blobs for further lookup:
     auto const back_map = BackMap<ArtifactDigest, ArtifactBlob>::Make(
-        &batched_blobs, [](ArtifactBlob const& blob) { return blob.digest; });
+        &batched_blobs,
+        [](ArtifactBlob const& blob) { return blob.GetDigest(); });
 
     if (back_map == nullptr) {
         return {};
@@ -235,12 +236,12 @@ auto BazelNetworkReader::BatchReadBlobs(
 
 auto BazelNetworkReader::Validate(ArtifactBlob const& blob) const noexcept
     -> bool {
-    auto rehashed = blob.digest.IsTree()
+    auto rehashed = blob.GetDigest().IsTree()
                         ? ArtifactDigestFactory::HashDataAs<ObjectType::Tree>(
-                              hash_function_, *blob.data)
+                              hash_function_, *blob.ReadContent())
                         : ArtifactDigestFactory::HashDataAs<ObjectType::File>(
-                              hash_function_, *blob.data);
-    return rehashed == blob.digest;
+                              hash_function_, *blob.ReadContent());
+    return rehashed == blob.GetDigest();
 }
 
 auto BazelNetworkReader::GetMaxBatchTransferSize() const noexcept
diff --git a/src/buildtool/execution_api/remote/bazel/bazel_response.cpp b/src/buildtool/execution_api/remote/bazel/bazel_response.cpp
index ee207ac7..a0a621a4 100644
--- a/src/buildtool/execution_api/remote/bazel/bazel_response.cpp
+++ b/src/buildtool/execution_api/remote/bazel/bazel_response.cpp
@@ -67,7 +67,7 @@ auto BazelResponse::ReadStringBlob(bazel_re::Digest const& id) noexcept
     if (digest.has_value()) {
         auto reader = network_->CreateReader();
         if (auto blob = reader.ReadSingleBlob(*digest)) {
-            return *blob->data;
+            return *blob->ReadContent();
         }
     }
     Logger::Log(LogLevel::Warning,
@@ -240,11 +240,11 @@ auto BazelResponse::Populate() noexcept -> std::optional<std::string> {
     for (auto const& tree_blob : tree_blobs) {
         try {
             auto tree = BazelMsgFactory::MessageFromString<bazel_re::Tree>(
-                *tree_blob.data);
+                *tree_blob.ReadContent());
             if (not tree) {
                 return fmt::format(
                     "BazelResponse: failed to create Tree for {}",
-                    tree_blob.digest.hash());
+                    tree_blob.GetDigest().hash());
             }
 
             // The server does not store the Directory messages it just
@@ -266,7 +266,7 @@ auto BazelResponse::Populate() noexcept -> std::optional<std::string> {
             return fmt::format(
                 "BazelResponse: unexpected failure gathering digest for "
                 "{}:\n{}",
-                tree_blob.digest.hash(),
+                tree_blob.GetDigest().hash(),
                 ex.what());
         }
         ++pos;
@@ -291,7 +291,7 @@ auto BazelResponse::UploadTreeMessageDirectories(
     if (not rootdir_blob) {
         return unexpected{std::move(rootdir_blob).error()};
     }
-    auto const root_digest = rootdir_blob->digest;
+    auto const root_digest = rootdir_blob->GetDigest();
     // store or upload rootdir blob, taking maximum transfer size into account
     if (not UpdateContainerAndUpload(&dir_blobs,
                                      *std::move(rootdir_blob),
@@ -307,7 +307,7 @@ auto BazelResponse::UploadTreeMessageDirectories(
         if (not blob) {
             return unexpected{std::move(blob).error()};
         }
-        auto const blob_digest = blob->digest;
+        auto const blob_digest = blob->GetDigest();
         if (not UpdateContainerAndUpload(&dir_blobs,
                                          *std::move(blob),
                                          /*exception_is_fatal=*/false,
diff --git a/src/buildtool/execution_api/remote/bazel/bytestream_client.hpp b/src/buildtool/execution_api/remote/bazel/bytestream_client.hpp
index 432e31ab..99ad4e81 100644
--- a/src/buildtool/execution_api/remote/bazel/bytestream_client.hpp
+++ b/src/buildtool/execution_api/remote/bazel/bytestream_client.hpp
@@ -137,14 +137,15 @@ class ByteStreamClient {
         auto writer = stub_->Write(&ctx, &response);
 
         auto const resource_name = ByteStreamUtils::WriteRequest::ToString(
-            instance_name, uuid, blob.digest);
+            instance_name, uuid, blob.GetDigest());
 
         google::bytestream::WriteRequest request{};
         request.set_resource_name(resource_name);
         request.mutable_data()->reserve(ByteStreamUtils::kChunkSize);
 
+        auto const data_to_read = blob.ReadContent();
         auto const to_read = ::IncrementalReader::FromMemory(
-            ByteStreamUtils::kChunkSize, &*blob.data);
+            ByteStreamUtils::kChunkSize, data_to_read.get());
         if (not to_read.has_value()) {
             logger_.Emit(
                 LogLevel::Error,
@@ -169,7 +170,7 @@ class ByteStreamClient {
 
             request.set_write_offset(static_cast<int>(pos));
             request.set_finish_write(pos + chunk->size() >=
-                                     blob.data->size());
+                                     blob.GetContentSize());
             if (writer->Write(request)) {
                 pos += chunk->size();
                 ++it;
@@ -206,12 +207,12 @@ class ByteStreamClient {
             return false;
         }
         if (gsl::narrow<std::size_t>(response.committed_size()) !=
-            blob.data->size()) {
+            blob.GetContentSize()) {
             logger_.Emit(
                 LogLevel::Warning,
                 "Commited size {} is different from the original one {}.",
                 response.committed_size(),
-                blob.data->size());
+                blob.GetContentSize());
             return false;
         }
         return true;
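
For orientation, the change consistently replaces direct member access on ArtifactBlob (blob.digest, blob.data, blob.is_exec) with accessor calls (GetDigest(), ReadContent(), GetContentSize(), IsExecutable(), SetExecutable()). Below is a minimal sketch of that accessor surface as inferred from the call sites in this diff only; the class and digest stand-in names, member layout, exact return types, and noexcept qualifiers are assumptions, not the project's actual headers.

// Sketch only: assumed shapes reconstructed from the call sites above,
// not the real ArtifactBlob/ArtifactDigest declarations.
#include <cstddef>
#include <memory>
#include <string>
#include <utility>

// Stand-in for the project's ArtifactDigest; hash(), size() and IsTree() are
// the only members exercised by this diff.
struct DigestSketch {
    std::string hex_hash;
    std::size_t byte_size = 0;
    bool is_tree = false;

    [[nodiscard]] auto hash() const noexcept -> std::string const& {
        return hex_hash;
    }
    [[nodiscard]] auto size() const noexcept -> std::size_t {
        return byte_size;
    }
    [[nodiscard]] auto IsTree() const noexcept -> bool { return is_tree; }
};

// Accessor surface used by the diff.
class ArtifactBlobSketch final {
  public:
    ArtifactBlobSketch(DigestSketch digest,
                       std::shared_ptr<std::string const> content,
                       bool is_executable)
        : digest_{std::move(digest)},
          content_{std::move(content)},
          is_executable_{is_executable} {}

    // Replaces reads of the former public 'digest' member.
    [[nodiscard]] auto GetDigest() const noexcept -> DigestSketch const& {
        return digest_;
    }

    // Replaces 'blob.data->size()'.
    [[nodiscard]] auto GetContentSize() const noexcept -> std::size_t {
        return content_->size();
    }

    // Replaces '*blob.data'; returns an owning handle so callers can keep the
    // content alive across chunked reads.
    [[nodiscard]] auto ReadContent() const noexcept
        -> std::shared_ptr<std::string const> {
        return content_;
    }

    // Replace the former public 'is_exec' flag.
    [[nodiscard]] auto IsExecutable() const noexcept -> bool {
        return is_executable_;
    }
    void SetExecutable(bool value) noexcept { is_executable_ = value; }

  private:
    DigestSketch digest_;
    std::shared_ptr<std::string const> content_;
    bool is_executable_ = false;
};

In particular, ReadContent() appears to hand out a shared, owning handle to the content, which is why bytestream_client.hpp stores it in data_to_read before passing data_to_read.get() to ::IncrementalReader::FromMemory.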