13 files changed, 42 insertions, 35 deletions
diff --git a/src/buildtool/execution_api/common/TARGETS b/src/buildtool/execution_api/common/TARGETS
index 956fb3a3..50e556a9 100644
--- a/src/buildtool/execution_api/common/TARGETS
+++ b/src/buildtool/execution_api/common/TARGETS
@@ -96,6 +96,6 @@
   , "name": ["content_blob_container"]
   , "hdrs": ["content_blob_container.hpp"]
   , "stage": ["src", "buildtool", "execution_api", "common"]
-  , "deps": [["src/utils/cpp", "transformed_range"]]
+  , "deps": [["src/utils/cpp", "transformed_range"], ["@", "gsl", "", "gsl"]]
   }
 }
diff --git a/src/buildtool/execution_api/common/content_blob_container.hpp b/src/buildtool/execution_api/common/content_blob_container.hpp
index fcd10f09..f2d7476e 100644
--- a/src/buildtool/execution_api/common/content_blob_container.hpp
+++ b/src/buildtool/execution_api/common/content_blob_container.hpp
@@ -16,22 +16,29 @@
 #define INCLUDED_SRC_BUILDTOOL_EXECUTION_API_COMMON_CONTENT_BLOB_CONTAINER_HPP
 
 #include <cstddef>
+#include <memory>
 #include <string>
 #include <unordered_map>
 #include <utility> //std::move
 #include <vector>
 
+#include "gsl/gsl"
 #include "src/utils/cpp/transformed_range.hpp"
 
 template <typename TDigest>
 struct ContentBlob final {
     ContentBlob(TDigest mydigest, std::string mydata, bool is_exec) noexcept
         : digest{std::move(mydigest)},
-          data{std::move(mydata)},
+          data{std::make_shared<std::string>(std::move(mydata))},
           is_exec{is_exec} {}
 
+    ContentBlob(TDigest mydigest,
+                gsl::not_null<std::shared_ptr<std::string>> const& mydata,
+                bool is_exec) noexcept
+        : digest{std::move(mydigest)}, data(mydata), is_exec{is_exec} {}
+
     TDigest digest{};
-    std::string data{};
+    std::shared_ptr<std::string> data{};
     bool is_exec{};
 };
diff --git a/src/buildtool/execution_api/local/local_api.hpp b/src/buildtool/execution_api/local/local_api.hpp
index 153f9971..d8f0481d 100644
--- a/src/buildtool/execution_api/local/local_api.hpp
+++ b/src/buildtool/execution_api/local/local_api.hpp
@@ -272,8 +272,8 @@ class LocalApi final : public IExecutionApi {
             auto const is_tree = NativeSupport::IsTree(
                 static_cast<bazel_re::Digest>(blob.digest).hash());
             auto cas_digest =
-                is_tree ? storage_->CAS().StoreTree(blob.data)
-                        : storage_->CAS().StoreBlob(blob.data, blob.is_exec);
+                is_tree ? storage_->CAS().StoreTree(*blob.data)
+                        : storage_->CAS().StoreBlob(*blob.data, blob.is_exec);
             if (not cas_digest or
                 not std::equal_to<bazel_re::Digest>{}(
                     *cas_digest, blob.digest)) {
                 return false;
diff --git a/src/buildtool/execution_api/remote/bazel/bazel_api.cpp b/src/buildtool/execution_api/remote/bazel/bazel_api.cpp
index e56228eb..00f7308c 100644
--- a/src/buildtool/execution_api/remote/bazel/bazel_api.cpp
+++ b/src/buildtool/execution_api/remote/bazel/bazel_api.cpp
@@ -275,7 +275,7 @@ auto BazelApi::CreateAction(
         auto const& type = artifacts_info[gpos].type;
         if (not FileSystemManager::WriteFileAs</*kSetEpochTime=*/true,
                                                /*kSetWritable=*/true>(
-                blobs[pos].data, output_paths[gpos], type)) {
+                *blobs[pos].data, output_paths[gpos], type)) {
             Logger::Log(LogLevel::Warning,
                         "staging to output path {} failed.",
                         output_paths[gpos].string());
@@ -432,7 +432,7 @@ auto BazelApi::CreateAction(
         -> std::optional<std::string> {
         auto blobs = network_->ReadBlobs({artifact_info.digest}).Next();
         if (blobs.size() == 1) {
-            return blobs.at(0).data;
+            return *blobs.at(0).data;
         }
         return std::nullopt;
     }
@@ -465,7 +465,7 @@ auto BazelApi::CreateAction(
     targets->reserve(digests.size());
     while (not blobs.empty()) {
         for (auto const& blob : blobs) {
-            targets->emplace_back(blob.data);
+            targets->emplace_back(*blob.data);
         }
         blobs = reader.Next();
     }
diff --git a/src/buildtool/execution_api/remote/bazel/bazel_cas_client.cpp b/src/buildtool/execution_api/remote/bazel/bazel_cas_client.cpp
index 5feabb57..e5976ead 100644
--- a/src/buildtool/execution_api/remote/bazel/bazel_cas_client.cpp
+++ b/src/buildtool/execution_api/remote/bazel/bazel_cas_client.cpp
@@ -320,7 +320,7 @@ auto BazelCasClient::UpdateSingleBlob(std::string const& instance_name,
                            uuid,
                            blob.digest.hash(),
                            blob.digest.size_bytes()),
-        blob.data);
+        *blob.data);
     if (!ok) {
         logger_.Emit(LogLevel::Error,
                      "Failed to write {}:{}",
@@ -654,7 +654,7 @@ auto BazelCasClient::CreateUpdateBlobsSingleRequest(BazelBlob const& b) noexcept
     bazel_re::BatchUpdateBlobsRequest_Request r{};
     r.set_allocated_digest(
         gsl::owner<bazel_re::Digest*>{new bazel_re::Digest{b.digest}});
-    r.set_data(b.data);
+    r.set_data(*b.data);
     return r;
 }
diff --git a/src/buildtool/execution_api/remote/bazel/bazel_network.cpp b/src/buildtool/execution_api/remote/bazel/bazel_network.cpp
index a537dcdc..85bb53d9 100644
--- a/src/buildtool/execution_api/remote/bazel/bazel_network.cpp
+++ b/src/buildtool/execution_api/remote/bazel/bazel_network.cpp
@@ -84,7 +84,7 @@ auto BazelNetwork::DoUploadBlobs(T_Iter const& first,
     auto it = std::stable_partition(
         sorted.begin(), sorted.end(), [](BazelBlob const* x) {
-            return x->data.size() <= kMaxBatchTransferSize;
+            return x->data->size() <= kMaxBatchTransferSize;
         });
     auto digests_count =
         cas_->BatchUpdateBlobs(instance_name_, sorted.begin(), it);
diff --git a/src/buildtool/execution_api/remote/bazel/bazel_network_reader.cpp b/src/buildtool/execution_api/remote/bazel/bazel_network_reader.cpp
index 90790ef4..9c5b4b4b 100644
--- a/src/buildtool/execution_api/remote/bazel/bazel_network_reader.cpp
+++ b/src/buildtool/execution_api/remote/bazel/bazel_network_reader.cpp
@@ -48,7 +48,7 @@ auto BazelNetworkReader::ReadDirectory(ArtifactDigest const& digest)
     auto blobs = network_.ReadBlobs({digest}).Next();
     if (blobs.size() == 1) {
         return BazelMsgFactory::MessageFromString<bazel_re::Directory>(
-            blobs.at(0).data);
+            *blobs.at(0).data);
     }
     Logger::Log(
         LogLevel::Debug, "Directory {} not found in CAS", digest.hash());
@@ -59,7 +59,7 @@ auto BazelNetworkReader::ReadGitTree(ArtifactDigest const& digest)
     const noexcept -> std::optional<GitRepo::tree_entries_t> {
     auto blobs = network_.ReadBlobs({digest}).Next();
     if (blobs.size() == 1) {
-        auto const& content = blobs.at(0).data;
+        auto const& content = *blobs.at(0).data;
         auto check_symlinks =
             [this](std::vector<bazel_re::Digest> const& ids) {
                 auto size = ids.size();
                 auto reader = network_.ReadBlobs(ids);
@@ -72,7 +72,7 @@ auto BazelNetworkReader::ReadGitTree(ArtifactDigest const& digest)
                     return false;
                 }
                 for (auto const& blob : blobs) {
-                    if (not PathIsNonUpwards(blob.data)) {
+                    if (not PathIsNonUpwards(*blob.data)) {
                         return false;
                     }
                 }
@@ -102,7 +102,7 @@ auto BazelNetworkReader::DumpRawTree(Artifact::ObjectInfo const& info,
     }
 
     try {
-        return std::invoke(dumper, blobs.at(0).data);
+        return std::invoke(dumper, *blobs.at(0).data);
     } catch (...) {
         return false;
     }
diff --git a/src/buildtool/execution_api/remote/bazel/bazel_response.cpp b/src/buildtool/execution_api/remote/bazel/bazel_response.cpp
index 9db3bb94..39196bf9 100644
--- a/src/buildtool/execution_api/remote/bazel/bazel_response.cpp
+++ b/src/buildtool/execution_api/remote/bazel/bazel_response.cpp
@@ -44,7 +44,7 @@ auto BazelResponse::ReadStringBlob(bazel_re::Digest const& id) noexcept
                     id.hash());
         return std::string{};
     }
-    return blobs[0].data;
+    return *blobs[0].data;
 }
 
 auto BazelResponse::Artifacts() noexcept -> ArtifactInfos {
@@ -181,7 +181,7 @@ auto BazelResponse::Populate() noexcept -> bool {
     for (auto const& tree_blob : tree_blobs) {
         try {
             auto tree = BazelMsgFactory::MessageFromString<bazel_re::Tree>(
-                tree_blob.data);
+                *tree_blob.data);
             if (not tree) {
                 return false;
             }
diff --git a/test/buildtool/execution_api/bazel/bazel_cas_client.test.cpp b/test/buildtool/execution_api/bazel/bazel_cas_client.test.cpp
index d9fec4dc..8daceb31 100644
--- a/test/buildtool/execution_api/bazel/bazel_cas_client.test.cpp
+++ b/test/buildtool/execution_api/bazel/bazel_cas_client.test.cpp
@@ -58,7 +58,7 @@ TEST_CASE("Bazel internals: CAS Client", "[execution_api]") {
            instance_name, to_read.begin(), to_read.end());
        REQUIRE(blobs.size() == 1);
        CHECK(std::equal_to<bazel_re::Digest>{}(blobs[0].digest, digest));
-       CHECK(blobs[0].data == content);
+       CHECK(*blobs[0].data == content);
    }

    SECTION("Invalid digest and blob") {
diff --git a/test/buildtool/execution_api/bazel/bazel_msg_factory.test.cpp b/test/buildtool/execution_api/bazel/bazel_msg_factory.test.cpp
index 730420e9..0f9039d9 100644
--- a/test/buildtool/execution_api/bazel/bazel_msg_factory.test.cpp
+++ b/test/buildtool/execution_api/bazel/bazel_msg_factory.test.cpp
@@ -45,7 +45,7 @@ TEST_CASE("Bazel internals: MessageFactory", "[execution_api]") {
     CHECK(link_blob);
 
     // both files are the same and should result in identical blobs
-    CHECK(file1_blob->data == file2_blob->data);
+    CHECK(*file1_blob->data == *file2_blob->data);
     CHECK(file1_blob->digest.hash() == file2_blob->digest.hash());
     CHECK(file1_blob->digest.size_bytes() == file2_blob->digest.size_bytes());
diff --git a/test/buildtool/execution_api/bazel/bazel_network.test.cpp b/test/buildtool/execution_api/bazel/bazel_network.test.cpp
index 9125fbc4..dff26a44 100644
--- a/test/buildtool/execution_api/bazel/bazel_network.test.cpp
+++ b/test/buildtool/execution_api/bazel/bazel_network.test.cpp
@@ -63,11 +63,11 @@ TEST_CASE("Bazel network: write/read blobs", "[execution_api]") {
 
     // Check order maintained
     REQUIRE(blobs.size() == 5);
-    CHECK(blobs[0].data == content_foo);
-    CHECK(blobs[1].data == content_bar);
-    CHECK(blobs[2].data == content_baz);
-    CHECK(blobs[3].data == content_bar);
-    CHECK(blobs[4].data == content_foo);
+    CHECK(*blobs[0].data == content_foo);
+    CHECK(*blobs[1].data == content_bar);
+    CHECK(*blobs[2].data == content_baz);
+    CHECK(*blobs[3].data == content_bar);
+    CHECK(*blobs[4].data == content_foo);
 }
 
 TEST_CASE("Bazel network: read blobs with unknown size", "[execution_api]") {
@@ -110,6 +110,6 @@ TEST_CASE("Bazel network: read blobs with unknown size", "[execution_api]") {
 
     // Check order maintained
     REQUIRE(blobs.size() == 2);
-    CHECK(blobs[0].data == content_foo);
-    CHECK(blobs[1].data == content_bar);
+    CHECK(*blobs[0].data == content_foo);
+    CHECK(*blobs[1].data == content_bar);
 }
diff --git a/test/buildtool/execution_api/bazel/bytestream_client.test.cpp b/test/buildtool/execution_api/bazel/bytestream_client.test.cpp
index 285ec7d7..fea6edea 100644
--- a/test/buildtool/execution_api/bazel/bytestream_client.test.cpp
+++ b/test/buildtool/execution_api/bazel/bytestream_client.test.cpp
@@ -130,7 +130,7 @@ TEST_CASE("ByteStream Client: Transfer multiple blobs", "[execution_api]") {
                               blob.digest.hash(),
                               blob.digest.size_bytes());
             },
-            [](auto const& blob) { return blob.data; }));
+            [](auto const& blob) { return *blob.data; }));
 
         SECTION("Download small blobs") {
             std::vector<std::string> contents{};
@@ -146,9 +146,9 @@ TEST_CASE("ByteStream Client: Transfer multiple blobs", "[execution_api]") {
                     contents.emplace_back(std::move(data));
                 });
             REQUIRE(contents.size() == 3);
-            CHECK(contents[0] == foo.data);
-            CHECK(contents[1] == bar.data);
-            CHECK(contents[2] == baz.data);
+            CHECK(contents[0] == *foo.data);
+            CHECK(contents[1] == *bar.data);
+            CHECK(contents[2] == *baz.data);
         }
     }
 
@@ -183,7 +183,7 @@ TEST_CASE("ByteStream Client: Transfer multiple blobs", "[execution_api]") {
                               blob.digest.hash(),
                               blob.digest.size_bytes());
             },
-            [](auto const& blob) { return blob.data; }));
+            [](auto const& blob) { return *blob.data; }));
 
         SECTION("Download large blobs") {
             std::vector<std::string> contents{};
@@ -199,9 +199,9 @@ TEST_CASE("ByteStream Client: Transfer multiple blobs", "[execution_api]") {
                     contents.emplace_back(std::move(data));
                 });
             REQUIRE(contents.size() == 3);
-            CHECK(contents[0] == foo.data);
-            CHECK(contents[1] == bar.data);
-            CHECK(contents[2] == baz.data);
+            CHECK(contents[0] == *foo.data);
+            CHECK(contents[1] == *bar.data);
+            CHECK(contents[2] == *baz.data);
         }
     }
 }
diff --git a/test/buildtool/execution_engine/executor/executor.test.cpp b/test/buildtool/execution_engine/executor/executor.test.cpp
index 69e06df9..8eece895 100644
--- a/test/buildtool/execution_engine/executor/executor.test.cpp
+++ b/test/buildtool/execution_engine/executor/executor.test.cpp
@@ -179,7 +179,7 @@ class TestApi : public IExecutionApi {
         auto blob_range = blobs.Blobs();
         return std::all_of(
             blob_range.begin(), blob_range.end(), [this](auto const& blob) {
-                return config_.artifacts[blob.data]
+                return config_.artifacts[*blob.data]
                     .uploads  // for local artifacts
                     or config_.artifacts[blob.digest.hash()]
                            .uploads;  // for known and action artifacts
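Note (not part of the commit): the following standalone sketch mirrors the updated ContentBlob from content_blob_container.hpp and shows the two ways a blob can now be constructed, plus the dereference (`*blob.data`) that all call sites above switch to. The TestDigest type and the sample payload are made up for illustration; the gsl::not_null wrapper on the second constructor is dropped here so the sketch compiles without the GSL dependency.

```cpp
#include <cassert>
#include <memory>
#include <string>
#include <utility>

// Hypothetical stand-in for the real digest template argument.
struct TestDigest {
    std::string hash{};
};

// Mirrors the new layout: the payload lives behind a shared_ptr.
template <typename TDigest>
struct ContentBlob final {
    // Takes ownership of a string by wrapping it in a shared_ptr.
    ContentBlob(TDigest mydigest, std::string mydata, bool is_exec) noexcept
        : digest{std::move(mydigest)},
          data{std::make_shared<std::string>(std::move(mydata))},
          is_exec{is_exec} {}

    // Shares an already-allocated string without copying it
    // (the real constructor takes gsl::not_null<std::shared_ptr<std::string>> const&).
    ContentBlob(TDigest mydigest,
                std::shared_ptr<std::string> mydata,
                bool is_exec) noexcept
        : digest{std::move(mydigest)},
          data{std::move(mydata)},
          is_exec{is_exec} {}

    TDigest digest{};
    std::shared_ptr<std::string> data{};
    bool is_exec{};
};

int main() {
    // Old-style construction still works; the string is moved into the blob.
    ContentBlob<TestDigest> moved{TestDigest{"aaa"}, "large payload", false};

    // New-style construction: two blobs referring to the same buffer.
    auto payload = std::make_shared<std::string>("large payload");
    ContentBlob<TestDigest> a{TestDigest{"aaa"}, payload, false};
    ContentBlob<TestDigest> b{TestDigest{"aaa"}, payload, true};

    // Call sites now dereference the pointer, as in the hunks above.
    assert(*a.data == *b.data);
    assert(a.data.get() == b.data.get());  // shared, not copied
    assert(*moved.data == *a.data);
    return 0;
}
```

Presumably the point of holding the payload behind a shared_ptr is that the same (potentially large) file content can be referenced from several blob objects and upload requests without duplicating the buffer, at the cost of one extra indirection at the call sites changed above.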