From ee6f54124360d8a72b9b545dcc581f3c80d3c8a9 Mon Sep 17 00:00:00 2001
From: Sascha Roloff
Date: Mon, 11 Jul 2022 19:55:10 +0200
Subject: Modified artifact digest to provide wire digest on demand

---
 .../execution_api/bazel/bytestream_client.test.cpp | 29 ++++++++++++++--------
 1 file changed, 19 insertions(+), 10 deletions(-)

diff --git a/test/buildtool/execution_api/bazel/bytestream_client.test.cpp b/test/buildtool/execution_api/bazel/bytestream_client.test.cpp
index 3187c99c..6ce67e53 100644
--- a/test/buildtool/execution_api/bazel/bytestream_client.test.cpp
+++ b/test/buildtool/execution_api/bazel/bytestream_client.test.cpp
@@ -18,18 +18,21 @@ TEST_CASE("ByteStream Client: Transfer single blob", "[execution_api]") {
         std::string content("foobar");
 
         // digest of "foobar"
-        auto digest = ArtifactDigest::Create(content);
+        auto digest =
+            static_cast<bazel_re::Digest>(ArtifactDigest::Create(content));
 
         CHECK(stream.Write(fmt::format("{}/uploads/{}/blobs/{}/{}",
                                        instance_name,
                                        uuid,
                                        digest.hash(),
-                                       digest.size()),
+                                       digest.size_bytes()),
                            content));
 
         SECTION("Download small blob") {
-            auto data = stream.Read(fmt::format(
-                "{}/blobs/{}/{}", instance_name, digest.hash(), digest.size()));
+            auto data = stream.Read(fmt::format("{}/blobs/{}/{}",
+                                                instance_name,
+                                                digest.hash(),
+                                                digest.size_bytes()));
 
             CHECK(data == content);
         }
@@ -44,25 +47,31 @@ TEST_CASE("ByteStream Client: Transfer single blob", "[execution_api]") {
         }
 
         // digest of "instance_nameinstance_nameinstance_..."
-        auto digest = ArtifactDigest::Create(content);
+        auto digest =
+            static_cast<bazel_re::Digest>(ArtifactDigest::Create(content));
 
         CHECK(stream.Write(fmt::format("{}/uploads/{}/blobs/{}/{}",
                                        instance_name,
                                        uuid,
                                        digest.hash(),
-                                       digest.size()),
+                                       digest.size_bytes()),
                            content));
 
         SECTION("Download large blob") {
-            auto data = stream.Read(fmt::format(
-                "{}/blobs/{}/{}", instance_name, digest.hash(), digest.size()));
+            auto data = stream.Read(fmt::format("{}/blobs/{}/{}",
+                                                instance_name,
+                                                digest.hash(),
+                                                digest.size_bytes()));
 
             CHECK(data == content);
         }
 
         SECTION("Incrementally download large blob") {
-            auto reader = stream.IncrementalRead(fmt::format(
-                "{}/blobs/{}/{}", instance_name, digest.hash(), digest.size()));
+            auto reader =
+                stream.IncrementalRead(fmt::format("{}/blobs/{}/{}",
+                                                   instance_name,
+                                                   digest.hash(),
+                                                   digest.size_bytes()));
 
             std::string data{};
             auto chunk = reader.Next();
--
cgit v1.2.3
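
For context, below is a minimal sketch (not part of the patch) of the pattern the updated test relies on: ArtifactDigest now yields the wire digest only on demand, so the test obtains it via an explicit cast (assumed here to be to bazel_re::Digest, the remote-execution protobuf message) and reads hash() and size_bytes() from that wire digest when composing the ByteStream resource names. The WireDigest stand-in type and the helpers MakeUploadPath/MakeReadPath are hypothetical names for illustration only.

// Illustrative sketch only; WireDigest, MakeUploadPath and MakeReadPath are
// hypothetical and do not exist in the repository. The sketch shows how the
// ByteStream resource names used in the test are assembled from the wire
// digest's hash() and size_bytes() accessors.
#include <cstdint>
#include <string>

#include "fmt/format.h"

// Stand-in for the wire digest type (assumed: bazel_re::Digest, which exposes
// hash() and size_bytes()).
struct WireDigest {
    std::string hash_value;
    std::int64_t size_value;
    [[nodiscard]] auto hash() const noexcept -> std::string const& {
        return hash_value;
    }
    [[nodiscard]] auto size_bytes() const noexcept -> std::int64_t {
        return size_value;
    }
};

// Upload resource name: {instance_name}/uploads/{uuid}/blobs/{hash}/{size}
[[nodiscard]] auto MakeUploadPath(std::string const& instance_name,
                                  std::string const& uuid,
                                  WireDigest const& digest) -> std::string {
    return fmt::format("{}/uploads/{}/blobs/{}/{}",
                       instance_name,
                       uuid,
                       digest.hash(),
                       digest.size_bytes());
}

// Read resource name: {instance_name}/blobs/{hash}/{size}
[[nodiscard]] auto MakeReadPath(std::string const& instance_name,
                                WireDigest const& digest) -> std::string {
    return fmt::format(
        "{}/blobs/{}/{}", instance_name, digest.hash(), digest.size_bytes());
}

With helpers like these, the upload check in the test body would read, for example, CHECK(stream.Write(MakeUploadPath(instance_name, uuid, digest), content)); the patch instead inlines the fmt::format calls, which is why every call site had to switch from size() to size_bytes().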