From 2ba863c83b946fb08acfa2257d838987d373208f Mon Sep 17 00:00:00 2001
From: Maksim Denisov
Date: Tue, 17 Sep 2024 12:06:50 +0200
Subject: Implement ByteStreamUtils::WriteRequest class

...and remove split serialization/deserialization logic.
---
 .../execution_api/bazel/bytestream_client.test.cpp | 27 +++++++++------------------
 1 file changed, 9 insertions(+), 18 deletions(-)

diff --git a/test/buildtool/execution_api/bazel/bytestream_client.test.cpp b/test/buildtool/execution_api/bazel/bytestream_client.test.cpp
index 80b78d61..16adb0cd 100644
--- a/test/buildtool/execution_api/bazel/bytestream_client.test.cpp
+++ b/test/buildtool/execution_api/bazel/bytestream_client.test.cpp
@@ -53,12 +53,9 @@ TEST_CASE("ByteStream Client: Transfer single blob", "[execution_api]") {
         auto digest = BazelDigestFactory::HashDataAs(
             hash_function, content);

-        CHECK(stream.Write(fmt::format("{}/uploads/{}/blobs/{}/{}",
-                                       instance_name,
-                                       uuid,
-                                       digest.hash(),
-                                       digest.size_bytes()),
-                           content));
+        CHECK(stream.Write(
+            ByteStreamUtils::WriteRequest{instance_name, uuid, digest},
+            content));

         SECTION("Download small blob") {
             auto const data = stream.Read(
@@ -77,12 +74,9 @@ TEST_CASE("ByteStream Client: Transfer single blob", "[execution_api]") {
         auto digest = BazelDigestFactory::HashDataAs(
             hash_function, other_content);

-        CHECK(not stream.Write(fmt::format("{}/uploads/{}/blobs/{}/{}",
-                                           instance_name,
-                                           uuid,
-                                           digest.hash(),
-                                           digest.size_bytes()),
-                               content));
+        CHECK(not stream.Write(
+            ByteStreamUtils::WriteRequest{instance_name, uuid, digest},
+            content));
     }

     SECTION("Upload large blob") {
@@ -99,12 +93,9 @@ TEST_CASE("ByteStream Client: Transfer single blob", "[execution_api]") {
         auto digest = BazelDigestFactory::HashDataAs(
             hash_function, content);

-        CHECK(stream.Write(fmt::format("{}/uploads/{}/blobs/{}/{}",
-                                       instance_name,
-                                       uuid,
-                                       digest.hash(),
-                                       digest.size_bytes()),
-                           content));
+        CHECK(stream.Write(
+            ByteStreamUtils::WriteRequest{instance_name, uuid, digest},
+            content));

         SECTION("Download large blob") {
             auto const data = stream.Read(
-- 
cgit v1.2.3
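
Note on the change: the test call sites stop hand-formatting the ByteStream upload resource name ("{instance_name}/uploads/{uuid}/blobs/{hash}/{size}") and instead pass a ByteStreamUtils::WriteRequest that bundles the instance name, UUID, and digest. The sketch below is only an illustration of what such a helper might look like, inferred from the removed fmt::format calls; the constructor shape, the ToString() method, the member names, and the stand-in Digest struct are assumptions, not the actual justbuild implementation (which presumably also covers the parsing direction mentioned in the commit message).

// Minimal, self-contained sketch (assumed API, not the real sources).
// Digest here stands in for the bazel_re::Digest proto message.
#include <cstdint>
#include <string>
#include <utility>

#include <fmt/format.h>

struct Digest {
    std::string hash;
    std::int64_t size_bytes;
};

class WriteRequest final {
  public:
    WriteRequest(std::string instance_name, std::string uuid, Digest digest) noexcept
        : instance_name_{std::move(instance_name)},
          uuid_{std::move(uuid)},
          digest_{std::move(digest)} {}

    // Serialize to the upload resource name that the removed fmt::format
    // calls built by hand: "{instance_name}/uploads/{uuid}/blobs/{hash}/{size}"
    [[nodiscard]] auto ToString() const -> std::string {
        return fmt::format("{}/uploads/{}/blobs/{}/{}",
                           instance_name_,
                           uuid_,
                           digest_.hash,
                           digest_.size_bytes);
    }

  private:
    std::string instance_name_;
    std::string uuid_;
    Digest digest_;
};

Under that assumption, a caller would write stream.Write(WriteRequest{instance_name, uuid, digest}, content) as in the updated test, and the client would obtain the resource name via something like ToString() internally instead of each test duplicating the format string.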