author     Maksim Denisov <denisov.maksim@huawei.com>  2025-02-19 17:03:51 +0100
committer  Maksim Denisov <denisov.maksim@huawei.com>  2025-02-21 14:46:30 +0100
commit     0de6104fc0acf73d3d276c0c6af6e7e2fc1cc87a (patch)
tree       eedff3e2ca32eb7d8435b504ca19d6ecc2b79fb9
parent     14b7a5aff05509f0e7544972b6cbcf53840a542f (diff)
download   justbuild-0de6104fc0acf73d3d276c0c6af6e7e2fc1cc87a.tar.gz
HashFunction: Use IncrementalReader
-rw-r--r--  src/buildtool/crypto/TARGETS             2
-rw-r--r--  src/buildtool/crypto/hash_function.cpp  43
2 files changed, 25 insertions, 20 deletions
diff --git a/src/buildtool/crypto/TARGETS b/src/buildtool/crypto/TARGETS
index 89a12c87..31c0037b 100644
--- a/src/buildtool/crypto/TARGETS
+++ b/src/buildtool/crypto/TARGETS
@@ -21,6 +21,8 @@
, "private-deps":
[ ["src/buildtool/logging", "log_level"]
, ["src/buildtool/logging", "logging"]
+ , ["src/utils/cpp", "expected"]
+ , ["src/utils/cpp", "incremental_reader"]
]
, "stage": ["src", "buildtool", "crypto"]
}
diff --git a/src/buildtool/crypto/hash_function.cpp b/src/buildtool/crypto/hash_function.cpp
index 4be9033d..985ef888 100644
--- a/src/buildtool/crypto/hash_function.cpp
+++ b/src/buildtool/crypto/hash_function.cpp
@@ -16,11 +16,12 @@
#include <cstddef>
#include <exception>
-#include <fstream>
-#include <limits>
+#include <string_view>
#include "src/buildtool/logging/log_level.hpp"
#include "src/buildtool/logging/logger.hpp"
+#include "src/utils/cpp/expected.hpp"
+#include "src/utils/cpp/incremental_reader.hpp"
namespace {
[[nodiscard]] auto CreateGitTreeTag(std::size_t size) noexcept -> std::string {
@@ -73,35 +74,37 @@ auto HashFunction::HashTaggedFile(std::filesystem::path const& path,
     auto const size = std::filesystem::file_size(path);
 
     static constexpr std::size_t kChunkSize{4048};
-    static_assert(kChunkSize < std::numeric_limits<std::streamsize>::max(),
-                  "An overflow will occur while reading");
-
     auto hasher = MakeHasher();
     if (type_ == Type::GitSHA1) {
         hasher.Update(std::invoke(tag_creator, size));
     }
 
-    try {
-        auto chunk = std::string(kChunkSize, '\0');
-        std::ifstream file_reader(path.string(), std::ios::binary);
-        if (not file_reader.is_open()) {
-            return std::nullopt;
-        }
-
-        while (file_reader.good()) {
-            file_reader.read(chunk.data(),
-                             static_cast<std::streamsize>(kChunkSize));
+    auto const to_read = IncrementalReader::FromFile(kChunkSize, path);
+    if (not to_read.has_value()) {
+        Logger::Log(LogLevel::Debug,
+                    "Failed to create a reader for {}: {}",
+                    path.string(),
+                    to_read.error());
+        return std::nullopt;
+    }
 
-            auto const count = static_cast<std::size_t>(file_reader.gcount());
-            hasher.Update(chunk.substr(0, count));
+    try {
+        for (auto chunk : *to_read) {
+            if (not chunk.has_value()) {
+                Logger::Log(LogLevel::Debug,
+                            "Error while trying to hash {}: {}",
+                            path.string(),
+                            chunk.error());
+                return std::nullopt;
+            }
+            hasher.Update(std::string{*chunk});
         }
-        file_reader.close();
-        return std::make_pair(std::move(hasher).Finalize(), size);
     } catch (std::exception const& e) {
         Logger::Log(LogLevel::Debug,
                     "Error while trying to hash {}: {}",
                     path.string(),
                     e.what());
+        return std::nullopt;
     }
-    return std::nullopt;
+    return std::make_pair(std::move(hasher).Finalize(), size);
 }
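
For readers browsing this commit, the consumption pattern that HashTaggedFile switches to can be seen in isolation below. This is a minimal sketch based only on what the diff itself shows: IncrementalReader::FromFile(chunk_size, path) returns an expected-like value, iterating the reader yields expected chunks, and iteration may throw. The helper name CountFileBytes is hypothetical and not part of the commit.

#include <cstddef>
#include <exception>
#include <filesystem>
#include <optional>

#include "src/utils/cpp/incremental_reader.hpp"

// Hypothetical helper: totals the bytes delivered by an IncrementalReader,
// mirroring the error handling used in HashFunction::HashTaggedFile above.
[[nodiscard]] auto CountFileBytes(std::filesystem::path const& path)
    -> std::optional<std::size_t> {
    static constexpr std::size_t kChunkSize{4048};

    // Constructing the reader may fail; the error travels through the
    // expected's error channel instead of an exception.
    auto const to_read = IncrementalReader::FromFile(kChunkSize, path);
    if (not to_read.has_value()) {
        return std::nullopt;
    }

    std::size_t total{};
    try {
        for (auto chunk : *to_read) {
            // Each chunk is itself an expected value; one failed read aborts
            // the whole traversal.
            if (not chunk.has_value()) {
                return std::nullopt;
            }
            total += chunk->size();
        }
    } catch (std::exception const&) {
        // Iteration is not assumed to be exception-free, hence the guard,
        // just as in the commit itself.
        return std::nullopt;
    }
    return total;
}

The structure matches the new body of HashTaggedFile: one early return if the reader cannot be constructed, one per-chunk check inside the loop, and a try/catch around the traversal so failures are reported via std::nullopt rather than escaping as exceptions.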