// Copyright 2023 Huawei Cloud Computing Technology Co., Ltd.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//      http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#ifndef INCLUDED_SRC_BUILDTOOL_EXECUTION_API_GIT_GIT_API_HPP
#define INCLUDED_SRC_BUILDTOOL_EXECUTION_API_GIT_GIT_API_HPP

#include <cstddef>
#include <cstdio>
#include <filesystem>
#include <map>
#include <optional>
#include <string>
#include <vector>

#include "gsl/gsl"
#include "nlohmann/json.hpp"
#include "src/buildtool/common/artifact_digest.hpp"
#include "src/buildtool/common/repository_config.hpp"
#include "src/buildtool/execution_api/bazel_msg/bazel_msg_factory.hpp"
#include "src/buildtool/execution_api/common/artifact_blob_container.hpp"
#include "src/buildtool/execution_api/common/common_api.hpp"
#include "src/buildtool/execution_api/common/execution_api.hpp"
#include "src/buildtool/file_system/file_system_manager.hpp"
#include "src/buildtool/logging/log_level.hpp"
#include "src/buildtool/logging/logger.hpp"

/// \brief Execution API for read-only access to a repository's git CAS.
class GitApi final : public IExecutionApi { public: GitApi() = delete; explicit GitApi(gsl::not_null const& repo_config) : repo_config_{repo_config} {} auto CreateAction( ArtifactDigest const& /*root_digest*/, std::vector const& /*command*/, std::vector const& /*output_files*/, std::vector const& /*output_dirs*/, std::map const& /*env_vars*/, std::map const& /*properties*/) noexcept -> IExecutionAction::Ptr final { // Execution not supported from git cas return nullptr; } // NOLINTNEXTLINE(misc-no-recursion,google-default-arguments) [[nodiscard]] auto RetrieveToPaths( std::vector const& artifacts_info, std::vector const& output_paths, std::optional> const& /*alternative*/ = std::nullopt) noexcept -> bool override { if (artifacts_info.size() != output_paths.size()) { Logger::Log(LogLevel::Error, "different number of digests and output paths."); return false; } for (std::size_t i{}; i < artifacts_info.size(); ++i) { auto const& info = artifacts_info[i]; if (IsTreeObject(info.type)) { auto tree = repo_config_->ReadTreeFromGitCAS(info.digest.hash()); if (not tree) { return false; } for (auto const& [path, entry] : *tree) { if (not RetrieveToPaths( {Artifact::ObjectInfo{ .digest = ArtifactDigest{entry->Hash(), /*size*/ 0, entry->IsTree()}, .type = entry->Type(), .failed = false}}, {output_paths[i] / path})) { return false; } } } else { auto blob = repo_config_->ReadBlobFromGitCAS(info.digest.hash()); if (not blob) { return false; } if (not FileSystemManager::CreateDirectory( output_paths[i].parent_path()) or not FileSystemManager::WriteFileAs( *blob, output_paths[i], info.type)) { Logger::Log(LogLevel::Error, "staging to output path {} failed.", output_paths[i].string()); return false; } } } return true; } [[nodiscard]] auto RetrieveToFds( std::vector const& artifacts_info, std::vector const& fds, bool raw_tree) noexcept -> bool override { if (artifacts_info.size() != fds.size()) { Logger::Log(LogLevel::Error, "different number of digests and file descriptors."); return 
false; } for (std::size_t i{}; i < artifacts_info.size(); ++i) { auto fd = fds[i]; auto const& info = artifacts_info[i]; if (IsTreeObject(info.type) and not raw_tree) { auto tree = repo_config_->ReadTreeFromGitCAS(info.digest.hash()); if (not tree) { Logger::Log(LogLevel::Debug, "Tree {} not known to git", info.digest.hash()); return false; } auto json = nlohmann::json::object(); for (auto const& [path, entry] : *tree) { json[path] = Artifact::ObjectInfo{ .digest = ArtifactDigest{entry->Hash(), /*size*/ 0, entry->IsTree()}, .type = entry->Type(), .failed = false} .ToString(/*size_unknown*/ true); } auto msg = json.dump(2) + "\n"; if (gsl::owner out = fdopen(fd, "wb")) { // NOLINT std::fwrite(msg.data(), 1, msg.size(), out); std::fclose(out); } else { Logger::Log(LogLevel::Error, "dumping to file descriptor {} failed.", fd); return false; } } else { auto blob = repo_config_->ReadBlobFromGitCAS(info.digest.hash()); if (not blob) { Logger::Log(LogLevel::Debug, "Blob {} not known to git", info.digest.hash()); return false; } auto msg = *blob; if (gsl::owner out = fdopen(fd, "wb")) { // NOLINT std::fwrite(msg.data(), 1, msg.size(), out); std::fclose(out); } else { Logger::Log(LogLevel::Error, "dumping to file descriptor {} failed.", fd); return false; } } } return true; } // NOLINTNEXTLINE(misc-no-recursion) [[nodiscard]] auto RetrieveToCas( std::vector const& artifacts_info, gsl::not_null const& api) noexcept -> bool override { // Return immediately if target CAS is this CAS if (this == api) { return true; } // Determine missing artifacts in other CAS. auto missing_artifacts_info = GetMissingArtifactsInfo( api, artifacts_info.begin(), artifacts_info.end(), [](Artifact::ObjectInfo const& info) { return info.digest; }); if (not missing_artifacts_info) { Logger::Log(LogLevel::Error, "GitApi: Failed to retrieve the missing artifacts"); return false; } // Collect blobs of missing artifacts from local CAS. Trees are // processed recursively before any blob is uploaded. 
ArtifactBlobContainer container{}; for (auto const& dgst : missing_artifacts_info->digests) { auto const& info = missing_artifacts_info->back_map[dgst]; std::optional content; // Recursively process trees. if (IsTreeObject(info.type)) { auto tree = repo_config_->ReadTreeFromGitCAS(info.digest.hash()); if (not tree) { return false; } ArtifactBlobContainer tree_deps_only_blobs{}; for (auto const& [path, entry] : *tree) { if (entry->IsTree()) { if (not RetrieveToCas( {Artifact::ObjectInfo{ .digest = ArtifactDigest{entry->Hash(), /*size*/ 0, entry->IsTree()}, .type = entry->Type(), .failed = false}}, api)) { return false; } } else { auto const& entry_content = entry->RawData(); if (not entry_content) { return false; } auto digest = ArtifactDigest::Create( *entry_content); // Collect blob and upload to remote CAS if transfer // size reached. if (not UpdateContainerAndUpload( &tree_deps_only_blobs, ArtifactBlob{std::move(digest), *entry_content, IsExecutableObject(entry->Type())}, /*exception_is_fatal=*/true, [&api](ArtifactBlobContainer&& blobs) -> bool { return api->Upload(std::move(blobs)); })) { return false; } } } // Upload remaining blobs. if (not api->Upload(std::move(tree_deps_only_blobs))) { return false; } content = tree->RawData(); } else { content = repo_config_->ReadBlobFromGitCAS(info.digest.hash()); } if (not content) { return false; } ArtifactDigest digest = IsTreeObject(info.type) ? ArtifactDigest::Create(*content) : ArtifactDigest::Create(*content); // Collect blob and upload to remote CAS if transfer size reached. if (not UpdateContainerAndUpload( &container, ArtifactBlob{std::move(digest), std::move(*content), IsExecutableObject(info.type)}, /*exception_is_fatal=*/true, [&api](ArtifactBlobContainer&& blobs) { return api->Upload(std::move(blobs), /*skip_find_missing=*/true); })) { return false; } } // Upload remaining blobs to remote CAS. 
return api->Upload(std::move(container), /*skip_find_missing=*/true); } [[nodiscard]] auto RetrieveToMemory( Artifact::ObjectInfo const& artifact_info) noexcept -> std::optional override { return repo_config_->ReadBlobFromGitCAS(artifact_info.digest.hash()); } /// NOLINTNEXTLINE(google-default-arguments) [[nodiscard]] auto Upload(ArtifactBlobContainer&& /*blobs*/, bool /*skip_find_missing*/ = false) noexcept -> bool override { // Upload to git cas not supported return false; } [[nodiscard]] auto UploadTree( std::vector const& /*artifacts*/) noexcept -> std::optional override { // Upload to git cas not supported return std::nullopt; } [[nodiscard]] auto IsAvailable(ArtifactDigest const& digest) const noexcept -> bool override { return repo_config_->ReadBlobFromGitCAS(digest.hash()).has_value(); } [[nodiscard]] auto IsAvailable(std::vector const& digests) const noexcept -> std::vector override { std::vector result; for (auto const& digest : digests) { if (not IsAvailable(digest)) { result.push_back(digest); } } return result; } private: gsl::not_null repo_config_; }; #endif